language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__flink | flink-kubernetes/src/main/java/org/apache/flink/kubernetes/KubernetesResourceManagerDriver.java | {
"start": 21934,
"end": 22187
} | class ____ extends FlinkException {
private static final long serialVersionUID = 1L;
RetryableException(String message) {
super(message);
}
}
/** Internal type of the pod event. */
private | RetryableException |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/ManagedChannelImpl.java | {
"start": 54565,
"end": 55820
} | class ____ implements Runnable {
@Override
public void run() {
if (terminating) {
oobChannel.shutdown();
}
if (!terminated) {
// If channel has not terminated, it will track the subchannel and block termination
// for it.
oobChannels.add(oobChannel);
}
}
}
syncContext.execute(new AddOobChannel());
return oobChannel;
}
@Deprecated
@Override
public ManagedChannelBuilder<?> createResolvingOobChannelBuilder(String target) {
return createResolvingOobChannelBuilder(target, new DefaultChannelCreds())
// Override authority to keep the old behavior.
// createResolvingOobChannelBuilder(String target) will be deleted soon.
.overrideAuthority(getAuthority());
}
// TODO(creamsoup) prevent main channel to shutdown if oob channel is not terminated
// TODO(zdapeng) register the channel as a subchannel of the parent channel in channelz.
@Override
public ManagedChannelBuilder<?> createResolvingOobChannelBuilder(
final String target, final ChannelCredentials channelCreds) {
checkNotNull(channelCreds, "channelCreds");
final | AddOobChannel |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-jsonb/runtime/src/main/java/io/quarkus/resteasy/jsonb/vertx/VertxJson.java | {
"start": 879,
"end": 4221
} | class ____ {
private VertxJson() {
// avoid direct instantiation
}
private final static Base64.Encoder BASE64_ENCODER = Base64.getUrlEncoder().withoutPadding();
public static void copy(JsonObject object, jakarta.json.JsonObject origin) {
origin.keySet().forEach(key -> {
JsonValue value = origin.get(key);
JsonValue.ValueType kind = value.getValueType();
switch (kind) {
case STRING:
object.put(key, origin.getString(key));
break;
case NULL:
object.putNull(key);
break;
case TRUE:
object.put(key, true);
break;
case FALSE:
object.put(key, false);
break;
case NUMBER:
JsonNumber number = origin.getJsonNumber(key);
if (number.isIntegral()) {
object.put(key, number.longValue());
} else {
object.put(key, number.doubleValue());
}
break;
case ARRAY:
JsonArray array = new JsonArray();
copy(array, origin.getJsonArray(key));
object.put(key, array);
break;
case OBJECT:
JsonObject json = new JsonObject();
copy(json, origin.getJsonObject(key));
object.put(key, json);
break;
default:
throw new IllegalArgumentException("Unknown JSON Value " + kind);
}
});
}
public static void copy(JsonArray array, jakarta.json.JsonArray origin) {
for (int i = 0; i < origin.size(); i++) {
JsonValue value = origin.get(i);
JsonValue.ValueType kind = value.getValueType();
switch (kind) {
case STRING:
array.add(origin.getString(i));
break;
case TRUE:
array.add(true);
break;
case FALSE:
array.add(false);
break;
case NULL:
array.addNull();
break;
case NUMBER:
JsonNumber number = origin.getJsonNumber(i);
if (number.isIntegral()) {
array.add(number.longValue());
} else {
array.add(number.doubleValue());
}
break;
case ARRAY:
JsonArray newArray = new JsonArray();
copy(newArray, origin.getJsonArray(i));
array.add(newArray);
break;
case OBJECT:
JsonObject newObject = new JsonObject();
copy(newObject, origin.getJsonObject(i));
array.add(newObject);
break;
default:
throw new IllegalArgumentException("Unknown JSON Value " + kind);
}
}
}
public static | VertxJson |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java | {
"start": 35385,
"end": 38690
} | enum ____ {
ONE,
TWO,
THREE
}
public void testEnum() throws IOException {
TestEnum value = randomFrom(TestEnum.values());
TestStreamOutput output = new TestStreamOutput();
output.writeEnum(value);
StreamInput input = output.bytes().streamInput();
assertEquals(value, input.readEnum(TestEnum.class));
assertEquals(0, input.available());
}
public void testInvalidEnum() throws IOException {
TestStreamOutput output = new TestStreamOutput();
int randomNumber = randomInt();
boolean validEnum = randomNumber >= 0 && randomNumber < TestEnum.values().length;
output.writeVInt(randomNumber);
StreamInput input = output.bytes().streamInput();
if (validEnum) {
assertEquals(TestEnum.values()[randomNumber], input.readEnum(TestEnum.class));
} else {
IOException ex = expectThrows(IOException.class, () -> input.readEnum(TestEnum.class));
assertEquals("Unknown TestEnum ordinal [" + randomNumber + "]", ex.getMessage());
}
assertEquals(0, input.available());
}
private static void assertEqualityAfterSerialize(TimeValue value, int expectedSize) throws IOException {
TestStreamOutput out = new TestStreamOutput();
out.writeTimeValue(value);
assertEquals(expectedSize, out.size());
StreamInput in = out.bytes().streamInput();
TimeValue inValue = in.readTimeValue();
assertThat(inValue, equalTo(value));
assertThat(inValue.duration(), equalTo(value.duration()));
assertThat(inValue.timeUnit(), equalTo(value.timeUnit()));
}
public void testTimeValueSerialize() throws Exception {
assertEqualityAfterSerialize(new TimeValue(100, TimeUnit.DAYS), 3);
assertEqualityAfterSerialize(TimeValue.timeValueNanos(-1), 2);
assertEqualityAfterSerialize(TimeValue.timeValueNanos(1), 2);
assertEqualityAfterSerialize(TimeValue.timeValueSeconds(30), 2);
final TimeValue timeValue = new TimeValue(randomIntBetween(0, 1024), randomFrom(TimeUnit.values()));
TestStreamOutput out = new TestStreamOutput();
out.writeZLong(timeValue.duration());
assertEqualityAfterSerialize(timeValue, 1 + out.bytes().length());
}
public void testTimeValueInterning() throws IOException {
try (var bytesOut = new BytesStreamOutput()) {
bytesOut.writeTimeValue(randomBoolean() ? TimeValue.MINUS_ONE : new TimeValue(-1, TimeUnit.MILLISECONDS));
bytesOut.writeTimeValue(randomBoolean() ? TimeValue.ZERO : new TimeValue(0, TimeUnit.MILLISECONDS));
bytesOut.writeTimeValue(randomBoolean() ? TimeValue.THIRTY_SECONDS : new TimeValue(30, TimeUnit.SECONDS));
bytesOut.writeTimeValue(randomBoolean() ? TimeValue.ONE_MINUTE : new TimeValue(1, TimeUnit.MINUTES));
try (var in = bytesOut.bytes().streamInput()) {
assertSame(TimeValue.MINUS_ONE, in.readTimeValue());
assertSame(TimeValue.ZERO, in.readTimeValue());
assertSame(TimeValue.THIRTY_SECONDS, in.readTimeValue());
assertSame(TimeValue.ONE_MINUTE, in.readTimeValue());
}
}
}
private static | TestEnum |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/service/spi/DuplicationStrategy.java | {
"start": 371,
"end": 480
} | interface ____ {
/**
* The enumerated list of actions available on duplication match
*/
| DuplicationStrategy |
java | elastic__elasticsearch | libs/geo/src/main/java/org/elasticsearch/geometry/MultiPolygon.java | {
"start": 571,
"end": 1041
} | class ____ extends GeometryCollection<Polygon> {
public static final MultiPolygon EMPTY = new MultiPolygon();
private MultiPolygon() {}
public MultiPolygon(List<Polygon> polygons) {
super(polygons);
}
@Override
public ShapeType type() {
return ShapeType.MULTIPOLYGON;
}
@Override
public <T, E extends Exception> T visit(GeometryVisitor<T, E> visitor) throws E {
return visitor.visit(this);
}
}
| MultiPolygon |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableScalarXMapTest.java | {
"start": 1370,
"end": 1754
} | class ____ implements ObservableSource<Integer>, Supplier<Integer> {
@Override
public void subscribe(Observer<? super Integer> observer) {
EmptyDisposable.error(new TestException(), observer);
}
@Override
public Integer get() throws Exception {
throw new TestException();
}
}
static final | CallablePublisher |
java | apache__camel | components/camel-barcode/src/test/java/org/apache/camel/dataformat/barcode/BarcodeDataFormatTest.java | {
"start": 7593,
"end": 7951
} | class ____.
*/
@Test
final void testGetParams() throws IOException {
try (BarcodeDataFormat instance = new BarcodeDataFormat()) {
instance.start();
BarcodeParameters result = instance.getParams();
assertNotNull(result);
}
}
/**
* Test of getWriterHintMap method, of | BarcodeDataFormat |
java | quarkusio__quarkus | extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/runtime/MongoClients.java | {
"start": 12674,
"end": 13369
} | class ____ implements Block<SocketSettings.Builder> {
public SocketSettingsBuilder(MongoClientConfig config) {
this.config = config;
}
private final MongoClientConfig config;
@Override
public void apply(SocketSettings.Builder builder) {
if (config.connectTimeout().isPresent()) {
builder.connectTimeout((int) config.connectTimeout().get().toMillis(), TimeUnit.MILLISECONDS);
}
if (config.readTimeout().isPresent()) {
builder.readTimeout((int) config.readTimeout().get().toMillis(), TimeUnit.MILLISECONDS);
}
}
}
private static | SocketSettingsBuilder |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ClearDeploymentCacheAction.java | {
"start": 929,
"end": 1308
} | class ____ extends ActionType<ClearDeploymentCacheAction.Response> {
public static final ClearDeploymentCacheAction INSTANCE = new ClearDeploymentCacheAction();
public static final String NAME = "cluster:admin/xpack/ml/trained_models/deployment/clear_cache";
private ClearDeploymentCacheAction() {
super(NAME);
}
public static | ClearDeploymentCacheAction |
java | netty__netty | buffer/src/main/java/io/netty/buffer/package-info.java | {
"start": 2138,
"end": 6209
} | interface ____ than
* introducing an incompatible type.
*
* <h3>Transparent Zero Copy</h3>
*
* To lift up the performance of a network application to the extreme, you need
* to reduce the number of memory copy operation. You might have a set of
* buffers that could be sliced and combined to compose a whole message. Netty
* provides a composite buffer which allows you to create a new buffer from the
* arbitrary number of existing buffers with no memory copy. For example, a
* message could be composed of two parts; header and body. In a modularized
* application, the two parts could be produced by different modules and
* assembled later when the message is sent out.
* <pre>
* +--------+----------+
* | header | body |
* +--------+----------+
* </pre>
* If {@link java.nio.ByteBuffer} were used, you would have to create a new big
* buffer and copy the two parts into the new buffer. Alternatively, you can
* perform a gathering write operation in NIO, but it restricts you to represent
* the composite of buffers as an array of {@link java.nio.ByteBuffer}s rather
* than a single buffer, breaking the abstraction and introducing complicated
* state management. Moreover, it's of no use if you are not going to read or
* write from an NIO channel.
* <pre>
* // The composite type is incompatible with the component type.
* ByteBuffer[] message = new ByteBuffer[] { header, body };
* </pre>
* By contrast, {@link io.netty.buffer.ByteBuf} does not have such
* caveats because it is fully extensible and has a built-in composite buffer
* type.
* <pre>
* // The composite type is compatible with the component type.
* {@link io.netty.buffer.ByteBuf} message = {@link io.netty.buffer.Unpooled}.wrappedBuffer(header, body);
*
* // Therefore, you can even create a composite by mixing a composite and an
* // ordinary buffer.
* {@link io.netty.buffer.ByteBuf} messageWithFooter = {@link io.netty.buffer.Unpooled}.wrappedBuffer(message, footer);
*
* // Because the composite is still a {@link io.netty.buffer.ByteBuf}, you can access its content
* // easily, and the accessor method will behave just like it's a single buffer
* // even if the region you want to access spans over multiple components. The
* // unsigned integer being read here is located across body and footer.
* messageWithFooter.getUnsignedInt(
* messageWithFooter.readableBytes() - footer.readableBytes() - 1);
* </pre>
*
* <h3>Automatic Capacity Extension</h3>
*
* Many protocols define variable length messages, which means there's no way to
* determine the length of a message until you construct the message or it is
* difficult and inconvenient to calculate the length precisely. It is just
* like when you build a {@link java.lang.String}. You often estimate the length
* of the resulting string and let {@link java.lang.StringBuffer} expand itself
* on demand.
* <pre>
* // A new dynamic buffer is created. Internally, the actual buffer is created
* // lazily to avoid potentially wasted memory space.
* {@link io.netty.buffer.ByteBuf} b = {@link io.netty.buffer.Unpooled}.buffer(4);
*
* // When the first write attempt is made, the internal buffer is created with
* // the specified initial capacity (4).
* b.writeByte('1');
*
* b.writeByte('2');
* b.writeByte('3');
* b.writeByte('4');
*
* // When the number of written bytes exceeds the initial capacity (4), the
* // internal buffer is reallocated automatically with a larger capacity.
* b.writeByte('5');
* </pre>
*
* <h3>Better Performance</h3>
*
* Most frequently used buffer implementation of
* {@link io.netty.buffer.ByteBuf} is a very thin wrapper of a
* byte array (i.e. {@code byte[]}). Unlike {@link java.nio.ByteBuffer}, it has
* no complicated boundary check and index compensation, and therefore it is
* easier for a JVM to optimize the buffer access. More complicated buffer
* implementation is used only for sliced or composite buffers, and it performs
* as well as {@link java.nio.ByteBuffer}.
*/
package io.netty.buffer;
| rather |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/materializedtable/CreateMaterializedTableOperation.java | {
"start": 1508,
"end": 2891
} | class ____
implements CreateOperation, MaterializedTableOperation {
private final ObjectIdentifier tableIdentifier;
private final ResolvedCatalogMaterializedTable materializedTable;
public CreateMaterializedTableOperation(
ObjectIdentifier tableIdentifier, ResolvedCatalogMaterializedTable materializedTable) {
this.tableIdentifier = tableIdentifier;
this.materializedTable = materializedTable;
}
@Override
public TableResultInternal execute(Context ctx) {
// create materialized table in catalog
ctx.getCatalogManager().createTable(materializedTable, tableIdentifier, false);
return TableResultImpl.TABLE_RESULT_OK;
}
public ObjectIdentifier getTableIdentifier() {
return tableIdentifier;
}
public ResolvedCatalogMaterializedTable getCatalogMaterializedTable() {
return materializedTable;
}
@Override
public String asSummaryString() {
Map<String, Object> params = new LinkedHashMap<>();
params.put("materializedTable", materializedTable);
params.put("identifier", tableIdentifier);
return OperationUtils.formatWithChildren(
"CREATE MATERIALIZED TABLE",
params,
Collections.emptyList(),
Operation::asSummaryString);
}
}
| CreateMaterializedTableOperation |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/JavaUserDefinedAggFunctions.java | {
"start": 2280,
"end": 2989
} | class ____ extends AggregateFunction<Long, VarSumAcc> {
@Override
public VarSumAcc createAccumulator() {
return new VarSumAcc();
}
public void accumulate(VarSumAcc acc, Integer... args) {
for (Integer x : args) {
if (x != null) {
acc.sum += x.longValue();
}
}
}
@Override
public Long getValue(VarSumAcc accumulator) {
return accumulator.sum;
}
@Override
public TypeInformation<Long> getResultType() {
return Types.LONG;
}
}
/**
* Only used for test. The difference between the | VarSum1AggFunction |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/grpc/auto/BiRequestStreamGrpc.java | {
"start": 8986,
"end": 9159
} | class ____
extends BiRequestStreamBaseDescriptorSupplier {
BiRequestStreamFileDescriptorSupplier() {}
}
private static final | BiRequestStreamFileDescriptorSupplier |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/SpringBootContextLoaderTests.java | {
"start": 17403,
"end": 17550
} | class ____ extends AnnotationConfigApplicationContext {
}
@Configuration(proxyBeanMethods = false)
static | CustomAnnotationConfigApplicationContext |
java | spring-projects__spring-security | crypto/src/main/java/org/springframework/security/crypto/password/Pbkdf2PasswordEncoder.java | {
"start": 9138,
"end": 9244
} | enum ____ {
PBKDF2WithHmacSHA1, PBKDF2WithHmacSHA256, PBKDF2WithHmacSHA512
}
}
| SecretKeyFactoryAlgorithm |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetTransformAction.java | {
"start": 5271,
"end": 10116
} | class ____ implements Writeable, ToXContentObject {
private static final ParseField TYPE = new ParseField("type");
private static final ParseField REASON = new ParseField("reason");
private final String type;
private final String reason;
public Error(String type, String reason) {
this.type = Objects.requireNonNull(type);
this.reason = Objects.requireNonNull(reason);
}
public Error(StreamInput in) throws IOException {
this.type = in.readString();
this.reason = in.readString();
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(TYPE.getPreferredName(), type);
builder.field(REASON.getPreferredName(), reason);
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(type);
out.writeString(reason);
}
}
public static final String INVALID_TRANSFORMS_DEPRECATION_WARNING = "Found [{}] invalid transforms";
private static final ParseField INVALID_TRANSFORMS = new ParseField("invalid_transforms");
private static final ParseField ERRORS = new ParseField("errors");
private final List<Error> errors;
public Response(List<TransformConfig> transformConfigs, long count, List<Error> errors) {
super(new QueryPage<>(transformConfigs, count, TransformField.TRANSFORMS));
this.errors = errors;
}
public Response(StreamInput in) throws IOException {
super(in);
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
if (in.readBoolean()) {
this.errors = in.readCollectionAsList(Error::new);
} else {
this.errors = null;
}
} else {
this.errors = null;
}
}
public List<TransformConfig> getTransformConfigurations() {
return getResources().results();
}
public long getTransformConfigurationCount() {
return getResources().count();
}
public List<Error> getErrors() {
return errors;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
List<String> invalidTransforms = new ArrayList<>();
builder.startObject();
builder.field(TransformField.COUNT.getPreferredName(), getResources().count());
// XContentBuilder does not support passing the params object for Iterables
builder.field(TransformField.TRANSFORMS.getPreferredName());
builder.startArray();
for (TransformConfig configResponse : getResources().results()) {
configResponse.toXContent(builder, params);
ValidationException validationException = configResponse.validate(null);
if (validationException != null) {
invalidTransforms.add(configResponse.getId());
}
}
builder.endArray();
if (invalidTransforms.isEmpty() == false) {
builder.startObject(INVALID_TRANSFORMS.getPreferredName());
builder.field(TransformField.COUNT.getPreferredName(), invalidTransforms.size());
builder.field(TransformField.TRANSFORMS.getPreferredName(), invalidTransforms);
builder.endObject();
deprecationLogger.warn(
DeprecationCategory.OTHER,
"invalid_transforms",
INVALID_TRANSFORMS_DEPRECATION_WARNING,
invalidTransforms.size()
);
}
if (errors != null) {
builder.field(ERRORS.getPreferredName(), errors);
}
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_1_0)) {
if (errors != null) {
out.writeBoolean(true);
out.writeCollection(errors);
} else {
out.writeBoolean(false);
}
}
}
@Override
protected Reader<TransformConfig> getReader() {
return TransformConfig::new;
}
}
}
| Error |
java | elastic__elasticsearch | x-pack/plugin/identity-provider/src/test/java/org/elasticsearch/xpack/idp/saml/authn/SuccessfulAuthenticationResponseMessageBuilderTests.java | {
"start": 1472,
"end": 7769
} | class ____ extends IdpSamlTestCase {
private SamlIdentityProvider idp;
private XmlValidator validator;
private SamlFactory samlFactory;
@Before
public void setupSaml() throws Exception {
SamlInit.initialize();
samlFactory = new SamlFactory();
validator = new XmlValidator("saml-schema-protocol-2.0.xsd");
idp = mock(SamlIdentityProvider.class);
when(idp.getEntityId()).thenReturn("https://cloud.elastic.co/saml/idp");
when(idp.getSigningCredential()).thenReturn(readCredentials("RSA", 2048));
when(idp.getServiceProviderDefaults()).thenReturn(new ServiceProviderDefaults("elastic-cloud", TRANSIENT, Duration.ofMinutes(5)));
}
public void testSignedResponseIsValidAgainstXmlSchema() throws Exception {
final Response response = buildResponse(null);
final String xml = super.toString(response);
assertThat(xml, containsString("SignedInfo>"));
validator.validate(xml);
}
public void testSignedResponseWithCustomAttributes() throws Exception {
// Create custom attributes
Map<String, List<String>> attributeMap = new HashMap<>();
attributeMap.put("customAttr1", Collections.singletonList("value1"));
List<String> multipleValues = new ArrayList<>();
multipleValues.add("value2A");
multipleValues.add("value2B");
attributeMap.put("customAttr2", multipleValues);
SamlInitiateSingleSignOnAttributes attributes = new SamlInitiateSingleSignOnAttributes(attributeMap);
// Build response with custom attributes
final Response response = buildResponse(attributes);
final String xml = super.toString(response);
// Validate that response is correctly signed
assertThat(xml, containsString("SignedInfo>"));
validator.validate(xml);
// Verify custom attributes are included
boolean foundCustomAttr1 = false;
boolean foundCustomAttr2 = false;
for (AttributeStatement statement : response.getAssertions().get(0).getAttributeStatements()) {
for (Attribute attribute : statement.getAttributes()) {
String name = attribute.getName();
if (name.equals("customAttr1")) {
foundCustomAttr1 = true;
assertEquals(1, attribute.getAttributeValues().size());
assertThat(attribute.getAttributeValues().get(0).getDOM().getTextContent(), containsString("value1"));
} else if (name.equals("customAttr2")) {
foundCustomAttr2 = true;
assertEquals(2, attribute.getAttributeValues().size());
assertThat(attribute.getAttributeValues().get(0).getDOM().getTextContent(), containsString("value2A"));
assertThat(attribute.getAttributeValues().get(1).getDOM().getTextContent(), containsString("value2B"));
}
}
}
assertTrue("Custom attribute 'customAttr1' not found in SAML response", foundCustomAttr1);
assertTrue("Custom attribute 'customAttr2' not found in SAML response", foundCustomAttr2);
}
public void testRejectInvalidCustomAttributes() throws Exception {
final var customAttributes = new SamlInitiateSingleSignOnAttributes(
Map.of("https://idp.example.org/attribute/department", Collections.singletonList("engineering"))
);
// Build response with custom attributes
final IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
() -> buildResponse(
new SamlServiceProvider.AttributeNames(
"https://idp.example.org/attribute/principal",
null,
null,
null,
Set.of("https://idp.example.org/attribute/avatar")
),
customAttributes
)
);
assertThat(ex.getMessage(), containsString("custom attribute [https://idp.example.org/attribute/department]"));
assertThat(ex.getMessage(), containsString("allowed attribute names are [https://idp.example.org/attribute/avatar]"));
}
private Response buildResponse(@Nullable SamlInitiateSingleSignOnAttributes customAttributes) throws Exception {
return buildResponse(
new SamlServiceProvider.AttributeNames(
"principal",
null,
null,
null,
customAttributes == null ? Set.of() : customAttributes.getAttributes().keySet()
),
customAttributes
);
}
private Response buildResponse(
final SamlServiceProvider.AttributeNames attributes,
@Nullable SamlInitiateSingleSignOnAttributes customAttributes
) throws Exception {
final Clock clock = Clock.systemUTC();
final SamlServiceProvider sp = mock(SamlServiceProvider.class);
final String baseServiceUrl = "https://" + randomAlphaOfLength(32) + ".us-east-1.aws.found.io/";
final String acs = baseServiceUrl + "api/security/saml/callback";
when(sp.getEntityId()).thenReturn(baseServiceUrl);
when(sp.getAssertionConsumerService()).thenReturn(URI.create(acs).toURL());
when(sp.getAuthnExpiry()).thenReturn(Duration.ofMinutes(10));
when(sp.getAttributeNames()).thenReturn(attributes);
final UserServiceAuthentication user = mock(UserServiceAuthentication.class);
when(user.getPrincipal()).thenReturn(randomAlphaOfLengthBetween(4, 12));
when(user.getRoles()).thenReturn(Set.of(randomArray(1, 5, String[]::new, () -> randomAlphaOfLengthBetween(4, 12))));
when(user.getEmail()).thenReturn(randomAlphaOfLength(8) + "@elastic.co");
when(user.getName()).thenReturn(randomAlphaOfLength(6) + " " + randomAlphaOfLength(8));
when(user.getServiceProvider()).thenReturn(sp);
final SuccessfulAuthenticationResponseMessageBuilder builder = new SuccessfulAuthenticationResponseMessageBuilder(
samlFactory,
clock,
idp
);
return builder.build(user, null, customAttributes);
}
}
| SuccessfulAuthenticationResponseMessageBuilderTests |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/extraction/ExtractionUtils.java | {
"start": 34969,
"end": 37907
} | class ____ has not the required information
parameterNames =
extractedNames.subList(
offset,
Math.min(
executable.getParameterCount() + offset,
extractedNames.size()));
}
if (parameterNames.size() != executable.getParameterCount()) {
return null;
}
return parameterNames;
}
private static ClassReader getClassReader(Class<?> cls) {
final String className = cls.getName().replaceFirst("^.*\\.", "") + ".class";
try (InputStream i = cls.getResourceAsStream(className)) {
return new ClassReader(i);
} catch (IOException e) {
throw new IllegalStateException("Could not instantiate ClassReader.", e);
}
}
/**
* Extracts the parameter names and descriptors from a constructor or method. Assuming the
* existence of a local variable table.
*
* <p>For example:
*
* <pre>{@code
* public WC(java.lang.String arg0, long arg1) { // <init> //(Ljava/lang/String;J)V
* <localVar:index=0 , name=this , desc=Lorg/apache/flink/WC;, sig=null, start=L1, end=L2>
* <localVar:index=1 , name=word , desc=Ljava/lang/String;, sig=null, start=L1, end=L2>
* <localVar:index=2 , name=frequency , desc=J, sig=null, start=L1, end=L2>
* <localVar:index=2 , name=otherLocal , desc=J, sig=null, start=L1, end=L2>
* <localVar:index=2 , name=otherLocal2 , desc=J, sig=null, start=L1, end=L2>
* }
* }</pre>
*
* <p>If a constructor or method has multiple identical local variables that are not initialized
* like:
*
* <pre>{@code
* String localVariable;
* if (generic == null) {
* localVariable = "null";
* } else if (generic < 0) {
* localVariable = "negative";
* } else if (generic > 0) {
* localVariable = "positive";
* } else {
* localVariable = "zero";
* }
* }</pre>
*
* <p>Its local variable table is as follows:
*
* <pre>{@code
* Start Length Slot Name Signature
* 7 3 2 localVariable Ljava/lang/String;
* 22 3 2 localVariable Ljava/lang/String;
* 37 3 2 localVariable Ljava/lang/String;
* 0 69 0 this ...;
* 0 69 1 generic Ljava/lang/Long;
* 43 26 2 localVariable Ljava/lang/String;
* }</pre>
*
* <p>The method parameters are always at the head in the 'slot' list.
*
* <p>NOTE: the first parameter may be "this" if the function is not static. See more at <a
* href="https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-3.html">3.6. Receiving
* Arguments</a>
*/
private static | file |
java | apache__camel | components/camel-resilience4j/src/test/java/org/apache/camel/component/resilience4j/SpringResilienceRouteFallbackTest.java | {
"start": 1290,
"end": 2339
} | class ____ extends CamelSpringTestSupport {
@Override
protected AbstractApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"org/apache/camel/component/resilience4j/SpringResilienceRouteFallbackTest.xml");
}
@Test
public void testResilience() throws Exception {
test("direct:start");
}
@Test
public void testResilienceWithTimeOut() throws Exception {
test("direct:start.with.timeout.enabled");
}
private void test(String endPointUri) throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceived("Fallback message");
getMockEndpoint("mock:result").expectedPropertyReceived(CircuitBreakerConstants.RESPONSE_SUCCESSFUL_EXECUTION, false);
getMockEndpoint("mock:result").expectedPropertyReceived(CircuitBreakerConstants.RESPONSE_FROM_FALLBACK, true);
template.sendBody(endPointUri, "Hello World");
MockEndpoint.assertIsSatisfied(context);
}
}
| SpringResilienceRouteFallbackTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsPhase.java | {
"start": 1381,
"end": 4936
} | class ____ implements FetchSubPhase {
private final FetchPhase fetchPhase;
public InnerHitsPhase(FetchPhase fetchPhase) {
this.fetchPhase = fetchPhase;
}
@Override
public FetchSubPhaseProcessor getProcessor(FetchContext searchContext) {
if (searchContext.innerHits() == null || searchContext.innerHits().getInnerHits().isEmpty()) {
return null;
}
Map<String, InnerHitsContext.InnerHitSubContext> innerHits = searchContext.innerHits().getInnerHits();
StoredFieldsSpec storedFieldsSpec = new StoredFieldsSpec(requiresSource(innerHits.values()), false, Set.of());
return new FetchSubPhaseProcessor() {
@Override
public void setNextReader(LeafReaderContext readerContext) {
}
@Override
public StoredFieldsSpec storedFieldsSpec() {
return storedFieldsSpec;
}
@Override
public void process(HitContext hitContext) throws IOException {
SearchHit hit = hitContext.hit();
Source rootSource = searchContext.getRootSource(hitContext);
hitExecute(innerHits, hit, rootSource);
}
};
}
private static boolean requiresSource(Collection<? extends SearchContext> subContexts) {
boolean requiresSource = false;
for (SearchContext sc : subContexts) {
requiresSource |= sc.sourceRequested();
requiresSource |= sc.fetchFieldsContext() != null;
requiresSource |= sc.highlight() != null;
}
return requiresSource;
}
private void hitExecute(Map<String, InnerHitsContext.InnerHitSubContext> innerHits, SearchHit hit, Source rootSource)
throws IOException {
for (Map.Entry<String, InnerHitsContext.InnerHitSubContext> entry : innerHits.entrySet()) {
InnerHitsContext.InnerHitSubContext innerHitsContext = entry.getValue();
TopDocsAndMaxScore topDoc = innerHitsContext.topDocs(hit);
Map<String, SearchHits> results = hit.getInnerHits();
if (results == null) {
hit.setInnerHits(results = new HashMap<>());
}
innerHitsContext.queryResult().topDocs(topDoc, innerHitsContext.sort() == null ? null : innerHitsContext.sort().formats);
int[] docIdsToLoad = new int[topDoc.topDocs.scoreDocs.length];
for (int j = 0; j < topDoc.topDocs.scoreDocs.length; j++) {
docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc;
}
innerHitsContext.setRootId(hit.getId());
innerHitsContext.setRootLookup(rootSource);
fetchPhase.execute(innerHitsContext, docIdsToLoad, null);
FetchSearchResult fetchResult = innerHitsContext.fetchResult();
SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();
for (int j = 0; j < internalHits.length; j++) {
ScoreDoc scoreDoc = topDoc.topDocs.scoreDocs[j];
SearchHit searchHitFields = internalHits[j];
searchHitFields.score(scoreDoc.score);
if (scoreDoc instanceof FieldDoc fieldDoc) {
searchHitFields.sortValues(fieldDoc.fields, innerHitsContext.sort().formats);
}
}
var h = fetchResult.hits();
assert hit.isPooled() || h.isPooled() == false;
results.put(entry.getKey(), h);
h.mustIncRef();
}
}
}
| InnerHitsPhase |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/security/authenticator/SaslClientCallbackHandler.java | {
"start": 2265,
"end": 5563
} | class ____ implements AuthenticateCallbackHandler {
private String mechanism;
@Override
public void configure(Map<String, ?> configs, String saslMechanism, List<AppConfigurationEntry> jaasConfigEntries) {
this.mechanism = saslMechanism;
}
@Override
public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
Subject subject = SecurityManagerCompatibility.get().current();
for (Callback callback : callbacks) {
if (callback instanceof NameCallback) {
NameCallback nc = (NameCallback) callback;
if (subject != null && !subject.getPublicCredentials(String.class).isEmpty()) {
nc.setName(subject.getPublicCredentials(String.class).iterator().next());
} else
nc.setName(nc.getDefaultName());
} else if (callback instanceof PasswordCallback) {
if (subject != null && !subject.getPrivateCredentials(String.class).isEmpty()) {
char[] password = subject.getPrivateCredentials(String.class).iterator().next().toCharArray();
((PasswordCallback) callback).setPassword(password);
} else {
String errorMessage = "Could not login: the client is being asked for a password, but the Kafka" +
" client code does not currently support obtaining a password from the user.";
throw new UnsupportedCallbackException(callback, errorMessage);
}
} else if (callback instanceof RealmCallback) {
RealmCallback rc = (RealmCallback) callback;
rc.setText(rc.getDefaultText());
} else if (callback instanceof AuthorizeCallback) {
AuthorizeCallback ac = (AuthorizeCallback) callback;
String authId = ac.getAuthenticationID();
String authzId = ac.getAuthorizationID();
ac.setAuthorized(authId.equals(authzId));
if (ac.isAuthorized())
ac.setAuthorizedID(authzId);
} else if (callback instanceof ScramExtensionsCallback) {
if (ScramMechanism.isScram(mechanism) && subject != null && !subject.getPublicCredentials(Map.class).isEmpty()) {
@SuppressWarnings("unchecked")
Map<String, String> extensions = (Map<String, String>) subject.getPublicCredentials(Map.class).iterator().next();
((ScramExtensionsCallback) callback).extensions(extensions);
}
} else if (callback instanceof SaslExtensionsCallback) {
if (!SaslConfigs.GSSAPI_MECHANISM.equals(mechanism) &&
subject != null && !subject.getPublicCredentials(SaslExtensions.class).isEmpty()) {
SaslExtensions extensions = subject.getPublicCredentials(SaslExtensions.class).iterator().next();
((SaslExtensionsCallback) callback).extensions(extensions);
}
} else {
throw new UnsupportedCallbackException(callback, "Unrecognized SASL ClientCallback");
}
}
}
@Override
public void close() {
}
}
| SaslClientCallbackHandler |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/config/DefaultJcaListenerContainerFactory.java | {
"start": 1227,
"end": 3873
} | class ____ extends JmsActivationSpecConfig
implements JmsListenerContainerFactory<JmsMessageEndpointManager> {
private @Nullable ResourceAdapter resourceAdapter;
private @Nullable JmsActivationSpecFactory activationSpecFactory;
private @Nullable DestinationResolver destinationResolver;
private @Nullable Object transactionManager;
private @Nullable Integer phase;
/**
* @see JmsMessageEndpointManager#setResourceAdapter(ResourceAdapter)
*/
public void setResourceAdapter(ResourceAdapter resourceAdapter) {
this.resourceAdapter = resourceAdapter;
}
/**
* @see JmsMessageEndpointManager#setActivationSpecFactory(JmsActivationSpecFactory)
*/
public void setActivationSpecFactory(JmsActivationSpecFactory activationSpecFactory) {
this.activationSpecFactory = activationSpecFactory;
}
/**
* @see JmsMessageEndpointManager#setDestinationResolver(DestinationResolver)
*/
public void setDestinationResolver(DestinationResolver destinationResolver) {
this.destinationResolver = destinationResolver;
}
/**
* @see JmsMessageEndpointManager#setTransactionManager(Object)
*/
public void setTransactionManager(Object transactionManager) {
this.transactionManager = transactionManager;
}
/**
* @see JmsMessageEndpointManager#setPhase(int)
*/
public void setPhase(int phase) {
this.phase = phase;
}
@Override
public JmsMessageEndpointManager createListenerContainer(JmsListenerEndpoint endpoint) {
if (this.destinationResolver != null && this.activationSpecFactory != null) {
throw new IllegalStateException("Specify either 'activationSpecFactory' or " +
"'destinationResolver', not both. If you define a dedicated JmsActivationSpecFactory bean, " +
"specify the custom DestinationResolver there (if possible)");
}
JmsMessageEndpointManager instance = createContainerInstance();
if (this.resourceAdapter != null) {
instance.setResourceAdapter(this.resourceAdapter);
}
if (this.activationSpecFactory != null) {
instance.setActivationSpecFactory(this.activationSpecFactory);
}
if (this.destinationResolver != null) {
instance.setDestinationResolver(this.destinationResolver);
}
if (this.transactionManager != null) {
instance.setTransactionManager(this.transactionManager);
}
if (this.phase != null) {
instance.setPhase(this.phase);
}
instance.setActivationSpecConfig(this);
endpoint.setupListenerContainer(instance);
return instance;
}
/**
* Create an empty container instance.
*/
protected JmsMessageEndpointManager createContainerInstance() {
return new JmsMessageEndpointManager();
}
}
| DefaultJcaListenerContainerFactory |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/beanvalidation/CollectionActionsValidationStatelessTest.java | {
"start": 2127,
"end": 2539
} | class ____ {
public Author() {
}
public Author(long id, String firstName, String lastName, List<Book> books) {
this.firstName = firstName;
this.lastName = lastName;
this.books = books;
this.id = id;
}
@Id
Long id;
String firstName;
String lastName;
@OneToMany
@JoinColumn(name = "bookId")
@Size(min = 10)
List<Book> books;
}
@Table(name = "book")
@Entity
static | Author |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/derivedidentities/e3/b3/Dependent.java | {
"start": 236,
"end": 469
} | class ____ {
@EmbeddedId
DependentId id;
@JoinColumn(name = "FIRSTNAME", referencedColumnName = "FIRSTNAME")
@JoinColumn(name = "LASTNAME", referencedColumnName = "lastName")
@MapsId("empPK")
@ManyToOne
Employee emp;
}
| Dependent |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/flogger/FloggerPerWithoutRateLimitTest.java | {
"start": 1994,
"end": 2450
} | enum ____ {
ONE,
TWO;
}
public void test() {
logger.atInfo().log("foo");
logger.atInfo().per(E.ONE).atMostEvery(1, TimeUnit.HOURS).log("foo");
logger.atInfo().atMostEvery(1, TimeUnit.HOURS).per(E.ONE).log("foo");
logger.atInfo().per(E.ONE).every(10).log("foo");
}
}
""")
.doTest();
}
}
| E |
java | quarkusio__quarkus | extensions/infinispan-client/runtime/src/main/java/io/quarkus/infinispan/client/CacheResult.java | {
"start": 1932,
"end": 2408
} | interface ____ {
/**
* The name of the cache.
*/
@Nonbinding
String cacheName();
/**
* Delay in milliseconds before the lock on cache miss is interrupted. If such interruption happens, the cached method will
* be invoked and its result will be returned without being cached. A value of {@code 0} (which is the default one) means
* that the lock timeout is disabled.
*/
@Nonbinding
long lockTimeout() default 0;
}
| CacheResult |
java | elastic__elasticsearch | libs/native/src/main/java/org/elasticsearch/nativeaccess/NoopNativeAccess.java | {
"start": 683,
"end": 3093
} | class ____ implements NativeAccess {
private static final Logger logger = LogManager.getLogger(NativeAccess.class);
NoopNativeAccess() {}
@Override
public boolean definitelyRunningAsRoot() {
logger.warn("Cannot check if running as root because native access is not available");
return false;
}
@Override
public ProcessLimits getProcessLimits() {
logger.warn("Cannot get process limits because native access is not available");
return new ProcessLimits(ProcessLimits.UNKNOWN, ProcessLimits.UNKNOWN, ProcessLimits.UNKNOWN);
}
@Override
public void tryLockMemory() {
logger.warn("Cannot lock memory because native access is not available");
}
@Override
public boolean isMemoryLocked() {
return false;
}
@Override
public void tryInstallExecSandbox() {
logger.warn("Cannot install system call filter because native access is not available");
}
@Override
public ExecSandboxState getExecSandboxState() {
return ExecSandboxState.NONE;
}
@Override
public OptionalLong allocatedSizeInBytes(Path path) {
logger.warn("Cannot get allocated size of file [" + path + "] because native access is not available");
return OptionalLong.empty();
}
@Override
public void tryPreallocate(Path file, long size) {
logger.warn("Cannot preallocate file size because native access is not available");
}
@Override
public Systemd systemd() {
logger.warn("Cannot get systemd access because native access is not available");
return null;
}
@Override
public Zstd getZstd() {
logger.warn("cannot compress with zstd because native access is not available");
return null;
}
@Override
public CloseableByteBuffer newSharedBuffer(int len) {
logger.warn("cannot allocate buffer because native access is not available");
return null;
}
@Override
public CloseableByteBuffer newConfinedBuffer(int len) {
logger.warn("cannot allocate buffer because native access is not available");
return null;
}
@Override
public Optional<VectorSimilarityFunctions> getVectorSimilarityFunctions() {
logger.warn("cannot get vector distance because native access is not available");
return Optional.empty();
}
}
| NoopNativeAccess |
java | spring-projects__spring-security | rsocket/src/test/java/org/springframework/security/rsocket/core/CaptureSecurityContextSocketAcceptor.java | {
"start": 1114,
"end": 1676
} | class ____ implements SocketAcceptor {
private final RSocket accept;
private SecurityContext securityContext;
CaptureSecurityContextSocketAcceptor(RSocket accept) {
this.accept = accept;
}
@Override
public Mono<RSocket> accept(ConnectionSetupPayload setup, RSocket sendingSocket) {
return ReactiveSecurityContextHolder.getContext()
.doOnNext((securityContext) -> this.securityContext = securityContext)
.thenReturn(this.accept);
}
SecurityContext getSecurityContext() {
return this.securityContext;
}
}
| CaptureSecurityContextSocketAcceptor |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/association/toone/CriteriaJoinFetchTest.java | {
"start": 1409,
"end": 5946
} | class ____ {
private static SQLStatementInspector statementInspector;
@BeforeEach
public void setUp(EntityManagerFactoryScope scope) {
statementInspector = scope.getCollectingStatementInspector();
scope.inTransaction(
entityManager -> {
Customer customer0 = new Customer( 1, "William P. Keaton" );
Customer customer1 = new Customer( 2, "Kate P. Hudson" );
entityManager.persist( customer0 );
entityManager.persist( customer1 );
Note note0 = new Note( 3, "Note for address 0" );
Note note1 = new Note( 4, "Note for address 1" );
Address address0 = new Address( 5, "Flit street", "London", note0, customer0 );
Address address1 = new Address( 6, "via Marconi", "Pavia", note1, customer1 );
entityManager.persist( address0 );
entityManager.persist( address1 );
customer0.setAddress( address0 );
customer1.setAddress( address1 );
}
);
}
@AfterEach
public void tearDown(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
public void testCriteriaFetchSingularAttribute(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
statementInspector.clear();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Customer> criteriaQuery = criteriaBuilder.createQuery( Customer.class );
final From<Customer, Customer> customer = criteriaQuery.from( Customer.class );
final EntityType<Customer> customerEntityType = entityManager.getMetamodel()
.entity( Customer.class );
final SingularAttribute<? super Customer, Address> address = (SingularAttribute<? super Customer, Address>) customerEntityType.getSingularAttribute(
"address" );
customer.fetch( address, JoinType.INNER );
criteriaQuery.select( customer );
final TypedQuery<Customer> query = entityManager.createQuery( criteriaQuery );
List<Customer> result = query.getResultList();
assertThat( result.size(), is( 2 ) );
assertThat( statementInspector.getSqlQueries().size(), is( 3 ) );
Customer customer1 = result.get( 0 );
Note note = customer1.getAddress().getNote();
assertThat( note, notNullValue() );
if ( customer1.getId() == 1 ) {
assertThat( note.getId(), is( 3 ) );
}
else {
assertThat( note.getId(), is( 4 ) );
}
}
);
}
@Test
public void testCriteriaFetchSingularAttribute2(EntityManagerFactoryScope scope) {
scope.inTransaction(
entityManager -> {
statementInspector.clear();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Customer> criteriaQuery = criteriaBuilder.createQuery( Customer.class );
final From<Customer, Customer> customer = criteriaQuery.from( Customer.class );
final EntityType<Customer> customerEntityType = entityManager.getMetamodel()
.entity( Customer.class );
final SingularAttribute<? super Customer, Address> address = (SingularAttribute<? super Customer, Address>) customerEntityType.getSingularAttribute(
"address" );
final Fetch<Customer, Address> fetch = customer.fetch( address, JoinType.INNER );
fetch.fetch( entityManager.getMetamodel()
.entity( Address.class ).getSingularAttribute( "note" ), JoinType.INNER );
criteriaQuery.select( customer );
final TypedQuery<Customer> query = entityManager.createQuery( criteriaQuery );
final List<Customer> result = query.getResultList();
assertThat( result.size(), is( 2 ) );
assertThat( statementInspector.getSqlQueries().size(), is( 1 ) );
final Customer customer1 = result.get( 0 );
final Note note = customer1.getAddress().getNote();
assertThat( note, notNullValue() );
if ( customer1.getId() == 1 ) {
assertThat( note.getId(), is( 3 ) );
}
else {
assertThat( note.getId(), is( 4 ) );
}
assertThat( statementInspector.getSqlQueries().size(), is( 1 ) );
}
);
}
@Test
public void testFind(EntityManagerFactoryScope scope) {
statementInspector.clear();
scope.inTransaction(
entityManager -> {
Customer customer = entityManager.find( Customer.class, 2 );
final Note note = customer.getAddress().getNote();
assertThat( note.getId(), is( 4 ) );
assertThat( statementInspector.getSqlQueries().size(), is( 1 ) );
}
);
}
@Entity(name = "Customer")
@Table(name = "CUSTOMER_TABLE")
public static | CriteriaJoinFetchTest |
java | spring-projects__spring-framework | spring-expression/src/test/java/org/springframework/expression/spel/SpelReproTests.java | {
"start": 30939,
"end": 65291
} | class ____ {
public Reserver getReserver() {
return this;
}
public String NE = "abc";
public String ne = "def";
public int DIV = 1;
public int div = 3;
public Map<String, String> m = new HashMap<>();
Reserver() {
m.put("NE", "xyz");
}
}
StandardEvaluationContext context = new StandardEvaluationContext(new Reserver());
SpelExpressionParser parser = new SpelExpressionParser();
String ex = "getReserver().NE";
SpelExpression exp = parser.parseRaw(ex);
String value = (String) exp.getValue(context);
assertThat(value).isEqualTo("abc");
ex = "getReserver().ne";
exp = parser.parseRaw(ex);
value = (String) exp.getValue(context);
assertThat(value).isEqualTo("def");
ex = "getReserver().m[NE]";
exp = parser.parseRaw(ex);
value = (String) exp.getValue(context);
assertThat(value).isEqualTo("xyz");
ex = "getReserver().DIV";
exp = parser.parseRaw(ex);
assertThat(exp.getValue(context)).isEqualTo(1);
ex = "getReserver().div";
exp = parser.parseRaw(ex);
assertThat(exp.getValue(context)).isEqualTo(3);
exp = parser.parseRaw("NE");
assertThat(exp.getValue(context)).isEqualTo("abc");
}
@Test
void reservedWordProperties_SPR9862() {
StandardEvaluationContext context = new StandardEvaluationContext();
SpelExpressionParser parser = new SpelExpressionParser();
SpelExpression expression = parser.parseRaw("T(org.springframework.expression.spel.testresources.le.div.mod.reserved.Reserver).CONST");
Object value = expression.getValue(context);
assertThat(value).isEqualTo(Reserver.CONST);
}
/**
* We add property accessors in the order:
* First, Second, Third, Fourth.
* They are not utilized in this order; preventing a priority or order of operations
* in evaluation of SPEL expressions for a given context.
*/
@Test
void propertyAccessorOrder_SPR8211() {
ExpressionParser expressionParser = new SpelExpressionParser();
StandardEvaluationContext evaluationContext = new StandardEvaluationContext(new ContextObject());
evaluationContext.addPropertyAccessor(new TestPropertyAccessor("firstContext"));
evaluationContext.addPropertyAccessor(new TestPropertyAccessor("secondContext"));
evaluationContext.addPropertyAccessor(new TestPropertyAccessor("thirdContext"));
evaluationContext.addPropertyAccessor(new TestPropertyAccessor("fourthContext"));
assertThat(expressionParser.parseExpression("shouldBeFirst").getValue(evaluationContext)).isEqualTo("first");
assertThat(expressionParser.parseExpression("shouldBeSecond").getValue(evaluationContext)).isEqualTo("second");
assertThat(expressionParser.parseExpression("shouldBeThird").getValue(evaluationContext)).isEqualTo("third");
assertThat(expressionParser.parseExpression("shouldBeFourth").getValue(evaluationContext)).isEqualTo("fourth");
}
/**
* Test the ability to subclass the ReflectiveMethodResolver and change how it
* determines the set of methods for a type.
*/
@Test
void customStaticFunctions_SPR9038() {
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
List<MethodResolver> methodResolvers = List.of(new ReflectiveMethodResolver() {
@Override
protected Method[] getMethods(Class<?> type) {
return new Method[] {ReflectionUtils.findMethod(Integer.class, "parseInt", String.class, int.class)};
}
});
context.setMethodResolvers(methodResolvers);
Expression expression = parser.parseExpression("parseInt('-FF', 16)");
Integer result = expression.getValue(context, "", Integer.class);
assertThat(result).isEqualTo(-255);
}
@Test
void array() {
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = null;
Object result = null;
expression = parser.parseExpression("new java.lang.Long[0].class");
result = expression.getValue(context);
assertThat(result).asString().isEqualTo("class [Ljava.lang.Long;");
expression = parser.parseExpression("T(java.lang.Long[])");
result = expression.getValue(context);
assertThat(result).asString().isEqualTo("class [Ljava.lang.Long;");
expression = parser.parseExpression("T(java.lang.String[][][])");
result = expression.getValue(context);
assertThat(result).asString().isEqualTo("class [[[Ljava.lang.String;");
assertThat(((SpelExpression) expression).toStringAST()).isEqualTo("T(java.lang.String[][][])");
expression = parser.parseExpression("new int[0].class");
result = expression.getValue(context);
assertThat(result).asString().isEqualTo("class [I");
expression = parser.parseExpression("T(int[][])");
result = expression.getValue(context);
assertThat(result).asString().isEqualTo("class [[I");
}
@Test
void SPR9486_floatFunctionResolver() {
Number expectedResult = Math.abs(-10.2f);
ExpressionParser parser = new SpelExpressionParser();
SPR9486_FunctionsClass testObject = new SPR9486_FunctionsClass();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("abs(-10.2f)");
Number result = expression.getValue(context, testObject, Number.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_addFloatWithDouble() {
Number expectedNumber = 10.21f + 10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f + 10.2");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_addFloatWithFloat() {
Number expectedNumber = 10.21f + 10.2f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f + 10.2f");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_subtractFloatWithDouble() {
Number expectedNumber = 10.21f - 10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f - 10.2");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_subtractFloatWithFloat() {
Number expectedNumber = 10.21f - 10.2f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f - 10.2f");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_multiplyFloatWithDouble() {
Number expectedNumber = 10.21f * 10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f * 10.2");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_multiplyFloatWithFloat() {
Number expectedNumber = 10.21f * 10.2f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f * 10.2f");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_floatDivideByFloat() {
Number expectedNumber = -10.21f / -10.2f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f / -10.2f");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_floatDivideByDouble() {
Number expectedNumber = -10.21f / -10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f / -10.2");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_floatEqFloatUnaryMinus() {
Boolean expectedResult = -10.21f == -10.2f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f == -10.2f");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatEqDoubleUnaryMinus() {
Boolean expectedResult = -10.21f == -10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f == -10.2");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatEqFloat() {
Boolean expectedResult = 10.215f == 10.2109f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.215f == 10.2109f");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatEqDouble() {
Boolean expectedResult = 10.215f == 10.2109;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.215f == 10.2109");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatNotEqFloat() {
Boolean expectedResult = 10.215f != 10.2109f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.215f != 10.2109f");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatNotEqDouble() {
Boolean expectedResult = 10.215f != 10.2109;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.215f != 10.2109");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatLessThanFloat() {
Boolean expectedNumber = -10.21f < -10.2f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f < -10.2f");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_floatLessThanDouble() {
Boolean expectedNumber = -10.21f < -10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f < -10.2");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_floatLessThanOrEqualFloat() {
Boolean expectedNumber = -10.21f <= -10.22f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f <= -10.22f");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_floatLessThanOrEqualDouble() {
Boolean expectedNumber = -10.21f <= -10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f <= -10.2");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_floatGreaterThanFloat() {
Boolean expectedNumber = -10.21f > -10.2f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f > -10.2f");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_floatGreaterThanDouble() {
Boolean expectedResult = -10.21f > -10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f > -10.2");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatGreaterThanOrEqualFloat() {
Boolean expectedNumber = -10.21f >= -10.2f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f >= -10.2f");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedNumber);
}
@Test
void SPR9486_floatGreaterThanEqualDouble() {
Boolean expectedResult = -10.21f >= -10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("-10.21f >= -10.2");
Boolean result = expression.getValue(context, null, Boolean.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatModulusFloat() {
Number expectedResult = 10.21f % 10.2f;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f % 10.2f");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatModulusDouble() {
Number expectedResult = 10.21f % 10.2;
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f % 10.2");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatPowerFloat() {
Number expectedResult = Math.pow(10.21f, -10.2f);
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f ^ -10.2f");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9486_floatPowerDouble() {
Number expectedResult = Math.pow(10.21f, 10.2);
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Expression expression = parser.parseExpression("10.21f ^ 10.2");
Number result = expression.getValue(context, null, Number.class);
assertThat(result).isEqualTo(expectedResult);
}
@Test
void SPR9994_bridgeMethods() throws Exception {
ReflectivePropertyAccessor accessor = new ReflectivePropertyAccessor();
StandardEvaluationContext context = new StandardEvaluationContext();
GenericImplementation target = new GenericImplementation();
accessor.write(context, target, "property", "1");
assertThat(target.value).isEqualTo(1);
TypedValue value = accessor.read(context, target, "property");
assertThat(value.getValue()).isEqualTo(1);
assertThat(value.getTypeDescriptor().getType()).isEqualTo(Integer.class);
}
@Test
void SPR10162_onlyBridgeMethod() throws Exception {
ReflectivePropertyAccessor accessor = new ReflectivePropertyAccessor();
StandardEvaluationContext context = new StandardEvaluationContext();
Object target = new OnlyBridgeMethod();
TypedValue value = accessor.read(context, target, "property");
assertThat(value.getValue()).isNull();
assertThat(value.getTypeDescriptor().getType()).isEqualTo(Integer.class);
}
@Test
void SPR10091_simpleTestValueType() {
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext evaluationContext = new StandardEvaluationContext(new BooleanHolder());
Class<?> valueType = parser.parseExpression("simpleProperty").getValueType(evaluationContext);
assertThat(valueType).isEqualTo(Boolean.class);
}
@Test
void SPR10091_simpleTestValue() {
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext evaluationContext = new StandardEvaluationContext(new BooleanHolder());
Object value = parser.parseExpression("simpleProperty").getValue(evaluationContext);
assertThat(value).isInstanceOf(Boolean.class);
}
@Test
void SPR10091_primitiveTestValueType() {
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext evaluationContext = new StandardEvaluationContext(new BooleanHolder());
Class<?> valueType = parser.parseExpression("primitiveProperty").getValueType(evaluationContext);
assertThat(valueType).isEqualTo(Boolean.class);
}
// SPR-10091: getValue() on the primitive boolean property must yield a boxed Boolean.
@Test
void SPR10091_primitiveTestValue() {
ExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext evaluationContext = new StandardEvaluationContext(new BooleanHolder());
Object value = parser.parseExpression("primitiveProperty").getValue(evaluationContext);
assertThat(value).isInstanceOf(Boolean.class);
}
// SPR-16123: setting null is legal for the boxed Boolean property but must raise an
// EvaluationException for the primitive boolean property (null cannot be unboxed).
@Test
void SPR16123() {
ExpressionParser parser = new SpelExpressionParser();
parser.parseExpression("simpleProperty").setValue(new BooleanHolder(), null);
assertThatExceptionOfType(EvaluationException.class)
.isThrownBy(() -> parser.parseExpression("primitiveProperty").setValue(new BooleanHolder(), null));
}
// SPR-10146: each malformed expression (an operator with no left operand, or a lone
// '|') must fail parsing with the specific SpEL error message, not a cryptic failure.
@Test
void SPR10146_malformedExpressions() {
doTestSpr10146("/foo", "EL1070E: Problem parsing left operand");
doTestSpr10146("*foo", "EL1070E: Problem parsing left operand");
doTestSpr10146("%foo", "EL1070E: Problem parsing left operand");
doTestSpr10146("<foo", "EL1070E: Problem parsing left operand");
doTestSpr10146(">foo", "EL1070E: Problem parsing left operand");
doTestSpr10146("&&foo", "EL1070E: Problem parsing left operand");
doTestSpr10146("||foo", "EL1070E: Problem parsing left operand");
doTestSpr10146("|foo", "EL1069E: Missing expected character '|'");
}
// Asserts that parsing the given malformed expression throws a SpelParseException
// whose message contains the expected error-code text.
private void doTestSpr10146(String expression, String expectedMessage) {
assertThatExceptionOfType(SpelParseException.class)
.isThrownBy(() -> new SpelExpressionParser().parseExpression(expression))
.withMessageContaining(expectedMessage);
}
// SPR-10125: a static final constant (VALUE) declared on an interface must resolve
// through a T(...) reference whether the referenced type inherits it directly
// (Impl1) or indirectly (Impl2); both yield "interfaceValue".
@Test
void SPR10125() {
StandardEvaluationContext context = new StandardEvaluationContext();
String fromInterface = parser.parseExpression("T(" + StaticFinalImpl1.class.getName() + ").VALUE").getValue(
context, String.class);
assertThat(fromInterface).isEqualTo("interfaceValue");
String fromClass = parser.parseExpression("T(" + StaticFinalImpl2.class.getName() + ").VALUE").getValue(
context, String.class);
assertThat(fromClass).isEqualTo("interfaceValue");
}
// SPR-10210: invoking a method that is a bridge method on the target (class D from
// the spr10210 fixture package) must evaluate without error; the test passes if
// getValue() does not throw.
@Test
void SPR10210() {
StandardEvaluationContext context = new StandardEvaluationContext();
context.setVariable("bridgeExample", new org.springframework.expression.spel.spr10210.D());
Expression parseExpression = parser.parseExpression("#bridgeExample.bridgeMethod()");
parseExpression.getValue(context);
}
// SPR-10328: "$[]" (a selection with no selection expression) must fail at parse
// time with the EL1071E message rather than later at evaluation time.
@Test
void SPR10328() {
assertThatExceptionOfType(SpelParseException.class)
.isThrownBy(() -> parser.parseExpression("$[]"))
.withMessageContaining("EL1071E: A required selection expression has not been specified");
}
// SPR-10452: with caching/compilation disabled (SpelParserConfiguration(false, false)),
// re-evaluating the same parsed "#enumType.values()" expression must honor a changed
// #enumType variable — first ABC's constants, then XYZ's. Statement order matters:
// the context is mutated between the two evaluations of the one Expression instance.
@Test
void SPR10452() {
SpelParserConfiguration configuration = new SpelParserConfiguration(false, false);
ExpressionParser parser = new SpelExpressionParser(configuration);
StandardEvaluationContext context = new StandardEvaluationContext();
Expression spel = parser.parseExpression("#enumType.values()");
context.setVariable("enumType", ABC.class);
Object result = spel.getValue(context);
assertThat(result).isNotNull();
assertThat(result.getClass().isArray()).isTrue();
assertThat(Array.get(result, 0)).isEqualTo(ABC.A);
assertThat(Array.get(result, 1)).isEqualTo(ABC.B);
assertThat(Array.get(result, 2)).isEqualTo(ABC.C);
context.setVariable("enumType", XYZ.class);
result = spel.getValue(context);
assertThat(result).isNotNull();
assertThat(result.getClass().isArray()).isTrue();
assertThat(Array.get(result, 0)).isEqualTo(XYZ.X);
assertThat(Array.get(result, 1)).isEqualTo(XYZ.Y);
assertThat(Array.get(result, 2)).isEqualTo(XYZ.Z);
}
// SPR-9495: after a first evaluation resolves ABC.values(), registering a custom
// MethodResolver (which always invokes XYZ.values() reflectively and narrows the
// returned TypedValue) must take effect on re-evaluation of the same parsed
// expression — the second result is XYZ's constants. Order-dependent by design.
@Test
void SPR9495() {
SpelParserConfiguration configuration = new SpelParserConfiguration(false, false);
ExpressionParser parser = new SpelExpressionParser(configuration);
StandardEvaluationContext context = new StandardEvaluationContext();
Expression spel = parser.parseExpression("#enumType.values()");
context.setVariable("enumType", ABC.class);
Object result = spel.getValue(context);
assertThat(result).isNotNull();
assertThat(result.getClass().isArray()).isTrue();
assertThat(Array.get(result, 0)).isEqualTo(ABC.A);
assertThat(Array.get(result, 1)).isEqualTo(ABC.B);
assertThat(Array.get(result, 2)).isEqualTo(ABC.C);
context.addMethodResolver((context2, targetObject, name, argumentTypes) -> (context1, target, arguments) -> {
try {
Method method = XYZ.class.getMethod("values");
Object value = method.invoke(target, arguments);
return new TypedValue(value, new TypeDescriptor(new MethodParameter(method, -1)).narrow(value));
}
catch (Exception ex) {
throw new AccessException(ex.getMessage(), ex);
}
});
result = spel.getValue(context);
assertThat(result).isNotNull();
assertThat(result.getClass().isArray()).isTrue();
assertThat(Array.get(result, 0)).isEqualTo(XYZ.X);
assertThat(Array.get(result, 1)).isEqualTo(XYZ.Y);
assertThat(Array.get(result, 2)).isEqualTo(XYZ.Z);
}
// gh-15119 / SPR-10486: "class.name" must navigate via getClass().getName() on the
// root object, while a plain "name" property still resolves normally on the same root.
@Test
void SPR10486() {
SpelExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Spr10486 rootObject = new Spr10486();
Expression classNameExpression = parser.parseExpression("class.name");
Expression nameExpression = parser.parseExpression("name");
assertThat(classNameExpression.getValue(context, rootObject)).isEqualTo(Spr10486.class.getName());
assertThat(nameExpression.getValue(context, rootObject)).isEqualTo("name");
}
// SPR-11142: referencing a nonexistent property ("something") on the Spr11142 root
// must fail with a SpelEvaluationException naming the missing property.
@Test
void SPR11142() {
SpelExpressionParser parser = new SpelExpressionParser();
StandardEvaluationContext context = new StandardEvaluationContext();
Spr11142 rootObject = new Spr11142();
Expression expression = parser.parseExpression("something");
assertThatExceptionOfType(SpelEvaluationException.class)
.isThrownBy(() -> expression.getValue(context, rootObject))
.withMessageContaining("'something' cannot be found");
}
// SPR-9194: "['one'] == ['two']" compares the indexed map values with equals(), so
// two distinct-but-equal TestClass2 instances built from "abc" must compare true.
@Test
void SPR9194() {
TestClass2 one = new TestClass2("abc");
TestClass2 two = new TestClass2("abc");
Map<String, TestClass2> map = new HashMap<>();
map.put("one", one);
map.put("two", two);
SpelExpressionParser parser = new SpelExpressionParser();
Expression expr = parser.parseExpression("['one'] == ['two']");
assertThat(expr.getValue(map, Boolean.class)).isTrue();
}
// SPR-11348: a constructor reference "new java.util.ArrayList(#root)" must accept an
// unmodifiable Collection as #root and produce an ArrayList preserving element order.
@Test
void SPR11348() {
Collection<String> coll = new LinkedHashSet<>();
coll.add("one");
coll.add("two");
coll = Collections.unmodifiableCollection(coll);
SpelExpressionParser parser = new SpelExpressionParser();
Expression expr = parser.parseExpression("new java.util.ArrayList(#root)");
Object value = expr.getValue(coll);
assertThat(value).isInstanceOf(ArrayList.class)
.asInstanceOf(InstanceOfAssertFactories.list(String.class)).containsExactly("one", "two");
}
// SPR-11445: a nested method call "echo(parameter())" on the root object must
// evaluate the inner call first and feed its result to the outer call.
@Test
void SPR11445_simple() {
StandardEvaluationContext context = new StandardEvaluationContext(new Spr11445Class());
Expression expr = new SpelExpressionParser().parseRaw("echo(parameter())");
assertThat(expr.getValue(context)).isEqualTo(1);
}
// SPR-11445: the same nested call must also work through bean references —
// "@bean.echo(@bean.parameter())" — with Spr11445Class acting as the bean resolver.
@Test
void SPR11445_beanReference() {
StandardEvaluationContext context = new StandardEvaluationContext();
context.setBeanResolver(new Spr11445Class());
Expression expr = new SpelExpressionParser().parseRaw("@bean.echo(@bean.parameter())");
assertThat(expr.getValue(context)).isEqualTo(1);
}
// SPR-11494: a varargs static call T(Arrays).asList('a','b') must produce a
// two-element list (not a single-element list wrapping an array).
@Test
@SuppressWarnings("unchecked")
void SPR11494() {
Expression exp = new SpelExpressionParser().parseExpression("T(java.util.Arrays).asList('a','b')");
List<String> list = (List<String>) exp.getValue();
assertThat(list).hasSize(2);
}
// SPR-11609: with a MapAccessor registered, T(MapWithConstant).X must still resolve
// the static constant X (value 1) on the type reference — presumably rather than
// being intercepted as a map-key lookup by the MapAccessor (NOTE: inferred from the
// fixture name; confirm against MapWithConstant's declaration).
@Test
void SPR11609() {
StandardEvaluationContext sec = new StandardEvaluationContext();
sec.addPropertyAccessor(new MapAccessor());
Expression exp = new SpelExpressionParser().parseExpression(
"T(org.springframework.expression.spel.SpelReproTests$MapWithConstant).X");
assertThat(exp.getValue(sec)).isEqualTo(1);
}
// SPR-9735: indexing a variable ("#item[0]") followed by property navigation
// (".name") must address the parent's first child, returning "child1".
@Test
void SPR9735() {
Item item = new Item();
item.setName("parent");
Item item1 = new Item();
item1.setName("child1");
Item item2 = new Item();
item2.setName("child2");
item.add(item1);
item.add(item2);
ExpressionParser parser = new SpelExpressionParser();
EvaluationContext context = new StandardEvaluationContext();
Expression exp = parser.parseExpression("#item[0].name");
context.setVariable("item", item);
assertThat(exp.getValue(context)).isEqualTo("child1");
}
// SPR-12502: the same parsed "#root.getClass().getName()" expression, evaluated
// against root objects of two different types, must report each object's own class
// name (the first resolution must not be incorrectly reused for the second type).
@Test
void SPR12502() {
SpelExpressionParser parser = new SpelExpressionParser();
Expression expression = parser.parseExpression("#root.getClass().getName()");
assertThat(expression.getValue(new UnnamedUser())).isEqualTo(UnnamedUser.class.getName());
assertThat(expression.getValue(new NamedUser())).isEqualTo(NamedUser.class.getName());
}
// gh-17127, SPR-12522: a varargs call with zero arguments — T(Arrays).asList() —
// must yield an empty list rather than failing to resolve the method.
@Test // gh-17127, SPR-12522
void arraysAsListWithNoArguments() {
SpelExpressionParser parser = new SpelExpressionParser();
Expression expression = parser.parseExpression("T(java.util.Arrays).asList()");
List<?> value = expression.getValue(List.class);
assertThat(value).isEmpty();
}
// gh-33013: a varargs call with a single empty-string argument must produce a
// one-element list containing "" (the empty string must not be dropped).
@Test // gh-33013
void arraysAsListWithSingleEmptyStringArgument() {
SpelExpressionParser parser = new SpelExpressionParser();
Expression expression = parser.parseExpression("T(java.util.Arrays).asList('')");
List<?> value = expression.getValue(List.class);
assertThat(value).asInstanceOf(list(String.class)).containsExactly("");
}
// SPR-12803: passing an empty list variable to the static FooLists.newArrayList(...)
// factory must resolve the method and return an ArrayList.
@Test
void SPR12803() {
StandardEvaluationContext sec = new StandardEvaluationContext();
sec.setVariable("iterable", Collections.emptyList());
SpelExpressionParser parser = new SpelExpressionParser();
Expression expression = parser.parseExpression("T(org.springframework.expression.spel.SpelReproTests.FooLists).newArrayList(#iterable)");
assertThat(expression.getValue(sec)).isInstanceOf(ArrayList.class);
}
// SPR-12808: overload resolution for DistanceEnforcer.from(#no) must pick the most
// specific overload for the argument type — Integer -> "Integer...", float ->
// "Number...", String -> "Object..." — as evidenced by the result prefixes.
@Test
void SPR12808() {
SpelExpressionParser parser = new SpelExpressionParser();
Expression expression = parser.parseExpression("T(org.springframework.expression.spel.SpelReproTests.DistanceEnforcer).from(#no)");
StandardEvaluationContext sec = new StandardEvaluationContext();
sec.setVariable("no", 1);
assertThat(expression.getValue(sec).toString()).startsWith("Integer");
sec = new StandardEvaluationContext();
sec.setVariable("no", 1.0F);
assertThat(expression.getValue(sec).toString()).startsWith("Number");
sec = new StandardEvaluationContext();
sec.setVariable("no", "1.0");
assertThat(expression.getValue(sec).toString()).startsWith("Object");
}
// SPR-13055: projection "![...]" over a List of Maps must bind #this to each map,
// whether values() is called via a T(StringUtils) helper, explicitly on #this, or
// implicitly. NOTE(review): the expected strings depend on HashMap iteration order
// for these particular keys (e.g. "test12, test11"); deliberate but JDK-sensitive.
@Test
@SuppressWarnings("rawtypes")
void SPR13055() {
List<Map<String, Object>> myPayload = new ArrayList<>();
Map<String, Object> v1 = new HashMap<>();
Map<String, Object> v2 = new HashMap<>();
v1.put("test11", "test11");
v1.put("test12", "test12");
v2.put("test21", "test21");
v2.put("test22", "test22");
myPayload.add(v1);
myPayload.add(v2);
EvaluationContext context = new StandardEvaluationContext(myPayload);
ExpressionParser parser = new SpelExpressionParser();
String ex = "#root.![T(org.springframework.util.StringUtils).collectionToCommaDelimitedString(#this.values())]";
List res = parser.parseExpression(ex).getValue(context, List.class);
assertThat(res.toString()).isEqualTo("[test12,test11, test22,test21]");
res = parser.parseExpression("#root.![#this.values()]").getValue(context,
List.class);
assertThat(res.toString()).isEqualTo("[[test12, test11], [test22, test21]]");
res = parser.parseExpression("#root.![values()]").getValue(context, List.class);
assertThat(res.toString()).isEqualTo("[[test12, test11], [test22, test21]]");
}
// SPR-9511: "@foo" resolves the bean ("custard") and "&foo" the factory bean
// ("foo factory") via MyBeanResolver; combining the prefixes ("&@foo" / "@&foo")
// must fail at parse time with INVALID_BEAN_REFERENCE at position 0.
@Test
void AccessingFactoryBean_spr9511() {
StandardEvaluationContext context = new StandardEvaluationContext();
context.setBeanResolver(new MyBeanResolver());
Expression expr = new SpelExpressionParser().parseRaw("@foo");
assertThat(expr.getValue(context)).isEqualTo("custard");
expr = new SpelExpressionParser().parseRaw("&foo");
assertThat(expr.getValue(context)).isEqualTo("foo factory");
assertThatExceptionOfType(SpelParseException.class)
.isThrownBy(() -> new SpelExpressionParser().parseRaw("&@foo"))
.satisfies(ex -> {
assertThat(ex.getMessageCode()).isEqualTo(SpelMessage.INVALID_BEAN_REFERENCE);
assertThat(ex.getPosition()).isEqualTo(0);
});
assertThatExceptionOfType(SpelParseException.class)
.isThrownBy(() -> new SpelExpressionParser().parseRaw("@&foo"))
.satisfies(ex -> {
assertThat(ex.getMessageCode()).isEqualTo(SpelMessage.INVALID_BEAN_REFERENCE);
assertThat(ex.getPosition()).isEqualTo(0);
});
}
// SPR-12035: selection "?[...]" over a nested list property must support both a
// plain property comparison (value>2) and a T(Math).abs(...) call inside the
// selection expression.
@Test
void SPR12035() {
ExpressionParser parser = new SpelExpressionParser();
Expression expression1 = parser.parseExpression("list.?[ value>2 ].size()!=0");
assertThat(expression1.getValue(new BeanClass(new ListOf(1.1), new ListOf(2.2)), Boolean.class)).isTrue();
Expression expression2 = parser.parseExpression("list.?[ T(java.lang.Math).abs(value) > 2 ].size()!=0");
assertThat(expression2.getValue(new BeanClass(new ListOf(1.1), new ListOf(-2.2)), Boolean.class)).isTrue();
}
// SPR-13055 (map variant): projection over inline maps must expose each entry's
// key/value both as implicit properties ("key", "value") and via "#this.key" /
// "#this.value" inside the projection expression.
@Test
void SPR13055_maps() {
EvaluationContext context = new StandardEvaluationContext();
ExpressionParser parser = new SpelExpressionParser();
Expression ex = parser.parseExpression("{'a':'y','b':'n'}.![value=='y'?key:null]");
assertThat(ex.getValue(context).toString()).isEqualTo("[a, null]");
ex = parser.parseExpression("{2:4,3:6}.![T(java.lang.Math).abs(#this.key) + 5]");
assertThat(ex.getValue(context).toString()).isEqualTo("[7, 8]");
ex = parser.parseExpression("{2:4,3:6}.![T(java.lang.Math).abs(#this.value) + 5]");
assertThat(ex.getValue(context).toString()).isEqualTo("[9, 11]");
}
// SPR-10417: inside a selection "?[...]", #this must refer to the current element —
// explicitly (#list2.contains(#this)), implicitly via a method call (toString()),
// in comparisons (#this > 2), and nested inside T(Math).abs(...) calls.
@Test
@SuppressWarnings({ "unchecked", "rawtypes" })
void SPR10417() {
List list1 = new ArrayList();
list1.add("a");
list1.add("b");
list1.add("x");
List list2 = new ArrayList();
list2.add("c");
list2.add("x");
EvaluationContext context = new StandardEvaluationContext();
context.setVariable("list1", list1);
context.setVariable("list2", list2);
// #this should be the element from list1
Expression ex = parser.parseExpression("#list1.?[#list2.contains(#this)]");
Object result = ex.getValue(context);
assertThat(result).asString().isEqualTo("[x]");
// toString() should be called on the element from list1
ex = parser.parseExpression("#list1.?[#list2.contains(toString())]");
result = ex.getValue(context);
assertThat(result).asString().isEqualTo("[x]");
List list3 = new ArrayList();
list3.add(1);
list3.add(2);
list3.add(3);
list3.add(4);
context = new StandardEvaluationContext();
context.setVariable("list3", list3);
ex = parser.parseExpression("#list3.?[#this > 2]");
result = ex.getValue(context);
assertThat(result).asString().isEqualTo("[3, 4]");
ex = parser.parseExpression("#list3.?[#this >= T(java.lang.Math).abs(T(java.lang.Math).abs(#this))]");
result = ex.getValue(context);
assertThat(result).asString().isEqualTo("[1, 2, 3, 4]");
}
// SPR-10417 (map variant): in a selection over a map, #this is the Map.Entry —
// reachable explicitly (#this.getKey()) or via the implicit "key" property — and
// only entries whose key also exists in #map2 survive ({X=66}).
@Test
@SuppressWarnings({ "unchecked", "rawtypes" })
void SPR10417_maps() {
Map map1 = new HashMap();
map1.put("A", 65);
map1.put("B", 66);
map1.put("X", 66);
Map map2 = new HashMap();
map2.put("X", 66);
EvaluationContext context = new StandardEvaluationContext();
context.setVariable("map1", map1);
context.setVariable("map2", map2);
// #this should be the element from list1
Expression ex = parser.parseExpression("#map1.?[#map2.containsKey(#this.getKey())]");
Object result = ex.getValue(context);
assertThat(result).asString().isEqualTo("{X=66}");
ex = parser.parseExpression("#map1.?[#map2.containsKey(key)]");
result = ex.getValue(context);
assertThat(result).asString().isEqualTo("{X=66}");
}
// SPR-13918: a static factory call with a variable argument —
// T(Charset).forName(#encoding) with #encoding = "UTF-8" — must return the
// StandardCharsets.UTF_8 instance.
@Test
void SPR13918() {
EvaluationContext context = new StandardEvaluationContext();
context.setVariable("encoding", "UTF-8");
Expression ex = parser.parseExpression("T(java.nio.charset.Charset).forName(#encoding)");
Object result = ex.getValue(context);
assertThat(result).isEqualTo(StandardCharsets.UTF_8);
}
// SPR-16032: safe navigation plus a NUL-character delimiter — "#str?.split('\0')"
// on "a\0b" — must split into {"a", "b"}; compared via nullSafeEquals because the
// result is an Object holding a String[].
@Test
void SPR16032() {
EvaluationContext context = new StandardEvaluationContext();
context.setVariable("str", "a\0b");
Expression ex = parser.parseExpression("#str?.split('\0')");
Object result = ex.getValue(context);
assertThat(ObjectUtils.nullSafeEquals(result, new String[] {"a", "b"})).isTrue();
}
static | Reserver |
java | redisson__redisson | redisson/src/main/java/org/redisson/client/protocol/convertor/TimeObjectDecoder.java | {
"start": 876,
"end": 1111
} | class ____ implements MultiDecoder<Time> {
@Override
public Time decode(List<Object> parts, State state) {
return new Time(((Long) parts.get(0)).intValue(), ((Long) parts.get(1)).intValue());
}
}
| TimeObjectDecoder |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/AfterThrowingAdviceBindingTests.java | {
"start": 3508,
"end": 3609
} | interface ____ makes it easy to test this aspect is
// working as expected through mocking.
public | that |
java | quarkusio__quarkus | extensions/micrometer/runtime/src/main/java/io/quarkus/micrometer/runtime/config/PrometheusConfigGroup.java | {
"start": 186,
"end": 1082
} | interface ____ extends MicrometerConfig.CapabilityEnabled {
/**
* Support for export to Prometheus.
* <p>
* Support for Prometheus will be enabled if Micrometer
* support is enabled, the PrometheusMeterRegistry is on the classpath
* and either this value is true, or this value is unset and
* {@code quarkus.micrometer.registry-enabled-default} is true.
*/
@Override
Optional<Boolean> enabled();
/**
* The path for the prometheus metrics endpoint (produces text/plain). The default value is
* `metrics` and is resolved relative to the non-application endpoint (`q`), e.g.
* `${quarkus.http.root-path}/${quarkus.http.non-application-root-path}/metrics`.
* If an absolute path is specified (`/metrics`), the prometheus endpoint will be served
* from the configured path.
*
* If the management | PrometheusConfigGroup |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/resource/bitmap/GranularRoundedCorners.java | {
"start": 394,
"end": 2316
} | class ____ extends BitmapTransformation {
private static final String ID = "com.bumptech.glide.load.resource.bitmap.GranularRoundedCorners";
private static final byte[] ID_BYTES = ID.getBytes(CHARSET);
private final float topLeft;
private final float topRight;
private final float bottomRight;
private final float bottomLeft;
/** Provide the radii to round the corners of the bitmap. */
public GranularRoundedCorners(
float topLeft, float topRight, float bottomRight, float bottomLeft) {
this.topLeft = topLeft;
this.topRight = topRight;
this.bottomRight = bottomRight;
this.bottomLeft = bottomLeft;
}
@Override
protected Bitmap transform(
@NonNull BitmapPool pool, @NonNull Bitmap toTransform, int outWidth, int outHeight) {
return TransformationUtils.roundedCorners(
pool, toTransform, topLeft, topRight, bottomRight, bottomLeft);
}
@Override
public boolean equals(Object o) {
if (o instanceof GranularRoundedCorners) {
GranularRoundedCorners other = (GranularRoundedCorners) o;
return topLeft == other.topLeft
&& topRight == other.topRight
&& bottomRight == other.bottomRight
&& bottomLeft == other.bottomLeft;
}
return false;
}
@Override
public int hashCode() {
int hashCode = Util.hashCode(ID.hashCode(), Util.hashCode(topLeft));
hashCode = Util.hashCode(topRight, hashCode);
hashCode = Util.hashCode(bottomRight, hashCode);
return Util.hashCode(bottomLeft, hashCode);
}
@Override
public void updateDiskCacheKey(@NonNull MessageDigest messageDigest) {
messageDigest.update(ID_BYTES);
byte[] radiusData =
ByteBuffer.allocate(16)
.putFloat(topLeft)
.putFloat(topRight)
.putFloat(bottomRight)
.putFloat(bottomLeft)
.array();
messageDigest.update(radiusData);
}
}
| GranularRoundedCorners |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/scheduling/annotation/ScheduledAnnotationBeanPostProcessorObservabilityTests.java | {
"start": 9553,
"end": 9949
} | class ____ extends TaskTester {
@Scheduled(fixedDelay = 10_000, initialDelay = 5_000)
void hasCurrentObservation() {
Observation observation = this.observationRegistry.getCurrentObservation();
assertThat(observation).isNotNull();
assertThat(observation.getContext()).isInstanceOf(ScheduledTaskObservationContext.class);
this.latch.countDown();
}
}
static | CurrentObservationBean |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/reactive/server/MockServerConfigurer.java | {
"start": 1422,
"end": 2019
} | interface ____ {
/**
* Invoked immediately, i.e. before this method returns.
* @param serverSpec the serverSpec to which the configurer is added
*/
default void afterConfigureAdded(WebTestClient.MockServerSpec<?> serverSpec) {
}
/**
* Invoked just before the mock server is built. Use this hook to inspect
* and/or modify application-declared filters and exception handlers.
* @param builder the builder for the {@code HttpHandler} that will handle
* requests (i.e. the mock server)
*/
default void beforeServerCreated(WebHttpHandlerBuilder builder) {
}
}
| MockServerConfigurer |
java | apache__flink | flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dql/SqlShowPartitions.java | {
"start": 1499,
"end": 3343
} | class ____ extends SqlCall {
public static final SqlSpecialOperator OPERATOR =
new SqlSpecialOperator("SHOW PARTITIONS", SqlKind.OTHER);
private final SqlIdentifier tableIdentifier;
@Nullable private final SqlNodeList partitionSpec;
public SqlShowPartitions(
SqlParserPos pos, SqlIdentifier tableName, @Nullable SqlNodeList partitionSpec) {
super(pos);
this.tableIdentifier = requireNonNull(tableName, "tableName should not be null");
this.partitionSpec = partitionSpec;
}
@Override
public SqlOperator getOperator() {
return OPERATOR;
}
@Override
public List<SqlNode> getOperandList() {
List<SqlNode> operands = new ArrayList<>();
operands.add(tableIdentifier);
operands.add(partitionSpec);
return operands;
}
@Override
public void unparse(SqlWriter writer, int leftPrec, int rightPrec) {
writer.keyword("SHOW PARTITIONS");
tableIdentifier.unparse(writer, leftPrec, rightPrec);
SqlNodeList partitionSpec = getPartitionSpec();
if (partitionSpec != null && partitionSpec.size() > 0) {
writer.keyword("PARTITION");
partitionSpec.unparse(
writer, getOperator().getLeftPrec(), getOperator().getRightPrec());
}
}
public String[] fullTableName() {
return tableIdentifier.names.toArray(new String[0]);
}
/**
* Returns the partition spec if the SHOW should be applied to partitions, and null otherwise.
*/
public SqlNodeList getPartitionSpec() {
return partitionSpec;
}
/** Get partition spec as key-value strings. */
public LinkedHashMap<String, String> getPartitionKVs() {
return SqlParseUtils.getPartitionKVs(getPartitionSpec());
}
}
| SqlShowPartitions |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/StaleStateException.java | {
"start": 457,
"end": 1035
} | class ____ extends HibernateException {
/**
* Constructs a {@code StaleStateException} using the supplied message.
*
* @param message The message explaining the exception condition
*/
public StaleStateException(String message) {
super( message );
}
/**
* Constructs a {@code StaleStateException} using the supplied message
* and cause.
*
* @param message The message explaining the exception condition
* @param cause An exception to wrap
*/
public StaleStateException(String message, Exception cause) {
super( message, cause );
}
}
| StaleStateException |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/SessionUtils.java | {
"start": 730,
"end": 2052
} | class ____ {
private SessionUtils() {}
public static Block[] fromPages(List<Attribute> schema, List<Page> pages, BlockFactory blockFactory) {
int positionCount = pages.stream().mapToInt(Page::getPositionCount).sum();
Block.Builder[] builders = new Block.Builder[schema.size()];
Block[] blocks;
try {
for (int b = 0; b < builders.length; b++) {
builders[b] = PlannerUtils.toElementType(schema.get(b).dataType()).newBlockBuilder(positionCount, blockFactory);
}
for (Page p : pages) {
for (int b = 0; b < builders.length; b++) {
builders[b].copyFrom(p.getBlock(b), 0, p.getPositionCount());
}
}
blocks = Block.Builder.buildAll(builders);
} finally {
Releasables.closeExpectNoException(builders);
}
return blocks;
}
public static long checkPagesBelowSize(List<Page> pages, ByteSizeValue maxSize, LongFunction<String> exceptionMessage) {
long currentSize = pages.stream().mapToLong(Page::ramBytesUsedByBlocks).sum();
if (currentSize > maxSize.getBytes()) {
throw new IllegalArgumentException(exceptionMessage.apply(currentSize));
}
return currentSize;
}
}
| SessionUtils |
java | google__gson | gson/src/test/java/com/google/gson/functional/PrintFormattingTest.java | {
"start": 1227,
"end": 2586
} | class ____ {
private Gson gson;
@Before
public void setUp() throws Exception {
gson = new Gson();
}
@Test
public void testCompactFormattingLeavesNoWhiteSpace() {
List<Object> list = new ArrayList<>();
list.add(new BagOfPrimitives());
list.add(new Nested());
list.add(new PrimitiveArray());
list.add(new ClassWithTransientFields<>());
String json = gson.toJson(list);
assertContainsNoWhiteSpace(json);
}
@Test
public void testJsonObjectWithNullValues() {
JsonObject obj = new JsonObject();
obj.addProperty("field1", "value1");
obj.addProperty("field2", (String) null);
String json = gson.toJson(obj);
assertThat(json).contains("field1");
assertThat(json).doesNotContain("field2");
}
@Test
public void testJsonObjectWithNullValuesSerialized() {
gson = new GsonBuilder().serializeNulls().create();
JsonObject obj = new JsonObject();
obj.addProperty("field1", "value1");
obj.addProperty("field2", (String) null);
String json = gson.toJson(obj);
assertThat(json).contains("field1");
assertThat(json).contains("field2");
}
@SuppressWarnings("LoopOverCharArray")
private static void assertContainsNoWhiteSpace(String str) {
for (char c : str.toCharArray()) {
assertThat(Character.isWhitespace(c)).isFalse();
}
}
}
| PrintFormattingTest |
java | apache__flink | flink-kubernetes/src/test/java/org/apache/flink/kubernetes/kubeclient/parameters/AbstractKubernetesParametersTest.java | {
"start": 6821,
"end": 7936
} | class ____ extends AbstractKubernetesParameters {
public TestingKubernetesParameters(Configuration flinkConfig) {
super(flinkConfig);
}
@Override
public Map<String, String> getLabels() {
throw new UnsupportedOperationException("NOT supported");
}
@Override
public Map<String, String> getSelectors() {
throw new UnsupportedOperationException("NOT supported");
}
@Override
public Map<String, String> getNodeSelector() {
throw new UnsupportedOperationException("NOT supported");
}
@Override
public Map<String, String> getEnvironments() {
throw new UnsupportedOperationException("NOT supported");
}
@Override
public Map<String, String> getAnnotations() {
throw new UnsupportedOperationException("NOT supported");
}
@Override
public List<Map<String, String>> getTolerations() {
throw new UnsupportedOperationException("NOT supported");
}
}
}
| TestingKubernetesParameters |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/issues/Camel14624Test.java | {
"start": 1432,
"end": 2397
} | class ____ extends CamelTestSupport {
@Override
protected void bindToRegistry(Registry registry) throws Exception {
TransactedPolicy required = new SpringTransactionPolicy(new MockTransactionManager());
registry.bind("required", required);
}
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() throws Exception {
from("direct:test")
.transacted("required")
.to("mock:result");
}
};
}
@Test
public void testRoundtrip() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:test", "Hello World");
MockEndpoint.assertIsSatisfied(context);
}
@Override
public boolean isDumpRouteCoverage() {
return true;
}
| Camel14624Test |
java | apache__camel | components/camel-jetty/src/test/java/org/apache/camel/component/jetty/SpringJettyNoConnectionRedeliveryTest.java | {
"start": 1447,
"end": 2933
} | class ____ extends CamelSpringTestSupport {
@RegisterExtension
protected AvailablePortFinder.Port port = AvailablePortFinder.find();
@Override
protected AbstractXmlApplicationContext createApplicationContext() {
return newAppContext("jetty-noconnection-redelivery.xml");
}
protected Map<String, String> getTranslationProperties() {
Map<String, String> map = super.getTranslationProperties();
map.put("port", port.toString());
return map;
}
@Test
public void testConnectionOk() {
String reply = template.requestBody("direct:start", "World", String.class);
assertEquals("Bye World", reply);
}
@Test
public void testConnectionNotOk() throws Exception {
// stop Jetty route so there should not be a connection
context.getRouteController().stopRoute("jetty");
Exchange exchange = template.request("direct:start", new Processor() {
public void process(Exchange exchange) {
exchange.getIn().setBody("Moon");
}
});
assertTrue(exchange.isFailed());
// there should be a connect exception as cause
ConnectException ce = exchange.getException(ConnectException.class);
assertNotNull(ce);
assertEquals(true, exchange.getIn().getHeader(Exchange.REDELIVERED));
assertEquals(4, exchange.getIn().getHeader(Exchange.REDELIVERY_COUNTER));
}
}
| SpringJettyNoConnectionRedeliveryTest |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/aggfunctions/MinWithRetractAggFunctionTest.java | {
"start": 16821,
"end": 17630
} | class ____
extends MinWithRetractAggFunctionTestBase<Integer> {
@Override
protected List<List<Integer>> getInputValueSets() {
return Arrays.asList(
Arrays.asList(0, 1000, 100, null, 10),
Arrays.asList(null, null, null, null, null),
Arrays.asList(null, 1));
}
@Override
protected List<Integer> getExpectedResults() {
return Arrays.asList(0, null, 1);
}
@Override
protected AggregateFunction<Integer, MinWithRetractAccumulator<Integer>> getAggregator() {
return new MinWithRetractAggFunction<>(DataTypes.DATE().getLogicalType());
}
}
/** Test for {@link TimeType}. */
@Nested
final | DateMinWithRetractAggFunctionTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/PipelinedSubpartitionTest.java | {
"start": 22820,
"end": 23261
} | class ____ extends PipelinedSubpartition {
FailurePipelinedSubpartition(
int index, int receiverExclusiveBuffersPerChannel, ResultPartition parent) {
super(index, receiverExclusiveBuffersPerChannel, Integer.MAX_VALUE, parent);
}
@Override
Throwable getFailureCause() {
return new RuntimeException("Expected test exception");
}
}
}
| FailurePipelinedSubpartition |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/DurationStatisticSummary.java | {
"start": 1563,
"end": 4659
} | class ____ implements Serializable {
private static final long serialVersionUID = 6776381340896518486L;
/** Statistic key. */
private final String key;
/** Are these success or failure statistics. */
private final boolean success;
/** Count of operation invocations. */
private final long count;
/** Max duration; -1 if unknown. */
private final long max;
/** Min duration; -1 if unknown. */
private final long min;
/** Mean duration -may be null. */
private final MeanStatistic mean;
/**
* Constructor.
* @param key Statistic key.
* @param success Are these success or failure statistics.
* @param count Count of operation invocations.
* @param max Max duration; -1 if unknown.
* @param min Min duration; -1 if unknown.
* @param mean Mean duration -may be null. (will be cloned)
*/
public DurationStatisticSummary(final String key,
final boolean success,
final long count,
final long max,
final long min,
@Nullable final MeanStatistic mean) {
this.key = key;
this.success = success;
this.count = count;
this.max = max;
this.min = min;
this.mean = mean == null ? null : mean.clone();
}
public String getKey() {
return key;
}
public boolean isSuccess() {
return success;
}
public long getCount() {
return count;
}
public long getMax() {
return max;
}
public long getMin() {
return min;
}
public MeanStatistic getMean() {
return mean;
}
@Override
public String toString() {
return "DurationStatisticSummary{" +
"key='" + key + '\'' +
", success=" + success +
", counter=" + count +
", max=" + max +
", mean=" + mean +
'}';
}
/**
* Fetch the duration timing summary of success or failure operations
* from an IO Statistics source.
* If the duration key is unknown, the summary will be incomplete.
* @param source source of data
* @param key duration statistic key
* @param success fetch success statistics, or if false, failure stats.
* @return a summary of the statistics.
*/
public static DurationStatisticSummary fetchDurationSummary(
IOStatistics source,
String key,
boolean success) {
String fullkey = success ? key : key + SUFFIX_FAILURES;
return new DurationStatisticSummary(key, success,
source.counters().getOrDefault(fullkey, 0L),
source.maximums().getOrDefault(fullkey + SUFFIX_MAX, -1L),
source.minimums().getOrDefault(fullkey + SUFFIX_MIN, -1L),
source.meanStatistics()
.get(fullkey + SUFFIX_MEAN));
}
/**
* Fetch the duration timing summary from an IOStatistics source.
* If the duration key is unknown, the summary will be incomplete.
* @param source source of data
* @param key duration statistic key
* @return a summary of the statistics.
*/
public static DurationStatisticSummary fetchSuccessSummary(
IOStatistics source,
String key) {
return fetchDurationSummary(source, key, true);
}
}
| DurationStatisticSummary |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/authentication/dao/DaoAuthenticationProviderTests.java | {
"start": 27377,
"end": 27831
} | class ____ implements UserDetailsService {
private String password = "koala";
@Override
public UserDetails loadUserByUsername(String username) {
if ("rod".equals(username)) {
return new User("rod", this.password, true, true, true, true, ROLES_12);
}
throw new UsernameNotFoundException("Could not find: " + username);
}
void setPassword(String password) {
this.password = password;
}
}
private | MockUserDetailsServiceUserRod |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/SecureDataNodeStarter.java | {
"start": 1922,
"end": 7412
} | class ____ {
private final boolean isSaslEnabled;
private final boolean isRpcPortPrivileged;
private final boolean isHttpPortPrivileged;
private final ServerSocket streamingSocket;
private final ServerSocketChannel httpServerSocket;
public SecureResources(ServerSocket streamingSocket, ServerSocketChannel
httpServerSocket, boolean saslEnabled, boolean rpcPortPrivileged,
boolean httpPortPrivileged) {
this.streamingSocket = streamingSocket;
this.httpServerSocket = httpServerSocket;
this.isSaslEnabled = saslEnabled;
this.isRpcPortPrivileged = rpcPortPrivileged;
this.isHttpPortPrivileged = httpPortPrivileged;
}
public ServerSocket getStreamingSocket() { return streamingSocket; }
public ServerSocketChannel getHttpServerChannel() {
return httpServerSocket;
}
public boolean isSaslEnabled() {
return isSaslEnabled;
}
public boolean isRpcPortPrivileged() {
return isRpcPortPrivileged;
}
public boolean isHttpPortPrivileged() {
return isHttpPortPrivileged;
}
}
private String [] args;
private SecureResources resources;
@Override
public void init(DaemonContext context) throws Exception {
System.err.println("Initializing secure datanode resources");
// Create a new HdfsConfiguration object to ensure that the configuration in
// hdfs-site.xml is picked up.
Configuration conf = new HdfsConfiguration();
// Stash command-line arguments for regular datanode
args = context.getArguments();
resources = getSecureResources(conf);
}
@Override
public void start() throws Exception {
System.err.println("Starting regular datanode initialization");
DataNode.secureMain(args, resources);
}
@Override public void destroy() {}
@Override public void stop() throws Exception { /* Nothing to do */ }
/**
* Acquire privileged resources (i.e., the privileged ports) for the data
* node. The privileged resources consist of the port of the RPC server and
* the port of HTTP (not HTTPS) server.
*/
@VisibleForTesting
public static SecureResources getSecureResources(Configuration conf)
throws Exception {
HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
boolean isSaslEnabled =
DataTransferSaslUtil.getSaslPropertiesResolver(conf) != null;
boolean isRpcPrivileged;
boolean isHttpPrivileged = false;
System.err.println("isSaslEnabled:" + isSaslEnabled);
// Obtain secure port for data streaming to datanode
InetSocketAddress streamingAddr = DataNode.getStreamingAddr(conf);
int socketWriteTimeout = conf.getInt(
DFSConfigKeys.DFS_DATANODE_SOCKET_WRITE_TIMEOUT_KEY,
HdfsConstants.WRITE_TIMEOUT);
int backlogLength = conf.getInt(
CommonConfigurationKeysPublic.IPC_SERVER_LISTEN_QUEUE_SIZE_KEY,
CommonConfigurationKeysPublic.IPC_SERVER_LISTEN_QUEUE_SIZE_DEFAULT);
ServerSocket ss = (socketWriteTimeout > 0) ?
ServerSocketChannel.open().socket() : new ServerSocket();
try {
ss.bind(streamingAddr, backlogLength);
} catch (BindException e) {
BindException newBe = appendMessageToBindException(e,
streamingAddr.toString());
throw newBe;
}
// Check that we got the port we need
if (ss.getLocalPort() != streamingAddr.getPort()) {
throw new RuntimeException(
"Unable to bind on specified streaming port in secure "
+ "context. Needed " + streamingAddr.getPort() + ", got "
+ ss.getLocalPort());
}
isRpcPrivileged = SecurityUtil.isPrivilegedPort(ss.getLocalPort());
System.err.println("Opened streaming server at " + streamingAddr);
// Bind a port for the web server. The code intends to bind HTTP server to
// privileged port only, as the client can authenticate the server using
// certificates if they are communicating through SSL.
final ServerSocketChannel httpChannel;
if (policy.isHttpEnabled()) {
httpChannel = ServerSocketChannel.open();
InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
try {
httpChannel.socket().bind(infoSocAddr);
} catch (BindException e) {
BindException newBe = appendMessageToBindException(e,
infoSocAddr.toString());
throw newBe;
}
InetSocketAddress localAddr = (InetSocketAddress) httpChannel.socket()
.getLocalSocketAddress();
if (localAddr.getPort() != infoSocAddr.getPort()) {
throw new RuntimeException("Unable to bind on specified info port in " +
"secure context. Needed " + infoSocAddr.getPort() + ", got " +
ss.getLocalPort());
}
System.err.println("Successfully obtained privileged resources (streaming port = "
+ ss + " ) (http listener port = " + localAddr.getPort() +")");
isHttpPrivileged = SecurityUtil.isPrivilegedPort(localAddr.getPort());
System.err.println("Opened info server at " + infoSocAddr);
} else {
httpChannel = null;
}
return new SecureResources(ss, httpChannel, isSaslEnabled,
isRpcPrivileged, isHttpPrivileged);
}
private static BindException appendMessageToBindException(BindException e,
String msg) {
BindException newBe = new BindException(e.getMessage() + " " + msg);
newBe.initCause(e.getCause());
newBe.setStackTrace(e.getStackTrace());
return newBe;
}
}
| SecureResources |
java | apache__flink | flink-python/src/test/java/org/apache/flink/table/runtime/typeutils/serializers/python/ArrayDataSerializerTest.java | {
"start": 1610,
"end": 2315
} | class ____ extends SerializerTestBase<ArrayData> {
@Override
protected TypeSerializer<ArrayData> createSerializer() {
return new ArrayDataSerializer(new BigIntType(), LongSerializer.INSTANCE);
}
@Override
protected int getLength() {
return -1;
}
@Override
protected Class<ArrayData> getTypeClass() {
return ArrayData.class;
}
@Override
protected ArrayData[] getTestData() {
return new BinaryArrayData[] {BinaryArrayData.fromPrimitiveArray(new long[] {100L})};
}
}
/** Test for ArrayData with ArrayData data type. */
static | BaseArrayWithPrimitiveTest |
java | google__guice | core/test/com/googlecode/guice/PackageVisibilityTestModule.java | {
"start": 569,
"end": 634
} | class ____ implements PackagePrivateInterface {}
}
| PackagePrivateImpl |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/DeleteQuerySubqueryReferencingTargetPropertyTest.java | {
"start": 2636,
"end": 2810
} | class ____ {
@Id
@GeneratedValue
private Integer id;
@ManyToOne(optional = false)
private Root root;
public Detail(Root root) {
this.root = root;
}
}
}
| Detail |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/RememberMeConfigurerTests.java | {
"start": 22819,
"end": 23459
} | class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeHttpRequests((authorize) -> authorize
.anyRequest().hasRole("USER")
)
.sessionManagement((sessionManagement) -> sessionManagement
.maximumSessions(1)
)
.formLogin(withDefaults())
.rememberMe(withDefaults());
return http.build();
// @formatter:on
}
@Bean
UserDetailsService userDetailsService() {
return new InMemoryUserDetailsManager(PasswordEncodedUser.user());
}
}
@Configuration
@EnableWebSecurity
static | RememberMeMaximumSessionsConfig |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/AlterPartitionPropertiesOperation.java | {
"start": 1423,
"end": 2986
} | class ____ extends AlterPartitionOperation {
private final CatalogPartition catalogPartition;
public AlterPartitionPropertiesOperation(
ObjectIdentifier tableIdentifier,
CatalogPartitionSpec partitionSpec,
CatalogPartition catalogPartition) {
super(tableIdentifier, partitionSpec);
this.catalogPartition = catalogPartition;
}
public CatalogPartition getCatalogPartition() {
return catalogPartition;
}
@Override
public String asSummaryString() {
String spec = OperationUtils.formatPartitionSpec(partitionSpec);
String properties = OperationUtils.formatProperties(catalogPartition.getProperties());
return String.format(
"ALTER TABLE %s PARTITION (%s) SET (%s)",
tableIdentifier.asSummaryString(), spec, properties);
}
@Override
public TableResultInternal execute(Context ctx) {
Catalog catalog =
ctx.getCatalogManager()
.getCatalogOrThrowException(getTableIdentifier().getCatalogName());
try {
catalog.alterPartition(
getTableIdentifier().toObjectPath(),
getPartitionSpec(),
getCatalogPartition(),
ignoreIfTableNotExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (Exception e) {
throw new TableException(String.format("Could not execute %s", asSummaryString()), e);
}
}
}
| AlterPartitionPropertiesOperation |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/MockStorageInterface.java | {
"start": 12058,
"end": 17252
} | class ____ implements CloudBlobWrapper {
protected final URI uri;
protected HashMap<String, String> metadata =
new HashMap<String, String>();
protected BlobProperties properties;
protected MockCloudBlobWrapper(URI uri, HashMap<String, String> metadata,
int length) {
this.uri = uri;
this.metadata = metadata;
this.properties = new BlobProperties();
this.properties=updateLastModifed(this.properties);
this.properties=updateLength(this.properties,length);
}
protected BlobProperties updateLastModifed(BlobProperties properties){
try{
Method setLastModified =properties.getClass().
getDeclaredMethod("setLastModified", Date.class);
setLastModified.setAccessible(true);
setLastModified.invoke(this.properties,
Calendar.getInstance(TimeZone.getTimeZone("UTC")).getTime());
}catch(Exception e){
throw new RuntimeException(e);
}
return properties;
}
protected BlobProperties updateLength(BlobProperties properties,int length) {
try{
Method setLength =properties.getClass().
getDeclaredMethod("setLength", long.class);
setLength.setAccessible(true);
setLength.invoke(this.properties, length);
}catch (Exception e){
throw new RuntimeException(e);
}
return properties;
}
protected void refreshProperties(boolean getMetadata) {
if (backingStore.exists(convertUriToDecodedString(uri))) {
byte[] content = backingStore.getContent(convertUriToDecodedString(uri));
properties = new BlobProperties();
this.properties=updateLastModifed(this.properties);
this.properties=updateLength(this.properties, content.length);
if (getMetadata) {
metadata = backingStore.getMetadata(convertUriToDecodedString(uri));
}
}
}
@Override
public CloudBlobContainer getContainer() throws URISyntaxException,
StorageException {
return null;
}
@Override
public CloudBlobDirectory getParent() throws URISyntaxException,
StorageException {
return null;
}
@Override
public URI getUri() {
return uri;
}
@Override
public HashMap<String, String> getMetadata() {
return metadata;
}
@Override
public void setMetadata(HashMap<String, String> metadata) {
this.metadata = metadata;
}
@Override
public void startCopyFromBlob(CloudBlobWrapper sourceBlob, BlobRequestOptions options,
OperationContext opContext, boolean overwriteDestination) throws StorageException, URISyntaxException {
if (!overwriteDestination && backingStore.exists(convertUriToDecodedString(uri))) {
throw new StorageException("BlobAlreadyExists",
"The blob already exists.",
HttpURLConnection.HTTP_CONFLICT,
null,
null);
}
backingStore.copy(convertUriToDecodedString(sourceBlob.getUri()), convertUriToDecodedString(uri));
//TODO: set the backingStore.properties.CopyState and
// update azureNativeFileSystemStore.waitForCopyToComplete
}
@Override
public CopyState getCopyState() {
return this.properties.getCopyState();
}
@Override
public void delete(OperationContext opContext, SelfRenewingLease lease)
throws StorageException {
backingStore.delete(convertUriToDecodedString(uri));
}
@Override
public boolean exists(OperationContext opContext) throws StorageException {
return backingStore.exists(convertUriToDecodedString(uri));
}
@Override
public void downloadAttributes(OperationContext opContext)
throws StorageException {
refreshProperties(true);
}
@Override
public BlobProperties getProperties() {
return properties;
}
@Override
public InputStream openInputStream(BlobRequestOptions options,
OperationContext opContext) throws StorageException {
return new ByteArrayInputStream(
backingStore.getContent(convertUriToDecodedString(uri)));
}
@Override
public void uploadMetadata(OperationContext opContext)
throws StorageException {
backingStore.setMetadata(convertUriToDecodedString(uri), metadata);
}
@Override
public void downloadRange(long offset, long length, OutputStream os,
BlobRequestOptions options, OperationContext opContext)
throws StorageException {
if (offset < 0 || length <= 0) {
throw new IndexOutOfBoundsException();
}
if (!backingStore.exists(convertUriToDecodedString(uri))) {
throw new StorageException("BlobNotFound",
"Resource does not exist.",
HttpURLConnection.HTTP_NOT_FOUND,
null,
null);
}
byte[] content = backingStore.getContent(convertUriToDecodedString(uri));
try {
os.write(content, (int) offset, (int) length);
} catch (IOException e) {
throw new StorageException("Unknown error", "Unexpected error", e);
}
}
}
| MockCloudBlobWrapper |
java | greenrobot__EventBus | EventBusTest/src/org/greenrobot/eventbus/indexed/EventBusAndroidOrderTestWithIndex.java | {
"start": 107,
"end": 308
} | class ____ extends EventBusAndroidOrderTest {
@Override
public void setUp() throws Exception {
eventBus = Indexed.build();
super.setUp();
}
}
| EventBusAndroidOrderTestWithIndex |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_hasSizeLessThanOrEqualTo_Test.java | {
"start": 806,
"end": 1186
} | class ____ extends ObjectArrayAssertBaseTest {
@Override
protected ObjectArrayAssert<Object> invoke_api_method() {
return assertions.hasSizeLessThanOrEqualTo(6);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertHasSizeLessThanOrEqualTo(getInfo(assertions), getActual(assertions), 6);
}
}
| ObjectArrayAssert_hasSizeLessThanOrEqualTo_Test |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/handler/job/rescaling/RescalingHandlers.java | {
"start": 3801,
"end": 5273
} | class ____
extends StatusHandler<
RestfulGateway, AsynchronousOperationInfo, RescalingStatusMessageParameters> {
public RescalingStatusHandler(
GatewayRetriever<? extends RestfulGateway> leaderRetriever,
Duration timeout,
Map<String, String> responseHeaders) {
super(leaderRetriever, timeout, responseHeaders, RescalingStatusHeaders.getInstance());
}
@Override
public CompletableFuture<AsynchronousOperationResult<AsynchronousOperationInfo>>
handleRequest(
@Nonnull final HandlerRequest<EmptyRequestBody> request,
@Nonnull final RestfulGateway gateway)
throws RestHandlerException {
throw featureDisabledException();
}
@Override
protected AsynchronousJobOperationKey getOperationKey(
HandlerRequest<EmptyRequestBody> request) {
throw new UnsupportedOperationException();
}
@Override
protected AsynchronousOperationInfo exceptionalOperationResultResponse(
Throwable throwable) {
throw new UnsupportedOperationException();
}
@Override
protected AsynchronousOperationInfo operationResultResponse(Acknowledge operationResult) {
throw new UnsupportedOperationException();
}
}
}
| RescalingStatusHandler |
java | quarkusio__quarkus | extensions/vertx/deployment/src/test/java/io/quarkus/vertx/CodecRegistrationTest.java | {
"start": 9017,
"end": 9236
} | class ____ {
private final String name;
CustomType2(String name) {
this.name = name;
}
public String getName() {
return name;
}
}
static | CustomType2 |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/common/io/OutputFormatBaseTest.java | {
"start": 1579,
"end": 10256
} | class ____ {
private static final Duration DEFAULT_MAX_CONCURRENT_REQUESTS_TIMEOUT =
Duration.ofMillis(Long.MAX_VALUE);
@Test
void testSuccessfulWrite() throws Exception {
try (TestOutputFormat testOutputFormat = createOpenedTestOutputFormat()) {
testOutputFormat.enqueueCompletableFuture(CompletableFuture.completedFuture(null));
final int originalPermits = testOutputFormat.getAvailablePermits();
assertThat(originalPermits).isPositive();
assertThat(testOutputFormat.getAcquiredPermits()).isZero();
testOutputFormat.writeRecord("hello");
assertThat(testOutputFormat.getAvailablePermits()).isEqualTo(originalPermits);
assertThat(testOutputFormat.getAcquiredPermits()).isZero();
}
}
@Test
void testThrowErrorOnClose() throws Exception {
TestOutputFormat testOutputFormat = createTestOutputFormat();
testOutputFormat.open(FirstAttemptInitializationContext.of(1, 1));
Exception cause = new RuntimeException();
testOutputFormat.enqueueCompletableFuture(FutureUtils.completedExceptionally(cause));
testOutputFormat.writeRecord("none");
assertThatThrownBy(testOutputFormat::close)
.isInstanceOf(IOException.class)
.hasCauseReference(cause);
}
@Test
void testThrowErrorOnWrite() throws Exception {
try (TestOutputFormat testOutputFormat = createOpenedTestOutputFormat()) {
Exception cause = new RuntimeException();
testOutputFormat.enqueueCompletableFuture(FutureUtils.completedExceptionally(cause));
testOutputFormat.writeRecord("none");
// should fail because the first write failed and the second will check for asynchronous
// errors (throwable set by the async callback)
assertThatThrownBy(
() -> testOutputFormat.writeRecord("none"),
"Sending of second value should have failed.")
.isInstanceOf(IOException.class)
.hasCauseReference(cause);
assertThat(testOutputFormat.getAcquiredPermits()).isZero();
}
}
@Test
void testWaitForPendingUpdatesOnClose() throws Exception {
try (TestOutputFormat testOutputFormat = createOpenedTestOutputFormat()) {
CompletableFuture<Void> completableFuture = new CompletableFuture<>();
testOutputFormat.enqueueCompletableFuture(completableFuture);
testOutputFormat.writeRecord("hello");
assertThat(testOutputFormat.getAcquiredPermits()).isOne();
CheckedThread checkedThread =
new CheckedThread("Flink-OutputFormatBaseTest") {
@Override
public void go() throws Exception {
testOutputFormat.close();
}
};
checkedThread.start();
while (checkedThread.getState() != Thread.State.TIMED_WAITING) {
Thread.sleep(5);
}
assertThat(testOutputFormat.getAcquiredPermits()).isOne();
// start writing
completableFuture.complete(null);
// wait for the close
checkedThread.sync();
assertThat(testOutputFormat.getAcquiredPermits()).isZero();
}
}
@Test
void testReleaseOnSuccess() throws Exception {
try (TestOutputFormat openedTestOutputFormat = createOpenedTestOutputFormat()) {
assertThat(openedTestOutputFormat.getAvailablePermits()).isOne();
assertThat(openedTestOutputFormat.getAcquiredPermits()).isZero();
CompletableFuture<Void> completableFuture = new CompletableFuture<>();
openedTestOutputFormat.enqueueCompletableFuture(completableFuture);
openedTestOutputFormat.writeRecord("hello");
assertThat(openedTestOutputFormat.getAvailablePermits()).isZero();
assertThat(openedTestOutputFormat.getAcquiredPermits()).isOne();
// start writing
completableFuture.complete(null);
assertThat(openedTestOutputFormat.getAvailablePermits()).isOne();
assertThat(openedTestOutputFormat.getAcquiredPermits()).isZero();
}
}
@Test
void testReleaseOnFailure() throws Exception {
TestOutputFormat testOutputFormat = createOpenedTestOutputFormat();
assertThat(testOutputFormat.getAvailablePermits()).isOne();
assertThat(testOutputFormat.getAcquiredPermits()).isZero();
CompletableFuture<Void> completableFuture = new CompletableFuture<>();
testOutputFormat.enqueueCompletableFuture(completableFuture);
testOutputFormat.writeRecord("none");
assertThat(testOutputFormat.getAvailablePermits()).isZero();
assertThat(testOutputFormat.getAcquiredPermits()).isOne();
completableFuture.completeExceptionally(new RuntimeException());
assertThat(testOutputFormat.getAvailablePermits()).isOne();
assertThat(testOutputFormat.getAcquiredPermits()).isZero();
assertThatThrownBy(testOutputFormat::close);
}
@Test
void testReleaseOnThrowingSend() throws Exception {
Function<String, CompletionStage<Void>> failingSendFunction =
ignoredRecord -> {
throw new RuntimeException("expected");
};
try (TestOutputFormat testOutputFormat =
createOpenedTestOutputFormat(failingSendFunction)) {
assertThat(testOutputFormat.getAvailablePermits()).isOne();
assertThat(testOutputFormat.getAcquiredPermits()).isZero();
try {
testOutputFormat.writeRecord("none");
} catch (RuntimeException ignored) {
/// there is no point asserting on the exception that we have set,
// just avoid the test failure
}
// writeRecord acquires a permit that is then released when send fails
assertThat(testOutputFormat.getAvailablePermits()).isOne();
assertThat(testOutputFormat.getAcquiredPermits()).isZero();
}
}
@Test
void testMaxConcurrentRequestsReached() throws Exception {
try (TestOutputFormat testOutputFormat =
createOpenedTestOutputFormat(Duration.ofMillis(1))) {
CompletableFuture<Void> completableFuture = new CompletableFuture<>();
testOutputFormat.enqueueCompletableFuture(completableFuture);
testOutputFormat.enqueueCompletableFuture(completableFuture);
testOutputFormat.writeRecord("writeRecord #1");
// writing a second time while the first request is still not completed and the
// outputFormat is set for maxConcurrentRequests=1 will fail
assertThatThrownBy(
() -> testOutputFormat.writeRecord("writeRecord #2"),
"Sending value should have experienced a TimeoutException.")
.hasCauseInstanceOf(TimeoutException.class);
completableFuture.complete(null);
}
}
private static TestOutputFormat createTestOutputFormat() {
final TestOutputFormat testOutputFormat =
new TestOutputFormat(1, DEFAULT_MAX_CONCURRENT_REQUESTS_TIMEOUT);
testOutputFormat.configure(new Configuration());
return testOutputFormat;
}
private static TestOutputFormat createOpenedTestOutputFormat() throws Exception {
return createOpenedTestOutputFormat(DEFAULT_MAX_CONCURRENT_REQUESTS_TIMEOUT);
}
private static TestOutputFormat createOpenedTestOutputFormat(
Duration maxConcurrentRequestsTimeout) throws Exception {
final TestOutputFormat testOutputFormat =
new TestOutputFormat(1, maxConcurrentRequestsTimeout);
testOutputFormat.configure(new Configuration());
testOutputFormat.open(FirstAttemptInitializationContext.of(1, 1));
return testOutputFormat;
}
private static TestOutputFormat createOpenedTestOutputFormat(
Function<String, CompletionStage<Void>> sendFunction) throws Exception {
final TestOutputFormat testOutputFormat =
new TestOutputFormat(1, DEFAULT_MAX_CONCURRENT_REQUESTS_TIMEOUT, sendFunction);
testOutputFormat.configure(new Configuration());
testOutputFormat.open(FirstAttemptInitializationContext.of(1, 1));
return testOutputFormat;
}
private static | OutputFormatBaseTest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/seda/SedaDefaultBlockWhenFullTest.java | {
"start": 1389,
"end": 3410
} | class ____ extends ContextTestSupport {
private static final int QUEUE_SIZE = 1;
private static final int DELAY = 100;
private static final int DELAY_LONG = 1000;
private static final String MOCK_URI = "mock:blockWhenFullOutput";
private static final String SIZE_PARAM = "?size=%d";
private static final String BLOCK_WHEN_FULL_URI = "seda:blockingFoo" + String.format(SIZE_PARAM, QUEUE_SIZE) + "&timeout=0";
private static final String DEFAULT_URI
= "seda:foo" + String.format(SIZE_PARAM, QUEUE_SIZE) + "&blockWhenFull=false&timeout=0";
@Override
protected Registry createCamelRegistry() throws Exception {
SedaComponent component = new SedaComponent();
component.setDefaultBlockWhenFull(true);
Registry registry = super.createCamelRegistry();
registry.bind("seda", component);
return registry;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
fromF(BLOCK_WHEN_FULL_URI).delay(DELAY_LONG).to(MOCK_URI);
// use same delay as above on purpose
from(DEFAULT_URI).delay(DELAY).to("mock:whatever");
}
};
}
@Test
public void testSedaEndpoints() {
assertFalse(context.getEndpoint(DEFAULT_URI, SedaEndpoint.class).isBlockWhenFull());
assertTrue(context.getEndpoint(BLOCK_WHEN_FULL_URI, SedaEndpoint.class).isBlockWhenFull());
}
@Test
public void testSedaDefaultWhenFull() {
SedaEndpoint seda = context.getEndpoint(DEFAULT_URI, SedaEndpoint.class);
assertFalse(seda.isBlockWhenFull(),
"Seda Endpoint is not setting the correct default (should be false) for \"blockWhenFull\"");
Exception e = assertThrows(Exception.class, () -> sendTwoOverCapacity(DEFAULT_URI, QUEUE_SIZE),
"The route didn't fill the queue beyond capacity: test | SedaDefaultBlockWhenFullTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/core/Observable.java | {
"start": 377733,
"end": 577688
} | class ____ use to try and cast the upstream items into
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code clazz} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/map.html">ReactiveX operators documentation: Map</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U> Observable<U> cast(@NonNull Class<U> clazz) {
Objects.requireNonNull(clazz, "clazz is null");
return map(Functions.castFunction(clazz));
}
/**
* Collects items emitted by the finite source {@code Observable} into a single mutable data structure and returns
* a {@link Single} that emits this structure.
* <p>
* <img width="640" height="330" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/collect.2.v3.png" alt="">
* <p>
* This is a simplified version of {@code reduce} that does not need to return the state on each pass.
* <p>
* Note that this operator requires the upstream to signal {@code onComplete} for the accumulator object to
* be emitted. Sources that are infinite and never complete will never emit anything through this
* operator and an infinite source may lead to a fatal {@link OutOfMemoryError}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code collect} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U> the accumulator and output type
* @param initialItemSupplier
* the mutable data structure that will collect the items
* @param collector
* a function that accepts the {@code state} and an emitted item, and modifies the accumulator accordingly
* accordingly
* @return the new {@code Single} instance
* @throws NullPointerException if {@code initialItemSupplier} or {@code collector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/reduce.html">ReactiveX operators documentation: Reduce</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U> Single<U> collect(@NonNull Supplier<? extends U> initialItemSupplier, @NonNull BiConsumer<? super U, ? super T> collector) {
Objects.requireNonNull(initialItemSupplier, "initialItemSupplier is null");
Objects.requireNonNull(collector, "collector is null");
return RxJavaPlugins.onAssembly(new ObservableCollectSingle<>(this, initialItemSupplier, collector));
}
/**
* Collects items emitted by the finite source {@code Observable} into a single mutable data structure and returns
* a {@link Single} that emits this structure.
* <p>
* <img width="640" height="330" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/collectInto.o.v3.png" alt="">
* <p>
* This is a simplified version of {@code reduce} that does not need to return the state on each pass.
* <p>
* Note that this operator requires the upstream to signal {@code onComplete} for the accumulator object to
* be emitted. Sources that are infinite and never complete will never emit anything through this
* operator and an infinite source may lead to a fatal {@link OutOfMemoryError}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code collectInto} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U> the accumulator and output type
* @param initialItem
* the mutable data structure that will collect the items
* @param collector
* a function that accepts the {@code state} and an emitted item, and modifies the accumulator accordingly
* accordingly
* @return the new {@code Single} instance
* @throws NullPointerException if {@code initialItem} or {@code collector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/reduce.html">ReactiveX operators documentation: Reduce</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U> Single<U> collectInto(@NonNull U initialItem, @NonNull BiConsumer<? super U, ? super T> collector) {
Objects.requireNonNull(initialItem, "initialItem is null");
return collect(Functions.justSupplier(initialItem), collector);
}
/**
* Transform the current {@code Observable} by applying a particular {@link ObservableTransformer} function to it.
* <p>
* This method operates on the {@code Observable} itself whereas {@link #lift} operates on the {@link ObservableSource}'s
* {@link Observer}s.
* <p>
* If the operator you are creating is designed to act on the individual items emitted by the current
* {@code Observable}, use {@link #lift}. If your operator is designed to transform the current {@code Observable} as a whole
* (for instance, by applying a particular set of existing RxJava operators to it) use {@code compose}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code compose} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the value type of the output {@code ObservableSource}
* @param composer implements the function that transforms the current {@code Observable}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code composer} is {@code null}
* @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a>
*/
@SuppressWarnings("unchecked")
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> compose(@NonNull ObservableTransformer<? super T, ? extends R> composer) {
return wrap(((ObservableTransformer<T, R>) Objects.requireNonNull(composer, "composer is null")).apply(this));
}
/**
* Returns a new {@code Observable} that emits items resulting from applying a function that you supply to each item
* emitted by the current {@code Observable}, where that function returns an {@link ObservableSource}, and then emitting the items
* that result from concatenating those returned {@code ObservableSource}s.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMap.v3.png" alt="">
* <p>
* Note that there is no guarantee where the given {@code mapper} function will be executed; it could be on the subscribing thread,
* on the upstream thread signaling the new item to be mapped or on the thread where the inner source terminates. To ensure
* the {@code mapper} function is confined to a known thread, use the {@link #concatMap(Function, int, Scheduler)} overload.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the type of the inner {@code ObservableSource} sources and thus the output type
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns an
* {@code ObservableSource}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
* @see #concatMap(Function, int, Scheduler)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper) {
return concatMap(mapper, 2);
}
/**
* Returns a new {@code Observable} that emits items resulting from applying a function that you supply to each item
* emitted by the current {@code Observable}, where that function returns an {@link ObservableSource}, and then emitting the items
* that result from concatenating those returned {@code ObservableSource}s.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMap.v3.png" alt="">
* <p>
* Note that there is no guarantee where the given {@code mapper} function will be executed; it could be on the subscribing thread,
* on the upstream thread signaling the new item to be mapped or on the thread where the inner source terminates. To ensure
* the {@code mapper} function is confined to a known thread, use the {@link #concatMap(Function, int, Scheduler)} overload.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the type of the inner {@code ObservableSource} sources and thus the output type
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns an
* {@code ObservableSource}
* @param bufferSize
* the number of elements expected from the current {@code Observable} to be buffered
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
* @see #concatMap(Function, int, Scheduler)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
if (this instanceof ScalarSupplier) {
@SuppressWarnings("unchecked")
T v = ((ScalarSupplier<T>)this).get();
if (v == null) {
return empty();
}
return ObservableScalarXMap.scalarXMap(v, mapper);
}
return RxJavaPlugins.onAssembly(new ObservableConcatMap<>(this, mapper, bufferSize, ErrorMode.IMMEDIATE));
}
/**
* Returns a new {@code Observable} that emits items resulting from applying a function that you supply to each item
* emitted by the current {@code Observable}, where that function returns an {@link ObservableSource}, and then emitting the items
* that result from concatenating those returned {@code ObservableSource}s.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMap.v3.png" alt="">
* <p>
* The difference between {@link #concatMap(Function, int)} and this operator is that this operator guarantees the {@code mapper}
* function is executed on the specified scheduler.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMap} executes the given {@code mapper} function on the provided {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the type of the inner {@code ObservableSource} sources and thus the output type
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns an
* {@code ObservableSource}
* @param bufferSize
* the number of elements expected from the current {@code Observable} to be buffered
* @param scheduler
* the scheduler where the {@code mapper} function will be executed
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} or {@code scheduler} is {@code null}
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
* @since 3.0.0
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.CUSTOM)
@NonNull
public final <@NonNull R> Observable<R> concatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper, int bufferSize, @NonNull Scheduler scheduler) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
Objects.requireNonNull(scheduler, "scheduler is null");
return RxJavaPlugins.onAssembly(new ObservableConcatMapScheduler<>(this, mapper, bufferSize, ErrorMode.IMMEDIATE, scheduler));
}
/**
* Maps each of the items into an {@link ObservableSource}, subscribes to them one after the other,
* one at a time and emits their values in order
* while delaying any error from either this or any of the inner {@code ObservableSource}s
* till all of them terminate.
* <p>
* <img width="640" height="348" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapDelayError.o.png" alt="">
* <p>
* Note that there is no guarantee where the given {@code mapper} function will be executed; it could be on the subscribing thread,
* on the upstream thread signaling the new item to be mapped or on the thread where the inner source terminates. To ensure
* the {@code mapper} function is confined to a known thread, use the {@link #concatMapDelayError(Function, boolean, int, Scheduler)} overload.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the result value type
* @param mapper the function that maps the items of the current {@code Observable} into the inner {@code ObservableSource}s.
* @return the new {@code Observable} instance with the concatenation behavior
* @throws NullPointerException if {@code mapper} is {@code null}
* @see #concatMapDelayError(Function, boolean, int, Scheduler)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapDelayError(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper) {
return concatMapDelayError(mapper, true, bufferSize());
}
/**
* Maps each of the items into an {@link ObservableSource}, subscribes to them one after the other,
* one at a time and emits their values in order
* while delaying any error from either this or any of the inner {@code ObservableSource}s
* till all of them terminate.
* <p>
* <img width="640" height="348" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapDelayError.o.png" alt="">
* <p>
* Note that there is no guarantee where the given {@code mapper} function will be executed; it could be on the subscribing thread,
* on the upstream thread signaling the new item to be mapped or on the thread where the inner source terminates. To ensure
* the {@code mapper} function is confined to a known thread, use the {@link #concatMapDelayError(Function, boolean, int, Scheduler)} overload.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the result value type
* @param mapper the function that maps the items of the current {@code Observable} into the inner {@code ObservableSource}s.
* @param tillTheEnd
* if {@code true}, all errors from the outer and inner {@code ObservableSource} sources are delayed until the end,
* if {@code false}, an error from the main source is signaled when the current {@code Observable} source terminates
* @param bufferSize
* the number of elements expected from the current {@code Observable} to be buffered
* @return the new {@code Observable} instance with the concatenation behavior
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
* @see #concatMapDelayError(Function, boolean, int, Scheduler)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapDelayError(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper,
boolean tillTheEnd, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
if (this instanceof ScalarSupplier) {
@SuppressWarnings("unchecked")
T v = ((ScalarSupplier<T>)this).get();
if (v == null) {
return empty();
}
return ObservableScalarXMap.scalarXMap(v, mapper);
}
return RxJavaPlugins.onAssembly(new ObservableConcatMap<>(this, mapper, bufferSize, tillTheEnd ? ErrorMode.END : ErrorMode.BOUNDARY));
}
    /**
     * Maps each of the items into an {@link ObservableSource}, subscribes to them one after the other,
     * one at a time and emits their values in order
     * while delaying any error from either this or any of the inner {@code ObservableSource}s
     * till all of them terminate.
     * <p>
     * <img width="640" height="348" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapDelayError.o.png" alt="">
     * <p>
     * The difference between {@link #concatMapDelayError(Function, boolean, int)} and this operator is that this
     * operator guarantees the {@code mapper} function is executed on the specified scheduler.
     * <dl>
     *  <dt><b>Scheduler:</b></dt>
     *  <dd>{@code concatMapDelayError} executes the given {@code mapper} function on the provided {@link Scheduler}.</dd>
     * </dl>
     *
     * @param <R> the result value type
     * @param mapper the function that maps the items of the current {@code Observable} into the inner {@code ObservableSource}s.
     * @param tillTheEnd
     *            if {@code true}, all errors from the outer and inner {@code ObservableSource} sources are delayed until the end,
     *            if {@code false}, an error from the main source is signaled when the current {@code Observable} source terminates
     * @param bufferSize
     *            the number of elements expected from the current {@code Observable} to be buffered
     * @param scheduler
     *            the scheduler where the {@code mapper} function will be executed
     * @return the new {@code Observable} instance with the concatenation behavior
     * @throws NullPointerException if {@code mapper} or {@code scheduler} is {@code null}
     * @throws IllegalArgumentException if {@code bufferSize} is non-positive
     * @see #concatMapDelayError(Function, boolean, int)
     * @since 3.0.0
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.CUSTOM)
    @NonNull
    public final <@NonNull R> Observable<R> concatMapDelayError(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper,
            boolean tillTheEnd, int bufferSize, @NonNull Scheduler scheduler) {
        Objects.requireNonNull(mapper, "mapper is null");
        ObjectHelper.verifyPositive(bufferSize, "bufferSize");
        Objects.requireNonNull(scheduler, "scheduler is null");
        return RxJavaPlugins.onAssembly(new ObservableConcatMapScheduler<>(this, mapper, bufferSize, tillTheEnd ? ErrorMode.END : ErrorMode.BOUNDARY, scheduler));
    }
/**
* Maps a sequence of values into {@link ObservableSource}s and concatenates these {@code ObservableSource}s eagerly into a single
* {@code Observable} sequence.
* <p>
* Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the
* current {@code Observable}s. The operator buffers the values emitted by these {@code ObservableSource}s and then drains them in
* order, each one after the previous one completes.
* <p>
* <img width="640" height="361" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapEager.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This method does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param <R> the value type
* @param mapper the function that maps a sequence of values into a sequence of {@code ObservableSource}s that will be
* eagerly concatenated
* @return the new {@code Observable} instance with the specified concatenation behavior
* @throws NullPointerException if {@code mapper} is {@code null}
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapEager(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper) {
return concatMapEager(mapper, Integer.MAX_VALUE, bufferSize());
}
/**
* Maps a sequence of values into {@link ObservableSource}s and concatenates these {@code ObservableSource}s eagerly into a single
* {@code Observable} sequence.
* <p>
* Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the
* current {@code Observable}s. The operator buffers the values emitted by these {@code ObservableSource}s and then drains them in
* order, each one after the previous one completes.
* <p>
* <img width="640" height="361" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapEager.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This method does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param <R> the value type
* @param mapper the function that maps a sequence of values into a sequence of {@code ObservableSource}s that will be
* eagerly concatenated
* @param maxConcurrency the maximum number of concurrent subscribed {@code ObservableSource}s
* @param bufferSize hints about the number of expected items from each inner {@code ObservableSource}, must be positive
* @return the new {@code Observable} instance with the specified concatenation behavior
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code maxConcurrency} or {@code bufferSize} is non-positive
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapEager(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper,
int maxConcurrency, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(maxConcurrency, "maxConcurrency");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return RxJavaPlugins.onAssembly(new ObservableConcatMapEager<>(this, mapper, ErrorMode.IMMEDIATE, maxConcurrency, bufferSize));
}
/**
* Maps a sequence of values into {@link ObservableSource}s and concatenates these {@code ObservableSource}s eagerly into a single
* {@code Observable} sequence.
* <p>
* Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the
* current {@code Observable}s. The operator buffers the values emitted by these {@code ObservableSource}s and then drains them in
* order, each one after the previous one completes.
* <p>
* <img width="640" height="390" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapEagerDelayError.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This method does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param <R> the value type
* @param mapper the function that maps a sequence of values into a sequence of {@code ObservableSource}s that will be
* eagerly concatenated
* @param tillTheEnd
* if {@code true}, all errors from the outer and inner {@code ObservableSource} sources are delayed until the end,
* if {@code false}, an error from the main source is signaled when the current {@code Observable} source terminates
* @return the new {@code Observable} instance with the specified concatenation behavior
* @throws NullPointerException if {@code mapper} is {@code null}
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapEagerDelayError(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper,
boolean tillTheEnd) {
return concatMapEagerDelayError(mapper, tillTheEnd, Integer.MAX_VALUE, bufferSize());
}
/**
* Maps a sequence of values into {@link ObservableSource}s and concatenates these {@code ObservableSource}s eagerly into a single
* {@code Observable} sequence.
* <p>
* Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the
* current {@code Observable}s. The operator buffers the values emitted by these {@code ObservableSource}s and then drains them in
* order, each one after the previous one completes.
* <p>
* <img width="640" height="390" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapEagerDelayError.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This method does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param <R> the value type
* @param mapper the function that maps a sequence of values into a sequence of {@code ObservableSource}s that will be
* eagerly concatenated
* @param tillTheEnd
* if {@code true}, exceptions from the current {@code Observable} and all the inner {@code ObservableSource}s are delayed until
* all of them terminate, if {@code false}, exception from the current {@code Observable} is delayed until the
* currently running {@code ObservableSource} terminates
* @param maxConcurrency the maximum number of concurrent subscribed {@code ObservableSource}s
* @param bufferSize
* the number of elements expected from the current {@code Observable} and each inner {@code ObservableSource} to be buffered
* @return the new {@code Observable} instance with the specified concatenation behavior
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code maxConcurrency} or {@code bufferSize} is non-positive
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapEagerDelayError(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper,
boolean tillTheEnd, int maxConcurrency, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(maxConcurrency, "maxConcurrency");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return RxJavaPlugins.onAssembly(new ObservableConcatMapEager<>(this, mapper, tillTheEnd ? ErrorMode.END : ErrorMode.BOUNDARY, maxConcurrency, bufferSize));
}
/**
* Maps each element of the current {@code Observable} into {@link CompletableSource}s, subscribes to them one at a time in
* order and waits until the upstream and all {@code CompletableSource}s complete.
* <p>
* <img width="640" height="506" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapCompletable.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapCompletable} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.6 - experimental
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns a {@code CompletableSource}
* @return the new {@link Completable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Completable concatMapCompletable(@NonNull Function<? super T, ? extends CompletableSource> mapper) {
return concatMapCompletable(mapper, 2);
}
/**
* Maps each element of the current {@code Observable} into {@link CompletableSource}s, subscribes to them one at a time in
* order and waits until the upstream and all {@code CompletableSource}s complete.
* <p>
* <img width="640" height="506" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapCompletable.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapCompletable} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.6 - experimental
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns a {@code CompletableSource}
*
* @param capacityHint
* the number of upstream items expected to be buffered until the current {@code CompletableSource}, mapped from
* the current item, completes.
* @return the new {@link Completable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code capacityHint} is non-positive
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Completable concatMapCompletable(@NonNull Function<? super T, ? extends CompletableSource> mapper, int capacityHint) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(capacityHint, "capacityHint");
return RxJavaPlugins.onAssembly(new ObservableConcatMapCompletable<>(this, mapper, ErrorMode.IMMEDIATE, capacityHint));
}
/**
* Maps the upstream items into {@link CompletableSource}s and subscribes to them one after the
* other terminates, delaying all errors till both the current {@code Observable} and all
* inner {@code CompletableSource}s terminate.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMap.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapCompletableDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param mapper the function called with the upstream item and should return
* a {@code CompletableSource} to become the next source to
* be subscribed to
* @return the new {@link Completable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see #concatMapCompletable(Function, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Completable concatMapCompletableDelayError(@NonNull Function<? super T, ? extends CompletableSource> mapper) {
return concatMapCompletableDelayError(mapper, true, 2);
}
/**
* Maps the upstream items into {@link CompletableSource}s and subscribes to them one after the
* other terminates, optionally delaying all errors till both the current {@code Observable} and all
* inner {@code CompletableSource}s terminate.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMap.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapCompletableDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param mapper the function called with the upstream item and should return
* a {@code CompletableSource} to become the next source to
* be subscribed to
* @param tillTheEnd If {@code true}, errors from the current {@code Observable} or any of the
* inner {@code CompletableSource}s are delayed until all
* of them terminate. If {@code false}, an error from the current
* {@code Observable} is delayed until the current inner
* {@code CompletableSource} terminates and only then is
* it emitted to the downstream.
* @return the new {@link Completable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see #concatMapCompletable(Function)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Completable concatMapCompletableDelayError(@NonNull Function<? super T, ? extends CompletableSource> mapper, boolean tillTheEnd) {
return concatMapCompletableDelayError(mapper, tillTheEnd, 2);
}
/**
* Maps the upstream items into {@link CompletableSource}s and subscribes to them one after the
* other terminates, optionally delaying all errors till both the current {@code Observable} and all
* inner {@code CompletableSource}s terminate.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMap.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapCompletableDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param mapper the function called with the upstream item and should return
* a {@code CompletableSource} to become the next source to
* be subscribed to
* @param tillTheEnd If {@code true}, errors from the current {@code Observable} or any of the
* inner {@code CompletableSource}s are delayed until all
* of them terminate. If {@code false}, an error from the current
* {@code Observable} is delayed until the current inner
* {@code CompletableSource} terminates and only then is
* it emitted to the downstream.
* @param bufferSize The number of upstream items expected to be buffered so that fresh items are
* ready to be mapped when a previous {@code CompletableSource} terminates.
* @return the new {@link Completable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
* @see #concatMapCompletable(Function, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Completable concatMapCompletableDelayError(@NonNull Function<? super T, ? extends CompletableSource> mapper, boolean tillTheEnd, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return RxJavaPlugins.onAssembly(new ObservableConcatMapCompletable<>(this, mapper, tillTheEnd ? ErrorMode.END : ErrorMode.BOUNDARY, bufferSize));
}
/**
* Returns an {@code Observable} that concatenate each item emitted by the current {@code Observable} with the values in an
* {@link Iterable} corresponding to that item that is generated by a selector.
* <p>
* <img width="640" height="275" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapIterable.o.png" alt="">
*
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapIterable} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the type of item emitted by the resulting {@code Observable}
* @param mapper
* a function that returns an {@code Iterable} sequence of values for when given an item emitted by the
* current {@code Observable}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U> Observable<U> concatMapIterable(@NonNull Function<? super T, @NonNull ? extends Iterable<? extends U>> mapper) {
Objects.requireNonNull(mapper, "mapper is null");
return RxJavaPlugins.onAssembly(new ObservableFlattenIterable<>(this, mapper));
}
/**
* Maps the upstream items into {@link MaybeSource}s and subscribes to them one after the
* other succeeds or completes, emits their success value if available or terminates immediately if
* either the current {@code Observable} or the current inner {@code MaybeSource} fail.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapMaybe.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapMaybe} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code MaybeSource}s
* @param mapper the function called with the upstream item and should return
* a {@code MaybeSource} to become the next source to
* be subscribed to
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see #concatMapMaybeDelayError(Function)
* @see #concatMapMaybe(Function, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapMaybe(@NonNull Function<? super T, ? extends MaybeSource<? extends R>> mapper) {
return concatMapMaybe(mapper, 2);
}
/**
* Maps the upstream items into {@link MaybeSource}s and subscribes to them one after the
* other succeeds or completes, emits their success value if available or terminates immediately if
* either the current {@code Observable} or the current inner {@code MaybeSource} fail.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapMaybe.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapMaybe} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code MaybeSource}s
* @param mapper the function called with the upstream item and should return
* a {@code MaybeSource} to become the next source to
* be subscribed to
* @param bufferSize The number of upstream items expected to be buffered so that fresh items are
* ready to be mapped when a previous {@code MaybeSource} terminates.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
* @see #concatMapMaybe(Function)
* @see #concatMapMaybeDelayError(Function, boolean, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapMaybe(@NonNull Function<? super T, ? extends MaybeSource<? extends R>> mapper, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return RxJavaPlugins.onAssembly(new ObservableConcatMapMaybe<>(this, mapper, ErrorMode.IMMEDIATE, bufferSize));
}
/**
* Maps the upstream items into {@link MaybeSource}s and subscribes to them one after the
* other terminates, emits their success value if available and delaying all errors
* till both the current {@code Observable} and all inner {@code MaybeSource}s terminate.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapMaybeDelayError.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapMaybeDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code MaybeSource}s
* @param mapper the function called with the upstream item and should return
* a {@code MaybeSource} to become the next source to
* be subscribed to
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see #concatMapMaybe(Function)
* @see #concatMapMaybeDelayError(Function, boolean)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapMaybeDelayError(@NonNull Function<? super T, ? extends MaybeSource<? extends R>> mapper) {
return concatMapMaybeDelayError(mapper, true, 2);
}
/**
* Maps the upstream items into {@link MaybeSource}s and subscribes to them one after the
* other terminates, emits their success value if available and optionally delaying all errors
* till both the current {@code Observable} and all inner {@code MaybeSource}s terminate.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapMaybeDelayError.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapMaybeDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code MaybeSource}s
* @param mapper the function called with the upstream item and should return
* a {@code MaybeSource} to become the next source to
* be subscribed to
* @param tillTheEnd If {@code true}, errors from the current {@code Observable} or any of the
* inner {@code MaybeSource}s are delayed until all
* of them terminate. If {@code false}, an error from the current
* {@code Observable} is delayed until the current inner
* {@code MaybeSource} terminates and only then is
* it emitted to the downstream.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see #concatMapMaybe(Function, int)
* @see #concatMapMaybeDelayError(Function, boolean, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapMaybeDelayError(@NonNull Function<? super T, ? extends MaybeSource<? extends R>> mapper, boolean tillTheEnd) {
return concatMapMaybeDelayError(mapper, tillTheEnd, 2);
}
/**
* Maps the upstream items into {@link MaybeSource}s and subscribes to them one after the
* other terminates, emits their success value if available and optionally delaying all errors
* till both the current {@code Observable} and all inner {@code MaybeSource}s terminate.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapMaybeDelayError.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapMaybeDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code MaybeSource}s
* @param mapper the function called with the upstream item and should return
* a {@code MaybeSource} to become the next source to
* be subscribed to
* @param tillTheEnd If {@code true}, errors from the current {@code Observable} or any of the
* inner {@code MaybeSource}s are delayed until all
* of them terminate. If {@code false}, an error from the current
* {@code Observable} is delayed until the current inner
* {@code MaybeSource} terminates and only then is
* it emitted to the downstream.
* @param bufferSize The number of upstream items expected to be buffered so that fresh items are
* ready to be mapped when a previous {@code MaybeSource} terminates.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
* @see #concatMapMaybe(Function, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapMaybeDelayError(@NonNull Function<? super T, ? extends MaybeSource<? extends R>> mapper, boolean tillTheEnd, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return RxJavaPlugins.onAssembly(new ObservableConcatMapMaybe<>(this, mapper, tillTheEnd ? ErrorMode.END : ErrorMode.BOUNDARY, bufferSize));
}
/**
* Maps the upstream items into {@link SingleSource}s and subscribes to them one after the
* other succeeds, emits their success values or terminates immediately if
* either the current {@code Observable} or the current inner {@code SingleSource} fail.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapSingle.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapSingle} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code SingleSource}s
* @param mapper the function called with the upstream item and should return
* a {@code SingleSource} to become the next source to
* be subscribed to
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see #concatMapSingleDelayError(Function)
* @see #concatMapSingle(Function, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapSingle(@NonNull Function<? super T, ? extends SingleSource<? extends R>> mapper) {
return concatMapSingle(mapper, 2);
}
/**
* Maps the upstream items into {@link SingleSource}s and subscribes to them one after the
* other succeeds, emits their success values or terminates immediately if
* either the current {@code Observable} or the current inner {@code SingleSource} fail.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapSingle.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapSingle} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code SingleSource}s
* @param mapper the function called with the upstream item and should return
* a {@code SingleSource} to become the next source to
* be subscribed to
* @param bufferSize The number of upstream items expected to be buffered so that fresh items are
* ready to be mapped when a previous {@code SingleSource} terminates.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
* @see #concatMapSingle(Function)
* @see #concatMapSingleDelayError(Function, boolean, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapSingle(@NonNull Function<? super T, ? extends SingleSource<? extends R>> mapper, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return RxJavaPlugins.onAssembly(new ObservableConcatMapSingle<>(this, mapper, ErrorMode.IMMEDIATE, bufferSize));
}
/**
* Maps the upstream items into {@link SingleSource}s and subscribes to them one after the
* other succeeds or fails, emits their success values and delays all errors
* till both the current {@code Observable} and all inner {@code SingleSource}s terminate.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapSingleDelayError.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapSingleDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code SingleSource}s
* @param mapper the function called with the upstream item and should return
* a {@code SingleSource} to become the next source to
* be subscribed to
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see #concatMapSingle(Function)
* @see #concatMapSingleDelayError(Function, boolean)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapSingleDelayError(@NonNull Function<? super T, ? extends SingleSource<? extends R>> mapper) {
return concatMapSingleDelayError(mapper, true, 2);
}
/**
* Maps the upstream items into {@link SingleSource}s and subscribes to them one after the
* other succeeds or fails, emits their success values and optionally delays all errors
* till both the current {@code Observable} and all inner {@code SingleSource}s terminate.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapSingleDelayError.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapSingleDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code SingleSource}s
* @param mapper the function called with the upstream item and should return
* a {@code SingleSource} to become the next source to
* be subscribed to
* @param tillTheEnd If {@code true}, errors from the current {@code Observable} or any of the
* inner {@code SingleSource}s are delayed until all
* of them terminate. If {@code false}, an error from the current
* {@code Observable} is delayed until the current inner
* {@code SingleSource} terminates and only then is
* it emitted to the downstream.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see #concatMapSingle(Function, int)
* @see #concatMapSingleDelayError(Function, boolean, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapSingleDelayError(@NonNull Function<? super T, ? extends SingleSource<? extends R>> mapper, boolean tillTheEnd) {
return concatMapSingleDelayError(mapper, tillTheEnd, 2);
}
/**
* Maps the upstream items into {@link SingleSource}s and subscribes to them one after the
* other succeeds or fails, emits their success values and optionally delays errors
* till both the current {@code Observable} and all inner {@code SingleSource}s terminate.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concatMapSingleDelayError.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatMapSingleDelayError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.11 - experimental
* @param <R> the result type of the inner {@code SingleSource}s
* @param mapper the function called with the upstream item and should return
* a {@code SingleSource} to become the next source to
* be subscribed to
* @param tillTheEnd If {@code true}, errors from the current {@code Observable} or any of the
* inner {@code SingleSource}s are delayed until all
* of them terminate. If {@code false}, an error from the current
* {@code Observable} is delayed until the current inner
* {@code SingleSource} terminates and only then is
* it emitted to the downstream.
* @param bufferSize The number of upstream items expected to be buffered so that fresh items are
* ready to be mapped when a previous {@code SingleSource} terminates.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
* @see #concatMapSingle(Function, int)
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> concatMapSingleDelayError(@NonNull Function<? super T, ? extends SingleSource<? extends R>> mapper, boolean tillTheEnd, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return RxJavaPlugins.onAssembly(new ObservableConcatMapSingle<>(this, mapper, tillTheEnd ? ErrorMode.END : ErrorMode.BOUNDARY, bufferSize));
}
/**
* Returns an {@code Observable} that first emits the items emitted from the current {@code Observable}, then items
* from the {@code other} {@link ObservableSource} without interleaving them.
* <p>
* <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concat.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatWith} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param other
* an {@code ObservableSource} to be concatenated after the current
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code other} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/concat.html">ReactiveX operators documentation: Concat</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> concatWith(@NonNull ObservableSource<? extends T> other) {
Objects.requireNonNull(other, "other is null");
return concat(this, other);
}
/**
* Returns an {@code Observable} that emits the items from the current {@code Observable} followed by the success item or error event
* of the {@code other} {@link SingleSource}.
* <p>
* <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concat.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatWith} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.10 - experimental
* @param other the {@code SingleSource} whose signal should be emitted after the current {@code Observable} completes normally.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code other} is {@code null}
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> concatWith(@NonNull SingleSource<? extends T> other) {
Objects.requireNonNull(other, "other is null");
return RxJavaPlugins.onAssembly(new ObservableConcatWithSingle<>(this, other));
}
/**
* Returns an {@code Observable} that emits the items from the current {@code Observable} followed by the success item or terminal events
* of the other {@link MaybeSource}.
* <p>
* <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concat.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatWith} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.10 - experimental
* @param other the {@code MaybeSource} whose signal should be emitted after the current {@code Observable} completes normally.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code other} is {@code null}
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> concatWith(@NonNull MaybeSource<? extends T> other) {
Objects.requireNonNull(other, "other is null");
return RxJavaPlugins.onAssembly(new ObservableConcatWithMaybe<>(this, other));
}
/**
* Returns an {@code Observable} that emits items from the current {@code Observable} and when it completes normally, the
* other {@link CompletableSource} is subscribed to and the returned {@code Observable} emits its terminal events.
* <p>
* <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/concat.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code concatWith} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.1.10 - experimental
* @param other the {@code CompletableSource} to subscribe to once the current {@code Observable} completes normally
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code other} is {@code null}
* @since 2.2
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> concatWith(@NonNull CompletableSource other) {
Objects.requireNonNull(other, "other is null");
return RxJavaPlugins.onAssembly(new ObservableConcatWithCompletable<>(this, other));
}
/**
* Returns a {@link Single} that emits a {@link Boolean} that indicates whether the current {@code Observable} emitted a
* specified item.
* <p>
* <img width="640" height="320" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/contains.2.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code contains} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param item
* the item to search for in the emissions from the current {@code Observable}
* @return the new {@code Single} instance
* @throws NullPointerException if {@code item} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/contains.html">ReactiveX operators documentation: Contains</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<Boolean> contains(@NonNull Object item) {
Objects.requireNonNull(item, "item is null");
return any(Functions.equalsWith(item));
}
/**
* Returns a {@link Single} that counts the total number of items emitted by the current {@code Observable} and emits
* this count as a 64-bit {@link Long}.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/count.2.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code count} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @return the new {@code Single} instance
* @see <a href="http://reactivex.io/documentation/operators/count.html">ReactiveX operators documentation: Count</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<Long> count() {
return RxJavaPlugins.onAssembly(new ObservableCountSingle<>(this));
}
/**
* Returns an {@code Observable} that mirrors the current {@code Observable}, except that it drops items emitted by the
* current {@code Observable} that are followed by another item within a computed debounce duration
* denoted by an item emission or completion from a generated inner {@link ObservableSource} for that original item.
* <p>
* <img width="640" height="425" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/debounce.f.v3.png" alt="">
* <p>
* The delivery of the item happens on the thread of the first {@code onNext} or {@code onComplete}
* signal of the generated {@code ObservableSource} sequence,
* which if takes too long, a newer item may arrive from the upstream, causing the
* generated sequence to get disposed, which may also interrupt any downstream blocking operation
* (yielding an {@code InterruptedException}). It is recommended processing items
* that may take long time to be moved to another thread via {@link #observeOn} applied after
* {@code debounce} itself.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This version of {@code debounce} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the debounce value type (ignored)
* @param debounceIndicator
* function to return a sequence that indicates the throttle duration for each item via its own emission or completion
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code debounceIndicator} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/debounce.html">ReactiveX operators documentation: Debounce</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U> Observable<T> debounce(@NonNull Function<? super T, ? extends ObservableSource<U>> debounceIndicator) {
Objects.requireNonNull(debounceIndicator, "debounceIndicator is null");
return RxJavaPlugins.onAssembly(new ObservableDebounce<>(this, debounceIndicator));
}
/**
* Returns an {@code Observable} that mirrors the current {@code Observable}, except that it drops items emitted by the
* current {@code Observable} that are followed by newer items before a timeout value expires. The timer resets on
* each emission.
* <p>
* <em>Note:</em> If items keep being emitted by the current {@code Observable} faster than the timeout then no items
* will be emitted by the resulting {@code Observable}.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/debounce.v3.png" alt="">
* <p>
* Delivery of the item after the grace period happens on the {@code computation} {@link Scheduler}'s
* {@code Worker} which if takes too long, a newer item may arrive from the upstream, causing the
* {@code Worker}'s task to get disposed, which may also interrupt any downstream blocking operation
* (yielding an {@code InterruptedException}). It is recommended processing items
* that may take long time to be moved to another thread via {@link #observeOn} applied after
* {@code debounce} itself.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code debounce} operates by default on the {@code computation} {@code Scheduler}.</dd>
* </dl>
*
* @param timeout
* the length of the window of time that must pass after the emission of an item from the current
* {@code Observable} in which the {@code Observable} emits no items in order for the item to be emitted by the
* resulting {@code Observable}
* @param unit
* the unit of time for the specified {@code timeout}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code unit} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/debounce.html">ReactiveX operators documentation: Debounce</a>
* @see #throttleWithTimeout(long, TimeUnit)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.COMPUTATION)
@NonNull
public final Observable<T> debounce(long timeout, @NonNull TimeUnit unit) {
return debounce(timeout, unit, Schedulers.computation());
}
/**
* Returns an {@code Observable} that mirrors the current {@code Observable}, except that it drops items emitted by the
* current {@code Observable} that are followed by newer items before a timeout value expires on a specified
* {@link Scheduler}. The timer resets on each emission.
* <p>
* <em>Note:</em> If items keep being emitted by the current {@code Observable} faster than the timeout then no items
* will be emitted by the resulting {@code Observable}.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/debounce.s.v3.png" alt="">
* <p>
* Delivery of the item after the grace period happens on the given {@code Scheduler}'s
* {@code Worker} which if takes too long, a newer item may arrive from the upstream, causing the
* {@code Worker}'s task to get disposed, which may also interrupt any downstream blocking operation
* (yielding an {@code InterruptedException}). It is recommended processing items
* that may take long time to be moved to another thread via {@link #observeOn} applied after
* {@code debounce} itself.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>You specify which {@code Scheduler} this operator will use.</dd>
* </dl>
*
* @param timeout
* the time each item has to be "the most recent" of those emitted by the current {@code Observable} to
* ensure that it's not dropped
* @param unit
* the unit of time for the specified {@code timeout}
* @param scheduler
* the {@code Scheduler} to use internally to manage the timers that handle the timeout for each
* item
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code unit} or {@code scheduler} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/debounce.html">ReactiveX operators documentation: Debounce</a>
* @see #throttleWithTimeout(long, TimeUnit, Scheduler)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.CUSTOM)
@NonNull
public final Observable<T> debounce(long timeout, @NonNull TimeUnit unit, @NonNull Scheduler scheduler) {
Objects.requireNonNull(unit, "unit is null");
Objects.requireNonNull(scheduler, "scheduler is null");
return RxJavaPlugins.onAssembly(new ObservableDebounceTimed<>(this, timeout, unit, scheduler, null));
}
/**
* Returns an {@code Observable} that mirrors the current {@code Observable}, except that it drops items emitted by the
* current {@code Observable} that are followed by newer items before a timeout value expires on a specified
* {@link Scheduler}. The timer resets on each emission.
* <p>
* <em>Note:</em> If items keep being emitted by the current {@code Observable} faster than the timeout then no items
* will be emitted by the resulting {@code Observable}.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/debounce.s.v3.png" alt="">
* <p>
* Delivery of the item after the grace period happens on the given {@code Scheduler}'s
* {@code Worker} which if takes too long, a newer item may arrive from the upstream, causing the
* {@code Worker}'s task to get disposed, which may also interrupt any downstream blocking operation
* (yielding an {@code InterruptedException}). It is recommended processing items
* that may take long time to be moved to another thread via {@link #observeOn} applied after
* {@code debounce} itself.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>You specify which {@code Scheduler} this operator will use.</dd>
* </dl>
*
* @param timeout
* the time each item has to be "the most recent" of those emitted by the current {@code Observable} to
* ensure that it's not dropped
* @param unit
* the unit of time for the specified {@code timeout}
* @param scheduler
* the {@code Scheduler} to use internally to manage the timers that handle the timeout for each
* item
* @param onDropped
* called with the current entry when it has been replaced by a new one
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code unit} or {@code scheduler} is {@code null} } or {@code onDropped} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/debounce.html">ReactiveX operators documentation: Debounce</a>
* @see #throttleWithTimeout(long, TimeUnit, Scheduler, Consumer)
* @since 3.1.6 - Experimental
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.CUSTOM)
@NonNull
@Experimental
public final Observable<T> debounce(long timeout, @NonNull TimeUnit unit, @NonNull Scheduler scheduler, @NonNull Consumer<? super T> onDropped) {
Objects.requireNonNull(unit, "unit is null");
Objects.requireNonNull(scheduler, "scheduler is null");
Objects.requireNonNull(onDropped, "onDropped is null");
return RxJavaPlugins.onAssembly(new ObservableDebounceTimed<>(this, timeout, unit, scheduler, onDropped));
}
/**
* Returns an {@code Observable} that emits the items emitted by the current {@code Observable} or a specified default item
* if the current {@code Observable} is empty.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/defaultIfEmpty.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code defaultIfEmpty} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param defaultItem
* the item to emit if the current {@code Observable} emits no items
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code defaultItem} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/defaultifempty.html">ReactiveX operators documentation: DefaultIfEmpty</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> defaultIfEmpty(@NonNull T defaultItem) {
Objects.requireNonNull(defaultItem, "defaultItem is null");
return switchIfEmpty(just(defaultItem));
}
/**
* Returns an {@code Observable} that delays the emissions of the current {@code Observable} via
* a per-item derived {@link ObservableSource}'s item emission or termination, on a per source item basis.
* <p>
* <img width="640" height="450" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/delay.o.v3.png" alt="">
* <p>
* <em>Note:</em> the resulting {@code Observable} will immediately propagate any {@code onError} notification
* from the current {@code Observable}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This version of {@code delay} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the item delay value type (ignored)
* @param itemDelayIndicator
* a function that returns an {@code ObservableSource} for each item emitted by the current {@code Observable}, which is
* then used to delay the emission of that item by the resulting {@code Observable} until the {@code ObservableSource}
* returned from {@code itemDelay} emits an item
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code itemDelayIndicator} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/delay.html">ReactiveX operators documentation: Delay</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U> Observable<T> delay(@NonNull Function<? super T, ? extends ObservableSource<U>> itemDelayIndicator) {
Objects.requireNonNull(itemDelayIndicator, "itemDelayIndicator is null");
return flatMap(ObservableInternalHelper.itemDelay(itemDelayIndicator));
}
/**
* Returns an {@code Observable} that emits the items emitted by the current {@code Observable} shifted forward in time by a
* specified delay. An error notification from the current {@code Observable} is not delayed.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/delay.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This version of {@code delay} operates by default on the {@code computation} {@link Scheduler}.</dd>
* </dl>
*
* @param time
* the delay to shift the source by
* @param unit
* the {@link TimeUnit} in which {@code period} is defined
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code unit} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/delay.html">ReactiveX operators documentation: Delay</a>
* @see #delay(long, TimeUnit, boolean)
* @see #delay(long, TimeUnit, Scheduler)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.COMPUTATION)
@NonNull
public final Observable<T> delay(long time, @NonNull TimeUnit unit) {
return delay(time, unit, Schedulers.computation(), false);
}
/**
* Returns an {@code Observable} that emits the items emitted by the current {@code Observable} shifted forward in time by a
* specified delay. If {@code delayError} is {@code true}, error notifications will also be delayed.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/delay.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This version of {@code delay} operates by default on the {@code computation} {@link Scheduler}.</dd>
* </dl>
*
* @param time
* the delay to shift the source by
* @param unit
* the {@link TimeUnit} in which {@code period} is defined
* @param delayError
* if {@code true}, the upstream exception is signaled with the given delay, after all preceding normal elements,
* if {@code false}, the upstream exception is signaled immediately
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code unit} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/delay.html">ReactiveX operators documentation: Delay</a>
* @see #delay(long, TimeUnit, Scheduler, boolean)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.COMPUTATION)
@NonNull
public final Observable<T> delay(long time, @NonNull TimeUnit unit, boolean delayError) {
return delay(time, unit, Schedulers.computation(), delayError);
}
/**
* Returns an {@code Observable} that emits the items emitted by the current {@code Observable} shifted forward in time by a
* specified delay. An error notification from the current {@code Observable} is not delayed.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/delay.s.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>You specify which {@link Scheduler} this operator will use.</dd>
* </dl>
*
* @param time
* the delay to shift the source by
* @param unit
* the time unit of {@code delay}
* @param scheduler
* the {@code Scheduler} to use for delaying
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code unit} or {@code scheduler} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/delay.html">ReactiveX operators documentation: Delay</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.CUSTOM)
@NonNull
public final Observable<T> delay(long time, @NonNull TimeUnit unit, @NonNull Scheduler scheduler) {
return delay(time, unit, scheduler, false);
}
/**
* Returns an {@code Observable} that emits the items emitted by the current {@code Observable} shifted forward in time by a
* specified delay. If {@code delayError} is {@code true}, error notifications will also be delayed.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/delay.s.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>You specify which {@link Scheduler} this operator will use.</dd>
* </dl>
*
* @param time
* the delay to shift the source by
* @param unit
* the time unit of {@code delay}
* @param scheduler
* the {@code Scheduler} to use for delaying
* @param delayError
* if {@code true}, the upstream exception is signaled with the given delay, after all preceding normal elements,
* if {@code false}, the upstream exception is signaled immediately
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code unit} or {@code scheduler} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/delay.html">ReactiveX operators documentation: Delay</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.CUSTOM)
@NonNull
public final Observable<T> delay(long time, @NonNull TimeUnit unit, @NonNull Scheduler scheduler, boolean delayError) {
Objects.requireNonNull(unit, "unit is null");
Objects.requireNonNull(scheduler, "scheduler is null");
return RxJavaPlugins.onAssembly(new ObservableDelay<>(this, time, unit, scheduler, delayError));
}
/**
* Returns an {@code Observable} that delays the subscription to and emissions from the current {@code Observable} via
* {@link ObservableSource}s for the subscription itself and on a per-item basis.
* <p>
* <img width="640" height="450" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/delay.oo.v3.png" alt="">
* <p>
* <em>Note:</em> the resulting {@code Observable} will immediately propagate any {@code onError} notification
* from the current {@code Observable}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This version of {@code delay} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the subscription delay value type (ignored)
* @param <V>
* the item delay value type (ignored)
* @param subscriptionIndicator
* a function that returns an {@code ObservableSource} that triggers the subscription to the current {@code Observable}
* once it emits any item
* @param itemDelayIndicator
* a function that returns an {@code ObservableSource} for each item emitted by the current {@code Observable}, which is
* then used to delay the emission of that item by the resulting {@code Observable} until the {@code ObservableSource}
* returned from {@code itemDelay} emits an item
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code subscriptionIndicator} or {@code itemDelayIndicator} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/delay.html">ReactiveX operators documentation: Delay</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U, @NonNull V> Observable<T> delay(@NonNull ObservableSource<U> subscriptionIndicator,
@NonNull Function<? super T, ? extends ObservableSource<V>> itemDelayIndicator) {
return delaySubscription(subscriptionIndicator).delay(itemDelayIndicator);
}
/**
* Returns an {@code Observable} that delays the subscription to the current {@code Observable}
* until the other {@link ObservableSource} emits an element or completes normally.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/delaySubscription.o.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This method does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U> the value type of the other {@code Observable}, irrelevant
* @param subscriptionIndicator the other {@code ObservableSource} that should trigger the subscription
* to the current {@code Observable}.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code subscriptionIndicator} is {@code null}
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U> Observable<T> delaySubscription(@NonNull ObservableSource<U> subscriptionIndicator) {
Objects.requireNonNull(subscriptionIndicator, "subscriptionIndicator is null");
return RxJavaPlugins.onAssembly(new ObservableDelaySubscriptionOther<>(this, subscriptionIndicator));
}
/**
* Returns an {@code Observable} that delays the subscription to the current {@code Observable} by a given amount of time.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/delaySubscription.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>This version of {@code delaySubscription} operates by default on the {@code computation} {@link Scheduler}.</dd>
* </dl>
*
* @param time
* the time to delay the subscription
* @param unit
* the time unit of {@code delay}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code unit} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/delay.html">ReactiveX operators documentation: Delay</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.COMPUTATION)
@NonNull
public final Observable<T> delaySubscription(long time, @NonNull TimeUnit unit) {
return delaySubscription(time, unit, Schedulers.computation());
}
/**
* Returns an {@code Observable} that delays the subscription to the current {@code Observable} by a given amount of time,
* both waiting and subscribing on a given {@link Scheduler}.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/delaySubscription.s.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>You specify which {@code Scheduler} this operator will use.</dd>
* </dl>
*
* @param time
* the time to delay the subscription
* @param unit
* the time unit of {@code delay}
* @param scheduler
* the {@code Scheduler} on which the waiting and subscription will happen
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code unit} or {@code scheduler} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/delay.html">ReactiveX operators documentation: Delay</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.CUSTOM)
@NonNull
public final Observable<T> delaySubscription(long time, @NonNull TimeUnit unit, @NonNull Scheduler scheduler) {
return delaySubscription(timer(time, unit, scheduler));
}
/**
* Returns an {@code Observable} that reverses the effect of {@link #materialize materialize} by transforming the
* {@link Notification} objects extracted from the source items via a selector function
* into their respective {@link Observer} signal types.
* <p>
* <img width="640" height="335" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/dematerialize.v3.png" alt="">
* <p>
* The intended use of the {@code selector} function is to perform a
* type-safe identity mapping (see example) on a source that is already of type
* {@code Notification<T>}. The Java language doesn't allow
* limiting instance methods to a certain generic argument shape, therefore,
* a function is used to ensure the conversion remains type safe.
* <p>
* When the upstream signals an {@link Notification#createOnError(Throwable) onError} or
* {@link Notification#createOnComplete() onComplete} item, the
* returned {@code Observable} disposes of the flow and terminates with that type of terminal event:
* <pre><code>
* Observable.just(createOnNext(1), createOnComplete(), createOnNext(2))
* .doOnDispose(() -> System.out.println("Disposed!"));
* .dematerialize(notification -> notification)
* .test()
* .assertResult(1);
* </code></pre>
* If the upstream signals {@code onError} or {@code onComplete} directly, the flow is terminated
* with the same event.
* <pre><code>
* Observable.just(createOnNext(1), createOnNext(2))
* .dematerialize(notification -> notification)
* .test()
* .assertResult(1, 2);
* </code></pre>
* If this behavior is not desired, the completion can be suppressed by applying {@link #concatWith(ObservableSource)}
* with a {@link #never()} source.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code dematerialize} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* <p>History: 2.2.4 - experimental
*
* @param <R> the output value type
* @param selector function that returns the upstream item and should return a {@code Notification} to signal
* the corresponding {@code Observer} event to the downstream.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code selector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/materialize-dematerialize.html">ReactiveX operators documentation: Dematerialize</a>
* @since 3.0.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> dematerialize(@NonNull Function<? super T, Notification<R>> selector) {
Objects.requireNonNull(selector, "selector is null");
return RxJavaPlugins.onAssembly(new ObservableDematerialize<>(this, selector));
}
/**
* Returns an {@code Observable} that emits all items emitted by the current {@code Observable} that are distinct
* based on {@link Object#equals(Object)} comparison.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/distinct.v3.png" alt="">
* <p>
* It is recommended the elements' class {@code T} in the flow overrides the default {@code Object.equals()}
* and {@link Object#hashCode()} to provide meaningful comparison between items as the default Java
* implementation only considers reference equivalence.
* <p>
* By default, {@code distinct()} uses an internal {@link HashSet} per {@link Observer} to remember
* previously seen items and uses {@link java.util.Set#add(Object)} returning {@code false} as the
* indicator for duplicates.
* <p>
* Note that this internal {@code HashSet} may grow unbounded as items won't be removed from it by
* the operator. Therefore, using very long or infinite upstream (with very distinct elements) may lead
* to {@link OutOfMemoryError}.
* <p>
* Customizing the retention policy can happen only by providing a custom {@link java.util.Collection} implementation
* to the {@link #distinct(Function, Supplier)} overload.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code distinct} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @return the new {@code Observable} instance
* @see <a href="http://reactivex.io/documentation/operators/distinct.html">ReactiveX operators documentation: Distinct</a>
* @see #distinct(Function)
* @see #distinct(Function, Supplier)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> distinct() {
return distinct(Functions.identity(), Functions.createHashSet());
}
/**
* Returns an {@code Observable} that emits all items emitted by the current {@code Observable} that are distinct according
* to a key selector function and based on {@link Object#equals(Object)} comparison of the objects
* returned by the key selector function.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/distinct.key.v3.png" alt="">
* <p>
* It is recommended the keys' class {@code K} overrides the default {@code Object.equals()}
* and {@link Object#hashCode()} to provide meaningful comparison between the key objects as the default
* Java implementation only considers reference equivalence.
* <p>
* By default, {@code distinct()} uses an internal {@link HashSet} per {@link Observer} to remember
* previously seen keys and uses {@link java.util.Set#add(Object)} returning {@code false} as the
* indicator for duplicates.
* <p>
* Note that this internal {@code HashSet} may grow unbounded as keys won't be removed from it by
* the operator. Therefore, using very long or infinite upstream (with very distinct keys) may lead
* to {@link OutOfMemoryError}.
* <p>
* Customizing the retention policy can happen only by providing a custom {@link java.util.Collection} implementation
* to the {@link #distinct(Function, Supplier)} overload.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code distinct} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <K> the key type
* @param keySelector
* a function that projects an emitted item to a key value that is used to decide whether an item
* is distinct from another one or not
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code keySelector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/distinct.html">ReactiveX operators documentation: Distinct</a>
* @see #distinct(Function, Supplier)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull K> Observable<T> distinct(@NonNull Function<? super T, K> keySelector) {
return distinct(keySelector, Functions.createHashSet());
}
/**
* Returns an {@code Observable} that emits all items emitted by the current {@code Observable} that are distinct according
* to a key selector function and based on {@link Object#equals(Object)} comparison of the objects
* returned by the key selector function.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/distinct.key.v3.png" alt="">
* <p>
* It is recommended the keys' class {@code K} overrides the default {@code Object.equals()}
* and {@link Object#hashCode()} to provide meaningful comparison between the key objects as
* the default Java implementation only considers reference equivalence.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code distinct} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <K> the key type
* @param keySelector
* a function that projects an emitted item to a key value that is used to decide whether an item
* is distinct from another one or not
* @param collectionSupplier
* function called for each individual {@link Observer} to return a {@link Collection} subtype for holding the extracted
* keys and whose {@code add()} method's return indicates uniqueness.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code keySelector} or {@code collectionSupplier} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/distinct.html">ReactiveX operators documentation: Distinct</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull K> Observable<T> distinct(@NonNull Function<? super T, K> keySelector, @NonNull Supplier<? extends Collection<? super K>> collectionSupplier) {
Objects.requireNonNull(keySelector, "keySelector is null");
Objects.requireNonNull(collectionSupplier, "collectionSupplier is null");
return RxJavaPlugins.onAssembly(new ObservableDistinct<>(this, keySelector, collectionSupplier));
}
/**
* Returns an {@code Observable} that emits all items emitted by the current {@code Observable} that are distinct from their
* immediate predecessors based on {@link Object#equals(Object)} comparison.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/distinctUntilChanged.v3.png" alt="">
* <p>
* It is recommended the elements' class {@code T} in the flow overrides the default {@code Object.equals()} to provide
* meaningful comparison between items as the default Java implementation only considers reference equivalence.
* Alternatively, use the {@link #distinctUntilChanged(BiPredicate)} overload and provide a comparison function
* in case the class {@code T} can't be overridden with custom {@code equals()} or the comparison itself
* should happen on different terms or properties of the class {@code T}.
* <p>
* Note that the operator always retains the latest item from upstream regardless of the comparison result
* and uses it in the next comparison with the next upstream item.
* <p>
* Note that if element type {@code T} in the flow is mutable, the comparison of the previous and current
* item may yield unexpected results if the items are mutated externally. Common cases are mutable
* {@link CharSequence}s or {@link List}s where the objects will actually have the same
* references when they are modified and {@code distinctUntilChanged} will evaluate subsequent items as same.
* To avoid such situation, it is recommended that mutable data is converted to an immutable one,
* for example using {@code map(CharSequence::toString)} or {@code map(list -> Collections.unmodifiableList(new ArrayList<>(list)))}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code distinctUntilChanged} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @return the new {@code Observable} instance
* @see <a href="http://reactivex.io/documentation/operators/distinct.html">ReactiveX operators documentation: Distinct</a>
* @see #distinctUntilChanged(BiPredicate)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> distinctUntilChanged() {
return distinctUntilChanged(Functions.identity());
}
/**
* Returns an {@code Observable} that emits all items emitted by the current {@code Observable} that are distinct from their
* immediate predecessors, according to a key selector function and based on {@link Object#equals(Object)} comparison
* of those objects returned by the key selector function.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/distinctUntilChanged.key.v3.png" alt="">
* <p>
* It is recommended the keys' class {@code K} overrides the default {@code Object.equals()} to provide
* meaningful comparison between the key objects as the default Java implementation only considers reference equivalence.
* Alternatively, use the {@link #distinctUntilChanged(BiPredicate)} overload and provide a comparison function
* in case the class {@code K} can't be overridden with custom {@code equals()} or the comparison itself
* should happen on different terms or properties of the item class {@code T} (for which the keys can be
* derived via a similar selector).
* <p>
* Note that the operator always retains the latest key from upstream regardless of the comparison result
* and uses it in the next comparison with the next key derived from the next upstream item.
* <p>
* Note that if element type {@code T} in the flow is mutable, the comparison of the previous and current
* item may yield unexpected results if the items are mutated externally. Common cases are mutable
* {@link CharSequence}s or {@link List}s where the objects will actually have the same
* references when they are modified and {@code distinctUntilChanged} will evaluate subsequent items as same.
* To avoid such situation, it is recommended that mutable data is converted to an immutable one,
* for example using {@code map(CharSequence::toString)} or {@code map(list -> Collections.unmodifiableList(new ArrayList<>(list)))}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code distinctUntilChanged} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <K> the key type
* @param keySelector
* a function that projects an emitted item to a key value that is used to decide whether an item
* is distinct from another one or not
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code keySelector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/distinct.html">ReactiveX operators documentation: Distinct</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull K> Observable<T> distinctUntilChanged(@NonNull Function<? super T, K> keySelector) {
Objects.requireNonNull(keySelector, "keySelector is null");
return RxJavaPlugins.onAssembly(new ObservableDistinctUntilChanged<>(this, keySelector, ObjectHelper.equalsPredicate()));
}
/**
* Returns an {@code Observable} that emits all items emitted by the current {@code Observable} that are distinct from their
* immediate predecessors when compared with each other via the provided comparator function.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/distinctUntilChanged.v3.png" alt="">
* <p>
* Note that the operator always retains the latest item from upstream regardless of the comparison result
* and uses it in the next comparison with the next upstream item.
* <p>
* Note that if element type {@code T} in the flow is mutable, the comparison of the previous and current
* item may yield unexpected results if the items are mutated externally. Common cases are mutable
* {@link CharSequence}s or {@link List}s where the objects will actually have the same
* references when they are modified and {@code distinctUntilChanged} will evaluate subsequent items as same.
* To avoid such situation, it is recommended that mutable data is converted to an immutable one,
* for example using {@code map(CharSequence::toString)} or {@code map(list -> Collections.unmodifiableList(new ArrayList<>(list)))}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code distinctUntilChanged} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param comparer the function that receives the previous item and the current item and is
* expected to return {@code true} if the two are equal, thus skipping the current value.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code comparer} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/distinct.html">ReactiveX operators documentation: Distinct</a>
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> distinctUntilChanged(@NonNull BiPredicate<? super T, ? super T> comparer) {
Objects.requireNonNull(comparer, "comparer is null");
return RxJavaPlugins.onAssembly(new ObservableDistinctUntilChanged<>(this, Functions.identity(), comparer));
}
/**
* Calls the specified {@link Consumer} with the current item after this item has been emitted to the downstream.
* <p>
* Note that the {@code onAfterNext} action is shared between subscriptions and as such
* should be thread-safe.
* <p>
* <img width="640" height="360" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doAfterNext.o.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doAfterNext} does not operate by default on a particular {@link Scheduler}.</dd>
* <dt><b>Operator-fusion:</b></dt>
* <dd>This operator supports boundary-limited synchronous or asynchronous queue-fusion.</dd>
* </dl>
* <p>History: 2.0.1 - experimental
* @param onAfterNext the {@code Consumer} that will be called after emitting an item from upstream to the downstream
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onAfterNext} is {@code null}
* @since 2.1
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doAfterNext(@NonNull Consumer<? super T> onAfterNext) {
Objects.requireNonNull(onAfterNext, "onAfterNext is null");
return RxJavaPlugins.onAssembly(new ObservableDoAfterNext<>(this, onAfterNext));
}
/**
* Registers an {@link Action} to be called when the current {@code Observable} invokes either
* {@link Observer#onComplete onComplete} or {@link Observer#onError onError}.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doAfterTerminate.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doAfterTerminate} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onAfterTerminate
* an {@code Action} to be invoked after the current {@code Observable} finishes
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onAfterTerminate} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
* @see #doOnTerminate(Action)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doAfterTerminate(@NonNull Action onAfterTerminate) {
Objects.requireNonNull(onAfterTerminate, "onAfterTerminate is null");
return doOnEach(Functions.emptyConsumer(), Functions.emptyConsumer(), Functions.EMPTY_ACTION, onAfterTerminate);
}
/**
* Calls the specified action after the current {@code Observable} signals {@code onError} or {@code onCompleted} or gets disposed by
* the downstream.
* <p>In case of a race between a terminal event and a dispose call, the provided {@code onFinally} action
* is executed once per subscription.
* <p>Note that the {@code onFinally} action is shared between subscriptions and as such
* should be thread-safe.
* <p>
* <img width="640" height="282" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doFinally.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doFinally} does not operate by default on a particular {@link Scheduler}.</dd>
* <dt><b>Operator-fusion:</b></dt>
* <dd>This operator supports boundary-limited synchronous or asynchronous queue-fusion.</dd>
* </dl>
* <p>History: 2.0.1 - experimental
* @param onFinally the action called when the current {@code Observable} terminates or gets disposed
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onFinally} is {@code null}
* @since 2.1
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doFinally(@NonNull Action onFinally) {
Objects.requireNonNull(onFinally, "onFinally is null");
return RxJavaPlugins.onAssembly(new ObservableDoFinally<>(this, onFinally));
}
/**
* Calls the given shared {@link Action} if the downstream disposes the sequence.
* <p>
* The action is shared between subscriptions and thus may be called concurrently from multiple
* threads; the action must be thread safe.
* <p>
* If the action throws a runtime exception, that exception is rethrown by the {@code dispose()} call,
* sometimes as a {@link CompositeException} if there were multiple exceptions along the way.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnDispose.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnDispose} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onDispose
* the action that gets called when the current {@code Observable}'s {@link Disposable} is disposed
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onDispose} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doOnDispose(@NonNull Action onDispose) {
return doOnLifecycle(Functions.emptyConsumer(), onDispose);
}
/**
* Returns an {@code Observable} that invokes an {@link Action} when the current {@code Observable} calls {@code onComplete}.
* <p>
* <img width="640" height="358" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnComplete.o.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnComplete} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onComplete
* the action to invoke when the current {@code Observable} calls {@code onComplete}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onComplete} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doOnComplete(@NonNull Action onComplete) {
return doOnEach(Functions.emptyConsumer(), Functions.emptyConsumer(), onComplete, Functions.EMPTY_ACTION);
}
/**
* Calls the appropriate {@code onXXX} consumer (shared between all {@link Observer}s) whenever a signal with the same type
* passes through, before forwarding them to the downstream.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnEach.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnEach} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onNext the {@link Consumer} to invoke when the current {@code Observable} calls {@code onNext}
* @param onError the {@code Consumer} to invoke when the current {@code Observable} calls {@code onError}
* @param onComplete the {@link Action} to invoke when the current {@code Observable} calls {@code onComplete}
* @param onAfterTerminate the {@code Action} to invoke when the current {@code Observable} calls {@code onAfterTerminate}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onNext}, {@code onError}, {@code onComplete} or {@code onAfterTerminate} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
private Observable<T> doOnEach(@NonNull Consumer<? super T> onNext, @NonNull Consumer<? super Throwable> onError, @NonNull Action onComplete, @NonNull Action onAfterTerminate) {
Objects.requireNonNull(onNext, "onNext is null");
Objects.requireNonNull(onError, "onError is null");
Objects.requireNonNull(onComplete, "onComplete is null");
Objects.requireNonNull(onAfterTerminate, "onAfterTerminate is null");
return RxJavaPlugins.onAssembly(new ObservableDoOnEach<>(this, onNext, onError, onComplete, onAfterTerminate));
}
/**
* Returns an {@code Observable} that invokes a {@link Consumer} with the appropriate {@link Notification}
* object when the current {@code Observable} signals an item or terminates.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnEach.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnEach} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onNotification
* the action to invoke for each item emitted by the current {@code Observable}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onNotification} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doOnEach(@NonNull Consumer<? super Notification<T>> onNotification) {
Objects.requireNonNull(onNotification, "onNotification is null");
return doOnEach(
Functions.notificationOnNext(onNotification),
Functions.notificationOnError(onNotification),
Functions.notificationOnComplete(onNotification),
Functions.EMPTY_ACTION
);
}
/**
* Returns an {@code Observable} that forwards the items and terminal events of the current
* {@code Observable} to its {@link Observer}s and to the given shared {@code Observer} instance.
* <p>
* In case the {@code onError} of the supplied observer throws, the downstream will receive a composite
* exception containing the original exception and the exception thrown by {@code onError}. If either the
* {@code onNext} or the {@code onComplete} method of the supplied observer throws, the downstream will be
* terminated and will receive this thrown exception.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnEach.o.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnEach} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param observer
* the observer to be notified about {@code onNext}, {@code onError} and {@code onComplete} events on its
* respective methods before the actual downstream {@code Observer} gets notified.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code observer} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doOnEach(@NonNull Observer<? super T> observer) {
Objects.requireNonNull(observer, "observer is null");
return doOnEach(
ObservableInternalHelper.observerOnNext(observer),
ObservableInternalHelper.observerOnError(observer),
ObservableInternalHelper.observerOnComplete(observer),
Functions.EMPTY_ACTION);
}
/**
* Calls the given {@link Consumer} with the error {@link Throwable} if the current {@code Observable} failed before forwarding it to
* the downstream.
* <p>
* In case the {@code onError} action throws, the downstream will receive a composite exception containing
* the original exception and the exception thrown by {@code onError}.
* <p>
* <img width="640" height="355" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnError.o.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onError
* the action to invoke if the current {@code Observable} calls {@code onError}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onError} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doOnError(@NonNull Consumer<? super Throwable> onError) {
return doOnEach(Functions.emptyConsumer(), onError, Functions.EMPTY_ACTION, Functions.EMPTY_ACTION);
}
/**
* Calls the appropriate {@code onXXX} method (shared between all {@link Observer}s) for the lifecycle events of
* the sequence (subscription, disposal).
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnLifecycle.o.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnLifecycle} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onSubscribe
* a {@link Consumer} called with the {@link Disposable} sent via {@link Observer#onSubscribe(Disposable)}
* @param onDispose
* called when the downstream disposes the {@code Disposable} via {@code dispose()}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onSubscribe} or {@code onDispose} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doOnLifecycle(@NonNull Consumer<? super Disposable> onSubscribe, @NonNull Action onDispose) {
Objects.requireNonNull(onSubscribe, "onSubscribe is null");
Objects.requireNonNull(onDispose, "onDispose is null");
return RxJavaPlugins.onAssembly(new ObservableDoOnLifecycle<>(this, onSubscribe, onDispose));
}
/**
* Calls the given {@link Consumer} with the value emitted by the current {@code Observable} before forwarding it to the downstream.
* <p>
* <img width="640" height="360" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnNext.o.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnNext} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onNext
* the action to invoke when the current {@code Observable} calls {@code onNext}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onNext} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doOnNext(@NonNull Consumer<? super T> onNext) {
return doOnEach(onNext, Functions.emptyConsumer(), Functions.EMPTY_ACTION, Functions.EMPTY_ACTION);
}
/**
* Returns an {@code Observable} so that it invokes the given {@link Consumer} when the current {@code Observable} is subscribed from
* its {@link Observer}s. Each subscription will result in an invocation of the given action except when the
* current {@code Observable} is reference counted, in which case the current {@code Observable} will invoke
* the given action for the first subscription.
* <p>
* <img width="640" height="390" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnSubscribe.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnSubscribe} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onSubscribe
* the {@code Consumer} that gets called when an {@code Observer} subscribes to the current {@code Observable}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onSubscribe} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doOnSubscribe(@NonNull Consumer<? super Disposable> onSubscribe) {
return doOnLifecycle(onSubscribe, Functions.EMPTY_ACTION);
}
/**
* Returns an {@code Observable} so that it invokes an action when the current {@code Observable} calls {@code onComplete} or
* {@code onError}.
* <p>
* <img width="640" height="327" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doOnTerminate.o.png" alt="">
* <p>
* This differs from {@code doAfterTerminate} in that this happens <em>before</em> the {@code onComplete} or
* {@code onError} notification.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code doOnTerminate} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onTerminate
* the action to invoke when the current {@code Observable} calls {@code onComplete} or {@code onError}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onTerminate} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a>
* @see #doAfterTerminate(Action)
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> doOnTerminate(@NonNull Action onTerminate) {
Objects.requireNonNull(onTerminate, "onTerminate is null");
return doOnEach(Functions.emptyConsumer(),
Functions.actionConsumer(onTerminate), onTerminate,
Functions.EMPTY_ACTION);
}
/**
* Returns a {@link Maybe} that emits the single item at a specified index in a sequence of emissions from
* the current {@code Observable} or completes if the current {@code Observable} signals fewer elements than index.
* <p>
* <img width="640" height="363" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/elementAt.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code elementAt} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param index
* the zero-based index of the item to retrieve
* @return the new {@code Maybe} instance
* @throws IndexOutOfBoundsException
* if {@code index} is negative
* @see <a href="http://reactivex.io/documentation/operators/elementat.html">ReactiveX operators documentation: ElementAt</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Maybe<T> elementAt(long index) {
if (index < 0) {
throw new IndexOutOfBoundsException("index >= 0 required but it was " + index);
}
return RxJavaPlugins.onAssembly(new ObservableElementAtMaybe<>(this, index));
}
/**
* Returns a {@link Single} that emits the item found at a specified index in a sequence of emissions from
* the current {@code Observable}, or a default item if that index is out of range.
* <p>
* <img width="640" height="354" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/elementAtDefault.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code elementAt} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param index
* the zero-based index of the item to retrieve
* @param defaultItem
* the default item
* @return the new {@code Single} instance
* @throws NullPointerException if {@code defaultItem} is {@code null}
* @throws IndexOutOfBoundsException
* if {@code index} is negative
* @see <a href="http://reactivex.io/documentation/operators/elementat.html">ReactiveX operators documentation: ElementAt</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<T> elementAt(long index, @NonNull T defaultItem) {
if (index < 0) {
throw new IndexOutOfBoundsException("index >= 0 required but it was " + index);
}
Objects.requireNonNull(defaultItem, "defaultItem is null");
return RxJavaPlugins.onAssembly(new ObservableElementAtSingle<>(this, index, defaultItem));
}
/**
* Returns a {@link Single} that emits the item found at a specified index in a sequence of emissions from the current {@code Observable}
* or signals a {@link NoSuchElementException} if the current {@code Observable} signals fewer elements than index.
* <p>
* <img width="640" height="362" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/elementAtOrError.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code elementAtOrError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param index
* the zero-based index of the item to retrieve
* @return the new {@code Single} instance
* @throws IndexOutOfBoundsException
* if {@code index} is negative
* @see <a href="http://reactivex.io/documentation/operators/elementat.html">ReactiveX operators documentation: ElementAt</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<T> elementAtOrError(long index) {
if (index < 0) {
throw new IndexOutOfBoundsException("index >= 0 required but it was " + index);
}
return RxJavaPlugins.onAssembly(new ObservableElementAtSingle<>(this, index, null));
}
/**
* Filters items emitted by the current {@code Observable} by only emitting those that satisfy a specified {@link Predicate}.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/filter.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code filter} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param predicate
* a function that evaluates each item emitted by the current {@code Observable}, returning {@code true}
* if it passes the filter
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code predicate} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/filter.html">ReactiveX operators documentation: Filter</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> filter(@NonNull Predicate<? super T> predicate) {
Objects.requireNonNull(predicate, "predicate is null");
return RxJavaPlugins.onAssembly(new ObservableFilter<>(this, predicate));
}
/**
* Returns a {@link Maybe} that emits only the very first item emitted by the current {@code Observable}, or
* completes if the current {@code Observable} is empty.
* <p>
* <img width="640" height="286" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/firstElement.m.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code firstElement} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @return the new {@code Maybe} instance
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX operators documentation: First</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Maybe<T> firstElement() {
return elementAt(0L);
}
/**
* Returns a {@link Single} that emits only the very first item emitted by the current {@code Observable}, or a default item
* if the current {@code Observable} completes without emitting any items.
* <p>
* <img width="640" height="283" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/first.s.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code first} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param defaultItem
* the default item to emit if the current {@code Observable} doesn't emit anything
* @return the new {@code Single} instance
* @throws NullPointerException if {@code defaultItem} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX operators documentation: First</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<T> first(@NonNull T defaultItem) {
return elementAt(0L, defaultItem);
}
/**
* Returns a {@link Single} that emits only the very first item emitted by the current {@code Observable} or
* signals a {@link NoSuchElementException} if the current {@code Observable} is empty.
* <p>
* <img width="640" height="435" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/firstOrError.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code firstOrError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @return the new {@code Single} instance
* @see <a href="http://reactivex.io/documentation/operators/first.html">ReactiveX operators documentation: First</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<T> firstOrError() {
return elementAtOrError(0L);
}
/**
* Returns an {@code Observable} that emits items based on applying a function that you supply to each item emitted
* by the current {@code Observable}, where that function returns an {@link ObservableSource}, and then merging those returned
* {@code ObservableSource}s and emitting the results of this merger.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMap.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the value type of the inner {@code ObservableSource}s and the output type
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns an
* {@code ObservableSource}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper) {
return flatMap(mapper, false);
}
/**
* Returns an {@code Observable} that emits items based on applying a function that you supply to each item emitted
* by the current {@code Observable}, where that function returns an {@link ObservableSource}, and then merging those returned
* {@code ObservableSource}s and emitting the results of this merger.
* <p>
* <img width="640" height="356" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapDelayError.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the value type of the inner {@code ObservableSource}s and the output type
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns an
* {@code ObservableSource}
* @param delayErrors
* if {@code true}, exceptions from the current {@code Observable} and all inner {@code ObservableSource}s are delayed until all of them terminate
* if {@code false}, the first one signaling an exception will terminate the whole sequence immediately
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper, boolean delayErrors) {
return flatMap(mapper, delayErrors, Integer.MAX_VALUE);
}
/**
* Returns an {@code Observable} that emits items based on applying a function that you supply to each item emitted
* by the current {@code Observable}, where that function returns an {@link ObservableSource}, and then merging those returned
* {@code ObservableSource}s and emitting the results of this merger, while limiting the maximum number of concurrent
* subscriptions to these {@code ObservableSource}s.
* <p>
* <img width="640" height="442" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapMaxConcurrency.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the value type of the inner {@code ObservableSource}s and the output type
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns an
* {@code ObservableSource}
* @param maxConcurrency
* the maximum number of {@code ObservableSource}s that may be subscribed to concurrently
* @param delayErrors
* if {@code true}, exceptions from the current {@code Observable} and all inner {@code ObservableSource}s are delayed until all of them terminate
* if {@code false}, the first one signaling an exception will terminate the whole sequence immediately
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code maxConcurrency} is non-positive
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper, boolean delayErrors, int maxConcurrency) {
return flatMap(mapper, delayErrors, maxConcurrency, bufferSize());
}
/**
* Returns an {@code Observable} that emits items based on applying a function that you supply to each item emitted
* by the current {@code Observable}, where that function returns an {@link ObservableSource}, and then merging those returned
* {@code ObservableSource}s and emitting the results of this merger, while limiting the maximum number of concurrent
* subscriptions to these {@code ObservableSource}s.
* <p>
* <img width="640" height="442" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapMaxConcurrency.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the value type of the inner {@code ObservableSource}s and the output type
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns an
* {@code ObservableSource}
* @param maxConcurrency
* the maximum number of {@code ObservableSource}s that may be subscribed to concurrently
* @param delayErrors
* if {@code true}, exceptions from the current {@code Observable} and all inner {@code ObservableSource}s are delayed until all of them terminate
* if {@code false}, the first one signaling an exception will terminate the whole sequence immediately
* @param bufferSize
* the number of elements expected from each inner {@code ObservableSource} to be buffered
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code maxConcurrency} or {@code bufferSize} is non-positive
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper,
boolean delayErrors, int maxConcurrency, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
ObjectHelper.verifyPositive(maxConcurrency, "maxConcurrency");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
if (this instanceof ScalarSupplier) {
@SuppressWarnings("unchecked")
T v = ((ScalarSupplier<T>)this).get();
if (v == null) {
return empty();
}
return ObservableScalarXMap.scalarXMap(v, mapper);
}
return RxJavaPlugins.onAssembly(new ObservableFlatMap<>(this, mapper, delayErrors, maxConcurrency, bufferSize));
}
/**
* Returns an {@code Observable} that applies a function to each item emitted or notification raised by the current
* {@code Observable} and then flattens the {@link ObservableSource}s returned from these functions and emits the resulting items.
* <p>
* <img width="640" height="410" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/mergeMap.nce.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R>
* the result type
* @param onNextMapper
* a function that returns an {@code ObservableSource} to merge for each item emitted by the current {@code Observable}
* @param onErrorMapper
* a function that returns an {@code ObservableSource} to merge for an {@code onError} notification from the current
* {@code Observable}
* @param onCompleteSupplier
* a function that returns an {@code ObservableSource} to merge for an {@code onComplete} notification from the current
* {@code Observable}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onNextMapper} or {@code onErrorMapper} or {@code onCompleteSupplier} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMap(
@NonNull Function<? super T, ? extends ObservableSource<? extends R>> onNextMapper,
@NonNull Function<? super Throwable, ? extends ObservableSource<? extends R>> onErrorMapper,
@NonNull Supplier<? extends ObservableSource<? extends R>> onCompleteSupplier) {
Objects.requireNonNull(onNextMapper, "onNextMapper is null");
Objects.requireNonNull(onErrorMapper, "onErrorMapper is null");
Objects.requireNonNull(onCompleteSupplier, "onCompleteSupplier is null");
return merge(new ObservableMapNotification<>(this, onNextMapper, onErrorMapper, onCompleteSupplier));
}
/**
* Returns an {@code Observable} that applies a function to each item emitted or notification raised by the current
* {@code Observable} and then flattens the {@link ObservableSource}s returned from these functions and emits the resulting items,
* while limiting the maximum number of concurrent subscriptions to these {@code ObservableSource}s.
* <p>
* <img width="640" height="410" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/mergeMap.nce.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R>
* the result type
* @param onNextMapper
* a function that returns an {@code ObservableSource} to merge for each item emitted by the current {@code Observable}
* @param onErrorMapper
* a function that returns an {@code ObservableSource} to merge for an {@code onError} notification from the current
* {@code Observable}
* @param onCompleteSupplier
* a function that returns an {@code ObservableSource} to merge for an {@code onComplete} notification from the current
* {@code Observable}
* @param maxConcurrency
* the maximum number of {@code ObservableSource}s that may be subscribed to concurrently
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code onNextMapper} or {@code onErrorMapper} or {@code onCompleteSupplier} is {@code null}
* @throws IllegalArgumentException if {@code maxConcurrency} is non-positive
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMap(
@NonNull Function<? super T, ? extends ObservableSource<? extends R>> onNextMapper,
@NonNull Function<Throwable, ? extends ObservableSource<? extends R>> onErrorMapper,
@NonNull Supplier<? extends ObservableSource<? extends R>> onCompleteSupplier,
int maxConcurrency) {
Objects.requireNonNull(onNextMapper, "onNextMapper is null");
Objects.requireNonNull(onErrorMapper, "onErrorMapper is null");
Objects.requireNonNull(onCompleteSupplier, "onCompleteSupplier is null");
return merge(new ObservableMapNotification<>(this, onNextMapper, onErrorMapper, onCompleteSupplier), maxConcurrency);
}
/**
* Returns an {@code Observable} that emits items based on applying a function that you supply to each item emitted
* by the current {@code Observable}, where that function returns an {@link ObservableSource}, and then merging those returned
* {@code ObservableSource}s and emitting the results of this merger, while limiting the maximum number of concurrent
* subscriptions to these {@code ObservableSource}s.
* <p>
* <img width="640" height="442" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapMaxConcurrency.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <R> the value type of the inner {@code ObservableSource}s and the output type
* @param mapper
* a function that, when applied to an item emitted by the current {@code Observable}, returns an
* {@code ObservableSource}
* @param maxConcurrency
* the maximum number of {@code ObservableSource}s that may be subscribed to concurrently
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @throws IllegalArgumentException if {@code maxConcurrency} is non-positive
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends R>> mapper, int maxConcurrency) {
return flatMap(mapper, false, maxConcurrency, bufferSize());
}
/**
* Returns an {@code Observable} that emits the results of a specified function to the pair of values emitted by the
* current {@code Observable} and the mapped inner {@link ObservableSource}.
* <p>
* <img width="640" height="390" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/mergeMap.r.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the type of items emitted by the collection {@code ObservableSource}
* @param <R>
* the type of items emitted by the resulting {@code Observable}
* @param mapper
* a function that returns an {@code ObservableSource} for each item emitted by the current {@code Observable}
* @param combiner
* a function that combines one item emitted by each of the source and collection {@code ObservableSource}s and
* returns an item to be emitted by the resulting {@code Observable}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} or {@code combiner} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U, @NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends U>> mapper,
@NonNull BiFunction<? super T, ? super U, ? extends R> combiner) {
return flatMap(mapper, combiner, false, bufferSize(), bufferSize());
}
/**
* Returns an {@code Observable} that emits the results of a specified function to the pair of values emitted by the
* current {@code Observable} and the mapped inner {@link ObservableSource}.
* <p>
* <img width="640" height="390" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/mergeMap.r.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the type of items emitted by the collection {@code ObservableSource}
* @param <R>
* the type of items emitted by the resulting {@code Observable}
* @param mapper
* a function that returns an {@code ObservableSource} for each item emitted by the current {@code Observable}
* @param combiner
* a function that combines one item emitted by each of the source and collection {@code ObservableSource}s and
* returns an item to be emitted by the resulting {@code Observable}
* @param delayErrors
* if {@code true}, exceptions from the current {@code Observable} and all inner {@code ObservableSource}s are delayed until all of them terminate
* if {@code false}, the first one signaling an exception will terminate the whole sequence immediately
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} or {@code combiner} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U, @NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends U>> mapper,
@NonNull BiFunction<? super T, ? super U, ? extends R> combiner, boolean delayErrors) {
return flatMap(mapper, combiner, delayErrors, bufferSize(), bufferSize());
}
/**
* Returns an {@code Observable} that emits the results of a specified function to the pair of values emitted by the
* current {@code Observable} and the mapped inner {@link ObservableSource}, while limiting the maximum number of concurrent
* subscriptions to these {@code ObservableSource}s.
* <p>
* <img width="640" height="390" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/mergeMap.r.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the type of items emitted by the collection {@code ObservableSource}
* @param <R>
* the type of items emitted by the resulting {@code Observable}
* @param mapper
* a function that returns an {@code ObservableSource} for each item emitted by the current {@code Observable}
* @param combiner
* a function that combines one item emitted by each of the source and collection {@code ObservableSource}s and
* returns an item to be emitted by the resulting {@code Observable}
* @param maxConcurrency
* the maximum number of {@code ObservableSource}s that may be subscribed to concurrently
* @param delayErrors
* if {@code true}, exceptions from the current {@code Observable} and all inner {@code ObservableSource}s are delayed until all of them terminate
* if {@code false}, the first one signaling an exception will terminate the whole sequence immediately
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} or {@code combiner} is {@code null}
* @throws IllegalArgumentException if {@code maxConcurrency} is non-positive
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U, @NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends U>> mapper,
@NonNull BiFunction<? super T, ? super U, ? extends R> combiner, boolean delayErrors, int maxConcurrency) {
return flatMap(mapper, combiner, delayErrors, maxConcurrency, bufferSize());
}
/**
* Returns an {@code Observable} that emits the results of a specified function to the pair of values emitted by the
* current {@code Observable} and the mapped inner {@link ObservableSource}, while limiting the maximum number of concurrent
* subscriptions to these {@code ObservableSource}s.
* <p>
* <img width="640" height="390" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/mergeMap.r.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the type of items emitted by the collection {@code ObservableSource}
* @param <R>
* the type of items emitted by the resulting {@code Observable}
* @param mapper
* a function that returns an {@code ObservableSource} for each item emitted by the current {@code Observable}
* @param combiner
* a function that combines one item emitted by each of the source and collection {@code ObservableSource}s and
* returns an item to be emitted by the resulting {@code Observable}
* @param maxConcurrency
* the maximum number of {@code ObservableSource}s that may be subscribed to concurrently
* @param delayErrors
* if {@code true}, exceptions from the current {@code Observable} and all inner {@code ObservableSource}s are delayed until all of them terminate
* if {@code false}, the first one signaling an exception will terminate the whole sequence immediately
* @param bufferSize
* the number of elements expected from the inner {@code ObservableSource} to be buffered
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} or {@code combiner} is {@code null}
* @throws IllegalArgumentException if {@code maxConcurrency} or {@code bufferSize} is non-positive
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U, @NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends U>> mapper,
@NonNull BiFunction<? super T, ? super U, ? extends R> combiner, boolean delayErrors, int maxConcurrency, int bufferSize) {
Objects.requireNonNull(mapper, "mapper is null");
Objects.requireNonNull(combiner, "combiner is null");
return flatMap(ObservableInternalHelper.flatMapWithCombiner(mapper, combiner), delayErrors, maxConcurrency, bufferSize);
}
/**
* Returns an {@code Observable} that emits the results of a specified function to the pair of values emitted by the
* current {@code Observable} and the mapped inner {@link ObservableSource}, while limiting the maximum number of concurrent
* subscriptions to these {@code ObservableSource}s.
* <p>
* <img width="640" height="390" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/mergeMap.r.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the type of items emitted by the collection {@code ObservableSource}
* @param <R>
* the type of items emitted by the resulting {@code Observable}
* @param mapper
* a function that returns an {@code ObservableSource} for each item emitted by the current {@code Observable}
* @param combiner
* a function that combines one item emitted by each of the source and collection {@code ObservableSource}s and
* returns an item to be emitted by the resulting {@code Observable}
* @param maxConcurrency
* the maximum number of {@code ObservableSource}s that may be subscribed to concurrently
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} or {@code combiner} is {@code null}
* @throws IllegalArgumentException if {@code maxConcurrency} is non-positive
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U, @NonNull R> Observable<R> flatMap(@NonNull Function<? super T, ? extends ObservableSource<? extends U>> mapper,
@NonNull BiFunction<? super T, ? super U, ? extends R> combiner, int maxConcurrency) {
return flatMap(mapper, combiner, false, maxConcurrency, bufferSize());
}
/**
* Maps each element of the current {@code Observable} into {@link CompletableSource}s, subscribes to them and
* waits until the upstream and all {@code CompletableSource}s complete.
* <p>
* <img width="640" height="424" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapCompletable.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMapCompletable} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param mapper the function that received each source value and transforms them into {@code CompletableSource}s.
* @throws NullPointerException if {@code mapper} is {@code null}
* @return the new {@link Completable} instance
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Completable flatMapCompletable(@NonNull Function<? super T, ? extends CompletableSource> mapper) {
return flatMapCompletable(mapper, false);
}
/**
* Maps each element of the current {@code Observable} into {@link CompletableSource}s, subscribes to them and
* waits until the upstream and all {@code CompletableSource}s complete, optionally delaying all errors.
* <p>
* <img width="640" height="362" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapCompletableDelayError.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMapCompletable} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param mapper the function that received each source value and transforms them into {@code CompletableSource}s.
* @param delayErrors if {@code true}, errors from the upstream and inner {@code CompletableSource}s are delayed until all of them
* terminate.
* @return the new {@link Completable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Completable flatMapCompletable(@NonNull Function<? super T, ? extends CompletableSource> mapper, boolean delayErrors) {
Objects.requireNonNull(mapper, "mapper is null");
return RxJavaPlugins.onAssembly(new ObservableFlatMapCompletableCompletable<>(this, mapper, delayErrors));
}
/**
* Merges {@link Iterable}s generated by a mapper {@link Function} for each individual item emitted by
* the current {@code Observable} into a single {@code Observable} sequence.
* <p>
* <img width="640" height="343" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapIterable.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMapIterable} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the output type and the element type of the {@code Iterable}s
* @param mapper
* a function that returns an {@code Iterable} sequence of values for when given an item emitted by the
* current {@code Observable}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U> Observable<U> flatMapIterable(@NonNull Function<? super T, @NonNull ? extends Iterable<? extends U>> mapper) {
Objects.requireNonNull(mapper, "mapper is null");
return RxJavaPlugins.onAssembly(new ObservableFlattenIterable<>(this, mapper));
}
/**
* Merges {@link Iterable}s generated by a mapper {@link Function} for each individual item emitted by
* the current {@code Observable} into a single {@code Observable} sequence where the resulting items will
* be the combination of the original item and each inner item of the respective {@code Iterable} as returned
* by the {@code resultSelector} {@link BiFunction}.
* <p>
* <img width="640" height="410" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapIterable.o.r.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMapIterable} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <U>
* the element type of the {@code Iterable}s
* @param <V>
* the output type as determined by the {@code resultSelector} function
* @param mapper
* a function that returns an {@code Iterable} sequence of values for each item emitted by the current
* {@code Observable}
* @param combiner
* a function that returns an item based on the item emitted by the current {@code Observable} and the
* next item of the {@code Iterable} returned for that original item by the {@code mapper}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} or {@code combiner} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull U, @NonNull V> Observable<V> flatMapIterable(@NonNull Function<? super T, @NonNull ? extends Iterable<? extends U>> mapper,
@NonNull BiFunction<? super T, ? super U, ? extends V> combiner) {
Objects.requireNonNull(mapper, "mapper is null");
Objects.requireNonNull(combiner, "combiner is null");
return flatMap(ObservableInternalHelper.flatMapIntoIterable(mapper), combiner, false, bufferSize(), bufferSize());
}
/**
* Maps each element of the current {@code Observable} into {@link MaybeSource}s, subscribes to all of them
* and merges their {@code onSuccess} values, in no particular order, into a single {@code Observable} sequence.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapMaybe.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMapMaybe} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param <R> the result value type
* @param mapper the function that received each source value and transforms them into {@code MaybeSource}s.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMapMaybe(@NonNull Function<? super T, ? extends MaybeSource<? extends R>> mapper) {
return flatMapMaybe(mapper, false);
}
/**
* Maps each element of the current {@code Observable} into {@link MaybeSource}s, subscribes to them
* and merges their {@code onSuccess} values, in no particular order, into a single {@code Observable} sequence,
* optionally delaying all errors.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapMaybe.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMapMaybe} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param <R> the result value type
* @param mapper the function that received each source value and transforms them into {@code MaybeSource}s.
* @param delayErrors if {@code true}, errors from the upstream and inner {@code MaybeSource}s are delayed until all of them
* terminate.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMapMaybe(@NonNull Function<? super T, ? extends MaybeSource<? extends R>> mapper, boolean delayErrors) {
Objects.requireNonNull(mapper, "mapper is null");
return RxJavaPlugins.onAssembly(new ObservableFlatMapMaybe<>(this, mapper, delayErrors));
}
/**
* Maps each element of the current {@code Observable} into {@link SingleSource}s, subscribes to all of them
* and merges their {@code onSuccess} values, in no particular order, into a single {@code Observable} sequence.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapSingle.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMapSingle} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param <R> the result value type
* @param mapper the function that received each source value and transforms them into {@code SingleSource}s.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMapSingle(@NonNull Function<? super T, ? extends SingleSource<? extends R>> mapper) {
return flatMapSingle(mapper, false);
}
/**
* Maps each element of the current {@code Observable} into {@link SingleSource}s, subscribes to them
* and merges their {@code onSuccess} values, in no particular order, into a single {@code Observable} sequence,
* optionally delaying all errors.
* <p>
* <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flatMapSingle.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code flatMapSingle} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @param <R> the result value type
* @param mapper the function that received each source value and transforms them into {@code SingleSource}s.
* @param delayErrors if {@code true}, errors from the upstream and inner {@code SingleSource}s are delayed until each of them
* terminates.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code mapper} is {@code null}
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull R> Observable<R> flatMapSingle(@NonNull Function<? super T, ? extends SingleSource<? extends R>> mapper, boolean delayErrors) {
Objects.requireNonNull(mapper, "mapper is null");
return RxJavaPlugins.onAssembly(new ObservableFlatMapSingle<>(this, mapper, delayErrors));
}
/**
* Subscribes to the {@link ObservableSource} and calls a {@link Consumer} for each item of the current {@code Observable}
* on its emission thread.
* <p>
* <img width="640" height="264" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/forEach.o.png" alt="">
* <p>
* Alias to {@link #subscribe(Consumer)}
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code forEach} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onNext
* the {@code Consumer} to execute for each item.
* @return
* a {@link Disposable} that allows disposing the sequence if the current {@code Observable} runs asynchronously
* @throws NullPointerException
* if {@code onNext} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Disposable forEach(@NonNull Consumer<? super T> onNext) {
return subscribe(onNext);
}
/**
* Subscribes to the {@link ObservableSource} and calls a {@link Predicate} for each item of the current {@code Observable},
* on its emission thread, until the predicate returns {@code false}.
* <p>
* <img width="640" height="273" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/forEachWhile.o.png" alt="">
* <p>
* If the {@code Observable} emits an error, it is wrapped into an
* {@link OnErrorNotImplementedException}
* and routed to the {@link RxJavaPlugins#onError(Throwable)} handler.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code forEachWhile} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onNext
* the {@code Predicate} to execute for each item.
* @return
* a {@link Disposable} that allows disposing the sequence if the current {@code Observable} runs asynchronously
* @throws NullPointerException
* if {@code onNext} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Disposable forEachWhile(@NonNull Predicate<? super T> onNext) {
return forEachWhile(onNext, Functions.ON_ERROR_MISSING, Functions.EMPTY_ACTION);
}
/**
* Subscribes to the {@link ObservableSource} and calls a {@link Predicate} for each item or a {@link Consumer} with the error
* of the current {@code Observable}, on their original emission threads, until the predicate returns {@code false}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code forEachWhile} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onNext
* the {@code Predicate} to execute for each item.
* @param onError
* the {@code Consumer} to execute when an error is emitted.
* @return
* a {@link Disposable} that allows disposing the sequence if the current {@code Observable} runs asynchronously
* @throws NullPointerException
* if {@code onNext} or {@code onError} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Disposable forEachWhile(@NonNull Predicate<? super T> onNext, @NonNull Consumer<? super Throwable> onError) {
return forEachWhile(onNext, onError, Functions.EMPTY_ACTION);
}
/**
* Subscribes to the {@link ObservableSource} and calls a {@link Predicate} for each item, a {@link Consumer} with the error
* or an {@link Action} upon completion of the current {@code Observable}, on their original emission threads,
* until the predicate returns {@code false}.
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code forEachWhile} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param onNext
* the {@code Predicate} to execute for each item.
* @param onError
* the {@code Consumer} to execute when an error is emitted.
* @param onComplete
* the {@code Action} to execute when completion is signaled.
* @return
* a {@link Disposable} that allows disposing the sequence if the current {@code Observable} runs asynchronously
* @throws NullPointerException
* if {@code onNext} or {@code onError} or {@code onComplete} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Disposable forEachWhile(@NonNull Predicate<? super T> onNext, @NonNull Consumer<? super Throwable> onError,
@NonNull Action onComplete) {
Objects.requireNonNull(onNext, "onNext is null");
Objects.requireNonNull(onError, "onError is null");
Objects.requireNonNull(onComplete, "onComplete is null");
ForEachWhileObserver<T> o = new ForEachWhileObserver<>(onNext, onError, onComplete);
subscribe(o);
return o;
}
/**
* Groups the items emitted by the current {@code Observable} according to a specified criterion, and emits these
* grouped items as {@link GroupedObservable}s.
* <p>
* <img width="640" height="360" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/groupBy.v3.png" alt="">
* <p>
* Each emitted {@code GroupedObservable} allows only a single {@link Observer} to subscribe to it during its
* lifetime and if this {@code Observer} calls {@code dispose()} before the
* source terminates, the next emission by the source having the same key will trigger a new
* {@code GroupedObservable} emission.
* <p>
* <em>Note:</em> A {@code GroupedObservable} will cache the items it is to emit until such time as it
* is subscribed to. For this reason, in order to avoid memory leaks, you should not simply ignore those
* {@code GroupedObservable}s that do not concern you. Instead, you can signal to them that they may
* discard their buffers by applying an operator like {@link #ignoreElements} to them.
* <p>
* Note also that ignoring groups or subscribing later (i.e., on another thread) will result in
* so-called group abandonment where a group will only contain one element and the group will be
* re-created over and over as new upstream items trigger a new group. The behavior is
* a trade-off between no-dataloss, upstream cancellation and excessive group creation.
*
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code groupBy} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param keySelector
* a function that extracts the key for each item
* @param <K>
* the key type
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code keySelector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/groupby.html">ReactiveX operators documentation: GroupBy</a>
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull K> Observable<GroupedObservable<K, T>> groupBy(@NonNull Function<? super T, ? extends K> keySelector) {
return groupBy(keySelector, (Function)Functions.identity(), false, bufferSize());
}
/**
* Groups the items emitted by the current {@code Observable} according to a specified criterion, and emits these
* grouped items as {@link GroupedObservable}s.
* <p>
* <img width="640" height="360" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/groupBy.v3.png" alt="">
* <p>
* Each emitted {@code GroupedObservable} allows only a single {@link Observer} to subscribe to it during its
* lifetime and if this {@code Observer} calls {@code dispose()} before the
* source terminates, the next emission by the source having the same key will trigger a new
* {@code GroupedObservable} emission.
* <p>
* <em>Note:</em> A {@code GroupedObservable} will cache the items it is to emit until such time as it
* is subscribed to. For this reason, in order to avoid memory leaks, you should not simply ignore those
* {@code GroupedObservable}s that do not concern you. Instead, you can signal to them that they may
* discard their buffers by applying an operator like {@link #ignoreElements} to them.
* <p>
* Note also that ignoring groups or subscribing later (i.e., on another thread) will result in
* so-called group abandonment where a group will only contain one element and the group will be
* re-created over and over as new upstream items trigger a new group. The behavior is
* a trade-off between no-dataloss, upstream cancellation and excessive group creation.
*
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code groupBy} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param keySelector
* a function that extracts the key for each item
* @param <K>
* the key type
* @param delayError
* if {@code true}, the exception from the current {@code Observable} is delayed in each group until that specific group emitted
* the normal values; if {@code false}, the exception bypasses values in the groups and is reported immediately.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code keySelector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/groupby.html">ReactiveX operators documentation: GroupBy</a>
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull K> Observable<GroupedObservable<K, T>> groupBy(@NonNull Function<? super T, ? extends K> keySelector, boolean delayError) {
return groupBy(keySelector, (Function)Functions.identity(), delayError, bufferSize());
}
/**
* Groups the items emitted by the current {@code Observable} according to a specified criterion, and emits these
* grouped items as {@link GroupedObservable}s.
* <p>
* <img width="640" height="360" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/groupBy.v3.png" alt="">
* <p>
* Each emitted {@code GroupedObservable} allows only a single {@link Observer} to subscribe to it during its
* lifetime and if this {@code Observer} calls {@code dispose()} before the
* source terminates, the next emission by the source having the same key will trigger a new
* {@code GroupedObservable} emission.
* <p>
* <em>Note:</em> A {@code GroupedObservable} will cache the items it is to emit until such time as it
* is subscribed to. For this reason, in order to avoid memory leaks, you should not simply ignore those
* {@code GroupedObservable}s that do not concern you. Instead, you can signal to them that they may
* discard their buffers by applying an operator like {@link #ignoreElements} to them.
* <p>
* Note also that ignoring groups or subscribing later (i.e., on another thread) will result in
* so-called group abandonment where a group will only contain one element and the group will be
* re-created over and over as new upstream items trigger a new group. The behavior is
* a trade-off between no-dataloss, upstream cancellation and excessive group creation.
*
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code groupBy} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param keySelector
* a function that extracts the key for each item
* @param valueSelector
* a function that extracts the return element for each item
* @param <K>
* the key type
* @param <V>
* the element type
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code keySelector} or {@code valueSelector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/groupby.html">ReactiveX operators documentation: GroupBy</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull K, @NonNull V> Observable<GroupedObservable<K, V>> groupBy(@NonNull Function<? super T, ? extends K> keySelector,
Function<? super T, ? extends V> valueSelector) {
return groupBy(keySelector, valueSelector, false, bufferSize());
}
/**
* Groups the items emitted by the current {@code Observable} according to a specified criterion, and emits these
* grouped items as {@link GroupedObservable}s.
* <p>
* <img width="640" height="360" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/groupBy.v3.png" alt="">
* <p>
* Each emitted {@code GroupedObservable} allows only a single {@link Observer} to subscribe to it during its
* lifetime and if this {@code Observer} calls {@code dispose()} before the
* source terminates, the next emission by the source having the same key will trigger a new
* {@code GroupedObservable} emission.
* <p>
* <em>Note:</em> A {@code GroupedObservable} will cache the items it is to emit until such time as it
* is subscribed to. For this reason, in order to avoid memory leaks, you should not simply ignore those
* {@code GroupedObservable}s that do not concern you. Instead, you can signal to them that they may
* discard their buffers by applying an operator like {@link #ignoreElements} to them.
* <p>
* Note also that ignoring groups or subscribing later (i.e., on another thread) will result in
* so-called group abandonment where a group will only contain one element and the group will be
* re-created over and over as new upstream items trigger a new group. The behavior is
* a trade-off between no-dataloss, upstream cancellation and excessive group creation.
*
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code groupBy} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param keySelector
* a function that extracts the key for each item
* @param valueSelector
* a function that extracts the return element for each item
* @param <K>
* the key type
* @param <V>
* the element type
* @param delayError
* if {@code true}, the exception from the current {@code Observable} is delayed in each group until that specific group emitted
* the normal values; if {@code false}, the exception bypasses values in the groups and is reported immediately.
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code keySelector} or {@code valueSelector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/groupby.html">ReactiveX operators documentation: GroupBy</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull K, @NonNull V> Observable<GroupedObservable<K, V>> groupBy(@NonNull Function<? super T, ? extends K> keySelector,
@NonNull Function<? super T, ? extends V> valueSelector, boolean delayError) {
return groupBy(keySelector, valueSelector, delayError, bufferSize());
}
/**
* Groups the items emitted by the current {@code Observable} according to a specified criterion, and emits these
* grouped items as {@link GroupedObservable}s.
* <p>
* <img width="640" height="360" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/groupBy.v3.png" alt="">
* <p>
* Each emitted {@code GroupedObservable} allows only a single {@link Observer} to subscribe to it during its
* lifetime and if this {@code Observer} calls {@code dispose()} before the
* source terminates, the next emission by the source having the same key will trigger a new
* {@code GroupedObservable} emission.
* <p>
* <em>Note:</em> A {@code GroupedObservable} will cache the items it is to emit until such time as it
* is subscribed to. For this reason, in order to avoid memory leaks, you should not simply ignore those
* {@code GroupedObservable}s that do not concern you. Instead, you can signal to them that they may
* discard their buffers by applying an operator like {@link #ignoreElements} to them.
* <p>
* Note also that ignoring groups or subscribing later (i.e., on another thread) will result in
* so-called group abandonment where a group will only contain one element and the group will be
* re-created over and over as new upstream items trigger a new group. The behavior is
* a trade-off between no-dataloss, upstream cancellation and excessive group creation.
*
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code groupBy} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param keySelector
* a function that extracts the key for each item
* @param valueSelector
* a function that extracts the return element for each item
* @param delayError
* if {@code true}, the exception from the current {@code Observable} is delayed in each group until that specific group emitted
* the normal values; if {@code false}, the exception bypasses values in the groups and is reported immediately.
* @param bufferSize
* the hint for how many {@code GroupedObservable}s and element in each {@code GroupedObservable} should be buffered
* @param <K>
* the key type
* @param <V>
* the element type
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code keySelector} or {@code valueSelector} is {@code null}
* @throws IllegalArgumentException if {@code bufferSize} is non-positive
* @see <a href="http://reactivex.io/documentation/operators/groupby.html">ReactiveX operators documentation: GroupBy</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull K, @NonNull V> Observable<GroupedObservable<K, V>> groupBy(@NonNull Function<? super T, ? extends K> keySelector,
@NonNull Function<? super T, ? extends V> valueSelector,
boolean delayError, int bufferSize) {
Objects.requireNonNull(keySelector, "keySelector is null");
Objects.requireNonNull(valueSelector, "valueSelector is null");
ObjectHelper.verifyPositive(bufferSize, "bufferSize");
return RxJavaPlugins.onAssembly(new ObservableGroupBy<>(this, keySelector, valueSelector, bufferSize, delayError));
}
/**
* Returns an {@code Observable} that correlates two {@link ObservableSource}s when they overlap in time and groups the results.
* <p>
* There are no guarantees in what order the items get combined when multiple
* items from one or both source {@code ObservableSource}s overlap.
* <p>
* <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/groupJoin.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code groupJoin} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <TRight> the value type of the right {@code ObservableSource} source
* @param <TLeftEnd> the element type of the left duration {@code ObservableSource}s
* @param <TRightEnd> the element type of the right duration {@code ObservableSource}s
* @param <R> the result type
* @param other
* the other {@code ObservableSource} to correlate items from the current {@code Observable} with
* @param leftEnd
* a function that returns an {@code ObservableSource} whose emissions indicate the duration of the values of
* the current {@code Observable}
* @param rightEnd
* a function that returns an {@code ObservableSource} whose emissions indicate the duration of the values of
* the {@code right} {@code ObservableSource}
* @param resultSelector
* a function that takes an item emitted by each {@code ObservableSource} and returns the value to be emitted
* by the resulting {@code Observable}
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code other}, {@code leftEnd}, {@code rightEnd} or {@code resultSelector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/join.html">ReactiveX operators documentation: Join</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull TRight, @NonNull TLeftEnd, @NonNull TRightEnd, @NonNull R> Observable<R> groupJoin(
@NonNull ObservableSource<? extends TRight> other,
@NonNull Function<? super T, ? extends ObservableSource<TLeftEnd>> leftEnd,
@NonNull Function<? super TRight, ? extends ObservableSource<TRightEnd>> rightEnd,
@NonNull BiFunction<? super T, ? super Observable<TRight>, ? extends R> resultSelector
) {
Objects.requireNonNull(other, "other is null");
Objects.requireNonNull(leftEnd, "leftEnd is null");
Objects.requireNonNull(rightEnd, "rightEnd is null");
Objects.requireNonNull(resultSelector, "resultSelector is null");
return RxJavaPlugins.onAssembly(new ObservableGroupJoin<>(
this, other, leftEnd, rightEnd, resultSelector));
}
/**
* Hides the identity of the current {@code Observable} and its {@link Disposable}.
* <p>
* Allows hiding extra features such as {@link io.reactivex.rxjava3.subjects.Subject}'s
* {@link Observer} methods or preventing certain identity-based
* optimizations (fusion).
* <p>
* <img width="640" height="283" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/hide.o.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code hide} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
* @return the new {@code Observable} instance
*
* @since 2.0
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Observable<T> hide() {
return RxJavaPlugins.onAssembly(new ObservableHide<>(this));
}
/**
* Ignores all items emitted by the current {@code Observable} and only calls {@code onComplete} or {@code onError}.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/ignoreElements.2.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code ignoreElements} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @return the new {@link Completable} instance
* @see <a href="http://reactivex.io/documentation/operators/ignoreelements.html">ReactiveX operators documentation: IgnoreElements</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Completable ignoreElements() {
return RxJavaPlugins.onAssembly(new ObservableIgnoreElementsCompletable<>(this));
}
/**
* Returns a {@link Single} that emits {@code true} if the current {@code Observable} is empty, otherwise {@code false}.
* <p>
* In Rx.Net this is negated as the {@code any} {@link Observer} but we renamed this in RxJava to better match Java
* naming idioms.
* <p>
* <img width="640" height="320" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/isEmpty.2.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code isEmpty} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @return the new {@code Single} instance
* @see <a href="http://reactivex.io/documentation/operators/contains.html">ReactiveX operators documentation: Contains</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<Boolean> isEmpty() {
return all(Functions.alwaysFalse());
}
/**
* Correlates the items emitted by two {@link ObservableSource}s based on overlapping durations.
* <p>
* There are no guarantees in what order the items get combined when multiple
* items from one or both source {@code ObservableSource}s overlap.
* <p>
* <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/join_.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code join} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param <TRight> the value type of the right {@code ObservableSource} source
* @param <TLeftEnd> the element type of the left duration {@code ObservableSource}s
* @param <TRightEnd> the element type of the right duration {@code ObservableSource}s
* @param <R> the result type
* @param other
* the second {@code ObservableSource} to join items from
* @param leftEnd
* a function to select a duration for each item emitted by the current {@code Observable}, used to
* determine overlap
* @param rightEnd
* a function to select a duration for each item emitted by the {@code right} {@code ObservableSource}, used to
* determine overlap
* @param resultSelector
* a function that computes an item to be emitted by the resulting {@code Observable} for any two
* overlapping items emitted by the two {@code ObservableSource}s
* @return the new {@code Observable} instance
* @throws NullPointerException if {@code other}, {@code leftEnd}, {@code rightEnd} or {@code resultSelector} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/join.html">ReactiveX operators documentation: Join</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final <@NonNull TRight, @NonNull TLeftEnd, @NonNull TRightEnd, @NonNull R> Observable<R> join(
@NonNull ObservableSource<? extends TRight> other,
@NonNull Function<? super T, ? extends ObservableSource<TLeftEnd>> leftEnd,
@NonNull Function<? super TRight, ? extends ObservableSource<TRightEnd>> rightEnd,
@NonNull BiFunction<? super T, ? super TRight, ? extends R> resultSelector
) {
Objects.requireNonNull(other, "other is null");
Objects.requireNonNull(leftEnd, "leftEnd is null");
Objects.requireNonNull(rightEnd, "rightEnd is null");
Objects.requireNonNull(resultSelector, "resultSelector is null");
return RxJavaPlugins.onAssembly(new ObservableJoin<T, TRight, TLeftEnd, TRightEnd, R>(
this, other, leftEnd, rightEnd, resultSelector));
}
/**
* Returns a {@link Maybe} that emits the last item emitted by the current {@code Observable} or
* completes if the current {@code Observable} is empty.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/lastElement.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code lastElement} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @return the new {@code Maybe} instance
* @see <a href="http://reactivex.io/documentation/operators/last.html">ReactiveX operators documentation: Last</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Maybe<T> lastElement() {
return RxJavaPlugins.onAssembly(new ObservableLastMaybe<>(this));
}
/**
* Returns a {@link Single} that emits only the last item emitted by the current {@code Observable}, or a default item
* if the current {@code Observable} completes without emitting any items.
* <p>
* <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/last.2.v3.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code last} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @param defaultItem
* the default item to emit if the current {@code Observable} is empty
* @return the new {@code Single} instance
* @throws NullPointerException if {@code defaultItem} is {@code null}
* @see <a href="http://reactivex.io/documentation/operators/last.html">ReactiveX operators documentation: Last</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<T> last(@NonNull T defaultItem) {
Objects.requireNonNull(defaultItem, "defaultItem is null");
return RxJavaPlugins.onAssembly(new ObservableLastSingle<>(this, defaultItem));
}
/**
* Returns a {@link Single} that emits only the last item emitted by the current {@code Observable} or
* signals a {@link NoSuchElementException} if the current {@code Observable} is empty.
* <p>
* <img width="640" height="236" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/lastOrError.png" alt="">
* <dl>
* <dt><b>Scheduler:</b></dt>
* <dd>{@code lastOrError} does not operate by default on a particular {@link Scheduler}.</dd>
* </dl>
*
* @return the new {@code Single} instance
* @see <a href="http://reactivex.io/documentation/operators/last.html">ReactiveX operators documentation: Last</a>
*/
@CheckReturnValue
@SchedulerSupport(SchedulerSupport.NONE)
@NonNull
public final Single<T> lastOrError() {
return RxJavaPlugins.onAssembly(new ObservableLastSingle<>(this, null));
}
/**
* <strong>This method requires advanced knowledge about building operators, please consider
* other standard composition methods first;</strong>
* Returns an {@code Observable} which, when subscribed to, invokes the {@link ObservableOperator#apply(Observer) apply(Observer)} method
* of the provided {@link ObservableOperator} for each individual downstream {@link Observer} and allows the
* insertion of a custom operator by accessing the downstream's {@code Observer} during this subscription phase
* and providing a new {@code Observer}, containing the custom operator's intended business logic, that will be
* used in the subscription process going further upstream.
* <p>
* Generally, such a new {@code Observer} will wrap the downstream's {@code Observer} and forwards the
* {@code onNext}, {@code onError} and {@code onComplete} events from the upstream directly or according to the
* emission pattern the custom operator's business logic requires. In addition, such operator can intercept the
* flow control calls of {@code dispose} and {@code isDisposed} that would have traveled upstream and perform
* additional actions depending on the same business logic requirements.
* <p>
* Example:
* <pre><code>
* // Step 1: Create the consumer type that will be returned by the ObservableOperator.apply():
*
* public final | to |
java | apache__camel | dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java | {
"start": 195185,
"end": 197498
} | class ____ extends YamlDeserializerBase<CustomTransformerDefinition> {
public CustomTransformerDefinitionDeserializer() {
super(CustomTransformerDefinition.class);
}
@Override
protected CustomTransformerDefinition newInstance() {
return new CustomTransformerDefinition();
}
@Override
protected boolean setProperty(CustomTransformerDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "className": {
String val = asText(node);
target.setClassName(val);
break;
}
case "fromType": {
String val = asText(node);
target.setFromType(val);
break;
}
case "name": {
String val = asText(node);
target.setName(val);
break;
}
case "ref": {
String val = asText(node);
target.setRef(val);
break;
}
case "scheme": {
String val = asText(node);
target.setScheme(val);
break;
}
case "toType": {
String val = asText(node);
target.setToType(val);
break;
}
default: {
return false;
}
}
return true;
}
}
@YamlType(
nodes = "customValidator",
types = org.apache.camel.model.validator.CustomValidatorDefinition.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Custom Validator",
description = "To use a custom validator on the route level.",
deprecated = false,
properties = {
@YamlProperty(name = "className", type = "string", description = "Set a | CustomTransformerDefinitionDeserializer |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3529QuotedCliArgTest.java | {
"start": 1131,
"end": 2032
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that the command line processing doesn't choke on things like -Da=" ".
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-3529");
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setForkJvm(true); // NOTE: We want to go through the launcher script
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("-Dtest.a= ");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
Properties props = verifier.loadProperties("target/pom.properties");
assertEquals("- -", props.getProperty("project.properties.propA"));
}
}
| MavenITmng3529QuotedCliArgTest |
java | spring-projects__spring-framework | spring-core-test/src/test/java/org/springframework/core/test/tools/CompiledTests.java | {
"start": 4446,
"end": 5752
} | class ____ implements java.util.function.Supplier<String> {
public HelloWorld(String name) {
}
public String get() {
return "Hello World!";
}
}
""");
TestCompiler.forSystem().compile(sourceFile,
compiled -> assertThatIllegalStateException().isThrownBy(
() -> compiled.getInstance(Supplier.class)));
}
@Test
void getInstanceReturnsInstance() {
TestCompiler.forSystem().compile(SourceFile.of(HELLO_WORLD),
compiled -> assertThat(compiled.getInstance(Supplier.class)).isNotNull());
}
@Test
void getInstanceByNameReturnsInstance() {
SourceFiles sourceFiles = SourceFiles.of(SourceFile.of(HELLO_WORLD),
SourceFile.of(HELLO_SPRING));
TestCompiler.forSystem().compile(sourceFiles,
compiled -> assertThat(compiled.getInstance(Supplier.class,
"com.example.HelloWorld")).isNotNull());
}
@Test
void getAllCompiledClassesReturnsCompiledClasses() {
SourceFiles sourceFiles = SourceFiles.of(SourceFile.of(HELLO_WORLD),
SourceFile.of(HELLO_SPRING));
TestCompiler.forSystem().compile(sourceFiles, compiled -> {
List<Class<?>> classes = compiled.getAllCompiledClasses();
assertThat(classes.stream().map(Class::getName)).containsExactlyInAnyOrder(
"com.example.HelloWorld", "com.example.HelloSpring");
});
}
}
| HelloWorld |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/mapping/SelectablePath.java | {
"start": 425,
"end": 3007
} | class ____ implements Serializable, DotIdentifierSequence {
private final SelectablePath parent;
private final String name;
private final int index;
public SelectablePath(String root) {
this.parent = null;
this.name = root.intern();
this.index = 0;
}
private SelectablePath(SelectablePath parent, String name) {
this.parent = parent;
this.name = name;
this.index = parent.index + 1;
}
public static SelectablePath parse(String path) {
if ( path == null || path.isEmpty() ) {
return null;
}
else {
final var parts = split( ".", path );
var selectablePath = new SelectablePath( parts[0] );
for ( int i = 1; i < parts.length; i++ ) {
selectablePath = selectablePath.append( parts[i] );
}
return selectablePath;
}
}
public SelectablePath[] getParts() {
final var array = new SelectablePath[index + 1];
parts( array );
return array;
}
private void parts(SelectablePath[] array) {
if ( parent != null ) {
parent.parts( array );
}
array[index] = this;
}
public SelectablePath[] relativize(SelectablePath basePath) {
final var array = new SelectablePath[index - basePath.index];
relativize( array, basePath );
return array;
}
private boolean relativize(SelectablePath[] array, SelectablePath basePath) {
if ( equals( basePath ) ) {
return true;
}
if ( parent != null ) {
if ( parent.relativize( array, basePath ) ) {
array[index - basePath.index - 1] = this;
return true;
}
}
return false;
}
public String getSelectableName() {
return name;
}
@Override
public SelectablePath getParent() {
return parent;
}
@Override
public SelectablePath append(String selectableName) {
return new SelectablePath( this, selectableName );
}
@Override
public String getLocalName() {
return name;
}
@Override
public String getFullPath() {
return toString();
}
@Override
public String toString() {
final var string = new StringBuilder( name.length() * index );
toString( string );
return string.toString();
}
private void toString(StringBuilder sb) {
if ( parent != null ) {
parent.toString( sb );
sb.append( '.' );
}
sb.append( name );
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( !(o instanceof SelectablePath that) ) {
return false;
}
return Objects.equals( parent, that.parent )
&& name.equals( that.name );
}
@Override
public int hashCode() {
int result = parent != null ? parent.hashCode() : 0;
result = 31 * result + name.hashCode();
return result;
}
}
| SelectablePath |
java | quarkusio__quarkus | extensions/elytron-security-ldap/runtime/src/main/java/io/quarkus/elytron/security/ldap/DelegatingLdapContext.java | {
"start": 1107,
"end": 15974
} | interface ____ {
void handle(DirContext context) throws NamingException;
}
DelegatingLdapContext(DirContext delegating, CloseHandler closeHandler, SocketFactory socketFactory)
throws NamingException {
this.delegating = delegating;
this.closeHandler = closeHandler;
this.socketFactory = socketFactory;
}
// for needs of newInstance()
private DelegatingLdapContext(DirContext delegating, SocketFactory socketFactory) {
this.delegating = delegating;
this.closeHandler = null; // close handler should not be applied to copy
this.socketFactory = socketFactory;
}
public LdapContext newInitialLdapContext(Hashtable<?, ?> environment, Control[] connCtls) throws NamingException {
ClassLoader previous = setSocketFactory();
try {
return new InitialLdapContext(environment, null);
} finally {
unsetSocketFactory(previous);
}
}
@Override
public void close() throws NamingException {
if (closeHandler == null) {
delegating.close();
} else {
closeHandler.handle(delegating);
}
}
// for needs of search()
private NamingEnumeration<SearchResult> wrap(NamingEnumeration<SearchResult> delegating) {
return new NamingEnumeration<SearchResult>() {
@Override
public boolean hasMoreElements() {
ClassLoader previous = setSocketFactory();
try {
return delegating.hasMoreElements();
} finally {
unsetSocketFactory(previous);
}
}
@Override
public SearchResult nextElement() {
ClassLoader previous = setSocketFactory();
try {
return delegating.nextElement();
} finally {
unsetSocketFactory(previous);
}
}
@Override
public SearchResult next() throws NamingException {
ClassLoader previous = setSocketFactory();
try {
return delegating.next();
} finally {
unsetSocketFactory(previous);
}
}
@Override
public boolean hasMore() throws NamingException {
ClassLoader previous = setSocketFactory();
try {
return delegating.hasMore();
} finally {
unsetSocketFactory(previous);
}
}
@Override
public void close() throws NamingException {
delegating.close();
}
};
}
public DelegatingLdapContext wrapReferralContextObtaining(ReferralException e) throws NamingException {
ClassLoader previous = setSocketFactory();
try {
return new DelegatingLdapContext((DirContext) e.getReferralContext(), socketFactory);
} finally {
unsetSocketFactory(previous);
}
}
@Override
public String toString() {
return super.toString() + "->" + delegating.toString();
}
// LdapContext specific
@Override
public ExtendedResponse extendedOperation(ExtendedRequest request) throws NamingException {
if (!(delegating instanceof LdapContext))
throw Assert.unsupported();
return ((LdapContext) delegating).extendedOperation(request);
}
@Override
public LdapContext newInstance(Control[] requestControls) throws NamingException {
if (!(delegating instanceof LdapContext))
throw Assert.unsupported();
LdapContext newContext = ((LdapContext) delegating).newInstance(requestControls);
return new DelegatingLdapContext(newContext, socketFactory);
}
@Override
public void reconnect(Control[] controls) throws NamingException {
if (!(delegating instanceof LdapContext))
throw Assert.unsupported();
ClassLoader previous = setSocketFactory();
try {
((LdapContext) delegating).reconnect(controls);
} finally {
unsetSocketFactory(previous);
}
}
@Override
public Control[] getConnectControls() throws NamingException {
if (!(delegating instanceof LdapContext))
throw Assert.unsupported();
return ((LdapContext) delegating).getConnectControls();
}
@Override
public void setRequestControls(Control[] requestControls) throws NamingException {
if (!(delegating instanceof LdapContext))
throw Assert.unsupported();
((LdapContext) delegating).setRequestControls(requestControls);
}
@Override
public Control[] getRequestControls() throws NamingException {
if (!(delegating instanceof LdapContext))
throw Assert.unsupported();
return ((LdapContext) delegating).getRequestControls();
}
@Override
public Control[] getResponseControls() throws NamingException {
if (!(delegating instanceof LdapContext))
throw Assert.unsupported();
return ((LdapContext) delegating).getResponseControls();
}
// DirContext methods delegates only
@Override
public void bind(String name, Object obj, Attributes attrs) throws NamingException {
delegating.bind(name, obj, attrs);
}
@Override
public Attributes getAttributes(Name name) throws NamingException {
return delegating.getAttributes(name);
}
@Override
public Attributes getAttributes(String name) throws NamingException {
return delegating.getAttributes(name);
}
@Override
public Attributes getAttributes(Name name, String[] attrIds) throws NamingException {
return delegating.getAttributes(name, attrIds);
}
@Override
public Attributes getAttributes(String name, String[] attrIds) throws NamingException {
return delegating.getAttributes(name, attrIds);
}
@Override
public void modifyAttributes(Name name, int mod_op, Attributes attrs) throws NamingException {
delegating.modifyAttributes(name, mod_op, attrs);
}
@Override
public void modifyAttributes(String name, int mod_op, Attributes attrs) throws NamingException {
delegating.modifyAttributes(name, mod_op, attrs);
}
@Override
public void modifyAttributes(Name name, ModificationItem[] mods) throws NamingException {
delegating.modifyAttributes(name, mods);
}
@Override
public void modifyAttributes(String name, ModificationItem[] mods) throws NamingException {
delegating.modifyAttributes(name, mods);
}
@Override
public void bind(Name name, Object obj, Attributes attrs) throws NamingException {
delegating.bind(name, obj, attrs);
}
@Override
public void rebind(Name name, Object obj, Attributes attrs) throws NamingException {
delegating.rebind(name, obj, attrs);
}
@Override
public void rebind(String name, Object obj, Attributes attrs) throws NamingException {
delegating.rebind(name, obj, attrs);
}
@Override
public DirContext createSubcontext(Name name, Attributes attrs) throws NamingException {
return delegating.createSubcontext(name, attrs);
}
@Override
public DirContext createSubcontext(String name, Attributes attrs) throws NamingException {
return delegating.createSubcontext(name, attrs);
}
@Override
public DirContext getSchema(Name name) throws NamingException {
return delegating.getSchema(name);
}
@Override
public DirContext getSchema(String name) throws NamingException {
return delegating.getSchema(name);
}
@Override
public DirContext getSchemaClassDefinition(Name name) throws NamingException {
return delegating.getSchemaClassDefinition(name);
}
@Override
public DirContext getSchemaClassDefinition(String name) throws NamingException {
return delegating.getSchemaClassDefinition(name);
}
@Override
public NamingEnumeration<SearchResult> search(Name name, Attributes matchingAttributes, String[] attributesToReturn)
throws NamingException {
return wrap(delegating.search(name, matchingAttributes, attributesToReturn));
}
@Override
public NamingEnumeration<SearchResult> search(String name, Attributes matchingAttributes, String[] attributesToReturn)
throws NamingException {
return wrap(delegating.search(name, matchingAttributes, attributesToReturn));
}
@Override
public NamingEnumeration<SearchResult> search(Name name, Attributes matchingAttributes) throws NamingException {
return wrap(delegating.search(name, matchingAttributes));
}
@Override
public NamingEnumeration<SearchResult> search(String name, Attributes matchingAttributes) throws NamingException {
return wrap(delegating.search(name, matchingAttributes));
}
@Override
public NamingEnumeration<SearchResult> search(Name name, String filter, SearchControls cons) throws NamingException {
return wrap(delegating.search(name, filter, cons));
}
@Override
public NamingEnumeration<SearchResult> search(String name, String filter, SearchControls cons) throws NamingException {
return wrap(delegating.search(name, filter, cons));
}
@Override
public NamingEnumeration<SearchResult> search(Name name, String filterExpr, Object[] filterArgs, SearchControls cons)
throws NamingException {
return wrap(delegating.search(name, filterExpr, filterArgs, cons));
}
@Override
public NamingEnumeration<SearchResult> search(String name, String filterExpr, Object[] filterArgs, SearchControls cons)
throws NamingException {
return wrap(delegating.search(name, filterExpr, filterArgs, cons));
}
@Override
public Object lookup(Name name) throws NamingException {
return delegating.lookup(name);
}
@Override
public Object lookup(String name) throws NamingException {
return delegating.lookup(name);
}
@Override
public void bind(Name name, Object obj) throws NamingException {
delegating.bind(name, obj);
}
@Override
public void bind(String name, Object obj) throws NamingException {
delegating.bind(name, obj);
}
@Override
public void rebind(Name name, Object obj) throws NamingException {
delegating.rebind(name, obj);
}
@Override
public void rebind(String name, Object obj) throws NamingException {
delegating.rebind(name, obj);
}
@Override
public void unbind(Name name) throws NamingException {
delegating.unbind(name);
}
@Override
public void unbind(String name) throws NamingException {
delegating.unbind(name);
}
@Override
public void rename(Name oldName, Name newName) throws NamingException {
delegating.rename(oldName, newName);
}
@Override
public void rename(String oldName, String newName) throws NamingException {
delegating.rename(oldName, newName);
}
@Override
public NamingEnumeration<NameClassPair> list(Name name) throws NamingException {
return delegating.list(name);
}
@Override
public NamingEnumeration<NameClassPair> list(String name) throws NamingException {
return delegating.list(name);
}
@Override
public NamingEnumeration<Binding> listBindings(Name name) throws NamingException {
return delegating.listBindings(name);
}
@Override
public NamingEnumeration<Binding> listBindings(String name) throws NamingException {
return delegating.listBindings(name);
}
@Override
public void destroySubcontext(Name name) throws NamingException {
delegating.destroySubcontext(name);
}
@Override
public void destroySubcontext(String name) throws NamingException {
delegating.destroySubcontext(name);
}
@Override
public Context createSubcontext(Name name) throws NamingException {
return delegating.createSubcontext(name);
}
@Override
public Context createSubcontext(String name) throws NamingException {
return delegating.createSubcontext(name);
}
@Override
public Object lookupLink(Name name) throws NamingException {
return delegating.lookupLink(name);
}
@Override
public Object lookupLink(String name) throws NamingException {
return delegating.lookupLink(name);
}
@Override
public NameParser getNameParser(Name name) throws NamingException {
return delegating.getNameParser(name);
}
@Override
public NameParser getNameParser(String name) throws NamingException {
return delegating.getNameParser(name);
}
@Override
public Name composeName(Name name, Name prefix) throws NamingException {
return delegating.composeName(name, prefix);
}
@Override
public String composeName(String name, String prefix) throws NamingException {
return delegating.composeName(name, prefix);
}
@Override
public Object addToEnvironment(String propName, Object propVal) throws NamingException {
return delegating.addToEnvironment(propName, propVal);
}
@Override
public Object removeFromEnvironment(String propName) throws NamingException {
return delegating.removeFromEnvironment(propName);
}
@Override
public Hashtable<?, ?> getEnvironment() throws NamingException {
return delegating.getEnvironment();
}
@Override
public String getNameInNamespace() throws NamingException {
return delegating.getNameInNamespace();
}
private ClassLoader setSocketFactory() {
if (socketFactory != null) {
ThreadLocalSSLSocketFactory.set(socketFactory);
return setClassLoaderTo(getSocketFactoryClassLoader());
}
return null;
}
private void unsetSocketFactory(ClassLoader previous) {
if (socketFactory != null) {
ThreadLocalSSLSocketFactory.unset();
setClassLoaderTo(previous);
}
}
private ClassLoader getSocketFactoryClassLoader() {
return ThreadLocalSSLSocketFactory.class.getClassLoader();
}
private ClassLoader setClassLoaderTo(final ClassLoader targetClassLoader) {
final Thread currentThread = Thread.currentThread();
final ClassLoader original = currentThread.getContextClassLoader();
currentThread.setContextClassLoader(targetClassLoader);
return original;
}
}
| CloseHandler |
java | quarkusio__quarkus | independent-projects/bootstrap/app-model/src/main/java/io/quarkus/bootstrap/model/ApplicationModel.java | {
"start": 595,
"end": 3452
} | interface ____ {
/**
* Main application artifact
*
* @return main application artifact
*/
ResolvedDependency getAppArtifact();
/**
* Returns application dependencies that are included into the runtime and augmentation (Quarkus build time)
* classpath.
*
* @return application runtime and build time dependencies
*/
Collection<ResolvedDependency> getDependencies();
/**
* Returns application dependencies with the requested flags set.
*
* @param flags dependency flags that must be set for a dependency to be included in the result
* @return application dependencies that have requested flags set
*/
Iterable<ResolvedDependency> getDependencies(int flags);
/**
* Returns application dependencies that have any of the flags combined in the value of the {@code flags} arguments set.
*
* @param flags dependency flags to match
* @return application dependencies that matched the flags
*/
Iterable<ResolvedDependency> getDependenciesWithAnyFlag(int flags);
/**
* Returns application dependencies that have any of the flags passed in as arguments set.
*
* @param flags dependency flags to match
* @return application dependencies that matched the flags
*/
default Iterable<ResolvedDependency> getDependenciesWithAnyFlag(int... flags) {
if (flags.length == 0) {
throw new IllegalArgumentException("Flags are empty");
}
int combined = flags[0];
for (int i = 1; i < flags.length; ++i) {
combined |= flags[i];
}
return getDependenciesWithAnyFlag(combined);
}
/**
* Runtime dependencies of an application
*
* @return runtime dependencies of an application
*/
Collection<ResolvedDependency> getRuntimeDependencies();
/**
* Quarkus platforms (BOMs) found in the configuration of an application
*
* @return Quarkus platforms (BOMs) found in the configuration of an application
*/
PlatformImports getPlatforms();
/**
* Quarkus platform configuration properties
*
* @return Quarkus platform configuration properties
*/
default Map<String, String> getPlatformProperties() {
final PlatformImports platformImports = getPlatforms();
return platformImports == null ? Map.of() : platformImports.getPlatformProperties();
}
/**
* Extension capability requirements collected from the extensions found on the classpath of an application
*
* @return Extension capability requirements collected from the extensions found on the classpath of an application
*/
Collection<ExtensionCapabilities> getExtensionCapabilities();
/**
* Class loading parent-first artifacts
*
* @return | ApplicationModel |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/bugs/_1170/AdderTest.java | {
"start": 719,
"end": 2617
} | class ____ {
@IssueKey("1170")
@ProcessorTest
public void testWildcardAdder() {
Source source = new Source();
source.addWithoutWildcard( "mouse" );
source.addWildcardInTarget( "mouse" );
source.addWildcardInSource( "mouse" );
source.addWildcardInBoth( "mouse" );
source.addWildcardAdderToSetter( "mouse" );
Target target = AdderSourceTargetMapper.INSTANCE.toTarget( source );
assertThat( target ).isNotNull();
assertThat( target.getWithoutWildcards() ).containsExactly( 2L );
assertThat( target.getWildcardInSources() ).containsExactly( 2L );
( (ListAssert<Long>) assertThat( target.getWildcardInTargets() ) ).containsExactly( 2L );
( (ListAssert<Long>) assertThat( target.getWildcardInBoths() ) ).containsExactly( 2L );
assertThat( target.getWildcardAdderToSetters() ).containsExactly( 2L );
}
@IssueKey("1170")
@ProcessorTest
public void testWildcardAdderTargetToSource() {
Target target = new Target();
target.addWithoutWildcard( 2L );
target.addWildcardInTarget( 2L );
target.getWildcardInSources().add( 2L );
target.addWildcardInBoth( 2L );
target.setWildcardAdderToSetters( Arrays.asList( 2L ) );
Source source = AdderSourceTargetMapper.INSTANCE.toSource( target );
assertThat( source ).isNotNull();
assertThat( source.getWithoutWildcards() ).containsExactly( "mouse" );
( (ListAssert<String>) assertThat( source.getWildcardInSources() ) ).containsExactly( "mouse" );
assertThat( source.getWildcardInTargets() ).containsExactly( "mouse" );
( (ListAssert<String>) assertThat( source.getWildcardInBoths() ) ).containsExactly( "mouse" );
( (ListAssert<String>) assertThat( source.getWildcardAdderToSetters() ) ).containsExactly( "mouse" );
}
}
| AdderTest |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/factory/Mappers.java | {
"start": 1042,
"end": 1235
} | interface ____ {
*
* CustomerMapper INSTANCE = Mappers.getMapper( CustomerMapper.class );
*
* // mapping methods...
* }
* </pre>
*
* @author Gunnar Morling
*/
public | CustomerMapper |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/vertx/src/test/java/org/jboss/resteasy/reactive/server/vertx/test/resteasy/async/filters/AsyncRequestFilterResource.java | {
"start": 570,
"end": 3823
} | class ____ {
private static final Logger LOG = Logger.getLogger(AsyncRequestFilterResource.class);
@GET
public Response threeSyncRequestFilters(@Context ServerRequestContext ctx,
@HeaderParam("Filter1") @DefaultValue("") String filter1,
@HeaderParam("Filter2") @DefaultValue("") String filter2,
@HeaderParam("Filter3") @DefaultValue("") String filter3,
@HeaderParam("PreMatchFilter1") @DefaultValue("") String preMatchFilter1,
@HeaderParam("PreMatchFilter2") @DefaultValue("") String preMatchFilter2,
@HeaderParam("PreMatchFilter3") @DefaultValue("") String preMatchFilter3) {
// boolean async = isAsync(filter1)
// || isAsync(filter2)
// || isAsync(filter3)
// || isAsync(preMatchFilter1)
// || isAsync(preMatchFilter2)
// || isAsync(preMatchFilter3);
// if (async != ctx.isSuspended())
// return Response.serverError().entity("Request suspention is wrong").build();
return Response.ok("resource").build();
}
@Path("non-response")
@GET
public String threeSyncRequestFiltersNonResponse(@Context ServerRequestContext ctx,
@HeaderParam("Filter1") @DefaultValue("") String filter1,
@HeaderParam("Filter2") @DefaultValue("") String filter2,
@HeaderParam("Filter3") @DefaultValue("") String filter3,
@HeaderParam("PreMatchFilter1") @DefaultValue("") String preMatchFilter1,
@HeaderParam("PreMatchFilter2") @DefaultValue("") String preMatchFilter2,
@HeaderParam("PreMatchFilter3") @DefaultValue("") String preMatchFilter3) {
// boolean async = isAsync(filter1)
// || isAsync(filter2)
// || isAsync(filter3)
// || isAsync(preMatchFilter1)
// || isAsync(preMatchFilter2)
// || isAsync(preMatchFilter3);
// if (async != ctx.isSuspended())
// throw new WebApplicationException(Response.serverError().entity("Request suspention is wrong").build());
return "resource";
}
@Path("async")
@GET
public CompletionStage<Response> async() {
ExecutorService executor = Executors.newSingleThreadExecutor();
CompletableFuture<Response> resp = new CompletableFuture<>();
executor.submit(() -> {
try {
Thread.sleep(2000);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
LOG.error("Error:", e);
}
resp.complete(Response.ok("resource").build());
});
return resp;
}
@Path("callback")
@GET
public String callback() {
return "hello";
}
@Path("callback-async")
@GET
public CompletionStage<String> callbackAsync() {
return CompletableFuture.completedFuture("hello");
}
private boolean isAsync(String filter) {
return filter.equals("async-pass")
|| filter.equals("async-fail");
}
}
| AsyncRequestFilterResource |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java | {
"start": 1857,
"end": 1934
} | class ____ extends IndexShardTestCase {
static | IndexingMemoryControllerTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSStripedInputStreamWithRandomECPolicy.java | {
"start": 1090,
"end": 1605
} | class ____ extends
TestDFSStripedInputStream {
private static final Logger LOG = LoggerFactory.getLogger(
TestDFSStripedInputStreamWithRandomECPolicy.class.getName());
private ErasureCodingPolicy ecPolicy;
public TestDFSStripedInputStreamWithRandomECPolicy() {
ecPolicy = StripedFileTestUtil.getRandomNonDefaultECPolicy();
LOG.info("{}", ecPolicy.toString());
}
@Override
public ErasureCodingPolicy getEcPolicy() {
return ecPolicy;
}
}
| TestDFSStripedInputStreamWithRandomECPolicy |
java | quarkusio__quarkus | extensions/websockets-next/runtime/src/main/java/io/quarkus/websockets/next/runtime/telemetry/ErrorInterceptor.java | {
"start": 322,
"end": 429
} | interface ____ permits ErrorCountingInterceptor {
void intercept(Throwable throwable);
}
| ErrorInterceptor |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeDeath.java | {
"start": 7577,
"end": 14274
} | class ____ extends SubjectInheritingThread {
volatile boolean running;
final MiniDFSCluster cluster;
final Configuration conf;
Modify(Configuration conf, MiniDFSCluster cluster) {
running = true;
this.cluster = cluster;
this.conf = conf;
}
@Override
public void work() {
while (running) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
continue;
}
// check if all threads have a new stamp.
// If so, then all workers have finished at least one file
// since the last stamp.
boolean loop = false;
for (int i = 0; i < numThreads; i++) {
if (workload[i].getStamp() == 0) {
loop = true;
break;
}
}
if (loop) {
continue;
}
// Now it is guaranteed that there will be at least one valid
// replica of a file.
for (int i = 0; i < replication - 1; i++) {
// pick a random datanode to shutdown
int victim = AppendTestUtil.nextInt(numDatanodes);
try {
System.out.println("Stopping datanode " + victim);
cluster.restartDataNode(victim);
// cluster.startDataNodes(conf, 1, true, null, null);
} catch (IOException e) {
System.out.println("TestDatanodeDeath Modify exception " + e);
assertTrue(false, "TestDatanodeDeath Modify exception " + e);
running = false;
}
}
// set a new stamp for all workers
for (int i = 0; i < numThreads; i++) {
workload[i].resetStamp();
}
}
}
// Make the thread exit.
void close() {
running = false;
this.interrupt();
}
}
/**
* Test that writing to files is good even when datanodes in the pipeline
* dies.
*/
private void complexTest() throws IOException {
Configuration conf = new HdfsConfiguration();
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 2000);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 2);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_RECONSTRUCTION_PENDING_TIMEOUT_SEC_KEY, 2);
conf.setInt(HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, 5000);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(numDatanodes).build();
cluster.waitActive();
FileSystem fs = cluster.getFileSystem();
Modify modThread = null;
try {
// Create threads and make them run workload concurrently.
workload = new Workload[numThreads];
for (int i = 0; i < numThreads; i++) {
workload[i] = new Workload(AppendTestUtil.nextLong(), fs, i, numberOfFiles, replication, 0);
workload[i].start();
}
// Create a thread that kills existing datanodes and creates new ones.
modThread = new Modify(conf, cluster);
modThread.start();
// wait for all transactions to get over
for (int i = 0; i < numThreads; i++) {
try {
System.out.println("Waiting for thread " + i + " to complete...");
workload[i].join();
// if most of the threads are done, then stop restarting datanodes.
if (i >= numThreads/2) {
modThread.close();
}
} catch (InterruptedException e) {
i--; // retry
}
}
} finally {
if (modThread != null) {
modThread.close();
try {
modThread.join();
} catch (InterruptedException e) {}
}
fs.close();
cluster.shutdown();
}
}
/**
* Write to one file, then kill one datanode in the pipeline and then
* close the file.
*/
private void simpleTest(int datanodeToKill) throws IOException {
Configuration conf = new HdfsConfiguration();
conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 2000);
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_RECONSTRUCTION_PENDING_TIMEOUT_SEC_KEY, 2);
conf.setInt(HdfsClientConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, 5000);
int myMaxNodes = 5;
System.out.println("SimpleTest starting with DataNode to Kill " +
datanodeToKill);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(myMaxNodes).build();
cluster.waitActive();
FileSystem fs = cluster.getFileSystem();
short repl = 3;
Path filename = new Path("simpletest.dat");
try {
// create a file and write one block of data
System.out.println("SimpleTest creating file " + filename);
FSDataOutputStream stm = createFile(fs, filename, repl);
DFSOutputStream dfstream = (DFSOutputStream)
(stm.getWrappedStream());
// these are test settings
dfstream.setChunksPerPacket(5);
final long myseed = AppendTestUtil.nextLong();
byte[] buffer = AppendTestUtil.randomBytes(myseed, fileSize);
int mid = fileSize/4;
stm.write(buffer, 0, mid);
DatanodeInfo[] targets = dfstream.getPipeline();
int count = 5;
while (count-- > 0 && targets == null) {
try {
System.out.println("SimpleTest: Waiting for pipeline to be created.");
Thread.sleep(1000);
} catch (InterruptedException e) {
}
targets = dfstream.getPipeline();
}
if (targets == null) {
int victim = AppendTestUtil.nextInt(myMaxNodes);
System.out.println("SimpleTest stopping datanode random " + victim);
cluster.stopDataNode(victim);
} else {
int victim = datanodeToKill;
System.out.println("SimpleTest stopping datanode " + targets[victim]);
cluster.stopDataNode(targets[victim].getXferAddr());
}
System.out.println("SimpleTest stopping datanode complete");
// write some more data to file, close and verify
stm.write(buffer, mid, fileSize - mid);
stm.close();
checkFile(fs, filename, repl, numBlocks, fileSize, myseed);
} catch (Throwable e) {
System.out.println("Simple Workload exception " + e);
e.printStackTrace();
assertTrue(false, e.toString());
} finally {
fs.close();
cluster.shutdown();
}
}
@Test
public void testSimple0() throws IOException {simpleTest(0);}
@Test
public void testSimple1() throws IOException {simpleTest(1);}
@Test
public void testSimple2() throws IOException {simpleTest(2);}
@Test
public void testComplex() throws IOException {complexTest();}
}
| Modify |
java | spring-projects__spring-framework | spring-orm/src/main/java/org/springframework/orm/jpa/EntityManagerHolder.java | {
"start": 1035,
"end": 1318
} | class ____ the thread,
* for a given {@link jakarta.persistence.EntityManagerFactory}.
*
* <p>Note: This is an SPI class, not intended to be used by applications.
*
* @author Juergen Hoeller
* @since 2.0
* @see JpaTransactionManager
* @see EntityManagerFactoryUtils
*/
public | to |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/output/KeyValueListOutput.java | {
"start": 562,
"end": 2506
} | class ____<K, V> extends CommandOutput<K, V, List<KeyValue<K, V>>>
implements StreamingOutput<KeyValue<K, V>> {
private boolean initialized;
private Subscriber<KeyValue<K, V>> subscriber;
private final Iterable<K> keys;
private Iterator<K> keyIterator;
private K key;
private boolean hasKey;
public KeyValueListOutput(RedisCodec<K, V> codec) {
super(codec, Collections.emptyList());
setSubscriber(ListSubscriber.instance());
this.keys = null;
}
public KeyValueListOutput(RedisCodec<K, V> codec, Iterable<K> keys) {
super(codec, Collections.emptyList());
setSubscriber(ListSubscriber.instance());
this.keys = keys;
}
@Override
public void set(ByteBuffer bytes) {
if (keys == null) {
if (!hasKey) {
key = codec.decodeKey(bytes);
hasKey = true;
return;
}
K key = this.key;
this.key = null;
this.hasKey = false;
subscriber.onNext(output, KeyValue.fromNullable(key, bytes == null ? null : codec.decodeValue(bytes)));
} else {
if (keyIterator == null) {
keyIterator = keys.iterator();
}
subscriber.onNext(output,
KeyValue.fromNullable(keyIterator.next(), bytes == null ? null : codec.decodeValue(bytes)));
}
}
@Override
public void multi(int count) {
if (!initialized) {
output = OutputFactory.newList(count);
initialized = true;
}
}
@Override
public void setSubscriber(Subscriber<KeyValue<K, V>> subscriber) {
LettuceAssert.notNull(subscriber, "Subscriber must not be null");
this.subscriber = subscriber;
}
@Override
public Subscriber<KeyValue<K, V>> getSubscriber() {
return subscriber;
}
}
| KeyValueListOutput |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/type/GenericsBoundedTest.java | {
"start": 1723,
"end": 1782
} | class ____ extends Document {}
// [databind#537]
| MyDoc |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/runtime/client/graal/RedisClientSubstitutions.java | {
"start": 224,
"end": 339
} | class ____ {
}
@TargetClass(className = "io.vertx.redis.client.impl.SentinelTopology")
final | RedisClientSubstitutions |
java | FasterXML__jackson-core | src/test/java/tools/jackson/core/unittest/read/loc/LocationInArrayTest.java | {
"start": 426,
"end": 2775
} | class ____ extends JacksonCoreTestBase
{
final JsonFactory JSON_F = new JsonFactory();
// for [core#229]
@Test
void offsetInArraysBytes() throws Exception {
_testOffsetInArrays(true);
}
// for [core#229]
@Test
void offsetInArraysChars() throws Exception {
_testOffsetInArrays(false);
}
private void _testOffsetInArrays(boolean useBytes) throws Exception
{
JsonParser p;
final String DOC = " [10, 251,\n 3 ]";
// first, char based:
p = useBytes ? JSON_F.createParser(ObjectReadContext.empty(), DOC.getBytes("UTF-8"))
: JSON_F.createParser(ObjectReadContext.empty(), DOC.toCharArray());
assertToken(JsonToken.START_ARRAY, p.nextToken());
_assertLocation(useBytes, p.currentTokenLocation(), 2L, 1, 3);
_assertLocation(useBytes, p.currentLocation(), 3L, 1, 4);
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
_assertLocation(useBytes, p.currentTokenLocation(), 3L, 1, 4);
assertEquals(10, p.getIntValue()); // just to ensure read proceeds to end
// 2-digits so
_assertLocation(useBytes, p.currentLocation(), 5L, 1, 6);
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
_assertLocation(useBytes, p.currentTokenLocation(), 7L, 1, 8);
assertEquals(251, p.getIntValue()); // just to ensure read proceeds to end
_assertLocation(useBytes, p.currentLocation(), 10L, 1, 11);
assertToken(JsonToken.VALUE_NUMBER_INT, p.nextToken());
_assertLocation(useBytes, p.currentTokenLocation(), 15L, 2, 4);
assertEquals(3, p.getIntValue());
_assertLocation(useBytes, p.currentLocation(), 16L, 2, 5);
assertToken(JsonToken.END_ARRAY, p.nextToken());
_assertLocation(useBytes, p.currentTokenLocation(), 18L, 2, 7);
_assertLocation(useBytes, p.currentLocation(), 19L, 2, 8);
p.close();
}
private void _assertLocation(boolean useBytes, TokenStreamLocation loc, long offset, int row, int col)
{
assertEquals(row, loc.getLineNr());
assertEquals(col, loc.getColumnNr());
if (useBytes) {
assertEquals(offset, loc.getByteOffset());
} else {
assertEquals(offset, loc.getCharOffset());
}
}
}
| LocationInArrayTest |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/operator/topn/KeyExtractorForInt.java | {
"start": 3202,
"end": 3800
} | class ____ extends KeyExtractorForInt {
private final IntBlock block;
MaxFromAscendingBlock(TopNEncoder encoder, byte nul, byte nonNul, IntBlock block) {
super(encoder, nul, nonNul);
this.block = block;
}
@Override
public int writeKey(BreakingBytesRefBuilder key, int position) {
if (block.isNull(position)) {
return nul(key);
}
return nonNul(key, block.getInt(block.getFirstValueIndex(position) + block.getValueCount(position) - 1));
}
}
static | MaxFromAscendingBlock |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/indices/recovery/StatelessUnpromotableRelocationAction.java | {
"start": 1174,
"end": 3368
} | class ____ extends LegacyActionRequest {
private final long recoveryId;
private final ShardId shardId;
private final String targetAllocationId;
private final long clusterStateVersion;
public Request(long recoveryId, ShardId shardId, String targetAllocationId, long clusterStateVersion) {
this.recoveryId = recoveryId;
this.shardId = shardId;
this.targetAllocationId = targetAllocationId;
this.clusterStateVersion = clusterStateVersion;
}
public Request(StreamInput in) throws IOException {
super(in);
recoveryId = in.readVLong();
shardId = new ShardId(in);
targetAllocationId = in.readString();
clusterStateVersion = in.readVLong();
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeVLong(recoveryId);
shardId.writeTo(out);
out.writeString(targetAllocationId);
out.writeVLong(clusterStateVersion);
}
public long getRecoveryId() {
return recoveryId;
}
public ShardId getShardId() {
return shardId;
}
public long getClusterStateVersion() {
return clusterStateVersion;
}
public String getTargetAllocationId() {
return targetAllocationId;
}
@Override
public boolean equals(Object o) {
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return recoveryId == request.recoveryId
&& clusterStateVersion == request.clusterStateVersion
&& Objects.equals(shardId, request.shardId)
&& Objects.equals(targetAllocationId, request.targetAllocationId);
}
@Override
public int hashCode() {
return Objects.hash(recoveryId, shardId, targetAllocationId, clusterStateVersion);
}
}
}
| Request |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/aggregate/asyncwindow/processors/AsyncStateSliceSharedWindowAggProcessor.java | {
"start": 8770,
"end": 9492
} | class ____
implements AsyncMergeCallback<Long, Iterable<Long>>, Serializable {
private static final long serialVersionUID = 1L;
private static final StateFuture<Tuple2<RowData, RowData>> REUSABLE_FUTURE_RESULT =
StateFutureUtils.completedFuture(null);
private Long mergeTarget = null;
@Override
public StateFuture<Tuple2<RowData, RowData>> asyncMerge(
@Nullable Long mergeResult, Iterable<Long> toBeMerged, Long resultNamespace) {
this.mergeTarget = mergeResult;
return REUSABLE_FUTURE_RESULT;
}
public Long getMergeTarget() {
return mergeTarget;
}
}
}
| SliceMergeTargetHelper |
java | apache__spark | sql/core/src/test/gen-java/org/apache/spark/sql/execution/datasources/parquet/test/avro/ParquetAvroCompat.java | {
"start": 205,
"end": 5799
} | class ____ extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"ParquetAvroCompat\",\"namespace\":\"org.apache.spark.sql.execution.datasources.parquet.test.avro\",\"fields\":[{\"name\":\"strings_column\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}},{\"name\":\"string_to_int_column\",\"type\":{\"type\":\"map\",\"values\":\"int\",\"avro.java.string\":\"String\"}},{\"name\":\"complex_column\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"array\",\"items\":{\"type\":\"record\",\"name\":\"Nested\",\"fields\":[{\"name\":\"nested_ints_column\",\"type\":{\"type\":\"array\",\"items\":\"int\"}},{\"name\":\"nested_string_column\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}}]}},\"avro.java.string\":\"String\"}}]}");
public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; }
@Deprecated public java.util.List<java.lang.String> strings_column;
@Deprecated public java.util.Map<java.lang.String,java.lang.Integer> string_to_int_column;
@Deprecated public java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> complex_column;
/**
* Default constructor. Note that this does not initialize fields
* to their default values from the schema. If that is desired then
* one should use <code>newBuilder()</code>.
*/
public ParquetAvroCompat() {}
/**
* All-args constructor.
*/
public ParquetAvroCompat(java.util.List<java.lang.String> strings_column, java.util.Map<java.lang.String,java.lang.Integer> string_to_int_column, java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> complex_column) {
this.strings_column = strings_column;
this.string_to_int_column = string_to_int_column;
this.complex_column = complex_column;
}
public org.apache.avro.Schema getSchema() { return SCHEMA$; }
// Used by DatumWriter. Applications should not call.
public java.lang.Object get(int field$) {
switch (field$) {
case 0: return strings_column;
case 1: return string_to_int_column;
case 2: return complex_column;
default: throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
// Used by DatumReader. Applications should not call.
@SuppressWarnings(value="unchecked")
public void put(int field$, java.lang.Object value$) {
switch (field$) {
case 0: strings_column = (java.util.List<java.lang.String>)value$; break;
case 1: string_to_int_column = (java.util.Map<java.lang.String,java.lang.Integer>)value$; break;
case 2: complex_column = (java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>>)value$; break;
default: throw new org.apache.avro.AvroRuntimeException("Bad index");
}
}
/**
* Gets the value of the 'strings_column' field.
*/
public java.util.List<java.lang.String> getStringsColumn() {
return strings_column;
}
/**
* Sets the value of the 'strings_column' field.
* @param value the value to set.
*/
public void setStringsColumn(java.util.List<java.lang.String> value) {
this.strings_column = value;
}
/**
* Gets the value of the 'string_to_int_column' field.
*/
public java.util.Map<java.lang.String,java.lang.Integer> getStringToIntColumn() {
return string_to_int_column;
}
/**
* Sets the value of the 'string_to_int_column' field.
* @param value the value to set.
*/
public void setStringToIntColumn(java.util.Map<java.lang.String,java.lang.Integer> value) {
this.string_to_int_column = value;
}
/**
* Gets the value of the 'complex_column' field.
*/
public java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> getComplexColumn() {
return complex_column;
}
/**
* Sets the value of the 'complex_column' field.
* @param value the value to set.
*/
public void setComplexColumn(java.util.Map<java.lang.String,java.util.List<org.apache.spark.sql.execution.datasources.parquet.test.avro.Nested>> value) {
this.complex_column = value;
}
/** Creates a new ParquetAvroCompat RecordBuilder */
public static org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder newBuilder() {
return new org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder();
}
/** Creates a new ParquetAvroCompat RecordBuilder by copying an existing Builder */
public static org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder other) {
return new org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder(other);
}
/** Creates a new ParquetAvroCompat RecordBuilder by copying an existing ParquetAvroCompat instance */
public static org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder newBuilder(org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat other) {
return new org.apache.spark.sql.execution.datasources.parquet.test.avro.ParquetAvroCompat.Builder(other);
}
/**
* RecordBuilder for ParquetAvroCompat instances.
*/
public static | ParquetAvroCompat |
java | resilience4j__resilience4j | resilience4j-framework-common/src/main/java/io/github/resilience4j/common/timelimiter/configuration/CommonTimeLimiterConfigurationProperties.java | {
"start": 4819,
"end": 6687
} | class ____ {
private Duration timeoutDuration;
private Boolean cancelRunningFuture;
@Nullable
private Integer eventConsumerBufferSize;
@Nullable
private String baseConfig;
public Duration getTimeoutDuration() {
return timeoutDuration;
}
public InstanceProperties setTimeoutDuration(Duration timeoutDuration) {
Objects.requireNonNull(timeoutDuration);
if (timeoutDuration.isNegative()) {
throw new IllegalArgumentException(
"Illegal argument exponentialMaxWaitDuration: " + timeoutDuration + " is negative");
}
this.timeoutDuration = timeoutDuration;
return this;
}
public Boolean getCancelRunningFuture() {
return cancelRunningFuture;
}
public InstanceProperties setCancelRunningFuture(Boolean cancelRunningFuture) {
this.cancelRunningFuture = cancelRunningFuture;
return this;
}
public Integer getEventConsumerBufferSize() {
return eventConsumerBufferSize;
}
public InstanceProperties setEventConsumerBufferSize(Integer eventConsumerBufferSize) {
Objects.requireNonNull(eventConsumerBufferSize);
if (eventConsumerBufferSize < 1) {
throw new IllegalArgumentException("eventConsumerBufferSize must be greater than or equal to 1.");
}
this.eventConsumerBufferSize = eventConsumerBufferSize;
return this;
}
@Nullable
public String getBaseConfig() {
return baseConfig;
}
public InstanceProperties setBaseConfig(@Nullable String baseConfig) {
this.baseConfig = baseConfig;
return this;
}
}
}
| InstanceProperties |
java | google__error-prone | core/src/test/java/com/google/errorprone/matchers/AnnotationMatcherTest.java | {
"start": 6725,
"end": 7612
} | class ____ {}
""");
assertCompiles(
nodeWithAnnotationMatches(
/* shouldMatch= */ true,
new AnnotationMatcher<Tree>(
AT_LEAST_ONE,
Matchers.<AnnotationTree>anyOf(
isType("com.google.SampleAnnotation1"),
isType("com.google.SampleAnnotation2")))));
assertCompiles(
nodeWithAnnotationMatches(
/* shouldMatch= */ true,
new AnnotationMatcher<Tree>(
ALL,
Matchers.<AnnotationTree>anyOf(
isType("com.google.SampleAnnotation1"),
isType("com.google.SampleAnnotation2")))));
}
@Test
public void matchOneAnnotationsOnClass() {
writeFile(
"A.java",
"""
package com.google;
@SampleAnnotation1
@SampleAnnotation2
public | A |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/internals/PrefixedWindowKeySchemas.java | {
"start": 1944,
"end": 10639
} | class ____ implements KeySchema {
@Override
public Bytes upperRange(final Bytes key, final long to) {
if (key == null) {
// Put next prefix instead of null so that we can start from right prefix
// when scanning backwards
final byte nextPrefix = TIME_FIRST_PREFIX + 1;
return Bytes.wrap(ByteBuffer.allocate(PREFIX_SIZE).put(nextPrefix).array());
}
final byte[] maxKey = new byte[key.get().length];
Arrays.fill(maxKey, (byte) 0xFF);
return Bytes.wrap(ByteBuffer.allocate(PREFIX_SIZE + TIMESTAMP_SIZE + maxKey.length + SEQNUM_SIZE)
.put(TIME_FIRST_PREFIX)
.putLong(to)
.put(maxKey).putInt(Integer.MAX_VALUE)
.array());
}
@Override
public Bytes lowerRange(final Bytes key, final long from) {
if (key == null) {
return Bytes.wrap(ByteBuffer.allocate(PREFIX_SIZE + TIMESTAMP_SIZE)
.put(TIME_FIRST_PREFIX)
.putLong(from)
.array());
}
/*
* Larger timestamp or key's byte order can't be smaller than this lower range. Reason:
* 1. Timestamp is fixed length (with big endian byte order). Since we put timestamp
* first, larger timestamp will have larger byte order.
* 2. If timestamp is the same but key (k1) is larger than this lower range key (k2):
* a. If k2 is not a prefix of k1, then k1 will always have larger byte order no
* matter what seqnum k2 has
* b. If k2 is a prefix of k1, since k2's seqnum is 0, after k1 appends seqnum,
* it will always be larger than (k1 + seqnum).
*/
return Bytes.wrap(ByteBuffer.allocate(PREFIX_SIZE + TIMESTAMP_SIZE + key.get().length)
.put(TIME_FIRST_PREFIX)
.putLong(from)
.put(key.get())
.array());
}
@Override
public Bytes lowerRangeFixedSize(final Bytes key, final long from) {
return toStoreKeyBinary(key, Math.max(0, from), 0);
}
@Override
public Bytes upperRangeFixedSize(final Bytes key, final long to) {
return toStoreKeyBinary(key, to, Integer.MAX_VALUE);
}
@Override
public long segmentTimestamp(final Bytes key) {
return extractStoreTimestamp(key.get());
}
@Override
public HasNextCondition hasNextCondition(final Bytes binaryKeyFrom,
final Bytes binaryKeyTo, final long from, final long to, final boolean forward) {
return iterator -> {
while (iterator.hasNext()) {
final Bytes bytes = iterator.peekNextKey();
final byte prefix = extractPrefix(bytes.get());
if (prefix != TIME_FIRST_PREFIX) {
return false;
}
final long time = TimeFirstWindowKeySchema.extractStoreTimestamp(bytes.get());
// We can return false directly here since keys are sorted by time and if
// we get time larger than `to`, there won't be time within range.
if (forward && time > to) {
return false;
}
if (!forward && time < from) {
return false;
}
final Bytes keyBytes = Bytes.wrap(
TimeFirstWindowKeySchema.extractStoreKeyBytes(bytes.get()));
if ((binaryKeyFrom == null || keyBytes.compareTo(binaryKeyFrom) >= 0)
&& (binaryKeyTo == null || keyBytes.compareTo(binaryKeyTo) <= 0)
&& time >= from && time <= to) {
return true;
}
iterator.next();
}
return false;
};
}
@Override
public <S extends Segment> List<S> segmentsToSearch(final Segments<S> segments,
final long from,
final long to,
final boolean forward) {
return segments.segments(from, to, forward);
}
static byte[] extractStoreKeyBytes(final byte[] binaryKey) {
final byte[] bytes = new byte[binaryKey.length - TIMESTAMP_SIZE - SEQNUM_SIZE - PREFIX_SIZE];
System.arraycopy(binaryKey, TIMESTAMP_SIZE + PREFIX_SIZE, bytes, 0, bytes.length);
return bytes;
}
static long extractStoreTimestamp(final byte[] binaryKey) {
return ByteBuffer.wrap(binaryKey).getLong(PREFIX_SIZE);
}
public static Bytes toStoreKeyBinary(final Windowed<Bytes> timeKey,
final int seqnum) {
return toStoreKeyBinary(timeKey.key().get(), timeKey.window().start(), seqnum);
}
public static <K> Windowed<K> fromStoreKey(final byte[] binaryKey,
final long windowSize,
final Deserializer<K> deserializer,
final String topic) {
final K key = deserializer.deserialize(topic, extractStoreKeyBytes(binaryKey));
final Window window = extractStoreWindow(binaryKey, windowSize);
return new Windowed<>(key, window);
}
public static <K> Bytes toStoreKeyBinary(final Windowed<K> timeKey,
final int seqnum,
final StateSerdes<K, ?> serdes) {
final byte[] serializedKey = serdes.rawKey(timeKey.key());
return toStoreKeyBinary(serializedKey, timeKey.window().start(), seqnum);
}
public static <K> Bytes toStoreKeyBinary(final K key,
final long timestamp,
final int seqnum,
final StateSerdes<K, ?> serdes) {
final byte[] serializedKey = serdes.rawKey(key);
return toStoreKeyBinary(serializedKey, timestamp, seqnum);
}
// for store serdes
public static Bytes toStoreKeyBinary(final Bytes key,
final long timestamp,
final int seqnum) {
return toStoreKeyBinary(key.get(), timestamp, seqnum);
}
static Bytes toStoreKeyBinary(final byte[] serializedKey,
final long timestamp,
final int seqnum) {
final ByteBuffer buf = ByteBuffer.allocate(
PREFIX_SIZE + TIMESTAMP_SIZE + serializedKey.length + SEQNUM_SIZE);
buf.put(TIME_FIRST_PREFIX);
buf.putLong(timestamp);
buf.put(serializedKey);
buf.putInt(seqnum);
return Bytes.wrap(buf.array());
}
public static Windowed<Bytes> fromStoreBytesKey(final byte[] binaryKey,
final long windowSize) {
final Bytes key = Bytes.wrap(extractStoreKeyBytes(binaryKey));
final Window window = extractStoreWindow(binaryKey, windowSize);
return new Windowed<>(key, window);
}
static Window extractStoreWindow(final byte[] binaryKey,
final long windowSize) {
final long start = extractStoreTimestamp(binaryKey);
return timeWindowForSize(start, windowSize);
}
static int extractStoreSequence(final byte[] binaryKey) {
return ByteBuffer.wrap(binaryKey).getInt(binaryKey.length - SEQNUM_SIZE);
}
public static byte[] fromNonPrefixWindowKey(final byte[] binaryKey) {
final ByteBuffer buffer = ByteBuffer.allocate(PREFIX_SIZE + binaryKey.length).put(TIME_FIRST_PREFIX);
// Put timestamp
buffer.put(binaryKey, binaryKey.length - SEQNUM_SIZE - TIMESTAMP_SIZE, TIMESTAMP_SIZE);
buffer.put(binaryKey, 0, binaryKey.length - SEQNUM_SIZE - TIMESTAMP_SIZE);
buffer.put(binaryKey, binaryKey.length - SEQNUM_SIZE, SEQNUM_SIZE);
return buffer.array();
}
}
public static | TimeFirstWindowKeySchema |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/metadata/DesiredNodesMetadataSerializationTests.java | {
"start": 1058,
"end": 2855
} | class ____ extends ChunkedToXContentDiffableSerializationTestCase<Metadata.ClusterCustom> {
@Override
protected Metadata.ClusterCustom makeTestChanges(Metadata.ClusterCustom testInstance) {
if (randomBoolean()) {
return testInstance;
}
return mutate((DesiredNodesMetadata) testInstance);
}
@Override
protected Writeable.Reader<Diff<Metadata.ClusterCustom>> diffReader() {
return DesiredNodesMetadata::readDiffFrom;
}
@Override
protected Metadata.ClusterCustom doParseInstance(XContentParser parser) throws IOException {
return DesiredNodesMetadata.fromXContent(parser);
}
@Override
protected Writeable.Reader<Metadata.ClusterCustom> instanceReader() {
return DesiredNodesMetadata::new;
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(
Collections.singletonList(
new NamedWriteableRegistry.Entry(Metadata.ClusterCustom.class, DesiredNodesMetadata.TYPE, DesiredNodesMetadata::new)
)
);
}
@Override
protected Metadata.ClusterCustom createTestInstance() {
return randomDesiredNodesMetadata();
}
@Override
protected Metadata.ClusterCustom mutateInstance(Metadata.ClusterCustom instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
private static DesiredNodesMetadata randomDesiredNodesMetadata() {
return new DesiredNodesMetadata(randomDesiredNodes());
}
private DesiredNodesMetadata mutate(DesiredNodesMetadata base) {
return new DesiredNodesMetadata(mutateDesiredNodes(base.getLatestDesiredNodes()));
}
}
| DesiredNodesMetadataSerializationTests |
java | quarkusio__quarkus | extensions/hibernate-search-orm-elasticsearch/deployment/src/test/java/io/quarkus/hibernate/search/orm/elasticsearch/test/configuration/ConfigActiveFalseStaticInjectionTest.java | {
"start": 503,
"end": 1678
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class).addClass(IndexedEntity.class))
.withConfigurationResource("application.properties")
.overrideConfigKey("quarkus.hibernate-search-orm.active", "false")
.assertException(e -> assertThat(e)
.hasMessageContainingAll(
"Hibernate Search for persistence unit '<default>' was deactivated through configuration properties",
"To avoid this exception while keeping the bean inactive", // Message from Arc with generic hints
"To activate Hibernate Search, set configuration property 'quarkus.hibernate-search-orm.active' to 'true'",
"This bean is injected into",
ConfigActiveFalseStaticInjectionTest.class.getName() + "#searchSession"));
@Inject
SearchSession searchSession;
@Test
public void test() {
Assertions.fail("Startup should have failed");
}
}
| ConfigActiveFalseStaticInjectionTest |
java | google__guava | android/guava-testlib/test/com/google/common/collect/testing/features/FeatureUtilTest.java | {
"start": 9600,
"end": 10037
} | class ____ extends BaseTester {}
ConflictingRequirementsException e =
assertThrows(
ConflictingRequirementsException.class, () -> buildTesterRequirements(Tester.class));
assertThat(e.getConflicts()).containsExactly(FOO);
assertThat(e.getSource()).isEqualTo(Tester.class);
}
public void testBuildTesterRequirements_classClassConflict_implied() {
@Require(value = IMPLIES_FOO, absent = FOO)
| Tester |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy/runtime/src/main/java/io/quarkus/resteasy/runtime/standalone/VertxOutputStream.java | {
"start": 371,
"end": 6698
} | class ____ extends AsyncOutputStream {
private final VertxHttpResponse response;
private final BufferAllocator allocator;
private ByteBuf pooledBuffer;
private long written;
private final long contentLength;
private boolean closed;
/**
* Construct a new instance. No write timeout is configured.
*
*/
public VertxOutputStream(VertxHttpResponse response, BufferAllocator allocator) {
this.allocator = allocator;
this.response = response;
Object length = response.getOutputHeaders().getFirst(HttpHeaders.CONTENT_LENGTH);
this.contentLength = length == null ? -1 : Long.parseLong(length.toString());
}
/**
* {@inheritDoc}
*/
public void write(final int b) throws IOException {
write(new byte[] { (byte) b }, 0, 1);
}
/**
* {@inheritDoc}
*/
public void write(final byte[] b) throws IOException {
write(b, 0, b.length);
}
/**
* {@inheritDoc}
*/
public void write(final byte[] b, final int off, final int len) throws IOException {
if (len < 1) {
return;
}
if (closed) {
throw new IOException("Stream is closed");
}
int rem = len;
int idx = off;
ByteBuf buffer = pooledBuffer;
try {
if (buffer == null) {
pooledBuffer = buffer = allocator.allocateBuffer();
}
while (rem > 0) {
int toWrite = Math.min(rem, buffer.writableBytes());
buffer.writeBytes(b, idx, toWrite);
rem -= toWrite;
idx += toWrite;
if (!buffer.isWritable()) {
ByteBuf tmpBuf = buffer;
this.pooledBuffer = buffer = allocator.allocateBuffer();
response.writeBlocking(tmpBuf, false);
}
}
} catch (Exception e) {
if (buffer != null && buffer.refCnt() > 0) {
buffer.release();
pooledBuffer = null;
closed = true;
}
throw new IOException(e);
}
updateWritten(len);
}
void updateWritten(final long len) throws IOException {
this.written += len;
if (contentLength != -1 && this.written >= contentLength) {
flush();
close();
}
}
/**
* {@inheritDoc}
*/
public void flush() throws IOException {
if (closed) {
throw new IOException("Stream is closed");
}
try {
if (pooledBuffer != null) {
response.writeBlocking(pooledBuffer, false);
pooledBuffer = null;
}
} catch (Exception e) {
if (pooledBuffer != null) {
pooledBuffer.release();
pooledBuffer = null;
}
throw new IOException(e);
}
}
/**
* {@inheritDoc}
*/
public void close() throws IOException {
if (closed)
return;
try {
response.writeBlocking(pooledBuffer, true);
} catch (Exception e) {
throw new IOException(e);
} finally {
closed = true;
pooledBuffer = null;
}
}
@Override
public CompletionStage<Void> asyncFlush() {
return asyncFlush(false);
}
private CompletionStage<Void> asyncFlush(boolean isLast) {
if (closed) {
CompletableFuture<Void> ret = new CompletableFuture<>();
ret.completeExceptionally(new IOException("Stream is closed"));
return ret;
}
if (pooledBuffer != null) {
ByteBuf sentBuffer = pooledBuffer;
pooledBuffer = null;
return response.writeNonBlocking(sentBuffer, isLast);
}
return CompletableFuture.completedFuture(null);
}
@Override
public CompletionStage<Void> asyncWrite(final byte[] b, final int off, final int len) {
if (len < 1) {
return CompletableFuture.completedFuture(null);
}
if (closed) {
CompletableFuture<Void> ret = new CompletableFuture<>();
ret.completeExceptionally(new IOException("Stream is closed"));
return ret;
}
CompletableFuture<Void> ret = CompletableFuture.completedFuture(null);
ByteBuf wrappedBuffer = Unpooled.wrappedBuffer(b, off, len);
if (pooledBuffer == null) {
pooledBuffer = allocator.allocateBuffer();
}
pooledBuffer.writeBytes(wrappedBuffer, Math.min(pooledBuffer.writableBytes(), wrappedBuffer.readableBytes()));
if (pooledBuffer.writableBytes() == 0) {
CompletableFuture<Void> cf = new CompletableFuture<>();
ret = cf;
ByteBuf filled = pooledBuffer;
pooledBuffer = null;
response.writeNonBlocking(filled, false).whenComplete(new BiConsumer<Void, Throwable>() {
@Override
public void accept(Void unused, Throwable throwable) {
if (throwable != null) {
cf.completeExceptionally(throwable);
return;
}
pooledBuffer = allocator.allocateBuffer();
pooledBuffer.writeBytes(wrappedBuffer,
Math.min(pooledBuffer.writableBytes(), wrappedBuffer.readableBytes()));
if (pooledBuffer.writableBytes() == 0) {
ByteBuf filled = pooledBuffer;
pooledBuffer = null;
response.writeNonBlocking(filled, false).whenComplete(this);
} else {
cf.complete(null);
}
}
});
}
return ret.thenCompose(v -> asyncUpdateWritten(len));
}
CompletionStage<Void> asyncUpdateWritten(final long len) {
this.written += len;
if (contentLength != -1 && this.written >= contentLength) {
return asyncFlush(true).thenAccept(v -> {
closed = true;
});
}
return CompletableFuture.completedFuture(null);
}
}
| VertxOutputStream |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java | {
"start": 1446,
"end": 2655
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(Platforms.class);
private static final ServiceLoader<Platform> platforms = ServiceLoader.load(Platform.class);
public static void init(Configuration conf) throws IOException {
NativeSerialization.getInstance().reset();
synchronized (platforms) {
for (Platform platform : platforms) {
platform.init();
}
}
}
public static boolean support(String keyClassName,
INativeSerializer<?> serializer, JobConf job) {
synchronized (platforms) {
for (Platform platform : platforms) {
if (platform.support(keyClassName, serializer, job)) {
LOG.debug("platform " + platform.name() + " support key class"
+ keyClassName);
return true;
}
}
}
return false;
}
public static boolean define(Class<?> keyComparator) {
synchronized (platforms) {
for (Platform platform : platforms) {
if (platform.define(keyComparator)) {
LOG.debug("platform " + platform.name() + " define comparator "
+ keyComparator.getName());
return true;
}
}
}
return false;
}
}
| Platforms |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/AbstractThreadedActionListener.java | {
"start": 887,
"end": 2801
} | class ____<Response> implements ActionListener<Response> {
private static final Logger logger = LogManager.getLogger(AbstractThreadedActionListener.class);
protected final Executor executor;
protected final ActionListener<Response> delegate;
protected final boolean forceExecution;
protected AbstractThreadedActionListener(Executor executor, boolean forceExecution, ActionListener<Response> delegate) {
this.forceExecution = forceExecution;
this.executor = executor;
this.delegate = delegate;
}
@Override
public final void onFailure(final Exception e) {
executor.execute(new AbstractRunnable() {
@Override
public boolean isForceExecution() {
return forceExecution;
}
@Override
protected void doRun() {
delegate.onFailure(e);
}
@Override
public void onRejection(Exception rejectionException) {
rejectionException.addSuppressed(e);
try {
delegate.onFailure(rejectionException);
} catch (Exception doubleFailure) {
rejectionException.addSuppressed(doubleFailure);
onFailure(rejectionException);
}
}
@Override
public void onFailure(Exception e) {
logger.error(() -> "failed to execute failure callback on [" + AbstractThreadedActionListener.this + "]", e);
assert false : e;
}
@Override
public String toString() {
return AbstractThreadedActionListener.this + "/onFailure";
}
});
}
@Override
public final String toString() {
return getClass().getSimpleName() + "[" + executor + "/" + delegate + "]";
}
}
| AbstractThreadedActionListener |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StXMax.java | {
"start": 2241,
"end": 5609
} | class ____ extends UnaryScalarFunction {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "StXMax", StXMax::new);
@FunctionInfo(
returnType = "double",
preview = true,
appliesTo = { @FunctionAppliesTo(lifeCycle = FunctionAppliesToLifecycle.PREVIEW) },
description = "Extracts the maximum value of the `x` coordinates from the supplied geometry.\n"
+ "If the geometry is of type `geo_point` or `geo_shape` this is equivalent to extracting the maximum `longitude` value.",
examples = @Example(file = "spatial_shapes", tag = "st_x_y_min_max"),
depthOffset = 1 // So this appears as a subsection of ST_ENVELOPE
)
public StXMax(
Source source,
@Param(
name = "point",
type = { "geo_point", "geo_shape", "cartesian_point", "cartesian_shape" },
description = "Expression of type `geo_point`, `geo_shape`, `cartesian_point` or `cartesian_shape`. "
+ "If `null`, the function returns `null`."
) Expression field
) {
super(source, field);
}
private StXMax(StreamInput in) throws IOException {
super(in);
}
@Override
public String getWriteableName() {
return ENTRY.name;
}
@Override
protected TypeResolution resolveType() {
return isSpatial(field(), sourceText(), TypeResolutions.ParamOrdinal.DEFAULT);
}
@Override
public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
if (field().dataType() == GEO_POINT || field().dataType() == DataType.GEO_SHAPE) {
return new StXMaxFromWKBGeoEvaluator.Factory(source(), toEvaluator.apply(field()));
}
return new StXMaxFromWKBEvaluator.Factory(source(), toEvaluator.apply(field()));
}
@Override
public DataType dataType() {
return DOUBLE;
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new StXMax(source(), newChildren.get(0));
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, StXMax::new, field());
}
@ConvertEvaluator(extraName = "FromWKB", warnExceptions = { IllegalArgumentException.class })
static double fromWellKnownBinary(BytesRef wkb) {
var geometry = UNSPECIFIED.wkbToGeometry(wkb);
if (geometry instanceof Point point) {
return point.getX();
}
var envelope = SpatialEnvelopeVisitor.visitCartesian(geometry);
if (envelope.isPresent()) {
return envelope.get().getMaxX();
}
throw new IllegalArgumentException("Cannot determine envelope of geometry");
}
@ConvertEvaluator(extraName = "FromWKBGeo", warnExceptions = { IllegalArgumentException.class })
static double fromWellKnownBinaryGeo(BytesRef wkb) {
var geometry = UNSPECIFIED.wkbToGeometry(wkb);
if (geometry instanceof Point point) {
return point.getX();
}
var envelope = SpatialEnvelopeVisitor.visitGeo(geometry, WrapLongitude.WRAP);
if (envelope.isPresent()) {
return envelope.get().getMaxX();
}
throw new IllegalArgumentException("Cannot determine envelope of geometry");
}
}
| StXMax |
java | elastic__elasticsearch | build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/SymbolicLinkPreservingTar.java | {
"start": 2014,
"end": 2513
} | class ____ extends Tar {
@Override
protected CopyAction createCopyAction() {
final ArchiveOutputStreamFactory compressor = switch (getCompression()) {
case BZIP2 -> Bzip2Archiver.getCompressor();
case GZIP -> GzipArchiver.getCompressor();
default -> new SimpleCompressor();
};
return new SymbolicLinkPreservingTarCopyAction(getArchiveFile(), compressor, isPreserveFileTimestamps());
}
private static | SymbolicLinkPreservingTar |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/BatchTask.java | {
"start": 69380,
"end": 69668
} | class ____
* instantiated without arguments using the null-ary constructor. Instantiation will fail if
* this constructor does not exist or is not public.
*
* @param <T> The generic type of the user code class.
* @param config The task configuration containing the | is |
java | google__guava | android/guava/src/com/google/common/primitives/UnsignedBytes.java | {
"start": 16013,
"end": 16913
} | enum ____ implements Comparator<byte[]> {
INSTANCE;
@Override
public int compare(byte[] left, byte[] right) {
int minLength = Math.min(left.length, right.length);
for (int i = 0; i < minLength; i++) {
int result = UnsignedBytes.compare(left[i], right[i]);
if (result != 0) {
return result;
}
}
return left.length - right.length;
}
@Override
public String toString() {
return "UnsignedBytes.lexicographicalComparator() (pure Java version)";
}
}
/**
* Returns the Unsafe-using Comparator, or falls back to the pure-Java implementation if unable
* to do so.
*/
static Comparator<byte[]> getBestComparator() {
try {
Class<?> theClass = Class.forName(UNSAFE_COMPARATOR_NAME);
// requireNonNull is safe because the | PureJavaComparator |
java | alibaba__nacos | example/src/main/java/com/alibaba/nacos/example/ConfigExample.java | {
"start": 988,
"end": 2504
} | class ____ {
public static void main(String[] args) throws NacosException, InterruptedException {
String serverAddr = "localhost";
String dataId = "test";
String group = "DEFAULT_GROUP";
Properties properties = new Properties();
properties.put("serverAddr", serverAddr);
ConfigService configService = NacosFactory.createConfigService(properties);
String content = configService.getConfig(dataId, group, 5000);
System.out.println("[config content] " + content);
configService.addListener(dataId, group, new Listener() {
@Override
public void receiveConfigInfo(String configInfo) {
System.out.println("receive:" + configInfo);
}
@Override
public Executor getExecutor() {
return null;
}
});
boolean isPublishOk = configService.publishConfig(dataId, group, "content");
System.out.println("[publish result] " + isPublishOk);
Thread.sleep(3000);
content = configService.getConfig(dataId, group, 5000);
System.out.println("[config content]: " + content);
boolean isRemoveOk = configService.removeConfig(dataId, group);
System.out.println("[delete result]: " + isRemoveOk);
Thread.sleep(3000);
content = configService.getConfig(dataId, group, 5000);
System.out.println("[config content]: " + content);
Thread.sleep(300000);
}
}
| ConfigExample |
java | dropwizard__dropwizard | dropwizard-e2e/src/test/java/com/example/app1/App1Test.java | {
"start": 1186,
"end": 5668
} | class ____ {
public static final DropwizardAppExtension<Configuration> RULE =
new DropwizardAppExtension<>(App1.class, "app1/config.yml", new ResourceConfigurationSourceProvider());
private static Client client;
@BeforeAll
public static void setup() {
final JerseyClientConfiguration config = new JerseyClientConfiguration();
// Avoid flakiness with default timeouts in CI builds
config.setTimeout(Duration.seconds(5));
client = new JerseyClientBuilder(RULE.getEnvironment())
.withProvider(new CustomJsonProvider(Jackson.newObjectMapper()))
.using(config)
.build("test client");
}
@Test
void custom204OnEmptyOptional() {
final Client client = new JerseyClientBuilder(RULE.getEnvironment()).build("test client 1");
final String url = String.format("http://localhost:%d/empty-optional", RULE.getLocalPort());
final Response response = client.target(url).request().get();
assertThat(response.getStatus()).isEqualTo(204);
}
@Test
void custom404OnViewRenderMissingMustacheTemplate() {
final Client client = new JerseyClientBuilder(RULE.getEnvironment()).build("test client 2");
final String url = String.format("http://localhost:%d/view-with-missing-tpl", RULE.getLocalPort());
final Response response = client.target(url).request().get();
assertThat(response.getStatus()).isEqualTo(404);
}
@Test
@Disabled("EOF is handled by Jetty since Jetty 12")
void earlyEofTest() throws IOException {
// Only eof test so we ensure it's false before test
((App1)RULE.getApplication()).wasEofExceptionHit = false;
final URL url = new URL(String.format("http://localhost:%d/mapper", RULE.getLocalPort()));
final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("POST");
conn.setRequestProperty("Content-Type", "application/json");
conn.setDoOutput(true);
conn.setFixedLengthStreamingMode(100000);
conn.getOutputStream().write("{".getBytes(StandardCharsets.UTF_8));
conn.disconnect();
await().atMost(5, TimeUnit.SECONDS)
.until(() -> ((App1) RULE.getApplication()).wasEofExceptionHit);
assertThat(((App1) RULE.getApplication()).wasEofExceptionHit).isTrue();
}
@Test
void customJsonProvider() {
final String url = String.format("http://localhost:%d/mapper", RULE.getLocalPort());
final String response = client.target(url)
.request()
.post(Entity.json("/** A Dropwizard specialty */\n{\"check\": \"mate\"}"), String.class);
assertThat(response).isEqualTo("/** A Dropwizard specialty */\n" +
"{\"check\":\"mate\",\"hello\":\"world\"}");
}
@Test
void customJsonProviderMissingHeader() {
final String url = String.format("http://localhost:%d/mapper", RULE.getLocalPort());
final Response response = client.target(url)
.request()
.post(Entity.json("{\"check\": \"mate\"}"));
assertThat(response.getStatus()).isEqualTo(400);
}
@Test
void customJsonProviderClient() {
final String url = String.format("http://localhost:%d/mapper", RULE.getLocalPort());
final String response = client.target(url)
.request()
.post(Entity.json(Collections.singletonMap("check", "mate")), String.class);
assertThat(response).isEqualTo("/** A Dropwizard specialty */\n" +
"{\"check\":\"mate\",\"hello\":\"world\"}");
}
@Test
void customJsonProviderRoundtrip() {
final String url = String.format("http://localhost:%d/mapper", RULE.getLocalPort());
final GenericType<Map<String, String>> typ = new GenericType<Map<String, String>>() {
};
final Map<String, String> response = client.target(url)
.request()
.post(Entity.json(Collections.singletonMap("check", "mate")), typ);
assertThat(response).containsExactly(entry("check", "mate"), entry("hello", "world"));
}
@Test
void customBodyWriterTest() {
final String url = String.format("http://localhost:%d/custom-class", RULE.getLocalPort());
final String response = client.target(url)
.request()
.get(String.class);
assertThat(response).isEqualTo("I'm a custom class");
}
}
| App1Test |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/subscribers/StrictSubscriberTest.java | {
"start": 995,
"end": 2024
} | class ____ extends RxJavaTest {
@Test
public void strictMode() {
final List<Object> list = new ArrayList<>();
Subscriber<Object> sub = new Subscriber<Object>() {
@Override
public void onSubscribe(Subscription s) {
s.request(10);
}
@Override
public void onNext(Object t) {
list.add(t);
}
@Override
public void onError(Throwable t) {
list.add(t);
}
@Override
public void onComplete() {
list.add("Done");
}
};
new Flowable<Object>() {
@Override
protected void subscribeActual(Subscriber<? super Object> s) {
s.onSubscribe(new BooleanSubscription());
s.onNext(s);
}
}.subscribe(sub);
assertTrue(list.toString(), list.get(0) instanceof StrictSubscriber);
}
static final | StrictSubscriberTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.