language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-ssh/src/main/java/org/apache/camel/component/ssh/ResourceBasedSSHKeyVerifier.java | {
"start": 1654,
"end": 7801
} | class ____ implements ServerKeyVerifier {
protected final Logger log = LoggerFactory.getLogger(getClass());
private CamelContext camelContext;
private boolean failOnUnknownHost;
private String knownHostsResource;
public ResourceBasedSSHKeyVerifier(CamelContext camelContext, String knownHostsResource) {
this(camelContext, knownHostsResource, false);
}
public ResourceBasedSSHKeyVerifier(CamelContext camelContext, String knownHostsResource,
boolean failOnUnknownHost) {
this.camelContext = camelContext;
this.knownHostsResource = knownHostsResource;
this.failOnUnknownHost = failOnUnknownHost;
}
@Override
public boolean verifyServerKey(ClientSession sshClientSession, SocketAddress remoteAddress, PublicKey serverKey) {
log.debug("Trying to find known_hosts file {}", knownHostsResource);
InputStream knownHostsInputStream = null;
try {
knownHostsInputStream = ResourceHelper.resolveMandatoryResourceAsInputStream(camelContext,
knownHostsResource);
List<String> possibleTokens = getKnownHostsFileTokensForSocketAddress(remoteAddress);
log.debug("Trying to match PublicKey against provided known_hosts file");
PublicKey matchingKey = findKeyForServerToken(knownHostsInputStream, possibleTokens);
if (matchingKey != null) {
log.debug("Found PublicKey match for server");
return Arrays.areEqual(matchingKey.getEncoded(), serverKey.getEncoded());
}
} catch (IOException ioException) {
log.debug(String.format("Could not find known_hosts file %s", knownHostsResource), ioException);
} finally {
IOHelper.close(knownHostsInputStream);
}
if (failOnUnknownHost) {
log.warn("Could not find matching key for client session, connection will fail due to configuration");
return false;
} else {
log.warn(
"Could not find matching key for client session, connection will continue anyway due to configuration");
return true;
}
}
private PublicKey findKeyForServerToken(InputStream knownHostsInputStream, List<String> possibleTokens) {
String knowHostsLines = camelContext.getTypeConverter().convertTo(String.class, knownHostsInputStream);
if (knowHostsLines == null) {
log.warn("Could not read from the known_hosts file input stream");
return null;
}
for (String s : knowHostsLines.split("\n")) {
String[] parts = s.split(" ");
if (parts.length != 3) {
log.warn("Found malformed entry in known_hosts file");
continue;
}
String entry = parts[0];
String key = parts[2];
for (String serverToken : possibleTokens) {
if (entry.contains(serverToken)) {
try {
return loadKey(key);
} catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
log.warn(String.format("Could not load key for server token %s", entry), e);
}
}
}
}
return null;
}
private List<String> getKnownHostsFileTokensForSocketAddress(SocketAddress remoteAddress) {
List<String> returnList = new LinkedList<>();
if (remoteAddress instanceof InetSocketAddress) {
InetSocketAddress inetSocketAddress = (InetSocketAddress) remoteAddress;
String hostName = inetSocketAddress.getHostName();
String ipAddress = inetSocketAddress.getAddress().getHostAddress();
String remotePort = String.valueOf(inetSocketAddress.getPort());
returnList.add(hostName);
returnList.add("[" + hostName + "]:" + remotePort);
returnList.add(ipAddress);
returnList.add("[" + ipAddress + "]:" + remotePort);
}
return returnList;
}
/*
* Decode the public key string, which is a base64 encoded string that consists
* of multiple parts: 1. public key type (ssh-rsa, ssh-dss, ...) 2. binary key
* data (May consists of multiple parts)
*
* Each part is composed by two sub-parts 1. Length of the part (4 bytes) 2.
* Binary part (length as defined by 1.)
*
* Uses SSHPublicKeyHolder to construct the actual PublicKey Object
*
* Note: Currently only supports RSA and DSA Public keys as required by
* https://tools.ietf.org/html/rfc4253#section-6.6
*
*/
PublicKey loadKey(String key) throws NoSuchAlgorithmException, InvalidKeySpecException {
SSHPublicKeyHolder sshPublicKeyHolder = new SSHPublicKeyHolder();
byte[] keyByteArray = Base64.getDecoder().decode(key);
int keyByteArrayCursor = 0;
byte[] tmpData = new byte[4];
int tmpCursor = 0;
boolean getLengthMode = true;
while (keyByteArrayCursor < keyByteArray.length) {
if (getLengthMode) {
if (tmpCursor < 4) {
tmpData[tmpCursor] = keyByteArray[keyByteArrayCursor];
tmpCursor++;
keyByteArrayCursor++;
continue;
} else {
tmpCursor = 0;
getLengthMode = false;
tmpData = new byte[byteArrayToInt(tmpData)];
}
}
tmpData[tmpCursor] = keyByteArray[keyByteArrayCursor];
tmpCursor++;
keyByteArrayCursor++;
if (tmpCursor == tmpData.length) {
sshPublicKeyHolder.push(tmpData);
getLengthMode = true;
tmpData = new byte[4];
tmpCursor = 0;
}
}
return sshPublicKeyHolder.toPublicKey();
}
private int byteArrayToInt(byte[] tmpData) {
return new BigInteger(tmpData).intValue();
}
}
| ResourceBasedSSHKeyVerifier |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/SharedStateRegistry.java | {
"start": 1434,
"end": 5083
} | interface ____ extends AutoCloseable {
/** A singleton object for the default implementation of a {@link SharedStateRegistryFactory} */
SharedStateRegistryFactory DEFAULT_FACTORY =
(deleteExecutor, checkpoints, recoveryClaimMode) -> {
SharedStateRegistry sharedStateRegistry =
new SharedStateRegistryImpl(deleteExecutor);
for (CompletedCheckpoint checkpoint : checkpoints) {
checkpoint.registerSharedStatesAfterRestored(
sharedStateRegistry, recoveryClaimMode);
}
return sharedStateRegistry;
};
/**
* Shortcut for {@link #registerReference(SharedStateRegistryKey, StreamStateHandle, long,
* boolean)} with preventDiscardingCreatedCheckpoint = false.
*/
default StreamStateHandle registerReference(
SharedStateRegistryKey registrationKey, StreamStateHandle state, long checkpointID) {
return registerReference(registrationKey, state, checkpointID, false);
}
/**
* Register a reference to the given shared state in the registry. The registry key should be
* based on the physical identifier of the state. If there is already a state handle registered
* under the same key and the 'new' state is not equal to the old one, an exception will be
* thrown.
*
* <p>IMPORTANT: the caller must use the returned state handle instead of the passed one because
* the registry might replace or update it.
*
* @param state the shared state for which we register a reference.
* @param checkpointID which uses the state
* @param preventDiscardingCreatedCheckpoint as long as this state is still in use. The
* "checkpoint that created the state" is recorded on the first state registration.
* @return the state handle registered under the given key. It might differ from the passed
* state handle, e.g. if it was a placeholder.
*/
StreamStateHandle registerReference(
SharedStateRegistryKey registrationKey,
StreamStateHandle state,
long checkpointID,
boolean preventDiscardingCreatedCheckpoint);
/**
* Unregister state that is not referenced by the given checkpoint ID or any newer.
*
* @param lowestCheckpointID which is still valid.
* @return a set of checkpointID which is still in use.
*/
Set<Long> unregisterUnusedState(long lowestCheckpointID);
/**
* Register given shared states in the registry.
*
* <p>NOTE: For state from checkpoints from other jobs or runs (i.e. after recovery), please use
* {@link #registerAllAfterRestored(CompletedCheckpoint, RecoveryClaimMode)}
*
* @param stateHandles The shared states to register.
* @param checkpointID which uses the states.
*/
void registerAll(Iterable<? extends CompositeStateHandle> stateHandles, long checkpointID);
/**
* Set the lowest checkpoint ID below which no state is discarded, inclusive.
*
* <p>After recovery from an incremental checkpoint, its state should NOT be discarded, even if
* {@link #unregisterUnusedState(long) not used} anymore (unless recovering in {@link
* RecoveryClaimMode#CLAIM CLAIM} mode).
*
* <p>This should hold for both cases: when recovering from that initial checkpoint; and from
* any subsequent checkpoint derived from it.
*/
void registerAllAfterRestored(CompletedCheckpoint checkpoint, RecoveryClaimMode mode);
void checkpointCompleted(long checkpointId);
}
| SharedStateRegistry |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/ApplicationContextFactory.java | {
"start": 1342,
"end": 4046
} | interface ____ {
/**
* A default {@link ApplicationContextFactory} implementation that will create an
* appropriate context for the {@link WebApplicationType}.
*/
ApplicationContextFactory DEFAULT = new DefaultApplicationContextFactory();
/**
* Return the {@link Environment} type expected to be set on the
* {@link #create(WebApplicationType) created} application context. The result of this
* method can be used to convert an existing environment instance to the correct type.
* @param webApplicationType the web application type or {@code null}
* @return the expected application context type or {@code null} to use the default
* @since 2.6.14
*/
default @Nullable Class<? extends ConfigurableEnvironment> getEnvironmentType(
@Nullable WebApplicationType webApplicationType) {
return null;
}
/**
* Create a new {@link Environment} to be set on the
* {@link #create(WebApplicationType) created} application context. The result of this
* method must match the type returned by
* {@link #getEnvironmentType(WebApplicationType)}.
* @param webApplicationType the web application type or {@code null}
* @return an environment instance or {@code null} to use the default
* @since 2.6.14
*/
default @Nullable ConfigurableEnvironment createEnvironment(@Nullable WebApplicationType webApplicationType) {
return null;
}
/**
* Creates the {@link ConfigurableApplicationContext application context} for a
* {@link SpringApplication}, respecting the given {@code webApplicationType}.
* @param webApplicationType the web application type
* @return the newly created application context
*/
@Nullable ConfigurableApplicationContext create(@Nullable WebApplicationType webApplicationType);
/**
* Creates an {@code ApplicationContextFactory} that will create contexts by
* instantiating the given {@code contextClass} through its primary constructor.
* @param contextClass the context class
* @return the factory that will instantiate the context class
* @see BeanUtils#instantiateClass(Class)
*/
static ApplicationContextFactory ofContextClass(Class<? extends ConfigurableApplicationContext> contextClass) {
return of(() -> BeanUtils.instantiateClass(contextClass));
}
/**
* Creates an {@code ApplicationContextFactory} that will create contexts by calling
* the given {@link Supplier}.
* @param supplier the context supplier, for example
* {@code AnnotationConfigApplicationContext::new}
* @return the factory that will instantiate the context class
*/
static ApplicationContextFactory of(Supplier<ConfigurableApplicationContext> supplier) {
return (webApplicationType) -> supplier.get();
}
}
| ApplicationContextFactory |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_fushou.java | {
"start": 1550,
"end": 1588
} | class ____ {
public L3() {
}
}
}
| L3 |
java | apache__camel | dsl/camel-kamelet-main/src/main/java/org/apache/camel/main/download/CamelCustomClassLoader.java | {
"start": 1032,
"end": 1677
} | class ____ extends ClassLoader {
private final CamelContext camelContext;
public CamelCustomClassLoader(ClassLoader parent, CamelContext camelContext) {
super(parent);
this.camelContext = camelContext;
}
@Override
protected Class<?> findClass(String name) throws ClassNotFoundException {
for (ClassLoader cl : camelContext.getClassResolver().getClassLoaders()) {
try {
return cl.loadClass(name);
} catch (ClassNotFoundException e) {
// ignore
}
}
throw new ClassNotFoundException(name);
}
}
| CamelCustomClassLoader |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/AnnotationTypeMappingsTests.java | {
"start": 23108,
"end": 23176
} | interface ____ {
}
@Retention(RetentionPolicy.RUNTIME)
@AA
@AB
@ | AB |
java | spring-projects__spring-framework | spring-context-support/src/main/java/org/springframework/scheduling/quartz/CronTriggerFactoryBean.java | {
"start": 1459,
"end": 1649
} | class ____ the Spring bean name as job name, the Quartz default group ("DEFAULT")
* as job group, the current time as start time, and indefinite repetition, if not specified.
*
* <p>This | uses |
java | elastic__elasticsearch | libs/core/src/test/java/org/elasticsearch/core/internal/provider/EmbeddedImplClassLoaderTests.java | {
"start": 7028,
"end": 7271
} | class ____ reports the given version in its toString. */
static byte[] classBytesForVersion(int version) {
return InMemoryJavaCompiler.compile("p.FooBar", String.format(Locale.ENGLISH, """
package p;
public | that |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFileAttributes.java | {
"start": 1164,
"end": 1855
} | interface ____ extends INodeAttributes {
/** @return the file replication. */
short getFileReplication();
/** @return whether the file is striped (instead of contiguous) */
boolean isStriped();
/** @return whether the file is striped (instead of contiguous) */
BlockType getBlockType();
/** @return the ID of the ErasureCodingPolicy */
byte getErasureCodingPolicyID();
/** @return preferred block size in bytes */
long getPreferredBlockSize();
/** @return the header as a long. */
long getHeaderLong();
boolean metadataEquals(INodeFileAttributes other);
byte getLocalStoragePolicyID();
/** A copy of the inode file attributes */
static | INodeFileAttributes |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/ClientRequest.java | {
"start": 1185,
"end": 4354
} | class ____ {
private final String destination;
private final AbstractRequest.Builder<?> requestBuilder;
private final int correlationId;
private final String clientId;
private final long createdTimeMs;
private final boolean expectResponse;
private final int requestTimeoutMs;
private final RequestCompletionHandler callback;
/**
* @param destination The brokerId to send the request to
* @param requestBuilder The builder for the request to make
* @param correlationId The correlation id for this client request
* @param clientId The client ID to use for the header
* @param createdTimeMs The unix timestamp in milliseconds for the time at which this request was created.
* @param expectResponse Should we expect a response message or is this request complete once it is sent?
* @param callback A callback to execute when the response has been received (or null if no callback is necessary)
*/
public ClientRequest(String destination,
AbstractRequest.Builder<?> requestBuilder,
int correlationId,
String clientId,
long createdTimeMs,
boolean expectResponse,
int requestTimeoutMs,
RequestCompletionHandler callback) {
this.destination = destination;
this.requestBuilder = requestBuilder;
this.correlationId = correlationId;
this.clientId = clientId;
this.createdTimeMs = createdTimeMs;
this.expectResponse = expectResponse;
this.requestTimeoutMs = requestTimeoutMs;
this.callback = callback;
}
@Override
public String toString() {
return "ClientRequest(expectResponse=" + expectResponse +
", callback=" + callback +
", destination=" + destination +
", correlationId=" + correlationId +
", clientId=" + clientId +
", createdTimeMs=" + createdTimeMs +
", requestBuilder=" + requestBuilder +
")";
}
public boolean expectResponse() {
return expectResponse;
}
public ApiKeys apiKey() {
return requestBuilder.apiKey();
}
public RequestHeader makeHeader(short version) {
ApiKeys requestApiKey = apiKey();
return new RequestHeader(
new RequestHeaderData()
.setRequestApiKey(requestApiKey.id)
.setRequestApiVersion(version)
.setClientId(clientId)
.setCorrelationId(correlationId),
requestApiKey.requestHeaderVersion(version));
}
public AbstractRequest.Builder<?> requestBuilder() {
return requestBuilder;
}
public String destination() {
return destination;
}
public RequestCompletionHandler callback() {
return callback;
}
public long createdTimeMs() {
return createdTimeMs;
}
public int correlationId() {
return correlationId;
}
public int requestTimeoutMs() {
return requestTimeoutMs;
}
}
| ClientRequest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/reservation/ReservationAllocation.java | {
"start": 1440,
"end": 4984
} | interface ____
extends Comparable<ReservationAllocation> {
/**
* Returns the unique identifier {@link ReservationId} that represents the
* reservation
*
* @return reservationId the unique identifier {@link ReservationId} that
* represents the reservation
*/
ReservationId getReservationId();
/**
* Returns the original {@link ReservationDefinition} submitted by the client
*
* @return the {@link ReservationDefinition} submitted by the client
*/
ReservationDefinition getReservationDefinition();
/**
* Returns the time at which the reservation is activated.
*
* @return the time at which the reservation is activated
*/
long getStartTime();
/**
* Returns the time at which the reservation terminates.
*
* @return the time at which the reservation terminates
*/
long getEndTime();
/**
* Returns the map of resources requested against the time interval for which
* they were.
*
* @return the allocationRequests the map of resources requested against the
* time interval for which they were
*/
Map<ReservationInterval, Resource> getAllocationRequests();
/**
* Return a string identifying the plan to which the reservation belongs
*
* @return the plan to which the reservation belongs
*/
String getPlanName();
/**
* Returns the user who requested the reservation
*
* @return the user who requested the reservation
*/
String getUser();
/**
* Returns whether the reservation has gang semantics or not
*
* @return true if there is a gang request, false otherwise
*/
boolean containsGangs();
/**
* Sets the time at which the reservation was accepted by the system
*
* @param acceptedAt the time at which the reservation was accepted by the
* system
*/
void setAcceptanceTimestamp(long acceptedAt);
/**
* Returns the time at which the reservation was accepted by the system
*
* @return the time at which the reservation was accepted by the system
*/
long getAcceptanceTime();
/**
* Returns the capacity represented by cumulative resources reserved by the
* reservation at the specified point of time
*
* @param tick the time (UTC in ms) for which the reserved resources are
* requested
* @return the resources reserved at the specified time
*/
Resource getResourcesAtTime(long tick);
/**
* Return a RLE representation of used resources.
*
* @return a RLE encoding of resources allocated over time.
*/
RLESparseResourceAllocation getResourcesOverTime();
/**
* Return a RLE representation of used resources.
*
* @param start start of the time interval.
* @param end end of the time interval.
* @return a RLE encoding of resources allocated over time.
*/
RLESparseResourceAllocation getResourcesOverTime(long start, long end);
/**
* Get the periodicity of this reservation representing the time period of the
* periodic job. Period is represented in milliseconds for periodic jobs.
* Period is 0 for non-periodic jobs.
*
* @return periodicity of this reservation
*/
long getPeriodicity();
/**
* Set the periodicity of this reservation representing the time period of the
* periodic job. Period is represented in milliseconds for periodic jobs.
* Period is 0 for non-periodic jobs.
*
* @param period periodicity of this reservation
*/
@VisibleForTesting
void setPeriodicity(long period);
}
| ReservationAllocation |
java | google__guice | extensions/grapher/test/com/google/inject/grapher/TransitiveDependencyVisitorTest.java | {
"start": 6466,
"end": 6658
} | class ____ extends AbstractModule {
@Override
protected void configure() {
bind(ConstructedClass.class).toInstance(new ConstructedClass());
}
}
private static | InstanceModule |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/AdviceModeImportSelector.java | {
"start": 2069,
"end": 2509
} | class ____ (b) that the given annotation has an
* {@linkplain #getAdviceModeAttributeName() advice mode attribute} of type
* {@link AdviceMode}.
* <p>The {@link #selectImports(AdviceMode)} method is then invoked, allowing the
* concrete implementation to choose imports in a safe and convenient fashion.
* @throws IllegalArgumentException if expected annotation {@code A} is not present
* on the importing {@code @Configuration} | and |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/bind_in_foreach/BindInForeachTest.java | {
"start": 1123,
"end": 2257
} | class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void setUp() throws Exception {
try (Reader reader = Resources
.getResourceAsReader("org/apache/ibatis/submitted/bind_in_foreach/mybatis-config.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/bind_in_foreach/CreateDB.sql");
}
@Test
void testBindInForeach() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
Mapper mapper = sqlSession.getMapper(Mapper.class);
assertEquals(3, mapper.createUsers(List.of(2, 4, 6)));
List<User> users = mapper.selectUsers();
assertEquals(3, users.size());
assertEquals(1, users.get(0).getId());
assertEquals("User2", users.get(0).getName());
assertEquals(2, users.get(1).getId());
assertEquals("User4", users.get(1).getName());
assertEquals(3, users.get(2).getId());
assertEquals("User6", users.get(2).getName());
}
}
}
| BindInForeachTest |
java | apache__camel | components/camel-mongodb/src/main/java/org/apache/camel/component/mongodb/MongoDbEndpoint.java | {
"start": 2780,
"end": 42074
} | class ____ extends DefaultEndpoint implements EndpointServiceLocation {
private static final Logger LOG = LoggerFactory.getLogger(MongoDbEndpoint.class);
@UriParam(description = "Sets the connection bean used as a client for connecting to a database.")
private MongoClient mongoConnection;
@UriPath(description = "Sets the connection bean reference used to lookup a client for connecting to a database if no hosts parameter is present.")
@Metadata(required = true)
private String connectionBean;
@UriParam(label = "security", secret = true)
private String username;
@UriParam(label = "security", secret = true)
private String password;
@UriParam
private String hosts;
//Authentication configuration
@UriParam(label = "security")
private String authSource;
@UriParam
private String database;
@UriParam
private String collection;
@UriParam
private String collectionIndex;
@UriParam
private MongoDbOperation operation;
@UriParam(defaultValue = "true")
private boolean createCollection = true;
@UriParam(label = "advanced")
private boolean dynamicity;
@UriParam(label = "advanced")
private boolean writeResultAsHeader;
@UriParam(label = "consumer", enums = "tailable,changeStreams", defaultValue = "tailable")
private String consumerType;
@UriParam(label = "advanced", defaultValue = "1000", javaType = "java.time.Duration")
private long cursorRegenerationDelay = 1000L;
@UriParam(label = "consumer")
private String tailTrackIncreasingField;
@UriParam(label = "consumer,changeStream")
private String streamFilter;
@UriParam(label = "consumer", enums = "default,updateLookup,required,whenAvailable", defaultValue = "default")
private FullDocument fullDocument = FullDocument.DEFAULT;
// persistent tail tracking
@UriParam(label = "consumer")
private boolean persistentTailTracking;
@UriParam(label = "consumer")
private String persistentId;
@UriParam(label = "consumer")
private String tailTrackDb;
@UriParam(label = "consumer")
private String tailTrackCollection;
@UriParam(label = "consumer")
private String tailTrackField;
@UriParam(label = "common")
private MongoDbOutputType outputType;
//Server Selection Configuration
@UriParam(label = "advanced", defaultValue = "30000")
private Integer serverSelectionTimeoutMS = 30000;
@UriParam(label = "advanced", defaultValue = "15")
private Integer localThresholdMS = 15;
//Server Monitoring Configuration
@UriParam(label = "advanced")
private Integer heartbeatFrequencyMS;
//Replica set configuration
@UriParam(label = "advanced")
private String replicaSet;
//Connection Configuration
@UriParam(label = "security", defaultValue = "false")
private boolean tls;
@UriParam(label = "security", defaultValue = "false")
private boolean tlsAllowInvalidHostnames;
@UriParam(label = "advanced", defaultValue = "10000")
private Integer connectTimeoutMS = 10000;
@UriParam(label = "advanced", defaultValue = "0")
private Integer socketTimeoutMS = 0;
@UriParam(label = "advanced", defaultValue = "0")
private Integer maxIdleTimeMS = 0;
@UriParam(label = "advanced", defaultValue = "0")
private Integer maxLifeTimeMS = 0;
//Connection Pool Configuration
@UriParam(label = "advanced", defaultValue = "0")
private Integer minPoolSize = 0;
@UriParam(label = "advanced", defaultValue = "100")
private Integer maxPoolSize = 100;
@UriParam(label = "advanced", defaultValue = "2")
private Integer maxConnecting = 2;
@UriParam(label = "advanced", defaultValue = "120000")
private Integer waitQueueTimeoutMS = 120000;
//Write concern
@UriParam(label = "advanced", defaultValue = "ACKNOWLEDGED",
enums = "ACKNOWLEDGED,W1,W2,W3,UNACKNOWLEDGED,JOURNALED,MAJORITY")
private String writeConcern = "ACKNOWLEDGED";
//Read Preference
@UriParam(label = "advanced",
defaultValue = "PRIMARY",
enums = "PRIMARY,PRIMARY_PREFERRED,SECONDARY,SECONDARY_PREFERRED,NEAREST")
private String readPreference = "PRIMARY";
@UriParam(label = "advanced")
private String readPreferenceTags;
@UriParam(label = "advanced", defaultValue = "-1")
private Integer maxStalenessSeconds = -1;
//Server Handshake configuration
@UriParam(label = "advanced")
private String appName;
//Compressor configuration
@UriParam(label = "advanced")
private String compressors;
@UriParam(label = "advanced")
private Integer zlibCompressionLevel;
//SRV configuration
@UriParam(label = "advanced", defaultValue = "mongodb")
private String srvServiceName;
@UriParam(label = "advanced")
private Integer srvMaxHosts;
//General Configuration
@UriParam(label = "advanced", defaultValue = "true")
private boolean retryWrites = true;
@UriParam(label = "advanced", defaultValue = "true")
private boolean retryReads = true;
@UriParam(label = "advanced", defaultValue = "false")
private boolean directConnection;
@UriParam(label = "advanced")
private boolean loadBalanced;
//additional properties
@UriParam(description = "Set the whole Connection String/Uri for mongodb endpoint.",
label = "common")
private String connectionUriString;
// tailable cursor consumer by default
private MongoDbConsumerType dbConsumerType;
private MongoDbTailTrackingConfig tailTrackingConfig;
private MongoDatabase mongoDatabase;
private MongoCollection<Document> mongoCollection;
public MongoDbEndpoint() {
}
public MongoDbEndpoint(String uri, MongoDbComponent component) {
super(uri, component);
}
@Override
public MongoDbComponent getComponent() {
return (MongoDbComponent) super.getComponent();
}
@Override
public String getServiceUrl() {
if (connectionUriString != null) {
return connectionUriString;
} else if (hosts != null) {
return hosts;
}
return null;
}
@Override
public String getServiceProtocol() {
return "mongodb";
}
@Override
public Map<String, String> getServiceMetadata() {
if (username != null) {
return Map.of("username", username);
}
return null;
}
@Override
public Producer createProducer() {
validateProducerOptions();
initializeConnection();
return new MongoDbProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
validateConsumerOptions();
// we never create the collection
createCollection = false;
initializeConnection();
// select right consumer type
try {
dbConsumerType = ObjectHelper.isEmpty(consumerType)
? MongoDbConsumerType.tailable
: MongoDbConsumerType.valueOf(consumerType);
} catch (Exception e) {
throw new CamelMongoDbException("Consumer type not supported: " + consumerType, e);
}
Consumer consumer;
switch (dbConsumerType) {
case tailable:
consumer = new MongoDbTailableCursorConsumer(this, processor);
break;
case changeStreams:
consumer = new MongoDbChangeStreamsConsumer(this, processor);
break;
default:
throw new CamelMongoDbException("Consumer type not supported: " + dbConsumerType);
}
configureConsumer(consumer);
return consumer;
}
/**
* Check if outputType is compatible with operation. DbCursor and DocumentList applies to findAll. Document applies
* to others.
*/
@SuppressWarnings("unused")
// TODO: validate Output on createProducer method.
private void validateOutputType() {
if (!ObjectHelper.isEmpty(outputType)) {
if (DocumentList.equals(outputType) && !(findAll.equals(operation))) {
throw new IllegalArgumentException("outputType DocumentList is only compatible with operation findAll");
}
if (MongoIterable.equals(outputType) && !(findAll.equals(operation))) {
throw new IllegalArgumentException("outputType MongoIterable is only compatible with operation findAll");
}
if (Document.equals(outputType) && (findAll.equals(operation))) {
throw new IllegalArgumentException("outputType Document is not compatible with operation findAll");
}
}
}
private void validateProducerOptions() throws IllegalArgumentException {
// make our best effort to validate, options with defaults are checked
// against their defaults, which is not always a guarantee that
// they haven't been explicitly set, but it is enough
if (!ObjectHelper.isEmpty(dbConsumerType) || persistentTailTracking || !ObjectHelper.isEmpty(tailTrackDb)
|| !ObjectHelper.isEmpty(tailTrackCollection)
|| !ObjectHelper.isEmpty(tailTrackField) || cursorRegenerationDelay != 1000L) {
throw new IllegalArgumentException(
"dbConsumerType, tailTracking, cursorRegenerationDelay options cannot appear on a producer endpoint");
}
}
private void validateConsumerOptions() throws IllegalArgumentException {
// make our best effort to validate, options with defaults are checked
// against their defaults, which is not always a guarantee that
// they haven't been explicitly set, but it is enough
if (!ObjectHelper.isEmpty(operation) || dynamicity || outputType != null) {
throw new IllegalArgumentException(
"operation, dynamicity, outputType " + "options cannot appear on a consumer endpoint");
}
if (dbConsumerType == MongoDbConsumerType.tailable) {
if (tailTrackIncreasingField == null) {
throw new IllegalArgumentException(
"tailTrackIncreasingField option must be set for tailable cursor MongoDB consumer endpoint");
}
if (persistentTailTracking && (ObjectHelper.isEmpty(persistentId))) {
throw new IllegalArgumentException("persistentId is compulsory for persistent tail tracking");
}
}
}
/**
* Initialises the MongoDB connection using the Mongo object provided to the endpoint
*
* @throws CamelMongoDbException
*/
public void initializeConnection() throws CamelMongoDbException {
LOG.info("Initialising MongoDb endpoint: {}", this);
if (database == null || collection == null && !(getDbStats.equals(operation) || command.equals(operation))) {
throw new CamelMongoDbException("Missing required endpoint configuration: database and/or collection");
}
if (mongoConnection == null) {
mongoConnection = resolveMongoConnection();
if (mongoConnection == null) {
throw new CamelMongoDbException(
"Could not initialise MongoDbComponent. Could not resolve the mongo connection.");
}
}
mongoDatabase = mongoConnection.getDatabase(database);
if (mongoDatabase == null) {
throw new CamelMongoDbException("Could not initialise MongoDbComponent. Database " + database + " does not exist.");
}
if (collection != null) {
if (!createCollection && !databaseContainsCollection(collection)) {
throw new CamelMongoDbException(
"Could not initialise MongoDbComponent. Collection "
+ collection
+ " does not exist on the database and createCollection is false.");
}
mongoCollection = mongoDatabase.getCollection(collection, Document.class);
LOG.debug("MongoDb component initialised and endpoint bound to MongoDB collection with the following parameters. "
+ "Cluster description: {}, Db: {}, Collection: {}",
mongoConnection.getClusterDescription(), mongoDatabase.getName(), collection);
try {
if (ObjectHelper.isNotEmpty(collectionIndex)) {
ensureIndex(mongoCollection, createIndex());
}
} catch (Exception e) {
throw new CamelMongoDbException("Error creating index", e);
}
}
}
private boolean databaseContainsCollection(String collectionName) {
return StreamSupport.stream(mongoDatabase.listCollectionNames().spliterator(), false).anyMatch(collectionName::equals);
}
/**
* Add Index
*/
public void ensureIndex(MongoCollection<Document> aCollection, List<Bson> dynamicIndex) {
if (dynamicIndex != null && !dynamicIndex.isEmpty()) {
for (Bson index : dynamicIndex) {
LOG.debug("create Document Index {}", index);
aCollection.createIndex(index);
}
}
}
/**
* Create technical list index
*
* @return technical list index
*/
@SuppressWarnings("unchecked")
public List<Bson> createIndex() {
try {
List<Bson> indexList = new ArrayList<>();
if (ObjectHelper.isNotEmpty(collectionIndex)) {
HashMap<String, String> indexMap = new ObjectMapper().readValue(collectionIndex, HashMap.class);
for (Map.Entry<String, String> set : indexMap.entrySet()) {
Document index = new Document();
// MongoDB 2.4 upwards is restrictive about the type of the
// 'single field index' being
// in use below (set.getValue())) as only an integer value
// type is accepted, otherwise
// server will throw an exception, see more details:
// http://docs.mongodb.org/manual/release-notes/2.4/#improved-validation-of-index-types
index.put(set.getKey(), set.getValue());
indexList.add(index);
}
}
return indexList;
} catch (IOException e) {
throw new CamelMongoDbException("createIndex failed", e);
}
}
@Override
protected void doStart() throws Exception {
if (mongoConnection == null) {
mongoConnection = resolveMongoConnection();
} else {
LOG.debug("Resolved the connection provided by mongoConnection property parameter as {}", mongoConnection);
}
super.doStart();
}
private MongoClient resolveMongoConnection() {
MongoClient mongoClient;
if (this.hosts != null) {
String credentials = username == null ? "" : username;
if (!credentials.isEmpty()) {
credentials += this.password == null ? "@" : ":" + password + "@";
}
String connectionOptions = authSource == null ? "" : "/?authSource=" + authSource;
if (connectionUriString != null) {
mongoClient = MongoClients.create(connectionUriString);
} else {
mongoClient = MongoClients.create(String.format("mongodb://%s%s%s", credentials, hosts, connectionOptions));
}
LOG.debug("Connection created using provided credentials");
} else {
mongoClient = getComponent().getMongoConnection();
if (mongoClient == null) {
mongoClient = CamelContextHelper.mandatoryLookup(getCamelContext(), connectionBean, MongoClient.class);
LOG.debug("Resolved the connection provided by {} context reference as {}", connectionBean,
mongoConnection);
}
}
return mongoClient;
}
public String getConnectionBean() {
return connectionBean;
}
/**
* Name of {@link com.mongodb.client.MongoClient} to use.
*/
public void setConnectionBean(String connectionBean) {
this.connectionBean = connectionBean;
}
/**
* Sets the name of the MongoDB collection to bind to this endpoint
*/
public void setCollection(String collection) {
this.collection = collection;
}
public String getCollection() {
return collection;
}
/**
* Sets the collection index (JSON FORMAT : { "field1" : order1, "field2" : order2})
*/
public void setCollectionIndex(String collectionIndex) {
this.collectionIndex = collectionIndex;
}
public String getCollectionIndex() {
return collectionIndex;
}
/**
* Sets the operation this endpoint will execute against MongoDB.
*/
public void setOperation(String operation) throws CamelMongoDbException {
try {
this.operation = valueOf(operation);
} catch (IllegalArgumentException e) {
throw new CamelMongoDbException("Operation not supported", e);
}
}
/**
* Sets the operation this endpoint will execute against MongoDB.
*/
public void setOperation(MongoDbOperation operation) {
this.operation = operation;
}
public MongoDbOperation getOperation() {
return operation;
}
/**
* Sets the name of the MongoDB database to target
*/
public void setDatabase(String database) {
this.database = database;
}
public String getDatabase() {
return database;
}
/**
* Create the collection during initialisation if it doesn't exist. Default is true.
*/
public void setCreateCollection(boolean createCollection) {
this.createCollection = createCollection;
}
public boolean isCreateCollection() {
return createCollection;
}
/**
* Sets the Mongo instance that represents the backing connection
*/
public void setMongoConnection(MongoClient mongoConnection) {
this.mongoConnection = mongoConnection;
}
public MongoClient getMongoConnection() {
return mongoConnection;
}
/**
* Sets whether this endpoint will attempt to dynamically resolve the target database and collection from the
* incoming Exchange properties. Can be used to override at runtime the database and collection specified on the
* otherwise static endpoint URI. It is disabled by default to boost performance. Enabling it will take a minimal
* performance hit.
*
* @param dynamicity true or false indicated whether target database and collection should be calculated dynamically
* based on Exchange properties.
* @see MongoDbConstants#DATABASE
* @see MongoDbConstants#COLLECTION
*/
public void setDynamicity(boolean dynamicity) {
this.dynamicity = dynamicity;
}
public boolean isDynamicity() {
return dynamicity;
}
/**
* Reserved for future use, when more consumer types are supported.
*
* @param dbConsumerType key of the consumer type
* @throws CamelMongoDbException if the consumer type is not supported
*/
public void setDbConsumerType(String dbConsumerType) throws CamelMongoDbException {
try {
this.dbConsumerType = MongoDbConsumerType.valueOf(dbConsumerType);
} catch (IllegalArgumentException e) {
throw new CamelMongoDbException("Consumer type not supported", e);
}
}
public MongoDbConsumerType getDbConsumerType() {
return dbConsumerType;
}
public String getConsumerType() {
return consumerType;
}
/**
* Consumer type.
*/
public void setConsumerType(String consumerType) {
this.consumerType = consumerType;
}
public String getTailTrackDb() {
return tailTrackDb;
}
/**
* Indicates what database the tail tracking mechanism will persist to. If not specified, the current database will
* be picked by default. Dynamicity will not be taken into account even if enabled, i.e., the tail tracking database
* will not vary past endpoint initialization.
*/
public void setTailTrackDb(String tailTrackDb) {
this.tailTrackDb = tailTrackDb;
}
public String getTailTrackCollection() {
return tailTrackCollection;
}
/**
* Collection where tail tracking information will be persisted. If not specified,
* {@link MongoDbTailTrackingConfig#DEFAULT_COLLECTION} will be used by default.
*/
public void setTailTrackCollection(String tailTrackCollection) {
this.tailTrackCollection = tailTrackCollection;
}
public String getTailTrackField() {
return tailTrackField;
}
/**
* Field where the last tracked value will be placed. If not specified,
* {@link MongoDbTailTrackingConfig#DEFAULT_FIELD} will be used by default.
*/
public void setTailTrackField(String tailTrackField) {
this.tailTrackField = tailTrackField;
}
/**
* Enable persistent tail tracking, which is a mechanism to keep track of the last consumed message across system
* restarts. The next time the system is up, the endpoint will recover the cursor from the point where it last
* stopped slurping records.
*/
public void setPersistentTailTracking(boolean persistentTailTracking) {
this.persistentTailTracking = persistentTailTracking;
}
public boolean isPersistentTailTracking() {
return persistentTailTracking;
}
/**
* Correlation field in the incoming record which is of increasing nature and will be used to position the tailing
* cursor every time it is generated. The cursor will be (re)created with a query of type: tailTrackIncreasingField
* greater than lastValue (possibly recovered from persistent tail tracking). Can be of type Integer, Date, String,
* etc. NOTE: No support for dot notation at the current time, so the field should be at the top level of the
* document.
*/
public void setTailTrackIncreasingField(String tailTrackIncreasingField) {
this.tailTrackIncreasingField = tailTrackIncreasingField;
}
public String getTailTrackIncreasingField() {
return tailTrackIncreasingField;
}
public MongoDbTailTrackingConfig getTailTrackingConfig() {
if (tailTrackingConfig == null) {
tailTrackingConfig = new MongoDbTailTrackingConfig(
persistentTailTracking, tailTrackIncreasingField, tailTrackDb == null ? database : tailTrackDb,
tailTrackCollection,
tailTrackField, getPersistentId());
}
return tailTrackingConfig;
}
/**
* MongoDB tailable cursors will block until new data arrives. If no new data is inserted, after some time the
* cursor will be automatically freed and closed by the MongoDB server. The client is expected to regenerate the
* cursor if needed. This value specifies the time to wait before attempting to fetch a new cursor, and if the
* attempt fails, how long before the next attempt is made. Default value is 1000ms.
*/
public void setCursorRegenerationDelay(long cursorRegenerationDelay) {
this.cursorRegenerationDelay = cursorRegenerationDelay;
}
public long getCursorRegenerationDelay() {
return cursorRegenerationDelay;
}
/**
* One tail tracking collection can host many trackers for several tailable consumers. To keep them separate, each
* tracker should have its own unique persistentId.
*/
public void setPersistentId(String persistentId) {
this.persistentId = persistentId;
}
public String getPersistentId() {
return persistentId;
}
public boolean isWriteResultAsHeader() {
return writeResultAsHeader;
}
/**
* In write operations, it determines whether instead of returning WriteResult as the body of the OUT message, we
* transfer the IN message to the OUT and attach the WriteResult as a header.
*/
public void setWriteResultAsHeader(boolean writeResultAsHeader) {
this.writeResultAsHeader = writeResultAsHeader;
}
public MongoDbOutputType getOutputType() {
return outputType;
}
/**
* Convert the output of the producer to the selected type: DocumentList Document or MongoIterable. DocumentList or
* MongoIterable applies to findAll and aggregate. Document applies to all other operations.
*/
public void setOutputType(MongoDbOutputType outputType) {
this.outputType = outputType;
}
public MongoDatabase getMongoDatabase() {
return mongoDatabase;
}
public MongoCollection<Document> getMongoCollection() {
return mongoCollection;
}
public String getStreamFilter() {
return streamFilter;
}
/**
* Filter condition for change streams consumer.
*/
public void setStreamFilter(String streamFilter) {
this.streamFilter = streamFilter;
}
public FullDocument getFullDocument() {
return fullDocument;
}
/**
* Specifies whether changeStream consumer include a copy of the full document when modified by update operations.
* Possible values are default, updateLookup, required and whenAvailable.
*/
public void setFullDocument(FullDocument fullDocument) {
this.fullDocument = fullDocument;
}
/**
* Configure the connection bean with the level of acknowledgment requested from MongoDB for write operations to a
* standalone mongod, replicaset or cluster. Possible values are ACKNOWLEDGED, W1, W2, W3, UNACKNOWLEDGED, JOURNALED
* or MAJORITY.
*/
public void setWriteConcern(String writeConcern) {
this.writeConcern = writeConcern;
}
public String getWriteConcern() {
return this.writeConcern;
}
public WriteConcern getWriteConcernBean() {
WriteConcern writeConcernBean = WriteConcern.valueOf(getWriteConcern());
if (writeConcernBean == null) {
throw new IllegalArgumentException(String.format("Unknown WriteConcern configuration %s", getWriteConcern()));
}
return writeConcernBean;
}
/**
* Configure how MongoDB clients route read operations to the members of a replica set. Possible values are PRIMARY,
* PRIMARY_PREFERRED, SECONDARY, SECONDARY_PREFERRED or NEAREST
*/
public void setReadPreference(String readPreference) {
this.readPreference = readPreference;
}
public String getReadPreference() {
return this.readPreference;
}
public ReadPreference getReadPreferenceBean() {
// will throw an IllegalArgumentException if the input is incorrect
return ReadPreference.valueOf(getReadPreference());
}
public String getUsername() {
return username;
}
/**
* Username for mongodb connection
*/
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
/**
* User password for mongodb connection
*/
public void setPassword(String password) {
this.password = password;
}
public String getHosts() {
return hosts;
}
/**
* Host address of mongodb server in `[host]:[port]` format. It's possible to also use more than one address, as a
* comma separated list of hosts: `[host1]:[port1],[host2]:[port2]`. If this parameter is specified, the provided
* connectionBean is ignored.
*/
public void setHosts(String hosts) {
this.hosts = hosts;
}
public String getAuthSource() {
return authSource;
}
/**
* The database name associated with the user's credentials.
*/
public void setAuthSource(String authSource) {
this.authSource = authSource;
}
/**
* Specifies how long (in milliseconds) to block for server selection before throwing an exception. Default: 30,000
* milliseconds.
*/
public void setServerSelectionTimeoutMS(Integer serverSelectionTimeoutMS) {
this.serverSelectionTimeoutMS = serverSelectionTimeoutMS;
}
public Integer getServerSelectionTimeoutMS() {
return serverSelectionTimeoutMS;
}
/**
* The size (in milliseconds) of the latency window for selecting among multiple suitable MongoDB instances.
* Default: 15 milliseconds.
*/
public void setLocalThresholdMS(Integer localThresholdMS) {
this.localThresholdMS = localThresholdMS;
}
public Integer getLocalThresholdMS() {
return localThresholdMS;
}
/**
* heartbeatFrequencyMS controls when the driver checks the state of the MongoDB deployment. Specify the interval
* (in milliseconds) between checks, counted from the end of the previous check until the beginning of the next one.
* Default: Single-threaded drivers: 60 seconds. Multithreaded drivers: 10 seconds.
*/
public void setHeartbeatFrequencyMS(Integer heartbeatFrequencyMS) {
this.heartbeatFrequencyMS = heartbeatFrequencyMS;
}
public Integer getHeartbeatFrequencyMS() {
return heartbeatFrequencyMS;
}
/**
* Specifies that the connection string provided includes multiple hosts. When specified, the driver attempts to
* find all members of that set.
*/
public void setReplicaSet(String replicaSet) {
this.replicaSet = replicaSet;
}
public String getReplicaSet() {
return replicaSet;
}
/**
* Specifies that all communication with MongoDB instances should use TLS. Supersedes the ssl option. Default: false
*/
public void setTls(boolean tls) {
this.tls = tls;
}
public boolean isTls() {
return tls;
}
/**
* Specifies that the driver should allow invalid hostnames in the certificate for TLS connections. Supersedes
* sslInvalidHostNameAllowed. Has the same effect as tlsInsecure by setting tlsAllowInvalidHostnames to true.
* Default: false
*/
public void setTlsAllowInvalidHostnames(boolean tlsAllowInvalidHostnames) {
this.tlsAllowInvalidHostnames = tlsAllowInvalidHostnames;
}
public boolean isTlsAllowInvalidHostnames() {
return tlsAllowInvalidHostnames;
}
/**
* Specifies the maximum amount of time, in milliseconds, the Java driver waits for a connection to open before
* timing out. A value of 0 instructs the driver to never time out while waiting for a connection to open. Default:
* 10000 (10 seconds)
*/
public void setConnectTimeoutMS(Integer connectTimeoutMS) {
this.connectTimeoutMS = connectTimeoutMS;
}
public Integer getConnectTimeoutMS() {
return connectTimeoutMS;
}
/**
* Specifies the maximum amount of time, in milliseconds, the Java driver will wait to send or receive a request
* before timing out. A value of 0 instructs the driver to never time out while waiting to send or receive a
* request. Default: 0
*/
public void setSocketTimeoutMS(Integer socketTimeoutMS) {
this.socketTimeoutMS = socketTimeoutMS;
}
public Integer getSocketTimeoutMS() {
return socketTimeoutMS;
}
/**
* Specifies the maximum amount of time, in milliseconds, the Java driver will allow a pooled connection to idle
* before closing the connection. A value of 0 indicates that there is no upper bound on how long the driver can
* allow a pooled collection to be idle. Default: 0
*/
public void setMaxIdleTimeMS(Integer maxIdleTimeMS) {
this.maxIdleTimeMS = maxIdleTimeMS;
}
public Integer getMaxIdleTimeMS() {
return maxIdleTimeMS;
}
/**
* Specifies the maximum amount of time, in milliseconds, the Java driver will continue to use a pooled connection
* before closing the connection. A value of 0 indicates that there is no upper bound on how long the driver can
* keep a pooled connection open. Default: 0
*/
public void setMaxLifeTimeMS(Integer maxLifeTimeMS) {
this.maxLifeTimeMS = maxLifeTimeMS;
}
public Integer getMaxLifeTimeMS() {
return maxLifeTimeMS;
}
/**
* Specifies the minimum number of connections that must exist at any moment in a single connection pool. Default: 0
*/
public void setMinPoolSize(Integer minPoolSize) {
this.minPoolSize = minPoolSize;
}
public Integer getMinPoolSize() {
return minPoolSize;
}
/**
* The maximum number of connections in the connection pool. The default value is 100.
*/
public void setMaxPoolSize(Integer maxPoolSize) {
this.maxPoolSize = maxPoolSize;
}
public Integer getMaxPoolSize() {
return maxPoolSize;
}
/**
* Specifies the maximum number of connections a pool may be establishing concurrently. Default: 2
*/
public void setMaxConnecting(Integer maxConnecting) {
this.maxConnecting = maxConnecting;
}
public Integer getMaxConnecting() {
return maxConnecting;
}
/**
* Specifies the maximum amount of time, in milliseconds that a thread may wait for a connection to become
* available. Default: 120000 (120 seconds)
*/
public void setWaitQueueTimeoutMS(Integer waitQueueTimeoutMS) {
this.waitQueueTimeoutMS = waitQueueTimeoutMS;
}
public Integer getWaitQueueTimeoutMS() {
return waitQueueTimeoutMS;
}
/**
* A representation of a tag set as a comma-separated list of colon-separated key-value pairs, e.g. "dc:ny,rack:1".
* Spaces are stripped from the beginning and end of all keys and values. To specify a list of tag sets, using
* multiple readPreferenceTags, e.g., readPreferenceTags=dc:ny,rack:1;readPreferenceTags=dc:ny;readPreferenceTags=
* Note the empty value for the last one, which means match any secondary as a last resort. Order matters when using
* multiple readPreferenceTags.
*/
public void setReadPreferenceTags(String readPreferenceTags) {
this.readPreferenceTags = readPreferenceTags;
}
public String getReadPreferenceTags() {
return readPreferenceTags;
}
/**
* Specifies, in seconds, how stale a secondary can be before the driver stops communicating with that secondary.
* The minimum value is either 90 seconds or the heartbeat frequency plus 10 seconds, whichever is greater. For more
* information, see the server documentation for the maxStalenessSeconds option. Not providing a parameter or
* explicitly specifying -1 indicates that there should be no staleness check for secondaries. Default: -1
*/
public void setMaxStalenessSeconds(Integer maxStalenessSeconds) {
this.maxStalenessSeconds = maxStalenessSeconds;
}
public Integer getMaxStalenessSeconds() {
return maxStalenessSeconds;
}
/**
* Sets the logical name of the application. The application name may be used by the client to identify the
* application to the server, for use in server logs, slow query logs, and profile collection. Default: null
*/
public void setAppName(String appName) {
this.appName = appName;
}
public String getAppName() {
return appName;
}
/**
* Specifies one or more compression algorithms that the driver will attempt to use to compress requests sent to the
* connected MongoDB instance. Possible values include: zlib, snappy, and zstd. Default: null
*/
public void setCompressors(String compressors) {
this.compressors = compressors;
}
public String getCompressors() {
return compressors;
}
/**
* Specifies the degree of compression that Zlib should use to decrease the size of requests to the connected
* MongoDB instance. The level can range from -1 to 9, with lower values compressing faster (but resulting in larger
* requests) and larger values compressing slower (but resulting in smaller requests). Default: null
*/
public void setZlibCompressionLevel(Integer zlibCompressionLevel) {
this.zlibCompressionLevel = zlibCompressionLevel;
}
public Integer getZlibCompressionLevel() {
return zlibCompressionLevel;
}
/**
* Specifies the service name of the SRV resource recordsthe driver retrieves to construct your seed list. You must
* use the DNS Seed List Connection Format in your connection URI to use this option. Default: mongodb
*/
public void setSrvServiceName(String srvServiceName) {
this.srvServiceName = srvServiceName;
}
public String getSrvServiceName() {
return srvServiceName;
}
/**
* The maximum number of hosts from the SRV record to connect to.
*/
public void setSrvMaxHosts(Integer srvMaxHosts) {
this.srvMaxHosts = srvMaxHosts;
}
public Integer getSrvMaxHosts() {
return srvMaxHosts;
}
/**
* Specifies that the driver must retry supported write operations if they fail due to a network error. Default:
* true
*/
public void setRetryWrites(boolean retryWrites) {
this.retryWrites = retryWrites;
}
public boolean isRetryWrites() {
return retryWrites;
}
/**
* Specifies that the driver must retry supported read operations if they fail due to a network error. Default: true
*/
public void setRetryReads(boolean retryReads) {
this.retryReads = retryReads;
}
public boolean isRetryReads() {
return retryReads;
}
/**
* Specifies that the driver must connect to the host directly. Default: false
*/
public void setDirectConnection(boolean directConnection) {
this.directConnection = directConnection;
}
public boolean isDirectConnection() {
return directConnection;
}
/**
* If true the driver will assume that it's connecting to MongoDB through a load balancer.
*/
public void setLoadBalanced(boolean loadBalanced) {
this.loadBalanced = loadBalanced;
}
public boolean isLoadBalanced() {
return loadBalanced;
}
/**
* Set the whole Connection String/Uri for mongodb endpoint. To be flexible and future proof about supporting all
* the mongodb client options
* https://www.mongodb.com/docs/drivers/java/sync/current/fundamentals/connection/connect/#connection-uri
*/
public void setConnectionUriString(String connectionUriString) {
this.connectionUriString = connectionUriString;
}
public String getConnectionUriString() {
return connectionUriString;
}
}
| MongoDbEndpoint |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/model/PreparableMutationOperation.java | {
"start": 602,
"end": 1965
} | interface ____ extends MutationOperation {
/**
* The SQL to be used when creating the PreparedStatement
*/
String getSqlString();
/**
* Get the list of parameter binders for the generated PreparedStatement
*/
List<JdbcParameterBinder> getParameterBinders();
/**
* Whether the operation is callable
*/
boolean isCallable();
/**
* The expected outcome of execution
*/
Expectation getExpectation();
/**
* Series of opt-out checks for whether the operation can be
* handled as part of a batch.
*
* @implNote This does not account for whether batching is enabled
* or not on the factory, just whether we can potentially batch it
* relative to the operation itself
*/
default boolean canBeBatched(BatchKey batchKey, int batchSize) {
if ( batchKey == null || batchSize < 2 ) {
return false;
}
// This should already be guaranteed by the batchKey being null
assert !getTableDetails().isIdentifierTable()
|| !( getMutationTarget() instanceof EntityMutationTarget entityMutationTarget
&& entityMutationTarget.getMutationDelegate( getMutationType() ) != null );
if ( getMutationType() == MutationType.UPDATE ) {
// we cannot batch updates against optional tables
if ( getTableDetails().isOptional() ) {
return false;
}
}
return getExpectation().canBeBatched();
}
}
| PreparableMutationOperation |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/onexception/OnExceptionOccurredProcessorTest.java | {
"start": 1154,
"end": 2588
} | class ____ extends ContextTestSupport {
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("myProcessor", new MyProcessor());
return jndi;
}
@Test
public void testOnExceptionOccurred() throws Exception {
getMockEndpoint("mock:dead").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
MyProcessor myProcessor = context.getRegistry().lookupByNameAndType("myProcessor", MyProcessor.class);
// 1 = first time + 3 redelivery attempts
assertEquals(1 + 3, myProcessor.getInvoked());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
MyProcessor myProcessor = context.getRegistry().lookupByNameAndType("myProcessor", MyProcessor.class);
errorHandler(deadLetterChannel("mock:dead").maximumRedeliveries(3).redeliveryDelay(0)
.onExceptionOccurred(myProcessor));
from("direct:start").routeId("start").to("log:a").to("direct:foo").to("log:b");
from("direct:foo").routeId("foo").throwException(new IllegalArgumentException("Forced"));
}
};
}
public static | OnExceptionOccurredProcessorTest |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/DesiredNodeWithStatus.java | {
"start": 5325,
"end": 5889
} | enum ____ {
PENDING((short) 0),
ACTUALIZED((short) 1);
private final short value;
Status(short value) {
this.value = value;
}
// visible for testing
public short getValue() {
return value;
}
static Status fromValue(short value) {
return switch (value) {
case 0 -> PENDING;
case 1 -> ACTUALIZED;
default -> throw new IllegalArgumentException("Unknown status " + value);
};
}
}
}
| Status |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/AcknowledgementCommitCallbackHandler.java | {
"start": 1155,
"end": 2524
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(AcknowledgementCommitCallbackHandler.class);
private final AcknowledgementCommitCallback acknowledgementCommitCallback;
private boolean enteredCallback = false;
AcknowledgementCommitCallbackHandler(AcknowledgementCommitCallback acknowledgementCommitCallback) {
this.acknowledgementCommitCallback = acknowledgementCommitCallback;
}
public boolean hasEnteredCallback() {
return enteredCallback;
}
void onComplete(List<Map<TopicIdPartition, Acknowledgements>> acknowledgementsMapList) {
acknowledgementsMapList.forEach(acknowledgementsMap -> acknowledgementsMap.forEach((partition, acknowledgements) -> {
KafkaException exception = acknowledgements.getAcknowledgeException();
Set<Long> offsets = acknowledgements.getAcknowledgementsTypeMap().keySet();
Set<Long> offsetsCopy = Set.copyOf(offsets);
enteredCallback = true;
try {
acknowledgementCommitCallback.onComplete(Map.of(partition, offsetsCopy), exception);
} catch (Exception e) {
LOG.error("Exception thrown by acknowledgement commit callback", e);
} finally {
enteredCallback = false;
}
}));
}
}
| AcknowledgementCommitCallbackHandler |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/typelevel/MockitoBeansByTypeIntegrationTests.java | {
"start": 3364,
"end": 3582
} | class ____ implements MockTestInterface08 {
@Autowired
Service08 service08;
@Autowired
Service09 service09;
@MockitoBean
Service10 service10;
}
@Nested
@MockitoBean(types = Service12.class)
| BaseTestCase |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-web-thymeleaf/src/test/java/smoketest/web/thymeleaf/SampleWebUiApplicationTests.java | {
"start": 1623,
"end": 2384
} | class ____ {
@Autowired
private TestRestTemplate restTemplate;
@LocalServerPort
private int port;
@Test
void testHome() {
ResponseEntity<String> entity = this.restTemplate.getForEntity("/", String.class);
assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(entity.getBody()).contains("<title>Messages");
assertThat(entity.getBody()).doesNotContain("layout:fragment");
}
@Test
void testCreate() {
MultiValueMap<String, String> map = new LinkedMultiValueMap<>();
map.set("text", "FOO text");
map.set("summary", "FOO");
URI location = this.restTemplate.postForLocation("/", map);
assertThat(location).isNotNull();
assertThat(location.toString()).contains("localhost:" + this.port);
}
}
| SampleWebUiApplicationTests |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/test/java/org/springframework/boot/micrometer/metrics/autoconfigure/CompositeMeterRegistryAutoConfigurationTests.java | {
"start": 4441,
"end": 4689
} | class ____ {
@Bean
@Primary
MeterRegistry meterRegistryOne() {
return new TestMeterRegistry();
}
@Bean
MeterRegistry meterRegistryTwo() {
return new SimpleMeterRegistry();
}
}
static | MultipleMeterRegistriesWithOnePrimaryConfig |
java | elastic__elasticsearch | x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java | {
"start": 14341,
"end": 14566
} | class ____ extends ElasticsearchException {
TransformTaskUpdateException(String msg, Throwable cause, Object... args) {
super(msg, cause, args);
}
}
private static | TransformTaskUpdateException |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/autoproxy/benchmark/BenchmarkTests.java | {
"start": 7523,
"end": 8014
} | class ____ implements MethodBeforeAdvice {
public int beforeStringReturn;
@Override
public void before(Method method, Object[] args, Object target) {
++beforeStringReturn;
}
public static Advisor advisor() {
return new DefaultPointcutAdvisor(
new StaticMethodMatcherPointcut() {
@Override
public boolean matches(Method method, Class<?> targetClass) {
return method.getReturnType().equals(String.class);
}
},
new TraceBeforeAdvice());
}
}
| TraceBeforeAdvice |
java | micronaut-projects__micronaut-core | context/src/main/java/io/micronaut/logging/LoggingSystemException.java | {
"start": 743,
"end": 1655
} | class ____ extends RuntimeException {
/**
* Create exception with detailed message and cause.
*
* @param message the detail message (which is saved for later retrieval
* by the {@link #getMessage()} method).
* @param cause the cause (which is saved for later retrieval by the
* {@link #getCause()} method). (A {@code null} value is
* permitted, and indicates that the cause is nonexistent or
* unknown.)
*/
public LoggingSystemException(String message, Throwable cause) {
super(message, cause);
}
/**
* Create exception with detailed message and cause.
*
* @param message the detail message (which is saved for later retrieval
* by the {@link #getMessage()} method).
*/
public LoggingSystemException(String message) {
super(message);
}
}
| LoggingSystemException |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/error/ShouldContainKey_create_Test.java | {
"start": 1136,
"end": 1870
} | class ____ {
@Test
void should_create_error_message_with_key_condition() {
// GIVEN
Map<?, ?> map = mapOf(entry("name", "Yoda"), entry("color", "green"));
ErrorMessageFactory factory = shouldContainKey(map, new TestCondition<>("test condition"));
// WHEN
String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
// THEN
then(message).isEqualTo(format("[Test] %n" +
"Expecting actual:%n" +
" {\"color\"=\"green\", \"name\"=\"Yoda\"}%n" +
"to contain a key satisfying:%n" +
" test condition"));
}
}
| ShouldContainKey_create_Test |
java | jhy__jsoup | src/test/java/org/jsoup/parser/HtmlTreeBuilderStateTest.java | {
"start": 470,
"end": 3717
} | class ____ {
static List<Object[]> findConstantArrays(Class aClass) {
ArrayList<Object[]> array = new ArrayList<>();
Field[] fields = aClass.getDeclaredFields();
for (Field field : fields) {
int modifiers = field.getModifiers();
if (Modifier.isStatic(modifiers) && !Modifier.isPrivate(modifiers) && field.getType().isArray()) {
try {
array.add((Object[]) field.get(null));
} catch (IllegalAccessException e) {
throw new IllegalStateException(e);
}
}
}
return array;
}
static void ensureSorted(List<Object[]> constants) {
for (Object[] array : constants) {
Object[] copy = Arrays.copyOf(array, array.length);
Arrays.sort(array);
assertArrayEquals(array, copy);
}
}
@Test
public void ensureArraysAreSorted() {
List<Object[]> constants = findConstantArrays(Constants.class);
ensureSorted(constants);
assertEquals(39, constants.size());
}
@Test public void ensureTagSearchesAreKnownTags() {
List<Object[]> constants = findConstantArrays(Constants.class);
for (Object[] constant : constants) {
String[] tagNames = (String[]) constant;
for (String tagName : tagNames) {
if (StringUtil.inSorted(tagName, InBodyStartInputAttribs))
continue; // odd one out in the constant
assertTrue(Tag.isKnownTag(tagName), String.format("Unknown tag name: %s", tagName));
}
}
}
@Test
public void nestedAnchorElements01() {
String html = "<html>\n" +
" <body>\n" +
" <a href='#1'>\n" +
" <div>\n" +
" <a href='#2'>child</a>\n" +
" </div>\n" +
" </a>\n" +
" </body>\n" +
"</html>";
String s = Jsoup.parse(html).toString();
assertEquals("<html>\n" +
" <head></head>\n" +
" <body>\n" +
" <a href=\"#1\"> </a>\n" +
" <div>\n" +
" <a href=\"#1\"> </a><a href=\"#2\">child</a>\n" +
" </div>\n" +
" </body>\n" +
"</html>", s);
}
@Test
public void nestedAnchorElements02() {
String html = "<html>\n" +
" <body>\n" +
" <a href='#1'>\n" +
" <div>\n" +
" <div>\n" +
" <a href='#2'>child</a>\n" +
" </div>\n" +
" </div>\n" +
" </a>\n" +
" </body>\n" +
"</html>";
String s = Jsoup.parse(html).toString();
assertEquals("<html>\n" +
" <head></head>\n" +
" <body>\n" +
" <a href=\"#1\"> </a>\n" +
" <div>\n" +
" <a href=\"#1\"> </a>\n" +
" <div>\n" +
" <a href=\"#1\"> </a><a href=\"#2\">child</a>\n" +
" </div>\n" +
" </div>\n" +
" </body>\n" +
"</html>", s);
}
}
| HtmlTreeBuilderStateTest |
java | spring-projects__spring-boot | core/spring-boot-docker-compose/src/main/java/org/springframework/boot/docker/compose/service/connection/DockerComposeConnectionDetailsFactory.java | {
"start": 2117,
"end": 2424
} | class ____ {@link ConnectionDetailsFactory} implementations that provide
* {@link ConnectionDetails} from a {@link DockerComposeConnectionSource}.
*
* @param <D> the connection details type
* @author Moritz Halbritter
* @author Andy Wilkinson
* @author Phillip Webb
* @since 3.1.0
*/
public abstract | for |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/nested/TestExecutionListenersNestedTests.java | {
"start": 4659,
"end": 4798
} | class ____ extends BaseTestExecutionListener {
@Override
protected String name() {
return BAR;
}
}
static | BarTestExecutionListener |
java | grpc__grpc-java | netty/src/main/java/io/grpc/netty/NettyChannelBuilder.java | {
"start": 28178,
"end": 29342
} | class ____ implements ProtocolNegotiator.ClientFactory {
private NegotiationType negotiationType = NegotiationType.TLS;
private SslContext sslContext;
@Override
public ProtocolNegotiator newNegotiator() {
SslContext localSslContext = sslContext;
if (negotiationType == NegotiationType.TLS && localSslContext == null) {
try {
localSslContext = GrpcSslContexts.forClient().build();
} catch (SSLException ex) {
throw new RuntimeException(ex);
}
}
return createProtocolNegotiatorByType(negotiationType, localSslContext,
managedChannelImplBuilder.getOffloadExecutorPool());
}
@Override
public int getDefaultPort() {
switch (negotiationType) {
case PLAINTEXT:
case PLAINTEXT_UPGRADE:
return GrpcUtil.DEFAULT_PORT_PLAINTEXT;
case TLS:
return GrpcUtil.DEFAULT_PORT_SSL;
default:
throw new AssertionError(negotiationType + " not handled");
}
}
}
/**
* Creates Netty transports. Exposed for internal use, as it should be private.
*/
private static final | DefaultProtocolNegotiator |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/resume/ResumeAction.java | {
"start": 858,
"end": 925
} | interface ____ integrations to run actions during resume
*/
public | for |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 36267,
"end": 36439
} | class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return definesFunction( dialect, "array_set" );
}
}
public static | SupportsArraySet |
java | mapstruct__mapstruct | integrationtest/src/test/resources/externalbeanjar/mapper/src/main/java/org/mapstruct/itest/externalbeanjar/Issue1121Mapper.java | {
"start": 630,
"end": 825
} | interface ____ {
Issue1121Mapper INSTANCE = Mappers.getMapper( Issue1121Mapper.class );
@Mapping(target = "integer", source = "bigDecimal")
Target map(Source source);
}
| Issue1121Mapper |
java | google__dagger | javatests/dagger/functional/names/ComponentFactoryNameConflictsTest.java | {
"start": 987,
"end": 1037
} | class ____ {
// A | ComponentFactoryNameConflictsTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/JUnitIncompatibleTypeTest.java | {
"start": 2123,
"end": 2667
} | class ____ {
public void test() {
assertEquals(1, 2);
assertEquals(1, 2L);
assertEquals("foo", 1, 2);
assertNotEquals(1, 2);
assertNotEquals("foo", 1, 2);
}
}
""")
.doTest();
}
@Test
public void assertArrayEquals_mismatched() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import static org.junit.Assert.assertArrayEquals;
final | Test |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/launcher/core/LauncherFactoryTests.java | {
"start": 17476,
"end": 17775
} | class ____ implements LauncherSessionListener {
@Override
public void launcherSessionClosed(LauncherSession session) {
Object storedValue = session.getStore().get(Namespace.GLOBAL, "testKey");
assertThat(storedValue).isEqualTo("testValue");
}
}
static | LauncherSessionListenerClosedExample |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/processor/src/main/java/org/jboss/resteasy/reactive/common/processor/EndpointIndexer.java | {
"start": 19527,
"end": 20387
} | interface ____ abstract class, an Ambiguous Bean Resolution error occurs,
// since io.quarkus.arc.runtime.BeanContainerImpl.createFactory is run, even if the factory is never invoked
clazz.setFactory(factoryCreator.apply(classInfo.name().toString()));
}
Map<String, String> classLevelExceptionMappers = this.classLevelExceptionMappers.get(classInfo.name());
if (classLevelExceptionMappers != null) {
clazz.setClassLevelExceptionMappers(classLevelExceptionMappers);
}
List<ResourceMethod> methods = createEndpoints(classInfo, classInfo, new HashSet<>(), new HashSet<>(),
clazz.getPathParameters(), clazz.getPath(), considerApplication);
clazz.getMethods().addAll(methods);
// get an InjectableBean view of our | or |
java | redisson__redisson | redisson-spring-data/redisson-spring-data-40/src/test/java/org/redisson/spring/data/connection/RedissonPipelineConnectionTest.java | {
"start": 181,
"end": 2190
} | class ____ extends BaseConnectionTest {
@Test
public void testDel() {
RedissonConnection connection = new RedissonConnection(redisson);
byte[] key = "my_key".getBytes();
byte[] value = "my_value".getBytes();
connection.set(key, value);
connection.openPipeline();
connection.get(key);
connection.del(key);
List<Object> results = connection.closePipeline();
byte[] val = (byte[])results.get(0);
assertThat(val).isEqualTo(value);
Long res = (Long) results.get(1);
assertThat(res).isEqualTo(1);
}
@Test
public void testEcho() {
RedissonConnection connection = new RedissonConnection(redisson);
connection.openPipeline();
assertThat(connection.echo("test".getBytes())).isNull();
assertThat(connection.closePipeline().iterator().next()).isEqualTo("test".getBytes());
}
@Test
public void testSetGet() {
RedissonConnection connection = new RedissonConnection(redisson);
connection.openPipeline();
assertThat(connection.isPipelined()).isTrue();
connection.set("key".getBytes(), "value".getBytes());
assertThat(connection.get("key".getBytes())).isNull();
List<Object> result = connection.closePipeline();
assertThat(connection.isPipelined()).isFalse();
assertThat(result.get(1)).isEqualTo("value".getBytes());
}
@Test
public void testHSetGet() {
RedissonConnection connection = new RedissonConnection(redisson);
connection.openPipeline();
assertThat(connection.hSet("key".getBytes(), "field".getBytes(), "value".getBytes())).isNull();
assertThat(connection.hGet("key".getBytes(), "field".getBytes())).isNull();
List<Object> result = connection.closePipeline();
assertThat((Boolean)result.get(0)).isTrue();
assertThat(result.get(1)).isEqualTo("value".getBytes());
}
}
| RedissonPipelineConnectionTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/CouchbaseComponentBuilderFactory.java | {
"start": 6909,
"end": 8109
} | class ____
extends AbstractComponentBuilder<CouchbaseComponent>
implements CouchbaseComponentBuilder {
@Override
protected CouchbaseComponent buildConcreteComponent() {
return new CouchbaseComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((CouchbaseComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((CouchbaseComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((CouchbaseComponent) component).setAutowiredEnabled((boolean) value); return true;
case "healthCheckConsumerEnabled": ((CouchbaseComponent) component).setHealthCheckConsumerEnabled((boolean) value); return true;
case "healthCheckProducerEnabled": ((CouchbaseComponent) component).setHealthCheckProducerEnabled((boolean) value); return true;
default: return false;
}
}
}
} | CouchbaseComponentBuilderImpl |
java | google__guava | android/guava/src/com/google/common/util/concurrent/AtomicDouble.java | {
"start": 1140,
"end": 1301
} | class ____ extend {@code
* Number} to allow uniform access by tools and utilities that deal with numerically-based classes.
*
* <p><a id="bitEquals"></a>This | does |
java | quarkusio__quarkus | integration-tests/logging-min-level-set/src/test/java/io/quarkus/it/logging/minlevel/set/LoggingMinLevelBelowChildTest.java | {
"start": 447,
"end": 697
} | class ____ {
@Test
public void testTrace() {
given()
.when().get("/log/below/child/trace")
.then()
.statusCode(200)
.body(is("true"));
}
}
| LoggingMinLevelBelowChildTest |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/parser/deserializer/ExtraProcessor.java | {
"start": 127,
"end": 242
} | interface ____ extends ParseProcess {
void processExtra(Object object, String key, Object value);
}
| ExtraProcessor |
java | apache__camel | core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedStreamCachingStrategy.java | {
"start": 1144,
"end": 6817
} | class ____ extends ManagedService implements ManagedStreamCachingStrategyMBean {
private final CamelContext camelContext;
private final StreamCachingStrategy streamCachingStrategy;
private final String[] allowClasses;
private final String[] denyClasses;
public ManagedStreamCachingStrategy(CamelContext camelContext, StreamCachingStrategy streamCachingStrategy) {
super(camelContext, streamCachingStrategy);
this.camelContext = camelContext;
this.streamCachingStrategy = streamCachingStrategy;
if (streamCachingStrategy.getAllowClasses() != null) {
this.allowClasses = streamCachingStrategy.getAllowClasses()
.stream().map(Class::getName)
.toArray(String[]::new);
} else {
this.allowClasses = null;
}
if (streamCachingStrategy.getDenyClasses() != null) {
this.denyClasses = streamCachingStrategy.getDenyClasses()
.stream().map(Class::getName)
.toArray(String[]::new);
} else {
this.denyClasses = null;
}
}
public CamelContext getCamelContext() {
return camelContext;
}
public StreamCachingStrategy getStreamCachingStrategy() {
return streamCachingStrategy;
}
@Override
public boolean isEnabled() {
return streamCachingStrategy.isEnabled();
}
@Override
public String[] getAllowClasses() {
return allowClasses;
}
@Override
public String[] getDenyClasses() {
return denyClasses;
}
@Override
public boolean isSpoolEnabled() {
return streamCachingStrategy.isSpoolEnabled();
}
@Override
public String getSpoolDirectory() {
if (streamCachingStrategy.getSpoolDirectory() != null) {
return streamCachingStrategy.getSpoolDirectory().getPath();
} else {
return null;
}
}
@Override
public String getSpoolCipher() {
return streamCachingStrategy.getSpoolCipher();
}
@Override
public void setSpoolThreshold(long threshold) {
streamCachingStrategy.setSpoolThreshold(threshold);
}
@Override
public long getSpoolThreshold() {
return streamCachingStrategy.getSpoolThreshold();
}
@Override
public void setSpoolUsedHeapMemoryThreshold(int percentage) {
streamCachingStrategy.setSpoolUsedHeapMemoryThreshold(percentage);
}
@Override
public int getSpoolUsedHeapMemoryThreshold() {
return streamCachingStrategy.getSpoolUsedHeapMemoryThreshold();
}
@Override
public void setSpoolUsedHeapMemoryLimit(SpoolUsedHeapMemoryLimit limit) {
StreamCachingStrategy.SpoolUsedHeapMemoryLimit l;
if (limit == null) {
l = null;
} else {
l = switch (limit) {
case Committed -> StreamCachingStrategy.SpoolUsedHeapMemoryLimit.Committed;
case Max -> StreamCachingStrategy.SpoolUsedHeapMemoryLimit.Max;
};
}
streamCachingStrategy.setSpoolUsedHeapMemoryLimit(l);
}
@Override
public SpoolUsedHeapMemoryLimit getSpoolUsedHeapMemoryLimit() {
StreamCachingStrategy.SpoolUsedHeapMemoryLimit l = streamCachingStrategy.getSpoolUsedHeapMemoryLimit();
if (l == null) {
return null;
} else {
return switch (l) {
case Committed -> SpoolUsedHeapMemoryLimit.Committed;
case Max -> SpoolUsedHeapMemoryLimit.Max;
};
}
}
@Override
public void setBufferSize(int bufferSize) {
streamCachingStrategy.setBufferSize(bufferSize);
}
@Override
public int getBufferSize() {
return streamCachingStrategy.getBufferSize();
}
@Override
public void setRemoveSpoolDirectoryWhenStopping(boolean remove) {
streamCachingStrategy.setRemoveSpoolDirectoryWhenStopping(remove);
}
@Override
public boolean isRemoveSpoolDirectoryWhenStopping() {
return streamCachingStrategy.isRemoveSpoolDirectoryWhenStopping();
}
@Override
public void setAnySpoolRules(boolean any) {
streamCachingStrategy.setAnySpoolRules(any);
}
@Override
public boolean isAnySpoolRules() {
return streamCachingStrategy.isAnySpoolRules();
}
@Override
public long getCacheMemoryCounter() {
return streamCachingStrategy.getStatistics().getCacheMemoryCounter();
}
@Override
public long getCacheMemorySize() {
return streamCachingStrategy.getStatistics().getCacheMemorySize();
}
@Override
public long getCacheMemoryAverageSize() {
return streamCachingStrategy.getStatistics().getCacheMemoryAverageSize();
}
@Override
public long getCacheSpoolCounter() {
return streamCachingStrategy.getStatistics().getCacheSpoolCounter();
}
@Override
public long getCacheSpoolSize() {
return streamCachingStrategy.getStatistics().getCacheSpoolSize();
}
@Override
public long getCacheSpoolAverageSize() {
return streamCachingStrategy.getStatistics().getCacheSpoolAverageSize();
}
@Override
public boolean isStatisticsEnabled() {
return streamCachingStrategy.getStatistics().isStatisticsEnabled();
}
@Override
public void setStatisticsEnabled(boolean enabled) {
streamCachingStrategy.getStatistics().setStatisticsEnabled(enabled);
}
@Override
public void resetStatistics() {
streamCachingStrategy.getStatistics().reset();
}
}
| ManagedStreamCachingStrategy |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestDatanodeManager.java | {
"start": 14391,
"end": 14525
} | class ____ resolve method which always returns null
* in order to simulate unresolved topology mapping.
*/
public static | provides |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/update/UpdateRequest.java | {
"start": 2473,
"end": 29475
} | class ____ extends InstanceShardOperationRequest<UpdateRequest>
implements
DocWriteRequest<UpdateRequest>,
WriteRequest<UpdateRequest>,
ToXContentObject {
private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(UpdateRequest.class);
private static final ObjectParser<UpdateRequest, Void> PARSER;
private static final ParseField SCRIPT_FIELD = new ParseField("script");
private static final ParseField SCRIPTED_UPSERT_FIELD = new ParseField("scripted_upsert");
private static final ParseField UPSERT_FIELD = new ParseField("upsert");
private static final ParseField DOC_FIELD = new ParseField("doc");
private static final ParseField DOC_AS_UPSERT_FIELD = new ParseField("doc_as_upsert");
private static final ParseField DETECT_NOOP_FIELD = new ParseField("detect_noop");
private static final ParseField SOURCE_FIELD = new ParseField("_source");
private static final ParseField IF_SEQ_NO = new ParseField("if_seq_no");
private static final ParseField IF_PRIMARY_TERM = new ParseField("if_primary_term");
static {
PARSER = new ObjectParser<>(UpdateRequest.class.getSimpleName());
PARSER.declareField(
(request, script) -> request.script = script,
(parser, context) -> Script.parse(parser),
SCRIPT_FIELD,
ObjectParser.ValueType.OBJECT_OR_STRING
);
PARSER.declareBoolean(UpdateRequest::scriptedUpsert, SCRIPTED_UPSERT_FIELD);
PARSER.declareObject((request, builder) -> request.safeUpsertRequest().source(builder), (parser, context) -> {
XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
builder.copyCurrentStructure(parser);
return builder;
}, UPSERT_FIELD);
PARSER.declareObject((request, builder) -> request.safeDoc().source(builder), (parser, context) -> {
XContentBuilder docBuilder = XContentFactory.contentBuilder(parser.contentType());
docBuilder.copyCurrentStructure(parser);
return docBuilder;
}, DOC_FIELD);
PARSER.declareBoolean(UpdateRequest::docAsUpsert, DOC_AS_UPSERT_FIELD);
PARSER.declareBoolean(UpdateRequest::detectNoop, DETECT_NOOP_FIELD);
PARSER.declareField(
UpdateRequest::fetchSource,
(parser, context) -> FetchSourceContext.fromXContent(parser),
SOURCE_FIELD,
ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING
);
PARSER.declareLong(UpdateRequest::setIfSeqNo, IF_SEQ_NO);
PARSER.declareLong(UpdateRequest::setIfPrimaryTerm, IF_PRIMARY_TERM);
}
private String id;
@Nullable
private String routing;
@Nullable
Script script;
private FetchSourceContext fetchSourceContext;
private int retryOnConflict = 0;
private long ifSeqNo = UNASSIGNED_SEQ_NO;
private long ifPrimaryTerm = UNASSIGNED_PRIMARY_TERM;
private RefreshPolicy refreshPolicy = RefreshPolicy.NONE;
private ActiveShardCount waitForActiveShards = ActiveShardCount.DEFAULT;
private IndexRequest upsertRequest;
private boolean scriptedUpsert = false;
private boolean docAsUpsert = false;
private boolean detectNoop = true;
private boolean requireAlias = false;
@Nullable
private IndexRequest doc;
public UpdateRequest() {}
public UpdateRequest(StreamInput in) throws IOException {
this(null, in);
}
public UpdateRequest(@Nullable ShardId shardId, StreamInput in) throws IOException {
super(shardId, in);
waitForActiveShards = ActiveShardCount.readFrom(in);
if (in.getTransportVersion().before(TransportVersions.V_8_0_0)) {
String type = in.readString();
assert MapperService.SINGLE_MAPPING_NAME.equals(type) : "Expected [_doc] but received [" + type + "]";
}
id = in.readString();
routing = in.readOptionalString();
if (in.readBoolean()) {
script = new Script(in);
}
retryOnConflict = in.readVInt();
refreshPolicy = RefreshPolicy.readFrom(in);
if (in.readBoolean()) {
doc = new IndexRequest(shardId, in);
}
fetchSourceContext = in.readOptionalWriteable(FetchSourceContext::readFrom);
if (in.readBoolean()) {
upsertRequest = new IndexRequest(shardId, in);
}
docAsUpsert = in.readBoolean();
ifSeqNo = in.readZLong();
ifPrimaryTerm = in.readVLong();
detectNoop = in.readBoolean();
scriptedUpsert = in.readBoolean();
requireAlias = in.readBoolean();
}
public UpdateRequest(String index, String id) {
super(index);
this.id = id;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = super.validate();
if (upsertRequest != null && upsertRequest.version() != Versions.MATCH_ANY) {
validationException = addValidationError("can't provide version in upsert request", validationException);
}
if (Strings.isEmpty(id)) {
validationException = addValidationError("id is missing", validationException);
}
validationException = DocWriteRequest.validateSeqNoBasedCASParams(this, validationException);
validationException = DocWriteRequest.validateDocIdLength(id, validationException);
if (ifSeqNo != UNASSIGNED_SEQ_NO) {
if (retryOnConflict > 0) {
validationException = addValidationError("compare and write operations can not be retried", validationException);
}
if (docAsUpsert) {
validationException = addValidationError("compare and write operations can not be used with upsert", validationException);
}
if (upsertRequest != null) {
validationException = addValidationError(
"upsert requests don't support `if_seq_no` and `if_primary_term`",
validationException
);
}
}
if (script == null && doc == null) {
validationException = addValidationError("script or doc is missing", validationException);
}
if (script != null && doc != null) {
validationException = addValidationError("can't provide both script and doc", validationException);
}
if (doc == null && docAsUpsert) {
validationException = addValidationError("doc must be specified if doc_as_upsert is enabled", validationException);
}
return validationException;
}
/**
* The id of the indexed document.
*/
@Override
public String id() {
return id;
}
/**
* Sets the id of the indexed document.
*/
public UpdateRequest id(String id) {
this.id = id;
return this;
}
/**
* Controls the shard routing of the request. Using this value to hash the shard
* and not the id.
*/
@Override
public UpdateRequest routing(String routing) {
if (routing != null && routing.length() == 0) {
this.routing = null;
} else {
this.routing = routing;
}
return this;
}
/**
* Controls the shard routing of the request. Using this value to hash the shard
* and not the id.
*/
@Override
public String routing() {
return this.routing;
}
public ShardId getShardId() {
return this.shardId;
}
public Script script() {
return this.script;
}
/**
* The script to execute. Note, make sure not to send different script each times and instead
* use script params if possible with the same (automatically compiled) script.
*/
public UpdateRequest script(Script script) {
this.script = script;
return this;
}
/**
* Indicate that _source should be returned with every hit, with an
* "include" and/or "exclude" set which can include simple wildcard
* elements.
*
* @param include
* An optional include (optionally wildcarded) pattern to filter
* the returned _source
* @param exclude
* An optional exclude (optionally wildcarded) pattern to filter
* the returned _source
*/
public UpdateRequest fetchSource(@Nullable String include, @Nullable String exclude) {
FetchSourceContext context = this.fetchSourceContext == null ? FetchSourceContext.FETCH_SOURCE : this.fetchSourceContext;
String[] includes = include == null ? Strings.EMPTY_ARRAY : new String[] { include };
String[] excludes = exclude == null ? Strings.EMPTY_ARRAY : new String[] { exclude };
this.fetchSourceContext = FetchSourceContext.of(context.fetchSource(), includes, excludes);
return this;
}
/**
* Indicate that _source should be returned, with an
* "include" and/or "exclude" set which can include simple wildcard
* elements.
*
* @param includes
* An optional list of include (optionally wildcarded) pattern to
* filter the returned _source
* @param excludes
* An optional list of exclude (optionally wildcarded) pattern to
* filter the returned _source
*/
public UpdateRequest fetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
FetchSourceContext context = this.fetchSourceContext == null ? FetchSourceContext.FETCH_SOURCE : this.fetchSourceContext;
this.fetchSourceContext = FetchSourceContext.of(context.fetchSource(), includes, excludes);
return this;
}
/**
* Indicates whether the response should contain the updated _source.
*/
public UpdateRequest fetchSource(boolean fetchSource) {
FetchSourceContext context = this.fetchSourceContext == null ? FetchSourceContext.FETCH_SOURCE : this.fetchSourceContext;
this.fetchSourceContext = FetchSourceContext.of(fetchSource, context.includes(), context.excludes());
return this;
}
/**
* Explicitly set the fetch source context for this request
*/
public UpdateRequest fetchSource(FetchSourceContext context) {
this.fetchSourceContext = context;
return this;
}
/**
* Gets the {@link FetchSourceContext} which defines how the _source should
* be fetched.
*/
public FetchSourceContext fetchSource() {
return fetchSourceContext;
}
/**
* Sets the number of retries of a version conflict occurs because the document was updated between
* getting it and updating it. Defaults to 0.
*/
public UpdateRequest retryOnConflict(int retryOnConflict) {
this.retryOnConflict = retryOnConflict;
return this;
}
public int retryOnConflict() {
return this.retryOnConflict;
}
@Override
public UpdateRequest version(long version) {
throw new UnsupportedOperationException("update requests do not support versioning");
}
@Override
public long version() {
return Versions.MATCH_ANY;
}
@Override
public UpdateRequest versionType(VersionType versionType) {
throw new UnsupportedOperationException("update requests do not support versioning");
}
@Override
public VersionType versionType() {
return VersionType.INTERNAL;
}
/**
* only perform this update request if the document's modification was assigned the given
* sequence number. Must be used in combination with {@link #setIfPrimaryTerm(long)}
*
* If the document last modification was assigned a different sequence number a
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
*/
public UpdateRequest setIfSeqNo(long seqNo) {
if (seqNo < 0 && seqNo != UNASSIGNED_SEQ_NO) {
throw new IllegalArgumentException("sequence numbers must be non negative. got [" + seqNo + "].");
}
ifSeqNo = seqNo;
return this;
}
/**
* only performs this update request if the document's last modification was assigned the given
* primary term. Must be used in combination with {@link #setIfSeqNo(long)}
*
* If the document last modification was assigned a different term a
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
*/
public UpdateRequest setIfPrimaryTerm(long term) {
if (term < 0) {
throw new IllegalArgumentException("primary term must be non negative. got [" + term + "]");
}
ifPrimaryTerm = term;
return this;
}
/**
* If set, only perform this update request if the document was last modification was assigned this sequence number.
* If the document last modification was assigned a different sequence number a
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
*/
public long ifSeqNo() {
return ifSeqNo;
}
/**
* If set, only perform this update request if the document was last modification was assigned this primary term.
*
* If the document last modification was assigned a different term a
* {@link org.elasticsearch.index.engine.VersionConflictEngineException} will be thrown.
*/
public long ifPrimaryTerm() {
return ifPrimaryTerm;
}
@Override
public OpType opType() {
return OpType.UPDATE;
}
@Override
public UpdateRequest setRefreshPolicy(RefreshPolicy refreshPolicy) {
this.refreshPolicy = refreshPolicy;
return this;
}
@Override
public RefreshPolicy getRefreshPolicy() {
return refreshPolicy;
}
public ActiveShardCount waitForActiveShards() {
return this.waitForActiveShards;
}
/**
* Sets the number of shard copies that must be active before proceeding with the write.
* See {@link ReplicationRequest#waitForActiveShards(ActiveShardCount)} for details.
*/
public UpdateRequest waitForActiveShards(ActiveShardCount waitForActiveShards) {
this.waitForActiveShards = waitForActiveShards;
return this;
}
/**
* A shortcut for {@link #waitForActiveShards(ActiveShardCount)} where the numerical
* shard count is passed in, instead of having to first call {@link ActiveShardCount#from(int)}
* to get the ActiveShardCount.
*/
public UpdateRequest waitForActiveShards(final int waitForActiveShards) {
return waitForActiveShards(ActiveShardCount.from(waitForActiveShards));
}
    /**
     * Sets the partial document to merge into the existing document when no script is specified.
     * Unlike the other {@code doc(...)} overloads, this replaces the backing {@link IndexRequest}
     * wholesale rather than setting its source.
     */
    public UpdateRequest doc(IndexRequest doc) {
        this.doc = doc;
        return this;
    }
    /**
     * Sets the partial-document source (used when a script is not specified) from an
     * {@link XContentBuilder}.
     */
    public UpdateRequest doc(XContentBuilder source) {
        safeDoc().source(source);
        return this;
    }
    /**
     * Sets the partial-document source (used when a script is not specified) from a map.
     */
    public UpdateRequest doc(Map<String, Object> source) {
        safeDoc().source(source);
        return this;
    }
    /**
     * Sets the partial-document source (used when a script is not specified) from a map, encoded
     * with the given content type.
     */
    public UpdateRequest doc(Map<String, Object> source, XContentType contentType) {
        safeDoc().source(source, contentType);
        return this;
    }
    /**
     * Sets the partial-document source (used when a script is not specified) from a string in the
     * given content type.
     */
    public UpdateRequest doc(String source, XContentType xContentType) {
        safeDoc().source(source, xContentType);
        return this;
    }
    /**
     * Sets the partial-document source (used when a script is not specified) from a byte array in
     * the given content type.
     */
    public UpdateRequest doc(byte[] source, XContentType xContentType) {
        safeDoc().source(source, xContentType);
        return this;
    }
    /**
     * Sets the partial-document source (used when a script is not specified) from a slice of a
     * byte array in the given content type.
     */
    public UpdateRequest doc(byte[] source, int offset, int length, XContentType xContentType) {
        safeDoc().source(source, offset, length, xContentType);
        return this;
    }
    /**
     * Sets the partial-document source (used when a script is not specified) from alternating
     * field/value pairs.
     */
    public UpdateRequest doc(Object... source) {
        safeDoc().source(source);
        return this;
    }
    /**
     * Sets the partial-document source (used when a script is not specified) from a raw
     * {@link BytesReference} in the given content type.
     */
    public UpdateRequest doc(BytesReference source, XContentType contentType) {
        safeDoc().source(source, contentType);
        return this;
    }
    /**
     * Sets the partial-document source (used when a script is not specified) from alternating
     * field/value pairs, encoded with the given content type.
     */
    public UpdateRequest doc(XContentType xContentType, Object... source) {
        safeDoc().source(xContentType, source);
        return this;
    }
    /**
     * Returns the partial-document request set via one of the {@code doc(...)} methods, or
     * {@code null} if none has been set.
     */
    public IndexRequest doc() {
        return this.doc;
    }
private IndexRequest safeDoc() {
if (doc == null) {
doc = new IndexRequest();
}
return doc;
}
    /**
     * Sets the upsert document source (indexed when the target document does not exist) from a raw
     * {@link BytesReference} in the given content type.
     */
    public UpdateRequest upsert(BytesReference source, XContentType contentType) {
        safeUpsertRequest().source(source, contentType);
        return this;
    }
    /**
     * Sets the index request to be used if the document does not exist. Otherwise, a
     * {@link org.elasticsearch.index.engine.DocumentMissingException} is thrown.
     * Unlike the other {@code upsert(...)} overloads, this replaces the backing
     * {@link IndexRequest} wholesale rather than setting its source.
     */
    public UpdateRequest upsert(IndexRequest upsertRequest) {
        this.upsertRequest = upsertRequest;
        return this;
    }
    /**
     * Sets the upsert document source (indexed when the target document does not exist) from an
     * {@link XContentBuilder}.
     */
    public UpdateRequest upsert(XContentBuilder source) {
        safeUpsertRequest().source(source);
        return this;
    }
    /**
     * Sets the upsert document source (indexed when the target document does not exist) from a map.
     */
    public UpdateRequest upsert(Map<String, Object> source) {
        safeUpsertRequest().source(source);
        return this;
    }
    /**
     * Sets the upsert document source (indexed when the target document does not exist) from a
     * map, encoded with the given content type.
     */
    public UpdateRequest upsert(Map<String, Object> source, XContentType contentType) {
        safeUpsertRequest().source(source, contentType);
        return this;
    }
    /**
     * Sets the upsert document source (indexed when the target document does not exist) from a
     * string in the given content type.
     */
    public UpdateRequest upsert(String source, XContentType xContentType) {
        safeUpsertRequest().source(source, xContentType);
        return this;
    }
    /**
     * Sets the upsert document source (indexed when the target document does not exist) from a
     * byte array in the given content type.
     */
    public UpdateRequest upsert(byte[] source, XContentType xContentType) {
        safeUpsertRequest().source(source, xContentType);
        return this;
    }
    /**
     * Sets the upsert document source (indexed when the target document does not exist) from a
     * slice of a byte array in the given content type.
     */
    public UpdateRequest upsert(byte[] source, int offset, int length, XContentType xContentType) {
        safeUpsertRequest().source(source, offset, length, xContentType);
        return this;
    }
    /**
     * Sets the upsert document source (indexed when the target document does not exist) from
     * alternating field/value pairs.
     */
    public UpdateRequest upsert(Object... source) {
        safeUpsertRequest().source(source);
        return this;
    }
    /**
     * Sets the upsert document source (indexed when the target document does not exist) from
     * alternating field/value pairs, encoded with the given content type.
     */
    public UpdateRequest upsert(XContentType xContentType, Object... source) {
        safeUpsertRequest().source(xContentType, source);
        return this;
    }
    /**
     * Returns the upsert request set via one of the {@code upsert(...)} methods, or {@code null}
     * if none has been set.
     */
    public IndexRequest upsertRequest() {
        return this.upsertRequest;
    }
private IndexRequest safeUpsertRequest() {
if (upsertRequest == null) {
upsertRequest = new IndexRequest();
}
return upsertRequest;
}
    /**
     * Should this update attempt to detect if it is a noop? Defaults to true.
     * @param detectNoop whether to skip the update when the merged document is unchanged
     * @return this for chaining
     */
    public UpdateRequest detectNoop(boolean detectNoop) {
        this.detectNoop = detectNoop;
        return this;
    }
    /**
     * Should this update attempt to detect if it is a noop? Defaults to true.
     */
    public boolean detectNoop() {
        return detectNoop;
    }
    /**
     * Populates this request from its XContent representation (the REST request body), mutating
     * this instance in place via the static {@code PARSER} and returning it.
     */
    public UpdateRequest fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, this, null);
    }
    /**
     * Whether the partial document should also be used as the upsert document when the target
     * document does not exist.
     */
    public boolean docAsUpsert() {
        return this.docAsUpsert;
    }
    /**
     * Sets whether the partial document should also be used as the upsert document when the
     * target document does not exist.
     */
    public UpdateRequest docAsUpsert(boolean shouldUpsertDoc) {
        this.docAsUpsert = shouldUpsertDoc;
        return this;
    }
    /**
     * Whether the script should also run when the target document does not exist.
     */
    public boolean scriptedUpsert() {
        return this.scriptedUpsert;
    }
    /**
     * Sets whether the script should also run when the target document does not exist.
     */
    public UpdateRequest scriptedUpsert(boolean scriptedUpsert) {
        this.scriptedUpsert = scriptedUpsert;
        return this;
    }
    /** Whether the target index must be an alias for this request to be accepted. */
    @Override
    public boolean isRequireAlias() {
        return requireAlias;
    }
    @Override
    public boolean isRequireDataStream() {
        // Always false because data streams cannot accept update operations
        return false;
    }
    /** Routes this update to a shard based on the document id and optional custom routing. */
    @Override
    public int route(IndexRouting indexRouting) {
        return indexRouting.updateShard(id, routing);
    }
    // Uses the same id/routing based shard resolution as route() during resharding.
    @Override
    public int rerouteAtSourceDuringResharding(IndexRouting indexRouting) {
        return indexRouting.updateShard(id, routing);
    }
    /** Sets whether the target index must be an alias. */
    public UpdateRequest setRequireAlias(boolean requireAlias) {
        this.requireAlias = requireAlias;
        return this;
    }
    /** Serializes the full request, including the index name carried by the superclass. */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        doWrite(out, false);
    }
    /**
     * Serializes a "thin" form of the request; the nested index requests are written via
     * {@code writeThin} as well (see doWrite).
     */
    @Override
    public void writeThin(StreamOutput out) throws IOException {
        super.writeThin(out);
        doWrite(out, true);
    }
    /**
     * Shared serialization body for {@link #writeTo} and {@link #writeThin}.
     *
     * <p>The field order below is the wire format and must not change: waitForActiveShards,
     * (legacy type name before 8.0), id, routing, optional script, retryOnConflict, refreshPolicy,
     * doc, fetchSourceContext, upsert, then the boolean/seqno flags.
     *
     * @param thin when true the nested index requests are written with {@code writeThin}
     */
    private void doWrite(StreamOutput out, boolean thin) throws IOException {
        waitForActiveShards.writeTo(out);
        // Pre-8.0 nodes still expect the (now single) mapping type name on the wire.
        if (out.getTransportVersion().before(TransportVersions.V_8_0_0)) {
            out.writeString(MapperService.SINGLE_MAPPING_NAME);
        }
        out.writeString(id);
        out.writeOptionalString(routing);
        // The script is optional: a presence flag followed by the script itself.
        boolean hasScript = script != null;
        out.writeBoolean(hasScript);
        if (hasScript) {
            script.writeTo(out);
        }
        out.writeVInt(retryOnConflict);
        refreshPolicy.writeTo(out);
        writeIndexRequest(out, thin, doc);
        out.writeOptionalWriteable(fetchSourceContext);
        writeIndexRequest(out, thin, upsertRequest);
        out.writeBoolean(docAsUpsert);
        out.writeZLong(ifSeqNo);
        out.writeVLong(ifPrimaryTerm);
        out.writeBoolean(detectNoop);
        out.writeBoolean(scriptedUpsert);
        out.writeBoolean(requireAlias);
    }
private void writeIndexRequest(StreamOutput out, boolean thin, IndexRequest upsertRequest) throws IOException {
if (upsertRequest == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
// make sure the basics are set
upsertRequest.index(index);
upsertRequest.id(id);
if (thin) {
upsertRequest.writeThin(out);
} else {
upsertRequest.writeTo(out);
}
}
}
    /**
     * Renders this request as XContent (the REST representation). Only non-default parts are
     * emitted; the {@code doc} and {@code upsert} sources are re-parsed from their stored bytes
     * and embedded as structured objects rather than raw strings.
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        if (docAsUpsert) {
            builder.field("doc_as_upsert", docAsUpsert);
        }
        if (doc != null) {
            // Re-parse the doc's source bytes so it is embedded as an object, not a blob.
            XContentType xContentType = doc.getContentType();
            try (
                XContentParser parser = XContentHelper.createParser(
                    NamedXContentRegistry.EMPTY,
                    LoggingDeprecationHandler.INSTANCE,
                    doc.source(),
                    xContentType
                )
            ) {
                builder.field("doc");
                builder.copyCurrentStructure(parser);
            }
        }
        // Optimistic-concurrency fields are emitted together, only when a seq_no is assigned.
        if (ifSeqNo != UNASSIGNED_SEQ_NO) {
            builder.field(IF_SEQ_NO.getPreferredName(), ifSeqNo);
            builder.field(IF_PRIMARY_TERM.getPreferredName(), ifPrimaryTerm);
        }
        if (script != null) {
            builder.field("script", script);
        }
        if (upsertRequest != null) {
            // Same re-parse trick as for doc above, for the upsert source.
            XContentType xContentType = upsertRequest.getContentType();
            try (
                XContentParser parser = XContentHelper.createParser(
                    NamedXContentRegistry.EMPTY,
                    LoggingDeprecationHandler.INSTANCE,
                    upsertRequest.source(),
                    xContentType
                )
            ) {
                builder.field("upsert");
                builder.copyCurrentStructure(parser);
            }
        }
        if (scriptedUpsert) {
            builder.field("scripted_upsert", scriptedUpsert);
        }
        // detect_noop defaults to true, so it is only serialized when disabled.
        if (detectNoop == false) {
            builder.field("detect_noop", detectNoop);
        }
        if (fetchSourceContext != null) {
            builder.field("_source", fetchSourceContext);
        }
        builder.endObject();
        return builder;
    }
@Override
public String toString() {
StringBuilder res = new StringBuilder().append("update {[").append(index).append("][").append(id).append("]");
res.append(", doc_as_upsert[").append(docAsUpsert).append("]");
if (doc != null) {
res.append(", doc[").append(doc).append("]");
}
if (script != null) {
res.append(", script[").append(script).append("]");
}
if (upsertRequest != null) {
res.append(", upsert[").append(upsertRequest).append("]");
}
res.append(", scripted_upsert[").append(scriptedUpsert).append("]");
res.append(", detect_noop[").append(detectNoop).append("]");
return res.append("}").toString();
}
@Override
public long ramBytesUsed() {
long childRequestBytes = 0;
if (doc != null) {
childRequestBytes += doc.ramBytesUsed();
}
if (upsertRequest != null) {
childRequestBytes += upsertRequest.ramBytesUsed();
}
return SHALLOW_SIZE + RamUsageEstimator.sizeOf(id) + childRequestBytes;
}
}
| UpdateRequest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/validation/beanvalidation/BeanValidationBeanRegistrationAotProcessorTests.java | {
"start": 10231,
"end": 10327
} | class ____ {
Map<BeanWithRecursiveMap, BeanWithRecursiveMap> map;
}
static | BeanWithRecursiveMap |
java | elastic__elasticsearch | x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/EmbeddedCli.java | {
"start": 2092,
"end": 12846
} | class ____ implements Closeable {
private static final Logger logger = LogManager.getLogger(EmbeddedCli.class);
private final Thread exec;
private final Cli cli;
private final AtomicInteger returnCode = new AtomicInteger(Integer.MIN_VALUE);
private final AtomicReference<Exception> failure = new AtomicReference<>();
private final BufferedWriter out;
private final BufferedReader in;
/**
* Has the client already been closed?
*/
private boolean closed = false;
@SuppressWarnings("this-escape")
public EmbeddedCli(String elasticsearchAddress, boolean checkConnectionOnStartup, @Nullable SecurityConfig security)
throws IOException {
PipedOutputStream outgoing = new PipedOutputStream();
PipedInputStream cliIn = new PipedInputStream(outgoing);
PipedInputStream incoming = new PipedInputStream();
PipedOutputStream cliOut = new PipedOutputStream(incoming);
CliTerminal cliTerminal = new JLineTerminal(
new ExternalTerminal("test", "xterm-256color", cliIn, cliOut, StandardCharsets.UTF_8),
false
);
cli = new Cli(cliTerminal) {
};
out = new BufferedWriter(new OutputStreamWriter(outgoing, StandardCharsets.UTF_8));
in = new BufferedReader(new InputStreamReader(incoming, StandardCharsets.UTF_8));
List<String> args = new ArrayList<>();
if (security == null) {
args.add(elasticsearchAddress);
} else {
String address = security.user + "@" + elasticsearchAddress;
if (security.https) {
address = "https://" + address;
} else if (randomBoolean()) {
address = "http://" + address;
}
args.add(address);
if (security.keystoreLocation != null) {
args.add("-keystore_location");
args.add(security.keystoreLocation);
}
}
if (false == checkConnectionOnStartup) {
args.add("-check");
args.add("false");
}
args.add("-debug");
if (randomBoolean()) {
args.add("-binary");
args.add(Boolean.toString(randomBoolean()));
}
exec = new Thread(() -> {
try {
/*
* We don't really interact with the terminal because we're
* trying to test our interaction with jLine which doesn't
* support Elasticsearch's Terminal abstraction.
*/
Terminal terminal = MockTerminal.create();
int exitCode = cli.main(args.toArray(new String[0]), terminal, new ProcessInfo(Map.of(), Map.of(), createTempDir()));
returnCode.set(exitCode);
logger.info("cli exited with code [{}]", exitCode);
} catch (Exception e) {
failure.set(e);
}
});
exec.start();
try {
// Feed it passwords if needed
if (security != null) {
String passwordPrompt = "[?1h=[?2004hpassword: ";
if (security.keystoreLocation != null) {
assertEquals("[?1h=[?2004hkeystore password: ", readUntil(s -> s.endsWith(": ")));
out.write(security.keystorePassword + "\n");
out.flush();
logger.info("out: {}", security.keystorePassword);
// Read the newline echoed after the password prompt
assertEquals("", readLine());
/*
* And for some reason jLine adds a second one so
* consume that too. I'm not sure why it does this
* but it looks right when a use runs the cli.
*/
assertEquals("", readLine());
/*
* If we read the keystore password the console will
* emit some state reset escape sequences before the
* prompt for the password.
*/
passwordPrompt = "[?1l>[?1000l[?2004l[?1h=[?2004hpassword: ";
}
assertEquals(passwordPrompt, readUntil(s -> s.endsWith(": ")));
out.write(security.password + "\n");
out.flush();
logger.info("out: {}", security.password);
// Read the newline echoed after the password prompt
assertEquals("", readLine());
}
// Read until the first "good" line (skip the logo or read until an exception)
boolean isLogoOrException = false;
while (isLogoOrException == false) {
String line = readLine();
if ("SQL".equals(line.trim())) {
// it's almost the bottom of the logo, so read the next line (the version) and break out of the loop
readLine();
isLogoOrException = true;
} else if (line.contains("Exception")) {
// if it's an exception, just break out of the loop and don't read the next line
// as it will swallow the exception and IT tests won't catch it
isLogoOrException = true;
}
}
assertConnectionTest();
} catch (IOException e) {
try {
forceClose();
} catch (Exception closeException) {
e.addSuppressed(closeException);
throw e;
}
}
}
/**
* Assert that result of the connection test. Default implementation
* asserts that the test passes but overridden to check places where
* we want to assert that it fails.
*/
protected void assertConnectionTest() throws IOException {
// After the connection test passess we emit an empty line and then the prompt
assertEquals("", readLine());
}
/**
* Attempts an orderly shutdown of the CLI, reporting any unconsumed lines as errors.
*/
@Override
public void close() throws IOException {
if (closed) {
return;
}
try {
// Try and shutdown the client normally
/*
* Don't use command here because we want want
* to collect all the responses and report them
* as failures if there is a problem rather than
* failing on the first bad response.
*/
out.write("quit;\n");
out.flush();
List<String> nonQuit = new ArrayList<>();
String line;
while (true) {
line = readLine();
if (line == null) {
fail("got EOF before [Bye!]. Extras " + nonQuit);
}
if (line.contains("quit;")) {
continue;
}
if (line.contains("Bye!")) {
break;
}
if (false == line.isEmpty()) {
nonQuit.add(line);
}
}
assertThat("unconsumed lines", nonQuit, empty());
} finally {
forceClose();
}
assertEquals(0, returnCode.get());
}
/**
* Shutdown the connection to the remote CLI without attempting to shut
* the remote down in an orderly way.
*/
public void forceClose() throws IOException {
closed = true;
IOUtils.close(out, in, cli);
try {
exec.join(TimeUnit.SECONDS.toMillis(10));
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
Exception e = failure.get();
if (e != null) {
throw new RuntimeException("CLI thread failed", e);
}
}
/**
* Send a command and assert the echo.
*/
public String command(String command) throws IOException {
assertThat("; automatically added", command, not(endsWith(";")));
logger.info("out: {};", command);
out.write(command + ";\n");
out.flush();
for (String echo : expectedCommandEchos(command)) {
assertEquals(echo, readLine());
}
return readLine();
}
/**
* Create the "echo" that we expect jLine to send to the terminal
* while we're typing a command.
*/
private static List<String> expectedCommandEchos(String command) {
List<String> commandLines = Arrays.stream(command.split("\n")).filter(s -> s.isEmpty() == false).toList();
List<String> result = new ArrayList<>(commandLines.size() * 2);
result.add("[?1h=[?2004h[33msql> [0m" + commandLines.get(0));
// Every line gets an extra new line because, I dunno, but it looks right in the CLI
result.add("");
for (int i = 1; i < commandLines.size(); i++) {
result.add("[?1l>[?1000l[?2004l[?1h=[?2004h[33m | [0m" + commandLines.get(i));
// Every line gets an extra new line because, I dunno, but it looks right in the CLI
result.add("");
}
result.set(result.size() - 2, result.get(result.size() - 2) + ";");
return result;
}
public String readLine() throws IOException {
/*
* Since we can't *see* esc in the error messages we just
* remove it here and pretend it isn't required. Hopefully
* `[` is enough for us to assert on.
*
* `null` means EOF so we should just pass that back through.
*/
String line = in.readLine();
line = line == null ? null : line.replace("\u001B", "");
logger.info("in : {}", line);
return line;
}
private String readUntil(Predicate<String> end) throws IOException {
StringBuilder b = new StringBuilder();
String result;
while (true) {
int c = in.read();
if (c == -1) {
throw new IOException("got eof before end");
}
if (c == '\u001B') {
/*
* Since we can't *see* esc in the error messages we just
* remove it here and pretend it isn't required. Hopefully
* `[` is enough for us to assert on.
*/
continue;
}
b.append((char) c);
result = b.toString();
if (end.test(result)) {
break;
}
}
logger.info("in : {}", result);
return result;
}
public static | EmbeddedCli |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/shard/ShardNotFoundException.java | {
"start": 659,
"end": 1316
} | class ____ extends ResourceNotFoundException {
public ShardNotFoundException(ShardId shardId) {
this(shardId, null);
}
public ShardNotFoundException(ShardId shardId, Throwable ex) {
this(shardId, "no such shard", ex);
}
public ShardNotFoundException(ShardId shardId, String msg, Object... args) {
this(shardId, msg, null, args);
}
public ShardNotFoundException(ShardId shardId, String msg, Throwable ex, Object... args) {
super(msg, ex, args);
setShard(shardId);
}
public ShardNotFoundException(StreamInput in) throws IOException {
super(in);
}
}
| ShardNotFoundException |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng3052DepRepoAggregationTest.java | {
"start": 1510,
"end": 2670
} | class ____ extends AbstractMavenIntegrationTestCase {
@Test
public void testitMNG3052() throws Exception {
File testDir = extractResources("/mng-3052").getCanonicalFile();
Verifier verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.deleteArtifacts("org.apache.maven.its.mng3052");
verifier.filterFile("settings-template.xml", "settings.xml");
verifier.filterFile(
"repo-d/org/apache/maven/its/mng3052/direct/0.1-SNAPSHOT/template.pom",
"repo-d/org/apache/maven/its/mng3052/direct/0.1-SNAPSHOT/direct-0.1-20090517.133956-1.pom");
verifier.addCliArgument("--settings");
verifier.addCliArgument("settings.xml");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier.verifyArtifactPresent("org.apache.maven.its.mng3052", "direct", "0.1-SNAPSHOT", "jar");
verifier.verifyArtifactPresent("org.apache.maven.its.mng3052", "trans", "0.1-SNAPSHOT", "jar");
}
}
| MavenITmng3052DepRepoAggregationTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/util/concurrent/FutureUtils.java | {
"start": 49884,
"end": 51592
} | enum ____ {
;
static final ScheduledThreadPoolExecutor DELAYER =
new ScheduledThreadPoolExecutor(
1, new ExecutorThreadFactory("FlinkCompletableFutureDelayScheduler"));
/**
* Delay the given action by the given delay.
*
* @param runnable to execute after the given delay
* @param delay after which to execute the runnable
* @param timeUnit time unit of the delay
* @return Future of the scheduled action
*/
private static ScheduledFuture<?> delay(Runnable runnable, long delay, TimeUnit timeUnit) {
checkNotNull(runnable);
checkNotNull(timeUnit);
return DELAYER.schedule(runnable, delay, timeUnit);
}
}
/**
* Asserts that the given {@link CompletableFuture} is not completed exceptionally. If the
* future is completed exceptionally, then it will call the {@link FatalExitExceptionHandler}.
*
* @param completableFuture to assert for no exceptions
*/
public static void assertNoException(CompletableFuture<?> completableFuture) {
handleUncaughtException(completableFuture, FatalExitExceptionHandler.INSTANCE);
}
/**
* Checks that the given {@link CompletableFuture} is not completed exceptionally with the
* specified class. If the future is completed exceptionally with the specific class, then try
* to recover using a given exception handler. If the exception does not match the specified
* class, just pass it through to later stages.
*
* @param completableFuture to assert for a given exception
* @param exceptionClass exception | Delayer |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/DefaultExecutionGraphConstructionTest.java | {
"start": 23064,
"end": 23709
} | class ____<T extends InputSplit>
implements InputSplitSource<T> {
private final T[] inputSplits;
private final InputSplitAssigner assigner;
private TestingInputSplitSource(T[] inputSplits, InputSplitAssigner assigner) {
this.inputSplits = inputSplits;
this.assigner = assigner;
}
@Override
public T[] createInputSplits(int minNumSplits) throws Exception {
return inputSplits;
}
@Override
public InputSplitAssigner getInputSplitAssigner(T[] inputSplits) {
return assigner;
}
}
}
| TestingInputSplitSource |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/errors/TransactionalIdAuthorizationException.java | {
"start": 847,
"end": 1029
} | class ____ extends AuthorizationException {
public TransactionalIdAuthorizationException(final String message) {
super(message);
}
}
| TransactionalIdAuthorizationException |
java | apache__flink | flink-datastream/src/main/java/org/apache/flink/datastream/impl/extension/window/operators/MergingWindowSet.java | {
"start": 8799,
"end": 9561
} | interface ____<W> {
/**
* This gets called when a merge occurs.
*
* @param mergeResult The newly resulting merged {@code Window}.
* @param mergedWindows The merged {@code Window Windows}.
* @param stateWindowResult The state window of the merge result.
* @param mergedStateWindows The merged state windows.
* @throws Exception
*/
void merge(
W mergeResult,
Collection<W> mergedWindows,
W stateWindowResult,
Collection<W> mergedStateWindows)
throws Exception;
}
@Override
public String toString() {
return "MergingWindowSet{" + "windows=" + mapping + '}';
}
}
| MergeFunction |
java | google__guava | guava-testlib/test/com/google/common/testing/ClassSanityTesterTest.java | {
"start": 31668,
"end": 32224
} | class ____ {
// ignored by testEquals() since it has less parameters.
public BadEqualsWithParameterizedType() {}
public static BadEqualsWithParameterizedType create(
@SuppressWarnings("unused") ImmutableList<Iterable<? extends String>> s) {
return new BadEqualsWithParameterizedType();
}
@Override
public boolean equals(@Nullable Object obj) {
return obj instanceof BadEqualsWithParameterizedType;
}
@Override
public int hashCode() {
return 0;
}
}
static | BadEqualsWithParameterizedType |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/objects/SymmetricDateComparator.java | {
"start": 721,
"end": 1036
} | class ____ implements Comparator<Date> {
public static final SymmetricDateComparator SYMMETRIC_DATE_COMPARATOR = new SymmetricDateComparator();
@Override
public int compare(Date date1, Date date2) {
return date1.equals(date2) || date2.equals(date1) ? 0 : date1.compareTo(date2);
}
} | SymmetricDateComparator |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeotile.java | {
"start": 2904,
"end": 4006
} | class ____ implements BoundedGrid {
private final int precision;
private final GeoTileBoundedPredicate bounds;
private GeoTileBoundedGrid(int precision, GeoBoundingBox bbox) {
this.precision = checkPrecisionRange(precision);
this.bounds = new GeoTileBoundedPredicate(precision, bbox);
}
public long calculateGridId(Point point) {
final int tiles = 1 << precision;
final int x = GeoTileUtils.getXTile(point.getX(), tiles);
final int y = GeoTileUtils.getYTile(point.getY(), tiles);
if (bounds.validTile(x, y, precision)) {
return GeoTileUtils.longEncodeTiles(precision, x, y);
}
// GeoTileUtils uses the highest 6 bits to store the zoom level. However, MAX_ZOOM is 29, which takes 5 bits.
// This leaves the sign bit unused, so it can be used to indicate an invalid tile.
return -1L;
}
@Override
public int precision() {
return precision;
}
protected static | GeoTileBoundedGrid |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/format/EnumFormatShapeTest.java | {
"start": 629,
"end": 847
} | enum ____ {
A("a1"), B("b2");
@JsonProperty
protected final String value;
private PoNUM(String v) { value = v; }
public String getValue() { return value; }
}
static | PoNUM |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsDeleter.java | {
"start": 2219,
"end": 8567
} | class ____ {
private static final Logger logger = LogManager.getLogger(DataFrameAnalyticsDeleter.class);
private final Client client;
private final DataFrameAnalyticsAuditor auditor;
public DataFrameAnalyticsDeleter(Client client, DataFrameAnalyticsAuditor auditor) {
this.client = Objects.requireNonNull(client);
this.auditor = Objects.requireNonNull(auditor);
}
public void deleteAllDocuments(DataFrameAnalyticsConfig config, TimeValue timeout, ActionListener<AcknowledgedResponse> listener) {
final String id = config.getId();
// Step 3. Delete the config
ActionListener<BulkByScrollResponse> deleteStatsHandler = ActionListener.wrap(bulkByScrollResponse -> {
if (bulkByScrollResponse.isTimedOut()) {
logger.warn("[{}] DeleteByQuery for stats timed out", id);
}
if (bulkByScrollResponse.getBulkFailures().isEmpty() == false) {
logger.warn(
"[{}] {} failures and {} conflicts encountered while running DeleteByQuery for stats",
id,
bulkByScrollResponse.getBulkFailures().size(),
bulkByScrollResponse.getVersionConflicts()
);
for (BulkItemResponse.Failure failure : bulkByScrollResponse.getBulkFailures()) {
logger.warn("[{}] DBQ failure: {}", id, failure);
}
}
deleteConfig(id, listener);
}, failure -> {
logger.warn(() -> "[" + id + "] failed to remove stats", ExceptionsHelper.unwrapCause(failure));
deleteConfig(id, listener);
});
// Step 2. Delete job docs from stats index
ActionListener<BulkByScrollResponse> deleteStateHandler = ActionListener.wrap(bulkByScrollResponse -> {
if (bulkByScrollResponse.isTimedOut()) {
logger.warn("[{}] DeleteByQuery for state timed out", id);
}
if (bulkByScrollResponse.getBulkFailures().isEmpty() == false) {
logger.warn(
"[{}] {} failures and {} conflicts encountered while running DeleteByQuery for state",
id,
bulkByScrollResponse.getBulkFailures().size(),
bulkByScrollResponse.getVersionConflicts()
);
for (BulkItemResponse.Failure failure : bulkByScrollResponse.getBulkFailures()) {
logger.warn("[{}] DBQ failure: {}", id, failure);
}
}
deleteStats(id, timeout, deleteStatsHandler);
}, listener::onFailure);
// Step 1. Delete state
deleteState(config, timeout, deleteStateHandler);
}
private void deleteConfig(String id, ActionListener<AcknowledgedResponse> listener) {
DeleteRequest deleteRequest = new DeleteRequest(MlConfigIndex.indexName());
deleteRequest.id(DataFrameAnalyticsConfig.documentId(id));
deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
executeAsyncWithOrigin(client, ML_ORIGIN, TransportDeleteAction.TYPE, deleteRequest, ActionListener.wrap(deleteResponse -> {
if (deleteResponse.getResult() == DocWriteResponse.Result.NOT_FOUND) {
listener.onFailure(ExceptionsHelper.missingDataFrameAnalytics(id));
return;
}
assert deleteResponse.getResult() == DocWriteResponse.Result.DELETED;
logger.info("[{}] Deleted", id);
auditor.info(id, Messages.DATA_FRAME_ANALYTICS_AUDIT_DELETED);
listener.onResponse(AcknowledgedResponse.TRUE);
}, e -> {
if (ExceptionsHelper.unwrapCause(e) instanceof IndexNotFoundException) {
listener.onFailure(ExceptionsHelper.missingDataFrameAnalytics(id));
} else {
listener.onFailure(e);
}
}));
}
private void deleteState(DataFrameAnalyticsConfig config, TimeValue timeout, ActionListener<BulkByScrollResponse> listener) {
ActionListener<Boolean> deleteModelStateListener = listener.delegateFailureAndWrap(
(l, r) -> executeDeleteByQuery(
AnomalyDetectorsIndex.jobStateIndexPattern(),
QueryBuilders.idsQuery().addIds(StoredProgress.documentId(config.getId())),
timeout,
l
)
);
deleteModelState(config, timeout, 1, deleteModelStateListener);
}
private void deleteModelState(DataFrameAnalyticsConfig config, TimeValue timeout, int docNum, ActionListener<Boolean> listener) {
if (config.getAnalysis().persistsState() == false) {
listener.onResponse(true);
return;
}
IdsQueryBuilder query = QueryBuilders.idsQuery().addIds(config.getAnalysis().getStateDocIdPrefix(config.getId()) + docNum);
executeDeleteByQuery(
AnomalyDetectorsIndex.jobStateIndexPattern(),
query,
timeout,
listener.delegateFailureAndWrap((l, response) -> {
if (response.getDeleted() > 0) {
deleteModelState(config, timeout, docNum + 1, l);
return;
}
l.onResponse(true);
})
);
}
private void deleteStats(String jobId, TimeValue timeout, ActionListener<BulkByScrollResponse> listener) {
executeDeleteByQuery(
MlStatsIndex.indexPattern(),
QueryBuilders.termQuery(Fields.JOB_ID.getPreferredName(), jobId),
timeout,
listener
);
}
private void executeDeleteByQuery(String index, QueryBuilder query, TimeValue timeout, ActionListener<BulkByScrollResponse> listener) {
DeleteByQueryRequest request = new DeleteByQueryRequest(index);
request.setQuery(query);
request.setIndicesOptions(MlIndicesUtils.addIgnoreUnavailable(IndicesOptions.lenientExpandOpen()));
request.setSlices(AbstractBulkByScrollRequest.AUTO_SLICES);
request.setAbortOnVersionConflict(false);
request.setRefresh(true);
request.setTimeout(timeout);
executeAsyncWithOrigin(client, ML_ORIGIN, DeleteByQueryAction.INSTANCE, request, listener);
}
}
| DataFrameAnalyticsDeleter |
java | google__error-prone | core/src/test/java/com/google/errorprone/matchers/MethodMatchersTest.java | {
"start": 4427,
"end": 4899
} | class ____ {
public Foo(String s) {}
// BUG: Diagnostic contains:
private static final Function<String, Foo> make = Foo::new;
}
""")
.doTest();
}
@Test
public void constructorMatcherTest_regular() {
CompilationTestHelper.newInstance(ConstructorDeleter.class, getClass())
.addSourceLines(
"test/Foo.java",
"""
package test;
public | Foo |
java | quarkusio__quarkus | extensions/cache/runtime/src/main/java/io/quarkus/cache/runtime/CacheManagerRecorder.java | {
"start": 683,
"end": 7888
} | class ____ {
private final CacheBuildConfig cacheBuildConfig;
private final RuntimeValue<CacheConfig> cacheConfigRV;
public CacheManagerRecorder(CacheBuildConfig cacheBuildConfig, RuntimeValue<CacheConfig> cacheConfigRV) {
this.cacheBuildConfig = cacheBuildConfig;
this.cacheConfigRV = cacheConfigRV;
}
private CacheManagerInfo.Context createContextForCacheType(
String cacheType,
boolean micrometerMetricsEnabled) {
return new CacheManagerInfo.Context() {
private final Set<String> cacheNames = new HashSet<>();
@Override
public boolean cacheEnabled() {
return cacheConfigRV.getValue().enabled();
}
@Override
public Metrics metrics() {
return micrometerMetricsEnabled ? Metrics.MICROMETER : Metrics.NONE;
}
@Override
public String cacheType() {
return cacheType;
}
@Override
public Set<String> cacheNames() {
return cacheNames;
}
};
}
private Map<String, String> mapCacheTypeByCacheName(Set<String> cacheNames) {
Map<String, String> cacheTypeByName = new HashMap<>(cacheNames.size());
for (String cacheName : cacheNames) {
CacheBuildConfig.CacheTypeBuildConfig cacheTypeBuildConfig = cacheBuildConfig.cacheTypeByName().get(cacheName);
// check if the cache type is defined for this cache name "quarkus.cache.<cache-name>.type"
if (cacheTypeBuildConfig != null &&
cacheTypeBuildConfig.type() != null &&
!cacheTypeBuildConfig.type().isEmpty()) {
cacheTypeByName.put(cacheName, cacheTypeBuildConfig.type());
} else {
// if not, use the default cache type defined "quarkus.cache.type"
cacheTypeByName.put(cacheName, cacheBuildConfig.type());
}
}
return cacheTypeByName;
}
private Supplier<CacheManager> findSupplierForType(
CacheManagerInfo.Context context,
Collection<CacheManagerInfo> infos) {
for (CacheManagerInfo info : infos) {
if (info.supports(context)) {
return info.get(context);
}
}
throw new DeploymentException("Unknown cache type: " + context.cacheType());
}
private Map<String, Supplier<CacheManager>> createCacheSupplierByCacheType(
Collection<CacheManagerInfo> infos,
Set<String> cacheNames,
boolean micrometerMetricsEnabled) {
Map<String, String> cacheTypeByCacheName = mapCacheTypeByCacheName(cacheNames);
// create one context per cache type with their corresponding list of cache names
Map<String, CacheManagerInfo.Context> contextByCacheType = new HashMap<>();
for (String cacheName : cacheNames) {
contextByCacheType.computeIfAbsent(
cacheTypeByCacheName.get(cacheName),
cacheType -> this.createContextForCacheType(cacheType, micrometerMetricsEnabled))
.cacheNames()
.add(cacheName);
}
// suppliers grouped by cache type
Map<String, Supplier<CacheManager>> suppliersByType = new HashMap<>();
for (Map.Entry<String, CacheManagerInfo.Context> entry : contextByCacheType.entrySet()) {
String cacheType = entry.getKey();
if (!suppliersByType.containsKey(cacheType)) {
suppliersByType.put(cacheType, findSupplierForType(entry.getValue(), infos));
}
}
return suppliersByType;
}
public Supplier<CacheManager> resolveCacheInfo(
Collection<CacheManagerInfo> infos, Set<String> cacheNames,
boolean micrometerMetricsEnabled) {
Map<String, Supplier<CacheManager>> suppliersByType = createCacheSupplierByCacheType(
infos,
cacheNames,
micrometerMetricsEnabled);
return new Supplier<CacheManager>() {
@Override
public CacheManager get() {
if (suppliersByType.size() == 1) {
// if there is only one cache type, return the corresponding cache implementation
return suppliersByType.values().iterator().next().get();
}
// if there are multiple cache types, return a CacheManager implementation that aggregates all caches
// get the cache manager implementation by cache type of each supplier
Map<String, CacheManager> cacheImplByCacheType = new HashMap<>();
for (Map.Entry<String, Supplier<CacheManager>> entry : suppliersByType.entrySet()) {
cacheImplByCacheType.put(entry.getKey(), entry.getValue().get());
}
// put all cache implementations together in a single map indexed by cache name
Map<String, Cache> allCaches = new HashMap<>();
for (CacheManager cacheManager : cacheImplByCacheType.values()) {
for (String cacheName : cacheManager.getCacheNames()) {
cacheManager.getCache(cacheName).ifPresent(cache -> allCaches.put(cacheName, cache));
}
}
return new CacheManagerImpl(allCaches);
}
};
}
public CacheManagerInfo noOpCacheManagerInfo() {
return new CacheManagerInfo() {
@Override
public boolean supports(Context context) {
return !context.cacheEnabled();
}
@Override
public Supplier<CacheManager> get(Context context) {
return NoOpCacheManagerBuilder.build(context.cacheNames());
}
};
}
public CacheManagerInfo getCacheManagerInfoWithMicrometerMetrics() {
return new CacheManagerInfo() {
@Override
public boolean supports(Context context) {
return context.cacheEnabled() && context.cacheType().equals(CAFFEINE_CACHE_TYPE)
&& (context.metrics() == Context.Metrics.MICROMETER);
}
@Override
public Supplier<CacheManager> get(Context context) {
return CaffeineCacheManagerBuilder.buildWithMicrometerMetrics(context.cacheNames(), cacheConfigRV.getValue());
}
};
}
public CacheManagerInfo getCacheManagerInfoWithoutMetrics() {
return new CacheManagerInfo() {
@Override
public boolean supports(Context context) {
return context.cacheEnabled() && context.cacheType().equals(CAFFEINE_CACHE_TYPE)
&& (context.metrics() == Context.Metrics.NONE);
}
@Override
public Supplier<CacheManager> get(Context context) {
return CaffeineCacheManagerBuilder.buildWithoutMetrics(context.cacheNames(), cacheConfigRV.getValue());
}
};
}
}
| CacheManagerRecorder |
java | quarkusio__quarkus | integration-tests/devtools/src/test/java/io/quarkus/devtools/commands/CreateProjectTest.java | {
"start": 6285,
"end": 7583
} | class ____"))
.satisfies(checkContains("@RequestMapping(\"/bar\")"));
}
@Test
public void createRESTEasyAndSpringWeb() throws Exception {
final File file = new File("target/create-spring-resteasy");
final Path projectDir = file.toPath();
SnapshotTesting.deleteTestDirectory(file);
assertCreateProject(newCreateProject(projectDir)
.artifactId("spring-web-resteasy-app")
.resourceClassName("BarController")
.packageName("io.test")
.resourcePath("/bar")
.extensions(new HashSet<>(Arrays.asList("resteasy", "spring-web"))));
assertThat(projectDir.resolve("pom.xml"))
.exists()
.satisfies(checkContains("<artifactId>spring-web-resteasy-app</artifactId>"))
.satisfies(checkContains("<artifactId>quarkus-spring-web</artifactId>"))
.satisfies(checkContains("<artifactId>quarkus-resteasy</artifactId>"));
assertThat(projectDir.resolve("src/main/java/io/test/GreetingController.java"))
.exists()
.satisfies(checkContains("package io.test;"))
.satisfies(checkContains("@RestController"))
.satisfies(checkContains(" | BarController |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/extension-removed-resources/supersonic-provider/src/main/java/org/acme/SupersonicProvider.java | {
"start": 26,
"end": 159
} | class ____ implements WordProvider {
@Override
public String getWord() {
return "supersonic";
}
} | SupersonicProvider |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/embeddable/EmbeddableWithJavaTypeTest.java | {
"start": 1261,
"end": 4180
} | class ____ implements SessionFactoryScopeAware {
private SessionFactoryScope scope;
@Override
public void injectSessionFactoryScope(SessionFactoryScope scope) {
this.scope = scope;
}
// uses an embeddable with a custom java type
@ParameterizedTest
@ValueSource(strings = {
"select z from EntityEmbedCustom z where embedCustom.value=:datum",
"select z from EntityEmbedCustom z where :datum=embedCustom.value",
"select z from EntityEmbedCustom z where embedCustom=:datum", // this query failed with the bug
"select z from EntityEmbedCustom z where :datum=embedCustom",
"select z from EntityEmbedCustom z where embedCustom.value in (:datum)",
"select z from EntityEmbedCustom z where embedCustom in (:datum)" // failed as well
})
void hhh18898Test_embedCustom(String hql) {
// prepare
scope.inTransaction( session -> {
EntityEmbedCustom e = new EntityEmbedCustom();
e.id = 1;
EmbedCustom datum = new EmbedCustom();
datum.value = new MyDate( LocalDate.now() );
e.embedCustom = datum;
session.persist( e );
} );
// assert
scope.inTransaction( session -> {
QueryImplementor<EntityEmbedCustom> query = session.createQuery( hql, EntityEmbedCustom.class );
query.setParameter( "datum", new MyDate( LocalDate.now() ), MyDateJavaType.TYPE );
List<EntityEmbedCustom> resultList = query.getResultList();
assertFalse( resultList.isEmpty() );
assertEquals( LocalDate.now(), resultList.get( 0 ).embedCustom.value.wrapped );
session.remove( resultList.get( 0 ) );
} );
}
// uses an embeddable with a native java type
@ParameterizedTest
@ValueSource(strings = {
"select z from EntityEmbedNative z where embedNative.value=:datum",
"select z from EntityEmbedNative z where :datum=embedNative.value",
"select z from EntityEmbedNative z where embedNative=:datum", // this query failed with the bug
"select z from EntityEmbedNative z where :datum=embedNative",
"select z from EntityEmbedNative z where embedNative.value in (:datum)",
"select z from EntityEmbedNative z where embedNative in (:datum)" // failed as well
})
void hhh18898Test_embedSingle(String hql) {
// prepare
scope.inTransaction( session -> {
EntityEmbedNative e = new EntityEmbedNative();
e.id = 1;
EmbedNative datum = new EmbedNative();
datum.value = LocalDate.now();
e.embedNative = datum;
session.persist( e );
} );
// assert
scope.inTransaction( session -> {
QueryImplementor<EntityEmbedNative> query = session.createQuery( hql, EntityEmbedNative.class );
query.setParameter( "datum", LocalDate.now(), LocalDateJavaType.INSTANCE.getJavaType() );
List<EntityEmbedNative> resultList = query.getResultList();
assertFalse( resultList.isEmpty() );
assertEquals( LocalDate.now(), resultList.get( 0 ).embedNative.value );
session.remove( resultList.get( 0 ) );
} );
}
@Embeddable
public static | EmbeddableWithJavaTypeTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/java8stream/defaultimplementation/NoSetterStreamMappingTest.java | {
"start": 611,
"end": 1474
} | class ____ {
@ProcessorTest
public void compilesAndMapsCorrectly() {
NoSetterSource source = new NoSetterSource();
source.setListValues( Stream.of( "foo", "bar" ) );
NoSetterTarget target = NoSetterMapper.INSTANCE.toTarget( source );
assertThat( target.getListValues() ).containsExactly( "foo", "bar" );
// now test existing instances
NoSetterSource source2 = new NoSetterSource();
source2.setListValues( Stream.of( "baz" ) );
List<String> originalCollectionInstance = target.getListValues();
NoSetterTarget target2 = NoSetterMapper.INSTANCE.toTargetWithExistingTarget( source2, target );
assertThat( target2.getListValues() ).isSameAs( originalCollectionInstance );
assertThat( target2.getListValues() ).containsExactly( "baz" );
}
}
| NoSetterStreamMappingTest |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorStatusTests.java | {
"start": 552,
"end": 3261
} | class ____ extends AbstractWireSerializingTestCase<HashAggregationOperator.Status> {
public static HashAggregationOperator.Status simple() {
return new HashAggregationOperator.Status(500012, 200012, 123, 111, 222, 180017);
}
public static String simpleToJson() {
return """
{
"hash_nanos" : 500012,
"hash_time" : "500micros",
"aggregation_nanos" : 200012,
"aggregation_time" : "200micros",
"pages_processed" : 123,
"rows_received" : 111,
"rows_emitted" : 222,
"emit_nanos" : 180017,
"emit_time" : "180micros"
}""";
}
public void testToXContent() {
assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson()));
}
@Override
protected Writeable.Reader<HashAggregationOperator.Status> instanceReader() {
return HashAggregationOperator.Status::new;
}
@Override
public HashAggregationOperator.Status createTestInstance() {
return new HashAggregationOperator.Status(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeInt(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong()
);
}
@Override
protected HashAggregationOperator.Status mutateInstance(HashAggregationOperator.Status instance) {
long hashNanos = instance.hashNanos();
long aggregationNanos = instance.aggregationNanos();
int pagesProcessed = instance.pagesProcessed();
long rowsReceived = instance.rowsReceived();
long rowsEmitted = instance.rowsEmitted();
long emitNanos = instance.emitNanos();
switch (between(0, 5)) {
case 0 -> hashNanos = randomValueOtherThan(hashNanos, ESTestCase::randomNonNegativeLong);
case 1 -> aggregationNanos = randomValueOtherThan(aggregationNanos, ESTestCase::randomNonNegativeLong);
case 2 -> pagesProcessed = randomValueOtherThan(pagesProcessed, ESTestCase::randomNonNegativeInt);
case 3 -> rowsReceived = randomValueOtherThan(rowsReceived, ESTestCase::randomNonNegativeLong);
case 4 -> rowsEmitted = randomValueOtherThan(rowsEmitted, ESTestCase::randomNonNegativeLong);
case 5 -> emitNanos = randomValueOtherThan(emitNanos, ESTestCase::randomNonNegativeLong);
default -> throw new UnsupportedOperationException();
}
return new HashAggregationOperator.Status(hashNanos, aggregationNanos, pagesProcessed, rowsReceived, rowsEmitted, emitNanos);
}
}
| HashAggregationOperatorStatusTests |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/AsyncBranchingStep.java | {
"start": 842,
"end": 3608
} | class ____ extends AsyncActionStep {
public static final String NAME = "branch";
private final StepKey nextStepKeyOnFalse;
private final StepKey nextStepKeyOnTrue;
private final TriConsumer<ProjectId, IndexMetadata, ActionListener<Boolean>> asyncPredicate;
private final SetOnce<Boolean> predicateValue;
public AsyncBranchingStep(
StepKey key,
StepKey nextStepKeyOnFalse,
StepKey nextStepKeyOnTrue,
TriConsumer<ProjectId, IndexMetadata, ActionListener<Boolean>> asyncPredicate,
Client client
) {
// super.nextStepKey is set to null since it is not used by this step
super(key, null, client);
this.nextStepKeyOnFalse = nextStepKeyOnFalse;
this.nextStepKeyOnTrue = nextStepKeyOnTrue;
this.asyncPredicate = asyncPredicate;
this.predicateValue = new SetOnce<>();
}
@Override
public boolean isRetryable() {
return true;
}
@Override
public void performAction(
IndexMetadata indexMetadata,
ProjectState currentState,
ClusterStateObserver observer,
ActionListener<Void> listener
) {
asyncPredicate.apply(currentState.projectId(), indexMetadata, listener.safeMap(value -> {
predicateValue.set(value);
return null;
}));
}
@Override
public final StepKey getNextStepKey() {
if (predicateValue.get() == null) {
throw new IllegalStateException("Cannot call getNextStepKey before performAction");
}
return predicateValue.get() ? nextStepKeyOnTrue : nextStepKeyOnFalse;
}
/**
* @return the next step if {@code predicate} is false
*/
final StepKey getNextStepKeyOnFalse() {
return nextStepKeyOnFalse;
}
/**
* @return the next step if {@code predicate} is true
*/
final StepKey getNextStepKeyOnTrue() {
return nextStepKeyOnTrue;
}
/**
* @return the next step if {@code predicate} is true
*/
final TriConsumer<ProjectId, IndexMetadata, ActionListener<Boolean>> getAsyncPredicate() {
return asyncPredicate;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (super.equals(o) == false) return false;
AsyncBranchingStep that = (AsyncBranchingStep) o;
return super.equals(o)
&& Objects.equals(nextStepKeyOnFalse, that.nextStepKeyOnFalse)
&& Objects.equals(nextStepKeyOnTrue, that.nextStepKeyOnTrue);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), nextStepKeyOnFalse, nextStepKeyOnTrue);
}
}
| AsyncBranchingStep |
java | apache__hadoop | hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/SignerSecretProvider.java | {
"start": 1083,
"end": 1251
} | class ____ an implementation that
* supports rolling over the secret at a regular interval.
*/
@InterfaceStability.Unstable
@InterfaceAudience.Private
public abstract | for |
java | elastic__elasticsearch | modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java | {
"start": 2709,
"end": 5275
} | class ____ extends Plugin implements ActionPlugin {
static final String ROUTE = "/_test/trashing-alloc";
@Override
public Collection<RestHandler> getRestHandlers(
Settings settings,
NamedWriteableRegistry namedWriteableRegistry,
RestController restController,
ClusterSettings clusterSettings,
IndexScopedSettings indexScopedSettings,
SettingsFilter settingsFilter,
IndexNameExpressionResolver indexNameExpressionResolver,
Supplier<DiscoveryNodes> nodesInCluster,
Predicate<NodeFeature> clusterSupportsFeature
) {
return List.of(new BaseRestHandler() {
@Override
public String getName() {
return ROUTE;
}
@Override
public List<Route> routes() {
return List.of(new Route(RestRequest.Method.POST, ROUTE));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
var content = request.content();
var iter = content.iterator();
return (chan) -> {
request.getHttpRequest().release();
assertFalse(content.hasReferences());
BytesRef br;
while ((br = iter.next()) != null) {
for (int i = br.offset; i < br.offset + br.length; i++) {
if (br.bytes[i] != 0) {
fail(
new AssertionError(
"buffer is not trashed, off="
+ br.offset
+ " len="
+ br.length
+ " pos="
+ i
+ " ind="
+ (i - br.offset)
)
);
}
}
}
chan.sendResponse(new RestResponse(RestStatus.OK, ""));
};
}
});
}
}
}
| Handler |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/config/meta/ConfigClassesAndProfileResolverWithCustomDefaultsMetaConfigWithOverridesTests.java | {
"start": 1590,
"end": 1817
} | class ____ {
@Autowired
private String foo;
@Test
void foo() {
assertThat(foo).isEqualTo("Local Dev Foo");
}
}
@Configuration
@Profile("dev")
| ConfigClassesAndProfileResolverWithCustomDefaultsMetaConfigWithOverridesTests |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/parsing/Location.java | {
"start": 1341,
"end": 2329
} | class ____ {
private final Resource resource;
private final @Nullable Object source;
/**
* Create a new instance of the {@link Location} class.
* @param resource the resource with which this location is associated
*/
public Location(Resource resource) {
this(resource, null);
}
/**
* Create a new instance of the {@link Location} class.
* @param resource the resource with which this location is associated
* @param source the actual location within the associated resource
* (may be {@code null})
*/
public Location(Resource resource, @Nullable Object source) {
Assert.notNull(resource, "Resource must not be null");
this.resource = resource;
this.source = source;
}
/**
* Get the resource with which this location is associated.
*/
public Resource getResource() {
return this.resource;
}
/**
* Get the actual location within the associated {@link #getResource() resource}
* (may be {@code null}).
* <p>See the {@link Location | Location |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/contract/rawlocal/TestRawlocalContractSetTimes.java | {
"start": 1046,
"end": 1253
} | class ____ extends
AbstractContractSetTimesTest {
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new RawlocalFSContract(conf);
}
}
| TestRawlocalContractSetTimes |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/AbstractPropertyAccessorTests.java | {
"start": 66038,
"end": 66581
} | class ____ {
private String city;
private Country country;
private Address(String city, String country) {
this.city = city;
this.country = new Country(country);
}
public Address() {
this("DefaultCity", "DefaultCountry");
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public Country getCountry() {
return country;
}
public void setCountry(Country country) {
this.country = country;
}
}
@SuppressWarnings("unused")
private static | Address |
java | resilience4j__resilience4j | resilience4j-core/src/test/java/io/github/resilience4j/core/TestContextPropagators.java | {
"start": 1984,
"end": 2599
} | class ____<T> implements ContextPropagator<T> {
@Override
public Supplier<Optional<T>> retrieve() {
return () -> (Optional<T>) TestThreadLocalContextHolder.get();
}
@Override
public Consumer<Optional<T>> copy() {
return (t) -> t.ifPresent(e -> {
clear();
TestThreadLocalContextHolder.put(e);
});
}
@Override
public Consumer<Optional<T>> clear() {
return t -> TestThreadLocalContextHolder.clear();
}
public static | TestThreadLocalContextPropagatorWithHolder |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/extractor/CompositeKeyExtractor.java | {
"start": 740,
"end": 2974
} | class ____ implements BucketExtractor {
/**
* Key or Composite extractor.
*/
static final String NAME = "k";
private final String key;
private final boolean isDateTimeBased;
/**
* Constructs a new <code>CompositeKeyExtractor</code> instance.
*/
public CompositeKeyExtractor(String key, boolean isDateTimeBased) {
this.key = key;
this.isDateTimeBased = isDateTimeBased;
}
CompositeKeyExtractor(StreamInput in) throws IOException {
key = in.readString();
isDateTimeBased = in.readBoolean();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(key);
out.writeBoolean(isDateTimeBased);
}
public String key() {
return key;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Object extract(Bucket bucket) {
// get the composite value
Object m = bucket.getKey();
if ((m instanceof Map) == false) {
throw new EqlIllegalArgumentException("Unexpected bucket returned: {}", m);
}
Object object = ((Map<?, ?>) m).get(key);
if (isDateTimeBased) {
if (object == null) {
return object;
} else if (object instanceof Long) {
// object = DateUtils.asDateTimeWithNanos(((Long) object).longValue(), zoneId);
return object;
} else {
throw new EqlIllegalArgumentException("Invalid date key returned: {}", object);
}
}
return object;
}
@Override
public int hashCode() {
return Objects.hash(key, isDateTimeBased);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
CompositeKeyExtractor other = (CompositeKeyExtractor) obj;
return Objects.equals(key, other.key) && Objects.equals(isDateTimeBased, other.isDateTimeBased);
}
@Override
public String toString() {
return "|" + key + "|";
}
}
| CompositeKeyExtractor |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/NNThroughputBenchmark.java | {
"start": 17380,
"end": 19307
} | class ____ extends OperationStatsBase {
// Operation types
static final String OP_CLEAN_NAME = "clean";
static final String OP_CLEAN_USAGE = "-op clean";
CleanAllStats(List<String> args) {
super();
parseArguments(args);
numOpsRequired = 1;
numThreads = 1;
keepResults = true;
}
@Override
String getOpName() {
return OP_CLEAN_NAME;
}
@Override
void parseArguments(List<String> args) {
boolean ignoreUnrelatedOptions = verifyOpArgument(args);
if(args.size() > 2 && !ignoreUnrelatedOptions)
printUsage();
}
@Override
void generateInputs(int[] opsPerThread) throws IOException {
// do nothing
}
/**
* Does not require the argument
*/
@Override
String getExecutionArgument(int daemonId) {
return null;
}
/**
* Remove entire benchmark directory.
*/
@Override
long executeOp(int daemonId, int inputIdx, String ignore)
throws IOException {
if (!nonSuperUser) {
try{
clientProto.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_LEAVE,
false);
} catch (Exception e){
LOG.error("Potentially insufficient permission: try running the tool" +
" with -nonSuperUser argument or login as super user");
throw e;
}
}
long start = Time.now();
clientProto.delete(getBaseDirName(), true);
long end = Time.now();
return end-start;
}
@Override
void printResults() {
LOG.info("--- " + getOpName() + " inputs ---");
LOG.info("Remove directory " + getBaseDirName());
printStats();
}
}
/**
* File creation statistics.
*
* Each thread creates the same (+ or -1) number of files.
* File names are pre-generated during initialization.
* The created files do not have blocks.
*/
| CleanAllStats |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/cacheable/api/Order.java | {
"start": 387,
"end": 881
} | class ____ {
private int id;
private int total;
public Order() {
}
public Order(int total) {
this.total = total;
}
public Order(int id, int total) {
this.id = id;
this.total = total;
}
@Id
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getTotal() {
return total;
}
public void setTotal(int total) {
this.total = total;
}
public String toString() {
return "Order id=" + getId() + ", total=" + getTotal();
}
}
| Order |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client-jackson/deployment/src/test/java/io/quarkus/rest/client/reactive/jackson/test/InvalidJsonFromServerTest.java | {
"start": 702,
"end": 1422
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(JsonObject.class, JsonClient.class, InvalidJsonEndpoint.class));
@RestClient
JsonClient client;
@Test
public void test() {
assertThatThrownBy(() -> client.get())
.isInstanceOf(ClientWebApplicationException.class)
.hasMessageContaining("HTTP 200")
.cause()
.hasMessageContaining("was expecting double-quote to start field name");
}
@Path("/invalid-json")
@RegisterRestClient(baseUri = "http://localhost:8081")
public | InvalidJsonFromServerTest |
java | apache__camel | components/camel-telemetry/src/main/java/org/apache/camel/telemetry/decorators/SpringRabbitmqSpanDecorator.java | {
"start": 927,
"end": 1676
} | class ____ extends AbstractMessagingSpanDecorator {
@Override
public String getComponent() {
return "spring-rabbitmq";
}
@Override
protected String getDestination(Exchange exchange, Endpoint endpoint) {
// when using toD for dynamic destination then extract from header
String destination = exchange.getMessage().getHeader("CamelSpringRabbitmqExchangeOverrideName", String.class);
if (destination == null) {
destination = super.getDestination(exchange, endpoint);
}
return destination;
}
@Override
public String getComponentClassName() {
return "org.apache.camel.component.springrabbitmq.SpringRabbitMQComponent";
}
}
| SpringRabbitmqSpanDecorator |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/engine/discovery/DiscoverySelectorsTests.java | {
"start": 11298,
"end": 16597
} | class ____ {
@SuppressWarnings("DataFlowIssue")
@Test
void selectClasspathResourcesPreconditions() {
// @formatter:off
assertPreconditionViolationFor(() -> selectClasspathResource((String) null));
assertPreconditionViolationFor(() -> selectClasspathResource(""));
assertPreconditionViolationFor(() -> selectClasspathResource("/"));
assertPreconditionViolationFor(() -> selectClasspathResource(" "));
assertPreconditionViolationFor(() -> selectClasspathResource("/ "));
assertPreconditionViolationFor(() -> selectClasspathResource("\t"));
assertPreconditionViolationFor(() -> selectClasspathResource("/\t"));
assertPreconditionViolationFor(() -> selectClasspathResourceByName(null));
assertPreconditionViolationFor(() -> selectClasspathResourceByName(Collections.emptySet()));
assertPreconditionViolationFor(() -> selectClasspathResourceByName(Collections.singleton(null)));
assertPreconditionViolationFor(() -> selectClasspathResourceByName(Set.of(new StubResource(null))));
assertPreconditionViolationFor(() -> selectClasspathResourceByName(Set.of(new StubResource(""))));
assertPreconditionViolationFor(() -> selectClasspathResourceByName(Set.of(new StubResource("a"), new StubResource("b"))));
// @formatter:on
}
@Test
void selectIndividualClasspathResources() {
// with unnecessary "/" prefix
var selector = selectClasspathResource("/foo/bar/spec.xml");
assertEquals("foo/bar/spec.xml", selector.getClasspathResourceName());
// standard use case
selector = selectClasspathResource("A/B/C/spec.json");
assertEquals("A/B/C/spec.json", selector.getClasspathResourceName());
}
@Test
void getSelectedClasspathResources() {
var selector = selectClasspathResource("org/junit/platform/commons/example.resource");
var classpathResources = selector.getResources();
assertAll(() -> assertThat(classpathResources).hasSize(1), //
() -> assertThat(classpathResources) //
.extracting(Resource::getName) //
.containsExactly("org/junit/platform/commons/example.resource") //
);
}
@Test
void getMissingClasspathResources() {
var selector = selectClasspathResource("org/junit/platform/commons/no-such-example.resource");
assertPreconditionViolationFor(selector::getResources);
}
@SuppressWarnings("DataFlowIssue")
@Test
void selectClasspathResourcesWithFilePosition() {
var filePosition = FilePosition.from(12, 34);
assertPreconditionViolationFor(() -> selectClasspathResource(null, filePosition));
assertPreconditionViolationFor(() -> selectClasspathResource("", filePosition));
assertPreconditionViolationFor(() -> selectClasspathResource(" ", filePosition));
assertPreconditionViolationFor(() -> selectClasspathResource("\t", filePosition));
// with unnecessary "/" prefix
var selector = selectClasspathResource("/foo/bar/spec.xml", filePosition);
assertEquals("foo/bar/spec.xml", selector.getClasspathResourceName());
assertEquals(FilePosition.from(12, 34), selector.getPosition().orElseThrow());
// standard use case
selector = selectClasspathResource("A/B/C/spec.json", filePosition);
assertEquals("A/B/C/spec.json", selector.getClasspathResourceName());
assertEquals(filePosition, selector.getPosition().orElseThrow());
}
@Test
void parseClasspathResources() {
// with unnecessary "/" prefix
var selector = parseIdentifier(selectClasspathResource("/foo/bar/spec.xml"));
assertThat(selector) //
.asInstanceOf(type(ClasspathResourceSelector.class)) //
.extracting(ClasspathResourceSelector::getClasspathResourceName,
ClasspathResourceSelector::getPosition) //
.containsExactly("foo/bar/spec.xml", Optional.empty());
// standard use case
selector = parseIdentifier(selectClasspathResource("A/B/C/spec.json"));
assertThat(selector) //
.asInstanceOf(type(ClasspathResourceSelector.class)) //
.extracting(ClasspathResourceSelector::getClasspathResourceName,
ClasspathResourceSelector::getPosition) //
.containsExactly("A/B/C/spec.json", Optional.empty());
}
@Test
void parseClasspathResourcesWithFilePosition() {
var filePosition = FilePosition.from(12, 34);
// with unnecessary "/" prefix
var selector = parseIdentifier(selectClasspathResource("/foo/bar/spec.xml", FilePosition.from(12, 34)));
assertThat(selector) //
.asInstanceOf(type(ClasspathResourceSelector.class)) //
.extracting(ClasspathResourceSelector::getClasspathResourceName,
ClasspathResourceSelector::getPosition) //
.containsExactly("foo/bar/spec.xml", Optional.of(filePosition));
// standard use case
selector = parseIdentifier(selectClasspathResource("A/B/C/spec.json", FilePosition.from(12, 34)));
assertThat(selector) //
.asInstanceOf(type(ClasspathResourceSelector.class)) //
.extracting(ClasspathResourceSelector::getClasspathResourceName,
ClasspathResourceSelector::getPosition) //
.containsExactly("A/B/C/spec.json", Optional.of(filePosition));
}
private record StubResource(String name) implements Resource {
@Override
public String getName() {
return name();
}
@Override
public URI getUri() {
throw new UnsupportedOperationException();
}
}
}
@Nested
| SelectClasspathResourceTests |
java | netty__netty | buffer/src/test/java/io/netty/buffer/AbstractByteBufTest.java | {
"start": 103235,
"end": 200732
} | enum ____ {
STRING,
ASCII_STRING;
public CharSequence create(char[] cs) {
switch (this) {
case STRING:
return new String(cs);
case ASCII_STRING:
return new AsciiString(cs);
default:
throw new UnsupportedOperationException("Unknown type: " + this);
}
}
}
@SuppressWarnings("unchecked")
@ParameterizedTest
@MethodSource("setCharSequenceCombinations")
void testSetCharSequenceMultipleThreads(final Charset charset, CharSequenceType charSeqType) throws Exception {
int bufSize = 32;
ByteBuf[] bufs = new ByteBuf[16];
for (int i = 0; i < bufs.length; i++) {
bufs[i] = newBuffer(bufSize);
}
int iterations = 256;
Semaphore start = new Semaphore(0);
Semaphore finish = new Semaphore(0);
char[] cs = new char[(int) (bufSize / charset.newEncoder().maxBytesPerChar())];
Arrays.fill(cs, 'a');
final CharSequence str = charSeqType.create(cs);
ExecutorService executor = Executors.newFixedThreadPool(bufs.length);
try {
Future<Void>[] futures = new Future[bufs.length];
for (int i = 0; i < bufs.length; i++) {
final ByteBuf buf = bufs[i];
futures[i] = executor.submit(() -> {
finish.release();
start.acquire();
for (int j = 0; j < iterations; j++) {
buf.setCharSequence(0, str, charset);
}
return null;
});
}
finish.acquire(bufs.length);
start.release(bufs.length);
Exception e = null;
for (Future<Void> future : futures) {
try {
future.get();
} catch (InterruptedException ex) {
if (e != null) {
ex.addSuppressed(e);
}
throw ex; // Propagate interrupted exceptions immediately.
} catch (ExecutionException ex) {
if (e != null) {
e = ex;
} else {
e.addSuppressed(ex);
}
}
}
if (e != null) {
fail("Worker threads failed", e);
}
} finally {
executor.shutdown();
for (ByteBuf buf : bufs) {
buf.release();
}
}
}
@Test
public void readByteThrowsIndexOutOfBoundsException() {
final ByteBuf buffer = newBuffer(8);
try {
assertThrows(IndexOutOfBoundsException.class, new Executable() {
@Override
public void execute() {
buffer.writeByte(0);
assertEquals((byte) 0, buffer.readByte());
buffer.readByte();
}
});
} finally {
buffer.release();
}
}
@Test
@SuppressWarnings("ForLoopThatDoesntUseLoopVariable")
public void testNioBufferExposeOnlyRegion() {
final ByteBuf buffer = newBuffer(8);
byte[] data = new byte[8];
random.nextBytes(data);
buffer.writeBytes(data);
ByteBuffer nioBuf = buffer.nioBuffer(1, data.length - 2);
assertEquals(0, nioBuf.position());
assertEquals(6, nioBuf.remaining());
for (int i = 1; nioBuf.hasRemaining(); i++) {
assertEquals(data[i], nioBuf.get());
}
buffer.release();
}
@Test
public void ensureWritableWithForceDoesNotThrow() {
    ensureWritableDoesNotThrow(true);
}

@Test
public void ensureWritableWithOutForceDoesNotThrow() {
    ensureWritableDoesNotThrow(false);
}

// ensureWritable(int, boolean) on a completely full buffer must not throw,
// regardless of whether 'force' is set.
private void ensureWritableDoesNotThrow(boolean force) {
    final ByteBuf buf = newBuffer(8);
    buf.writerIndex(buf.capacity());
    buf.ensureWritable(8, force);
    buf.release();
}

@Test
public void ensureWritableWithForceAsReadyOnly() {
    ensureWritableReadOnly(true);
}

@Test
public void ensureWritableWithOutForceAsReadOnly() {
    ensureWritableReadOnly(false);
}

// A read-only view can never grow: ensureWritable must report failure (1)
// without throwing, whether or not 'force' is set.
private void ensureWritableReadOnly(boolean force) {
    final ByteBuf buf = newBuffer(8);
    buf.writerIndex(buf.capacity());
    assertEquals(1, buf.asReadOnly().ensureWritable(8, force));
    buf.release();
}
// See:
// - https://github.com/netty/netty/issues/2587
// - https://github.com/netty/netty/issues/2580
@Test
public void testLittleEndianWithExpand() {
    // Start with zero capacity so the write forces an expansion; the
    // little-endian byte order must survive the reallocation.
    ByteBuf buf = newBuffer(0).order(LITTLE_ENDIAN);
    buf.writeInt(0x12345678);
    assertEquals("78563412", ByteBufUtil.hexDump(buf));
    buf.release();
}
/**
 * Returns a freshly allocated 8-byte buffer whose only reference has already
 * been released (refCnt == 0). Used by the *AfterRelease tests below to
 * verify that every {@link ByteBuf} operation rejects a dead buffer with
 * {@code IllegalReferenceCountException}.
 */
private ByteBuf releasedBuffer() {
    ByteBuf buffer = newBuffer(8);
    // Clear the buffer so we are sure the reader and writer indices are 0.
    // This is important as we may return a slice from newBuffer(...).
    buffer.clear();
    assertTrue(buffer.release());
    return buffer;
}
// Every accessor invoked on a released buffer must be rejected with
// IllegalReferenceCountException, for every primitive width and byte order.

@Test
public void testDiscardReadBytesAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().discardReadBytes());
}

@Test
public void testDiscardSomeReadBytesAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().discardSomeReadBytes());
}

@Test
public void testEnsureWritableAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().ensureWritable(16));
}

@Test
public void testGetBooleanAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getBoolean(0));
}

@Test
public void testGetByteAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getByte(0));
}

@Test
public void testGetUnsignedByteAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getUnsignedByte(0));
}

@Test
public void testGetShortAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getShort(0));
}

@Test
public void testGetShortLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getShortLE(0));
}

@Test
public void testGetUnsignedShortAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getUnsignedShort(0));
}

@Test
public void testGetUnsignedShortLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getUnsignedShortLE(0));
}

@Test
public void testGetMediumAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getMedium(0));
}

@Test
public void testGetMediumLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getMediumLE(0));
}

@Test
public void testGetUnsignedMediumAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getUnsignedMedium(0));
}

@Test
public void testGetIntAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getInt(0));
}

@Test
public void testGetIntLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getIntLE(0));
}

@Test
public void testGetUnsignedIntAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getUnsignedInt(0));
}

@Test
public void testGetUnsignedIntLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getUnsignedIntLE(0));
}

@Test
public void testGetLongAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getLong(0));
}

@Test
public void testGetLongLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getLongLE(0));
}

@Test
public void testGetCharAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getChar(0));
}

@Test
public void testGetFloatAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getFloat(0));
}

@Test
public void testGetFloatLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getFloatLE(0));
}

@Test
public void testGetDoubleAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getDouble(0));
}

@Test
public void testGetDoubleLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getDoubleLE(0));
}
// getBytes(...) into destinations of every flavor must be rejected once the
// source buffer has been released.

@Test
public void testGetBytesAfterRelease() {
    final ByteBuf dst = buffer(8);
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getBytes(0, dst));
    } finally {
        dst.release();
    }
}

@Test
public void testGetBytesAfterRelease2() {
    final ByteBuf dst = buffer();
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getBytes(0, dst, 1));
    } finally {
        dst.release();
    }
}

@Test
public void testGetBytesAfterRelease3() {
    final ByteBuf dst = buffer();
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getBytes(0, dst, 0, 1));
    } finally {
        dst.release();
    }
}

@Test
public void testGetBytesAfterRelease4() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getBytes(0, new byte[8]));
}

@Test
public void testGetBytesAfterRelease5() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getBytes(0, new byte[8], 0, 1));
}

@Test
public void testGetBytesAfterRelease6() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().getBytes(0, ByteBuffer.allocate(8)));
}

@Test
public void testGetBytesAfterRelease7() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().getBytes(0, new ByteArrayOutputStream(), 1));
}

@Test
public void testGetBytesAfterRelease8() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().getBytes(0, new DevNullGatheringByteChannel(), 1));
}
// setXxx(...) on a released buffer must always be rejected, for every
// primitive width, byte order, and bulk-transfer source.

@Test
public void testSetBooleanAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setBoolean(0, true));
}

@Test
public void testSetByteAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setByte(0, 1));
}

@Test
public void testSetShortAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setShort(0, 1));
}

@Test
public void testSetShortLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setShortLE(0, 1));
}

@Test
public void testSetMediumAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setMedium(0, 1));
}

@Test
public void testSetMediumLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setMediumLE(0, 1));
}

@Test
public void testSetIntAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setInt(0, 1));
}

@Test
public void testSetIntLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setIntLE(0, 1));
}

@Test
public void testSetLongAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setLong(0, 1));
}

@Test
public void testSetLongLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setLongLE(0, 1));
}

@Test
public void testSetCharAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setChar(0, 1));
}

@Test
public void testSetFloatAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setFloat(0, 1));
}

@Test
public void testSetDoubleAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setDouble(0, 1));
}

@Test
public void testSetBytesAfterRelease() {
    final ByteBuf src = buffer();
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setBytes(0, src));
    } finally {
        src.release();
    }
}

@Test
public void testSetBytesAfterRelease2() {
    final ByteBuf src = buffer();
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setBytes(0, src, 1));
    } finally {
        src.release();
    }
}

@Test
public void testSetBytesAfterRelease3() {
    final ByteBuf src = buffer();
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setBytes(0, src, 0, 1));
    } finally {
        src.release();
    }
}

@Test
public void testSetUsAsciiCharSequenceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> testSetCharSequenceAfterRelease0(CharsetUtil.US_ASCII));
}

@Test
public void testSetIso88591CharSequenceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> testSetCharSequenceAfterRelease0(CharsetUtil.ISO_8859_1));
}

@Test
public void testSetUtf8CharSequenceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> testSetCharSequenceAfterRelease0(CharsetUtil.UTF_8));
}

@Test
public void testSetUtf16CharSequenceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> testSetCharSequenceAfterRelease0(CharsetUtil.UTF_16));
}

// Shared driver: attempts a setCharSequence(...) write in the given charset.
private void testSetCharSequenceAfterRelease0(Charset charset) {
    releasedBuffer().setCharSequence(0, "x", charset);
}

@Test
public void testSetBytesAfterRelease4() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setBytes(0, new byte[8]));
}

@Test
public void testSetBytesAfterRelease5() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setBytes(0, new byte[8], 0, 1));
}

@Test
public void testSetBytesAfterRelease6() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setBytes(0, ByteBuffer.allocate(8)));
}

@Test
public void testSetBytesAfterRelease7() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().setBytes(0, new ByteArrayInputStream(new byte[8]), 1));
}

@Test
public void testSetBytesAfterRelease8() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().setBytes(0, new TestScatteringByteChannel(), 1));
}

@Test
public void testSetZeroAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().setZero(0, 1));
}
// readXxx(...) on a released buffer must always be rejected.

@Test
public void testReadBooleanAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readBoolean());
}

@Test
public void testReadByteAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readByte());
}

@Test
public void testReadUnsignedByteAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readUnsignedByte());
}

@Test
public void testReadShortAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readShort());
}

@Test
public void testReadShortLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readShortLE());
}

@Test
public void testReadUnsignedShortAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readUnsignedShort());
}

@Test
public void testReadUnsignedShortLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readUnsignedShortLE());
}

@Test
public void testReadMediumAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readMedium());
}

@Test
public void testReadMediumLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readMediumLE());
}

@Test
public void testReadUnsignedMediumAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readUnsignedMedium());
}

@Test
public void testReadUnsignedMediumLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readUnsignedMediumLE());
}

@Test
public void testReadIntAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readInt());
}

@Test
public void testReadIntLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readIntLE());
}

@Test
public void testReadUnsignedIntAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readUnsignedInt());
}

@Test
public void testReadUnsignedIntLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readUnsignedIntLE());
}

@Test
public void testReadLongAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readLong());
}

@Test
public void testReadLongLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readLongLE());
}

@Test
public void testReadCharAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readChar());
}

@Test
public void testReadFloatAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readFloat());
}

@Test
public void testReadFloatLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readFloatLE());
}

@Test
public void testReadDoubleAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readDouble());
}

@Test
public void testReadDoubleLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readDoubleLE());
}

@Test
public void testReadBytesAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readBytes(1));
}

@Test
public void testReadBytesAfterRelease2() {
    final ByteBuf dst = buffer(8);
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readBytes(dst));
    } finally {
        dst.release();
    }
}
@Test
public void testReadBytesAfterRelease3() {
    // Exercises the readBytes(ByteBuf, int) overload. Previously this test
    // was a byte-for-byte duplicate of testReadBytesAfterRelease2
    // (readBytes(ByteBuf)), leaving the length-taking overload uncovered.
    final ByteBuf buffer = buffer(8);
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readBytes(buffer, 1));
    } finally {
        buffer.release();
    }
}
@Test
public void testReadBytesAfterRelease4() {
    final ByteBuf dst = buffer(8);
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readBytes(dst, 0, 1));
    } finally {
        dst.release();
    }
}

@Test
public void testReadBytesAfterRelease5() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readBytes(new byte[8]));
}

@Test
public void testReadBytesAfterRelease6() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readBytes(new byte[8], 0, 1));
}

@Test
public void testReadBytesAfterRelease7() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().readBytes(ByteBuffer.allocate(8)));
}

@Test
public void testReadBytesAfterRelease8() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().readBytes(new ByteArrayOutputStream(), 1));
}
@Test
public void testReadBytesAfterRelease9() {
    // NOTE(review): this is identical to testReadBytesAfterRelease8 — both
    // call readBytes(OutputStream, int). Presumably a different overload
    // (e.g. the FileChannel variant) was intended here; confirm and update.
    assertThrows(IllegalReferenceCountException.class, new Executable() {
        @Override
        public void execute() throws IOException {
            releasedBuffer().readBytes(new ByteArrayOutputStream(), 1);
        }
    });
}
@Test
public void testReadBytesAfterRelease10() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().readBytes(new DevNullGatheringByteChannel(), 1));
}
// writeXxx(...) on a released buffer must always be rejected, for every
// primitive width, byte order, and bulk-transfer source.

@Test
public void testWriteBooleanAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeBoolean(true));
}

@Test
public void testWriteByteAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeByte(1));
}

@Test
public void testWriteShortAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeShort(1));
}

@Test
public void testWriteShortLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeShortLE(1));
}

@Test
public void testWriteMediumAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeMedium(1));
}

@Test
public void testWriteMediumLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeMediumLE(1));
}

@Test
public void testWriteIntAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeInt(1));
}

@Test
public void testWriteIntLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeIntLE(1));
}

@Test
public void testWriteLongAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeLong(1));
}

@Test
public void testWriteLongLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeLongLE(1));
}

@Test
public void testWriteCharAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeChar(1));
}

@Test
public void testWriteFloatAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeFloat(1));
}

@Test
public void testWriteFloatLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeFloatLE(1));
}

@Test
public void testWriteDoubleAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeDouble(1));
}

@Test
public void testWriteDoubleLEAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeDoubleLE(1));
}

@Test
public void testWriteBytesAfterRelease() {
    final ByteBuf src = buffer(8);
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeBytes(src));
    } finally {
        src.release();
    }
}

@Test
public void testWriteBytesAfterRelease2() {
    final ByteBuf src = copiedBuffer(new byte[8]);
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeBytes(src, 1));
    } finally {
        src.release();
    }
}

@Test
public void testWriteBytesAfterRelease3() {
    final ByteBuf src = buffer(8);
    try {
        assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeBytes(src, 0, 1));
    } finally {
        src.release();
    }
}

@Test
public void testWriteBytesAfterRelease4() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeBytes(new byte[8]));
}

@Test
public void testWriteBytesAfterRelease5() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeBytes(new byte[8], 0, 1));
}

@Test
public void testWriteBytesAfterRelease6() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeBytes(ByteBuffer.allocate(8)));
}

@Test
public void testWriteBytesAfterRelease7() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().writeBytes(new ByteArrayInputStream(new byte[8]), 1));
}

@Test
public void testWriteBytesAfterRelease8() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().writeBytes(new TestScatteringByteChannel(), 1));
}

@Test
public void testWriteZeroAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().writeZero(1));
}

@Test
public void testWriteUsAsciiCharSequenceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> testWriteCharSequenceAfterRelease0(CharsetUtil.US_ASCII));
}

@Test
public void testWriteIso88591CharSequenceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> testWriteCharSequenceAfterRelease0(CharsetUtil.ISO_8859_1));
}

@Test
public void testWriteUtf8CharSequenceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> testWriteCharSequenceAfterRelease0(CharsetUtil.UTF_8));
}

@Test
public void testWriteUtf16CharSequenceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> testWriteCharSequenceAfterRelease0(CharsetUtil.UTF_16));
}

// Shared driver: attempts a writeCharSequence(...) in the given charset.
private void testWriteCharSequenceAfterRelease0(Charset charset) {
    releasedBuffer().writeCharSequence("x", charset);
}
// Iteration and copying on a released buffer must be rejected.

@Test
public void testForEachByteAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().forEachByte(new TestByteProcessor()));
}

@Test
public void testForEachByteAfterRelease1() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().forEachByte(0, 1, new TestByteProcessor()));
}

@Test
public void testForEachByteDescAfterRelease() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().forEachByteDesc(new TestByteProcessor()));
}

@Test
public void testForEachByteDescAfterRelease1() {
    assertThrows(IllegalReferenceCountException.class,
            () -> releasedBuffer().forEachByteDesc(0, 1, new TestByteProcessor()));
}

@Test
public void testCopyAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().copy());
}
@Test
public void testCopyAfterRelease1() {
    // Exercises the ranged copy(index, length) overload. Previously this
    // test was an exact duplicate of testCopyAfterRelease (no-arg copy()),
    // leaving the ranged overload uncovered.
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().copy(0, 1));
}
// NIO views and raw-access methods on a released buffer must be rejected.

@Test
public void testNioBufferAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().nioBuffer());
}

@Test
public void testNioBufferAfterRelease1() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().nioBuffer(0, 1));
}

@Test
public void testInternalNioBufferAfterRelease() {
    testInternalNioBufferAfterRelease0(IllegalReferenceCountException.class);
}

// Overridable hook: subclasses may expect a different failure type for
// internalNioBuffer(...) on a released buffer.
protected void testInternalNioBufferAfterRelease0(final Class<? extends Throwable> expectedException) {
    final ByteBuf released = releasedBuffer();
    assertThrows(expectedException, () -> released.internalNioBuffer(released.readerIndex(), 1));
}

@Test
public void testNioBuffersAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().nioBuffers());
}

@Test
public void testNioBuffersAfterRelease2() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().nioBuffers(0, 1));
}

@Test
public void testArrayAfterRelease() {
    ByteBuf released = releasedBuffer();
    // array() only applies to heap-backed buffers; others would throw
    // UnsupportedOperationException regardless of the reference count.
    if (released.hasArray()) {
        assertThrows(IllegalReferenceCountException.class, released::array);
    }
}

@Test
public void testMemoryAddressAfterRelease() {
    ByteBuf released = releasedBuffer();
    // memoryAddress() only applies to direct buffers with a native address.
    if (released.hasMemoryAddress()) {
        assertThrows(IllegalReferenceCountException.class, released::memoryAddress);
    }
}
@Test
public void testSliceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().slice());
}

@Test
public void testSliceAfterRelease2() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().slice(0, 1));
}

// Drops any remaining reference on each buffer, then verifies that slice()
// is rejected on every member of the (now dead) derived-buffer chain.
private static void assertSliceFailAfterRelease(ByteBuf... bufs) {
    for (ByteBuf b : bufs) {
        if (b.refCnt() > 0) {
            b.release();
        }
    }
    for (ByteBuf b : bufs) {
        assertEquals(0, b.refCnt());
        assertThrows(IllegalReferenceCountException.class, b::slice);
    }
}

@Test
public void testSliceAfterReleaseRetainedSlice() {
    ByteBuf base = newBuffer(1);
    assertSliceFailAfterRelease(base, base.retainedSlice(0, 1));
}

@Test
public void testSliceAfterReleaseRetainedSliceDuplicate() {
    ByteBuf base = newBuffer(1);
    ByteBuf slice = base.retainedSlice(0, 1);
    assertSliceFailAfterRelease(base, slice, slice.duplicate());
}

@Test
public void testSliceAfterReleaseRetainedSliceRetainedDuplicate() {
    ByteBuf base = newBuffer(1);
    ByteBuf slice = base.retainedSlice(0, 1);
    assertSliceFailAfterRelease(base, slice, slice.retainedDuplicate());
}

@Test
public void testSliceAfterReleaseRetainedDuplicate() {
    ByteBuf base = newBuffer(1);
    assertSliceFailAfterRelease(base, base.retainedDuplicate());
}

@Test
public void testSliceAfterReleaseRetainedDuplicateSlice() {
    ByteBuf base = newBuffer(1);
    ByteBuf dup = base.retainedDuplicate();
    assertSliceFailAfterRelease(base, dup, dup.slice(0, 1));
}
@Test
public void testRetainedSliceAfterRelease() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().retainedSlice());
}

@Test
public void testRetainedSliceAfterRelease2() {
    assertThrows(IllegalReferenceCountException.class, () -> releasedBuffer().retainedSlice(0, 1));
}

// Drops any remaining reference on each buffer, then verifies that
// retainedSlice() is rejected on every member of the dead chain.
private static void assertRetainedSliceFailAfterRelease(ByteBuf... bufs) {
    for (ByteBuf b : bufs) {
        if (b.refCnt() > 0) {
            b.release();
        }
    }
    for (ByteBuf b : bufs) {
        assertEquals(0, b.refCnt());
        assertThrows(IllegalReferenceCountException.class, b::retainedSlice);
    }
}

@Test
public void testRetainedSliceAfterReleaseRetainedSlice() {
    ByteBuf base = newBuffer(1);
    assertRetainedSliceFailAfterRelease(base, base.retainedSlice(0, 1));
}

@Test
public void testRetainedSliceAfterReleaseRetainedSliceDuplicate() {
    ByteBuf base = newBuffer(1);
    ByteBuf slice = base.retainedSlice(0, 1);
    assertRetainedSliceFailAfterRelease(base, slice, slice.duplicate());
}

@Test
public void testRetainedSliceAfterReleaseRetainedSliceRetainedDuplicate() {
    ByteBuf base = newBuffer(1);
    ByteBuf slice = base.retainedSlice(0, 1);
    assertRetainedSliceFailAfterRelease(base, slice, slice.retainedDuplicate());
}

@Test
public void testRetainedSliceAfterReleaseRetainedDuplicate() {
    ByteBuf base = newBuffer(1);
    assertRetainedSliceFailAfterRelease(base, base.retainedDuplicate());
}

@Test
public void testRetainedSliceAfterReleaseRetainedDuplicateSlice() {
    ByteBuf base = newBuffer(1);
    ByteBuf dup = base.retainedDuplicate();
    assertRetainedSliceFailAfterRelease(base, dup, dup.slice(0, 1));
}
@Test
public void testDuplicateAfterRelease() {
assertThrows(IllegalReferenceCountException.class, new Executable() {
@Override
public void execute() {
releasedBuffer().duplicate();
}
});
}
@Test
public void testRetainedDuplicateAfterRelease() {
assertThrows(IllegalReferenceCountException.class, new Executable() {
@Override
public void execute() {
releasedBuffer().retainedDuplicate();
}
});
}
private static void assertDuplicateFailAfterRelease(ByteBuf... bufs) {
for (ByteBuf buf : bufs) {
if (buf.refCnt() > 0) {
buf.release();
}
}
for (ByteBuf buf : bufs) {
try {
assertEquals(0, buf.refCnt());
buf.duplicate();
fail();
} catch (IllegalReferenceCountException ignored) {
// as expected
}
}
}
    // duplicate() on chained retained views must fail after all references are released.
    @Test
    public void testDuplicateAfterReleaseRetainedSliceDuplicate() {
        ByteBuf buf = newBuffer(1);
        ByteBuf buf2 = buf.retainedSlice(0, 1);
        ByteBuf buf3 = buf2.duplicate();
        assertDuplicateFailAfterRelease(buf, buf2, buf3);
    }
    @Test
    public void testDuplicateAfterReleaseRetainedDuplicate() {
        ByteBuf buf = newBuffer(1);
        ByteBuf buf2 = buf.retainedDuplicate();
        assertDuplicateFailAfterRelease(buf, buf2);
    }
    @Test
    public void testDuplicateAfterReleaseRetainedDuplicateSlice() {
        ByteBuf buf = newBuffer(1);
        ByteBuf buf2 = buf.retainedDuplicate();
        ByteBuf buf3 = buf2.slice(0, 1);
        assertDuplicateFailAfterRelease(buf, buf2, buf3);
    }
private static void assertRetainedDuplicateFailAfterRelease(ByteBuf... bufs) {
for (ByteBuf buf : bufs) {
if (buf.refCnt() > 0) {
buf.release();
}
}
for (ByteBuf buf : bufs) {
try {
assertEquals(0, buf.refCnt());
buf.retainedDuplicate();
fail();
} catch (IllegalReferenceCountException ignored) {
// as expected
}
}
}
    // retainedDuplicate() on views must fail after all references are released.
    @Test
    public void testRetainedDuplicateAfterReleaseRetainedDuplicate() {
        ByteBuf buf = newBuffer(1);
        ByteBuf buf2 = buf.retainedDuplicate();
        assertRetainedDuplicateFailAfterRelease(buf, buf2);
    }
    @Test
    public void testRetainedDuplicateAfterReleaseDuplicate() {
        ByteBuf buf = newBuffer(1);
        ByteBuf buf2 = buf.duplicate();
        assertRetainedDuplicateFailAfterRelease(buf, buf2);
    }
    @Test
    public void testRetainedDuplicateAfterReleaseRetainedSlice() {
        ByteBuf buf = newBuffer(1);
        ByteBuf buf2 = buf.retainedSlice(0, 1);
        assertRetainedDuplicateFailAfterRelease(buf, buf2);
    }
    // A plain slice shares the parent's reference count: releasing the slice
    // releases the parent.
    @Test
    public void testSliceRelease() {
        ByteBuf buf = newBuffer(8);
        assertEquals(1, buf.refCnt());
        assertTrue(buf.slice().release());
        assertEquals(0, buf.refCnt());
    }
    // readSlice/readRetainedSlice beyond readable bytes must throw IndexOutOfBoundsException.
    @Test
    public void testReadSliceOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testReadSliceOutOfBounds(false);
            }
        });
    }
    @Test
    public void testReadRetainedSliceOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testReadSliceOutOfBounds(true);
            }
        });
    }
    /**
     * Writes 50 readable bytes and then tries to read a 51-byte slice, which is
     * expected to throw IndexOutOfBoundsException (asserted by the callers via
     * assertThrows). fail() only triggers if no exception was thrown.
     *
     * @param retainedSlice whether to use readRetainedSlice (true) or readSlice (false)
     */
    private void testReadSliceOutOfBounds(boolean retainedSlice) {
        ByteBuf buf = newBuffer(100);
        try {
            buf.writeZero(50);
            if (retainedSlice) {
                buf.readRetainedSlice(51);
            } else {
                buf.readSlice(51);
            }
            fail();
        } finally {
            buf.release();
        }
    }
    // writeCharSequence must expand the buffer when capacity is insufficient,
    // across several charsets.
    @Test
    public void testWriteUsAsciiCharSequenceExpand() {
        testWriteCharSequenceExpand(CharsetUtil.US_ASCII);
    }
    @Test
    public void testWriteUtf8CharSequenceExpand() {
        testWriteCharSequenceExpand(CharsetUtil.UTF_8);
    }
    @Test
    public void testWriteIso88591CharSequenceExpand() {
        testWriteCharSequenceExpand(CharsetUtil.ISO_8859_1);
    }
    @Test
    public void testWriteUtf16CharSequenceExpand() {
        testWriteCharSequenceExpand(CharsetUtil.UTF_16);
    }
    /**
     * Positions the writer index at capacity - 1 (so "AB" cannot fit) and checks
     * that writeCharSequence still succeeds and advances the writer index by the
     * number of bytes written, i.e. the buffer expanded.
     */
    private void testWriteCharSequenceExpand(Charset charset) {
        ByteBuf buf = newBuffer(1);
        try {
            int writerIndex = buf.capacity() - 1;
            buf.writerIndex(writerIndex);
            int written = buf.writeCharSequence("AB", charset);
            assertEquals(writerIndex, buf.writerIndex() - written);
        } finally {
            buf.release();
        }
    }
    // setCharSequence must NOT expand the buffer: writing past capacity throws
    // IndexOutOfBoundsException, across several charsets.
    @Test
    public void testSetUsAsciiCharSequenceNoExpand() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSetCharSequenceNoExpand(CharsetUtil.US_ASCII);
            }
        });
    }
    @Test
    public void testSetUtf8CharSequenceNoExpand() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSetCharSequenceNoExpand(CharsetUtil.UTF_8);
            }
        });
    }
    @Test
    public void testSetIso88591CharSequenceNoExpand() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSetCharSequenceNoExpand(CharsetUtil.ISO_8859_1);
            }
        });
    }
    @Test
    public void testSetUtf16CharSequenceNoExpand() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSetCharSequenceNoExpand(CharsetUtil.UTF_16);
            }
        });
    }
    // Attempts to set a 2-char sequence into a 1-byte buffer; expected to throw
    // (asserted by the callers via assertThrows).
    private void testSetCharSequenceNoExpand(Charset charset) {
        ByteBuf buf = newBuffer(1);
        try {
            buf.setCharSequence(0, "AB", charset);
        } finally {
            buf.release();
        }
    }
    // set/getCharSequence round-trips across several charsets.
    @Test
    public void testSetUsAsciiCharSequence() {
        testSetGetCharSequence(CharsetUtil.US_ASCII);
    }
    @Test
    public void testSetUtf8CharSequence() {
        testSetGetCharSequence(CharsetUtil.UTF_8);
    }
    @Test
    public void testSetIso88591CharSequence() {
        testSetGetCharSequence(CharsetUtil.ISO_8859_1);
    }
    @Test
    public void testSetUtf16CharSequence() {
        testSetGetCharSequence(CharsetUtil.UTF_16);
    }
    // Shared test fixtures: all 256 single-byte code points (EXTENDED_ASCII_CHARS)
    // and the 7-bit ASCII subset (ASCII_CHARS) used when the charset cannot
    // represent the extended range.
    private static final CharBuffer EXTENDED_ASCII_CHARS, ASCII_CHARS;
    static {
        char[] chars = new char[256];
        for (char c = 0; c < chars.length; c++) {
            chars[c] = c;
        }
        EXTENDED_ASCII_CHARS = CharBuffer.wrap(chars);
        ASCII_CHARS = CharBuffer.wrap(chars, 0, 128);
    }
    /**
     * Round-trips a character sequence through setCharSequence/getCharSequence at
     * offset 1 and asserts the decoded content equals the input. US-ASCII uses the
     * 128-char subset since it cannot encode the extended range.
     */
    private void testSetGetCharSequence(Charset charset) {
        ByteBuf buf = newBuffer(1024);
        CharBuffer sequence = CharsetUtil.US_ASCII.equals(charset)
                ? ASCII_CHARS : EXTENDED_ASCII_CHARS;
        int bytes = buf.setCharSequence(1, sequence, charset);
        assertEquals(sequence, CharBuffer.wrap(buf.getCharSequence(1, bytes, charset)));
        buf.release();
    }
    // write/readCharSequence round-trips across several charsets.
    @Test
    public void testWriteReadUsAsciiCharSequence() {
        testWriteReadCharSequence(CharsetUtil.US_ASCII);
    }
    @Test
    public void testWriteReadUtf8CharSequence() {
        testWriteReadCharSequence(CharsetUtil.UTF_8);
    }
    @Test
    public void testWriteReadIso88591CharSequence() {
        testWriteReadCharSequence(CharsetUtil.ISO_8859_1);
    }
    @Test
    public void testWriteReadUtf16CharSequence() {
        testWriteReadCharSequence(CharsetUtil.UTF_16);
    }
    /**
     * Writes a character sequence at writer index 1 and reads it back from reader
     * index 1, asserting the decoded content equals the input. US-ASCII uses the
     * 128-char subset since it cannot encode the extended range.
     */
    private void testWriteReadCharSequence(Charset charset) {
        ByteBuf buf = newBuffer(1024);
        CharBuffer sequence = CharsetUtil.US_ASCII.equals(charset)
                ? ASCII_CHARS : EXTENDED_ASCII_CHARS;
        buf.writerIndex(1);
        int bytes = buf.writeCharSequence(sequence, charset);
        buf.readerIndex(1);
        assertEquals(sequence, CharBuffer.wrap(buf.readCharSequence(bytes, charset)));
        buf.release();
    }
    // writeCharSequence/readString round-trips across several charsets.
    @Test
    public void testWriteReadUsAsciiString() {
        testWriteReadString(CharsetUtil.US_ASCII);
    }
    @Test
    public void testWriteReadUtf8String() {
        testWriteReadString(CharsetUtil.UTF_8);
    }
    @Test
    public void testWriteReadIso88591String() {
        testWriteReadString(CharsetUtil.ISO_8859_1);
    }
    @Test
    public void testWriteReadUtf16String() {
        testWriteReadString(CharsetUtil.UTF_16);
    }
    /**
     * Same as testWriteReadCharSequence, but reads back via readString instead of
     * readCharSequence.
     */
    private void testWriteReadString(Charset charset) {
        ByteBuf buf = newBuffer(1024);
        CharBuffer sequence = CharsetUtil.US_ASCII.equals(charset)
                ? ASCII_CHARS : EXTENDED_ASCII_CHARS;
        buf.writerIndex(1);
        int bytes = buf.writeCharSequence(sequence, charset);
        buf.readerIndex(1);
        assertEquals(sequence, CharBuffer.wrap(buf.readString(bytes, charset)));
        buf.release();
    }
    // Matrix of slice out-of-bounds cases: each delegates to
    // testSliceOutOfBounds(initRetainedSlice, finalRetainedSlice, indexOutOfBounds)
    // and expects IndexOutOfBoundsException.
    @Test
    public void testRetainedSliceIndexOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSliceOutOfBounds(true, true, true);
            }
        });
    }
    @Test
    public void testRetainedSliceLengthOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSliceOutOfBounds(true, true, false);
            }
        });
    }
    @Test
    public void testMixedSliceAIndexOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSliceOutOfBounds(true, false, true);
            }
        });
    }
    @Test
    public void testMixedSliceALengthOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSliceOutOfBounds(true, false, false);
            }
        });
    }
    @Test
    public void testMixedSliceBIndexOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSliceOutOfBounds(false, true, true);
            }
        });
    }
    @Test
    public void testMixedSliceBLengthOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSliceOutOfBounds(false, true, false);
            }
        });
    }
    @Test
    public void testSliceIndexOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSliceOutOfBounds(false, false, true);
            }
        });
    }
    @Test
    public void testSliceLengthOutOfBounds() {
        assertThrows(IndexOutOfBoundsException.class, new Executable() {
            @Override
            public void execute() {
                testSliceOutOfBounds(false, false, false);
            }
        });
    }
    /**
     * Verifies content and reference-count behavior of a chain of views:
     * retainedSlice -> retainedDuplicate -> duplicate -> retainedSlice, with the
     * original buffer released mid-chain (simulating a pipeline handler that
     * releases its input and propagates a retained view). At the end all buffers
     * must be deallocated with refCnt 0.
     */
    @Test
    public void testRetainedSliceAndRetainedDuplicateContentIsExpected() {
        ByteBuf buf = newBuffer(8).resetWriterIndex();
        ByteBuf expected1 = newBuffer(6).resetWriterIndex();
        ByteBuf expected2 = newBuffer(5).resetWriterIndex();
        ByteBuf expected3 = newBuffer(4).resetWriterIndex();
        ByteBuf expected4 = newBuffer(3).resetWriterIndex();
        buf.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        expected1.writeBytes(new byte[] {2, 3, 4, 5, 6, 7});
        expected2.writeBytes(new byte[] {3, 4, 5, 6, 7});
        expected3.writeBytes(new byte[] {4, 5, 6, 7});
        expected4.writeBytes(new byte[] {5, 6, 7});
        ByteBuf slice1 = buf.retainedSlice(buf.readerIndex() + 1, 6);
        assertEquals(0, slice1.compareTo(expected1));
        assertEquals(0, slice1.compareTo(buf.slice(buf.readerIndex() + 1, 6)));
        // Simulate a handler that releases the original buffer, and propagates a slice.
        buf.release();
        // Advance the reader index on the slice.
        slice1.readByte();
        ByteBuf dup1 = slice1.retainedDuplicate();
        assertEquals(0, dup1.compareTo(expected2));
        assertEquals(0, dup1.compareTo(slice1.duplicate()));
        // Advance the reader index on dup1.
        dup1.readByte();
        ByteBuf dup2 = dup1.duplicate();
        assertEquals(0, dup2.compareTo(expected3));
        // Advance the reader index on dup2.
        dup2.readByte();
        ByteBuf slice2 = dup2.retainedSlice(dup2.readerIndex(), 3);
        assertEquals(0, slice2.compareTo(expected4));
        assertEquals(0, slice2.compareTo(dup2.slice(dup2.readerIndex(), 3)));
        // Cleanup the expected buffers used for testing.
        assertTrue(expected1.release());
        assertTrue(expected2.release());
        assertTrue(expected3.release());
        assertTrue(expected4.release());
        slice2.release();
        dup2.release();
        assertEquals(slice2.refCnt(), dup2.refCnt());
        assertEquals(dup2.refCnt(), dup1.refCnt());
        // The handler is now done with the original slice
        assertTrue(slice1.release());
        // Reference counting may be shared, or may be independently tracked, but at this point all buffers should
        // be deallocated and have a reference count of 0.
        assertEquals(0, buf.refCnt());
        assertEquals(0, slice1.refCnt());
        assertEquals(0, slice2.refCnt());
        assertEquals(0, dup1.refCnt());
        assertEquals(0, dup2.refCnt());
    }
    /**
     * Mirror of the previous test with the view order inverted:
     * retainedDuplicate -> retainedSlice -> duplicate -> retainedSlice, with the
     * original released mid-chain. At the end all buffers must be deallocated
     * with refCnt 0.
     */
    @Test
    public void testRetainedDuplicateAndRetainedSliceContentIsExpected() {
        ByteBuf buf = newBuffer(8).resetWriterIndex();
        ByteBuf expected1 = newBuffer(6).resetWriterIndex();
        ByteBuf expected2 = newBuffer(5).resetWriterIndex();
        ByteBuf expected3 = newBuffer(4).resetWriterIndex();
        buf.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        expected1.writeBytes(new byte[] {2, 3, 4, 5, 6, 7});
        expected2.writeBytes(new byte[] {3, 4, 5, 6, 7});
        expected3.writeBytes(new byte[] {5, 6, 7});
        ByteBuf dup1 = buf.retainedDuplicate();
        assertEquals(0, dup1.compareTo(buf));
        assertEquals(0, dup1.compareTo(buf.slice()));
        // Simulate a handler that releases the original buffer, and propagates a slice.
        buf.release();
        // Advance the reader index on the dup.
        dup1.readByte();
        ByteBuf slice1 = dup1.retainedSlice(dup1.readerIndex(), 6);
        assertEquals(0, slice1.compareTo(expected1));
        assertEquals(0, slice1.compareTo(slice1.duplicate()));
        // Advance the reader index on slice1.
        slice1.readByte();
        ByteBuf dup2 = slice1.duplicate();
        assertEquals(0, dup2.compareTo(slice1));
        // Advance the reader index on dup2.
        dup2.readByte();
        ByteBuf slice2 = dup2.retainedSlice(dup2.readerIndex() + 1, 3);
        assertEquals(0, slice2.compareTo(expected3));
        assertEquals(0, slice2.compareTo(dup2.slice(dup2.readerIndex() + 1, 3)));
        // Cleanup the expected buffers used for testing.
        assertTrue(expected1.release());
        assertTrue(expected2.release());
        assertTrue(expected3.release());
        slice2.release();
        slice1.release();
        assertEquals(slice2.refCnt(), dup2.refCnt());
        assertEquals(dup2.refCnt(), slice1.refCnt());
        // The handler is now done with the original slice
        assertTrue(dup1.release());
        // Reference counting may be shared, or may be independently tracked, but at this point all buffers should
        // be deallocated and have a reference count of 0.
        assertEquals(0, buf.refCnt());
        assertEquals(0, slice1.refCnt());
        assertEquals(0, slice2.refCnt());
        assertEquals(0, dup1.refCnt());
        assertEquals(0, dup2.refCnt());
    }
    // The following tests fan out the boolean parameter combinations of the
    // private helpers below (retained vs. non-retained slices/duplicates).
    @Test
    public void testRetainedSliceContents() {
        testSliceContents(true);
    }
    @Test
    public void testMultipleLevelRetainedSlice1() {
        testMultipleLevelRetainedSliceWithNonRetained(true, true);
    }
    @Test
    public void testMultipleLevelRetainedSlice2() {
        testMultipleLevelRetainedSliceWithNonRetained(true, false);
    }
    @Test
    public void testMultipleLevelRetainedSlice3() {
        testMultipleLevelRetainedSliceWithNonRetained(false, true);
    }
    @Test
    public void testMultipleLevelRetainedSlice4() {
        testMultipleLevelRetainedSliceWithNonRetained(false, false);
    }
    @Test
    public void testRetainedSliceReleaseOriginal1() {
        testSliceReleaseOriginal(true, true);
    }
    @Test
    public void testRetainedSliceReleaseOriginal2() {
        testSliceReleaseOriginal(true, false);
    }
    @Test
    public void testRetainedSliceReleaseOriginal3() {
        testSliceReleaseOriginal(false, true);
    }
    @Test
    public void testRetainedSliceReleaseOriginal4() {
        testSliceReleaseOriginal(false, false);
    }
    @Test
    public void testRetainedDuplicateReleaseOriginal1() {
        testDuplicateReleaseOriginal(true, true);
    }
    @Test
    public void testRetainedDuplicateReleaseOriginal2() {
        testDuplicateReleaseOriginal(true, false);
    }
    @Test
    public void testRetainedDuplicateReleaseOriginal3() {
        testDuplicateReleaseOriginal(false, true);
    }
    @Test
    public void testRetainedDuplicateReleaseOriginal4() {
        testDuplicateReleaseOriginal(false, false);
    }
    @Test
    public void testMultipleRetainedSliceReleaseOriginal1() {
        testMultipleRetainedSliceReleaseOriginal(true, true);
    }
    @Test
    public void testMultipleRetainedSliceReleaseOriginal2() {
        testMultipleRetainedSliceReleaseOriginal(true, false);
    }
    @Test
    public void testMultipleRetainedSliceReleaseOriginal3() {
        testMultipleRetainedSliceReleaseOriginal(false, true);
    }
    @Test
    public void testMultipleRetainedSliceReleaseOriginal4() {
        testMultipleRetainedSliceReleaseOriginal(false, false);
    }
    @Test
    public void testMultipleRetainedDuplicateReleaseOriginal1() {
        testMultipleRetainedDuplicateReleaseOriginal(true, true);
    }
    @Test
    public void testMultipleRetainedDuplicateReleaseOriginal2() {
        testMultipleRetainedDuplicateReleaseOriginal(true, false);
    }
    @Test
    public void testMultipleRetainedDuplicateReleaseOriginal3() {
        testMultipleRetainedDuplicateReleaseOriginal(false, true);
    }
    @Test
    public void testMultipleRetainedDuplicateReleaseOriginal4() {
        testMultipleRetainedDuplicateReleaseOriginal(false, false);
    }
    @Test
    public void testSliceContents() {
        testSliceContents(false);
    }
    @Test
    public void testRetainedDuplicateContents() {
        testDuplicateContents(true);
    }
    @Test
    public void testDuplicateContents() {
        testDuplicateContents(false);
    }
    @Test
    public void testDuplicateCapacityChange() {
        testDuplicateCapacityChange(false);
    }
    @Test
    public void testRetainedDuplicateCapacityChange() {
        testDuplicateCapacityChange(true);
    }
    // Slices have a fixed capacity; attempting to change it must throw.
    @Test
    public void testSliceCapacityChange() {
        assertThrows(UnsupportedOperationException.class, new Executable() {
            @Override
            public void execute() {
                testSliceCapacityChange(false);
            }
        });
    }
    @Test
    public void testRetainedSliceCapacityChange() {
        assertThrows(UnsupportedOperationException.class, new Executable() {
            @Override
            public void execute() {
                testSliceCapacityChange(true);
            }
        });
    }
    // Unreleasable-wrapper combinations (see testRetainedSliceUnreleasable et al. below).
    @Test
    public void testRetainedSliceUnreleasable1() {
        testRetainedSliceUnreleasable(true, true);
    }
    @Test
    public void testRetainedSliceUnreleasable2() {
        testRetainedSliceUnreleasable(true, false);
    }
    @Test
    public void testRetainedSliceUnreleasable3() {
        testRetainedSliceUnreleasable(false, true);
    }
    @Test
    public void testRetainedSliceUnreleasable4() {
        testRetainedSliceUnreleasable(false, false);
    }
    @Test
    public void testReadRetainedSliceUnreleasable1() {
        testReadRetainedSliceUnreleasable(true, true);
    }
    @Test
    public void testReadRetainedSliceUnreleasable2() {
        testReadRetainedSliceUnreleasable(true, false);
    }
    @Test
    public void testReadRetainedSliceUnreleasable3() {
        testReadRetainedSliceUnreleasable(false, true);
    }
    @Test
    public void testReadRetainedSliceUnreleasable4() {
        testReadRetainedSliceUnreleasable(false, false);
    }
    @Test
    public void testRetainedDuplicateUnreleasable1() {
        testRetainedDuplicateUnreleasable(true, true);
    }
    @Test
    public void testRetainedDuplicateUnreleasable2() {
        testRetainedDuplicateUnreleasable(true, false);
    }
    @Test
    public void testRetainedDuplicateUnreleasable3() {
        testRetainedDuplicateUnreleasable(false, true);
    }
    @Test
    public void testRetainedDuplicateUnreleasable4() {
        testRetainedDuplicateUnreleasable(false, false);
    }
    /**
     * Views created from an unreleasableBuffer wrapper must themselves refuse to
     * release (release() returns false), while the underlying buffers can still
     * be released normally afterwards.
     */
    private void testRetainedSliceUnreleasable(boolean initRetainedSlice, boolean finalRetainedSlice) {
        ByteBuf buf = newBuffer(8);
        ByteBuf buf1 = initRetainedSlice ? buf.retainedSlice() : buf.slice().retain();
        ByteBuf buf2 = unreleasableBuffer(buf1);
        ByteBuf buf3 = finalRetainedSlice ? buf2.retainedSlice() : buf2.slice().retain();
        assertFalse(buf3.release());
        assertFalse(buf2.release());
        buf1.release();
        assertTrue(buf.release());
        assertEquals(0, buf1.refCnt());
        assertEquals(0, buf.refCnt());
    }
    /**
     * Same as testRetainedSliceUnreleasable but the final view is created via
     * readRetainedSlice/readSlice, consuming all readable bytes.
     */
    private void testReadRetainedSliceUnreleasable(boolean initRetainedSlice, boolean finalRetainedSlice) {
        ByteBuf buf = newBuffer(8);
        ByteBuf buf1 = initRetainedSlice ? buf.retainedSlice() : buf.slice().retain();
        ByteBuf buf2 = unreleasableBuffer(buf1);
        ByteBuf buf3 = finalRetainedSlice ? buf2.readRetainedSlice(buf2.readableBytes())
                                          : buf2.readSlice(buf2.readableBytes()).retain();
        assertFalse(buf3.release());
        assertFalse(buf2.release());
        buf1.release();
        assertTrue(buf.release());
        assertEquals(0, buf1.refCnt());
        assertEquals(0, buf.refCnt());
    }
    /**
     * Same as testRetainedSliceUnreleasable but using duplicate views instead of
     * slices.
     */
    private void testRetainedDuplicateUnreleasable(boolean initRetainedDuplicate, boolean finalRetainedDuplicate) {
        ByteBuf buf = newBuffer(8);
        ByteBuf buf1 = initRetainedDuplicate ? buf.retainedDuplicate() : buf.duplicate().retain();
        ByteBuf buf2 = unreleasableBuffer(buf1);
        ByteBuf buf3 = finalRetainedDuplicate ? buf2.retainedDuplicate() : buf2.duplicate().retain();
        assertFalse(buf3.release());
        assertFalse(buf2.release());
        buf1.release();
        assertTrue(buf.release());
        assertEquals(0, buf1.refCnt());
        assertEquals(0, buf.refCnt());
    }
    /**
     * A duplicate shares the parent's storage: changing the duplicate's capacity
     * (grow and shrink) must be reflected in the parent so both stay equal.
     */
    private void testDuplicateCapacityChange(boolean retainedDuplicate) {
        ByteBuf buf = newBuffer(8);
        ByteBuf dup = retainedDuplicate ? buf.retainedDuplicate() : buf.duplicate();
        try {
            dup.capacity(10);
            assertEquals(buf.capacity(), dup.capacity());
            dup.capacity(5);
            assertEquals(buf.capacity(), dup.capacity());
        } finally {
            // Only retained duplicates hold their own reference that needs releasing.
            if (retainedDuplicate) {
                dup.release();
            }
            buf.release();
        }
    }
    /**
     * A slice has fixed capacity: capacity(10) is expected to throw
     * UnsupportedOperationException (asserted by the callers via assertThrows).
     */
    private void testSliceCapacityChange(boolean retainedSlice) {
        ByteBuf buf = newBuffer(8);
        ByteBuf slice = retainedSlice ? buf.retainedSlice(buf.readerIndex() + 1, 3)
                                      : buf.slice(buf.readerIndex() + 1, 3);
        try {
            slice.capacity(10);
        } finally {
            if (retainedSlice) {
                slice.release();
            }
            buf.release();
        }
    }
    /**
     * Creates a 2-byte slice and then requests a sub-slice that is out of range:
     * either the start index (3) or the length (3) exceeds the slice's bounds.
     * Expected to throw IndexOutOfBoundsException (asserted by the callers).
     *
     * @param initRetainedSlice  create the first slice via retainedSlice
     * @param finalRetainedSlice create the failing sub-slice via retainedSlice
     * @param indexOutOfBounds   true = bad index, false = bad length
     */
    private void testSliceOutOfBounds(boolean initRetainedSlice, boolean finalRetainedSlice, boolean indexOutOfBounds) {
        ByteBuf buf = newBuffer(8);
        ByteBuf slice = initRetainedSlice ? buf.retainedSlice(buf.readerIndex() + 1, 2)
                                          : buf.slice(buf.readerIndex() + 1, 2);
        try {
            assertEquals(2, slice.capacity());
            assertEquals(2, slice.maxCapacity());
            final int index = indexOutOfBounds ? 3 : 0;
            final int length = indexOutOfBounds ? 0 : 3;
            if (finalRetainedSlice) {
                // This is expected to fail ... so no need to release.
                slice.retainedSlice(index, length);
            } else {
                slice.slice(index, length);
            }
        } finally {
            if (initRetainedSlice) {
                slice.release();
            }
            buf.release();
        }
    }
    /**
     * A 3-byte slice at offset 3 must expose bytes {4, 5, 6}, and views derived
     * from the slice (duplicate, retainedDuplicate, sub-slice) must compare equal
     * to the slice itself.
     */
    private void testSliceContents(boolean retainedSlice) {
        ByteBuf buf = newBuffer(8).resetWriterIndex();
        ByteBuf expected = newBuffer(3).resetWriterIndex();
        buf.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        expected.writeBytes(new byte[] {4, 5, 6});
        ByteBuf slice = retainedSlice ? buf.retainedSlice(buf.readerIndex() + 3, 3)
                                      : buf.slice(buf.readerIndex() + 3, 3);
        try {
            assertEquals(0, slice.compareTo(expected));
            assertEquals(0, slice.compareTo(slice.duplicate()));
            ByteBuf b = slice.retainedDuplicate();
            assertEquals(0, slice.compareTo(b));
            b.release();
            assertEquals(0, slice.compareTo(slice.slice(0, slice.capacity())));
        } finally {
            if (retainedSlice) {
                slice.release();
            }
            buf.release();
            expected.release();
        }
    }
    /**
     * A retained slice (or slice().retain()) must keep its content valid after
     * the original buffer is released, and a second-level slice of that slice
     * must also stay valid; releasing the chain ends with all refCnts at 0.
     */
    private void testSliceReleaseOriginal(boolean retainedSlice1, boolean retainedSlice2) {
        ByteBuf buf = newBuffer(8).resetWriterIndex();
        ByteBuf expected1 = newBuffer(3).resetWriterIndex();
        ByteBuf expected2 = newBuffer(2).resetWriterIndex();
        buf.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        expected1.writeBytes(new byte[] {6, 7, 8});
        expected2.writeBytes(new byte[] {7, 8});
        ByteBuf slice1 = retainedSlice1 ? buf.retainedSlice(buf.readerIndex() + 5, 3)
                                        : buf.slice(buf.readerIndex() + 5, 3).retain();
        assertEquals(0, slice1.compareTo(expected1));
        // Simulate a handler that releases the original buffer, and propagates a slice.
        buf.release();
        ByteBuf slice2 = retainedSlice2 ? slice1.retainedSlice(slice1.readerIndex() + 1, 2)
                                        : slice1.slice(slice1.readerIndex() + 1, 2).retain();
        assertEquals(0, slice2.compareTo(expected2));
        // Cleanup the expected buffers used for testing.
        assertTrue(expected1.release());
        assertTrue(expected2.release());
        // The handler created a slice of the slice and is now done with it.
        slice2.release();
        // The handler is now done with the original slice
        assertTrue(slice1.release());
        // Reference counting may be shared, or may be independently tracked, but at this point all buffers should
        // be deallocated and have a reference count of 0.
        assertEquals(0, buf.refCnt());
        assertEquals(0, slice1.refCnt());
        assertEquals(0, slice2.refCnt());
    }
    /**
     * Builds a multi-level chain of retained and non-retained views
     * (retainedSlice -> retainedSlice -> slice/duplicate -> slice/duplicate),
     * releasing the original buffer early, and verifies content at each level plus
     * final deallocation of the whole chain.
     *
     * @param doSlice1 third level is a slice (true) or duplicate (false)
     * @param doSlice2 fourth level is a slice (true) or duplicate (false)
     */
    private void testMultipleLevelRetainedSliceWithNonRetained(boolean doSlice1, boolean doSlice2) {
        ByteBuf buf = newBuffer(8).resetWriterIndex();
        ByteBuf expected1 = newBuffer(6).resetWriterIndex();
        ByteBuf expected2 = newBuffer(4).resetWriterIndex();
        ByteBuf expected3 = newBuffer(2).resetWriterIndex();
        ByteBuf expected4SliceSlice = newBuffer(1).resetWriterIndex();
        ByteBuf expected4DupSlice = newBuffer(1).resetWriterIndex();
        buf.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        expected1.writeBytes(new byte[] {2, 3, 4, 5, 6, 7});
        expected2.writeBytes(new byte[] {3, 4, 5, 6});
        expected3.writeBytes(new byte[] {4, 5});
        expected4SliceSlice.writeBytes(new byte[] {5});
        expected4DupSlice.writeBytes(new byte[] {4});
        ByteBuf slice1 = buf.retainedSlice(buf.readerIndex() + 1, 6);
        assertEquals(0, slice1.compareTo(expected1));
        // Simulate a handler that releases the original buffer, and propagates a slice.
        buf.release();
        ByteBuf slice2 = slice1.retainedSlice(slice1.readerIndex() + 1, 4);
        assertEquals(0, slice2.compareTo(expected2));
        assertEquals(0, slice2.compareTo(slice2.duplicate()));
        assertEquals(0, slice2.compareTo(slice2.slice()));
        ByteBuf tmpBuf = slice2.retainedDuplicate();
        assertEquals(0, slice2.compareTo(tmpBuf));
        tmpBuf.release();
        tmpBuf = slice2.retainedSlice();
        assertEquals(0, slice2.compareTo(tmpBuf));
        tmpBuf.release();
        ByteBuf slice3 = doSlice1 ? slice2.slice(slice2.readerIndex() + 1, 2) : slice2.duplicate();
        if (doSlice1) {
            assertEquals(0, slice3.compareTo(expected3));
        } else {
            assertEquals(0, slice3.compareTo(expected2));
        }
        ByteBuf slice4 = doSlice2 ? slice3.slice(slice3.readerIndex() + 1, 1) : slice3.duplicate();
        if (doSlice1 && doSlice2) {
            assertEquals(0, slice4.compareTo(expected4SliceSlice));
        } else if (doSlice2) {
            assertEquals(0, slice4.compareTo(expected4DupSlice));
        } else {
            assertEquals(0, slice3.compareTo(slice4));
        }
        // Cleanup the expected buffers used for testing.
        assertTrue(expected1.release());
        assertTrue(expected2.release());
        assertTrue(expected3.release());
        assertTrue(expected4SliceSlice.release());
        assertTrue(expected4DupSlice.release());
        // Slice 4, 3, and 2 should effectively "share" a reference count.
        slice4.release();
        assertEquals(slice3.refCnt(), slice2.refCnt());
        assertEquals(slice3.refCnt(), slice4.refCnt());
        // Slice 1 should also release the original underlying buffer without throwing exceptions
        assertTrue(slice1.release());
        // Reference counting may be shared, or may be independently tracked, but at this point all buffers should
        // be deallocated and have a reference count of 0.
        assertEquals(0, buf.refCnt());
        assertEquals(0, slice1.refCnt());
        assertEquals(0, slice2.refCnt());
        assertEquals(0, slice3.refCnt());
    }
    /**
     * A retained duplicate (or duplicate().retain()) must keep its content valid
     * after the original buffer is released; a second-level duplicate likewise.
     * Releasing the chain ends with all refCnts at 0.
     */
    private void testDuplicateReleaseOriginal(boolean retainedDuplicate1, boolean retainedDuplicate2) {
        ByteBuf buf = newBuffer(8).resetWriterIndex();
        ByteBuf expected = newBuffer(8).resetWriterIndex();
        buf.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        expected.writeBytes(buf, buf.readerIndex(), buf.readableBytes());
        ByteBuf dup1 = retainedDuplicate1 ? buf.retainedDuplicate()
                                          : buf.duplicate().retain();
        assertEquals(0, dup1.compareTo(expected));
        // Simulate a handler that releases the original buffer, and propagates a slice.
        buf.release();
        ByteBuf dup2 = retainedDuplicate2 ? dup1.retainedDuplicate()
                                          : dup1.duplicate().retain();
        assertEquals(0, dup2.compareTo(expected));
        // Cleanup the expected buffers used for testing.
        assertTrue(expected.release());
        // The handler created a slice of the slice and is now done with it.
        dup2.release();
        // The handler is now done with the original slice
        assertTrue(dup1.release());
        // Reference counting may be shared, or may be independently tracked, but at this point all buffers should
        // be deallocated and have a reference count of 0.
        assertEquals(0, buf.refCnt());
        assertEquals(0, dup1.refCnt());
        assertEquals(0, dup2.refCnt());
    }
    /**
     * After the original is released, multiple sibling slices may be created from
     * and released against the surviving retained slice without corrupting the
     * reference count; everything ends deallocated with refCnt 0.
     */
    private void testMultipleRetainedSliceReleaseOriginal(boolean retainedSlice1, boolean retainedSlice2) {
        ByteBuf buf = newBuffer(8).resetWriterIndex();
        ByteBuf expected1 = newBuffer(3).resetWriterIndex();
        ByteBuf expected2 = newBuffer(2).resetWriterIndex();
        ByteBuf expected3 = newBuffer(2).resetWriterIndex();
        buf.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        expected1.writeBytes(new byte[] {6, 7, 8});
        expected2.writeBytes(new byte[] {7, 8});
        expected3.writeBytes(new byte[] {6, 7});
        ByteBuf slice1 = retainedSlice1 ? buf.retainedSlice(buf.readerIndex() + 5, 3)
                                        : buf.slice(buf.readerIndex() + 5, 3).retain();
        assertEquals(0, slice1.compareTo(expected1));
        // Simulate a handler that releases the original buffer, and propagates a slice.
        buf.release();
        ByteBuf slice2 = retainedSlice2 ? slice1.retainedSlice(slice1.readerIndex() + 1, 2)
                                        : slice1.slice(slice1.readerIndex() + 1, 2).retain();
        assertEquals(0, slice2.compareTo(expected2));
        // The handler created a slice of the slice and is now done with it.
        slice2.release();
        ByteBuf slice3 = slice1.retainedSlice(slice1.readerIndex(), 2);
        assertEquals(0, slice3.compareTo(expected3));
        // The handler created another slice of the slice and is now done with it.
        slice3.release();
        // The handler is now done with the original slice
        assertTrue(slice1.release());
        // Cleanup the expected buffers used for testing.
        assertTrue(expected1.release());
        assertTrue(expected2.release());
        assertTrue(expected3.release());
        // Reference counting may be shared, or may be independently tracked, but at this point all buffers should
        // be deallocated and have a reference count of 0.
        assertEquals(0, buf.refCnt());
        assertEquals(0, slice1.refCnt());
        assertEquals(0, slice2.refCnt());
        assertEquals(0, slice3.refCnt());
    }
    /**
     * Mirror of testMultipleRetainedSliceReleaseOriginal for duplicates: multiple
     * retained duplicates can be created from and released against the surviving
     * retained duplicate after the original is released; everything ends
     * deallocated with refCnt 0.
     */
    private void testMultipleRetainedDuplicateReleaseOriginal(boolean retainedDuplicate1, boolean retainedDuplicate2) {
        ByteBuf buf = newBuffer(8).resetWriterIndex();
        ByteBuf expected = newBuffer(8).resetWriterIndex();
        buf.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        expected.writeBytes(buf, buf.readerIndex(), buf.readableBytes());
        ByteBuf dup1 = retainedDuplicate1 ? buf.retainedDuplicate()
                                          : buf.duplicate().retain();
        assertEquals(0, dup1.compareTo(expected));
        // Simulate a handler that releases the original buffer, and propagates a slice.
        buf.release();
        ByteBuf dup2 = retainedDuplicate2 ? dup1.retainedDuplicate()
                                          : dup1.duplicate().retain();
        assertEquals(0, dup2.compareTo(expected));
        assertEquals(0, dup2.compareTo(dup2.duplicate()));
        assertEquals(0, dup2.compareTo(dup2.slice()));
        ByteBuf tmpBuf = dup2.retainedDuplicate();
        assertEquals(0, dup2.compareTo(tmpBuf));
        tmpBuf.release();
        tmpBuf = dup2.retainedSlice();
        assertEquals(0, dup2.compareTo(tmpBuf));
        tmpBuf.release();
        // The handler created a slice of the slice and is now done with it.
        dup2.release();
        ByteBuf dup3 = dup1.retainedDuplicate();
        assertEquals(0, dup3.compareTo(expected));
        // The handler created another slice of the slice and is now done with it.
        dup3.release();
        // The handler is now done with the original slice
        assertTrue(dup1.release());
        // Cleanup the expected buffers used for testing.
        assertTrue(expected.release());
        // Reference counting may be shared, or may be independently tracked, but at this point all buffers should
        // be deallocated and have a reference count of 0.
        assertEquals(0, buf.refCnt());
        assertEquals(0, dup1.refCnt());
        assertEquals(0, dup2.refCnt());
        assertEquals(0, dup3.refCnt());
    }
    /**
     * A duplicate must compare equal to the parent and to every view derived from
     * itself (duplicate, retainedDuplicate, slice over the readable region).
     */
    private void testDuplicateContents(boolean retainedDuplicate) {
        ByteBuf buf = newBuffer(8).resetWriterIndex();
        buf.writeBytes(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        ByteBuf dup = retainedDuplicate ? buf.retainedDuplicate() : buf.duplicate();
        try {
            assertEquals(0, dup.compareTo(buf));
            assertEquals(0, dup.compareTo(dup.duplicate()));
            ByteBuf b = dup.retainedDuplicate();
            assertEquals(0, dup.compareTo(b));
            b.release();
            assertEquals(0, dup.compareTo(dup.slice(dup.readerIndex(), dup.readableBytes())));
        } finally {
            // Only retained duplicates hold their own reference that needs releasing.
            if (retainedDuplicate) {
                dup.release();
            }
            buf.release();
        }
    }
    // A plain duplicate shares the parent's reference count: releasing the
    // duplicate releases the parent.
    @Test
    public void testDuplicateRelease() {
        ByteBuf buf = newBuffer(8);
        assertEquals(1, buf.refCnt());
        assertTrue(buf.duplicate().release());
        assertEquals(0, buf.refCnt());
    }
    // asReadOnly() shares the parent's reference count as well.
    @Test
    public void testReadOnlyRelease() {
        ByteBuf buf = newBuffer(8);
        assertEquals(1, buf.refCnt());
        assertTrue(buf.asReadOnly().release());
        assertEquals(0, buf.refCnt());
    }
    // Test-case trying to reproduce:
    // https://github.com/netty/netty/issues/2843
    @Test
    public void testRefCnt() throws Exception {
        testRefCnt0(false);
    }
    // Test-case trying to reproduce:
    // https://github.com/netty/netty/issues/2843
    @Test
    public void testRefCnt2() throws Exception {
        testRefCnt0(true);
    }
    // An empty (cleared) buffer must still expose exactly one NIO buffer view,
    // with nothing remaining in it.
    @Test
    public void testEmptyNioBuffers() throws Exception {
        ByteBuf buffer = newBuffer(8);
        buffer.clear();
        assertFalse(buffer.isReadable());
        ByteBuffer[] nioBuffers = buffer.nioBuffers();
        assertEquals(1, nioBuffers.length);
        assertFalse(nioBuffers[0].hasRemaining());
        buffer.release();
    }
    // getBytes into a read-only NIO destination must throw and leave the
    // destination untouched, for both direct and heap destinations.
    @Test
    public void testGetReadOnlyDirectDst() {
        testGetReadOnlyDst(true);
    }
    @Test
    public void testGetReadOnlyHeapDst() {
        testGetReadOnlyDst(false);
    }
    /**
     * Writes known bytes into the buffer, then attempts getBytes into a read-only
     * view of a destination ByteBuffer. Expects ReadOnlyBufferException and
     * verifies the destination's position did not advance.
     */
    private void testGetReadOnlyDst(boolean direct) {
        byte[] bytes = { 'a', 'b', 'c', 'd' };
        ByteBuf buffer = newBuffer(bytes.length);
        buffer.writeBytes(bytes);
        ByteBuffer dst = direct ? ByteBuffer.allocateDirect(bytes.length) : ByteBuffer.allocate(bytes.length);
        ByteBuffer readOnlyDst = dst.asReadOnlyBuffer();
        try {
            buffer.getBytes(0, readOnlyDst);
            fail();
        } catch (ReadOnlyBufferException e) {
            // expected
        }
        assertEquals(0, readOnlyDst.position());
        buffer.release();
    }
    /**
     * readBytes(FileChannel, position, length) must write the buffer's readable
     * bytes at the given absolute channel offset and advance the reader index;
     * writeBytes(FileChannel, position, length) must read from that offset and
     * advance the writer index. Neither call may move the channel's own position.
     */
    @Test
    public void testReadBytesAndWriteBytesWithFileChannel() throws IOException {
        File file = PlatformDependent.createTempFile("file-channel", ".tmp", null);
        RandomAccessFile randomAccessFile = null;
        try {
            randomAccessFile = new RandomAccessFile(file, "rw");
            FileChannel channel = randomAccessFile.getChannel();
            // channelPosition should never be changed
            long channelPosition = channel.position();
            byte[] bytes = {'a', 'b', 'c', 'd'};
            int len = bytes.length;
            ByteBuf buffer = newBuffer(len);
            buffer.resetReaderIndex();
            buffer.resetWriterIndex();
            buffer.writeBytes(bytes);
            int oldReaderIndex = buffer.readerIndex();
            // Drain the buffer into the file at absolute offset 10.
            assertEquals(len, buffer.readBytes(channel, 10, len));
            assertEquals(oldReaderIndex + len, buffer.readerIndex());
            assertEquals(channelPosition, channel.position());
            // Read the same file region back into a fresh buffer and verify content.
            ByteBuf buffer2 = newBuffer(len);
            buffer2.resetReaderIndex();
            buffer2.resetWriterIndex();
            int oldWriterIndex = buffer2.writerIndex();
            assertEquals(len, buffer2.writeBytes(channel, 10, len));
            assertEquals(channelPosition, channel.position());
            assertEquals(oldWriterIndex + len, buffer2.writerIndex());
            assertEquals('a', buffer2.getByte(0));
            assertEquals('b', buffer2.getByte(1));
            assertEquals('c', buffer2.getByte(2));
            assertEquals('d', buffer2.getByte(3));
            buffer.release();
            buffer2.release();
        } finally {
            if (randomAccessFile != null) {
                randomAccessFile.close();
            }
            file.delete();
        }
    }
@Test
public void testGetBytesAndSetBytesWithFileChannel() throws IOException {
File file = PlatformDependent.createTempFile("file-channel", ".tmp", null);
RandomAccessFile randomAccessFile = null;
try {
randomAccessFile = new RandomAccessFile(file, "rw");
FileChannel channel = randomAccessFile.getChannel();
// channelPosition should never be changed
long channelPosition = channel.position();
byte[] bytes = {'a', 'b', 'c', 'd'};
int len = bytes.length;
ByteBuf buffer = newBuffer(len);
buffer.resetReaderIndex();
buffer.resetWriterIndex();
buffer.writeBytes(bytes);
int oldReaderIndex = buffer.readerIndex();
assertEquals(len, buffer.getBytes(oldReaderIndex, channel, 10, len));
assertEquals(oldReaderIndex, buffer.readerIndex());
assertEquals(channelPosition, channel.position());
ByteBuf buffer2 = newBuffer(len);
buffer2.resetReaderIndex();
buffer2.resetWriterIndex();
int oldWriterIndex = buffer2.writerIndex();
assertEquals(buffer2.setBytes(oldWriterIndex, channel, 10, len), len);
assertEquals(channelPosition, channel.position());
assertEquals(oldWriterIndex, buffer2.writerIndex());
assertEquals('a', buffer2.getByte(oldWriterIndex));
assertEquals('b', buffer2.getByte(oldWriterIndex + 1));
assertEquals('c', buffer2.getByte(oldWriterIndex + 2));
assertEquals('d', buffer2.getByte(oldWriterIndex + 3));
buffer.release();
buffer2.release();
} finally {
if (randomAccessFile != null) {
randomAccessFile.close();
}
file.delete();
}
}
@Test
public void testReadBytes() {
ByteBuf buffer = newBuffer(8);
byte[] bytes = new byte[8];
buffer.writeBytes(bytes);
ByteBuf buffer2 = buffer.readBytes(4);
assertSame(buffer.alloc(), buffer2.alloc());
assertEquals(4, buffer.readerIndex());
assertTrue(buffer.release());
assertEquals(0, buffer.refCnt());
assertTrue(buffer2.release());
assertEquals(0, buffer2.refCnt());
}
@Test
public void testForEachByteDesc2() {
byte[] expected = {1, 2, 3, 4};
ByteBuf buf = newBuffer(expected.length);
try {
buf.writeBytes(expected);
final byte[] bytes = new byte[expected.length];
int i = buf.forEachByteDesc(new ByteProcessor() {
private int index = bytes.length - 1;
@Override
public boolean process(byte value) throws Exception {
bytes[index--] = value;
return true;
}
});
assertEquals(-1, i);
assertArrayEquals(expected, bytes);
} finally {
buf.release();
}
}
@Test
public void testForEachByte2() {
byte[] expected = {1, 2, 3, 4};
ByteBuf buf = newBuffer(expected.length);
try {
buf.writeBytes(expected);
final byte[] bytes = new byte[expected.length];
int i = buf.forEachByte(new ByteProcessor() {
private int index;
@Override
public boolean process(byte value) throws Exception {
bytes[index++] = value;
return true;
}
});
assertEquals(-1, i);
assertArrayEquals(expected, bytes);
} finally {
buf.release();
}
}
@Test
public void testGetBytesByteBuffer() {
byte[] bytes = {'a', 'b', 'c', 'd', 'e', 'f', 'g'};
// Ensure destination buffer is bigger then what is in the ByteBuf.
final ByteBuffer nioBuffer = ByteBuffer.allocate(bytes.length + 1);
final ByteBuf buffer = newBuffer(bytes.length);
try {
buffer.writeBytes(bytes);
assertThrows(IndexOutOfBoundsException.class, new Executable() {
@Override
public void execute() {
buffer.getBytes(buffer.readerIndex(), nioBuffer);
}
});
} finally {
buffer.release();
}
}
private void testRefCnt0(final boolean parameter) throws Exception {
for (int i = 0; i < 10; i++) {
final CountDownLatch latch = new CountDownLatch(1);
final CountDownLatch innerLatch = new CountDownLatch(1);
final ByteBuf buffer = newBuffer(4);
assertEquals(1, buffer.refCnt());
final AtomicInteger cnt = new AtomicInteger(Integer.MAX_VALUE);
Thread t1 = new Thread(new Runnable() {
@Override
public void run() {
boolean released;
if (parameter) {
released = buffer.release(buffer.refCnt());
} else {
released = buffer.release();
}
assertTrue(released);
Thread t2 = new Thread(new Runnable() {
@Override
public void run() {
cnt.set(buffer.refCnt());
latch.countDown();
}
});
t2.start();
try {
// Keep Thread alive a bit so the ThreadLocal caches are not freed
innerLatch.await();
} catch (InterruptedException ignore) {
// ignore
}
}
});
t1.start();
latch.await();
assertEquals(0, cnt.get());
innerLatch.countDown();
}
}
static final | CharSequenceType |
java | elastic__elasticsearch | x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/TermsQueryTests.java | {
"start": 900,
"end": 3509
} | class ____ extends AbstractBuilderTestCase {
protected Collection<Class<? extends Plugin>> getPlugins() {
return List.of(Wildcard.class);
}
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
mapperService.merge("_doc", new CompressedXContent(org.elasticsearch.common.Strings.format("""
{
"properties": {
"mapped_wildcard": {
"type": "wildcard"
}
}
}""")), MapperService.MergeReason.MAPPING_UPDATE);
}
public void testSingleDuplicateTerms() throws IOException {
String[] duplicates = new String[1023];
Arrays.fill(duplicates, "duplicate");
QueryBuilder termsQueryBuilder = new TermsQueryBuilder("mapped_wildcard", duplicates);
termsQueryBuilder = termsQueryBuilder.rewrite(createQueryRewriteContext());
Query actual = termsQueryBuilder.toQuery(createSearchExecutionContext());
QueryBuilder queryBuilder = new TermsQueryBuilder("mapped_wildcard", "duplicate");
queryBuilder = queryBuilder.rewrite(createQueryRewriteContext());
Query expected = queryBuilder.toQuery(createSearchExecutionContext());
assertEquals(expected, actual);
}
public void testMultiDuplicateTerms() throws IOException {
int numTerms = randomIntBetween(2, 10);
List<String> randomTerms = new ArrayList<>(numTerms);
for (int i = 0; i < numTerms; ++i) {
randomTerms.add(randomAlphaOfLengthBetween(1, 1024));
}
int totalTerms = randomIntBetween(numTerms * 5, 1023);
String[] duplicates = new String[totalTerms];
for (int i = 0; i < numTerms; ++i) {
duplicates[i] = randomTerms.get(i);
}
for (int i = numTerms; i < totalTerms; ++i) {
duplicates[i] = randomTerms.get(randomIntBetween(0, numTerms - 1));
}
QueryBuilder termsQueryBuilder = new TermsQueryBuilder("mapped_wildcard", duplicates);
termsQueryBuilder = termsQueryBuilder.rewrite(createQueryRewriteContext());
Query actual = termsQueryBuilder.toQuery(createSearchExecutionContext());
Set<String> ordered = new HashSet<>(randomTerms);
QueryBuilder queryBuilder = new TermsQueryBuilder("mapped_wildcard", ordered.toArray(new String[0]));
queryBuilder = queryBuilder.rewrite(createQueryRewriteContext());
Query expected = queryBuilder.toQuery(createSearchExecutionContext());
assertEquals(expected, actual);
}
}
| TermsQueryTests |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/TaskState.java | {
"start": 1488,
"end": 1630
} | class ____ groups all non-partitioned state and key-group state belonging to the
* same job vertex together.
*
* @deprecated Internal | basically |
java | apache__rocketmq | proxy/src/main/java/org/apache/rocketmq/proxy/config/ConfigFile.java | {
"start": 853,
"end": 900
} | interface ____ {
void initData();
}
| ConfigFile |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/argumentselectiondefects/ParameterTest.java | {
"start": 9367,
"end": 9843
} | class ____ {",
" abstract void target(Object o);",
" void test() {",
" // BUG: Diagnostic contains: " + Parameter.NAME_NULL,
" target(null);",
" }",
"}")
.doTest();
}
@Test
public void getName_returnsUnknown_withTerneryIf() {
CompilationTestHelper.newInstance(PrintNameOfFirstArgument.class, getClass())
.addSourceLines(
"Test.java",
"abstract | Test |
java | apache__flink | flink-kubernetes/src/main/java/org/apache/flink/kubernetes/kubeclient/resources/KubernetesWatch.java | {
"start": 954,
"end": 1157
} | class ____ extends KubernetesResource<Watch> {
public KubernetesWatch(Watch watch) {
super(watch);
}
public void close() {
getInternalResource().close();
}
}
| KubernetesWatch |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/AbstractFutureDefaultAtomicHelperTest.java | {
"start": 1188,
"end": 1813
} | class ____ extends TestCase {
public void testUsingExpectedAtomicHelper() throws Exception {
if (isAndroid()) {
assertThat(AbstractFutureState.atomicHelperTypeForTest()).isEqualTo("UnsafeAtomicHelper");
} else {
assertThat(AbstractFutureState.atomicHelperTypeForTest())
.isEqualTo("AtomicReferenceFieldUpdaterAtomicHelper");
}
}
private static boolean isJava8() {
return JAVA_SPECIFICATION_VERSION.value().equals("1.8");
}
private static boolean isAndroid() {
return System.getProperty("java.runtime.name", "").contains("Android");
}
}
| AbstractFutureDefaultAtomicHelperTest |
java | apache__camel | dsl/camel-cli-connector/src/main/java/org/apache/camel/cli/connector/LoggerHelper.java | {
"start": 1159,
"end": 3617
} | class ____ {
// log4j support
private static final String LOG4J_MBEAN = "org.apache.logging.log4j2";
private LoggerHelper() {
}
/**
* Change logging level in the logging system.
*
* Currently, only log4j is supported.
*
* @param logger the logger name, null is assumed to be root
* @param level the new logging level
*/
public static void changeLoggingLevel(String logger, String level) {
if (logger == null || logger.isEmpty()) {
logger = "root";
}
try {
MBeanServer ms = ManagementFactory.getPlatformMBeanServer();
if (ms != null) {
Set<ObjectName> set = ms.queryNames(new ObjectName(LOG4J_MBEAN + ":type=*,component=Loggers,name=*"), null);
for (ObjectName on : set) {
if (ms.isRegistered(on)) {
String name = (String) ms.getAttribute(on, "Name");
if (name == null || name.isEmpty()) {
name = "root";
}
if (logger.equals(name)) {
ms.setAttribute(on, new Attribute("Level", level));
}
}
}
}
} catch (Exception e) {
// ignore
}
}
public static String stripSourceLocationLineNumber(String location) {
int cnt = StringHelper.countChar(location, ':');
if (cnt >= 1) {
int pos = location.lastIndexOf(':');
return location.substring(0, pos);
} else {
return location;
}
}
public static Integer extractSourceLocationLineNumber(String location) {
int cnt = StringHelper.countChar(location, ':');
if (cnt >= 1) {
int pos = location.lastIndexOf(':');
String num = location.substring(pos + 1);
try {
return Integer.valueOf(num);
} catch (Exception var5) {
return null;
}
} else {
return null;
}
}
public static String extractSourceLocationId(String location) {
int cnt = StringHelper.countChar(location, ':');
if (cnt >= 1) {
int pos = location.lastIndexOf(':');
return location.substring(pos + 1);
} else {
return null;
}
}
}
| LoggerHelper |
java | apache__maven | impl/maven-di/src/test/java/org/apache/maven/di/impl/InjectorImplTest.java | {
"start": 10142,
"end": 10245
} | class ____ {
int num = BEAN_1.incrementAndGet();
}
@Named
static | Bean1 |
java | dropwizard__dropwizard | dropwizard-auth/src/test/java/io/dropwizard/auth/principal/NoAuthPolymorphicPrincipalEntityTest.java | {
"start": 1980,
"end": 5278
} | class ____ extends AbstractAuthResourceConfig {
public NoAuthPolymorphicPrincipalInjectedResourceConfig() {
register(NoAuthPolymorphicPrincipalEntityResource.class);
packages("io.dropwizard.jersey.jackson");
}
@Override protected Class<? extends Principal> getPrincipalClass() {
throw new AssertionError("Authentication must not be performed");
}
@Override protected ContainerRequestFilter getAuthFilter() {
return requestContext -> {
throw new AssertionError("Authentication must not be performed");
};
}
@Override protected AbstractBinder getAuthBinder() {
return new PolymorphicAuthValueFactoryProvider.Binder<>(
Set.of(JsonPrincipal.class, NullPrincipal.class));
}
@Override protected DynamicFeature getAuthDynamicFeature(ContainerRequestFilter authFilter) {
return new PolymorphicAuthDynamicFeature<>(Map.of(
JsonPrincipal.class, getAuthFilter(),
NullPrincipal.class, getAuthFilter()
));
}
}
@Test
void jsonPrincipalEntityResourceWithoutAuth200() {
String principalName = "Astar Seran";
assertThat(target("/no-auth-test/json-principal-entity").request()
.header(HttpHeaders.AUTHORIZATION, "Anything here")
.post(Entity.entity(new JsonPrincipal(principalName), MediaType.APPLICATION_JSON))
.readEntity(String.class))
.isEqualTo(principalName);
}
@Test
void nullPrincipalEntityResourceWithoutAuth200() {
assertThat(target("/no-auth-test/null-principal-entity").request()
.header(HttpHeaders.AUTHORIZATION, "Anything here")
.post(Entity.entity(new NullPrincipal(), MediaType.APPLICATION_JSON))
.readEntity(String.class))
.isEqualTo("null");
}
/**
* When parameter is annotated then Jersey classifies such
* parameter as {@link org.glassfish.jersey.server.model.Parameter.Source#UNKNOWN}
* instead of {@link org.glassfish.jersey.server.model.Parameter.Source#ENTITY}
* which is used for unannotated parameters. ValueFactoryProvider resolution
* logic is different for these two sources therefore must be tested separately.
*/
@Test
void annotatedJsonPrincipalEntityResourceWithoutAuth200() {
String principalName = "Astar Seran";
assertThat(target("/no-auth-test/annotated-json-principal-entity").request()
.header(HttpHeaders.AUTHORIZATION, "Anything here")
.post(Entity.entity(new JsonPrincipal(principalName), MediaType.APPLICATION_JSON))
.readEntity(String.class))
.isEqualTo(principalName);
}
@Test
void annotatedNullPrincipalEntityResourceWithoutAuth200() {
assertThat(target("/no-auth-test/annotated-null-principal-entity").request()
.header(HttpHeaders.AUTHORIZATION, "Anything here")
.post(Entity.entity(new NullPrincipal(), MediaType.APPLICATION_JSON))
.readEntity(String.class))
.isEqualTo("null");
}
}
| NoAuthPolymorphicPrincipalInjectedResourceConfig |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/DisabledOnOs.java | {
"start": 1046,
"end": 1134
} | class ____ test method is disabled if both conditions apply.
*
* <p>When applied at the | or |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/notifications/DataFrameAnalyticsAuditor.java | {
"start": 574,
"end": 1155
} | class ____ extends AbstractMlAuditor<DataFrameAnalyticsAuditMessage> {
private final boolean includeNodeInfo;
public DataFrameAnalyticsAuditor(
Client client,
ClusterService clusterService,
IndexNameExpressionResolver indexNameExpressionResolver,
boolean includeNodeInfo
) {
super(client, DataFrameAnalyticsAuditMessage::new, clusterService, indexNameExpressionResolver);
this.includeNodeInfo = includeNodeInfo;
}
public boolean includeNodeInfo() {
return includeNodeInfo;
}
}
| DataFrameAnalyticsAuditor |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/physical/batch/RemoveRedundantLocalRankRule.java | {
"start": 2763,
"end": 5154
} | interface ____ extends RelRule.Config {
RemoveRedundantLocalRankRule.RemoveRedundantLocalRankRuleConfig DEFAULT =
ImmutableRemoveRedundantLocalRankRule.RemoveRedundantLocalRankRuleConfig.builder()
.build()
.withOperandSupplier(
b0 ->
b0.operand(BatchPhysicalRank.class)
.oneInput(
b1 ->
b1.operand(BatchPhysicalRank.class)
.oneInput(
b2 ->
b2.operand(
RelNode
.class)
.oneInput(
b3 ->
b3.operand(
FlinkConventions
.BATCH_PHYSICAL()
.getInterface())
.noInputs()))))
.withDescription("RemoveRedundantLocalRankRule")
.as(RemoveRedundantLocalRankRuleConfig.class);
@Override
default RemoveRedundantLocalRankRule toRule() {
return new RemoveRedundantLocalRankRule(this);
}
}
}
| RemoveRedundantLocalRankRuleConfig |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FutureReturnValueIgnoredTest.java | {
"start": 11050,
"end": 11435
} | class ____<T> {
ListenableFuture<Void> ignoreReturnTypeSetByInputFuture(T input) {
return returnsInputType(logAsyncInternal(input), 0);
}
protected ListenableFuture<Void> logAsyncInternal(T record) {
return null;
}
<V> ListenableFuture<V> returnsInputType(ListenableFuture<V> future, final int n) {
return null;
}
}
public static | TypedClass |
java | apache__hadoop | hadoop-cloud-storage-project/hadoop-tos/src/test/java/org/apache/hadoop/fs/tosfs/object/tos/auth/TestDefaultCredentialsProviderChain.java | {
"start": 1556,
"end": 8953
} | class ____ extends TestAbstractCredentialsProvider {
private static final String MOCK_TEST_AK = "AK";
private static final String MOCK_TEST_SK = "SK";
private static final String MOCK_TEST_TST_TOKEN = "STS_TOKEN";
private static final String MOCK_TEST_AK_WITH_BUCKET = "AK_WITH_BUCKET";
private static final String MOCK_TEST_SK_WITH_BUCKET = "SK_WITH_BUCKET";
private static final String MOCK_TEST_STS_TOKEN_WITH_BUCKET = "STS_TOKEN_WITH_BUCKET";
private static final String MOCK_TEST_ENV_AK = "ENV_AK";
private static final String MOCK_TEST_ENV_SK = "ENV_SK";
private static final String MOCK_TEST_ENV_STS_TOKEN = "ENV_STS_TOKEN";
private static final String MOCK_TEST_BUCKET = "test";
private static final String MOCK_TEST_ROLE_NAME = "roleName";
private static final String MOCK_PATH = "/volcstack/latest/iam/security_credentials/";
private static final String API_ENDPOINT = MOCK_PATH + MOCK_TEST_ROLE_NAME;
private static final String EXPIRED_TIME_PATTERN = "yyyy-MM-dd'T'HH:mm:ssXXX";
@Override
public Configuration getConf() {
Configuration conf = new Configuration();
conf.set(TosKeys.FS_TOS_BUCKET_ACCESS_KEY_ID.key("test"), MOCK_TEST_AK_WITH_BUCKET);
conf.set(TosKeys.FS_TOS_BUCKET_SECRET_ACCESS_KEY.key("test"), MOCK_TEST_SK_WITH_BUCKET);
conf.set(TosKeys.FS_TOS_BUCKET_SESSION_TOKEN.key("test"), MOCK_TEST_STS_TOKEN_WITH_BUCKET);
conf.set(TosKeys.FS_TOS_ACCESS_KEY_ID, MOCK_TEST_AK);
conf.set(TosKeys.FS_TOS_SECRET_ACCESS_KEY, MOCK_TEST_SK);
conf.set(TosKeys.FS_TOS_SESSION_TOKEN, MOCK_TEST_TST_TOKEN);
return conf;
}
@BeforeEach
public void setUp() {
saveOsCredEnv();
}
@Test
public void testLoadCredFromEnvProvider() {
Configuration conf = getConf();
setSystemEnv(TOS.ENV_TOS_ACCESS_KEY_ID, MOCK_TEST_ENV_AK);
setSystemEnv(TOS.ENV_TOS_SECRET_ACCESS_KEY, MOCK_TEST_ENV_SK);
setSystemEnv(TOS.ENV_TOS_SESSION_TOKEN, MOCK_TEST_ENV_STS_TOKEN);
DefaultCredentialsProviderChain chain = new DefaultCredentialsProviderChain();
chain.initialize(conf, null);
assertEquals(chain.credential().getAccessKeyId(),
MOCK_TEST_ENV_AK, String.format("expect %s", MOCK_TEST_ENV_AK));
assertEquals(chain.credential().getAccessKeySecret(), MOCK_TEST_ENV_SK,
String.format("expect %s", MOCK_TEST_ENV_SK));
assertEquals(chain.credential().getSecurityToken(), MOCK_TEST_ENV_STS_TOKEN,
String.format("expect %s", MOCK_TEST_ENV_STS_TOKEN));
assertTrue(chain.lastUsedProvider() instanceof EnvironmentCredentialsProvider);
}
@Test
public void testLoadCredFromSimpleProviderWithBucket() {
Configuration conf = getConf();
removeSystemEnv(TOS.ENV_TOS_ACCESS_KEY_ID);
removeSystemEnv(TOS.ENV_TOS_SECRET_ACCESS_KEY);
removeSystemEnv(TOS.ENV_TOS_SESSION_TOKEN);
DefaultCredentialsProviderChain chain = new DefaultCredentialsProviderChain();
chain.initialize(conf, MOCK_TEST_BUCKET);
assertEquals(chain.credential().getAccessKeyId(), MOCK_TEST_AK_WITH_BUCKET,
String.format("expect %s", MOCK_TEST_AK_WITH_BUCKET));
assertEquals(chain.credential().getAccessKeySecret(), MOCK_TEST_SK_WITH_BUCKET,
String.format("expect %s", MOCK_TEST_SK_WITH_BUCKET));
assertEquals(chain.credential().getSecurityToken(), MOCK_TEST_STS_TOKEN_WITH_BUCKET,
String.format("expect %s", MOCK_TEST_STS_TOKEN_WITH_BUCKET));
assertTrue(chain.lastUsedProvider() instanceof SimpleCredentialsProvider);
}
@Test
public void testLoadCredFromSimpleProvider() {
Configuration conf = getConf();
removeSystemEnv(TOS.ENV_TOS_ACCESS_KEY_ID);
removeSystemEnv(TOS.ENV_TOS_SECRET_ACCESS_KEY);
DefaultCredentialsProviderChain chain = new DefaultCredentialsProviderChain();
chain.initialize(conf, "test-bucket");
assertEquals(chain.credential().getAccessKeyId(), MOCK_TEST_AK,
String.format("expect %s", MOCK_TEST_AK));
assertEquals(chain.credential().getAccessKeySecret(), MOCK_TEST_SK,
String.format("expect %s", MOCK_TEST_SK));
assertTrue(chain.lastUsedProvider() instanceof SimpleCredentialsProvider);
}
@Test
public void testNotFoundAnyProvider() {
removeSystemEnv(TOS.ENV_TOS_ACCESS_KEY_ID);
removeSystemEnv(TOS.ENV_TOS_SECRET_ACCESS_KEY);
DefaultCredentialsProviderChain chain = new DefaultCredentialsProviderChain();
chain.initialize(new Configuration(), MOCK_TEST_BUCKET);
assertThrows(RuntimeException.class, chain::credential);
}
@AfterEach
public void after() {
resetOsCredEnv();
}
@Test
public void testShouldReturnAKSKFollowByProviderSequence() {
setSystemEnv(TOS.ENV_TOS_ACCESS_KEY_ID, "ENV_ACCESS_KEY");
setSystemEnv(TOS.ENV_TOS_SECRET_ACCESS_KEY, "ENV_SECRET_KEY");
// use the simple credential provider at first.
String providerClassesStr = SimpleCredentialsProvider.class.getName() + ','
+ EnvironmentCredentialsProvider.class.getName();
Configuration conf = new Configuration();
conf.set(TosKeys.FS_TOS_CUSTOM_CREDENTIAL_PROVIDER_CLASSES, providerClassesStr);
conf.set(TosKeys.FS_TOS_ACCESS_KEY_ID, MOCK_TEST_AK);
conf.set(TosKeys.FS_TOS_SECRET_ACCESS_KEY, MOCK_TEST_SK);
conf.set(TosKeys.FS_TOS_SESSION_TOKEN, MOCK_TEST_TST_TOKEN);
DefaultCredentialsProviderChain provider = new DefaultCredentialsProviderChain();
provider.initialize(conf, MOCK_TEST_BUCKET);
ExpireableCredential cred = provider.createCredential();
assertEquals(MOCK_TEST_AK, cred.getAccessKeyId());
assertEquals(MOCK_TEST_SK, cred.getAccessKeySecret());
assertFalse(cred.isExpired());
// use the env credential provider at first.
providerClassesStr = EnvironmentCredentialsProvider.class.getName() + ','
+ SimpleCredentialsProvider.class.getName();
conf.set(TosKeys.FS_TOS_CUSTOM_CREDENTIAL_PROVIDER_CLASSES, providerClassesStr);
provider = new DefaultCredentialsProviderChain();
provider.initialize(conf, MOCK_TEST_BUCKET);
cred = provider.createCredential();
assertEquals("ENV_ACCESS_KEY", cred.getAccessKeyId());
assertEquals("ENV_SECRET_KEY", cred.getAccessKeySecret());
assertFalse(cred.isExpired());
removeSystemEnv(TOS.ENV_TOS_ACCESS_KEY_ID);
removeSystemEnv(TOS.ENV_TOS_SECRET_ACCESS_KEY);
}
@Test
public void testShouldThrowExceptionWhenCustomClassNotFound() {
Configuration conf = new Configuration();
conf.set(TosKeys.FS_TOS_CUSTOM_CREDENTIAL_PROVIDER_CLASSES,
SimpleCredentialsProvider.class.getName() + "NotExist");
DefaultCredentialsProviderChain provider = new DefaultCredentialsProviderChain();
TosException tosException =
assertThrows(TosException.class, () -> provider.initialize(conf, null));
assertTrue(tosException.getCause() instanceof ClassNotFoundException);
}
@Test
public void testShouldThrowExceptionIfNoDefaultConstructorFound() {
Configuration conf = new Configuration();
conf.set(TosKeys.FS_TOS_CUSTOM_CREDENTIAL_PROVIDER_CLASSES,
TestCredentialProviderNoDefaultConstructor.class.getName());
DefaultCredentialsProviderChain provider = new DefaultCredentialsProviderChain();
RuntimeException exception =
assertThrows(RuntimeException.class, () -> provider.initialize(conf, null));
assertTrue(exception.getMessage().contains("java.lang.NoSuchMethodException"));
}
static | TestDefaultCredentialsProviderChain |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/android/WakelockReleasedDangerouslyTest.java | {
"start": 4238,
"end": 4621
} | class ____ {
void foo(WakeLock wl1, WakeLock wl2) {
wl1.acquire(100);
if (wl2.isHeld()) {
wl1.release();
}
}
}
""")
.addOutputLines(
"out/TestApp.java",
"""
import android.os.PowerManager.WakeLock;
public | TestApp |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoAnnotationCompilationTest.java | {
"start": 10176,
"end": 13047
} | class ____ implements MyAnnotation,"
+ " Serializable {",
" private static final long serialVersionUID = -8116050813861599066L;",
" private final int[] value;",
"",
" AutoAnnotation_AnnotationFactory_newMyAnnotation(int[] value) {",
" if (value == null) {",
" throw new NullPointerException(\"Null value\");",
" }",
" this.value = Arrays.copyOf(value, value.length);",
" }",
"",
" @Override public Class<? extends MyAnnotation> annotationType() {",
" return MyAnnotation.class;",
" }",
"",
" @Override public int[] value() {",
" return Arrays.copyOf(value, value.length);",
" }",
"",
" @Override public String toString() {",
" StringBuilder sb = new StringBuilder(\"@com.example.annotations.MyAnnotation(\");",
" sb.append(Arrays.toString(value));",
" return sb.append(')').toString();",
" }",
"",
" @Override public boolean equals(Object o) {",
" if (o == this) {",
" return true;",
" }",
" if (o instanceof MyAnnotation) {",
" MyAnnotation that = (MyAnnotation) o;",
" return Arrays.equals(value,",
" (that instanceof AutoAnnotation_AnnotationFactory_newMyAnnotation)",
" ? ((AutoAnnotation_AnnotationFactory_newMyAnnotation) that).value",
" : that.value());",
" }",
" return false;",
" }",
"",
" @Override public int hashCode() {",
" return ",
" + (" + 127 * "value".hashCode() + " ^ Arrays.hashCode(value));",
" }",
"}");
Compilation compilation =
javac()
.withProcessors(new AutoAnnotationProcessor())
.withOptions("-A" + Nullables.NULLABLE_OPTION + "=")
.compile(annotationFactoryJavaFile, myAnnotationJavaFile, gwtCompatibleJavaFile);
assertThat(compilation).succeededWithoutWarnings();
assertThat(compilation)
.generatedSourceFile(
"com.example.factories.AutoAnnotation_AnnotationFactory_newMyAnnotation")
.hasSourceEquivalentTo(expectedOutput);
}
@Test
public void testCollectionsForArrays() {
JavaFileObject myAnnotationJavaFile =
JavaFileObjects.forSourceLines(
"com.example.annotations.MyAnnotation",
"package com.example.annotations;",
"",
"import com.example.enums.MyEnum;",
"",
"public @ | AutoAnnotation_AnnotationFactory_newMyAnnotation |
java | google__gson | test-shrinker/src/main/java/com/example/ClassWithJsonAdapterAnnotation.java | {
"start": 2565,
"end": 3331
} | class ____ implements TypeAdapterFactory {
@Override
public <T> TypeAdapter<T> create(Gson gson, TypeToken<T> type) {
// the code below is not type-safe, but does not matter for this test
@SuppressWarnings("unchecked")
TypeAdapter<T> r =
(TypeAdapter<T>)
new TypeAdapter<DummyClass>() {
@Override
public DummyClass read(JsonReader in) throws IOException {
return new DummyClass("factory-" + in.nextInt());
}
@Override
public void write(JsonWriter out, DummyClass value) throws IOException {
out.value("factory-" + value.s);
}
};
return r;
}
}
static | Factory |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/jobgraph/JobResourceRequirements.java | {
"start": 7847,
"end": 10089
} | class ____ {
private final Map<JobVertexID, JobVertexResourceRequirements> vertexResources =
new HashMap<>();
public Builder setParallelismForJobVertex(
JobVertexID jobVertexId, int lowerBound, int upperBound) {
vertexResources.put(
jobVertexId,
new JobVertexResourceRequirements(
new JobVertexResourceRequirements.Parallelism(lowerBound, upperBound)));
return this;
}
public JobResourceRequirements build() {
return new JobResourceRequirements(vertexResources);
}
}
private final Map<JobVertexID, JobVertexResourceRequirements> vertexResources;
public JobResourceRequirements(
Map<JobVertexID, JobVertexResourceRequirements> vertexResources) {
this.vertexResources =
Collections.unmodifiableMap(new HashMap<>(checkNotNull(vertexResources)));
}
public JobVertexResourceRequirements.Parallelism getParallelism(JobVertexID jobVertexId) {
return Optional.ofNullable(vertexResources.get(jobVertexId))
.map(JobVertexResourceRequirements::getParallelism)
.orElseThrow(
() ->
new IllegalStateException(
"No requirement set for vertex " + jobVertexId));
}
public Set<JobVertexID> getJobVertices() {
return vertexResources.keySet();
}
public Map<JobVertexID, JobVertexResourceRequirements> getJobVertexParallelisms() {
return vertexResources;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final JobResourceRequirements that = (JobResourceRequirements) o;
return Objects.equals(vertexResources, that.vertexResources);
}
@Override
public int hashCode() {
return Objects.hash(vertexResources);
}
@Override
public String toString() {
return "JobResourceRequirements{" + "vertexResources=" + vertexResources + '}';
}
}
| Builder |
java | quarkusio__quarkus | core/deployment/src/test/java/io/quarkus/deployment/util/FileUtilTest.java | {
"start": 215,
"end": 1276
} | class ____ {
@Test
public void testTranslateToVolumePath() {
// Windows-Style paths are formatted.
assertEquals("/c/tmp/code-with-quarkus", translateToVolumePath("C:\\tmp\\code-with-quarkus"));
assertEquals("/c/", translateToVolumePath("C"));
assertEquals("/c/", translateToVolumePath("C:"));
assertEquals("/c/", translateToVolumePath("C:\\"));
assertEquals("/c/Users", translateToVolumePath("C:\\Users"));
assertEquals("/c/Users/Quarkus/lambdatest-1.0-SNAPSHOT-native-image-source-jar",
translateToVolumePath("C:\\Users\\Quarkus\\lambdatest-1.0-SNAPSHOT-native-image-source-jar"));
// Side effect for Unix-style path.
assertEquals("/c/Users/Quarkus", translateToVolumePath("c:/Users/Quarkus"));
// Side effects for fancy inputs - for the sake of documentation.
assertEquals("something/bizarre", translateToVolumePath("something\\bizarre"));
assertEquals("something.bizarre", translateToVolumePath("something.bizarre"));
}
}
| FileUtilTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/LdapComponentBuilderFactory.java | {
"start": 1357,
"end": 1788
} | interface ____ {
/**
* LDAP (camel-ldap)
* Perform searches on LDAP servers.
*
* Category: database,security
* Since: 1.5
* Maven coordinates: org.apache.camel:camel-ldap
*
* @return the dsl builder
*/
static LdapComponentBuilder ldap() {
return new LdapComponentBuilderImpl();
}
/**
* Builder for the LDAP component.
*/
| LdapComponentBuilderFactory |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.