language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/date/DateAssert_isAfterOrEqualTo_Test.java | {
"start": 869,
"end": 1522
} | class ____ extends AbstractDateAssertWithDateArg_Test {
@Override
protected DateAssert assertionInvocationWithDateArg() {
return assertions.isAfterOrEqualTo(otherDate);
}
@Override
protected DateAssert assertionInvocationWithStringArg(String date) {
return assertions.isAfterOrEqualTo(date);
}
@Override
protected void verifyAssertionInvocation(Date date) {
verify(dates).assertIsAfterOrEqualTo(getInfo(assertions), getActual(assertions), date);
}
@Override
protected DateAssert assertionInvocationWithInstantArg() {
return assertions.isAfterOrEqualTo(otherDate.toInstant());
}
}
| DateAssert_isAfterOrEqualTo_Test |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/resource/beans/container/internal/CdiBeanContainerImmediateAccessImpl.java | {
"start": 621,
"end": 1655
} | class ____ extends AbstractCdiBeanContainer {
private final BeanManager beanManager;
CdiBeanContainerImmediateAccessImpl(BeanManager beanManager) {
BEANS_MSG_LOGGER.standardAccessToBeanManager();
this.beanManager = beanManager;
}
@Override
public BeanManager getUsableBeanManager() {
return beanManager;
}
@Override
protected <B> ContainedBeanImplementor<B> createBean(
Class<B> beanType,
BeanLifecycleStrategy lifecycleStrategy,
BeanInstanceProducer fallbackProducer) {
final ContainedBeanImplementor<B> bean =
lifecycleStrategy.createBean( beanType, fallbackProducer, this );
bean.initialize();
return bean;
}
@Override
protected <B> ContainedBeanImplementor<B> createBean(
String name,
Class<B> beanType,
BeanLifecycleStrategy lifecycleStrategy,
BeanInstanceProducer fallbackProducer) {
final ContainedBeanImplementor<B> bean =
lifecycleStrategy.createBean( name, beanType, fallbackProducer, this );
bean.initialize();
return bean;
}
}
| CdiBeanContainerImmediateAccessImpl |
java | google__error-prone | check_api/src/main/java/com/google/errorprone/bugpatterns/BugChecker.java | {
"start": 21654,
"end": 21794
} | interface ____ extends Suppressible {
Description matchUnionType(UnionTypeTree tree, VisitorState state);
}
public | UnionTypeTreeMatcher |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/support/DelegatePerTargetObjectIntroductionInterceptor.java | {
"start": 1931,
"end": 2133
} | class ____ the future.</i>
*
* @author Adrian Colyer
* @author Juergen Hoeller
* @since 2.0
* @see #suppressInterface
* @see DelegatingIntroductionInterceptor
*/
@SuppressWarnings("serial")
public | in |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/NamedLogHandlersBuildItem.java | {
"start": 315,
"end": 991
} | class ____ extends MultiBuildItem {
private final RuntimeValue<Map<String, Handler>> namedHandlersMap;
/**
* Construct a new instance.
*
* @param namedHandlersMap the named handlers to add to the run time configuration
*/
public NamedLogHandlersBuildItem(final RuntimeValue<Map<String, Handler>> namedHandlersMap) {
this.namedHandlersMap = Assert.checkNotNullParam("namedHandlersMap", namedHandlersMap);
}
/**
* Get the named handlers.
*
* @return the named handlers map
*/
public RuntimeValue<Map<String, Handler>> getNamedHandlersMap() {
return namedHandlersMap;
}
}
| NamedLogHandlersBuildItem |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/event/spi/FlushEvent.java | {
"start": 254,
"end": 921
} | class ____ extends AbstractSessionEvent {
private int numberOfEntitiesProcessed;
private int numberOfCollectionsProcessed;
public FlushEvent(EventSource source) {
super( source );
}
public int getNumberOfEntitiesProcessed() {
return numberOfEntitiesProcessed;
}
public void setNumberOfEntitiesProcessed(int numberOfEntitiesProcessed) {
this.numberOfEntitiesProcessed = numberOfEntitiesProcessed;
}
public int getNumberOfCollectionsProcessed() {
return numberOfCollectionsProcessed;
}
public void setNumberOfCollectionsProcessed(int numberOfCollectionsProcessed) {
this.numberOfCollectionsProcessed = numberOfCollectionsProcessed;
}
}
| FlushEvent |
java | redisson__redisson | redisson/src/main/java/org/redisson/config/Credentials.java | {
"start": 794,
"end": 1201
} | class ____ implements Serializable {
private String username;
private String password;
public Credentials() {
}
public Credentials(String username, String password) {
this.username = username;
this.password = password;
}
public String getPassword() {
return password;
}
public String getUsername() {
return username;
}
}
| Credentials |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptivebatch/util/PointwiseVertexInputInfoComputer.java | {
"start": 2657,
"end": 10064
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(PointwiseVertexInputInfoComputer.class);
// Used to limit the maximum number of subpartition slices to prevent increasing the
// time complexity of the parallelism deciding.
private static final int MAX_NUM_SUBPARTITION_SLICES_FACTOR = 32;
/**
* Decide parallelism and input infos, which will make the data be evenly distributed to
* downstream subtasks for POINTWISE, such that different downstream subtasks consume roughly
* the same amount of data.
*
* <p>Assume that `inputInfo` has two partitions, each partition has three subpartitions, their
* data bytes are: {0->[1,2,1], 1->[2,1,2]}, and the expected parallelism is 3. The calculation
* process is as follows: <br>
* 1. Create subpartition slices for input which is composed of several subpartitions. The
* created slice list and its data bytes are: [1,2,1,2,1,2] <br>
* 2. Distribute the subpartition slices array into n balanced parts (described by `IndexRange`,
* named SubpartitionSliceRanges) based on data volume: [0,1],[2,3],[4,5] <br>
* 3. Reorganize the distributed results into a mapping of partition range to subpartition
* range: {0 -> [0,1]}, {0->[2,2],1->[0,0]}, {1->[1,2]}. <br>
* The final result is the `SubpartitionGroup` that each of the three parallel tasks need to
* subscribe.
*
* @param inputInfos The information of consumed blocking results
* @param parallelism The parallelism of the job vertex
* @param minParallelism the min parallelism
* @param maxParallelism the max parallelism
* @param dataVolumePerTask proposed data volume per task for this set of inputInfo
* @return the parallelism and vertex input infos
*/
public Map<IntermediateDataSetID, JobVertexInputInfo> compute(
List<BlockingInputInfo> inputInfos,
int parallelism,
int minParallelism,
int maxParallelism,
long dataVolumePerTask) {
Map<Integer, List<SubpartitionSlice>> subpartitionSlicesByInputIndex =
createSubpartitionSlicesByInputIndex(inputInfos, maxParallelism);
// Note: SubpartitionSliceRanges does not represent the real index of the subpartitions, but
// the location of that subpartition in all subpartitions, as we aggregate all subpartitions
// into a one-digit array to calculate.
Optional<List<IndexRange>> optionalSubpartitionSliceRanges =
tryComputeSubpartitionSliceRange(
minParallelism,
maxParallelism,
dataVolumePerTask,
subpartitionSlicesByInputIndex);
if (optionalSubpartitionSliceRanges.isEmpty()) {
LOG.info(
"Cannot find a legal parallelism to evenly distribute data amount for inputs {}, "
+ "fallback to compute a parallelism that can evenly distribute num subpartitions.",
inputInfos.stream()
.map(BlockingInputInfo::getResultId)
.collect(Collectors.toList()));
// This computer is only used in the adaptive batch scenario, where isDynamicGraph
// should always be true.
return VertexInputInfoComputationUtils.computeVertexInputInfos(
parallelism, inputInfos, true);
}
List<IndexRange> subpartitionSliceRanges = optionalSubpartitionSliceRanges.get();
checkState(
isLegalParallelism(subpartitionSliceRanges.size(), minParallelism, maxParallelism));
// Create vertex input infos based on the subpartition slice and ranges.
return createJobVertexInputInfos(
inputInfos,
subpartitionSlicesByInputIndex,
subpartitionSliceRanges,
index -> index);
}
private static Map<Integer, List<SubpartitionSlice>> createSubpartitionSlicesByInputIndex(
List<BlockingInputInfo> inputInfos, int maxParallelism) {
int numSubpartitionSlices;
List<BlockingInputInfo> inputsWithIntraCorrelation =
getInputsWithIntraCorrelation(inputInfos);
if (!inputsWithIntraCorrelation.isEmpty()) {
// Ensure that when creating subpartition slices, data with intra-correlation will
// not be split.
numSubpartitionSlices = checkAndGetPartitionNum(inputsWithIntraCorrelation);
} else {
// Use the minimum of the two to avoid creating too many subpartition slices, which will
// lead to too high the time complexity of the parallelism deciding.
numSubpartitionSlices =
Math.min(
getMinSubpartitionCount(inputInfos),
MAX_NUM_SUBPARTITION_SLICES_FACTOR * maxParallelism);
}
Map<Integer, List<SubpartitionSlice>> subpartitionSlices = new HashMap<>();
for (int i = 0; i < inputInfos.size(); ++i) {
BlockingInputInfo inputInfo = inputInfos.get(i);
subpartitionSlices.put(i, createSubpartitionSlices(inputInfo, numSubpartitionSlices));
}
return subpartitionSlices;
}
private static List<SubpartitionSlice> createSubpartitionSlices(
BlockingInputInfo inputInfo, int total) {
List<SubpartitionSlice> subpartitionSlices = new ArrayList<>();
int numPartitions = inputInfo.getNumPartitions();
int numSubpartitions = checkAndGetSubpartitionNum(List.of(inputInfo));
if (numPartitions >= total) {
for (int i = 0; i < total; ++i) {
int start = i * numPartitions / total;
int nextStart = (i + 1) * numPartitions / total;
IndexRange partitionRange = new IndexRange(start, nextStart - 1);
IndexRange subpartitionRange = new IndexRange(0, numSubpartitions - 1);
subpartitionSlices.add(
createSubpartitionSlice(
partitionRange,
subpartitionRange,
inputInfo.getNumBytesProduced(partitionRange, subpartitionRange)));
}
} else {
for (int i = 0; i < numPartitions; i++) {
int count = (i + 1) * total / numPartitions - i * total / numPartitions;
checkState(count > 0 && count <= numSubpartitions);
IndexRange partitionRange = new IndexRange(i, i);
for (int j = 0; j < count; ++j) {
int start = j * numSubpartitions / count;
int nextStart = (j + 1) * numSubpartitions / count;
IndexRange subpartitionRange = new IndexRange(start, nextStart - 1);
subpartitionSlices.add(
createSubpartitionSlice(
partitionRange,
subpartitionRange,
inputInfo.getNumBytesProduced(
partitionRange, subpartitionRange)));
}
}
}
return subpartitionSlices;
}
}
| PointwiseVertexInputInfoComputer |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/language/TokenXMLPairMultiNamespaceSplitTest.java | {
"start": 985,
"end": 2473
} | class ____ extends TokenXMLPairNamespaceSplitTest {
@Override
@Test
public void testTokenXMLPair() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:split");
mock.expectedMessageCount(3);
mock.message(0).body()
.isEqualTo("<order id=\"1\" xmlns=\"http:acme.com\" xmlns:foo=\"http:foo.com\">Camel in Action</order>");
mock.message(1).body()
.isEqualTo("<order id=\"2\" xmlns=\"http:acme.com\" xmlns:foo=\"http:foo.com\">ActiveMQ in Action</order>");
mock.message(2).body()
.isEqualTo("<order id=\"3\" xmlns=\"http:acme.com\" xmlns:foo=\"http:foo.com\">DSL in Action</order>");
String body = createBody();
template.sendBody("direct:pair", body);
assertMockEndpointsSatisfied();
}
@Override
@Disabled
@Test
public void testTokenXMLPair2() {
// noop
}
@Override
protected String createBody() {
// multiple namespaces on parent on the same line
StringBuilder sb = new StringBuilder("<?xml version=\"1.0\"?>\n");
sb.append("<orders xmlns=\"http:acme.com\" xmlns:foo=\"http:foo.com\">\n");
sb.append(" <order id=\"1\">Camel in Action</order>\n");
sb.append(" <order id=\"2\">ActiveMQ in Action</order>\n");
sb.append(" <order id=\"3\">DSL in Action</order>\n");
sb.append("</orders>");
return sb.toString();
}
}
| TokenXMLPairMultiNamespaceSplitTest |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/api/StatefulConnection.java | {
"start": 556,
"end": 4346
} | interface ____<K, V> extends AutoCloseable, AsyncCloseable {
/**
* Add a listener for the {@link RedisConnectionStateListener}. The listener is notified every time a connect/disconnect/IO
* exception happens. The listener is called on the event loop thread so code within the listener methods must not block.
*
* @param listener must not be {@code null}.
* @since 6.2
*/
void addListener(RedisConnectionStateListener listener);
/**
* Removes a listener.
*
* @param listener must not be {@code null}.
* @since 6.2
*/
void removeListener(RedisConnectionStateListener listener);
/**
* Set the default command timeout for this connection. A zero timeout value indicates to not time out.
*
* @param timeout Command timeout.
* @since 5.0
*/
void setTimeout(Duration timeout);
/**
* @return the timeout.
*/
Duration getTimeout();
/**
* Dispatch a command. Write a command on the channel. The command may be changed/wrapped during write and the written
* instance is returned after the call. This command does not wait until the command completes and does not guarantee
* whether the command is executed successfully.
*
* @param command the Redis command.
* @param <T> result type
* @return the written Redis command.
*/
<T> RedisCommand<K, V, T> dispatch(RedisCommand<K, V, T> command);
/**
* Dispatch multiple command in a single write on the channel. The commands may be changed/wrapped during write and the
* written instance is returned after the call. This command does not wait until the command completes and does not
* guarantee whether the command is executed successfully.
*
* @param commands the Redis commands.
* @return the written Redis commands.
* @since 5.0
*/
Collection<RedisCommand<K, V, ?>> dispatch(Collection<? extends RedisCommand<K, V, ?>> commands);
/**
* Close the connection. The connection will become not usable anymore as soon as this method was called.
*/
void close();
/**
* Request to close the connection and return the {@link CompletableFuture} that is notified about its progress. The
* connection will become not usable anymore as soon as this method was called.
*
* @return a {@link CompletableFuture} that is notified once the operation completes, either because the operation was
* successful or because of an error.
* @since 5.1
*/
@Override
CompletableFuture<Void> closeAsync();
/**
* @return true if the connection is open (connected and not closed).
*/
boolean isOpen();
/**
* @return the client options valid for this connection.
*/
ClientOptions getOptions();
/**
* @return the client resources used for this connection.
*/
ClientResources getResources();
/**
* Disable or enable auto-flush behavior. Default is {@code true}. If autoFlushCommands is disabled, multiple commands can
* be issued without writing them actually to the transport. Commands are buffered until a {@link #flushCommands()} is
* issued. After calling {@link #flushCommands()} commands are sent to the transport and executed by Redis.
*
* @param autoFlush state of autoFlush.
*/
void setAutoFlushCommands(boolean autoFlush);
/**
* Flush pending commands. This commands forces a flush on the channel and can be used to buffer ("pipeline") commands to
* achieve batching. No-op if channel is not connected.
*/
void flushCommands();
/**
* @return the {@link RedisCodec} used by this connection.
*/
RedisCodec<K, V> getCodec();
}
| StatefulConnection |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/annotation/AutowiredAnnotationBeanPostProcessorTests.java | {
"start": 155035,
"end": 155955
} | class ____<S, I> {
@Autowired
public S string;
@Autowired
public I integer;
@Autowired
public S[] stringArray;
@Autowired
public I[] integerArray;
@Autowired
public List<S> stringList;
@Autowired
public List<I> integerList;
@Autowired
public Map<String, S> stringMap;
@Autowired
public Map<String, I> integerMap;
@Autowired
public Repository<S> stringRepository;
@Autowired
public Repository<I> integerRepository;
@Autowired
public Repository<S>[] stringRepositoryArray;
@Autowired
public Repository<I>[] integerRepositoryArray;
@Autowired
public List<Repository<S>> stringRepositoryList;
@Autowired
public List<Repository<I>> integerRepositoryList;
@Autowired
public Map<String, Repository<S>> stringRepositoryMap;
@Autowired
public Map<String, Repository<I>> integerRepositoryMap;
}
public static | RepositoryFieldInjectionBeanWithVariables |
java | spring-projects__spring-boot | build-plugin/spring-boot-maven-plugin/src/main/java/org/springframework/boot/maven/SpringApplicationAdminClient.java | {
"start": 1388,
"end": 4247
} | class ____ {
// Note: see SpringApplicationAdminJmxAutoConfiguration
static final String DEFAULT_OBJECT_NAME = "org.springframework.boot:type=Admin,name=SpringApplication";
private final MBeanServerConnection connection;
private final ObjectName objectName;
SpringApplicationAdminClient(MBeanServerConnection connection, String jmxName) {
this.connection = connection;
this.objectName = toObjectName(jmxName);
}
/**
* Check if the spring application managed by this instance is ready. Returns
* {@code false} if the mbean is not yet deployed so this method should be repeatedly
* called until a timeout is reached.
* @return {@code true} if the application is ready to service requests
* @throws MojoExecutionException if the JMX service could not be contacted
*/
boolean isReady() throws MojoExecutionException {
try {
return (Boolean) this.connection.getAttribute(this.objectName, "Ready");
}
catch (InstanceNotFoundException ex) {
return false; // Instance not available yet
}
catch (AttributeNotFoundException ex) {
throw new IllegalStateException("Unexpected: attribute 'Ready' not available", ex);
}
catch (ReflectionException ex) {
throw new MojoExecutionException("Failed to retrieve Ready attribute", ex.getCause());
}
catch (MBeanException | IOException ex) {
throw new MojoExecutionException(ex.getMessage(), ex);
}
}
/**
* Stop the application managed by this instance.
* @throws MojoExecutionException if the JMX service could not be contacted
* @throws IOException if an I/O error occurs
* @throws InstanceNotFoundException if the lifecycle mbean cannot be found
*/
void stop() throws MojoExecutionException, IOException, InstanceNotFoundException {
try {
this.connection.invoke(this.objectName, "shutdown", null, null);
}
catch (ReflectionException ex) {
throw new MojoExecutionException("Shutdown failed", ex.getCause());
}
catch (MBeanException ex) {
throw new MojoExecutionException("Could not invoke shutdown operation", ex);
}
}
private ObjectName toObjectName(String name) {
try {
return new ObjectName(name);
}
catch (MalformedObjectNameException ex) {
throw new IllegalArgumentException("Invalid jmx name '" + name + "'");
}
}
/**
* Create a connector for an {@link javax.management.MBeanServer} exposed on the
* current machine and the current port. Security should be disabled.
* @param port the port on which the mbean server is exposed
* @return a connection
* @throws IOException if the connection to that server failed
*/
static JMXConnector connect(int port) throws IOException {
String url = "service:jmx:rmi:///jndi/rmi://127.0.0.1:" + port + "/jmxrmi";
JMXServiceURL serviceUrl = new JMXServiceURL(url);
return JMXConnectorFactory.connect(serviceUrl, null);
}
}
| SpringApplicationAdminClient |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/ManifestWorkspace.java | {
"start": 24968,
"end": 25084
} | class ____: " + ln + " in " + u);
}
names.add(ln);
}
return lc + 1;
}
}
| name |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_349.java | {
"start": 987,
"end": 1219
} | class ____ {
public Currency currency;
public BigDecimal amount;
@Override
public String toString() {
return "Money{currency=" + currency + ", amount=" + amount + '}';
}
}
}
| Money |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MiloClientEndpointBuilderFactory.java | {
"start": 1620,
"end": 24407
} | interface ____
extends
EndpointConsumerBuilder {
default AdvancedMiloClientEndpointConsumerBuilder advanced() {
return (AdvancedMiloClientEndpointConsumerBuilder) this;
}
/**
* A virtual client id to force the creation of a new connection
* instance.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param clientId the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder clientId(String clientId) {
doSetProperty("clientId", clientId);
return this;
}
/**
* Deadband type for MonitorFilterType DataChangeFilter.
*
* The option is a:
* <code>org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.UInteger</code> type.
*
* Default: 0
* Group: common
*
* @param dataChangeFilterDeadbandType the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder dataChangeFilterDeadbandType(org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.UInteger dataChangeFilterDeadbandType) {
doSetProperty("dataChangeFilterDeadbandType", dataChangeFilterDeadbandType);
return this;
}
/**
* Deadband type for MonitorFilterType DataChangeFilter.
*
* The option will be converted to a
* <code>org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.UInteger</code> type.
*
* Default: 0
* Group: common
*
* @param dataChangeFilterDeadbandType the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder dataChangeFilterDeadbandType(String dataChangeFilterDeadbandType) {
doSetProperty("dataChangeFilterDeadbandType", dataChangeFilterDeadbandType);
return this;
}
/**
* Deadband value for MonitorFilterType DataChangeFilter.
*
* The option is a: <code>java.lang.Double</code> type.
*
* Default: 0.0
* Group: common
*
* @param dataChangeFilterDeadbandValue the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder dataChangeFilterDeadbandValue(Double dataChangeFilterDeadbandValue) {
doSetProperty("dataChangeFilterDeadbandValue", dataChangeFilterDeadbandValue);
return this;
}
/**
* Deadband value for MonitorFilterType DataChangeFilter.
*
* The option will be converted to a <code>java.lang.Double</code> type.
*
* Default: 0.0
* Group: common
*
* @param dataChangeFilterDeadbandValue the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder dataChangeFilterDeadbandValue(String dataChangeFilterDeadbandValue) {
doSetProperty("dataChangeFilterDeadbandValue", dataChangeFilterDeadbandValue);
return this;
}
/**
* Data change trigger for data change monitor filter type.
*
* The option is a:
* <code>org.eclipse.milo.opcua.stack.core.types.enumerated.DataChangeTrigger</code> type.
*
* Default: StatusValueTimestamp
* Group: common
*
* @param dataChangeFilterTrigger the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder dataChangeFilterTrigger(org.eclipse.milo.opcua.stack.core.types.enumerated.DataChangeTrigger dataChangeFilterTrigger) {
doSetProperty("dataChangeFilterTrigger", dataChangeFilterTrigger);
return this;
}
/**
* Data change trigger for data change monitor filter type.
*
* The option will be converted to a
* <code>org.eclipse.milo.opcua.stack.core.types.enumerated.DataChangeTrigger</code> type.
*
* Default: StatusValueTimestamp
* Group: common
*
* @param dataChangeFilterTrigger the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder dataChangeFilterTrigger(String dataChangeFilterTrigger) {
doSetProperty("dataChangeFilterTrigger", dataChangeFilterTrigger);
return this;
}
/**
* Default await setting for writes.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param defaultAwaitWrites the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder defaultAwaitWrites(boolean defaultAwaitWrites) {
doSetProperty("defaultAwaitWrites", defaultAwaitWrites);
return this;
}
/**
* Default await setting for writes.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param defaultAwaitWrites the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder defaultAwaitWrites(String defaultAwaitWrites) {
doSetProperty("defaultAwaitWrites", defaultAwaitWrites);
return this;
}
/**
* A suffix for endpoint URI when discovering.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param discoveryEndpointSuffix the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder discoveryEndpointSuffix(String discoveryEndpointSuffix) {
doSetProperty("discoveryEndpointSuffix", discoveryEndpointSuffix);
return this;
}
/**
* An alternative discovery URI.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param discoveryEndpointUri the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder discoveryEndpointUri(String discoveryEndpointUri) {
doSetProperty("discoveryEndpointUri", discoveryEndpointUri);
return this;
}
/**
* The method definition (see Method ID).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param method the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder method(String method) {
doSetProperty("method", method);
return this;
}
/**
* Monitor Filter Type for MonitoredItems.
*
* The option is a:
* <code>org.apache.camel.component.milo.client.MonitorFilterType</code>
* type.
*
* Group: common
*
* @param monitorFilterType the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder monitorFilterType(org.apache.camel.component.milo.client.MonitorFilterType monitorFilterType) {
doSetProperty("monitorFilterType", monitorFilterType);
return this;
}
/**
* Monitor Filter Type for MonitoredItems.
*
* The option will be converted to a
* <code>org.apache.camel.component.milo.client.MonitorFilterType</code>
* type.
*
* Group: common
*
* @param monitorFilterType the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder monitorFilterType(String monitorFilterType) {
doSetProperty("monitorFilterType", monitorFilterType);
return this;
}
/**
* The node definition (see Node ID).
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param node the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder node(String node) {
doSetProperty("node", node);
return this;
}
/**
* Omit notifications in case of null values.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param omitNullValues the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder omitNullValues(boolean omitNullValues) {
doSetProperty("omitNullValues", omitNullValues);
return this;
}
/**
* Omit notifications in case of null values.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param omitNullValues the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder omitNullValues(String omitNullValues) {
doSetProperty("omitNullValues", omitNullValues);
return this;
}
/**
* The sampling interval in milliseconds.
*
* The option is a: <code>java.lang.Double</code> type.
*
* Default: 0.0
* Group: common
*
* @param samplingInterval the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder samplingInterval(Double samplingInterval) {
doSetProperty("samplingInterval", samplingInterval);
return this;
}
/**
* The sampling interval in milliseconds.
*
* The option will be converted to a <code>java.lang.Double</code> type.
*
* Default: 0.0
* Group: common
*
* @param samplingInterval the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder samplingInterval(String samplingInterval) {
doSetProperty("samplingInterval", samplingInterval);
return this;
}
/**
* A set of allowed security policy URIs. Default is to accept all and
* use the highest.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: client
*
* @param allowedSecurityPolicies the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder allowedSecurityPolicies(String allowedSecurityPolicies) {
doSetProperty("allowedSecurityPolicies", allowedSecurityPolicies);
return this;
}
/**
* The application name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: Apache Camel adapter for Eclipse Milo
* Group: client
*
* @param applicationName the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder applicationName(String applicationName) {
doSetProperty("applicationName", applicationName);
return this;
}
/**
* The application URI.
*
* The option is a: <code>java.lang.String</code> type.
*
* Default: http://camel.apache.org/EclipseMilo/Client
* Group: client
*
* @param applicationUri the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder applicationUri(String applicationUri) {
doSetProperty("applicationUri", applicationUri);
return this;
}
/**
* Channel lifetime in milliseconds.
*
* The option is a: <code>java.lang.Long</code> type.
*
* Group: client
*
* @param channelLifetime the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder channelLifetime(Long channelLifetime) {
doSetProperty("channelLifetime", channelLifetime);
return this;
}
/**
* Channel lifetime in milliseconds.
*
* The option will be converted to a <code>java.lang.Long</code> type.
*
* Group: client
*
* @param channelLifetime the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder channelLifetime(String channelLifetime) {
doSetProperty("channelLifetime", channelLifetime);
return this;
}
/**
* The name of the key in the keystore file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: client
*
* @param keyAlias the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder keyAlias(String keyAlias) {
doSetProperty("keyAlias", keyAlias);
return this;
}
/**
* The key password.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: client
*
* @param keyPassword the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder keyPassword(String keyPassword) {
doSetProperty("keyPassword", keyPassword);
return this;
}
/**
* The keystore password.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: client
*
* @param keyStorePassword the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder keyStorePassword(String keyStorePassword) {
doSetProperty("keyStorePassword", keyStorePassword);
return this;
}
/**
* The key store type.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: client
*
* @param keyStoreType the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder keyStoreType(String keyStoreType) {
doSetProperty("keyStoreType", keyStoreType);
return this;
}
/**
* The URL where the key should be loaded from.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: client
*
* @param keyStoreUrl the value to set
* @return the dsl builder
*/
default MiloClientEndpointConsumerBuilder keyStoreUrl(String keyStoreUrl) {
doSetProperty("keyStoreUrl", keyStoreUrl);
return this;
}
        /**
         * The maximum number of pending publish requests.
         *
         * The option is a: <code>java.lang.Long</code> type.
         *
         * Group: client
         *
         * A {@link #maxPendingPublishRequests(String)} variant exists
         * for supplying the value in text form.
         *
         * @param maxPendingPublishRequests the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder maxPendingPublishRequests(Long maxPendingPublishRequests) {
            doSetProperty("maxPendingPublishRequests", maxPendingPublishRequests);
            return this;
        }
        /**
         * The maximum number of pending publish requests.
         *
         * String variant of {@link #maxPendingPublishRequests(Long)};
         * the text is converted to the target type at runtime.
         *
         * The option will be converted to a <code>java.lang.Long</code> type.
         *
         * Group: client
         *
         * @param maxPendingPublishRequests the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder maxPendingPublishRequests(String maxPendingPublishRequests) {
            doSetProperty("maxPendingPublishRequests", maxPendingPublishRequests);
            return this;
        }
        /**
         * The maximum number of bytes a response message may have.
         *
         * The option is a: <code>java.lang.Long</code> type.
         *
         * Group: client
         *
         * A {@link #maxResponseMessageSize(String)} variant exists for
         * supplying the value in text form.
         *
         * @param maxResponseMessageSize the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder maxResponseMessageSize(Long maxResponseMessageSize) {
            doSetProperty("maxResponseMessageSize", maxResponseMessageSize);
            return this;
        }
        /**
         * The maximum number of bytes a response message may have.
         *
         * String variant of {@link #maxResponseMessageSize(Long)};
         * the text is converted to the target type at runtime.
         *
         * The option will be converted to a <code>java.lang.Long</code> type.
         *
         * Group: client
         *
         * @param maxResponseMessageSize the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder maxResponseMessageSize(String maxResponseMessageSize) {
            doSetProperty("maxResponseMessageSize", maxResponseMessageSize);
            return this;
        }
        /**
         * Override the server reported endpoint host with the host from the
         * endpoint URI.
         *
         * Useful when the server advertises an address that is not
         * reachable from the client (e.g. behind NAT) — the host part of
         * the configured endpoint URI is used instead.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: client
         *
         * @param overrideHost the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder overrideHost(boolean overrideHost) {
            doSetProperty("overrideHost", overrideHost);
            return this;
        }
        /**
         * Override the server reported endpoint host with the host from the
         * endpoint URI.
         *
         * String variant of {@link #overrideHost(boolean)}; the text is
         * converted to a boolean at runtime.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: client
         *
         * @param overrideHost the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder overrideHost(String overrideHost) {
            doSetProperty("overrideHost", overrideHost);
            return this;
        }
        /**
         * The product URI.
         *
         * NOTE(review): presumably the product URI advertised to the OPC
         * UA server as part of the client/session metadata — confirm
         * against the Eclipse Milo client configuration.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Default: http://camel.apache.org/EclipseMilo
         * Group: client
         *
         * @param productUri the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder productUri(String productUri) {
            doSetProperty("productUri", productUri);
            return this;
        }
        /**
         * The requested publishing interval in milliseconds.
         *
         * The option is a: <code>java.lang.Double</code> type.
         *
         * Default: 1_000.0
         * Group: client
         *
         * A {@link #requestedPublishingInterval(String)} variant exists
         * for supplying the value in text form.
         *
         * @param requestedPublishingInterval the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder requestedPublishingInterval(Double requestedPublishingInterval) {
            doSetProperty("requestedPublishingInterval", requestedPublishingInterval);
            return this;
        }
        /**
         * The requested publishing interval in milliseconds.
         *
         * String variant of {@link #requestedPublishingInterval(Double)};
         * the text is converted to the target type at runtime.
         *
         * The option will be converted to a <code>java.lang.Double</code> type.
         *
         * Default: 1_000.0
         * Group: client
         *
         * @param requestedPublishingInterval the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder requestedPublishingInterval(String requestedPublishingInterval) {
            doSetProperty("requestedPublishingInterval", requestedPublishingInterval);
            return this;
        }
        /**
         * Request timeout in milliseconds.
         *
         * The option is a: <code>java.lang.Long</code> type.
         *
         * Group: client
         *
         * A {@link #requestTimeout(String)} variant exists for supplying
         * the value in text form.
         *
         * @param requestTimeout the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder requestTimeout(Long requestTimeout) {
            doSetProperty("requestTimeout", requestTimeout);
            return this;
        }
        /**
         * Request timeout in milliseconds.
         *
         * String variant of {@link #requestTimeout(Long)}; the text is
         * converted to the target type at runtime.
         *
         * The option will be converted to a <code>java.lang.Long</code> type.
         *
         * Group: client
         *
         * @param requestTimeout the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder requestTimeout(String requestTimeout) {
            doSetProperty("requestTimeout", requestTimeout);
            return this;
        }
        /**
         * Session name.
         *
         * NOTE(review): presumably a human-readable identifier passed to
         * the server when the OPC UA session is created — confirm
         * against the Eclipse Milo client configuration.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: client
         *
         * @param sessionName the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder sessionName(String sessionName) {
            doSetProperty("sessionName", sessionName);
            return this;
        }
        /**
         * Session timeout in milliseconds.
         *
         * The option is a: <code>java.lang.Long</code> type.
         *
         * Group: client
         *
         * A {@link #sessionTimeout(String)} variant exists for supplying
         * the value in text form.
         *
         * @param sessionTimeout the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder sessionTimeout(Long sessionTimeout) {
            doSetProperty("sessionTimeout", sessionTimeout);
            return this;
        }
        /**
         * Session timeout in milliseconds.
         *
         * String variant of {@link #sessionTimeout(Long)}; the text is
         * converted to the target type at runtime.
         *
         * The option will be converted to a <code>java.lang.Long</code> type.
         *
         * Group: client
         *
         * @param sessionTimeout the value to set
         * @return the dsl builder
         */
        default MiloClientEndpointConsumerBuilder sessionTimeout(String sessionTimeout) {
            doSetProperty("sessionTimeout", sessionTimeout);
            return this;
        }
}
/**
* Advanced builder for endpoint consumers for the OPC UA Client component.
*/
public | MiloClientEndpointConsumerBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/HqlParserMemoryUsageTest.java | {
"start": 3158,
"end": 3356
} | class ____ {
@Id
private Long id;
private String city;
@ManyToOne(fetch = FetchType.LAZY)
private AppUser user;
}
@Entity(name = "AppUser")
@Table(name = "app_users")
public static | Address |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/FluxTakeUntilPredicateTest.java | {
"start": 1102,
"end": 5242
} | class ____ {
@Test
public void sourceNull() {
assertThatExceptionOfType(NullPointerException.class).isThrownBy(() -> {
new FluxTakeUntil<>(null, v -> true);
});
}
@Test
public void predicateNull() {
assertThatExceptionOfType(NullPointerException.class).isThrownBy(() -> {
Flux.never()
.takeUntil(null);
});
}
@Test
public void takeAll() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 5)
.takeUntil(v -> false)
.subscribe(ts);
ts.assertValues(1, 2, 3, 4, 5)
.assertComplete()
.assertNoError();
}
@Test
public void takeAllBackpressured() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(0);
Flux.range(1, 5)
.takeUntil(v -> false)
.subscribe(ts);
ts.assertNoValues()
.assertNoError()
.assertNotComplete();
ts.request(2);
ts.assertValues(1, 2)
.assertNoError()
.assertNotComplete();
ts.request(10);
ts.assertValues(1, 2, 3, 4, 5)
.assertComplete()
.assertNoError();
}
@Test
public void takeSome() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 5)
.takeUntil(v -> v == 3)
.subscribe(ts);
ts.assertValues(1, 2, 3)
.assertComplete()
.assertNoError();
}
@Test
public void takeSomeWithChangedValue() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 5)
.map(AtomicInteger::new)
.takeUntil(v -> v.get() == 3)
.map(v -> v.getAndSet(0))
.subscribe(ts);
ts.assertValues(1, 2, 3)
.assertComplete()
.assertNoError();
}
@Test
public void takeSomeBackpressured() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(0);
Flux.range(1, 5)
.takeUntil(v -> v == 3)
.subscribe(ts);
ts.assertNoValues()
.assertNoError()
.assertNotComplete();
ts.request(2);
ts.assertValues(1, 2)
.assertNoError()
.assertNotComplete();
ts.request(10);
ts.assertValues(1, 2, 3)
.assertComplete()
.assertNoError();
}
@Test
public void stopImmediately() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 5)
.takeUntil(v -> true)
.subscribe(ts);
ts.assertValues(1)
.assertComplete()
.assertNoError();
}
@Test
public void stopImmediatelyBackpressured() {
AssertSubscriber<Integer> ts = AssertSubscriber.create(0);
Flux.range(1, 5)
.takeUntil(v -> true)
.subscribe(ts);
ts.assertNoValues()
.assertNoError()
.assertNotComplete();
ts.request(2);
ts.assertValues(1)
.assertComplete()
.assertNoError();
}
@Test
public void predicateThrows() {
AssertSubscriber<Integer> ts = AssertSubscriber.create();
Flux.range(1, 5)
.takeUntil(v -> {
throw new RuntimeException("forced failure");
}).subscribe(ts);
ts.assertValues(1)
.assertNotComplete()
.assertError(RuntimeException.class)
.assertErrorMessage("forced failure");
}
@Test
public void scanOperator(){
Flux<Integer> parent = Flux.just(1);
FluxTakeUntil<Integer> test = new FluxTakeUntil<>(parent, v -> true);
Assertions.assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
Assertions.assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
}
@Test
public void scanPredicateSubscriber() {
CoreSubscriber<Integer> actual = new LambdaSubscriber<>(null, e -> {}, null, null);
FluxTakeUntil.TakeUntilPredicateSubscriber<Integer> test =
new FluxTakeUntil.TakeUntilPredicateSubscriber<>(actual, i -> true);
Subscription parent = Operators.emptySubscription();
test.onSubscribe(parent);
Assertions.assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(parent);
Assertions.assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
Assertions.assertThat(test.scan(Scannable.Attr.RUN_STYLE)).isSameAs(Scannable.Attr.RunStyle.SYNC);
Assertions.assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
test.onComplete();
Assertions.assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
}
}
| FluxTakeUntilPredicateTest |
java | google__dagger | javatests/dagger/functional/producers/DependentTest.java | {
"start": 2325,
"end": 2441
} | interface ____ {
ListenableFuture<Integer> numGreetings();
}
@Module
static final | DependedProductionComponent |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/IntegersBaseTest.java | {
"start": 1294,
"end": 1930
} | class ____ {
protected Failures failures;
protected Integers integers;
protected ComparatorBasedComparisonStrategy absValueComparisonStrategy;
protected Integers integersWithAbsValueComparisonStrategy;
@BeforeEach
public void setUp() {
failures = spy(Failures.instance());
integers = new Integers();
integers.setFailures(failures);
absValueComparisonStrategy = new ComparatorBasedComparisonStrategy(new AbsValueComparator<Integer>());
integersWithAbsValueComparisonStrategy = new Integers(absValueComparisonStrategy);
integersWithAbsValueComparisonStrategy.failures = failures;
}
}
| IntegersBaseTest |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/named_constructor_args/Mapper.java | {
"start": 860,
"end": 1266
} | interface ____ {
// @formatter:off
@ConstructorArgs({
@Arg(column = "name", name = "name"),
@Arg(id = true, column = "id", name = "id"),
@Arg(column = "team", name = "team", javaType = String.class),
})
// @formatter:on
@Select("select * from users where id = #{id}")
User mapConstructorWithParamAnnos(Integer id);
User mapConstructorWithParamAnnosXml(Integer id);
}
| Mapper |
java | apache__camel | components/camel-sjms/src/test/java/org/apache/camel/component/sjms/it/AsyncJmsInOutIT.java | {
"start": 1386,
"end": 2681
} | class ____ extends JmsTestSupport {
@Test
public void testAsynchronous() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(100);
mock.expectsNoDuplicates(body());
StopWatch watch = new StopWatch();
for (int i = 0; i < 100; i++) {
template.sendBody("seda:start.queue.AsyncJmsInOutIT", Integer.toString(i));
}
// just in case we run on slow boxes
MockEndpoint.assertIsSatisfied(context, 20, TimeUnit.SECONDS);
log.info("Took {} ms. to process 100 messages request/reply over JMS", watch.taken());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("seda:start.queue.AsyncJmsInOutIT")
.to("sjms:queue:in.foo.queue.AsyncJmsInOutIT?asyncConsumer=true&replyTo=out.bar&exchangePattern=InOut")
.to("mock:result");
from("sjms:queue:in.foo.queue.AsyncJmsInOutIT?asyncConsumer=true")
.log("Using ${threadName} to process ${body}")
.transform(body().prepend("Bye "));
}
};
}
}
| AsyncJmsInOutIT |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/json/MappedObject.java | {
"start": 1378,
"end": 1487
} | class ____ mapped JSON objects.
*
* @author Phillip Webb
* @author Dmytro Nosan
* @since 2.3.0
*/
public | for |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/OBSEndpointBuilderFactory.java | {
"start": 1645,
"end": 33654
} | interface ____
extends
EndpointConsumerBuilder {
default AdvancedOBSEndpointConsumerBuilder advanced() {
return (AdvancedOBSEndpointConsumerBuilder) this;
}
/**
* Name of bucket to perform operation on.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param bucketName the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder bucketName(String bucketName) {
doSetProperty("bucketName", bucketName);
return this;
}
/**
* OBS url. Carries higher precedence than region parameter based client
* initialization.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param endpoint the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder endpoint(String endpoint) {
doSetProperty("endpoint", endpoint);
return this;
}
/**
* Name of object to perform operation with.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param objectName the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder objectName(String objectName) {
doSetProperty("objectName", objectName);
return this;
}
/**
* OBS service region. This is lower precedence than endpoint based
* configuration.
*
* The option is a: <code>java.lang.String</code> type.
*
* Required: true
* Group: common
*
* @param region the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder region(String region) {
doSetProperty("region", region);
return this;
}
/**
* Determines if objects should be deleted after it has been retrieved.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param deleteAfterRead the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder deleteAfterRead(boolean deleteAfterRead) {
doSetProperty("deleteAfterRead", deleteAfterRead);
return this;
}
/**
* Determines if objects should be deleted after it has been retrieved.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param deleteAfterRead the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder deleteAfterRead(String deleteAfterRead) {
doSetProperty("deleteAfterRead", deleteAfterRead);
return this;
}
/**
* The character used for grouping object names.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param delimiter the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder delimiter(String delimiter) {
doSetProperty("delimiter", delimiter);
return this;
}
/**
* Name of destination bucket where objects will be moved when
* moveAfterRead is set to true.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param destinationBucket the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder destinationBucket(String destinationBucket) {
doSetProperty("destinationBucket", destinationBucket);
return this;
}
/**
* Get the object from the bucket with the given file name.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param fileName the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder fileName(String fileName) {
doSetProperty("fileName", fileName);
return this;
}
/**
* If true, objects in folders will be consumed. Otherwise, they will be
* ignored and no Exchanges will be created for them.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param includeFolders the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder includeFolders(boolean includeFolders) {
doSetProperty("includeFolders", includeFolders);
return this;
}
/**
* If true, objects in folders will be consumed. Otherwise, they will be
* ignored and no Exchanges will be created for them.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: consumer
*
* @param includeFolders the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder includeFolders(String includeFolders) {
doSetProperty("includeFolders", includeFolders);
return this;
}
/**
* The maximum number of messages to poll at each polling.
*
* The option is a: <code>int</code> type.
*
* Default: 10
* Group: consumer
*
* @param maxMessagesPerPoll the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder maxMessagesPerPoll(int maxMessagesPerPoll) {
doSetProperty("maxMessagesPerPoll", maxMessagesPerPoll);
return this;
}
/**
* The maximum number of messages to poll at each polling.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 10
* Group: consumer
*
* @param maxMessagesPerPoll the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder maxMessagesPerPoll(String maxMessagesPerPoll) {
doSetProperty("maxMessagesPerPoll", maxMessagesPerPoll);
return this;
}
/**
* Determines whether objects should be moved to a different bucket
* after they have been retrieved. The destinationBucket option must
* also be set for this option to work.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param moveAfterRead the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder moveAfterRead(boolean moveAfterRead) {
doSetProperty("moveAfterRead", moveAfterRead);
return this;
}
/**
* Determines whether objects should be moved to a different bucket
* after they have been retrieved. The destinationBucket option must
* also be set for this option to work.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param moveAfterRead the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder moveAfterRead(String moveAfterRead) {
doSetProperty("moveAfterRead", moveAfterRead);
return this;
}
/**
* The object name prefix used for filtering objects to be listed.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: consumer
*
* @param prefix the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder prefix(String prefix) {
doSetProperty("prefix", prefix);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param sendEmptyMessageWhenIdle the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder sendEmptyMessageWhenIdle(boolean sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* If the polling consumer did not poll any files, you can enable this
* option to send an empty message (no body) instead.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param sendEmptyMessageWhenIdle the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder sendEmptyMessageWhenIdle(String sendEmptyMessageWhenIdle) {
doSetProperty("sendEmptyMessageWhenIdle", sendEmptyMessageWhenIdle);
return this;
}
/**
* Proxy server ip/hostname.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyHost the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder proxyHost(String proxyHost) {
doSetProperty("proxyHost", proxyHost);
return this;
}
/**
* Proxy authentication password.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyPassword the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder proxyPassword(String proxyPassword) {
doSetProperty("proxyPassword", proxyPassword);
return this;
}
/**
* Proxy server port.
*
* The option is a: <code>int</code> type.
*
* Group: proxy
*
* @param proxyPort the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder proxyPort(int proxyPort) {
doSetProperty("proxyPort", proxyPort);
return this;
}
/**
* Proxy server port.
*
* The option will be converted to a <code>int</code> type.
*
* Group: proxy
*
* @param proxyPort the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder proxyPort(String proxyPort) {
doSetProperty("proxyPort", proxyPort);
return this;
}
/**
* Proxy authentication user.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: proxy
*
* @param proxyUser the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder proxyUser(String proxyUser) {
doSetProperty("proxyUser", proxyUser);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffErrorThreshold the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder backoffErrorThreshold(int backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent error polls (failed due some error) that
* should happen before the backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffErrorThreshold the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder backoffErrorThreshold(String backoffErrorThreshold) {
doSetProperty("backoffErrorThreshold", backoffErrorThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffIdleThreshold the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder backoffIdleThreshold(int backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* The number of subsequent idle polls that should happen before the
* backoffMultipler should kick-in.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffIdleThreshold the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder backoffIdleThreshold(String backoffIdleThreshold) {
doSetProperty("backoffIdleThreshold", backoffIdleThreshold);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option is a: <code>int</code> type.
*
* Group: scheduler
*
* @param backoffMultiplier the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder backoffMultiplier(int backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* To let the scheduled polling consumer backoff if there has been a
* number of subsequent idles/errors in a row. The multiplier is then
* the number of polls that will be skipped before the next actual
* attempt is happening again. When this option is in use then
* backoffIdleThreshold and/or backoffErrorThreshold must also be
* configured.
*
* The option will be converted to a <code>int</code> type.
*
* Group: scheduler
*
* @param backoffMultiplier the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder backoffMultiplier(String backoffMultiplier) {
doSetProperty("backoffMultiplier", backoffMultiplier);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option is a: <code>long</code> type.
*
* Default: 500
* Group: scheduler
*
* @param delay the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder delay(long delay) {
doSetProperty("delay", delay);
return this;
}
/**
* Milliseconds before the next poll.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 500
* Group: scheduler
*
* @param delay the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder delay(String delay) {
doSetProperty("delay", delay);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*
* @param greedy the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder greedy(boolean greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* If greedy is enabled, then the ScheduledPollConsumer will run
* immediately again, if the previous run polled 1 or more messages.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: scheduler
*
* @param greedy the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder greedy(String greedy) {
doSetProperty("greedy", greedy);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option is a: <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*
* @param initialDelay the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder initialDelay(long initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Milliseconds before the first poll starts.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 1000
* Group: scheduler
*
* @param initialDelay the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder initialDelay(String initialDelay) {
doSetProperty("initialDelay", initialDelay);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option is a: <code>long</code> type.
*
* Default: 0
* Group: scheduler
*
* @param repeatCount the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder repeatCount(long repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* Specifies a maximum limit of number of fires. So if you set it to 1,
* the scheduler will only fire once. If you set it to 5, it will only
* fire five times. A value of zero or negative means fire forever.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 0
* Group: scheduler
*
* @param repeatCount the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder repeatCount(String repeatCount) {
doSetProperty("repeatCount", repeatCount);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option is a: <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*
* @param runLoggingLevel the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder runLoggingLevel(org.apache.camel.LoggingLevel runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* The consumer logs a start/complete log line when it polls. This
* option allows you to configure the logging level for that.
*
* The option will be converted to a
* <code>org.apache.camel.LoggingLevel</code> type.
*
* Default: TRACE
* Group: scheduler
*
* @param runLoggingLevel the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder runLoggingLevel(String runLoggingLevel) {
doSetProperty("runLoggingLevel", runLoggingLevel);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option is a:
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*
* @param scheduledExecutorService the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder scheduledExecutorService(ScheduledExecutorService scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* Allows for configuring a custom/shared thread pool to use for the
* consumer. By default each consumer has its own single threaded thread
* pool.
*
* The option will be converted to a
* <code>java.util.concurrent.ScheduledExecutorService</code> type.
*
* Group: scheduler
*
* @param scheduledExecutorService the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder scheduledExecutorService(String scheduledExecutorService) {
doSetProperty("scheduledExecutorService", scheduledExecutorService);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option is a: <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*
* @param scheduler the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder scheduler(Object scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To use a cron scheduler from either camel-spring or camel-quartz
* component. Use value spring or quartz for built in scheduler.
*
* The option will be converted to a <code>java.lang.Object</code> type.
*
* Default: none
* Group: scheduler
*
* @param scheduler the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder scheduler(String scheduler) {
doSetProperty("scheduler", scheduler);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler. This is a multi-value
* option with prefix: scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*
* @param key the option key
* @param value the option value
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder schedulerProperties(String key, Object value) {
doSetMultiValueProperty("schedulerProperties", "scheduler." + key, value);
return this;
}
/**
* To configure additional properties when using a custom scheduler or
* any of the Quartz, Spring based scheduler. This is a multi-value
* option with prefix: scheduler.
*
* The option is a: <code>java.util.Map<java.lang.String,
* java.lang.Object></code> type.
* The option is multivalued, and you can use the
* schedulerProperties(String, Object) method to add a value (call the
* method multiple times to set more values).
*
* Group: scheduler
*
* @param values the values
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder schedulerProperties(Map values) {
doSetMultiValueProperties("schedulerProperties", "scheduler.", values);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param startScheduler the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder startScheduler(boolean startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Whether the scheduler should be auto started.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param startScheduler the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder startScheduler(String startScheduler) {
doSetProperty("startScheduler", startScheduler);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option is a: <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*
* @param timeUnit the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder timeUnit(TimeUnit timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Time unit for initialDelay and delay options.
*
* The option will be converted to a
* <code>java.util.concurrent.TimeUnit</code> type.
*
* Default: MILLISECONDS
* Group: scheduler
*
* @param timeUnit the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder timeUnit(String timeUnit) {
doSetProperty("timeUnit", timeUnit);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param useFixedDelay the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder useFixedDelay(boolean useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Controls if fixed delay or fixed rate is used. See
* ScheduledExecutorService in JDK for details.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: scheduler
*
* @param useFixedDelay the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder useFixedDelay(String useFixedDelay) {
doSetProperty("useFixedDelay", useFixedDelay);
return this;
}
/**
* Access key for the cloud user.
*
* The option is a: <code>java.lang.String</code> type.
*
* Required: true
* Group: security
*
* @param accessKey the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder accessKey(String accessKey) {
doSetProperty("accessKey", accessKey);
return this;
}
/**
* Ignore SSL verification.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param ignoreSslVerification the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder ignoreSslVerification(boolean ignoreSslVerification) {
doSetProperty("ignoreSslVerification", ignoreSslVerification);
return this;
}
/**
* Ignore SSL verification.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: security
*
* @param ignoreSslVerification the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder ignoreSslVerification(String ignoreSslVerification) {
doSetProperty("ignoreSslVerification", ignoreSslVerification);
return this;
}
/**
* Secret key for the cloud user.
*
* The option is a: <code>java.lang.String</code> type.
*
* Required: true
* Group: security
*
* @param secretKey the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder secretKey(String secretKey) {
doSetProperty("secretKey", secretKey);
return this;
}
/**
* Configuration object for cloud service authentication.
*
* The option is a:
* <code>org.apache.camel.component.huaweicloud.common.models.ServiceKeys</code> type.
*
* Group: security
*
* @param serviceKeys the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder serviceKeys(org.apache.camel.component.huaweicloud.common.models.ServiceKeys serviceKeys) {
doSetProperty("serviceKeys", serviceKeys);
return this;
}
/**
* Configuration object for cloud service authentication.
*
* The option will be converted to a
* <code>org.apache.camel.component.huaweicloud.common.models.ServiceKeys</code> type.
*
* Group: security
*
* @param serviceKeys the value to set
* @return the dsl builder
*/
default OBSEndpointConsumerBuilder serviceKeys(String serviceKeys) {
doSetProperty("serviceKeys", serviceKeys);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the Huawei Object Storage Service (OBS) component.
*/
public | OBSEndpointConsumerBuilder |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/style/SimpleValueStyler.java | {
"start": 2837,
"end": 4518
} | class ____ method stylers.
* @param classStyler a function that applies styling to a {@link Class}
* @param methodStyler a function that applies styling to a {@link Method}
*/
public SimpleValueStyler(Function<Class<?>, String> classStyler, Function<Method, String> methodStyler) {
this.classStyler = classStyler;
this.methodStyler = methodStyler;
}
@Override
protected String styleNull() {
return "null";
}
@Override
protected String styleString(String str) {
return "\"" + str + "\"";
}
@Override
protected String styleClass(Class<?> clazz) {
return this.classStyler.apply(clazz);
}
@Override
protected String styleMethod(Method method) {
return this.methodStyler.apply(method);
}
@Override
protected <K, V> String styleMap(Map<K, V> map) {
StringJoiner result = new StringJoiner(", ", "{", "}");
for (Map.Entry<K, V> entry : map.entrySet()) {
result.add(style(entry));
}
return result.toString();
}
@Override
protected String styleCollection(Collection<?> collection) {
StringJoiner result = new StringJoiner(", ", "[", "]");
for (Object element : collection) {
result.add(style(element));
}
return result.toString();
}
@Override
protected String styleArray(Object[] array) {
StringJoiner result = new StringJoiner(", ", "[", "]");
for (Object element : array) {
result.add(style(element));
}
return result.toString();
}
private static String toSimpleMethodSignature(Method method) {
String parameterList = Arrays.stream(method.getParameterTypes())
.map(Class::getSimpleName)
.collect(Collectors.joining(", "));
return String.format("%s(%s)", method.getName(), parameterList);
}
}
| and |
java | apache__flink | flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/operator/CEPMigrationTest.java | {
"start": 25666,
"end": 26450
} | class ____ implements NFACompiler.NFAFactory<Event> {
private static final long serialVersionUID = 1173020762472766713L;
private final boolean handleTimeout;
private SinglePatternNFAFactory() {
this(false);
}
private SinglePatternNFAFactory(boolean handleTimeout) {
this.handleTimeout = handleTimeout;
}
@Override
public NFA<Event> createNFA() {
Pattern<Event, ?> pattern =
Pattern.<Event>begin("start")
.where(new StartFilter())
.within(Duration.ofMillis(10L));
return NFACompiler.compileFactory(pattern, handleTimeout).createNFA();
}
}
private static | SinglePatternNFAFactory |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/support/TestPropertySourceUtilsTests.java | {
"start": 2758,
"end": 3260
} | class ____ {
private static final String[] EMPTY_STRING_ARRAY = new String[0];
private static final String[] KEY_VALUE_PAIR = {"key = value"};
private static final String[] FOO_LOCATIONS = {"classpath:/foo.properties"};
@Test
void emptyAnnotation() {
assertThatIllegalStateException()
.isThrownBy(() -> buildMergedTestPropertySources(EmptyPropertySources.class))
.withMessageContainingAll(
"Could not detect default properties file for test class",
" | TestPropertySourceUtilsTests |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/websocketx/extensions/WebSocketExtensionData.java | {
"start": 985,
"end": 1622
} | class ____ {
private final String name;
private final Map<String, String> parameters;
public WebSocketExtensionData(String name, Map<String, String> parameters) {
this.name = ObjectUtil.checkNotNull(name, "name");
this.parameters = Collections.unmodifiableMap(
ObjectUtil.checkNotNull(parameters, "parameters"));
}
/**
* @return the extension name.
*/
public String name() {
return name;
}
/**
* @return the extension optional parameters.
*/
public Map<String, String> parameters() {
return parameters;
}
}
| WebSocketExtensionData |
java | apache__camel | components/camel-avro-rpc/camel-avro-rpc-component/src/main/java/org/apache/camel/component/avro/AvroEndpoint.java | {
"start": 1587,
"end": 4734
} | class ____ extends DefaultEndpoint implements AsyncEndpoint {
@UriParam
private AvroConfiguration configuration;
protected AvroEndpoint(String endpointUri, Component component, AvroConfiguration configuration) {
super(endpointUri, component);
this.configuration = configuration;
}
@Override
public boolean isSingletonProducer() {
return false;
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
AvroConsumer consumer = new AvroConsumer(this, processor);
configureConsumer(consumer);
return consumer;
}
public AvroConfiguration getConfiguration() {
return configuration;
}
@Override
protected void doInit() throws Exception {
super.doInit();
validateConfiguration(configuration);
}
/**
* Validates configuration
*/
private void validateConfiguration(AvroConfiguration config) throws Exception {
if (config.getProtocol() == null && config.getProtocolClassName() != null) {
Class<?> protocolClass = getCamelContext().getClassResolver().resolveClass(config.getProtocolClassName());
if (protocolClass != null) {
try {
Field f = protocolClass.getField("PROTOCOL");
if (f != null) {
Protocol protocol = (Protocol) f.get(null);
config.setProtocol(protocol);
}
} catch (NoSuchFieldException e) {
ReflectData reflectData = ReflectData.get();
config.setProtocol(reflectData.getProtocol(protocolClass));
config.setReflectionProtocol(true);
}
}
}
if (config.getProtocol() == null) {
throw new IllegalArgumentException("Avro configuration does not contain protocol");
}
if (config.getMessageName() != null && !config.getProtocol().getMessages().containsKey(config.getMessageName())) {
throw new IllegalArgumentException("Message " + config.getMessageName() + " is not defined in protocol");
}
if (config.isSingleParameter()) {
Map<String, Protocol.Message> messageMap = config.getProtocol().getMessages();
Iterable<Protocol.Message> messagesToCheck = config.getMessageName() == null
? messageMap.values()
: Collections.singleton(messageMap.get(config.getMessageName()));
for (Protocol.Message message : messagesToCheck) {
if (message.getRequest().getFields().size() != 1) {
throw new IllegalArgumentException(
"Single parameter option can't be used with message "
+ message.getName() + " because it has "
+ message.getRequest().getFields().size()
+ " parameters defined");
}
}
}
}
}
| AvroEndpoint |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/GetAsyncResultRequest.java | {
"start": 632,
"end": 2976
} | class ____ extends LegacyActionRequest {
private final String id;
private TimeValue waitForCompletionTimeout = TimeValue.MINUS_ONE;
private TimeValue keepAlive = TimeValue.MINUS_ONE;
/**
* Creates a new request
*
* @param id The id of the search progress request.
*/
public GetAsyncResultRequest(String id) {
this.id = id;
}
public GetAsyncResultRequest(StreamInput in) throws IOException {
super(in);
this.id = in.readString();
this.waitForCompletionTimeout = TimeValue.timeValueMillis(in.readLong());
this.keepAlive = in.readTimeValue();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(id);
out.writeLong(waitForCompletionTimeout.millis());
out.writeTimeValue(keepAlive);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
/**
* Returns the id of the async search.
*/
public String getId() {
return id;
}
/**
* Sets the minimum time that the request should wait before returning a partial result (defaults to no wait).
*/
public GetAsyncResultRequest setWaitForCompletionTimeout(TimeValue timeValue) {
this.waitForCompletionTimeout = timeValue;
return this;
}
public TimeValue getWaitForCompletionTimeout() {
return waitForCompletionTimeout;
}
/**
* Extends the amount of time after which the result will expire (defaults to no extension).
*/
public GetAsyncResultRequest setKeepAlive(TimeValue timeValue) {
this.keepAlive = timeValue;
return this;
}
public TimeValue getKeepAlive() {
return keepAlive;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GetAsyncResultRequest request = (GetAsyncResultRequest) o;
return Objects.equals(id, request.id)
&& waitForCompletionTimeout.equals(request.waitForCompletionTimeout)
&& keepAlive.equals(request.keepAlive);
}
@Override
public int hashCode() {
return Objects.hash(id, waitForCompletionTimeout, keepAlive);
}
}
| GetAsyncResultRequest |
java | apache__camel | components/camel-servicenow/camel-servicenow-component/src/main/java/org/apache/camel/component/servicenow/releases/fuji/FujiServiceNowImportSetProcessor.java | {
"start": 1214,
"end": 3712
} | class ____ extends FujiServiceNowProcessor {
FujiServiceNowImportSetProcessor(ServiceNowEndpoint endpoint) throws Exception {
super(endpoint);
}
@Override
protected void doProcess(
Exchange exchange, Class<?> requestModel, Class<?> responseModel, String action, String apiVersion,
String tableName, String sysId)
throws Exception {
Response response;
if (ObjectHelper.equal(ServiceNowConstants.ACTION_RETRIEVE, action, true)) {
response = retrieveRecord(responseModel, apiVersion, tableName, sysId);
} else if (ObjectHelper.equal(ServiceNowConstants.ACTION_CREATE, action, true)) {
response = createRecord(exchange.getIn(), requestModel, responseModel, apiVersion, tableName);
} else {
throw new IllegalArgumentException("Unknown action " + action);
}
setBodyAndHeaders(exchange.getIn(), responseModel, response);
}
/*
* GET
* https://instance.service-now.com/api/now/import/{tableName}/{sys_id}
*/
private Response retrieveRecord(
Class<?> responseModel, String apiVersion, String tableName, String sysId)
throws Exception {
return client.reset()
.types(MediaType.APPLICATION_JSON_TYPE)
.path("now")
.path(apiVersion)
.path("import")
.path(tableName)
.path(ObjectHelper.notNull(sysId, "sysId"))
.query(responseModel)
.invoke(HttpMethod.GET);
}
/*
* POST
* https://instance.service-now.com/api/now/import/{tableName}
*/
private Response createRecord(
Message in, Class<?> requestModel, Class<?> responseModel, String apiVersion, String tableName)
throws Exception {
if (in.getHeader(ServiceNowConstants.RETRIEVE_TARGET_RECORD, config::getRetrieveTargetRecordOnImport, Boolean.class)) {
throw new UnsupportedOperationException("RetrieveTargetRecordOnImport is supported from Helsinky");
}
validateBody(in, requestModel);
return client.reset()
.types(MediaType.APPLICATION_JSON_TYPE)
.path("now")
.path(apiVersion)
.path("import")
.path(tableName)
.query(responseModel)
.invoke(HttpMethod.POST, in.getMandatoryBody());
}
}
| FujiServiceNowImportSetProcessor |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java | {
"start": 1714,
"end": 18522
} | class ____ implements LifecycleAction {
private static final Logger logger = LogManager.getLogger(ShrinkAction.class);
public static final String NAME = "shrink";
public static final ParseField NUMBER_OF_SHARDS_FIELD = new ParseField("number_of_shards");
public static final ParseField MAX_PRIMARY_SHARD_SIZE = new ParseField("max_primary_shard_size");
public static final ParseField ALLOW_WRITE_AFTER_SHRINK = new ParseField("allow_write_after_shrink");
public static final String CONDITIONAL_SKIP_SHRINK_STEP = BranchingStep.NAME + "-check-prerequisites";
public static final String CLEANUP_SHRINK_INDEX_STEP = "cleanup-shrink-index";
public static final String CONDITIONAL_DATASTREAM_CHECK_KEY = BranchingStep.NAME + "-on-datastream-check";
private static final ConstructingObjectParser<ShrinkAction, Void> PARSER = new ConstructingObjectParser<>(
NAME,
a -> new ShrinkAction((Integer) a[0], (ByteSizeValue) a[1], (a[2] != null && (Boolean) a[2]))
);
static {
PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUMBER_OF_SHARDS_FIELD);
PARSER.declareField(
ConstructingObjectParser.optionalConstructorArg(),
(p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MAX_PRIMARY_SHARD_SIZE.getPreferredName()),
MAX_PRIMARY_SHARD_SIZE,
ObjectParser.ValueType.STRING
);
PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ALLOW_WRITE_AFTER_SHRINK);
}
public static final Settings CLEAR_WRITE_BLOCK_SETTINGS = Settings.builder()
.put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), (String) null)
.build();
private final Integer numberOfShards;
private final ByteSizeValue maxPrimaryShardSize;
private final boolean allowWriteAfterShrink;
public static ShrinkAction parse(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
public ShrinkAction(@Nullable Integer numberOfShards, @Nullable ByteSizeValue maxPrimaryShardSize, boolean allowWriteAfterShrink) {
if (numberOfShards != null && maxPrimaryShardSize != null) {
throw new IllegalArgumentException("Cannot set both [number_of_shards] and [max_primary_shard_size]");
}
if (numberOfShards == null && maxPrimaryShardSize == null) {
throw new IllegalArgumentException("Either [number_of_shards] or [max_primary_shard_size] must be set");
}
if (maxPrimaryShardSize != null) {
if (maxPrimaryShardSize.getBytes() <= 0) {
throw new IllegalArgumentException("[max_primary_shard_size] must be greater than 0");
}
this.maxPrimaryShardSize = maxPrimaryShardSize;
this.numberOfShards = null;
} else {
if (numberOfShards <= 0) {
throw new IllegalArgumentException("[" + NUMBER_OF_SHARDS_FIELD.getPreferredName() + "] must be greater than 0");
}
this.numberOfShards = numberOfShards;
this.maxPrimaryShardSize = null;
}
this.allowWriteAfterShrink = allowWriteAfterShrink;
}
public ShrinkAction(StreamInput in) throws IOException {
if (in.readBoolean()) {
this.numberOfShards = in.readVInt();
this.maxPrimaryShardSize = null;
} else {
this.numberOfShards = null;
this.maxPrimaryShardSize = ByteSizeValue.readFrom(in);
}
this.allowWriteAfterShrink = in.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0) && in.readBoolean();
}
public Integer getNumberOfShards() {
return numberOfShards;
}
public ByteSizeValue getMaxPrimaryShardSize() {
return maxPrimaryShardSize;
}
public boolean getAllowWriteAfterShrink() {
return allowWriteAfterShrink;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
boolean hasNumberOfShards = numberOfShards != null;
out.writeBoolean(hasNumberOfShards);
if (hasNumberOfShards) {
out.writeVInt(numberOfShards);
} else {
maxPrimaryShardSize.writeTo(out);
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_14_0)) {
out.writeBoolean(this.allowWriteAfterShrink);
}
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (numberOfShards != null) {
builder.field(NUMBER_OF_SHARDS_FIELD.getPreferredName(), numberOfShards);
}
if (maxPrimaryShardSize != null) {
builder.field(MAX_PRIMARY_SHARD_SIZE.getPreferredName(), maxPrimaryShardSize);
}
builder.field(ALLOW_WRITE_AFTER_SHRINK.getPreferredName(), allowWriteAfterShrink);
builder.endObject();
return builder;
}
@Override
public boolean isSafeAction() {
return false;
}
@Override
public List<Step> toSteps(Client client, String phase, Step.StepKey nextStepKey) {
StepKey preShrinkBranchingKey = new StepKey(phase, NAME, CONDITIONAL_SKIP_SHRINK_STEP);
StepKey checkNotWriteIndex = new StepKey(phase, NAME, CheckNotDataStreamWriteIndexStep.NAME);
StepKey waitForNoFollowerStepKey = new StepKey(phase, NAME, WaitForNoFollowersStep.NAME);
StepKey waitTimeSeriesEndTimePassesKey = new StepKey(phase, NAME, WaitUntilTimeSeriesEndTimePassesStep.NAME);
StepKey readOnlyKey = new StepKey(phase, NAME, ReadOnlyAction.NAME);
StepKey checkTargetShardsCountKey = new StepKey(phase, NAME, CheckTargetShardsCountStep.NAME);
StepKey cleanupShrinkIndexKey = new StepKey(phase, NAME, CLEANUP_SHRINK_INDEX_STEP);
StepKey generateShrinkIndexNameKey = new StepKey(phase, NAME, GenerateUniqueIndexNameStep.NAME);
StepKey setSingleNodeKey = new StepKey(phase, NAME, SetSingleNodeAllocateStep.NAME);
StepKey allocationRoutedKey = new StepKey(phase, NAME, CheckShrinkReadyStep.NAME);
StepKey shrinkKey = new StepKey(phase, NAME, ResizeIndexStep.SHRINK);
StepKey enoughShardsKey = new StepKey(phase, NAME, ShrunkShardsAllocatedStep.NAME);
StepKey copyMetadataKey = new StepKey(phase, NAME, CopyExecutionStateStep.NAME);
StepKey dataStreamCheckBranchingKey = new StepKey(phase, NAME, CONDITIONAL_DATASTREAM_CHECK_KEY);
StepKey aliasKey = new StepKey(phase, NAME, ShrinkSetAliasStep.NAME);
StepKey isShrunkIndexKey = new StepKey(phase, NAME, ShrunkenIndexCheckStep.NAME);
StepKey replaceDataStreamIndexKey = new StepKey(phase, NAME, ReplaceDataStreamBackingIndexStep.NAME);
StepKey deleteIndexKey = new StepKey(phase, NAME, DeleteStep.NAME);
StepKey allowWriteKey = new StepKey(phase, NAME, UpdateSettingsStep.NAME);
StepKey lastOrNextStep = allowWriteAfterShrink ? allowWriteKey : nextStepKey;
AsyncBranchingStep conditionalSkipShrinkStep = new AsyncBranchingStep(
preShrinkBranchingKey,
checkNotWriteIndex,
lastOrNextStep,
(projectId, indexMetadata, listener) -> {
if (indexMetadata.getSettings().get(LifecycleSettings.SNAPSHOT_INDEX_NAME) != null) {
logger.warn(
"[{}] action is configured for index [{}] in policy [{}] which is mounted as searchable snapshot. "
+ "Skipping this action",
ShrinkAction.NAME,
indexMetadata.getIndex().getName(),
indexMetadata.getLifecyclePolicyName()
);
listener.onResponse(true);
return;
}
String indexName = indexMetadata.getIndex().getName();
client.projectClient(projectId)
.admin()
.indices()
.prepareStats(indexName)
.clear()
.setDocs(true)
.setStore(true)
.execute(listener.delegateFailure((delegateListener, indicesStatsResponse) -> {
int targetNumberOfShards = new ResizeNumberOfShardsCalculator.ShrinkShardsCalculator(
indicesStatsResponse.getPrimaries().store,
i -> {
IndexShardStats shard = indicesStatsResponse.getIndex(indexName).getIndexShards().get(i);
return shard == null ? null : shard.getPrimary().getDocs();
}
).calculate(numberOfShards, maxPrimaryShardSize, indexMetadata);
delegateListener.onResponse(indexMetadata.getNumberOfShards() == targetNumberOfShards);
}));
},
client
);
CheckNotDataStreamWriteIndexStep checkNotWriteIndexStep = new CheckNotDataStreamWriteIndexStep(
checkNotWriteIndex,
waitForNoFollowerStepKey
);
WaitForNoFollowersStep waitForNoFollowersStep = new WaitForNoFollowersStep(
waitForNoFollowerStepKey,
waitTimeSeriesEndTimePassesKey,
client
);
WaitUntilTimeSeriesEndTimePassesStep waitUntilTimeSeriesEndTimeStep = new WaitUntilTimeSeriesEndTimePassesStep(
waitTimeSeriesEndTimePassesKey,
readOnlyKey,
Instant::now
);
ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, checkTargetShardsCountKey, client, false);
CheckTargetShardsCountStep checkTargetShardsCountStep = new CheckTargetShardsCountStep(
checkTargetShardsCountKey,
cleanupShrinkIndexKey,
numberOfShards
);
// We generate a unique shrink index name but we also retry if the allocation of the shrunk index is not possible, so we want to
// delete the "previously generated" shrink index (this is a no-op if it's the first run of the action and we haven't generated a
// shrink index name)
CleanupGeneratedIndexStep cleanupShrinkIndexStep = new CleanupGeneratedIndexStep(
cleanupShrinkIndexKey,
generateShrinkIndexNameKey,
client,
ShrinkIndexNameSupplier::getShrinkIndexName
);
// generate a unique shrink index name and store it in the ILM execution state
GenerateUniqueIndexNameStep generateUniqueIndexNameStep = new GenerateUniqueIndexNameStep(
generateShrinkIndexNameKey,
setSingleNodeKey,
SHRUNKEN_INDEX_PREFIX,
(generatedIndexName, lifecycleStateBuilder) -> lifecycleStateBuilder.setShrinkIndexName(generatedIndexName)
);
// choose a node to collocate the source index in preparation for shrink
SetSingleNodeAllocateStep setSingleNodeStep = new SetSingleNodeAllocateStep(setSingleNodeKey, allocationRoutedKey, client);
// wait for the source shards to be collocated before attempting to shrink the index. we're waiting until a configured threshold is
// breached (controlled by LifecycleSettings.LIFECYCLE_STEP_WAIT_TIME_THRESHOLD) at which point we rewind to the
// "set-single-node-allocation" step to choose another node to host the shrink operation
ClusterStateWaitUntilThresholdStep checkShrinkReadyStep = new ClusterStateWaitUntilThresholdStep(
new CheckShrinkReadyStep(allocationRoutedKey, shrinkKey),
setSingleNodeKey
);
ResizeIndexStep shrink = new ResizeIndexStep(
shrinkKey,
enoughShardsKey,
client,
ResizeType.SHRINK,
ShrinkIndexNameSupplier::getShrinkIndexName,
indexMetadata -> {
Settings.Builder settingsBuilder = Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, indexMetadata.getNumberOfReplicas())
// We need to remove the single node allocation so replicas can be allocated.
.put(IndexMetadata.INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "_id", (String) null);
if (numberOfShards != null) {
settingsBuilder.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards);
}
return settingsBuilder.build();
},
maxPrimaryShardSize
);
// wait until the shrunk index is recovered. we again wait until the configured threshold is breached and if the shrunk index has
// not successfully recovered until then, we rewind to the "cleanup-shrink-index" step to delete this unsuccessful shrunk index
// and retry the operation by generating a new shrink index name and attempting to shrink again
ClusterStateWaitUntilThresholdStep allocated = new ClusterStateWaitUntilThresholdStep(
new ShrunkShardsAllocatedStep(enoughShardsKey, copyMetadataKey),
cleanupShrinkIndexKey
);
CopyExecutionStateStep copyMetadata = new CopyExecutionStateStep(
copyMetadataKey,
dataStreamCheckBranchingKey,
ShrinkIndexNameSupplier::getShrinkIndexName,
isShrunkIndexKey
);
// by the time we get to this step we have 2 indices, the source and the shrunken one. we now need to choose an index
// swapping strategy such that the shrunken index takes the place of the source index (which is also deleted).
// if the source index is part of a data stream it's a matter of replacing it with the shrunken index one in the data stream and
// then deleting the source index; otherwise we'll use the alias management api to atomically transfer the aliases from source to
// the shrunken index and delete the source
BranchingStep isDataStreamBranchingStep = new BranchingStep(
dataStreamCheckBranchingKey,
aliasKey,
replaceDataStreamIndexKey,
(index, project) -> {
IndexAbstraction indexAbstraction = project.getIndicesLookup().get(index.getName());
assert indexAbstraction != null : "invalid cluster metadata. index [" + index.getName() + "] was not found";
return indexAbstraction.getParentDataStream() != null;
}
);
ShrinkSetAliasStep aliasSwapAndDelete = new ShrinkSetAliasStep(aliasKey, isShrunkIndexKey, client);
ReplaceDataStreamBackingIndexStep replaceDataStreamBackingIndex = new ReplaceDataStreamBackingIndexStep(
replaceDataStreamIndexKey,
deleteIndexKey,
ShrinkIndexNameSupplier::getShrinkIndexName
);
DeleteStep deleteSourceIndexStep = new DeleteStep(deleteIndexKey, isShrunkIndexKey, client);
ShrunkenIndexCheckStep waitOnShrinkTakeover = new ShrunkenIndexCheckStep(isShrunkIndexKey, lastOrNextStep);
UpdateSettingsStep allowWriteAfterShrinkStep = allowWriteAfterShrink
? new UpdateSettingsStep(allowWriteKey, nextStepKey, client, CLEAR_WRITE_BLOCK_SETTINGS)
: null;
Stream<Step> steps = Stream.of(
conditionalSkipShrinkStep,
checkNotWriteIndexStep,
waitForNoFollowersStep,
waitUntilTimeSeriesEndTimeStep,
readOnlyStep,
checkTargetShardsCountStep,
cleanupShrinkIndexStep,
generateUniqueIndexNameStep,
setSingleNodeStep,
checkShrinkReadyStep,
shrink,
allocated,
copyMetadata,
isDataStreamBranchingStep,
aliasSwapAndDelete,
waitOnShrinkTakeover,
replaceDataStreamBackingIndex,
deleteSourceIndexStep,
allowWriteAfterShrinkStep
);
return steps.filter(Objects::nonNull).toList();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ShrinkAction that = (ShrinkAction) o;
return Objects.equals(numberOfShards, that.numberOfShards)
&& Objects.equals(maxPrimaryShardSize, that.maxPrimaryShardSize)
&& Objects.equals(allowWriteAfterShrink, that.allowWriteAfterShrink);
}
@Override
public int hashCode() {
return Objects.hash(numberOfShards, maxPrimaryShardSize, allowWriteAfterShrink);
}
@Override
public String toString() {
return Strings.toString(this);
}
}
| ShrinkAction |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotThrottlingIT.java | {
"start": 1715,
"end": 9585
} | class ____ extends AbstractSnapshotIntegTestCase {
private Tuple<Long, Long> testThrottledRepository(String maxSnapshotBytesPerSec, String maxRestoreBytesPerSec, boolean compressRepo) {
logger.info(
"--> testing throttled repository (maxSnapshotBytesPerSec=[{}], maxRestoreBytesPerSec=[{}], compressRepo=[{}])",
maxSnapshotBytesPerSec,
maxRestoreBytesPerSec,
compressRepo
);
createRepository(
"test-repo",
"fs",
Settings.builder()
.put("location", randomRepoPath())
.put("compress", compressRepo)
.put("chunk_size", randomIntBetween(1000, 4000), ByteSizeUnit.BYTES)
.put("max_snapshot_bytes_per_sec", maxSnapshotBytesPerSec)
.put("max_restore_bytes_per_sec", maxRestoreBytesPerSec)
);
createSnapshot("test-repo", "test-snap", Collections.singletonList("test-idx"));
RestoreSnapshotResponse restoreSnapshotResponse = clusterAdmin().prepareRestoreSnapshot(
TEST_REQUEST_TIMEOUT,
"test-repo",
"test-snap"
).setRenamePattern("test-").setRenameReplacement("test2-").setWaitForCompletion(true).execute().actionGet();
assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0));
assertDocCount("test-idx", 50L);
long snapshotPause = 0L;
long restorePause = 0L;
for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) {
final RepositoriesStats.SnapshotStats snapshotStats = repositoriesService.repository("test-repo").getSnapshotStats();
snapshotPause += snapshotStats.totalWriteThrottledNanos();
restorePause += snapshotStats.totalReadThrottledNanos();
}
cluster().wipeIndices("test2-idx");
logger.warn("--> tested throttled repository with snapshot pause [{}] and restore pause [{}]", snapshotPause, restorePause);
return new Tuple<>(snapshotPause, restorePause);
}
public void testThrottling() throws Exception {
boolean compressRepo = randomBoolean();
boolean throttleSnapshotViaRecovery = randomBoolean();
boolean throttleRestoreViaRecovery = throttleSnapshotViaRecovery || randomBoolean();
Settings.Builder primaryNodeSettings = Settings.builder()
.put(
INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(),
(throttleSnapshotViaRecovery || throttleRestoreViaRecovery) ? "25k" : "0"
);
if (throttleSnapshotViaRecovery) {
primaryNodeSettings = primaryNodeSettings.put(NODE_BANDWIDTH_RECOVERY_NETWORK_SETTING.getKey(), "25k")
.put(NODE_BANDWIDTH_RECOVERY_DISK_READ_SETTING.getKey(), "25k")
.put(NODE_BANDWIDTH_RECOVERY_DISK_WRITE_SETTING.getKey(), "25k");
}
final String primaryNode = internalCluster().startNode(primaryNodeSettings);
logger.info("--> create index");
createIndexWithRandomDocs("test-idx", 50);
long snapshotPauseViaRecovery = 0L;
long restorePauseViaRecovery = 0L;
// Throttle snapshot and/or restore only via recovery 25kb rate limit
if (throttleSnapshotViaRecovery || throttleRestoreViaRecovery) {
logger.info("--> testing throttling via recovery settings only");
Tuple<Long, Long> pauses = testThrottledRepository("0", "0", compressRepo);
snapshotPauseViaRecovery += pauses.v1();
restorePauseViaRecovery += pauses.v2();
if (throttleSnapshotViaRecovery) assertThat(snapshotPauseViaRecovery, greaterThan(0L));
if (throttleRestoreViaRecovery) assertThat(restorePauseViaRecovery, greaterThan(0L));
}
// Throttle snapshot and/or restore separately with 5kb rate limit, which is much less than half of the potential recovery rate
// limit. For this reason, we assert that the separately throttled speeds incur a pause time which is at least double of the
// pause time detected in the recovery-only throttling run above.
boolean throttleSnapshot = randomBoolean();
boolean throttleRestore = randomBoolean();
if (throttleSnapshot || throttleRestore) {
Tuple<Long, Long> pauses = testThrottledRepository(throttleSnapshot ? "5k" : "0", throttleRestore ? "5k" : "0", compressRepo);
long snapshotPause = pauses.v1();
long restorePause = pauses.v2();
if (throttleSnapshot) {
assertThat(snapshotPause, greaterThan(0L));
if (throttleSnapshotViaRecovery) assertThat(snapshotPause, greaterThan(snapshotPauseViaRecovery * 2));
}
if (throttleRestore) {
assertThat(restorePause, greaterThan(0L));
if (throttleRestoreViaRecovery) assertThat(restorePause, greaterThan(restorePauseViaRecovery * 2));
}
}
}
@TestLogging(
reason = "testing warning that speed is over recovery speed",
value = "org.elasticsearch.repositories.blobstore.BlobStoreRepository:WARN"
)
public void testWarningSpeedOverRecovery() throws Exception {
boolean nodeBandwidthSettingsSet = randomBoolean();
Settings.Builder primaryNodeSettings = Settings.builder().put(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey(), "100m");
if (nodeBandwidthSettingsSet) {
primaryNodeSettings = primaryNodeSettings.put(NODE_BANDWIDTH_RECOVERY_NETWORK_SETTING.getKey(), "100m")
.put(NODE_BANDWIDTH_RECOVERY_DISK_READ_SETTING.getKey(), "100m")
.put(NODE_BANDWIDTH_RECOVERY_DISK_WRITE_SETTING.getKey(), "100m");
}
final String primaryNode = internalCluster().startNode(primaryNodeSettings);
try (var mockLog = MockLog.capture(BlobStoreRepository.class)) {
MockLog.EventuallySeenEventExpectation snapshotExpectation = new MockLog.EventuallySeenEventExpectation(
"snapshot speed over recovery speed",
"org.elasticsearch.repositories.blobstore.BlobStoreRepository",
Level.WARN,
"repository [default/test-repo] has a rate limit [max_snapshot_bytes_per_sec=1gb] per second which is above "
+ "the effective recovery rate limit [indices.recovery.max_bytes_per_sec=100mb] per second, thus the repository "
+ "rate limit will be superseded by the recovery rate limit"
);
if (nodeBandwidthSettingsSet) snapshotExpectation.setExpectSeen();
mockLog.addExpectation(snapshotExpectation);
MockLog.SeenEventExpectation restoreExpectation = new MockLog.SeenEventExpectation(
"snapshot restore speed over recovery speed",
"org.elasticsearch.repositories.blobstore.BlobStoreRepository",
Level.WARN,
"repository [default/test-repo] has a rate limit [max_restore_bytes_per_sec=2gb] per second which is above "
+ "the effective recovery rate limit [indices.recovery.max_bytes_per_sec=100mb] per second, thus the repository "
+ "rate limit will be superseded by the recovery rate limit"
);
mockLog.addExpectation(restoreExpectation);
createRepository(
"test-repo",
"fs",
Settings.builder()
.put("location", randomRepoPath())
.put("max_snapshot_bytes_per_sec", "1g")
.put("max_restore_bytes_per_sec", "2g")
);
deleteRepository("test-repo");
mockLog.assertAllExpectationsMatched();
}
}
}
| SnapshotThrottlingIT |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/TahuHostComponentBuilderFactory.java | {
"start": 1393,
"end": 1913
} | interface ____ {
/**
* Tahu Host Application (camel-tahu)
* Sparkplug B Host Application support over MQTT using Eclipse Tahu
*
* Category: messaging,iot,monitoring
* Since: 4.8
* Maven coordinates: org.apache.camel:camel-tahu
*
* @return the dsl builder
*/
static TahuHostComponentBuilder tahuHost() {
return new TahuHostComponentBuilderImpl();
}
/**
* Builder for the Tahu Host Application component.
*/
| TahuHostComponentBuilderFactory |
java | elastic__elasticsearch | x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/operator/arithmetic/BinaryArithmeticOperation.java | {
"start": 488,
"end": 632
} | interface ____ extends PredicateBiFunction<Object, Object, Object>, NamedWriteable {
@Override
String symbol();
}
| BinaryArithmeticOperation |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/security/auth/DefaultKafkaPrincipalBuilderTest.java | {
"start": 1733,
"end": 8320
} | class ____ {
@Test
public void testReturnAnonymousPrincipalForPlaintext() throws Exception {
DefaultKafkaPrincipalBuilder builder = new DefaultKafkaPrincipalBuilder(null, null);
assertEquals(KafkaPrincipal.ANONYMOUS, builder.build(
new PlaintextAuthenticationContext(InetAddress.getLocalHost(), SecurityProtocol.PLAINTEXT.name())));
}
@Test
public void testUseSessionPeerPrincipalForSsl() throws Exception {
SSLSession session = mock(SSLSession.class);
when(session.getPeerPrincipal()).thenReturn(new DummyPrincipal("foo"));
DefaultKafkaPrincipalBuilder builder = new DefaultKafkaPrincipalBuilder(null, null);
KafkaPrincipal principal = builder.build(
new SslAuthenticationContext(session, InetAddress.getLocalHost(), SecurityProtocol.PLAINTEXT.name()));
assertEquals(KafkaPrincipal.USER_TYPE, principal.getPrincipalType());
assertEquals("foo", principal.getName());
verify(session, atLeastOnce()).getPeerPrincipal();
}
@Test
public void testPrincipalIfSSLPeerIsNotAuthenticated() throws Exception {
SSLSession session = mock(SSLSession.class);
when(session.getPeerPrincipal()).thenReturn(KafkaPrincipal.ANONYMOUS);
DefaultKafkaPrincipalBuilder builder = new DefaultKafkaPrincipalBuilder(null, null);
KafkaPrincipal principal = builder.build(
new SslAuthenticationContext(session, InetAddress.getLocalHost(), SecurityProtocol.PLAINTEXT.name()));
assertEquals(KafkaPrincipal.ANONYMOUS, principal);
verify(session, atLeastOnce()).getPeerPrincipal();
}
@Test
public void testPrincipalWithSslPrincipalMapper() throws Exception {
SSLSession session = mock(SSLSession.class);
when(session.getPeerPrincipal()).thenReturn(new X500Principal("CN=Duke, OU=ServiceUsers, O=Org, C=US"))
.thenReturn(new X500Principal("CN=Duke, OU=SME, O=mycp, L=Fulton, ST=MD, C=US"))
.thenReturn(new X500Principal("CN=duke, OU=JavaSoft, O=Sun Microsystems"))
.thenReturn(new X500Principal("OU=JavaSoft, O=Sun Microsystems, C=US"));
String rules = String.join(", ",
"RULE:^CN=(.*),OU=ServiceUsers.*$/$1/L",
"RULE:^CN=(.*),OU=(.*),O=(.*),L=(.*),ST=(.*),C=(.*)$/$1@$2/L",
"RULE:^.*[Cc][Nn]=([a-zA-Z0-9.]*).*$/$1/U",
"DEFAULT"
);
SslPrincipalMapper mapper = SslPrincipalMapper.fromRules(rules);
DefaultKafkaPrincipalBuilder builder = new DefaultKafkaPrincipalBuilder(null, mapper);
SslAuthenticationContext sslContext = new SslAuthenticationContext(session, InetAddress.getLocalHost(), SecurityProtocol.PLAINTEXT.name());
KafkaPrincipal principal = builder.build(sslContext);
assertEquals("duke", principal.getName());
principal = builder.build(sslContext);
assertEquals("duke@sme", principal.getName());
principal = builder.build(sslContext);
assertEquals("DUKE", principal.getName());
principal = builder.build(sslContext);
assertEquals("OU=JavaSoft,O=Sun Microsystems,C=US", principal.getName());
verify(session, times(4)).getPeerPrincipal();
}
@Test
public void testPrincipalBuilderScram() throws Exception {
SaslServer server = mock(SaslServer.class);
when(server.getMechanismName()).thenReturn(ScramMechanism.SCRAM_SHA_256.mechanismName());
when(server.getAuthorizationID()).thenReturn("foo");
DefaultKafkaPrincipalBuilder builder = new DefaultKafkaPrincipalBuilder(null, null);
KafkaPrincipal principal = builder.build(new SaslAuthenticationContext(server,
SecurityProtocol.SASL_PLAINTEXT, InetAddress.getLocalHost(), SecurityProtocol.SASL_PLAINTEXT.name()));
assertEquals(KafkaPrincipal.USER_TYPE, principal.getPrincipalType());
assertEquals("foo", principal.getName());
verify(server, atLeastOnce()).getMechanismName();
verify(server, atLeastOnce()).getAuthorizationID();
}
@Test
public void testPrincipalBuilderGssapi() throws Exception {
SaslServer server = mock(SaslServer.class);
KerberosShortNamer kerberosShortNamer = mock(KerberosShortNamer.class);
when(server.getMechanismName()).thenReturn(SaslConfigs.GSSAPI_MECHANISM);
when(server.getAuthorizationID()).thenReturn("foo/host@REALM.COM");
when(kerberosShortNamer.shortName(any())).thenReturn("foo");
DefaultKafkaPrincipalBuilder builder = new DefaultKafkaPrincipalBuilder(kerberosShortNamer, null);
KafkaPrincipal principal = builder.build(new SaslAuthenticationContext(server,
SecurityProtocol.SASL_PLAINTEXT, InetAddress.getLocalHost(), SecurityProtocol.SASL_PLAINTEXT.name()));
assertEquals(KafkaPrincipal.USER_TYPE, principal.getPrincipalType());
assertEquals("foo", principal.getName());
verify(server, atLeastOnce()).getMechanismName();
verify(server, atLeastOnce()).getAuthorizationID();
verify(kerberosShortNamer, atLeastOnce()).shortName(any());
}
@Test
public void testPrincipalBuilderSerde() throws Exception {
SaslServer server = mock(SaslServer.class);
KerberosShortNamer kerberosShortNamer = mock(KerberosShortNamer.class);
when(server.getMechanismName()).thenReturn(SaslConfigs.GSSAPI_MECHANISM);
when(server.getAuthorizationID()).thenReturn("foo/host@REALM.COM");
when(kerberosShortNamer.shortName(any())).thenReturn("foo");
DefaultKafkaPrincipalBuilder builder = new DefaultKafkaPrincipalBuilder(kerberosShortNamer, null);
KafkaPrincipal principal = builder.build(new SaslAuthenticationContext(server,
SecurityProtocol.SASL_PLAINTEXT, InetAddress.getLocalHost(), SecurityProtocol.SASL_PLAINTEXT.name()));
assertEquals(KafkaPrincipal.USER_TYPE, principal.getPrincipalType());
assertEquals("foo", principal.getName());
byte[] serializedPrincipal = builder.serialize(principal);
KafkaPrincipal deserializedPrincipal = builder.deserialize(serializedPrincipal);
assertEquals(principal, deserializedPrincipal);
verify(server, atLeastOnce()).getMechanismName();
verify(server, atLeastOnce()).getAuthorizationID();
verify(kerberosShortNamer, atLeastOnce()).shortName(any());
}
private static | DefaultKafkaPrincipalBuilderTest |
java | apache__kafka | tools/src/main/java/org/apache/kafka/tools/VerifiableConsumer.java | {
"start": 4535,
"end": 11535
} | class ____ implements Closeable, OffsetCommitCallback, ConsumerRebalanceListener {
private static final Logger log = LoggerFactory.getLogger(VerifiableConsumer.class);
private final ObjectMapper mapper = new ObjectMapper();
private final PrintStream out;
private final KafkaConsumer<String, String> consumer;
private final String topic;
private final boolean useAutoCommit;
private final boolean useAsyncCommit;
private final boolean verbose;
private final int maxMessages;
private final CountDownLatch shutdownLatch = new CountDownLatch(1);
private int consumedMessages = 0;
public VerifiableConsumer(KafkaConsumer<String, String> consumer,
PrintStream out,
String topic,
int maxMessages,
boolean useAutoCommit,
boolean useAsyncCommit,
boolean verbose) {
this.consumer = consumer;
this.out = out;
this.topic = topic;
this.maxMessages = maxMessages;
this.useAutoCommit = useAutoCommit;
this.useAsyncCommit = useAsyncCommit;
this.verbose = verbose;
addKafkaSerializerModule();
}
private void addKafkaSerializerModule() {
SimpleModule kafka = new SimpleModule();
kafka.addSerializer(TopicPartition.class, new JsonSerializer<TopicPartition>() {
@Override
public void serialize(TopicPartition tp, JsonGenerator gen, SerializerProvider serializers) throws IOException {
gen.writeStartObject();
gen.writeObjectField("topic", tp.topic());
gen.writeObjectField("partition", tp.partition());
gen.writeEndObject();
}
});
mapper.registerModule(kafka);
}
private boolean hasMessageLimit() {
return maxMessages >= 0;
}
private boolean isFinished() {
return hasMessageLimit() && consumedMessages >= maxMessages;
}
private Map<TopicPartition, OffsetAndMetadata> onRecordsReceived(ConsumerRecords<String, String> records) {
Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
List<RecordSetSummary> summaries = new ArrayList<>();
for (TopicPartition tp : records.partitions()) {
List<ConsumerRecord<String, String>> partitionRecords = records.records(tp);
if (hasMessageLimit() && consumedMessages + partitionRecords.size() > maxMessages)
partitionRecords = partitionRecords.subList(0, maxMessages - consumedMessages);
if (partitionRecords.isEmpty())
continue;
long minOffset = partitionRecords.get(0).offset();
long maxOffset = partitionRecords.get(partitionRecords.size() - 1).offset();
offsets.put(tp, new OffsetAndMetadata(maxOffset + 1));
summaries.add(new RecordSetSummary(tp.topic(), tp.partition(),
partitionRecords.size(), minOffset, maxOffset));
if (verbose) {
for (ConsumerRecord<String, String> record : partitionRecords) {
printJson(new RecordData(record));
}
}
consumedMessages += partitionRecords.size();
if (isFinished())
break;
}
printJson(new RecordsConsumed(records.count(), summaries));
return offsets;
}
@Override
public void onComplete(Map<TopicPartition, OffsetAndMetadata> offsets, Exception exception) {
List<CommitData> committedOffsets = new ArrayList<>();
for (Map.Entry<TopicPartition, OffsetAndMetadata> offsetEntry : offsets.entrySet()) {
TopicPartition tp = offsetEntry.getKey();
committedOffsets.add(new CommitData(tp.topic(), tp.partition(), offsetEntry.getValue().offset()));
}
boolean success = true;
String error = null;
if (exception != null) {
success = false;
error = exception.getMessage();
}
printJson(new OffsetsCommitted(committedOffsets, error, success));
}
@Override
public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
printJson(new PartitionsAssigned(partitions));
}
@Override
public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
printJson(new PartitionsRevoked(partitions));
}
private void printJson(Object data) {
try {
out.println(mapper.writeValueAsString(data));
} catch (JsonProcessingException e) {
out.println("Bad data can't be written as json: " + e.getMessage());
}
}
public void commitSync(Map<TopicPartition, OffsetAndMetadata> offsets) {
try {
consumer.commitSync(offsets);
onComplete(offsets, null);
} catch (WakeupException e) {
// we only call wakeup() once to close the consumer, so this recursion should be safe
commitSync(offsets);
throw e;
} catch (FencedInstanceIdException e) {
throw e;
} catch (Exception e) {
onComplete(offsets, e);
}
}
public void run() {
try {
printJson(new StartupComplete());
consumer.subscribe(List.of(topic), this);
while (!isFinished()) {
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(Long.MAX_VALUE));
Map<TopicPartition, OffsetAndMetadata> offsets = onRecordsReceived(records);
if (!useAutoCommit) {
if (useAsyncCommit)
consumer.commitAsync(offsets, this);
else
commitSync(offsets);
}
}
} catch (WakeupException e) {
// ignore, we are closing
log.trace("Caught WakeupException because consumer is shutdown, ignore and terminate.", e);
} catch (Throwable t) {
// Log the error so it goes to the service log and not stdout
log.error("Error during processing, terminating consumer process: ", t);
} finally {
consumer.close();
printJson(new ShutdownComplete());
shutdownLatch.countDown();
}
}
public void close() {
boolean interrupted = false;
try {
consumer.wakeup();
while (true) {
try {
shutdownLatch.await();
return;
} catch (InterruptedException e) {
interrupted = true;
}
}
} finally {
if (interrupted)
Thread.currentThread().interrupt();
}
}
@JsonPropertyOrder({ "timestamp", "name" })
private abstract static | VerifiableConsumer |
java | apache__logging-log4j2 | log4j-perf-test/src/main/java/org/apache/logging/log4j/perf/jmh/ThreadsafeDateFormatBenchmark.java | {
"start": 1589,
"end": 2638
} | class ____ {
private final Date date = new Date();
private final DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss.SSS");
private final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("HH:mm:ss.SSS");
private final ThreadLocal<SimpleDateFormat> threadLocalSDFormat = new ThreadLocal<>() {
@Override
protected SimpleDateFormat initialValue() {
return new SimpleDateFormat("HH:mm:ss.SSS");
}
};
private final ThreadLocal<FormatterSimple> threadLocalCachedSDFormat = new ThreadLocal<>() {
@Override
protected FormatterSimple initialValue() {
return new FormatterSimple();
}
};
private final FastDateFormat fastDateFormat = FastDateFormat.getInstance("HH:mm:ss.SSS");
private final FixedDateFormat fixedDateFormat = FixedDateFormat.createIfSupported("HH:mm:ss.SSS");
private final FormatterFixedReuseBuffer formatFixedReuseBuffer = new FormatterFixedReuseBuffer();
private | ThreadsafeDateFormatBenchmark |
java | processing__processing4 | build/macos/appbundler/src/com/oracle/appbundler/AppBundlerTask.java | {
"start": 2265,
"end": 13232
} | class ____ extends Task {
// Output folder for generated bundle
private File outputDirectory = null;
// General bundle properties
private String name = null;
private String displayName = null;
private String identifier = null;
private File icon = null;
private String executableName = EXECUTABLE_NAME;
private String shortVersion = "1.0";
private String version = "1.0";
private String signature = "????";
private String copyright = "";
private String privileged = null;
private String workingDirectory = null;
private String minimumSystemVersion = "10.7";
private boolean requiresAquaAppearance = false;
private String jvmRequired = null;
private boolean jrePreferred = false;
private boolean jdkPreferred = false;
private String applicationCategory = null;
private boolean highResolutionCapable = true;
private boolean supportsAutomaticGraphicsSwitching = true;
private boolean hideDockIcon = false;
private boolean isDebug = false;
private boolean ignorePSN = false;
// JVM info properties
private String mainClassName = null;
private String jnlpLauncherName = null;
private String jarLauncherName = null;
private Runtime runtime = null;
private JLink jlink = null;
private ArrayList<FileSet> classPath = new ArrayList<>();
private ArrayList<FileSet> libraryPath = new ArrayList<>();
private ArrayList<Option> options = new ArrayList<>();
private ArrayList<String> arguments = new ArrayList<>();
private ArrayList<String> architectures = new ArrayList<>();
private ArrayList<String> registeredProtocols = new ArrayList<>();
private ArrayList<BundleDocument> bundleDocuments = new ArrayList<>();
private ArrayList<TypeDeclaration> exportedTypeDeclarations = new ArrayList<>();
private ArrayList<TypeDeclaration> importedTypeDeclarations = new ArrayList<>();
private ArrayList<PlistEntry> plistEntries = new ArrayList<>();
private ArrayList<Environment> environments = new ArrayList<>();
private Reference classPathRef;
private ArrayList<String> plistClassPaths = new ArrayList<>();
private static final String EXECUTABLE_NAME = "JavaAppLauncher";
private static final String DEFAULT_ICON_NAME = "GenericApp.icns";
private static final String OS_TYPE_CODE = "APPL";
private static final String PLIST_DTD = "<!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">";
private static final String PLIST_TAG = "plist";
private static final String PLIST_VERSION_ATTRIBUTE = "version";
private static final String DICT_TAG = "dict";
private static final String KEY_TAG = "key";
private static final String ARRAY_TAG = "array";
private static final String STRING_TAG = "string";
private static final int BUFFER_SIZE = 2048;
public void setOutputDirectory(File outputDirectory) {
this.outputDirectory = outputDirectory;
}
public void setName(String name) {
this.name = name;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
public void setIcon(File icon) {
this.icon = icon;
}
public void setExecutableName(String executable) {
this.executableName = executable;
}
public void setShortVersion(String shortVersion) {
this.shortVersion = shortVersion;
}
public void setVersion(String version) {
this.version = version;
}
public void setSignature(String signature) {
this.signature = signature;
}
public void setCopyright(String copyright) {
this.copyright = copyright;
}
public void setPrivileged(String privileged) {
this.privileged = privileged;
}
public void setWorkingDirectory(String workingDirectory) {
this.workingDirectory = workingDirectory;
}
public void setJVMRequired(String v){
this.jvmRequired = v;
}
public void setJREPreferred(boolean preferred){
this.jrePreferred = preferred;
}
public void setJDKPreferred(boolean preferred){
this.jdkPreferred = preferred;
}
public void setMinimumSystemVersion(String v){
this.minimumSystemVersion = v;
}
public void setApplicationCategory(String applicationCategory) {
this.applicationCategory = applicationCategory;
}
public void setHighResolutionCapable(boolean highResolutionCapable) {
this.highResolutionCapable = highResolutionCapable;
}
public void setHideDockIcon(boolean hideDock) {
this.hideDockIcon = hideDock;
}
public void setDebug(boolean enabled) {
this.isDebug = enabled;
}
public void setSupportsAutomaticGraphicsSwitching(boolean supportsAutomaticGraphicsSwitching) {
this.supportsAutomaticGraphicsSwitching = supportsAutomaticGraphicsSwitching;
}
public void setIgnorePSN(boolean ignorePSN) {
this.ignorePSN = ignorePSN;
}
public void setMainClassName(String mainClassName) {
this.mainClassName = mainClassName;
}
public void setJnlpLauncherName(String jnlpLauncherName) {
this.jnlpLauncherName = jnlpLauncherName;
}
public void setJarLauncherName(String jarLauncherName) {
this.jarLauncherName = jarLauncherName;
}
public void addConfiguredRuntime(Runtime runtime) throws BuildException {
if (this.runtime != null) {
throw new BuildException("Runtime already specified.");
}
if (this.jlink != null) {
throw new BuildException("Cannot specify runtime and jlink together.");
}
this.runtime = runtime;
}
public void addConfiguredJLink(JLink jlink) throws BuildException {
if (this.jlink != null) {
throw new BuildException("JLink already specified.");
}
if (this.runtime != null) {
throw new BuildException("Cannot specify runtime and jlink together.");
}
jlink.setTask(this);
this.jlink = jlink;
}
public void setClasspathRef(Reference ref) {
this.classPathRef = ref;
}
public void setPlistClassPaths(String plistClassPaths) {
for (String tok : plistClassPaths.split("\\s*,\\s*")) {
this.plistClassPaths.add(tok);
}
}
public void addConfiguredClassPath(FileSet classPath) {
this.classPath.add(classPath);
}
public void addConfiguredLibraryPath(FileSet libraryPath) {
this.libraryPath.add(libraryPath);
}
public void addConfiguredBundleDocument(BundleDocument document) {
if ((document.getContentTypes() == null) && (document.getExtensions() == null)) {
throw new BuildException("Document content type or extension is required.");
}
this.bundleDocuments.add(document);
}
public void addConfiguredTypeDeclaration(TypeDeclaration typeDeclaration) {
if (typeDeclaration.getIdentifier() == null) {
throw new BuildException("Type declarations must have an identifier.");
}
if (typeDeclaration.isImported()) {
this.importedTypeDeclarations.add(typeDeclaration);
} else {
this.exportedTypeDeclarations.add(typeDeclaration);
}
}
public void addConfiguredPlistEntry(PlistEntry plistEntry) {
if (plistEntry.getKey() == null) {
throw new BuildException("Name is required.");
}
if (plistEntry.getValue() == null) {
throw new BuildException("Value is required.");
}
if (plistEntry.getType() == null) {
plistEntry.setType(STRING_TAG);
}
this.plistEntries.add(plistEntry);
}
public void addConfiguredEnvironment(Environment environment) {
if (environment.getName() == null) {
throw new BuildException("Name is required.");
}
if (environment.getValue() == null) {
throw new BuildException("Value is required.");
}
this.environments.add(environment);
}
public void addConfiguredOption(Option option) throws BuildException {
String value = option.getValue();
if (value == null) {
throw new BuildException("Value is required.");
}
options.add(option);
}
public void addConfiguredArgument(Argument argument) throws BuildException {
String value = argument.getValue();
if (value == null) {
throw new BuildException("Value is required.");
}
arguments.add(value);
}
public void addConfiguredScheme(Argument argument) throws BuildException {
String value = argument.getValue();
if (value == null) {
throw new BuildException("Value is required.");
}
this.registeredProtocols.add(value);
}
public void addConfiguredArch(Architecture architecture) throws BuildException {
String name = architecture.getName();
if (name == null) {
throw new BuildException("Name is required.");
}
architectures.add(name);
}
@Override
public void execute() throws BuildException {
// Validate required properties
if (outputDirectory == null) {
throw new IllegalStateException("Output directory is required.");
}
if (!outputDirectory.exists()) {
throw new IllegalStateException("Output directory does not exist.");
}
if (!outputDirectory.isDirectory()) {
throw new IllegalStateException("Invalid output directory.");
}
if (name == null) {
throw new IllegalStateException("Name is required.");
}
if (displayName == null) {
throw new IllegalStateException("Display name is required.");
}
if (identifier == null) {
throw new IllegalStateException("Identifier is required.");
}
if (icon != null) {
if (!icon.exists()) {
throw new IllegalStateException("Icon does not exist.");
}
if (icon.isDirectory()) {
throw new IllegalStateException("Invalid icon.");
}
}
if (shortVersion == null) {
throw new IllegalStateException("Short version is required.");
}
if (signature == null || signature.trim().length() != 4) {
throw new IllegalStateException("Invalid or missing signature.");
}
if (copyright == null) {
throw new IllegalStateException("Copyright is required.");
}
if (jnlpLauncherName == null && mainClassName == null) {
throw new IllegalStateException("Main | AppBundlerTask |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/StreamingResponseBodyReturnValueHandlerTests.java | {
"start": 1841,
"end": 5734
} | class ____ {
private StreamingResponseBodyReturnValueHandler handler;
private ModelAndViewContainer mavContainer;
private NativeWebRequest webRequest;
private MockHttpServletRequest request;
private MockHttpServletResponse response;
@BeforeEach
void setup() throws Exception {
this.handler = new StreamingResponseBodyReturnValueHandler();
this.mavContainer = new ModelAndViewContainer();
this.request = new MockHttpServletRequest("GET", "/path");
this.response = new MockHttpServletResponse();
this.webRequest = new ServletWebRequest(this.request, this.response);
AsyncWebRequest asyncWebRequest = new StandardServletAsyncWebRequest(this.request, this.response);
WebAsyncUtils.getAsyncManager(this.webRequest).setAsyncWebRequest(asyncWebRequest);
this.request.setAsyncSupported(true);
}
@Test
void supportsReturnType() throws Exception {
assertThat(this.handler.supportsReturnType(returnType(TestController.class, "handle"))).isTrue();
assertThat(this.handler.supportsReturnType(returnType(TestController.class, "handleResponseEntity"))).isTrue();
assertThat(this.handler.supportsReturnType(returnType(TestController.class, "handleResponseEntityString"))).isFalse();
assertThat(this.handler.supportsReturnType(returnType(TestController.class, "handleResponseEntityParameterized"))).isFalse();
}
@Test
void streamingResponseBody() throws Exception {
CountDownLatch latch = new CountDownLatch(1);
MethodParameter returnType = returnType(TestController.class, "handle");
StreamingResponseBody streamingBody = outputStream -> {
outputStream.write("foo".getBytes(StandardCharsets.UTF_8));
latch.countDown();
};
this.handler.handleReturnValue(streamingBody, returnType, this.mavContainer, this.webRequest);
assertThat(this.request.isAsyncStarted()).isTrue();
assertThat(latch.await(5, TimeUnit.SECONDS)).isTrue();
assertThat(this.response.getContentAsString()).isEqualTo("foo");
}
@Test
void responseEntity() throws Exception {
CountDownLatch latch = new CountDownLatch(1);
MethodParameter returnType = returnType(TestController.class, "handleResponseEntity");
ResponseEntity<StreamingResponseBody> emitter = ResponseEntity.ok().header("foo", "bar")
.body(outputStream -> {
outputStream.write("foo".getBytes(StandardCharsets.UTF_8));
latch.countDown();
});
this.handler.handleReturnValue(emitter, returnType, this.mavContainer, this.webRequest);
assertThat(this.request.isAsyncStarted()).isTrue();
assertThat(this.response.getStatus()).isEqualTo(200);
assertThat(this.response.getHeader("foo")).isEqualTo("bar");
assertThat(latch.await(5, TimeUnit.SECONDS)).isTrue();
assertThat(this.response.getContentAsString()).isEqualTo("foo");
}
@Test
void responseEntityNoContent() throws Exception {
MethodParameter returnType = returnType(TestController.class, "handleResponseEntity");
ResponseEntity<?> emitter = ResponseEntity.noContent().build();
this.handler.handleReturnValue(emitter, returnType, this.mavContainer, this.webRequest);
assertThat(this.request.isAsyncStarted()).isFalse();
assertThat(this.response.getStatus()).isEqualTo(204);
}
@Test
void responseEntityWithHeadersAndNoContent() throws Exception {
ResponseEntity<?> emitter = ResponseEntity.noContent().header("foo", "bar").build();
MethodParameter returnType = returnType(TestController.class, "handleResponseEntity");
this.handler.handleReturnValue(emitter, returnType, this.mavContainer, this.webRequest);
assertThat(this.response.getHeaders("foo")).isEqualTo(Collections.singletonList("bar"));
}
private MethodParameter returnType(Class<?> clazz, String methodName) throws NoSuchMethodException {
Method method = clazz.getDeclaredMethod(methodName);
return new MethodParameter(method, -1);
}
@SuppressWarnings("unused")
private static | StreamingResponseBodyReturnValueHandlerTests |
java | apache__flink | flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableApiTestStep.java | {
"start": 1385,
"end": 3925
} | class ____ implements TestStep {
private final Function<TableEnvAccessor, Table> tableQuery;
private final String sinkName;
TableApiTestStep(Function<TableEnvAccessor, Table> tableQuery, String sinkName) {
this.tableQuery = tableQuery;
this.sinkName = sinkName;
}
@Override
public TestKind getKind() {
return TestKind.TABLE_API;
}
public Table toTable(TableEnvironment env) {
return tableQuery.apply(
new TableEnvAccessor() {
@Override
public Table from(String path) {
return env.from(path);
}
@Override
public Table fromCall(String path, Object... arguments) {
return env.fromCall(path, arguments);
}
@Override
public Table fromCall(
Class<? extends UserDefinedFunction> function, Object... arguments) {
return env.fromCall(function, arguments);
}
@Override
public Table fromValues(Object... values) {
return env.fromValues(values);
}
@Override
public Table fromValues(AbstractDataType<?> dataType, Object... values) {
return env.fromValues(dataType, values);
}
@Override
public Table sqlQuery(String query) {
return env.sqlQuery(query);
}
@Override
public Model fromModel(String modelPath) {
return env.fromModel(modelPath);
}
@Override
public Model from(ModelDescriptor modelDescriptor) {
return env.fromModel(modelDescriptor);
}
});
}
public TableResult apply(TableEnvironment env) {
final Table table = toTable(env);
return table.executeInsert(sinkName);
}
public TableResult applyAsSql(TableEnvironment env) {
final Table table = toTable(env);
final String query =
table.getQueryOperation().asSerializableString(DefaultSqlFactory.INSTANCE);
return env.executeSql(String.format("INSERT INTO %s %s", sinkName, query));
}
/**
* An | TableApiTestStep |
java | square__moshi | moshi/src/test/java/com/squareup/moshi/JsonQualifiersTest.java | {
"start": 2417,
"end": 2967
} | class ____ {
@ToJson
void fooPrefixStringToString(JsonWriter jsonWriter, @FooPrefix String s) throws IOException {
jsonWriter.value("foo" + s);
}
@FromJson
@FooPrefix
String fooPrefixStringFromString(JsonReader reader) throws Exception {
String s = reader.nextString();
if (!s.startsWith("foo")) throw new JsonDataException();
return s.substring(3);
}
}
/** Fields with this annotation get "foo" as a prefix in the JSON. */
@Retention(RUNTIME)
@JsonQualifier
public @ | ReaderWriterJsonAdapter |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java | {
"start": 138300,
"end": 138674
} | class ____ {
@RequestMapping(method = RequestMethod.GET)
public void noParams(Writer writer) throws IOException {
writer.write("noParams");
}
@RequestMapping(params = "myParam")
public void param(@RequestParam("myParam") int myParam, Writer writer) throws IOException {
writer.write("myParam-" + myParam);
}
}
@Controller
static | AmbiguousParamsController |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesFormatTests.java | {
"start": 4646,
"end": 94855
} | class ____ extends ES819TSDBDocValuesFormat {
public TestES819TSDBDocValuesFormatVersion0() {
super();
}
@Override
public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException {
return new ES819TSDBDocValuesConsumerVersion0(
state,
skipIndexIntervalSize,
minDocsPerOrdinalForRangeEncoding,
enableOptimizedMerge,
DATA_CODEC,
DATA_EXTENSION,
META_CODEC,
META_EXTENSION,
NUMERIC_BLOCK_SHIFT
);
}
}
@Override
protected Codec getCodec() {
return codec;
}
public void testBinaryCompressionEnabled() {
ES819TSDBDocValuesFormat docValueFormat = new ES819TSDBDocValuesFormat();
assertThat(docValueFormat.binaryDVCompressionMode, equalTo(BinaryDVCompressionMode.COMPRESSED_ZSTD_LEVEL_1));
}
public void testBlockWiseBinary() throws Exception {
boolean sparse = randomBoolean();
int numBlocksBound = 10;
// Since average size is 25b will hit count threshold rather than size threshold, so use count threshold compute needed docs.
int numNonNullValues = randomIntBetween(0, numBlocksBound * BLOCK_COUNT_THRESHOLD);
List<String> binaryValues = new ArrayList<>();
int numNonNull = 0;
while (numNonNull < numNonNullValues) {
if (sparse && randomBoolean()) {
binaryValues.add(null);
} else {
// Average
final String value = randomAlphaOfLengthBetween(0, 50);
binaryValues.add(value);
numNonNull++;
}
}
assertBinaryValues(binaryValues);
}
public void testBlockWiseBinarySmallValues() throws Exception {
boolean sparse = randomBoolean();
int numBlocksBound = 5;
int numNonNullValues = randomIntBetween(0, numBlocksBound * BLOCK_COUNT_THRESHOLD);
List<String> binaryValues = new ArrayList<>();
int numNonNull = 0;
while (numNonNull < numNonNullValues) {
if (sparse && randomBoolean()) {
binaryValues.add(null);
} else {
final String value = randomAlphaOfLengthBetween(0, 2);
binaryValues.add(value);
numNonNull++;
}
}
assertBinaryValues(binaryValues);
}
public void testBlockWiseBinaryLargeValues() throws Exception {
boolean sparse = randomBoolean();
int numBlocksBound = 5;
int binaryDataSize = randomIntBetween(0, numBlocksBound * BLOCK_BYTES_THRESHOLD);
List<String> binaryValues = new ArrayList<>();
int totalSize = 0;
while (totalSize < binaryDataSize) {
if (sparse && randomBoolean()) {
binaryValues.add(null);
} else {
final String value = randomAlphaOfLengthBetween(BLOCK_BYTES_THRESHOLD / 2, 2 * BLOCK_BYTES_THRESHOLD);
binaryValues.add(value);
totalSize += value.length();
}
}
assertBinaryValues(binaryValues);
}
/**
 * Indexes the given values (a {@code null} entry means "document without a value") into a
 * single-host time-series index, force-merges to one segment, and verifies the binary doc
 * values read back per document.
 * <p>
 * NOTE(review): the list is drained via {@code removeLast()} while visiting docids in
 * increasing order — this assumes the index sort reverses insertion order (descending
 * timestamp); confirm against {@code getTimeSeriesIndexWriterConfig}. The caller's list is
 * consumed by this method.
 */
public void assertBinaryValues(List<String> binaryValues) throws Exception {
    String timestampField = "@timestamp";
    String hostnameField = "host.name";
    long baseTimestamp = 1704067200000L;
    String binaryField = "binary_field";
    var config = getTimeSeriesIndexWriterConfig(hostnameField, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        int numDocs = binaryValues.size();
        for (int i = 0; i < numDocs; i++) {
            var d = new Document();
            long timestamp = baseTimestamp + (1000L * i);
            d.add(new SortedDocValuesField(hostnameField, new BytesRef("host-1")));
            d.add(new SortedNumericDocValuesField(timestampField, timestamp));
            String binaryValue = binaryValues.get(i);
            if (binaryValue != null) {
                d.add(new BinaryDocValuesField(binaryField, new BytesRef(binaryValue)));
            }
            iw.addDocument(d);
            // Commit periodically so the force merge below actually merges multiple segments:
            if (i % 100 == 0) {
                iw.commit();
            }
        }
        iw.commit();
        iw.forceMerge(1);
        try (var reader = DirectoryReader.open(iw)) {
            assertEquals(1, reader.leaves().size());
            assertEquals(numDocs, reader.maxDoc());
            var leaf = reader.leaves().get(0).reader();
            var binaryDV = leaf.getBinaryDocValues(binaryField);
            assertNotNull(binaryDV);
            for (int i = 0; i < numDocs; i++) {
                // Expected values come off the tail of the list (see class note above):
                String expected = binaryValues.removeLast();
                if (expected == null) {
                    assertFalse(binaryDV.advanceExact(i));
                } else {
                    assertTrue(binaryDV.advanceExact(i));
                    assertEquals(expected, binaryDV.binaryValue().utf8ToString());
                }
            }
        }
    }
}
/**
 * Dense case: every document carries a value for every field. Writes many small segments,
 * force-merges down to one, then walks every document checking each doc-values type:
 * SORTED (host name), singleton SORTED_NUMERIC (timestamp), NUMERIC and SORTED_NUMERIC
 * counters, single- and multi-valued gauges, SORTED_SET tags and a BINARY field.
 */
public void testForceMergeDenseCase() throws Exception {
    String timestampField = "@timestamp";
    String hostnameField = "host.name";
    long baseTimestamp = 1704067200000L;
    var config = getTimeSeriesIndexWriterConfig(hostnameField, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        long counter1 = 0;
        long counter2 = 10_000_000;
        long[] gauge1Values = new long[] { 2, 4, 6, 8, 10, 12, 14, 16 };
        long[] gauge2Values = new long[] { -2, -4, -6, -8, -10, -12, -14, -16 };
        String[] tags = new String[] { "tag_1", "tag_2", "tag_3", "tag_4", "tag_5", "tag_6", "tag_7", "tag_8" };
        int numDocs = 256 + random().nextInt(1024);
        // Consecutive runs of numHosts docs share a host, so ordinals group by batchIndex:
        int numHosts = numDocs / 20;
        for (int i = 0; i < numDocs; i++) {
            var d = new Document();
            int batchIndex = i / numHosts;
            String hostName = String.format(Locale.ROOT, "host-%03d", batchIndex);
            long timestamp = baseTimestamp + (1000L * i);
            d.add(new SortedDocValuesField(hostnameField, new BytesRef(hostName)));
            // Index sorting doesn't work with NumericDocValuesField:
            d.add(new SortedNumericDocValuesField(timestampField, timestamp));
            d.add(new NumericDocValuesField("counter_1", counter1++));
            d.add(new SortedNumericDocValuesField("counter_2", counter2++));
            d.add(new SortedNumericDocValuesField("gauge_1", gauge1Values[i % gauge1Values.length]));
            int numGauge2 = 1 + random().nextInt(8);
            for (int j = 0; j < numGauge2; j++) {
                d.add(new SortedNumericDocValuesField("gauge_2", gauge2Values[(i + j) % gauge2Values.length]));
            }
            int numTags = 1 + random().nextInt(8);
            for (int j = 0; j < numTags; j++) {
                d.add(new SortedSetDocValuesField("tags", new BytesRef(tags[(i + j) % tags.length])));
            }
            d.add(new BinaryDocValuesField("tags_as_bytes", new BytesRef(tags[i % tags.length])));
            iw.addDocument(d);
            // Commit periodically so the force merge below has multiple segments to merge:
            if (i % 100 == 0) {
                iw.commit();
            }
        }
        iw.commit();
        iw.forceMerge(1);
        // For asserting using binary search later on:
        Arrays.sort(gauge2Values);
        try (var reader = DirectoryReader.open(iw)) {
            assertEquals(1, reader.leaves().size());
            assertEquals(numDocs, reader.maxDoc());
            var leaf = reader.leaves().get(0).reader();
            var hostNameDV = leaf.getSortedDocValues(hostnameField);
            assertNotNull(hostNameDV);
            var timestampDV = DocValues.unwrapSingleton(leaf.getSortedNumericDocValues(timestampField));
            assertNotNull(timestampDV);
            var counterOneDV = leaf.getNumericDocValues("counter_1");
            assertNotNull(counterOneDV);
            var counterTwoDV = leaf.getSortedNumericDocValues("counter_2");
            assertNotNull(counterTwoDV);
            var gaugeOneDV = leaf.getSortedNumericDocValues("gauge_1");
            assertNotNull(gaugeOneDV);
            var gaugeTwoDV = leaf.getSortedNumericDocValues("gauge_2");
            assertNotNull(gaugeTwoDV);
            var tagsDV = leaf.getSortedSetDocValues("tags");
            assertNotNull(tagsDV);
            var tagBytesDV = leaf.getBinaryDocValues("tags_as_bytes");
            assertNotNull(tagBytesDV);
            // Dense fields: every iterator must advance in lockstep with the docid:
            for (int i = 0; i < numDocs; i++) {
                assertEquals(i, hostNameDV.nextDoc());
                int batchIndex = i / numHosts;
                assertEquals(batchIndex, hostNameDV.ordValue());
                String expectedHostName = String.format(Locale.ROOT, "host-%03d", batchIndex);
                assertEquals(expectedHostName, hostNameDV.lookupOrd(hostNameDV.ordValue()).utf8ToString());
                assertEquals(i, timestampDV.nextDoc());
                long timestamp = timestampDV.longValue();
                long lowerBound = baseTimestamp;
                long upperBound = baseTimestamp + (1000L * numDocs);
                assertTrue(
                    "unexpected timestamp [" + timestamp + "], expected between [" + lowerBound + "] and [" + upperBound + "]",
                    timestamp >= lowerBound && timestamp < upperBound
                );
                assertEquals(i, counterOneDV.nextDoc());
                long counterOneValue = counterOneDV.longValue();
                assertTrue("unexpected counter [" + counterOneValue + "]", counterOneValue >= 0 && counterOneValue < counter1);
                assertEquals(i, counterTwoDV.nextDoc());
                assertEquals(1, counterTwoDV.docValueCount());
                long counterTwoValue = counterTwoDV.nextValue();
                assertTrue("unexpected counter [" + counterTwoValue + "]", counterTwoValue > 0 && counterTwoValue <= counter2);
                assertEquals(i, gaugeOneDV.nextDoc());
                assertEquals(1, gaugeOneDV.docValueCount());
                long gaugeOneValue = gaugeOneDV.nextValue();
                assertTrue("unexpected gauge [" + gaugeOneValue + "]", Arrays.binarySearch(gauge1Values, gaugeOneValue) >= 0);
                assertEquals(i, gaugeTwoDV.nextDoc());
                for (int j = 0; j < gaugeTwoDV.docValueCount(); j++) {
                    long gaugeTwoValue = gaugeTwoDV.nextValue();
                    assertTrue("unexpected gauge [" + gaugeTwoValue + "]", Arrays.binarySearch(gauge2Values, gaugeTwoValue) >= 0);
                }
                assertEquals(i, tagsDV.nextDoc());
                for (int j = 0; j < tagsDV.docValueCount(); j++) {
                    long ordinal = tagsDV.nextOrd();
                    String actualTag = tagsDV.lookupOrd(ordinal).utf8ToString();
                    assertTrue("unexpected tag [" + actualTag + "]", Arrays.binarySearch(tags, actualTag) >= 0);
                }
                assertEquals(i, tagBytesDV.nextDoc());
                BytesRef tagBytesValue = tagBytesDV.binaryValue();
                assertTrue("unexpected bytes " + tagBytesValue, Arrays.binarySearch(tags, tagBytesValue.utf8ToString()) >= 0);
            }
        }
    }
}
/**
 * Merges two single-document segments whose doc-values fields are disjoint (only the
 * host/timestamp fields overlap). After the force merge, every field must still be
 * readable; each per-field check is guarded by {@code advanceExact} because each field
 * has a value for exactly one of the two documents.
 */
public void testTwoSegmentsTwoDifferentFields() throws Exception {
    String timestampField = "@timestamp";
    String hostnameField = "host.name";
    long timestamp = 1704067200000L;
    var config = getTimeSeriesIndexWriterConfig(hostnameField, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        long counter1 = 0;
        long counter2 = 10_000_000;
        // First segment: counter_1 / gauge_1 / binary_1 only.
        {
            var d = new Document();
            d.add(new SortedDocValuesField(hostnameField, new BytesRef("host-001")));
            d.add(new SortedNumericDocValuesField(timestampField, timestamp - 1));
            d.add(new NumericDocValuesField("counter_1", counter1));
            d.add(new SortedNumericDocValuesField("gauge_1", 2));
            d.add(new BinaryDocValuesField("binary_1", new BytesRef("foo")));
            iw.addDocument(d);
            iw.commit();
        }
        // Second segment: counter_2 / gauge_2 / binary_2 only.
        {
            var d = new Document();
            d.add(new SortedDocValuesField(hostnameField, new BytesRef("host-001")));
            d.add(new SortedNumericDocValuesField(timestampField, timestamp));
            d.add(new SortedNumericDocValuesField("counter_2", counter2));
            d.add(new SortedNumericDocValuesField("gauge_2", -2));
            d.add(new BinaryDocValuesField("binary_2", new BytesRef("bar")));
            iw.addDocument(d);
            iw.commit();
        }
        iw.forceMerge(1);
        try (var reader = DirectoryReader.open(iw)) {
            assertEquals(1, reader.leaves().size());
            assertEquals(2, reader.maxDoc());
            var leaf = reader.leaves().get(0).reader();
            var hostNameDV = leaf.getSortedDocValues(hostnameField);
            assertNotNull(hostNameDV);
            var timestampDV = DocValues.unwrapSingleton(leaf.getSortedNumericDocValues(timestampField));
            assertNotNull(timestampDV);
            var counterOneDV = leaf.getNumericDocValues("counter_1");
            assertNotNull(counterOneDV);
            var counterTwoDV = leaf.getSortedNumericDocValues("counter_2");
            assertNotNull(counterTwoDV);
            var gaugeOneDV = leaf.getSortedNumericDocValues("gauge_1");
            assertNotNull(gaugeOneDV);
            var gaugeTwoDV = leaf.getSortedNumericDocValues("gauge_2");
            assertNotNull(gaugeTwoDV);
            var binaryOneDV = leaf.getBinaryDocValues("binary_1");
            assertNotNull(binaryOneDV);
            var binaryTwoDv = leaf.getBinaryDocValues("binary_2");
            assertNotNull(binaryTwoDv);
            for (int i = 0; i < 2; i++) {
                assertEquals(i, hostNameDV.nextDoc());
                assertEquals("host-001", hostNameDV.lookupOrd(hostNameDV.ordValue()).utf8ToString());
                assertEquals(i, timestampDV.nextDoc());
                long actualTimestamp = timestampDV.longValue();
                assertTrue(actualTimestamp == timestamp || actualTimestamp == timestamp - 1);
                // Sparse fields: only assert when the field has a value for this doc.
                if (counterOneDV.advanceExact(i)) {
                    long counterOneValue = counterOneDV.longValue();
                    assertEquals(counter1, counterOneValue);
                }
                if (counterTwoDV.advanceExact(i)) {
                    assertEquals(1, counterTwoDV.docValueCount());
                    long counterTwoValue = counterTwoDV.nextValue();
                    assertEquals(counter2, counterTwoValue);
                }
                if (gaugeOneDV.advanceExact(i)) {
                    assertEquals(1, gaugeOneDV.docValueCount());
                    long gaugeOneValue = gaugeOneDV.nextValue();
                    assertEquals(2, gaugeOneValue);
                }
                if (gaugeTwoDV.advanceExact(i)) {
                    assertEquals(1, gaugeTwoDV.docValueCount());
                    long gaugeTwoValue = gaugeTwoDV.nextValue();
                    assertEquals(-2, gaugeTwoValue);
                }
                if (binaryOneDV.advanceExact(i)) {
                    BytesRef binaryOneValue = binaryOneDV.binaryValue();
                    assertEquals(new BytesRef("foo"), binaryOneValue);
                }
                if (binaryTwoDv.advanceExact(i)) {
                    BytesRef binaryTwoValue = binaryTwoDv.binaryValue();
                    assertEquals(new BytesRef("bar"), binaryTwoValue);
                }
            }
        }
    }
}
/**
 * Sparse case: like {@link #testForceMergeDenseCase()}, but each field is randomly omitted
 * per document, so after force-merging every per-field check must go through
 * {@code advanceExact} instead of assuming dense lockstep iteration.
 */
public void testForceMergeSparseCase() throws Exception {
    String timestampField = "@timestamp";
    String hostnameField = "host.name";
    long baseTimestamp = 1704067200000L;
    var config = getTimeSeriesIndexWriterConfig(hostnameField, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        long counter1 = 0;
        long counter2 = 10_000_000;
        long[] gauge1Values = new long[] { 2, 4, 6, 8, 10, 12, 14, 16 };
        long[] gauge2Values = new long[] { -2, -4, -6, -8, -10, -12, -14, -16 };
        String[] tags = new String[] { "tag_1", "tag_2", "tag_3", "tag_4", "tag_5", "tag_6", "tag_7", "tag_8" };
        int numDocs = 256 + random().nextInt(1024);
        int numHosts = numDocs / 20;
        for (int i = 0; i < numDocs; i++) {
            var d = new Document();
            int batchIndex = i / numHosts;
            String hostName = String.format(Locale.ROOT, "host-%03d", batchIndex);
            long timestamp = baseTimestamp + (1000L * i);
            d.add(new SortedDocValuesField(hostnameField, new BytesRef(hostName)));
            // Index sorting doesn't work with NumericDocValuesField:
            d.add(new SortedNumericDocValuesField(timestampField, timestamp));
            // Each optional field is present with ~50% probability, producing sparse columns:
            if (random().nextBoolean()) {
                d.add(new NumericDocValuesField("counter_1", counter1++));
            }
            if (random().nextBoolean()) {
                d.add(new SortedNumericDocValuesField("counter_2", counter2++));
            }
            if (random().nextBoolean()) {
                d.add(new SortedNumericDocValuesField("gauge_1", gauge1Values[i % gauge1Values.length]));
            }
            if (random().nextBoolean()) {
                int numGauge2 = 1 + random().nextInt(8);
                for (int j = 0; j < numGauge2; j++) {
                    d.add(new SortedNumericDocValuesField("gauge_2", gauge2Values[(i + j) % gauge2Values.length]));
                }
            }
            if (random().nextBoolean()) {
                int numTags = 1 + random().nextInt(8);
                for (int j = 0; j < numTags; j++) {
                    d.add(new SortedSetDocValuesField("tags", new BytesRef(tags[j])));
                }
            }
            if (random().nextBoolean()) {
                int randomIndex = random().nextInt(tags.length);
                d.add(new SortedDocValuesField("other_tag", new BytesRef(tags[randomIndex])));
            }
            if (random().nextBoolean()) {
                int randomIndex = random().nextInt(tags.length);
                d.add(new BinaryDocValuesField("tags_as_bytes", new BytesRef(tags[randomIndex])));
            }
            iw.addDocument(d);
            // Commit periodically so the force merge below has multiple segments to merge:
            if (i % 100 == 0) {
                iw.commit();
            }
        }
        iw.commit();
        iw.forceMerge(1);
        // For asserting using binary search later on:
        Arrays.sort(gauge2Values);
        try (var reader = DirectoryReader.open(iw)) {
            assertEquals(1, reader.leaves().size());
            assertEquals(numDocs, reader.maxDoc());
            var leaf = reader.leaves().get(0).reader();
            var hostNameDV = leaf.getSortedDocValues(hostnameField);
            assertNotNull(hostNameDV);
            var timestampDV = DocValues.unwrapSingleton(leaf.getSortedNumericDocValues(timestampField));
            assertNotNull(timestampDV);
            var counterOneDV = leaf.getNumericDocValues("counter_1");
            assertNotNull(counterOneDV);
            var counterTwoDV = leaf.getSortedNumericDocValues("counter_2");
            assertNotNull(counterTwoDV);
            var gaugeOneDV = leaf.getSortedNumericDocValues("gauge_1");
            assertNotNull(gaugeOneDV);
            var gaugeTwoDV = leaf.getSortedNumericDocValues("gauge_2");
            assertNotNull(gaugeTwoDV);
            var tagsDV = leaf.getSortedSetDocValues("tags");
            assertNotNull(tagsDV);
            var otherTagDV = leaf.getSortedDocValues("other_tag");
            assertNotNull(otherTagDV);
            var tagBytesDV = leaf.getBinaryDocValues("tags_as_bytes");
            assertNotNull(tagBytesDV);
            for (int i = 0; i < numDocs; i++) {
                // Host name and timestamp are dense; everything else is checked via advanceExact:
                assertEquals(i, hostNameDV.nextDoc());
                int batchIndex = i / numHosts;
                assertEquals(batchIndex, hostNameDV.ordValue());
                String expectedHostName = String.format(Locale.ROOT, "host-%03d", batchIndex);
                assertEquals(expectedHostName, hostNameDV.lookupOrd(hostNameDV.ordValue()).utf8ToString());
                assertEquals(i, timestampDV.nextDoc());
                long timestamp = timestampDV.longValue();
                long lowerBound = baseTimestamp;
                long upperBound = baseTimestamp + (1000L * numDocs);
                assertTrue(
                    "unexpected timestamp [" + timestamp + "], expected between [" + lowerBound + "] and [" + upperBound + "]",
                    timestamp >= lowerBound && timestamp < upperBound
                );
                if (counterOneDV.advanceExact(i)) {
                    long counterOneValue = counterOneDV.longValue();
                    assertTrue("unexpected counter [" + counterOneValue + "]", counterOneValue >= 0 && counterOneValue < counter1);
                }
                if (counterTwoDV.advanceExact(i)) {
                    assertEquals(1, counterTwoDV.docValueCount());
                    long counterTwoValue = counterTwoDV.nextValue();
                    assertTrue("unexpected counter [" + counterTwoValue + "]", counterTwoValue > 0 && counterTwoValue <= counter2);
                }
                if (gaugeOneDV.advanceExact(i)) {
                    assertEquals(1, gaugeOneDV.docValueCount());
                    long gaugeOneValue = gaugeOneDV.nextValue();
                    assertTrue("unexpected gauge [" + gaugeOneValue + "]", Arrays.binarySearch(gauge1Values, gaugeOneValue) >= 0);
                }
                if (gaugeTwoDV.advanceExact(i)) {
                    for (int j = 0; j < gaugeTwoDV.docValueCount(); j++) {
                        long gaugeTwoValue = gaugeTwoDV.nextValue();
                        assertTrue("unexpected gauge [" + gaugeTwoValue + "]", Arrays.binarySearch(gauge2Values, gaugeTwoValue) >= 0);
                    }
                }
                if (tagsDV.advanceExact(i)) {
                    for (int j = 0; j < tagsDV.docValueCount(); j++) {
                        long ordinal = tagsDV.nextOrd();
                        String actualTag = tagsDV.lookupOrd(ordinal).utf8ToString();
                        assertTrue("unexpected tag [" + actualTag + "]", Arrays.binarySearch(tags, actualTag) >= 0);
                    }
                }
                if (otherTagDV.advanceExact(i)) {
                    int ordinal = otherTagDV.ordValue();
                    String actualTag = otherTagDV.lookupOrd(ordinal).utf8ToString();
                    assertTrue("unexpected tag [" + actualTag + "]", Arrays.binarySearch(tags, actualTag) >= 0);
                }
                if (tagBytesDV.advanceExact(i)) {
                    BytesRef tagBytesValue = tagBytesDV.binaryValue();
                    assertTrue("unexpected bytes " + tagBytesValue, Arrays.binarySearch(tags, tagBytesValue.utf8ToString()) >= 0);
                }
            }
        }
    }
}
/**
 * Mixes documents with no value, a single value, and multiple values for the same
 * multi-valued fields (SORTED_NUMERIC gauge, SORTED_SET tags) across several committed
 * rounds, then force-merges and verifies everything survives the merge.
 */
public void testWithNoValueMultiValue() throws Exception {
    String timestampField = "@timestamp";
    String hostnameField = "host.name";
    long baseTimestamp = 1704067200000L;
    int numRounds = 32 + random().nextInt(32);
    int numDocsPerRound = 64 + random().nextInt(64);
    var config = getTimeSeriesIndexWriterConfig(hostnameField, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        long[] gauge1Values = new long[] { 2, 4, 6, 8, 10, 12, 14, 16 };
        String[] tags = new String[] { "tag_1", "tag_2", "tag_3", "tag_4", "tag_5", "tag_6", "tag_7", "tag_8" };
        {
            long timestamp = baseTimestamp;
            for (int i = 0; i < numRounds; i++) {
                // r is fixed per round, so a whole round is either empty/single/multi valued:
                int r = random().nextInt(10);
                for (int j = 0; j < numDocsPerRound; j++) {
                    var d = new Document();
                    // host in reverse, otherwise merging will detect that segments are already ordered and will use sequential docid
                    // merger:
                    String hostName = String.format(Locale.ROOT, "host-%03d", numRounds - i);
                    d.add(new SortedDocValuesField(hostnameField, new BytesRef(hostName)));
                    // Index sorting doesn't work with NumericDocValuesField:
                    d.add(new SortedNumericDocValuesField(timestampField, timestamp++));
                    if (r % 10 == 5) {
                        // sometimes no values
                    } else if (r % 10 > 5) {
                        // often single value:
                        d.add(new SortedNumericDocValuesField("gauge_1", gauge1Values[j % gauge1Values.length]));
                        d.add(new SortedSetDocValuesField("tags", new BytesRef(tags[j % tags.length])));
                    } else {
                        // otherwise multiple values:
                        int numValues = 2 + random().nextInt(4);
                        for (int k = 0; k < numValues; k++) {
                            d.add(new SortedNumericDocValuesField("gauge_1", gauge1Values[(j + k) % gauge1Values.length]));
                            d.add(new SortedSetDocValuesField("tags", new BytesRef(tags[(j + k) % tags.length])));
                        }
                    }
                    iw.addDocument(d);
                }
                iw.commit();
            }
            iw.forceMerge(1);
        }
        int numDocs = numRounds * numDocsPerRound;
        try (var reader = DirectoryReader.open(iw)) {
            assertEquals(1, reader.leaves().size());
            assertEquals(numDocs, reader.maxDoc());
            var leaf = reader.leaves().get(0).reader();
            var hostNameDV = leaf.getSortedDocValues(hostnameField);
            assertNotNull(hostNameDV);
            var timestampDV = DocValues.unwrapSingleton(leaf.getSortedNumericDocValues(timestampField));
            assertNotNull(timestampDV);
            var gaugeOneDV = leaf.getSortedNumericDocValues("gauge_1");
            assertNotNull(gaugeOneDV);
            var tagsDV = leaf.getSortedSetDocValues("tags");
            assertNotNull(tagsDV);
            for (int i = 0; i < numDocs; i++) {
                assertEquals(i, hostNameDV.nextDoc());
                String actualHostName = hostNameDV.lookupOrd(hostNameDV.ordValue()).utf8ToString();
                assertTrue("unexpected host name:" + actualHostName, actualHostName.startsWith("host-"));
                assertEquals(i, timestampDV.nextDoc());
                long timestamp = timestampDV.longValue();
                long lowerBound = baseTimestamp;
                long upperBound = baseTimestamp + numDocs;
                assertTrue(
                    "unexpected timestamp [" + timestamp + "], expected between [" + lowerBound + "] and [" + upperBound + "]",
                    timestamp >= lowerBound && timestamp < upperBound
                );
                // Sparse/multi-valued fields are checked only when present for this doc:
                if (gaugeOneDV.advanceExact(i)) {
                    for (int j = 0; j < gaugeOneDV.docValueCount(); j++) {
                        long value = gaugeOneDV.nextValue();
                        assertTrue("unexpected gauge [" + value + "]", Arrays.binarySearch(gauge1Values, value) >= 0);
                    }
                }
                if (tagsDV.advanceExact(i)) {
                    for (int j = 0; j < tagsDV.docValueCount(); j++) {
                        long ordinal = tagsDV.nextOrd();
                        String actualTag = tagsDV.lookupOrd(ordinal).utf8ToString();
                        assertTrue("unexpected tag [" + actualTag + "]", Arrays.binarySearch(tags, actualTag) >= 0);
                    }
                }
            }
        }
    }
}
/**
 * Verifies that {@code IndexWriter#addIndexes} over two source indices — each written with a
 * randomly chosen doc values format — produces, after force-merging, the same doc values as
 * writing every document into a single index. Each doc values type is compared field by field
 * between the single-index baseline and the merged index.
 * <p>
 * Fix: the plain NUMERIC fields ({@code allNumericFields}) were never verified — the first
 * verification loop iterated the sorted-numeric field names, for which
 * {@code getNumericDocValues} returns {@code null} (wrong doc values type), making that loop
 * a no-op. It now iterates {@code allNumericFields}.
 */
public void testAddIndices() throws IOException {
    String timestampField = "@timestamp";
    String hostnameField = "host.name";
    // Random format per source index to exercise cross-format addIndexes/merging:
    Supplier<IndexWriterConfig> indexConfigWithRandomDVFormat = () -> {
        IndexWriterConfig config = getTimeSeriesIndexWriterConfig(hostnameField, timestampField);
        DocValuesFormat dvFormat = switch (random().nextInt(3)) {
            case 0 -> new ES87TSDBDocValuesFormatTests.TestES87TSDBDocValuesFormat(random().nextInt(4, 16));
            case 1 -> new ES819TSDBDocValuesFormat();
            case 2 -> new Lucene90DocValuesFormat();
            default -> throw new AssertionError("unknown option");
        };
        config.setCodec(new Elasticsearch900Lucene101Codec() {
            @Override
            public DocValuesFormat getDocValuesFormatForField(String field) {
                return dvFormat;
            }
        });
        return config;
    };
    var allNumericFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "numeric_" + n).toList();
    var allSortedNumericFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "sorted_numeric_" + n).toList();
    var allSortedFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "sorted_" + n).toList();
    var allSortedSetFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "sorted_set" + n).toList();
    var allBinaryFields = IntStream.range(0, ESTestCase.between(1, 10)).mapToObj(n -> "binary_" + n).toList();
    try (var source1 = newDirectory(); var source2 = newDirectory(); var singleDir = newDirectory(); var mergeDir = newDirectory()) {
        try (
            var writer1 = new IndexWriter(source1, indexConfigWithRandomDVFormat.get());
            var writer2 = new IndexWriter(source2, indexConfigWithRandomDVFormat.get());
            var singleWriter = new IndexWriter(singleDir, indexConfigWithRandomDVFormat.get())
        ) {
            int numDocs = 1 + random().nextInt(1_000);
            long timestamp = random().nextLong(1000_000L);
            for (int i = 0; i < numDocs; i++) {
                List<IndexableField> fields = new ArrayList<>();
                String hostName = String.format(Locale.ROOT, "host-%d", random().nextInt(5));
                timestamp += 1 + random().nextInt(1_000);
                fields.add(new SortedDocValuesField(hostnameField, new BytesRef(hostName)));
                fields.add(new SortedNumericDocValuesField(timestampField, timestamp));
                // Every optional field appears in a random subset of documents:
                var numericFields = ESTestCase.randomSubsetOf(allNumericFields);
                for (String f : numericFields) {
                    fields.add(new NumericDocValuesField(f, random().nextLong(1000L)));
                }
                var sortedNumericFields = ESTestCase.randomSubsetOf(allSortedNumericFields);
                for (String field : sortedNumericFields) {
                    int valueCount = 1 + random().nextInt(3);
                    for (int v = 0; v < valueCount; v++) {
                        fields.add(new SortedNumericDocValuesField(field, random().nextLong(1000L)));
                    }
                }
                var sortedFields = ESTestCase.randomSubsetOf(allSortedFields);
                for (String field : sortedFields) {
                    fields.add(new SortedDocValuesField(field, new BytesRef("s" + random().nextInt(100))));
                }
                var sortedSetFields = ESTestCase.randomSubsetOf(allSortedSetFields);
                for (String field : sortedSetFields) {
                    int valueCount = 1 + random().nextInt(3);
                    for (int v = 0; v < valueCount; v++) {
                        fields.add(new SortedSetDocValuesField(field, new BytesRef("ss" + random().nextInt(100))));
                    }
                }
                List<String> binaryFields = ESTestCase.randomSubsetOf(allBinaryFields);
                for (String field : binaryFields) {
                    fields.add(new BinaryDocValuesField(field, new BytesRef("b" + random().nextInt(100))));
                }
                // Each doc goes to one random source index AND to the single baseline index:
                for (IndexWriter writer : List.of(ESTestCase.randomFrom(writer1, writer2), singleWriter)) {
                    Randomness.shuffle(fields);
                    writer.addDocument(fields);
                    if (random().nextInt(100) <= 5) {
                        writer.commit();
                    }
                }
            }
            if (random().nextBoolean()) {
                writer1.forceMerge(1);
            }
            if (random().nextBoolean()) {
                writer2.forceMerge(1);
            }
            singleWriter.commit();
            singleWriter.forceMerge(1);
        }
        try (var mergeWriter = new IndexWriter(mergeDir, getTimeSeriesIndexWriterConfig(hostnameField, timestampField))) {
            mergeWriter.addIndexes(source1, source2);
            mergeWriter.forceMerge(1);
        }
        try (var reader1 = DirectoryReader.open(singleDir); var reader2 = DirectoryReader.open(mergeDir)) {
            assertEquals(reader1.maxDoc(), reader2.maxDoc());
            assertEquals(1, reader1.leaves().size());
            assertEquals(1, reader2.leaves().size());
            for (int i = 0; i < reader1.leaves().size(); i++) {
                LeafReader leaf1 = reader1.leaves().get(i).reader();
                LeafReader leaf2 = reader2.leaves().get(i).reader();
                // NUMERIC doc values (getNumericDocValues only matches NUMERIC-typed fields;
                // timestamp is SORTED_NUMERIC and is covered by the next loop):
                for (String f : allNumericFields) {
                    var dv1 = leaf1.getNumericDocValues(f);
                    var dv2 = leaf2.getNumericDocValues(f);
                    if (dv1 == null) {
                        assertNull(dv2);
                        continue;
                    }
                    assertNotNull(dv2);
                    while (dv1.nextDoc() != NumericDocValues.NO_MORE_DOCS) {
                        assertNotEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc());
                        assertEquals(dv1.docID(), dv2.docID());
                        assertEquals(dv1.longValue(), dv2.longValue());
                    }
                    assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc());
                }
                // SORTED_NUMERIC doc values (timestamp included):
                for (String f : CollectionUtils.appendToCopy(allSortedNumericFields, timestampField)) {
                    var dv1 = leaf1.getSortedNumericDocValues(f);
                    var dv2 = leaf2.getSortedNumericDocValues(f);
                    if (dv1 == null) {
                        assertNull(dv2);
                        continue;
                    }
                    assertNotNull(dv2);
                    while (dv1.nextDoc() != NumericDocValues.NO_MORE_DOCS) {
                        assertNotEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc());
                        assertEquals(dv1.docID(), dv2.docID());
                        assertEquals(dv1.docValueCount(), dv2.docValueCount());
                        for (int v = 0; v < dv1.docValueCount(); v++) {
                            assertEquals(dv1.nextValue(), dv2.nextValue());
                        }
                    }
                    assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc());
                }
                // SORTED doc values (host name included):
                for (String f : CollectionUtils.appendToCopy(allSortedFields, hostnameField)) {
                    var dv1 = leaf1.getSortedDocValues(f);
                    var dv2 = leaf2.getSortedDocValues(f);
                    if (dv1 == null) {
                        assertNull(dv2);
                        continue;
                    }
                    assertNotNull(dv2);
                    while (dv1.nextDoc() != SortedDocValues.NO_MORE_DOCS) {
                        assertNotEquals(SortedDocValues.NO_MORE_DOCS, dv2.nextDoc());
                        assertEquals(dv1.docID(), dv2.docID());
                        assertEquals(dv1.lookupOrd(dv1.ordValue()), dv2.lookupOrd(dv2.ordValue()));
                    }
                    assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc());
                }
                // SORTED_SET doc values:
                for (String f : allSortedSetFields) {
                    var dv1 = leaf1.getSortedSetDocValues(f);
                    var dv2 = leaf2.getSortedSetDocValues(f);
                    if (dv1 == null) {
                        assertNull(dv2);
                        continue;
                    }
                    assertNotNull(dv2);
                    while (dv1.nextDoc() != SortedDocValues.NO_MORE_DOCS) {
                        assertNotEquals(SortedDocValues.NO_MORE_DOCS, dv2.nextDoc());
                        assertEquals(dv1.docID(), dv2.docID());
                        assertEquals(dv1.docValueCount(), dv2.docValueCount());
                        for (int v = 0; v < dv1.docValueCount(); v++) {
                            assertEquals(dv1.lookupOrd(dv1.nextOrd()), dv2.lookupOrd(dv2.nextOrd()));
                        }
                    }
                    assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc());
                }
                // BINARY doc values:
                for (String f : allBinaryFields) {
                    var dv1 = leaf1.getBinaryDocValues(f);
                    var dv2 = leaf2.getBinaryDocValues(f);
                    if (dv1 == null) {
                        assertNull(dv2);
                        continue;
                    }
                    assertNotNull(dv2);
                    while (dv1.nextDoc() != SortedDocValues.NO_MORE_DOCS) {
                        assertNotEquals(SortedDocValues.NO_MORE_DOCS, dv2.nextDoc());
                        assertEquals(dv1.docID(), dv2.docID());
                        assertEquals(dv1.binaryValue(), dv2.binaryValue());
                    }
                    assertEquals(NumericDocValues.NO_MORE_DOCS, dv2.nextDoc());
                }
            }
        }
    }
}
/**
 * Exercises the optional column-at-a-time (bulk block) readers: reads timestamp, counters,
 * a gauge, a sorted-as-string counter and two binary fields (fixed and variable length) via
 * {@code tryRead}, first segment by segment with random block sizes, then from a single
 * force-merged segment with a random offset, and finally with a docid list containing gaps.
 * <p>
 * NOTE(review): doc order is assumed to be the reverse of insertion order (descending
 * timestamp index sort), hence the decrementing expected values and {@code removeLast()}.
 * <p>
 * Fix: the two binary field-name constants were swapped ({@code binaryFixedField} held
 * {@code "binary_variable"} and vice versa); the names now match the data written to them.
 */
public void testOptionalColumnAtATimeReader() throws Exception {
    final String counterField = "counter";
    final String counterFieldAsString = "counter_as_string";
    final String timestampField = "@timestamp";
    final String gaugeField = "gauge";
    final String binaryFixedField = "binary_fixed";
    final String binaryVariableField = "binary_variable";
    final int binaryFieldMaxLength = randomIntBetween(1, 20);
    long currentTimestamp = 1704067200000L;
    long currentCounter = 10_000_000;
    var config = getTimeSeriesIndexWriterConfig(null, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        long[] gauge1Values = new long[] { 2, 4, 6, 8, 10, 12, 14, 16 };
        List<BytesRef> binaryFixedValues = new ArrayList<>();
        List<BytesRef> binaryVariableValues = new ArrayList<>();
        int numDocs = 256 + random().nextInt(8096);
        for (int i = 0; i < numDocs; i++) {
            // Fixed field: always binaryFieldMaxLength bytes; variable field: 0..max bytes.
            binaryFixedValues.add(new BytesRef(randomAlphaOfLength(binaryFieldMaxLength)));
            binaryVariableValues.add(new BytesRef(randomAlphaOfLength(between(0, binaryFieldMaxLength))));
            var d = new Document();
            long timestamp = currentTimestamp;
            // Index sorting doesn't work with NumericDocValuesField:
            d.add(SortedNumericDocValuesField.indexedField(timestampField, timestamp));
            d.add(new SortedNumericDocValuesField(counterField, currentCounter));
            d.add(new SortedSetDocValuesField(counterFieldAsString, new BytesRef(Long.toString(currentCounter))));
            d.add(new SortedNumericDocValuesField(gaugeField, gauge1Values[i % gauge1Values.length]));
            d.add(new BinaryDocValuesField(binaryFixedField, binaryFixedValues.getLast()));
            d.add(new BinaryDocValuesField(binaryVariableField, binaryVariableValues.getLast()));
            iw.addDocument(d);
            if (i % 100 == 0) {
                iw.commit();
            }
            // Skip the final increment so current* end up holding the last indexed values:
            if (i < numDocs - 1) {
                currentTimestamp += 1000L;
                currentCounter++;
            }
        }
        iw.commit();
        var factory = TestBlock.factory();
        final long lastIndexedTimestamp = currentTimestamp;
        final long lastIndexedCounter = currentCounter;
        try (var reader = DirectoryReader.open(iw)) {
            int gaugeIndex = numDocs;
            for (var leaf : reader.leaves()) {
                var timestampDV = getBaseDenseNumericValues(leaf.reader(), timestampField);
                var counterDV = getBaseDenseNumericValues(leaf.reader(), counterField);
                var gaugeDV = getBaseDenseNumericValues(leaf.reader(), gaugeField);
                var stringCounterDV = getBaseSortedDocValues(leaf.reader(), counterFieldAsString);
                var binaryFixedDV = getDenseBinaryValues(leaf.reader(), binaryFixedField);
                var binaryVariableDV = getDenseBinaryValues(leaf.reader(), binaryVariableField);
                int maxDoc = leaf.reader().maxDoc();
                // Read the segment in randomly-sized contiguous blocks:
                for (int i = 0; i < maxDoc;) {
                    int size = Math.max(1, random().nextInt(0, maxDoc - i));
                    var docs = TestBlock.docs(IntStream.range(i, i + size).toArray());
                    {
                        // bulk loading timestamp:
                        var block = (TestBlock) timestampDV.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                        assertNotNull(block);
                        assertEquals(size, block.size());
                        for (int j = 0; j < block.size(); j++) {
                            long actualTimestamp = (long) block.get(j);
                            long expectedTimestamp = currentTimestamp;
                            assertEquals(expectedTimestamp, actualTimestamp);
                            currentTimestamp -= 1000L;
                        }
                    }
                    {
                        // bulk loading counter field:
                        var block = (TestBlock) counterDV.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                        assertNotNull(block);
                        assertEquals(size, block.size());
                        var stringBlock = (TestBlock) stringCounterDV.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                        assertNotNull(stringBlock);
                        assertEquals(size, stringBlock.size());
                        for (int j = 0; j < block.size(); j++) {
                            long expectedCounter = currentCounter;
                            long actualCounter = (long) block.get(j);
                            assertEquals(expectedCounter, actualCounter);
                            var expectedStringCounter = Long.toString(actualCounter);
                            var actualStringCounter = ((BytesRef) stringBlock.get(j)).utf8ToString();
                            assertEquals(expectedStringCounter, actualStringCounter);
                            currentCounter--;
                        }
                    }
                    {
                        // bulk loading gauge field:
                        var block = (TestBlock) gaugeDV.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                        assertNotNull(block);
                        assertEquals(size, block.size());
                        for (int j = 0; j < block.size(); j++) {
                            long actualGauge = (long) block.get(j);
                            long expectedGauge = gauge1Values[--gaugeIndex % gauge1Values.length];
                            assertEquals(expectedGauge, actualGauge);
                        }
                    }
                    {
                        // bulk loading binary fixed length field:
                        var block = (TestBlock) binaryFixedDV.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                        assertNotNull(block);
                        assertEquals(size, block.size());
                        for (int j = 0; j < block.size(); j++) {
                            var actual = (BytesRef) block.get(j);
                            var expected = binaryFixedValues.removeLast();
                            assertEquals(expected, actual);
                        }
                    }
                    {
                        // bulk loading binary variable length field:
                        var block = (TestBlock) binaryVariableDV.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                        assertNotNull(block);
                        assertEquals(size, block.size());
                        for (int j = 0; j < block.size(); j++) {
                            var actual = (BytesRef) block.get(j);
                            var expected = binaryVariableValues.removeLast();
                            assertEquals(expected, actual);
                        }
                    }
                    i += size;
                }
            }
        }
        // Now bulk reader from one big segment and use random offset:
        iw.forceMerge(1);
        var blockFactory = TestBlock.factory();
        try (var reader = DirectoryReader.open(iw)) {
            int randomOffset = random().nextInt(numDocs / 4);
            currentTimestamp = lastIndexedTimestamp - (randomOffset * 1000L);
            currentCounter = lastIndexedCounter - randomOffset;
            assertEquals(1, reader.leaves().size());
            assertEquals(numDocs, reader.maxDoc());
            var leafReader = reader.leaves().get(0).reader();
            int maxDoc = leafReader.maxDoc();
            int size = maxDoc - randomOffset;
            int gaugeIndex = size;
            var timestampDV = getBaseDenseNumericValues(leafReader, timestampField);
            var counterDV = getBaseDenseNumericValues(leafReader, counterField);
            var gaugeDV = getBaseDenseNumericValues(leafReader, gaugeField);
            var stringCounterDV = getBaseSortedDocValues(leafReader, counterFieldAsString);
            var docs = TestBlock.docs(IntStream.range(0, maxDoc).toArray());
            {
                // bulk loading timestamp:
                var block = (TestBlock) timestampDV.tryRead(blockFactory, docs, randomOffset, false, null, false);
                assertNotNull(block);
                assertEquals(size, block.size());
                for (int j = 0; j < block.size(); j++) {
                    long actualTimestamp = (long) block.get(j);
                    long expectedTimestamp = currentTimestamp;
                    assertEquals(expectedTimestamp, actualTimestamp);
                    currentTimestamp -= 1000L;
                }
            }
            {
                // bulk loading counter field:
                var block = (TestBlock) counterDV.tryRead(factory, docs, randomOffset, false, null, false);
                assertNotNull(block);
                assertEquals(size, block.size());
                var stringBlock = (TestBlock) stringCounterDV.tryRead(factory, docs, randomOffset, false, null, false);
                assertNotNull(stringBlock);
                assertEquals(size, stringBlock.size());
                for (int j = 0; j < block.size(); j++) {
                    long actualCounter = (long) block.get(j);
                    long expectedCounter = currentCounter;
                    assertEquals(expectedCounter, actualCounter);
                    var expectedStringCounter = Long.toString(actualCounter);
                    var actualStringCounter = ((BytesRef) stringBlock.get(j)).utf8ToString();
                    assertEquals(expectedStringCounter, actualStringCounter);
                    currentCounter--;
                }
            }
            {
                // bulk loading gauge field:
                var block = (TestBlock) gaugeDV.tryRead(factory, docs, randomOffset, false, null, false);
                assertNotNull(block);
                assertEquals(size, block.size());
                for (int j = 0; j < block.size(); j++) {
                    long actualGauge = (long) block.get(j);
                    long expectedGauge = gauge1Values[--gaugeIndex % gauge1Values.length];
                    assertEquals(expectedGauge, actualGauge);
                }
            }
            // And finally docs with gaps:
            docs = TestBlock.docs(IntStream.range(0, maxDoc).filter(docId -> docId == 0 || docId % 64 != 0).toArray());
            size = docs.count();
            // Test against values loaded using normal doc value apis:
            long[] expectedCounters = new long[size];
            counterDV = getBaseDenseNumericValues(leafReader, counterField);
            for (int i = 0; i < docs.count(); i++) {
                int docId = docs.get(i);
                counterDV.advanceExact(docId);
                expectedCounters[i] = counterDV.longValue();
            }
            counterDV = getBaseDenseNumericValues(leafReader, counterField);
            stringCounterDV = getBaseSortedDocValues(leafReader, counterFieldAsString);
            {
                // bulk loading counter field:
                var block = (TestBlock) counterDV.tryRead(factory, docs, 0, false, null, false);
                assertNotNull(block);
                assertEquals(size, block.size());
                var stringBlock = (TestBlock) stringCounterDV.tryRead(factory, docs, 0, false, null, false);
                assertNotNull(stringBlock);
                assertEquals(size, stringBlock.size());
                for (int j = 0; j < block.size(); j++) {
                    long actualCounter = (long) block.get(j);
                    long expectedCounter = expectedCounters[j];
                    assertEquals(expectedCounter, actualCounter);
                    var expectedStringCounter = Long.toString(actualCounter);
                    var actualStringCounter = ((BytesRef) stringBlock.get(j)).utf8ToString();
                    assertEquals(expectedStringCounter, actualStringCounter);
                }
            }
        }
    }
}
/**
 * Bulk-loads dense numeric doc values ({@code @timestamp}, {@code counter}, {@code gauge}) through
 * {@code tryRead} with the final boolean argument set to {@code true}. Every assertion below casts
 * the block elements to {@code int}, so that flag presumably requests int-typed blocks — TODO confirm
 * against the {@code tryRead} signature. The leaf is consumed in randomly sized contiguous chunks.
 */
public void testOptionalColumnAtATimeReaderReadAsInt() throws Exception {
    final String counterField = "counter";
    final String timestampField = "@timestamp";
    final String gaugeField = "gauge";
    int currentTimestamp = 17040672;
    int currentCounter = 10_000_000;
    // No hostname field: the index sort is by timestamp only, descending (see getTimeSeriesIndexWriterConfig).
    var config = getTimeSeriesIndexWriterConfig(null, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        int[] gauge1Values = new int[] { 2, 4, 6, 8, 10, 12, 14, 16 };
        int numDocs = 256 + random().nextInt(8096);
        for (int i = 0; i < numDocs; i++) {
            var d = new Document();
            long timestamp = currentTimestamp;
            // Index sorting doesn't work with NumericDocValuesField:
            d.add(SortedNumericDocValuesField.indexedField(timestampField, timestamp));
            d.add(new SortedNumericDocValuesField(counterField, currentCounter));
            d.add(new SortedNumericDocValuesField(gaugeField, gauge1Values[i % gauge1Values.length]));
            iw.addDocument(d);
            // Periodic commits produce multiple segments, so the read loop below sees several leaves.
            if (i % 100 == 0) {
                iw.commit();
            }
            // Values are not advanced for the final document; after the descending timestamp sort the
            // first document read back therefore holds the current (maximum) timestamp/counter.
            if (i < numDocs - 1) {
                currentTimestamp += 1000;
                currentCounter++;
            }
        }
        iw.commit();
        var factory = TestBlock.factory();
        try (var reader = DirectoryReader.open(iw)) {
            // gaugeIndex walks the gauge cycle backwards from the end, matching the reversed doc order.
            int gaugeIndex = numDocs;
            for (var leaf : reader.leaves()) {
                var timestampDV = getBaseDenseNumericValues(leaf.reader(), timestampField);
                var counterDV = getBaseDenseNumericValues(leaf.reader(), counterField);
                var gaugeDV = getBaseDenseNumericValues(leaf.reader(), gaugeField);
                int maxDoc = leaf.reader().maxDoc();
                for (int i = 0; i < maxDoc;) {
                    // Chunk size is at least 1; maxDoc - i >= 1 is guaranteed by the loop condition.
                    int size = Math.max(1, random().nextInt(0, maxDoc - i));
                    var docs = TestBlock.docs(IntStream.range(i, i + size).toArray());
                    {
                        // bulk loading timestamp:
                        var block = (TestBlock) timestampDV.tryRead(factory, docs, 0, random().nextBoolean(), null, true);
                        assertNotNull(block);
                        assertEquals(size, block.size());
                        for (int j = 0; j < block.size(); j++) {
                            int actualTimestamp = (int) block.get(j);
                            int expectedTimestamp = currentTimestamp;
                            assertEquals(expectedTimestamp, actualTimestamp);
                            // Descending index sort: each subsequent doc is 1000 older.
                            currentTimestamp -= 1000;
                        }
                    }
                    {
                        // bulk loading counter field:
                        var block = (TestBlock) counterDV.tryRead(factory, docs, 0, random().nextBoolean(), null, true);
                        assertNotNull(block);
                        assertEquals(size, block.size());
                        for (int j = 0; j < block.size(); j++) {
                            int expectedCounter = currentCounter;
                            int actualCounter = (int) block.get(j);
                            assertEquals(expectedCounter, actualCounter);
                            // Counter was incremented per doc at index time, so it decreases in read order.
                            currentCounter--;
                        }
                    }
                    {
                        // bulk loading gauge field:
                        var block = (TestBlock) gaugeDV.tryRead(factory, docs, 0, random().nextBoolean(), null, true);
                        assertNotNull(block);
                        assertEquals(size, block.size());
                        for (int j = 0; j < block.size(); j++) {
                            int actualGauge = (int) block.get(j);
                            // Pre-decrement walks the repeating gauge cycle in reverse.
                            int expectedGauge = gauge1Values[--gaugeIndex % gauge1Values.length];
                            assertEquals(expectedGauge, actualGauge);
                        }
                    }
                    i += size;
                }
            }
        }
    }
}
/**
 * Exercises the column-at-a-time bulk loaders over NON-contiguous doc subsets of a single force-merged
 * segment. The index mixes dense fields (timestamp, counter, counter-as-string, query) with fields that
 * may be sparse (temperature, and the two binary fields when {@code denseBinaryData} is false). Verifies:
 * (1) bulk reads over the doc ids matched by a query agree with values gathered via the per-doc APIs,
 * (2) a sparse numeric field only yields a block when the 4th {@code tryRead} argument is {@code true}
 *     (presumably a "field may be sparse / nulls allowed" flag — TODO confirm against the signature),
 *     and yields null once the requested docs contain a gap,
 * (3) binary bulk loading succeeds for dense data and returns null for sparse data.
 */
public void testOptionalColumnAtATimeReaderWithSparseDocs() throws Exception {
    final String counterField = "counter";
    final String counterAsStringField = "counter_as_string";
    final String timestampField = "@timestamp";
    String queryField = "query_field";
    String temperatureField = "temperature_field";
    // NOTE(review): the string constants appear swapped relative to the variable names
    // ("binary_variable" vs binaryFixedField). Usage is internally consistent so behavior is
    // unaffected, but the naming is confusing and worth fixing upstream.
    final String binaryFixedField = "binary_variable";
    final String binaryVariableField = "binary_fixed";
    final int binaryFieldMaxLength = randomIntBetween(1, 20);
    boolean denseBinaryData = randomBoolean();
    long currentTimestamp = 1704067200000L;
    long currentCounter = 10_000_000;
    // No hostname field: index sort is by timestamp only, descending. With timestamps increasing at
    // index time, insertion doc i therefore lands at position numDocs - i after the sort, which is
    // why the expectation arrays below are filled at index [numDocs - i].
    var config = getTimeSeriesIndexWriterConfig(null, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        int numDocsPerQValue = 120;
        int numDocs = numDocsPerQValue * (1 + random().nextInt(40));
        Long[] temperatureValues = new Long[numDocs];
        BytesRef[] binaryFixed = new BytesRef[numDocs];
        BytesRef[] binaryVariable = new BytesRef[numDocs];
        long q = 1;
        for (int i = 1; i <= numDocs; i++) {
            var d = new Document();
            // Index sorting doesn't work with NumericDocValuesField:
            d.add(SortedNumericDocValuesField.indexedField(timestampField, currentTimestamp));
            currentTimestamp += 1000L;
            d.add(new SortedNumericDocValuesField(counterField, currentCounter));
            d.add(new SortedDocValuesField(counterAsStringField, new BytesRef(Long.toString(currentCounter))));
            d.add(new SortedNumericDocValuesField(queryField, q));
            // Binary fields are always present when denseBinaryData, otherwise present ~50% of the time.
            if (denseBinaryData || random().nextBoolean()) {
                binaryFixed[numDocs - i] = new BytesRef(randomAlphaOfLength(binaryFieldMaxLength));
                d.add(new BinaryDocValuesField(binaryFixedField, binaryFixed[numDocs - i]));
                binaryVariable[numDocs - i] = new BytesRef(randomAlphaOfLength(between(0, binaryFieldMaxLength)));
                d.add(new BinaryDocValuesField(binaryVariableField, binaryVariable[numDocs - i]));
            }
            // NOTE(review): literal 120 duplicates numDocsPerQValue — keep them in sync.
            if (i % 120 == 0) {
                q++;
            }
            // Temperature is deliberately sparse: present for ~half the docs.
            if (random().nextBoolean()) {
                long v = random().nextLong();
                temperatureValues[numDocs - i] = v;
                d.add(new NumericDocValuesField(temperatureField, v));
            }
            iw.addDocument(d);
            if (i % 100 == 0) {
                iw.commit();
            }
            // NOTE(review): since i runs from 1 to numDocs, this guard also skips the increment for
            // i == numDocs - 1, so the last two docs share a counter value — possibly intended
            // `i < numDocs`. Harmless here: expectations are read back from the index itself.
            if (i < numDocs - 1) {
                currentCounter++;
            }
        }
        iw.commit();
        // Now bulk reader from one big segment and use random offset:
        iw.forceMerge(1);
        var factory = TestBlock.factory();
        try (var reader = DirectoryReader.open(iw)) {
            assertEquals(1, reader.leaves().size());
            assertEquals(numDocs, reader.maxDoc());
            var leafReader = reader.leaves().get(0).reader();
            // For each query batch, collect the matching doc ids plus expected values via the
            // standard doc-value APIs, then compare against the bulk loaders.
            for (int query = 1; query < q; query++) {
                IndexSearcher searcher = new IndexSearcher(reader);
                var topDocs = searcher.search(
                    SortedNumericDocValuesField.newSlowExactQuery(queryField, query),
                    numDocsPerQValue,
                    new Sort(SortField.FIELD_DOC),
                    false
                );
                assertEquals(numDocsPerQValue, topDocs.totalHits.value());
                var timestampDV = getBaseDenseNumericValues(leafReader, timestampField);
                long[] expectedTimestamps = new long[numDocsPerQValue];
                var counterDV = getBaseDenseNumericValues(leafReader, counterField);
                long[] expectedCounters = new long[numDocsPerQValue];
                var counterAsStringDV = getBaseSortedDocValues(leafReader, counterAsStringField);
                String[] expectedCounterAsStrings = new String[numDocsPerQValue];
                int[] docIds = new int[numDocsPerQValue];
                for (int i = 0; i < topDocs.scoreDocs.length; i++) {
                    var scoreDoc = topDocs.scoreDocs[i];
                    docIds[i] = scoreDoc.doc;
                    assertTrue(timestampDV.advanceExact(scoreDoc.doc));
                    expectedTimestamps[i] = timestampDV.longValue();
                    assertTrue(counterDV.advanceExact(scoreDoc.doc));
                    expectedCounters[i] = counterDV.longValue();
                    assertTrue(counterAsStringDV.advanceExact(scoreDoc.doc));
                    expectedCounterAsStrings[i] = counterAsStringDV.lookupOrd(counterAsStringDV.ordValue()).utf8ToString();
                }
                var docs = TestBlock.docs(docIds);
                {
                    // Fresh reader instance: the previous one was consumed by the advanceExact loop above.
                    timestampDV = getBaseDenseNumericValues(leafReader, timestampField);
                    var block = (TestBlock) timestampDV.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                    assertNotNull(block);
                    assertEquals(numDocsPerQValue, block.size());
                    for (int j = 0; j < block.size(); j++) {
                        long actualTimestamp = (long) block.get(j);
                        long expectedTimestamp = expectedTimestamps[j];
                        assertEquals(expectedTimestamp, actualTimestamp);
                    }
                }
                {
                    counterDV = getBaseDenseNumericValues(leafReader, counterField);
                    var block = (TestBlock) counterDV.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                    assertNotNull(block);
                    assertEquals(numDocsPerQValue, block.size());
                    for (int j = 0; j < block.size(); j++) {
                        long actualCounter = (long) block.get(j);
                        long expectedCounter = expectedCounters[j];
                        assertEquals(expectedCounter, actualCounter);
                    }
                }
                {
                    counterAsStringDV = getBaseSortedDocValues(leafReader, counterAsStringField);
                    var block = (TestBlock) counterAsStringDV.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                    assertNotNull(block);
                    assertEquals(numDocsPerQValue, block.size());
                    for (int j = 0; j < block.size(); j++) {
                        var actualCounter = ((BytesRef) block.get(j)).utf8ToString();
                        var expectedCounter = expectedCounterAsStrings[j];
                        assertEquals(expectedCounter, actualCounter);
                    }
                }
            }
            BlockLoader.Docs docs;
            {
                // Sparse numeric field: request only docs that actually carry a value.
                int startIndex = ESTestCase.between(0, temperatureValues.length - 1);
                int endIndex = ESTestCase.between(startIndex + 1, temperatureValues.length);
                List<Integer> testDocs = new ArrayList<>();
                for (int i = startIndex; i < endIndex; i++) {
                    if (temperatureValues[i] != null) {
                        testDocs.add(i);
                    }
                }
                if (testDocs.isEmpty() == false) {
                    NumericDocValues dv = leafReader.getNumericDocValues(temperatureField);
                    assertThat(dv, instanceOf(OptionalColumnAtATimeReader.class));
                    OptionalColumnAtATimeReader directReader = (OptionalColumnAtATimeReader) dv;
                    docs = TestBlock.docs(testDocs.stream().mapToInt(n -> n).toArray());
                    // With the 4th argument false the sparse field refuses to bulk-load; with true it succeeds.
                    assertNull(directReader.tryRead(factory, docs, 0, false, null, false));
                    TestBlock block = (TestBlock) directReader.tryRead(factory, docs, 0, true, null, false);
                    assertNotNull(block);
                    for (int i = 0; i < testDocs.size(); i++) {
                        assertThat(block.get(i), equalTo(temperatureValues[testDocs.get(i)]));
                    }
                }
                if (testDocs.size() > 2) {
                    // currently bulk loading is disabled with gaps
                    testDocs.remove(ESTestCase.between(1, testDocs.size() - 2));
                    docs = TestBlock.docs(testDocs.stream().mapToInt(n -> n).toArray());
                    NumericDocValues dv = leafReader.getNumericDocValues(temperatureField);
                    OptionalColumnAtATimeReader directReader = (OptionalColumnAtATimeReader) dv;
                    assertNull(directReader.tryRead(factory, docs, 0, false, null, false));
                    assertNull(directReader.tryRead(factory, docs, 0, true, null, false));
                }
            }
            {
                // Bulk binary loader can only handle sparse queries over dense or sparse documents
                List<Integer> testDocs = IntStream.range(0, numDocs - 1).filter(i -> randomBoolean()).boxed().toList();
                docs = TestBlock.docs(testDocs.stream().mapToInt(n -> n).toArray());
                if (testDocs.isEmpty() == false) {
                    if (denseBinaryData) {
                        {
                            var dv = getDenseBinaryValues(leafReader, binaryFixedField);
                            var block = (TestBlock) dv.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                            assertNotNull(block);
                            for (int i = 0; i < testDocs.size(); i++) {
                                assertThat(block.get(i), equalTo(binaryFixed[testDocs.get(i)]));
                            }
                        }
                        {
                            var dv = getDenseBinaryValues(leafReader, binaryVariableField);
                            var block = (TestBlock) dv.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                            assertNotNull(block);
                            for (int i = 0; i < testDocs.size(); i++) {
                                assertThat(block.get(i), equalTo(binaryVariable[testDocs.get(i)]));
                            }
                        }
                    } else {
                        // Sparse binary data: bulk loading is expected to bail out (null block).
                        {
                            var dv = getSparseBinaryValues(leafReader, binaryFixedField);
                            var block = (TestBlock) dv.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                            assertNull(block);
                        }
                        {
                            var dv = getSparseBinaryValues(leafReader, binaryVariableField);
                            var block = (TestBlock) dv.tryRead(factory, docs, 0, random().nextBoolean(), null, false);
                            assertNull(block);
                        }
                    }
                }
            }
        }
    }
}
/**
 * Verifies bulk loading of keyword (SORTED) fields under an index sort: {@code primaryField} is the
 * primary sort key, {@code secondField}/{@code unsortedField} hold the same hostname but do not drive
 * the sort. Observed contract exercised below: {@code tryReadAHead} returns a block for the primary
 * sort field (and for any field with a single distinct value), and returns null otherwise, in which
 * case {@code tryRead} is used. Runs two iterations: multi-segment first, then after forceMerge(1).
 */
public void testLoadKeywordFieldWithIndexSorts() throws IOException {
    String primaryField = "sorted_first";
    String secondField = "sorted_second";
    String unsortedField = "no_sort";
    // NOTE(review): sparseField is indexed below but never read back/verified in this test.
    String sparseField = "sparse";
    var config = new IndexWriterConfig();
    config.setIndexSort(new Sort(new SortField(primaryField, SortField.Type.STRING, false)));
    config.setMergePolicy(new LogByteSizeMergePolicy());
    // Custom codec pinning the TSDB doc-values format with randomized knobs
    // (the second argument forces range-encoding on).
    final Codec codec = new Elasticsearch92Lucene103Codec() {
        final ES819TSDBDocValuesFormat docValuesFormat = new ES819TSDBDocValuesFormat(
            randomIntBetween(2, 4096),
            1, // always enable range-encode
            random().nextBoolean(),
            randomBinaryCompressionMode(),
            randomBoolean(),
            randomNumericBlockSize()
        );
        @Override
        public DocValuesFormat getDocValuesFormatForField(String field) {
            return docValuesFormat;
        }
    };
    config.setCodec(codec);
    // id -> hostname; ids are shuffled so insertion order differs from the index sort order.
    Map<Integer, String> hostnames = new HashMap<>();
    try (Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, config)) {
        int numDocs = randomIntBetween(100, 5000);
        for (int i = 0; i < numDocs; i++) {
            hostnames.put(i, "h" + random().nextInt(10));
        }
        List<Integer> ids = new ArrayList<>(hostnames.keySet());
        Randomness.shuffle(ids);
        Set<Integer> sparseIds = new HashSet<>(ESTestCase.randomSubsetOf(ESTestCase.between(1, ids.size() / 2), ids));
        for (Integer id : ids) {
            var d = new Document();
            String hostname = hostnames.get(id);
            d.add(new NumericDocValuesField("id", id));
            d.add(new SortedDocValuesField(primaryField, new BytesRef(hostname)));
            d.add(new SortedDocValuesField(secondField, new BytesRef(hostname)));
            d.add(new SortedDocValuesField(unsortedField, new BytesRef(hostname)));
            if (sparseIds.contains(id)) {
                d.add(new SortedDocValuesField(sparseField, new BytesRef(hostname)));
            }
            writer.addDocument(d);
            // Random flushes create a varied segment geometry for the first iteration.
            if (random().nextInt(100) < 10) {
                writer.flush();
            }
        }
        for (int iter = 0; iter < 2; iter++) {
            var factory = TestBlock.factory();
            try (DirectoryReader reader = DirectoryReader.open(writer)) {
                for (LeafReaderContext leaf : reader.leaves()) {
                    // Identity docs view: every doc id of the leaf, in order.
                    BlockLoader.Docs docs = new BlockLoader.Docs() {
                        @Override
                        public int count() {
                            return leaf.reader().maxDoc();
                        }
                        @Override
                        public int get(int i) {
                            return i;
                        }
                    };
                    // Load the id column once; it maps block positions back to expected hostnames.
                    var idReader = ESTestCase.asInstanceOf(OptionalColumnAtATimeReader.class, leaf.reader().getNumericDocValues("id"));
                    TestBlock idBlock = (TestBlock) idReader.tryRead(factory, docs, 0, false, null, false);
                    assertNotNull(idBlock);
                    {
                        // Non-primary sorted field: tryReadAHead only works when there is a single ordinal.
                        var reader2 = (BaseSortedDocValues) ESTestCase.asInstanceOf(
                            OptionalColumnAtATimeReader.class,
                            leaf.reader().getSortedDocValues(secondField)
                        );
                        int randomOffset = ESTestCase.between(0, docs.count() - 1);
                        TestBlock block;
                        if (reader2.getValueCount() == 1) {
                            block = (TestBlock) reader2.tryReadAHead(factory, docs, randomOffset);
                        } else {
                            assertNull(reader2.tryReadAHead(factory, docs, randomOffset));
                            block = (TestBlock) reader2.tryRead(factory, docs, randomOffset, false, null, false);
                        }
                        assertNotNull(block);
                        assertThat(block.size(), equalTo(docs.count() - randomOffset));
                        for (int i = 0; i < block.size(); i++) {
                            String actualHostName = BytesRefs.toString(block.get(i));
                            int id = ((Number) idBlock.get(i + randomOffset)).intValue();
                            String expectedHostName = hostnames.get(id);
                            assertEquals(expectedHostName, actualHostName);
                        }
                    }
                    {
                        // Same check for the field that does not participate in the index sort at all.
                        var reader3 = (BaseSortedDocValues) ESTestCase.asInstanceOf(
                            OptionalColumnAtATimeReader.class,
                            leaf.reader().getSortedDocValues(unsortedField)
                        );
                        int randomOffset = ESTestCase.between(0, docs.count() - 1);
                        TestBlock block;
                        if (reader3.getValueCount() == 1) {
                            block = (TestBlock) reader3.tryReadAHead(factory, docs, randomOffset);
                        } else {
                            assertNull(reader3.tryReadAHead(factory, docs, randomOffset));
                            block = (TestBlock) reader3.tryRead(factory, docs, randomOffset, false, null, false);
                        }
                        // NOTE(review): redundant — reader3 was already dereferenced above.
                        assertNotNull(reader3);
                        assertNotNull(block);
                        assertThat(block.size(), equalTo(docs.count() - randomOffset));
                        for (int i = 0; i < block.size(); i++) {
                            String actualHostName = BytesRefs.toString(block.get(i));
                            int id = ((Number) idBlock.get(i + randomOffset)).intValue();
                            String expectedHostName = hostnames.get(id);
                            assertEquals(expectedHostName, actualHostName);
                        }
                    }
                    // Primary sort field: tryReadAHead must serve arbitrary [start, end] windows.
                    for (int offset = 0; offset < idBlock.size(); offset += ESTestCase.between(1, numDocs)) {
                        int start = offset;
                        var reader1 = (BaseSortedDocValues) ESTestCase.asInstanceOf(
                            OptionalColumnAtATimeReader.class,
                            leaf.reader().getSortedDocValues(primaryField)
                        );
                        while (start < idBlock.size()) {
                            int end = start + random().nextInt(idBlock.size() - start);
                            TestBlock hostBlock = (TestBlock) reader1.tryReadAHead(factory, new BlockLoader.Docs() {
                                @Override
                                public int count() {
                                    return end + 1;
                                }
                                @Override
                                public int get(int docId) {
                                    return docId;
                                }
                            }, start);
                            assertNotNull(hostBlock);
                            assertThat(hostBlock.size(), equalTo(end - start + 1));
                            for (int i = 0; i < hostBlock.size(); i++) {
                                String actualHostName = BytesRefs.toString(hostBlock.get(i));
                                assertThat(actualHostName, equalTo(hostnames.get(((Number) idBlock.get(i + start)).intValue())));
                            }
                            if (start == idBlock.size() - 1) {
                                break;
                            }
                            start = end + ESTestCase.between(0, 10);
                        }
                    }
                    // NOTE(review): forceMerge sits inside the per-leaf loop, so it runs once per leaf;
                    // the open reader snapshot is unaffected, and iteration 2 then sees one segment.
                    // Presumably intended to live one loop out — confirm upstream.
                    writer.forceMerge(1);
                }
            }
        }
    }
}
/**
 * Verifies round-tripping of a multi-valued SORTED_SET primary sort field: each doc holds two
 * hostnames ("host-&lt;batch&gt;" and "host-&lt;batch+1&gt;"), so consecutive batches share one value.
 * After forceMerge(1), asserts per-doc ordinal order, ordinal-to-term mapping (the zero-padded
 * %03d names make lexicographic ordinal order equal numeric batch order), and that timestamps
 * fall in the indexed range. The timestamp check is range-only because the index sort
 * (hostname asc, timestamp desc) reorders docs within a batch relative to insertion order.
 */
public void testEncodeRangeWithSortedSetPrimarySortField() throws Exception {
    String timestampField = "@timestamp";
    String hostnameField = "host.name";
    long baseTimestamp = 1704067200000L;
    // multiValued=true: the primary sort is a SortedSetSortField.
    var config = getTimeSeriesIndexWriterConfig(hostnameField, true, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        int numDocs = 512 + random().nextInt(512);
        int numHosts = numDocs / 20;
        for (int i = 0; i < numDocs; i++) {
            var d = new Document();
            int batchIndex = i / numHosts;
            {
                String hostName = String.format(Locale.ROOT, "host-%03d", batchIndex);
                d.add(new SortedSetDocValuesField(hostnameField, new BytesRef(hostName)));
            }
            {
                String hostName = String.format(Locale.ROOT, "host-%03d", batchIndex + 1);
                d.add(new SortedSetDocValuesField(hostnameField, new BytesRef(hostName)));
            }
            // Index sorting doesn't work with NumericDocValuesField:
            long timestamp = baseTimestamp + (1000L * i);
            d.add(new SortedNumericDocValuesField(timestampField, timestamp));
            iw.addDocument(d);
            if (i % 100 == 0) {
                iw.commit();
            }
        }
        iw.commit();
        iw.forceMerge(1);
        try (var reader = DirectoryReader.open(iw)) {
            assertEquals(1, reader.leaves().size());
            assertEquals(numDocs, reader.maxDoc());
            var leaf = reader.leaves().get(0).reader();
            var hostNameDV = leaf.getSortedSetDocValues(hostnameField);
            assertNotNull(hostNameDV);
            var timestampDV = DocValues.unwrapSingleton(leaf.getSortedNumericDocValues(timestampField));
            assertNotNull(timestampDV);
            for (int i = 0; i < numDocs; i++) {
                assertEquals(i, hostNameDV.nextDoc());
                // Batch b occupies the contiguous doc range [b*numHosts, (b+1)*numHosts) after sorting.
                int batchIndex = i / numHosts;
                assertEquals(2, hostNameDV.docValueCount());
                // Ordinals are dense and the %03d names sort in numeric order, so ord == batch index.
                long firstOrd = hostNameDV.nextOrd();
                assertEquals(batchIndex, firstOrd);
                String expectedFirstHostName = String.format(Locale.ROOT, "host-%03d", batchIndex);
                String actualFirstHostName = hostNameDV.lookupOrd(firstOrd).utf8ToString();
                assertEquals(expectedFirstHostName, actualFirstHostName);
                batchIndex++;
                long secondOrd = hostNameDV.nextOrd();
                assertEquals(batchIndex, secondOrd);
                String expectedSecondHostName = String.format(Locale.ROOT, "host-%03d", batchIndex);
                String actualSecondHostName = hostNameDV.lookupOrd(secondOrd).utf8ToString();
                assertEquals(expectedSecondHostName, actualSecondHostName);
                assertEquals(i, timestampDV.nextDoc());
                long timestamp = timestampDV.longValue();
                long lowerBound = baseTimestamp;
                long upperBound = baseTimestamp + (1000L * numDocs);
                assertTrue(
                    "unexpected timestamp [" + timestamp + "], expected between [" + lowerBound + "] and [" + upperBound + "]",
                    timestamp >= lowerBound && timestamp < upperBound
                );
            }
        }
    }
}
/** Fetches the binary doc values for {@code field} and narrows them to the dense implementation. */
private static DenseBinaryDocValues getDenseBinaryValues(LeafReader leafReader, String field) throws IOException {
    var values = leafReader.getBinaryDocValues(field);
    return (DenseBinaryDocValues) values;
}
/** Fetches the binary doc values for {@code field} and narrows them to the sparse implementation. */
private static SparseBinaryDocValues getSparseBinaryValues(LeafReader leafReader, String field) throws IOException {
    var values = leafReader.getBinaryDocValues(field);
    return (SparseBinaryDocValues) values;
}
/**
 * Reads {@code field} as a multi-valued numeric field, unwraps the single-valued view,
 * and narrows it to the dense base implementation.
 */
private static BaseDenseNumericValues getBaseDenseNumericValues(LeafReader leafReader, String field) throws IOException {
    var multiValued = leafReader.getSortedNumericDocValues(field);
    var singleton = DocValues.unwrapSingleton(multiValued);
    return (BaseDenseNumericValues) singleton;
}
/**
 * Resolves {@code field} to its {@link BaseSortedDocValues} implementation, preferring a plain
 * SORTED field and falling back to unwrapping a single-valued SORTED_SET field.
 */
private static BaseSortedDocValues getBaseSortedDocValues(LeafReader leafReader, String field) throws IOException {
    var singleValued = leafReader.getSortedDocValues(field);
    if (singleValued != null) {
        return (BaseSortedDocValues) singleValued;
    }
    return (BaseSortedDocValues) DocValues.unwrapSingleton(leafReader.getSortedSetDocValues(field));
}
/**
 * Exercises {@code DocIdSetIterator#docIDRunEnd()} across every doc-values type produced by the
 * format (NUMERIC, SORTED_NUMERIC, SORTED, SORTED_SET, BINARY), each in a dense variant and a
 * "sparse_" variant with a value gap every {@code gap_frequency} docs, so both dense and sparse
 * IndexedDISI blocks are covered. validateRunEnd then checks that advancing one doc at a time
 * through a reported run always lands on the expected doc id.
 */
public void testDocIDEndRun() throws IOException {
    String timestampField = "@timestamp";
    String hostnameField = "host.name";
    long baseTimestamp = 1704067200000L;
    var config = getTimeSeriesIndexWriterConfig(hostnameField, timestampField);
    try (var dir = newDirectory(); var iw = new IndexWriter(dir, config)) {
        long counter1 = 0;
        long[] gauge2Values = new long[] { -2, -4, -6, -8, -10, -12, -14, -16 };
        String[] tags = new String[] { "tag_1", "tag_2", "tag_3", "tag_4", "tag_5", "tag_6", "tag_7", "tag_8" };
        // IndexedDISI stores ids in blocks of 4096. To test sparse end runs, we want a mixture of
        // dense and sparse blocks, so we need the gap frequency to be larger than
        // this value, but smaller than two blocks, and to index at least three blocks
        int gap_frequency = 4500 + random().nextInt(2048);
        int numDocs = 10000 + random().nextInt(10000);
        int numHosts = numDocs / 20;
        for (int i = 0; i < numDocs; i++) {
            var d = new Document();
            int batchIndex = i / numHosts;
            String hostName = String.format(Locale.ROOT, "host-%03d", batchIndex);
            long timestamp = baseTimestamp + (1000L * i);
            d.add(new SortedDocValuesField(hostnameField, new BytesRef(hostName)));
            // Index sorting doesn't work with NumericDocValuesField:
            d.add(new SortedNumericDocValuesField(timestampField, timestamp));
            d.add(new NumericDocValuesField("counter", counter1++));
            // Each "sparse_" field skips every gap_frequency-th doc to create value gaps.
            if (i % gap_frequency != 0) {
                d.add(new NumericDocValuesField("sparse_counter", counter1));
            }
            int numGauge2 = 1 + random().nextInt(8);
            for (int j = 0; j < numGauge2; j++) {
                d.add(new SortedNumericDocValuesField("gauge", gauge2Values[(i + j) % gauge2Values.length]));
                if (i % gap_frequency != 0) {
                    d.add(new SortedNumericDocValuesField("sparse_gauge", gauge2Values[(i + j) % gauge2Values.length]));
                }
            }
            d.add(new SortedDocValuesField("tag", new BytesRef(randomFrom(tags))));
            if (i % gap_frequency != 0) {
                d.add(new SortedDocValuesField("sparse_tag", new BytesRef(randomFrom(tags))));
            }
            int numTags = 1 + random().nextInt(8);
            for (int j = 0; j < numTags; j++) {
                d.add(new SortedSetDocValuesField("tags", new BytesRef(tags[(i + j) % tags.length])));
                if (i % gap_frequency != 0) {
                    d.add(new SortedSetDocValuesField("sparse_tags", new BytesRef(tags[(i + j) % tags.length])));
                }
            }
            d.add(new BinaryDocValuesField("tags_as_bytes", new BytesRef(tags[i % tags.length])));
            if (i % gap_frequency != 0) {
                d.add(new BinaryDocValuesField("sparse_tags_as_bytes", new BytesRef(tags[i % tags.length])));
            }
            iw.addDocument(d);
            if (i % 100 == 0) {
                iw.commit();
            }
        }
        iw.commit();
        // Single segment so each field's iterator spans all the IndexedDISI blocks at once.
        iw.forceMerge(1);
        try (var reader = DirectoryReader.open(iw)) {
            assertEquals(1, reader.leaves().size());
            assertEquals(numDocs, reader.maxDoc());
            var leaf = reader.leaves().get(0).reader();
            var hostNameDV = leaf.getSortedDocValues(hostnameField);
            assertNotNull(hostNameDV);
            validateRunEnd(hostNameDV);
            var timestampDV = DocValues.unwrapSingleton(leaf.getSortedNumericDocValues(timestampField));
            assertNotNull(timestampDV);
            validateRunEnd(timestampDV);
            var counterOneDV = leaf.getNumericDocValues("counter");
            assertNotNull(counterOneDV);
            validateRunEnd(counterOneDV);
            var sparseCounter = leaf.getNumericDocValues("sparse_counter");
            assertNotNull(sparseCounter);
            validateRunEnd(sparseCounter);
            var gaugeOneDV = leaf.getSortedNumericDocValues("gauge");
            assertNotNull(gaugeOneDV);
            validateRunEnd(gaugeOneDV);
            var sparseGaugeDV = leaf.getSortedNumericDocValues("sparse_gauge");
            assertNotNull(sparseGaugeDV);
            validateRunEnd(sparseGaugeDV);
            var tagDV = leaf.getSortedDocValues("tag");
            assertNotNull(tagDV);
            validateRunEnd(tagDV);
            var sparseTagDV = leaf.getSortedDocValues("sparse_tag");
            assertNotNull(sparseTagDV);
            validateRunEnd(sparseTagDV);
            var tagsDV = leaf.getSortedSetDocValues("tags");
            assertNotNull(tagsDV);
            validateRunEnd(tagsDV);
            var sparseTagsDV = leaf.getSortedSetDocValues("sparse_tags");
            assertNotNull(sparseTagsDV);
            validateRunEnd(sparseTagsDV);
            var tagBytesDV = leaf.getBinaryDocValues("tags_as_bytes");
            assertNotNull(tagBytesDV);
            validateRunEnd(tagBytesDV);
            var sparseTagBytesDV = leaf.getBinaryDocValues("sparse_tags_as_bytes");
            assertNotNull(sparseTagBytesDV);
            validateRunEnd(sparseTagBytesDV);
        }
    }
}
/**
 * Walks {@code iterator} and, whenever {@code docIDRunEnd()} reports a contiguous run of more than
 * one following doc, advances one doc at a time asserting each advance lands exactly on the next id.
 * Fails if the iterator never reported such a run.
 */
private void validateRunEnd(DocIdSetIterator iterator) throws IOException {
    int observedRuns = 0;
    for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
        // Number of docs after the current one that are guaranteed present in this run.
        int remaining = iterator.docIDRunEnd() - iterator.docID() - 1;
        if (remaining > 1) {
            observedRuns++;
            for (int step = 0; step < remaining; step++) {
                int next = iterator.docID() + 1;
                assertEquals(next, iterator.advance(next));
            }
        }
    }
    assertTrue("Expected docid runs of greater than 1", observedRuns > 0);
}
/** Convenience overload: single-valued (SORTED) hostname sort field, i.e. {@code multiValued=false}. */
private IndexWriterConfig getTimeSeriesIndexWriterConfig(String hostnameField, String timestampField) {
    return getTimeSeriesIndexWriterConfig(hostnameField, false, timestampField);
}
/**
 * Builds an {@link IndexWriterConfig} mimicking a time-series index: primary sort on the hostname
 * field when given (SORTED_SET when {@code multiValued}, SORTED otherwise), secondary sort on the
 * timestamp in descending order, time-series leaf sorter, size-tiered merges, and the test codec.
 */
private IndexWriterConfig getTimeSeriesIndexWriterConfig(String hostnameField, boolean multiValued, String timestampField) {
    var config = new IndexWriterConfig();
    // Timestamps always sort descending (reverse = true), matching time-series read order.
    var timestampSort = new SortedNumericSortField(timestampField, SortField.Type.LONG, true);
    if (hostnameField == null) {
        config.setIndexSort(new Sort(timestampSort));
    } else {
        SortField hostSort = multiValued
            ? new SortedSetSortField(hostnameField, false)
            : new SortField(hostnameField, SortField.Type.STRING, false);
        config.setIndexSort(new Sort(hostSort, timestampSort));
    }
    config.setLeafSorter(DataStream.TIMESERIES_LEAF_READERS_SORTER);
    config.setMergePolicy(new LogByteSizeMergePolicy());
    config.setCodec(getCodec());
    return config;
}
/** Picks a binary doc-values compression mode uniformly at random from all declared modes. */
public static BinaryDVCompressionMode randomBinaryCompressionMode() {
    final BinaryDVCompressionMode[] allModes = BinaryDVCompressionMode.values();
    int choice = random().nextInt(allModes.length);
    return allModes[choice];
}
/** Randomly exercises both the large and the standard numeric block shift of the format. */
public static int randomNumericBlockSize() {
    if (random().nextBoolean()) {
        return ES819TSDBDocValuesFormat.NUMERIC_LARGE_BLOCK_SHIFT;
    }
    return ES819TSDBDocValuesFormat.NUMERIC_BLOCK_SHIFT;
}
}
| TestES819TSDBDocValuesFormatVersion0 |
java | square__retrofit | samples/src/main/java/com/example/retrofit/AnnotatedConverters.java | {
"start": 3685,
"end": 3736
} | class ____ {
@Attribute String name;
}
| Library |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/StreamOperatorParameters.java | {
"start": 1394,
"end": 1825
} | class ____ construct {@link AbstractStreamOperatorV2}. Wraps couple of internal parameters
* to simplify for users construction of classes extending {@link AbstractStreamOperatorV2} and to
* allow for backward compatible changes in the {@link AbstractStreamOperatorV2}'s constructor.
*
* @param <OUT> The output type of an operator that will be constructed using {@link
* StreamOperatorParameters}.
*/
@Experimental
public | to |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/core/NotificationTest.java | {
"start": 762,
"end": 2742
} | class ____ extends RxJavaTest {
@Test
public void valueOfOnErrorIsNull() {
Notification<Integer> notification = Notification.createOnError(new TestException());
assertNull(notification.getValue());
assertTrue(notification.getError().toString(), notification.getError() instanceof TestException);
}
@Test
public void valueOfOnCompleteIsNull() {
Notification<Integer> notification = Notification.createOnComplete();
assertNull(notification.getValue());
assertNull(notification.getError());
assertTrue(notification.isOnComplete());
}
@Test
public void notEqualsToObject() {
Notification<Integer> n1 = Notification.createOnNext(0);
assertNotEquals(0, n1);
assertNotEquals(n1, 0);
Notification<Integer> n2 = Notification.createOnError(new TestException());
assertNotEquals(0, n2);
assertNotEquals(n2, 0);
Notification<Integer> n3 = Notification.createOnComplete();
assertNotEquals(0, n3);
assertNotEquals(n3, 0);
}
@Test
public void twoEqual() {
Notification<Integer> n1 = Notification.createOnNext(0);
Notification<Integer> n2 = Notification.createOnNext(0);
assertEquals(n1, n2);
assertEquals(n2, n1);
}
@Test
public void hashCodeIsTheInner() {
Notification<Integer> n1 = Notification.createOnNext(1337);
assertEquals(Integer.valueOf(1337).hashCode(), n1.hashCode());
assertEquals(0, Notification.createOnComplete().hashCode());
}
@Test
public void toStringPattern() {
assertEquals("OnNextNotification[1]", Notification.createOnNext(1).toString());
assertEquals("OnErrorNotification[io.reactivex.rxjava3.exceptions.TestException]", Notification.createOnError(new TestException()).toString());
assertEquals("OnCompleteNotification", Notification.createOnComplete().toString());
}
}
| NotificationTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/DuplicateConfigurationClassPostProcessorTests.java | {
"start": 1380,
"end": 1861
} | class ____ {
@Test
void repro() {
GenericApplicationContext ctx = new GenericApplicationContext();
ctx.registerBeanDefinition("a", new RootBeanDefinition(ConfigurationClassPostProcessor.class));
ctx.registerBeanDefinition("b", new RootBeanDefinition(ConfigurationClassPostProcessor.class));
ctx.registerBeanDefinition("myConfig", new RootBeanDefinition(Config.class));
ctx.refresh();
ctx.close();
}
@Configuration
static | DuplicateConfigurationClassPostProcessorTests |
java | apache__camel | components/camel-metrics/src/test/java/org/apache/camel/component/metrics/GaugeProducerTest.java | {
"start": 1746,
"end": 3983
} | class ____ {
private static final String METRICS_NAME = "metrics.name";
private static final String METRICS_NAME_HEADER = "metrics.name.header";
private static final Object VALUE = "my subject";
private static final Object VALUE_HEADER = "my subject header";
@Mock
private MetricsEndpoint endpoint;
@Mock
private Exchange exchange;
@Mock
private MetricRegistry registry;
@Mock
private Message in;
private GaugeProducer producer;
@BeforeEach
public void setUp() {
lenient().when(endpoint.getRegistry()).thenReturn(registry);
lenient().when(endpoint.getSubject()).thenReturn(VALUE);
lenient().when(endpoint.getMetricsName()).thenReturn(METRICS_NAME);
lenient().when(exchange.getIn()).thenReturn(in);
producer = new GaugeProducer(endpoint);
}
@Test
public void testGaugeProducer() {
assertThat(producer.getEndpoint().equals(endpoint), is(true));
}
@Test
public void testDefault() {
verify(registry, times(1)).register(eq(METRICS_NAME), argThat(new ArgumentMatcher<CamelMetricsGauge>() {
@Override
public boolean matches(CamelMetricsGauge argument) {
return VALUE.equals(argument.getValue());
}
}));
}
@Test
public void testProcessWithHeaderValues() throws Exception {
when(in.getHeader(HEADER_GAUGE_SUBJECT, Object.class)).thenReturn(VALUE_HEADER);
producer.doProcess(exchange, endpoint, registry, METRICS_NAME_HEADER);
verify(in, times(1)).getHeader(HEADER_GAUGE_SUBJECT, Object.class);
verify(registry, times(1)).register(eq(METRICS_NAME), argThat(new ArgumentMatcher<CamelMetricsGauge>() {
@Override
public boolean matches(CamelMetricsGauge argument) {
return VALUE.equals(argument.getValue());
}
}));
verify(registry, times(1)).register(eq(METRICS_NAME_HEADER), argThat(new ArgumentMatcher<CamelMetricsGauge>() {
@Override
public boolean matches(CamelMetricsGauge argument) {
return VALUE_HEADER.equals(argument.getValue());
}
}));
}
}
| GaugeProducerTest |
java | quarkusio__quarkus | devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/azure-functions-codestart/java/src/main/java/org/acme/GreetingService.java | {
"start": 99,
"end": 209
} | class ____ {
public String greeting(String name) {
return "Guten Tag " + name;
}
}
| GreetingService |
java | elastic__elasticsearch | x-pack/plugin/kql/src/main/java/org/elasticsearch/xpack/kql/parser/KqlParsingException.java | {
"start": 474,
"end": 1745
} | class ____ extends ElasticsearchException {
private final int line;
private final int charPositionInLine;
public KqlParsingException(String message, Exception cause, int line, int charPositionInLine) {
super(message, cause);
this.line = line;
this.charPositionInLine = charPositionInLine;
}
public KqlParsingException(String message, int line, int charPositionInLine, Object... args) {
super(message, args);
this.line = line;
this.charPositionInLine = charPositionInLine;
}
public KqlParsingException(String message, Throwable cause, int line, int charPositionInLine, Object... args) {
super(message, cause, args);
this.line = line;
this.charPositionInLine = charPositionInLine;
}
public int getLineNumber() {
return line;
}
public int getColumnNumber() {
return charPositionInLine + 1;
}
public String getErrorMessage() {
return super.getMessage();
}
@Override
public String getMessage() {
return format("line {}:{}: {}", getLineNumber(), getColumnNumber(), getErrorMessage());
}
@Override
public RestStatus status() {
return RestStatus.BAD_REQUEST;
}
}
| KqlParsingException |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/response/ResponseStringNonAsciiTest.java | {
"start": 1256,
"end": 1467
} | class ____ {
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response hello() {
return Response.ok("{\"message\": \"Καλημέρα κόσμε\"}").build();
}
}
}
| HelloResource |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/InternalHandlerRegistry.java | {
"start": 1715,
"end": 2604
} | class ____ {
// Store per-service first, to make sure services are added/replaced atomically.
private final HashMap<String, ServerServiceDefinition> services =
new LinkedHashMap<>();
Builder addService(ServerServiceDefinition service) {
services.put(service.getServiceDescriptor().getName(), service);
return this;
}
InternalHandlerRegistry build() {
Map<String, ServerMethodDefinition<?, ?>> map =
new HashMap<>();
for (ServerServiceDefinition service : services.values()) {
for (ServerMethodDefinition<?, ?> method : service.getMethods()) {
map.put(method.getMethodDescriptor().getFullMethodName(), method);
}
}
return new InternalHandlerRegistry(
Collections.unmodifiableList(new ArrayList<>(services.values())),
Collections.unmodifiableMap(map));
}
}
}
| Builder |
java | quarkusio__quarkus | integration-tests/jackson/src/test/java/io/quarkus/it/jackson/ModelWithBuilderResourceTest.java | {
"start": 380,
"end": 1031
} | class ____ {
@Test
public void testModelWithBuilder() throws IOException {
ModelWithBuilder model = new ModelWithBuilder.Builder("123")
.withVersion(3)
.withValue("some")
.build();
given()
.contentType("application/json")
.body(model.toJson(getObjectMapperForTest()))
.when().post("/modelwithbuilder")
.then()
.statusCode(201)
.body("id", equalTo("123"))
.body("version", equalTo(3))
.body("value", equalTo("some"));
}
}
| ModelWithBuilderResourceTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/GrapeComponentBuilderFactory.java | {
"start": 1902,
"end": 4658
} | interface ____ extends ComponentBuilder<GrapeComponent> {
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default GrapeComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default GrapeComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* Implementation of org.apache.camel.component.grape.PatchesRepository,
* by default: FilePatchesRepository.
*
* The option is a:
* <code>org.apache.camel.component.grape.PatchesRepository</code> type.
*
* Group: advanced
*
* @param patchesRepository the value to set
* @return the dsl builder
*/
default GrapeComponentBuilder patchesRepository(org.apache.camel.component.grape.PatchesRepository patchesRepository) {
doSetProperty("patchesRepository", patchesRepository);
return this;
}
}
| GrapeComponentBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/LazyBasicFieldNotInitializedTest.java | {
"start": 1749,
"end": 3029
} | class ____ {
private Long entityId;
@BeforeEach
public void prepare(SessionFactoryScope scope) {
scope.inTransaction( s -> {
TestEntity entity = new TestEntity();
entity.description = "desc";
s.persist( entity );
entityId = entity.id;
} );
}
@Test
public void test(SessionFactoryScope scope) {
scope.inTransaction( s -> {
TestEntity entity = s.get( TestEntity.class, entityId );
assertFalse( Hibernate.isPropertyInitialized( entity, "description" ) );
EntityPersister entityPersister = scope.getSessionFactory().getRuntimeMetamodels()
.getMappingMetamodel()
.getEntityDescriptor( TestEntity.class );
boolean[] propertyLaziness = entityPersister.getPropertyLaziness();
assertEquals( 1, propertyLaziness.length );
Assertions.assertTrue( propertyLaziness[0] );
// Make sure NonIdentifierAttribute#isLazy is consistent (HHH-10551)
final AttributeMapping theBytesAttr = entityPersister.findAttributeMapping( "description" );
assertThat( theBytesAttr ).isInstanceOf( BasicValuedModelPart.class );
assertThat( theBytesAttr.getMappedFetchOptions().getTiming() ).isEqualTo( FetchTiming.DELAYED );
} );
}
// --- //
@Entity(name = "TestEntity")
@Table( name = "TEST_ENTITY" )
static | LazyBasicFieldNotInitializedTest |
java | netty__netty | common/src/test/java/io/netty/util/internal/logging/InternalLoggerFactoryTest.java | {
"start": 1133,
"end": 5979
} | class ____ {
private static final Exception e = new Exception();
private InternalLoggerFactory oldLoggerFactory;
private InternalLogger mockLogger;
@BeforeEach
public void init() {
oldLoggerFactory = InternalLoggerFactory.getDefaultFactory();
final InternalLoggerFactory mockFactory = mock(InternalLoggerFactory.class);
mockLogger = mock(InternalLogger.class);
when(mockFactory.newInstance("mock")).thenReturn(mockLogger);
InternalLoggerFactory.setDefaultFactory(mockFactory);
}
@AfterEach
public void destroy() {
reset(mockLogger);
InternalLoggerFactory.setDefaultFactory(oldLoggerFactory);
}
@Test
public void shouldNotAllowNullDefaultFactory() {
assertThrows(NullPointerException.class, new Executable() {
@Override
public void execute() {
InternalLoggerFactory.setDefaultFactory(null);
}
});
}
@Test
public void shouldGetInstance() {
InternalLoggerFactory.setDefaultFactory(oldLoggerFactory);
String helloWorld = "Hello, world!";
InternalLogger one = InternalLoggerFactory.getInstance("helloWorld");
InternalLogger two = InternalLoggerFactory.getInstance(helloWorld.getClass());
assertNotNull(one);
assertNotNull(two);
assertNotSame(one, two);
}
@Test
public void testIsTraceEnabled() {
when(mockLogger.isTraceEnabled()).thenReturn(true);
InternalLogger logger = InternalLoggerFactory.getInstance("mock");
assertTrue(logger.isTraceEnabled());
verify(mockLogger).isTraceEnabled();
}
@Test
public void testIsDebugEnabled() {
when(mockLogger.isDebugEnabled()).thenReturn(true);
InternalLogger logger = InternalLoggerFactory.getInstance("mock");
assertTrue(logger.isDebugEnabled());
verify(mockLogger).isDebugEnabled();
}
@Test
public void testIsInfoEnabled() {
when(mockLogger.isInfoEnabled()).thenReturn(true);
InternalLogger logger = InternalLoggerFactory.getInstance("mock");
assertTrue(logger.isInfoEnabled());
verify(mockLogger).isInfoEnabled();
}
@Test
public void testIsWarnEnabled() {
when(mockLogger.isWarnEnabled()).thenReturn(true);
InternalLogger logger = InternalLoggerFactory.getInstance("mock");
assertTrue(logger.isWarnEnabled());
verify(mockLogger).isWarnEnabled();
}
@Test
public void testIsErrorEnabled() {
when(mockLogger.isErrorEnabled()).thenReturn(true);
InternalLogger logger = InternalLoggerFactory.getInstance("mock");
assertTrue(logger.isErrorEnabled());
verify(mockLogger).isErrorEnabled();
}
@Test
public void testTrace() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.trace("a");
verify(mockLogger).trace("a");
}
@Test
public void testTraceWithException() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.trace("a", e);
verify(mockLogger).trace("a", e);
}
@Test
public void testDebug() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.debug("a");
verify(mockLogger).debug("a");
}
@Test
public void testDebugWithException() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.debug("a", e);
verify(mockLogger).debug("a", e);
}
@Test
public void testInfo() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.info("a");
verify(mockLogger).info("a");
}
@Test
public void testInfoWithException() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.info("a", e);
verify(mockLogger).info("a", e);
}
@Test
public void testWarn() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.warn("a");
verify(mockLogger).warn("a");
}
@Test
public void testWarnWithException() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.warn("a", e);
verify(mockLogger).warn("a", e);
}
@Test
public void testError() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.error("a");
verify(mockLogger).error("a");
}
@Test
public void testErrorWithException() {
final InternalLogger logger = InternalLoggerFactory.getInstance("mock");
logger.error("a", e);
verify(mockLogger).error("a", e);
}
}
| InternalLoggerFactoryTest |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/ToCharFormatter.java | {
"start": 1113,
"end": 16517
} | class ____ {
protected static final Map<String, ToCharFormatter> FORMATTER_MAP;
static {
List<ToCharFormatter> formatters = List.of(
of("HH").formatFn("hh").numeric(),
of("HH12").formatFn("hh").numeric(),
of("HH24").formatFn("HH").numeric(),
of("MI").formatFn("mm").numeric(),
of("SS").formatFn("s", x -> String.format(Locale.ENGLISH, "%02d", parseInt(x))).numeric(),
of("MS").formatFn("n", nano -> firstDigitsOfNanos(nano, 3)).numericWithLeadingZeros(),
of("US").formatFn("n", nano -> firstDigitsOfNanos(nano, 6)).numericWithLeadingZeros(),
of("FF1").formatFn("n", nano -> firstDigitsOfNanos(nano, 1)).numericWithLeadingZeros(),
of("FF2").formatFn("n", nano -> firstDigitsOfNanos(nano, 2)).numericWithLeadingZeros(),
of("FF3").formatFn("n", nano -> firstDigitsOfNanos(nano, 3)).numericWithLeadingZeros(),
of("FF4").formatFn("n", nano -> firstDigitsOfNanos(nano, 4)).numericWithLeadingZeros(),
of("FF5").formatFn("n", nano -> firstDigitsOfNanos(nano, 5)).numericWithLeadingZeros(),
of("FF6").formatFn("n", nano -> firstDigitsOfNanos(nano, 6)).numericWithLeadingZeros(),
of("SSSSS").formatFn("A", milliSecondOfDay -> String.valueOf(parseInt(milliSecondOfDay) / 1000)).numeric(),
of("SSSS").formatFn("A", milliSecondOfDay -> String.valueOf(parseInt(milliSecondOfDay) / 1000)).numeric(),
of("AM").formatFn("a", x -> x.toUpperCase(Locale.ENGLISH)).text(),
of("am").formatFn("a", x -> x.toLowerCase(Locale.ENGLISH)).text(),
of("PM").formatFn("a", x -> x.toUpperCase(Locale.ENGLISH)).text(),
of("pm").formatFn("a", x -> x.toLowerCase(Locale.ENGLISH)).text(),
of("A.M.").formatFn("a", x -> x.charAt(0) + "." + x.charAt(1) + ".").text(),
of("a.m.").formatFn("a", x -> (x.charAt(0) + "." + x.charAt(1) + ".").toLowerCase(Locale.ENGLISH)).text(),
of("P.M.").formatFn("a", x -> x.charAt(0) + "." + x.charAt(1) + ".").text(),
of("p.m.").formatFn("a", x -> (x.charAt(0) + "." + x.charAt(1) + ".").toLowerCase(Locale.ENGLISH)).text(),
of("Y,YYY").formatFn("yyyy", year -> year.charAt(0) + "," + year.substring(1)).numericWithLeadingZeros(),
of("YYYY").formatFn("yyyy").numeric(),
of("YYY").formatFn("yyyy", year -> year.substring(1)).numeric(),
of("YY").formatFn("yy").numeric(),
of("Y").formatFn("yy", year -> year.substring(1)).numeric(),
of("IYYY").formatFn(t -> lastNCharacter(absoluteWeekBasedYear(t), 4)).numeric(),
of("IYY").formatFn(t -> lastNCharacter(absoluteWeekBasedYear(t), 3)).numeric(),
of("IY").formatFn(t -> lastNCharacter(absoluteWeekBasedYear(t), 2)).numeric(),
of("I").formatFn(t -> lastNCharacter(absoluteWeekBasedYear(t), 1)).numeric(),
of("BC").formatFn("G").text(),
of("bc").formatFn("G", x -> x.toLowerCase(Locale.ENGLISH)).text(),
of("AD").formatFn("G").text(),
of("ad").formatFn("G", x -> x.toLowerCase(Locale.ENGLISH)).text(),
of("B.C.").formatFn("G", x -> x.charAt(0) + "." + x.charAt(1) + ".").text(),
of("b.c.").formatFn("G", x -> (x.charAt(0) + "." + x.charAt(1) + ".").toLowerCase(Locale.ENGLISH)).text(),
of("A.D.").formatFn("G", x -> x.charAt(0) + "." + x.charAt(1) + ".").text(),
of("a.d.").formatFn("G", x -> (x.charAt(0) + "." + x.charAt(1) + ".").toLowerCase(Locale.ENGLISH)).text(),
of("MONTH").formatFn("MMMM", x -> String.format(Locale.ENGLISH, "%-9s", x.toUpperCase(Locale.ENGLISH))).text(),
of("Month").formatFn("MMMM", x -> String.format(Locale.ENGLISH, "%-9s", x)).text(),
of("month").formatFn("MMMM", x -> String.format(Locale.ENGLISH, "%-9s", x.toLowerCase(Locale.ENGLISH))).text(),
of("MON").formatFn("MMM", x -> x.toUpperCase(Locale.ENGLISH)).text(),
of("Mon").formatFn("MMM").text(),
of("mon").formatFn("MMM", x -> x.toLowerCase(Locale.ENGLISH)).text(),
of("MM").formatFn("MM").numeric(),
of("DAY").formatFn("EEEE", x -> String.format(Locale.ENGLISH, "%-9s", x.toUpperCase(Locale.ENGLISH))).text(),
of("Day").formatFn("EEEE", x -> String.format(Locale.ENGLISH, "%-9s", x)).text(),
of("day").formatFn("EEEE", x -> String.format(Locale.ENGLISH, "%-9s", x.toLowerCase(Locale.ENGLISH))).text(),
of("DY").formatFn("E", x -> x.toUpperCase(Locale.ENGLISH)).text(),
of("Dy").formatFn("E").text(),
of("dy").formatFn("E", x -> x.toLowerCase(Locale.ENGLISH)).text(),
of("DDD").formatFn("DDD").numeric(),
of("IDDD").formatFn(
t -> String.format(
Locale.ENGLISH,
"%03d",
(t.get(WeekFields.ISO.weekOfWeekBasedYear()) - 1) * 7 + t.get(ChronoField.DAY_OF_WEEK)
)
).numeric(),
of("DD").formatFn("d", x -> String.format(Locale.ENGLISH, "%02d", parseInt(x))).numeric(),
of("ID").formatFn(t -> String.valueOf(t.get(ChronoField.DAY_OF_WEEK))).numeric(),
of("D").formatFn(t -> String.valueOf(t.get(WeekFields.SUNDAY_START.dayOfWeek()))).numeric(),
of("W").formatFn(t -> String.valueOf(t.get(ChronoField.ALIGNED_WEEK_OF_MONTH))).numeric(),
of("WW").formatFn(t -> String.format(Locale.ENGLISH, "%02d", t.get(ChronoField.ALIGNED_WEEK_OF_YEAR))).numeric(),
of("IW").formatFn(t -> String.format(Locale.ENGLISH, "%02d", t.get(WeekFields.ISO.weekOfWeekBasedYear()))).numeric(),
of("CC").formatFn(t -> {
int century = yearToCentury(t.get(ChronoField.YEAR));
return String.format(Locale.ENGLISH, century < 0 ? "%03d" : "%02d", century);
}).numeric(),
of("J").formatFn(t -> String.valueOf(t.getLong(JulianFields.JULIAN_DAY))).numeric(),
of("Q").formatFn("Q").numeric(),
of("RM").formatFn("MM", month -> String.format(Locale.ENGLISH, "%-4s", monthToRoman(parseInt(month)))).text(),
of("rm").formatFn(
"MM",
month -> String.format(Locale.ENGLISH, "%-4s", monthToRoman(parseInt(month)).toLowerCase(Locale.ENGLISH))
).text(),
of("TZ").formatFn(ToCharFormatter::zoneAbbreviationOf).text(),
of("tz").formatFn(t -> zoneAbbreviationOf(t).toLowerCase(Locale.ENGLISH)).text(),
of("TZH").acceptsLowercase(false).formatFn("ZZ", s -> s.substring(0, 3)).text(),
of("TZM").acceptsLowercase(false).formatFn("ZZ", s -> lastNCharacter(s, 2)).text(),
of("OF").acceptsLowercase(false).formatFn("ZZZZZ", ToCharFormatter::formatOffset).offset()
);
Map<String, ToCharFormatter> formatterMap = new LinkedHashMap<>();
for (ToCharFormatter formatter : formatters) {
formatterMap.put(formatter.pattern, formatter);
}
// also index the lower case version of the patterns if accepted
for (ToCharFormatter formatter : formatters) {
if (formatter.acceptsLowercase) {
formatterMap.putIfAbsent(formatter.pattern.toLowerCase(Locale.ENGLISH), formatter);
}
}
FORMATTER_MAP = formatterMap;
}
private static final int MAX_TO_CHAR_FORMAT_STRING_LENGTH = FORMATTER_MAP.keySet()
.stream()
.mapToInt(String::length)
.max()
.orElse(Integer.MAX_VALUE);
private static final String[] ROMAN_NUMBERS = { "I", "II", "III", "IV", "V", "VI", "VII", "VIII", "IX", "X", "XI", "XII" };
private final String pattern;
private final boolean acceptsLowercase;
// Fill mode: suppress leading zeroes and padding blanks
// https://www.postgresql.org/docs/13/functions-formatting.html#FUNCTIONS-FORMATTING-DATETIMEMOD-TABLE
private final Function<String, String> fillModeFn;
private final boolean hasOrdinalSuffix;
private final Function<TemporalAccessor, String> formatter;
private ToCharFormatter(
String pattern,
boolean acceptsLowercase,
Function<String, String> fillModeFn,
boolean hasOrdinalSuffix,
Function<TemporalAccessor, String> formatter
) {
this.pattern = pattern;
this.acceptsLowercase = acceptsLowercase;
this.fillModeFn = fillModeFn;
this.hasOrdinalSuffix = hasOrdinalSuffix;
this.formatter = formatter;
}
private static Builder of(String pattern) {
return new Builder(pattern);
}
private static String monthToRoman(int month) {
return ROMAN_NUMBERS[month - 1];
}
private static int yearToCentury(int year) {
int offset = -1;
if (year > 0) {
offset = year % 100 == 0 ? 0 : 1;
}
return year / 100 + offset;
}
private String format(TemporalAccessor temporalAccessor) {
return formatter.apply(temporalAccessor);
}
private ToCharFormatter withModifier(Function<String, String> modifier) {
return new ToCharFormatter(pattern, acceptsLowercase, fillModeFn, hasOrdinalSuffix, formatter.andThen(modifier));
}
private static List<ToCharFormatter> parsePattern(String toCharPattern) {
LinkedList<ToCharFormatter> formatters = new LinkedList<>();
while (toCharPattern.isEmpty() == false) {
ToCharFormatter formatter = null;
boolean fillModeModifierActive = false;
// we try to match the following: ( fill-modifier? ( ( pattern ordinal-suffix-modifier? ) | literal-non-pattern ) ) *
// and extract the individual patterns with the fill-modifiers and ordinal-suffix-modifiers or
// the non-matched literals (removing the potential fill modifiers specified for them, FMFM turns into FM)
// check for fill-modifier first
if (toCharPattern.startsWith("FM") || toCharPattern.startsWith("fm")) {
// try to apply the fill mode modifier to the next formatter
fillModeModifierActive = true;
toCharPattern = toCharPattern.substring(2);
}
// try to find a potential pattern next
for (int length = Math.min(MAX_TO_CHAR_FORMAT_STRING_LENGTH, toCharPattern.length()); length >= 1; length--) {
final String potentialPattern = toCharPattern.substring(0, length);
formatter = FORMATTER_MAP.get(potentialPattern);
// check if it is a known pattern string, if so apply it, with any modifier
if (formatter != null) {
if (fillModeModifierActive && formatter.fillModeFn != null) {
formatter = formatter.withModifier(formatter.fillModeFn);
}
toCharPattern = toCharPattern.substring(length);
break;
}
}
if (formatter == null) {
// the fill mode modifier is dropped in case of literals
formatter = literal(toCharPattern.substring(0, 1));
toCharPattern = toCharPattern.substring(1);
} else {
// try to look for an ordinal suffix modifier in case we found a pattern
if (toCharPattern.startsWith("TH") || toCharPattern.startsWith("th")) {
final String ordinalSuffixModifier = toCharPattern.substring(0, 2);
if (formatter.hasOrdinalSuffix) {
formatter = formatter.withModifier(s -> appendOrdinalSuffix(ordinalSuffixModifier, s));
}
toCharPattern = toCharPattern.substring(2);
}
}
formatters.addLast(formatter);
}
return formatters;
}
public static Function<TemporalAccessor, String> ofPattern(String pattern) {
if (Strings.isEmpty(pattern)) {
return timestamp -> "";
}
final List<ToCharFormatter> toCharFormatters = parsePattern(pattern);
return timestamp -> toCharFormatters.stream().map(p -> p.format(timestamp)).collect(Collectors.joining());
}
private static ToCharFormatter literal(String literal) {
return new ToCharFormatter(literal, false, null, true, t -> literal);
}
private static String ordinalSuffix(int i) {
if (i < 0) {
i = -i;
}
int mod100 = i % 100;
int mod10 = i % 10;
if (mod10 == 1 && mod100 != 11) {
return "st";
} else if (mod10 == 2 && mod100 != 12) {
return "nd";
} else if (mod10 == 3 && mod100 != 13) {
return "rd";
} else {
return "th";
}
}
private static String appendOrdinalSuffix(String defaultSuffix, String s) {
try {
// the Y,YYY pattern might can cause problems with the parsing, but thankfully the last 3
// characters is enough to calculate the suffix
int i = parseInt(lastNCharacter(s, 3));
final boolean upperCase = defaultSuffix.equals(defaultSuffix.toUpperCase(Locale.ENGLISH));
return s + (upperCase ? ordinalSuffix(i).toUpperCase(Locale.ENGLISH) : ordinalSuffix(i));
} catch (NumberFormatException ex) {
return s + defaultSuffix;
}
}
private static String formatOffset(String offset) {
if (offset.equals("Z")) {
return "+00";
}
if (offset.matches("^[+-][0-9][0-9]00$")) {
offset = offset.substring(0, offset.length() - 2);
} else if (offset.matches("^[+-][0-9]{3,4}$")) {
offset = offset.substring(0, offset.length() - 2) + ":" + offset.substring(offset.length() - 2);
} else if (offset.matches("^[+-][0-9][0-9]:00$")) {
offset = offset.substring(0, offset.length() - 3);
}
return offset.substring(0, Math.min(offset.length(), 6));
}
private static String removeLeadingZerosFromOffset(String offset) {
if (offset.matches("[+-]0{1,2}")) {
return offset.substring(0, 2);
} else {
if (offset.startsWith("+0")) {
return "+" + offset.substring(2);
} else if (offset.startsWith("-0")) {
return "-" + offset.substring(2);
} else {
return offset;
}
}
}
private static String absoluteWeekBasedYear(TemporalAccessor t) {
int year = t.get(IsoFields.WEEK_BASED_YEAR);
year = year > 0 ? year : -(year - 1);
return String.format(Locale.ENGLISH, "%04d", year);
}
private static String firstDigitsOfNanos(String nano, int digits) {
return String.format(Locale.ENGLISH, "%09d", parseInt(nano)).substring(0, digits);
}
private static String lastNCharacter(String s, int n) {
return s.substring(Math.max(0, s.length() - n));
}
private static String zoneAbbreviationOf(TemporalAccessor temporalAccessor) {
String zone = ZoneId.from(temporalAccessor).getDisplayName(TextStyle.SHORT, Locale.ENGLISH);
return "Z".equals(zone) ? "UTC" : zone;
}
private static | ToCharFormatter |
java | apache__maven | api/maven-api-spi/src/main/java/org/apache/maven/api/spi/LifecycleProvider.java | {
"start": 1049,
"end": 1132
} | interface ____ registering custom {@link Lifecycle} implementations.
* <p>
* This | for |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichOperatorStatusTests.java | {
"start": 623,
"end": 3016
} | class ____ extends AbstractWireSerializingTestCase<EnrichLookupOperator.Status> {
@Override
protected Writeable.Reader<EnrichLookupOperator.Status> instanceReader() {
return EnrichLookupOperator.Status::new;
}
@Override
protected EnrichLookupOperator.Status createTestInstance() {
return new EnrichLookupOperator.Status(
randomNonNegativeLong(),
randomNonNegativeLong(),
randomNonNegativeLong(),
randomLongBetween(1, TimeValue.timeValueHours(1).millis())
);
}
@Override
protected EnrichLookupOperator.Status mutateInstance(EnrichLookupOperator.Status in) throws IOException {
int field = randomIntBetween(0, 3);
return switch (field) {
case 0 -> new EnrichLookupOperator.Status(
randomValueOtherThan(in.receivedPages(), ESTestCase::randomNonNegativeLong),
in.completedPages(),
in.totalTerms,
in.processNanos()
);
case 1 -> new EnrichLookupOperator.Status(
in.receivedPages(),
randomValueOtherThan(in.completedPages(), ESTestCase::randomNonNegativeLong),
in.totalTerms,
in.processNanos()
);
case 2 -> new EnrichLookupOperator.Status(
in.receivedPages(),
in.completedPages(),
randomValueOtherThan(in.totalTerms, ESTestCase::randomNonNegativeLong),
in.processNanos()
);
case 3 -> new EnrichLookupOperator.Status(
in.receivedPages(),
in.completedPages(),
in.totalTerms,
randomValueOtherThan(in.processNanos(), ESTestCase::randomNonNegativeLong)
);
default -> throw new AssertionError("unknown ");
};
}
public void testToXContent() {
var status = new EnrichLookupOperator.Status(100, 50, TimeValue.timeValueSeconds(10).millis(), 120);
String json = Strings.toString(status, true, true);
assertThat(json, equalTo("""
{
"process_nanos" : 10000,
"process_time" : "10micros",
"pages_received" : 100,
"pages_completed" : 50,
"total_terms" : 120
}"""));
}
}
| EnrichOperatorStatusTests |
java | quarkusio__quarkus | extensions/smallrye-openapi/deployment/src/test/java/io/quarkus/smallrye/openapi/test/jaxrs/OpenApiBuildTimeExcludedClassTestCase.java | {
"start": 3739,
"end": 4011
} | class ____ {
@GET
public String endpoint() {
return "";
}
}
@Path("/foobar-property-not-false")
@UnlessBuildProperty(name = "foobar", stringValue = "false", enableIfMissing = false)
public static | IfBuildPropertyBarBazIsTrue |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/support/DirtiesContextBeforeModesTestExecutionListener.java | {
"start": 3399,
"end": 3556
} | class ____ the supplied {@linkplain TestContext test context}
* is annotated with {@code @DirtiesContext} and the {@linkplain
* DirtiesContext#classMode() | of |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/single/SingleDoAfterTerminate.java | {
"start": 1640,
"end": 3009
} | class ____<T> implements SingleObserver<T>, Disposable {
final SingleObserver<? super T> downstream;
final Action onAfterTerminate;
Disposable upstream;
DoAfterTerminateObserver(SingleObserver<? super T> actual, Action onAfterTerminate) {
this.downstream = actual;
this.onAfterTerminate = onAfterTerminate;
}
@Override
public void onSubscribe(Disposable d) {
if (DisposableHelper.validate(this.upstream, d)) {
this.upstream = d;
downstream.onSubscribe(this);
}
}
@Override
public void onSuccess(T t) {
downstream.onSuccess(t);
onAfterTerminate();
}
@Override
public void onError(Throwable e) {
downstream.onError(e);
onAfterTerminate();
}
@Override
public void dispose() {
upstream.dispose();
}
@Override
public boolean isDisposed() {
return upstream.isDisposed();
}
private void onAfterTerminate() {
try {
onAfterTerminate.run();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
RxJavaPlugins.onError(ex);
}
}
}
}
| DoAfterTerminateObserver |
java | elastic__elasticsearch | modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexScriptTests.java | {
"start": 1048,
"end": 4279
} | class ____ extends AbstractAsyncBulkByScrollActionScriptTestCase<ReindexRequest, BulkByScrollResponse> {
public void testSetIndex() throws Exception {
Object dest = randomFrom(new Object[] { 234, 234L, "pancake" });
IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_index", dest));
assertEquals(dest.toString(), index.index());
}
public void testSettingIndexToNullIsError() throws Exception {
try {
applyScript((Map<String, Object> ctx) -> ctx.put("_index", null));
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("_index cannot be null"));
}
}
public void testSetId() throws Exception {
Object id = randomFrom(new Object[] { null, 234, 234L, "pancake" });
IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_id", id));
if (id == null) {
assertNull(index.id());
} else {
assertEquals(id.toString(), index.id());
}
}
public void testSetVersion() throws Exception {
Number version = randomFrom(new Number[] { null, 234, 234L });
IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_version", version));
if (version == null) {
assertEquals(Versions.MATCH_ANY, index.version());
} else {
assertEquals(version.longValue(), index.version());
}
}
public void testSettingVersionToJunkIsAnError() throws Exception {
IllegalArgumentException err = expectThrows(
IllegalArgumentException.class,
() -> applyScript((Map<String, Object> ctx) -> ctx.put("_version", "junk"))
);
assertEquals(err.getMessage(), "_version [junk] is wrong type, expected assignable to [java.lang.Number], not [java.lang.String]");
err = expectThrows(IllegalArgumentException.class, () -> applyScript((Map<String, Object> ctx) -> ctx.put("_version", Math.PI)));
assertEquals(
err.getMessage(),
"_version may only be set to an int or a long but was [3.141592653589793] with type [java.lang.Double]"
);
}
public void testSetRouting() throws Exception {
String routing = randomRealisticUnicodeOfLengthBetween(5, 20);
IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_routing", routing));
assertEquals(routing, index.routing());
}
@Override
protected ReindexRequest request() {
ReindexRequest request = new ReindexRequest();
request.getDestination().index("test");
return request;
}
@Override
protected Reindexer.AsyncIndexBySearchAction action(ScriptService scriptService, ReindexRequest request) {
ReindexSslConfig sslConfig = Mockito.mock(ReindexSslConfig.class);
return new Reindexer.AsyncIndexBySearchAction(
task,
logger,
null,
null,
threadPool,
scriptService,
ClusterState.EMPTY_STATE.projectState(Metadata.DEFAULT_PROJECT_ID),
sslConfig,
request,
listener()
);
}
}
| ReindexScriptTests |
java | spring-projects__spring-framework | spring-context/src/test/java/example/scannable/StubFooDao.java | {
"start": 840,
"end": 944
} | class ____ implements FooDao {
@Override
public String findFoo(int id) {
return "bar";
}
}
| StubFooDao |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java | {
"start": 432,
"end": 702
} | class ____ extends ActionType<PutUserResponse> {
public static final PutUserAction INSTANCE = new PutUserAction();
public static final String NAME = "cluster:admin/xpack/security/user/put";
protected PutUserAction() {
super(NAME);
}
}
| PutUserAction |
java | quarkusio__quarkus | extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerTruststoreTest.java | {
"start": 315,
"end": 3834
} | class ____ extends FakeSmtpTestBase {
@Test
public void sendMailWithCorrectTrustStore() {
MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() {
@Override
public boolean ssl() {
return true;
}
@Override
public TrustStoreConfig truststore() {
return new TrustStoreConfig() {
@Override
public Optional<String> type() {
return Optional.empty();
}
@Override
public Optional<List<String>> paths() {
return Optional.of(List.of(CLIENT_TRUSTSTORE));
}
@Override
public Optional<String> password() {
return Optional.of("password");
}
};
}
});
ReactiveMailer mailer = getMailer(mailersConfig);
startServer(SERVER_JKS);
mailer.send(getMail()).await().indefinitely();
}
@SuppressWarnings("deprecation")
@Test
public void sendMailWithCorrectButDeprecatedTrustStore() {
MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() {
@Override
public boolean ssl() {
return true;
}
@Override
public Optional<String> keyStorePassword() {
return Optional.of("password");
}
@Override
public Optional<String> keyStore() {
return Optional.of(CLIENT_TRUSTSTORE);
}
});
ReactiveMailer mailer = getMailer(mailersConfig);
startServer(SERVER_JKS);
mailer.send(getMail()).await().indefinitely();
}
@Test
public void sendMailWithTrustAll() {
MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() {
@Override
public boolean ssl() {
return true;
}
@Override
public Optional<Boolean> trustAll() {
return Optional.of(true);
}
});
ReactiveMailer mailer = getMailer(mailersConfig);
startServer(SERVER_JKS);
mailer.send(getMail()).await().indefinitely();
}
@Test
public void sendMailWithGlobalTrustAll() {
MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() {
@Override
public boolean ssl() {
return true;
}
});
ReactiveMailer mailer = getMailer(mailersConfig, true);
startServer(SERVER_JKS);
mailer.send(getMail()).await().indefinitely();
}
@Test
public void sendMailWithoutTrustStore() {
MailersRuntimeConfig mailersConfig = new DefaultMailersRuntimeConfig(new DefaultMailerRuntimeConfig() {
@Override
public boolean ssl() {
return true;
}
});
startServer(SERVER_JKS);
ReactiveMailer mailer = getMailer(mailersConfig);
Assertions.assertThatThrownBy(() -> mailer.send(getMail()).await().indefinitely())
.isInstanceOf(CompletionException.class)
.hasCauseInstanceOf(SSLHandshakeException.class);
}
}
| MailerTruststoreTest |
java | apache__flink | flink-clients/src/main/java/org/apache/flink/client/program/ClusterClient.java | {
"start": 4033,
"end": 4654
} | class ____ is used to
* deserialize the incoming accumulator results.
*
* @param jobID The job identifier of a job.
* @return A Map containing the accumulator's name and its value.
*/
default CompletableFuture<Map<String, Object>> getAccumulators(JobID jobID) {
return getAccumulators(jobID, ClassLoader.getSystemClassLoader());
}
/**
* Requests and returns the accumulators for the given job identifier. Accumulators can be
* requested while a is running or after it has finished.
*
* @param jobID The job identifier of a job.
* @param loader The | loader |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/core/JdbcTemplate.java | {
"start": 5214,
"end": 17007
} | class ____ extends JdbcAccessor implements JdbcOperations {
private static final String RETURN_RESULT_SET_PREFIX = "#result-set-";
private static final String RETURN_UPDATE_COUNT_PREFIX = "#update-count-";
/** If this variable is {@code false}, we will throw exceptions on SQL warnings. */
private boolean ignoreWarnings = true;
/**
* If this variable is set to a non-negative value, it will be used for setting the
* fetchSize property on statements used for query processing.
*/
private int fetchSize = -1;
/**
* If this variable is set to a non-negative value, it will be used for setting the
* maxRows property on statements used for query processing.
*/
private int maxRows = -1;
/**
* If this variable is set to a non-negative value, it will be used for setting the
* queryTimeout property on statements used for query processing.
*/
private int queryTimeout = -1;
/**
* If this variable is set to true, then all results checking will be bypassed for any
* callable statement processing. This can be used to avoid a bug in some older Oracle
* JDBC drivers like 10.1.0.2.
*/
private boolean skipResultsProcessing = false;
/**
* If this variable is set to true then all results from a stored procedure call
* that don't have a corresponding SqlOutParameter declaration will be bypassed.
* All other results processing will be take place unless the variable
* {@code skipResultsProcessing} is set to {@code true}.
*/
private boolean skipUndeclaredResults = false;
/**
* If this variable is set to true then execution of a CallableStatement will return
* the results in a Map that uses case-insensitive names for the parameters.
*/
private boolean resultsMapCaseInsensitive = false;
/**
* Construct a new JdbcTemplate for bean usage.
* <p>Note: The DataSource has to be set before using the instance.
* @see #setDataSource
*/
public JdbcTemplate() {
}
/**
* Construct a new JdbcTemplate, given a DataSource to obtain connections from.
* <p>Note: This will not trigger initialization of the exception translator.
* @param dataSource the JDBC DataSource to obtain connections from
*/
public JdbcTemplate(DataSource dataSource) {
setDataSource(dataSource);
afterPropertiesSet();
}
/**
* Construct a new JdbcTemplate, given a DataSource to obtain connections from.
* <p>Note: Depending on the "lazyInit" flag, initialization of the exception translator
* will be triggered.
* @param dataSource the JDBC DataSource to obtain connections from
* @param lazyInit whether to lazily initialize the SQLExceptionTranslator
*/
public JdbcTemplate(DataSource dataSource, boolean lazyInit) {
setDataSource(dataSource);
setLazyInit(lazyInit);
afterPropertiesSet();
}
/**
* Copy constructor for a derived JdbcTemplate.
* @param original the original template to copy from
* @since 7.0
*/
public JdbcTemplate(JdbcAccessor original) {
setDataSource(original.getDataSource());
setExceptionTranslator(original.getExceptionTranslator());
setLazyInit(original.isLazyInit());
if (original instanceof JdbcTemplate originalTemplate) {
setIgnoreWarnings(originalTemplate.isIgnoreWarnings());
setFetchSize(originalTemplate.getFetchSize());
setMaxRows(originalTemplate.getMaxRows());
setQueryTimeout(originalTemplate.getQueryTimeout());
setSkipResultsProcessing(originalTemplate.isSkipResultsProcessing());
setSkipUndeclaredResults(originalTemplate.isSkipUndeclaredResults());
setResultsMapCaseInsensitive(originalTemplate.isResultsMapCaseInsensitive());
}
}
/**
* Set whether we want to ignore JDBC statement warnings ({@link SQLWarning}).
* <p>Default is {@code true}, swallowing and logging all warnings. Switch this flag to
* {@code false} to make this JdbcTemplate throw a {@link SQLWarningException} instead
* (or chain the {@link SQLWarning} into the primary {@link SQLException}, if any).
* @see Statement#getWarnings()
* @see java.sql.SQLWarning
* @see org.springframework.jdbc.SQLWarningException
* @see #handleWarnings(Statement)
*/
public void setIgnoreWarnings(boolean ignoreWarnings) {
this.ignoreWarnings = ignoreWarnings;
}
/**
* Return whether we ignore SQLWarnings.
*/
public boolean isIgnoreWarnings() {
return this.ignoreWarnings;
}
/**
* Set the fetch size for this JdbcTemplate. This is important for processing large
* result sets: Setting this higher than the default value will increase processing
* speed at the cost of memory consumption; setting this lower can avoid transferring
* row data that will never be read by the application.
* <p>Default is -1, indicating to use the JDBC driver's default configuration
* (i.e. to not pass a specific fetch size setting on to the driver).
* <p>Note: As of 4.3, negative values other than -1 will get passed on to the driver,
* since, for example, MySQL supports special behavior for {@code Integer.MIN_VALUE}.
* @see java.sql.Statement#setFetchSize
*/
public void setFetchSize(int fetchSize) {
this.fetchSize = fetchSize;
}
/**
* Return the fetch size specified for this JdbcTemplate.
*/
public int getFetchSize() {
return this.fetchSize;
}
/**
* Set the maximum number of rows for this JdbcTemplate. This is important for
* processing subsets of large result sets, avoiding to read and hold the entire
* result set in the database or in the JDBC driver if we're never interested in
* the entire result in the first place (for example, when performing searches
* that might return a large number of matches).
* <p>Default is -1, indicating to use the JDBC driver's default configuration
* (i.e. to not pass a specific max rows setting on to the driver).
* <p>Note: As of 4.3, negative values other than -1 will get passed on to the
* driver, in sync with {@link #setFetchSize}'s support for special MySQL values.
* @see java.sql.Statement#setMaxRows
*/
public void setMaxRows(int maxRows) {
this.maxRows = maxRows;
}
/**
* Return the maximum number of rows specified for this JdbcTemplate.
*/
public int getMaxRows() {
return this.maxRows;
}
/**
* Set the query timeout (seconds) for statements that this JdbcTemplate executes.
* <p>Default is -1, indicating to use the JDBC driver's default
* (i.e. to not pass a specific query timeout setting on the driver).
* <p>Note: Any timeout specified here will be overridden by the remaining
* transaction timeout when executing within a transaction that has a
* timeout specified at the transaction level.
* @see java.sql.Statement#setQueryTimeout
*/
public void setQueryTimeout(int queryTimeout) {
this.queryTimeout = queryTimeout;
}
/**
* Return the query timeout (seconds) for statements that this JdbcTemplate executes.
*/
public int getQueryTimeout() {
return this.queryTimeout;
}
/**
* Set whether results processing should be skipped. Can be used to optimize callable
* statement processing when we know that no results are being passed back - the processing
* of out parameter will still take place. This can be used to avoid a bug in some older
* Oracle JDBC drivers like 10.1.0.2.
*/
public void setSkipResultsProcessing(boolean skipResultsProcessing) {
this.skipResultsProcessing = skipResultsProcessing;
}
/**
* Return whether results processing should be skipped.
*/
public boolean isSkipResultsProcessing() {
return this.skipResultsProcessing;
}
/**
* Set whether undeclared results should be skipped.
*/
public void setSkipUndeclaredResults(boolean skipUndeclaredResults) {
this.skipUndeclaredResults = skipUndeclaredResults;
}
/**
* Return whether undeclared results should be skipped.
*/
public boolean isSkipUndeclaredResults() {
return this.skipUndeclaredResults;
}
/**
* Set whether execution of a CallableStatement will return the results in a Map
* that uses case-insensitive names for the parameters.
*/
public void setResultsMapCaseInsensitive(boolean resultsMapCaseInsensitive) {
this.resultsMapCaseInsensitive = resultsMapCaseInsensitive;
}
/**
* Return whether execution of a CallableStatement will return the results in a Map
* that uses case-insensitive names for the parameters.
*/
public boolean isResultsMapCaseInsensitive() {
return this.resultsMapCaseInsensitive;
}
//-------------------------------------------------------------------------
// Methods dealing with a plain java.sql.Connection
//-------------------------------------------------------------------------
@Override
public <T extends @Nullable Object> T execute(ConnectionCallback<T> action) throws DataAccessException {
Assert.notNull(action, "Callback object must not be null");
Connection con = DataSourceUtils.getConnection(obtainDataSource());
try {
// Create close-suppressing Connection proxy, also preparing returned Statements.
Connection conToUse = createConnectionProxy(con);
return action.doInConnection(conToUse);
}
catch (SQLException ex) {
// Release Connection early, to avoid potential connection pool deadlock
// in the case when the exception translator hasn't been initialized yet.
String sql = getSql(action);
DataSourceUtils.releaseConnection(con, getDataSource());
con = null;
throw translateException("ConnectionCallback", sql, ex);
}
finally {
DataSourceUtils.releaseConnection(con, getDataSource());
}
}
/**
* Create a close-suppressing proxy for the given JDBC Connection.
* Called by the {@code execute} method.
* <p>The proxy also prepares returned JDBC Statements, applying
* statement settings such as fetch size, max rows, and query timeout.
* @param con the JDBC Connection to create a proxy for
* @return the Connection proxy
* @see java.sql.Connection#close()
* @see #execute(ConnectionCallback)
* @see #applyStatementSettings
*/
protected Connection createConnectionProxy(Connection con) {
return (Connection) Proxy.newProxyInstance(
ConnectionProxy.class.getClassLoader(),
new Class<?>[] {ConnectionProxy.class},
new CloseSuppressingInvocationHandler(con));
}
//-------------------------------------------------------------------------
// Methods dealing with static SQL (java.sql.Statement)
//-------------------------------------------------------------------------
private <T extends @Nullable Object> T execute(StatementCallback<T> action, boolean closeResources) throws DataAccessException {
Assert.notNull(action, "Callback object must not be null");
Connection con = DataSourceUtils.getConnection(obtainDataSource());
Statement stmt = null;
try {
stmt = con.createStatement();
applyStatementSettings(stmt);
T result = action.doInStatement(stmt);
handleWarnings(stmt);
return result;
}
catch (SQLException ex) {
// Release Connection early, to avoid potential connection pool deadlock
// in the case when the exception translator hasn't been initialized yet.
if (stmt != null) {
handleWarnings(stmt, ex);
}
String sql = getSql(action);
JdbcUtils.closeStatement(stmt);
stmt = null;
DataSourceUtils.releaseConnection(con, getDataSource());
con = null;
throw translateException("StatementCallback", sql, ex);
}
finally {
if (closeResources) {
JdbcUtils.closeStatement(stmt);
DataSourceUtils.releaseConnection(con, getDataSource());
}
}
}
@Override
public <T extends @Nullable Object> T execute(StatementCallback<T> action) throws DataAccessException {
return execute(action, true);
}
@Override
public void execute(String sql) throws DataAccessException {
if (logger.isDebugEnabled()) {
logger.debug("Executing SQL statement [" + sql + "]");
}
// Callback to execute the statement.
| JdbcTemplate |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/propertyeditors/UUIDEditor.java | {
"start": 982,
"end": 1361
} | class ____ extends PropertyEditorSupport {
@Override
public void setAsText(String text) throws IllegalArgumentException {
if (StringUtils.hasText(text)) {
setValue(UUID.fromString(text.trim()));
}
else {
setValue(null);
}
}
@Override
public String getAsText() {
UUID value = (UUID) getValue();
return (value != null ? value.toString() : "");
}
}
| UUIDEditor |
java | apache__rocketmq | test/src/test/java/org/apache/rocketmq/test/client/consumer/broadcast/normal/BroadcastNormalMsgRecvFailIT.java | {
"start": 1580,
"end": 2993
} | class ____ extends BaseBroadcast {
private static Logger logger = LoggerFactory
.getLogger(NormalMsgTwoSameGroupConsumerIT.class);
private RMQNormalProducer producer = null;
private String topic = null;
@Before
public void setUp() {
printSeparator();
topic = initTopic();
logger.info(String.format("use topic: %s;", topic));
producer = getProducer(NAMESRV_ADDR, topic);
}
@After
public void tearDown() {
super.shutdown();
}
@Ignore
@Test
public void testStartTwoConsumerAndOneConsumerFail() {
int msgSize = 16;
RMQBroadCastConsumer consumer1 = getBroadCastConsumer(NAMESRV_ADDR, topic, "*",
new RMQNormalListener());
RMQBroadCastConsumer consumer2 = getBroadCastConsumer(NAMESRV_ADDR,
consumer1.getConsumerGroup(), topic, "*",
new RMQNormalListener(ConsumeConcurrentlyStatus.RECONSUME_LATER));
producer.send(msgSize);
Assert.assertEquals("Not all sent succeeded", msgSize, producer.getAllUndupMsgBody().size());
consumer1.getListener().waitForMessageConsume(producer.getAllMsgBody(), CONSUME_TIME);
assertThat(VerifyUtils.getFilterdMessage(producer.getAllMsgBody(),
consumer1.getListener().getAllMsgBody()))
.containsExactlyElementsIn(producer.getAllMsgBody());
}
}
| BroadcastNormalMsgRecvFailIT |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerThreadsInProgressIssueTest.java | {
"start": 3403,
"end": 4119
} | class ____ implements Processor {
private final Map<String, Integer> duplicate;
public SampleProcessor(Map<String, Integer> duplicate) {
this.duplicate = duplicate;
}
@Override
public void process(Exchange exchange) throws Exception {
Integer integer = duplicate.get(exchange.getExchangeId());
if (integer == null) {
duplicate.put(exchange.getExchangeId(), 1);
} else {
integer++;
duplicate.put(exchange.getExchangeId(), integer);
}
log.info("Process called for-{}", exchange.getExchangeId());
Thread.sleep(20);
}
}
}
| SampleProcessor |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/SoftAssertionsTest.java | {
"start": 44451,
"end": 110711
} | class ____ extends SoftAssertions {
public TolkienCharacterAssert assertThat(TolkienCharacter actual) {
return proxy(TolkienCharacterAssert.class, TolkienCharacter.class, actual);
}
}
@Test
void should_return_failure_after_fail_without_message() {
// WHEN
softly.fail();
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.message().isEmpty();
}
@Test
void should_return_failure_after_fail() {
// GIVEN
String failureMessage = "Should not reach here";
// WHEN
softly.fail(failureMessage);
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.hasMessageStartingWith(failureMessage);
}
@Test
void should_return_failure_after_fail_with_parameters() {
// GIVEN
String failureMessage = "Should not reach %s or %s";
// WHEN
softly.fail(failureMessage, "here", "here");
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.hasMessageStartingWith("Should not reach here or here");
}
@Test
void should_return_failure_after_fail_with_cause() {
// GIVEN
IllegalStateException realCause = new IllegalStateException();
// WHEN
softly.fail(realCause);
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.hasMessage("")
.cause().isEqualTo(realCause);
}
@Test
void should_return_failure_after_fail_with_message_and_cause() {
// GIVEN
String failureMessage = "Should not reach here";
IllegalStateException realCause = new IllegalStateException();
// WHEN
softly.fail(failureMessage, realCause);
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.hasMessageStartingWith(failureMessage)
.cause().isEqualTo(realCause);
}
@Test
void should_return_failure_after_shouldHaveThrown() {
// WHEN
softly.shouldHaveThrown(IllegalArgumentException.class);
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.hasMessageStartingWith("IllegalArgumentException should have been thrown");
}
@Test
void should_return_failure_after_failBecauseExceptionWasNotThrown() {
// WHEN
softly.failBecauseExceptionWasNotThrown(IllegalArgumentException.class);
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.hasMessageStartingWith("IllegalArgumentException should have been thrown");
}
@Test
void should_assert_using_assertSoftly() {
assertThatThrownBy(() -> assertSoftly(assertions -> {
assertions.assertThat(true).isFalse();
assertions.assertThat(42).isEqualTo("meaning of life");
assertions.assertThat("red").isEqualTo("blue");
})).as("it should call assertAll() and fail with multiple validation errors")
.hasMessageContaining("meaning of life")
.hasMessageContaining("blue");
}
@Test
void should_work_with_atomic() {
// WHEN
// simple atomic value
softly.assertThat(new AtomicBoolean(true)).isTrue().isFalse();
softly.assertThat(new AtomicInteger(1)).hasValueGreaterThan(0).hasNegativeValue();
softly.assertThat(new AtomicLong(1L)).hasValueGreaterThan(0L).hasNegativeValue();
softly.assertThat(new AtomicReference<>("abc")).hasValue("abc").hasValue("def");
// atomic array value
softly.assertThat(new AtomicIntegerArray(new int[] { 1, 2, 3 })).containsExactly(1, 2, 3).isEmpty();
softly.assertThat(new AtomicLongArray(new long[] { 1L, 2L, 3L })).containsExactly(1L, 2L, 3L).contains(0);
softly.assertThat(new AtomicReferenceArray<>(array("a", "b", "c"))).containsExactly("a", "b", "c").contains("123");
// THEN
List<Throwable> errorsCollected = softly.errorsCollected();
then(errorsCollected).hasSize(7);
then(errorsCollected.get(0)).hasMessageContaining("false");
then(errorsCollected.get(1)).hasMessageContaining("0");
then(errorsCollected.get(2)).hasMessageContaining("0L");
then(errorsCollected.get(3)).hasMessageContaining("def");
then(errorsCollected.get(4)).hasMessageContaining("empty");
then(errorsCollected.get(5)).hasMessageContaining("0");
then(errorsCollected.get(6)).hasMessageContaining("123");
}
@Test
void should_fix_bug_1146() {
// GIVEN
Map<String, String> numbers = mapOf(entry("one", "1"),
entry("two", "2"),
entry("three", "3"));
// THEN
try (final AutoCloseableSoftAssertions softly = new AutoCloseableSoftAssertions()) {
softly.assertThat(numbers)
.extractingByKeys("one", "two")
.containsExactly("1", "2");
softly.assertThat(numbers)
.extractingByKey("one")
.isEqualTo("1");
}
}
@Test
void iterable_soft_assertions_should_work_with_navigation_methods() {
// GIVEN
Iterable<Name> names = list(name("John", "Doe"), name("Jane", "Doe"));
// WHEN
softly.assertThat(names)
.as("size isGreaterThan(10)")
.overridingErrorMessage("error message")
.size()
.isGreaterThan(10);
softly.assertThat(names)
.as("size isGreaterThan(22)")
.overridingErrorMessage("error message")
.size()
.isGreaterThan(22)
.returnToIterable()
.as("should not be empty") // TODO returnToIterable() does not yet propagate assertion info
.overridingErrorMessage("error message 2")
.isEmpty();
softly.assertThat(names)
.as("first element")
.overridingErrorMessage("error message")
.first()
.isNull();
softly.assertThat(names)
.as("first element as Name")
.overridingErrorMessage("error message")
.first(as(type(Name.class)))
.isNull();
softly.assertThat(names)
.as("element(0)")
.overridingErrorMessage("error message")
.element(0)
.isNull();
softly.assertThat(names)
.as("element(0) as Name")
.overridingErrorMessage("error message")
.element(0, as(type(Name.class)))
.isNull();
softly.assertThat(names)
.as("last element")
.overridingErrorMessage("error message")
.last()
.isNull();
softly.assertThat(names)
.as("last element as Name")
.overridingErrorMessage("error message")
.last(as(type(Name.class)))
.isNull();
softly.assertThat(names)
.as("elements(0, 1)")
.overridingErrorMessage("error message")
.elements(0, 1)
.isNull();
// THEN
List<Throwable> errorsCollected = softly.errorsCollected();
then(errorsCollected).hasSize(10);
then(errorsCollected.get(0)).hasMessage("[size isGreaterThan(10)] error message");
then(errorsCollected.get(1)).hasMessage("[size isGreaterThan(22)] error message");
then(errorsCollected.get(2)).hasMessage("[should not be empty] error message 2");
then(errorsCollected.get(3)).hasMessage("[first element] error message");
then(errorsCollected.get(4)).hasMessage("[first element as Name] error message");
then(errorsCollected.get(5)).hasMessage("[element(0)] error message");
then(errorsCollected.get(6)).hasMessage("[element(0) as Name] error message");
then(errorsCollected.get(7)).hasMessage("[last element] error message");
then(errorsCollected.get(8)).hasMessage("[last element as Name] error message");
then(errorsCollected.get(9)).hasMessage("[elements(0, 1)] error message");
}
@Test
void list_soft_assertions_should_work_with_navigation_methods() {
// GIVEN
List<Name> names = list(name("John", "Doe"), name("Jane", "Doe"));
// WHEN
softly.assertThat(names)
.as("size isGreaterThan(10)")
.overridingErrorMessage("error message")
.size()
.isGreaterThan(10);
softly.assertThat(names)
.as("size isGreaterThan(22)")
.overridingErrorMessage("error message")
.size()
.isGreaterThan(22)
.returnToIterable()
.as("shoud not be empty") // TODO returnToIterable() does not yet propagate assertion info
.overridingErrorMessage("error message 2")
.isEmpty();
softly.assertThat(names)
.as("first element")
.overridingErrorMessage("error message")
.first()
.isNull();
softly.assertThat(names)
.as("first element as Name")
.overridingErrorMessage("error message")
.first(as(type(Name.class)))
.isNull();
softly.assertThat(names)
.as("element(0)")
.overridingErrorMessage("error message")
.element(0)
.isNull();
softly.assertThat(names)
.as("element(0) as Name")
.overridingErrorMessage("error message")
.element(0, as(type(Name.class)))
.isNull();
softly.assertThat(names)
.as("last element")
.overridingErrorMessage("error message")
.last()
.isNull();
softly.assertThat(names)
.as("last element as Name")
.overridingErrorMessage("error message")
.last(as(type(Name.class)))
.isNull();
softly.assertThat(names)
.as("elements(0, 1)")
.overridingErrorMessage("error message")
.elements(0, 1)
.isNull();
// THEN
List<Throwable> errorsCollected = softly.errorsCollected();
then(errorsCollected).hasSize(10);
then(errorsCollected.get(0)).hasMessage("[size isGreaterThan(10)] error message");
then(errorsCollected.get(1)).hasMessage("[size isGreaterThan(22)] error message");
then(errorsCollected.get(2)).hasMessage("[shoud not be empty] error message 2");
then(errorsCollected.get(3)).hasMessage("[first element] error message");
then(errorsCollected.get(4)).hasMessage("[first element as Name] error message");
then(errorsCollected.get(5)).hasMessage("[element(0)] error message");
then(errorsCollected.get(6)).hasMessage("[element(0) as Name] error message");
then(errorsCollected.get(7)).hasMessage("[last element] error message");
then(errorsCollected.get(8)).hasMessage("[last element as Name] error message");
then(errorsCollected.get(9)).hasMessage("[elements(0, 1)] error message");
}
@Test
void iterable_soft_assertions_should_work_with_singleElement_navigation() {
// GIVEN
Iterable<Name> names = list(name("Jane", "Doe"));
// WHEN
softly.assertThat(names)
.as("single element")
.singleElement()
.isNotNull();
softly.assertThat(names)
.as("single element")
.overridingErrorMessage("error message")
.singleElement()
.isNull();
softly.assertThat(names)
.as("single element as Name")
.overridingErrorMessage("error message")
.singleElement(as(type(Name.class)))
.isNull();
// THEN
List<Throwable> errorsCollected = softly.errorsCollected();
then(errorsCollected).hasSize(2);
then(errorsCollected.get(0)).hasMessage("[single element] error message");
then(errorsCollected.get(1)).hasMessage("[single element as Name] error message");
}
@Test
void list_soft_assertions_should_work_with_singleElement_navigation() {
// GIVEN
List<Name> names = list(name("Jane", "Doe"));
// WHEN
softly.assertThat(names)
.as("single element")
.singleElement()
.isNotNull();
softly.assertThat(names)
.as("single element")
.overridingErrorMessage("error message")
.singleElement()
.isNull();
softly.assertThat(names)
.as("single element as Name")
.overridingErrorMessage("error message")
.singleElement(as(type(Name.class)))
.isNull();
// THEN
List<Throwable> errorsCollected = softly.errorsCollected();
then(errorsCollected).hasSize(2);
then(errorsCollected.get(0)).hasMessage("[single element] error message");
then(errorsCollected.get(1)).hasMessage("[single element as Name] error message");
}
// the test would fail if any method was not proxyable as the assertion error would not be softly caught
@Test
void iterable_soft_assertions_should_report_errors_on_final_methods_and_methods_that_switch_the_object_under_test() {
// GIVEN
Iterable<Name> names = list(name("John", "Doe"), name("Jane", "Doe"));
Iterable<CartoonCharacter> characters = list(homer, fred);
// WHEN
softly.assertThat(names)
.as("extracting(throwingFirstNameFunction)")
.overridingErrorMessage("error message")
.extracting(throwingFirstNameFunction)
.contains("gandalf")
.contains("frodo");
softly.assertThat(names)
.as("extracting(\"last\")")
.overridingErrorMessage("error message")
.extracting("last")
.containsExactly("foo", "bar");
softly.assertThat(characters)
.as("using flatExtracting on Iterable")
.overridingErrorMessage("error message")
.flatExtracting(childrenExtractor)
.hasSize(1)
.containsAnyOf(homer, fred);
softly.assertThat(characters)
.as("using flatExtracting on Iterable with exception")
.overridingErrorMessage("error message")
.flatExtracting(CartoonCharacter::getChildrenWithException)
.containsExactlyInAnyOrder(homer, fred);
softly.assertThat(characters)
.containsOnly(bart);
softly.assertThat(characters)
.containsOnlyOnce(maggie, bart);
softly.assertThat(characters)
.containsSequence(homer, bart);
softly.assertThat(characters)
.containsSubsequence(homer, maggie);
softly.assertThat(characters)
.doesNotContain(homer, maggie);
softly.assertThat(characters)
.doesNotContainSequence(fred);
softly.assertThat(characters)
.doesNotContainSubsequence(homer, fred);
softly.assertThat(characters)
.isSubsetOf(homer, bart);
softly.assertThat(characters)
.startsWith(fred);
softly.assertThat(characters)
.endsWith(bart);
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("extracting(firstNameFunction, lastNameFunction)")
.extracting(firstNameFunction, lastNameFunction)
.contains(tuple("John", "Doe"))
.contains(tuple("Frodo", "Baggins"));
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("extracting(\"first\", \"last\")")
.extracting("first", "last")
.contains(tuple("John", "Doe"))
.contains(tuple("Bilbo", "Baggins"));
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("extracting(firstNameFunction)")
.extracting(firstNameFunction)
.contains("John")
.contains("sam");
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("extracting(\"first\", String.class)")
.extracting("first", String.class)
.contains("John")
.contains("Aragorn");
softly.assertThat(names)
.as("filteredOn(name -> name.first.startsWith(\"Jo\"))")
.overridingErrorMessage("error message")
.filteredOn(name -> name.first.startsWith("Jo"))
.hasSize(123);
softly.assertThat(names)
.as("name.first.startsWith(\"Jo\")")
.overridingErrorMessage("error message")
.filteredOn(name -> name.first.startsWith("Jo"))
.extracting(firstNameFunction)
.contains("Sauron");
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("flatExtracting with multiple Extractors")
.flatExtracting(firstNameFunction, lastNameFunction)
.contains("John", "Jane", "Doe")
.contains("Sauron");
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("flatExtracting with multiple ThrowingExtractors")
.flatExtracting(throwingFirstNameFunction, throwingLastNameFunction)
.contains("John", "Jane", "Doe")
.contains("Sauron");
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("extractingResultOf(\"getFirst\")")
.extractingResultOf("getFirst")
.contains("John", "Jane")
.contains("Sam", "Aragorn");
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("extractingResultOf(\"getFirst\", String.class)")
.extractingResultOf("getFirst", String.class)
.contains("John", "Jane")
.contains("Messi", "Ronaldo");
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("filteredOn with condition")
.filteredOn(new Condition<>(name -> name.first.startsWith("Jo"), "startsWith Jo"))
.hasSize(5);
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("filteredOn firstName in {John, Frodo}")
.filteredOn("first", in("John", "Frodo"))
.isEmpty();
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("filteredOn firstName = John")
.filteredOn("first", "John")
.isEmpty();
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("filteredOn firstName = null")
.filteredOnNull("first")
.isNotEmpty();
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("using flatExtracting(String... fieldOrPropertyNames)")
.flatExtracting("first", "last")
.contains("John", "Jane", "Doe")
.contains("Sauron");
softly.assertThat(characters)
.as("using flatExtracting(String fieldOrPropertyName)")
.overridingErrorMessage("error message")
.flatExtracting("children")
.contains(bart, maggie)
.contains("Sauron");
softly.assertThat(names)
.overridingErrorMessage("error message")
.filteredOnAssertions(name -> assertThat(name.first).startsWith("Jo"))
.as("filteredOn with consumer")
.hasSize(5);
softly.assertThat(characters)
.as("using flatMap on Iterable")
.overridingErrorMessage("error message")
.flatMap(childrenExtractor)
.containsAnyOf(homer, fred);
softly.assertThat(characters)
.as("using flatMap on Iterable with exception")
.overridingErrorMessage("error message")
.flatMap(CartoonCharacter::getChildrenWithException)
.containsExactlyInAnyOrder(homer, fred);
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("flatMap with multiple Extractors")
.flatMap(firstNameFunction, lastNameFunction)
.contains("John", "Jane", "Doe")
.contains("Sauron");
softly.assertThat(names)
.overridingErrorMessage("error message")
.as("flatMap with multiple ThrowingExtractors")
.flatMap(throwingFirstNameFunction, throwingLastNameFunction)
.contains("John", "Jane", "Doe")
.contains("Sauron");
softly.assertThat(names)
.as("map(throwingFirstNameFunction)")
.overridingErrorMessage("error message")
.map(throwingFirstNameFunction)
.contains("frodo");
softly.assertThat(names)
.as("map(firstNameFunction)")
.map(firstNameFunction)
.contains("sam");
softly.assertThat(names)
.map(firstNameFunction, lastNameFunction)
.as("map with multiple functions")
.contains(tuple("John", "Doe"))
.contains(tuple("Frodo", "Baggins"));
softly.assertThat(names)
.as("satisfiesExactly")
.satisfiesExactly(name -> assertThat(name).isNull(),
name -> assertThat(name).isNotNull());
softly.assertThat(names)
.as("satisfiesExactlyInAnyOrder")
.satisfiesExactlyInAnyOrder(name -> assertThat(name).isNull(),
name -> assertThat(name).isNotNull());
softly.assertThat(names)
.as("satisfiesOnlyOnce")
.satisfiesOnlyOnce(name -> assertThat(name).isNull());
// THEN
List<Throwable> errorsCollected = softly.errorsCollected();
then(errorsCollected).hasSize(43);
then(errorsCollected.get(0)).hasMessage("[extracting(throwingFirstNameFunction)] error message");
then(errorsCollected.get(1)).hasMessage("[extracting(throwingFirstNameFunction)] error message");
then(errorsCollected.get(2)).hasMessage("[extracting(\"last\")] error message");
then(errorsCollected.get(3)).hasMessage("[using flatExtracting on Iterable] error message");
then(errorsCollected.get(4)).hasMessage("[using flatExtracting on Iterable] error message");
then(errorsCollected.get(5)).hasMessage("[using flatExtracting on Iterable with exception] error message");
then(errorsCollected.get(6)).hasMessageContaining(bart.toString());
then(errorsCollected.get(7)).hasMessageContaining(maggie.toString());
then(errorsCollected.get(8)).hasMessageContaining(bart.toString());
then(errorsCollected.get(9)).hasMessageContaining(maggie.toString());
then(errorsCollected.get(10)).hasMessageContaining(homer.toString());
then(errorsCollected.get(11)).hasMessageContaining(fred.toString());
then(errorsCollected.get(12)).hasMessageContaining(homer.toString());
then(errorsCollected.get(13)).hasMessageContaining(bart.toString());
then(errorsCollected.get(14)).hasMessageContaining(fred.toString());
then(errorsCollected.get(15)).hasMessageContaining(bart.toString());
then(errorsCollected.get(16)).hasMessage("[extracting(firstNameFunction, lastNameFunction)] error message");
then(errorsCollected.get(17)).hasMessage("[extracting(\"first\", \"last\")] error message");
then(errorsCollected.get(18)).hasMessage("[extracting(firstNameFunction)] error message");
then(errorsCollected.get(19)).hasMessage("[extracting(\"first\", String.class)] error message");
then(errorsCollected.get(20)).hasMessage("[filteredOn(name -> name.first.startsWith(\"Jo\"))] error message");
then(errorsCollected.get(21)).hasMessage("[name.first.startsWith(\"Jo\")] error message");
then(errorsCollected.get(22)).hasMessage("[flatExtracting with multiple Extractors] error message");
then(errorsCollected.get(23)).hasMessage("[flatExtracting with multiple ThrowingExtractors] error message");
then(errorsCollected.get(24)).hasMessage("[extractingResultOf(\"getFirst\")] error message");
then(errorsCollected.get(25)).hasMessage("[extractingResultOf(\"getFirst\", String.class)] error message");
then(errorsCollected.get(26)).hasMessage("[filteredOn with condition] error message");
then(errorsCollected.get(27)).hasMessage("[filteredOn firstName in {John, Frodo}] error message");
then(errorsCollected.get(28)).hasMessage("[filteredOn firstName = John] error message");
then(errorsCollected.get(29)).hasMessage("[filteredOn firstName = null] error message");
then(errorsCollected.get(30)).hasMessage("[using flatExtracting(String... fieldOrPropertyNames)] error message");
then(errorsCollected.get(31)).hasMessage("[using flatExtracting(String fieldOrPropertyName)] error message");
then(errorsCollected.get(32)).hasMessage("[filteredOn with consumer] error message");
then(errorsCollected.get(33)).hasMessageContaining("using flatMap on Iterable");
then(errorsCollected.get(34)).hasMessageContaining("using flatMap on Iterable with exception");
then(errorsCollected.get(35)).hasMessageContaining("flatMap with multiple Extractors");
then(errorsCollected.get(36)).hasMessageContaining("flatMap with multiple ThrowingExtractors");
then(errorsCollected.get(37)).hasMessageContaining("map(throwingFirstNameFunction)");
then(errorsCollected.get(38)).hasMessageContaining("map(firstNameFunction)");
then(errorsCollected.get(39)).hasMessageContaining("map with multiple functions");
then(errorsCollected.get(40)).hasMessageContaining("satisfiesExactly");
then(errorsCollected.get(41)).hasMessageContaining("satisfiesExactlyInAnyOrder");
then(errorsCollected.get(42)).hasMessageContaining("satisfiesOnlyOnce");
}
// the test would fail if any method was not proxyable as the assertion error would not be softly caught
// NOTE: the THEN section matches collected errors by index, so the invocation order of the
// WHEN calls below must stay in sync with the errorsCollected.get(i) expectations.
@Test
void list_soft_assertions_should_report_errors_on_final_methods_and_methods_that_switch_the_object_under_test() {
  // GIVEN
  List<Name> names = list(name("John", "Doe"), name("Jane", "Doe"));
  List<CartoonCharacter> characters = list(homer, fred);
  // WHEN
  // each chain either calls a final method or switches the object under test
  // (extracting/filteredOn/flatMap/map/...) and then fails at least once; the
  // soft-assertion proxy must still collect the resulting error(s)
  softly.assertThat(names)
        .as("extracting(throwingFirstNameFunction)")
        .overridingErrorMessage("error message")
        .extracting(throwingFirstNameFunction)
        .contains("gandalf")
        .contains("frodo");
  softly.assertThat(names)
        .as("extracting(\"last\")")
        .overridingErrorMessage("error message")
        .extracting("last")
        .containsExactly("foo", "bar");
  softly.assertThat(characters)
        .as("using flatExtracting on Iterable")
        .overridingErrorMessage("error message")
        .flatExtracting(childrenExtractor)
        .hasSize(1)
        .containsAnyOf(homer, fred);
  softly.assertThat(characters)
        .as("using flatExtracting on Iterable with exception")
        .overridingErrorMessage("error message")
        .flatExtracting(CartoonCharacter::getChildrenWithException)
        .containsExactlyInAnyOrder(homer, fred);
  softly.assertThat(characters)
        .containsOnly(bart);
  softly.assertThat(characters)
        .containsOnlyOnce(maggie, bart);
  softly.assertThat(characters)
        .containsSequence(homer, bart);
  softly.assertThat(characters)
        .containsSubsequence(homer, maggie);
  softly.assertThat(characters)
        .doesNotContain(homer, maggie);
  softly.assertThat(characters)
        .doesNotContainSequence(fred);
  softly.assertThat(characters)
        .doesNotContainSubsequence(homer, fred);
  softly.assertThat(characters)
        .isSubsetOf(homer, bart);
  softly.assertThat(characters)
        .startsWith(fred);
  softly.assertThat(characters)
        .endsWith(bart);
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("extracting(firstNameFunction, lastNameFunction)")
        .extracting(firstNameFunction, lastNameFunction)
        .contains(tuple("John", "Doe"))
        .contains(tuple("Frodo", "Baggins"));
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("extracting(\"first\", \"last\")")
        .extracting("first", "last")
        .contains(tuple("John", "Doe"))
        .contains(tuple("Bilbo", "Baggins"));
  softly.assertThat(names)
        .as("extracting(firstNameFunction)")
        .overridingErrorMessage("error message")
        .extracting(firstNameFunction)
        .contains("John")
        .contains("sam");
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("extracting(\"first\", String.class)")
        .extracting("first", String.class)
        .contains("John")
        .contains("Aragorn");
  softly.assertThat(names)
        .as("filteredOn(name -> name.first.startsWith(\"Jo\"))")
        .overridingErrorMessage("error message")
        .filteredOn(name -> name.first.startsWith("Jo"))
        .hasSize(123);
  softly.assertThat(names)
        .as("name.first.startsWith(\"Jo\")")
        .overridingErrorMessage("error message")
        .filteredOn(name -> name.first.startsWith("Jo"))
        .extracting(firstNameFunction)
        .contains("Sauron");
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("flatExtracting with multiple Extractors")
        .flatExtracting(firstNameFunction, lastNameFunction)
        .contains("John", "Jane", "Doe")
        .contains("Sauron");
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("flatExtracting with multiple ThrowingExtractors")
        .flatExtracting(throwingFirstNameFunction, throwingLastNameFunction)
        .contains("John", "Jane", "Doe")
        .contains("Sauron");
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("extractingResultOf(\"getFirst\")")
        .extractingResultOf("getFirst")
        .contains("John", "Jane")
        .contains("Sam", "Aragorn");
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("extractingResultOf(\"getFirst\", String.class)")
        .extractingResultOf("getFirst", String.class)
        .contains("John", "Jane")
        .contains("Messi", "Ronaldo");
  softly.assertThat(names)
        .as("filteredOn with condition")
        .overridingErrorMessage("error message")
        .filteredOn(new Condition<>(name -> name.first.startsWith("Jo"), "startsWith Jo"))
        .hasSize(5);
  softly.assertThat(names)
        .as("filteredOn firstName in {John, Frodo}")
        .overridingErrorMessage("error message")
        .filteredOn("first", in("John", "Frodo"))
        .isEmpty();
  softly.assertThat(names)
        .as("filteredOn firstName = John")
        .overridingErrorMessage("error message")
        .filteredOn("first", "John")
        .isEmpty();
  softly.assertThat(names)
        .as("filteredOn firstName = null")
        .overridingErrorMessage("error message")
        .filteredOnNull("first")
        .isNotEmpty();
  softly.assertThat(names)
        .as("using flatExtracting(String... fieldOrPropertyNames)")
        .overridingErrorMessage("error message")
        .flatExtracting("first", "last")
        .contains("John", "Jane", "Doe")
        .contains("Sauron");
  softly.assertThat(characters)
        .as("using flatExtracting(String fieldOrPropertyName)")
        .overridingErrorMessage("error message")
        .flatExtracting("children")
        .contains(bart, maggie)
        .contains("Sauron");
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .filteredOnAssertions(name -> assertThat(name.first).startsWith("Jo"))
        .as("filteredOn with consumer")
        .hasSize(5);
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("flatMap with multiple Extractors")
        .flatMap(firstNameFunction, lastNameFunction)
        .contains("John", "Jane", "Doe")
        .contains("Sauron");
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("flatMap with multiple ThrowingExtractors")
        .flatMap(throwingFirstNameFunction, throwingLastNameFunction)
        .contains("John", "Jane", "Doe")
        .contains("Sauron");
  softly.assertThat(characters)
        .as("using flatMap on Iterable")
        .overridingErrorMessage("error message")
        .flatMap(childrenExtractor)
        .containsAnyOf(homer, fred);
  softly.assertThat(characters)
        .as("using flatMap on Iterable with exception")
        .overridingErrorMessage("error message")
        .flatMap(CartoonCharacter::getChildrenWithException)
        .containsExactlyInAnyOrder(homer, fred);
  softly.assertThat(names)
        .as("map(throwingFirstNameFunction)")
        .overridingErrorMessage("error message")
        .map(throwingFirstNameFunction)
        .contains("frodo");
  softly.assertThat(names)
        .as("map(firstNameFunction)")
        .map(firstNameFunction)
        .contains("sam");
  softly.assertThat(names)
        .map(firstNameFunction, lastNameFunction)
        .as("map with multiple functions")
        .contains(tuple("John", "Doe"))
        .contains(tuple("Frodo", "Baggins"));
  softly.assertThat(names)
        .as("satisfiesExactly")
        .satisfiesExactly(name -> assertThat(name).isNull(),
                          name -> assertThat(name).isNotNull());
  softly.assertThat(names)
        .as("satisfiesExactlyInAnyOrder")
        .satisfiesExactlyInAnyOrder(name -> assertThat(name).isNull(),
                                    name -> assertThat(name).isNotNull());
  softly.assertThat(names)
        .as("satisfiesOnlyOnce")
        .satisfiesOnlyOnce(name -> assertThat(name).isNull());
  // THEN
  List<Throwable> errorsCollected = softly.errorsCollected();
  then(errorsCollected).hasSize(43);
  then(errorsCollected.get(0)).hasMessage("[extracting(throwingFirstNameFunction)] error message");
  then(errorsCollected.get(1)).hasMessage("[extracting(throwingFirstNameFunction)] error message");
  then(errorsCollected.get(2)).hasMessage("[extracting(\"last\")] error message");
  then(errorsCollected.get(3)).hasMessage("[using flatExtracting on Iterable] error message");
  then(errorsCollected.get(4)).hasMessage("[using flatExtracting on Iterable] error message");
  then(errorsCollected.get(5)).hasMessage("[using flatExtracting on Iterable with exception] error message");
  // errors 6-15 come from the plain final-method assertions (containsOnly, containsSequence, ...)
  // whose default messages reference the offending elements
  then(errorsCollected.get(6)).hasMessageContaining(bart.toString());
  then(errorsCollected.get(7)).hasMessageContaining(maggie.toString());
  then(errorsCollected.get(8)).hasMessageContaining(bart.toString());
  then(errorsCollected.get(9)).hasMessageContaining(maggie.toString());
  then(errorsCollected.get(10)).hasMessageContaining(homer.toString());
  then(errorsCollected.get(11)).hasMessageContaining(fred.toString());
  then(errorsCollected.get(12)).hasMessageContaining(homer.toString());
  then(errorsCollected.get(13)).hasMessageContaining(bart.toString());
  then(errorsCollected.get(14)).hasMessageContaining(fred.toString());
  then(errorsCollected.get(15)).hasMessageContaining(bart.toString());
  then(errorsCollected.get(16)).hasMessage("[extracting(firstNameFunction, lastNameFunction)] error message");
  then(errorsCollected.get(17)).hasMessage("[extracting(\"first\", \"last\")] error message");
  then(errorsCollected.get(18)).hasMessage("[extracting(firstNameFunction)] error message");
  then(errorsCollected.get(19)).hasMessage("[extracting(\"first\", String.class)] error message");
  then(errorsCollected.get(20)).hasMessage("[filteredOn(name -> name.first.startsWith(\"Jo\"))] error message");
  then(errorsCollected.get(21)).hasMessage("[name.first.startsWith(\"Jo\")] error message");
  then(errorsCollected.get(22)).hasMessage("[flatExtracting with multiple Extractors] error message");
  then(errorsCollected.get(23)).hasMessage("[flatExtracting with multiple ThrowingExtractors] error message");
  then(errorsCollected.get(24)).hasMessage("[extractingResultOf(\"getFirst\")] error message");
  then(errorsCollected.get(25)).hasMessage("[extractingResultOf(\"getFirst\", String.class)] error message");
  then(errorsCollected.get(26)).hasMessage("[filteredOn with condition] error message");
  then(errorsCollected.get(27)).hasMessage("[filteredOn firstName in {John, Frodo}] error message");
  then(errorsCollected.get(28)).hasMessage("[filteredOn firstName = John] error message");
  then(errorsCollected.get(29)).hasMessage("[filteredOn firstName = null] error message");
  then(errorsCollected.get(30)).hasMessage("[using flatExtracting(String... fieldOrPropertyNames)] error message");
  then(errorsCollected.get(31)).hasMessage("[using flatExtracting(String fieldOrPropertyName)] error message");
  then(errorsCollected.get(32)).hasMessage("[filteredOn with consumer] error message");
  then(errorsCollected.get(33)).hasMessageContaining("flatMap with multiple Extractors");
  then(errorsCollected.get(34)).hasMessageContaining("flatMap with multiple ThrowingExtractors");
  then(errorsCollected.get(35)).hasMessageContaining("using flatMap on Iterable");
  then(errorsCollected.get(36)).hasMessageContaining("using flatMap on Iterable with exception");
  then(errorsCollected.get(37)).hasMessageContaining("map(throwingFirstNameFunction)");
  then(errorsCollected.get(38)).hasMessageContaining("map(firstNameFunction)");
  then(errorsCollected.get(39)).hasMessageContaining("map(firstNameFunction)");
  then(errorsCollected.get(39)).hasMessageContaining("map with multiple functions");
  then(errorsCollected.get(40)).hasMessageContaining("satisfiesExactly");
  then(errorsCollected.get(41)).hasMessageContaining("satisfiesExactlyInAnyOrder");
  then(errorsCollected.get(42)).hasMessageContaining("satisfiesOnlyOnce");
}
// the test would fail if any method was not proxyable as the assertion error would not be softly caught
// NOTE: the THEN section matches collected errors by index, so the invocation order of the
// WHEN calls below must stay in sync with the errorsCollected.get(i) expectations.
@Test
void object_array_soft_assertions_should_report_errors_on_final_methods_and_methods_that_switch_the_object_under_test() {
  // GIVEN
  Name[] names = array(name("John", "Doe"), name("Jane", "Doe"));
  CartoonCharacter[] characters = array(homer, fred);
  // WHEN
  // each chain either calls a final method or switches the object under test and then fails
  // at least once; the soft-assertion proxy must still collect the resulting error(s)
  softly.assertThat(names)
        .as("extracting(Name::getFirst)")
        .overridingErrorMessage("error message")
        .extracting(Name::getFirst)
        .contains("gandalf")
        .contains("frodo");
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("extracting(\"last\")")
        .extracting("last")
        .containsExactly("foo", "bar");
  softly.assertThat(characters)
        .as("using flatExtracting on Iterable")
        .overridingErrorMessage("error message")
        .flatExtracting(CartoonCharacter::getChildren)
        .hasSize(1)
        .containsAnyOf(homer, fred);
  softly.assertThat(characters)
        .overridingErrorMessage("error message")
        .as("using flatExtracting on Iterable with exception")
        .flatExtracting(CartoonCharacter::getChildrenWithException)
        .containsExactlyInAnyOrder(homer, fred);
  softly.assertThat(characters)
        .containsOnly(bart);
  softly.assertThat(characters)
        .containsOnlyOnce(maggie, bart);
  softly.assertThat(characters)
        .containsSequence(homer, bart);
  softly.assertThat(characters)
        .containsSubsequence(homer, maggie);
  softly.assertThat(characters)
        .doesNotContain(homer, maggie);
  softly.assertThat(characters)
        .doesNotContainSequence(fred);
  softly.assertThat(characters)
        .doesNotContainSubsequence(homer, fred);
  softly.assertThat(characters)
        .isSubsetOf(homer, bart);
  softly.assertThat(characters)
        .startsWith(fred);
  softly.assertThat(characters)
        .endsWith(bart);
  softly.assertThat(names)
        .as("extracting(Name::getFirst, Name::getLast)")
        .overridingErrorMessage("error message")
        .extracting(Name::getFirst, Name::getLast)
        .contains(tuple("John", "Doe"))
        .contains(tuple("Frodo", "Baggins"));
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("extracting(\"first\", \"last\")")
        .extracting("first", "last")
        .contains(tuple("John", "Doe"))
        .contains(tuple("Bilbo", "Baggins"));
  softly.assertThat(names)
        .overridingErrorMessage("error message")
        .as("extracting(firstNameFunction)")
        .extracting(firstNameFunction)
        .contains("John")
        .contains("sam");
  softly.assertThat(names)
        .as("extracting(\"first\", String.class)")
        .overridingErrorMessage("error message")
        .extracting("first", String.class)
        .contains("John")
        .contains("Aragorn");
  softly.assertThat(names)
        .as("filteredOn(name -> name.first.startsWith(\"Jo\"))")
        .overridingErrorMessage("error message")
        .filteredOn(name -> name.first.startsWith("Jo"))
        .hasSize(123);
  softly.assertThat(names)
        .as("filteredOn + extracting")
        .overridingErrorMessage("error message")
        .filteredOn(name -> name.first.startsWith("Jo"))
        .extracting(firstNameFunction)
        .contains("Sauron");
  softly.assertThat(names)
        .as("extractingResultOf(\"getFirst\")")
        .overridingErrorMessage("error message")
        .extractingResultOf("getFirst")
        .contains("John", "Jane")
        .contains("Sam", "Aragorn");
  softly.assertThat(names)
        .as("extractingResultOf(\"getFirst\", String.class)")
        .overridingErrorMessage("error message")
        .extractingResultOf("getFirst", String.class)
        .contains("John", "Jane")
        .contains("Messi", "Ronaldo");
  softly.assertThat(names)
        .as("filteredOn with condition")
        .overridingErrorMessage("error message")
        .filteredOn(new Condition<>(name -> name.first.startsWith("Jo"), "startsWith Jo"))
        .hasSize(5);
  softly.assertThat(names)
        .as("filteredOn firstName in {John, Frodo}")
        .overridingErrorMessage("error message")
        .filteredOn("first", in("John", "Frodo"))
        .isEmpty();
  softly.assertThat(names)
        .as("filteredOn firstName = John")
        .overridingErrorMessage("error message")
        .filteredOn("first", "John")
        .isEmpty();
  softly.assertThat(names)
        .as("filteredOn firstName = null")
        .overridingErrorMessage("error message")
        .filteredOnNull("first")
        .isNotEmpty();
  softly.assertThat(characters)
        .as("using flatExtracting(String fieldOrPropertyName)")
        .overridingErrorMessage("error message")
        .flatExtracting("children")
        .contains(bart, maggie)
        .contains("Sauron");
  softly.assertThat(names)
        .filteredOnAssertions(name -> assertThat(name.first).startsWith("Jo"))
        .as("filteredOn with consumer")
        .hasSize(5);
  // THEN
  List<Throwable> errorsCollected = softly.errorsCollected();
  then(errorsCollected).hasSize(30);
  then(errorsCollected.get(0)).hasMessage("[extracting(Name::getFirst)] error message");
  then(errorsCollected.get(1)).hasMessage("[extracting(Name::getFirst)] error message");
  // single check here: the previous version chained the exact same hasMessage assertion twice
  then(errorsCollected.get(2)).hasMessage("[extracting(\"last\")] error message");
  then(errorsCollected.get(3)).hasMessage("[using flatExtracting on Iterable] error message");
  then(errorsCollected.get(4)).hasMessage("[using flatExtracting on Iterable] error message");
  then(errorsCollected.get(5)).hasMessage("[using flatExtracting on Iterable with exception] error message");
  // errors 6-15 come from the plain final-method assertions (containsOnly, containsSequence, ...)
  // whose default messages reference the offending elements
  then(errorsCollected.get(6)).hasMessageContaining(bart.toString());
  then(errorsCollected.get(7)).hasMessageContaining(maggie.toString());
  then(errorsCollected.get(8)).hasMessageContaining(bart.toString());
  then(errorsCollected.get(9)).hasMessageContaining(maggie.toString());
  then(errorsCollected.get(10)).hasMessageContaining(homer.toString());
  then(errorsCollected.get(11)).hasMessageContaining(fred.toString());
  then(errorsCollected.get(12)).hasMessageContaining(homer.toString());
  then(errorsCollected.get(13)).hasMessageContaining(bart.toString());
  then(errorsCollected.get(14)).hasMessageContaining(fred.toString());
  then(errorsCollected.get(15)).hasMessageContaining(bart.toString());
  then(errorsCollected.get(16)).hasMessage("[extracting(Name::getFirst, Name::getLast)] error message");
  then(errorsCollected.get(17)).hasMessage("[extracting(\"first\", \"last\")] error message");
  then(errorsCollected.get(18)).hasMessage("[extracting(firstNameFunction)] error message");
  then(errorsCollected.get(19)).hasMessage("[extracting(\"first\", String.class)] error message");
  then(errorsCollected.get(20)).hasMessage("[filteredOn(name -> name.first.startsWith(\"Jo\"))] error message");
  then(errorsCollected.get(21)).hasMessage("[filteredOn + extracting] error message");
  then(errorsCollected.get(22)).hasMessage("[extractingResultOf(\"getFirst\")] error message");
  then(errorsCollected.get(23)).hasMessage("[extractingResultOf(\"getFirst\", String.class)] error message");
  then(errorsCollected.get(24)).hasMessage("[filteredOn with condition] error message");
  then(errorsCollected.get(25)).hasMessage("[filteredOn firstName in {John, Frodo}] error message");
  then(errorsCollected.get(26)).hasMessage("[filteredOn firstName = John] error message");
  then(errorsCollected.get(27)).hasMessage("[filteredOn firstName = null] error message");
  then(errorsCollected.get(28)).hasMessage("[using flatExtracting(String fieldOrPropertyName)] error message");
  then(errorsCollected.get(29)).hasMessageContaining("filteredOn with consumer");
}
// the test would fail if any method was not proxyable as the assertion error would not be softly caught
@Test
void class_soft_assertions_should_report_errors_on_final_methods() {
  // GIVEN
  Class<AnnotatedClass> actual = AnnotatedClass.class;
  // WHEN
  // the first hasAnnotations succeeds; the chained second one fails (SafeVarargs is not on
  // AnnotatedClass) and must be collected by the soft-assertion proxy
  softly.assertThat(actual)
        .hasAnnotations(MyAnnotation.class, AnotherAnnotation.class)
        .hasAnnotations(SafeVarargs.class);
  // THEN
  then(softly.errorsCollected()).singleElement(as(THROWABLE)).hasMessageContaining("SafeVarargs");
}
// the test would fail if any method was not proxyable as the assertion error would not be softly caught
// NOTE(review): "removal" is presumably suppressed for the deprecated asString() call below — confirm
@SuppressWarnings("removal")
@Test
void object_soft_assertions_should_report_errors_on_final_methods_and_methods_that_switch_the_object_under_test() {
  // GIVEN
  Name name = name("John", "Doe");
  Object alphabet = "abcdefghijklmnopqrstuvwxyz";
  // WHEN
  // each chain switches the object under test (extracting/asString) and then fails once;
  // the THEN section matches the collected errors by index
  softly.assertThat(name)
        .as("extracting(\"first\", \"last\")")
        .overridingErrorMessage("error message")
        .extracting("first", "last")
        .contains("John")
        .contains("gandalf");
  softly.assertThat(name)
        .as("extracting(Name::getFirst, Name::getLast)")
        .overridingErrorMessage("error message")
        .extracting(Name::getFirst, Name::getLast)
        .contains("John")
        .contains("frodo");
  softly.assertThat(alphabet)
        .overridingErrorMessage("error message")
        .as("asString()")
        .asString()
        .startsWith("abc")
        .startsWith("123");
  softly.assertThat(name)
        .as("extracting(Name::getFirst)")
        .overridingErrorMessage("error message")
        .extracting(Name::getFirst)
        .isEqualTo("Jack");
  softly.assertThat(name)
        .as("extracting(first)")
        .overridingErrorMessage("error message")
        .extracting("first")
        .isEqualTo("Jack");
  // THEN
  List<Throwable> errorsCollected = softly.errorsCollected();
  then(errorsCollected).hasSize(5);
  then(errorsCollected.get(0)).hasMessage("[extracting(\"first\", \"last\")] error message");
  then(errorsCollected.get(1)).hasMessage("[extracting(Name::getFirst, Name::getLast)] error message");
  then(errorsCollected.get(2)).hasMessage("[asString()] error message");
  then(errorsCollected.get(3)).hasMessage("[extracting(Name::getFirst)] error message");
  then(errorsCollected.get(4)).hasMessage("[extracting(first)] error message");
}
// the test would fail if any method was not proxyable as the assertion error would not be softly caught
// NOTE: the THEN section matches collected errors by index; the very first statement produces
// two errors (contains + isEmpty), which is why 16 WHEN statements yield 17 errors.
@Test
void map_soft_assertions_should_report_errors_on_final_methods_and_methods_that_switch_the_object_under_test() {
  // GIVEN
  Map<String, String> map = mapOf(entry("a", "1"), entry("b", "2"), entry("c", "3"));
  // WHEN
  softly.assertThat(map).contains(entry("abc", "ABC"), entry("def", "DEF")).isEmpty();
  softly.assertThat(map).containsAnyOf(entry("gh", "GH"), entry("ij", "IJ"));
  softly.assertThat(map).containsExactly(entry("kl", "KL"), entry("mn", "MN"));
  softly.assertThat(map).containsKeys("K1", "K2");
  softly.assertThat(map).containsOnly(entry("op", "OP"), entry("qr", "QR"));
  softly.assertThat(map).containsOnlyKeys("K3", "K4");
  softly.assertThat(map).containsValues("V1", "V2");
  softly.assertThat(map).doesNotContain(entry("a", "1"), entry("abc", "ABC"));
  softly.assertThat(map).doesNotContainKeys("a", "b");
  softly.assertThat(map)
        .as("extracting(\"a\", \"b\")")
        .overridingErrorMessage("error message")
        .extractingByKeys("a", "b")
        .contains("456");
  softly.assertThat(iterableMap)
        .as("flatExtracting(\"name\", \"job\", \"city\", \"rank\")")
        .overridingErrorMessage("error message")
        .flatExtracting("name", "job", "city", "rank")
        .contains("Unexpected", "Builder", "Dover", "Boston", "Paris", 1, 2, 3);
  softly.assertThat(map)
        .as("size()")
        .overridingErrorMessage("error message")
        .size()
        .isGreaterThan(1000);
  softly.assertThat(map).containsExactlyEntriesOf(mapOf(entry("kl", "KL"), entry("mn", "MN")));
  softly.assertThat(map).containsExactlyInAnyOrderEntriesOf(mapOf(entry("a", "1"), entry("b", "2")));
  softly.assertThat(map)
        .as("extracting(\"a\")")
        .overridingErrorMessage("error message")
        .extractingByKey("a")
        .isEqualTo("456");
  softly.assertThat(map)
        .as("extracting(\"a\") as string")
        .overridingErrorMessage("error message")
        .extractingByKey("a", as(STRING))
        .startsWith("456");
  // THEN
  List<Throwable> errors = softly.errorsCollected();
  then(errors).hasSize(17);
  then(errors.get(0)).hasMessageContaining("\"abc\"=\"ABC\"");
  then(errors.get(1)).hasMessageContaining("empty");
  then(errors.get(2)).hasMessageContaining("gh")
                     .hasMessageContaining("IJ");
  then(errors.get(3)).hasMessageContaining("\"a\"=\"1\"");
  then(errors.get(4)).hasMessageContaining("K2");
  then(errors.get(5)).hasMessageContaining("OP");
  then(errors.get(6)).hasMessageContaining("K4");
  then(errors.get(7)).hasMessageContaining("V2");
  then(errors.get(8)).hasMessageContaining("ABC");
  then(errors.get(9)).hasMessageContaining("b");
  then(errors.get(10)).hasMessage("[extracting(\"a\", \"b\")] error message");
  then(errors.get(11)).hasMessage("[flatExtracting(\"name\", \"job\", \"city\", \"rank\")] error message");
  then(errors.get(12)).hasMessage("[size()] error message");
  then(errors.get(13)).hasMessageContaining("\"a\"=\"1\"");
  then(errors.get(14)).hasMessageContaining("to contain only");
  then(errors.get(15)).hasMessage("[extracting(\"a\")] error message");
  then(errors.get(16)).hasMessage("[extracting(\"a\") as string] error message");
}
// size() navigates to an integer assertion and returnToMap() navigates back to the map
// assertion; failures on either side of the navigation must be softly collected.
@Test
void map_soft_assertions_should_work_with_navigation_methods() {
  // GIVEN
  Map<String, String> map = mapOf(entry("a", "1"), entry("b", "2"), entry("c", "3"));
  // WHEN
  softly.assertThat(map)
        .as("navigate to size")
        .overridingErrorMessage("error message")
        .size()
        .isGreaterThan(10);
  softly.assertThat(map)
        .size()
        .isGreaterThan(1)
        // .as("returnToMap") TODO not yet supported
        .returnToMap()
        .as("returnToMap")
        .isEmpty();
  softly.assertThat(map)
        .size()
        .isGreaterThan(1)
        .returnToMap()
        .containsKey("nope")
        .as("check size after navigating back")
        .size()
        .isLessThan(2);
  // THEN
  List<Throwable> errorsCollected = softly.errorsCollected();
  then(errorsCollected).hasSize(4);
  then(errorsCollected.get(0)).hasMessageContaining("[navigate to size] error message");
  then(errorsCollected.get(1)).hasMessageContaining("returnToMap");
  then(errorsCollected.get(2)).hasMessageContaining("nope");
  then(errorsCollected.get(3)).hasMessageContaining("check size after navigating back");
}
// accepts() fails on "boxing"/"marathon" and rejects() fails on "basketball"; both errors
// must be softly collected even though these methods are final on the predicate assertion.
@Test
void predicate_soft_assertions_should_report_errors_on_final_methods() {
  // GIVEN
  Predicate<MapEntry<String, String>> containsBall = sport -> sport.value.contains("ball");
  // WHEN
  softly.assertThat(containsBall)
        .accepts(entry("sport", "boxing"), entry("sport", "marathon"))
        .rejects(entry("sport", "football"), entry("sport", "basketball"));
  // THEN
  List<Throwable> collected = softly.errorsCollected();
  then(collected).hasSize(2);
  then(collected.get(0)).hasMessageContaining("boxing");
  then(collected.get(1)).hasMessageContaining("basketball");
}
// map()/flatMap()/get() all switch the object under test; the errors raised after the
// switch must still be softly collected, in invocation order.
@Test
void optional_soft_assertions_should_report_errors_on_methods_that_switch_the_object_under_test() {
  // GIVEN
  Optional<String> optional = Optional.of("Yoda");
  Function<String, Optional<String>> upperCaseOptional = s -> s == null ? Optional.empty() : Optional.of(s.toUpperCase());
  // WHEN
  softly.assertThat(optional)
        .as("map(String::length)")
        .overridingErrorMessage("error message")
        .map(String::length)
        .hasValue(4)
        .hasValue(777); // fail
  softly.assertThat(optional)
        .as("flatMap(upperCaseOptional)")
        .flatMap(upperCaseOptional)
        .contains("YODA")
        .contains("yoda") // fail
        .as("map(String::length) after flatMap(upperCaseOptional)")
        .map(String::length)
        .hasValue(4)
        .hasValue(888); // fail
  softly.assertThat(optional)
        .as("get()")
        .overridingErrorMessage("error message")
        .get()
        .isEqualTo("Yoda")
        .isEqualTo("Luke"); // fail
  softly.assertThat(optional)
        .as("get(as(STRING))")
        .overridingErrorMessage("error message")
        .get(as(STRING))
        .startsWith("Yo")
        .startsWith("Lu"); // fail
  // THEN
  List<Throwable> errorsCollected = softly.errorsCollected();
  then(errorsCollected).hasSize(5);
  then(errorsCollected.get(0)).hasMessage("[map(String::length)] error message");
  then(errorsCollected.get(1)).hasMessageContaining("flatMap(upperCaseOptional)")
                              .hasMessageContaining("yoda");
  then(errorsCollected.get(2)).hasMessageContaining("map(String::length) after flatMap(upperCaseOptional)")
                              .hasMessageContaining("888");
  then(errorsCollected.get(3)).hasMessage("[get()] error message");
  then(errorsCollected.get(4)).hasMessage("[get(as(STRING))] error message");
}
// asBase64Decoded() switches the object under test from String to byte[]; the failing
// isEmpty() that follows must still be softly collected with the configured description.
@Test
void string_soft_assertions_should_report_errors_on_methods_that_switch_the_object_under_test() {
  // GIVEN
  String base64String = "QXNzZXJ0Sg==";
  // WHEN
  softly.assertThat(base64String)
        .as("asBase64Decoded()")
        .overridingErrorMessage("error message 1")
        .asBase64Decoded()
        .isEmpty();
  // THEN
  List<Throwable> collected = softly.errorsCollected();
  then(collected).extracting(Throwable::getMessage)
                 .containsExactly("[asBase64Decoded()] error message 1");
}
// asBase64Encoded() switches the object under test from byte[] to String; the failing
// isEmpty() that follows must still be softly collected with the configured description.
@Test
void byte_array_soft_assertions_should_report_errors_on_methods_that_switch_the_object_under_test() {
  // GIVEN
  byte[] byteArray = "AssertJ".getBytes();
  // WHEN
  softly.assertThat(byteArray)
        .as("asBase64Encoded()")
        .overridingErrorMessage("error message 1")
        .asBase64Encoded()
        .isEmpty();
  // THEN
  List<Throwable> collected = softly.errorsCollected();
  then(collected).extracting(Throwable::getMessage)
                 .containsExactly("[asBase64Encoded()] error message 1");
}
// happy path: decoding a valid base64 String through the soft-assertion proxy raises no error.
@Test
void should_work_with_string() {
  // GIVEN
  String base64String = "QXNzZXJ0Sg==";
  byte[] expectedBytes = "AssertJ".getBytes();
  // WHEN
  softly.assertThat(base64String)
        .asBase64Decoded()
        .containsExactly(expectedBytes);
  // THEN
  softly.assertAll();
}
// happy path: encoding a byte[] to base64 through the soft-assertion proxy raises no error.
@Test
void should_work_with_byte_array() {
  // GIVEN
  byte[] byteArray = "AssertJ".getBytes();
  String expectedEncoding = "QXNzZXJ0Sg==";
  // WHEN
  softly.assertThat(byteArray)
        .asBase64Encoded()
        .isEqualTo(expectedEncoding);
  // THEN
  softly.assertAll();
}
// zipSatisfy pairs the list with itself, so the isNotEqualTo requirement fails; the
// resulting error must carry the configured description and overridden message.
@Test
void soft_assertions_should_work_with_zipSatisfy() {
  // GIVEN
  List<Name> names = list(name("John", "Doe"), name("Jane", "Doe"));
  // WHEN
  softly.assertThat(names)
        .as("zipSatisfy")
        .overridingErrorMessage("error message")
        .zipSatisfy(names, (actualName, otherName) -> softly.assertThat(actualName).isNotEqualTo(otherName));
  // THEN
  List<Throwable> collected = softly.errorsCollected();
  then(collected).singleElement(as(THROWABLE))
                 .hasMessage("[zipSatisfy] error message");
}
// regression test: the actual value contains '%' characters, which must survive unchanged
// in the failure message instead of being mangled as format specifiers (issue #1209)
@Test
void bug_1209() {
  // GIVEN
  String string = "%%E";
  // WHEN
  softly.assertThat(string).matches("fffff");
  // THEN
  then(softly.errorsCollected()).singleElement(as(THROWABLE))
                                .hasMessageContaining("%%E")
                                .hasMessageContaining("to match pattern")
                                .hasMessageContaining("fffff");
}
// the UNICODE representation is registered before extracting() switches the object under
// test; the failure message must still escape non-ASCII characters (here: ó -> \u00f3)
@Test
void should_keep_representation_after_changing_the_object_under_test() {
  // GIVEN
  List<Name> names = list(name("John", "Doe"), name("Jane", "Doe"));
  // WHEN
  softly.assertThat(names)
        .as("unicode")
        .withRepresentation(UNICODE_REPRESENTATION)
        .extracting(throwingFirstNameFunction)
        .contains("ó");
  // THEN
  then(softly.errorsCollected()).singleElement(as(THROWABLE))
                                .hasMessageContaining("unicode")
                                .hasMessageContaining("\\u00f3");
}
@Test
void should_keep_registered_comparators_after_changing_the_iterable_under_test() {
// GIVEN
Iterable<Name> names = list(name("Manu", "Ginobili"), name("Magic", "Johnson"));
// WHEN
softly.assertThat(names)
.extracting(firstNameFunction)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOn(string -> string.startsWith("Ma"))
.containsExactly("MANU", "MAGIC");
softly.assertThat(names)
.extracting(firstNameFunction)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOn(new Condition<>(string -> string.startsWith("Ma"), "starts with Ma"))
.containsExactly("MANU", "MAGIC");
softly.assertThat(names)
.extracting(firstNameFunction)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOnAssertions(string -> assertThat(string).startsWith("Ma"))
.containsExactly("MANU", "MAGIC");
softly.assertThat(names)
.usingElementComparator(lastNameComparator)
.filteredOn("first", "Manu")
.containsExactly(name("Whoever", "Ginobili"));
softly.assertThat(names)
.usingElementComparator(lastNameComparator)
.filteredOn("first", not("Manu"))
.containsExactly(name("Whoever", "Johnson"));
softly.assertThat(array(name("John", null), name("Jane", "Doe")))
.usingElementComparator(alwaysEqual())
.filteredOnNull("last")
.hasSize(1)
.contains(name("Can be", "anybody"));
// THEN
then(softly.errorsCollected()).isEmpty();
}
@Test
void should_keep_registered_comparators_after_changing_the_list_under_test() {
// GIVEN
List<Name> names = list(name("Manu", "Ginobili"), name("Magic", "Johnson"));
// WHEN
softly.assertThat(names)
.extracting(firstNameFunction)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOn(string -> string.startsWith("Ma"))
.containsExactly("MANU", "MAGIC");
softly.assertThat(names)
.extracting(firstNameFunction)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOn(new Condition<>(string -> string.startsWith("Ma"), "starts with Ma"))
.containsExactly("MANU", "MAGIC");
softly.assertThat(names)
.extracting(firstNameFunction)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOnAssertions(string -> assertThat(string).startsWith("Ma"))
.containsExactly("MANU", "MAGIC");
softly.assertThat(names)
.usingElementComparator(lastNameComparator)
.filteredOn("first", "Manu")
.containsExactly(name("Whoever", "Ginobili"));
softly.assertThat(names)
.usingElementComparator(lastNameComparator)
.filteredOn("first", not("Manu"))
.containsExactly(name("Whoever", "Johnson"));
softly.assertThat(array(name("John", null), name("Jane", "Doe")))
.usingElementComparator(alwaysEqual())
.filteredOnNull("last")
.hasSize(1)
.contains(name("Can be", "anybody"));
// THEN
then(softly.errorsCollected()).isEmpty();
}
@Test
void should_keep_registered_comparators_after_changing_the_object_array_under_test() {
// GIVEN
Name[] names = array(name("Manu", "Ginobili"), name("Magic", "Johnson"));
// WHEN
softly.assertThat(names)
.extracting(firstNameFunction)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOn(string -> string.startsWith("Ma"))
.containsExactly("MANU", "MAGIC");
softly.assertThat(names)
.extracting(firstNameFunction)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOn(new Condition<>(string -> string.startsWith("Ma"), "starts with Ma"))
.containsExactly("MANU", "MAGIC");
softly.assertThat(names)
.extracting(firstNameFunction)
.usingElementComparator(CaseInsensitiveStringComparator.INSTANCE)
.filteredOnAssertions(string -> assertThat(string).startsWith("Ma"))
.containsExactly("MANU", "MAGIC");
softly.assertThat(names)
.usingElementComparator(lastNameComparator)
.filteredOn("first", "Manu")
.containsExactly(name("Whoever", "Ginobili"));
softly.assertThat(names)
.usingElementComparator(lastNameComparator)
.filteredOn("first", not("Manu"))
.containsExactly(name("Whoever", "Johnson"));
softly.assertThat(array(name("John", null), name("Jane", "Doe")))
.usingElementComparator(alwaysEqual())
.filteredOnNull("last")
.hasSize(1)
.contains(name("Can be", "anybody"));
// THEN
then(softly.errorsCollected()).isEmpty();
}
@Test
void soft_assertions_should_work_with_satisfiesAnyOf() {
// GIVEN
TolkienCharacter legolas = TolkienCharacter.of("Legolas", 1000, ELF);
Consumer<TolkienCharacter> isHobbit = tolkienCharacter -> assertThat(tolkienCharacter.getRace()).isEqualTo(HOBBIT);
Consumer<TolkienCharacter> isMan = tolkienCharacter -> assertThat(tolkienCharacter.getRace()).isEqualTo(MAN);
// WHEN
softly.assertThat(legolas)
.as("satisfiesAnyOf")
.satisfiesAnyOf(isHobbit, isMan);
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.hasMessageContaining("[satisfiesAnyOf] ")
.hasMessageContaining("HOBBIT")
.hasMessageContaining("ELF")
.hasMessageContaining("MAN");
}
@Test
void soft_assertions_should_work_with_satisfies() {
// GIVEN
TolkienCharacter legolas = TolkienCharacter.of("Legolas", 1000, ELF);
Consumer<TolkienCharacter> isHobbit = tolkienCharacter -> assertThat(tolkienCharacter.getRace()).isEqualTo(HOBBIT);
Consumer<TolkienCharacter> isElf = tolkienCharacter -> assertThat(tolkienCharacter.getRace()).isEqualTo(ELF);
Consumer<TolkienCharacter> isMan = tolkienCharacter -> assertThat(tolkienCharacter.getRace()).isEqualTo(MAN);
// WHEN
softly.assertThat(legolas).as("satisfies").satisfies(isHobbit, isElf, isMan);
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.hasMessageContaining("[satisfies] ")
.hasMessageContaining("HOBBIT")
.hasMessageContaining("MAN");
}
@Test
void soft_assertions_should_work_with_assertThatObject() {
// GIVEN
TolkienCharacter legolas = TolkienCharacter.of("Legolas", 1000, ELF);
Deque<TolkienCharacter> characters = new LinkedList<>(list(legolas));
Consumer<Deque<TolkienCharacter>> isFirstHobbit = tolkienCharacters -> assertThat(tolkienCharacters.getFirst()
.getRace()).isEqualTo(HOBBIT);
Consumer<Deque<TolkienCharacter>> isFirstMan = tolkienCharacters -> assertThat(tolkienCharacters.getFirst()
.getRace()).isEqualTo(MAN);
// WHEN
softly.assertThatObject(characters)
.as("assertThatObject#satisfiesAnyOf")
.satisfiesAnyOf(isFirstHobbit, isFirstMan);
// THEN
then(softly.errorsCollected()).singleElement(as(THROWABLE))
.hasMessageContaining("[assertThatObject#satisfiesAnyOf] ")
.hasMessageContaining("HOBBIT")
.hasMessageContaining("ELF")
.hasMessageContaining("MAN");
}
@Nested
| TolkienSoftAssertions |
java | quarkusio__quarkus | extensions/elasticsearch-rest-client/deployment/src/main/java/io/quarkus/elasticsearch/restclient/lowlevel/deployment/ElasticsearchBuildTimeConfig.java | {
"start": 384,
"end": 622
} | interface ____ {
/**
* Whether a health check is published in case the smallrye-health extension is present.
*/
@WithName("health.enabled")
@WithDefault("true")
boolean healthEnabled();
}
| ElasticsearchBuildTimeConfig |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/codegen/UnsafeArrayWriter.java | {
"start": 1474,
"end": 6982
} | class ____ extends UnsafeWriter {
// The number of elements in this array
private int numElements;
// The element size in this array
private int elementSize;
private int headerInBytes;
private void assertIndexIsValid(int index) {
assert index >= 0 : "index (" + index + ") should >= 0";
assert index < numElements : "index (" + index + ") should < " + numElements;
}
public UnsafeArrayWriter(UnsafeWriter writer, int elementSize) {
super(writer.getBufferHolder());
this.elementSize = elementSize;
}
public void initialize(int numElements) {
// We need 8 bytes to store numElements in header
this.numElements = numElements;
this.headerInBytes = calculateHeaderPortionInBytes(numElements);
this.startingOffset = cursor();
long fixedPartInBytesLong =
ByteArrayMethods.roundNumberOfBytesToNearestWord((long) elementSize * numElements);
long totalInitialSize = headerInBytes + fixedPartInBytesLong;
if (totalInitialSize > Integer.MAX_VALUE) {
throw QueryExecutionErrors.tooManyArrayElementsError(
fixedPartInBytesLong, Integer.MAX_VALUE);
}
// it's now safe to cast fixedPartInBytesLong and totalInitialSize to int
int fixedPartInBytes = (int) fixedPartInBytesLong;
// Grows the global buffer ahead for header and fixed size data.
holder.grow((int)totalInitialSize);
// Write numElements and clear out null bits to header
Platform.putLong(getBuffer(), startingOffset, numElements);
for (int i = 8; i < headerInBytes; i += 8) {
Platform.putLong(getBuffer(), startingOffset + i, 0L);
}
// fill 0 into reminder part of 8-bytes alignment in unsafe array
for (int i = elementSize * numElements; i < fixedPartInBytes; i++) {
Platform.putByte(getBuffer(), startingOffset + headerInBytes + i, (byte) 0);
}
increaseCursor(headerInBytes + fixedPartInBytes);
}
private long getElementOffset(int ordinal) {
return startingOffset + headerInBytes + ordinal * (long) elementSize;
}
private void setNullBit(int ordinal) {
assertIndexIsValid(ordinal);
BitSetMethods.set(getBuffer(), startingOffset + 8, ordinal);
}
@Override
public void setNull1Bytes(int ordinal) {
setNullBit(ordinal);
// put zero into the corresponding field when set null
writeByte(getElementOffset(ordinal), (byte)0);
}
@Override
public void setNull2Bytes(int ordinal) {
setNullBit(ordinal);
// put zero into the corresponding field when set null
writeShort(getElementOffset(ordinal), (short)0);
}
@Override
public void setNull4Bytes(int ordinal) {
setNullBit(ordinal);
// put zero into the corresponding field when set null
writeInt(getElementOffset(ordinal), 0);
}
@Override
public void setNull8Bytes(int ordinal) {
setNullBit(ordinal);
// put zero into the corresponding field when set null
writeLong(getElementOffset(ordinal), 0);
}
public void setNull(int ordinal) { setNull8Bytes(ordinal); }
@Override
public void write(int ordinal, boolean value) {
assertIndexIsValid(ordinal);
writeBoolean(getElementOffset(ordinal), value);
}
@Override
public void write(int ordinal, byte value) {
assertIndexIsValid(ordinal);
writeByte(getElementOffset(ordinal), value);
}
@Override
public void write(int ordinal, short value) {
assertIndexIsValid(ordinal);
writeShort(getElementOffset(ordinal), value);
}
@Override
public void write(int ordinal, int value) {
assertIndexIsValid(ordinal);
writeInt(getElementOffset(ordinal), value);
}
@Override
public void write(int ordinal, long value) {
assertIndexIsValid(ordinal);
writeLong(getElementOffset(ordinal), value);
}
@Override
public void write(int ordinal, float value) {
assertIndexIsValid(ordinal);
writeFloat(getElementOffset(ordinal), value);
}
@Override
public void write(int ordinal, double value) {
assertIndexIsValid(ordinal);
writeDouble(getElementOffset(ordinal), value);
}
@Override
public void write(int ordinal, Decimal input, int precision, int scale) {
// make sure Decimal object has the same scale as DecimalType
assertIndexIsValid(ordinal);
if (input != null && input.changePrecision(precision, scale)) {
if (precision <= Decimal.MAX_LONG_DIGITS()) {
write(ordinal, input.toUnscaledLong());
} else {
final byte[] bytes = input.toJavaBigDecimal().unscaledValue().toByteArray();
final int numBytes = bytes.length;
assert numBytes <= 16;
int roundedSize = ByteArrayMethods.roundNumberOfBytesToNearestWord(numBytes);
holder.grow(roundedSize);
zeroOutPaddingBytes(numBytes);
// Write the bytes to the variable length portion.
Platform.copyMemory(
bytes, Platform.BYTE_ARRAY_OFFSET, getBuffer(), cursor(), numBytes);
setOffsetAndSize(ordinal, numBytes);
// move the cursor forward with 8-bytes boundary
increaseCursor(roundedSize);
}
} else {
setNull(ordinal);
}
}
@Override
public void write(int ordinal, CalendarInterval input) {
assertIndexIsValid(ordinal);
// the UnsafeWriter version of write(int, CalendarInterval) doesn't handle
// null intervals appropriately when the container is an array, so we handle
// that case here.
if (input == null) {
setNull(ordinal);
} else {
super.write(ordinal, input);
}
}
}
| UnsafeArrayWriter |
java | spring-projects__spring-security | webauthn/src/main/java/org/springframework/security/web/webauthn/api/PublicKeyCredentialRpEntity.java | {
"start": 2327,
"end": 3327
} | class ____ {
private @Nullable String name;
private @Nullable String id;
private PublicKeyCredentialRpEntityBuilder() {
}
/**
* Sets the {@link #getName()} property.
* @param name the name property
* @return the {@link PublicKeyCredentialRpEntityBuilder}
*/
public PublicKeyCredentialRpEntityBuilder name(String name) {
this.name = name;
return this;
}
/**
* Sets the {@link #getId()} property.
* @param id the id
* @return the {@link PublicKeyCredentialRpEntityBuilder}
*/
public PublicKeyCredentialRpEntityBuilder id(String id) {
this.id = id;
return this;
}
/**
* Creates a new {@link PublicKeyCredentialRpEntity}.
* @return a new {@link PublicKeyCredentialRpEntity}.
*/
public PublicKeyCredentialRpEntity build() {
Assert.notNull(this.name, "name cannot be null");
Assert.notNull(this.id, "id cannot be null");
return new PublicKeyCredentialRpEntity(this.name, this.id);
}
}
}
| PublicKeyCredentialRpEntityBuilder |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/introspect/JsonPropertyRename5398Test.java | {
"start": 461,
"end": 747
} | class ____ {
private String prop;
@JsonProperty(value = "renamedProp")
public String getProp() {
return prop;
}
@JsonIgnore
public void setProp(String prop) {
this.prop = prop;
}
}
static | TestRename5398 |
java | spring-projects__spring-boot | module/spring-boot-integration/src/main/java/org/springframework/boot/integration/autoconfigure/IntegrationProperties.java | {
"start": 5120,
"end": 5363
} | class ____ {
private final Client client = new Client();
private final Server server = new Server();
public Client getClient() {
return this.client;
}
public Server getServer() {
return this.server;
}
public static | RSocket |
java | spring-projects__spring-security | core/src/test/java/org/springframework/security/authorization/method/AuthorizationMethodPointcutsTests.java | {
"start": 1164,
"end": 2961
} | class ____ {
@Test
public void forAnnotationsWhenAnnotationThenClassBasedAnnotationPointcut() {
Pointcut preAuthorize = AuthorizationMethodPointcuts.forAnnotations(PreAuthorize.class);
assertThat(AopUtils.canApply(preAuthorize, ClassController.class)).isTrue();
assertThat(AopUtils.canApply(preAuthorize, NoController.class)).isFalse();
}
@Test
public void forAnnotationsWhenAnnotationThenMethodBasedAnnotationPointcut() {
Pointcut preAuthorize = AuthorizationMethodPointcuts.forAnnotations(PreAuthorize.class);
assertThat(AopUtils.canApply(preAuthorize, MethodController.class)).isTrue();
}
@Test
public void forAnnotationsWhenAnnotationThenClassInheritancePointcut() {
Pointcut preAuthorize = AuthorizationMethodPointcuts.forAnnotations(PreAuthorize.class);
assertThat(AopUtils.canApply(preAuthorize, InterfacedClassController.class)).isTrue();
}
@Test
public void forAnnotationsWhenAnnotationThenMethodInheritancePointcut() {
Pointcut preAuthorize = AuthorizationMethodPointcuts.forAnnotations(PreAuthorize.class);
assertThat(AopUtils.canApply(preAuthorize, InterfacedMethodController.class)).isTrue();
}
@Test
public void forAnnotationsWhenAnnotationThenAnnotationClassInheritancePointcut() {
Pointcut preAuthorize = AuthorizationMethodPointcuts.forAnnotations(PreAuthorize.class);
assertThat(AopUtils.canApply(preAuthorize, InterfacedAnnotationClassController.class)).isTrue();
}
@Test
public void forAnnotationsWhenAnnotationThenAnnotationMethodInheritancePointcut() {
Pointcut preAuthorize = AuthorizationMethodPointcuts.forAnnotations(PreAuthorize.class);
assertThat(AopUtils.canApply(preAuthorize, InterfacedAnnotationMethodController.class)).isTrue();
}
@PreAuthorize("hasAuthority('APP')")
public static | AuthorizationMethodPointcutsTests |
java | resilience4j__resilience4j | resilience4j-test/src/main/java/io/github/resilience4j/test/TestContextPropagators.java | {
"start": 890,
"end": 980
} | class ____ {
private TestContextPropagators(){}
public static | TestContextPropagators |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/language/BeanLanguageInvalidOGNLTest.java | {
"start": 2450,
"end": 2620
} | class ____ {
private final Map<?, ?> map = new LinkedHashMap<>();
public Map<?, ?> getOther() {
return map;
}
}
}
| MyReallyCoolBean |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java | {
"start": 18002,
"end": 18728
} | class ____<R> extends PrimitiveBuilder<R, LongBuilder<R>> {
private LongBuilder(Completion<R> context, NameContext names) {
super(context, names, Schema.Type.LONG);
}
private static <R> LongBuilder<R> create(Completion<R> context, NameContext names) {
return new LongBuilder<>(context, names);
}
@Override
protected LongBuilder<R> self() {
return this;
}
/** complete building this type, return control to context **/
public R endLong() {
return super.end();
}
}
/**
* Builds an Avro float type with optional properties. Set properties with
* {@link #prop(String, String)}, and finalize with {@link #endFloat()}
**/
public static final | LongBuilder |
java | apache__flink | flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/nfa/sharedbuffer/NodeId.java | {
"start": 2492,
"end": 5322
} | class ____ extends TypeSerializerSingleton<NodeId> {
private static final long serialVersionUID = 9209498028181378582L;
/**
* NOTE: this field should actually be final. The reason that it isn't final is due to
* backward compatible deserialization paths. See {@link #readObject(ObjectInputStream)}.
*/
private TypeSerializer<EventId> eventIdSerializer;
public NodeIdSerializer() {
this(EventId.EventIdSerializer.INSTANCE);
}
private NodeIdSerializer(TypeSerializer<EventId> eventIdSerializer) {
this.eventIdSerializer = checkNotNull(eventIdSerializer);
}
@Override
public boolean isImmutableType() {
return true;
}
@Override
public NodeId createInstance() {
return null;
}
@Override
public NodeId copy(NodeId from) {
return new NodeId(from.eventId, from.pageName);
}
@Override
public NodeId copy(NodeId from, NodeId reuse) {
return copy(from);
}
@Override
public int getLength() {
return -1;
}
@Override
public void serialize(NodeId record, DataOutputView target) throws IOException {
if (record != null) {
target.writeByte(1);
eventIdSerializer.serialize(record.eventId, target);
StringValue.writeString(record.pageName, target);
} else {
target.writeByte(0);
}
}
@Override
public NodeId deserialize(DataInputView source) throws IOException {
byte b = source.readByte();
if (b == 0) {
return null;
}
EventId eventId = eventIdSerializer.deserialize(source);
String pageName = StringValue.readString(source);
return new NodeId(eventId, pageName);
}
@Override
public NodeId deserialize(NodeId reuse, DataInputView source) throws IOException {
return deserialize(source);
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
target.writeByte(source.readByte());
eventIdSerializer.copy(source, target);
StringValue.copyString(source, target);
}
// ------------------------------------------------------------------------
@Override
public TypeSerializerSnapshot<NodeId> snapshotConfiguration() {
return new NodeIdSerializerSnapshot(this);
}
/** Serializer configuration snapshot for compatibility and format evolution. */
@SuppressWarnings("WeakerAccess")
public static final | NodeIdSerializer |
java | apache__camel | components/camel-undertow/src/main/java/org/apache/camel/component/undertow/UndertowRestHeaderFilterStrategy.java | {
"start": 958,
"end": 1602
} | class ____ extends UndertowHeaderFilterStrategy {
private final String templateUri;
private final String queryParameters;
public UndertowRestHeaderFilterStrategy(String templateUri, String queryParameters) {
this.templateUri = templateUri;
this.queryParameters = queryParameters;
}
@Override
public boolean applyFilterToCamelHeaders(String headerName, Object headerValue, Exchange exchange) {
boolean answer = super.applyFilterToCamelHeaders(headerName, headerValue, exchange);
return filterCheck(templateUri, queryParameters, headerName, answer);
}
}
| UndertowRestHeaderFilterStrategy |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/format/StringJsonDocument.java | {
"start": 292,
"end": 602
} | class ____ {
/**
* Processing states. This can be (nested)Object or Arrays.
* When processing objects, values are stored as [,]"key":"value"[,]. we add separator when adding new key
* When processing arrays, values are stored as [,]"value"[,]. we add separator when adding new value
*/
| StringJsonDocument |
java | spring-projects__spring-boot | module/spring-boot-session/src/testFixtures/java/org/springframework/boot/session/autoconfigure/AbstractSessionAutoConfigurationTests.java | {
"start": 1369,
"end": 1589
} | class ____ Spring Session auto-configuration tests when the backing store cannot be
* reactive.
*
* @author Stephane Nicoll
* @author Weix Sun
* @see AbstractSessionReactiveAutoConfigurationTests
*/
public abstract | for |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/configuration/spr8955/Spr8955Parent.java | {
"start": 812,
"end": 855
} | class ____ {
@Component
static | Spr8955Parent |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/logger/LogInspectionHelper.java | {
"start": 546,
"end": 1725
} | class ____ {
private LogInspectionHelper() {
}
public static void registerListener(LogListener listener, BasicLogger log) {
convertType( log ).registerListener( listener );
}
public static void clearAllListeners(BasicLogger log) {
convertType( log ).clearAllListeners();
}
private static Log4J2DelegatingLogger convertType(BasicLogger log) {
if ( log instanceof DelegatingBasicLogger wrapper) {
//Most loggers generated via the annotation processor are of this type
try {
return extractFromWrapper( wrapper );
}
catch (Exception cause) {
throw new RuntimeException( cause );
}
}
if ( ! ( log instanceof Log4J2DelegatingLogger ) ) {
throw new AssertionFailure( "Unexpected log type: JBoss Logger didn't register the custom TestableLoggerProvider as logger provider" );
}
return (Log4J2DelegatingLogger) log;
}
private static Log4J2DelegatingLogger extractFromWrapper(DelegatingBasicLogger wrapper) throws Exception {
Field field = DelegatingBasicLogger.class.getDeclaredField( "log" );
field.setAccessible( true );
Object object = field.get( wrapper );
return convertType( (BasicLogger) object );
}
}
| LogInspectionHelper |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/operator/arithmetic/Arithmetics.java | {
"start": 757,
"end": 6783
} | interface ____ extends BiFunction<Number, Number, Number> {
default Object wrap(Object l, Object r) {
if ((l instanceof Number) == false) {
throw new QlIllegalArgumentException("A number is required; received {}", l);
}
if ((r instanceof Number) == false) {
throw new QlIllegalArgumentException("A number is required; received {}", r);
}
return apply((Number) l, (Number) r);
}
}
public static Number add(Number l, Number r) {
if (l == null || r == null) {
return null;
}
if (l instanceof Double || r instanceof Double) {
return Double.valueOf(l.doubleValue() + r.doubleValue());
}
if (l instanceof Float || r instanceof Float) {
return Float.valueOf(l.floatValue() + r.floatValue());
}
if (l instanceof BigInteger || r instanceof BigInteger) {
BigInteger bi = asBigInteger(l).add(asBigInteger(r));
return asUnsignedLong(bi);
}
if (l instanceof Long || r instanceof Long) {
return Long.valueOf(Math.addExact(l.longValue(), r.longValue()));
}
return Integer.valueOf(Math.addExact(l.intValue(), r.intValue()));
}
public static Number sub(Number l, Number r) {
if (l == null || r == null) {
return null;
}
if (l instanceof Double || r instanceof Double) {
return Double.valueOf(l.doubleValue() - r.doubleValue());
}
if (l instanceof Float || r instanceof Float) {
return Float.valueOf(l.floatValue() - r.floatValue());
}
if (l instanceof BigInteger || r instanceof BigInteger) {
BigInteger bi = asBigInteger(l).subtract(asBigInteger(r));
return asUnsignedLong(bi);
}
if (l instanceof Long || r instanceof Long) {
return Long.valueOf(Math.subtractExact(l.longValue(), r.longValue()));
}
return Integer.valueOf(Math.subtractExact(l.intValue(), r.intValue()));
}
public static Number mul(Number l, Number r) {
if (l == null || r == null) {
return null;
}
if (l instanceof Double || r instanceof Double) {
return Double.valueOf(l.doubleValue() * r.doubleValue());
}
if (l instanceof Float || r instanceof Float) {
return Float.valueOf(l.floatValue() * r.floatValue());
}
if (l instanceof BigInteger || r instanceof BigInteger) {
BigInteger bi = asBigInteger(l).multiply(asBigInteger(r));
// Note: in case of unsigned_long overflow (or underflow, with negative fixed point numbers), the exception is thrown.
// This is unlike the way some other traditional RDBMS that support unsigned types work, which simply promote the result to a
// floating point type, but in line with how our implementation treats other fixed point type operations (i.e. Math#xxExact()).
return asUnsignedLong(bi);
}
if (l instanceof Long || r instanceof Long) {
return Long.valueOf(Math.multiplyExact(l.longValue(), r.longValue()));
}
return Integer.valueOf(Math.multiplyExact(l.intValue(), r.intValue()));
}
public static Number div(Number l, Number r) {
if (l == null || r == null) {
return null;
}
if (l instanceof Double || r instanceof Double) {
return l.doubleValue() / r.doubleValue();
}
if (l instanceof Float || r instanceof Float) {
return l.floatValue() / r.floatValue();
}
if (l instanceof BigInteger || r instanceof BigInteger) {
BigInteger bi = asBigInteger(l).divide(asBigInteger(r));
return asUnsignedLong(bi);
}
if (l instanceof Long || r instanceof Long) {
return l.longValue() / r.longValue();
}
return l.intValue() / r.intValue();
}
public static Number mod(Number l, Number r) {
if (l == null || r == null) {
return null;
}
if (l instanceof Double || r instanceof Double) {
return Double.valueOf(l.doubleValue() % r.doubleValue());
}
if (l instanceof Float || r instanceof Float) {
return Float.valueOf(l.floatValue() % r.floatValue());
}
if (l instanceof BigInteger || r instanceof BigInteger) {
BigInteger bi = asBigInteger(l).remainder(asBigInteger(r));
return asUnsignedLong(bi);
}
if (l instanceof Long || r instanceof Long) {
return Long.valueOf(l.longValue() % r.longValue());
}
return l.intValue() % r.intValue();
}
static Number negate(Number n) {
if (n == null) {
return null;
}
if (n instanceof Double) {
double d = n.doubleValue();
if (d == Double.MIN_VALUE) {
throw new ArithmeticException("double overflow");
}
return Double.valueOf(-n.doubleValue());
}
if (n instanceof Float) {
float f = n.floatValue();
if (f == Float.MIN_VALUE) {
throw new ArithmeticException("float overflow");
}
return Float.valueOf(-n.floatValue());
}
if (n instanceof BigInteger) {
if (((BigInteger) n).signum() != 0) {
throw new ArithmeticException("unsigned_long overflow"); // in the scope of the unsigned_long type
}
return n;
}
if (n instanceof Long) {
return Long.valueOf(Math.negateExact(n.longValue()));
}
return Integer.valueOf(Math.negateExact(n.intValue()));
}
public static BigInteger asBigInteger(Number n) {
return n instanceof BigInteger ? (BigInteger) n : BigInteger.valueOf(n.longValue());
}
}
| NumericArithmetic |
java | apache__camel | components/camel-rocketmq/src/main/java/org/apache/camel/component/rocketmq/RocketMQProducer.java | {
"start": 2072,
"end": 10787
} | class ____ extends DefaultAsyncProducer {
public static final String GENERATE_MESSAGE_KEY_PREFIX = "camel-rocketmq-";
private static final Logger LOG = LoggerFactory.getLogger(RocketMQProducer.class);
private final AtomicBoolean started = new AtomicBoolean(false);
private DefaultMQProducer mqProducer;
private ReplyManager replyManager;
public RocketMQProducer(RocketMQEndpoint endpoint) {
super(endpoint);
}
@Override
public RocketMQEndpoint getEndpoint() {
return (RocketMQEndpoint) super.getEndpoint();
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
if (!isRunAllowed()) {
if (exchange.getException() == null) {
exchange.setException(new RejectedExecutionException());
}
callback.done(true);
return true;
}
try {
LOG.trace("Exchange Pattern {}", exchange.getPattern());
if (exchange.getPattern().isOutCapable()) {
return processInOut(exchange, callback);
} else {
return processInOnly(exchange, callback);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
exchange.setException(e);
callback.done(true);
return true;
} catch (Exception e) {
exchange.setException(e);
callback.done(true);
return true;
}
}
protected boolean processInOut(final Exchange exchange, final AsyncCallback callback)
throws RemotingException, MQClientException, InterruptedException, NoTypeConversionAvailableException {
org.apache.camel.Message in = exchange.getIn();
Message message = new Message();
message.setTopic(in.getHeader(RocketMQConstants.OVERRIDE_TOPIC_NAME, () -> getEndpoint().getTopicName(), String.class));
message.setTags(in.getHeader(RocketMQConstants.OVERRIDE_TAG, () -> getEndpoint().getSendTag(), String.class));
message.setBody(exchange.getContext().getTypeConverter().mandatoryConvertTo(byte[].class, exchange, in.getBody()));
message.setKeys(in.getHeader(RocketMQConstants.OVERRIDE_MESSAGE_KEY, "", String.class));
initReplyManager();
String generateKey = GENERATE_MESSAGE_KEY_PREFIX + getEndpoint().getCamelContext().getUuidGenerator().generateUuid();
message.setKeys(Arrays.asList(Optional.ofNullable(message.getKeys()).orElse(""), generateKey));
LOG.debug("RocketMQ Producer sending {}", message);
mqProducer.send(message, new SendCallback() {
@Override
public void onSuccess(SendResult sendResult) {
if (!SendStatus.SEND_OK.equals(sendResult.getSendStatus())) {
exchange.setException(new SendFailedException(sendResult.toString()));
callback.done(false);
return;
}
if (replyManager == null) {
LOG.warn("replyToTopic not set! Will not wait for reply.");
callback.done(false);
return;
}
replyManager.registerReply(replyManager, exchange, callback, generateKey,
getEndpoint().getRequestTimeoutMillis());
}
@Override
public void onException(Throwable e) {
try {
replyManager.cancelMessageKey(generateKey);
exchange.setException(e);
} finally {
callback.done(false);
}
}
});
return false;
}
protected void initReplyManager() {
if (!started.get()) {
lock.lock();
try {
if (started.get()) {
return;
}
LOG.debug("Starting reply manager");
ClassLoader current = Thread.currentThread().getContextClassLoader();
ClassLoader ac = getEndpoint().getCamelContext().getApplicationContextClassLoader();
try {
if (ac != null) {
Thread.currentThread().setContextClassLoader(ac);
}
if (getEndpoint().getReplyToTopic() != null) {
replyManager = createReplyManager();
LOG.debug("Using RocketMQReplyManager: {} to process replies from topic {}", replyManager,
getEndpoint().getReplyToTopic());
}
} catch (Exception e) {
throw new FailedToCreateProducerException(getEndpoint(), e);
} finally {
if (ac != null) {
Thread.currentThread().setContextClassLoader(current);
}
}
started.set(true);
} finally {
lock.unlock();
}
}
}
protected void unInitReplyManager() {
try {
if (replyManager != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Stopping RocketMQReplyManager: {} from processing replies from : {}", replyManager,
getEndpoint().getReplyToTopic());
}
ServiceHelper.stopService(replyManager);
}
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
} finally {
started.set(false);
}
}
private ReplyManager createReplyManager() {
RocketMQReplyManagerSupport replyManager = new RocketMQReplyManagerSupport(getEndpoint().getCamelContext());
replyManager.setEndpoint(getEndpoint());
String name = "RocketMQReplyManagerTimeoutChecker[" + getEndpoint().getTopicName() + "]";
ScheduledExecutorService scheduledExecutorService
= getEndpoint().getCamelContext().getExecutorServiceManager().newSingleThreadScheduledExecutor(this, name);
replyManager.setScheduledExecutorService(scheduledExecutorService);
LOG.debug("Starting ReplyManager: {}", name);
ServiceHelper.startService(replyManager);
return replyManager;
}
protected boolean processInOnly(Exchange exchange, AsyncCallback callback)
throws NoTypeConversionAvailableException, InterruptedException, RemotingException, MQClientException {
org.apache.camel.Message in = exchange.getIn();
Message message = new Message();
message.setTopic(in.getHeader(RocketMQConstants.OVERRIDE_TOPIC_NAME, () -> getEndpoint().getTopicName(), String.class));
message.setTags(in.getHeader(RocketMQConstants.OVERRIDE_TAG, () -> getEndpoint().getSendTag(), String.class));
message.setBody(exchange.getContext().getTypeConverter().mandatoryConvertTo(byte[].class, exchange, in.getBody()));
message.setKeys(in.getHeader(RocketMQConstants.OVERRIDE_MESSAGE_KEY, "", String.class));
LOG.debug("RocketMQ Producer sending {}", message);
boolean waitForSendResult = getEndpoint().isWaitForSendResult();
mqProducer.send(message, new SendCallback() {
@Override
public void onSuccess(SendResult sendResult) {
if (!SendStatus.SEND_OK.equals(sendResult.getSendStatus())) {
exchange.setException(new SendFailedException(sendResult.toString()));
}
callback.done(!waitForSendResult);
}
@Override
public void onException(Throwable e) {
exchange.setException(e);
callback.done(!waitForSendResult);
}
});
// return false to wait send callback
return !waitForSendResult;
}
@Override
protected void doStart() throws Exception {
this.mqProducer = new DefaultMQProducer(
getEndpoint().getProducerGroup(),
RocketMQAclUtils.getAclRPCHook(getEndpoint().getAccessKey(), getEndpoint().getSecretKey()));
this.mqProducer.setNamesrvAddr(getEndpoint().getNamesrvAddr());
this.mqProducer.setNamespaceV2(getEndpoint().getNamespace());
this.mqProducer.setEnableTrace(getEndpoint().isEnableTrace());
this.mqProducer.setAccessChannel(AccessChannel.valueOf(getEndpoint().getAccessChannel()));
this.mqProducer.start();
}
@Override
protected void doStop() {
unInitReplyManager();
this.mqProducer.shutdown();
this.mqProducer = null;
}
}
| RocketMQProducer |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/AbstractStringBasedJpaQueryUnitTests.java | {
"start": 7589,
"end": 7747
} | class ____ {
List<Object> values = new ArrayList<>(3);
public Arguments(Object... values) {
this.values = Arrays.asList(values);
}
}
| Arguments |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/javatime/key/LocalTimeAsKeyTest.java | {
"start": 391,
"end": 1835
} | class ____ extends DateTimeTestBase
{
private static final LocalTime TIME_0 = LocalTime.ofSecondOfDay(0);
/*
* Seconds are omitted if possible
*/
private static final String TIME_0_STRING = "00:00";
private static final LocalTime TIME = LocalTime.of(3, 14, 15, 920 * 1000 * 1000);
private static final String TIME_STRING = "03:14:15.920";
private static final TypeReference<Map<LocalTime, String>> TYPE_REF = new TypeReference<Map<LocalTime, String>>() {
};
private final ObjectMapper MAPPER = newMapper();
private final ObjectReader READER = MAPPER.readerFor(TYPE_REF);
@Test
public void testSerialization0() throws Exception {
assertEquals(mapAsString(TIME_0_STRING, "test"),
MAPPER.writeValueAsString(asMap(TIME_0, "test")));
}
@Test
public void testSerialization1() throws Exception {
assertEquals(mapAsString(TIME_STRING, "test"),
MAPPER.writeValueAsString(asMap(TIME, "test")));
}
@Test
public void testDeserialization0() throws Exception {
assertEquals(asMap(TIME_0, "test"), READER.readValue(mapAsString(TIME_0_STRING, "test")),
"Value is incorrect");
}
@Test
public void testDeserialization1() throws Exception {
assertEquals(asMap(TIME, "test"), READER.readValue(mapAsString(TIME_STRING, "test")),
"Value is incorrect");
}
}
| LocalTimeAsKeyTest |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/event/EventListenerSupport.java | {
"start": 7504,
"end": 7656
} | class ____ to create the JDK
* dynamic proxy.
*
* @param listenerInterface the listener interface.
* @param classLoader the | loader |
java | apache__dubbo | dubbo-plugin/dubbo-rest-jaxrs/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/support/jaxrs/AbstractJaxrsArgumentResolver.java | {
"start": 1242,
"end": 1562
} | class ____ extends AbstractAnnotationBaseArgumentResolver {
@Override
protected NamedValueMeta createNamedValueMeta(ParameterMeta param, AnnotationMeta<Annotation> anno) {
return new NamedValueMeta(anno.getValue(), Helper.isRequired(param), Helper.defaultValue(param));
}
}
| AbstractJaxrsArgumentResolver |
java | google__dagger | hilt-core/main/java/dagger/hilt/DefineComponent.java | {
"start": 1896,
"end": 2013
} | class ____ hint that it doesn't need to be nested.
@Retention(CLASS)
@Target(TYPE)
@GeneratesRootInput
public @ | to |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/sql/partition/MySQLPartitionedTableTest.java | {
"start": 786,
"end": 1417
} | class ____ {
@Test void test(EntityManagerFactoryScope scope) {
scope.inTransaction( session -> {
Partitioned partitioned = new Partitioned();
partitioned.id = 1L;
partitioned.pid = 500L;
session.persist( partitioned );
} );
scope.inTransaction( session -> {
Partitioned partitioned = session.find( Partitioned.class, 1L );
assertNotNull( partitioned );
partitioned.text = "updated";
} );
}
@Entity
@Table(name = "myparts",
options =
"""
PARTITION BY RANGE (pid) (
PARTITION p1 VALUES LESS THAN (1000),
PARTITION p2 VALUES LESS THAN (2000)
)
""")
static | MySQLPartitionedTableTest |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/commons/util/pkg1/subpkg/SubclassWithNonStaticPackagePrivateTempDirField.java | {
"start": 633,
"end": 782
} | class ____ extends SuperclassWithStaticPackagePrivateTempDirField {
@InstanceLevelDir
Path tempDir;
}
| SubclassWithNonStaticPackagePrivateTempDirField |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/HazelcastListComponentBuilderFactory.java | {
"start": 6809,
"end": 8043
} | class ____
extends AbstractComponentBuilder<HazelcastListComponent>
implements HazelcastListComponentBuilder {
@Override
protected HazelcastListComponent buildConcreteComponent() {
return new HazelcastListComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((HazelcastListComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((HazelcastListComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((HazelcastListComponent) component).setAutowiredEnabled((boolean) value); return true;
case "hazelcastInstance": ((HazelcastListComponent) component).setHazelcastInstance((com.hazelcast.core.HazelcastInstance) value); return true;
case "hazelcastMode": ((HazelcastListComponent) component).setHazelcastMode((java.lang.String) value); return true;
default: return false;
}
}
}
} | HazelcastListComponentBuilderImpl |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/builder/ToStringStyleTest.java | {
"start": 1167,
"end": 1442
} | class ____ {
/**
* Test String field.
*/
String name;
/**
* Test integer field.
*/
int age;
/**
* Test boolean field.
*/
boolean smoker;
}
private static final | Person |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/cfg/DeserializerFactoryConfig.java | {
"start": 337,
"end": 387
} | class ____ {@link DeserializerFactory}.
*/
public | for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/formula/FormulaBasicsTest.java | {
"start": 912,
"end": 2656
} | class ____ {
@BeforeEach
void setUp(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final Account account = new Account( );
account.setId( 1L );
account.setCredit( 5000d );
account.setRate( 1.25 / 100 );
session.persist( account );
} );
}
@Test
void testLoader(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final Account account = session.find( Account.class, 1L );
assertThat( account.getInterest(), is( 62.5d ));
} );
}
@Test
void testHQL(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final Account account = session.createQuery( "select a from Account a where a.id = :id", Account.class )
.setParameter( "id", 1L ).uniqueResult();
assertThat( account.getInterest(), is( 62.5d ));
} );
}
@Test
void testHqlAmbiguous(SessionFactoryScope scope) {
scope.inTransaction( session -> {
session.createQuery( "select a, b from Account a, Account b" ).list();
} );
}
@Test
void testCriteria(SessionFactoryScope scope) {
scope.inTransaction( session -> {
final CriteriaBuilder criteriaBuilder = scope.getSessionFactory().getCriteriaBuilder();
final CriteriaQuery<Account> criteria = criteriaBuilder.createQuery( Account.class );
final Root<Account> root = criteria.from( Account.class );
criteria.select( root );
criteria.where( criteriaBuilder.equal( root.get( "id" ), criteriaBuilder.literal( 1L ) ) );
final Account account = session.createQuery( criteria ).uniqueResult();
assertThat( account.getInterest(), is( 62.5d ));
} );
}
@AfterEach
void tearDown(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Entity(name = "Account")
public static | FormulaBasicsTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.