language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/fetchscroll/Site.java | {
"start": 250,
"end": 1344
} | class ____ implements Serializable {
private static final long serialVersionUID = 9213996389556805371L;
private Long id;
private String name;
private Customer customer;
@Id
@GeneratedValue
@Column(name = "SITE_ID", nullable = false, updatable = false)
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
@Column(name = "SITE_NAME", length = 40, nullable = false, updatable = false)
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CUSTOMER_ID", referencedColumnName = "CUSTOMER_ID", nullable = false, updatable = false)
public Customer getCustomer() {
return customer;
}
public void setCustomer(Customer customer) {
this.customer = customer;
}
@Override
public int hashCode() {
return 17 * 31 + id.hashCode();
}
@Override
public boolean equals(Object object) {
boolean result = false;
if(object instanceof Site) {
Site other = (Site)object;
result = other.getId().equals(id);
}
return result;
}
}
| Site |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/qualifiers/AdditionalQualifiersTest.java | {
"start": 4008,
"end": 4094
} | interface ____ {
}
@SuppressWarnings("serial")
public abstract | ToBeQualifier |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-hibernate/src/main/java/smoketest/jpa/repository/JpaNoteRepository.java | {
"start": 872,
"end": 1162
} | class ____ implements NoteRepository {
@PersistenceContext
@SuppressWarnings("NullAway.Init")
private EntityManager entityManager;
@Override
public List<Note> findAll() {
return this.entityManager.createQuery("SELECT n FROM Note n", Note.class).getResultList();
}
}
| JpaNoteRepository |
java | apache__camel | components/camel-rss/src/main/java/org/apache/camel/component/rss/AggregateRssFeedStrategy.java | {
"start": 1173,
"end": 2209
} | class ____ implements AggregationStrategy {
protected final Logger log = LoggerFactory.getLogger(AggregateRssFeedStrategy.class);
@Override
public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
if (oldExchange == null) {
return newExchange;
}
SyndFeed oldFeed = oldExchange.getIn().getBody(SyndFeed.class);
SyndFeed newFeed = newExchange.getIn().getBody(SyndFeed.class);
if (oldFeed != null && newFeed != null) {
List<SyndEntry> oldEntries = CastUtils.cast(oldFeed.getEntries());
List<SyndEntry> newEntries = CastUtils.cast(newFeed.getEntries());
List<SyndEntry> mergedList = new ArrayList<>(oldEntries.size() + newEntries.size());
mergedList.addAll(oldEntries);
mergedList.addAll(newEntries);
oldFeed.setEntries(mergedList);
} else {
log.debug("Could not merge exchanges. One body was null.");
}
return oldExchange;
}
}
| AggregateRssFeedStrategy |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/jmx/export/MBeanExporter.java | {
"start": 11515,
"end": 15515
} | interface ____ use
* for this exporter. Default is a {@code SimpleReflectiveMBeanInfoAssembler}.
* <p>The passed-in assembler can optionally implement the
* {@code AutodetectCapableMBeanInfoAssembler} interface, which enables it
* to participate in the exporter's MBean auto-detection process.
* @see org.springframework.jmx.export.assembler.SimpleReflectiveMBeanInfoAssembler
* @see org.springframework.jmx.export.assembler.AutodetectCapableMBeanInfoAssembler
* @see org.springframework.jmx.export.assembler.MetadataMBeanInfoAssembler
* @see #setAutodetect
*/
public void setAssembler(MBeanInfoAssembler assembler) {
this.assembler = assembler;
}
/**
* Set the implementation of the {@code ObjectNamingStrategy} interface
* to use for this exporter. Default is a {@code KeyNamingStrategy}.
* @see org.springframework.jmx.export.naming.KeyNamingStrategy
* @see org.springframework.jmx.export.naming.MetadataNamingStrategy
*/
public void setNamingStrategy(ObjectNamingStrategy namingStrategy) {
this.namingStrategy = namingStrategy;
}
/**
* Indicates whether Spring should ensure that {@link ObjectName ObjectNames}
* generated by the configured {@link ObjectNamingStrategy} for
* runtime-registered MBeans ({@link #registerManagedResource}) should get
* modified: to ensure uniqueness for every instance of a managed {@code Class}.
* <p>The default value is {@code true}.
* @see #registerManagedResource
* @see JmxUtils#appendIdentityToObjectName(javax.management.ObjectName, Object)
*/
public void setEnsureUniqueRuntimeObjectNames(boolean ensureUniqueRuntimeObjectNames) {
this.ensureUniqueRuntimeObjectNames = ensureUniqueRuntimeObjectNames;
}
/**
* Indicates whether the managed resource should be exposed on the
* {@link Thread#getContextClassLoader() thread context ClassLoader} before
* allowing any invocations on the MBean to occur.
* <p>The default value is {@code true}, exposing a {@link SpringModelMBean}
* which performs thread context ClassLoader management. Switch this flag off to
* expose a standard JMX {@link javax.management.modelmbean.RequiredModelMBean}.
*/
public void setExposeManagedResourceClassLoader(boolean exposeManagedResourceClassLoader) {
this.exposeManagedResourceClassLoader = exposeManagedResourceClassLoader;
}
/**
* Set the list of names for beans that should be excluded from auto-detection.
*/
public void setExcludedBeans(String... excludedBeans) {
this.excludedBeans.clear();
Collections.addAll(this.excludedBeans, excludedBeans);
}
/**
* Add the name of bean that should be excluded from auto-detection.
*/
public void addExcludedBean(String excludedBean) {
Assert.notNull(excludedBean, "ExcludedBean must not be null");
this.excludedBeans.add(excludedBean);
}
/**
* Set the {@code MBeanExporterListener}s that should be notified
* of MBean registration and unregistration events.
* @see MBeanExporterListener
*/
public void setListeners(MBeanExporterListener... listeners) {
this.listeners = listeners;
}
/**
* Set the {@link NotificationListenerBean NotificationListenerBeans}
* containing the
* {@link javax.management.NotificationListener NotificationListeners}
* that will be registered with the {@link MBeanServer}.
* @see #setNotificationListenerMappings(java.util.Map)
* @see NotificationListenerBean
*/
public void setNotificationListeners(NotificationListenerBean... notificationListeners) {
this.notificationListeners = notificationListeners;
}
/**
* Set the {@link NotificationListener NotificationListeners} to register
* with the {@link javax.management.MBeanServer}.
* <P>The key of each entry in the {@code Map} is a {@link String}
* representation of the {@link javax.management.ObjectName} or the bean
* name of the MBean the listener should be registered for. Specifying an
* asterisk ({@code *}) for a key will cause the listener to be
* associated with all MBeans registered by this | to |
java | netty__netty | codec-http3/src/main/java/io/netty/handler/codec/http3/Http3ControlStreamInboundHandler.java | {
"start": 2183,
"end": 9826
} | class ____ extends Http3FrameTypeInboundValidationHandler<Http3ControlStreamFrame> {
final boolean server;
private final ChannelHandler controlFrameHandler;
private final QpackEncoder qpackEncoder;
private final Http3ControlStreamOutboundHandler remoteControlStreamHandler;
private boolean firstFrameRead;
private Long receivedGoawayId;
private Long receivedMaxPushId;
Http3ControlStreamInboundHandler(boolean server, @Nullable ChannelHandler controlFrameHandler,
QpackEncoder qpackEncoder,
Http3ControlStreamOutboundHandler remoteControlStreamHandler) {
super(Http3ControlStreamFrame.class);
this.server = server;
this.controlFrameHandler = controlFrameHandler;
this.qpackEncoder = qpackEncoder;
this.remoteControlStreamHandler = remoteControlStreamHandler;
}
boolean isServer() {
return server;
}
boolean isGoAwayReceived() {
return receivedGoawayId != null;
}
long maxPushIdReceived() {
return receivedMaxPushId == null ? -1 : receivedMaxPushId;
}
private boolean forwardControlFrames() {
return controlFrameHandler != null;
}
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
super.handlerAdded(ctx);
// The user want's to be notified about control frames, add the handler to the pipeline.
if (controlFrameHandler != null) {
ctx.pipeline().addLast(controlFrameHandler);
}
}
@Override
void readFrameDiscarded(ChannelHandlerContext ctx, Object discardedFrame) {
if (!firstFrameRead && !(discardedFrame instanceof Http3SettingsFrame)) {
connectionError(ctx, Http3ErrorCode.H3_MISSING_SETTINGS, "Missing settings frame.", forwardControlFrames());
}
}
@Override
void channelRead(ChannelHandlerContext ctx, Http3ControlStreamFrame frame) throws QpackException {
boolean isSettingsFrame = frame instanceof Http3SettingsFrame;
if (!firstFrameRead && !isSettingsFrame) {
connectionError(ctx, H3_MISSING_SETTINGS, "Missing settings frame.", forwardControlFrames());
ReferenceCountUtil.release(frame);
return;
}
if (firstFrameRead && isSettingsFrame) {
connectionError(ctx, H3_FRAME_UNEXPECTED, "Second settings frame received.", forwardControlFrames());
ReferenceCountUtil.release(frame);
return;
}
firstFrameRead = true;
final boolean valid;
if (isSettingsFrame) {
valid = handleHttp3SettingsFrame(ctx, (Http3SettingsFrame) frame);
} else if (frame instanceof Http3GoAwayFrame) {
valid = handleHttp3GoAwayFrame(ctx, (Http3GoAwayFrame) frame);
} else if (frame instanceof Http3MaxPushIdFrame) {
valid = handleHttp3MaxPushIdFrame(ctx, (Http3MaxPushIdFrame) frame);
} else if (frame instanceof Http3CancelPushFrame) {
valid = handleHttp3CancelPushFrame(ctx, (Http3CancelPushFrame) frame);
} else {
// We don't need to do any special handling for Http3UnknownFrames as we either pass these to the next#
// handler or release these directly.
assert frame instanceof Http3UnknownFrame;
valid = true;
}
if (!valid || controlFrameHandler == null) {
ReferenceCountUtil.release(frame);
return;
}
// The user did specify ChannelHandler that should be notified about control stream frames.
// Let's forward the frame so the user can do something with it.
ctx.fireChannelRead(frame);
}
private boolean handleHttp3SettingsFrame(ChannelHandlerContext ctx, Http3SettingsFrame settingsFrame)
throws QpackException {
final QuicChannel quicChannel = (QuicChannel) ctx.channel().parent();
final QpackAttributes qpackAttributes = Http3.getQpackAttributes(quicChannel);
assert qpackAttributes != null;
final GenericFutureListener<Future<? super QuicStreamChannel>> closeOnFailure = future -> {
if (!future.isSuccess()) {
criticalStreamClosed(ctx);
}
};
if (qpackAttributes.dynamicTableDisabled()) {
qpackEncoder.configureDynamicTable(qpackAttributes, 0, 0);
return true;
}
quicChannel.createStream(QuicStreamType.UNIDIRECTIONAL,
new QPackEncoderStreamInitializer(qpackEncoder, qpackAttributes,
settingsFrame.getOrDefault(HTTP3_SETTINGS_QPACK_MAX_TABLE_CAPACITY, 0),
settingsFrame.getOrDefault(HTTP3_SETTINGS_QPACK_BLOCKED_STREAMS, 0)))
.addListener(closeOnFailure);
quicChannel.createStream(QuicStreamType.UNIDIRECTIONAL, new QPackDecoderStreamInitializer(qpackAttributes))
.addListener(closeOnFailure);
return true;
}
private boolean handleHttp3GoAwayFrame(ChannelHandlerContext ctx, Http3GoAwayFrame goAwayFrame) {
long id = goAwayFrame.id();
if (!server && id % 4 != 0) {
connectionError(ctx, H3_FRAME_UNEXPECTED, "GOAWAY received with ID of non-request stream.",
forwardControlFrames());
return false;
}
if (receivedGoawayId != null && id > receivedGoawayId) {
connectionError(ctx, H3_ID_ERROR,
"GOAWAY received with ID larger than previously received.", forwardControlFrames());
return false;
}
receivedGoawayId = id;
return true;
}
private boolean handleHttp3MaxPushIdFrame(ChannelHandlerContext ctx, Http3MaxPushIdFrame frame) {
long id = frame.id();
if (!server) {
connectionError(ctx, H3_FRAME_UNEXPECTED, "MAX_PUSH_ID received by client.",
forwardControlFrames());
return false;
}
if (receivedMaxPushId != null && id < receivedMaxPushId) {
connectionError(ctx, H3_ID_ERROR, "MAX_PUSH_ID reduced limit.", forwardControlFrames());
return false;
}
receivedMaxPushId = id;
return true;
}
private boolean handleHttp3CancelPushFrame(ChannelHandlerContext ctx, Http3CancelPushFrame cancelPushFrame) {
final Long maxPushId = server ? receivedMaxPushId : remoteControlStreamHandler.sentMaxPushId();
if (maxPushId == null || maxPushId < cancelPushFrame.id()) {
connectionError(ctx, H3_ID_ERROR, "CANCEL_PUSH received with an ID greater than MAX_PUSH_ID.",
forwardControlFrames());
return false;
}
return true;
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.fireChannelReadComplete();
// control streams should always be processed, no matter what the user is doing in terms of
// configuration and AUTO_READ.
Http3CodecUtils.readIfNoAutoRead(ctx);
}
@Override
public boolean isSharable() {
// Not sharable as it keeps state.
return false;
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
if (evt instanceof ChannelInputShutdownEvent) {
// See https://www.ietf.org/archive/id/draft-ietf-quic-qpack-19.html#section-4.2
criticalStreamClosed(ctx);
}
ctx.fireUserEventTriggered(evt);
}
private abstract static | Http3ControlStreamInboundHandler |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/SearchService.java | {
"start": 92503,
"end": 93571
} | enum ____ {
DFS {
@Override
void addResultsObject(SearchContext context) {
context.addDfsResult();
}
},
QUERY {
@Override
void addResultsObject(SearchContext context) {
context.addQueryResult();
}
},
RANK_FEATURE {
@Override
void addResultsObject(SearchContext context) {
context.addRankFeatureResult();
}
},
FETCH {
@Override
void addResultsObject(SearchContext context) {
context.addFetchResult();
}
},
/**
* None is intended for use in testing, when we might not progress all the way to generating results
*/
NONE {
@Override
void addResultsObject(SearchContext context) {
// this space intentionally left blank
}
};
abstract void addResultsObject(SearchContext context);
}
| ResultsType |
java | resilience4j__resilience4j | resilience4j-metrics/src/main/java/io/github/resilience4j/metrics/internal/TimerImpl.java | {
"start": 242,
"end": 1596
} | class ____ implements Timer {
public static final String SUCCESSFUL = "successful";
public static final String TOTAL = "total";
public static final String FAILED = "failed";
private final String timerName;
private final MetricRegistry metricRegistry;
private final TimerMetrics metrics;
private com.codahale.metrics.Timer successfulCallsTimer;
private com.codahale.metrics.Counter totalCallsCounter;
private com.codahale.metrics.Counter failedCallsCounter;
public TimerImpl(String timerName, MetricRegistry metricRegistry) {
this.timerName = timerName;
this.metricRegistry = metricRegistry;
this.successfulCallsTimer = metricRegistry.timer(name(timerName, SUCCESSFUL));
this.totalCallsCounter = metricRegistry.counter(name(timerName, TOTAL));
this.failedCallsCounter = metricRegistry.counter(name(timerName, FAILED));
this.metrics = new TimerMetrics();
}
@Override
public Timer.Context context() {
totalCallsCounter.inc();
return new ContextImpl();
}
@Override
public String getName() {
return timerName;
}
@Override
public MetricRegistry getMetricRegistry() {
return metricRegistry;
}
@Override
public Metrics getMetrics() {
return metrics;
}
public final | TimerImpl |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/query/KvStateRegistryListener.java | {
"start": 1206,
"end": 2421
} | interface ____ {
/**
* Notifies the listener about a registered KvState instance.
*
* @param jobId Job ID the KvState instance belongs to
* @param jobVertexId JobVertexID the KvState instance belongs to
* @param keyGroupRange Key group range the KvState instance belongs to
* @param registrationName Name under which the KvState is registered
* @param kvStateId ID of the KvState instance
*/
void notifyKvStateRegistered(
JobID jobId,
JobVertexID jobVertexId,
KeyGroupRange keyGroupRange,
String registrationName,
KvStateID kvStateId);
/**
* Notifies the listener about an unregistered KvState instance.
*
* @param jobId Job ID the KvState instance belongs to
* @param jobVertexId JobVertexID the KvState instance belongs to
* @param keyGroupRange Key group range the KvState instance belongs to
* @param registrationName Name under which the KvState is registered
*/
void notifyKvStateUnregistered(
JobID jobId,
JobVertexID jobVertexId,
KeyGroupRange keyGroupRange,
String registrationName);
}
| KvStateRegistryListener |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java | {
"start": 24869,
"end": 25997
} | class ____ implements ToXContentFragment {
private final Map<String, AtomicInteger> discoveryTypes;
DiscoveryTypes(final List<NodeInfo> nodeInfos) {
final Map<String, AtomicInteger> discoveryTypes = new HashMap<>();
for (final NodeInfo nodeInfo : nodeInfos) {
final Settings settings = nodeInfo.getSettings();
final String discoveryType = DiscoveryModule.DISCOVERY_TYPE_SETTING.get(settings);
discoveryTypes.computeIfAbsent(discoveryType, k -> new AtomicInteger()).incrementAndGet();
}
this.discoveryTypes = Collections.unmodifiableMap(discoveryTypes);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("discovery_types");
for (final Map.Entry<String, AtomicInteger> entry : discoveryTypes.entrySet()) {
builder.field(entry.getKey(), entry.getValue().get());
}
builder.endObject();
return builder;
}
}
static | DiscoveryTypes |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/function/client/ClientResponse.java | {
"start": 9778,
"end": 12766
} | interface ____ {
/**
* Set the status code of the response.
* @param statusCode the new status code
* @return this builder
*/
Builder statusCode(HttpStatusCode statusCode);
/**
* Set the raw status code of the response.
* @param statusCode the new status code
* @return this builder
* @since 5.1.9
*/
Builder rawStatusCode(int statusCode);
/**
* Add the given header value(s) under the given name.
* @param headerName the header name
* @param headerValues the header value(s)
* @return this builder
* @see HttpHeaders#add(String, String)
*/
Builder header(String headerName, String... headerValues);
/**
* Manipulate this response's headers with the given consumer.
* <p>The headers provided to the consumer are "live", so that the consumer
* can be used to {@linkplain HttpHeaders#set(String, String) overwrite}
* existing header values, {@linkplain HttpHeaders#remove(String) remove}
* values, or use any of the other {@link HttpHeaders} methods.
* @param headersConsumer a function that consumes the {@code HttpHeaders}
* @return this builder
*/
Builder headers(Consumer<HttpHeaders> headersConsumer);
/**
* Add a cookie with the given name and value(s).
* @param name the cookie name
* @param values the cookie value(s)
* @return this builder
*/
Builder cookie(String name, String... values);
/**
* Manipulate this response's cookies with the given consumer.
* <p>The map provided to the consumer is "live", so that the consumer can be used to
* {@linkplain MultiValueMap#set(Object, Object) overwrite} existing cookie values,
* {@linkplain MultiValueMap#remove(Object) remove} values, or use any of the other
* {@link MultiValueMap} methods.
* @param cookiesConsumer a function that consumes the cookies map
* @return this builder
*/
Builder cookies(Consumer<MultiValueMap<String, ResponseCookie>> cookiesConsumer);
/**
* Transform the response body, if set in the builder.
* @param transformer the transformation function to use
* @return this builder
* @since 5.3
*/
Builder body(Function<Flux<DataBuffer>, Flux<DataBuffer>> transformer);
/**
* Set the body of the response.
* <p><strong>Note:</strong> This method will drain the existing body,
* if set in the builder.
* @param body the new body to use
* @return this builder
*/
Builder body(Flux<DataBuffer> body);
/**
* Set the body of the response to the UTF-8 encoded bytes of the given string.
* <p><strong>Note:</strong> This method will drain the existing body,
* if set in the builder.
* @param body the new body.
* @return this builder
*/
Builder body(String body);
/**
* Set the request associated with the response.
* @param request the request
* @return this builder
* @since 5.2
*/
Builder request(HttpRequest request);
/**
* Build the response.
*/
ClientResponse build();
}
}
| Builder |
java | bumptech__glide | annotation/compiler/test/src/test/java/com/bumptech/glide/annotation/compiler/InvalidGlideExtensionTest.java | {
"start": 2953,
"end": 3965
} | class ____ {",
" private ConstructorParametersExtension(int failParam) {}",
" public void doSomething() {}",
"}"));
fail("Failed to get expected exception");
} catch (RuntimeException e) {
Throwable cause = e.getCause();
Truth.assertThat(cause.getMessage()).contains("parameters in the constructor");
Truth.assertThat(cause.getMessage()).contains("ConstructorParametersExtension");
}
}
@Test
public void compilation_withNonStaticMethod_succeeds() {
Compilation compilation =
javac()
.withProcessors(new GlideAnnotationProcessor())
.compile(
emptyAppModule(),
JavaFileObjects.forSourceLines(
"Extension",
"package com.bumptech.glide.test;",
"import com.bumptech.glide.annotation.GlideExtension;",
"@GlideExtension",
"public | ConstructorParametersExtension |
java | apache__hadoop | hadoop-cloud-storage-project/hadoop-huaweicloud/src/test/java/org/apache/hadoop/fs/obs/TestOBSFileContextCreateMkdir.java | {
"start": 1410,
"end": 2635
} | class ____ extends
FileContextCreateMkdirBaseTest {
@BeforeAll
public static void skipTestCheck() {
assumeTrue(OBSContract.isContractTestEnabled());
}
@SuppressFBWarnings("ST_WRITE_TO_STATIC_FROM_INSTANCE_METHOD")
@Override
public void setUp() throws Exception {
Configuration conf = OBSContract.getConfiguration();
conf.addResource(OBSContract.CONTRACT_XML);
String fileSystem = conf.get(OBSTestConstants.TEST_FS_OBS_NAME);
if (fileSystem == null || fileSystem.trim().length() == 0) {
throw new Exception("Default file system not configured.");
}
URI uri = new URI(fileSystem);
FileSystem fs = OBSTestUtils.createTestFileSystem(conf);
if (fc == null) {
this.fc = FileContext.getFileContext(new DelegateToFileSystem(uri, fs,
conf, fs.getScheme(), false) {
}, conf);
}
super.setUp();
}
@Override
protected FileContextTestHelper createFileContextHelper() {
// On Windows, root directory path is created from local running
// directory.
// obs does not support ':' as part of the path which results in
// failure.
return new FileContextTestHelper(UUID.randomUUID().toString());
}
}
| TestOBSFileContextCreateMkdir |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/support/hierarchical/WorkerThreadPoolHierarchicalTestExecutorService.java | {
"start": 20697,
"end": 21660
} | class ____ extends BlockingAwareFuture<@Nullable Void> {
private final WorkQueue.Entry entry;
WorkStealingFuture(WorkQueue.Entry entry) {
super(entry.future);
this.entry = entry;
}
@Override
protected @Nullable Void handle(Callable<@Nullable Void> callable) throws Exception {
var workerThread = WorkerThread.get();
if (workerThread == null || entry.future.isDone()) {
return callable.call();
}
workerThread.tryToStealWork(entry, BlockingMode.BLOCKING);
if (entry.future.isDone()) {
return callable.call();
}
workerThread.tryToStealWorkFromSubmittedChildren();
if (entry.future.isDone()) {
return callable.call();
}
LOGGER.trace(() -> "blocking for child task: " + entry.task);
return workerThread.runBlocking(entry.future::isDone, () -> {
try {
return callable.call();
}
catch (Exception ex) {
throw throwAsUncheckedException(ex);
}
});
}
}
private | WorkStealingFuture |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/atomic/referencearray/AtomicReferenceArrayAssert_hasSameSizeAs_with_Arrays_Test.java | {
"start": 1091,
"end": 2782
} | class ____ {
@Test
void should_pass_if_actual_object_array_has_same_size_as_other_object_array() {
assertThat(new String[] { "1", "2" }).hasSameSizeAs(new Byte[] { 2, 3 });
assertThat(new String[] { "1", "2" }).hasSameSizeAs(new String[] { "1", "2" });
}
@Test
void should_pass_if_actual_object_array_has_same_size_as_other_primitive_array() {
assertThat(new String[] { "1", "2" }).hasSameSizeAs(new byte[] { 2, 3 });
assertThat(new String[] { "1", "2" }).hasSameSizeAs(new int[] { 2, 3 });
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> {
final String[] actual = null;
assertThat(actual).hasSameSizeAs(new String[] { "1" });
}).withMessage(actualIsNull());
}
@Test
void should_fail_if_other_is_not_an_array() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(new AtomicReferenceArray<>(new Byte[] { 1,
2 })).hasSameSizeAs("a string"))
.withMessage("%nExpecting an array but was: \"a string\"".formatted());
}
@Test
void should_fail_if_size_of_actual_has_same_as_other_array() {
final String[] actual = array("Luke", "Yoda");
final String[] other = array("Yoda");
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> assertThat(actual).hasSameSizeAs(other))
.withMessage(shouldHaveSameSizeAs(actual, other, actual.length,
other.length).create());
}
}
| AtomicReferenceArrayAssert_hasSameSizeAs_with_Arrays_Test |
java | apache__dubbo | dubbo-compatible/src/main/java/com/alibaba/dubbo/rpc/Exporter.java | {
"start": 853,
"end": 1033
} | interface ____<T> extends org.apache.dubbo.rpc.Exporter<T> {
@Override
Invoker<T> getInvoker();
default void register() {}
default void unregister() {}
| Exporter |
java | google__dagger | javatests/dagger/internal/codegen/FullBindingGraphValidationTest.java | {
"start": 7230,
"end": 9018
} | interface ____ {",
" @Binds Object object(Long l);",
"}");
// Make sure the error doesn't show other bindings or a dependency trace afterwards.
private static final Pattern COMBINED_WITH_A_MODULE_HAS_ERRORS_MESSAGE =
endsWithMessage(
"\033[1;31m[Dagger/DuplicateBindings]\033[0m Object is bound multiple times:",
" @Binds Object AModule.object(String)",
" @Binds Object CombinedWithAModuleHasErrors.object(Long)",
" in component: [CombinedWithAModuleHasErrors]",
"",
"======================",
"Full classname legend:",
"======================",
"AModule: test.AModule",
"CombinedWithAModuleHasErrors: test.CombinedWithAModuleHasErrors",
"========================",
"End of classname legend:",
"========================");
@Test
public void moduleIncludingModuleWithCombinedErrors_validationTypeNone() {
CompilerTests.daggerCompiler(A_MODULE, COMBINED_WITH_A_MODULE_HAS_ERRORS)
.compile(
subject -> {
subject.hasErrorCount(0);
subject.hasWarningCount(0);
});
}
@Test
public void moduleIncludingModuleWithCombinedErrors_validationTypeError() {
CompilerTests.daggerCompiler(A_MODULE, COMBINED_WITH_A_MODULE_HAS_ERRORS)
.withProcessingOptions(ImmutableMap.of("dagger.fullBindingGraphValidation", "ERROR"))
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContainingMatch(COMBINED_WITH_A_MODULE_HAS_ERRORS_MESSAGE.pattern())
.onSource(COMBINED_WITH_A_MODULE_HAS_ERRORS)
.onLineContaining(" | CombinedWithAModuleHasErrors |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/test/StubS3ClientFactory.java | {
"start": 1653,
"end": 1724
} | class ____ implements S3ClientFactory {
/**
* The | StubS3ClientFactory |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/CheckpointStorageAccess.java | {
"start": 879,
"end": 1153
} | interface ____ implement methods acting as an administration role for
* checkpoint storage, which defined in {@link CheckpointStorageCoordinatorView}. And also implement
* methods acting as a worker role, which defined in {@link CheckpointStorageWorkerView}.
*/
public | should |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/admin/AlterReplicaLogDirsResult.java | {
"start": 1711,
"end": 3811
} | class ____ {
private final Map<TopicPartitionReplica, KafkaFuture<Void>> futures;
AlterReplicaLogDirsResult(Map<TopicPartitionReplica, KafkaFuture<Void>> futures) {
this.futures = futures;
}
/**
* Return a map from {@link TopicPartitionReplica} to {@link KafkaFuture} which holds the status of individual
* replica movement.
*
* To check the result of individual replica movement, call {@link KafkaFuture#get()} from the value contained
* in the returned map. If there is no error, it will return silently; if not, an {@link Exception} will be thrown
* like the following:
*
* <ul>
* <li>{@link CancellationException}: The task was canceled.</li>
* <li>{@link InterruptedException}: Interrupted while joining I/O thread.</li>
* <li>{@link ExecutionException}: Execution failed with the following causes:</li>
* <ul>
* <li>{@link ClusterAuthorizationException}: Authorization failed. (CLUSTER_AUTHORIZATION_FAILED, 31)</li>
* <li>{@link InvalidTopicException}: The specified topic name is too long. (INVALID_TOPIC_EXCEPTION, 17)</li>
* <li>{@link LogDirNotFoundException}: The specified log directory is not found in the broker. (LOG_DIR_NOT_FOUND, 57)</li>
* <li>{@link ReplicaNotAvailableException}: The replica does not exist on the broker. (REPLICA_NOT_AVAILABLE, 9)</li>
* <li>{@link KafkaStorageException}: Disk error occurred. (KAFKA_STORAGE_ERROR, 56)</li>
* <li>{@link UnknownServerException}: Unknown. (UNKNOWN_SERVER_ERROR, -1)</li>
* </ul>
* </ul>
*/
public Map<TopicPartitionReplica, KafkaFuture<Void>> values() {
return futures;
}
/**
* Return a {@link KafkaFuture} which succeeds on {@link KafkaFuture#get()} if all the replica movement have succeeded.
* if not, it throws an {@link Exception} described in {@link #values()} method.
*/
public KafkaFuture<Void> all() {
return KafkaFuture.allOf(futures.values().toArray(new KafkaFuture<?>[0]));
}
}
| AlterReplicaLogDirsResult |
java | spring-projects__spring-boot | loader/spring-boot-jarmode-tools/src/main/java/org/springframework/boot/jarmode/tools/Context.java | {
"start": 1153,
"end": 4464
} | class ____ {
private final File archiveFile;
private final File workingDir;
private final @Nullable String relativeDir;
/**
* Create a new {@link Context} instance.
*/
Context() {
this(getSourceArchiveFile(), Paths.get(".").toAbsolutePath().normalize().toFile());
}
/**
* Create a new {@link Context} instance with the specified value.
* @param archiveFile the source archive file
* @param workingDir the working directory
*/
Context(File archiveFile, File workingDir) {
Assert.state(isExistingFile(archiveFile), "Unable to find source archive");
Assert.state(isJarOrWar(archiveFile), "Source archive " + archiveFile + " must end with .jar or .war");
this.archiveFile = archiveFile;
this.workingDir = workingDir;
this.relativeDir = deduceRelativeDir(archiveFile.getParentFile(), this.workingDir);
}
private boolean isExistingFile(@Nullable File archiveFile) {
return archiveFile != null && archiveFile.isFile() && archiveFile.exists();
}
private boolean isJarOrWar(File jarFile) {
String name = jarFile.getName().toLowerCase(Locale.ROOT);
return name.endsWith(".jar") || name.endsWith(".war");
}
private static File getSourceArchiveFile() {
try {
ProtectionDomain domain = Context.class.getProtectionDomain();
CodeSource codeSource = (domain != null) ? domain.getCodeSource() : null;
URL location = (codeSource != null) ? codeSource.getLocation() : null;
File source = (location != null) ? findSource(location) : null;
if (source != null && source.exists()) {
return source.getAbsoluteFile();
}
throw new IllegalStateException("Unable to find source archive");
}
catch (Exception ex) {
throw new IllegalStateException("Unable to find source archive", ex);
}
}
private static File findSource(URL location) throws IOException, URISyntaxException {
URLConnection connection = location.openConnection();
if (connection instanceof JarURLConnection jarURLConnection) {
return getRootJarFile(jarURLConnection.getJarFile());
}
return new File(location.toURI());
}
private static File getRootJarFile(JarFile jarFile) {
String name = jarFile.getName();
int separator = name.indexOf("!/");
if (separator > 0) {
name = name.substring(0, separator);
}
return new File(name);
}
private @Nullable String deduceRelativeDir(File sourceDirectory, File workingDir) {
String sourcePath = sourceDirectory.getAbsolutePath();
String workingPath = workingDir.getAbsolutePath();
if (sourcePath.equals(workingPath) || !sourcePath.startsWith(workingPath)) {
return null;
}
String relativePath = sourcePath.substring(workingPath.length() + 1);
return !relativePath.isEmpty() ? relativePath : null;
}
/**
* Return the source archive file that is running in tools mode.
* @return the archive file
*/
File getArchiveFile() {
return this.archiveFile;
}
/**
* Return the current working directory.
* @return the working dir
*/
File getWorkingDir() {
return this.workingDir;
}
/**
* Return the directory relative to {@link #getWorkingDir()} that contains the archive
* or {@code null} if none relative directory can be deduced.
* @return the relative dir ending in {@code /} or {@code null}
*/
@Nullable String getRelativeArchiveDir() {
return this.relativeDir;
}
}
| Context |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/config/DefaultAuditingViaJavaConfigRepositoriesTests.java | {
"start": 1228,
"end": 1409
} | class ____ extends AbstractAuditingViaJavaConfigRepositoriesTests {
@Configuration
@EnableJpaAuditing
@Import(TestConfig.class)
static | DefaultAuditingViaJavaConfigRepositoriesTests |
java | reactor__reactor-core | reactor-core/src/jcstress/java/reactor/core/publisher/FluxConcatMapNoPrefetchStressTest.java | {
"start": 1807,
"end": 2416
} | class ____ extends FluxConcatMapNoPrefetchStressTest {
{
concatMapImmediate.state = FluxConcatMapNoPrefetchSubscriber.State.ACTIVE;
}
@Actor
public void inner() {
concatMapImmediate.innerNext("hello");
}
@Actor
public void outer() {
concatMapImmediate.onError(new RuntimeException("Boom!"));
}
@Arbiter
public void arbiter(ZZ_Result r) {
r.r1 = stressSubscriber.concurrentOnNext.get();
r.r2 = stressSubscriber.concurrentOnError.get();
}
}
@JCStressTest
@Outcome(id = {"1"}, expect = ACCEPTABLE, desc = "Exactly one onComplete")
@State
public static | OnErrorStressTest |
java | quarkusio__quarkus | integration-tests/gradle/src/main/resources/it-test-basic-project/src/integrationTest/java/org/acme/ExampleResourceIT.java | {
"start": 232,
"end": 288
} | class ____ extends ExampleResourceTest {
} | ExampleResourceIT |
java | hibernate__hibernate-orm | hibernate-spatial/src/test/java/org/hibernate/spatial/testing/dialects/h2gis/H2GISExpectationsFactory.java | {
"start": 462,
"end": 586
} | class ____ generates expected {@link NativeSQLStatement}s for
* GeoDB.
*
* @Author Jan Boonen, Geodan IT b.v.
*/
public | that |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/convert/DurationToNumberConverter.java | {
"start": 1227,
"end": 2277
} | class ____ implements GenericConverter {
@Override
public Set<ConvertiblePair> getConvertibleTypes() {
return Collections.singleton(new ConvertiblePair(Duration.class, Number.class));
}
@Override
public @Nullable Object convert(@Nullable Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
if (source == null) {
return null;
}
return convert((Duration) source, getDurationUnit(sourceType), targetType.getObjectType());
}
private @Nullable ChronoUnit getDurationUnit(TypeDescriptor sourceType) {
DurationUnit annotation = sourceType.getAnnotation(DurationUnit.class);
return (annotation != null) ? annotation.value() : null;
}
private Object convert(Duration source, @Nullable ChronoUnit unit, Class<?> type) {
try {
return type.getConstructor(String.class)
.newInstance(String.valueOf(DurationStyle.Unit.fromChronoUnit(unit).longValue(source)));
}
catch (Exception ex) {
ReflectionUtils.rethrowRuntimeException(ex);
throw new IllegalStateException(ex);
}
}
}
| DurationToNumberConverter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/hql/ASTParserLoadingTest.java | {
"start": 141772,
"end": 142685
} | class ____ {
private final String hql;
private final QueryPreparer preparer;
public SyntaxChecker(String hql) {
this( hql, DEFAULT_PREPARER );
}
public SyntaxChecker(String hql, QueryPreparer preparer) {
this.hql = hql;
this.preparer = preparer;
}
public void checkAll(SessionFactoryScope scope) {
checkList( scope );
checkScroll( scope );
}
public SyntaxChecker checkList(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Query query = session.createQuery( hql, Object[].class );
preparer.prepare( query );
query.list();
}
);
return this;
}
public SyntaxChecker checkScroll(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Query query = session.createQuery( hql, Object[].class );
preparer.prepare( query );
query.scroll().close();
}
);
return this;
}
}
}
| SyntaxChecker |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAppController.java | {
"start": 8715,
"end": 8926
} | class ____ rendering
*/
@Test
public void testTasks() {
appController.tasks();
assertEquals(TasksPage.class, appController.getClazz());
}
/**
* Test method 'task'. Should set TaskPage | for |
java | apache__maven | api/maven-api-core/src/main/java/org/apache/maven/api/services/OsService.java | {
"start": 2194,
"end": 3579
} | interface ____ extends Service {
/**
* Returns the OS full name as reported by the system property "os.name".
* The value is converted to lowercase for consistency.
*
* @return the operating system name (never null)
*/
@Nonnull
String name();
/**
* Returns the OS architecture as reported by the system property "os.arch".
* The value is converted to lowercase for consistency.
*
* @return the operating system architecture (never null)
*/
@Nonnull
String arch();
/**
* Returns the OS version as reported by the system property "os.version".
* The value is converted to lowercase for consistency.
*
* @return the operating system version (never null)
*/
@Nonnull
String version();
/**
* Returns the OS family name based on OS detection rules.
* This categorizes the OS into one of the supported families
* (e.g., "windows", "unix", "mac").
*
* @return the operating system family name (never null)
*/
@Nonnull
String family();
/**
* Checks if the current operating system belongs to the Windows family.
* This includes all Windows variants (95, 98, ME, NT, 2000, XP, Vista, 7, 8, 10, 11).
*
* @return true if the current OS is any Windows variant, false otherwise
*/
boolean isWindows();
}
| OsService |
java | apache__camel | test-infra/camel-test-infra-postgres/src/main/java/org/apache/camel/test/infra/postgres/services/PostgresInfraService.java | {
"start": 984,
"end": 1167
} | interface ____ extends InfrastructureService {
String host();
int port();
String userName();
String password();
String getServiceAddress();
}
| PostgresInfraService |
java | elastic__elasticsearch | x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/collector/node/NodeStatsMonitoringDoc.java | {
"start": 857,
"end": 7476
} | class ____ extends FilteredMonitoringDoc {
public static final String TYPE = "node_stats";
private final String nodeId;
private final boolean nodeMaster;
private final NodeStats nodeStats;
private final boolean mlockall;
NodeStatsMonitoringDoc(
final String cluster,
final long timestamp,
final long interval,
final MonitoringDoc.Node node,
final String nodeId,
final boolean isMaster,
final NodeStats nodeStats,
final boolean mlockall
) {
super(cluster, timestamp, interval, node, MonitoredSystem.ES, TYPE, null, XCONTENT_FILTERS);
this.nodeId = Objects.requireNonNull(nodeId);
this.nodeStats = Objects.requireNonNull(nodeStats);
this.nodeMaster = isMaster;
this.mlockall = mlockall;
}
String getNodeId() {
return nodeId;
}
boolean isNodeMaster() {
return nodeMaster;
}
NodeStats getNodeStats() {
return nodeStats;
}
boolean isMlockall() {
return mlockall;
}
@Override
protected void innerToXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(TYPE);
{
builder.field("node_id", nodeId);
builder.field("node_master", nodeMaster);
builder.field("mlockall", mlockall);
ChunkedToXContent.wrapAsToXContent(nodeStats).toXContent(builder, params);
}
builder.endObject();
}
public static final Set<String> XCONTENT_FILTERS = Set.of(
"node_stats.node_id",
"node_stats.node_master",
"node_stats.mlockall",
"node_stats.indices.docs.count",
"node_stats.indices.fielddata.memory_size_in_bytes",
"node_stats.indices.fielddata.evictions",
"node_stats.indices.store.size_in_bytes",
"node_stats.indices.indexing.throttle_time_in_millis",
"node_stats.indices.indexing.index_total",
"node_stats.indices.indexing.index_time_in_millis",
"node_stats.indices.query_cache.memory_size_in_bytes",
"node_stats.indices.query_cache.evictions",
"node_stats.indices.query_cache.hit_count",
"node_stats.indices.query_cache.miss_count",
"node_stats.indices.request_cache.memory_size_in_bytes",
"node_stats.indices.request_cache.evictions",
"node_stats.indices.request_cache.hit_count",
"node_stats.indices.request_cache.miss_count",
"node_stats.indices.search.query_total",
"node_stats.indices.search.query_time_in_millis",
"node_stats.indices.segments.count",
"node_stats.indices.segments.memory_in_bytes",
"node_stats.indices.segments.terms_memory_in_bytes",
"node_stats.indices.segments.stored_fields_memory_in_bytes",
"node_stats.indices.segments.term_vectors_memory_in_bytes",
"node_stats.indices.segments.norms_memory_in_bytes",
"node_stats.indices.segments.points_memory_in_bytes",
"node_stats.indices.segments.doc_values_memory_in_bytes",
"node_stats.indices.segments.index_writer_memory_in_bytes",
"node_stats.indices.segments.version_map_memory_in_bytes",
"node_stats.indices.segments.fixed_bit_set_memory_in_bytes",
"node_stats.indices.bulk.total_operations",
"node_stats.indices.bulk.total_time_in_millis",
"node_stats.indices.bulk.total_size_in_bytes",
"node_stats.indices.bulk.avg_time_in_millis",
"node_stats.indices.bulk.avg_size_in_bytes",
"node_stats.fs.io_stats.total.operations",
"node_stats.fs.io_stats.total.read_operations",
"node_stats.fs.io_stats.total.write_operations",
"node_stats.fs.io_stats.total.read_kilobytes",
"node_stats.fs.io_stats.total.write_kilobytes",
"node_stats.fs.total.total_in_bytes",
"node_stats.fs.total.free_in_bytes",
"node_stats.fs.total.available_in_bytes",
"node_stats.os.cgroup.cpuacct.control_group",
"node_stats.os.cgroup.cpuacct.usage_nanos",
"node_stats.os.cgroup.cpu.control_group",
"node_stats.os.cgroup.cpu.cfs_period_micros",
"node_stats.os.cgroup.cpu.cfs_quota_micros",
"node_stats.os.cgroup.cpu.stat.number_of_elapsed_periods",
"node_stats.os.cgroup.cpu.stat.number_of_times_throttled",
"node_stats.os.cgroup.cpu.stat.time_throttled_nanos",
"node_stats.os.cgroup.memory.control_group",
"node_stats.os.cgroup.memory.limit_in_bytes",
"node_stats.os.cgroup.memory.usage_in_bytes",
"node_stats.os.cpu.load_average.1m",
"node_stats.os.cpu.load_average.5m",
"node_stats.os.cpu.load_average.15m",
"node_stats.process.cpu.percent",
"node_stats.process.max_file_descriptors",
"node_stats.process.open_file_descriptors",
"node_stats.jvm.mem.heap_max_in_bytes",
"node_stats.jvm.mem.heap_used_in_bytes",
"node_stats.jvm.mem.heap_used_percent",
"node_stats.jvm.gc.collectors.young",
"node_stats.jvm.gc.collectors.young.collection_count",
"node_stats.jvm.gc.collectors.young.collection_time_in_millis",
"node_stats.jvm.gc.collectors.old",
"node_stats.jvm.gc.collectors.old.collection_count",
"node_stats.jvm.gc.collectors.old.collection_time_in_millis",
/*
* We whitelist both bulk and write in case the user is running in a mixed-version cluster or has the display name
* on the write thread pool set to "bulk".
*/
"node_stats.thread_pool.bulk.threads",
"node_stats.thread_pool.bulk.queue",
"node_stats.thread_pool.bulk.rejected",
"node_stats.thread_pool.write.threads",
"node_stats.thread_pool.write.queue",
"node_stats.thread_pool.write.rejected",
"node_stats.thread_pool.generic.threads",
"node_stats.thread_pool.generic.queue",
"node_stats.thread_pool.generic.rejected",
"node_stats.thread_pool.get.threads",
"node_stats.thread_pool.get.queue",
"node_stats.thread_pool.get.rejected",
"node_stats.thread_pool.management.threads",
"node_stats.thread_pool.management.queue",
"node_stats.thread_pool.management.rejected",
"node_stats.thread_pool.search.threads",
"node_stats.thread_pool.search.queue",
"node_stats.thread_pool.search.rejected",
"node_stats.thread_pool.watcher.threads",
"node_stats.thread_pool.watcher.queue",
"node_stats.thread_pool.watcher.rejected"
);
}
| NodeStatsMonitoringDoc |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/config/meta/ConfigClassesAndProfileResolverWithCustomDefaultsMetaConfigWithOverridesTests.java | {
"start": 1817,
"end": 1903
} | class ____ {
@Bean
public String foo() {
return "Local Dev Foo";
}
}
| LocalDevConfig |
java | netty__netty | microbench/src/main/java/io/netty/microbench/http2/NoPriorityByteDistributionBenchmark.java | {
"start": 3028,
"end": 7838
} | class ____ {
int minWriteSize = Integer.MAX_VALUE;
int maxWriteSize = Integer.MIN_VALUE;
long totalBytes;
long numWrites;
int invocations;
public int minWriteSize() {
return minWriteSize;
}
public int avgWriteSize() {
return (int) (totalBytes / numWrites);
}
public int maxWriteSize() {
return maxWriteSize;
}
}
private final Http2StreamVisitor invocationVisitor = new Http2StreamVisitor() {
@Override
public boolean visit(Http2Stream stream) throws Http2Exception {
// Restore the connection window.
resetWindow(stream);
// Restore the data to each stream.
dataRefresher(stream).refreshData();
return true;
}
};
@TearDown(Level.Trial)
public void tearDownTrial() throws Exception {
ctx.close();
}
@Setup(Level.Trial)
public void setupTrial() throws Exception {
connection = new DefaultHttp2Connection(false);
dataRefresherKey = connection.newKey();
// Create the flow controller
switch (algorithm) {
case WFQ:
distributor = new WeightedFairQueueByteDistributor(connection, 0);
break;
case UNIFORM:
distributor = new UniformStreamByteDistributor(connection);
break;
}
controller = new DefaultHttp2RemoteFlowController(connection, new ByteCounter(distributor));
connection.remote().flowController(controller);
Http2ConnectionHandler handler = new Http2ConnectionHandlerBuilder()
.encoderEnforceMaxConcurrentStreams(false).validateHeaders(false)
.frameListener(new Http2FrameAdapter())
.connection(connection)
.build();
ctx = new EmbeddedChannelWriteReleaseHandlerContext(PooledByteBufAllocator.DEFAULT, handler) {
@Override
protected void handleException(Throwable t) {
handleUnexpectedException(t);
}
};
handler.handlerAdded(ctx);
handler.channelActive(ctx);
// Create the streams, each initialized with MAX_INT bytes.
for (int i = 0; i < numStreams; ++i) {
Http2Stream stream = connection.local().createStream(toStreamId(i), false);
addData(stream, Integer.MAX_VALUE);
stream.setProperty(dataRefresherKey, new DataRefresher(stream));
}
}
@Setup(Level.Invocation)
public void setupInvocation() throws Http2Exception {
resetWindow(connection.connectionStream());
connection.forEachActiveStream(invocationVisitor);
}
@Benchmark
public void write(AdditionalCounters counters) throws Http2Exception {
// Set up for this invocation. Doing this in the benchmark method since this
// seems to throw off the counters when run as a setup step for the invocation.
this.counters = counters;
counters.invocations++;
// Now run the benchmark method.
controller.writePendingBytes();
}
private void resetWindow(Http2Stream stream) throws Http2Exception {
controller.incrementWindowSize(stream, windowSize - controller.windowSize(stream));
}
private DataRefresher dataRefresher(Http2Stream stream) {
return (DataRefresher) stream.getProperty(dataRefresherKey);
}
private void addData(Http2Stream stream, final int dataSize) {
controller.addFlowControlled(stream, new Http2RemoteFlowController.FlowControlled() {
private int size = dataSize;
@Override
public int size() {
return size;
}
@Override
public void error(ChannelHandlerContext ctx, Throwable cause) {
cause.printStackTrace();
}
@Override
public void writeComplete() {
// Don't care.
}
@Override
public void write(ChannelHandlerContext ctx, int allowedBytes) {
size -= allowedBytes;
}
@Override
public boolean merge(ChannelHandlerContext ctx,
Http2RemoteFlowController.FlowControlled next) {
int nextSize = next.size();
if (Integer.MAX_VALUE - nextSize < size) {
// Disallow merge to avoid integer overflow.
return false;
}
// Merge.
size += nextSize;
return true;
}
});
}
private static int toStreamId(int i) {
return 2 * i + 1;
}
private final | AdditionalCounters |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/http/HttpUtils.java | {
"start": 2420,
"end": 10823
} | class ____ {
private static final Pattern CONTEXT_PATH_MATCH = Pattern.compile("(\\/)\\1+");
/**
* Init http header.
*
* @param requestBase requestBase {@link HttpUriRequestBase}
* @param header header
*/
public static void initRequestHeader(ClassicHttpRequest requestBase, Header header) {
Iterator<Map.Entry<String, String>> iterator = header.iterator();
while (iterator.hasNext()) {
Map.Entry<String, String> entry = iterator.next();
requestBase.setHeader(entry.getKey(), entry.getValue());
}
}
/**
* Init http entity.
*
* @param requestBase requestBase {@link HttpUriRequestBase}
* @param body body
* @param header request header
* @throws Exception exception
*/
public static void initRequestEntity(ClassicHttpRequest requestBase, Object body, Header header) throws Exception {
if (body == null) {
return;
}
if (requestBase instanceof HttpEntityContainer) {
HttpEntityContainer request = requestBase;
MediaType mediaType = MediaType.valueOf(header.getValue(HttpHeaderConsts.CONTENT_TYPE));
ContentType contentType = ContentType.create(mediaType.getType(), mediaType.getCharset());
HttpEntity entity;
if (body instanceof byte[]) {
entity = new ByteArrayEntity((byte[]) body, contentType);
} else {
entity = new StringEntity(body instanceof String ? (String) body : JacksonUtils.toJson(body),
contentType);
}
request.setEntity(entity);
}
}
/**
* Init request from entity map.
*
* @param requestBase requestBase {@link HttpUriRequestBase}
* @param body body map
* @param charset charset of entity
* @throws Exception exception
*/
public static void initRequestFromEntity(ClassicHttpRequest requestBase, Map<String, String> body, String charset)
throws Exception {
if (body == null || body.isEmpty()) {
return;
}
List<NameValuePair> params = new ArrayList<>(body.size());
for (Map.Entry<String, String> entry : body.entrySet()) {
params.add(new BasicNameValuePair(entry.getKey(), entry.getValue()));
}
if (requestBase instanceof HttpEntityContainer) {
HttpEntityContainer request = requestBase;
HttpEntity entity = new UrlEncodedFormEntity(params, Charset.forName(charset));
request.setEntity(entity);
}
}
/**
* Build URL.
*
* @param isHttps whether is https
* @param serverAddr server ip/address
* @param subPaths api path
* @return URL string
*/
public static String buildUrl(boolean isHttps, String serverAddr, String... subPaths) {
StringBuilder sb = new StringBuilder();
if (isHttps) {
sb.append(HTTPS_PREFIX);
} else {
sb.append(HTTP_PREFIX);
}
sb.append(serverAddr);
String pre = null;
for (String subPath : subPaths) {
if (StringUtils.isBlank(subPath)) {
continue;
}
Matcher matcher = CONTEXT_PATH_MATCH.matcher(subPath);
if (matcher.find()) {
throw new IllegalArgumentException("Illegal url path expression : " + subPath);
}
if (pre == null || !pre.endsWith("/")) {
if (subPath.startsWith("/")) {
sb.append(subPath);
} else {
sb.append('/').append(subPath);
}
} else {
if (subPath.startsWith("/")) {
sb.append(subPath.replaceFirst("\\/", ""));
} else {
sb.append(subPath);
}
}
pre = subPath;
}
return sb.toString();
}
/**
* Translate parameter map.
*
* @param parameterMap parameter map
* @return parameter map
* @throws Exception exception
*/
public static Map<String, String> translateParameterMap(Map<String, String[]> parameterMap) throws Exception {
Map<String, String> map = new HashMap<>(16);
for (Map.Entry<String, String[]> entry : parameterMap.entrySet()) {
map.put(entry.getKey(), entry.getValue()[0]);
}
return map;
}
/**
* Encoding parameters to url string.
*
* @param params parameters
* @param encoding encoding charset
* @return url string
* @throws UnsupportedEncodingException if encoding string is illegal
*/
public static String encodingParams(Map<String, String> params, String encoding)
throws UnsupportedEncodingException {
StringBuilder sb = new StringBuilder();
if (null == params || params.isEmpty()) {
return null;
}
for (Map.Entry<String, String> entry : params.entrySet()) {
if (StringUtils.isEmpty(entry.getValue())) {
continue;
}
sb.append(entry.getKey()).append('=');
sb.append(URLEncoder.encode(entry.getValue(), encoding));
sb.append('&');
}
return sb.toString();
}
/**
* Encoding KV list to url string.
*
* @param paramValues parameters
* @param encoding encoding charset
* @return url string
* @throws UnsupportedEncodingException if encoding string is illegal
*/
public static String encodingParams(List<String> paramValues, String encoding) throws UnsupportedEncodingException {
StringBuilder sb = new StringBuilder();
if (null == paramValues) {
return null;
}
for (Iterator<String> iter = paramValues.iterator(); iter.hasNext(); ) {
sb.append(iter.next()).append('=');
sb.append(URLEncoder.encode(iter.next(), encoding));
if (iter.hasNext()) {
sb.append('&');
}
}
return sb.toString();
}
public static String decode(String str, String encode) throws UnsupportedEncodingException {
return innerDecode(null, str, encode);
}
/**
* build URI By url and query.
*
* @param url url
* @param query query param {@link Query}
* @return {@link URI}
*/
public static URI buildUri(String url, Query query) throws URISyntaxException {
if (query != null && !query.isEmpty()) {
url = url + "?" + query.toQueryUrl();
}
return new URI(url);
}
/**
* HTTP request exception is a timeout exception.
*
* @param throwable http request throwable
* @return boolean
*/
public static boolean isTimeoutException(Throwable throwable) {
return throwable instanceof SocketTimeoutException || throwable instanceof ConnectTimeoutException
|| throwable instanceof TimeoutException || throwable.getCause() instanceof TimeoutException;
}
/**
* Build header.
*
* @return header
*/
public static Header builderHeader(String module) {
Header header = Header.newInstance();
header.addParam(HttpHeaderConsts.CLIENT_VERSION_HEADER, VersionUtils.version);
header.addParam(HttpHeaderConsts.USER_AGENT_HEADER, VersionUtils.getFullClientVersion());
header.addParam(HttpHeaderConsts.ACCEPT_ENCODING, "gzip,deflate,sdch");
header.addParam(HttpHeaderConsts.CONNECTION, "Keep-Alive");
header.addParam(HttpHeaderConsts.REQUEST_ID, UuidUtils.generateUuid());
header.addParam(HttpHeaderConsts.REQUEST_MODULE, module);
return header;
}
private static String innerDecode(String pre, String now, String encode) throws UnsupportedEncodingException {
// Because the data may be encoded by the URL more than once,
// it needs to be decoded recursively until it is fully successful
if (StringUtils.equals(pre, now)) {
return pre;
}
pre = now;
now = URLDecoder.decode(now, encode);
return innerDecode(pre, now, encode);
}
}
| HttpUtils |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NullableOnContainingClassTest.java | {
"start": 1414,
"end": 1908
} | class ____ {}
void test(@Anno A.B x) {}
void test2(A.@Anno B x) {}
}
""")
.doTest();
}
@Test
public void annotationNotExclusivelyTypeUse_noFinding() {
helper
.addSourceLines(
"Test.java",
"""
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.ElementType.TYPE_USE;
import java.lang.annotation.Target;
| B |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/builders/BuilderManager.java | {
"start": 2166,
"end": 4481
} | class ____ {
/** Plugin category. */
public static final String CATEGORY = "Log4j Builder";
public static final Appender INVALID_APPENDER = new AppenderWrapper(null);
public static final Filter INVALID_FILTER = new FilterWrapper(null);
public static final Layout INVALID_LAYOUT = new LayoutWrapper(null);
public static final RewritePolicy INVALID_REWRITE_POLICY = new RewritePolicyWrapper(null);
private static final Logger LOGGER = StatusLogger.getLogger();
private static final Class<?>[] CONSTRUCTOR_PARAMS = new Class[] {String.class, Properties.class};
private final Map<String, PluginType<?>> plugins;
/**
* Constructs a new instance.
*/
public BuilderManager() {
final PluginManager manager = new PluginManager(CATEGORY);
manager.collectPlugins();
plugins = manager.getPlugins();
}
private <T extends Builder<U>, U> T createBuilder(
final PluginType<T> plugin, final String prefix, final Properties props) {
if (plugin == null) {
return null;
}
try {
final Class<T> clazz = plugin.getPluginClass();
if (AbstractBuilder.class.isAssignableFrom(clazz)) {
return clazz.getConstructor(CONSTRUCTOR_PARAMS).newInstance(prefix, props);
}
final T builder = LoaderUtil.newInstanceOf(clazz);
// Reasonable message instead of `ClassCastException`
if (!Builder.class.isAssignableFrom(clazz)) {
LOGGER.warn("Unable to load plugin: builder {} does not implement {}", clazz, Builder.class);
return null;
}
return builder;
} catch (final ReflectiveOperationException ex) {
LOGGER.warn("Unable to load plugin: {} due to: {}", plugin.getKey(), ex.getMessage());
return null;
}
}
@SuppressWarnings("unchecked")
private <T> PluginType<T> getPlugin(final String className) {
Objects.requireNonNull(plugins, "plugins");
Objects.requireNonNull(className, "className");
final String key = toRootLowerCase(className).trim();
final PluginType<?> pluginType = plugins.get(key);
if (pluginType == null) {
LOGGER.warn("Unable to load plugin | BuilderManager |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/disk/iomanager/IOManagerTest.java | {
"start": 1335,
"end": 3285
} | class ____ {
@Test
void channelEnumerator(@TempDir File tempPath) throws Exception {
String[] tempDirs =
new String[] {
new File(tempPath, "a").getAbsolutePath(),
new File(tempPath, "b").getAbsolutePath(),
new File(tempPath, "c").getAbsolutePath(),
new File(tempPath, "d").getAbsolutePath(),
new File(tempPath, "e").getAbsolutePath(),
};
int[] counters = new int[tempDirs.length];
try (IOManager ioMan = new TestIOManager(tempDirs)) {
FileIOChannel.Enumerator enumerator = ioMan.createChannelEnumerator();
for (int i = 0; i < 3 * tempDirs.length; i++) {
FileIOChannel.ID id = enumerator.next();
File pathFile = id.getPathFile();
Files.createFile(pathFile.toPath());
assertThat(pathFile)
.withFailMessage("Channel IDs must name an absolute path.")
.isAbsolute();
assertThat(pathFile)
.withFailMessage("Channel IDs must name a file, not a directory.")
.isFile();
assertThat(pathFile.getParentFile().getParentFile().getParentFile())
.withFailMessage("Path is not in the temp directory.")
.isEqualTo(tempPath);
for (int k = 0; k < tempDirs.length; k++) {
if (pathFile.getParentFile().getParent().equals(tempDirs[k])) {
counters[k]++;
}
}
}
for (int k = 0; k < tempDirs.length; k++) {
assertThat(counters[k]).isEqualTo(3);
}
}
}
// --------------------------------------------------------------------------------------------
private static | IOManagerTest |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ConfigurationHelper.java | {
"start": 4125,
"end": 4869
} | enum ____
* @throws IllegalArgumentException if there are two entries which differ only by case.
*/
public static <E extends Enum<E>> Map<String, E> mapEnumNamesToValues(
final String prefix,
final Class<E> enumClass) {
final E[] constants = enumClass.getEnumConstants();
Map<String, E> mapping = new HashMap<>(constants.length);
for (E constant : constants) {
final String lc = constant.name().toLowerCase(Locale.ROOT);
final E orig = mapping.put(prefix + lc, constant);
checkArgument(orig == null,
"Enum %s "
+ ERROR_MULTIPLE_ELEMENTS_MATCHING_TO_LOWER_CASE_VALUE
+ " %s",
enumClass, lc);
}
return mapping;
}
/**
* Look up an | values |
java | apache__camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/integration/consumers/SingleMessageSameTopicIT.java | {
"start": 1388,
"end": 4155
} | class ____ extends AbstractPersistentJMSTest {
@BeforeEach
void prepare() {
getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:b").expectedBodiesReceived("Hello World");
template.sendBody("activemq:topic:SingleMessageSameTopicIT", "Hello World");
}
@Order(1)
@Test
void testTwoConsumerOnSameTopic() throws Exception {
MockEndpoint.assertIsSatisfied(context);
}
@Order(2)
@Test
void testStopAndStartOneRoute() throws Exception {
MockEndpoint.assertIsSatisfied(context);
// now stop route A
context.getRouteController().stopRoute("a");
// send a new message should go to B only
MockEndpoint.resetMocks(context);
getMockEndpoint("mock:a").expectedMessageCount(0);
getMockEndpoint("mock:b").expectedBodiesReceived("Bye World");
template.sendBody("activemq:topic:SingleMessageSameTopicIT", "Bye World");
MockEndpoint.assertIsSatisfied(context);
// send new message should go to both A and B
MockEndpoint.resetMocks(context);
// now start route A
context.getRouteController().startRoute("a");
getMockEndpoint("mock:a").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:b").expectedBodiesReceived("Hello World");
template.sendBody("activemq:topic:SingleMessageSameTopicIT", "Hello World");
}
@Order(3)
@Test
void testRemoveOneRoute() throws Exception {
MockEndpoint.assertIsSatisfied(context);
// now stop and remove route A
context.getRouteController().stopRoute("a");
assertTrue(context.removeRoute("a"));
// send new message should go to B only
MockEndpoint.resetMocks(context);
getMockEndpoint("mock:a").expectedMessageCount(0);
getMockEndpoint("mock:b").expectedBodiesReceived("Bye World");
template.sendBody("activemq:topic:SingleMessageSameTopicIT", "Bye World");
MockEndpoint.assertIsSatisfied(context);
}
@BeforeEach
void waitForConnections() {
Awaitility.await().until(() -> context.getRoute("a").getUptimeMillis() > 200);
Awaitility.await().until(() -> context.getRoute("b").getUptimeMillis() > 200);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("activemq:topic:SingleMessageSameTopicIT").routeId("a")
.to("log:a", "mock:a");
from("activemq:topic:SingleMessageSameTopicIT").routeId("b")
.to("log:b", "mock:b");
}
};
}
}
| SingleMessageSameTopicIT |
java | spring-projects__spring-framework | spring-expression/src/main/java/org/springframework/expression/spel/ast/OpDivide.java | {
"start": 1345,
"end": 5006
} | class ____ extends Operator {
public OpDivide(int startPos, int endPos, SpelNodeImpl... operands) {
super("/", startPos, endPos, operands);
}
@Override
public TypedValue getValueInternal(ExpressionState state) throws EvaluationException {
Object leftOperand = getLeftOperand().getValueInternal(state).getValue();
Object rightOperand = getRightOperand().getValueInternal(state).getValue();
if (leftOperand instanceof Number leftNumber && rightOperand instanceof Number rightNumber) {
if (leftNumber instanceof BigDecimal || rightNumber instanceof BigDecimal) {
BigDecimal leftBigDecimal = NumberUtils.convertNumberToTargetClass(leftNumber, BigDecimal.class);
BigDecimal rightBigDecimal = NumberUtils.convertNumberToTargetClass(rightNumber, BigDecimal.class);
int scale = Math.max(leftBigDecimal.scale(), rightBigDecimal.scale());
return new TypedValue(leftBigDecimal.divide(rightBigDecimal, scale, RoundingMode.HALF_EVEN));
}
else if (leftNumber instanceof Double || rightNumber instanceof Double) {
this.exitTypeDescriptor = "D";
return new TypedValue(leftNumber.doubleValue() / rightNumber.doubleValue());
}
else if (leftNumber instanceof Float || rightNumber instanceof Float) {
this.exitTypeDescriptor = "F";
return new TypedValue(leftNumber.floatValue() / rightNumber.floatValue());
}
else if (leftNumber instanceof BigInteger || rightNumber instanceof BigInteger) {
BigInteger leftBigInteger = NumberUtils.convertNumberToTargetClass(leftNumber, BigInteger.class);
BigInteger rightBigInteger = NumberUtils.convertNumberToTargetClass(rightNumber, BigInteger.class);
return new TypedValue(leftBigInteger.divide(rightBigInteger));
}
else if (leftNumber instanceof Long || rightNumber instanceof Long) {
this.exitTypeDescriptor = "J";
return new TypedValue(leftNumber.longValue() / rightNumber.longValue());
}
else if (CodeFlow.isIntegerForNumericOp(leftNumber) || CodeFlow.isIntegerForNumericOp(rightNumber)) {
this.exitTypeDescriptor = "I";
return new TypedValue(leftNumber.intValue() / rightNumber.intValue());
}
else {
// Unknown Number subtypes -> best guess is double division
return new TypedValue(leftNumber.doubleValue() / rightNumber.doubleValue());
}
}
return state.operate(Operation.DIVIDE, leftOperand, rightOperand);
}
@Override
public boolean isCompilable() {
if (!getLeftOperand().isCompilable()) {
return false;
}
if (this.children.length > 1) {
if (!getRightOperand().isCompilable()) {
return false;
}
}
return (this.exitTypeDescriptor != null);
}
@Override
public void generateCode(MethodVisitor mv, CodeFlow cf) {
getLeftOperand().generateCode(mv, cf);
String leftDesc = getLeftOperand().exitTypeDescriptor;
String exitDesc = this.exitTypeDescriptor;
Assert.state(exitDesc != null, "No exit type descriptor");
char targetDesc = exitDesc.charAt(0);
CodeFlow.insertNumericUnboxOrPrimitiveTypeCoercion(mv, leftDesc, targetDesc);
if (this.children.length > 1) {
cf.enterCompilationScope();
getRightOperand().generateCode(mv, cf);
String rightDesc = getRightOperand().exitTypeDescriptor;
cf.exitCompilationScope();
CodeFlow.insertNumericUnboxOrPrimitiveTypeCoercion(mv, rightDesc, targetDesc);
switch (targetDesc) {
case 'I' -> mv.visitInsn(IDIV);
case 'J' -> mv.visitInsn(LDIV);
case 'F' -> mv.visitInsn(FDIV);
case 'D' -> mv.visitInsn(DDIV);
default -> throw new IllegalStateException(
"Unrecognized exit type descriptor: '" + this.exitTypeDescriptor + "'");
}
}
cf.pushDescriptor(this.exitTypeDescriptor);
}
}
| OpDivide |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/stats/CCSUsage.java | {
"start": 7786,
"end": 8171
} | class ____ {
// if MRT=true, the took time on the remote cluster (if MRT=true), otherwise the overall took time
private long took;
public PerClusterUsage(TimeValue took) {
if (took != null) {
this.took = took.millis();
}
}
public long getTook() {
return took;
}
}
}
| PerClusterUsage |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/intercept/InterceptFromWhenWithChoiceTest.java | {
"start": 983,
"end": 2806
} | class ____ extends ContextTestSupport {
@Test
public void testInterceptorHelloWorld() throws Exception {
getMockEndpoint("mock:goofy").expectedMessageCount(0);
getMockEndpoint("mock:hello").expectedMessageCount(0);
getMockEndpoint("mock:foo").expectedMessageCount(1);
getMockEndpoint("mock:end").expectedMessageCount(1);
sendBody("direct:start", "Hello World!");
assertMockEndpointsSatisfied();
}
@Test
public void testInterceptorHelloGoofy() throws Exception {
getMockEndpoint("mock:goofy").expectedMessageCount(0);
getMockEndpoint("mock:hello").expectedMessageCount(1);
getMockEndpoint("mock:foo").expectedMessageCount(0);
getMockEndpoint("mock:end").expectedMessageCount(0);
sendBody("direct:start", "Hello Goofy");
assertMockEndpointsSatisfied();
}
@Test
public void testInterceptorByeGoofy() throws Exception {
getMockEndpoint("mock:goofy").expectedMessageCount(1);
getMockEndpoint("mock:hello").expectedMessageCount(0);
getMockEndpoint("mock:foo").expectedMessageCount(0);
getMockEndpoint("mock:end").expectedMessageCount(0);
sendBody("direct:start", "Bye Goofy");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
interceptFrom().onWhen(simple("${body} contains 'Goofy'")).choice().when(body().contains("Hello"))
.to("mock:hello").otherwise().to("log:foo").to("mock:goofy").end()
.stop();
from("direct:start").to("mock:foo").to("mock:end");
}
};
}
}
| InterceptFromWhenWithChoiceTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/monitor/ContainersMonitorEventType.java | {
"start": 876,
"end": 1010
} | enum ____ {
START_MONITORING_CONTAINER,
STOP_MONITORING_CONTAINER,
CHANGE_MONITORING_CONTAINER_RESOURCE
}
| ContainersMonitorEventType |
java | alibaba__nacos | plugin/datasource/src/main/java/com/alibaba/nacos/plugin/datasource/mapper/ConfigInfoGrayMapper.java | {
"start": 1057,
"end": 4598
} | interface ____ extends Mapper {
/**
* Update gray configuration information. The default sql: UPDATE config_info_gray SET content=?, md5 = ?,
* src_ip=?,src_user=?,gmt_modified=?,app_name=?,gray_rule=? WHERE data_id=? AND group_id=? AND tenant_id=? AND
* gray_name=? AND (md5=? or md5 is null or md5='')
*
* @param context sql paramMap
* @return The sql of updating gray configuration information.
*/
default MapperResult updateConfigInfo4GrayCas(MapperContext context) {
Object content = context.getUpdateParameter(FieldConstant.CONTENT);
Object md5 = context.getUpdateParameter(FieldConstant.MD5);
Object srcIp = context.getUpdateParameter(FieldConstant.SRC_IP);
Object srcUser = context.getUpdateParameter(FieldConstant.SRC_USER);
Object gmtModified = context.getUpdateParameter(FieldConstant.GMT_MODIFIED);
Object appName = context.getUpdateParameter(FieldConstant.APP_NAME);
Object dataId = context.getWhereParameter(FieldConstant.DATA_ID);
Object groupId = context.getWhereParameter(FieldConstant.GROUP_ID);
Object tenantId = context.getWhereParameter(FieldConstant.TENANT_ID);
Object grayName = context.getWhereParameter(FieldConstant.GRAY_NAME);
Object grayRule = context.getWhereParameter(FieldConstant.GRAY_RULE);
Object oldMd5 = context.getWhereParameter(FieldConstant.MD5);
String sql = "UPDATE config_info_gray SET content = ?, md5 = ?, src_ip = ?,src_user = ?,gmt_modified = "
+ getFunction("NOW()") + ",app_name = ?, gray_rule = ?"
+ "WHERE data_id = ? AND group_id = ? AND tenant_id = ? AND gray_name = ? AND (md5 = ? OR md5 IS NULL OR md5 = '')";
return new MapperResult(sql,
CollectionUtils.list(content, md5, srcIp, srcUser, appName, grayRule, dataId, groupId,
tenantId, grayName, oldMd5));
}
/**
* Query change config. <br/>The default sql: SELECT data_id, group_id, tenant_id, app_name, content,
* gmt_modified,encrypted_data_key FROM config_info WHERE gmt_modified >=? AND gmt_modified <= ?
*
* @param context sql paramMap
* @return The sql of querying change config.
*/
default MapperResult findChangeConfig(MapperContext context) {
String sql =
"SELECT id, data_id, group_id, tenant_id, app_name,content,gray_name,gray_rule,md5, gmt_modified, encrypted_data_key "
+ "FROM config_info_gray WHERE " + "gmt_modified >= ? and id > ? order by id limit ? ";
return new MapperResult(sql, CollectionUtils.list(context.getWhereParameter(FieldConstant.START_TIME),
context.getWhereParameter(FieldConstant.LAST_MAX_ID),
context.getWhereParameter(FieldConstant.PAGE_SIZE)));
}
/**
* Query all gray config info for dump task. The default sql: SELECT
* t.id,data_id,group_id,tenant_id,gray_name,app_name,content,md5,gmt_modified FROM ( SELECT id FROM
* config_info_gray ORDER BY id LIMIT startRow,pageSize ) g, config_info_gray t WHERE g.id = t.id
*
* @param context The start index.
* @return The sql of querying all gray config info for dump task.
*/
MapperResult findAllConfigInfoGrayForDumpAllFetchRows(MapperContext context);
/**
* 获取返回表名.
*
* @return 表名
*/
default String getTableName() {
return TableConstant.CONFIG_INFO_GRAY;
}
}
| ConfigInfoGrayMapper |
java | elastic__elasticsearch | x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/Exporters.java | {
"start": 1787,
"end": 3802
} | class ____ extends AbstractLifecycleComponent {
private static final Logger LOGGER = LogManager.getLogger(Exporters.class);
private final Settings settings;
private final Map<String, Exporter.Factory> factories;
private final AtomicReference<Map<String, Exporter>> exporters;
private final AtomicReference<Map<String, Exporter.Config>> disabledExporterConfigs;
private final ClusterService clusterService;
private final XPackLicenseState licenseState;
private final ThreadContext threadContext;
@SuppressWarnings("this-escape")
public Exporters(
Settings settings,
Map<String, Exporter.Factory> factories,
ClusterService clusterService,
XPackLicenseState licenseState,
ThreadContext threadContext,
SSLService sslService
) {
this.settings = settings;
this.factories = factories;
this.exporters = new AtomicReference<>(emptyMap());
this.disabledExporterConfigs = new AtomicReference<>(emptyMap());
this.threadContext = Objects.requireNonNull(threadContext);
this.clusterService = Objects.requireNonNull(clusterService);
this.licenseState = Objects.requireNonNull(licenseState);
final List<Setting.AffixSetting<?>> dynamicSettings = getSettings().stream().filter(Setting::isDynamic).toList();
final List<Setting<?>> updateSettings = new ArrayList<Setting<?>>(dynamicSettings);
updateSettings.add(Monitoring.MIGRATION_DECOMMISSION_ALERTS);
clusterService.getClusterSettings().addSettingsUpdateConsumer(this::setExportersSetting, updateSettings);
HttpExporter.registerSettingValidators(clusterService, sslService);
// this ensures that logging is happening by adding an empty consumer per affix setting
for (Setting.AffixSetting<?> affixSetting : dynamicSettings) {
clusterService.getClusterSettings().addAffixUpdateConsumer(affixSetting, (s, o) -> {}, (s, o) -> {});
}
}
static | Exporters |
java | elastic__elasticsearch | build-conventions/src/main/java/org/elasticsearch/gradle/internal/checkstyle/MissingJavadocTypeCheck.java | {
"start": 1811,
"end": 1868
} | class ____ full documentation.
*/
@StatelessCheck
public | for |
java | processing__processing4 | core/src/processing/core/PGraphics.java | {
"start": 240249,
"end": 287473
} | class ____ not handle any details of settings lights. It does however
// display warning messages that the functions are not available.
/**
*
* Sets the default ambient light, directional light, falloff, and specular
* values. The defaults are <b>ambientLight(128, 128, 128)</b> and
* <b>directionalLight(128, 128, 128, 0, 0, -1)</b>, <b>lightFalloff(1, 0, 0)</b>, and
* <b>lightSpecular(0, 0, 0)</b>. Lights need to be included in the <b>draw()</b> to
* remain persistent in a looping program. Placing them in the <b>setup()</b> of a
* looping program will cause them to only have an effect the first time
* through the loop.
*
* @webref lights_camera:lights
* @webBrief Sets the default ambient light, directional light, falloff, and specular
* values
* @usage web_application
* @see PGraphics#ambientLight(float, float, float, float, float, float)
* @see PGraphics#directionalLight(float, float, float, float, float, float)
* @see PGraphics#pointLight(float, float, float, float, float, float)
* @see PGraphics#spotLight(float, float, float, float, float, float, float, float, float, float, float)
* @see PGraphics#noLights()
*/
public void lights() {
showMethodWarning("lights");
}
/**
*
* Disable all lighting. Lighting is turned off by default and enabled with
* the <b>lights()</b> function. This function can be used to disable
* lighting so that 2D geometry (which does not require lighting) can be
* drawn after a set of lighted 3D geometry.
*
* @webref lights_camera:lights
* @webBrief Disable all lighting
* @usage web_application
* @see PGraphics#lights()
*/
public void noLights() {
showMethodWarning("noLights");
}
/**
*
* Adds an ambient light. Ambient light doesn't come from a specific direction,
* the rays of light have bounced around so much that objects are evenly lit
* from all sides. Ambient lights are almost always used in combination with
* other types of lights. Lights need to be included in the <b>draw()</b> to
* remain persistent in a looping program. Placing them in the <b>setup()</b> of
* a looping program will cause them to only have an effect the first time
* through the loop. The <b>v1</b>, <b>v2</b>, and <b>v3</b> parameters are
* interpreted as either RGB or HSB values, depending on the current color mode.
*
* @webref lights_camera:lights
* @webBrief Adds an ambient light
* @usage web_application
* @param v1 red or hue value (depending on current color mode)
* @param v2 green or saturation value (depending on current color mode)
* @param v3 blue or brightness value (depending on current color mode)
* @see PGraphics#lights()
* @see PGraphics#directionalLight(float, float, float, float, float, float)
* @see PGraphics#pointLight(float, float, float, float, float, float)
* @see PGraphics#spotLight(float, float, float, float, float, float, float,
* float, float, float, float)
*/
public void ambientLight(float v1, float v2, float v3) {
showMethodWarning("ambientLight");
}
/**
* @param x x-coordinate of the light
* @param y y-coordinate of the light
* @param z z-coordinate of the light
*/
public void ambientLight(float v1, float v2, float v3,
float x, float y, float z) {
showMethodWarning("ambientLight");
}
/**
*
* Adds a directional light. Directional light comes from one direction and
* is stronger when hitting a surface squarely and weaker if it hits at a
* gentle angle. After hitting a surface, a directional lights scatters in
* all directions. Lights need to be included in the <b>draw()</b> to
* remain persistent in a looping program. Placing them in the
* <b>setup()</b> of a looping program will cause them to only have an
* effect the first time through the loop. The affect of the <b>v1</b>,
* <b>v2</b>, and <b>v3</b> parameters is determined by the current color
* mode. The <b>nx</b>, <b>ny</b>, and <b>nz</b> parameters specify the
* direction the light is facing. For example, setting <b>ny</b> to -1 will
* cause the geometry to be lit from below (the light is facing directly upward).
*
* @webref lights_camera:lights
* @webBrief Adds a directional light
* @usage web_application
* @param v1 red or hue value (depending on current color mode)
* @param v2 green or saturation value (depending on current color mode)
* @param v3 blue or brightness value (depending on current color mode)
* @param nx direction along the x-axis
* @param ny direction along the y-axis
* @param nz direction along the z-axis
* @see PGraphics#lights()
* @see PGraphics#ambientLight(float, float, float, float, float, float)
* @see PGraphics#pointLight(float, float, float, float, float, float)
* @see PGraphics#spotLight(float, float, float, float, float, float, float, float, float, float, float)
*/
public void directionalLight(float v1, float v2, float v3,
float nx, float ny, float nz) {
showMethodWarning("directionalLight");
}
/**
*
* Adds a point light. Lights need to be included in the <b>draw()</b> to remain
* persistent in a looping program. Placing them in the <b>setup()</b> of a
* looping program will cause them to only have an effect the first time through
* the loop. The <b>v1</b>, <b>v2</b>, and <b>v3</b> parameters are interpreted
* as either RGB or HSB values, depending on the current color mode. The
* <b>x</b>, <b>y</b>, and <b>z</b> parameters set the position of the light.
*
* @webref lights_camera:lights
* @webBrief Adds a point light
* @usage web_application
* @param v1 red or hue value (depending on current color mode)
* @param v2 green or saturation value (depending on current color mode)
* @param v3 blue or brightness value (depending on current color mode)
* @param x x-coordinate of the light
* @param y y-coordinate of the light
* @param z z-coordinate of the light
* @see PGraphics#lights()
* @see PGraphics#directionalLight(float, float, float, float, float, float)
* @see PGraphics#ambientLight(float, float, float, float, float, float)
* @see PGraphics#spotLight(float, float, float, float, float, float, float,
* float, float, float, float)
*/
public void pointLight(float v1, float v2, float v3,
float x, float y, float z) {
showMethodWarning("pointLight");
}
/**
*
* Adds a spotlight. Lights need to be included in the <b>draw()</b> to remain
* persistent in a looping program. Placing them in the <b>setup()</b> of a
* looping program will cause them to only have an effect the first time through
* the loop. The <b>v1</b>, <b>v2</b>, and <b>v3</b> parameters are interpreted
* as either RGB or HSB values, depending on the current color mode. The
* <b>x</b>, <b>y</b>, and <b>z</b> parameters specify the position of the light
* and <b>nx</b>, <b>ny</b>, <b>nz</b> specify the direction of light. The
* <b>angle</b> parameter affects angle of the spotlight cone, while
* <b>concentration</b> sets the bias of light focusing toward the center of
* that cone.
*
* @webref lights_camera:lights
* @webBrief Adds a spotlight
* @usage web_application
* @param v1 red or hue value (depending on current color mode)
* @param v2 green or saturation value (depending on current color
* mode)
* @param v3 blue or brightness value (depending on current color
* mode)
* @param x x-coordinate of the light
* @param y y-coordinate of the light
* @param z z-coordinate of the light
* @param nx direction along the x-axis
* @param ny direction along the y-axis
* @param nz direction along the z-axis
* @param angle angle of the spotlight cone
* @param concentration exponent determining the center bias of the cone
* @see PGraphics#lights()
* @see PGraphics#directionalLight(float, float, float, float, float, float)
* @see PGraphics#pointLight(float, float, float, float, float, float)
* @see PGraphics#ambientLight(float, float, float, float, float, float)
*/
public void spotLight(float v1, float v2, float v3,
float x, float y, float z,
float nx, float ny, float nz,
float angle, float concentration) {
showMethodWarning("spotLight");
}
/**
*
* Sets the falloff rates for point lights, spotlights, and ambient lights.
* Like <b>fill()</b>, it affects only the elements which are created after it
* in the code. The default value is <b>lightFalloff(1.0, 0.0, 0.0)</b>, and the
* parameters are used to calculate the falloff with the following
* equation:<br />
* <br />
* d = distance from light position to vertex position<br />
* falloff = 1 / (CONSTANT + d * LINEAR + (d*d) * QUADRATIC)<br />
* <br />
* Thinking about an ambient light with a falloff can be tricky. If you want a
* region of your scene to be ambient lit with one color and another region to
* be ambient lit with another color, you could use an ambient light with
* location and falloff. You can think of it as a point light that doesn't care
* which direction a surface is facing.
*
* @webref lights_camera:lights
* @webBrief Sets the falloff rates for point lights, spotlights, and ambient
* lights
* @usage web_application
* @param constant constant value or determining falloff
* @param linear linear value for determining falloff
* @param quadratic quadratic value for determining falloff
* @see PGraphics#lights()
* @see PGraphics#ambientLight(float, float, float, float, float, float)
* @see PGraphics#pointLight(float, float, float, float, float, float)
* @see PGraphics#spotLight(float, float, float, float, float, float, float,
* float, float, float, float)
* @see PGraphics#lightSpecular(float, float, float)
*/
public void lightFalloff(float constant, float linear, float quadratic) {
showMethodWarning("lightFalloff");
}
/**
*
* Sets the specular color for lights. Like <b>fill()</b>, it affects only
* the elements which are created after it in the code. Specular refers to
* light which bounces off a surface in a preferred direction (rather than
* bouncing in all directions like a diffuse light) and is used for
* creating highlights. The specular quality of a light interacts with the
* specular material qualities set through the <b>specular()</b> and
* <b>shininess()</b> functions.
*
* @webref lights_camera:lights
* @webBrief Sets the specular color for lights
* @usage web_application
* @param v1 red or hue value (depending on current color mode)
* @param v2 green or saturation value (depending on current color mode)
* @param v3 blue or brightness value (depending on current color mode)
* @see PGraphics#specular(float, float, float)
* @see PGraphics#lights()
* @see PGraphics#ambientLight(float, float, float, float, float, float)
* @see PGraphics#pointLight(float, float, float, float, float, float)
* @see PGraphics#spotLight(float, float, float, float, float, float, float, float, float, float, float)
*/
public void lightSpecular(float v1, float v2, float v3) {
showMethodWarning("lightSpecular");
}
//////////////////////////////////////////////////////////////
// BACKGROUND
/**
*
* The <b>background()</b> function sets the color used for the background of
* the Processing window. The default background is light gray. This function is
* typically used within <b>draw()</b> to clear the display window at the
* beginning of each frame, but it can be used inside <b>setup()</b> to set the
* background on the first frame of animation or if the background need only be
* set once. <br />
* <br />
* An image can also be used as the background for a sketch, although the
* image's width and height must match that of the sketch window. Images used
* with <b>background()</b> will ignore the current <b>tint()</b> setting. To
* resize an image to the size of the sketch window, use image.resize(width,
* height). <br />
* <br />
* It is not possible to use the transparency <b>alpha</b> parameter with
* background colors on the main drawing surface. It can only be used along with
* a <b>PGraphics</b> object and <b>createGraphics()</b>.
*
* <h3>Advanced</h3>
* <p>
* Clear the background with a color that includes an alpha value. This can only
* be used with objects created by createGraphics(), because the main drawing
* surface cannot be set transparent.
* </p>
* <p>
* It might be tempting to use this function to partially clear the screen on
* each frame, however that's not how this function works. When calling
* background(), the pixels will be replaced with pixels that have that level of
* transparency. To do a semi-transparent overlay, use fill() with alpha and
* draw a rectangle.
* </p>
*
* @webref color:setting
* @webBrief Sets the color used for the background of the Processing window
* @usage web_application
* @param rgb any value of the color datatype
* @see PGraphics#stroke(float)
* @see PGraphics#fill(float)
* @see PGraphics#tint(float)
* @see PGraphics#colorMode(int)
*/
public void background(int rgb) {
// if (((rgb & 0xff000000) == 0) && (rgb <= colorModeX)) {
// background((float) rgb);
//
// } else {
// if (format == RGB) {
// rgb |= 0xff000000; // ignore alpha for main drawing surface
// }
// colorCalcARGB(rgb, colorModeA);
// backgroundFromCalc();
// backgroundImpl();
// }
colorCalc(rgb);
backgroundFromCalc();
}
/**
* @param alpha opacity of the background
*/
public void background(int rgb, float alpha) {
// if (format == RGB) {
// background(rgb); // ignore alpha for main drawing surface
//
// } else {
// if (((rgb & 0xff000000) == 0) && (rgb <= colorModeX)) {
// background((float) rgb, alpha);
//
// } else {
// colorCalcARGB(rgb, alpha);
// backgroundFromCalc();
// backgroundImpl();
// }
// }
colorCalc(rgb, alpha);
backgroundFromCalc();
}
/**
* @param gray specifies a value between white and black
*/
public void background(float gray) {
colorCalc(gray);
backgroundFromCalc();
// backgroundImpl();
}
public void background(float gray, float alpha) {
if (format == RGB) {
background(gray); // ignore alpha for main drawing surface
} else {
colorCalc(gray, alpha);
backgroundFromCalc();
// backgroundImpl();
}
}
/**
* @param v1 red or hue value (depending on the current color mode)
* @param v2 green or saturation value (depending on the current color mode)
* @param v3 blue or brightness value (depending on the current color mode)
*/
public void background(float v1, float v2, float v3) {
colorCalc(v1, v2, v3);
backgroundFromCalc();
// backgroundImpl();
}
public void background(float v1, float v2, float v3, float alpha) {
colorCalc(v1, v2, v3, alpha);
backgroundFromCalc();
}
/**
* Clears the pixels within a buffer. This function only works on
* <b>PGraphics</b> objects created with the <b>createGraphics()</b>
* function. Unlike the main graphics context (the display window),
* pixels in additional graphics areas created with <b>createGraphics()</b>
* can be entirely or partially transparent. This function clears
* everything in a <b>PGraphics</b> object to make all the pixels
* 100% transparent.
*
* @webref color:setting
* @webBrief Clears the pixels within a buffer
*/
public void clear() {
if (primaryGraphics) {
showWarning("clear() can only be used with createGraphics()");
} else {
background(0, 0, 0, 0);
}
}
protected void backgroundFromCalc() {
backgroundR = calcR;
backgroundG = calcG;
backgroundB = calcB;
//backgroundA = (format == RGB) ? colorModeA : calcA;
// If drawing surface is opaque, this maxes out at 1.0. [fry 150513]
backgroundA = (format == RGB) ? 1 : calcA;
backgroundRi = calcRi;
backgroundGi = calcGi;
backgroundBi = calcBi;
backgroundAi = (format == RGB) ? 255 : calcAi;
backgroundAlpha = format != RGB && calcAlpha;
backgroundColor = calcColor;
backgroundImpl();
}
/**
* Takes an RGB or ARGB image and sets it as the background.
* The width and height of the image must be the same size as the sketch.
* Use image.resize(width, height) to make short work of such a task.<br/>
* <br/>
* Note that even if the image is set as RGB, the high 8 bits of each pixel
* should be set opaque (0xFF000000) because the image data will be copied
* directly to the screen, and non-opaque background images may have strange
* behavior. Use image.filter(OPAQUE) to handle this easily.<br/>
* <br/>
* When using 3D, this will also clear the zbuffer (if it exists).
*
* @param image PImage to set as background (must be same size as the sketch window)
*/
public void background(PImage image) {
if ((image.pixelWidth != pixelWidth) || (image.pixelHeight != pixelHeight)) {
throw new RuntimeException(ERROR_BACKGROUND_IMAGE_SIZE);
}
if ((image.format != RGB) && (image.format != ARGB)) {
throw new RuntimeException(ERROR_BACKGROUND_IMAGE_FORMAT);
}
backgroundColor = 0; // just zero it out for images
backgroundImpl(image);
}
/**
* Actually set the background image. This is separated from the error
* handling and other semantic goofiness that is shared across renderers.
*/
protected void backgroundImpl(PImage image) {
// blit image to the screen
set(0, 0, image);
}
/**
* Actual implementation of clearing the background, now that the
* internal variables for background color have been set. Called by the
* backgroundFromCalc() method, which is what all the other background()
* methods call once the work is done.
*/
protected void backgroundImpl() {
pushStyle();
pushMatrix();
resetMatrix();
noStroke();
fill(backgroundColor);
rect(0, 0, width, height);
popMatrix();
popStyle();
}
//////////////////////////////////////////////////////////////
// COLOR MODE
/**
*
* Changes the way Processing interprets color data. By default, the parameters
* for <b>fill()</b>, <b>stroke()</b>, <b>background()</b>, and <b>color()</b>
* are defined by values between 0 and 255 using the RGB color model. The
* <b>colorMode()</b> function is used to change the numerical range used for
* specifying colors and to switch color systems. For example, calling
* <b>colorMode(RGB, 1.0)</b> will specify that values are specified between 0
* and 1. The limits for defining colors are altered by setting the parameters
* <b>max</b>, <b>max1</b>, <b>max2</b>, <b>max3</b>, and <b>maxA</b>. <br />
* <br />
* After changing the range of values for colors with code like
* <b>colorMode(HSB, 360, 100, 100)</b>, those ranges remain in use until they
* are explicitly changed again. For example, after running <b>colorMode(HSB,
* 360, 100, 100)</b> and then changing back to <b>colorMode(RGB)</b>, the range
* for R will be 0 to 360 and the range for G and B will be 0 to 100. To avoid
* this, be explicit about the ranges when changing the color mode. For
* instance, instead of <b>colorMode(RGB)</b>, write <b>colorMode(RGB, 255, 255,
* 255)</b>.
*
* @webref color:setting
* @webBrief Changes the way Processing interprets color data
* @usage web_application
* @param mode Either RGB or HSB, corresponding to Red/Green/Blue and
* Hue/Saturation/Brightness
* @see PGraphics#background(float)
* @see PGraphics#fill(float)
* @see PGraphics#stroke(float)
*/
public void colorMode(int mode) {
colorMode(mode, colorModeX, colorModeY, colorModeZ, colorModeA);
}
/**
* @param max range for all color elements
*/
public void colorMode(int mode, float max) {
colorMode(mode, max, max, max, max);
}
/**
* @param max1 range for the red or hue depending on the current color mode
* @param max2 range for the green or saturation depending on the current color mode
* @param max3 range for the blue or brightness depending on the current color mode
*/
public void colorMode(int mode, float max1, float max2, float max3) {
colorMode(mode, max1, max2, max3, colorModeA);
}
/**
* @param maxA range for the alpha
*/
public void colorMode(int mode,
float max1, float max2, float max3, float maxA) {
colorMode = mode;
colorModeX = max1; // still needs to be set for hsb
colorModeY = max2;
colorModeZ = max3;
colorModeA = maxA;
// if color max values are all 1, then no need to scale
colorModeScale =
((maxA != 1) || (max1 != max2) || (max2 != max3) || (max3 != maxA));
// if color is rgb/0..255 this will make it easier for the
// red() green() etc functions
colorModeDefault = (colorMode == RGB) &&
(colorModeA == 255) && (colorModeX == 255) &&
(colorModeY == 255) && (colorModeZ == 255);
}
//////////////////////////////////////////////////////////////
// COLOR CALCULATIONS
// Given input values for coloring, these functions will fill the calcXxxx
// variables with values that have been properly filtered through the
// current colorMode settings.
// Renderers that need to subclass any drawing properties such as fill or
// stroke will usually want to override methods like fillFromCalc (or the
// same for stroke, ambient, etc.) That way the color calculations are
// covered by this based PGraphics class, leaving only a single function
// to override/implement in the subclass.
/**
* Set the fill to either a grayscale value or an ARGB int.
* <P>
* The problem with this code is that it has to detect between these two
* situations automatically. This is done by checking to see if the high bits
* (the alpha for 0xAA000000) is set, and if not, whether the color value
* that follows is less than colorModeX (first param passed to colorMode).
* <P>
* This auto-detection would break in the following situation:
* <PRE>size(256, 256);
* for (int i = 0; i < 256; i++) {
* color c = color(0, 0, 0, i);
* stroke(c);
* line(i, 0, i, 256);
* }</PRE>
* ...on the first time through the loop, where (i == 0), since the color
* itself is zero (black) then it would appear indistinguishable from code
* that reads "fill(0)". The solution is to use the four parameter versions
* of stroke or fill to more directly specify the desired result.
*/
protected void colorCalc(int rgb) {
if (((rgb & 0xff000000) == 0) && (rgb <= colorModeX)) {
colorCalc((float) rgb);
} else {
colorCalcARGB(rgb, colorModeA);
}
}
protected void colorCalc(int rgb, float alpha) {
if (((rgb & 0xff000000) == 0) && (rgb <= colorModeX)) { // see above
colorCalc((float) rgb, alpha);
} else {
colorCalcARGB(rgb, alpha);
}
}
  /**
   * Gray-value color calculation; uses the current alpha maximum
   * (colorModeA) as the implicit, fully-opaque alpha.
   */
  protected void colorCalc(float gray) {
    colorCalc(gray, colorModeA);
  }
protected void colorCalc(float gray, float alpha) {
if (gray > colorModeX) gray = colorModeX;
if (alpha > colorModeA) alpha = colorModeA;
if (gray < 0) gray = 0;
if (alpha < 0) alpha = 0;
calcR = colorModeScale ? (gray / colorModeX) : gray;
calcG = calcR;
calcB = calcR;
calcA = colorModeScale ? (alpha / colorModeA) : alpha;
calcRi = (int)(calcR*255); calcGi = (int)(calcG*255);
calcBi = (int)(calcB*255); calcAi = (int)(calcA*255);
calcColor = (calcAi << 24) | (calcRi << 16) | (calcGi << 8) | calcBi;
calcAlpha = (calcAi != 255);
}
  /**
   * Three-component color calculation; uses the current alpha maximum
   * (colorModeA) as the implicit, fully-opaque alpha.
   */
  protected void colorCalc(float x, float y, float z) {
    colorCalc(x, y, z, colorModeA);
  }
  /**
   * Full color calculation. Interprets (x, y, z) per the current colorMode
   * (RGB channels, or HSB hue/saturation/brightness), clamps everything into
   * range, and fills the calcR/G/B/A floats, the calcRi/Gi/Bi/Ai 0..255
   * integers, the packed calcColor, and the calcAlpha flag.
   */
  protected void colorCalc(float x, float y, float z, float a) {
    // Clamp each component into [0, max] for the current color mode.
    if (x > colorModeX) x = colorModeX;
    if (y > colorModeY) y = colorModeY;
    if (z > colorModeZ) z = colorModeZ;
    if (a > colorModeA) a = colorModeA;

    if (x < 0) x = 0;
    if (y < 0) y = 0;
    if (z < 0) z = 0;
    if (a < 0) a = 0;

    switch (colorMode) {
    case RGB:
      if (colorModeScale) {
        calcR = x / colorModeX;
        calcG = y / colorModeY;
        calcB = z / colorModeZ;
        calcA = a / colorModeA;
      } else {
        calcR = x; calcG = y; calcB = z; calcA = a;
      }
      break;

    case HSB:
      x /= colorModeX; // h
      y /= colorModeY; // s
      z /= colorModeZ; // b

      calcA = colorModeScale ? (a/colorModeA) : a;

      if (y == 0) { // saturation == 0
        // Zero saturation means a pure gray at the brightness level.
        calcR = calcG = calcB = z;
      } else {
        // Standard HSB->RGB conversion: 'which' selects one of the six
        // hue sextants, f is the position within it. Note x is already
        // clamped to <= 1, so (x - (int)x) * 6 stays in [0, 6).
        float which = (x - (int)x) * 6.0f;
        float f = which - (int)which;
        float p = z * (1.0f - y);
        float q = z * (1.0f - y * f);
        float t = z * (1.0f - (y * (1.0f - f)));

        switch ((int)which) {
        case 0: calcR = z; calcG = t; calcB = p; break;
        case 1: calcR = q; calcG = z; calcB = p; break;
        case 2: calcR = p; calcG = z; calcB = t; break;
        case 3: calcR = p; calcG = q; calcB = z; break;
        case 4: calcR = t; calcG = p; calcB = z; break;
        case 5: calcR = z; calcG = p; calcB = q; break;
        }
      }
      break;
    }
    // Derive the 0..255 integer channels and the packed ARGB value.
    calcRi = (int)(255*calcR); calcGi = (int)(255*calcG);
    calcBi = (int)(255*calcB); calcAi = (int)(255*calcA);
    calcColor = (calcAi << 24) | (calcRi << 16) | (calcGi << 8) | calcBi;
    calcAlpha = (calcAi != 255);
  }
/**
* Unpacks AARRGGBB color for direct use with colorCalc.
* <P>
* Handled here with its own function since this is independent
* of the color mode.
* <P>
* Strangely the old version of this code ignored the alpha
* value. not sure if that was a bug or what.
* <P>
* Note, no need for a bounds check for 'argb' since it's a 32-bit number.
* Bounds now checked on alpha, however (rev 0225).
*/
  protected void colorCalcARGB(int argb, float alpha) {
    if (alpha == colorModeA) {
      // Alpha is at its maximum: keep the packed value untouched.
      calcAi = (argb >> 24) & 0xff;
      calcColor = argb;
    } else {
      // Scale the embedded alpha byte by the (clamped) alpha ratio and
      // rebuild the packed color with the new alpha.
      calcAi = (int) (((argb >> 24) & 0xff) * PApplet.constrain((alpha / colorModeA), 0, 1));
      calcColor = (calcAi << 24) | (argb & 0xFFFFFF);
    }
    // Unpack the RGB bytes and mirror everything into the 0..1 floats.
    calcRi = (argb >> 16) & 0xff;
    calcGi = (argb >> 8) & 0xff;
    calcBi = argb & 0xff;
    calcA = calcAi / 255.0f;
    calcR = calcRi / 255.0f;
    calcG = calcGi / 255.0f;
    calcB = calcBi / 255.0f;
    calcAlpha = (calcAi != 255);
  }
//////////////////////////////////////////////////////////////
// COLOR DATATYPE STUFFING
// The 'color' primitive type in Processing syntax is in fact a 32-bit int.
// These functions handle stuffing color values into a 32-bit cage based
// on the current colorMode settings.
// These functions are really slow (because they take the current colorMode
// into account), but they're easy to use. Advanced users can write their
// own bit shifting operations to set up 'color' data types.
public final int color(int c) { // ignore
// if (((c & 0xff000000) == 0) && (c <= colorModeX)) {
// if (colorModeDefault) {
// // bounds checking to make sure the numbers aren't too high or low
// if (c > 255) c = 255; else if (c < 0) c = 0;
// return 0xff000000 | (c << 16) | (c << 8) | c;
// } else {
// colorCalc(c);
// }
// } else {
// colorCalcARGB(c, colorModeA);
// }
colorCalc(c);
return calcColor;
}
  /**
   * Stuff a gray level into a 32-bit color using the current colorMode.
   */
  public final int color(float gray) {  // ignore
    colorCalc(gray);
    return calcColor;
  }
/**
* @param c can be packed ARGB or a gray in this case
*/
public final int color(int c, int alpha) { // ignore
// if (colorModeDefault) {
// // bounds checking to make sure the numbers aren't too high or low
// if (c > 255) c = 255; else if (c < 0) c = 0;
// if (alpha > 255) alpha = 255; else if (alpha < 0) alpha = 0;
//
// return ((alpha & 0xff) << 24) | (c << 16) | (c << 8) | c;
// }
colorCalc(c, alpha);
return calcColor;
}
/**
* @param c can be packed ARGB or a gray in this case
*/
public final int color(int c, float alpha) { // ignore
// if (((c & 0xff000000) == 0) && (c <= colorModeX)) {
colorCalc(c, alpha);
// } else {
// colorCalcARGB(c, alpha);
// }
return calcColor;
}
  /** Gray plus alpha, packed via the current colorMode. */
  public final int color(float gray, float alpha) {  // ignore
    colorCalc(gray, alpha);
    return calcColor;
  }

  /** Three int components (RGB or HSB per colorMode), fully opaque. */
  public final int color(int v1, int v2, int v3) {  // ignore
    colorCalc(v1, v2, v3);
    return calcColor;
  }

  /** Three float components (RGB or HSB per colorMode), fully opaque. */
  public final int color(float v1, float v2, float v3) {  // ignore
    colorCalc(v1, v2, v3);
    return calcColor;
  }

  /** Three int components plus alpha, packed via the current colorMode. */
  public final int color(int v1, int v2, int v3, int a) {  // ignore
    colorCalc(v1, v2, v3, a);
    return calcColor;
  }

  /** Three float components plus alpha, packed via the current colorMode. */
  public final int color(float v1, float v2, float v3, float a) {  // ignore
    colorCalc(v1, v2, v3, a);
    return calcColor;
  }
//////////////////////////////////////////////////////////////
// COLOR DATATYPE EXTRACTION
// Vee have veys of making the colors talk.
/**
*
* Extracts the alpha value from a color.
*
* @webref color:creating & reading
* @webBrief Extracts the alpha value from a color
* @usage web_application
* @param rgb any value of the color datatype
* @see PGraphics#red(int)
* @see PGraphics#green(int)
* @see PGraphics#blue(int)
* @see PGraphics#hue(int)
* @see PGraphics#saturation(int)
* @see PGraphics#brightness(int)
*/
public final float alpha(int rgb) {
float outgoing = (rgb >> 24) & 0xff;
if (colorModeA == 255) return outgoing;
return (outgoing / 255.0f) * colorModeA;
}
/**
*
* Extracts the red value from a color, scaled to match current
* <b>colorMode()</b>. The value is always returned as a float, so be careful
* not to assign it to an int value.<br />
* <br />
* The <b>red()</b> function is easy to use and understand, but it is slower
* than a technique called bit shifting. When working in <b>colorMode(RGB,
* 255)</b>, you can achieve the same results as <b>red()</b> but with greater
* speed by using the right shift operator (<b>>></b>) with a bit mask. For
* example, the following two lines of code are equivalent means of getting the
* red value of the color value <b>c</b>:<br />
* <br />
*
* <pre>
* float r1 = red(c); // Simpler, but slower to calculate
* float r2 = c >> 16 & 0xFF; // Very fast to calculate
* </pre>
*
*
* @webref color:creating & reading
* @webBrief Extracts the red value from a color, scaled to match current
* <b>colorMode()</b>
* @usage web_application
* @param rgb any value of the color datatype
* @see PGraphics#green(int)
* @see PGraphics#blue(int)
* @see PGraphics#alpha(int)
* @see PGraphics#hue(int)
* @see PGraphics#saturation(int)
* @see PGraphics#brightness(int)
* @see_external rightshift
*/
public final float red(int rgb) {
float c = (rgb >> 16) & 0xff;
if (colorModeDefault) return c;
return (c / 255.0f) * colorModeX;
}
/**
*
* Extracts the green value from a color, scaled to match current
* <b>colorMode()</b>. The value is always returned as a float, so be careful
* not to assign it to an int value.<br />
* <br />
* The <b>green()</b> function is easy to use and understand, but it is slower
* than a technique called bit shifting. When working in <b>colorMode(RGB,
* 255)</b>, you can achieve the same results as <b>green()</b> but with greater
* speed by using the right shift operator (<b>>></b>) with a bit mask. For
* example, the following two lines of code are equivalent means of getting the
* green value of the color value <b>c</b>:<br />
* <br />
*
* <pre>
* float g1 = green(c); // Simpler, but slower to calculate
* float g2 = c >> 8 & 0xFF; // Very fast to calculate
* </pre>
*
*
* @webref color:creating & reading
* @webBrief Extracts the green value from a color, scaled to match current
* <b>colorMode()</b>
* @usage web_application
* @param rgb any value of the color datatype
* @see PGraphics#red(int)
* @see PGraphics#blue(int)
* @see PGraphics#alpha(int)
* @see PGraphics#hue(int)
* @see PGraphics#saturation(int)
* @see PGraphics#brightness(int)
* @see_external rightshift
*/
public final float green(int rgb) {
float c = (rgb >> 8) & 0xff;
if (colorModeDefault) return c;
return (c / 255.0f) * colorModeY;
}
/**
*
* Extracts the blue value from a color, scaled to match current
* <b>colorMode()</b>. The value is always returned as a float, so be careful
* not to assign it to an int value.<br />
* <br />
* The <b>blue()</b> function is easy to use and understand, but it is slower
* than a technique called bit masking. When working in <b>colorMode(RGB,
* 255)</b>, you can achieve the same results as <b>blue()</b> but with greater
* speed by using a bit mask to remove the other color components. For example,
* the following two lines of code are equivalent means of getting the blue
* value of the color value <b>c</b>:<br />
* <br />
*
* <pre>
* float b1 = blue(c); // Simpler, but slower to calculate
* float b2 = c & 0xFF; // Very fast to calculate
* </pre>
*
*
* @webref color:creating & reading
* @webBrief Extracts the blue value from a color, scaled to match current
* <b>colorMode()</b>
* @usage web_application
* @param rgb any value of the color datatype
* @see PGraphics#red(int)
* @see PGraphics#green(int)
* @see PGraphics#alpha(int)
* @see PGraphics#hue(int)
* @see PGraphics#saturation(int)
* @see PGraphics#brightness(int)
* @see_external rightshift
*/
public final float blue(int rgb) {
float c = (rgb) & 0xff;
if (colorModeDefault) return c;
return (c / 255.0f) * colorModeZ;
}
/**
*
* Extracts the hue value from a color.
*
* @webref color:creating & reading
* @webBrief Extracts the hue value from a color
* @usage web_application
* @param rgb any value of the color datatype
* @see PGraphics#red(int)
* @see PGraphics#green(int)
* @see PGraphics#blue(int)
* @see PGraphics#alpha(int)
* @see PGraphics#saturation(int)
* @see PGraphics#brightness(int)
*/
  public final float hue(int rgb) {
    // One-entry cache: hue/saturation/brightness on the same color reuse
    // the last RGBtoHSB conversion instead of recomputing it.
    if (rgb != cacheHsbKey) {
      Color.RGBtoHSB((rgb >> 16) & 0xff, (rgb >> 8) & 0xff,
                     rgb & 0xff, cacheHsbValue);
      cacheHsbKey = rgb;
    }
    // RGBtoHSB yields hue in 0..1; rescale to the colorMode hue range.
    return cacheHsbValue[0] * colorModeX;
  }
/**
*
* Extracts the saturation value from a color.
*
* @webref color:creating & reading
* @webBrief Extracts the saturation value from a color
* @usage web_application
* @param rgb any value of the color datatype
* @see PGraphics#red(int)
* @see PGraphics#green(int)
* @see PGraphics#blue(int)
* @see PGraphics#alpha(int)
* @see PGraphics#hue(int)
* @see PGraphics#brightness(int)
*/
  public final float saturation(int rgb) {
    // One-entry cache shared with hue()/brightness(); see hue() for details.
    if (rgb != cacheHsbKey) {
      Color.RGBtoHSB((rgb >> 16) & 0xff, (rgb >> 8) & 0xff,
                     rgb & 0xff, cacheHsbValue);
      cacheHsbKey = rgb;
    }
    // Saturation is 0..1 from RGBtoHSB; rescale to the colorMode range.
    return cacheHsbValue[1] * colorModeY;
  }
/**
* Extracts the brightness value from a color.
*
* @webref color:creating & reading
* @webBrief Extracts the brightness value from a color
* @usage web_application
* @param rgb any value of the color datatype
* @see PGraphics#red(int)
* @see PGraphics#green(int)
* @see PGraphics#blue(int)
* @see PGraphics#alpha(int)
* @see PGraphics#hue(int)
* @see PGraphics#saturation(int)
*/
  public final float brightness(int rgb) {
    // One-entry cache shared with hue()/saturation(); see hue() for details.
    if (rgb != cacheHsbKey) {
      Color.RGBtoHSB((rgb >> 16) & 0xff, (rgb >> 8) & 0xff,
                     rgb & 0xff, cacheHsbValue);
      cacheHsbKey = rgb;
    }
    // Brightness is 0..1 from RGBtoHSB; rescale to the colorMode range.
    return cacheHsbValue[2] * colorModeZ;
  }
//////////////////////////////////////////////////////////////
// COLOR DATATYPE INTERPOLATION
// Against our better judgement.
/**
*
* Calculates a color between two colors at a specific increment. The <b>amt</b>
* parameter is the amount to interpolate between the two values where 0.0 is
* equal to the first point, 0.1 is very near the first point, 0.5 is halfway in
* between, etc. <br />
* An amount below 0 will be treated as 0. Likewise, amounts above 1 will be
* capped at 1. This is different from the behavior of <b>lerp()</b>, but necessary
* because otherwise numbers outside the range will produce strange and
* unexpected colors.
*
* @webref color:creating & reading
* @webBrief Calculates a <b>color</b> or <b>colors</b> between two <b>colors</b> at a specific
* increment
* @usage web_application
* @param c1 interpolate from this color
* @param c2 interpolate to this color
* @param amt between 0.0 and 1.0
* @see PImage#blendColor(int, int, int)
* @see PGraphics#color(float, float, float, float)
* @see PApplet#lerp(float, float, float)
*/
  /** Instance variant: interpolates in the renderer's current colorMode. */
  public int lerpColor(int c1, int c2, float amt) {  // ignore
    return lerpColor(c1, c2, amt, colorMode);
  }
static float[] lerpColorHSB1;
static float[] lerpColorHSB2;
/**
* @nowebref
* Interpolate between two colors. Like lerp(), but for the
* individual color components of a color supplied as an int value.
*/
static public int lerpColor(int c1, int c2, float amt, int mode) {
if (amt < 0) amt = 0;
if (amt > 1) amt = 1;
if (mode == RGB) {
float a1 = ((c1 >> 24) & 0xff);
float r1 = (c1 >> 16) & 0xff;
float g1 = (c1 >> 8) & 0xff;
float b1 = c1 & 0xff;
float a2 = (c2 >> 24) & 0xff;
float r2 = (c2 >> 16) & 0xff;
float g2 = (c2 >> 8) & 0xff;
float b2 = c2 & 0xff;
return ((PApplet.round(a1 + (a2-a1)*amt) << 24) |
(PApplet.round(r1 + (r2-r1)*amt) << 16) |
(PApplet.round(g1 + (g2-g1)*amt) << 8) |
(PApplet.round(b1 + (b2-b1)*amt)));
} else if (mode == HSB) {
if (lerpColorHSB1 == null) {
lerpColorHSB1 = new float[3];
lerpColorHSB2 = new float[3];
}
float a1 = (c1 >> 24) & 0xff;
float a2 = (c2 >> 24) & 0xff;
int alfa = (PApplet.round(a1 + (a2-a1)*amt)) << 24;
Color.RGBtoHSB((c1 >> 16) & 0xff, (c1 >> 8) & 0xff, c1 & 0xff,
lerpColorHSB1);
Color.RGBtoHSB((c2 >> 16) & 0xff, (c2 >> 8) & 0xff, c2 & 0xff,
lerpColorHSB2);
/* If mode is HSB, this will take the shortest path around the
* color wheel to find the new color. For instance, red to blue
* will go red violet blue (backwards in hue space) rather than
* cycling through ROYGBIV.
*/
// Disabling rollover (wasn't working anyway) for 0126.
// Otherwise, it makes full spectrum scale impossible for
// those who might want it...in spite of how despicable
// a full spectrum scale might be.
// roll around when 0.9 to 0.1
// more than 0.5 away means that it should roll in the other direction
/*
float h1 = lerpColorHSB1[0];
float h2 = lerpColorHSB2[0];
if (Math.abs(h1 - h2) > 0.5f) {
if (h1 > h2) {
// i.e. h1 is 0.7, h2 is 0.1
h2 += 1;
} else {
// i.e. h1 is 0.1, h2 is 0.7
h1 += 1;
}
}
float ho = (PApplet.lerp(lerpColorHSB1[0], lerpColorHSB2[0], amt)) % 1.0f;
*/
float ho = PApplet.lerp(lerpColorHSB1[0], lerpColorHSB2[0], amt);
float so = PApplet.lerp(lerpColorHSB1[1], lerpColorHSB2[1], amt);
float bo = PApplet.lerp(lerpColorHSB1[2], lerpColorHSB2[2], amt);
return alfa | (Color.HSBtoRGB(ho, so, bo) & 0xFFFFFF);
}
return 0;
}
//////////////////////////////////////////////////////////////
// BEGIN/END RAW
/**
* Record individual lines and triangles by echoing them to another renderer.
*/
  /**
   * Record individual lines and triangles by echoing them to another renderer.
   * The target renderer is opened immediately with beginDraw().
   */
  public void beginRaw(PGraphics rawGraphics) {  // ignore
    this.raw = rawGraphics;
    rawGraphics.beginDraw();
  }
public void endRaw() { // ignore
if (raw != null) {
// for 3D, need to flush any geometry that's been stored for sorting
// (particularly if the ENABLE_DEPTH_SORT hint is set)
flush();
// just like beginDraw, this will have to be called because
// endDraw() will be happening outside of draw()
raw.endDraw();
raw.dispose();
raw = null;
}
}
  /** @return true while a raw-recording renderer is active (beginRaw .. endRaw). */
  public boolean haveRaw() {  // ignore
    return raw != null;
  }

  /** @return the active raw-recording renderer, or null if none. */
  public PGraphics getRaw() {  // ignore
    return raw;
  }
//////////////////////////////////////////////////////////////
// WARNINGS and EXCEPTIONS
static protected Map<String, Object> warnings;
/**
* Show a renderer error, and keep track of it so that it's only shown once.
* @param msg the error message (which will be stored for later comparison)
*/
static public void showWarning(String msg) { // ignore
if (warnings == null) {
warnings = new HashMap<>();
}
if (!warnings.containsKey(msg)) {
System.err.println(msg);
warnings.put(msg, new Object());
}
}
/**
* Version of showWarning() that takes a parsed String.
*/
  /**
   * Version of showWarning() that takes a format String plus arguments
   * (String.format semantics).
   */
  static public void showWarning(String msg, Object... args) {  // ignore
    showWarning(String.format(msg, args));
  }
/**
* Display a warning that the specified method is only available with 3D.
* @param method The method name (no parentheses)
*/
  /**
   * Display a warning that the specified method is only available with 3D.
   * @param method The method name (no parentheses)
   */
  static public void showDepthWarning(String method) {
    showWarning(method + "() can only be used with a renderer that " +
                "supports 3D, such as P3D.");
  }

  /**
   * Display a warning that the specified method that takes x, y, z parameters
   * can only be used with x and y parameters in this renderer.
   * @param method The method name (no parentheses)
   */
  static public void showDepthWarningXYZ(String method) {
    showWarning(method + "() with x, y, and z coordinates " +
                "can only be used with a renderer that " +
                "supports 3D, such as P3D. " +
                "Use a version without a z-coordinate instead.");
  }

  /**
   * Display a warning that the specified method is simply unavailable.
   * @param method The method name (no parentheses)
   */
  static public void showMethodWarning(String method) {
    showWarning(method + "() is not available with this renderer.");
  }

  /**
   * Error that a particular variation of a method is unavailable (even though
   * other variations are). For instance, if vertex(x, y, u, v) is not
   * available, but vertex(x, y) is just fine.
   * @param str description of the unavailable variation
   */
  static public void showVariationWarning(String str) {
    showWarning(str + " is not available with this renderer.");
  }

  /**
   * Display a warning that the specified method is not implemented, meaning
   * that it could be either a completely missing function, although other
   * variations of it may still work properly.
   * @param method The method name (no parentheses)
   */
  static public void showMissingWarning(String method) {
    showWarning(method + "(), or this particular variation of it, " +
                "is not available with this renderer.");
  }

  /**
   * Show a renderer-related exception that halts the program. Currently just
   * wraps the message as a RuntimeException and throws it, but might do
   * something more specific might be used in the future.
   */
  static public void showException(String msg) {  // ignore
    throw new RuntimeException(msg);
  }
/**
* Same as below, but defaults to a 12 point font, just as MacWrite intended.
*/
  /**
   * Same as below, but defaults to a 12 point font, just as MacWrite intended.
   */
  protected void defaultFontOrDeath(String method) {
    defaultFontOrDeath(method, 12);
  }

  /**
   * First try to create a default font, but if that's not possible, throw
   * an exception that halts the program because textFont() has not been used
   * prior to the specified method.
   *
   * @param method name of the caller, used in the error message
   * @param size point size for the fallback font
   * @throws RuntimeException when no parent sketch is available to build
   *         a default font from
   */
  protected void defaultFontOrDeath(String method, float size) {
    if (parent != null) {
      // Call textFont() so that subclasses can do necessary setup
      // https://github.com/processing/processing4/issues/303
      textFont(createDefaultFont(size));
    } else {
      throw new RuntimeException("Use textFont() before " + method + "()");
    }
  }
//////////////////////////////////////////////////////////////
// RENDERER SUPPORT QUERIES
/**
* Return true if this renderer should be drawn to the screen. Defaults to
* returning true, since nearly all renderers are on-screen beasts. But can
* be overridden for subclasses like PDF so that a window doesn't open up.
* <br/> <br/>
* A better name? showFrame, displayable, isVisible, visible, shouldDisplay,
* what to call this?
*/
  /**
   * Return true if this renderer should be drawn to the screen. Defaults to
   * returning true, since nearly all renderers are on-screen beasts. But can
   * be overridden for subclasses like PDF so that a window doesn't open up.
   */
  public boolean displayable() {  // ignore
    return true;
  }

  /**
   * Return true if this renderer supports 2D drawing. Defaults to true.
   */
  public boolean is2D() {  // ignore
    return true;
  }

  /**
   * Return true if this renderer supports 3D drawing. Defaults to false.
   */
  public boolean is3D() {  // ignore
    return false;
  }

  /**
   * Return true if this renderer uses OpenGL. Defaults to false.
   */
  public boolean isGL() {  // ignore
    return false;
  }

  /**
   * Return true when rendering at 2x pixel density (e.g. retina displays).
   */
  public boolean is2X() {
    return pixelDensity == 2;
  }
//////////////////////////////////////////////////////////////
// ASYNC IMAGE SAVING
  /**
   * Save the current frame to a file. Unless async saving is disabled via the
   * DISABLE_ASYNC_SAVEFRAME hint, the pixels are copied into a pooled target
   * image and written on a background thread to keep the sketch responsive.
   *
   * @param filename resolved against the sketch folder
   * @return true if the save was started (or completed synchronously);
   *         false when no pooled target image was available
   */
  @Override
  public boolean save(String filename) { // ignore

    if (hints[DISABLE_ASYNC_SAVEFRAME]) {
      // Synchronous fallback: write directly via PImage.save().
      return super.save(filename);
    }

    if (asyncImageSaver == null) {
      asyncImageSaver = new AsyncImageSaver();
    }

    if (!loaded) loadPixels();
    PImage target =
      asyncImageSaver.getAvailableTarget(pixelWidth, pixelHeight, format);
    if (target != null) {
      // Copy only the overlapping pixel range, then hand off to the saver.
      int count = PApplet.min(pixels.length, target.pixels.length);
      System.arraycopy(pixels, 0, target.pixels, 0, count);
      asyncImageSaver.saveTargetAsync(this, target, parent.sketchFile(filename));
      return true;
    }
    return false;
  }
  /** Hook for subclasses to adjust the pooled image before it is written. */
  protected void processImageBeforeAsyncSave(PImage image) { }

  /**
   * If there is running async save task for this file, blocks until it completes.
   * Has to be called on main thread because OpenGL overrides this and calls GL.
   *
   * @param filename resolved against the sketch folder, same as in save()
   */
  protected void awaitAsyncSaveCompletion(String filename) {
    if (asyncImageSaver != null) {
      asyncImageSaver.awaitAsyncSaveCompletion(parent.sketchFile(filename));
    }
  }

  // Shared background-save helper, lazily created by save().
  protected static AsyncImageSaver asyncImageSaver;
protected static | does |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestRefreshNamenodeReplicationConfig.java | {
"start": 1526,
"end": 1661
} | class ____ the replication related parameters in the namenode can
* be refreshed dynamically, without a namenode restart.
*/
public | tests |
java | apache__rocketmq | client/src/main/java/org/apache/rocketmq/client/impl/consumer/ConsumeMessagePopConcurrentlyService.java | {
"start": 2641,
"end": 14257
} | class ____ implements ConsumeMessageService {
private static final Logger log = LoggerFactory.getLogger(ConsumeMessagePopConcurrentlyService.class);
private final DefaultMQPushConsumerImpl defaultMQPushConsumerImpl;
private final DefaultMQPushConsumer defaultMQPushConsumer;
private final MessageListenerConcurrently messageListener;
private final BlockingQueue<Runnable> consumeRequestQueue;
private final ThreadPoolExecutor consumeExecutor;
private final String consumerGroup;
private final ScheduledExecutorService scheduledExecutorService;
public ConsumeMessagePopConcurrentlyService(DefaultMQPushConsumerImpl defaultMQPushConsumerImpl,
MessageListenerConcurrently messageListener) {
this.defaultMQPushConsumerImpl = defaultMQPushConsumerImpl;
this.messageListener = messageListener;
this.defaultMQPushConsumer = this.defaultMQPushConsumerImpl.getDefaultMQPushConsumer();
this.consumerGroup = this.defaultMQPushConsumer.getConsumerGroup();
this.consumeRequestQueue = new LinkedBlockingQueue<>();
this.consumeExecutor = new ThreadPoolExecutor(
this.defaultMQPushConsumer.getConsumeThreadMin(),
this.defaultMQPushConsumer.getConsumeThreadMax(),
1000 * 60,
TimeUnit.MILLISECONDS,
this.consumeRequestQueue,
new ThreadFactoryImpl("ConsumeMessageThread_"));
this.scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(new ThreadFactoryImpl("ConsumeMessageScheduledThread_"));
}
public void start() {
}
public void shutdown(long awaitTerminateMillis) {
this.scheduledExecutorService.shutdown();
ThreadUtils.shutdownGracefully(this.consumeExecutor, awaitTerminateMillis, TimeUnit.MILLISECONDS);
}
@Override
public void updateCorePoolSize(int corePoolSize) {
if (corePoolSize > 0
&& corePoolSize <= Short.MAX_VALUE
&& corePoolSize < this.defaultMQPushConsumer.getConsumeThreadMax()) {
this.consumeExecutor.setCorePoolSize(corePoolSize);
}
}
@Override
public void incCorePoolSize() {
}
@Override
public void decCorePoolSize() {
}
@Override
public int getCorePoolSize() {
return this.consumeExecutor.getCorePoolSize();
}
@Override
public ConsumeMessageDirectlyResult consumeMessageDirectly(MessageExt msg, String brokerName) {
ConsumeMessageDirectlyResult result = new ConsumeMessageDirectlyResult();
result.setOrder(false);
result.setAutoCommit(true);
List<MessageExt> msgs = new ArrayList<>();
msgs.add(msg);
MessageQueue mq = new MessageQueue();
mq.setBrokerName(brokerName);
mq.setTopic(msg.getTopic());
mq.setQueueId(msg.getQueueId());
ConsumeConcurrentlyContext context = new ConsumeConcurrentlyContext(mq);
this.defaultMQPushConsumerImpl.resetRetryAndNamespace(msgs, this.consumerGroup);
final long beginTime = System.currentTimeMillis();
log.info("consumeMessageDirectly receive new message: {}", msg);
try {
ConsumeConcurrentlyStatus status = this.messageListener.consumeMessage(msgs, context);
if (status != null) {
switch (status) {
case CONSUME_SUCCESS:
result.setConsumeResult(CMResult.CR_SUCCESS);
break;
case RECONSUME_LATER:
result.setConsumeResult(CMResult.CR_LATER);
break;
default:
break;
}
} else {
result.setConsumeResult(CMResult.CR_RETURN_NULL);
}
} catch (Throwable e) {
result.setConsumeResult(CMResult.CR_THROW_EXCEPTION);
result.setRemark(UtilAll.exceptionSimpleDesc(e));
log.warn("consumeMessageDirectly exception: {} Group: {} Msgs: {} MQ: {}",
UtilAll.exceptionSimpleDesc(e),
ConsumeMessagePopConcurrentlyService.this.consumerGroup,
msgs,
mq, e);
}
result.setSpentTimeMills(System.currentTimeMillis() - beginTime);
log.info("consumeMessageDirectly Result: {}", result);
return result;
}
@Override
public void submitConsumeRequest(List<MessageExt> msgs, ProcessQueue processQueue,
MessageQueue messageQueue, boolean dispathToConsume) {
throw new UnsupportedOperationException();
}
@Override
public void submitPopConsumeRequest(
final List<MessageExt> msgs,
final PopProcessQueue processQueue,
final MessageQueue messageQueue) {
final int consumeBatchSize = this.defaultMQPushConsumer.getConsumeMessageBatchMaxSize();
if (msgs.size() <= consumeBatchSize) {
ConsumeRequest consumeRequest = new ConsumeRequest(msgs, processQueue, messageQueue);
try {
this.consumeExecutor.submit(consumeRequest);
} catch (RejectedExecutionException e) {
this.submitConsumeRequestLater(consumeRequest);
}
} else {
for (int total = 0; total < msgs.size(); ) {
List<MessageExt> msgThis = new ArrayList<>(consumeBatchSize);
for (int i = 0; i < consumeBatchSize; i++, total++) {
if (total < msgs.size()) {
msgThis.add(msgs.get(total));
} else {
break;
}
}
ConsumeRequest consumeRequest = new ConsumeRequest(msgThis, processQueue, messageQueue);
try {
this.consumeExecutor.submit(consumeRequest);
} catch (RejectedExecutionException e) {
for (; total < msgs.size(); total++) {
msgThis.add(msgs.get(total));
}
this.submitConsumeRequestLater(consumeRequest);
}
}
}
}
public void processConsumeResult(
final ConsumeConcurrentlyStatus status,
final ConsumeConcurrentlyContext context,
final ConsumeRequest consumeRequest) {
if (consumeRequest.getMsgs().isEmpty()) {
return;
}
int ackIndex = context.getAckIndex();
String topic = consumeRequest.getMessageQueue().getTopic();
switch (status) {
case CONSUME_SUCCESS:
if (ackIndex >= consumeRequest.getMsgs().size()) {
ackIndex = consumeRequest.getMsgs().size() - 1;
}
int ok = ackIndex + 1;
int failed = consumeRequest.getMsgs().size() - ok;
this.getConsumerStatsManager().incConsumeOKTPS(consumerGroup, topic, ok);
this.getConsumerStatsManager().incConsumeFailedTPS(consumerGroup, topic, failed);
break;
case RECONSUME_LATER:
ackIndex = -1;
this.getConsumerStatsManager().incConsumeFailedTPS(consumerGroup, topic,
consumeRequest.getMsgs().size());
break;
default:
break;
}
//ack if consume success
for (int i = 0; i <= ackIndex; i++) {
this.defaultMQPushConsumerImpl.ackAsync(consumeRequest.getMsgs().get(i), consumerGroup);
consumeRequest.getPopProcessQueue().ack();
}
//consume later if consume fail
for (int i = ackIndex + 1; i < consumeRequest.getMsgs().size(); i++) {
MessageExt msgExt = consumeRequest.getMsgs().get(i);
consumeRequest.getPopProcessQueue().ack();
if (msgExt.getReconsumeTimes() >= this.defaultMQPushConsumerImpl.getMaxReconsumeTimes()) {
checkNeedAckOrDelay(msgExt);
continue;
}
int delayLevel = context.getDelayLevelWhenNextConsume();
changePopInvisibleTime(consumeRequest.getMsgs().get(i), consumerGroup, delayLevel);
}
}
private void checkNeedAckOrDelay(MessageExt msgExt) {
int[] delayLevelTable = this.defaultMQPushConsumerImpl.getPopDelayLevel();
long msgDelaytime = System.currentTimeMillis() - msgExt.getBornTimestamp();
if (msgDelaytime > delayLevelTable[delayLevelTable.length - 1] * 1000 * 2) {
log.warn("Consume too many times, ack message async. message {}", msgExt.toString());
this.defaultMQPushConsumerImpl.ackAsync(msgExt, consumerGroup);
} else {
int delayLevel = delayLevelTable.length - 1;
for (; delayLevel >= 0; delayLevel--) {
if (msgDelaytime >= delayLevelTable[delayLevel] * 1000) {
delayLevel++;
break;
}
}
changePopInvisibleTime(msgExt, consumerGroup, delayLevel);
log.warn("Consume too many times, but delay time {} not enough. changePopInvisibleTime to delayLevel {} . message key:{}",
msgDelaytime, delayLevel, msgExt.getKeys());
}
}
private void changePopInvisibleTime(final MessageExt msg, String consumerGroup, int delayLevel) {
if (0 == delayLevel) {
delayLevel = msg.getReconsumeTimes();
}
int[] delayLevelTable = this.defaultMQPushConsumerImpl.getPopDelayLevel();
int delaySecond = delayLevel >= delayLevelTable.length ? delayLevelTable[delayLevelTable.length - 1] : delayLevelTable[delayLevel];
String extraInfo = msg.getProperty(MessageConst.PROPERTY_POP_CK);
try {
this.defaultMQPushConsumerImpl.changePopInvisibleTimeAsync(msg.getTopic(), consumerGroup, extraInfo,
delaySecond * 1000L, new AckCallback() {
@Override
public void onSuccess(AckResult ackResult) {
}
@Override
public void onException(Throwable e) {
log.error("changePopInvisibleTimeAsync fail. msg:{} error info: {}", msg.toString(), e.toString());
}
});
} catch (Throwable t) {
log.error("changePopInvisibleTimeAsync fail, group:{} msg:{} errorInfo:{}", consumerGroup, msg.toString(), t.toString());
}
}
public ConsumerStatsManager getConsumerStatsManager() {
return this.defaultMQPushConsumerImpl.getConsumerStatsManager();
}
private void submitConsumeRequestLater(
final List<MessageExt> msgs,
final PopProcessQueue processQueue,
final MessageQueue messageQueue
) {
this.scheduledExecutorService.schedule(new Runnable() {
@Override
public void run() {
ConsumeMessagePopConcurrentlyService.this.submitPopConsumeRequest(msgs, processQueue, messageQueue);
}
}, 5000, TimeUnit.MILLISECONDS);
}
private void submitConsumeRequestLater(final ConsumeRequest consumeRequest
) {
this.scheduledExecutorService.schedule(new Runnable() {
@Override
public void run() {
ConsumeMessagePopConcurrentlyService.this.consumeExecutor.submit(consumeRequest);
}
}, 5000, TimeUnit.MILLISECONDS);
}
| ConsumeMessagePopConcurrentlyService |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/get/GetActionIT.java | {
"start": 3714,
"end": 54020
} | class ____ extends Plugin {
public static boolean enabled = false;
public static final AtomicInteger calls = new AtomicInteger();
@Override
public void onIndexModule(IndexModule indexModule) {
super.onIndexModule(indexModule);
if (enabled) {
indexModule.setReaderWrapper(indexService -> {
CheckedFunction<DirectoryReader, DirectoryReader, IOException> wrapper = EngineTestCase.randomReaderWrapper();
return reader -> {
calls.incrementAndGet();
return wrapper.apply(reader);
};
});
}
}
}
@Before
public void maybeEnableSearcherWrapper() {
SearcherWrapperPlugin.enabled = randomBoolean();
SearcherWrapperPlugin.calls.set(0);
}
    /**
     * End-to-end coverage of single-document GET on an index (or a write alias):
     * missing document, realtime vs non-realtime reads, stored-field and source
     * filtering, reads before/after refresh, version bumps via re-indexing, and
     * GET after delete. Refresh is disabled so realtime reads exercise the translog.
     */
    public void testSimpleGet() {
        assertAcked(
            prepareCreate("test").setMapping("field1", "type=keyword,store=true", "field2", "type=keyword,store=true")
                .setSettings(Settings.builder().put("index.refresh_interval", -1))
                .addAlias(new Alias("alias").writeIndex(randomFrom(true, false, null)))
        );
        ensureGreen();
        final Function<UnaryOperator<GetRequestBuilder>, GetResponse> docGetter = op -> getDocument(indexOrAlias(), "1", op);
        GetResponse response = docGetter.apply(UnaryOperator.identity());
        assertThat(response.isExists(), equalTo(false));
        logger.info("--> index doc 1");
        prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get();
        logger.info("--> non realtime get 1");
        // refresh_interval is -1, so a non-realtime get cannot see the unrefreshed doc
        response = docGetter.apply(r -> r.setRealtime(false));
        assertThat(response.isExists(), equalTo(false));
        logger.info("--> realtime get 1");
        response = docGetter.apply(UnaryOperator.identity());
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
        logger.info("--> realtime get 1 (no source, implicit)");
        response = docGetter.apply(r -> r.setStoredFields(Strings.EMPTY_ARRAY));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        Set<String> fields = new HashSet<>(response.getFields().keySet());
        assertThat(fields, equalTo(Collections.<String>emptySet()));
        assertThat(response.getSourceAsBytesRef(), nullValue());
        logger.info("--> realtime get 1 (no source, explicit)");
        response = docGetter.apply(r -> r.setFetchSource(false));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        fields = new HashSet<>(response.getFields().keySet());
        assertThat(fields, equalTo(Collections.<String>emptySet()));
        assertThat(response.getSourceAsBytesRef(), nullValue());
        logger.info("--> realtime get 1 (no type)");
        response = docGetter.apply(UnaryOperator.identity());
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
        logger.info("--> realtime fetch of field");
        // requesting only stored fields suppresses the _source in the response
        response = docGetter.apply(r -> r.setStoredFields("field1"));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsBytesRef(), nullValue());
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());
        logger.info("--> realtime fetch of field & source");
        response = docGetter.apply(r -> r.setStoredFields("field1").setFetchSource("field1", null));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap(), hasKey("field1"));
        assertThat(response.getSourceAsMap(), not(hasKey("field2")));
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());
        logger.info("--> realtime get 1");
        response = docGetter.apply(UnaryOperator.identity());
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
        logger.info("--> refresh the index, so we load it from it");
        refresh();
        logger.info("--> non realtime get 1 (loaded from index)");
        response = docGetter.apply(r -> r.setRealtime(false));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
        logger.info("--> realtime fetch of field (loaded from index)");
        response = docGetter.apply(r -> r.setStoredFields("field1"));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsBytesRef(), nullValue());
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());
        logger.info("--> realtime fetch of field & source (loaded from index)");
        response = docGetter.apply(r -> r.setStoredFields("field1").setFetchSource(true));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsBytesRef(), not(nullValue()));
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());
        logger.info("--> update doc 1");
        prepareIndex("test").setId("1").setSource("field1", "value1_1", "field2", "value2_1").get();
        logger.info("--> realtime get 1");
        response = docGetter.apply(UnaryOperator.identity());
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_1"));
        logger.info("--> update doc 1 again");
        prepareIndex("test").setId("1").setSource("field1", "value1_2", "field2", "value2_2").get();
        response = docGetter.apply(UnaryOperator.identity());
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_2"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_2"));
        DeleteResponse deleteResponse = client().prepareDelete("test", "1").get();
        assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
        response = docGetter.apply(UnaryOperator.identity());
        assertThat(response.isExists(), equalTo(false));
    }
    /**
     * A GET through an alias that does not resolve to exactly one write index must fail:
     * either the alias points at two indices with no write index, or its write index is
     * a different index than the one the alias routing targets.
     */
    public void testGetWithAliasPointingToMultipleIndices() {
        indicesAdmin().prepareCreate("index1").addAlias(new Alias("alias1").indexRouting("0")).get();
        if (randomBoolean()) {
            // second index shares the alias without being the write index
            indicesAdmin().prepareCreate("index2")
                .addAlias(new Alias("alias1").indexRouting("0").writeIndex(randomFrom(false, null)))
                .get();
        } else {
            indicesAdmin().prepareCreate("index3").addAlias(new Alias("alias1").indexRouting("1").writeIndex(true)).get();
        }
        DocWriteResponse indexResponse = prepareIndex("index1").setId("id").setSource(Collections.singletonMap("foo", "bar")).get();
        assertThat(indexResponse.status().getStatus(), equalTo(RestStatus.CREATED.getStatus()));
        assertThat(
            asInstanceOf(IllegalArgumentException.class, getDocumentFailure("alias1", "_alias_id", r -> r)).getMessage(),
            endsWith("can't execute a single index op")
        );
    }
static String indexOrAlias() {
return randomBoolean() ? "test" : "alias";
}
    /**
     * Synchronously executes a GET for {@code id} on {@code index}, after letting
     * {@code requestOperator} customize the request (realtime, stored fields, version, ...).
     */
    private static GetResponse getDocument(String index, String id, UnaryOperator<GetRequestBuilder> requestOperator) {
        return safeAwait(l -> getDocumentAsync(index, id, requestOperator, l));
    }
    /**
     * Executes a GET that is expected to fail and returns the unwrapped root cause
     * of the failure for the caller to assert on.
     */
    private static Throwable getDocumentFailure(String index, String id, UnaryOperator<GetRequestBuilder> requestOperator) {
        return ExceptionsHelper.unwrapCause(safeAwaitFailure(GetResponse.class, l -> getDocumentAsync(index, id, requestOperator, l)));
    }
    /**
     * Fires the customized GET request asynchronously and, before completing the listener,
     * asserts that the response is delivered on the dedicated GET thread pool.
     */
    private static void getDocumentAsync(
        String index,
        String id,
        UnaryOperator<GetRequestBuilder> requestOperator,
        ActionListener<GetResponse> listener
    ) {
        requestOperator.apply(client().prepareGet(index, id))
            .execute(ActionListener.runBefore(listener, () -> ThreadPool.assertCurrentThreadPool(ThreadPool.Names.GET)));
    }
    /**
     * Multi-GET over a mix of existing and missing ids: responses must come back in
     * request order, missing ids report {@code isExists() == false}, and per-item
     * stored-field filtering suppresses the source.
     */
    public void testSimpleMultiGet() throws Exception {
        assertAcked(
            prepareCreate("test").addAlias(new Alias("alias").writeIndex(randomFrom(true, false, null)))
                .setMapping("field", "type=keyword,store=true")
                .setSettings(Settings.builder().put("index.refresh_interval", -1))
        );
        ensureGreen();
        MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "1").get();
        assertThat(response.getResponses().length, equalTo(1));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));
        for (int i = 0; i < 10; i++) {
            prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get();
        }
        // ids 15 and 11 were never indexed and must come back as missing, in order
        response = client().prepareMultiGet()
            .add(indexOrAlias(), "1")
            .add(indexOrAlias(), "15")
            .add(indexOrAlias(), "3")
            .add(indexOrAlias(), "9")
            .add(indexOrAlias(), "11")
            .get();
        assertThat(response.getResponses().length, equalTo(5));
        assertThat(response.getResponses()[0].getId(), equalTo("1"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[1].getId(), equalTo("15"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getResponse().getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(false));
        assertThat(response.getResponses()[2].getId(), equalTo("3"));
        assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[3].getId(), equalTo("9"));
        assertThat(response.getResponses()[3].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[3].getResponse().getIndex(), equalTo("test"));
        assertThat(response.getResponses()[3].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[4].getId(), equalTo("11"));
        assertThat(response.getResponses()[4].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[4].getResponse().getIndex(), equalTo("test"));
        assertThat(response.getResponses()[4].getResponse().isExists(), equalTo(false));
        // multi get with specific field
        response = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "1").storedFields("field"))
            .add(new MultiGetRequest.Item(indexOrAlias(), "3").storedFields("field"))
            .get();
        assertThat(response.getResponses().length, equalTo(2));
        assertThat(response.getResponses()[0].getResponse().getSourceAsBytesRef(), nullValue());
        assertThat(response.getResponses()[0].getResponse().getField("field").getValues().get(0).toString(), equalTo("value1"));
    }
    /**
     * GET of a multi-valued stored field must return all values in order, both when
     * served realtime from the translog and, after a refresh, from stored fields in
     * the Lucene index.
     */
    public void testGetDocWithMultivaluedFields() throws Exception {
        String mapping1 = Strings.toString(
            XContentFactory.jsonBuilder()
                .startObject()
                .startObject("properties")
                .startObject("field")
                .field("type", "text")
                .field("store", true)
                .endObject()
                .endObject()
                .endObject()
        );
        assertAcked(prepareCreate("test").setMapping(mapping1));
        ensureGreen();
        final Function<UnaryOperator<GetRequestBuilder>, GetResponse> docGetter = op -> getDocument("test", "1", op);
        GetResponse response = docGetter.apply(UnaryOperator.identity());
        assertThat(response.isExists(), equalTo(false));
        prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get();
        response = docGetter.apply(r -> r.setStoredFields("field"));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        Set<String> fields = new HashSet<>(response.getFields().keySet());
        assertThat(fields, equalTo(singleton("field")));
        assertThat(response.getFields().get("field").getValues().size(), equalTo(2));
        assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1"));
        assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));
        // Now test values being fetched from stored fields.
        refresh();
        response = docGetter.apply(r -> r.setStoredFields("field"));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        fields = new HashSet<>(response.getFields().keySet());
        assertThat(fields, equalTo(singleton("field")));
        assertThat(response.getFields().get("field").getValues().size(), equalTo(2));
        assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1"));
        assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2"));
    }
    /**
     * Versioned GET: a matching or MATCH_ANY version succeeds while a stale/future version
     * raises {@link VersionConflictEngineException}; verified both realtime (translog) and
     * non-realtime (Lucene index, after refresh), before and after the version is bumped.
     */
    public void testGetWithVersion() {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1)));
        ensureGreen();
        final Function<UnaryOperator<GetRequestBuilder>, GetResponse> docGetter = op -> getDocument(indexOrAlias(), "1", op);
        GetResponse response = docGetter.apply(UnaryOperator.identity());
        assertThat(response.isExists(), equalTo(false));
        logger.info("--> index doc 1");
        prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get();
        // From translog:
        response = docGetter.apply(r -> r.setVersion(Versions.MATCH_ANY));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getVersion(), equalTo(1L));
        response = docGetter.apply(r -> r.setVersion(1));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getVersion(), equalTo(1L));
        assertThat(getDocumentFailure(indexOrAlias(), "1", r -> r.setVersion(2)), instanceOf(VersionConflictEngineException.class));
        // From Lucene index:
        refresh();
        response = docGetter.apply(r -> r.setVersion(Versions.MATCH_ANY).setRealtime(false));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(1L));
        response = docGetter.apply(r -> r.setVersion(1).setRealtime(false));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(1L));
        assertThat(
            getDocumentFailure(indexOrAlias(), "1", r -> r.setVersion(2).setRealtime(false)),
            instanceOf(VersionConflictEngineException.class)
        );
        logger.info("--> index doc 1 again, so increasing the version");
        prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").get();
        // From translog:
        response = docGetter.apply(r -> r.setVersion(Versions.MATCH_ANY));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2L));
        assertThat(getDocumentFailure(indexOrAlias(), "1", r -> r.setVersion(1)), instanceOf(VersionConflictEngineException.class));
        response = docGetter.apply(r -> r.setVersion(2));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2L));
        // From Lucene index:
        refresh();
        response = docGetter.apply(r -> r.setVersion(Versions.MATCH_ANY).setRealtime(false));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2L));
        assertThat(
            getDocumentFailure(indexOrAlias(), "1", r -> r.setVersion(1).setRealtime(false)),
            instanceOf(VersionConflictEngineException.class)
        );
        response = docGetter.apply(r -> r.setVersion(2).setRealtime(false));
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2L));
    }
    /**
     * Versioned multi-GET: per-item version checks succeed or fail independently
     * (MATCH_ANY always succeeds; a mismatching version yields a per-item
     * {@link VersionConflictEngineException} failure without failing the whole request).
     * Exercised realtime (translog) and non-realtime (Lucene, after refresh), both at
     * version 1 and after re-indexing bumps the documents to version 2.
     */
    public void testMultiGetWithVersion() throws Exception {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1)));
        ensureGreen();
        MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "1").get();
        assertThat(response.getResponses().length, equalTo(1));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));
        for (int i = 0; i < 3; i++) {
            prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get();
        }
        // Version from translog
        response = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(Versions.MATCH_ANY))
            .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(1))
            .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(2))
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("1"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[1].getId(), equalTo("1"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getFailure(), nullValue());
        assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[1].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[2].getFailure(), notNullValue());
        assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
        assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("[1]: version conflict"));
        assertThat(response.getResponses()[2].getFailure().getFailure(), instanceOf(VersionConflictEngineException.class));
        // Version from Lucene index
        refresh();
        response = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(Versions.MATCH_ANY))
            .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(1))
            .add(new MultiGetRequest.Item(indexOrAlias(), "1").version(2))
            .setRealtime(false)
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("1"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[1].getId(), equalTo("1"));
        assertThat(response.getResponses()[1].getFailure(), nullValue());
        assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[1].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[2].getFailure(), notNullValue());
        assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
        assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("[1]: version conflict"));
        assertThat(response.getResponses()[2].getFailure().getFailure(), instanceOf(VersionConflictEngineException.class));
        // re-index the same docs so their version becomes 2
        for (int i = 0; i < 3; i++) {
            prepareIndex("test").setId(Integer.toString(i)).setSource("field", "value" + i).get();
        }
        // Version from translog
        response = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(Versions.MATCH_ANY))
            .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(1))
            .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(2))
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("2"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
        assertThat(response.getResponses()[1].getFailure(), notNullValue());
        assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("[2]: version conflict"));
        assertThat(response.getResponses()[2].getId(), equalTo("2"));
        assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[2].getFailure(), nullValue());
        assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[2].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
        // Version from Lucene index
        refresh();
        response = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(Versions.MATCH_ANY))
            .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(1))
            .add(new MultiGetRequest.Item(indexOrAlias(), "2").version(2))
            .setRealtime(false)
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("2"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
        assertThat(response.getResponses()[1].getFailure(), notNullValue());
        assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("[2]: version conflict"));
        assertThat(response.getResponses()[2].getId(), equalTo("2"));
        assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[2].getFailure(), nullValue());
        assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[2].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
    }
    /**
     * Requesting a non-leaf (object) field via stored_fields must be rejected with an
     * IllegalArgumentException, both for a realtime get (translog) and after a flush.
     */
    public void testGetFieldsNonLeafField() throws Exception {
        assertAcked(
            prepareCreate("test").addAlias(new Alias("alias"))
                .setMapping(
                    jsonBuilder().startObject()
                        .startObject("_doc")
                        .startObject("properties")
                        .startObject("field1")
                        .startObject("properties")
                        .startObject("field2")
                        .field("type", "text")
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                )
                .setSettings(Settings.builder().put("index.refresh_interval", -1))
        );
        prepareIndex("test").setId("1")
            .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject())
            .get();
        IllegalArgumentException exc = asInstanceOf(
            IllegalArgumentException.class,
            getDocumentFailure(indexOrAlias(), "1", r -> r.setStoredFields("field1"))
        );
        assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field"));
        flush();
        exc = asInstanceOf(IllegalArgumentException.class, getDocumentFailure(indexOrAlias(), "1", r -> r.setStoredFields("field1")));
        assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field"));
    }
    /**
     * A deeply-nested stored leaf field ({@code field1.field2.field3.field4}) whose path
     * crosses arrays must return all leaf values, in order, from realtime gets and after
     * an explicit flush.
     */
    public void testGetFieldsComplexField() throws Exception {
        assertAcked(
            prepareCreate("my-index")
                // multi types in 5.6
                .setSettings(Settings.builder().put("index.refresh_interval", -1))
                .setMapping(
                    jsonBuilder().startObject()
                        .startObject("_doc")
                        .startObject("properties")
                        .startObject("field1")
                        .field("type", "object")
                        .startObject("properties")
                        .startObject("field2")
                        .field("type", "object")
                        .startObject("properties")
                        .startObject("field3")
                        .field("type", "object")
                        .startObject("properties")
                        .startObject("field4")
                        .field("type", "text")
                        .field("store", true)
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                        .endObject()
                )
        );
        // two array elements, each contributing one value for the nested leaf field
        BytesReference source = BytesReference.bytes(
            jsonBuilder().startObject()
                .startArray("field1")
                .startObject()
                .startObject("field2")
                .startArray("field3")
                .startObject()
                .field("field4", "value1")
                .endObject()
                .endArray()
                .endObject()
                .endObject()
                .startObject()
                .startObject("field2")
                .startArray("field3")
                .startObject()
                .field("field4", "value2")
                .endObject()
                .endArray()
                .endObject()
                .endObject()
                .endArray()
                .endObject()
        );
        logger.info("indexing documents");
        prepareIndex("my-index").setId("1").setSource(source, XContentType.JSON).get();
        logger.info("checking real time retrieval");
        String field = "field1.field2.field3.field4";
        GetResponse getResponse = getDocument("my-index", "1", r -> r.setStoredFields(field));
        assertThat(getResponse.isExists(), equalTo(true));
        assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
        assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
        assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));
        getResponse = getDocument("my-index", "1", r -> r.setStoredFields(field));
        assertThat(getResponse.isExists(), equalTo(true));
        assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
        assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
        assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));
        logger.info("waiting for recoveries to complete");
        // Flush fails if shard has ongoing recoveries, make sure the cluster is settled down
        ensureGreen();
        logger.info("flushing");
        BroadcastResponse flushResponse = indicesAdmin().prepareFlush("my-index").setForce(true).get();
        if (flushResponse.getSuccessfulShards() == 0) {
            StringBuilder sb = new StringBuilder("failed to flush at least one shard. total shards [").append(
                flushResponse.getTotalShards()
            ).append("], failed shards: [").append(flushResponse.getFailedShards()).append("]");
            for (DefaultShardOperationFailedException failure : flushResponse.getShardFailures()) {
                sb.append("\nShard failure: ").append(failure);
            }
            fail(sb.toString());
        }
        logger.info("checking post-flush retrieval");
        getResponse = getDocument("my-index", "1", r -> r.setStoredFields(field));
        assertThat(getResponse.isExists(), equalTo(true));
        assertThat(getResponse.getField(field).getValues().size(), equalTo(2));
        assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1"));
        assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2"));
    }
    /**
     * A {@code completion} ("suggest") field is never stored, so fetching it via GET
     * must return null regardless of where the document lives: translog only, translog
     * plus index (after refresh), or index only (after flush).
     */
    public void testUngeneratedFieldsThatAreNeverStored() throws IOException {
        String createIndexSource = """
            {
              "settings": {
                "index.translog.flush_threshold_size": "1pb",
                "refresh_interval": "-1"
              },
              "mappings": {
                "_doc": {
                  "properties": {
                    "suggest": {
                      "type": "completion"
                    }
                  }
                }
              }
            }""";
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource, XContentType.JSON));
        ensureGreen();
        String doc = """
            {
              "suggest": {
                "input": [
                  "Nevermind",
                  "Nirvana"
                ]
              }
            }""";
        index("test", "1", doc);
        String[] fieldsList = { "suggest" };
        // before refresh - document is only in translog
        assertGetFieldsAlwaysNull(indexOrAlias(), "1", fieldsList);
        refresh();
        // after refresh - document is in translog and also indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "1", fieldsList);
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "1", fieldsList);
    }
public void testUngeneratedFieldsThatAreAlwaysStored() throws IOException {
String createIndexSource = """
{
"settings": {
"index.translog.flush_threshold_size": "1pb",
"refresh_interval": "-1"
}
}""";
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource, XContentType.JSON));
ensureGreen();
prepareIndex("test").setId("1").setRouting("routingValue").setId("1").setSource("{}", XContentType.JSON).get();
String[] fieldsList = { "_routing" };
// before refresh - document is only in translog
assertGetFieldsAlwaysWorks(indexOrAlias(), "1", fieldsList, "routingValue");
refresh();
// after refresh - document is in translog and also indexed
assertGetFieldsAlwaysWorks(indexOrAlias(), "1", fieldsList, "routingValue");
flush();
// after flush - document is in not anymore translog - only indexed
assertGetFieldsAlwaysWorks(indexOrAlias(), "1", fieldsList, "routingValue");
}
    /**
     * {@code _routing} is stored outside the document source; fetching it must work
     * in every storage state: translog only, translog plus index (after refresh),
     * and index only (after flush).
     */
    public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException {
        String createIndexSource = """
            {
              "settings": {
                "index.translog.flush_threshold_size": "1pb",
                "refresh_interval": "-1"
              }
            }""";
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource, XContentType.JSON));
        ensureGreen();
        String doc = """
            {
              "text": "some text."
            }
            """;
        prepareIndex("test").setId("1").setSource(doc, XContentType.JSON).setRouting("1").get();
        String[] fieldsList = { "_routing" };
        // before refresh - document is only in translog
        assertGetFieldsAlwaysWorks(indexOrAlias(), "1", fieldsList, "1");
        refresh();
        // after refresh - document is in translog and also indexed
        assertGetFieldsAlwaysWorks(indexOrAlias(), "1", fieldsList, "1");
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysWorks(indexOrAlias(), "1", fieldsList, "1");
    }
    /**
     * {@code _field_names} is a generated, unstored metadata field: fetching it must
     * return null in all storage states (translog only / refreshed / flushed).
     */
    public void testGeneratedStringFieldsUnstored() throws IOException {
        indexSingleDocumentWithStringFieldsGeneratedFromText(false, randomBoolean());
        String[] fieldsList = { "_field_names" };
        // before refresh - document is only in translog
        assertGetFieldsAlwaysNull(indexOrAlias(), "1", fieldsList);
        refresh();
        // after refresh - document is in translog and also indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "1", fieldsList);
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "1", fieldsList);
    }
    /**
     * With {@code store: true} on the text fields, fetching them must always work while
     * {@code _field_names} (never stored) must always come back null, before and after flush.
     */
    public void testGeneratedStringFieldsStored() throws IOException {
        indexSingleDocumentWithStringFieldsGeneratedFromText(true, randomBoolean());
        String[] fieldsList = { "text1", "text2" };
        String[] alwaysNotStoredFieldsList = { "_field_names" };
        assertGetFieldsAlwaysWorks(indexOrAlias(), "1", fieldsList);
        assertGetFieldsNull(indexOrAlias(), "1", alwaysNotStoredFieldsList);
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysWorks(indexOrAlias(), "1", fieldsList);
        assertGetFieldsNull(indexOrAlias(), "1", alwaysNotStoredFieldsList);
    }
void indexSingleDocumentWithStringFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
String storedString = stored ? "true" : "false";
String createIndexSource = Strings.format("""
{
"settings": {
"index.translog.flush_threshold_size": "1pb",
"refresh_interval": "-1"
},
"mappings": {
"_doc": {
"_source": {
"enabled": %s
},
"properties": {
"text1": {
"type": "text",
"store": "%s"
},
"text2": {
"type": "text",
"store": "%s"
}
}
}
}
}""", sourceEnabled, storedString, storedString);
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource, XContentType.JSON));
ensureGreen();
String doc = """
{
"text1": "some text."
, "text2": "more text."
}
""";
index("test", "1", doc);
}
    /**
     * Multi-GET should acquire (and hence wrap) at most one shared index searcher per shard,
     * serving as many items as possible from the translog. The counting reader-wrapper plugin
     * verifies the expected number of wrapper invocations: one per translog-served id when
     * realtime, plus one shared searcher whenever at least one id has to be read from the index.
     */
    public void testAvoidWrappingSearcherInMultiGet() {
        SearcherWrapperPlugin.enabled = true;
        assertAcked(
            prepareCreate("test").setMapping("f", "type=keyword")
                .setSettings(indexSettings(1, 0).put("index.refresh_interval", "-1").put("index.routing.rebalance.enable", "none"))
        );
        // start tracking translog locations in the live version map
        {
            index("test", "0", Map.of("f", "empty"));
            getDocument("test", "0", r -> r.setRealtime(true));
            refresh("test");
        }
        // indexedDocs: latest state; visibleDocs: state as of the last refresh; pendingIds: unrefreshed writes
        Map<String, String> indexedDocs = new HashMap<>();
        indexedDocs.put("0", "empty");
        Map<String, String> visibleDocs = new HashMap<>(indexedDocs);
        Set<String> pendingIds = new HashSet<>();
        int iters = between(1, 20);
        for (int iter = 0; iter < iters; iter++) {
            int numDocs = randomIntBetween(1, 20);
            BulkRequestBuilder bulk = client().prepareBulk();
            for (int i = 0; i < numDocs; i++) {
                String id = Integer.toString(between(1, 50));
                String value = "v-" + between(1, 1000);
                indexedDocs.put(id, value);
                pendingIds.add(id);
                bulk.add(new IndexRequest("test").id(id).source("f", value));
            }
            assertNoFailures(bulk.get());
            SearcherWrapperPlugin.calls.set(0);
            boolean realTime = randomBoolean();
            MultiGetRequestBuilder mget = client().prepareMultiGet().setRealtime(realTime);
            List<String> ids = randomSubsetOf(between(1, indexedDocs.size()), indexedDocs.keySet());
            Randomness.shuffle(ids);
            for (String id : ids) {
                mget.add("test", id);
            }
            MultiGetResponse resp = mget.get();
            Map<String, String> expected = realTime ? indexedDocs : visibleDocs;
            int getFromTranslog = 0;
            for (int i = 0; i < ids.size(); i++) {
                String id = ids.get(i);
                MultiGetItemResponse item = resp.getResponses()[i];
                assertThat(item.getId(), equalTo(id));
                if (expected.containsKey(id)) {
                    assertTrue(item.getResponse().isExists());
                    assertThat(item.getResponse().getSource().get("f"), equalTo(expected.get(id)));
                } else {
                    assertFalse(item.getResponse().isExists());
                }
                if (realTime && pendingIds.contains(id)) {
                    getFromTranslog++;
                }
            }
            // one wrapper call per translog read, plus one shared searcher if any id missed the translog
            int expectedCalls = getFromTranslog == ids.size() ? getFromTranslog : getFromTranslog + 1;
            assertThat(SearcherWrapperPlugin.calls.get(), equalTo(expectedCalls));
            if (randomBoolean()) {
                refresh("test");
                visibleDocs = new HashMap<>(indexedDocs);
                pendingIds.clear();
            }
        }
    }
/**
 * A get against a remote-cluster index expression ("cluster:index") must be rejected
 * with an {@link IllegalArgumentException} carrying an explanatory message.
 */
public void testGetRemoteIndex() {
    final String expectedMessage =
        "Cross-cluster calls are not supported in this context but remote indices were requested: [cluster:index]";
    IllegalArgumentException rejection = asInstanceOf(
        IllegalArgumentException.class,
        getDocumentFailure("cluster:index", "id", request -> request)
    );
    assertEquals(expectedMessage, rejection.getMessage());
}
/**
 * Verifies real-time get of documents containing nested fields across all {@code _source}
 * modes (STORED, SYNTHETIC, DISABLED):
 * <ul>
 *   <li>STORED: translog-served source is byte-identical to what was indexed;</li>
 *   <li>SYNTHETIC: source is reconstructed in a normalized form (note the re-ordered /
 *       deduplicated arrays in the expected strings below);</li>
 *   <li>DISABLED: no source is returned.</li>
 * </ul>
 * It also checks that non-realtime gets cannot see unrefreshed docs, and that after a
 * refresh the Lucene-served source matches the translog-served source byte-for-byte.
 */
public void testRealTimeGetNestedFields() {
String index = "test";
SourceFieldMapper.Mode sourceMode = randomFrom(SourceFieldMapper.Mode.values());
assertAcked(
prepareCreate(index).setMapping("title", "type=keyword", "author", "type=nested")
.setSettings(
indexSettings(1, 0).put("index.refresh_interval", -1)
.put(IndexSettings.INDEX_MAPPER_SOURCE_MODE_SETTING.getKey(), sourceMode)
)
);
ensureGreen();
String source0 = """
{
"title": "t0",
"author": [
{
"name": "a0"
}
]
}
""";
prepareIndex(index).setRefreshPolicy(WriteRequest.RefreshPolicy.NONE).setId("0").setSource(source0, XContentType.JSON).get();
// start tracking translog locations
assertTrue(client().prepareGet(index, "0").setRealtime(true).get().isExists());
// doc 1: single-element arrays
String source1 = """
{
"title": ["t1"],
"author": [
{
"name": "a1"
}
]
}
""";
prepareIndex(index).setRefreshPolicy(WriteRequest.RefreshPolicy.NONE).setId("1").setSource(source1, XContentType.JSON).get();
// doc 2: multi-element arrays for both the keyword and the nested field
String source2 = """
{
"title": ["t1", "t2"],
"author": [
{
"name": "a1"
},
{
"name": "a2"
}
]
}
""";
prepareIndex(index).setRefreshPolicy(WriteRequest.RefreshPolicy.NONE).setId("2").setSource(source2, XContentType.JSON).get();
// doc 3: no nested field at all, unsorted title values
String source3 = """
{
"title": ["t1", "t3", "t2"]
}
""";
prepareIndex(index).setRefreshPolicy(WriteRequest.RefreshPolicy.NONE).setId("3").setSource(source3, XContentType.JSON).get();
// real-time gets below are served from the translog (no refresh has happened yet)
GetResponse translog1 = client().prepareGet(index, "1").setRealtime(true).get();
GetResponse translog2 = client().prepareGet(index, "2").setRealtime(true).get();
GetResponse translog3 = client().prepareGet(index, "3").setRealtime(true).get();
assertTrue(translog1.isExists());
assertTrue(translog2.isExists());
assertTrue(translog3.isExists());
switch (sourceMode) {
case STORED -> {
assertThat(translog1.getSourceAsBytesRef().utf8ToString(), equalTo(source1));
assertThat(translog2.getSourceAsBytesRef().utf8ToString(), equalTo(source2));
assertThat(translog3.getSourceAsBytesRef().utf8ToString(), equalTo(source3));
}
case SYNTHETIC -> {
// synthetic source is normalized: single-element arrays unwrapped, values sorted
assertThat(translog1.getSourceAsBytesRef().utf8ToString(), equalTo("""
{"author":{"name":"a1"},"title":"t1"}"""));
assertThat(translog2.getSourceAsBytesRef().utf8ToString(), equalTo("""
{"author":[{"name":"a1"},{"name":"a2"}],"title":["t1","t2"]}"""));
assertThat(translog3.getSourceAsBytesRef().utf8ToString(), equalTo("""
{"title":["t1","t2","t3"]}"""));
}
case DISABLED -> {
assertNull(translog1.getSourceAsBytesRef());
assertNull(translog2.getSourceAsBytesRef());
assertNull(translog3.getSourceAsBytesRef());
}
}
// without realtime the unrefreshed docs are invisible
assertFalse(client().prepareGet(index, "1").setRealtime(false).get().isExists());
assertFalse(client().prepareGet(index, "2").setRealtime(false).get().isExists());
assertFalse(client().prepareGet(index, "3").setRealtime(false).get().isExists());
refresh(index);
// after refresh, gets are served from Lucene regardless of the realtime flag
GetResponse lucene1 = client().prepareGet(index, "1").setRealtime(randomBoolean()).get();
GetResponse lucene2 = client().prepareGet(index, "2").setRealtime(randomBoolean()).get();
GetResponse lucene3 = client().prepareGet(index, "3").setRealtime(randomBoolean()).get();
assertTrue(lucene1.isExists());
assertTrue(lucene2.isExists());
assertTrue(lucene3.isExists());
// translog-served and Lucene-served source must agree byte-for-byte
assertThat(translog1.getSourceAsBytesRef(), equalBytes(lucene1.getSourceAsBytesRef()));
assertThat(translog2.getSourceAsBytesRef(), equalBytes(lucene2.getSourceAsBytesRef()));
assertThat(translog3.getSourceAsBytesRef(), equalBytes(lucene3.getSourceAsBytesRef()));
}
/** Convenience overload of {@link #assertGetFieldsAlwaysWorks(String, String, String[], String)} without routing. */
private void assertGetFieldsAlwaysWorks(String index, String docId, String[] fields) {
assertGetFieldsAlwaysWorks(index, docId, fields, null);
}
/**
 * Asserts that every listed field is returned by both get and multi-get, fetching each field
 * twice ("always"): the repeated lookup must succeed just like the first one.
 */
private void assertGetFieldsAlwaysWorks(String index, String docId, String[] fields, @Nullable String routing) {
    for (String field : fields) {
        // deliberately fetch the same field twice — repeated gets must behave identically
        for (int attempt = 0; attempt < 2; attempt++) {
            assertGetFieldWorks(index, docId, field, routing);
        }
    }
}
/**
 * Asserts that the document exists and the given stored field is present, via both the
 * single-get and the multi-get API.
 */
private void assertGetFieldWorks(String index, String docId, String field, @Nullable String routing) {
    // single get
    GetResponse getResponse = getDocument(index, docId, field, routing);
    assertTrue(getResponse.isExists());
    assertThat(getResponse.getId(), equalTo(docId));
    assertNotNull(getResponse.getField(field));
    // multi get must agree
    GetResponse mgetResponse = multiGetDocument(index, docId, field, routing);
    assertTrue(mgetResponse.isExists());
    assertThat(mgetResponse.getId(), equalTo(docId));
    assertNotNull(mgetResponse.getField(field));
}
/** Convenience overload of {@link #assertGetFieldsNull(String, String, String[], String)} without routing. */
protected void assertGetFieldsNull(String index, String docId, String[] fields) {
assertGetFieldsNull(index, docId, fields, null);
}
/**
 * Asserts that the document exists but none of the listed fields are returned as stored fields,
 * checking each field once via get and multi-get.
 */
protected void assertGetFieldsNull(String index, String docId, String[] fields, @Nullable String routing) {
    for (int i = 0; i < fields.length; i++) {
        assertGetFieldNull(index, docId, fields[i], routing);
    }
}
/** Convenience overload of {@link #assertGetFieldsAlwaysNull(String, String, String[], String)} without routing. */
protected void assertGetFieldsAlwaysNull(String index, String docId, String[] fields) {
assertGetFieldsAlwaysNull(index, docId, fields, null);
}
/**
 * Asserts that every listed field is absent on every fetch ("always"): each field is requested
 * twice and must be null both times.
 */
protected void assertGetFieldsAlwaysNull(String index, String docId, String[] fields, @Nullable String routing) {
    for (String field : fields) {
        // deliberately fetch the same field twice — repeated gets must behave identically
        for (int attempt = 0; attempt < 2; attempt++) {
            assertGetFieldNull(index, docId, field, routing);
        }
    }
}
/**
 * Asserts that the document exists but the given field is not returned as a stored field,
 * via both the single-get and the multi-get API.
 */
protected void assertGetFieldNull(String index, String docId, String field, @Nullable String routing) {
    // single get
    GetResponse getResponse = getDocument(index, docId, field, routing);
    assertTrue(getResponse.isExists());
    assertThat(getResponse.getId(), equalTo(docId));
    assertNull(getResponse.getField(field));
    // multi get must agree
    GetResponse mgetResponse = multiGetDocument(index, docId, field, routing);
    assertTrue(mgetResponse.isExists());
    assertThat(mgetResponse.getId(), equalTo(docId));
    assertNull(mgetResponse.getField(field));
}
/**
 * Fetches a single document through the multi-get API, requesting one stored field,
 * and returns the embedded {@link GetResponse}.
 */
private GetResponse multiGetDocument(String index, String docId, String field, @Nullable String routing) {
    MultiGetRequest.Item item = new MultiGetRequest.Item(index, docId).storedFields(field);
    if (routing != null) {
        item.routing(routing);
    }
    MultiGetResponse response = client().prepareMultiGet().add(item).get();
    // exactly one item was requested, so exactly one response is expected
    assertThat(response.getResponses().length, equalTo(1));
    return response.getResponses()[0].getResponse();
}
/**
 * Fetches a single document through the get API, requesting one stored field and
 * optionally setting a routing value.
 */
private GetResponse getDocument(String index, String docId, String field, @Nullable String routing) {
    return getDocument(index, docId, request -> {
        if (routing != null) {
            request.setRouting(routing);
        }
        request.setStoredFields(field);
        return request;
    });
}
}
| SearcherWrapperPlugin |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java | {
"start": 1906,
"end": 8346
} | class ____ extends ESIntegTestCase {
static int numDocs, numTag1Docs;
@Override
public void setupSuiteScopeCluster() throws Exception {
createIndex("idx");
createIndex("idx2");
numDocs = randomIntBetween(5, 20);
numTag1Docs = randomIntBetween(1, numDocs - 1);
List<IndexRequestBuilder> builders = new ArrayList<>();
for (int i = 0; i < numTag1Docs; i++) {
builders.add(
prepareIndex("idx").setId("" + i)
.setSource(jsonBuilder().startObject().field("value", i + 1).field("tag", "tag1").endObject())
);
}
for (int i = numTag1Docs; i < numDocs; i++) {
XContentBuilder source = jsonBuilder().startObject()
.field("value", i)
.field("tag", "tag2")
.field("name", "name" + i)
.endObject();
builders.add(prepareIndex("idx").setId("" + i).setSource(source));
if (randomBoolean()) {
// randomly index the document twice so that we have deleted docs that match the filter
builders.add(prepareIndex("idx").setId("" + i).setSource(source));
}
}
prepareCreate("empty_bucket_idx").setMapping("value", "type=integer").get();
for (int i = 0; i < 2; i++) {
builders.add(
prepareIndex("empty_bucket_idx").setId("" + i).setSource(jsonBuilder().startObject().field("value", i * 2).endObject())
);
}
indexRandom(true, builders);
ensureSearchable();
}
public void testSimple() throws Exception {
assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1"))), response -> {
SingleBucketAggregation filter = response.getAggregations().get("tag1");
assertThat(filter, notNullValue());
assertThat(filter.getName(), equalTo("tag1"));
assertThat(filter.getDocCount(), equalTo((long) numTag1Docs));
});
}
// See NullPointer issue when filters are empty:
// https://github.com/elastic/elasticsearch/issues/8438
public void testEmptyFilterDeclarations() throws Exception {
QueryBuilder emptyFilter = new BoolQueryBuilder();
assertNoFailuresAndResponse(prepareSearch("idx").addAggregation(filter("tag1", emptyFilter)), response -> {
SingleBucketAggregation filter = response.getAggregations().get("tag1");
assertThat(filter, notNullValue());
assertThat(filter.getDocCount(), equalTo((long) numDocs));
});
}
public void testWithSubAggregation() throws Exception {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value").field("value"))),
response -> {
SingleBucketAggregation filter = response.getAggregations().get("tag1");
assertThat(filter, notNullValue());
assertThat(filter.getName(), equalTo("tag1"));
assertThat(filter.getDocCount(), equalTo((long) numTag1Docs));
assertThat((long) ((InternalAggregation) filter).getProperty("_count"), equalTo((long) numTag1Docs));
long sum = 0;
for (int i = 0; i < numTag1Docs; ++i) {
sum += i + 1;
}
assertThat(filter.getAggregations().asList().isEmpty(), is(false));
Avg avgValue = filter.getAggregations().get("avg_value");
assertThat(avgValue, notNullValue());
assertThat(avgValue.getName(), equalTo("avg_value"));
assertThat(avgValue.getValue(), equalTo((double) sum / numTag1Docs));
assertThat((double) ((InternalAggregation) filter).getProperty("avg_value.value"), equalTo((double) sum / numTag1Docs));
}
);
}
public void testAsSubAggregation() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
histogram("histo").field("value").interval(2L).subAggregation(filter("filter", matchAllQuery()))
),
response -> {
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, notNullValue());
assertThat(histo.getBuckets().size(), greaterThanOrEqualTo(1));
for (Histogram.Bucket bucket : histo.getBuckets()) {
SingleBucketAggregation filter = bucket.getAggregations().get("filter");
assertThat(filter, notNullValue());
assertEquals(bucket.getDocCount(), filter.getDocCount());
}
}
);
}
public void testWithContextBasedSubAggregation() throws Exception {
try {
prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value"))).get();
fail(
"expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source"
+ "context which the sub-aggregation can inherit"
);
} catch (ElasticsearchException e) {
assertThat(e.getMessage(), is("all shards failed"));
}
}
public void testEmptyAggregation() throws Exception {
assertNoFailuresAndResponse(
prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
.addAggregation(
histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(filter("filter", matchAllQuery()))
),
response -> {
assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
Histogram histo = response.getAggregations().get("histo");
assertThat(histo, Matchers.notNullValue());
Histogram.Bucket bucket = histo.getBuckets().get(1);
assertThat(bucket, Matchers.notNullValue());
SingleBucketAggregation filter = bucket.getAggregations().get("filter");
assertThat(filter, Matchers.notNullValue());
assertThat(filter.getName(), equalTo("filter"));
assertThat(filter.getDocCount(), is(0L));
}
);
}
}
| FilterIT |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/sink/constraint/EnforcerException.java | {
"start": 1161,
"end": 1612
} | class ____ extends TableRuntimeException {
private final String columnName;
private final String format;
public EnforcerException(String format, String columnName) {
super(String.format(format, columnName));
this.columnName = columnName;
this.format = format;
}
public String getColumnName() {
return columnName;
}
public String getFormat() {
return format;
}
}
| EnforcerException |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeTimeInterval.java | {
"start": 1177,
"end": 1809
} | class ____<T> extends Maybe<Timed<T>> {
final MaybeSource<T> source;
final TimeUnit unit;
final Scheduler scheduler;
final boolean start;
public MaybeTimeInterval(MaybeSource<T> source, TimeUnit unit, Scheduler scheduler, boolean start) {
this.source = source;
this.unit = unit;
this.scheduler = scheduler;
this.start = start;
}
@Override
protected void subscribeActual(@NonNull MaybeObserver<? super @NonNull Timed<T>> observer) {
source.subscribe(new TimeIntervalMaybeObserver<>(observer, unit, scheduler, start));
}
static final | MaybeTimeInterval |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/OperatorUidPathParameter.java | {
"start": 908,
"end": 1763
} | class ____ extends MessagePathParameter<String> {
/**
* This id must be defined to identify an operator on the client side before submit jobs.
* Otherwise, the query cannot be executed correctly. Note that we use operatorUid instead of
* operatorID because the latter is an internal runtime concept that cannot be recognized by the
* client.
*/
public static final String KEY = "operatorUid";
public OperatorUidPathParameter() {
super(KEY);
}
@Override
protected String convertFromString(String value) throws ConversionException {
return value;
}
@Override
protected String convertToString(String value) {
return value;
}
@Override
public String getDescription() {
return "string value that identifies an operator.";
}
}
| OperatorUidPathParameter |
java | netty__netty | codec-socks/src/main/java/io/netty/handler/codec/socksx/SocksMessage.java | {
"start": 740,
"end": 805
} | interface ____ all SOCKS protocol messages implement.
*/
public | that |
java | hibernate__hibernate-orm | hibernate-envers/src/main/java/org/hibernate/envers/EntityTrackingRevisionListener.java | {
"start": 651,
"end": 1075
} | class ____ mapped multiple times,
* potentially to different tables.
* @param entityId Identifier of modified entity.
* @param revisionType Modification type (addition, update or removal).
* @param revisionEntity An instance of the entity annotated with {@link RevisionEntity}.
*/
void entityChanged(
Class entityClass, String entityName, Object entityId, RevisionType revisionType,
Object revisionEntity);
}
| is |
java | google__dagger | javatests/dagger/internal/codegen/SetMultibindingValidationTest.java | {
"start": 1724,
"end": 3109
} | class ____ {",
"",
" @Provides",
" @IntoSet",
" static Produced<String> provideProducer() {",
" return null;",
" }",
"}");
// Entry points aren't needed because the check we care about here is a module validation
Source providesComponent = component("");
CompilerTests.daggerCompiler(providesModule, providesComponent)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(2);
subject.hasErrorContaining(
"@Provides methods with @IntoSet/@ElementsIntoSet must not return framework "
+ "types");
subject.hasErrorContaining("test.SetModule has errors")
.onSource(providesComponent)
.onLineContaining("@Component(modules = {SetModule.class})");
});
}
@Test
public void setBindingOfProduced_binds() {
Source bindsModule =
CompilerTests.javaSource(
"test.SetModule",
"package test;",
"",
"import dagger.Module;",
"import dagger.Binds;",
"import dagger.multibindings.IntoSet;",
"import dagger.producers.Produced;",
"",
"@Module",
"abstract | SetModule |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/annotations/JavaTypeRegistration.java | {
"start": 1496,
"end": 1608
} | interface ____ {
Class<?> javaType();
Class<? extends BasicJavaType<?>> descriptorClass();
}
| JavaTypeRegistration |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FilerConsumerDoneFileNoopTest.java | {
"start": 1080,
"end": 2501
} | class ____ extends ContextTestSupport {
private static final String TEST_FILE_NAME = "hello" + UUID.randomUUID() + ".txt";
@Test
public void testDoneFile() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(0);
// wait a bit and it should not pickup the written file as there are no
// done file
getMockEndpoint("mock:result").setResultMinimumWaitTime(50);
template.sendBodyAndHeader(fileUri(), "Hello World", Exchange.FILE_NAME, TEST_FILE_NAME);
assertMockEndpointsSatisfied();
resetMocks();
oneExchangeDone.reset();
getMockEndpoint("mock:result").expectedBodiesReceived("Hello World");
// write the done file
template.sendBodyAndHeader(fileUri(), "", Exchange.FILE_NAME, "done");
assertMockEndpointsSatisfied();
oneExchangeDone.matchesWaitTime();
// done file should be kept now
assertFileExists(testFile("done"));
// as well the original file should be kept due noop
assertFileExists(testFile(TEST_FILE_NAME));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(fileUri("?initialDelay=0&delay=10&doneFileName=done&noop=true")).to("mock:result");
}
};
}
}
| FilerConsumerDoneFileNoopTest |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/GuavaEventbusComponentBuilderFactory.java | {
"start": 6837,
"end": 8059
} | class ____
extends AbstractComponentBuilder<GuavaEventBusComponent>
implements GuavaEventbusComponentBuilder {
@Override
protected GuavaEventBusComponent buildConcreteComponent() {
return new GuavaEventBusComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "eventBus": ((GuavaEventBusComponent) component).setEventBus((com.google.common.eventbus.EventBus) value); return true;
case "bridgeErrorHandler": ((GuavaEventBusComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "listenerInterface": ((GuavaEventBusComponent) component).setListenerInterface((java.lang.Class) value); return true;
case "lazyStartProducer": ((GuavaEventBusComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((GuavaEventBusComponent) component).setAutowiredEnabled((boolean) value); return true;
default: return false;
}
}
}
} | GuavaEventbusComponentBuilderImpl |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/KubernetesSecretsComponentBuilderFactory.java | {
"start": 1392,
"end": 1922
} | interface ____ {
/**
* Kubernetes Secrets (camel-kubernetes)
* Perform operations on Kubernetes Secrets.
*
* Category: container,cloud
* Since: 2.17
* Maven coordinates: org.apache.camel:camel-kubernetes
*
* @return the dsl builder
*/
static KubernetesSecretsComponentBuilder kubernetesSecrets() {
return new KubernetesSecretsComponentBuilderImpl();
}
/**
* Builder for the Kubernetes Secrets component.
*/
| KubernetesSecretsComponentBuilderFactory |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/commons/support/ReflectionSupportTests.java | {
"start": 11971,
"end": 21284
} | class ____",
() -> ReflectionSupport.findAllClassesInModule("org.junit.platform.commons", null, allNames));
assertPreconditionViolationNotNullFor("name predicate",
() -> ReflectionSupport.findAllClassesInModule("org.junit.platform.commons", allTypes, null));
}
/**
* @since 1.11
*/
@SuppressWarnings("removal")
@Test
void findAllResourcesInModuleDelegates() {
assertEquals(
ReflectionUtils.findAllResourcesInModule("org.junit.platform.commons", ResourceFilter.of(__ -> true)),
ReflectionSupport.findAllResourcesInModule("org.junit.platform.commons", allResources));
}
/**
* @since 1.11
*/
@SuppressWarnings({ "DataFlowIssue", "removal" })
@Test
void findAllResourcesInModulePreconditions() {
assertPreconditionViolationNotNullOrEmptyFor("Module name",
() -> ReflectionSupport.findAllResourcesInModule(null, allResources));
assertPreconditionViolationNotNullFor("resourceFilter",
() -> ReflectionSupport.findAllResourcesInModule("org.junit.platform.commons", null));
}
/**
* @since 1.11
*/
@SuppressWarnings("removal")
@Test
void streamAllResourcesInModuleDelegates() {
assertEquals(
toSupportResourcesStream(ReflectionUtils.streamAllResourcesInModule("org.junit.platform.commons",
ResourceFilter.of(__ -> true))).toList(),
ReflectionSupport.streamAllResourcesInModule("org.junit.platform.commons", allResources).toList());
}
/**
* @since 1.11
*/
@SuppressWarnings({ "DataFlowIssue", "removal" })
@Test
void streamAllResourcesInModulePreconditions() {
assertPreconditionViolationNotNullOrEmptyFor("Module name",
() -> ReflectionSupport.streamAllResourcesInModule(null, allResources));
assertPreconditionViolationNotNullFor("resourceFilter",
() -> ReflectionSupport.streamAllResourcesInModule("org.junit.platform.commons", null));
}
@Test
void newInstanceDelegates() {
assertEquals(ReflectionUtils.newInstance(String.class, "foo"),
ReflectionSupport.newInstance(String.class, "foo"));
}
@SuppressWarnings("DataFlowIssue")
@Test
void newInstancePreconditions() {
assertPreconditionViolationNotNullFor("Class", () -> ReflectionSupport.newInstance(null));
assertPreconditionViolationNotNullFor("Argument array",
() -> ReflectionSupport.newInstance(String.class, (Object[]) null));
assertPreconditionViolationNotNullFor("Individual arguments",
() -> ReflectionSupport.newInstance(String.class, new Object[] { null }));
}
@Test
void invokeMethodDelegates() throws Exception {
var method = Boolean.class.getMethod("valueOf", String.class);
assertEquals(ReflectionUtils.invokeMethod(method, null, "true"),
ReflectionSupport.invokeMethod(method, null, "true"));
}
@SuppressWarnings("DataFlowIssue")
@Test
void invokeMethodPreconditions() throws Exception {
assertPreconditionViolationNotNullFor("Method", () -> ReflectionSupport.invokeMethod(null, null, "true"));
var method = Boolean.class.getMethod("toString");
assertPreconditionViolationFor(() -> ReflectionSupport.invokeMethod(method, null))//
.withMessage("Cannot invoke non-static method [" + method.toGenericString() + "] on a null target.");
}
@Test
void findFieldsDelegates() {
assertEquals(
ReflectionUtils.findFields(ReflectionSupportTests.class, allFields,
ReflectionUtils.HierarchyTraversalMode.BOTTOM_UP),
ReflectionSupport.findFields(ReflectionSupportTests.class, allFields, HierarchyTraversalMode.BOTTOM_UP));
assertEquals(
ReflectionUtils.findFields(ReflectionSupportTests.class, allFields,
ReflectionUtils.HierarchyTraversalMode.TOP_DOWN),
ReflectionSupport.findFields(ReflectionSupportTests.class, allFields, HierarchyTraversalMode.TOP_DOWN));
}
@SuppressWarnings("DataFlowIssue")
@Test
void findFieldsPreconditions() {
assertPreconditionViolationNotNullFor("Class",
() -> ReflectionSupport.findFields(null, allFields, HierarchyTraversalMode.BOTTOM_UP));
assertPreconditionViolationNotNullFor("Class",
() -> ReflectionSupport.findFields(null, allFields, HierarchyTraversalMode.TOP_DOWN));
assertPreconditionViolationNotNullFor("Predicate",
() -> ReflectionSupport.findFields(ReflectionSupportTests.class, null, HierarchyTraversalMode.BOTTOM_UP));
assertPreconditionViolationNotNullFor("Predicate",
() -> ReflectionSupport.findFields(ReflectionSupportTests.class, null, HierarchyTraversalMode.TOP_DOWN));
assertPreconditionViolationNotNullFor("HierarchyTraversalMode",
() -> ReflectionSupport.findFields(ReflectionSupportTests.class, allFields, null));
}
@Test
void tryToReadFieldValueDelegates() throws Exception {
var staticField = getClass().getDeclaredField("staticField");
assertEquals(ReflectionUtils.tryToReadFieldValue(staticField, null),
ReflectionSupport.tryToReadFieldValue(staticField, null));
var instanceField = getClass().getDeclaredField("instanceField");
assertEquals(ReflectionUtils.tryToReadFieldValue(instanceField, this),
ReflectionSupport.tryToReadFieldValue(instanceField, this));
}
@SuppressWarnings("DataFlowIssue")
@Test
void tryToReadFieldValuePreconditions() throws Exception {
assertPreconditionViolationNotNullFor("Field", () -> ReflectionSupport.tryToReadFieldValue(null, this));
var instanceField = getClass().getDeclaredField("instanceField");
assertPreconditionViolationFor(() -> ReflectionSupport.tryToReadFieldValue(instanceField, null))//
.withMessageStartingWith("Cannot read non-static field")//
.withMessageEndingWith("on a null instance.");
}
@Test
void findMethodDelegates() {
assertEquals(ReflectionUtils.findMethod(Boolean.class, "valueOf", String.class.getName()),
ReflectionSupport.findMethod(Boolean.class, "valueOf", String.class.getName()));
assertEquals(ReflectionUtils.findMethod(Boolean.class, "valueOf", String.class),
ReflectionSupport.findMethod(Boolean.class, "valueOf", String.class));
}
@SuppressWarnings("DataFlowIssue")
@Test
void findMethodPreconditions() {
assertPreconditionViolationNotNullFor("Class",
() -> ReflectionSupport.findMethod(null, "valueOf", String.class.getName()));
assertPreconditionViolationNotNullOrBlankFor("Method name",
() -> ReflectionSupport.findMethod(Boolean.class, "", String.class.getName()));
assertPreconditionViolationNotNullOrBlankFor("Method name",
() -> ReflectionSupport.findMethod(Boolean.class, " ", String.class.getName()));
assertPreconditionViolationNotNullFor("Class",
() -> ReflectionSupport.findMethod(null, "valueOf", String.class));
assertPreconditionViolationNotNullOrBlankFor("Method name",
() -> ReflectionSupport.findMethod(Boolean.class, "", String.class));
assertPreconditionViolationNotNullOrBlankFor("Method name",
() -> ReflectionSupport.findMethod(Boolean.class, " ", String.class));
assertPreconditionViolationNotNullFor("Parameter types array",
() -> ReflectionSupport.findMethod(Boolean.class, "valueOf", (Class<?>[]) null));
assertPreconditionViolationNotNullFor("Individual parameter types",
() -> ReflectionSupport.findMethod(Boolean.class, "valueOf", new Class<?>[] { null }));
}
@Test
void findMethodsDelegates() {
assertEquals(
ReflectionUtils.findMethods(ReflectionSupportTests.class, allMethods,
ReflectionUtils.HierarchyTraversalMode.BOTTOM_UP),
ReflectionSupport.findMethods(ReflectionSupportTests.class, allMethods, HierarchyTraversalMode.BOTTOM_UP));
assertEquals(
ReflectionUtils.findMethods(ReflectionSupportTests.class, allMethods,
ReflectionUtils.HierarchyTraversalMode.TOP_DOWN),
ReflectionSupport.findMethods(ReflectionSupportTests.class, allMethods, HierarchyTraversalMode.TOP_DOWN));
}
@SuppressWarnings("DataFlowIssue")
@Test
void findMethodsPreconditions() {
assertPreconditionViolationNotNullFor("Class",
() -> ReflectionSupport.findMethods(null, allMethods, HierarchyTraversalMode.BOTTOM_UP));
assertPreconditionViolationNotNullFor("Class",
() -> ReflectionSupport.findMethods(null, allMethods, HierarchyTraversalMode.TOP_DOWN));
assertPreconditionViolationNotNullFor("Predicate",
() -> ReflectionSupport.findMethods(ReflectionSupportTests.class, null, HierarchyTraversalMode.BOTTOM_UP));
assertPreconditionViolationNotNullFor("Predicate",
() -> ReflectionSupport.findMethods(ReflectionSupportTests.class, null, HierarchyTraversalMode.TOP_DOWN));
assertPreconditionViolationNotNullFor("HierarchyTraversalMode",
() -> ReflectionSupport.findMethods(ReflectionSupportTests.class, allMethods, null));
}
@Test
void findNestedClassesDelegates() {
assertEquals(ReflectionUtils.findNestedClasses(ClassWithNestedClasses.class, ReflectionUtils::isStatic),
ReflectionSupport.findNestedClasses(ClassWithNestedClasses.class, ReflectionUtils::isStatic));
}
@SuppressWarnings("DataFlowIssue")
@Test
void findNestedClassesPreconditions() {
assertPreconditionViolationNotNullFor("Class",
() -> ReflectionSupport.findNestedClasses(null, ReflectionUtils::isStatic));
assertPreconditionViolationNotNullFor("Predicate",
() -> ReflectionSupport.findNestedClasses(ClassWithNestedClasses.class, null));
}
private static String createDisplayName(URI root) {
var displayName = root.getPath();
if (displayName.length() > 42) {
displayName = "..." + displayName.substring(displayName.length() - 42);
}
return displayName;
}
static | predicate |
java | apache__camel | components/camel-microprofile/camel-microprofile-health/src/test/java/org/apache/camel/microprofile/health/CamelMicroProfileHealthSupervisedRoutesMainTest.java | {
"start": 1617,
"end": 4468
} | class ____ {
private final SmallRyeHealthReporter reporter = new SmallRyeHealthReporter();
@Test
public void testSupervisedRouteHealthChecks() throws Exception {
CamelContext context = new DefaultCamelContext();
CamelMicroProfileHealthCheckRegistry registry = new CamelMicroProfileHealthCheckRegistry(context);
context.addComponent("my", new CamelMicroProfileHealthTestHelper.MyComponent());
context.getCamelContextExtension().addContextPlugin(HealthCheckRegistry.class, registry);
context.getRouteController().supervising();
context.addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("my:start").routeId("healthyRoute")
.setBody(constant("Hello Camel MicroProfile Health"));
}
});
SimpleMain main = new SimpleMain(context);
main.addInitialProperty("camel.health.routes-enabled", "true");
main.addInitialProperty("camel.health.consumers-enabled", "true");
main.addInitialProperty("camel.health.producers-enabled", "true");
main.start();
try {
SmallRyeHealth health = reporter.getHealth();
JsonObject healthObject = getHealthJson(reporter, health);
assertEquals(Status.UP.name(), healthObject.getString("status"));
JsonArray checks = healthObject.getJsonArray("checks");
assertEquals(5, checks.size());
Optional<JsonObject> camelRoutesCheck = findHealthCheck("camel-routes", checks);
camelRoutesCheck.ifPresentOrElse(check -> {
assertEquals(Status.UP.toString(), check.getString("status"));
}, () -> fail("Expected camel-routes check not found in health output"));
Optional<JsonObject> camelConsumersCheck = findHealthCheck("camel-consumers", checks);
camelConsumersCheck.ifPresentOrElse(check -> {
assertEquals(Status.UP.toString(), check.getString("status"));
}, () -> fail("Expected camel-consumers check not found in health output"));
Optional<JsonObject> camelComponentsCheck = findHealthCheck("camel-producers", checks);
camelComponentsCheck.ifPresentOrElse(check -> {
assertEquals(Status.UP.toString(), check.getString("status"));
}, () -> fail("Expected camel-producers check not found in health output"));
} finally {
main.stop();
}
}
private Optional<JsonObject> findHealthCheck(String name, JsonArray checks) {
return checks.stream()
.map(JsonValue::asJsonObject)
.filter(jsonObject -> jsonObject.getString("name").equals(name))
.findFirst();
}
}
| CamelMicroProfileHealthSupervisedRoutesMainTest |
java | apache__camel | components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/cronjob/KubernetesCronJobComponent.java | {
"start": 1095,
"end": 1375
} | class ____ extends AbstractKubernetesComponent {
@Override
protected KubernetesCronJobEndpoint doCreateEndpoint(String uri, String remaining, KubernetesConfiguration config) {
return new KubernetesCronJobEndpoint(uri, this, config);
}
}
| KubernetesCronJobComponent |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/NodeHeartbeatRequest.java | {
"start": 1352,
"end": 4600
} | class ____ {
public static NodeHeartbeatRequest newInstance(NodeStatus nodeStatus,
MasterKey lastKnownContainerTokenMasterKey,
MasterKey lastKnownNMTokenMasterKey, Set<NodeLabel> nodeLabels) {
NodeHeartbeatRequest nodeHeartbeatRequest =
Records.newRecord(NodeHeartbeatRequest.class);
nodeHeartbeatRequest.setNodeStatus(nodeStatus);
nodeHeartbeatRequest
.setLastKnownContainerTokenMasterKey(lastKnownContainerTokenMasterKey);
nodeHeartbeatRequest
.setLastKnownNMTokenMasterKey(lastKnownNMTokenMasterKey);
nodeHeartbeatRequest.setNodeLabels(nodeLabels);
return nodeHeartbeatRequest;
}
public static NodeHeartbeatRequest newInstance(NodeStatus nodeStatus,
MasterKey lastKnownContainerTokenMasterKey,
MasterKey lastKnownNMTokenMasterKey, Set<NodeLabel> nodeLabels,
Map<ApplicationId, AppCollectorData> registeringCollectors) {
NodeHeartbeatRequest nodeHeartbeatRequest =
Records.newRecord(NodeHeartbeatRequest.class);
nodeHeartbeatRequest.setNodeStatus(nodeStatus);
nodeHeartbeatRequest
.setLastKnownContainerTokenMasterKey(lastKnownContainerTokenMasterKey);
nodeHeartbeatRequest
.setLastKnownNMTokenMasterKey(lastKnownNMTokenMasterKey);
nodeHeartbeatRequest.setNodeLabels(nodeLabels);
nodeHeartbeatRequest.setRegisteringCollectors(registeringCollectors);
return nodeHeartbeatRequest;
}
public static NodeHeartbeatRequest newInstance(NodeStatus nodeStatus,
MasterKey lastKnownContainerTokenMasterKey,
MasterKey lastKnownNMTokenMasterKey, Set<NodeLabel> nodeLabels,
Set<NodeAttribute> nodeAttributes,
Map<ApplicationId, AppCollectorData> registeringCollectors) {
NodeHeartbeatRequest request = NodeHeartbeatRequest
.newInstance(nodeStatus, lastKnownContainerTokenMasterKey,
lastKnownNMTokenMasterKey, nodeLabels, registeringCollectors);
request.setNodeAttributes(nodeAttributes);
return request;
}
public abstract NodeStatus getNodeStatus();
public abstract void setNodeStatus(NodeStatus status);
public abstract MasterKey getLastKnownContainerTokenMasterKey();
public abstract void setLastKnownContainerTokenMasterKey(MasterKey secretKey);
public abstract MasterKey getLastKnownNMTokenMasterKey();
public abstract void setLastKnownNMTokenMasterKey(MasterKey secretKey);
public abstract Set<NodeLabel> getNodeLabels();
public abstract void setNodeLabels(Set<NodeLabel> nodeLabels);
public abstract List<LogAggregationReport>
getLogAggregationReportsForApps();
public abstract void setLogAggregationReportsForApps(
List<LogAggregationReport> logAggregationReportsForApps);
// This tells RM registered collectors' address info on this node
public abstract Map<ApplicationId, AppCollectorData>
getRegisteringCollectors();
public abstract void setRegisteringCollectors(Map<ApplicationId,
AppCollectorData> appCollectorsMap);
public abstract Set<NodeAttribute> getNodeAttributes();
public abstract void setNodeAttributes(Set<NodeAttribute> nodeAttributes);
public abstract void setTokenSequenceNo(long tokenSequenceNo);
public abstract long getTokenSequenceNo();
}
| NodeHeartbeatRequest |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/mapping/meta/ParameterMeta.java | {
"start": 1394,
"end": 4931
} | class ____ extends AnnotationSupport {
private final String prefix;
private final String name;
private Boolean simple;
private Class<?> actualType;
private Type actualGenericType;
private BeanMeta beanMeta;
private NamedValueMeta namedValueMeta;
protected ParameterMeta(RestToolKit toolKit, String prefix, String name) {
super(toolKit);
this.prefix = prefix;
this.name = name;
}
protected ParameterMeta(RestToolKit toolKit, String name) {
super(toolKit);
prefix = null;
this.name = name;
}
public String getPrefix() {
return prefix;
}
@Nullable
public String getName() {
return name;
}
public String getRequiredName() {
String name = getName();
if (name == null) {
throw new RestException(Messages.ARGUMENT_NAME_MISSING, getType());
}
return name;
}
public final boolean isSimple() {
Boolean simple = this.simple;
if (simple == null) {
Class<?> type = Collection.class.isAssignableFrom(getType())
? TypeUtils.getNestedActualType(getGenericType(), 0)
: getActualType();
simple = TypeUtils.isSimpleProperty(type);
this.simple = simple;
}
return simple;
}
public final boolean isStream() {
return ReflectionPackableMethod.isStreamType(getType());
}
public final Class<?> getActualType() {
Class<?> type = actualType;
if (type == null) {
type = getType();
if (TypeUtils.isWrapperType(type)) {
type = TypeUtils.getNestedActualType(getGenericType(), 0);
if (type == null) {
type = Object.class;
}
}
actualType = type;
}
return type;
}
public final Type getActualGenericType() {
Type type = actualGenericType;
if (type == null) {
type = getGenericType();
if (TypeUtils.isWrapperType(TypeUtils.getActualType(type))) {
type = TypeUtils.getNestedGenericType(type, 0);
if (type == null) {
type = Object.class;
}
}
actualGenericType = type;
}
return type;
}
public final BeanMeta getBeanMeta() {
BeanMeta beanMeta = this.beanMeta;
if (beanMeta == null) {
this.beanMeta = beanMeta = new BeanMeta(getToolKit(), getActualType());
}
return beanMeta;
}
public final Object bind(HttpRequest request, HttpResponse response) {
return getToolKit().bind(this, request, response);
}
public final NamedValueMeta getNamedValueMeta() {
NamedValueMeta namedValueMeta = this.namedValueMeta;
if (namedValueMeta == null) {
namedValueMeta = getToolKit().getNamedValueMeta(this);
if (namedValueMeta == null) {
namedValueMeta = NamedValueMeta.EMPTY;
}
this.namedValueMeta = namedValueMeta;
}
return namedValueMeta;
}
public int getIndex() {
return -1;
}
public String getDescription() {
return name;
}
public abstract Class<?> getType();
public abstract Type getGenericType();
@Override
public String toString() {
return "ParameterMeta{name='" + name + "', type=" + getType() + '}';
}
}
| ParameterMeta |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/EnumMapDeserializer5165Test.java | {
"start": 641,
"end": 1662
} | class ____ {
private EnumMap<MyEnum, Integer> map;
public EnumMap<MyEnum, Integer> getMap() {
return map;
}
public void setMap(EnumMap<MyEnum, Integer> map) {
this.map = map;
}
}
@Test
public void nullsFailTest() {
ObjectMapper mapper = JsonMapper.builder()
.changeDefaultNullHandling(n -> JsonSetter.Value.forContentNulls(Nulls.FAIL))
.build();
assertThrows(
InvalidNullException.class,
() -> mapper.readValue("{\"map\":{\"FOO\":\"\"}}", new TypeReference<Dst>(){})
);
}
@Test
public void nullsSkipTest() throws Exception {
ObjectMapper mapper = JsonMapper.builder()
.changeDefaultNullHandling(n -> JsonSetter.Value.forContentNulls(Nulls.SKIP))
.build();
Dst dst = mapper.readValue("{\"map\":{\"FOO\":\"\"}}", new TypeReference<Dst>() {});
assertTrue(dst.getMap().isEmpty());
}
}
| Dst |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/ClassLoaderUtils.java | {
"start": 1860,
"end": 2864
} | class ____ to a String calling {@link #toString(URLClassLoader)}.
*
* @param classLoader to URLClassLoader to convert.
* @return the formatted string.
*/
public static String toString(final ClassLoader classLoader) {
if (classLoader instanceof URLClassLoader) {
return toString((URLClassLoader) classLoader);
}
return Objects.toString(classLoader);
}
/**
* Converts the given URLClassLoader to a String in the format {@code "URLClassLoader.toString() + [URL1, URL2, ...]"}.
*
* @param classLoader to URLClassLoader to convert.
* @return the formatted string.
*/
public static String toString(final URLClassLoader classLoader) {
return classLoader != null ? classLoader + Arrays.toString(classLoader.getURLs()) : "null";
}
/**
* Make private in 4.0.
*
* @deprecated TODO Make private in 4.0.
*/
@Deprecated
public ClassLoaderUtils() {
// empty
}
}
| loader |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/GoogleSecretManagerEndpointBuilderFactory.java | {
"start": 8223,
"end": 10529
} | interface ____ {
/**
* Google Secret Manager (camel-google-secret-manager)
* Manage Google Secret Manager Secrets
*
* Category: cloud
* Since: 3.16
* Maven coordinates: org.apache.camel:camel-google-secret-manager
*
* @return the dsl builder for the headers' name.
*/
default GoogleSecretManagerHeaderNameBuilder googleSecretManager() {
return GoogleSecretManagerHeaderNameBuilder.INSTANCE;
}
/**
* Google Secret Manager (camel-google-secret-manager)
* Manage Google Secret Manager Secrets
*
* Category: cloud
* Since: 3.16
* Maven coordinates: org.apache.camel:camel-google-secret-manager
*
* Syntax: <code>google-secret-manager:project</code>
*
* Path parameter: project (required)
* The Google Cloud Project Id name related to the Secret Manager
*
* @param path project
* @return the dsl builder
*/
default GoogleSecretManagerEndpointBuilder googleSecretManager(String path) {
return GoogleSecretManagerEndpointBuilderFactory.endpointBuilder("google-secret-manager", path);
}
/**
* Google Secret Manager (camel-google-secret-manager)
* Manage Google Secret Manager Secrets
*
* Category: cloud
* Since: 3.16
* Maven coordinates: org.apache.camel:camel-google-secret-manager
*
* Syntax: <code>google-secret-manager:project</code>
*
* Path parameter: project (required)
* The Google Cloud Project Id name related to the Secret Manager
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path project
* @return the dsl builder
*/
default GoogleSecretManagerEndpointBuilder googleSecretManager(String componentName, String path) {
return GoogleSecretManagerEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the Google Secret Manager component.
*/
public static | GoogleSecretManagerBuilders |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/AutoConfigurationImportSelectorIntegrationTests.java | {
"start": 3459,
"end": 3566
} | class ____ {
}
@Configuration(proxyBeanMethods = false)
@AutoConfigureAfter(ConfigC.class)
static | ConfigC |
java | micronaut-projects__micronaut-core | discovery-core/src/main/java/io/micronaut/discovery/config/DefaultCompositeConfigurationClient.java | {
"start": 1289,
"end": 2544
} | class ____ implements ConfigurationClient {
private final ConfigurationClient[] configurationClients;
/**
* Create a default composite configuration client from given configuration clients.
*
* @param configurationClients The configuration clients
*/
public DefaultCompositeConfigurationClient(ConfigurationClient[] configurationClients) {
this.configurationClients = configurationClients;
}
@NonNull
@Override
public String getDescription() {
return toString();
}
@Override
public Publisher<PropertySource> getPropertySources(Environment environment) {
if (ArrayUtils.isEmpty(configurationClients)) {
return Flux.empty();
}
List<Publisher<PropertySource>> publishers = Arrays.stream(configurationClients)
.map(configurationClient -> configurationClient.getPropertySources(environment))
.collect(Collectors.toList());
return Flux.merge(publishers);
}
@Override
public String toString() {
return "compositeConfigurationClient(" + Arrays.stream(configurationClients).map(ConfigurationClient::getDescription).collect(Collectors.joining(",")) + ")";
}
}
| DefaultCompositeConfigurationClient |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/toolchain/ToolchainManagerFactory.java | {
"start": 7569,
"end": 11276
} | class ____ implements ToolchainManager, ToolchainManagerPrivate {
@Override
public Toolchain getToolchainFromBuildContext(String type, MavenSession session) {
return getDelegate()
.getToolchainFromBuildContext(session.getSession(), type)
.map(ToolchainManagerFactory.this::getToolchainV3)
.orElse(null);
}
@Override
public List<Toolchain> getToolchains(MavenSession session, String type, Map<String, String> requirements) {
return new MappedList<>(
getDelegate().getToolchains(session.getSession(), type, requirements),
ToolchainManagerFactory.this::getToolchainV3);
}
@Override
public ToolchainPrivate[] getToolchainsForType(String type, MavenSession session)
throws MisconfiguredToolchainException {
try {
List<org.apache.maven.api.Toolchain> toolchains =
getDelegate().getToolchains(session.getSession(), type);
return toolchains.stream()
.map(ToolchainManagerFactory.this::getToolchainV3)
.toArray(ToolchainPrivate[]::new);
} catch (org.apache.maven.api.services.ToolchainManagerException e) {
throw new MisconfiguredToolchainException(e.getMessage(), e);
}
}
@Override
public void storeToolchainToBuildContext(ToolchainPrivate toolchain, MavenSession session) {
org.apache.maven.api.Toolchain tc = getToolchainV4(toolchain);
getDelegate().storeToolchainToBuildContext(session.getSession(), tc);
}
}
private org.apache.maven.api.Toolchain getToolchainV4(ToolchainPrivate toolchain) {
return toolchain instanceof ToolchainWrapperV3 v3tc ? v3tc.delegate : new ToolchainWrapperV4(toolchain);
}
private ToolchainPrivate getToolchainV3(org.apache.maven.api.Toolchain toolchain) {
return toolchain instanceof ToolchainWrapperV4 v3tc ? v3tc.delegate : new ToolchainWrapperV3(toolchain);
}
private record ToolchainWrapperV4(ToolchainPrivate delegate) implements org.apache.maven.api.Toolchain {
@Override
public String getType() {
return delegate.getType();
}
@Override
public String findTool(String toolName) {
return delegate.findTool(toolName);
}
@Override
public org.apache.maven.api.toolchain.ToolchainModel getModel() {
return delegate.getModel().getDelegate();
}
@Override
public boolean matchesRequirements(Map<String, String> requirements) {
return delegate.matchesRequirements(requirements);
}
@Override
public String toString() {
return delegate.toString();
}
}
private record ToolchainWrapperV3(org.apache.maven.api.Toolchain delegate) implements Toolchain, ToolchainPrivate {
@Override
public String getType() {
return delegate.getType();
}
@Override
public String findTool(String toolName) {
return delegate.findTool(toolName);
}
@Override
public boolean matchesRequirements(Map<String, String> requirements) {
return delegate.matchesRequirements(requirements);
}
@Override
public ToolchainModel getModel() {
return new ToolchainModel(delegate.getModel());
}
@Override
public String toString() {
return delegate.toString();
}
}
}
| DefaultToolchainManagerV3 |
java | apache__camel | components/camel-kafka/src/test/java/org/apache/camel/component/kafka/transform/KafkaHeaderDeserializerTest.java | {
"start": 1241,
"end": 3268
} | class ____ {
private DefaultCamelContext camelContext;
private final KafkaHeaderDeserializer processor = new KafkaHeaderDeserializer();
@BeforeEach
void setup() {
this.camelContext = new DefaultCamelContext();
}
@Test
void shouldDeserializeHeaders() throws Exception {
Exchange exchange = new DefaultExchange(camelContext);
exchange.getMessage().setHeader("foo", "bar");
exchange.getMessage().setHeader("fooBytes", "barBytes".getBytes(StandardCharsets.UTF_8));
exchange.getMessage().setHeader("fooNull", null);
exchange.getMessage().setHeader("number", 1L);
processor.enabled = true;
processor.process(exchange);
Assertions.assertTrue(exchange.getMessage().hasHeaders());
Assertions.assertEquals("bar", exchange.getMessage().getHeader("foo"));
Assertions.assertEquals("barBytes", exchange.getMessage().getHeader("fooBytes"));
Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey("fooNull"));
Assertions.assertNull(exchange.getMessage().getHeader("fooNull"));
Assertions.assertEquals("1", exchange.getMessage().getHeader("number"));
}
@Test
void shouldNotDeserializeHeadersWhenDisabled() throws Exception {
Exchange exchange = new DefaultExchange(camelContext);
exchange.getMessage().setHeader("foo", "bar");
exchange.getMessage().setHeader("fooBytes", "barBytes".getBytes(StandardCharsets.UTF_8));
processor.enabled = false;
processor.process(exchange);
Assertions.assertTrue(exchange.getMessage().hasHeaders());
Assertions.assertEquals("bar", exchange.getMessage().getHeader("foo"));
Assertions.assertTrue(exchange.getMessage().getHeader("fooBytes") instanceof byte[]);
Assertions.assertEquals(Arrays.toString("barBytes".getBytes(StandardCharsets.UTF_8)),
Arrays.toString((byte[]) exchange.getMessage().getHeader("fooBytes")));
}
}
| KafkaHeaderDeserializerTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/configuration/JMXServerOptions.java | {
"start": 1170,
"end": 2391
} | class ____ {
/** Port configured to enable JMX server for metrics and debugging. */
@Documentation.Section(Documentation.Sections.EXPERT_DEBUGGING_AND_TUNING)
public static final ConfigOption<String> JMX_SERVER_PORT =
key("jmx.server.port")
.stringType()
.noDefaultValue()
.withDescription(
new Description.DescriptionBuilder()
.text(
"The port range for the JMX server to start the registry. The "
+ "port config can be a single port: \"9123\", a range of ports: \"50100-50200\", "
+ "or a list of ranges and ports: \"50100-50200,50300-50400,51234\". ")
.linebreak()
.text("This option overrides metrics.reporter.*.port option.")
.build());
// ------------------------------------------------------------------------
/** Not intended to be instantiated. */
private JMXServerOptions() {}
}
| JMXServerOptions |
java | apache__hadoop | hadoop-cloud-storage-project/hadoop-tos/src/main/java/org/apache/hadoop/fs/tosfs/common/ThreadPools.java | {
"start": 1381,
"end": 5242
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(ThreadPools.class);
private ThreadPools() {
}
public static final String WORKER_THREAD_POOL_SIZE_PROP = "tos.worker.num-threads";
public static final int WORKER_THREAD_POOL_SIZE =
poolSize(Math.max(2, Runtime.getRuntime().availableProcessors()));
private static final ExecutorService WORKER_POOL = newWorkerPool("tos-default-worker-pool");
public static ExecutorService defaultWorkerPool() {
return WORKER_POOL;
}
public static ExecutorService newWorkerPool(String namePrefix) {
return newWorkerPool(namePrefix, WORKER_THREAD_POOL_SIZE);
}
public static ExecutorService newWorkerPool(String namePrefix, int poolSize) {
return Executors.newFixedThreadPool(poolSize, newDaemonThreadFactory(namePrefix));
}
public static ScheduledExecutorService newScheduleWorkerPool(String namePrefix, int poolSize) {
return Executors.newScheduledThreadPool(poolSize, newDaemonThreadFactory(namePrefix));
}
/**
* Helper routine to shutdown a {@link ExecutorService}. Will wait up to a
* certain timeout for the ExecutorService to gracefully shutdown. If the
* ExecutorService did not shutdown and there are still tasks unfinished after
* the timeout period, the ExecutorService will be notified to forcibly shut
* down. Another timeout period will be waited before giving up. So, at most,
* a shutdown will be allowed to wait up to twice the timeout value before
* giving up.
* <p>
* This method is copied from
* {@link HadoopExecutors#shutdown(ExecutorService, Logger, long, TimeUnit)}.
*
* @param executorService ExecutorService to shutdown
* @param timeout the maximum time to wait
* @param unit the time unit of the timeout argument
*/
public static void shutdown(ExecutorService executorService, long timeout, TimeUnit unit) {
if (executorService == null) {
return;
}
try {
executorService.shutdown();
LOG.debug("Gracefully shutting down executor service. Waiting max {} {}", timeout, unit);
if (!executorService.awaitTermination(timeout, unit)) {
LOG.debug("Executor service has not shutdown yet. Forcing. Will wait up to an additional"
+ " {} {} for shutdown", timeout, unit);
executorService.shutdownNow();
}
if (executorService.awaitTermination(timeout, unit)) {
LOG.debug("Succesfully shutdown executor service");
} else {
LOG.error("Unable to shutdown executor service after timeout {} {}", (2 * timeout), unit);
}
} catch (InterruptedException e) {
LOG.error("Interrupted while attempting to shutdown", e);
executorService.shutdownNow();
} catch (Exception e) {
LOG.warn("Exception closing executor service {}", e.getMessage());
LOG.debug("Exception closing executor service", e);
throw e;
}
}
private static int poolSize(int defaultSize) {
String value = System.getProperty(WORKER_THREAD_POOL_SIZE_PROP);
if (value != null) {
try {
return Integer.parseUnsignedInt(value);
} catch (NumberFormatException e) {
// will return the default
}
}
return defaultSize;
}
public static ThreadFactory newDaemonThreadFactory(String namePrefix) {
return new ThreadFactoryBuilder().setDaemon(true).setNameFormat(namePrefix + "-%d")
.setUncaughtExceptionHandler(
(t, e) -> LOG.error("Thread {} encounter uncaught exception", t, e)).build();
}
public static Thread newDaemonThread(String name, Runnable runnable,
UncaughtExceptionHandler handler) {
Thread t = new Thread(runnable);
t.setName(name);
t.setDaemon(true);
if (handler != null) {
t.setUncaughtExceptionHandler(handler);
}
return t;
}
}
| ThreadPools |
java | spring-projects__spring-boot | module/spring-boot-micrometer-metrics/src/main/java/org/springframework/boot/micrometer/metrics/autoconfigure/export/graphite/GraphiteMetricsExportAutoConfiguration.java | {
"start": 2286,
"end": 2700
} | class ____ {
@Bean
@ConditionalOnMissingBean
GraphiteConfig graphiteConfig(GraphiteProperties graphiteProperties) {
return new GraphitePropertiesConfigAdapter(graphiteProperties);
}
@Bean
@ConditionalOnMissingBean
GraphiteMeterRegistry graphiteMeterRegistry(GraphiteConfig graphiteConfig, Clock clock) {
return new GraphiteMeterRegistry(graphiteConfig, clock);
}
}
| GraphiteMetricsExportAutoConfiguration |
java | spring-projects__spring-framework | spring-tx/src/test/java/org/springframework/dao/annotation/PersistenceExceptionTranslationPostProcessorTests.java | {
"start": 5062,
"end": 5246
} | class ____ {
@Before("execution(void *.additionalMethod(*))")
public void log(JoinPoint jp) {
// System.out.println("Before " + jp.getSignature().getName());
}
}
}
| LogAllAspect |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/ParallelReduceSeed.java | {
"start": 1112,
"end": 2923
} | class ____<T, R> extends ParallelFlux<R> implements
Scannable, Fuseable {
final ParallelFlux<? extends T> source;
final Supplier<R> initialSupplier;
final BiFunction<R, ? super T, R> reducer;
ParallelReduceSeed(ParallelFlux<? extends T> source,
Supplier<R> initialSupplier,
BiFunction<R, ? super T, R> reducer) {
this.source = ParallelFlux.from(source);
this.initialSupplier = initialSupplier;
this.reducer = reducer;
}
@Override
public @Nullable Object scanUnsafe(Scannable.Attr key) {
if (key == Attr.PARENT) return source;
if (key == Attr.PREFETCH) return getPrefetch();
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
if (key == InternalProducerAttr.INSTANCE) return true;
return null;
}
@Override
public int getPrefetch() {
return Integer.MAX_VALUE;
}
@Override
public void subscribe(CoreSubscriber<? super R>[] subscribers) {
if (!validate(subscribers)) {
return;
}
int n = subscribers.length;
@SuppressWarnings("unchecked") CoreSubscriber<T>[] parents = new CoreSubscriber[n];
for (int i = 0; i < n; i++) {
R initialValue;
try {
initialValue = Objects.requireNonNull(initialSupplier.get(),
"The initialSupplier returned a null value");
}
catch (Throwable ex) {
reportError(subscribers, Operators.onOperatorError(ex,
subscribers[i].currentContext()));
return;
}
parents[i] =
new ParallelReduceSeedSubscriber<>(subscribers[i], initialValue, reducer);
}
source.subscribe(parents);
}
void reportError(Subscriber<?>[] subscribers, Throwable ex) {
for (Subscriber<?> s : subscribers) {
Operators.error(s, ex);
}
}
@Override
public int parallelism() {
return source.parallelism();
}
static final | ParallelReduceSeed |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/onetoone/polymorphism/BidirectionalOneToOnePolymorphismTest.java | {
"start": 1040,
"end": 3055
} | class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Level1 level1 = new Level1();
level1.setId( 1 );
DerivedLevel2 level2 = new DerivedLevel2();
level2.setId( 2 );
level1.setDerivedLevel2( level2 );
level2.setLevel1( level1 );
Level3 level3 = new Level3();
level3.setId( 3 );
level2.setLevel3( level3 );
level3.setLevel2( level2 );
session.persist( level1 );
session.persist( level2 );
session.persist( level3 );
}
);
}
@Test
public void loadAndUnProxyTest(SessionFactoryScope scope) {
scope.inTransaction( session -> {
Level1 reference = session.getReference( Level1.class, 1 );
assertThat( reference )
.extracting( Level1::getDerivedLevel2 )
.isNotNull();
} );
scope.inTransaction(
session -> {
Level2 level2Proxy = session.getReference( Level2.class, 2 );
assertFalse( Hibernate.isInitialized( level2Proxy ) );
Object unproxy = Hibernate.unproxy( level2Proxy );
assertThat( unproxy ).isInstanceOf( DerivedLevel2.class );
DerivedLevel2 level2 = (DerivedLevel2) unproxy;
Level1 level1 = level2.getLevel1();
DerivedLevel2 derivedLevel2 = level1.getDerivedLevel2();
assertThat( derivedLevel2 ).isNotNull();
assertThat( derivedLevel2 ).isSameAs( level2 );
} );
scope.inTransaction(
session -> {
Level3 level3Proxy = session.getReference( Level3.class, 3 );
assertFalse( Hibernate.isInitialized( level3Proxy ) );
Object unproxy = Hibernate.unproxy( level3Proxy.getLevel2() );
assertThat( unproxy ).isInstanceOf( DerivedLevel2.class );
DerivedLevel2 level2 = (DerivedLevel2) unproxy;
Level1 level1 = level2.getLevel1();
DerivedLevel2 derivedLevel2 = level1.getDerivedLevel2();
assertThat( derivedLevel2 ).isNotNull();
assertThat( derivedLevel2 ).isSameAs( level2 );
} );
}
@Entity(name = "Level1")
static | BidirectionalOneToOnePolymorphismTest |
java | elastic__elasticsearch | x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/expression/predicate/operator/comparison/InsensitiveBinaryComparisonPipeTests.java | {
"start": 1111,
"end": 6497
} | class ____ extends AbstractNodeTestCase<InsensitiveBinaryComparisonPipe, Pipe> {
@Override
protected InsensitiveBinaryComparisonPipe randomInstance() {
return randomInsensitiveBinaryComparisonPipe();
}
private Expression randomInsensitiveBinaryComparisonExpression() {
return randomInsensitiveBinaryComparisonPipe().expression();
}
public static InsensitiveBinaryComparisonPipe randomInsensitiveBinaryComparisonPipe() {
return (InsensitiveBinaryComparisonPipe) (new InsensitiveEquals(
randomSource(),
randomStringLiteral(),
randomStringLiteral(),
TestUtils.UTC
).makePipe());
}
@Override
public void testTransform() {
// test transforming only the properties (source, expression),
// skipping the children (string and substring) which are tested separately
InsensitiveBinaryComparisonPipe pipe = randomInstance();
Expression newExpression = randomValueOtherThan(pipe.expression(), this::randomInsensitiveBinaryComparisonExpression);
InsensitiveBinaryComparisonPipe newPipe = new InsensitiveBinaryComparisonPipe(
pipe.source(),
newExpression,
pipe.left(),
pipe.right(),
pipe.asProcessor().function()
);
assertEquals(
newPipe,
pipe.transformPropertiesOnly(Expression.class, v -> Objects.equals(v, pipe.expression()) ? newExpression : v)
);
InsensitiveBinaryComparisonPipe anotherPipe = randomInstance();
Source newLoc = randomValueOtherThan(anotherPipe.source(), SourceTests::randomSource);
newPipe = new InsensitiveBinaryComparisonPipe(
newLoc,
anotherPipe.expression(),
anotherPipe.left(),
anotherPipe.right(),
anotherPipe.asProcessor().function()
);
assertEquals(newPipe, anotherPipe.transformPropertiesOnly(Source.class, v -> Objects.equals(v, anotherPipe.source()) ? newLoc : v));
}
@Override
public void testReplaceChildren() {
InsensitiveBinaryComparisonPipe pipe = randomInstance();
Pipe newLeft = pipe(((Expression) randomValueOtherThan(pipe.left(), FunctionTestUtils::randomStringLiteral)));
Pipe newRight = pipe(((Expression) randomValueOtherThan(pipe.right(), FunctionTestUtils::randomStringLiteral)));
InsensitiveBinaryComparisonPipe newPipe = new InsensitiveBinaryComparisonPipe(
pipe.source(),
pipe.expression(),
pipe.left(),
pipe.right(),
pipe.asProcessor().function()
);
InsensitiveBinaryComparisonPipe transformed = newPipe.replaceChildren(newLeft, pipe.right());
assertEquals(transformed.source(), pipe.source());
assertEquals(transformed.expression(), pipe.expression());
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.right(), pipe.right());
transformed = newPipe.replaceChildren(pipe.left(), newRight);
assertEquals(transformed.source(), pipe.source());
assertEquals(transformed.expression(), pipe.expression());
assertEquals(transformed.left(), pipe.left());
assertEquals(transformed.right(), newRight);
transformed = newPipe.replaceChildren(newLeft, newRight);
assertEquals(transformed.source(), pipe.source());
assertEquals(transformed.expression(), pipe.expression());
assertEquals(transformed.left(), newLeft);
assertEquals(transformed.right(), newRight);
}
@Override
protected InsensitiveBinaryComparisonPipe mutate(InsensitiveBinaryComparisonPipe instance) {
List<Function<InsensitiveBinaryComparisonPipe, InsensitiveBinaryComparisonPipe>> randoms = new ArrayList<>();
randoms.add(
f -> new InsensitiveBinaryComparisonPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
f.right(),
f.asProcessor().function()
)
);
randoms.add(
f -> new InsensitiveBinaryComparisonPipe(
f.source(),
f.expression(),
f.left(),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomStringLiteral))),
f.asProcessor().function()
)
);
randoms.add(
f -> new InsensitiveBinaryComparisonPipe(
f.source(),
f.expression(),
pipe(((Expression) randomValueOtherThan(f.left(), FunctionTestUtils::randomStringLiteral))),
pipe(((Expression) randomValueOtherThan(f.right(), FunctionTestUtils::randomStringLiteral))),
f.asProcessor().function()
)
);
return randomFrom(randoms).apply(instance);
}
@Override
protected InsensitiveBinaryComparisonPipe copy(InsensitiveBinaryComparisonPipe instance) {
return new InsensitiveBinaryComparisonPipe(
instance.source(),
instance.expression(),
instance.left(),
instance.right(),
instance.asProcessor().function()
);
}
}
| InsensitiveBinaryComparisonPipeTests |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/nestedbeans/mixed/_target/MaterialDto.java | {
"start": 249,
"end": 722
} | class ____ {
private String manufacturer;
private MaterialTypeDto materialType;
public String getManufacturer() {
return manufacturer;
}
public void setManufacturer(String manufacturer) {
this.manufacturer = manufacturer;
}
public MaterialTypeDto getMaterialType() {
return materialType;
}
public void setMaterialType(MaterialTypeDto materialType) {
this.materialType = materialType;
}
}
| MaterialDto |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/ServiceMonitor.java | {
"start": 6123,
"end": 6356
} | class ____ implements Runnable {
@Override
public void run() {
for (Component component : context.scheduler.getAllComponents().values()) {
component.resetCompFailureCount();
}
}
}
}
| ContainerFailureReset |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerialization.java | {
"start": 1429,
"end": 2169
} | class ____<T extends Serializable>
implements Deserializer<T> {
private ObjectInputStream ois;
@Override
public void open(InputStream in) throws IOException {
ois = new ObjectInputStream(in) {
@Override protected void readStreamHeader() {
// no header
}
};
}
@Override
@SuppressWarnings("unchecked")
public T deserialize(T object) throws IOException {
try {
// ignore passed-in object
return (T) ois.readObject();
} catch (ClassNotFoundException e) {
throw new IOException(e.toString());
}
}
@Override
public void close() throws IOException {
ois.close();
}
}
static | JavaSerializationDeserializer |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/constraint/PlacementConstraintParser.java | {
"start": 5490,
"end": 5830
} | interface ____ used to parse an expression. It first
* validates if the syntax of the given expression is valid, then traverse
* the expression and parse it to an enumeration of strings. Each parsed
* string can be further consumed by a {@link ConstraintParser} and
* transformed to a {@link AbstractConstraint}.
*/
public | that |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonSortedSet.java | {
"start": 4009,
"end": 21692
} | class ____ of " + className + " differs from used by this SortedSet!");
}
Class<?> clazz = Class.forName(className);
comparator = (Comparator<V>) clazz.newInstance();
}
} catch (IllegalStateException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
// TODO cache result
private static String calcClassSign(String name) {
try {
Class<?> clazz = Class.forName(name);
ByteArrayOutputStream result = new ByteArrayOutputStream();
ObjectOutputStream outputStream = new ObjectOutputStream(result);
outputStream.writeObject(clazz);
outputStream.close();
MessageDigest crypt = MessageDigest.getInstance("SHA-1");
crypt.reset();
crypt.update(result.toByteArray());
return new BigInteger(1, crypt.digest()).toString(16);
} catch (Exception e) {
throw new IllegalStateException("Can't calculate sign of " + name, e);
}
}
@Override
public Collection<V> readAll() {
return get(readAllAsync());
}
@Override
public RFuture<Collection<V>> readAllAsync() {
return (RFuture<Collection<V>>) (Object) list.readAllAsync();
}
protected final <T> RFuture<V> wrapLockedAsync(RedisCommand<T> command, Object... params) {
return wrapLockedAsync(() -> {
return commandExecutor.writeAsync(list.getRawName(), codec, command, params);
});
}
protected final <T, R> RFuture<R> wrapLockedAsync(Supplier<RFuture<R>> callable) {
long randomId = getServiceManager().getRandom().nextLong();
CompletionStage<R> f = lock.lockAsync(randomId).thenCompose(r -> {
RFuture<R> callback = callable.get();
return callback.handle((value, ex) -> {
CompletableFuture<R> result = new CompletableFuture<>();
lock.unlockAsync(randomId)
.whenComplete((r2, ex2) -> {
if (ex2 != null) {
if (ex != null) {
ex2.addSuppressed(ex);
}
result.completeExceptionally(ex2);
return;
}
if (ex != null) {
result.completeExceptionally(ex);
return;
}
result.complete(value);
});
return result;
}).thenCompose(ff -> ff);
});
return new CompletableFutureWrapper<>(f);
}
protected <T> void takeAsync(CompletableFuture<V> result, long delay, long timeoutInMicro, RedisCommand<T> command, Object... params) {
if (result.isDone()) {
return;
}
long start = System.currentTimeMillis();
getServiceManager().newTimeout(t -> {
if (result.isDone()) {
return;
}
RFuture<V> future = wrapLockedAsync(command, params);
future.whenComplete((res, e) -> {
if (e != null && !(e instanceof RedisConnectionException)) {
result.completeExceptionally(e);
return;
}
if (res != null && !(res instanceof List)) {
result.complete(res);
return;
}
if (res instanceof List && !((List) res).isEmpty()) {
result.complete(res);
return;
}
if (result.isCancelled()) {
return;
}
long remain = 0;
if (timeoutInMicro > 0) {
remain = timeoutInMicro - ((System.currentTimeMillis() - start))*1000;
if (remain <= 0) {
result.complete(res);
return;
}
}
long del = ThreadLocalRandom.current().nextInt(2000000);
if (timeoutInMicro > 0 && remain < 2000000) {
del = 0;
}
takeAsync(result, del, remain, command, params);
});
}, delay, TimeUnit.MICROSECONDS);
}
@Override
public V pollFirst() {
return get(pollFirstAsync());
}
@Override
public RFuture<V> pollFirstAsync() {
return wrapLockedAsync(RedisCommands.LPOP, list.getRawName());
}
@Override
public Collection<V> pollFirst(int count) {
return get(pollFirstAsync(count));
}
@Override
public RFuture<Collection<V>> pollFirstAsync(int count) {
return (RFuture<Collection<V>>) wrapLockedAsync(RedisCommands.LPOP_LIST, list.getRawName(), count);
}
@Override
public V pollFirst(Duration duration) {
return get(pollFirstAsync(duration));
}
@Override
public RFuture<V> pollFirstAsync(Duration duration) {
CompletableFuture<V> result = new CompletableFuture<V>();
takeAsync(result, 0, duration.toMillis() * 1000, RedisCommands.LPOP, list.getRawName());
return new CompletableFutureWrapper<>(result);
}
@Override
public List<V> pollFirst(Duration duration, int count) {
return get(pollFirstAsync(duration, count));
}
@Override
public RFuture<List<V>> pollFirstAsync(Duration duration, int count) {
CompletableFuture<V> result = new CompletableFuture<>();
takeAsync(result, 0, duration.toMillis() * 1000, RedisCommands.LPOP_LIST, list.getRawName(), count);
return new CompletableFutureWrapper<>((CompletableFuture<List<V>>) result);
}
@Override
public V pollLast() {
return get(pollLastAsync());
}
@Override
public RFuture<V> pollLastAsync() {
return wrapLockedAsync(RedisCommands.RPOP, list.getRawName());
}
@Override
public Collection<V> pollLast(int count) {
return get(pollLastAsync(count));
}
@Override
public RFuture<Collection<V>> pollLastAsync(int count) {
return (RFuture<Collection<V>>) wrapLockedAsync(RedisCommands.RPOP_LIST, list.getRawName(), count);
}
@Override
public V pollLast(Duration duration) {
return get(pollLastAsync(duration));
}
@Override
public RFuture<V> pollLastAsync(Duration duration) {
CompletableFuture<V> result = new CompletableFuture<V>();
takeAsync(result, 0, duration.toMillis() * 1000, RedisCommands.RPOP, list.getRawName());
return new CompletableFutureWrapper<>(result);
}
@Override
public List<V> pollLast(Duration duration, int count) {
return get(pollLastAsync(duration, count));
}
@Override
public RFuture<List<V>> pollLastAsync(Duration duration, int count) {
CompletableFuture<V> result = new CompletableFuture<>();
takeAsync(result, 0, duration.toMillis() * 1000, RedisCommands.RPOP_LIST, list.getRawName(), count);
return new CompletableFutureWrapper<>((CompletableFuture<List<V>>) result);
}
@Override
public int size() {
return list.size();
}
@Override
public boolean isEmpty() {
return list.isEmpty();
}
@Override
public boolean contains(final Object o) {
return binarySearch((V) o, codec).getIndex() >= 0;
}
@Override
public Iterator<V> iterator() {
return list.iterator();
}
@Override
public Object[] toArray() {
return list.toArray();
}
@Override
public <T> T[] toArray(T[] a) {
return list.toArray(a);
}
@Override
public boolean add(V value) {
lock.lock();
try {
checkComparator();
BinarySearchResult<V> res = binarySearch(value, codec);
if (res.getIndex() < 0) {
int index = -(res.getIndex() + 1);
ByteBuf encodedValue = encode(value);
commandExecutor.get(commandExecutor.evalWriteNoRetryAsync(list.getRawName(), codec, RedisCommands.EVAL_VOID,
"local len = redis.call('llen', KEYS[1]);"
+ "if tonumber(ARGV[1]) < len then "
+ "local pivot = redis.call('lindex', KEYS[1], ARGV[1]);"
+ "redis.call('linsert', KEYS[1], 'before', pivot, ARGV[2]);"
+ "return;"
+ "end;"
+ "redis.call('rpush', KEYS[1], ARGV[2]);", Arrays.<Object>asList(list.getRawName()), index, encodedValue));
return true;
} else {
return false;
}
} finally {
lock.unlock();
}
}
private void checkComparator() {
String comparatorSign = comparatorHolder.get();
if (comparatorSign != null) {
String[] vals = comparatorSign.split(":");
String className = vals[0];
if (!comparator.getClass().getName().equals(className)) {
loadComparator();
}
}
}
@Override
public RFuture<Boolean> addAsync(V value) {
CompletableFuture<Boolean> f = CompletableFuture.supplyAsync(() -> add(value), getServiceManager().getExecutor());
return new CompletableFutureWrapper<>(f);
}
@Override
public RFuture<Boolean> removeAsync(Object value) {
CompletableFuture<Boolean> f = CompletableFuture.supplyAsync(() -> remove(value), getServiceManager().getExecutor());
return new CompletableFutureWrapper<>(f);
}
@Override
public boolean remove(Object value) {
lock.lock();
try {
checkComparator();
BinarySearchResult<V> res = binarySearch((V) value, codec);
if (res.getIndex() < 0) {
return false;
}
list.remove((int) res.getIndex());
return true;
} finally {
lock.unlock();
}
}
@Override
public boolean containsAll(Collection<?> c) {
for (Object object : c) {
if (!contains(object)) {
return false;
}
}
return true;
}
@Override
public boolean addAll(Collection<? extends V> c) {
boolean changed = false;
for (V v : c) {
if (add(v)) {
changed = true;
}
}
return changed;
}
@Override
public boolean retainAll(Collection<?> c) {
boolean changed = false;
for (Iterator<?> iterator = iterator(); iterator.hasNext();) {
Object object = iterator.next();
if (!c.contains(object)) {
iterator.remove();
changed = true;
}
}
return changed;
}
@Override
public boolean removeAll(Collection<?> c) {
boolean changed = false;
for (Object obj : c) {
if (remove(obj)) {
changed = true;
}
}
return changed;
}
@Override
public void clear() {
delete();
}
@Override
public Comparator<? super V> comparator() {
return comparator;
}
@Override
public SortedSet<V> subSet(V fromElement, V toElement) {
throw new UnsupportedOperationException();
// return new RedissonSubSortedSet<V>(this, connectionManager, fromElement, toElement);
}
@Override
public SortedSet<V> headSet(V toElement) {
return subSet(null, toElement);
}
@Override
public SortedSet<V> tailSet(V fromElement) {
return subSet(fromElement, null);
}
@Override
public V first() {
V res = list.getValue(0);
if (res == null) {
throw new NoSuchElementException();
}
return res;
}
@Override
public V last() {
V res = list.getValue(-1);
if (res == null) {
throw new NoSuchElementException();
}
return res;
}
private String getLockName() {
return prefixName("redisson_sortedset_lock", getRawName());
}
private String getComparatorKeyName() {
return prefixName("redisson_sortedset_comparator", getRawName());
}
@Override
public boolean trySetComparator(Comparator<? super V> comparator) {
String className = comparator.getClass().getName();
final String comparatorSign = className + ":" + calcClassSign(className);
Boolean res = commandExecutor.get(commandExecutor.evalWriteAsync(list.getRawName(), StringCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
"if redis.call('llen', KEYS[1]) == 0 then "
+ "redis.call('set', KEYS[2], ARGV[1]); "
+ "return 1; "
+ "else "
+ "return 0; "
+ "end",
Arrays.asList(list.getRawName(), getComparatorKeyName()), comparatorSign));
if (res) {
this.comparator = comparator;
}
return res;
}
@Override
public Iterator<V> distributedIterator(final int count) {
String iteratorName = "__redisson_sorted_set_cursor_{" + getRawName() + "}";
return distributedIterator(iteratorName, count);
}
@Override
public Iterator<V> distributedIterator(final String iteratorName, final int count) {
return new RedissonBaseIterator<V>() {
@Override
protected ScanResult<Object> iterator(RedisClient client, String nextIterPos) {
return distributedScanIterator(iteratorName, count);
}
@Override
protected void remove(Object value) {
RedissonSortedSet.this.remove(value);
}
};
}
private ScanResult<Object> distributedScanIterator(String iteratorName, int count) {
return get(distributedScanIteratorAsync(iteratorName, count));
}
private RFuture<ScanResult<Object>> distributedScanIteratorAsync(String iteratorName, int count) {
return commandExecutor.evalWriteAsync(list.getRawName(), codec, RedisCommands.EVAL_SCAN,
"local start_index = redis.call('get', KEYS[2]); "
+ "if start_index ~= false then "
+ "start_index = tonumber(start_index); "
+ "else "
+ "start_index = 0;"
+ "end;"
+ "if start_index == -1 then "
+ "return {'0', {}}; "
+ "end;"
+ "local end_index = start_index + ARGV[1];"
+ "local result; "
+ "result = redis.call('lrange', KEYS[1], start_index, end_index - 1); "
+ "if end_index > redis.call('llen', KEYS[1]) then "
+ "end_index = -1;"
+ "end; "
+ "redis.call('setex', KEYS[2], 3600, end_index);"
+ "return {tostring(end_index), result};",
Arrays.asList(list.getRawName(), iteratorName), count);
}
// TODO optimize: get three values each time instead of single
public BinarySearchResult<V> binarySearch(V value, Codec codec) {
int size = list.size();
int upperIndex = size - 1;
int lowerIndex = 0;
while (lowerIndex <= upperIndex) {
int index = lowerIndex + (upperIndex - lowerIndex) / 2;
V res = list.getValue(index);
if (res == null) {
return new BinarySearchResult<V>();
}
int cmp = comparator.compare(value, res);
if (cmp == 0) {
BinarySearchResult<V> indexRes = new BinarySearchResult<V>();
indexRes.setIndex(index);
return indexRes;
} else if (cmp < 0) {
upperIndex = index - 1;
} else {
lowerIndex = index + 1;
}
}
BinarySearchResult<V> indexRes = new BinarySearchResult<V>();
indexRes.setIndex(-(lowerIndex + 1));
return indexRes;
}
@SuppressWarnings("AvoidInlineConditionals")
public String toString() {
Iterator<V> it = iterator();
if (! it.hasNext())
return "[]";
StringBuilder sb = new StringBuilder();
sb.append('[');
for (;;) {
V e = it.next();
sb.append(e == this ? "(this Collection)" : e);
if (! it.hasNext())
return sb.append(']').toString();
sb.append(',').append(' ');
}
}
@Override
public RFuture<Boolean> deleteAsync() {
return deleteAsync(getRawName(), getComparatorKeyName(), getLockName());
}
@Override
public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
return super.expireAsync(timeToLive, timeUnit, param, getRawName(), getComparatorKeyName(), getLockName());
}
@Override
protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
return super.expireAtAsync(timestamp, param, getRawName(), getComparatorKeyName(), getLockName());
}
@Override
public RFuture<Boolean> clearExpireAsync() {
return clearExpireAsync(getRawName(), getComparatorKeyName(), getLockName());
}
}
| signature |
java | spring-projects__spring-boot | module/spring-boot-web-server/src/test/java/org/springframework/boot/web/server/servlet/context/WebFilterHandlerTests.java | {
"start": 7518,
"end": 7783
} | class ____ implements Filter {
@Override
public void init(FilterConfig filterConfig) {
}
@Override
public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) {
}
@Override
public void destroy() {
}
}
}
| BaseFilter |
java | google__auto | value/src/main/java/com/google/auto/value/processor/AutoBuilderProcessor.java | {
"start": 27461,
"end": 32001
} | class ____ be built; actually nested inside %s %s.",
Ascii.toLowerCase(enclosingKind.name()),
enclosing);
}
return MoreElements.asType(enclosing);
}
private TypeElement findOfClassValue(AnnotationMirror autoBuilderAnnotation) {
AnnotationValue ofClassValue =
AnnotationMirrors.getAnnotationValue(autoBuilderAnnotation, "ofClass");
Object value = ofClassValue.getValue();
if (value instanceof TypeMirror) {
TypeMirror ofClassType = (TypeMirror) value;
switch (ofClassType.getKind()) {
case DECLARED:
return MoreTypes.asTypeElement(ofClassType);
case ERROR:
throw new MissingTypeException(MoreTypes.asError(ofClassType));
default:
break;
}
}
throw new MissingTypeException(null);
}
private String findCallMethodValue(AnnotationMirror autoBuilderAnnotation) {
AnnotationValue callMethodValue =
AnnotationMirrors.getAnnotationValue(autoBuilderAnnotation, "callMethod");
return AnnotationValues.getString(callMethodValue);
}
@Override
Optional<String> nullableAnnotationForMethod(ExecutableElement propertyMethod) {
// TODO(b/183005059): implement
return Optional.empty();
}
private void buildAnnotation(
TypeElement autoBuilderType, TypeElement annotationType, String callMethod) {
if (!callMethod.isEmpty()) {
errorReporter()
.abortWithError(
autoBuilderType,
"[AutoBuilderAnnotationMethod] @AutoBuilder for an annotation must have an empty"
+ " callMethod, not \"%s\"",
callMethod);
}
String autoAnnotationClassName =
generatedClassName(autoBuilderType, AUTO_ANNOTATION_CLASS_PREFIX);
TypeElement autoAnnotationClass = elementUtils().getTypeElement(autoAnnotationClassName);
if (autoAnnotationClass != null) {
processType(autoBuilderType, autoAnnotationClass, "newAnnotation");
return;
}
AutoBuilderAnnotationTemplateVars vars = new AutoBuilderAnnotationTemplateVars();
vars.autoBuilderType = TypeEncoder.encode(autoBuilderType.asType());
vars.props = annotationBuilderPropertySet(annotationType);
vars.pkg = TypeSimplifier.packageNameOf(autoBuilderType);
vars.generated =
generatedAnnotation(elementUtils(), processingEnv.getSourceVersion())
.map(annotation -> TypeEncoder.encode(annotation.asType()))
.orElse("");
vars.className = TypeSimplifier.simpleNameOf(autoAnnotationClassName);
vars.annotationType = TypeEncoder.encode(annotationType.asType());
String text = vars.toText();
text = TypeEncoder.decode(text, processingEnv, vars.pkg, /* baseType= */ javaLangVoid);
text = Reformatter.fixup(text);
writeSourceFile(autoAnnotationClassName, text, autoBuilderType);
addDeferredType(autoBuilderType, autoAnnotationClassName);
}
private ImmutableSet<Property> annotationBuilderPropertySet(TypeElement annotationType) {
// Annotation methods can't have their own annotations so there's nowhere for us to discover
// a user @Nullable. We can only use our default @Nullable type annotation.
Nullables nullables = Nullables.fromMethods(processingEnv, ImmutableList.of());
// Translate the annotation elements into fake Property instances. We're really only interested
// in the name and type, so we can use them to declare a parameter of the generated
// @AutoAnnotation method. We'll generate a parameter for every element, even elements that
// don't have setters in the builder. The generated builder implementation will pass the default
// value from the annotation to those parameters.
return methodsIn(annotationType.getEnclosedElements()).stream()
.filter(m -> m.getParameters().isEmpty() && !m.getModifiers().contains(Modifier.STATIC))
.map(method -> annotationBuilderProperty(method, nullables))
.collect(toImmutableSet());
}
private static Property annotationBuilderProperty(
ExecutableElement annotationMethod, Nullables nullables) {
String name = annotationMethod.getSimpleName().toString();
TypeMirror type = annotationMethod.getReturnType();
return new Property(
name,
name,
TypeEncoder.encode(type),
new AnnotatedTypeMirror(type),
/* nullableAnnotation= */ Optional.empty(),
nullables,
/* getter= */ "",
/* maybeBuilderInitializer= */ Optional.empty(),
/* hasDefault= */ false);
}
}
| to |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/pingpong/ClientAutoPingIntervalTest.java | {
"start": 766,
"end": 1538
} | class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> {
root.addClasses(ServerEndpoint.class, ClientEndpoint.class);
}).overrideConfigKey("quarkus.websockets-next.client.auto-ping-interval", "200ms");
@TestHTTPResource("/")
URI uri;
@Inject
WebSocketConnector<ClientEndpoint> connector;
@Test
public void testPingPong() throws InterruptedException, ExecutionException {
connector.baseUri(uri.toString()).connectAndAwait();
// Ping messages are sent automatically
assertTrue(ClientEndpoint.PONG.await(5, TimeUnit.SECONDS));
}
@WebSocket(path = "/end")
public static | ClientAutoPingIntervalTest |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/analytics/reports/DetailColumnInfo.java | {
"start": 1002,
"end": 1399
} | class ____ extends AbstractDTOBase {
private String label;
private String dataType;
public String getLabel() {
return label;
}
public void setLabel(String label) {
this.label = label;
}
public String getDataType() {
return dataType;
}
public void setDataType(String dataType) {
this.dataType = dataType;
}
}
| DetailColumnInfo |
java | quarkusio__quarkus | devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/gradle/tasks/GradleMessageWriter.java | {
"start": 137,
"end": 747
} | class ____ implements MessageWriter {
private final Logger logger;
public GradleMessageWriter(Logger logger) {
this.logger = logger;
}
@Override
public void debug(String msg) {
logger.debug(msg);
}
@Override
public void error(String msg) {
logger.error(msg);
}
@Override
public void info(String msg) {
logger.lifecycle(msg);
}
@Override
public boolean isDebugEnabled() {
return logger.isDebugEnabled();
}
@Override
public void warn(String msg) {
logger.warn(msg);
}
}
| GradleMessageWriter |
java | square__retrofit | retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java | {
"start": 26093,
"end": 26615
} | class ____ {
@POST("/foo/bar/") //
Call<ResponseBody> method(@Body RequestBody body) {
return null;
}
}
RequestBody body = RequestBody.create(TEXT_PLAIN, "hi");
Request request = buildRequest(Example.class, body);
assertThat(request.method()).isEqualTo("POST");
assertThat(request.headers().size()).isEqualTo(0);
assertThat(request.url().toString()).isEqualTo("http://example.com/foo/bar/");
assertBody(request.body(), "hi");
}
@Test
public void put() {
| Example |
java | google__truth | core/src/test/java/com/google/common/truth/GraphMatchingTest.java | {
"start": 13270,
"end": 17196
} | class ____ {
private int lhsIndex; // index into lhsVertices
private int rhsIndexForLhs; // index into edges.get(lhsVertices.get(lhsIndex))
/** Constructs the first edge in the sequence. */
Edge() {
this.lhsIndex = 0;
this.rhsIndexForLhs = 0;
}
/** Constructs a copy of the given edge. */
Edge(Edge other) {
this.lhsIndex = other.lhsIndex;
this.rhsIndexForLhs = other.rhsIndexForLhs;
}
/**
* Returns whether this cursor is valid. Returns true if it has been advanced past the end
* of the sequence.
*/
boolean valid() {
// When advance() has advanced through all the edges, the final state is that lhsIndex ==
// lhsVertices.size(), so we use that state as a marker of the final invalid cursor.
return lhsIndex < lhsVertices.size();
}
/**
* Adds the current edge to the matching. Fails if either of the vertices in the edge is
* already in the matching. Fails if this cursor is invalid.
*/
void addToSelected() {
checkState(valid());
checkState(!selectedEdges.containsKey(lhsVertex()));
checkState(!selectedEdges.containsValue(rhsVertex()));
selectedEdges.put(lhsVertex(), rhsVertex());
}
/**
* Removes the current edge from the matching. Fails if this edge is not in the matching.
* Fails if this cursor is invalid.
*/
void removeFromSelected() {
checkState(valid());
checkState(Objects.equals(selectedEdges.remove(lhsVertex()), rhsVertex()));
}
/**
* Advances to the next edge in the sequence, or invalidates the cursor if this was the
* last. Skips over edges which cannot be added to the matching because either vertex is
* already in it. Fails if this cursor is invalid.
*/
void advance() {
checkState(valid());
// We iterate over the possible edges in a lexicographical order with the LHS index as the
// most significant part and the RHS index as the least significant. So we first try
// advancing to the next RHS index for the current LHS index, and if we can't we advance
// to the next LHS index in the map and the first RHS index for that.
++rhsIndexForLhs;
while (lhsIndex < lhsVertices.size()) {
if (!selectedEdges.containsKey(lhsVertex())) {
while (rhsIndexForLhs < edges.get(lhsVertex()).size()) {
if (!selectedEdges.containsValue(rhsVertex())) {
return;
}
++rhsIndexForLhs;
}
}
++lhsIndex;
rhsIndexForLhs = 0;
}
// We have reached the end of the sequence, and lhsIndex == lhsVertices.size().
}
private String lhsVertex() {
return lhsVertices.get(lhsIndex);
}
private String rhsVertex() {
return edges.get(lhsVertex()).get(rhsIndexForLhs);
}
}
}
}
/** Returns a bitset corresponding to the binary representation of the given integer. */
private static BitSet intBits(int intValue) {
BitSet bits = new BitSet();
for (int bitIndex = 0; bitIndex < Integer.SIZE; bitIndex++) {
bits.set(bitIndex, (intValue & (1L << bitIndex)) != 0);
}
return bits;
}
/**
* Returns a bitset of up to {@code maxBits} bits where each bit is set with a probability {@code
* bitProbability} using the given RNG.
*/
private static BitSet randomBits(int maxBits, double bitProbability, Random rng) {
BitSet bits = new BitSet();
for (int bitIndex = 0; bitIndex < maxBits; bitIndex++) {
bits.set(bitIndex, rng.nextDouble() < bitProbability);
}
return bits;
}
}
| Edge |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java | {
"start": 10170,
"end": 11448
} | class ____ extends MRApp {
TimeOutTaskMRApp(int maps, int reduces) {
super(maps, reduces, false, "TimeOutTaskMRApp", true);
}
@Override
protected TaskAttemptListener createTaskAttemptListener(
AppContext context, AMPreemptionPolicy policy) {
//This will create the TaskAttemptListener with TaskHeartbeatHandler
//RPC servers are not started
//task time out is reduced
//when attempt times out, heartbeat handler will send the lost event
//leading to Attempt failure
return new TaskAttemptListenerImpl(getContext(), null, null, policy) {
@Override
public void startRpcServer(){}
@Override
public void stopRpcServer(){}
@Override
public InetSocketAddress getAddress() {
return NetUtils.createSocketAddr("localhost", 1234);
}
protected void serviceInit(Configuration conf) throws Exception {
conf.setInt(MRJobConfig.TASK_TIMEOUT, 1000); //reduce timeout
conf.setInt(MRJobConfig.TASK_TIMEOUT_CHECK_INTERVAL_MS, 1000);
conf.setDouble(MRJobConfig.TASK_LOG_PROGRESS_DELTA_THRESHOLD, 0.01);
super.serviceInit(conf);
}
};
}
}
//Attempts of first Task are failed
static | TimeOutTaskMRApp |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_divde_zero.java | {
"start": 112,
"end": 327
} | class ____ extends TestCase {
public void test_divideZero() throws Exception {
Double d = 1.0D / 0.0D;
String text = JSON.toJSONString(d);
System.out.println(text);
}
}
| Bug_for_divde_zero |
java | apache__camel | components/camel-mllp/src/test/java/org/apache/camel/component/mllp/MllpAcknowledgementReceiveExceptionTest.java | {
"start": 1164,
"end": 5568
} | class ____ extends MllpExceptionTestSupport {
static final String ALTERNATE_EXCEPTION_MESSAGE = "Alternate Acknowledgment Exception Message";
MllpAcknowledgementReceiveException instance;
/**
* Description of test.
*
*/
@Test
public void testConstructorOne() {
instance = new MllpAcknowledgementReceiveException(HL7_MESSAGE_BYTES, LOG_PHI_TRUE);
assertTrue(instance.getMessage().startsWith(MllpAcknowledgementReceiveException.EXCEPTION_MESSAGE));
assertNull(instance.getCause());
assertArrayEquals(HL7_MESSAGE_BYTES, instance.hl7MessageBytes);
assertNull(instance.hl7AcknowledgementBytes);
}
/**
* Description of test.
*
*/
@Test
public void testConstructorTwo() {
instance = new MllpAcknowledgementReceiveException(HL7_MESSAGE_BYTES, HL7_ACKNOWLEDGEMENT_BYTES, LOG_PHI_TRUE);
assertTrue(instance.getMessage().startsWith(MllpAcknowledgementReceiveException.EXCEPTION_MESSAGE));
assertNull(instance.getCause());
assertArrayEquals(HL7_MESSAGE_BYTES, instance.hl7MessageBytes);
assertArrayEquals(HL7_ACKNOWLEDGEMENT_BYTES, instance.hl7AcknowledgementBytes);
}
/**
* Description of test.
*
*/
@Test
public void testConstructorThree() {
instance = new MllpAcknowledgementReceiveException(HL7_MESSAGE_BYTES, CAUSE, LOG_PHI_TRUE);
assertTrue(instance.getMessage().startsWith(MllpAcknowledgementReceiveException.EXCEPTION_MESSAGE));
assertSame(CAUSE, instance.getCause());
assertArrayEquals(HL7_MESSAGE_BYTES, instance.hl7MessageBytes);
assertNull(instance.hl7AcknowledgementBytes);
}
/**
* Description of test.
*
*/
@Test
public void testConstructorFour() {
instance = new MllpAcknowledgementReceiveException(HL7_MESSAGE_BYTES, HL7_ACKNOWLEDGEMENT_BYTES, CAUSE, LOG_PHI_TRUE);
assertTrue(instance.getMessage().startsWith(MllpAcknowledgementReceiveException.EXCEPTION_MESSAGE));
assertSame(CAUSE, instance.getCause());
assertArrayEquals(HL7_MESSAGE_BYTES, instance.hl7MessageBytes);
assertArrayEquals(HL7_ACKNOWLEDGEMENT_BYTES, instance.hl7AcknowledgementBytes);
}
/**
* Description of test.
*
*/
@Test
public void testConstructorFive() {
instance = new MllpAcknowledgementReceiveException(ALTERNATE_EXCEPTION_MESSAGE, HL7_MESSAGE_BYTES, LOG_PHI_TRUE);
assertTrue(instance.getMessage().startsWith(ALTERNATE_EXCEPTION_MESSAGE));
assertNull(instance.getCause());
assertArrayEquals(HL7_MESSAGE_BYTES, instance.hl7MessageBytes);
assertNull(instance.hl7AcknowledgementBytes);
}
/**
* Description of test.
*
*/
@Test
public void testConstructorSix() {
instance = new MllpAcknowledgementReceiveException(
ALTERNATE_EXCEPTION_MESSAGE, HL7_MESSAGE_BYTES, HL7_ACKNOWLEDGEMENT_BYTES, LOG_PHI_TRUE);
assertTrue(instance.getMessage().startsWith(ALTERNATE_EXCEPTION_MESSAGE));
assertNull(instance.getCause());
assertArrayEquals(HL7_MESSAGE_BYTES, instance.hl7MessageBytes);
assertArrayEquals(HL7_ACKNOWLEDGEMENT_BYTES, instance.hl7AcknowledgementBytes);
}
/**
* Description of test.
*
*/
@Test
public void testConstructorSeven() {
instance = new MllpAcknowledgementReceiveException(ALTERNATE_EXCEPTION_MESSAGE, HL7_MESSAGE_BYTES, CAUSE, LOG_PHI_TRUE);
assertTrue(instance.getMessage().startsWith(ALTERNATE_EXCEPTION_MESSAGE));
assertSame(CAUSE, instance.getCause());
assertArrayEquals(HL7_MESSAGE_BYTES, instance.hl7MessageBytes);
assertNull(instance.hl7AcknowledgementBytes);
}
/**
* Description of test.
*
*/
@Test
public void testConstructorEight() {
instance = new MllpAcknowledgementReceiveException(
ALTERNATE_EXCEPTION_MESSAGE, HL7_MESSAGE_BYTES, HL7_ACKNOWLEDGEMENT_BYTES, CAUSE, LOG_PHI_TRUE);
assertTrue(instance.getMessage().startsWith(ALTERNATE_EXCEPTION_MESSAGE));
assertSame(CAUSE, instance.getCause());
assertArrayEquals(HL7_MESSAGE_BYTES, instance.hl7MessageBytes);
assertArrayEquals(HL7_ACKNOWLEDGEMENT_BYTES, instance.hl7AcknowledgementBytes);
}
}
| MllpAcknowledgementReceiveExceptionTest |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/asm/MethodVisitor.java | {
"start": 3872,
"end": 7383
} | class ____
* parameter.
*
* @param opcode the opcode of the type instruction to be visited. This opcode is either NEW, ANEWARRAY, CHECKCAST
* or INSTANCEOF.
* @param type the operand of the instruction to be visited. This operand must be the internal name of an object or
* array class (see {@link Type#getInternalName() getInternalName}).
*/
void visitTypeInsn(int opcode, String type);
/**
* Visits a field instruction. A field instruction is an instruction that loads or stores the value of a field of an
* object.
*
* @param opcode the opcode of the type instruction to be visited. This opcode is either GETSTATIC, PUTSTATIC,
* GETFIELD or PUTFIELD.
* @param owner the internal name of the field's owner class (see {@link Type#getInternalName() getInternalName}).
* @param name the field's name.
* @param desc the field's descriptor (see {@link Type Type}).
*/
void visitFieldInsn(int opcode, String owner, String name, String desc);
void visitMethodInsn(int opcode, String owner, String name, String desc);
/**
* Visits a jump instruction. A jump instruction is an instruction that may jump to another instruction.
*
* @param opcode the opcode of the type instruction to be visited. This opcode is either IFEQ, IFNE, IFLT, IFGE,
* IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO, JSR,
* IFNULL or IFNONNULL.
* @param label the operand of the instruction to be visited. This operand is a label that designates the
* instruction to which the jump instruction may jump.
*/
void visitJumpInsn(int opcode, Label label);
/**
* Visits a label. A label designates the instruction that will be visited just after it.
*
* @param label a {@link Label Label} object.
*/
void visitLabel(Label label);
// -------------------------------------------------------------------------
// Special instructions
// -------------------------------------------------------------------------
/**
* Visits a LDC instruction.
*
* @param cst the constant to be loaded on the stack. This parameter must be a non null {@link Integer}, a
* {@link Float}, a {@link Long}, a {@link Double} a {@link String} (or a {@link Type} for <tt>.class</tt>
* constants, for classes whose version is 49.0 or more).
*/
void visitLdcInsn(Object cst);
/**
* Visits an IINC instruction.
*
* @param var index of the local variable to be incremented.
* @param increment amount to increment the local variable by.
*/
void visitIincInsn(int var, int increment);
// -------------------------------------------------------------------------
// Exceptions table entries, debug information, max stack and max locals
// -------------------------------------------------------------------------
/**
* Visits the maximum stack size and the maximum number of local variables of the method.
*
* @param maxStack maximum stack size of the method.
* @param maxLocals maximum number of local variables for the method.
*/
void visitMaxs(int maxStack, int maxLocals);
/**
* Visits the end of the method. This method, which is the last one to be called, is used to inform the visitor that
* all the annotations and attributes of the method have been visited.
*/
void visitEnd();
}
| as |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/graph/JobGraphGeneratorTestBase.java | {
"start": 117427,
"end": 117880
} | class ____ implements SimpleVersionedSerializer<Long>, Serializable {
@Override
public int getVersion() {
return 0;
}
@Override
public byte[] serialize(Long obj) throws IOException {
return new byte[0];
}
@Override
public Long deserialize(int version, byte[] serialized) throws IOException {
return null;
}
}
private static | LongSerializer |
java | reactor__reactor-core | reactor-test/src/main/java/reactor/test/subscriber/TestSubscriber.java | {
"start": 3777,
"end": 15123
} | interface ____<T> extends CoreSubscriber<T>, Scannable {
/**
* Create a simple plain {@link TestSubscriber} which will make an unbounded demand {@link #onSubscribe(Subscription) on subscription},
* has an empty {@link Context} and makes no attempt at fusion negotiation.
*
* @param <T> the type of data received by this subscriber
* @return a new plain {@link TestSubscriber}
*/
static <T> TestSubscriber<T> create() {
return new DefaultTestSubscriber<>(new TestSubscriberBuilder());
}
/**
* Create a {@link TestSubscriber} with tuning. See {@link TestSubscriberBuilder}.
*
* @return a {@link TestSubscriberBuilder} to fine tune the {@link TestSubscriber} to produce
*/
static TestSubscriberBuilder builder() {
return new TestSubscriberBuilder();
}
/**
* Cancel the underlying subscription to the {@link org.reactivestreams.Publisher} and
* unblock any pending {@link #block()} calls.
*/
void cancel();
/**
* Request {@code n} elements from the {@link org.reactivestreams.Publisher}'s {@link Subscription}.
* If this method is called before the {@link TestSubscriber} has subscribed to the {@link org.reactivestreams.Publisher},
* pre-request is accumulated (including {@link TestSubscriberBuilder#initialRequest(long) configured initial request}
* and replayed in a single batch upon subscription.
* <p>
* Note that if/once {@link Fuseable#SYNC} fusion mode is established, this method MUST NOT be used, and this will
* throw an {@link IllegalStateException}.
*
* @param n the additional amount to request
*/
void request(long n);
/**
* Check if this {@link TestSubscriber} has either:
* <ul>
* <li>been cancelled: {@link #isCancelled()} would return true</li>
* <li>been terminated, having been signalled with onComplete or onError: {@link #isTerminated()} would return true and {@link #getTerminalSignal()}
* would return a non-null {@link Signal}</li>
* </ul>
* The third possible failure condition, subscription failure, results in an {@link AssertionError} being thrown by this method
* (like all other accessors, see also {@link TestSubscriber} javadoc).
* <p>
* Once this method starts returning true, any pending {@link #block()} calls should finish, and subsequent
* block calls will return immediately.
*
* @return true if the {@link TestSubscriber} has reached an end state
*
* @throws AssertionError in case of failure at subscription time
*/
boolean isTerminatedOrCancelled();
/**
* Check if this {@link TestSubscriber} has received a terminal signal, ie. onComplete or onError.
* When returning {@code true}, implies:
* <ul>
* <li>{@link #isTerminatedOrCancelled()} is also true</li>
* <li>{@link #getTerminalSignal()} returns a non-null {@link Signal}</li>
* <li>{@link #expectTerminalSignal()}} returns the {@link Signal}</li>
* <li>{@link #expectTerminalError()}} returns the {@link Signal} in case of onError but throws in case of onComplete</li>
* </ul>
*
* @return true if the {@link TestSubscriber} has been terminated via onComplete or onError
*
* @throws AssertionError in case of failure at subscription time
*/
boolean isTerminated();
/**
* Check if this {@link TestSubscriber} has received a terminal signal that is specifically onComplete.
* When returning {@code true}, implies:
* <ul>
* <li>{@link #isTerminatedOrCancelled()} is also true</li>
* <li>{@link #isTerminated()} is also true</li>
* <li>{@link #getTerminalSignal()} returns a non-null onComplete {@link Signal}</li>
* <li>{@link #expectTerminalSignal()}} returns the same onComplete {@link Signal}</li>
* <li>{@link #expectTerminalError()}} throws</li>
* </ul>
*
* @return true if the {@link TestSubscriber} has been terminated via onComplete
*
* @throws AssertionError in case of failure at subscription time
*/
boolean isTerminatedComplete();
/**
* Check if this {@link TestSubscriber} has received a terminal signal that is specifically onError.
* When returning {@code true}, implies:
* <ul>
* <li>{@link #isTerminatedOrCancelled()} is also true</li>
* <li>{@link #isTerminated()} is also true</li>
* <li>{@link #getTerminalSignal()} returns a non-null onError {@link Signal}</li>
* <li>{@link #expectTerminalSignal()}} returns the same onError {@link Signal}</li>
* <li>{@link #expectTerminalError()}} returns the terminating {@link Throwable}</li>
* </ul>
*
* @return true if the {@link TestSubscriber} has been terminated via onComplete
*
* @throws AssertionError in case of failure at subscription time
*/
boolean isTerminatedError();
/**
* Check if this {@link TestSubscriber} has been {@link #cancel() cancelled}, which implies {@link #isTerminatedOrCancelled()} is also true.
*
* @return true if the {@link TestSubscriber} has been cancelled
*
* @throws AssertionError in case of failure at subscription time
*/
boolean isCancelled();
/**
* Return the terminal {@link Signal} if this {@link TestSubscriber} {@link #isTerminated()}, or {@code null} otherwise.
* See also {@link #expectTerminalSignal()} as a stricter way of asserting the terminal state.
*
* @return the terminal {@link Signal} or null if not terminated
*
* @throws AssertionError in case of failure at subscription time
* @see #isTerminated()
* @see #expectTerminalSignal()
*/
@Nullable Signal<T> getTerminalSignal();
/**
* Expect the {@link TestSubscriber} to be {@link #isTerminated() terminated}, and return the terminal {@link Signal}
* if so. Otherwise, <strong>cancel the subscription</strong> and throw an {@link AssertionError}.
* <p>
* Note that is there was already a subscription failure, the corresponding {@link AssertionError} is raised by this
* method instead.
*
* @return the terminal {@link Signal} (cannot be null)
*
* @throws AssertionError in case of failure at subscription time, or if the subscriber hasn't terminated yet
* @see #isTerminated()
* @see #getTerminalSignal()
*/
Signal<T> expectTerminalSignal();
/**
* Expect the {@link TestSubscriber} to be {@link #isTerminated() terminated} with an {@link #onError(Throwable)}
* and return the terminating {@link Throwable} if so.
* Otherwise, <strong>cancel the subscription</strong> and throw an {@link AssertionError}.
*
* @return the terminal {@link Throwable} (cannot be null)
*
* @throws AssertionError in case of failure at subscription time, or if the subscriber hasn't errored.
* @see #isTerminated()
* @see #isTerminatedError()
* @see #getTerminalSignal()
*/
Throwable expectTerminalError();
/**
* Return the {@link List} of all elements that have correctly been emitted to the {@link TestSubscriber} (onNext signals)
* so far. This returns a new list that is not backed by the {@link TestSubscriber}.
* <p>
* Note that this includes elements that would arrive after {@link #cancel()}, as this is allowed by the Reactive Streams
* specification (cancellation is not necessarily synchronous and some elements may already be in flight when the source
* takes notice of the cancellation).
* These elements are also mirrored in the {@link #getReceivedOnNextAfterCancellation()} getter.
*
* @return the {@link List} of all elements received by the {@link TestSubscriber} as part of normal operation
*
* @throws AssertionError in case of failure at subscription time
* @see #getReceivedOnNextAfterCancellation()
* @see #getProtocolErrors()
*/
List<T> getReceivedOnNext();
/**
* Return the {@link List} of elements that have been emitted to the {@link TestSubscriber} (onNext signals) so far,
* after a {@link #cancel()} was triggered. This returns a new list that is not backed by the {@link TestSubscriber}.
* <p>
* Note that this is allowed by the Reactive Streams specification (cancellation is not necessarily synchronous and
* some elements may already be in flight when the source takes notice of the cancellation).
* This is a sub-list of the one returned by {@link #getReceivedOnNext()} (in the conceptual sense, as the two lists
* are independent copies).
*
* @return the {@link List} of elements of {@link #getReceivedOnNext()} that were received by the {@link TestSubscriber}
* after {@link #cancel()} was triggered
*
* @throws AssertionError in case of failure at subscription time
* @see #getReceivedOnNext()
* @see #getProtocolErrors()
*/
List<T> getReceivedOnNextAfterCancellation();
/**
* Return a {@link List} of {@link Signal} which represent detected protocol error from the source {@link org.reactivestreams.Publisher},
* that is to say signals that were emitted to this {@link TestSubscriber} in violation of the Reactive Streams
* specification. An example would be an {@link #onNext(Object)} signal emitted after an {@link #onComplete()} signal.
* <p>
* Note that the {@link Signal} in the collection don't bear any {@link reactor.util.context.ContextView},
* since they would all be the configured {@link #currentContext()}.
*
* @return a {@link List} of {@link Signal} representing the detected protocol errors from the source {@link org.reactivestreams.Publisher}
*
* @throws AssertionError in case of failure at subscription time
*/
List<Signal<T>> getProtocolErrors();
/**
* Return an {@code int} code that represents the negotiated fusion mode for this {@link TestSubscriber}.
* Fusion codes can be converted to a human-readable value for display via {@link Fuseable#fusionModeName(int)}.
* If no particular fusion has been requested, returns {@link Fuseable#NONE}.
* Note that as long as this {@link TestSubscriber} hasn't been subscribed to a {@link org.reactivestreams.Publisher},
* this method will return {@code -1}. It will also throw an {@link AssertionError} if the configured fusion mode
* couldn't be negotiated at subscription.
*
* @return -1 if not subscribed, 0 ({@link Fuseable#NONE}) if no fusion negotiated, a relevant fusion code otherwise
*
* @throws AssertionError in case of failure at subscription time
*/
int getFusionMode();
/**
* Block until an assertable end state has been reached. This can be either a cancellation ({@link #isCancelled()}),
* a "normal" termination ({@link #isTerminated()}) or subscription failure. In the later case only, this method
* throws the corresponding {@link AssertionError}.
* <p>
* An AssertionError is also thrown if the thread is interrupted.
*
* @throws AssertionError in case of failure at subscription time (or thread interruption)
*/
void block();
/**
* Block until an assertable end state has been reached, or a timeout {@link Duration} has elapsed.
* End state can be either a cancellation ({@link #isCancelled()}), a "normal" termination ({@link #isTerminated()})
* or a subscription failure. In the later case only, this method throws the corresponding {@link AssertionError}.
* In case of timeout, an {@link AssertionError} with a message reflecting the configured duration is thrown.
* <p>
* An AssertionError is also thrown if the thread is interrupted.
*
* @throws AssertionError in case of failure at subscription time (or thread interruption)
*/
void block(Duration timeout);
/**
* An | TestSubscriber |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/OneInputStreamTaskTest.java | {
"start": 49877,
"end": 50602
} | class ____ extends AbstractStreamOperator<Integer>
implements OneInputStreamOperator<Integer, Integer> {
private final OutputTag<Integer> oddOutputTag =
new OutputTag<>("odd", BasicTypeInfo.INT_TYPE_INFO);
private final OutputTag<Integer> evenOutputTag =
new OutputTag<>("even", BasicTypeInfo.INT_TYPE_INFO);
@Override
public void processElement(StreamRecord<Integer> element) {
if (element.getValue() % 2 == 0) {
output.collect(evenOutputTag, element);
} else {
output.collect(oddOutputTag, element);
}
output.collect(element);
}
}
static | OddEvenOperator |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/stubbing/defaultanswers/ReturnsSmartNullsTest.java | {
"start": 3969,
"end": 13644
} | interface ____ extends GenericFoo<Foo> {
<I> I method();
<I> I methodWithArgs(int firstArg, I secondArg);
<I> I methodWithVarArgs(int firstArg, I... secondArg);
}
@Test
public void should_return_an_object_that_has_been_defined_with_class_generic()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Foo smartNull = (Foo) answer.answer(invocationOf(GenericFooBar.class, "get"));
assertThat(smartNull.toString())
.contains("SmartNull returned by")
.contains("genericFooBar.get()");
}
@Test
public void should_return_an_object_that_has_been_defined_with_method_generic()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
String smartNull = (String) answer.answer(invocationOf(GenericFooBar.class, "method"));
assertThat(smartNull).isNull();
}
private static <T> InterceptedInvocation invocationMethodWithArgs(final T obj)
throws NoSuchMethodException {
return new InterceptedInvocation(
new MockStrongReference<Object>(mock(GenericFooBar.class), false),
new SerializableMethod(
GenericFooBar.class.getMethod("methodWithArgs", int.class, Object.class)),
new Object[] {1, obj},
InterceptedInvocation.NO_OP,
LocationFactory.create(),
1);
}
@Test
public void
should_return_a_String_that_has_been_defined_with_method_generic_and_provided_in_argument()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull = answer.answer(invocationMethodWithArgs("secondArg"));
assertThat(smartNull).isNotNull().isInstanceOf(String.class).asString().isEmpty();
}
@Test
public void
should_return_a_empty_list_that_has_been_defined_with_method_generic_and_provided_in_argument()
throws Throwable {
final List<String> list = Collections.singletonList("String");
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull = answer.answer(invocationMethodWithArgs(list));
assertThat(smartNull).isNotNull().isInstanceOf(List.class);
assertThat((List) smartNull).isEmpty();
}
@Test
public void
should_return_a_empty_map_that_has_been_defined_with_method_generic_and_provided_in_argument()
throws Throwable {
final Map<String, String> map = new HashMap<String, String>();
map.put("key-1", "value-1");
map.put("key-2", "value-2");
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull = answer.answer(invocationMethodWithArgs(map));
assertThat(smartNull).isNotNull().isInstanceOf(Map.class);
assertThat((Map) smartNull).isEmpty();
}
@Test
public void
should_return_a_empty_set_that_has_been_defined_with_method_generic_and_provided_in_argument()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull =
answer.answer(
invocationMethodWithArgs(
new HashSet<String>(Arrays.asList("set-1", "set-2"))));
assertThat(smartNull).isNotNull().isInstanceOf(Set.class);
assertThat((Set) smartNull).isEmpty();
}
@Test
public void
should_return_a_new_mock_that_has_been_defined_with_method_generic_and_provided_in_argument()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
final Foo mock = mock(Foo.class);
Object smartNull = answer.answer(invocationMethodWithArgs(mock));
assertThat(smartNull).isNotNull().isNotSameAs(mock);
assertThat(smartNull.toString())
.contains("SmartNull returned by")
.contains("genericFooBar.methodWithArgs(");
}
@Test
public void
should_return_an_Object_that_has_been_defined_with_method_generic_and_provided_in_argument()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull = answer.answer(invocationMethodWithArgs(new Object() {}));
assertThat(smartNull.toString())
.contains("SmartNull returned by")
.contains("genericFooBar.methodWithArgs(");
}
@Test
public void should_throw_a_error_on_invocation_of_returned_mock() throws Throwable {
final Answer<Object> answer = new ReturnsSmartNulls();
final Foo mock = mock(Foo.class);
final Throwable throwable =
Assertions.catchThrowable(
new ThrowableAssert.ThrowingCallable() {
@Override
public void call() throws Throwable {
((Foo) answer.answer(invocationMethodWithArgs(mock))).get();
}
});
Assertions.assertThat(throwable)
.isInstanceOf(SmartNullPointerException.class)
.hasMessageContaining("genericFooBar.methodWithArgs(")
.hasMessageContaining("1")
.hasMessageContaining(mock.toString());
}
private static <T> InterceptedInvocation invocationMethodWithVarArgs(final T[] obj)
throws NoSuchMethodException {
return new InterceptedInvocation(
new MockStrongReference<Object>(mock(GenericFooBar.class), false),
new SerializableMethod(
GenericFooBar.class.getMethod(
"methodWithVarArgs", int.class, Object[].class)),
new Object[] {1, obj},
InterceptedInvocation.NO_OP,
LocationFactory.create(),
1);
}
@Test
public void
should_return_a_String_that_has_been_defined_with_method_generic_and_provided_in_var_args()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull =
answer.answer(invocationMethodWithVarArgs(new String[] {"varArg-1", "varArg-2"}));
assertThat(smartNull).isNotNull().isInstanceOf(String.class).asString().isEmpty();
}
@Test
public void
should_return_a_empty_list_that_has_been_defined_with_method_generic_and_provided_in_var_args()
throws Throwable {
final List<String> arg1 = Collections.singletonList("String");
final List<String> arg2 = Arrays.asList("str-1", "str-2");
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull = answer.answer(invocationMethodWithVarArgs(new List[] {arg1, arg2}));
assertThat(smartNull).isNotNull().isInstanceOf(List.class);
assertThat((List) smartNull).isEmpty();
}
@Test
public void
should_return_a_empty_map_that_has_been_defined_with_method_generic_and_provided_in_var_args()
throws Throwable {
final Map<String, String> map1 = new HashMap<>();
map1.put("key-1", "value-1");
map1.put("key-2", "value-2");
final Map<String, String> map2 = new HashMap<>();
map2.put("key-3", "value-1");
map2.put("key-4", "value-2");
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull = answer.answer(invocationMethodWithVarArgs(new Map[] {map1, map2}));
assertThat(smartNull).isNotNull().isInstanceOf(Map.class);
assertThat((Map) smartNull).isEmpty();
}
@Test
public void
should_return_a_empty_set_that_has_been_defined_with_method_generic_and_provided_in_var_args()
throws Throwable {
final HashSet<String> set1 = new HashSet<String>(Arrays.asList("set-1", "set-2"));
final HashSet<String> set2 = new HashSet<String>(Arrays.asList("set-1", "set-2"));
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull = answer.answer(invocationMethodWithVarArgs(new HashSet[] {set1, set2}));
assertThat(smartNull).isNotNull().isInstanceOf(Set.class);
assertThat((Set) smartNull).isEmpty();
}
@Test
public void
should_return_a_new_mock_that_has_been_defined_with_method_generic_and_provided_in_var_args()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
final Foo mock1 = mock(Foo.class);
final Foo mock2 = mock(Foo.class);
Object smartNull = answer.answer(invocationMethodWithVarArgs(new Foo[] {mock1, mock2}));
assertThat(smartNull).isNotNull().isNotSameAs(mock1).isNotSameAs(mock2);
assertThat(smartNull.toString())
.contains("SmartNull returned by")
.contains("genericFooBar.methodWithVarArgs(");
}
@Test
public void
should_return_an_Object_that_has_been_defined_with_method_generic_and_provided_in_var_args()
throws Throwable {
Answer<Object> answer = new ReturnsSmartNulls();
Object smartNull =
answer.answer(
invocationMethodWithVarArgs(
new Object[] {new Object() {}, new Object() {}}));
assertThat(smartNull.toString())
.contains("SmartNull returned by")
.contains("genericFooBar.methodWithVarArgs(");
}
}
| GenericFooBar |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.