language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__camel
|
components/camel-jdbc/src/test/java/org/apache/camel/component/jdbc/JdbcEndpointTest.java
|
{
"start": 899,
"end": 1377
}
|
class ____ extends JdbcRouteTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() throws Exception {
JdbcEndpoint jdbc = new JdbcEndpoint();
jdbc.setCamelContext(context);
jdbc.setDataSource(db);
context.addEndpoint("foo", jdbc);
from("direct:hello").to("foo");
}
};
}
}
|
JdbcEndpointTest
|
java
|
apache__flink
|
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/materializedtable/scheduler/CreateEmbeddedSchedulerWorkflowRequestBody.java
|
{
"start": 1387,
"end": 3628
}
|
class ____ implements RequestBody {
private static final String FIELD_NAME_MATERIALIZED_TABLE = "materializedTableIdentifier";
private static final String FIELD_NAME_CRON_EXPRESSION = "cronExpression";
private static final String FIELD_NAME_INIT_CONFIG = "initConfig";
private static final String FIELD_NAME_EXECUTION_CONFIG = "executionConfig";
private static final String FIELD_NAME_REST_ENDPOINT_URL = "restEndpointUrl";
@JsonProperty(FIELD_NAME_MATERIALIZED_TABLE)
private final String materializedTableIdentifier;
@JsonProperty(FIELD_NAME_CRON_EXPRESSION)
private final String cronExpression;
@JsonProperty(FIELD_NAME_INIT_CONFIG)
private final Map<String, String> initConfig;
@JsonProperty(FIELD_NAME_EXECUTION_CONFIG)
@Nullable
private final Map<String, String> executionConfig;
@JsonProperty(FIELD_NAME_REST_ENDPOINT_URL)
private final String restEndpointUrl;
@JsonCreator
public CreateEmbeddedSchedulerWorkflowRequestBody(
@JsonProperty(FIELD_NAME_MATERIALIZED_TABLE) String materializedTableIdentifier,
@JsonProperty(FIELD_NAME_CRON_EXPRESSION) String cronExpression,
@Nullable @JsonProperty(FIELD_NAME_INIT_CONFIG) Map<String, String> initConfig,
@Nullable @JsonProperty(FIELD_NAME_EXECUTION_CONFIG)
Map<String, String> executionConfig,
@JsonProperty(FIELD_NAME_REST_ENDPOINT_URL) String restEndpointUrl) {
this.materializedTableIdentifier = materializedTableIdentifier;
this.cronExpression = cronExpression;
this.initConfig = initConfig;
this.executionConfig = executionConfig;
this.restEndpointUrl = restEndpointUrl;
}
public String getMaterializedTableIdentifier() {
return materializedTableIdentifier;
}
@Nullable
public Map<String, String> getInitConfig() {
return initConfig;
}
@Nullable
public Map<String, String> getExecutionConfig() {
return executionConfig;
}
public String getCronExpression() {
return cronExpression;
}
public String getRestEndpointUrl() {
return restEndpointUrl;
}
}
|
CreateEmbeddedSchedulerWorkflowRequestBody
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/execution/search/extractor/ComputingExtractor.java
|
{
"start": 1297,
"end": 3296
}
|
class ____ implements HitExtractor, BucketExtractor {
/**
* Stands for {@code comPuting}. We try to use short names for {@link HitExtractor}s
* to save a few bytes when when we send them back to the user.
*/
static final String NAME = "p";
private final Processor processor;
private final String hitName;
public ComputingExtractor(Processor processor) {
this(processor, null);
}
public ComputingExtractor(Processor processor, String hitName) {
this.processor = processor;
this.hitName = hitName;
}
// Visibility required for tests
public ComputingExtractor(StreamInput in) throws IOException {
processor = in.readNamedWriteable(Processor.class);
hitName = in.readOptionalString();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(processor);
out.writeOptionalString(hitName);
}
@Override
public String getWriteableName() {
return NAME;
}
public Processor processor() {
return processor;
}
public Object extract(Object input) {
return processor.process(input);
}
@Override
public Object extract(Bucket bucket) {
return processor.process(bucket);
}
@Override
public Object extract(SearchHit hit) {
return processor.process(hit);
}
@Override
public String hitName() {
return hitName;
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
ComputingExtractor other = (ComputingExtractor) obj;
return Objects.equals(processor, other.processor) && Objects.equals(hitName, other.hitName);
}
@Override
public int hashCode() {
return Objects.hash(processor, hitName);
}
@Override
public String toString() {
return processor.toString();
}
}
|
ComputingExtractor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/merge/BidirectionalOneToManyMergeTest.java
|
{
"start": 997,
"end": 1582
}
|
class ____ {
@BeforeAll
public void setUp(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
entityManager.persist(
new Post( "High-Performance Java Persistence" ).setId( 1L )
);
} );
}
@Test
public void testMerge(EntityManagerFactoryScope scope) {
scope.inTransaction( entityManager -> {
Post post = entityManager.find( Post.class, 1L );
post.addComment( new PostComment( "This post rocks!", post ) );
post.getComments().isEmpty();
entityManager.merge( post );
} );
}
@Entity
public static
|
BidirectionalOneToManyMergeTest
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/AbstractS3ACostTest.java
|
{
"start": 2361,
"end": 2488
}
|
class ____ tests which make assertions about cost.
* <p></p>
* Factored out from {@code ITestS3AFileOperationCost}
*/
public
|
for
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schema/BaseSchemaGeneratorTest.java
|
{
"start": 2263,
"end": 3057
}
|
class ____ {
@Id
private Long id;
private String title;
@NaturalId
private String isbn;
@ManyToOne
private Person author;
//Getters and setters are omitted for brevity
//end::schema-generation-domain-model-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public Person getAuthor() {
return author;
}
public void setAuthor(Person author) {
this.author = author;
}
public String getIsbn() {
return isbn;
}
public void setIsbn(String isbn) {
this.isbn = isbn;
}
//tag::schema-generation-domain-model-example[]
}
//end::schema-generation-domain-model-example[]
}
|
Book
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/tcp/reactor/ReactorNettyTcpClient.java
|
{
"start": 6015,
"end": 9977
}
|
class ____.
* @param logger the logger to use
* @since 5.1
*/
public void setLogger(Log logger) {
this.logger = logger;
}
/**
* Return the currently configured Logger.
* @since 5.1
*/
public Log getLogger() {
return logger;
}
@Override
public CompletableFuture<Void> connectAsync(TcpConnectionHandler<P> handler) {
Assert.notNull(handler, "TcpConnectionHandler is required");
if (this.stopping) {
return handleShuttingDownConnectFailure(handler);
}
return extendTcpClient(this.tcpClient, handler)
.handle(new ReactorNettyHandler(handler))
.connect()
.doOnError(handler::afterConnectFailure)
.then().toFuture();
}
/**
* Provides an opportunity to initialize the {@link TcpClient} for the given
* {@link TcpConnectionHandler} which may implement sub-interfaces such as
* {@link org.springframework.messaging.simp.stomp.StompTcpConnectionHandler}
* that expose further information.
* @param tcpClient the candidate TcpClient
* @param handler the handler for the TCP connection
* @return the same handler or an updated instance
*/
protected TcpClient extendTcpClient(TcpClient tcpClient, TcpConnectionHandler<P> handler) {
return tcpClient;
}
@Override
public CompletableFuture<Void> connectAsync(TcpConnectionHandler<P> handler, ReconnectStrategy strategy) {
Assert.notNull(handler, "TcpConnectionHandler is required");
Assert.notNull(strategy, "ReconnectStrategy is required");
if (this.stopping) {
return handleShuttingDownConnectFailure(handler);
}
// Report first connect to the ListenableFuture
CompletableFuture<Void> connectFuture = new CompletableFuture<>();
extendTcpClient(this.tcpClient, handler)
.handle(new ReactorNettyHandler(handler))
.connect()
.doOnNext(conn -> connectFuture.complete(null))
.doOnError(connectFuture::completeExceptionally)
.doOnError(handler::afterConnectFailure) // report all connect failures to the handler
.flatMap(Connection::onDispose) // post-connect issues
.retryWhen(Retry.from(signals -> signals
.map(retrySignal -> (int) retrySignal.totalRetriesInARow())
.flatMap(attempt -> reconnect(attempt, strategy))))
.repeatWhen(flux -> flux
.scan(1, (count, element) -> count++)
.flatMap(attempt -> reconnect(attempt, strategy)))
.subscribe();
return connectFuture;
}
private CompletableFuture<Void> handleShuttingDownConnectFailure(TcpConnectionHandler<P> handler) {
IllegalStateException ex = new IllegalStateException("Shutting down.");
handler.afterConnectFailure(ex);
return Mono.<Void>error(ex).toFuture();
}
private Publisher<? extends Long> reconnect(Integer attempt, ReconnectStrategy reconnectStrategy) {
Long time = reconnectStrategy.getTimeToNextAttempt(attempt);
return (time != null ? Mono.delay(Duration.ofMillis(time), this.scheduler) : Mono.empty());
}
@Override
public CompletableFuture<Void> shutdownAsync() {
if (this.stopping) {
return CompletableFuture.completedFuture(null);
}
this.stopping = true;
Mono<Void> result;
if (this.channelGroup != null) {
result = FutureMono.from(this.channelGroup.close());
if (this.loopResources != null) {
result = result.onErrorComplete().then(this.loopResources.disposeLater());
}
if (this.poolResources != null) {
result = result.onErrorComplete().then(this.poolResources.disposeLater());
}
result = result.onErrorComplete().then(stopScheduler());
}
else {
result = stopScheduler();
}
return result.toFuture();
}
private Mono<Void> stopScheduler() {
return Mono.fromRunnable(() -> {
this.scheduler.dispose();
for (int i = 0; i < 20; i++) {
if (this.scheduler.isDisposed()) {
break;
}
try {
Thread.sleep(100);
}
catch (Throwable ex) {
break;
}
}
});
}
@Override
public String toString() {
return "ReactorNettyTcpClient[" + this.tcpClient + "]";
}
private
|
name
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/ApplicationContextInitializer.java
|
{
"start": 680,
"end": 1436
}
|
interface ____ initializing a Spring {@link ConfigurableApplicationContext}
* prior to being {@linkplain ConfigurableApplicationContext#refresh() refreshed}.
*
* <p>Typically used within web applications that require some programmatic initialization
* of the application context. For example, registering property sources or activating
* profiles against the {@linkplain ConfigurableApplicationContext#getEnvironment()
* context's environment}. See {@code ContextLoader} and {@code FrameworkServlet} support
* for declaring a "contextInitializerClasses" context-param and init-param, respectively.
*
* <p>{@code ApplicationContextInitializer} processors are encouraged to detect
* whether Spring's {@link org.springframework.core.Ordered Ordered}
|
for
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/ser/BeanPropertyWriter.java
|
{
"start": 2865,
"end": 3588
}
|
class ____ or
* associated annotations.
*/
protected final JavaType _declaredType;
/**
* Type to use for locating serializer; normally same as return type of the
* accessor method, but may be overridden by annotations.
*/
protected final JavaType _cfgSerializationType;
/**
* Base type of the property, if the declared type is "non-trivial"; meaning
* it is either a structured type (collection, map, array), or
* parameterized. Used to retain type information about contained type,
* which is mostly necessary if type meta-data is to be included.
*/
protected JavaType _nonTrivialBaseType;
/**
* Annotations from context (most often,
|
definition
|
java
|
apache__camel
|
components/camel-soap/src/main/java/org/apache/camel/dataformat/soap/SoapDataFormat.java
|
{
"start": 5422,
"end": 8795
}
|
class ____.
if (fromObj instanceof jakarta.xml.ws.Holder) {
jakarta.xml.ws.Holder holder = (jakarta.xml.ws.Holder) fromObj;
value = holder.value;
if (null == value) {
return null;
}
} else {
value = fromObj;
}
return new JAXBElement(name, value.getClass(), value);
}
/**
* Unmarshal a given SOAP xml stream and return the content of the SOAP body
*/
@Override
public Object unmarshal(Exchange exchange, Object body) throws Exception {
String soapAction = getSoapActionFromExchange(exchange);
// Determine the method name for an eventual BeanProcessor in the route
if (soapAction != null && elementNameStrategy instanceof ServiceInterfaceStrategy) {
ServiceInterfaceStrategy strategy = (ServiceInterfaceStrategy) elementNameStrategy;
String methodName = strategy.getMethodForSoapAction(soapAction);
exchange.getOut().setHeader(SOAP_METHOD_NAME, methodName);
}
// Store soap action for an eventual later marshal step.
// This is necessary as the soap action in the message may get lost on the way
if (soapAction != null) {
exchange.setProperty(SoapConstants.SOAP_ACTION, soapAction);
}
Object unmarshalledObject = super.unmarshal(exchange, body);
Object rootObject = JAXBIntrospector.getValue(unmarshalledObject);
InputStream stream = exchange.getContext().getTypeConverter().mandatoryConvertTo(InputStream.class, exchange, body);
return adapter.doUnmarshal(exchange, stream, rootObject);
}
private String getSoapActionFromExchange(Exchange exchange) {
Message inMessage = exchange.getIn();
String soapAction = inMessage.getHeader(SoapConstants.SOAP_ACTION, String.class);
if (soapAction == null) {
soapAction = inMessage.getHeader("SOAPAction", String.class);
if (soapAction != null && soapAction.startsWith("\"")) {
soapAction = soapAction.substring(1, soapAction.length() - 1);
}
}
if (soapAction == null) {
soapAction = exchange.getProperty(SoapConstants.SOAP_ACTION, String.class);
}
return soapAction;
}
/**
* Added the generated SOAP package to the JAXB context so Soap datatypes are available
*/
@Override
protected JAXBContext createContext() throws JAXBException {
if (getContextPath() != null) {
return JAXBContext.newInstance(adapter.getSoapPackageName() + ":" + getContextPath());
} else {
return JAXBContext.newInstance();
}
}
public ElementNameStrategy getElementNameStrategy() {
return elementNameStrategy;
}
public void setElementNameStrategy(ElementNameStrategy nameStrategy) {
this.elementNameStrategy = nameStrategy;
}
public boolean isIgnoreUnmarshalledHeaders() {
return ignoreUnmarshalledHeaders;
}
public void setIgnoreUnmarshalledHeaders(boolean ignoreUnmarshalledHeaders) {
this.ignoreUnmarshalledHeaders = ignoreUnmarshalledHeaders;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
}
|
itself
|
java
|
apache__avro
|
lang/java/avro/src/test/java/org/apache/avro/TestSchema.java
|
{
"start": 26953,
"end": 28171
}
|
enum ____ value
* that the exception returned is more descriptive than just a NPE or an
* incorrect mention of an unspecified non-null field.
*/
@Test
void enumWriteUnknownField() throws IOException {
Schema schema = Schema.createRecord("record1", "doc", "", false);
String goodValue = "HELLO";
Schema enumSchema = Schema.createEnum("enum1", "doc", "", Arrays.asList(goodValue));
Field field1 = new Field("field1", enumSchema);
schema.setFields(Collections.singletonList(field1));
GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
Encoder encoder = EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);
GenericRecordBuilder builder = new GenericRecordBuilder(schema);
String badValue = "GOODBYE";
builder.set(field1, new EnumSymbol(enumSchema, badValue));
Record record = builder.build();
try {
datumWriter.write(record, encoder);
fail("should have thrown");
} catch (AvroTypeException ate) {
assertTrue(ate.getMessage().contains(goodValue));
assertTrue(ate.getMessage().contains(badValue));
}
}
}
|
symbol
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/Langchain4jWebSearchComponentBuilderFactory.java
|
{
"start": 1935,
"end": 4093
}
|
interface ____ extends ComponentBuilder<LangChain4jWebSearchComponent> {
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default Langchain4jWebSearchComponentBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default Langchain4jWebSearchComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
}
|
Langchain4jWebSearchComponentBuilder
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ProducerModuleFactoryGeneratorTest.java
|
{
"start": 14361,
"end": 15039
}
|
class ____ {",
" @Produces String produceString(int module) {",
" return null;",
" }",
"}");
daggerCompiler(moduleFile)
.compile(subject -> subject.hasErrorCount(0));
}
@Test public void singleProducesMethodNoArgsFuture() {
Source moduleFile =
CompilerTests.javaSource(
"test.TestModule",
"package test;",
"",
"import com.google.common.util.concurrent.ListenableFuture;",
"import dagger.producers.ProducerModule;",
"import dagger.producers.Produces;",
"",
"@ProducerModule",
"final
|
TestModule
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/service/trace/ConfigTraceService.java
|
{
"start": 1142,
"end": 9943
}
|
class ____ {
/**
* persist event.
*/
public static final String PERSISTENCE_EVENT = "persist";
public static final String PERSISTENCE_EVENT_BETA = "persist-beta";
public static final String PERSISTENCE_EVENT_TAG = "persist-tag";
/**
* persist type.
*/
public static final String PERSISTENCE_TYPE_PUB = "pub";
public static final String PERSISTENCE_TYPE_REMOVE = "remove";
public static final String PERSISTENCE_TYPE_MERGE = "merge";
/**
* notify event.
*/
public static final String NOTIFY_EVENT = "notify";
public static final String NOTIFY_EVENT_BETA = "notify-beta";
public static final String NOTIFY_EVENT_BATCH = "notify-batch";
public static final String NOTIFY_EVENT_TAG = "notify-tag";
/**
* notify type.
*/
public static final String NOTIFY_TYPE_OK = "ok";
public static final String NOTIFY_TYPE_ERROR = "error";
public static final String NOTIFY_TYPE_UNHEALTH = "unhealth";
public static final String NOTIFY_TYPE_EXCEPTION = "exception";
/**
* dump event.
*/
public static final String DUMP_EVENT = "dump";
public static final String DUMP_EVENT_BETA = "dump-beta";
public static final String DUMP_EVENT_BATCH = "dump-batch";
public static final String DUMP_EVENT_TAG = "dump-tag";
/**
* dump type.
*/
public static final String DUMP_TYPE_OK = "ok";
public static final String DUMP_TYPE_REMOVE_OK = "remove-ok";
public static final String DUMP_TYPE_ERROR = "error";
/**
* pull event.
*/
public static final String PULL_EVENT = "pull";
/**
* pull type.
*/
public static final String PULL_TYPE_OK = "ok";
public static final String PULL_TYPE_NOTFOUND = "not-found";
public static final String PULL_TYPE_CONFLICT = "conflict";
public static final String PULL_TYPE_ERROR = "error";
/**
* log persistence event.
*
* @param dataId data id
* @param group group
* @param tenant tenant
* @param requestIpAppName request ip app name
* @param ts ts
* @param handleIp remote ip
* @param type type
* @param content content
*/
public static void logPersistenceEvent(String dataId, String group, String tenant, String requestIpAppName, long ts,
String handleIp, String event, String type, String content) {
if (!LogUtil.TRACE_LOG.isInfoEnabled()) {
return;
}
// Convenient tlog segmentation.
if (StringUtils.isBlank(tenant)) {
tenant = null;
}
//localIp | dataid | group | tenant | requestIpAppName | ts | client ip | event | type | [delayed = -1] | ext
// (md5)
String md5 = content == null ? null : MD5Utils.md5Hex(content, Constants.PERSIST_ENCODE);
LogUtil.TRACE_LOG.info("{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}", InetUtils.getSelfIP(), dataId, group, tenant,
requestIpAppName, ts, handleIp, event, type, -1, md5);
}
/**
* log notify event.
*
* @param dataId data id
* @param group group
* @param tenant tenant
* @param requestIpAppName request ip app name
* @param ts ts
* @param handleIp handle ip
* @param type type
* @param delayed delayed
* @param targetIp target ip
*/
public static void logNotifyEvent(String dataId, String group, String tenant, String requestIpAppName, long ts,
String handleIp, String event, String type, long delayed, String targetIp) {
if (!LogUtil.TRACE_LOG.isInfoEnabled()) {
return;
}
if (delayed < 0) {
delayed = 0;
}
MetricsMonitor.getNotifyRtTimer().record(delayed, TimeUnit.MILLISECONDS);
// Convenient tlog segmentation
if (StringUtils.isBlank(tenant)) {
tenant = null;
}
//localIp | dataid | group | tenant | requestIpAppName | ts | handleIp | event | type | [delayed] | ext
// (targetIp)
LogUtil.TRACE_LOG.info("{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}", InetUtils.getSelfIP(), dataId, group, tenant,
requestIpAppName, ts, handleIp, event, type, delayed, targetIp);
}
/**
* log dump event.
*
* @param dataId data id
* @param group group
* @param tenant tenant
* @param requestIpAppName request ip app name
* @param ts ts
* @param handleIp handle ip
* @param type type
* @param delayed delayed
* @param length length
*/
public static void logDumpEvent(String dataId, String group, String tenant, String requestIpAppName, long ts,
String handleIp, String type, long delayed, long length) {
logDumpEventInner(dataId, group, tenant, requestIpAppName, ts, handleIp, ConfigTraceService.DUMP_EVENT, type,
delayed, length);
}
public static void logDumpGrayNameEvent(String dataId, String group, String tenant, String grayName,
String requestIpAppName, long ts, String handleIp, String type, long delayed, long length) {
logDumpEventInner(dataId, group, tenant, requestIpAppName, ts, handleIp,
ConfigTraceService.DUMP_EVENT + "-" + grayName, type, delayed, length);
}
private static void logDumpEventInner(String dataId, String group, String tenant, String requestIpAppName, long ts,
String handleIp, String event, String type, long delayed, long length) {
if (!LogUtil.TRACE_LOG.isInfoEnabled()) {
return;
}
if (delayed < 0) {
delayed = 0;
}
MetricsMonitor.getDumpRtTimer().record(delayed, TimeUnit.MILLISECONDS);
// Convenient tlog segmentation
if (StringUtils.isBlank(tenant)) {
tenant = null;
}
//localIp | dataid | group | tenant | requestIpAppName | ts | handleIp | event | type | [delayed] | length
LogUtil.TRACE_LOG.info("{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}", InetUtils.getSelfIP(), dataId, group, tenant,
requestIpAppName, ts, handleIp, event, type, delayed, length);
}
/**
* log dump all event.
*
* @param dataId data id
* @param group group
* @param tenant tenant
* @param requestIpAppName request ip app name
* @param ts ts
* @param handleIp handle ip
* @param type type
*/
public static void logDumpAllEvent(String dataId, String group, String tenant, String requestIpAppName, long ts,
String handleIp, String type) {
if (!LogUtil.TRACE_LOG.isInfoEnabled()) {
return;
}
// Convenient tlog segmentation
if (StringUtils.isBlank(tenant)) {
tenant = null;
}
//localIp | dataid | group | tenant | requestIpAppName | ts | handleIp | event | type | [delayed = -1]
LogUtil.TRACE_LOG.info("{}|{}|{}|{}|{}|{}|{}|{}|{}|{}", InetUtils.getSelfIP(), dataId, group, tenant,
requestIpAppName, ts, handleIp, "dump-all", type, -1);
}
/**
* log pull event.
*
* @param dataId data id
* @param group group
* @param tenant tenant
* @param requestIpAppName request ip app name
* @param ts ts
* @param type type
* @param delayed delayed
* @param clientIp clientIp
* @param isNotify isNotify
* @param model model
*/
public static void logPullEvent(String dataId, String group, String tenant, String requestIpAppName, long ts,
String event, String type, long delayed, String clientIp, boolean isNotify, String model) {
if (!LogUtil.TRACE_LOG.isInfoEnabled()) {
return;
}
// Convenient tlog segmentation
if (StringUtils.isBlank(tenant)) {
tenant = null;
}
if (isNotify && delayed < 0) {
delayed = 0;
}
// localIp | dataid | group | tenant| requestIpAppName| ts | event | type | [delayed] |clientIp| isNotify | mode(http/grpc)
LogUtil.TRACE_LOG.info("{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}|{}", InetUtils.getSelfIP(), dataId, group, tenant,
requestIpAppName, ts, event, type, delayed, clientIp, isNotify, model);
}
}
|
ConfigTraceService
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/PatternMatchingInstanceofTest.java
|
{
"start": 7173,
"end": 7597
}
|
class ____ {
void test(Object o) {
if (o instanceof Test) {
Integer test = (Integer) o;
test(test);
}
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void noInstanceofAtAll_noFinding() {
helper
.addInputLines(
"Test.java",
"""
|
Test
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/openconnections/ConnectionEventsTest.java
|
{
"start": 2248,
"end": 2587
}
|
class ____ {
static final CountDownLatch CLOSED_LATCH = new CountDownLatch(1);
@OnOpen
String open(WebSocketConnection connection) {
return connection.id();
}
@OnClose
void close() {
CLOSED_LATCH.countDown();
}
}
@Singleton
public static
|
Endpoint
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/time/ZoneIdOfZ.java
|
{
"start": 2066,
"end": 2979
}
|
class ____ extends BugChecker implements MethodInvocationTreeMatcher {
private static final String ZONE_OFFSET = "java.time.ZoneOffset";
private static final Matcher<ExpressionTree> ZONE_ID_OF =
allOf(
staticMethod().onClass("java.time.ZoneId").named("of"),
not(anyOf(packageStartsWith("java."), packageStartsWith("tck.java."))));
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (ZONE_ID_OF.matches(tree, state)) {
String zone = constValue(tree.getArguments().getFirst(), String.class);
if (zone != null && zone.equals("Z")) {
SuggestedFix.Builder fix = SuggestedFix.builder().addImport(ZONE_OFFSET);
fix.replace(tree, String.format("%s.UTC", qualifyType(state, fix, ZONE_OFFSET)));
return describeMatch(tree, fix.build());
}
}
return Description.NO_MATCH;
}
}
|
ZoneIdOfZ
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/ReadWriteDiskValidator.java
|
{
"start": 1134,
"end": 1366
}
|
class ____ check a directory by to create a file,
* write some bytes into it, read back, and verify if they are identical.
* Read time and write time are recorded and put into an
* {@link ReadWriteDiskValidatorMetrics}.
*/
public
|
to
|
java
|
spring-projects__spring-framework
|
spring-r2dbc/src/test/java/org/springframework/r2dbc/connection/DelegatingConnectionFactoryTests.java
|
{
"start": 1039,
"end": 1667
}
|
class ____ {
ConnectionFactory delegate = mock();
Connection connectionMock = mock();
DelegatingConnectionFactory connectionFactory = new ExampleConnectionFactory(delegate);
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
void shouldDelegateGetConnection() {
Mono<Connection> connectionMono = Mono.just(connectionMock);
when(delegate.create()).thenReturn((Mono) connectionMono);
assertThat(connectionFactory.create()).isSameAs(connectionMono);
}
@Test
void shouldDelegateUnwrapWithoutImplementing() {
assertThat(connectionFactory.unwrap()).isSameAs(delegate);
}
static
|
DelegatingConnectionFactoryTests
|
java
|
apache__kafka
|
jmh-benchmarks/src/main/java/org/apache/kafka/jmh/producer/ProducerRecordBenchmark.java
|
{
"start": 1515,
"end": 1736
}
|
class ____ {
@Benchmark
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public ProducerRecord<String, String> constructorBenchmark() {
return new ProducerRecord<>("topic", "value");
}
}
|
ProducerRecordBenchmark
|
java
|
quarkusio__quarkus
|
test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusTestExtension.java
|
{
"start": 53415,
"end": 53989
}
|
class ____...
resource.close();
} catch (Throwable e) {
log.error("Failed to shutdown Quarkus", e);
} finally {
if (runningQuarkusApplication != null) {
((SmallRyeConfigProviderResolver) ConfigProviderResolver.instance())
.releaseConfig(runningQuarkusApplication.getClassLoader());
}
runningQuarkusApplication = null;
Thread.currentThread().setContextClassLoader(old);
}
}
}
|
loader
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/DatafeedContextProvider.java
|
{
"start": 665,
"end": 2222
}
|
class ____ {
private final JobConfigProvider jobConfigProvider;
private final DatafeedConfigProvider datafeedConfigProvider;
private final JobResultsProvider resultsProvider;
public DatafeedContextProvider(
JobConfigProvider jobConfigProvider,
DatafeedConfigProvider datafeedConfigProvider,
JobResultsProvider jobResultsProvider
) {
this.jobConfigProvider = Objects.requireNonNull(jobConfigProvider);
this.datafeedConfigProvider = Objects.requireNonNull(datafeedConfigProvider);
this.resultsProvider = Objects.requireNonNull(jobResultsProvider);
}
public void buildDatafeedContext(String datafeedId, ActionListener<DatafeedContext> listener) {
datafeedConfigProvider.getDatafeedConfig(datafeedId, null, listener.delegateFailureAndWrap((delegate1, datafeedConfigBuilder) -> {
DatafeedConfig datafeedConfig = datafeedConfigBuilder.build();
jobConfigProvider.getJob(datafeedConfig.getJobId(), null, delegate1.delegateFailureAndWrap((delegate2, jobBuilder) -> {
resultsProvider.getRestartTimeInfo(jobBuilder.getId(), delegate2.delegateFailureAndWrap((delegate3, restartTimeInfo) -> {
resultsProvider.datafeedTimingStats(jobBuilder.getId(), timingStats -> {
delegate3.onResponse(new DatafeedContext(datafeedConfig, jobBuilder.build(), restartTimeInfo, timingStats));
}, delegate3::onFailure);
}));
}));
}));
}
}
|
DatafeedContextProvider
|
java
|
bumptech__glide
|
annotation/src/main/java/com/bumptech/glide/annotation/GlideModule.java
|
{
"start": 522,
"end": 709
}
|
class ____ will be used as a replacement for {@code
* com.bumptech.glide.Glide} in Applications that depend on Glide's generated code.
*/
String glideName() default "GlideApp";
}
|
that
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/sql/ast/DB2iSqlAstTranslator.java
|
{
"start": 588,
"end": 1148
}
|
class ____<T extends JdbcOperation> extends DB2SqlAstTranslator<T> {
private final DatabaseVersion version;
public DB2iSqlAstTranslator(SessionFactoryImplementor sessionFactory, Statement statement, DatabaseVersion version) {
super( sessionFactory, statement );
this.version = version;
}
@Override
protected void renderComparison(Expression lhs, ComparisonOperator operator, Expression rhs) {
renderComparisonStandard( lhs, operator, rhs );
}
@Override
public DatabaseVersion getDB2Version() {
return DB2_LUW_VERSION;
}
}
|
DB2iSqlAstTranslator
|
java
|
apache__camel
|
components/camel-huawei/camel-huaweicloud-obs/src/test/java/org/apache/camel/component/huaweicloud/obs/CreateBucketJsonFunctionalTest.java
|
{
"start": 1360,
"end": 3708
}
|
class ____ extends CamelTestSupport {
private static final String ACCESS_KEY = "replace_this_with_access_key";
private static final String SECRET_KEY = "replace_this_with_secret_key";
private static final String REGION = "replace_this_with_region";
private static final String BUCKET_NAME = "replace_this_with_bucket_name";
private static final String BUCKET_LOCATION = "replace_this_with_bucket_location";
@BindToRegistry("serviceKeys")
ServiceKeys serviceKeys = new ServiceKeys(ACCESS_KEY, SECRET_KEY);
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:create_bucket")
.to("hwcloud-obs:createBucket?" +
"serviceKeys=#serviceKeys" +
"®ion=" + REGION +
"&ignoreSslVerification=true")
.log("Create bucket successful")
.to("log:LOG?showAll=true")
.to("mock:create_bucket_result");
}
};
}
/**
* The following test cases should be manually enabled to perform test against the actual HuaweiCloud OBS server
* with real user credentials. To perform this test, manually comment out the @Ignore annotation and enter relevant
* service parameters in the placeholders above (static variables of this test class)
*
* @throws Exception
*/
@Disabled("Manually enable this once you configure the parameters in the placeholders above")
@Test
public void testCreateBucket() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:create_bucket_result");
mock.expectedMinimumMessageCount(1);
// More parameters can be added to the Json string below. E.g. acl, availableZone, storageClass
String newBucket = "{\"bucketName\":\"" + BUCKET_NAME + "\",\"location\":\"" + BUCKET_LOCATION + "\"}";
template.sendBody("direct:create_bucket", newBucket);
Exchange responseExchange = mock.getExchanges().get(0);
mock.assertIsSatisfied();
assertNotNull(responseExchange.getIn().getBody(String.class));
assertTrue(responseExchange.getIn().getBody(String.class).length() > 0);
}
}
|
CreateBucketJsonFunctionalTest
|
java
|
quarkusio__quarkus
|
extensions/oidc-db-token-state-manager/deployment/src/test/java/io/quarkus/oidc/db/token/state/manager/UnprotectedResource.java
|
{
"start": 132,
"end": 238
}
|
class ____ {
@GET
public String getName() {
return "unprotected";
}
}
|
UnprotectedResource
|
java
|
quarkusio__quarkus
|
extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/UnremovableBeanBuildItem.java
|
{
"start": 8076,
"end": 8717
}
|
class ____ implements Predicate<BeanInfo> {
private final DotName dotName;
public BeanTypeExclusion(DotName dotName) {
this.dotName = Objects.requireNonNull(dotName);
}
@Override
public boolean test(BeanInfo bean) {
for (Type type : bean.getTypes()) {
if (dotName.equals(type.name())) {
return true;
}
}
return false;
}
@Override
public String toString() {
return "BeanTypeExclusion [dotName=" + dotName + "]";
}
}
public static
|
BeanTypeExclusion
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/taskmanager/TaskManagerIdPathParameter.java
|
{
"start": 1086,
"end": 1668
}
|
class ____ extends MessagePathParameter<ResourceID> {
public static final String KEY = "taskmanagerid";
public TaskManagerIdPathParameter() {
super(KEY);
}
@Override
protected ResourceID convertFromString(String value) {
return new ResourceID(value);
}
@Override
protected String convertToString(ResourceID value) {
return value.getResourceIdString();
}
@Override
public String getDescription() {
return "32-character hexadecimal string that identifies a task manager.";
}
}
|
TaskManagerIdPathParameter
|
java
|
apache__camel
|
components/camel-dynamic-router/src/test/java/org/apache/camel/component/dynamicrouter/control/DynamicRouterControlChannelSendDynamicAwareTest.java
|
{
"start": 1732,
"end": 6960
}
|
class ____ {
@Mock
Exchange exchange;
@Mock
Message message;
@Test
void prepare() throws Exception {
String originalUri = "dynamic-router-control://subscribe?subscriptionId=testSub1";
String uri = "dynamic-router-control://subscribe?subscriptionId=testSub1";
try (DynamicRouterControlChannelSendDynamicAware testSubject = new DynamicRouterControlChannelSendDynamicAware()) {
SendDynamicAware.DynamicAwareEntry entry = testSubject.prepare(exchange, uri, originalUri);
assertAll(
() -> assertEquals(entry.getOriginalUri(), originalUri),
() -> assertEquals(entry.getUri(), uri),
() -> assertEquals(2, entry.getProperties().size()),
() -> assertEquals("subscribe", entry.getProperties().get("controlAction")),
() -> assertEquals("testSub1", entry.getProperties().get("subscriptionId")));
}
}
@Test
void resolveStaticUri() throws Exception {
String originalUri = "dynamic-router-control:subscribe?subscriptionId=testSub1";
String uri = "dynamic-router-control://subscribe?subscriptionId=testSub1";
try (DynamicRouterControlChannelSendDynamicAware testSubject = new DynamicRouterControlChannelSendDynamicAware()) {
SendDynamicAware.DynamicAwareEntry entry = testSubject.prepare(exchange, uri, originalUri);
String result = testSubject.resolveStaticUri(exchange, entry);
assertEquals("dynamic-router-control://subscribe", result);
}
}
@Test
void resolveStaticUriShouldNotOptimize() throws Exception {
String originalUri = "dynamic-router-ctrl:subscribe?subscriptionId=testSub1";
String uri = "dynamic-router-ctrl://subscribe?subscriptionId=testSub1";
try (DynamicRouterControlChannelSendDynamicAware testSubject = new DynamicRouterControlChannelSendDynamicAware()) {
SendDynamicAware.DynamicAwareEntry entry = testSubject.prepare(exchange, uri, originalUri);
String result = testSubject.resolveStaticUri(exchange, entry);
assertEquals(null, result);
}
}
@Test
void createPreProcessor() throws Exception {
Mockito.when(exchange.getMessage()).thenReturn(message);
Mockito.doNothing().when(message).setHeader(CONTROL_ACTION_HEADER, "subscribe");
Mockito.doNothing().when(message).setHeader(CONTROL_SUBSCRIPTION_ID, "testSub1");
String originalUri = "dynamic-router-control:subscribe?subscriptionId=testSub1";
String uri = "dynamic-router-control://subscribe?subscriptionId=testSub1";
try (DynamicRouterControlChannelSendDynamicAware testSubject = new DynamicRouterControlChannelSendDynamicAware()) {
SendDynamicAware.DynamicAwareEntry entry = testSubject.prepare(exchange, uri, originalUri);
Processor preProcessor = testSubject.createPreProcessor(exchange, entry);
Assertions.assertNotNull(preProcessor);
preProcessor.process(exchange);
}
}
@Test
void createPreProcessorShouldNotOptimize() throws Exception {
String originalUri = "dynamic-router-ctrl:subscribe?subscriptionId=testSub1";
String uri = "dynamic-router-ctrl://subscribe?subscriptionId=testSub1";
try (DynamicRouterControlChannelSendDynamicAware testSubject = new DynamicRouterControlChannelSendDynamicAware()) {
SendDynamicAware.DynamicAwareEntry entry = testSubject.prepare(exchange, uri, originalUri);
Processor preProcessor = testSubject.createPreProcessor(exchange, entry);
Assertions.assertNull(preProcessor);
}
}
@Test
void createPostProcessor() throws Exception {
Mockito.when(exchange.getMessage()).thenReturn(message);
Mockito.when(message.removeHeader(any())).thenReturn("test");
String originalUri = "dynamic-router-control:subscribe?subscriptionId=testSub1";
String uri = "dynamic-router-control://subscribe?subscriptionId=testSub1";
try (DynamicRouterControlChannelSendDynamicAware testSubject = new DynamicRouterControlChannelSendDynamicAware()) {
SendDynamicAware.DynamicAwareEntry entry = testSubject.prepare(exchange, uri, originalUri);
Processor postProcessor = testSubject.createPostProcessor(exchange, entry);
postProcessor.process(exchange);
}
Mockito.verify(message, Mockito.times(URI_PARAMS_TO_HEADER_NAMES.size())).removeHeader(any());
}
@Test
void createPostProcessorShouldNotOptimize() throws Exception {
String originalUri = "dynamic-router-ctrl:subscribe?subscriptionId=testSub1";
String uri = "dynamic-router-ctrl://subscribe?subscriptionId=testSub1";
try (DynamicRouterControlChannelSendDynamicAware testSubject = new DynamicRouterControlChannelSendDynamicAware()) {
SendDynamicAware.DynamicAwareEntry entry = testSubject.prepare(exchange, uri, originalUri);
Processor postProcessor = testSubject.createPostProcessor(exchange, entry);
assertNull(postProcessor);
}
}
}
|
DynamicRouterControlChannelSendDynamicAwareTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-http-codec/src/main/java/org/springframework/boot/http/codec/CodecCustomizer.java
|
{
"start": 948,
"end": 1149
}
|
interface ____ {
/**
* Callback to customize a {@link CodecConfigurer} instance.
* @param configurer codec configurer to customize
*/
void customize(CodecConfigurer configurer);
}
|
CodecCustomizer
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/index/query/SearchExecutionContextTests.java
|
{
"start": 5263,
"end": 42139
}
|
class ____ extends ESTestCase {
public void testFailIfFieldMappingNotFound() {
SearchExecutionContext context = createSearchExecutionContext(IndexMetadata.INDEX_UUID_NA_VALUE, null);
context.setAllowUnmappedFields(false);
MappedFieldType fieldType = new TextFieldMapper.TextFieldType("text", randomBoolean(), false);
MappedFieldType result = context.failIfFieldMappingNotFound("name", fieldType);
assertThat(result, sameInstance(fieldType));
QueryShardException e = expectThrows(QueryShardException.class, () -> context.failIfFieldMappingNotFound("name", null));
assertEquals("No field mapping can be found for the field with name [name]", e.getMessage());
context.setAllowUnmappedFields(true);
result = context.failIfFieldMappingNotFound("name", fieldType);
assertThat(result, sameInstance(fieldType));
result = context.failIfFieldMappingNotFound("name", null);
assertThat(result, nullValue());
context.setAllowUnmappedFields(false);
context.setMapUnmappedFieldAsString(true);
result = context.failIfFieldMappingNotFound("name", fieldType);
assertThat(result, sameInstance(fieldType));
result = context.failIfFieldMappingNotFound("name", null);
assertThat(result, notNullValue());
assertThat(result, instanceOf(TextFieldMapper.TextFieldType.class));
assertThat(result.name(), equalTo("name"));
}
public void testBuildAnonymousFieldType() {
SearchExecutionContext context = createSearchExecutionContext("uuid", null);
assertThat(context.buildAnonymousFieldType("keyword"), instanceOf(KeywordFieldMapper.KeywordFieldType.class));
assertThat(context.buildAnonymousFieldType("long"), instanceOf(NumberFieldMapper.NumberFieldType.class));
}
public void testToQueryFails() {
SearchExecutionContext context = createSearchExecutionContext(IndexMetadata.INDEX_UUID_NA_VALUE, null);
Exception exc = expectThrows(Exception.class, () -> context.toQuery(new DummyQueryBuilder() {
@Override
protected Query doToQuery(SearchExecutionContext context) throws IOException {
throw new RuntimeException("boom");
}
}));
assertThat(exc.getMessage(), equalTo("failed to create query: boom"));
}
public void testClusterAlias() throws IOException {
final String clusterAlias = randomBoolean() ? null : "remote_cluster";
SearchExecutionContext context = createSearchExecutionContext(IndexMetadata.INDEX_UUID_NA_VALUE, clusterAlias);
IndexFieldMapper mapper = new IndexFieldMapper();
IndexFieldData<?> forField = context.getForField(mapper.fieldType(), MappedFieldType.FielddataOperation.SEARCH);
String expected = clusterAlias == null
? context.getIndexSettings().getIndexMetadata().getIndex().getName()
: clusterAlias + ":" + context.getIndexSettings().getIndex().getName();
assertEquals(expected, ((AbstractLeafOrdinalsFieldData) forField.load(null)).getOrdinalsValues().lookupOrd(0).utf8ToString());
}
public void testGetFullyQualifiedIndex() {
String clusterAlias = randomAlphaOfLengthBetween(5, 10);
String indexUuid = randomAlphaOfLengthBetween(3, 10);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(indexUuid, clusterAlias);
assertThat(searchExecutionContext.getFullyQualifiedIndex().getName(), equalTo(clusterAlias + ":index"));
assertThat(searchExecutionContext.getFullyQualifiedIndex().getUUID(), equalTo(indexUuid));
}
public void testIndexSortedOnField() {
Settings settings = indexSettings(IndexVersion.current(), 1, 1).put("index.sort.field", "sort_field").build();
IndexMetadata indexMetadata = new IndexMetadata.Builder("index").settings(settings).build();
IndexSettings indexSettings = new IndexSettings(indexMetadata, settings);
SearchExecutionContext context = SearchExecutionContextHelper.createSimple(
indexSettings,
XContentParserConfiguration.EMPTY,
new NamedWriteableRegistry(Collections.emptyList())
);
assertTrue(context.indexSortedOnField("sort_field"));
assertFalse(context.indexSortedOnField("second_sort_field"));
assertFalse(context.indexSortedOnField("non_sort_field"));
}
public void testFielddataLookupSelfReference() {
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(
// simulate a runtime field that depends on itself e.g. field: doc['field']
runtimeField("field", leafLookup -> leafLookup.doc().get("field").toString())
);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("field", searchExecutionContext));
assertEquals("Cyclic dependency detected while resolving runtime fields: field -> field", iae.getMessage());
}
public void testFielddataLookupLooseLoop() {
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(
// simulate a runtime field cycle: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['1']
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", leafLookup -> leafLookup.doc().get("1").get(0).toString())
);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", searchExecutionContext));
assertEquals("Cyclic dependency detected while resolving runtime fields: 1 -> 2 -> 3 -> 4 -> 1", iae.getMessage());
}
public void testFielddataLookupTerminatesInLoop() {
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(
// simulate a runtime field cycle: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['4']
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", leafLookup -> leafLookup.doc().get("4").get(0).toString())
);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", searchExecutionContext));
assertEquals("Cyclic dependency detected while resolving runtime fields: 1 -> 2 -> 3 -> 4 -> 4", iae.getMessage());
}
public void testFielddataLookupSometimesLoop() throws IOException {
// create this field so we can use it to make sure we're escaping the loop on only the "first" document
var concreteField = new KeywordFieldMapper.KeywordFieldType("indexed_field", true, true, Collections.emptyMap());
// simulate a runtime field cycle in the second doc: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['4']
var runtimeFields = List.of(
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", leafLookup -> {
if (leafLookup.doc().get("indexed_field").getFirst().equals("first")) {
return "escape!";
}
return leafLookup.doc().get("4").getFirst().toString();
})
);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(
"uuid",
null,
createMappingLookup(List.of(concreteField), runtimeFields),
Collections.emptyMap()
);
List<String> values = collect("1", searchExecutionContext, new TermQuery(new Term("indexed_field", "first")));
assertEquals(List.of("escape!"), values);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", searchExecutionContext));
assertEquals("Cyclic dependency detected while resolving runtime fields: 1 -> 2 -> 3 -> 4 -> 4", iae.getMessage());
}
public void testFielddataLookupBeyondMaxDepth() {
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", leafLookup -> leafLookup.doc().get("5").get(0).toString()),
runtimeField("5", leafLookup -> leafLookup.doc().get("6").get(0).toString()),
runtimeField("6", leafLookup -> "cat")
);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", searchExecutionContext));
assertEquals("Field requires resolving too many dependent fields: 1 -> 2 -> 3 -> 4 -> 5 -> 6", iae.getMessage());
}
public void testFielddataLookupReferencesBelowMaxDepth() throws IOException {
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", leafLookup -> leafLookup.doc().get("5").get(0).toString()),
runtimeField("5", (leafLookup, docId) -> "cat on doc " + docId)
);
assertEquals(List.of("cat on doc 0", "cat on doc 1"), collect("1", searchExecutionContext));
}
public void testFielddataLookupOneFieldManyReferences() throws IOException {
int numFields = randomIntBetween(5, 20);
List<RuntimeField> fields = new ArrayList<>(numFields + 1);
fields.add(runtimeField("root", leafLookup -> {
StringBuilder value = new StringBuilder();
for (int i = 0; i < numFields; i++) {
value.append(leafLookup.doc().get(i).get(0));
}
return value.toString();
}));
StringBuilder expected = new StringBuilder();
for (int i = 0; i < numFields; i++) {
String fieldValue = Integer.toString(i);
fields.add(runtimeField(Integer.toString(i), leafLookup -> fieldValue));
expected.append(i);
}
assertEquals(
List.of(expected.toString(), expected.toString()),
collect("root", createSearchExecutionContext("uuid", null, createMappingLookup(List.of(), fields), Map.of()))
);
}
private static MappingLookup createMappingLookup(List<MappedFieldType> concreteFields, List<RuntimeField> runtimeFields) {
List<FieldMapper> mappers = concreteFields.stream().<FieldMapper>map(MockFieldMapper::new).toList();
RootObjectMapper.Builder builder = new RootObjectMapper.Builder("_doc", ObjectMapper.Defaults.SUBOBJECTS);
Map<String, RuntimeField> runtimeFieldTypes = runtimeFields.stream().collect(Collectors.toMap(RuntimeField::name, r -> r));
builder.addRuntimeFields(runtimeFieldTypes);
Mapping mapping = new Mapping(
builder.build(MapperBuilderContext.root(false, false)),
new MetadataFieldMapper[0],
Collections.emptyMap()
);
return MappingLookup.fromMappers(mapping, mappers, Collections.emptyList());
}
public void testSearchRequestRuntimeFields() {
/*
* Making these immutable here test that we don't modify them.
* Modifying them would cause all kinds of problems if two
* shards are parsed on the same node.
*/
Map<String, Object> runtimeMappings = Map.ofEntries(
Map.entry("cat", Map.of("type", "keyword")),
Map.entry("dog", Map.of("type", "long"))
);
SearchExecutionContext context = createSearchExecutionContext(
"uuid",
null,
createMappingLookup(
List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")),
List.of(new TestRuntimeField("runtime", "long"))
),
runtimeMappings
);
assertTrue(context.isFieldMapped("cat"));
assertThat(context.getFieldType("cat"), instanceOf(KeywordScriptFieldType.class));
assertThat(context.getMatchingFieldNames("cat"), equalTo(Set.of("cat")));
assertTrue(context.isFieldMapped("dog"));
assertThat(context.getFieldType("dog"), instanceOf(LongScriptFieldType.class));
assertThat(context.getMatchingFieldNames("dog"), equalTo(Set.of("dog")));
assertTrue(context.isFieldMapped("pig"));
assertThat(context.getFieldType("pig"), instanceOf(MockFieldMapper.FakeFieldType.class));
assertThat(context.getMatchingFieldNames("pig"), equalTo(Set.of("pig")));
assertThat(context.getMatchingFieldNames("*"), equalTo(Set.of("cat", "dog", "pig", "runtime")));
}
public void testSearchRequestRuntimeFieldsWithNamespaceValidator() {
final String errorMessage = "error 12345";
final String disallowed = "_project";
RootObjectMapperNamespaceValidator validator = new RootObjectMapperNamespaceValidator() {
@Override
public void validateNamespace(ObjectMapper.Subobjects subobjects, String name) {
if (name.equals(disallowed)) {
throw new IllegalArgumentException(errorMessage);
} else if (subobjects != ObjectMapper.Subobjects.ENABLED) {
// name here will be something like _project.my_field, rather than just _project
if (name.startsWith(disallowed + ".")) {
throw new IllegalArgumentException(errorMessage);
}
}
}
};
{
Map<String, Object> runtimeMappings = Map.ofEntries(
Map.entry(disallowed, Map.of("type", randomFrom("keyword", "long"))),
Map.entry("dog", Map.of("type", "long"))
);
Exception e = expectThrows(
IllegalArgumentException.class,
() -> createSearchExecutionContext(
"uuid",
null,
createMappingLookup(
List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")),
List.of(new TestRuntimeField("runtime", "long"))
),
runtimeMappings,
validator
)
);
assertThat(e.getMessage(), equalTo(errorMessage));
}
{
Map<String, Object> runtimeMappings = Map.ofEntries(
Map.entry(disallowed + ".subfield", Map.of("type", randomFrom("keyword", "long"))),
Map.entry("dog", Map.of("type", "long"))
);
Exception e = expectThrows(
IllegalArgumentException.class,
() -> createSearchExecutionContext(
"uuid",
null,
createMappingLookup(
List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")),
List.of(new TestRuntimeField("runtime", "long"))
),
runtimeMappings,
validator
)
);
assertThat(e.getMessage(), equalTo(errorMessage));
}
// _projectx should be allowed
{
Map<String, Object> runtimeMappings = Map.ofEntries(
Map.entry(disallowed + "x", Map.of("type", "keyword")),
Map.entry("dog", Map.of("type", "long"))
);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(
"uuid",
null,
createMappingLookup(
List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")),
List.of(new TestRuntimeField("runtime", "long"))
),
runtimeMappings,
validator
);
assertNotNull(searchExecutionContext);
}
}
public void testSearchRequestRuntimeFieldsWrongFormat() {
Map<String, Object> runtimeMappings = new HashMap<>();
runtimeMappings.put("field", Arrays.asList("test1", "test2"));
MapperParsingException exception = expectThrows(
MapperParsingException.class,
() -> createSearchExecutionContext(
"uuid",
null,
createMappingLookup(List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")), List.of()),
runtimeMappings
)
);
assertEquals("Expected map for runtime field [field] definition but got a java.util.Arrays$ArrayList", exception.getMessage());
}
public void testSearchRequestRuntimeFieldsRemoval() {
Map<String, Object> runtimeMappings = new HashMap<>();
runtimeMappings.put("field", null);
MapperParsingException exception = expectThrows(
MapperParsingException.class,
() -> createSearchExecutionContext(
"uuid",
null,
createMappingLookup(List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")), List.of()),
runtimeMappings
)
);
assertEquals("Runtime field [field] was set to null but its removal is not supported in this context", exception.getMessage());
}
public void testSearchRequestRuntimeFieldsAndMultifieldDetection() {
Map<String, Object> runtimeMappings = Map.ofEntries(
Map.entry("cat", Map.of("type", "keyword")),
Map.entry("cat.subfield", Map.of("type", "keyword")),
Map.entry("dog", Map.of("type", "long"))
);
MappingLookup mappingLookup = createMappingLookup(
List.of(
new MockFieldMapper.FakeFieldType("pig"),
new MockFieldMapper.FakeFieldType("pig.subfield"),
new MockFieldMapper.FakeFieldType("cat"),
new MockFieldMapper.FakeFieldType("cat.subfield")
),
List.of(new TestRuntimeField("runtime", "long"))
);
SearchExecutionContext context = createSearchExecutionContext("uuid", null, mappingLookup, runtimeMappings);
assertTrue(context.isMultiField("pig.subfield"));
assertFalse(context.isMultiField("cat.subfield"));
assertTrue(mappingLookup.isMultiField("cat.subfield"));
}
public void testSyntheticSourceSearchLookup() throws IOException {
// Build a mapping using synthetic source
SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder(null, Settings.EMPTY, false, false, false).setSynthetic().build();
RootObjectMapper root = new RootObjectMapper.Builder("_doc").add(
new KeywordFieldMapper.Builder("cat", defaultIndexSettings()).ignoreAbove(100)
).build(MapperBuilderContext.root(true, false));
Mapping mapping = new Mapping(root, new MetadataFieldMapper[] { sourceMapper }, Map.of());
MappingLookup lookup = MappingLookup.fromMapping(mapping);
SearchExecutionContext sec = createSearchExecutionContext("index", "", lookup, Map.of());
assertTrue(sec.isSourceSynthetic());
MemoryIndex mi = new MemoryIndex();
mi.addField(new KeywordField("cat", "meow", Field.Store.YES), null);
LeafReaderContext leafReaderContext = mi.createSearcher().getIndexReader().leaves().get(0);
SearchLookup searchLookup = sec.lookup();
Source source = searchLookup.getSource(leafReaderContext, 0);
assertEquals(1, source.source().size());
assertEquals("meow", source.source().get("cat"));
}
public void testAllowedFields() {
Map<String, Object> runtimeMappings = Map.ofEntries(
Map.entry("runtimecat", Map.of("type", "keyword")),
Map.entry("runtimedog", Map.of("type", "long"))
);
SearchExecutionContext context = createSearchExecutionContext(
"uuid",
null,
SearchExecutionContextTests.createMappingLookup(
List.of(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")),
List.of(new TestRuntimeField("runtime", "long"))
),
runtimeMappings
);
assertNotNull(context.getFieldType("pig"));
assertNotNull(context.getFieldType("cat"));
assertNotNull(context.getFieldType("runtimecat"));
assertNotNull(context.getFieldType("runtimedog"));
assertNotNull(context.getFieldType("runtime"));
assertEquals(3, context.getMatchingFieldNames("runtime*").size());
assertEquals(2, context.getMatchingFieldNames("*cat").size());
assertThat(getFieldNames(context.getAllFields()), containsInAnyOrder("pig", "cat", "runtimecat", "runtimedog", "runtime"));
context.setAllowedFields(s -> true);
assertNotNull(context.getFieldType("pig"));
assertTrue(context.isFieldMapped("pig"));
assertNotNull(context.getFieldType("cat"));
assertTrue(context.isFieldMapped("cat"));
assertNotNull(context.getFieldType("runtimecat"));
assertTrue(context.isFieldMapped("runtimecat"));
assertNotNull(context.getFieldType("runtimedog"));
assertTrue(context.isFieldMapped("runtimedog"));
assertNotNull(context.getFieldType("runtime"));
assertTrue(context.isFieldMapped("runtime"));
assertEquals(3, context.getMatchingFieldNames("runtime*").size());
assertEquals(2, context.getMatchingFieldNames("*cat").size());
assertThat(getFieldNames(context.getAllFields()), containsInAnyOrder("pig", "cat", "runtimecat", "runtimedog", "runtime"));
context.setAllowedFields(s -> s.equals("cat"));
assertNull(context.getFieldType("pig"));
assertFalse(context.isFieldMapped("pig"));
assertNotNull(context.getFieldType("cat"));
assertTrue(context.isFieldMapped("cat"));
assertNull(context.getFieldType("runtimecat"));
assertFalse(context.isFieldMapped("runtimecat"));
assertNull(context.getFieldType("runtimedog"));
assertFalse(context.isFieldMapped("runtimedog"));
assertNull(context.getFieldType("runtime"));
assertFalse(context.isFieldMapped("runtime"));
assertEquals(0, context.getMatchingFieldNames("runtime*").size());
assertEquals(1, context.getMatchingFieldNames("*cat").size());
assertThat(getFieldNames(context.getAllFields()), containsInAnyOrder("cat"));
context.setAllowedFields(s -> s.contains("dog") == false);
assertNotNull(context.getFieldType("pig"));
assertTrue(context.isFieldMapped("pig"));
assertNotNull(context.getFieldType("cat"));
assertTrue(context.isFieldMapped("cat"));
assertNotNull(context.getFieldType("runtimecat"));
assertTrue(context.isFieldMapped("runtimecat"));
assertNull(context.getFieldType("runtimedog"));
assertFalse(context.isFieldMapped("runtimedog"));
assertNotNull(context.getFieldType("runtime"));
assertTrue(context.isFieldMapped("runtime"));
assertEquals(2, context.getMatchingFieldNames("runtime*").size());
assertEquals(2, context.getMatchingFieldNames("*cat").size());
assertThat(getFieldNames(context.getAllFields()), containsInAnyOrder("pig", "cat", "runtimecat", "runtime"));
}
private static List<String> getFieldNames(Iterable<Map.Entry<String, MappedFieldType>> fields) {
List<String> fieldNames = new ArrayList<>();
for (Map.Entry<String, MappedFieldType> field : fields) {
fieldNames.add(field.getKey());
}
return fieldNames;
}
public static SearchExecutionContext createSearchExecutionContext(String indexUuid, String clusterAlias) {
return createSearchExecutionContext(indexUuid, clusterAlias, MappingLookup.EMPTY, Map.of());
}
private static SearchExecutionContext createSearchExecutionContext(RuntimeField... fieldTypes) {
return createSearchExecutionContext(
"uuid",
null,
createMappingLookup(Collections.emptyList(), List.of(fieldTypes)),
Collections.emptyMap()
);
}
private static SearchExecutionContext createSearchExecutionContext(
String indexUuid,
String clusterAlias,
MappingLookup mappingLookup,
Map<String, Object> runtimeMappings
) {
return createSearchExecutionContext(indexUuid, clusterAlias, mappingLookup, runtimeMappings, null);
}
private static SearchExecutionContext createSearchExecutionContext(
String indexUuid,
String clusterAlias,
MappingLookup mappingLookup,
Map<String, Object> runtimeMappings,
RootObjectMapperNamespaceValidator namespaceValidator
) {
IndexMetadata.Builder indexMetadataBuilder = new IndexMetadata.Builder("index");
indexMetadataBuilder.settings(indexSettings(IndexVersion.current(), 1, 1).put(IndexMetadata.SETTING_INDEX_UUID, indexUuid));
IndexMetadata indexMetadata = indexMetadataBuilder.build();
IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY);
MapperService mapperService = createMapperServiceWithNamespaceValidator(indexSettings, mappingLookup, namespaceValidator);
final long nowInMillis = randomNonNegativeLong();
return new SearchExecutionContext(
0,
0,
indexSettings,
null,
(mappedFieldType, fdc) -> mappedFieldType.fielddataBuilder(fdc).build(null, null),
mapperService,
mappingLookup,
null,
null,
XContentParserConfiguration.EMPTY,
new NamedWriteableRegistry(Collections.emptyList()),
null,
null,
() -> nowInMillis,
clusterAlias,
null,
() -> true,
null,
runtimeMappings,
MapperMetrics.NOOP
);
}
// Creates a mocked MapperService that supports just enough surface (analyzers, parser
// context, multi-field checks) to parse runtime fields with an optional namespace validator.
private static MapperService createMapperServiceWithNamespaceValidator(
    IndexSettings indexSettings,
    MappingLookup mappingLookup,
    RootObjectMapperNamespaceValidator namespaceValidator
) {
IndexAnalyzers indexAnalyzers = IndexAnalyzers.of(singletonMap("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, null)));
IndicesModule indicesModule = new IndicesModule(Collections.emptyList(), Collections.emptyList(), namespaceValidator);
MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();
// Parsing in these tests must never need a SearchExecutionContext; fail loudly if it does.
Supplier<SearchExecutionContext> searchExecutionContextSupplier = () -> { throw new UnsupportedOperationException(); };
MapperService mapperService = mock(MapperService.class);
when(mapperService.getIndexAnalyzers()).thenReturn(indexAnalyzers);
when(mapperService.parserContext()).thenReturn(
    new MappingParserContext(
        null,
        type -> mapperRegistry.getMapperParser(type, indexSettings.getIndexVersionCreated()),
        mapperRegistry.getRuntimeFieldParsers()::get,
        indexSettings.getIndexVersionCreated(),
        () -> TransportVersion.current(),
        searchExecutionContextSupplier,
        ScriptCompiler.NONE,
        indexAnalyzers,
        indexSettings,
        indexSettings.getMode().buildIdFieldMapper(() -> true),
        query -> {
            throw new UnsupportedOperationException();
        },
        null,
        namespaceValidator
    )
);
// Delegate multi-field checks to the real MappingLookup instead of stubbing answers.
when(mapperService.isMultiField(anyString())).then(
    (Answer<Boolean>) invocation -> mappingLookup.isMultiField(invocation.getArgument(0))
);
return mapperService;
}
// Overload for runtime values that depend only on the leaf lookup, not on the doc id.
private static RuntimeField runtimeField(String name, Function<LeafSearchLookup, String> runtimeDocValues) {
return runtimeField(name, (leafLookup, docId) -> runtimeDocValues.apply(leafLookup));
}
// Builds a test RuntimeField whose fielddata yields exactly one String per document,
// computed on demand by the given function from the leaf lookup and doc id. Only the
// script doc-values path is wired up; the rest of the IndexFieldData/LeafFieldData
// surface deliberately throws UnsupportedOperationException.
private static RuntimeField runtimeField(String name, BiFunction<LeafSearchLookup, Integer, String> runtimeDocValues) {
TestRuntimeField.TestRuntimeFieldType fieldType = new TestRuntimeField.TestRuntimeFieldType(name, null) {
@Override
public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) {
return (cache, breakerService) -> new IndexFieldData<>() {
@Override
public String getFieldName() {
return name;
}
@Override
public ValuesSourceType getValuesSourceType() {
throw new UnsupportedOperationException();
}
@Override
public LeafFieldData load(LeafReaderContext context) {
return new LeafFieldData() {
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
return new DelegateDocValuesField(new ScriptDocValues<String>(new ScriptDocValues.Supplier<String>() {
// Lazily computed value for the current document; refreshed in setNextDocId.
String value;
@Override
public int size() {
return 1;
}
@Override
public String getInternal(int index) {
assert index == 0;
return value;
}
@Override
public void setNextDocId(int docId) {
assert docId >= 0;
// Position the search lookup on the requested document, then recompute the value.
LeafSearchLookup leafLookup = fieldDataContext.lookupSupplier().get().getLeafSearchLookup(context);
leafLookup.setDocument(docId);
value = runtimeDocValues.apply(leafLookup, docId);
}
}) {
@Override
public int size() {
return supplier.size();
}
@Override
public String get(int i) {
return supplier.getInternal(i);
}
}, name);
}
@Override
public SortedBinaryDocValues getBytesValues() {
throw new UnsupportedOperationException();
}
@Override
public long ramBytesUsed() {
throw new UnsupportedOperationException();
}
@Override
public void close() {
throw new UnsupportedOperationException();
}
};
}
@Override
public LeafFieldData loadDirect(LeafReaderContext context) {
throw new UnsupportedOperationException();
}
@Override
public SortField sortField(
    Object missingValue,
    MultiValueMode sortMode,
    XFieldComparatorSource.Nested nested,
    boolean reverse
) {
throw new UnsupportedOperationException();
}
@Override
public BucketedSort newBucketedSort(
    BigArrays bigArrays,
    Object missingValue,
    MultiValueMode sortMode,
    XFieldComparatorSource.Nested nested,
    SortOrder sortOrder,
    DocValueFormat format,
    int bucketSize,
    BucketedSort.ExtraData extra
) {
throw new UnsupportedOperationException();
}
};
}
};
return new TestRuntimeField(name, Collections.singleton(fieldType));
}
// Collects the given field's doc values for every document (match_all query).
private static List<String> collect(String field, SearchExecutionContext searchExecutionContext) throws IOException {
return collect(field, searchExecutionContext, new MatchAllDocsQuery());
}
/**
 * Indexes two sample documents, runs {@code query}, and returns the doc values of
 * {@code field} (as strings) for every matching document.
 *
 * <p>Access paths are randomized on purpose: fielddata is resolved either directly from
 * the context or through the search lookup, and per-document values are read either via
 * {@code LeafDocLookup} or via the script field factory, so both code paths get exercised
 * across test runs.
 */
private static List<String> collect(String field, SearchExecutionContext searchExecutionContext, Query query) throws IOException {
    List<String> result = new ArrayList<>();
    try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
        indexWriter.addDocument(List.of(new KeywordField("indexed_field", "first", Field.Store.YES)));
        indexWriter.addDocument(List.of(new KeywordField("indexed_field", "second", Field.Store.YES)));
        try (DirectoryReader reader = indexWriter.getReader()) {
            IndexSearcher searcher = newSearcher(reader);
            MappedFieldType fieldType = searchExecutionContext.getFieldType(field);
            IndexFieldData<?> indexFieldData;
            if (randomBoolean()) {
                indexFieldData = searchExecutionContext.getForField(fieldType, MappedFieldType.FielddataOperation.SEARCH);
            } else {
                indexFieldData = searchExecutionContext.lookup().getForField(fieldType, MappedFieldType.FielddataOperation.SEARCH);
            }
            searcher.search(query, new Collector() {
                @Override
                public ScoreMode scoreMode() {
                    // Scores are irrelevant; we only read doc values.
                    return ScoreMode.COMPLETE_NO_SCORES;
                }

                @Override
                public LeafCollector getLeafCollector(LeafReaderContext context) {
                    return new LeafCollector() {
                        @Override
                        public void setScorer(Scorable scorer) {}

                        @Override
                        public void collect(int doc) throws IOException {
                            ScriptDocValues<?> scriptDocValues;
                            if (randomBoolean()) {
                                LeafDocLookup leafDocLookup = searchExecutionContext.lookup().getLeafSearchLookup(context).doc();
                                leafDocLookup.setDocument(doc);
                                scriptDocValues = leafDocLookup.get(field);
                            } else {
                                // Dropped a stray empty statement (';') that followed this assignment.
                                scriptDocValues = indexFieldData.load(context).getScriptFieldFactory("test").toScriptDocValues();
                            }
                            scriptDocValues.getSupplier().setNextDocId(doc);
                            for (int i = 0; i < scriptDocValues.size(); i++) {
                                result.add(scriptDocValues.get(i).toString());
                            }
                        }
                    };
                }
            });
        }
        return result;
    }
}
}
|
SearchExecutionContextTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/sqm/tree/domain/AbstractSqmPath.java
|
{
"start": 1706,
"end": 13724
}
|
/**
 * Base implementation for SQM path nodes: tracks the path's {@link NavigablePath}, the
 * {@link SqmPathSource} it refers to, the (nullable) left-hand side it dangles from and a
 * per-node cache of reusable sub-paths used when resolving implicit joins.
 */
class ____<T> extends AbstractSqmExpression<T> implements SqmPath<T> {
private final NavigablePath navigablePath;
private final SqmPathSource<T> referencedPathSource;
private final @Nullable SqmPath<?> lhs;
/**
 * For HQL and Criteria processing - used to track reusable paths relative to this path.
 * E.g., given {@code p.mate.mate} the {@code SqmRoot} identified by {@code p} would
 * have a reusable path for the {@code p.mate} path.
 */
private Map<String, SqmPath<?>> reusablePaths;
protected AbstractSqmPath(
		NavigablePath navigablePath,
		SqmPathSource<T> referencedPathSource,
		@Nullable SqmPath<?> lhs,
		NodeBuilder nodeBuilder) {
	super( referencedPathSource.getSqmType(), nodeBuilder );
	this.navigablePath = navigablePath;
	this.referencedPathSource = referencedPathSource;
	this.lhs = lhs;
	// assert super.getNodeType() == referencedPathSource;
}
protected void copyTo(AbstractSqmPath<T> target, SqmCopyContext context) {
	assert navigablePathsMatch( target );
	super.copyTo( target, context );
}
// meant for assertions only
private boolean navigablePathsMatch(AbstractSqmPath<T> target) {
	final SqmPath<?> lhs = getLhsOrRoot();
	final SqmPath<?> targetLhs = target.getLhsOrRoot();
	// Either both are root-less, or both have a lhs with the same navigable path
	// (polymorphic roots are exempt because their paths are rewritten per subtype).
	return lhs == null && targetLhs == null
		|| lhs != null && targetLhs != null
			&& (lhs.getNavigablePath() == targetLhs.getNavigablePath()
				|| getRoot( lhs ).getNodeType() instanceof SqmPolymorphicRootDescriptor
	);
}
private @Nullable SqmPath<?> getLhsOrRoot() {
	final SqmPath<?> lhs = getLhs();
	return lhs != null ? lhs : findRoot();
}
private SqmPath<?> getRoot(SqmPath<?> lhs) {
	// Walk up the lhs chain to the path with no parent.
	final SqmPath<?> parent = lhs.getLhs();
	return parent == null ? lhs : getRoot( parent );
}
@Override
public @NonNull SqmBindableType<T> getNodeType() {
	return referencedPathSource.getPathType();
}
@Override
public SqmPathSource<T> getReferencedPathSource() {
	return referencedPathSource;
}
@Override
public NavigablePath getNavigablePath() {
	return navigablePath;
}
@Override
public @Nullable SqmPath<?> getLhs() {
	return lhs;
}
@Override
public List<SqmPath<?>> getReusablePaths() {
	// Defensive copy of the cached sub-paths; empty when nothing was registered yet.
	return reusablePaths == null ? emptyList() : new ArrayList<>( reusablePaths.values() );
}
@Override
public void visitReusablePaths(Consumer<SqmPath<?>> consumer) {
	if ( reusablePaths != null ) {
		reusablePaths.values().forEach( consumer );
	}
}
@Override
public void registerReusablePath(SqmPath<?> path) {
	assert path.getLhs() == this;
	if ( reusablePaths == null ) {
		reusablePaths = new HashMap<>();
	}
	final String relativeName = path.getNavigablePath().getLocalName();
	final SqmPath<?> previous = reusablePaths.put( relativeName, path );
	if ( previous != null && previous != path ) {
		throw new IllegalStateException( "Implicit-join path registration unexpectedly overrode previous registration - " + relativeName );
	}
}
@Override
public @Nullable SqmPath<?> getReusablePath(String name) {
	return reusablePaths == null ? null : reusablePaths.get( name );
}
@Override
public @Nullable String getExplicitAlias() {
	return getAlias();
}
@Override
public void setExplicitAlias(@Nullable String explicitAlias) {
	setAlias( explicitAlias );
}
@Override
public SqmPathSource<T> getModel() {
	return getReferencedPathSource();
}
@Override
public SqmPathSource<T> getResolvedModel() {
	// For a generic attribute, prefer the concrete override declared by the lhs type,
	// when one exists; otherwise fall back to the referenced path source.
	final SqmPathSource<T> pathSource = getReferencedPathSource();
	final SqmPath<?> lhs = getLhs();
	if ( pathSource.isGeneric() && lhs != null
			&& lhs.getResolvedModel().getPathType() instanceof SqmManagedDomainType<?> lhsType ) {
		final var concreteAttribute = lhsType.findConcreteGenericAttribute( pathSource.getPathName() );
		if ( concreteAttribute != null ) {
			//noinspection unchecked
			return (SqmPathSource<T>) concreteAttribute;
		}
	}
	return getModel();
}
@Override
public @NonNull SqmBindableType<T> getExpressible() {
	return getResolvedModel().getExpressible();
}
@Override
public @NonNull JavaType<T> getJavaTypeDescriptor() {
	return castNonNull( super.getJavaTypeDescriptor() );
}
@Override
public @NonNull JavaType<T> getNodeJavaType() {
	return castNonNull( super.getNodeJavaType() );
}
@Override
public SqmExpression<Class<? extends T>> type() {
	// Expose the discriminator as a path when the type has one; otherwise the type is
	// fixed and can be rendered as a literal class value.
	final SqmPathSource<T> referencedPathSource = getReferencedPathSource();
	final SqmPathSource<?> subPathSource =
			referencedPathSource.findSubPathSource( EntityDiscriminatorMapping.DISCRIMINATOR_ROLE_NAME );
	if ( subPathSource == null ) {
		return new SqmLiteral<>(
				referencedPathSource.getBindableJavaType(),
				nodeBuilder().getTypeConfiguration().getBasicTypeForJavaType( Class.class ),
				nodeBuilder()
		);
	}
	else {
		@SuppressWarnings("unchecked")
		final var discriminatorSource = (SqmPathSource<Class<? extends T>>) subPathSource;
		return resolvePath( EntityDiscriminatorMapping.DISCRIMINATOR_ROLE_NAME, discriminatorSource );
	}
}
@Override
public <Y> SqmPath<Y> get(String attributeName) {
	@SuppressWarnings("unchecked")
	final SqmPathSource<Y> subNavigable = (SqmPathSource<Y>) getResolvedModel().getSubPathSource( attributeName );
	return resolvePath( attributeName, subNavigable );
}
@Override
public <Y> SqmPath<Y> get(String attributeName, boolean includeSubtypes) {
	@SuppressWarnings("unchecked")
	final SqmPathSource<Y> subPathSource = (SqmPathSource<Y>)
			getResolvedModel().getSubPathSource( attributeName, includeSubtypes );
	return resolvePath( attributeName, subPathSource );
}
protected <X> SqmPath<X> resolvePath(PersistentAttribute<?, X> attribute) {
	//noinspection unchecked
	return resolvePath( attribute.getName(), (SqmPathSource<X>) attribute );
}
protected <X> SqmPath<X> resolvePath(String attributeName, SqmPathSource<X> pathSource) {
	// Created sub-paths are cached per attribute name so repeated dereferences of the
	// same attribute reuse a single SqmPath node.
	final SqmPathSource<?> intermediatePathSource =
			getResolvedModel().getIntermediatePathSource( pathSource );
	if ( reusablePaths == null ) {
		reusablePaths = new HashMap<>();
		final SqmPath<X> path = pathSource.createSqmPath( this, intermediatePathSource );
		reusablePaths.put( attributeName, path );
		return path;
	}
	else {
		//noinspection unchecked
		return (SqmPath<X>)
				reusablePaths.computeIfAbsent( attributeName,
						name -> pathSource.createSqmPath( this, intermediatePathSource ) );
	}
}
protected <S extends T> SqmTreatedPath<T, S> getTreatedPath(ManagedDomainType<S> treatTarget) {
	// Reuse a treated path previously registered on the lhs for this treat target;
	// otherwise create the appropriate entity/embeddable treated path and register it.
	final NavigablePath treat = getNavigablePath().treatAs( treatTarget.getTypeName() );
	final SqmPath<?> lhs = castNonNull( getLhs() );
	final SqmPath<?> reusablePath = lhs.getReusablePath( treat.getLocalName() );
	//TODO: check this cast
	@SuppressWarnings("unchecked")
	final SqmTreatedPath<T, S> path = (SqmTreatedPath<T, S>) reusablePath;
	if ( path == null ) {
		final SqmTreatedPath<T, S> treatedPath;
		if ( treatTarget instanceof SqmEntityDomainType<S> entityDomainType ) {
			treatedPath = new SqmTreatedEntityValuedSimplePath<>( this, entityDomainType, nodeBuilder() );
		}
		else if ( treatTarget instanceof SqmEmbeddableDomainType<S> embeddableDomainType ) {
			treatedPath = new SqmTreatedEmbeddedValuedSimplePath<>( this, embeddableDomainType );
		}
		else {
			throw new AssertionFailure( "Unrecognized treat target type: " + treatTarget.getTypeName() );
		}
		lhs.registerReusablePath( treatedPath );
		return treatedPath;
	}
	else {
		return path;
	}
}
@Override
public <S extends T> SqmTreatedPath<T, S> treatAs(Class<S> treatJavaType) {
	return treatAs( nodeBuilder().getDomainModel().entity( treatJavaType ) );
}
@Override
public <S extends T> SqmTreatedPath<T, S> treatAs(EntityDomainType<S> treatTarget) {
	return getTreatedPath( treatTarget );
}
// NOTE(review): the alias/fetch parameters of the following overloads are ignored here;
// presumably handled by SqmFrom subtypes — confirm at the overriding sites.
@Override
public <S extends T> SqmTreatedPath<T, S> treatAs(Class<S> treatJavaType, @Nullable String alias) {
	return treatAs( nodeBuilder().getDomainModel().entity( treatJavaType ) );
}
@Override
public <S extends T> SqmTreatedPath<T, S> treatAs(EntityDomainType<S> treatTarget, @Nullable String alias) {
	return getTreatedPath( treatTarget );
}
@Override
public <S extends T> SqmTreatedPath<T, S> treatAs(Class<S> treatJavaType, @Nullable String alias, boolean fetch) {
	return treatAs( nodeBuilder().getDomainModel().entity( treatJavaType ) );
}
@Override
public <S extends T> SqmTreatedPath<T, S> treatAs(EntityDomainType<S> treatTarget, @Nullable String alias, boolean fetch) {
	return treatAs( treatTarget );
}
/**
 * Utility that checks if this path's parent navigable path is compatible with the specified SQM parent,
 * and if not creates a copy of the navigable path with the correct parent.
 */
protected NavigablePath getNavigablePathCopy(SqmPath<?> parent) {
	final NavigablePath realParentPath = getRealParentPath(
			castNonNull( navigablePath.getRealParent() ),
			parent.getNavigablePath()
	);
	if ( realParentPath != null ) {
		return realParentPath.append( navigablePath.getLocalName(), navigablePath.getAlias() );
	}
	return navigablePath;
}
// Returns the rebased parent path, or null when the existing parent is already correct.
private @Nullable NavigablePath getRealParentPath(NavigablePath realParent, NavigablePath parent) {
	@Nullable NavigablePath realParentPath;
	if ( parent == realParent ) {
		realParentPath = null;
	}
	else if ( realParent instanceof EntityIdentifierNavigablePath entityIdentifierNavigablePath ) {
		// Rebase the identifier's parent first, then re-wrap as an identifier path.
		realParentPath = getRealParentPath( castNonNull( realParent.getRealParent() ), parent );
		if ( realParentPath != null ) {
			realParentPath = new EntityIdentifierNavigablePath(
					realParentPath,
					entityIdentifierNavigablePath.getIdentifierAttributeName()
			);
		}
	}
	else if ( realParent.getAlias() == null && realParent instanceof TreatedNavigablePath ) {
		// This might be an implicitly treated parent path, check with the non-treated parent
		realParentPath = getRealParentPath( castNonNull( realParent.getRealParent() ), parent );
		if ( realParentPath != null ) {
			realParentPath = realParentPath.treatAs( realParent.getLocalName().substring( 1 ) );
		}
	}
	else if ( CollectionPart.Nature.fromNameExact( realParent.getLocalName() ) != null ) {
		// Collection-part parents ({element}, {index}, ...) are re-appended onto the new parent.
		if ( parent == realParent.getRealParent() ) {
			realParentPath = null;
		}
		else {
			realParentPath = parent.append( realParent.getLocalName() );
		}
	}
	else {
		realParentPath = parent;
	}
	return realParentPath;
}
@Override
@SuppressWarnings("unchecked")
public <Y> SqmPath<Y> get(SingularAttribute<? super T, Y> jpaAttribute) {
	return (SqmPath<Y>) resolvePath( (PersistentAttribute<?, ?>) jpaAttribute );
}
@Override
public <E, C extends Collection<E>> SqmExpression<C> get(PluralAttribute<? super T, C, E> attribute) {
	//noinspection unchecked
	return resolvePath( (PersistentAttribute<T, C>) attribute );
}
@Override
public <K, V, M extends Map<K, V>> SqmExpression<M> get(MapAttribute<? super T, K, V> attribute) {
	//noinspection unchecked
	return resolvePath( (PersistentAttribute<T, M>) attribute );
}
// The equals/hashCode and isCompatible/cacheHashCode implementations are based on NavigablePath to match paths
// "syntactically" for regular uses in expressions and predicates, which is good enough since the NavigablePath
// contains all the important information. Deep equality for SqmFrom is determined through SqmFromClause
@Override
public boolean equals(@Nullable Object object) {
	return object instanceof AbstractSqmPath<?> that
		&& this.getClass() == that.getClass()
		&& Objects.equals( this.navigablePath, that.navigablePath );
}
@Override
public int hashCode() {
	return navigablePath.hashCode();
}
@Override
public boolean isCompatible(Object object) {
	return object instanceof AbstractSqmPath<?> that
		&& this.getClass() == that.getClass()
		&& Objects.equals( this.navigablePath, that.navigablePath );
}
@Override
public int cacheHashCode() {
	return navigablePath.hashCode();
}
@Override
public String toString() {
	return getClass().getSimpleName() + "(" + navigablePath + ")";
}
}
|
AbstractSqmPath
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java
|
{
"start": 1956,
"end": 8003
}
|
/**
 * ES|QL scalar function {@code SUBSTRING(string, start[, length])}.
 * Positions are 1-based and counted in Unicode code points; a negative start is
 * interpreted relative to the end of the string.
 */
class ____ extends EsqlScalarFunction implements OptionalArgument {
public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
    Expression.class,
    "Substring",
    Substring::new
);
// str and start are required; length is optional and null when omitted.
private final Expression str, start, length;
@FunctionInfo(
    returnType = "keyword",
    description = "Returns a substring of a string, specified by a start position and an optional length.",
    examples = {
        @Example(file = "docs", tag = "substring", description = "This example returns the first three characters of every last name:"),
        @Example(file = "docs", tag = "substringEnd", description = """
            A negative start position is interpreted as being relative to the end of the string.
            This example returns the last three characters of every last name:"""),
        @Example(file = "docs", tag = "substringRemainder", description = """
            If length is omitted, substring returns the remainder of the string.
            This example returns all characters except for the first:""") }
)
public Substring(
    Source source,
    @Param(
        name = "string",
        type = { "keyword", "text" },
        description = "String expression. If `null`, the function returns `null`."
    ) Expression str,
    @Param(name = "start", type = { "integer" }, description = "Start position.") Expression start,
    @Param(
        optional = true,
        name = "length",
        type = { "integer" },
        description = "Length of the substring from the start position. Optional; if omitted, all positions after `start` are returned."
    ) Expression length
) {
// Children list omits length entirely when it was not supplied.
super(source, length == null ? Arrays.asList(str, start) : Arrays.asList(str, start, length));
this.str = str;
this.start = start;
this.length = length;
}
// Deserialization constructor; length is an optional writeable.
private Substring(StreamInput in) throws IOException {
this(
    Source.readFrom((PlanStreamInput) in),
    in.readNamedWriteable(Expression.class),
    in.readNamedWriteable(Expression.class),
    in.readOptionalNamedWriteable(Expression.class)
);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
source().writeTo(out);
out.writeNamedWriteable(str);
out.writeNamedWriteable(start);
// May be absent, so written as an optional writeable (mirrors the read side).
out.writeOptionalNamedWriteable(length);
}
@Override
public String getWriteableName() {
return ENTRY.name;
}
@Override
public DataType dataType() {
return DataType.KEYWORD;
}
@Override
protected TypeResolution resolveType() {
// string must be a string type; start and (when present) length must be integers.
if (childrenResolved() == false) {
    return new TypeResolution("Unresolved children");
}
TypeResolution resolution = isString(str, sourceText(), FIRST);
if (resolution.unresolved()) {
    return resolution;
}
resolution = TypeResolutions.isType(start, dt -> dt == INTEGER, sourceText(), SECOND, "integer");
if (resolution.unresolved()) {
    return resolution;
}
return length == null
    ? TypeResolution.TYPE_RESOLVED
    : TypeResolutions.isType(length, dt -> dt == INTEGER, sourceText(), THIRD, "integer");
}
@Override
public boolean foldable() {
return str.foldable() && start.foldable() && (length == null || length.foldable());
}
// No-length variant: take everything from start to the end of the string.
@Evaluator(extraName = "NoLength")
static BytesRef process(BytesRef str, int start) {
int length = str.length; // we just need a value at least the length of the string
return process(str, start, length);
}
@Evaluator
static BytesRef process(BytesRef str, int start, int length) {
if (length < 0) {
    throw new IllegalArgumentException("Length parameter cannot be negative, found [" + length + "]");
}
if (str.length == 0) {
    return str;
}
// Work in code points, not UTF-8 bytes, then map back via offsetByCodePoints.
int codePointCount = UnicodeUtil.codePointCount(str);
int indexStart = indexStart(codePointCount, start);
int indexEnd = Math.min(codePointCount, indexStart + length);
String s = str.utf8ToString();
return new BytesRef(s.substring(s.offsetByCodePoints(0, indexStart), s.offsetByCodePoints(0, indexEnd)));
}
// Converts the user-facing 1-based (possibly negative) start into a clamped 0-based
// code-point index in [0, codePointCount].
private static int indexStart(int codePointCount, int start) {
// esql is 1-based when it comes to string manipulation. We treat start = 0 and 1 the same
// a negative value is relative to the end of the string
int indexStart;
if (start > 0) {
    indexStart = start - 1;
} else if (start < 0) {
    indexStart = codePointCount + start; // start is negative, so this is a subtraction
} else {
    indexStart = start; // start == 0
}
return Math.min(Math.max(0, indexStart), codePointCount); // sanitise string start index
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new Substring(source(), newChildren.get(0), newChildren.get(1), length == null ? null : newChildren.get(2));
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, Substring::new, str, start, length);
}
@Override
public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
// Pick the generated evaluator matching the arity (with or without length).
var strFactory = toEvaluator.apply(str);
var startFactory = toEvaluator.apply(start);
if (length == null) {
    return new SubstringNoLengthEvaluator.Factory(source(), strFactory, startFactory);
}
var lengthFactory = toEvaluator.apply(length);
return new SubstringEvaluator.Factory(source(), strFactory, startFactory, lengthFactory);
}
Expression str() {
return str;
}
Expression start() {
return start;
}
Expression length() {
return length;
}
}
|
Substring
|
java
|
elastic__elasticsearch
|
modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java
|
{
"start": 5597,
"end": 5785
}
|
class ____ how a {@link S3BlobContainer} and its underlying AWS S3 client are retrying requests when reading or writing blobs.
*/
@SuppressForbidden(reason = "use a http server")
public
|
tests
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxAutoConnectFuseable.java
|
{
"start": 1206,
"end": 2618
}
|
/**
 * Fuseable flavor of the auto-connect operator: wraps a {@link ConnectableFlux} and calls
 * {@code connect()} exactly once, after {@code n} subscribers have subscribed.
 */
class ____<T> extends Flux<T>
		implements Scannable, Fuseable {
final ConnectableFlux<? extends T> source;
// Receives the connection Disposable so the caller can cancel the upstream connection.
final Consumer<? super Disposable> cancelSupport;
// Number of subscribers still required before connecting; 0 (or less) once connected.
volatile int remaining;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<FluxAutoConnectFuseable> REMAINING =
		AtomicIntegerFieldUpdater.newUpdater(FluxAutoConnectFuseable.class, "remaining");
FluxAutoConnectFuseable(ConnectableFlux<? extends T> source,
		int n, Consumer<? super Disposable> cancelSupport) {
	if (n <= 0) {
		throw new IllegalArgumentException("n > required but it was " + n);
	}
	this.source = ConnectableFlux.from(Objects.requireNonNull(source, "source"));
	this.cancelSupport = Objects.requireNonNull(cancelSupport, "cancelSupport");
	// lazySet is sufficient: the value is published before any subscriber can observe it.
	REMAINING.lazySet(this, n);
}
@Override
public void subscribe(CoreSubscriber<? super T> actual) {
	// Subscribe first, then connect: the atomic decrement guarantees that exactly one
	// caller (the n-th subscriber) observes 0 and triggers connect().
	source.subscribe(actual);
	if (remaining > 0 && REMAINING.decrementAndGet(this) == 0) {
		source.connect(cancelSupport);
	}
}
@Override
public int getPrefetch() {
	return source.getPrefetch();
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
	if (key == Attr.PREFETCH) return getPrefetch();
	if (key == Attr.PARENT) return source;
	if (key == Attr.CAPACITY) return remaining;
	if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
	if (key == InternalProducerAttr.INSTANCE) return true;
	return null;
}
}
|
FluxAutoConnectFuseable
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/ByteRangeInputStream.java
|
{
"start": 1490,
"end": 1757
}
|
class ____ the complexity of those multiple
* connections from the client. Whenever seek() is called, a new connection
* is made on the successive read(). The normal input stream functions are
* connected to the currently active input stream.
*/
public abstract
|
hides
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/handler/predicate/ReadBodyRoutePredicateFactory.java
|
{
"start": 1624,
"end": 3475
}
|
class ____ extends AbstractRoutePredicateFactory<ReadBodyRoutePredicateFactory.Config> {
protected static final Log log = LogFactory.getLog(ReadBodyRoutePredicateFactory.class);
private static final String TEST_ATTRIBUTE = "read_body_predicate_test_attribute";
private static final String CACHE_REQUEST_BODY_OBJECT_KEY = "cachedRequestBodyObject";
private final List<HttpMessageReader<?>> messageReaders;
public ReadBodyRoutePredicateFactory() {
super(Config.class);
this.messageReaders = HandlerStrategies.withDefaults().messageReaders();
}
public ReadBodyRoutePredicateFactory(List<HttpMessageReader<?>> messageReaders) {
super(Config.class);
this.messageReaders = messageReaders;
}
@Override
@SuppressWarnings("unchecked")
public AsyncPredicate<ServerWebExchange> applyAsync(Config config) {
return new AsyncPredicate<ServerWebExchange>() {
@Override
public Publisher<Boolean> apply(ServerWebExchange exchange) {
Class inClass = config.getInClass();
Object cachedBody = exchange.getAttribute(CACHE_REQUEST_BODY_OBJECT_KEY);
Mono<?> modifiedBody;
// We can only read the body from the request once, once that happens if
// we try to read the body again an exception will be thrown. The below
// if/else caches the body object as a request attribute in the
// ServerWebExchange so if this filter is run more than once (due to more
// than one route using it) we do not try to read the request body
// multiple times
if (cachedBody != null) {
try {
boolean test = config.predicate != null && config.predicate.test(cachedBody);
exchange.getAttributes().put(TEST_ATTRIBUTE, test);
return Mono.just(test);
}
catch (ClassCastException e) {
if (log.isDebugEnabled()) {
log.debug("Predicate test failed because
|
ReadBodyRoutePredicateFactory
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-plugin-kubernetes/src/main/java/org/apache/camel/dsl/jbang/core/commands/kubernetes/support/StubLanguageResolver.java
|
{
"start": 1122,
"end": 1997
}
|
/**
 * Language resolver that records every requested language name and, depending on the stub
 * configuration, either resolves the real language or substitutes a {@link StubLanguage}.
 */
class ____ extends DefaultLanguageResolver {
// All language names requested through this resolver (TreeSet: sorted, de-duplicated).
private final Set<String> names;
// Stub pattern from the caller; null means "no stubbing at all".
private final String stubPattern;
// NOTE(review): stored but never read in this class — confirm whether it is still needed.
private final boolean silent;
public StubLanguageResolver(String stubPattern, boolean silent) {
    this.names = new TreeSet<>();
    this.stubPattern = stubPattern;
    this.silent = silent;
}
@Override
public Language resolveLanguage(String name, CamelContext context) {
    // Record the name whether or not it was stubbed, so callers can report what was used.
    final boolean accept = accept(name);
    final Language answer = accept ? super.resolveLanguage(name, context) : new StubLanguage();
    this.names.add(name);
    return answer;
}
// Returns true when the language should be resolved for real, false when it should be stubbed.
private boolean accept(String name) {
    if (stubPattern == null) {
        return true;
    }
    // NOTE(review): the pattern's value is never matched against the name — any non-null
    // pattern stubs every language. Presumably a deliberate "stub everything" mode, but
    // verify against the corresponding component/data-format stub resolvers.
    return false;
}
public Set<String> getNames() {
    // Immutable snapshot so callers cannot mutate the internal set.
    return Set.copyOf(this.names);
}
}
|
StubLanguageResolver
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/MySqlDeleteTest_4_force_partition.java
|
{
"start": 1052,
"end": 2641
}
|
/** Parses MySQL {@code DELETE FORCE ALL PARTITIONS ...} and verifies formatting and stats. */
class ____ extends MysqlTest {
    public void test_0() throws Exception {
        final String sql = "DELETE force all partitions car_tt FROM runoob_tbl WHERE runoob_id=3;";

        final MySqlStatementParser sqlParser = new MySqlStatementParser(sql);
        final List<SQLStatement> statements = sqlParser.parseStatementList();
        final SQLStatement statement = statements.get(0);

        // Both upper- and lower-case formatting must render the FORCE ALL PARTITIONS clause.
        assertEquals("DELETE FORCE ALL PARTITIONS car_tt\n" +
                "FROM runoob_tbl\n" +
                "WHERE runoob_id = 3;", SQLUtils.toMySqlString(statement));
        assertEquals("delete force all partitions car_tt\n" +
                "from runoob_tbl\n" +
                "where runoob_id = 3;", SQLUtils.toMySqlString(statement, SQLUtils.DEFAULT_LCASE_FORMAT_OPTION));
        assertEquals(1, statements.size());

        System.out.println(statement.toString());

        final MySqlSchemaStatVisitor statVisitor = new MySqlSchemaStatVisitor();
        statement.accept(statVisitor);

        System.out.println("Tables : " + statVisitor.getTables());
        System.out.println("fields : " + statVisitor.getColumns());

        // The visitor should see both the target table and the partition pseudo-table,
        // plus the single WHERE condition/column.
        assertEquals(2, statVisitor.getTables().size());
        assertEquals(1, statVisitor.getColumns().size());
        assertEquals(1, statVisitor.getConditions().size());
        assertTrue(statVisitor.getTables().containsKey(new TableStat.Name("runoob_tbl")));
        assertTrue(statVisitor.getColumns().contains(new Column("car_tt", "runoob_id")));
    }
}
|
MySqlDeleteTest_4_force_partition
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/queue/QueueAckParams.java
|
{
"start": 690,
"end": 940
}
|
/**
 * Parameter holder for a queue acknowledge operation.
 */
class ____ extends BaseSyncParams<QueueAckArgs> implements QueueAckArgs {

    // Kept as a private copy so later mutation of the caller's array cannot change these args.
    private final String[] ids;

    /**
     * @param ids message ids to acknowledge (copied defensively; {@code null} is tolerated
     *            to preserve the previous lenient behavior)
     */
    public QueueAckParams(String[] ids) {
        this.ids = ids != null ? ids.clone() : null;
    }

    /**
     * @return a copy of the ids, so callers cannot mutate internal state
     */
    public String[] getIds() {
        return ids != null ? ids.clone() : null;
    }
}
|
QueueAckParams
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client/runtime/src/main/java/io/quarkus/rest/client/reactive/ClientExceptionMapper.java
|
{
"start": 1797,
"end": 2094
}
|
// NOTE(review): the `default Priorities.USER` value clause is annotation-member syntax,
// so this is presumably declared as an annotation type (@interface) at the original
// declaration site — confirm there.
interface ____ {
/**
 * The priority with which the exception mapper will be executed.
 * <p>
 * They are sorted in ascending order; the lower the number the higher the priority.
 *
 * @see Priorities
 */
int priority() default Priorities.USER;
}
|
ClientExceptionMapper
|
java
|
quarkusio__quarkus
|
extensions/resteasy-classic/resteasy/deployment/src/test/java/io/quarkus/resteasy/test/security/authzpolicy/AuthorizationPolicyAndPathMatchingPoliciesResource.java
|
{
"start": 409,
"end": 1082
}
|
/** Endpoints exercising path-matching HTTP permissions and a @RolesAllowed annotation. */
class ____ {

    /** Reports whether the caller is in the 'admin' role (JAX-RS path-matching permission). */
    @Path("jax-rs-path-matching-http-perm")
    @GET
    public boolean jaxRsPathMatchingHttpPerm(@Context SecurityContext ctx) {
        return ctx.isUserInRole("admin");
    }

    /** Reports whether the caller is in the 'admin' role (plain path-matching permission). */
    @Path("path-matching-http-perm")
    @GET
    public boolean pathMatchingHttpPerm(@Context SecurityContext ctx) {
        return ctx.isUserInRole("admin");
    }

    /** Returns the caller's principal name; access restricted to the 'admin' role. */
    @Path("roles-allowed-annotation")
    @RolesAllowed("admin")
    @GET
    public String rolesAllowed(@Context SecurityContext ctx) {
        return ctx.getUserPrincipal().getName();
    }
}
|
AuthorizationPolicyAndPathMatchingPoliciesResource
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/protocol/CommandArgs.java
|
{
"start": 10160,
"end": 10378
}
|
// Base type for a single command argument; subclasses define how it is serialized.
class ____ {
/**
 * Encode the argument and write it to the {@code buffer}.
 *
 * @param buffer the target buffer the encoded form is appended to
 */
abstract void encode(ByteBuf buffer);
}
static
|
SingularArgument
|
java
|
elastic__elasticsearch
|
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHashGridTiler.java
|
{
"start": 8776,
"end": 9238
}
|
// Tiler with no bounding box: every geohash at the configured precision is valid.
class ____ extends GeoHashGridTiler {
// Total number of geohash cells at this precision: 32^precision (base-32 alphabet).
private final long maxHashes;
UnboundedGeoHashGridTiler(int precision) {
    super(precision);
    // Computed in double, but 32^p is a power of two and therefore exact, so the cast is safe.
    this.maxHashes = (long) Math.pow(32, precision);
}
@Override
protected boolean validHash(String hash) {
    // Unbounded: never reject a hash.
    return true;
}
@Override
protected long getMaxCells() {
    return maxHashes;
}
}
}
|
UnboundedGeoHashGridTiler
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AbstractYarnScheduler.java
|
{
"start": 67496,
"end": 70087
}
|
class ____ extends SubjectInheritingThread {
@Override
public void work() {
while (!Thread.currentThread().isInterrupted()) {
try {
synchronized (updateThreadMonitor) {
updateThreadMonitor.wait(updateInterval);
}
update();
} catch (InterruptedException ie) {
LOG.warn("Scheduler UpdateThread interrupted. Exiting.");
return;
} catch (Exception e) {
LOG.error("Exception in scheduler UpdateThread", e);
}
}
}
}
/**
* Allows {@link UpdateThread} to start processing without waiting till
* {@link #updateInterval}.
*/
protected void triggerUpdate() {
synchronized (updateThreadMonitor) {
updateThreadMonitor.notify();
}
}
@Override
public void reinitialize(Configuration conf, RMContext rmContext)
throws IOException {
try {
LOG.info("Reinitializing SchedulingMonitorManager ...");
schedulingMonitorManager.reinitialize(rmContext, conf);
} catch (YarnException e) {
throw new IOException(e);
}
}
/**
* Default implementation. Always returns false.
* @param appAttempt ApplicationAttempt.
* @param schedulingRequest SchedulingRequest.
* @param schedulerNode SchedulerNode.
* @return Success or not.
*/
@Override
public boolean attemptAllocationOnNode(SchedulerApplicationAttempt appAttempt,
SchedulingRequest schedulingRequest, SchedulerNode schedulerNode) {
return false;
}
@Override
public void resetSchedulerMetrics() {
// reset scheduler metrics
}
/**
* Gets the apps from a given queue.
*
* Mechanics:
* 1. Get all {@link ApplicationAttemptId}s in the given queue by
* {@link #getAppsInQueue(String)} method.
* 2. Always need to check validity for the given queue by the returned
* values.
*
* @param queueName queue name
* @return a collection of app attempt ids in the given queue, it maybe empty.
* @throws YarnException if {@link #getAppsInQueue(String)} return null, will
* throw this exception.
*/
private List<ApplicationAttemptId> getAppsFromQueue(String queueName)
throws YarnException {
List<ApplicationAttemptId> apps = getAppsInQueue(queueName);
if (apps == null) {
throw new YarnException("The specified queue: " + queueName
+ " doesn't exist");
}
return apps;
}
/**
* ContainerObjectType is a container object with the following properties.
* Namely allocationId, priority, executionType and resourceType.
*/
protected
|
UpdateThread
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/CatProjectionBean.java
|
{
"start": 134,
"end": 735
}
|
class ____ {
private final String name;
private final String ownerName;
private final Double weight;
public CatProjectionBean(String name, String ownerName) {
this(name, ownerName, null);
}
public CatProjectionBean(String name, String ownerName, Double weight) {
this.name = name;
this.ownerName = ownerName;
this.weight = weight;
}
public String getName() {
return name;
}
public String getOwnerName() {
return ownerName;
}
public Double getWeight() {
return weight;
}
}
|
CatProjectionBean
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/handler/annotation/reactive/MessageMappingMessageHandler.java
|
{
"start": 3963,
"end": 7704
}
|
class ____ extends AbstractMethodMessageHandler<CompositeMessageCondition>
implements EmbeddedValueResolverAware {
private final List<Decoder<?>> decoders = new ArrayList<>();
private @Nullable Validator validator;
private @Nullable RouteMatcher routeMatcher;
private ConversionService conversionService = new DefaultFormattingConversionService();
private @Nullable StringValueResolver valueResolver;
public MessageMappingMessageHandler() {
setHandlerPredicate(type -> AnnotatedElementUtils.hasAnnotation(type, Controller.class));
}
/**
* Configure the decoders to use for incoming payloads.
*/
public void setDecoders(List<? extends Decoder<?>> decoders) {
this.decoders.clear();
this.decoders.addAll(decoders);
}
/**
* Return the configured decoders.
*/
public List<? extends Decoder<?>> getDecoders() {
return this.decoders;
}
/**
* Set the Validator instance used for validating {@code @Payload} arguments.
* @see org.springframework.validation.annotation.Validated
* @see PayloadMethodArgumentResolver
*/
public void setValidator(@Nullable Validator validator) {
this.validator = validator;
}
/**
* Return the configured Validator instance.
*/
public @Nullable Validator getValidator() {
return this.validator;
}
/**
* Set the {@code RouteMatcher} to use for mapping messages to handlers
* based on the route patterns they're configured with.
* <p>By default, {@link SimpleRouteMatcher} is used, backed by
* {@link AntPathMatcher} with "." as separator. For greater
* efficiency consider using the {@code PathPatternRouteMatcher} from
* {@code spring-web} instead.
*/
public void setRouteMatcher(@Nullable RouteMatcher routeMatcher) {
this.routeMatcher = routeMatcher;
}
/**
* Return the {@code RouteMatcher} used to map messages to handlers.
* May be {@code null} before the component is initialized.
*/
public @Nullable RouteMatcher getRouteMatcher() {
return this.routeMatcher;
}
/**
* Obtain the {@code RouteMatcher} for actual use.
* @return the RouteMatcher (never {@code null})
* @throws IllegalStateException in case of no RouteMatcher set
* @since 5.0
*/
protected RouteMatcher obtainRouteMatcher() {
RouteMatcher routeMatcher = getRouteMatcher();
Assert.state(routeMatcher != null, "No RouteMatcher set");
return routeMatcher;
}
/**
* Configure a {@link ConversionService} to use for type conversion of
* String based values, for example, in destination variables or headers.
* <p>By default {@link DefaultFormattingConversionService} is used.
* @param conversionService the conversion service to use
*/
public void setConversionService(ConversionService conversionService) {
this.conversionService = conversionService;
}
/**
* Return the configured ConversionService.
*/
public ConversionService getConversionService() {
return this.conversionService;
}
@Override
public void setEmbeddedValueResolver(StringValueResolver resolver) {
this.valueResolver = resolver;
}
/**
* Use this method to register a {@link MessagingAdviceBean} that may contain
* globally applicable
* {@link org.springframework.messaging.handler.annotation.MessageExceptionHandler @MessageExceptionHandler}
* methods.
* <p>Note: spring-messaging does not depend on spring-web and therefore it
* is not possible to explicitly support the registration of a
* {@code @ControllerAdvice} bean. You can use the following adapter code
* to register {@code @ControllerAdvice} beans here:
* <pre>
* ControllerAdviceBean.findAnnotatedBeans(context).forEach(bean ->
* messageHandler.registerMessagingAdvice(new ControllerAdviceWrapper(bean));
*
* public
|
MessageMappingMessageHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/generated/delegate/MutationDelegateIdentityTest.java
|
{
"start": 11149,
"end": 11689
}
|
class ____ {
@Id
@Column( name = "id_column" )
@GeneratedValue( strategy = GenerationType.IDENTITY )
private Long id;
@Generated( event = EventType.INSERT )
@ColumnDefault( "'default_name'" )
private String name;
@NaturalId
private String data;
public IdentityAndValuesAndRowIdAndNaturalId() {
}
private IdentityAndValuesAndRowIdAndNaturalId(String data) {
this.data = data;
}
public Long getId() {
return id;
}
public String getName() {
return name;
}
}
}
|
IdentityAndValuesAndRowIdAndNaturalId
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/ConfigurationPropertiesTests.java
|
{
"start": 62660,
"end": 62929
}
|
class ____ {
@Bean
@ConfigurationProperties("spam")
PrefixProperties prefixProperties() {
return new PrefixProperties();
}
}
@Configuration(proxyBeanMethods = false)
@EnableConfigurationProperties
static
|
PrefixedPropertiesReplacedOnBeanMethodConfiguration
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/testutils/TestFileSystem.java
|
{
"start": 5088,
"end": 5382
}
|
class ____ implements FileSystemFactory {
@Override
public String getScheme() {
return SCHEME;
}
@Override
public FileSystem create(URI fsUri) throws IOException {
return new TestFileSystem();
}
}
}
|
TestFileSystemFactory
|
java
|
apache__camel
|
components/camel-jaxb/src/test/java/org/apache/camel/example/DataFormatConcurrentTest.java
|
{
"start": 1563,
"end": 9554
}
|
class ____ extends CamelTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(DataFormatConcurrentTest.class);
private int size = 2000;
private int warmupCount = 100;
private int testCycleCount = 10000;
private int fooBarSize = 50;
@Test
public void testUnmarshalConcurrent() {
assertDoesNotThrow(() -> runUnmarshalConcurrent());
}
private void runUnmarshalConcurrent() throws Exception {
template.setDefaultEndpointUri("direct:unmarshal");
final CountDownLatch latch = new CountDownLatch(warmupCount + testCycleCount);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:unmarshal")
.unmarshal(new JaxbDataFormat("org.apache.camel.example"))
.process(exchange -> latch.countDown());
}
});
unmarshal(latch);
}
@Test
public void testUnmarshalFallbackConcurrent() {
assertDoesNotThrow(() -> runUnmarshallFallbackConcurrent());
}
private void runUnmarshallFallbackConcurrent() throws Exception {
template.setDefaultEndpointUri("direct:unmarshalFallback");
final CountDownLatch latch = new CountDownLatch(warmupCount + testCycleCount);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:unmarshalFallback")
.convertBodyTo(Foo.class)
.process(exchange -> latch.countDown());
}
});
unmarshal(latch);
}
@Test
public void testMarshallConcurrent() {
assertDoesNotThrow(() -> runMarshallConcurrent());
}
private void runMarshallConcurrent() throws Exception {
template.setDefaultEndpointUri("direct:marshal");
final CountDownLatch latch = new CountDownLatch(warmupCount + testCycleCount);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:marshal")
.marshal(new JaxbDataFormat("org.apache.camel.example"))
.process(exchange -> latch.countDown());
}
});
marshal(latch);
}
@Test
public void testMarshallFallbackConcurrent() {
assertDoesNotThrow(() -> runMarshallFallbackConcurrent());
}
private void runMarshallFallbackConcurrent() throws Exception {
template.setDefaultEndpointUri("direct:marshalFallback");
final CountDownLatch latch = new CountDownLatch(warmupCount + testCycleCount);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:marshalFallback")
.convertBodyTo(String.class)
.process(exchange -> latch.countDown());
}
});
marshal(latch);
}
@Test
public void testSendConcurrent() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(size);
// wait for seda consumer to start up properly
Thread.sleep(1000);
ExecutorService executor = Executors.newCachedThreadPool();
for (int i = 0; i < size; i++) {
// sleep a little so we interleave with the marshaller
Thread.sleep(1, 500);
executor.execute(new Runnable() {
public void run() {
PurchaseOrder bean = new PurchaseOrder();
bean.setName("Beer");
bean.setAmount(23);
bean.setPrice(2.5);
template.sendBody("seda:start?size=" + size + "&concurrentConsumers=5", bean);
}
});
}
MockEndpoint.assertIsSatisfied(context);
}
public void unmarshal(final CountDownLatch latch) throws Exception {
// warm up
ByteArrayInputStream[] warmUpPayloads = createPayloads(warmupCount);
for (ByteArrayInputStream payload : warmUpPayloads) {
template.sendBody(payload);
}
final ByteArrayInputStream[] payloads = createPayloads(testCycleCount);
ExecutorService pool = Executors.newFixedThreadPool(20);
StopWatch watch = new StopWatch();
for (int i = 0; i < payloads.length; i++) {
final int finalI = i;
pool.execute(new Runnable() {
public void run() {
template.sendBody(payloads[finalI]);
}
});
}
latch.await();
long duration = watch.taken();
LOG.info("Sending {} messages to {} took {} ms",
payloads.length, template.getDefaultEndpoint().getEndpointUri(), duration);
}
public void marshal(final CountDownLatch latch) throws Exception {
// warm up
Foo[] warmUpPayloads = createFoo(warmupCount);
for (Foo payload : warmUpPayloads) {
template.sendBody(payload);
}
final Foo[] payloads = createFoo(testCycleCount);
ExecutorService pool = Executors.newFixedThreadPool(20);
StopWatch watch = new StopWatch();
for (int i = 0; i < payloads.length; i++) {
final int finalI = i;
pool.execute(new Runnable() {
public void run() {
template.sendBody(payloads[finalI]);
}
});
}
latch.await();
long duration = watch.taken();
LOG.info("Sending {} messages to {} took {} ms",
payloads.length, template.getDefaultEndpoint().getEndpointUri(), duration);
}
/**
* the individual size of one record is: fooBarSize = 1 -> 104 bytes fooBarSize = 50 -> 2046 bytes
*
* @return the payloads used for this stress test
*/
public Foo[] createFoo(int testCount) {
Foo[] foos = new Foo[testCount];
for (int i = 0; i < testCount; i++) {
Foo foo = new Foo();
for (int x = 0; x < fooBarSize; x++) {
Bar bar = new Bar();
bar.setName("Name: " + x);
bar.setValue("value: " + x);
foo.getBarRefs().add(bar);
}
foos[i] = foo;
}
return foos;
}
/**
* the individual size of one record is: fooBarSize = 1 -> 104 bytes fooBarSize = 50 -> 2046 bytes
*
* @return the payloads used for this stress test
* @throws Exception
*/
public ByteArrayInputStream[] createPayloads(int testCount) throws Exception {
Foo foo = new Foo();
for (int x = 0; x < fooBarSize; x++) {
Bar bar = new Bar();
bar.setName("Name: " + x);
bar.setValue("value: " + x);
foo.getBarRefs().add(bar);
}
Marshaller m = JAXBContext.newInstance(Foo.class, Bar.class).createMarshaller();
StringWriter writer = new StringWriter();
m.marshal(foo, writer);
byte[] payload = writer.toString().getBytes();
ByteArrayInputStream[] streams = new ByteArrayInputStream[testCount];
for (int i = 0; i < testCount; i++) {
streams[i] = new ByteArrayInputStream(payload);
}
return streams;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
DataFormat jaxb = new JaxbDataFormat("org.apache.camel.example");
// use seda that supports concurrent consumers for concurrency
from("seda:start?size=" + size + "&concurrentConsumers=5")
.marshal(jaxb)
.convertBodyTo(String.class)
.to("mock:result");
}
};
}
}
|
DataFormatConcurrentTest
|
java
|
bumptech__glide
|
library/test/src/test/java/com/bumptech/glide/load/resource/bitmap/BitmapResourceTest.java
|
{
"start": 2108,
"end": 2360
}
|
class ____ {
final Bitmap bitmap = Bitmap.createBitmap(100, 100, Bitmap.Config.ARGB_8888);
final BitmapPool bitmapPool = mock(BitmapPool.class);
final BitmapResource resource = new BitmapResource(bitmap, bitmapPool);
}
}
|
BitmapResourceHarness
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_containsExactlyInAnyOrderElementsOf_Test.java
|
{
"start": 985,
"end": 1464
}
|
class ____ extends ObjectArrayAssertBaseTest {
@Override
protected ObjectArrayAssert<Object> invoke_api_method() {
return assertions.containsExactlyInAnyOrderElementsOf(newArrayList("Yoda", "Luke"));
}
@Override
protected void verify_internal_effects() {
verify(arrays)
.assertContainsExactlyInAnyOrder(getInfo(assertions), getActual(assertions), new String[] { "Yoda", "Luke" });
}
}
|
ObjectArrayAssert_containsExactlyInAnyOrderElementsOf_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/onetomany/BasicWhereTest.java
|
{
"start": 3498,
"end": 4365
}
|
class ____ {
@Id
@GeneratedValue
private Integer id;
private String name;
@OneToMany
@JoinTable(joinColumns = @JoinColumn(name = "allC"))
@SQLRestriction("type = 'C'")
@Audited(targetAuditMode = RelationTargetAuditMode.NOT_AUDITED)
@AuditJoinTable(name = "A_C_AUD")
private Set<EntityC> allMyC;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Set<EntityC> getAllMyC() {
return allMyC;
}
public void setAllMyC(Set<EntityC> allMyC) {
this.allMyC = allMyC;
}
}
@Audited
@Entity(name = "EntityB")
@Table(name = "b_tab")
@DiscriminatorColumn(name = "type", discriminatorType = DiscriminatorType.STRING)
@DiscriminatorValue( value = "B")
public static
|
EntityA
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/writing/DerivedFromFrameworkInstanceRequestRepresentation.java
|
{
"start": 5394,
"end": 5652
}
|
interface ____ {
DerivedFromFrameworkInstanceRequestRepresentation create(
ContributionBinding binding,
RequestRepresentation frameworkRequestRepresentation,
RequestKind requestKind,
FrameworkType frameworkType);
}
}
|
Factory
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/rpc/model/ReflectionMethodDescriptor.java
|
{
"start": 1705,
"end": 8848
}
|
class ____ implements MethodDescriptor {
private static final ErrorTypeAwareLogger logger =
LoggerFactory.getErrorTypeAwareLogger(ReflectionMethodDescriptor.class);
private final ConcurrentMap<String, Object> attributeMap = new ConcurrentHashMap<>();
public final String methodName;
private final String[] compatibleParamSignatures;
private final Class<?>[] parameterClasses;
private final Class<?> returnClass;
private final Type[] returnTypes;
private final String paramDesc;
private final Method method;
private final boolean generic;
private final RpcType rpcType;
private Class<?>[] actualRequestTypes;
private Class<?> actualResponseType;
public ReflectionMethodDescriptor(Method method) {
this.method = method;
this.methodName = method.getName();
this.parameterClasses = method.getParameterTypes();
this.returnClass = method.getReturnType();
Type[] returnTypesResult;
try {
returnTypesResult = ReflectUtils.getReturnTypes(method);
} catch (Throwable throwable) {
logger.error(
COMMON_REFLECTIVE_OPERATION_FAILED,
"",
"",
"fail to get return types. Method name: " + methodName + " Declaring class:"
+ method.getDeclaringClass().getName(),
throwable);
returnTypesResult = new Type[] {returnClass, returnClass};
}
this.returnTypes = returnTypesResult;
this.paramDesc = ReflectUtils.getDesc(parameterClasses);
this.compatibleParamSignatures =
Stream.of(parameterClasses).map(Class::getName).toArray(String[]::new);
this.generic = (methodName.equals($INVOKE) || methodName.equals($INVOKE_ASYNC)) && parameterClasses.length == 3;
this.rpcType = determineRpcType();
}
private RpcType determineRpcType() {
if (generic) {
return RpcType.UNARY;
}
if (parameterClasses.length > 2) {
return RpcType.UNARY;
}
Type[] genericParameterTypes = method.getGenericParameterTypes();
if (parameterClasses.length == 1 && isStreamType(parameterClasses[0]) && isStreamType(returnClass)) {
this.actualRequestTypes = new Class<?>[] {
obtainActualTypeInStreamObserver(
((ParameterizedType) method.getGenericReturnType()).getActualTypeArguments()[0])
};
actualResponseType = obtainActualTypeInStreamObserver(
((ParameterizedType) genericParameterTypes[0]).getActualTypeArguments()[0]);
return RpcType.BI_STREAM;
}
boolean returnIsVoid = returnClass.getName().equals(void.class.getName());
if (returnIsVoid && parameterClasses.length == 1 && isStreamType(parameterClasses[0])) {
actualRequestTypes = Collections.emptyList().toArray(new Class<?>[0]);
actualResponseType = obtainActualTypeInStreamObserver(
((ParameterizedType) method.getGenericParameterTypes()[0]).getActualTypeArguments()[0]);
return RpcType.SERVER_STREAM;
}
if (returnIsVoid
&& parameterClasses.length == 2
&& !isStreamType(parameterClasses[0])
&& isStreamType(parameterClasses[1])) {
actualRequestTypes = parameterClasses;
actualResponseType = obtainActualTypeInStreamObserver(
((ParameterizedType) method.getGenericParameterTypes()[1]).getActualTypeArguments()[0]);
return RpcType.SERVER_STREAM;
}
if (Arrays.stream(parameterClasses).anyMatch(this::isStreamType) || isStreamType(returnClass)) {
throw new IllegalStateException(
"Bad stream method signature. method(" + methodName + ":" + paramDesc + ")");
}
// Can not determine client stream because it has same signature with bi_stream
return RpcType.UNARY;
}
private boolean isStreamType(Class<?> classType) {
return StreamObserver.class.isAssignableFrom(classType);
}
@Override
public String getMethodName() {
return methodName;
}
@Override
public Method getMethod() {
return method;
}
@Override
public String[] getCompatibleParamSignatures() {
return compatibleParamSignatures;
}
@Override
public Class<?>[] getParameterClasses() {
return parameterClasses;
}
@Override
public String getParamDesc() {
return paramDesc;
}
@Override
public Class<?> getReturnClass() {
return returnClass;
}
@Override
public Type[] getReturnTypes() {
return returnTypes;
}
@Override
public RpcType getRpcType() {
return rpcType;
}
@Override
public boolean isGeneric() {
return generic;
}
public void addAttribute(String key, Object value) {
this.attributeMap.put(key, value);
}
public Object getAttribute(String key) {
return this.attributeMap.get(key);
}
@Override
public Class<?>[] getActualRequestTypes() {
return actualRequestTypes;
}
@Override
public Class<?> getActualResponseType() {
return actualResponseType;
}
private Class<?> obtainActualTypeInStreamObserver(Type typeInStreamObserver) {
return (Class<?>)
(typeInStreamObserver instanceof ParameterizedType
? ((ParameterizedType) typeInStreamObserver).getRawType()
: typeInStreamObserver);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ReflectionMethodDescriptor that = (ReflectionMethodDescriptor) o;
return generic == that.generic
&& Objects.equals(method, that.method)
&& Objects.equals(paramDesc, that.paramDesc)
&& Arrays.equals(compatibleParamSignatures, that.compatibleParamSignatures)
&& Arrays.equals(parameterClasses, that.parameterClasses)
&& Objects.equals(returnClass, that.returnClass)
&& Arrays.equals(returnTypes, that.returnTypes)
&& Objects.equals(methodName, that.methodName)
&& Objects.equals(attributeMap, that.attributeMap);
}
@Override
public int hashCode() {
int result = Objects.hash(method, paramDesc, returnClass, methodName, generic, attributeMap);
result = 31 * result + Arrays.hashCode(compatibleParamSignatures);
result = 31 * result + Arrays.hashCode(parameterClasses);
result = 31 * result + Arrays.hashCode(returnTypes);
return result;
}
@Override
public String toString() {
return "ReflectionMethodDescriptor{method='" + toShortString(method) + "', rpcType=" + rpcType + '}';
}
}
|
ReflectionMethodDescriptor
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/floatarray/FloatArrayAssert_isSortedAccordingToComparator_Test.java
|
{
"start": 1030,
"end": 1479
}
|
class ____ extends FloatArrayAssertBaseTest {
private Comparator<Float> comparator = alwaysEqual();
@Override
protected FloatArrayAssert invoke_api_method() {
return assertions.isSortedAccordingTo(comparator);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertIsSortedAccordingToComparator(getInfo(assertions), getActual(assertions), comparator);
}
}
|
FloatArrayAssert_isSortedAccordingToComparator_Test
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/WildcardImport.java
|
{
"start": 8427,
"end": 8918
}
|
enum ____ by simple name without importing them
return null;
}
if (parent instanceof ConstantCaseLabelTree constantCaseLabel
&& tree.equals(constantCaseLabel.getConstantExpression())) {
return null;
}
}
if (sym.owner.equals(owner) && unit.starImportScope.includes(sym)) {
fix.prefixWith(tree, owner.getSimpleName() + ".");
}
return null;
}
}.scan(unit, null);
}
}
|
constants
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/RoutePolicyFactoryTest.java
|
{
"start": 1855,
"end": 2180
}
|
class ____ implements RoutePolicyFactory {
public MyRoutePolicyFactory() {
}
@Override
public RoutePolicy createRoutePolicy(CamelContext camelContext, String routeId, NamedNode route) {
return new MyRoutePolicy(routeId);
}
}
private static final
|
MyRoutePolicyFactory
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByCheckerTest.java
|
{
"start": 35435,
"end": 36383
}
|
interface ____ {
void apply();
}
public void handle() {
runHandler(
new Handler() {
public void apply() {
// BUG: Diagnostic contains:
// should be guarded by 'Transaction.this'
x++;
}
});
}
private synchronized void runHandler(Handler handler) {
handler.apply();
}
}
""")
.doTest();
}
// TODO(cushon): allowing @GuardedBy on overridden methods is unsound.
@Test
public void lexicalScopingExampleTwo() {
compilationHelper
.addSourceLines(
"threadsafety/Test.java",
"""
package threadsafety;
import com.google.errorprone.annotations.concurrent.GuardedBy;
|
Handler
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/hybrid/tiered/tier/disk/DiskIOScheduler.java
|
{
"start": 3136,
"end": 13816
}
|
class ____ implements Runnable, BufferRecycler, NettyServiceProducer {
private static final Logger LOG = LoggerFactory.getLogger(DiskIOScheduler.class);
private final Object lock = new Object();
/** The partition id. */
private final TieredStoragePartitionId partitionId;
/** The executor is responsible for scheduling the disk read process. */
private final ScheduledExecutorService ioExecutor;
/**
* The buffer pool is specifically designed for reading from disk and shared in the TaskManager.
*/
private final BatchShuffleReadBufferPool bufferPool;
/**
* The maximum number of buffers that can be allocated and still not recycled by a single {@link
* DiskIOScheduler} for all subpartitions. This ensures that different {@link DiskIOScheduler}s
* in the TaskManager can evenly use the buffer pool.
*/
private final int maxRequestedBuffers;
/**
* The maximum time to wait when requesting read buffers from the buffer pool before throwing an
* exception.
*/
private final Duration bufferRequestTimeout;
/**
* Get the segment id if the buffer index represents the first buffer in a segment. The first
* integer is the id of subpartition, and the second integer is buffer index and the value is
* segment id.
*/
private final BiFunction<Integer, Integer, Integer> segmentIdGetter;
private final PartitionFileReader partitionFileReader;
@GuardedBy("lock")
private final Map<NettyConnectionId, ScheduledSubpartitionReader> allScheduledReaders =
new HashMap<>();
@GuardedBy("lock")
private boolean isRunning;
@GuardedBy("lock")
private int numRequestedBuffers;
@GuardedBy("lock")
private boolean isReleased;
public DiskIOScheduler(
TieredStoragePartitionId partitionId,
BatchShuffleReadBufferPool bufferPool,
ScheduledExecutorService ioExecutor,
int maxRequestedBuffers,
Duration bufferRequestTimeout,
BiFunction<Integer, Integer, Integer> segmentIdGetter,
PartitionFileReader partitionFileReader) {
this.partitionId = partitionId;
this.bufferPool = checkNotNull(bufferPool);
this.ioExecutor = checkNotNull(ioExecutor);
this.maxRequestedBuffers = maxRequestedBuffers;
this.bufferRequestTimeout = checkNotNull(bufferRequestTimeout);
this.segmentIdGetter = segmentIdGetter;
this.partitionFileReader = partitionFileReader;
bufferPool.registerRequester(this);
}
@Override
public synchronized void run() {
int numBuffersRead = readBuffersFromFile();
synchronized (lock) {
numRequestedBuffers += numBuffersRead;
isRunning = false;
}
if (numBuffersRead == 0) {
try {
ioExecutor.schedule(this::triggerScheduling, 5, TimeUnit.MILLISECONDS);
} catch (RejectedExecutionException e) {
ignoreRejectedExecutionOnShutdown(e);
}
} else {
triggerScheduling();
}
}
@Override
public void connectionEstablished(
TieredStorageSubpartitionId subpartitionId,
NettyConnectionWriter nettyConnectionWriter) {
synchronized (lock) {
checkState(!isReleased, "DiskIOScheduler is already released.");
ScheduledSubpartitionReader scheduledSubpartitionReader =
new ScheduledSubpartitionReader(subpartitionId, nettyConnectionWriter);
allScheduledReaders.put(
nettyConnectionWriter.getNettyConnectionId(), scheduledSubpartitionReader);
triggerScheduling();
}
}
@Override
public void connectionBroken(NettyConnectionId id) {
synchronized (lock) {
allScheduledReaders.remove(id);
}
}
@Override
public void recycle(MemorySegment segment) {
synchronized (lock) {
bufferPool.recycle(segment);
--numRequestedBuffers;
triggerScheduling();
}
}
public void release() {
synchronized (lock) {
if (isReleased) {
return;
}
isReleased = true;
allScheduledReaders.clear();
partitionFileReader.release();
bufferPool.unregisterRequester(this);
}
}
// ------------------------------------------------------------------------
// Internal Methods
// ------------------------------------------------------------------------
private int readBuffersFromFile() {
List<ScheduledSubpartitionReader> scheduledReaders = sortScheduledReaders();
if (scheduledReaders.isEmpty()) {
return 0;
}
Queue<MemorySegment> buffers;
try {
buffers = allocateBuffers();
} catch (Exception e) {
notifyDownstreamSubpartitionFailed(
scheduledReaders, e, "Failed to request buffers for data reading.");
return 0;
}
int numBuffersAllocated = buffers.size();
if (numBuffersAllocated <= 0) {
return 0;
}
for (ScheduledSubpartitionReader scheduledReader : scheduledReaders) {
if (buffers.isEmpty()) {
break;
}
try {
scheduledReader.loadDiskDataToBuffers(buffers, this);
} catch (IOException e) {
notifyDownstreamSubpartitionFailed(
Collections.singletonList(scheduledReader),
e,
"Failed to read shuffle data.");
}
}
int numBuffersRead = numBuffersAllocated - buffers.size();
releaseBuffers(buffers);
return numBuffersRead;
}
private List<ScheduledSubpartitionReader> sortScheduledReaders() {
List<ScheduledSubpartitionReader> scheduledReaders;
synchronized (lock) {
if (isReleased) {
return new ArrayList<>();
}
scheduledReaders = new ArrayList<>(allScheduledReaders.values());
}
for (ScheduledSubpartitionReader reader : scheduledReaders) {
try {
reader.prepareForScheduling();
} catch (IOException e) {
notifyDownstreamSubpartitionFailed(
Collections.singletonList(reader), e, "Failed to prepare for scheduling.");
}
}
Collections.sort(scheduledReaders);
return scheduledReaders;
}
private Queue<MemorySegment> allocateBuffers() throws Exception {
long timeoutTime = getBufferRequestTimeoutTime();
do {
List<MemorySegment> buffers = bufferPool.requestBuffers();
if (!buffers.isEmpty()) {
return new ArrayDeque<>(buffers);
}
synchronized (lock) {
if (isReleased) {
return new ArrayDeque<>();
}
}
} while (System.currentTimeMillis() < timeoutTime
|| System.currentTimeMillis() < (timeoutTime = getBufferRequestTimeoutTime()));
throw new TimeoutException(
String.format(
"Buffer request timeout, this means there is a fierce contention of"
+ " the batch shuffle read memory, please increase '%s'.",
TaskManagerOptions.NETWORK_BATCH_SHUFFLE_READ_MEMORY.key()));
}
/**
* Send an error response to the downstream to notify the specific subpartition has been failed.
* The {@link ScheduledSubpartitionReader} responsible for the failed subpartition will also be
* removed from the {@link DiskIOScheduler}.
*
* @param scheduledReaders the readers of the failed subpartitions.
* @param failureCause the failure cause in the error response.
* @param errorLog the log printed in the {@link DiskIOScheduler}.
*/
private void notifyDownstreamSubpartitionFailed(
List<ScheduledSubpartitionReader> scheduledReaders,
Throwable failureCause,
String errorLog) {
for (ScheduledSubpartitionReader scheduledReader : scheduledReaders) {
synchronized (lock) {
allScheduledReaders.remove(scheduledReader.getId());
}
scheduledReader.failReader(failureCause);
}
LOG.error(errorLog);
}
private void releaseBuffers(Queue<MemorySegment> buffers) {
if (!buffers.isEmpty()) {
bufferPool.recycle(buffers);
buffers.clear();
}
}
private void triggerScheduling() {
synchronized (lock) {
if (!isRunning
&& !allScheduledReaders.isEmpty()
&& numRequestedBuffers + bufferPool.getNumBuffersPerRequest()
<= maxRequestedBuffers
&& numRequestedBuffers < bufferPool.getAverageBuffersPerRequester()) {
isRunning = true;
try {
ioExecutor.execute(
() -> {
try {
run();
} catch (Throwable t) {
LOG.error("Failed to read data.", t);
// handle un-expected exception as unhandledExceptionHandler is
// not worked for ScheduledExecutorService.
FatalExitExceptionHandler.INSTANCE.uncaughtException(
Thread.currentThread(), t);
}
});
} catch (RejectedExecutionException e) {
ignoreRejectedExecutionOnShutdown(e);
}
}
}
}
private long getBufferRequestTimeoutTime() {
return bufferPool.getLastBufferOperationTimestamp() + bufferRequestTimeout.toMillis();
}
private void ignoreRejectedExecutionOnShutdown(RejectedExecutionException e) {
LOG.warn(
"Attempt to submit a task to the shut down batch read thread pool should be ignored. No more tasks should be accepted.",
e);
}
/**
* The {@link ScheduledSubpartitionReader} is responsible for reading a subpartition from disk,
* and is scheduled by the {@link DiskIOScheduler}.
*/
private
|
DiskIOScheduler
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/cors/CORSRegexTestCase.java
|
{
"start": 278,
"end": 2068
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(BeanRegisteringRoute.class)
.addAsResource("conf/cors-regex.properties", "application.properties"));
@Test
public void corsRegexValidOriginTest() {
given().header("Origin", "https://asdf.domain.com")
.when()
.get("/test").then()
.statusCode(200)
.header("Access-Control-Allow-Origin", "https://asdf.domain.com")
.header("Access-Control-Allow-Credentials", "true");
}
@Test
public void corsRegexValidOrigin2Test() {
given().header("Origin", "https://abc-123.app.mydomain.com")
.when()
.get("/test").then()
.statusCode(200)
.header("Access-Control-Allow-Origin", "https://abc-123.app.mydomain.com")
.header("Access-Control-Allow-Credentials", "true");
}
@Test
public void corsRegexInvalidOriginTest() {
given().header("Origin", "https://asdfdomain.com")
.when()
.get("/test").then()
.statusCode(403)
.header("Access-Control-Allow-Origin", nullValue())
.header("Access-Control-Allow-Credentials", nullValue());
}
@Test
public void corsRegexInvalidOrigin2Test() {
given().header("Origin", "https://abc-123app.mydomain.com")
.when()
.get("/test").then()
.statusCode(403)
.header("Access-Control-Allow-Origin", nullValue())
.header("Access-Control-Allow-Credentials", nullValue());
}
}
|
CORSRegexTestCase
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/dispatcher/DispatcherCachedOperationsHandlerTest.java
|
{
"start": 2043,
"end": 10140
}
|
class ____ extends TestLogger {
private static final Duration TIMEOUT = Duration.ofMinutes(10);
private CompletedOperationCache<AsynchronousJobOperationKey, Long> checkpointTriggerCache;
private CompletedOperationCache<AsynchronousJobOperationKey, String> savepointTriggerCache;
private DispatcherCachedOperationsHandler handler;
private TriggerCheckpointSpyFunction triggerCheckpointFunction;
private TriggerSavepointSpyFunction triggerSavepointFunction;
private TriggerSavepointSpyFunction stopWithSavepointFunction;
private CompletableFuture<Long> checkpointIdFuture = new CompletableFuture<>();
private CompletableFuture<String> savepointLocationFuture = new CompletableFuture<>();
private final JobID jobID = new JobID();
private final String targetDirectory = "dummyDirectory";
private AsynchronousJobOperationKey operationKey;
@BeforeEach
public void setup() {
checkpointIdFuture = new CompletableFuture<>();
triggerCheckpointFunction =
TriggerCheckpointSpyFunction.wrap(
new TriggerCheckpointSpyFunction() {
@Override
CompletableFuture<Long> applyWrappedFunction(
JobID jobID, CheckpointType checkpointType, Duration timeout) {
return checkpointIdFuture;
}
});
savepointLocationFuture = new CompletableFuture<>();
triggerSavepointFunction =
TriggerSavepointSpyFunction.wrap(
(jobID, targetDirectory, formatType, savepointMode, timeout) ->
savepointLocationFuture);
stopWithSavepointFunction =
TriggerSavepointSpyFunction.wrap(
(jobID, targetDirectory, formatType, savepointMode, timeout) ->
savepointLocationFuture);
checkpointTriggerCache =
new CompletedOperationCache<>(
RestOptions.ASYNC_OPERATION_STORE_DURATION.defaultValue());
savepointTriggerCache =
new CompletedOperationCache<>(
RestOptions.ASYNC_OPERATION_STORE_DURATION.defaultValue());
handler =
new DispatcherCachedOperationsHandler(
triggerCheckpointFunction,
checkpointTriggerCache,
triggerSavepointFunction,
stopWithSavepointFunction,
savepointTriggerCache);
operationKey = AsynchronousJobOperationKey.of(new TriggerId(), jobID);
}
@Test
public void triggerSavepointRepeatedly() throws ExecutionException, InterruptedException {
CompletableFuture<Acknowledge> firstAcknowledge =
handler.triggerSavepoint(
operationKey,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.SAVEPOINT,
TIMEOUT);
CompletableFuture<Acknowledge> secondAcknowledge =
handler.triggerSavepoint(
operationKey,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.SAVEPOINT,
TIMEOUT);
assertThat(triggerSavepointFunction.getNumberOfInvocations()).isOne();
assertThat(triggerSavepointFunction.getInvocationParameters().get(0))
.isEqualTo(
new Tuple4<>(
jobID,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.SAVEPOINT));
assertThat(firstAcknowledge.get()).isEqualTo(Acknowledge.get());
assertThat(secondAcknowledge.get()).isEqualTo(Acknowledge.get());
}
@Test
public void stopWithSavepointRepeatedly() throws ExecutionException, InterruptedException {
CompletableFuture<Acknowledge> firstAcknowledge =
handler.stopWithSavepoint(
operationKey,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.TERMINATE_WITH_SAVEPOINT,
TIMEOUT);
CompletableFuture<Acknowledge> secondAcknowledge =
handler.stopWithSavepoint(
operationKey,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.TERMINATE_WITH_SAVEPOINT,
TIMEOUT);
assertThat(stopWithSavepointFunction.getNumberOfInvocations()).isOne();
assertThat(stopWithSavepointFunction.getInvocationParameters().get(0))
.isEqualTo(
new Tuple4<>(
jobID,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.TERMINATE_WITH_SAVEPOINT));
assertThat(firstAcknowledge.get()).isEqualTo(Acknowledge.get());
assertThat(secondAcknowledge.get()).isEqualTo(Acknowledge.get());
}
@Test
public void retryingCompletedOperationDoesNotMarkCacheEntryAsAccessed()
throws ExecutionException, InterruptedException {
handler.triggerSavepoint(
operationKey,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.SAVEPOINT,
TIMEOUT)
.get();
savepointLocationFuture.complete("");
handler.triggerSavepoint(
operationKey,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.SAVEPOINT,
TIMEOUT)
.get();
// should not complete because we wait for the result to be accessed
FlinkAssertions.assertThatFuture(savepointTriggerCache.closeAsync())
.willNotCompleteWithin(Duration.ofMillis(10));
}
@Test
public void throwsIfCacheIsShuttingDown() {
savepointTriggerCache.closeAsync();
assertThatThrownBy(
() ->
handler.triggerSavepoint(
operationKey,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.SAVEPOINT,
TIMEOUT))
.isInstanceOf(IllegalStateException.class);
}
@Test
public void getStatus() throws ExecutionException, InterruptedException {
handler.triggerSavepoint(
operationKey,
targetDirectory,
SavepointFormatType.CANONICAL,
TriggerSavepointMode.SAVEPOINT,
TIMEOUT);
String savepointLocation = "location";
savepointLocationFuture.complete(savepointLocation);
CompletableFuture<OperationResult<String>> statusFuture =
handler.getSavepointStatus(operationKey);
assertThat(statusFuture.get()).isEqualTo(OperationResult.success(savepointLocation));
}
@Test
public void getStatusFailsIfKeyUnknown() {
CompletableFuture<OperationResult<String>> statusFuture =
handler.getSavepointStatus(operationKey);
FlinkAssertions.assertThatFuture(statusFuture)
.eventuallyFails()
.withCauseOfType(UnknownOperationKeyException.class);
}
private abstract static
|
DispatcherCachedOperationsHandlerTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/netty/NettyMessage.java
|
{
"start": 11776,
"end": 20784
}
|
class ____ extends NettyMessage {
static final byte ID = 0;
// receiver ID (16), sequence number (4), backlog (4), subpartition id (4), partial buffers
// number (4), dataType (1), isCompressed (1), buffer size (4)
static final int MESSAGE_HEADER_LENGTH =
InputChannelID.getByteBufLength()
+ Integer.BYTES
+ Integer.BYTES
+ Integer.BYTES
+ Integer.BYTES
+ Byte.BYTES
+ Byte.BYTES
+ Integer.BYTES;
final Buffer buffer;
final InputChannelID receiverId;
final int subpartitionId;
final int sequenceNumber;
final int backlog;
final Buffer.DataType dataType;
final boolean isCompressed;
final int bufferSize;
final int numOfPartialBuffers;
private List<Integer> partialBufferSizes = new ArrayList<>();
private BufferResponse(
@Nullable Buffer buffer,
Buffer.DataType dataType,
boolean isCompressed,
int sequenceNumber,
InputChannelID receiverId,
int subpartitionId,
int numOfPartialBuffers,
int backlog,
int bufferSize) {
this.buffer = buffer;
this.dataType = dataType;
this.isCompressed = isCompressed;
this.sequenceNumber = sequenceNumber;
this.receiverId = checkNotNull(receiverId);
this.subpartitionId = subpartitionId;
this.backlog = backlog;
this.bufferSize = bufferSize;
this.numOfPartialBuffers = numOfPartialBuffers;
}
BufferResponse(
Buffer buffer,
int sequenceNumber,
InputChannelID receiverId,
int subpartitionId,
int numOfPartialBuffers,
int backlog) {
this.buffer = checkNotNull(buffer);
checkArgument(
buffer.getDataType().ordinal() <= Byte.MAX_VALUE,
"Too many data types defined!");
checkArgument(backlog >= 0, "Must be non-negative.");
this.dataType = buffer.getDataType();
this.isCompressed = buffer.isCompressed();
this.sequenceNumber = sequenceNumber;
this.receiverId = checkNotNull(receiverId);
this.subpartitionId = subpartitionId;
this.backlog = backlog;
this.bufferSize = buffer.getSize();
this.numOfPartialBuffers = numOfPartialBuffers;
}
boolean isBuffer() {
return dataType.isBuffer();
}
@Nullable
public Buffer getBuffer() {
return buffer;
}
void releaseBuffer() {
if (buffer != null) {
buffer.recycleBuffer();
}
}
public List<Integer> getPartialBufferSizes() {
return partialBufferSizes;
}
// --------------------------------------------------------------------
// Serialization
// --------------------------------------------------------------------
@Override
void write(ChannelOutboundInvoker out, ChannelPromise promise, ByteBufAllocator allocator)
throws IOException {
ByteBuf headerBuf = null;
try {
// in order to forward the buffer to netty, it needs an allocator set
buffer.setAllocator(allocator);
headerBuf = fillHeader(allocator);
out.write(headerBuf);
if (buffer instanceof FileRegionBuffer) {
out.write(buffer, promise);
} else {
out.write(buffer.asByteBuf(), promise);
}
} catch (Throwable t) {
handleException(headerBuf, buffer, t);
}
}
@VisibleForTesting
ByteBuf write(ByteBufAllocator allocator) throws IOException {
ByteBuf headerBuf = null;
try {
// in order to forward the buffer to netty, it needs an allocator set
buffer.setAllocator(allocator);
headerBuf = fillHeader(allocator);
CompositeByteBuf composityBuf = allocator.compositeDirectBuffer();
composityBuf.addComponent(headerBuf);
composityBuf.addComponent(buffer.asByteBuf());
// update writer index since we have data written to the components:
composityBuf.writerIndex(
headerBuf.writerIndex() + buffer.asByteBuf().writerIndex());
return composityBuf;
} catch (Throwable t) {
handleException(headerBuf, buffer, t);
return null; // silence the compiler
}
}
private ByteBuf fillHeader(ByteBufAllocator allocator) {
// only allocate header buffer - we will combine it with the data buffer below
ByteBuf headerBuf =
allocateBuffer(
allocator,
ID,
MESSAGE_HEADER_LENGTH + Integer.BYTES * numOfPartialBuffers,
bufferSize,
false);
receiverId.writeTo(headerBuf);
headerBuf.writeInt(subpartitionId);
headerBuf.writeInt(numOfPartialBuffers);
headerBuf.writeInt(sequenceNumber);
headerBuf.writeInt(backlog);
headerBuf.writeByte(dataType.ordinal());
headerBuf.writeBoolean(isCompressed);
headerBuf.writeInt(buffer.readableBytes());
if (numOfPartialBuffers > 0) {
checkArgument(
buffer instanceof FullyFilledBuffer,
"Partial buffers are only supported for fully filled buffers.");
List<Buffer> partialBuffers = ((FullyFilledBuffer) buffer).getPartialBuffers();
checkArgument(
partialBuffers.size() == numOfPartialBuffers,
"Mismatched number of partial buffers");
for (int i = 0; i < numOfPartialBuffers; i++) {
int bytes = partialBuffers.get(i).readableBytes();
headerBuf.writeInt(bytes);
}
}
return headerBuf;
}
/**
* Parses the message header part and composes a new BufferResponse with an empty data
* buffer. The data buffer will be filled in later.
*
* @param messageHeader the serialized message header.
* @param bufferAllocator the allocator for network buffer.
* @return a BufferResponse object with the header parsed and the data buffer to fill in
* later. The data buffer will be null if the target channel has been released or the
* buffer size is 0.
*/
static BufferResponse readFrom(
ByteBuf messageHeader, NetworkBufferAllocator bufferAllocator) {
InputChannelID receiverId = InputChannelID.fromByteBuf(messageHeader);
int subpartitionId = messageHeader.readInt();
int numOfPartialBuffers = messageHeader.readInt();
int sequenceNumber = messageHeader.readInt();
int backlog = messageHeader.readInt();
Buffer.DataType dataType = Buffer.DataType.values()[messageHeader.readByte()];
boolean isCompressed = messageHeader.readBoolean();
int size = messageHeader.readInt();
Buffer dataBuffer;
if (dataType.isBuffer()) {
dataBuffer = bufferAllocator.allocatePooledNetworkBuffer(receiverId);
if (dataBuffer != null) {
dataBuffer.setDataType(dataType);
}
} else {
dataBuffer = bufferAllocator.allocateUnPooledNetworkBuffer(size, dataType);
}
if (size == 0 && dataBuffer != null) {
// recycle the empty buffer directly, we must allocate a buffer for
// the empty data to release the credit already allocated for it
dataBuffer.recycleBuffer();
dataBuffer = null;
}
if (dataBuffer != null) {
dataBuffer.setCompressed(isCompressed);
}
return new BufferResponse(
dataBuffer,
dataType,
isCompressed,
sequenceNumber,
receiverId,
subpartitionId,
numOfPartialBuffers,
backlog,
size);
}
}
static
|
BufferResponse
|
java
|
apache__camel
|
components/camel-couchdb/src/generated/java/org/apache/camel/component/couchdb/CouchDbEndpointUriFactory.java
|
{
"start": 517,
"end": 2986
}
|
class ____ extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {
private static final String BASE = ":protocol:hostname:port/database";
private static final Set<String> PROPERTY_NAMES;
private static final Set<String> SECRET_PROPERTY_NAMES;
private static final Map<String, String> MULTI_VALUE_PREFIXES;
static {
Set<String> props = new HashSet<>(16);
props.add("bridgeErrorHandler");
props.add("createDatabase");
props.add("database");
props.add("deletes");
props.add("exceptionHandler");
props.add("exchangePattern");
props.add("heartbeat");
props.add("hostname");
props.add("lazyStartProducer");
props.add("maxMessagesPerPoll");
props.add("password");
props.add("port");
props.add("protocol");
props.add("style");
props.add("updates");
props.add("username");
PROPERTY_NAMES = Collections.unmodifiableSet(props);
Set<String> secretProps = new HashSet<>(2);
secretProps.add("password");
secretProps.add("username");
SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
MULTI_VALUE_PREFIXES = Collections.emptyMap();
}
@Override
public boolean isEnabled(String scheme) {
return "couchdb".equals(scheme);
}
@Override
public String buildUri(String scheme, Map<String, Object> properties, boolean encode) throws URISyntaxException {
String syntax = scheme + BASE;
String uri = syntax;
Map<String, Object> copy = new HashMap<>(properties);
uri = buildPathParameter(syntax, uri, "protocol", null, true, copy);
uri = buildPathParameter(syntax, uri, "hostname", null, true, copy);
uri = buildPathParameter(syntax, uri, "port", 5984, false, copy);
uri = buildPathParameter(syntax, uri, "database", null, true, copy);
uri = buildQueryParameters(uri, copy, encode);
return uri;
}
@Override
public Set<String> propertyNames() {
return PROPERTY_NAMES;
}
@Override
public Set<String> secretPropertyNames() {
return SECRET_PROPERTY_NAMES;
}
@Override
public Map<String, String> multiValuePrefixes() {
return MULTI_VALUE_PREFIXES;
}
@Override
public boolean isLenientProperties() {
return false;
}
}
|
CouchDbEndpointUriFactory
|
java
|
dropwizard__dropwizard
|
dropwizard-logging/src/test/java/io/dropwizard/logging/common/DefaultLoggingFactoryPrintErrorMessagesTest.java
|
{
"start": 820,
"end": 4116
}
|
class ____ {
private DefaultLoggingFactory factory;
private ByteArrayOutputStream output;
@BeforeEach
void setUp() throws Exception {
output = new ByteArrayOutputStream();
factory = new DefaultLoggingFactory(new LoggerContext(), new PrintStream(output));
}
@AfterEach
void tearDown() {
factory.stop();
factory.reset();
}
private void configureLoggingFactoryWithFileAppender(File file) {
factory.setAppenders(singletonList(newFileAppenderFactory(file)));
}
private AppenderFactory<ILoggingEvent> newFileAppenderFactory(File file) {
FileAppenderFactory<ILoggingEvent> fileAppenderFactory = new FileAppenderFactory<>();
fileAppenderFactory.setCurrentLogFilename(file.toString() + File.separator + "my-log-file.log");
fileAppenderFactory.setArchive(false);
return fileAppenderFactory;
}
private String configureAndGetOutputWrittenToErrorStream() throws UnsupportedEncodingException {
factory.configure(new MetricRegistry(), "logger-test");
return output.toString(StandardCharsets.UTF_8.name());
}
@Test
void testWhenUsingDefaultConstructor_SystemErrIsSet() {
PrintStream configurationErrorsStream = new DefaultLoggingFactory().getConfigurationErrorsStream();
assertThat(configurationErrorsStream).isSameAs(System.err);
}
@Test
void testWhenUsingDefaultConstructor_StaticILoggerFactoryIsSet() {
LoggerContext loggerContext = new DefaultLoggingFactory().getLoggerContext();
assertThat(loggerContext).isSameAs(LoggerFactory.getILoggerFactory());
}
@Test
void testWhenFileAppenderDoesNotHaveWritePermissionToFolder_PrintsErrorMessageToConsole(@TempDir Path tempDir) throws Exception {
File folderWithoutWritePermission = tempDir.resolve("folder-without-write-permission").toFile();
assumeTrue(folderWithoutWritePermission.mkdirs());
assumeTrue(folderWithoutWritePermission.setWritable(false));
configureLoggingFactoryWithFileAppender(folderWithoutWritePermission);
assertThat(folderWithoutWritePermission.canWrite()).isFalse();
assertThat(configureAndGetOutputWrittenToErrorStream()).contains(folderWithoutWritePermission.toString());
}
@Test
void testWhenSettingUpLoggingWithValidConfiguration_NoErrorMessageIsPrintedToConsole(@TempDir Path tempDir) throws Exception {
File folderWithWritePermission = tempDir.resolve("folder-with-write-permission").toFile();
assumeTrue(folderWithWritePermission.mkdirs());
configureLoggingFactoryWithFileAppender(folderWithWritePermission);
assertThat(folderWithWritePermission).canWrite();
assertThat(configureAndGetOutputWrittenToErrorStream()).isEmpty();
}
@Test
void testLogbackStatusPrinterPrintStreamIsRestoredToSystemOut() throws Exception {
StatusPrinter2 statusPrinter2 = new StatusPrinter2();
Field field = statusPrinter2.getClass().getDeclaredField("ps");
field.setAccessible(true);
assertThat(field.get(statusPrinter2))
.isInstanceOfSatisfying(PrintStream.class, printStream -> assertThat(printStream).isSameAs(System.out));
}
}
|
DefaultLoggingFactoryPrintErrorMessagesTest
|
java
|
quarkusio__quarkus
|
extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/config/FallbackHandlerA.java
|
{
"start": 194,
"end": 364
}
|
class ____ implements FallbackHandler<String> {
@Override
public String handle(ExecutionContext context) {
return "FallbackHandlerA";
}
}
|
FallbackHandlerA
|
java
|
apache__camel
|
core/camel-xml-io-util/src/main/java/org/apache/camel/xml/io/util/XmlStreamReader.java
|
{
"start": 2531,
"end": 3082
}
|
class ____ the charset encoding of XML documents in Files, raw streams and HTTP streams by offering
* a wide set of constructors.
* <P>
* By default the charset encoding detection is lenient, the constructor with the lenient flag can be used for an script
* (following HTTP MIME and XML specifications). All this is nicely explained by Mark Pilgrim in his blog,
* <a href= "https://web.archive.org/web/20060706153721/http://diveintomark.org/archives/2004/02/13/xml-media-types">
* Determining the character encoding of a feed</a>.
*/
public
|
handles
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketServerProtocolConfig.java
|
{
"start": 952,
"end": 4088
}
|
class ____ {
static final long DEFAULT_HANDSHAKE_TIMEOUT_MILLIS = 10000L;
private final String websocketPath;
private final String subprotocols;
private final boolean checkStartsWith;
private final long handshakeTimeoutMillis;
private final long forceCloseTimeoutMillis;
private final boolean handleCloseFrames;
private final WebSocketCloseStatus sendCloseFrame;
private final boolean dropPongFrames;
private final WebSocketDecoderConfig decoderConfig;
private WebSocketServerProtocolConfig(
String websocketPath,
String subprotocols,
boolean checkStartsWith,
long handshakeTimeoutMillis,
long forceCloseTimeoutMillis,
boolean handleCloseFrames,
WebSocketCloseStatus sendCloseFrame,
boolean dropPongFrames,
WebSocketDecoderConfig decoderConfig
) {
this.websocketPath = websocketPath;
this.subprotocols = subprotocols;
this.checkStartsWith = checkStartsWith;
this.handshakeTimeoutMillis = checkPositive(handshakeTimeoutMillis, "handshakeTimeoutMillis");
this.forceCloseTimeoutMillis = forceCloseTimeoutMillis;
this.handleCloseFrames = handleCloseFrames;
this.sendCloseFrame = sendCloseFrame;
this.dropPongFrames = dropPongFrames;
this.decoderConfig = decoderConfig == null ? WebSocketDecoderConfig.DEFAULT : decoderConfig;
}
public String websocketPath() {
return websocketPath;
}
public String subprotocols() {
return subprotocols;
}
public boolean checkStartsWith() {
return checkStartsWith;
}
public long handshakeTimeoutMillis() {
return handshakeTimeoutMillis;
}
public long forceCloseTimeoutMillis() {
return forceCloseTimeoutMillis;
}
public boolean handleCloseFrames() {
return handleCloseFrames;
}
public WebSocketCloseStatus sendCloseFrame() {
return sendCloseFrame;
}
public boolean dropPongFrames() {
return dropPongFrames;
}
public WebSocketDecoderConfig decoderConfig() {
return decoderConfig;
}
@Override
public String toString() {
return "WebSocketServerProtocolConfig" +
" {websocketPath=" + websocketPath +
", subprotocols=" + subprotocols +
", checkStartsWith=" + checkStartsWith +
", handshakeTimeoutMillis=" + handshakeTimeoutMillis +
", forceCloseTimeoutMillis=" + forceCloseTimeoutMillis +
", handleCloseFrames=" + handleCloseFrames +
", sendCloseFrame=" + sendCloseFrame +
", dropPongFrames=" + dropPongFrames +
", decoderConfig=" + decoderConfig +
"}";
}
public Builder toBuilder() {
return new Builder(this);
}
public static Builder newBuilder() {
return new Builder("/", null, false, DEFAULT_HANDSHAKE_TIMEOUT_MILLIS, 0L,
true, WebSocketCloseStatus.NORMAL_CLOSURE, true, WebSocketDecoderConfig.DEFAULT);
}
public static final
|
WebSocketServerProtocolConfig
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/pubsub/PubSubOutput.java
|
{
"start": 1264,
"end": 3779
}
|
enum ____ {
message, pmessage, psubscribe, punsubscribe, subscribe, unsubscribe, ssubscribe, smessage, sunsubscribe;
private final static Set<String> names = new HashSet<>();
static {
for (Type value : Type.values()) {
names.add(value.name());
}
}
public static boolean isPubSubType(String name) {
return names.contains(name);
}
}
private Type type;
private K channel;
private K pattern;
private long count;
private boolean completed;
public PubSubOutput(RedisCodec<K, V> codec) {
super(codec, null);
}
public Type type() {
return type;
}
public K channel() {
return channel;
}
public K pattern() {
return pattern;
}
public long count() {
return count;
}
@Override
@SuppressWarnings({ "fallthrough", "unchecked" })
public void set(ByteBuffer bytes) {
if (bytes == null) {
return;
}
if (type == null) {
type = Type.valueOf(decodeString(bytes));
return;
}
handleOutput(bytes);
}
@SuppressWarnings("unchecked")
private void handleOutput(ByteBuffer bytes) {
switch (type) {
case pmessage:
if (pattern == null) {
pattern = codec.decodeKey(bytes);
break;
}
case smessage:
case message:
if (channel == null) {
channel = codec.decodeKey(bytes);
break;
}
output = codec.decodeValue(bytes);
completed = true;
break;
case psubscribe:
case punsubscribe:
pattern = codec.decodeKey(bytes);
break;
case subscribe:
case unsubscribe:
case ssubscribe:
case sunsubscribe:
channel = codec.decodeKey(bytes);
break;
default:
throw new UnsupportedOperationException("Operation " + type + " not supported");
}
}
@Override
public void set(long integer) {
count = integer;
// count comes last in (p)(un)subscribe ack.
completed = true;
}
boolean isCompleted() {
return completed;
}
@Override
public V body() {
return output;
}
}
|
Type
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/annotations/ExecutionTime.java
|
{
"start": 128,
"end": 326
}
|
enum ____ {
/**
* The bytecode is run from a generated static initializer
*/
STATIC_INIT,
/**
* The bytecode is run from a main method
*/
RUNTIME_INIT
}
|
ExecutionTime
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/util/LoaderUtil.java
|
{
"start": 13040,
"end": 13252
}
|
class ____'t available to the usual ClassLoaders
* @throws ExceptionInInitializerError if an exception was thrown while initializing the class
* @throws LinkageError if the linkage of the
|
isn
|
java
|
spring-projects__spring-framework
|
integration-tests/src/test/java/org/springframework/scheduling/annotation/ScheduledAndTransactionalAnnotationIntegrationTests.java
|
{
"start": 5769,
"end": 6114
}
|
class ____ {
@Bean
static AnnotationAwareAspectJAutoProxyCreator autoProxyCreator() {
AnnotationAwareAspectJAutoProxyCreator apc = new AnnotationAwareAspectJAutoProxyCreator();
apc.setProxyTargetClass(true);
return apc;
}
@Bean
static MyAspect myAspect() {
return new MyAspect();
}
}
@Aspect
public static
|
AspectConfig
|
java
|
quarkusio__quarkus
|
extensions/smallrye-reactive-messaging-pulsar/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/pulsar/deployment/DefaultSchemaConfigTest.java
|
{
"start": 77314,
"end": 77695
}
|
class ____ {
@Incoming("channel1")
void method1(String msg) {
}
@Incoming("channel2")
void method2(String msg) {
}
@Incoming("channel3")
void method3(JsonObject msg) {
}
@Incoming("channel4")
void method4(JsonObject msg) {
}
}
private static
|
ProvidedSchemaWithChannelName
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/fs/contract/router/TestRouterHDFSContractCreate.java
|
{
"start": 1208,
"end": 1642
}
|
class ____ extends AbstractContractCreateTest {
@BeforeAll
public static void createCluster() throws IOException {
RouterHDFSContract.createCluster();
}
@AfterAll
public static void teardownCluster() throws IOException {
RouterHDFSContract.destroyCluster();
}
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new RouterHDFSContract(conf);
}
}
|
TestRouterHDFSContractCreate
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/aot/NativeDetector.java
|
{
"start": 1036,
"end": 1563
}
|
class ____ {
/**
* See https://github.com/oracle/graal/blob/master/sdk/src/org.graalvm.nativeimage/src/org/graalvm/nativeimage/ImageInfo.java
*/
private static final boolean IMAGE_CODE =
(SystemPropertyConfigUtils.getSystemProperty(GRAALVM_NATIVEIMAGE_IMAGECODE) != null);
/**
* Returns {@code true} if invoked in the context of image building or during image runtime, else {@code false}.
*/
public static boolean inNativeImage() {
return IMAGE_CODE;
}
}
|
NativeDetector
|
java
|
alibaba__nacos
|
config/src/main/java/com/alibaba/nacos/config/server/controller/ListenerController.java
|
{
"start": 2072,
"end": 4288
}
|
class ____ {
private final ConfigSubService configSubService;
public ListenerController(ConfigSubService configSubService) {
this.configSubService = configSubService;
}
/**
* Get subscribe information from client side.
*/
@GetMapping
@Secured(action = ActionTypes.READ, signType = SignType.CONFIG)
@Compatibility(apiType = ApiType.CONSOLE_API, alternatives = "GET ${contextPath:nacos}/v3/console/cs/config/listener/ip")
public GroupkeyListenserStatus getAllSubClientConfigByIp(@RequestParam("ip") String ip,
@RequestParam(value = "all", required = false) boolean all,
@RequestParam(value = "tenant", required = false) String tenant,
@RequestParam(value = "sampleTime", required = false, defaultValue = "1") int sampleTime, ModelMap modelMap) {
SampleResult collectSampleResult = configSubService.getCollectSampleResultByIp(ip, sampleTime);
GroupkeyListenserStatus gls = new GroupkeyListenserStatus();
gls.setCollectStatus(200);
Map<String, String> configMd5Status = new HashMap<>(100);
if (collectSampleResult.getLisentersGroupkeyStatus() == null) {
return gls;
}
Map<String, String> status = collectSampleResult.getLisentersGroupkeyStatus();
tenant = NamespaceUtil.processNamespaceParameter(tenant);
for (Map.Entry<String, String> config : status.entrySet()) {
if (!StringUtils.isBlank(tenant) && config.getKey().contains(tenant)) {
configMd5Status.put(config.getKey(), config.getValue());
continue;
}
// Get common config default value, if want to get all config, you need to add "all".
if (all) {
configMd5Status.put(config.getKey(), config.getValue());
} else {
String[] configKeys = GroupKey2.parseKey(config.getKey());
if (StringUtils.isBlank(configKeys[2])) {
configMd5Status.put(config.getKey(), config.getValue());
}
}
}
gls.setLisentersGroupkeyStatus(configMd5Status);
return gls;
}
}
|
ListenerController
|
java
|
google__gson
|
gson/src/test/java/com/google/gson/functional/RuntimeTypeAdapterFactoryFunctionalTest.java
|
{
"start": 2885,
"end": 3069
}
|
class ____ extends Shape {
final int side;
Square(int side) {
super(ShapeType.SQUARE);
this.side = side;
}
}
// Copied from the extras package
static
|
Square
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-spring/src/test/java/org/apache/dubbo/config/spring/propertyconfigurer/provider/HelloServiceImpl.java
|
{
"start": 1033,
"end": 1621
}
|
class ____ implements HelloService {
private static final Logger logger = LoggerFactory.getLogger(HelloServiceImpl.class);
@Override
public String sayHello(String name) {
logger.info("Hello " + name + ", request from consumer: "
+ RpcContext.getContext().getRemoteAddress());
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
return "Hello " + name + ", response from provider: "
+ RpcContext.getContext().getLocalAddress();
}
}
|
HelloServiceImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleRequestTests.java
|
{
"start": 464,
"end": 1078
}
|
class ____ extends ESTestCase {
public void testSetRefreshPolicy() {
final DeleteRoleRequest request = new DeleteRoleRequest();
final String refreshPolicy = randomFrom(
WriteRequest.RefreshPolicy.IMMEDIATE.getValue(),
WriteRequest.RefreshPolicy.WAIT_UNTIL.getValue()
);
request.setRefreshPolicy(refreshPolicy);
assertThat(request.getRefreshPolicy().getValue(), equalTo(refreshPolicy));
request.setRefreshPolicy((String) null);
assertThat(request.getRefreshPolicy().getValue(), equalTo(refreshPolicy));
}
}
|
DeleteRoleRequestTests
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/HamletSpec.java
|
{
"start": 35234,
"end": 35309
}
|
interface ____ extends Attrs, Flow, _Child {
}
/**
*
*/
public
|
DIV
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java
|
{
"start": 932,
"end": 6263
}
|
class ____ extends InternalTermsTestCase {
@Override
protected InternalTerms<?, ?> createTestInstance(
String name,
Map<String, Object> metadata,
InternalAggregations aggregations,
boolean showTermDocCountError,
long docCountError
) {
BucketOrder order = BucketOrder.count(false);
long minDocCount = 1;
int requiredSize = 3;
int shardSize = requiredSize + 2;
DocValueFormat format = randomNumericDocValueFormat();
long otherDocCount = 0;
List<DoubleTerms.Bucket> buckets = new ArrayList<>();
final int numBuckets = randomNumberOfBuckets();
Set<Double> terms = new HashSet<>();
for (int i = 0; i < numBuckets; ++i) {
double term = randomValueOtherThanMany(d -> terms.add(d) == false, random()::nextDouble);
int docCount = randomIntBetween(1, 100);
buckets.add(new DoubleTerms.Bucket(term, docCount, aggregations, showTermDocCountError, docCountError, format));
}
BucketOrder reduceOrder = rarely() ? order : BucketOrder.key(true);
Collections.sort(buckets, reduceOrder.comparator());
return new DoubleTerms(
name,
reduceOrder,
order,
requiredSize,
minDocCount,
metadata,
format,
shardSize,
showTermDocCountError,
otherDocCount,
buckets,
docCountError
);
}
@Override
protected InternalTerms<?, ?> mutateInstance(InternalTerms<?, ?> instance) {
if (instance instanceof DoubleTerms doubleTerms) {
String name = doubleTerms.getName();
BucketOrder order = doubleTerms.order;
int requiredSize = doubleTerms.requiredSize;
long minDocCount = doubleTerms.minDocCount;
DocValueFormat format = doubleTerms.format;
int shardSize = doubleTerms.getShardSize();
boolean showTermDocCountError = doubleTerms.showTermDocCountError;
long otherDocCount = doubleTerms.getSumOfOtherDocCounts();
List<DoubleTerms.Bucket> buckets = doubleTerms.getBuckets();
long docCountError = doubleTerms.getDocCountError();
Map<String, Object> metadata = doubleTerms.getMetadata();
switch (between(0, 8)) {
case 0 -> name += randomAlphaOfLength(5);
case 1 -> requiredSize += between(1, 100);
case 2 -> minDocCount += between(1, 100);
case 3 -> shardSize += between(1, 100);
case 4 -> showTermDocCountError = showTermDocCountError == false;
case 5 -> otherDocCount += between(1, 100);
case 6 -> docCountError += between(1, 100);
case 7 -> {
buckets = new ArrayList<>(buckets);
buckets.add(
new DoubleTerms.Bucket(
randomDouble(),
randomNonNegativeLong(),
InternalAggregations.EMPTY,
showTermDocCountError,
docCountError,
format
)
);
}
case 8 -> {
if (metadata == null) {
metadata = Maps.newMapWithExpectedSize(1);
} else {
metadata = new HashMap<>(instance.getMetadata());
}
metadata.put(randomAlphaOfLength(15), randomInt());
}
default -> throw new AssertionError("Illegal randomisation branch");
}
Collections.sort(buckets, doubleTerms.reduceOrder.comparator());
return new DoubleTerms(
name,
doubleTerms.reduceOrder,
order,
requiredSize,
minDocCount,
metadata,
format,
shardSize,
showTermDocCountError,
otherDocCount,
buckets,
docCountError
);
} else {
String name = instance.getName();
BucketOrder order = instance.order;
int requiredSize = instance.requiredSize;
long minDocCount = instance.minDocCount;
Map<String, Object> metadata = instance.getMetadata();
switch (between(0, 3)) {
case 0 -> name += randomAlphaOfLength(5);
case 1 -> requiredSize += between(1, 100);
case 2 -> minDocCount += between(1, 100);
case 3 -> {
if (metadata == null) {
metadata = Maps.newMapWithExpectedSize(1);
} else {
metadata = new HashMap<>(instance.getMetadata());
}
metadata.put(randomAlphaOfLength(15), randomInt());
}
default -> throw new AssertionError("Illegal randomisation branch");
}
return new UnmappedTerms(name, order, requiredSize, minDocCount, metadata);
}
}
}
|
DoubleTermsTests
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/ConcurrentWriteOperationDetectedException.java
|
{
"start": 1206,
"end": 1895
}
|
class ____
extends AzureBlobFileSystemException {
private static final String ERROR_MESSAGE = "Parallel access to the create path detected. Failing request "
+ "to honor single writer semantics";
/**
* Constructs a new ConcurrentWriteOperationDetectedException with a default error message.
*/
public ConcurrentWriteOperationDetectedException() {
super(ERROR_MESSAGE);
}
/**
* Constructs a new ConcurrentWriteOperationDetectedException with the specified error message.
*
* @param message the detail message.
*/
public ConcurrentWriteOperationDetectedException(String message) {
super(message);
}
}
|
ConcurrentWriteOperationDetectedException
|
java
|
elastic__elasticsearch
|
libs/geo/src/main/java/org/elasticsearch/geometry/Point.java
|
{
"start": 674,
"end": 2828
}
|
class ____ implements Geometry {
public static final Point EMPTY = new Point();
private final double y;
private final double x;
private final double z;
private final boolean empty;
private Point() {
y = 0;
x = 0;
z = Double.NaN;
empty = true;
}
public Point(double x, double y) {
this(x, y, Double.NaN);
}
public Point(double x, double y, double z) {
this.y = y;
this.x = x;
this.z = z;
this.empty = false;
}
@Override
public ShapeType type() {
return ShapeType.POINT;
}
public double getY() {
return y;
}
public double getX() {
return x;
}
public double getZ() {
return z;
}
public double getLat() {
return y;
}
public double getLon() {
return x;
}
public double getAlt() {
return z;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Point point = (Point) o;
if (point.empty != empty) return false;
if (Double.compare(point.y, y) != 0) return false;
if (Double.compare(point.x, x) != 0) return false;
return Double.compare(point.z, z) == 0;
}
@Override
public int hashCode() {
int result;
long temp;
temp = Double.doubleToLongBits(y);
result = (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(x);
result = 31 * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(z);
result = 31 * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public <T, E extends Exception> T visit(GeometryVisitor<T, E> visitor) throws E {
return visitor.visit(this);
}
@Override
public boolean isEmpty() {
return empty;
}
@Override
public boolean hasZ() {
return Double.isNaN(z) == false;
}
@Override
public String toString() {
return WellKnownText.toWKT(this);
}
}
|
Point
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
|
{
"start": 2818,
"end": 2988
}
|
interface ____ directly accessing
* functionality in DistributedFileSystem or DFSClient.
*
* Note that this is distinct from the similarly-named DFSAdmin, which
* is a
|
to
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/dispatcher/JobMasterTester.java
|
{
"start": 4277,
"end": 13865
}
|
class ____ {
private final Map<ExecutionAttemptID, CompletableFuture<Void>> completedAttemptFutures;
private final CompletableFuture<Void> completedFuture;
public CheckpointCompletionHandler(List<TaskDeploymentDescriptor> descriptors) {
this.completedAttemptFutures =
descriptors.stream()
.map(TaskDeploymentDescriptor::getExecutionAttemptId)
.collect(
Collectors.toMap(
Function.identity(),
ignored -> new CompletableFuture<>()));
this.completedFuture = FutureUtils.completeAll(completedAttemptFutures.values());
}
void completeAttempt(ExecutionAttemptID executionAttemptId) {
completedAttemptFutures.get(executionAttemptId).complete(null);
}
CompletableFuture<Void> getCompletedFuture() {
return completedFuture;
}
}
private final UnresolvedTaskManagerLocation taskManagerLocation =
new LocalUnresolvedTaskManagerLocation();
private final ConcurrentMap<ExecutionAttemptID, TaskDeploymentDescriptor> descriptors =
new ConcurrentHashMap<>();
private final TestingRpcService rpcService;
private final JobID jobId;
private final JobMasterGateway jobMasterGateway;
private final TaskExecutorGateway taskExecutorGateway;
private final CompletableFuture<List<TaskDeploymentDescriptor>> descriptorsFuture =
new CompletableFuture<>();
private final ConcurrentMap<Long, CheckpointCompletionHandler> checkpoints =
new ConcurrentHashMap<>();
public JobMasterTester(
TestingRpcService rpcService, JobID jobId, JobMasterGateway jobMasterGateway) {
this.rpcService = rpcService;
this.jobId = jobId;
this.jobMasterGateway = jobMasterGateway;
this.taskExecutorGateway = createTaskExecutorGateway();
}
public CompletableFuture<Acknowledge> transitionTo(
List<TaskDeploymentDescriptor> descriptors, ExecutionState state) {
final List<CompletableFuture<Acknowledge>> futures =
descriptors.stream()
.map(TaskDeploymentDescriptor::getExecutionAttemptId)
.map(
attemptId ->
jobMasterGateway.updateTaskExecutionState(
new TaskExecutionState(attemptId, state)))
.collect(Collectors.toList());
return FutureUtils.completeAll(futures).thenApply(ignored -> Acknowledge.get());
}
public CompletableFuture<List<TaskDeploymentDescriptor>> deployVertices(int numSlots) {
return jobMasterGateway
.registerTaskManager(
jobId,
TaskManagerRegistrationInformation.create(
taskExecutorGateway.getAddress(),
taskManagerLocation,
TestingUtils.zeroUUID()),
TIMEOUT)
.thenCompose(ignored -> offerSlots(numSlots))
.thenCompose(ignored -> descriptorsFuture);
}
public CompletableFuture<Void> getCheckpointFuture(long checkpointId) {
return descriptorsFuture.thenCompose(
descriptors ->
checkpoints
.computeIfAbsent(
checkpointId,
key -> new CheckpointCompletionHandler(descriptors))
.getCompletedFuture());
}
@Override
public void close() throws IOException {
rpcService.unregisterGateway(taskExecutorGateway.getAddress());
}
private TaskExecutorGateway createTaskExecutorGateway() {
final TestingTaskExecutorGateway taskExecutorGateway =
new TestingTaskExecutorGatewayBuilder()
.setSubmitTaskConsumer(this::onSubmitTaskConsumer)
.setTriggerCheckpointFunction(this::onTriggerCheckpoint)
.setConfirmCheckpointFunction(this::onConfirmCheckpoint)
.createTestingTaskExecutorGateway();
rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway);
return taskExecutorGateway;
}
private CompletableFuture<TaskInformation> getTaskInformation(
ExecutionAttemptID executionAttemptId) {
return descriptorsFuture.thenApply(
descriptors -> {
final TaskDeploymentDescriptor descriptor =
descriptors.stream()
.filter(
desc ->
executionAttemptId.equals(
desc.getExecutionAttemptId()))
.findAny()
.orElseThrow(
() ->
new IllegalStateException(
String.format(
"Task descriptor for %s not found.",
executionAttemptId)));
try {
return descriptor.getTaskInformation();
} catch (Exception e) {
throw new IllegalStateException(
String.format(
"Unable to deserialize task information of %s.",
executionAttemptId));
}
});
}
private CompletableFuture<Acknowledge> onTriggerCheckpoint(
ExecutionAttemptID executionAttemptId,
long checkpointId,
long checkpointTimestamp,
CheckpointOptions checkpointOptions) {
return getTaskInformation(executionAttemptId)
.thenCompose(
taskInformation -> {
jobMasterGateway.acknowledgeCheckpoint(
jobId,
executionAttemptId,
checkpointId,
new CheckpointMetrics(),
serializeTaskStateSnapshot(
createNonEmptyStateSnapshot(taskInformation)));
return CompletableFuture.completedFuture(Acknowledge.get());
});
}
private CompletableFuture<Acknowledge> onConfirmCheckpoint(
ExecutionAttemptID executionAttemptId, long checkpointId, long checkpointTimestamp) {
return getTaskInformation(executionAttemptId)
.thenCompose(
taskInformation ->
completeAttemptCheckpoint(checkpointId, executionAttemptId));
}
private CompletableFuture<Acknowledge> onSubmitTaskConsumer(
TaskDeploymentDescriptor taskDeploymentDescriptor, JobMasterId jobMasterId) {
return jobMasterGateway
.requestJob(TIMEOUT)
.thenCompose(
executionGraphInfo -> {
final int numVertices =
Iterables.size(
executionGraphInfo
.getArchivedExecutionGraph()
.getAllExecutionVertices());
descriptors.put(
taskDeploymentDescriptor.getExecutionAttemptId(),
taskDeploymentDescriptor);
if (descriptors.size() == numVertices) {
descriptorsFuture.complete(new ArrayList<>(descriptors.values()));
}
return CompletableFuture.completedFuture(Acknowledge.get());
});
}
private CompletableFuture<Acknowledge> completeAttemptCheckpoint(
long checkpointId, ExecutionAttemptID executionAttemptId) {
return descriptorsFuture
.thenAccept(
descriptors ->
checkpoints
.computeIfAbsent(
checkpointId,
key -> new CheckpointCompletionHandler(descriptors))
.completeAttempt(executionAttemptId))
.thenApply(ignored -> Acknowledge.get());
}
private CompletableFuture<Collection<SlotOffer>> offerSlots(int numSlots) {
final List<SlotOffer> offers = new ArrayList<>();
for (int idx = 0; idx < numSlots; idx++) {
offers.add(new SlotOffer(new AllocationID(), 0, ResourceProfile.ANY));
}
return jobMasterGateway.offerSlots(taskManagerLocation.getResourceID(), offers, TIMEOUT);
}
}
|
CheckpointCompletionHandler
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/FuzzyEqualsShouldNotBeUsedInEqualsMethodTest.java
|
{
"start": 1493,
"end": 1808
}
|
class ____ {
public boolean equals(Object o) {
// BUG: Diagnostic contains: DoubleMath.fuzzyEquals should never
DoubleMath.fuzzyEquals(0.2, 9.3, 2.0);
return true;
}
private
|
FuzzyEqualsShouldNotBeUsedInEqualsMethodPositiveCases
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultSemanticAnalysisPhase.java
|
{
"start": 8911,
"end": 88722
}
|
class ____ extends UserTreeBaseVisitor<SemanticScope> {
private static ClassCastException castError(String formatText, Object... arguments) {
return new ClassCastException(Strings.format(formatText, arguments));
}
/**
* Decorates a user expression node with a PainlessCast.
*/
public static void decorateWithCast(AExpression userExpressionNode, SemanticScope semanticScope) {
Location location = userExpressionNode.getLocation();
Class<?> valueType = semanticScope.getDecoration(userExpressionNode, ValueType.class).valueType();
Class<?> targetType = semanticScope.getDecoration(userExpressionNode, TargetType.class).targetType();
boolean isExplicitCast = semanticScope.getCondition(userExpressionNode, Explicit.class);
boolean isInternalCast = semanticScope.getCondition(userExpressionNode, Internal.class);
PainlessCast painlessCast = AnalyzerCaster.getLegalCast(location, valueType, targetType, isExplicitCast, isInternalCast);
if (painlessCast != null) {
semanticScope.putDecoration(userExpressionNode, new ExpressionPainlessCast(painlessCast));
}
}
/**
* Shortcut to visit a user tree node with a null check.
*/
public void visit(ANode userNode, SemanticScope semanticScope) {
if (userNode != null) {
userNode.visit(this, semanticScope);
}
}
/**
* Shortcut to visit a user expression node with additional checks common to most expression nodes. These
* additional checks include looking for an escaped partial canonical type, an unexpected static type, and an
* unexpected value type.
*/
public void checkedVisit(AExpression userExpressionNode, SemanticScope semanticScope) {
if (userExpressionNode != null) {
userExpressionNode.visit(this, semanticScope);
if (semanticScope.hasDecoration(userExpressionNode, PartialCanonicalTypeName.class)) {
throw userExpressionNode.createError(
new IllegalArgumentException(
"cannot resolve symbol ["
+ semanticScope.getDecoration(userExpressionNode, PartialCanonicalTypeName.class).partialCanonicalTypeName()
+ "]"
)
);
}
if (semanticScope.hasDecoration(userExpressionNode, StaticType.class)) {
throw userExpressionNode.createError(
new IllegalArgumentException(
"value required: instead found unexpected type "
+ "["
+ semanticScope.getDecoration(userExpressionNode, StaticType.class).getStaticCanonicalTypeName()
+ "]"
)
);
}
if (semanticScope.hasDecoration(userExpressionNode, ValueType.class) == false) {
throw userExpressionNode.createError(new IllegalStateException("value required: instead found no value"));
}
}
}
/**
* Visits a class.
*/
public void visitClass(SClass userClassNode, ScriptScope scriptScope) {
for (SFunction userFunctionNode : userClassNode.getFunctionNodes()) {
visitFunction(userFunctionNode, scriptScope);
}
}
/**
* Visits a function and defines variables for each parameter.
* Checks: control flow, type validation
*/
public void visitFunction(SFunction userFunctionNode, ScriptScope scriptScope) {
String functionName = userFunctionNode.getFunctionName();
LocalFunction localFunction = scriptScope.getFunctionTable()
.getFunction(functionName, userFunctionNode.getCanonicalTypeNameParameters().size());
Class<?> returnType = localFunction.getReturnType();
List<Class<?>> typeParameters = localFunction.getTypeParameters();
FunctionScope functionScope = newFunctionScope(scriptScope, localFunction.getReturnType());
for (int index = 0; index < typeParameters.size(); ++index) {
Class<?> typeParameter = typeParameters.get(index);
String parameterName = userFunctionNode.getParameterNames().get(index);
functionScope.defineVariable(userFunctionNode.getLocation(), typeParameter, parameterName, false);
}
SBlock userBlockNode = userFunctionNode.getBlockNode();
if (userBlockNode.getStatementNodes().isEmpty()) {
throw userFunctionNode.createError(
new IllegalArgumentException(
Strings.format(
"invalid function definition: found no statements for function [%s] with [%d] parameters",
functionName,
typeParameters.size()
)
)
);
}
functionScope.setCondition(userBlockNode, LastSource.class);
visit(userBlockNode, functionScope.newLocalScope());
boolean methodEscape = functionScope.getCondition(userBlockNode, MethodEscape.class);
boolean isAutoReturnEnabled = userFunctionNode.isAutoReturnEnabled();
if (methodEscape == false && isAutoReturnEnabled == false && returnType != void.class) {
throw userFunctionNode.createError(
new IllegalArgumentException(
Strings.format(
"invalid function definition: not all paths provide a return value for function [%s] with [%d] parameters",
functionName,
typeParameters.size()
)
)
);
}
if (methodEscape) {
functionScope.setCondition(userFunctionNode, MethodEscape.class);
}
}
/**
* Visits a block and which contains one-to-many statements.
* Checks: control flow
*/
@Override
public void visitBlock(SBlock userBlockNode, SemanticScope semanticScope) {
List<AStatement> userStatementNodes = userBlockNode.getStatementNodes();
if (userStatementNodes.isEmpty()) {
throw userBlockNode.createError(new IllegalArgumentException("invalid block: found no statements"));
}
AStatement lastUserStatement = userStatementNodes.get(userStatementNodes.size() - 1);
boolean lastSource = semanticScope.getCondition(userBlockNode, LastSource.class);
boolean beginLoop = semanticScope.getCondition(userBlockNode, BeginLoop.class);
boolean inLoop = semanticScope.getCondition(userBlockNode, InLoop.class);
boolean lastLoop = semanticScope.getCondition(userBlockNode, LastLoop.class);
boolean allEscape;
boolean anyContinue = false;
boolean anyBreak = false;
for (AStatement userStatementNode : userStatementNodes) {
if (inLoop) {
semanticScope.setCondition(userStatementNode, InLoop.class);
}
if (userStatementNode == lastUserStatement) {
if (beginLoop || lastLoop) {
semanticScope.setCondition(userStatementNode, LastLoop.class);
}
if (lastSource) {
semanticScope.setCondition(userStatementNode, LastSource.class);
}
}
visit(userStatementNode, semanticScope);
allEscape = semanticScope.getCondition(userStatementNode, AllEscape.class);
if (userStatementNode == lastUserStatement) {
semanticScope.replicateCondition(userStatementNode, userBlockNode, MethodEscape.class);
semanticScope.replicateCondition(userStatementNode, userBlockNode, LoopEscape.class);
if (allEscape) {
semanticScope.setCondition(userBlockNode, AllEscape.class);
}
} else {
if (allEscape) {
throw userBlockNode.createError(new IllegalArgumentException("invalid block: unreachable statement"));
}
}
anyContinue |= semanticScope.getCondition(userStatementNode, AnyContinue.class);
anyBreak |= semanticScope.getCondition(userStatementNode, AnyBreak.class);
}
if (anyContinue) {
semanticScope.setCondition(userBlockNode, AnyContinue.class);
}
if (anyBreak) {
semanticScope.setCondition(userBlockNode, AnyBreak.class);
}
}
/**
* Visits an if statement with error checking for an extraneous if.
* Checks: control flow
*/
@Override
public void visitIf(SIf userIfNode, SemanticScope semanticScope) {
AExpression userConditionNode = userIfNode.getConditionNode();
semanticScope.setCondition(userConditionNode, Read.class);
semanticScope.putDecoration(userConditionNode, new TargetType(boolean.class));
checkedVisit(userConditionNode, semanticScope);
decorateWithCast(userConditionNode, semanticScope);
SBlock userIfBlockNode = userIfNode.getIfBlockNode();
if (userConditionNode instanceof EBooleanConstant || userIfBlockNode == null) {
throw userIfNode.createError(new IllegalArgumentException("extraneous if block"));
}
semanticScope.replicateCondition(userIfNode, userIfBlockNode, LastSource.class);
semanticScope.replicateCondition(userIfNode, userIfBlockNode, InLoop.class);
semanticScope.replicateCondition(userIfNode, userIfBlockNode, LastLoop.class);
visit(userIfBlockNode, semanticScope.newLocalScope());
semanticScope.replicateCondition(userIfBlockNode, userIfNode, AnyContinue.class);
semanticScope.replicateCondition(userIfBlockNode, userIfNode, AnyBreak.class);
}
/**
* Visits an if/else statement with error checking for an extraneous if/else.
* Checks: control flow
*/
@Override
public void visitIfElse(SIfElse userIfElseNode, SemanticScope semanticScope) {
AExpression userConditionNode = userIfElseNode.getConditionNode();
semanticScope.setCondition(userConditionNode, Read.class);
semanticScope.putDecoration(userConditionNode, new TargetType(boolean.class));
checkedVisit(userConditionNode, semanticScope);
decorateWithCast(userConditionNode, semanticScope);
SBlock userIfBlockNode = userIfElseNode.getIfBlockNode();
if (userConditionNode instanceof EBooleanConstant || userIfBlockNode == null) {
throw userIfElseNode.createError(new IllegalArgumentException("extraneous if block"));
}
semanticScope.replicateCondition(userIfElseNode, userIfBlockNode, LastSource.class);
semanticScope.replicateCondition(userIfElseNode, userIfBlockNode, InLoop.class);
semanticScope.replicateCondition(userIfElseNode, userIfBlockNode, LastLoop.class);
visit(userIfBlockNode, semanticScope.newLocalScope());
SBlock userElseBlockNode = userIfElseNode.getElseBlockNode();
if (userElseBlockNode == null) {
throw userIfElseNode.createError(new IllegalArgumentException("extraneous else block."));
}
semanticScope.replicateCondition(userIfElseNode, userElseBlockNode, LastSource.class);
semanticScope.replicateCondition(userIfElseNode, userElseBlockNode, InLoop.class);
semanticScope.replicateCondition(userIfElseNode, userElseBlockNode, LastLoop.class);
visit(userElseBlockNode, semanticScope.newLocalScope());
if (semanticScope.getCondition(userIfBlockNode, MethodEscape.class)
&& semanticScope.getCondition(userElseBlockNode, MethodEscape.class)) {
semanticScope.setCondition(userIfElseNode, MethodEscape.class);
}
if (semanticScope.getCondition(userIfBlockNode, LoopEscape.class)
&& semanticScope.getCondition(userElseBlockNode, LoopEscape.class)) {
semanticScope.setCondition(userIfElseNode, LoopEscape.class);
}
if (semanticScope.getCondition(userIfBlockNode, AllEscape.class)
&& semanticScope.getCondition(userElseBlockNode, AllEscape.class)) {
semanticScope.setCondition(userIfElseNode, AllEscape.class);
}
if (semanticScope.getCondition(userIfBlockNode, AnyContinue.class)
|| semanticScope.getCondition(userElseBlockNode, AnyContinue.class)) {
semanticScope.setCondition(userIfElseNode, AnyContinue.class);
}
if (semanticScope.getCondition(userIfBlockNode, AnyBreak.class) || semanticScope.getCondition(userElseBlockNode, AnyBreak.class)) {
semanticScope.setCondition(userIfElseNode, AnyBreak.class);
}
}
/**
* Visits a while statement with error checking for an extraneous loop.
* Checks: control flow
*/
@Override
public void visitWhile(SWhile userWhileNode, SemanticScope semanticScope) {
semanticScope = semanticScope.newLocalScope();
AExpression userConditionNode = userWhileNode.getConditionNode();
semanticScope.setCondition(userConditionNode, Read.class);
semanticScope.putDecoration(userConditionNode, new TargetType(boolean.class));
checkedVisit(userConditionNode, semanticScope);
decorateWithCast(userConditionNode, semanticScope);
SBlock userBlockNode = userWhileNode.getBlockNode();
boolean continuous = false;
if (userConditionNode instanceof EBooleanConstant bc) {
continuous = bc.getBool();
if (continuous == false) {
throw userWhileNode.createError(new IllegalArgumentException("extraneous while loop"));
} else {
semanticScope.setCondition(userWhileNode, ContinuousLoop.class);
}
if (userBlockNode == null) {
throw userWhileNode.createError(new IllegalArgumentException("no paths escape from while loop"));
}
}
if (userBlockNode != null) {
semanticScope.setCondition(userBlockNode, BeginLoop.class);
semanticScope.setCondition(userBlockNode, InLoop.class);
visit(userBlockNode, semanticScope);
if (semanticScope.getCondition(userBlockNode, LoopEscape.class)
&& semanticScope.getCondition(userBlockNode, AnyContinue.class) == false) {
throw userWhileNode.createError(new IllegalArgumentException("extraneous while loop"));
}
if (continuous && semanticScope.getCondition(userBlockNode, AnyBreak.class) == false) {
semanticScope.setCondition(userWhileNode, MethodEscape.class);
semanticScope.setCondition(userWhileNode, AllEscape.class);
}
}
}
/**
* Visits a do-while statement with error checking for an extraneous loop.
* Checks: control flow
*/
@Override
public void visitDo(SDo userDoNode, SemanticScope semanticScope) {
semanticScope = semanticScope.newLocalScope();
SBlock userBlockNode = userDoNode.getBlockNode();
if (userBlockNode == null) {
throw userDoNode.createError(new IllegalArgumentException("extraneous do-while loop"));
}
semanticScope.setCondition(userBlockNode, BeginLoop.class);
semanticScope.setCondition(userBlockNode, InLoop.class);
visit(userBlockNode, semanticScope);
if (semanticScope.getCondition(userBlockNode, LoopEscape.class)
&& semanticScope.getCondition(userBlockNode, AnyContinue.class) == false) {
throw userDoNode.createError(new IllegalArgumentException("extraneous do-while loop"));
}
AExpression userConditionNode = userDoNode.getConditionNode();
semanticScope.setCondition(userConditionNode, Read.class);
semanticScope.putDecoration(userConditionNode, new TargetType(boolean.class));
checkedVisit(userConditionNode, semanticScope);
decorateWithCast(userConditionNode, semanticScope);
boolean continuous;
if (userConditionNode instanceof EBooleanConstant bc) {
continuous = bc.getBool();
if (continuous == false) {
throw userDoNode.createError(new IllegalArgumentException("extraneous do-while loop"));
} else {
semanticScope.setCondition(userDoNode, ContinuousLoop.class);
}
if (semanticScope.getCondition(userBlockNode, AnyBreak.class) == false) {
semanticScope.setCondition(userDoNode, MethodEscape.class);
semanticScope.setCondition(userDoNode, AllEscape.class);
}
}
}
/**
* Visits a for statement with error checking for an extraneous loop.
* Checks: control flow
*/
@Override
public void visitFor(SFor userForNode, SemanticScope semanticScope) {
semanticScope = semanticScope.newLocalScope();
ANode userInitializerNode = userForNode.getInitializerNode();
if (userInitializerNode != null) {
if (userInitializerNode instanceof SDeclBlock) {
visit(userInitializerNode, semanticScope);
} else if (userInitializerNode instanceof AExpression ae) {
checkedVisit(ae, semanticScope);
} else {
throw userForNode.createError(new IllegalStateException("illegal tree structure"));
}
}
AExpression userConditionNode = userForNode.getConditionNode();
SBlock userBlockNode = userForNode.getBlockNode();
boolean continuous = false;
if (userConditionNode != null) {
semanticScope.setCondition(userConditionNode, Read.class);
semanticScope.putDecoration(userConditionNode, new TargetType(boolean.class));
checkedVisit(userConditionNode, semanticScope);
decorateWithCast(userConditionNode, semanticScope);
if (userConditionNode instanceof EBooleanConstant bc) {
continuous = bc.getBool();
if (continuous == false) {
throw userForNode.createError(new IllegalArgumentException("extraneous for loop"));
}
if (userBlockNode == null) {
throw userForNode.createError(new IllegalArgumentException("no paths escape from for loop"));
}
}
} else {
continuous = true;
}
AExpression userAfterthoughtNode = userForNode.getAfterthoughtNode();
if (userAfterthoughtNode != null) {
checkedVisit(userAfterthoughtNode, semanticScope);
}
if (userBlockNode != null) {
semanticScope.setCondition(userBlockNode, BeginLoop.class);
semanticScope.setCondition(userBlockNode, InLoop.class);
visit(userBlockNode, semanticScope);
if (semanticScope.getCondition(userBlockNode, LoopEscape.class)
&& semanticScope.getCondition(userBlockNode, AnyContinue.class) == false) {
throw userForNode.createError(new IllegalArgumentException("extraneous for loop"));
}
if (continuous && semanticScope.getCondition(userBlockNode, AnyBreak.class) == false) {
semanticScope.setCondition(userForNode, MethodEscape.class);
semanticScope.setCondition(userForNode, AllEscape.class);
}
}
}
/**
* Visits a for-each statement which and adds an internal variable for a generated iterator.
* Checks: control flow
*/
@Override
public void visitEach(SEach userEachNode, SemanticScope semanticScope) {
AExpression userIterableNode = userEachNode.getIterableNode();
semanticScope.setCondition(userIterableNode, Read.class);
checkedVisit(userIterableNode, semanticScope);
String canonicalTypeName = userEachNode.getCanonicalTypeName();
Class<?> type = semanticScope.getScriptScope().getPainlessLookup().canonicalTypeNameToType(canonicalTypeName);
if (type == null) {
throw userEachNode.createError(
new IllegalArgumentException("invalid foreach loop: type [" + canonicalTypeName + "] not found")
);
}
semanticScope = semanticScope.newLocalScope();
Location location = userEachNode.getLocation();
String symbol = userEachNode.getSymbol();
Variable variable = semanticScope.defineVariable(location, type, symbol, true);
semanticScope.putDecoration(userEachNode, new SemanticVariable(variable));
SBlock userBlockNode = userEachNode.getBlockNode();
if (userBlockNode == null) {
throw userEachNode.createError(new IllegalArgumentException("extraneous foreach loop"));
}
semanticScope.setCondition(userBlockNode, BeginLoop.class);
semanticScope.setCondition(userBlockNode, InLoop.class);
visit(userBlockNode, semanticScope);
if (semanticScope.getCondition(userBlockNode, LoopEscape.class)
&& semanticScope.getCondition(userBlockNode, AnyContinue.class) == false) {
throw userEachNode.createError(new IllegalArgumentException("extraneous foreach loop"));
}
Class<?> iterableValueType = semanticScope.getDecoration(userIterableNode, ValueType.class).valueType();
if (iterableValueType.isArray()) {
PainlessCast painlessCast = AnalyzerCaster.getLegalCast(
location,
iterableValueType.getComponentType(),
variable.type(),
true,
true
);
if (painlessCast != null) {
semanticScope.putDecoration(userEachNode, new ExpressionPainlessCast(painlessCast));
}
} else if (iterableValueType == def.class || Iterable.class.isAssignableFrom(iterableValueType)) {
if (iterableValueType != def.class) {
PainlessMethod method = semanticScope.getScriptScope()
.getPainlessLookup()
.lookupPainlessMethod(iterableValueType, false, "iterator", 0);
if (method == null) {
throw userEachNode.createError(
new IllegalArgumentException(
"invalid foreach loop: " + "method [" + typeToCanonicalTypeName(iterableValueType) + ", iterator/0] not found"
)
);
}
semanticScope.putDecoration(userEachNode, new IterablePainlessMethod(method));
}
PainlessCast painlessCast = AnalyzerCaster.getLegalCast(location, def.class, type, true, true);
if (painlessCast != null) {
semanticScope.putDecoration(userEachNode, new ExpressionPainlessCast(painlessCast));
}
} else {
throw userEachNode.createError(
new IllegalArgumentException(
"invalid foreach loop: "
+ "cannot iterate over type ["
+ PainlessLookupUtility.typeToCanonicalTypeName(iterableValueType)
+ "]."
)
);
}
}
/**
 * Visits a declaration block, delegating to each contained declaration in order.
 */
@Override
public void visitDeclBlock(SDeclBlock userDeclBlockNode, SemanticScope semanticScope) {
    // declarations are analyzed in source order so earlier variables are visible to later initializers
    userDeclBlockNode.getDeclarationNodes().forEach(declaration -> visit(declaration, semanticScope));
}
/**
 * Visits a single declaration: validates the declared type, analyzes the optional
 * initializer against that type, and defines the variable in the current scope.
 * Checks: type validation
 */
@Override
public void visitDeclaration(SDeclaration userDeclarationNode, SemanticScope semanticScope) {
    ScriptScope scripts = semanticScope.getScriptScope();
    String declaredName = userDeclarationNode.getSymbol();

    // a variable may not shadow a type name
    if (scripts.getPainlessLookup().isValidCanonicalClassName(declaredName)) {
        throw userDeclarationNode.createError(
            new IllegalArgumentException("invalid declaration: type [" + declaredName + "] cannot be a name")
        );
    }

    String declaredTypeName = userDeclarationNode.getCanonicalTypeName();
    Class<?> declaredType = scripts.getPainlessLookup().canonicalTypeNameToType(declaredTypeName);

    if (declaredType == null) {
        throw userDeclarationNode.createError(
            new IllegalArgumentException("invalid declaration: cannot resolve type [" + declaredTypeName + "]")
        );
    }

    AExpression initializer = userDeclarationNode.getValueNode();

    if (initializer != null) {
        // the initializer is read and coerced to the declared type
        semanticScope.setCondition(initializer, Read.class);
        semanticScope.putDecoration(initializer, new TargetType(declaredType));
        checkedVisit(initializer, semanticScope);
        decorateWithCast(initializer, semanticScope);
    }

    Variable variable = semanticScope.defineVariable(userDeclarationNode.getLocation(), declaredType, declaredName, false);
    semanticScope.putDecoration(userDeclarationNode, new SemanticVariable(variable));
}
/**
 * Visits a return statement, coercing any returned value to the method's return type.
 * A bare {@code return} is only legal in a {@code void} method.
 * Checks: type validation
 */
@Override
public void visitReturn(SReturn userReturnNode, SemanticScope semanticScope) {
    AExpression returned = userReturnNode.getValueNode();

    if (returned != null) {
        // value must be readable and castable to the declared return type
        semanticScope.setCondition(returned, Read.class);
        semanticScope.putDecoration(returned, new TargetType(semanticScope.getReturnType()));
        semanticScope.setCondition(returned, Internal.class);
        checkedVisit(returned, semanticScope);
        decorateWithCast(returned, semanticScope);
    } else if (semanticScope.getReturnType() != void.class) {
        // a value-less return from a non-void method is an illegal implicit cast to void
        throw userReturnNode.createError(
            castError(
                "cannot cast from [%s] to [%s]",
                semanticScope.getReturnCanonicalTypeName(),
                PainlessLookupUtility.typeToCanonicalTypeName(void.class)
            )
        );
    }

    // a return unconditionally escapes the method, any enclosing loop, and all paths
    semanticScope.setCondition(userReturnNode, MethodEscape.class);
    semanticScope.setCondition(userReturnNode, LoopEscape.class);
    semanticScope.setCondition(userReturnNode, AllEscape.class);
}
/**
 * Visits an expression used as a statement. If it is the final statement of a
 * non-void script, its value becomes an implicit return and is cast accordingly.
 * Checks: control flow, type validation
 */
@Override
public void visitExpression(SExpression userExpressionNode, SemanticScope semanticScope) {
    Class<?> returnType = semanticScope.getReturnType();
    // only the last top-level statement of a non-void script may produce the implicit return value
    boolean mustReturnValue = semanticScope.getCondition(userExpressionNode, LastSource.class) && returnType != void.class;
    AExpression statement = userExpressionNode.getStatementNode();

    if (mustReturnValue) {
        semanticScope.setCondition(statement, Read.class);
    }

    checkedVisit(statement, semanticScope);
    Class<?> statementValueType = semanticScope.getDecoration(statement, ValueType.class).valueType();

    if (mustReturnValue && statementValueType != void.class) {
        // promote the trailing expression into the implicit return
        semanticScope.putDecoration(statement, new TargetType(returnType));
        semanticScope.setCondition(statement, Internal.class);
        decorateWithCast(statement, semanticScope);
        semanticScope.setCondition(userExpressionNode, MethodEscape.class);
        semanticScope.setCondition(userExpressionNode, LoopEscape.class);
        semanticScope.setCondition(userExpressionNode, AllEscape.class);
    }
}
/**
 * Visits a try statement.
 * Checks: control flow
 * <p>
 * Escape flags (method/loop/all) only hold for the try statement as a whole when they
 * hold for the try block AND every catch clause (an exception may divert control into
 * any clause); continue/break flags hold when any branch contains one.
 */
@Override
public void visitTry(STry userTryNode, SemanticScope semanticScope) {
    SBlock userBlockNode = userTryNode.getBlockNode();
    if (userBlockNode == null) {
        throw userTryNode.createError(new IllegalArgumentException("extraneous try statement"));
    }
    // the try block inherits the statement-position and loop context of the try itself
    semanticScope.replicateCondition(userTryNode, userBlockNode, LastSource.class);
    semanticScope.replicateCondition(userTryNode, userBlockNode, InLoop.class);
    semanticScope.replicateCondition(userTryNode, userBlockNode, LastLoop.class);
    // the try block gets its own variable scope
    visit(userBlockNode, semanticScope.newLocalScope());
    // seed the accumulated flags from the try block's analysis
    boolean methodEscape = semanticScope.getCondition(userBlockNode, MethodEscape.class);
    boolean loopEscape = semanticScope.getCondition(userBlockNode, LoopEscape.class);
    boolean allEscape = semanticScope.getCondition(userBlockNode, AllEscape.class);
    boolean anyContinue = semanticScope.getCondition(userBlockNode, AnyContinue.class);
    boolean anyBreak = semanticScope.getCondition(userBlockNode, AnyBreak.class);
    for (SCatch userCatchNode : userTryNode.getCatchNodes()) {
        semanticScope.replicateCondition(userTryNode, userCatchNode, LastSource.class);
        semanticScope.replicateCondition(userTryNode, userCatchNode, InLoop.class);
        semanticScope.replicateCondition(userTryNode, userCatchNode, LastLoop.class);
        // each catch clause also gets its own variable scope
        visit(userCatchNode, semanticScope.newLocalScope());
        // escapes must occur on every path (&=); continues/breaks on any path (|=)
        methodEscape &= semanticScope.getCondition(userCatchNode, MethodEscape.class);
        loopEscape &= semanticScope.getCondition(userCatchNode, LoopEscape.class);
        allEscape &= semanticScope.getCondition(userCatchNode, AllEscape.class);
        anyContinue |= semanticScope.getCondition(userCatchNode, AnyContinue.class);
        anyBreak |= semanticScope.getCondition(userCatchNode, AnyBreak.class);
    }
    // propagate the merged control-flow facts to the try node itself
    if (methodEscape) {
        semanticScope.setCondition(userTryNode, MethodEscape.class);
    }
    if (loopEscape) {
        semanticScope.setCondition(userTryNode, LoopEscape.class);
    }
    if (allEscape) {
        semanticScope.setCondition(userTryNode, AllEscape.class);
    }
    if (anyContinue) {
        semanticScope.setCondition(userTryNode, AnyContinue.class);
    }
    if (anyBreak) {
        semanticScope.setCondition(userTryNode, AnyBreak.class);
    }
}
/**
 * Visits a catch statement and defines a variable for the caught exception.
 * The declared exception type must resolve and must be assignable to this
 * clause's base exception type.
 * Checks: control flow, type validation
 */
@Override
public void visitCatch(SCatch userCatchNode, SemanticScope semanticScope) {
    ScriptScope scriptScope = semanticScope.getScriptScope();
    String symbol = userCatchNode.getSymbol();

    // the caught variable's name must not shadow a type name
    if (scriptScope.getPainlessLookup().isValidCanonicalClassName(symbol)) {
        throw userCatchNode.createError(
            new IllegalArgumentException("invalid catch declaration: type [" + symbol + "] cannot be a name")
        );
    }

    String canonicalTypeName = userCatchNode.getCanonicalTypeName();
    Class<?> type = scriptScope.getPainlessLookup().canonicalTypeNameToType(canonicalTypeName);

    if (type == null) {
        throw userCatchNode.createError(
            new IllegalArgumentException("invalid catch declaration: cannot resolve type [" + canonicalTypeName + "]")
        );
    }

    Location location = userCatchNode.getLocation();
    Variable variable = semanticScope.defineVariable(location, type, symbol, false);
    semanticScope.putDecoration(userCatchNode, new SemanticVariable(variable));

    // the declared type must be a subtype of this clause's base exception type;
    // reuse the local rather than invoking the getter a second time
    Class<?> baseException = userCatchNode.getBaseException();

    if (baseException.isAssignableFrom(type) == false) {
        throw userCatchNode.createError(
            castError(
                "cannot cast from [%s] to [%s]",
                PainlessLookupUtility.typeToCanonicalTypeName(type),
                PainlessLookupUtility.typeToCanonicalTypeName(baseException)
            )
        );
    }

    SBlock userBlockNode = userCatchNode.getBlockNode();

    if (userBlockNode != null) {
        // the catch block inherits position/loop context, then its escape analysis
        // is replicated back onto the catch node for the enclosing try to merge
        semanticScope.replicateCondition(userCatchNode, userBlockNode, LastSource.class);
        semanticScope.replicateCondition(userCatchNode, userBlockNode, InLoop.class);
        semanticScope.replicateCondition(userCatchNode, userBlockNode, LastLoop.class);
        visit(userBlockNode, semanticScope);

        semanticScope.replicateCondition(userBlockNode, userCatchNode, MethodEscape.class);
        semanticScope.replicateCondition(userBlockNode, userCatchNode, LoopEscape.class);
        semanticScope.replicateCondition(userBlockNode, userCatchNode, AllEscape.class);
        semanticScope.replicateCondition(userBlockNode, userCatchNode, AnyContinue.class);
        semanticScope.replicateCondition(userBlockNode, userCatchNode, AnyBreak.class);
    }
}
/**
 * Visits a throw statement, coercing the thrown expression to {@link Exception}.
 * Checks: type validation
 */
@Override
public void visitThrow(SThrow userThrowNode, SemanticScope semanticScope) {
    AExpression thrown = userThrowNode.getExpressionNode();

    // the thrown value is read and must be castable to Exception
    semanticScope.setCondition(thrown, Read.class);
    semanticScope.putDecoration(thrown, new TargetType(Exception.class));
    checkedVisit(thrown, semanticScope);
    decorateWithCast(thrown, semanticScope);

    // a throw unconditionally escapes the method, any enclosing loop, and all paths
    semanticScope.setCondition(userThrowNode, MethodEscape.class);
    semanticScope.setCondition(userThrowNode, LoopEscape.class);
    semanticScope.setCondition(userThrowNode, AllEscape.class);
}
/**
 * Visits a continue statement, which is only legal inside a loop and pointless
 * as the last statement of a loop body.
 * Checks: control flow
 */
@Override
public void visitContinue(SContinue userContinueNode, SemanticScope semanticScope) {
    if (!semanticScope.getCondition(userContinueNode, InLoop.class)) {
        throw userContinueNode.createError(new IllegalArgumentException("invalid continue statement: not inside loop"));
    }

    // a continue as the final statement of a loop body is a no-op and therefore rejected
    if (semanticScope.getCondition(userContinueNode, LastLoop.class)) {
        throw userContinueNode.createError(new IllegalArgumentException("extraneous continue statement"));
    }

    semanticScope.setCondition(userContinueNode, AllEscape.class);
    semanticScope.setCondition(userContinueNode, AnyContinue.class);
}
/**
 * Visits a break statement, which is only legal inside a loop.
 * Checks: control flow
 */
@Override
public void visitBreak(SBreak userBreakNode, SemanticScope semanticScope) {
    if (!semanticScope.getCondition(userBreakNode, InLoop.class)) {
        throw userBreakNode.createError(new IllegalArgumentException("invalid break statement: not inside loop"));
    }

    // a break escapes all paths of its block and terminates the enclosing loop
    semanticScope.setCondition(userBreakNode, AllEscape.class);
    semanticScope.setCondition(userBreakNode, LoopEscape.class);
    semanticScope.setCondition(userBreakNode, AnyBreak.class);
}
/**
 * Visits an assignment expression which handles both simple assignment and compound
 * assignment ({@code op=}). For compound assignment the operand types are promoted
 * exactly as the corresponding stand-alone binary operator would promote them in
 * {@link #visitBinary}.
 * Checks: type validation
 */
@Override
public void visitAssignment(EAssignment userAssignmentNode, SemanticScope semanticScope) {
    // the lhs is a write target; it is also read for compound assignment or when the
    // overall assignment's value is itself consumed
    AExpression userLeftNode = userAssignmentNode.getLeftNode();
    semanticScope.replicateCondition(userAssignmentNode, userLeftNode, Read.class);
    semanticScope.setCondition(userLeftNode, Write.class);
    checkedVisit(userLeftNode, semanticScope);
    Class<?> leftValueType = semanticScope.getDecoration(userLeftNode, ValueType.class).valueType();

    AExpression userRightNode = userAssignmentNode.getRightNode();
    semanticScope.setCondition(userRightNode, Read.class);

    Operation operation = userAssignmentNode.getOperation();

    if (operation != null) {
        // compound assignment (lhs op= rhs)
        checkedVisit(userRightNode, semanticScope);
        Class<?> rightValueType = semanticScope.getDecoration(userRightNode, ValueType.class).valueType();

        Class<?> compoundType;
        boolean isConcatenation = false;
        Class<?> shiftType = null;
        boolean isShift = false;

        if (operation == Operation.MUL || operation == Operation.DIV || operation == Operation.REM || operation == Operation.SUB) {
            compoundType = AnalyzerCaster.promoteNumeric(leftValueType, rightValueType, true);
        } else if (operation == Operation.ADD) {
            compoundType = AnalyzerCaster.promoteAdd(leftValueType, rightValueType);
            // String += anything becomes string concatenation
            isConcatenation = compoundType == String.class;
        } else if (operation == Operation.LSH || operation == Operation.RSH || operation == Operation.USH) {
            // shift operands are promoted independently of each other
            compoundType = AnalyzerCaster.promoteNumeric(leftValueType, false);
            shiftType = AnalyzerCaster.promoteNumeric(rightValueType, false);
            isShift = true;
        } else if (operation == Operation.BWAND || operation == Operation.BWOR) {
            // bitwise and/or promote numerically (no decimals), consistent with visitBinary;
            // previously these incorrectly used the xor promotion
            compoundType = AnalyzerCaster.promoteNumeric(leftValueType, rightValueType, false);
        } else if (operation == Operation.XOR) {
            // xor has its own promotion (additionally covers boolean ^ boolean)
            compoundType = AnalyzerCaster.promoteXor(leftValueType, rightValueType);
        } else {
            throw userAssignmentNode.createError(new IllegalStateException("illegal tree structure"));
        }

        if (compoundType == null || (isShift && shiftType == null)) {
            throw userAssignmentNode.createError(
                castError(
                    "invalid compound assignment: cannot apply [%s=] to types [%s] and [%s]",
                    operation.symbol,
                    leftValueType,
                    rightValueType
                )
            );
        }

        if (isConcatenation) {
            semanticScope.putDecoration(userRightNode, new TargetType(rightValueType));
        } else if (isShift) {
            if (compoundType == def.class) {
                // shifts are promoted independently, but for the def type, we need object.
                semanticScope.putDecoration(userRightNode, new TargetType(def.class));
            } else if (shiftType == long.class) {
                // a long shift amount is explicitly narrowed to int
                semanticScope.putDecoration(userRightNode, new TargetType(int.class));
                semanticScope.setCondition(userRightNode, Explicit.class);
            } else {
                semanticScope.putDecoration(userRightNode, new TargetType(shiftType));
            }
        } else {
            semanticScope.putDecoration(userRightNode, new TargetType(compoundType));
        }

        decorateWithCast(userRightNode, semanticScope);

        // the lhs is upcast to the promoted operation type, then the result is downcast
        // (possibly lossily, hence explicit) back to the lhs type for storage
        Location location = userAssignmentNode.getLocation();
        PainlessCast upcast = AnalyzerCaster.getLegalCast(location, leftValueType, compoundType, false, false);
        PainlessCast downcast = AnalyzerCaster.getLegalCast(location, compoundType, leftValueType, true, false);
        semanticScope.putDecoration(userAssignmentNode, new CompoundType(compoundType));

        if (upcast != null) {
            semanticScope.putDecoration(userAssignmentNode, new UpcastPainlessCast(upcast));
        }

        if (downcast != null) {
            semanticScope.putDecoration(userAssignmentNode, new DowncastPainlessCast(downcast));
        }
        // if the lhs node is a def optimized node we update the actual type to remove the need for a cast
    } else if (semanticScope.getCondition(userLeftNode, DefOptimized.class)) {
        checkedVisit(userRightNode, semanticScope);
        Class<?> rightValueType = semanticScope.getDecoration(userRightNode, ValueType.class).valueType();

        if (rightValueType == void.class) {
            throw userAssignmentNode.createError(
                new IllegalArgumentException(
                    "invalid assignment: cannot assign type [" + PainlessLookupUtility.typeToCanonicalTypeName(void.class) + "]"
                )
            );
        }

        semanticScope.putDecoration(userLeftNode, new ValueType(rightValueType));
        leftValueType = rightValueType;
        // Otherwise, we must adapt the rhs type to the lhs type with a cast.
    } else {
        semanticScope.putDecoration(userRightNode, new TargetType(leftValueType));
        checkedVisit(userRightNode, semanticScope);
        decorateWithCast(userRightNode, semanticScope);
    }

    // the assignment itself produces a value only when it is read (e.g. x = (y = 1))
    semanticScope.putDecoration(
        userAssignmentNode,
        new ValueType(semanticScope.getCondition(userAssignmentNode, Read.class) ? leftValueType : void.class)
    );
}
/**
 * Visits a unary expression which special-cases a negative operator when the child
 * is a constant expression to handle the maximum negative values appropriately
 * (e.g. {@code -2147483648} would otherwise overflow when negating the literal).
 * Checks: type validation
 */
@Override
public void visitUnary(EUnary userUnaryNode, SemanticScope semanticScope) {
    Operation operation = userUnaryNode.getOperation();
    // a unary operation can never be a write target
    if (semanticScope.getCondition(userUnaryNode, Write.class)) {
        throw userUnaryNode.createError(
            new IllegalArgumentException(
                "invalid assignment: cannot assign a value to " + operation.name + " operation " + "[" + operation.symbol + "]"
            )
        );
    }
    // the result must be consumed; otherwise the expression is a useless statement
    if (semanticScope.getCondition(userUnaryNode, Read.class) == false) {
        throw userUnaryNode.createError(
            new IllegalArgumentException(
                "not a statement: result not used from " + operation.name + " operation " + "[" + operation.symbol + "]"
            )
        );
    }
    AExpression userChildNode = userUnaryNode.getChildNode();
    Class<?> valueType;
    Class<?> unaryType = null;
    if (operation == Operation.SUB && (userChildNode instanceof ENumeric || userChildNode instanceof EDecimal)) {
        // fold the minus sign into the numeric literal itself via the Negate condition
        semanticScope.setCondition(userChildNode, Read.class);
        semanticScope.copyDecoration(userUnaryNode, userChildNode, TargetType.class);
        semanticScope.replicateCondition(userUnaryNode, userChildNode, Explicit.class);
        semanticScope.replicateCondition(userUnaryNode, userChildNode, Internal.class);
        semanticScope.setCondition(userChildNode, Negate.class);
        checkedVisit(userChildNode, semanticScope);
        if (semanticScope.hasDecoration(userUnaryNode, TargetType.class)) {
            decorateWithCast(userChildNode, semanticScope);
        }
        valueType = semanticScope.getDecoration(userChildNode, ValueType.class).valueType();
    } else {
        if (operation == Operation.NOT) {
            // logical not: the operand must coerce to boolean
            semanticScope.setCondition(userChildNode, Read.class);
            semanticScope.putDecoration(userChildNode, new TargetType(boolean.class));
            checkedVisit(userChildNode, semanticScope);
            decorateWithCast(userChildNode, semanticScope);
            valueType = boolean.class;
        } else if (operation == Operation.BWNOT || operation == Operation.ADD || operation == Operation.SUB) {
            semanticScope.setCondition(userChildNode, Read.class);
            checkedVisit(userChildNode, semanticScope);
            Class<?> childValueType = semanticScope.getDecoration(userChildNode, ValueType.class).valueType();
            // bitwise not only accepts integral types; +/- also accept decimal types
            unaryType = AnalyzerCaster.promoteNumeric(childValueType, operation != Operation.BWNOT);
            if (unaryType == null) {
                throw userUnaryNode.createError(
                    castError(
                        "cannot apply the %s operator [%s] to the type [%s]",
                        operation.name,
                        operation.symbol,
                        PainlessLookupUtility.typeToCanonicalTypeName(childValueType)
                    )
                );
            }
            semanticScope.putDecoration(userChildNode, new TargetType(unaryType));
            decorateWithCast(userChildNode, semanticScope);
            TargetType targetType = semanticScope.getDecoration(userUnaryNode, TargetType.class);
            // for a def operand, adopt the surrounding expression's expected type when known
            if (unaryType == def.class && targetType != null) {
                valueType = targetType.targetType();
            } else {
                valueType = unaryType;
            }
        } else {
            throw userUnaryNode.createError(new IllegalStateException("unexpected unary operation [" + operation.name + "]"));
        }
    }
    semanticScope.putDecoration(userUnaryNode, new ValueType(valueType));
    if (unaryType != null) {
        semanticScope.putDecoration(userUnaryNode, new UnaryType(unaryType));
    }
}
/**
 * Visits a binary expression which covers all the mathematical operators.
 * Operand types are promoted per-operator, then both operands are cast to the
 * promoted type (except for String concatenation and def-typed results).
 * Checks: type validation
 */
@Override
public void visitBinary(EBinary userBinaryNode, SemanticScope semanticScope) {
    Operation operation = userBinaryNode.getOperation();
    // a binary operation can never be a write target
    if (semanticScope.getCondition(userBinaryNode, Write.class)) {
        throw userBinaryNode.createError(
            new IllegalArgumentException(
                "invalid assignment: cannot assign a value to " + operation.name + " operation " + "[" + operation.symbol + "]"
            )
        );
    }
    // the result must be consumed; otherwise the expression is a useless statement
    if (semanticScope.getCondition(userBinaryNode, Read.class) == false) {
        throw userBinaryNode.createError(
            new IllegalArgumentException(
                "not a statement: result not used from " + operation.name + " operation " + "[" + operation.symbol + "]"
            )
        );
    }
    AExpression userLeftNode = userBinaryNode.getLeftNode();
    semanticScope.setCondition(userLeftNode, Read.class);
    checkedVisit(userLeftNode, semanticScope);
    Class<?> leftValueType = semanticScope.getDecoration(userLeftNode, ValueType.class).valueType();
    AExpression userRightNode = userBinaryNode.getRightNode();
    semanticScope.setCondition(userRightNode, Read.class);
    checkedVisit(userRightNode, semanticScope);
    Class<?> rightValueType = semanticScope.getDecoration(userRightNode, ValueType.class).valueType();
    Class<?> valueType;
    Class<?> binaryType;
    Class<?> shiftType = null;
    if (operation == Operation.FIND || operation == Operation.MATCH) {
        // regex find/match: lhs is a String, rhs a compiled Pattern, result is boolean
        semanticScope.putDecoration(userLeftNode, new TargetType(String.class));
        semanticScope.putDecoration(userRightNode, new TargetType(Pattern.class));
        decorateWithCast(userLeftNode, semanticScope);
        decorateWithCast(userRightNode, semanticScope);
        binaryType = boolean.class;
        valueType = boolean.class;
    } else {
        if (operation == Operation.MUL || operation == Operation.DIV || operation == Operation.REM) {
            binaryType = AnalyzerCaster.promoteNumeric(leftValueType, rightValueType, true);
        } else if (operation == Operation.ADD) {
            // add may promote to String (concatenation) in addition to numeric types
            binaryType = AnalyzerCaster.promoteAdd(leftValueType, rightValueType);
        } else if (operation == Operation.SUB) {
            binaryType = AnalyzerCaster.promoteNumeric(leftValueType, rightValueType, true);
        } else if (operation == Operation.LSH || operation == Operation.RSH || operation == Operation.USH) {
            // shift operands are promoted independently of each other
            binaryType = AnalyzerCaster.promoteNumeric(leftValueType, false);
            shiftType = AnalyzerCaster.promoteNumeric(rightValueType, false);
            if (shiftType == null) {
                binaryType = null;
            }
        } else if (operation == Operation.BWOR || operation == Operation.BWAND) {
            binaryType = AnalyzerCaster.promoteNumeric(leftValueType, rightValueType, false);
        } else if (operation == Operation.XOR) {
            // xor additionally supports boolean ^ boolean
            binaryType = AnalyzerCaster.promoteXor(leftValueType, rightValueType);
        } else {
            throw userBinaryNode.createError(new IllegalStateException("unexpected binary operation [" + operation.name + "]"));
        }
        if (binaryType == null) {
            throw userBinaryNode.createError(
                castError(
                    "cannot apply the %s operator [%s] to the types [%s] and [%s]",
                    operation.name,
                    operation.symbol,
                    PainlessLookupUtility.typeToCanonicalTypeName(leftValueType),
                    PainlessLookupUtility.typeToCanonicalTypeName(rightValueType)
                )
            );
        }
        valueType = binaryType;
        if (binaryType == def.class || shiftType == def.class) {
            // for a def-typed operation, adopt the surrounding expression's expected type when known
            TargetType targetType = semanticScope.getDecoration(userBinaryNode, TargetType.class);
            if (targetType != null) {
                valueType = targetType.targetType();
            }
        } else if (operation != Operation.ADD || binaryType != String.class) {
            // cast both operands to the promoted type (skipped for String concatenation)
            semanticScope.putDecoration(userLeftNode, new TargetType(binaryType));
            if (operation == Operation.LSH || operation == Operation.RSH || operation == Operation.USH) {
                if (shiftType == long.class) {
                    // a long shift amount is explicitly narrowed to int
                    semanticScope.putDecoration(userRightNode, new TargetType(int.class));
                    semanticScope.setCondition(userRightNode, Explicit.class);
                } else {
                    semanticScope.putDecoration(userRightNode, new TargetType(shiftType));
                }
            } else {
                semanticScope.putDecoration(userRightNode, new TargetType(binaryType));
            }
            decorateWithCast(userLeftNode, semanticScope);
            decorateWithCast(userRightNode, semanticScope);
        }
    }
    semanticScope.putDecoration(userBinaryNode, new ValueType(valueType));
    semanticScope.putDecoration(userBinaryNode, new BinaryType(binaryType));
    if (shiftType != null) {
        semanticScope.putDecoration(userBinaryNode, new ShiftType(shiftType));
    }
}
/**
 * Visits a boolean comp expression which covers the boolean comparison operators;
 * both operands must coerce to boolean and the result is boolean.
 * Checks: type validation
 */
@Override
public void visitBooleanComp(EBooleanComp userBooleanCompNode, SemanticScope semanticScope) {
    Operation operation = userBooleanCompNode.getOperation();

    if (semanticScope.getCondition(userBooleanCompNode, Write.class)) {
        throw userBooleanCompNode.createError(
            new IllegalArgumentException(
                "invalid assignment: cannot assign a value to " + operation.name + " operation " + "[" + operation.symbol + "]"
            )
        );
    }

    if (!semanticScope.getCondition(userBooleanCompNode, Read.class)) {
        throw userBooleanCompNode.createError(
            new IllegalArgumentException(
                "not a statement: result not used from " + operation.name + " operation " + "[" + operation.symbol + "]"
            )
        );
    }

    // both operands are handled identically: readable and cast to boolean, left first
    for (AExpression operand : new AExpression[] { userBooleanCompNode.getLeftNode(), userBooleanCompNode.getRightNode() }) {
        semanticScope.setCondition(operand, Read.class);
        semanticScope.putDecoration(operand, new TargetType(boolean.class));
        checkedVisit(operand, semanticScope);
        decorateWithCast(operand, semanticScope);
    }

    semanticScope.putDecoration(userBooleanCompNode, new ValueType(boolean.class));
}
/**
 * Visits a comp expression which covers the mathematical comparison operators.
 * Equality operators use equality promotion; relational operators use numeric
 * promotion. The result is always boolean.
 * Checks: type validation
 */
@Override
public void visitComp(EComp userCompNode, SemanticScope semanticScope) {
    Operation operation = userCompNode.getOperation();
    // a comparison can never be a write target
    if (semanticScope.getCondition(userCompNode, Write.class)) {
        throw userCompNode.createError(
            new IllegalArgumentException(
                "invalid assignment: cannot assign a value to " + operation.name + " operation " + "[" + operation.symbol + "]"
            )
        );
    }
    // the result must be consumed; otherwise the expression is a useless statement
    if (semanticScope.getCondition(userCompNode, Read.class) == false) {
        throw userCompNode.createError(
            new IllegalArgumentException(
                "not a statement: result not used from " + operation.name + " operation " + "[" + operation.symbol + "]"
            )
        );
    }
    AExpression userLeftNode = userCompNode.getLeftNode();
    semanticScope.setCondition(userLeftNode, Read.class);
    checkedVisit(userLeftNode, semanticScope);
    Class<?> leftValueType = semanticScope.getDecoration(userLeftNode, ValueType.class).valueType();
    AExpression userRightNode = userCompNode.getRightNode();
    semanticScope.setCondition(userRightNode, Read.class);
    checkedVisit(userRightNode, semanticScope);
    Class<?> rightValueType = semanticScope.getDecoration(userRightNode, ValueType.class).valueType();
    Class<?> promotedType;
    if (operation == Operation.EQ || operation == Operation.EQR || operation == Operation.NE || operation == Operation.NER) {
        // (reference-)equality comparison
        promotedType = AnalyzerCaster.promoteEquality(leftValueType, rightValueType);
    } else if (operation == Operation.GT || operation == Operation.GTE || operation == Operation.LT || operation == Operation.LTE) {
        // relational comparison requires numeric operands
        promotedType = AnalyzerCaster.promoteNumeric(leftValueType, rightValueType, true);
    } else {
        throw userCompNode.createError(new IllegalStateException("unexpected binary operation [" + operation.name + "]"));
    }
    if (promotedType == null) {
        throw userCompNode.createError(
            castError(
                "cannot apply the %s operator [%s] to the types [%s] and [%s]",
                operation.name,
                operation.symbol,
                PainlessLookupUtility.typeToCanonicalTypeName(leftValueType),
                PainlessLookupUtility.typeToCanonicalTypeName(rightValueType)
            )
        );
    }
    // comparing two null literals is always a constant and therefore rejected
    if ((operation == Operation.EQ || operation == Operation.EQR || operation == Operation.NE || operation == Operation.NER)
        && userLeftNode instanceof ENull
        && userRightNode instanceof ENull) {
        throw userCompNode.createError(new IllegalArgumentException("extraneous comparison of [null] constants"));
    }
    // cast both operands to the promoted type, except for def-typed value equality
    // which is resolved dynamically at run time
    if (operation == Operation.EQR || operation == Operation.NER || promotedType != def.class) {
        semanticScope.putDecoration(userLeftNode, new TargetType(promotedType));
        semanticScope.putDecoration(userRightNode, new TargetType(promotedType));
        decorateWithCast(userLeftNode, semanticScope);
        decorateWithCast(userRightNode, semanticScope);
    }
    semanticScope.putDecoration(userCompNode, new ValueType(boolean.class));
    semanticScope.putDecoration(userCompNode, new ComparisonType(promotedType));
}
/**
 * Visits an explicit cast expression: resolves the named target type and casts
 * the child expression to it.
 * Checks: type validation
 */
@Override
public void visitExplicit(EExplicit userExplicitNode, SemanticScope semanticScope) {
    String canonicalTypeName = userExplicitNode.getCanonicalTypeName();

    // a cast can never be a write target
    if (semanticScope.getCondition(userExplicitNode, Write.class)) {
        throw userExplicitNode.createError(
            new IllegalArgumentException(
                "invalid assignment: cannot assign a value to an explicit cast with target type [" + canonicalTypeName + "]"
            )
        );
    }

    // the cast's result must be consumed
    if (!semanticScope.getCondition(userExplicitNode, Read.class)) {
        throw userExplicitNode.createError(
            new IllegalArgumentException(
                "not a statement: result not used from explicit cast with target type [" + canonicalTypeName + "]"
            )
        );
    }

    Class<?> castType = semanticScope.getScriptScope().getPainlessLookup().canonicalTypeNameToType(canonicalTypeName);

    if (castType == null) {
        throw userExplicitNode.createError(new IllegalArgumentException("cannot resolve type [" + canonicalTypeName + "]"));
    }

    // the child is read and explicitly cast to the resolved type
    AExpression child = userExplicitNode.getChildNode();
    semanticScope.setCondition(child, Read.class);
    semanticScope.putDecoration(child, new TargetType(castType));
    semanticScope.setCondition(child, Explicit.class);
    checkedVisit(child, semanticScope);
    decorateWithCast(child, semanticScope);

    semanticScope.putDecoration(userExplicitNode, new ValueType(castType));
}
/**
 * Visits an instanceof expression: resolves the named type and records it so the
 * check can be generated against both primitive and non-primitive types.
 * Checks: type validation
 */
@Override
public void visitInstanceof(EInstanceof userInstanceofNode, SemanticScope semanticScope) {
    String canonicalTypeName = userInstanceofNode.getCanonicalTypeName();

    // an instanceof check can never be a write target
    if (semanticScope.getCondition(userInstanceofNode, Write.class)) {
        throw userInstanceofNode.createError(
            new IllegalArgumentException(
                "invalid assignment: cannot assign a value to instanceof with target type [" + canonicalTypeName + "]"
            )
        );
    }

    // the boolean result must be consumed
    if (!semanticScope.getCondition(userInstanceofNode, Read.class)) {
        throw userInstanceofNode.createError(
            new IllegalArgumentException(
                "not a statement: result not used from instanceof with target type [" + canonicalTypeName + "]"
            )
        );
    }

    Class<?> resolvedType = semanticScope.getScriptScope().getPainlessLookup().canonicalTypeNameToType(canonicalTypeName);

    if (resolvedType == null) {
        throw userInstanceofNode.createError(new IllegalArgumentException("Not a type [" + canonicalTypeName + "]."));
    }

    AExpression operand = userInstanceofNode.getExpressionNode();
    semanticScope.setCondition(operand, Read.class);
    checkedVisit(operand, semanticScope);

    semanticScope.putDecoration(userInstanceofNode, new ValueType(boolean.class));
    semanticScope.putDecoration(userInstanceofNode, new InstanceType(resolvedType));
}
/**
 * Visits a conditional expression ({@code cond ? a : b}). The condition is cast to
 * boolean; the two branches are either promoted to a common type or, when the
 * surrounding expression already supplies a target type, cast to that target.
 * Checks: type validation
 */
@Override
public void visitConditional(EConditional userConditionalNode, SemanticScope semanticScope) {
    // a conditional can never be a write target
    if (semanticScope.getCondition(userConditionalNode, Write.class)) {
        throw userConditionalNode.createError(
            new IllegalArgumentException("invalid assignment: cannot assign a value to conditional operation [?:]")
        );
    }
    // the result must be consumed
    if (semanticScope.getCondition(userConditionalNode, Read.class) == false) {
        throw userConditionalNode.createError(
            new IllegalArgumentException("not a statement: result not used from conditional operation [?:]")
        );
    }
    AExpression userConditionNode = userConditionalNode.getConditionNode();
    semanticScope.setCondition(userConditionNode, Read.class);
    semanticScope.putDecoration(userConditionNode, new TargetType(boolean.class));
    checkedVisit(userConditionNode, semanticScope);
    decorateWithCast(userConditionNode, semanticScope);
    // both branches inherit the conditional's own target type and cast context
    AExpression userTrueNode = userConditionalNode.getTrueNode();
    semanticScope.setCondition(userTrueNode, Read.class);
    semanticScope.copyDecoration(userConditionalNode, userTrueNode, TargetType.class);
    semanticScope.replicateCondition(userConditionalNode, userTrueNode, Explicit.class);
    semanticScope.replicateCondition(userConditionalNode, userTrueNode, Internal.class);
    checkedVisit(userTrueNode, semanticScope);
    Class<?> leftValueType = semanticScope.getDecoration(userTrueNode, ValueType.class).valueType();
    AExpression userFalseNode = userConditionalNode.getFalseNode();
    semanticScope.setCondition(userFalseNode, Read.class);
    semanticScope.copyDecoration(userConditionalNode, userFalseNode, TargetType.class);
    semanticScope.replicateCondition(userConditionalNode, userFalseNode, Explicit.class);
    semanticScope.replicateCondition(userConditionalNode, userFalseNode, Internal.class);
    checkedVisit(userFalseNode, semanticScope);
    Class<?> rightValueType = semanticScope.getDecoration(userFalseNode, ValueType.class).valueType();
    TargetType targetType = semanticScope.getDecoration(userConditionalNode, TargetType.class);
    Class<?> valueType;
    if (targetType == null) {
        // no surrounding expectation: promote the two branch types to a common type
        Class<?> promote = AnalyzerCaster.promoteConditional(leftValueType, rightValueType);
        if (promote == null) {
            throw userConditionalNode.createError(
                new ClassCastException(
                    Strings.format(
                        "cannot apply the conditional operator [?:] to the types [%s] and [%s]",
                        PainlessLookupUtility.typeToCanonicalTypeName(leftValueType),
                        PainlessLookupUtility.typeToCanonicalTypeName(rightValueType)
                    )
                )
            );
        }
        semanticScope.putDecoration(userTrueNode, new TargetType(promote));
        semanticScope.putDecoration(userFalseNode, new TargetType(promote));
        valueType = promote;
    } else {
        // the branches were already decorated with the surrounding target type above
        valueType = targetType.targetType();
    }
    decorateWithCast(userTrueNode, semanticScope);
    decorateWithCast(userFalseNode, semanticScope);
    semanticScope.putDecoration(userConditionalNode, new ValueType(valueType));
}
/**
 * Visits an elvis expression ({@code a ?: b}), a shortcut for a null check on a
 * conditional expression: yields the LHS when non-null, otherwise the RHS.
 * Checks: type validation.
 *
 * <p>Decorations (target type, explicit/internal cast flags) must be pushed onto each
 * operand <em>before</em> it is visited, since the child visit consumes them. If the
 * elvis node itself has no target type, the operand value types are promoted to a
 * common reference type which both operands are then cast to.
 */
@Override
public void visitElvis(EElvis userElvisNode, SemanticScope semanticScope) {
    // An elvis expression produces a value; it can never be an assignment target.
    if (semanticScope.getCondition(userElvisNode, Write.class)) {
        throw userElvisNode.createError(
            new IllegalArgumentException("invalid assignment: cannot assign a value to elvis operation [?:]")
        );
    }
    // A value-producing expression whose result is discarded is an error.
    if (semanticScope.getCondition(userElvisNode, Read.class) == false) {
        throw userElvisNode.createError(new IllegalArgumentException("not a statement: result not used from elvis operation [?:]"));
    }
    TargetType targetType = semanticScope.getDecoration(userElvisNode, TargetType.class);
    // The LHS must be nullable, so the overall result can never be a primitive.
    if (targetType != null && targetType.targetType().isPrimitive()) {
        throw userElvisNode.createError(new IllegalArgumentException("Elvis operator cannot return primitives"));
    }
    // Visit the LHS with this node's decorations replicated onto it.
    AExpression userLeftNode = userElvisNode.getLeftNode();
    semanticScope.setCondition(userLeftNode, Read.class);
    semanticScope.copyDecoration(userElvisNode, userLeftNode, TargetType.class);
    semanticScope.replicateCondition(userElvisNode, userLeftNode, Explicit.class);
    semanticScope.replicateCondition(userElvisNode, userLeftNode, Internal.class);
    checkedVisit(userLeftNode, semanticScope);
    Class<?> leftValueType = semanticScope.getDecoration(userLeftNode, ValueType.class).valueType();
    // Visit the RHS the same way.
    AExpression userRightNode = userElvisNode.getRightNode();
    semanticScope.setCondition(userRightNode, Read.class);
    semanticScope.copyDecoration(userElvisNode, userRightNode, TargetType.class);
    semanticScope.replicateCondition(userElvisNode, userRightNode, Explicit.class);
    semanticScope.replicateCondition(userElvisNode, userRightNode, Internal.class);
    checkedVisit(userRightNode, semanticScope);
    Class<?> rightValueType = semanticScope.getDecoration(userRightNode, ValueType.class).valueType();
    // Reject degenerate uses where the null check is statically pointless.
    if (userLeftNode instanceof ENull) {
        throw userElvisNode.createError(new IllegalArgumentException("Extraneous elvis operator. LHS is null."));
    }
    if (userLeftNode instanceof EBooleanConstant
        || userLeftNode instanceof ENumeric
        || userLeftNode instanceof EDecimal
        || userLeftNode instanceof EString) {
        throw userElvisNode.createError(new IllegalArgumentException("Extraneous elvis operator. LHS is a constant."));
    }
    if (leftValueType.isPrimitive()) {
        throw userElvisNode.createError(new IllegalArgumentException("Extraneous elvis operator. LHS is a primitive."));
    }
    if (userRightNode instanceof ENull) {
        throw userElvisNode.createError(new IllegalArgumentException("Extraneous elvis operator. RHS is null."));
    }
    Class<?> valueType;
    if (targetType == null) {
        // No expected type from the parent: promote the operand types to a common type
        // and make that the target for both operands.
        Class<?> promote = AnalyzerCaster.promoteConditional(leftValueType, rightValueType);
        semanticScope.putDecoration(userLeftNode, new TargetType(promote));
        semanticScope.putDecoration(userRightNode, new TargetType(promote));
        valueType = promote;
    } else {
        valueType = targetType.targetType();
    }
    // Record the casts needed to coerce each operand to the chosen value type.
    decorateWithCast(userLeftNode, semanticScope);
    decorateWithCast(userRightNode, semanticScope);
    semanticScope.putDecoration(userElvisNode, new ValueType(valueType));
}
/**
 * Visits a list init expression ({@code [a, b, c]}) which is a shortcut for initializing
 * a list with pre-defined values.
 * Checks: type validation.
 *
 * <p>Resolves the {@code ArrayList} no-arg constructor and the single-argument
 * {@code add} method from the Painless lookup, then visits each value with a
 * {@code def} target type so any element expression can be added.
 */
@Override
public void visitListInit(EListInit userListInitNode, SemanticScope semanticScope) {
    if (semanticScope.getCondition(userListInitNode, Write.class)) {
        throw userListInitNode.createError(
            new IllegalArgumentException("invalid assignment: cannot assign a value to list initializer")
        );
    }
    if (semanticScope.getCondition(userListInitNode, Read.class) == false) {
        throw userListInitNode.createError(new IllegalArgumentException("not a statement: result not used from list initializer"));
    }
    // List initializers always build an ArrayList.
    Class<?> valueType = ArrayList.class;
    // The constructor/method must be allowlisted for the sandbox; a missing entry is an error.
    PainlessConstructor constructor = semanticScope.getScriptScope().getPainlessLookup().lookupPainlessConstructor(valueType, 0);
    if (constructor == null) {
        throw userListInitNode.createError(
            new IllegalArgumentException("constructor [" + typeToCanonicalTypeName(valueType) + ", <init>/0] not found")
        );
    }
    semanticScope.putDecoration(userListInitNode, new StandardPainlessConstructor(constructor));
    PainlessMethod method = semanticScope.getScriptScope().getPainlessLookup().lookupPainlessMethod(valueType, false, "add", 1);
    if (method == null) {
        throw userListInitNode.createError(
            new IllegalArgumentException("method [" + typeToCanonicalTypeName(valueType) + ", add/1] not found")
        );
    }
    semanticScope.putDecoration(userListInitNode, new StandardPainlessMethod(method));
    // Each element is read as def and cast internally, so heterogeneous elements are allowed.
    for (AExpression userValueNode : userListInitNode.getValueNodes()) {
        semanticScope.setCondition(userValueNode, Read.class);
        semanticScope.putDecoration(userValueNode, new TargetType(def.class));
        semanticScope.setCondition(userValueNode, Internal.class);
        checkedVisit(userValueNode, semanticScope);
        decorateWithCast(userValueNode, semanticScope);
    }
    semanticScope.putDecoration(userListInitNode, new ValueType(valueType));
}
/**
 * Visits a map init expression ({@code [k1:v1, k2:v2]}) which is a shortcut for
 * initializing a map with pre-defined keys and values.
 * Checks: type validation.
 *
 * <p>Resolves the {@code HashMap} no-arg constructor and the two-argument {@code put}
 * method, then visits each key/value pair with a {@code def} target type.
 */
@Override
public void visitMapInit(EMapInit userMapInitNode, SemanticScope semanticScope) {
    if (semanticScope.getCondition(userMapInitNode, Write.class)) {
        throw userMapInitNode.createError(new IllegalArgumentException("invalid assignment: cannot assign a value to map initializer"));
    }
    if (semanticScope.getCondition(userMapInitNode, Read.class) == false) {
        throw userMapInitNode.createError(new IllegalArgumentException("not a statement: result not used from map initializer"));
    }
    // Map initializers always build a HashMap.
    Class<?> valueType = HashMap.class;
    PainlessConstructor constructor = semanticScope.getScriptScope().getPainlessLookup().lookupPainlessConstructor(valueType, 0);
    if (constructor == null) {
        throw userMapInitNode.createError(
            new IllegalArgumentException("constructor [" + typeToCanonicalTypeName(valueType) + ", <init>/0] not found")
        );
    }
    semanticScope.putDecoration(userMapInitNode, new StandardPainlessConstructor(constructor));
    PainlessMethod method = semanticScope.getScriptScope().getPainlessLookup().lookupPainlessMethod(valueType, false, "put", 2);
    if (method == null) {
        throw userMapInitNode.createError(
            new IllegalArgumentException("method [" + typeToCanonicalTypeName(valueType) + ", put/2] not found")
        );
    }
    semanticScope.putDecoration(userMapInitNode, new StandardPainlessMethod(method));
    List<AExpression> userKeyNodes = userMapInitNode.getKeyNodes();
    List<AExpression> userValueNodes = userMapInitNode.getValueNodes();
    // Keys and values are parallel lists; a size mismatch means the parser built a bad tree.
    if (userKeyNodes.size() != userValueNodes.size()) {
        throw userMapInitNode.createError(new IllegalStateException("Illegal tree structure."));
    }
    // Visit each key and value as def with an internal cast, mirroring list init handling.
    for (int i = 0; i < userKeyNodes.size(); ++i) {
        AExpression userKeyNode = userKeyNodes.get(i);
        semanticScope.setCondition(userKeyNode, Read.class);
        semanticScope.putDecoration(userKeyNode, new TargetType(def.class));
        semanticScope.setCondition(userKeyNode, Internal.class);
        checkedVisit(userKeyNode, semanticScope);
        decorateWithCast(userKeyNode, semanticScope);
        AExpression userValueNode = userValueNodes.get(i);
        semanticScope.setCondition(userValueNode, Read.class);
        semanticScope.putDecoration(userValueNode, new TargetType(def.class));
        semanticScope.setCondition(userValueNode, Internal.class);
        checkedVisit(userValueNode, semanticScope);
        decorateWithCast(userValueNode, semanticScope);
    }
    semanticScope.putDecoration(userMapInitNode, new ValueType(valueType));
}
/**
 * Visits a new array expression which either allocates an array from per-dimension sizes
 * ({@code new int[n]}) or initializes a single-dimensional array with pre-defined values
 * ({@code new int[] {a, b}}).
 * Checks: type validation.
 *
 * <p>In initializer form each value node is an element and is targeted at the array's
 * component type; otherwise each value node is a dimension size targeted at {@code int}.
 */
@Override
public void visitNewArray(ENewArray userNewArrayNode, SemanticScope semanticScope) {
    if (semanticScope.getCondition(userNewArrayNode, Write.class)) {
        throw userNewArrayNode.createError(new IllegalArgumentException("invalid assignment: cannot assign a value to new array"));
    }
    if (semanticScope.getCondition(userNewArrayNode, Read.class) == false) {
        throw userNewArrayNode.createError(new IllegalArgumentException("not a statement: result not used from new array"));
    }
    String canonicalTypeName = userNewArrayNode.getCanonicalTypeName();
    Class<?> valueType = semanticScope.getScriptScope().getPainlessLookup().canonicalTypeNameToType(canonicalTypeName);
    if (valueType == null) {
        throw userNewArrayNode.createError(new IllegalArgumentException("Not a type [" + canonicalTypeName + "]."));
    }
    for (AExpression userValueNode : userNewArrayNode.getValueNodes()) {
        semanticScope.setCondition(userValueNode, Read.class);
        // Initializer values coerce to the component type; dimension sizes coerce to int.
        semanticScope.putDecoration(
            userValueNode,
            new TargetType(userNewArrayNode.isInitializer() ? valueType.getComponentType() : int.class)
        );
        semanticScope.setCondition(userValueNode, Internal.class);
        checkedVisit(userValueNode, semanticScope);
        decorateWithCast(userValueNode, semanticScope);
    }
    semanticScope.putDecoration(userNewArrayNode, new ValueType(valueType));
}
/**
 * Visits a new obj expression which creates a new object and calls its constructor.
 * Checks: type validation, constructor resolution, argument arity/typing.
 */
@Override
public void visitNewObj(ENewObj userNewObjNode, SemanticScope semanticScope) {
    String canonicalTypeName = userNewObjNode.getCanonicalTypeName();
    List<AExpression> userArgumentNodes = userNewObjNode.getArgumentNodes();
    int userArgumentsSize = userArgumentNodes.size();
    // A constructor call produces a value; it can never be an assignment target.
    if (semanticScope.getCondition(userNewObjNode, Write.class)) {
        throw userNewObjNode.createError(
            new IllegalArgumentException(
                Strings.format(
                    "invalid assignment cannot assign a value to new object with constructor [%s/%d]",
                    canonicalTypeName,
                    userArgumentsSize
                )
            )
        );
    }
    ScriptScope scriptScope = semanticScope.getScriptScope();
    Class<?> valueType = scriptScope.getPainlessLookup().canonicalTypeNameToType(canonicalTypeName);
    if (valueType == null) {
        throw userNewObjNode.createError(new IllegalArgumentException("Not a type [" + canonicalTypeName + "]."));
    }
    // The constructor must be allowlisted for the sandbox; resolution is by type + arity.
    PainlessConstructor constructor = scriptScope.getPainlessLookup().lookupPainlessConstructor(valueType, userArgumentsSize);
    if (constructor == null) {
        throw userNewObjNode.createError(
            new IllegalArgumentException(
                Strings.format("constructor [%s, <init>/%d] not found", typeToCanonicalTypeName(valueType), userArgumentsSize)
            )
        );
    }
    scriptScope.putDecoration(userNewObjNode, new StandardPainlessConstructor(constructor));
    // Propagate non-determinism if the constructor is annotated as such.
    scriptScope.markNonDeterministic(constructor.annotations().containsKey(NonDeterministicAnnotation.class));
    if (constructor.typeParameters().size() != userArgumentsSize) {
        throw userNewObjNode.createError(
            new IllegalArgumentException(
                Strings.format(
                    "When calling constructor on type [%s] expected [%d] arguments, but found [%d].",
                    PainlessLookupUtility.typeToCanonicalTypeName(valueType),
                    constructor.typeParameters().size(),
                    userArgumentsSize
                )
            )
        );
    }
    // Visit each argument with the corresponding declared parameter type as its target.
    for (int i = 0; i < userArgumentsSize; ++i) {
        AExpression userArgumentNode = userArgumentNodes.get(i);
        semanticScope.setCondition(userArgumentNode, Read.class);
        semanticScope.putDecoration(userArgumentNode, new TargetType(constructor.typeParameters().get(i)));
        semanticScope.setCondition(userArgumentNode, Internal.class);
        checkedVisit(userArgumentNode, semanticScope);
        decorateWithCast(userArgumentNode, semanticScope);
    }
    semanticScope.putDecoration(userNewObjNode, new ValueType(valueType));
}
/**
* Visits a call local expression which is a method call with no qualifier (prefix).
* Checks: type validation, method resolution
*/
@Override
public void visitCallLocal(ECallLocal userCallLocalNode, SemanticScope semanticScope) {
String methodName = userCallLocalNode.getMethodName();
List<AExpression> userArgumentNodes = userCallLocalNode.getArgumentNodes();
int userArgumentsSize = userArgumentNodes.size();
if (semanticScope.getCondition(userCallLocalNode, Write.class)) {
throw userCallLocalNode.createError(
new IllegalArgumentException(
"invalid assignment: cannot assign a value to function call [" + methodName + "/" + userArgumentsSize + "]"
)
);
}
ScriptScope scriptScope = semanticScope.getScriptScope();
FunctionTable.LocalFunction localFunction = null;
PainlessMethod thisMethod = null;
PainlessMethod importedMethod = null;
PainlessClassBinding classBinding = null;
int classBindingOffset = 0;
PainlessInstanceBinding instanceBinding = null;
Class<?> valueType;
localFunction = scriptScope.getFunctionTable().getFunction(methodName, userArgumentsSize);
// user cannot call internal functions, reset to null if an internal function is found
if (localFunction != null && localFunction.isInternal()) {
localFunction = null;
}
if (localFunction == null) {
thisMethod = scriptScope.getPainlessLookup()
.lookupPainlessMethod(scriptScope.getScriptClassInfo().getBaseClass(), false, methodName, userArgumentsSize);
if (thisMethod == null) {
importedMethod = scriptScope.getPainlessLookup().lookupImportedPainlessMethod(methodName, userArgumentsSize);
if (importedMethod == null) {
classBinding = scriptScope.getPainlessLookup().lookupPainlessClassBinding(methodName, userArgumentsSize);
// check to see if this
|
DefaultSemanticAnalysisPhase
|
java
|
apache__camel
|
components/camel-vertx/camel-vertx-http/src/main/java/org/apache/camel/component/vertx/http/VertxHttpConfiguration.java
|
{
"start": 1368,
"end": 13053
}
|
/**
 * URI configuration for the Camel Vert.x HTTP producer. Fields annotated with
 * {@code @UriPath}/{@code @UriParam} are bound from endpoint URI options; the
 * javadoc on each setter is the user-facing option description.
 */
class ____ {
    // --- endpoint path and general producer options ---
    @UriPath(name = "httpUri")
    @Metadata(required = true)
    private URI httpUri;
    @UriParam(label = "producer",
              enums = "OPTIONS,GET,HEAD,POST,PUT,DELETE,TRACE,CONNECT,PATCH,PROPFIND,PROPPATCH,MKCOL,COPY,MOVE,LOCK,UNLOCK,MKCALENDAR,VERSION_CONTROL,REPORT,CHECKIN,CHECKOUT,UNCHECKOUT,MKWORKSPACE,UPDATE,LABEL,MERGE,BASELINE_CONTROL,MKACTIVITY,ORDERPATCH,ACL,SEARCH")
    private HttpMethod httpMethod;
    @UriParam(label = "producer", defaultValue = "-1")
    private long timeout = -1;
    @UriParam(label = "producer", defaultValue = "60000")
    private int connectTimeout = ClientOptionsBase.DEFAULT_CONNECT_TIMEOUT;
    @UriParam(label = "producer", defaultValue = "VertxHttpHeaderFilterStrategy")
    private HeaderFilterStrategy headerFilterStrategy = new VertxHttpHeaderFilterStrategy();
    @UriParam(label = "producer")
    private VertxHttpBinding vertxHttpBinding;
    @UriParam(label = "producer", defaultValue = "true")
    private boolean throwExceptionOnFailure = true;
    @UriParam(label = "producer", defaultValue = "false")
    private boolean transferException;
    @UriParam(label = "producer", defaultValue = "200-299")
    private String okStatusCodeRange = "200-299";
    @UriParam(label = "producer", defaultValue = "false")
    private boolean sessionManagement;
    @UriParam(label = "producer", defaultValue = "InMemoryCookieStore")
    private CookieStore cookieStore;
    @UriParam(label = "producer", defaultValue = "false")
    private boolean useCompression;
    @UriParam(label = "producer", defaultValue = "true")
    private boolean responsePayloadAsByteArray = true;
    // --- security options ---
    @UriParam(label = "security")
    private String basicAuthUsername;
    @UriParam(label = "security")
    private String basicAuthPassword;
    @UriParam(label = "security")
    private String bearerToken;
    @UriParam(label = "security")
    private SSLContextParameters sslContextParameters;
    // --- proxy options ---
    @UriParam(label = "proxy")
    private String proxyHost;
    @UriParam(label = "proxy")
    private Integer proxyPort;
    @UriParam(label = "proxy", enums = "HTTP,SOCKS4,SOCKS5")
    private ProxyType proxyType;
    @UriParam(label = "proxy")
    private String proxyUsername;
    @UriParam(label = "proxy")
    private String proxyPassword;
    // --- advanced producer options ---
    @UriParam(label = "producer")
    private WebClientOptions webClientOptions;
    @UriParam(label = "producer",
              description = "Whether to force using multipart/form-data for easy file uploads. This is only to be used for uploading the message body as a single entity form-data. For uploading multiple entries then use io.vertx.ext.web.multipart.MultipartForm to build the form.")
    private boolean multipartUpload;
    @UriParam(label = "producer", defaultValue = "data",
              description = "The name of the multipart/form-data when multipartUpload is enabled.")
    private String multipartUploadName = "data";
    @UriParam(label = "producer",
              description = "If the option is true, the Exchange.HTTP_URI header will be ignored and the endpoint URI will be used for the HTTP request. You may also set option throwExceptionOnFailure to false to return the fault response back to the client.")
    private boolean bridgeEndpoint;
    /**
     * The HTTP URI to connect to
     */
    public void setHttpUri(URI httpUri) {
        this.httpUri = httpUri;
    }
    public URI getHttpUri() {
        return httpUri;
    }
    /**
     * The HTTP method to use. The HttpMethod header cannot override this option if set
     */
    public void setHttpMethod(HttpMethod httpMethod) {
        this.httpMethod = httpMethod;
    }
    /**
     * The HTTP method to use. The HttpMethod header cannot override this option if set
     */
    // String overload used when the option is configured as text on the endpoint URI.
    public void setHttpMethod(String httpMethod) {
        this.httpMethod = HttpMethod.valueOf(httpMethod);
    }
    public HttpMethod getHttpMethod() {
        return httpMethod;
    }
    /**
     * The amount of time in milliseconds after which if the request does not return any data within the timeout period
     * a TimeoutException fails the request.
     * <p/>
     * Setting zero or a negative value disables the timeout.
     */
    public void setTimeout(long timeout) {
        this.timeout = timeout;
    }
    public long getTimeout() {
        return timeout;
    }
    /**
     * The amount of time in milliseconds until a connection is established. A timeout value of zero is interpreted as
     * an infinite timeout.
     */
    public void setConnectTimeout(int connectTimeout) {
        this.connectTimeout = connectTimeout;
    }
    public int getConnectTimeout() {
        return connectTimeout;
    }
    /**
     * A custom org.apache.camel.spi.HeaderFilterStrategy to filter header to and from Camel message.
     */
    public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) {
        this.headerFilterStrategy = headerFilterStrategy;
    }
    public HeaderFilterStrategy getHeaderFilterStrategy() {
        return headerFilterStrategy;
    }
    /**
     * A custom VertxHttpBinding which can control how to bind between Vert.x and Camel.
     */
    public void setVertxHttpBinding(VertxHttpBinding vertxHttpBinding) {
        this.vertxHttpBinding = vertxHttpBinding;
    }
    public VertxHttpBinding getVertxHttpBinding() {
        return vertxHttpBinding;
    }
    /**
     * Disable throwing HttpOperationFailedException in case of failed responses from the remote server
     */
    public void setThrowExceptionOnFailure(boolean throwExceptionOnFailure) {
        this.throwExceptionOnFailure = throwExceptionOnFailure;
    }
    public boolean isThrowExceptionOnFailure() {
        return throwExceptionOnFailure;
    }
    /**
     * If enabled and an Exchange failed processing on the consumer side, and if the caused Exception was sent back
     * serialized in the response as a application/x-java-serialized-object content type. On the producer side the
     * exception will be deserialized and thrown as is, instead of HttpOperationFailedException. The caused exception is
     * required to be serialized.
     * <p/>
     * This is by default turned off. If you enable this then be aware that Camel will deserialize the incoming data
     * from the request to a Java object, which can be a potential security risk.
     */
    public void setTransferException(boolean transferException) {
        this.transferException = transferException;
    }
    public boolean isTransferException() {
        return transferException;
    }
    /**
     * The status codes which are considered a success response. The values are inclusive. Multiple ranges can be
     * defined, separated by comma, e.g. 200-204,209,301-304. Each range must be a single number or from-to with the
     * dash included
     */
    public void setOkStatusCodeRange(String okStatusCodeRange) {
        this.okStatusCodeRange = okStatusCodeRange;
    }
    public String getOkStatusCodeRange() {
        return okStatusCodeRange;
    }
    /**
     * Enables session management via WebClientSession. By default the client is configured to use an in-memory
     * CookieStore. The cookieStore option can be used to override this
     */
    public void setSessionManagement(boolean sessionManagement) {
        this.sessionManagement = sessionManagement;
    }
    public boolean isSessionManagement() {
        return sessionManagement;
    }
    /**
     * A custom CookieStore to use when session management is enabled. If this option is not set then an in-memory
     * CookieStore is used
     */
    public void setCookieStore(CookieStore cookieStore) {
        this.cookieStore = cookieStore;
    }
    public CookieStore getCookieStore() {
        return cookieStore;
    }
    /**
     * Set whether compression is enabled to handled compressed (E.g gzipped) responses
     */
    public void setUseCompression(boolean useCompression) {
        this.useCompression = useCompression;
    }
    public boolean isUseCompression() {
        return useCompression;
    }
    public boolean isResponsePayloadAsByteArray() {
        return responsePayloadAsByteArray;
    }
    /**
     * Whether the response body should be byte[] or as io.vertx.core.buffer.Buffer
     */
    public void setResponsePayloadAsByteArray(boolean responsePayloadAsByteArray) {
        this.responsePayloadAsByteArray = responsePayloadAsByteArray;
    }
    /**
     * The user name to use for basic authentication
     */
    public void setBasicAuthUsername(String basicAuthUsername) {
        this.basicAuthUsername = basicAuthUsername;
    }
    public String getBasicAuthUsername() {
        return basicAuthUsername;
    }
    /**
     * The password to use for basic authentication
     */
    public void setBasicAuthPassword(String basicAuthPassword) {
        this.basicAuthPassword = basicAuthPassword;
    }
    public String getBasicAuthPassword() {
        return basicAuthPassword;
    }
    /**
     * The bearer token to use for bearer token authentication
     */
    public void setBearerToken(String bearerToken) {
        this.bearerToken = bearerToken;
    }
    public String getBearerToken() {
        return bearerToken;
    }
    /**
     * The proxy server host address
     */
    public void setProxyHost(String proxyHost) {
        this.proxyHost = proxyHost;
    }
    public String getProxyHost() {
        return proxyHost;
    }
    /**
     * The proxy server port
     */
    public void setProxyPort(Integer proxyPort) {
        this.proxyPort = proxyPort;
    }
    public Integer getProxyPort() {
        return proxyPort;
    }
    /**
     * The proxy server username if authentication is required
     */
    public void setProxyUsername(String proxyUsername) {
        this.proxyUsername = proxyUsername;
    }
    public String getProxyUsername() {
        return proxyUsername;
    }
    /**
     * The proxy server password if authentication is required
     */
    public void setProxyPassword(String proxyPassword) {
        this.proxyPassword = proxyPassword;
    }
    public String getProxyPassword() {
        return proxyPassword;
    }
    /**
     * The proxy server type
     */
    public void setProxyType(ProxyType proxyType) {
        this.proxyType = proxyType;
    }
    public ProxyType getProxyType() {
        return proxyType;
    }
    /**
     * Sets customized options for configuring the Vert.x WebClient
     */
    public void setWebClientOptions(WebClientOptions webClientOptions) {
        this.webClientOptions = webClientOptions;
    }
    public WebClientOptions getWebClientOptions() {
        return webClientOptions;
    }
    /**
     * To configure security using SSLContextParameters
     */
    public SSLContextParameters getSslContextParameters() {
        return sslContextParameters;
    }
    public void setSslContextParameters(SSLContextParameters sslContextParameters) {
        this.sslContextParameters = sslContextParameters;
    }
    public boolean isMultipartUpload() {
        return multipartUpload;
    }
    public void setMultipartUpload(boolean multipartUpload) {
        this.multipartUpload = multipartUpload;
    }
    public String getMultipartUploadName() {
        return multipartUploadName;
    }
    public void setMultipartUploadName(String multipartUploadName) {
        this.multipartUploadName = multipartUploadName;
    }
    public boolean isBridgeEndpoint() {
        return bridgeEndpoint;
    }
    public void setBridgeEndpoint(boolean bridgeEndpoint) {
        this.bridgeEndpoint = bridgeEndpoint;
    }
}
|
VertxHttpConfiguration
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/deser/arraymapping/ArrayMappingErrorTest.java
|
{
"start": 246,
"end": 611
}
|
class ____ extends TestCase {
public void test_for_error() throws Exception {
Exception error = null;
try {
JSON.parseObject("[1001,2002]", Model.class, Feature.SupportArrayToBean);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public static
|
ArrayMappingErrorTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/EmptyIOStatisticsContextImpl.java
|
{
"start": 1237,
"end": 2341
}
|
class ____ implements IOStatisticsContext {
private static final IOStatisticsContext EMPTY_CONTEXT = new EmptyIOStatisticsContextImpl();
private EmptyIOStatisticsContextImpl() {
}
/**
* Create a new empty snapshot.
* A new one is always created for isolation.
*
* @return a statistics snapshot
*/
@Override
public IOStatisticsSnapshot snapshot() {
return new IOStatisticsSnapshot();
}
@Override
public IOStatisticsAggregator getAggregator() {
return EmptyIOStatisticsStore.getInstance();
}
@Override
public IOStatistics getIOStatistics() {
return EmptyIOStatistics.getInstance();
}
@Override
public void reset() {}
/**
* The ID is always 0.
* As the real context implementation counter starts at 1,
* we are guaranteed to have unique IDs even between them and
* the empty context.
* @return 0
*/
@Override
public long getID() {
return 0;
}
/**
* Get the single instance.
* @return an instance.
*/
static IOStatisticsContext getInstance() {
return EMPTY_CONTEXT;
}
}
|
EmptyIOStatisticsContextImpl
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/command/CommandBatchService.java
|
{
"start": 2927,
"end": 37548
}
|
/**
 * Per-node batch of queued commands. EVAL commands are additionally tracked in a
 * separate list, and the entry remembers whether every queued command is read-only
 * (so the batch can be routed to a slave).
 */
class ____ {
    // EVAL commands collected separately; presumably used for script handling — TODO confirm with callers.
    final List<BatchCommandData<?, ?>> evalCommands = new LinkedList<>();
    // Thread-safe because commands may be queued from multiple threads.
    final Deque<BatchCommandData<?, ?>> commands = new ConcurrentLinkedDeque<>();
    // Flips to false once any write command is added; volatile for cross-thread visibility.
    volatile boolean readOnlyMode = true;
    /** Queues a command, also recording it in {@link #evalCommands} when it is an EVAL. */
    public void addCommand(BatchCommandData<?, ?> command) {
        if (RedisCommands.EVAL_OBJECT.getName().equals(command.getCommand().getName())) {
            evalCommands.add(command);
        }
        commands.add(command);
    }
    public List<BatchCommandData<?, ?>> getEvalCommands() {
        return evalCommands;
    }
    /** Prepends a command (e.g. a command that must run before the queued ones). */
    public void addFirstCommand(BatchCommandData<?, ?> command) {
        commands.addFirst(command);
    }
    public void add(BatchCommandData<?, ?> command) {
        commands.add(command);
    }
    public Deque<BatchCommandData<?, ?>> getCommands() {
        return commands;
    }
    /**
     * Sorts queued commands by their submission index. First scans for an already
     * strictly-increasing sequence and returns without copying if so; otherwise
     * rebuilds the deque from a sorted array.
     */
    public void sortCommands() {
        int index = 0;
        boolean sorted = true;
        // NOTE(review): the fast path requires indexes strictly greater than 0 to start;
        // a first command with index 0 would force a sort — presumably indexes start at 1 (AtomicInteger.incrementAndGet). TODO confirm.
        for (BatchCommandData<?, ?> command : commands) {
            if (command.getIndex() > index) {
                index = command.getIndex();
            } else {
                sorted = false;
                break;
            }
        }
        if (sorted) {
            return;
        }
        BatchCommandData<?, ?>[] cmds = commands.toArray(new BatchCommandData[0]);
        Arrays.sort(cmds);
        commands.clear();
        Collections.addAll(commands, cmds);
    }
    public void setReadOnlyMode(boolean readOnlyMode) {
        this.readOnlyMode = readOnlyMode;
    }
    public boolean isReadOnlyMode() {
        return readOnlyMode;
    }
    /** Clears recorded errors on every queued command so the batch can be retried. */
    public void clearErrors() {
        for (BatchCommandData<?, ?> commandEntry : commands) {
            commandEntry.clearError();
        }
    }
}
// Monotonic counter assigning a submission order index to each queued command.
private final AtomicInteger index = new AtomicInteger();
// Commands grouped per target node source.
private final ConcurrentMap<NodeSource, Entry> commands = new ConcurrentHashMap<>();
// Commands re-grouped per master/slave entry; populated elsewhere (defaults to empty).
private Map<MasterSlaveEntry, Entry> aggregatedCommands = Collections.emptyMap();
// Connections acquired per master/slave entry for Redis-based (MULTI) queue execution.
private final ConcurrentMap<MasterSlaveEntry, ConnectionEntry> connections = new ConcurrentHashMap<>();
private final BatchOptions options;
// Nested batch services whose results are appended to this batch's result list.
private final Map<CompletableFuture<?>, List<CommandBatchService>> nestedServices = new ConcurrentHashMap<>();
// Set once the batch has been executed or discarded; guards against reuse.
private final AtomicBoolean executed = new AtomicBoolean();
// Retry policy: taken from BatchOptions when set, otherwise from the global config.
private final DelayStrategy retryDelay;
private final int retryAttempts;
/** Creates a batch service with default options and reference type. */
public CommandBatchService(CommandAsyncExecutor executor) {
    this(executor, RedissonObjectBuilder.ReferenceType.DEFAULT);
}
/** Creates a batch service with default options and the given reference type. */
public CommandBatchService(CommandAsyncExecutor executor, RedissonObjectBuilder.ReferenceType referenceType) {
    this(executor.getConnectionManager(), BatchOptions.defaults(), executor.getObjectBuilder(), referenceType);
}
/** Creates a batch service with the given options and default reference type. */
public CommandBatchService(CommandAsyncExecutor executor, BatchOptions options) {
    this(executor.getConnectionManager(), options, executor.getObjectBuilder(), RedissonObjectBuilder.ReferenceType.DEFAULT);
}
public CommandBatchService(CommandAsyncExecutor executor, BatchOptions options, RedissonObjectBuilder.ReferenceType referenceType) {
    this(executor.getConnectionManager(), options, executor.getObjectBuilder(), referenceType);
}
// Canonical constructor: resolves retry attempts/delay from BatchOptions,
// falling back to the global service configuration when unset.
private CommandBatchService(ConnectionManager connectionManager, BatchOptions options,
                                RedissonObjectBuilder objectBuilder, RedissonObjectBuilder.ReferenceType referenceType) {
    super(connectionManager, objectBuilder, referenceType);
    this.options = options;
    // Negative retryAttempts means "not configured" on BatchOptions.
    if (options.getRetryAttempts() >= 0) {
        this.retryAttempts = options.getRetryAttempts();
    } else {
        this.retryAttempts = connectionManager.getServiceManager().getConfig().getRetryAttempts();
    }
    if (options.getRetryDelay() != null) {
        this.retryDelay = this.options.getRetryDelay();
    } else {
        this.retryDelay = connectionManager.getServiceManager().getConfig().getRetryDelay();
    }
}
/** Returns the options this batch was created with. */
public BatchOptions getOptions() {
    return options;
}
/** Registers a nested batch; its future's result is appended to this batch's results. */
public void add(CompletableFuture<?> future, List<CommandBatchService> services) {
    nestedServices.put(future, services);
}
/**
 * Queues a command instead of sending it immediately. The command is executed
 * later when the whole batch runs. Uses a Redis-transaction-aware executor when
 * the batch operates as a Redis-based (MULTI/EXEC) queue.
 */
@Override
public <V, R> RFuture<R> async(boolean readOnlyMode, NodeSource nodeSource,
                               Codec codec, RedisCommand<V> command, Object[] params, boolean ignoreRedirect, boolean noRetry) {
    CompletableFuture<R> mainPromise = createPromise();
    if (isRedisBasedQueue()) {
        // REDIS_READ_ATOMIC executes the transaction against a read node.
        boolean isReadOnly = options.getExecutionMode() == ExecutionMode.REDIS_READ_ATOMIC;
        RedisExecutor<V, R> executor = new RedisQueuedBatchExecutor<>(isReadOnly, nodeSource, codec, command, params, mainPromise,
                false, connectionManager, objectBuilder, commands, connections, options, index, executed,
                referenceType, noRetry, aggregatedCommands);
        executor.execute();
    } else {
        RedisExecutor<V, R> executor = new RedisBatchExecutor<>(readOnlyMode, nodeSource, codec, command, params, mainPromise,
                false, connectionManager, objectBuilder, commands, options, index, executed, referenceType, noRetry);
        executor.execute();
    }
    return new CompletableFutureWrapper<>(mainPromise);
}
/**
 * Creates the per-command promise; Redis-based (MULTI/EXEC) batches need a
 * BatchPromise so the queued reply and final reply can be tracked separately.
 */
@Override
public <R> CompletableFuture<R> createPromise() {
    if (isRedisBasedQueue()) {
        return new BatchPromise<>();
    }
    return new CompletableFuture<>();
}
/** Synchronously discards the batch; see {@link #discardAsync()}. */
public void discard() {
    get(discardAsync());
}
/**
 * Discards the batch: sends DISCARD for Redis-based (MULTI) batches, otherwise
 * releases any reference-counted command parameters that were queued.
 *
 * @throws IllegalStateException if the batch was already executed or discarded
 */
public RFuture<Void> discardAsync() {
    // NOTE(review): get() followed by set(true) is check-then-act, not atomic;
    // presumably batches are confined to one caller thread — confirm before relying on it.
    if (executed.get()) {
        throw new IllegalStateException("Batch already executed!");
    }
    executed.set(true);
    if (isRedisBasedQueue()) {
        return writeAllVoidAsync(RedisCommands.DISCARD);
    }
    // Release buffers held by queued command parameters to avoid leaks.
    commands.values().stream()
            .flatMap(e -> e.getCommands().stream())
            .flatMap(c -> Arrays.stream(c.getParams()))
            .forEach(obj -> ReferenceCountUtil.safeRelease(obj));
    return new CompletableFutureWrapper<>((Void) null);
}
/** Synchronously executes the batch; see {@link #executeAsync()}. */
public BatchResult<?> execute() {
    RFuture<BatchResult<?>> f = executeAsync();
    return get(f);
}
/** Executes the batch, exposing only completion (the {@code BatchResult} is discarded). */
public RFuture<Void> executeAsyncVoid() {
    CompletableFuture<Void> completion = executeAsync()
            .toCompletableFuture()
            .thenApply(ignored -> null);
    return new CompletableFutureWrapper<>(completion);
}
/** Whether this batch has already been executed or discarded. */
public boolean isExecuted() {
    return executed.get();
}
/**
 * Executes all queued commands asynchronously.
 *
 * <p>An empty batch completes immediately; Redis-based (MULTI/EXEC) batches are
 * delegated to {@link #executeRedisBasedQueue()}. Otherwise the per-node command
 * entries are dispatched and, once the internal {@code voidPromise} completes,
 * per-command results are collected (in submission order) into a
 * {@link BatchResult}, counting synced slaves from WAIT replies and appending
 * nested-batch results.
 *
 * @return future completing with the batch result, or exceptionally on failure
 * @throws IllegalStateException if the batch was already executed or discarded
 */
public RFuture<BatchResult<?>> executeAsync() {
    if (executed.get()) {
        throw new IllegalStateException("Batch already executed!");
    }
    if (commands.isEmpty() && nestedServices.isEmpty()) {
        executed.set(true);
        BatchResult<Object> result = new BatchResult<>(Collections.emptyList(), 0);
        return new CompletableFutureWrapper<>(result);
    }
    if (isRedisBasedQueue()) {
        return executeRedisBasedQueue();
    }
    CompletableFuture<BatchResult<?>> promise = new CompletableFuture<>();
    CompletableFuture<Map<NodeSource, Entry>> voidPromise = new CompletableFuture<>();
    if (this.options.isSkipResult()
            && this.options.getSyncSlaves() == 0) {
        // Fire-and-forget mode: no per-command results are gathered.
        voidPromise.whenComplete((res, ex) -> {
            executed.set(true);
            if (ex != null) {
                for (Entry e : commands.values()) {
                    e.getCommands().forEach(t -> t.tryFailure(ex));
                }
                promise.completeExceptionally(ex);
                commands.clear();
                nestedServices.clear();
                return;
            }
            commands.clear();
            nestedServices.clear();
            promise.complete(new BatchResult<>(Collections.emptyList(), 0));
        });
    } else {
        voidPromise.whenComplete((res, ex) -> {
            executed.set(true);
            if (ex != null) {
                for (Entry e : commands.values()) {
                    e.getCommands().forEach(t -> t.tryFailure(ex));
                }
                promise.completeExceptionally(ex);
                commands.clear();
                nestedServices.clear();
                return;
            }
            try {
                List<Object> responses = new ArrayList<>();
                int syncedSlaves = 0;
                if (!res.isEmpty()) {
                    // Restore submission order across all node entries before collecting results.
                    List<BatchCommandData> entries = new ArrayList<BatchCommandData>();
                    for (Entry e : res.values()) {
                        entries.addAll(e.getCommands());
                    }
                    Collections.sort(entries);
                    for (BatchCommandData<?, ?> commandEntry : entries) {
                        if (isWaitCommand(commandEntry)) {
                            // WAIT replies contribute to the synced-slaves count, not to results.
                            if (commandEntry.getCommand().getName().equals(RedisCommands.WAIT.getName())) {
                                syncedSlaves += ((CompletableFuture<Integer>) commandEntry.getPromise()).getNow(0);
                            } else {
                                List<Integer> list = ((CompletableFuture<List<Integer>>) commandEntry.getPromise()).getNow(Arrays.asList(0, 0));
                                syncedSlaves += list.get(1);
                            }
                        } else if (!commandEntry.getCommand().getName().equals(RedisCommands.MULTI.getName())
                                && !commandEntry.getCommand().getName().equals(RedisCommands.EXEC.getName())
                                && !this.options.isSkipResult()) {
                            if (commandEntry.getPromise().isCancelled()) {
                                continue;
                            }
                            Object entryResult = commandEntry.getPromise().getNow(null);
                            try {
                                // Resolve RObject references embedded in the reply, if any.
                                if (objectBuilder != null) {
                                    entryResult = objectBuilder.tryHandleReference(entryResult, referenceType);
                                }
                            } catch (ReflectiveOperationException exc) {
                                log.error("Unable to handle reference from {}", entryResult, exc);
                            }
                            responses.add(entryResult);
                        }
                    }
                }
                if (!nestedServices.isEmpty()) {
                    for (CompletableFuture<?> f : nestedServices.keySet()) {
                        responses.add(f.getNow(null));
                    }
                }
                BatchResult<Object> result = new BatchResult<>(responses, syncedSlaves);
                promise.complete(result);
            } catch (Exception e) {
                // FIX: previously completed with 'ex', which is always null on this path
                // (the ex != null branch returned above); completeExceptionally(null)
                // throws NPE and would mask the real failure 'e'.
                promise.completeExceptionally(e);
                throw e;
            }
            commands.clear();
            nestedServices.clear();
        });
    }
    execute(voidPromise);
    return new CompletableFutureWrapper<>(promise);
}
/**
 * Dispatches the resolved command groups to their nodes.
 * <p>
 * Pipeline: resolve node sources (with retries), kick off nested batch services,
 * pre-load Lua scripts if script caching is enabled, then wrap each node's group
 * with MULTI/EXEC, CLIENT REPLY OFF/ON and WAIT/WAITAOF framing as configured,
 * and hand each group to a {@code RedisCommonBatchExecutor}.
 *
 * @param voidPromise completed with the per-node command groups once every
 *                    node (and every nested service) has finished, or
 *                    exceptionally on the first failure
 */
private void execute(CompletableFuture<Map<NodeSource, Entry>> voidPromise) {
    AtomicInteger attempt = new AtomicInteger();
    CompletableFuture<Map<NodeSource, Entry>> future = new CompletableFuture<>();
    resolveCommandsInMemory(attempt, future);
    future.whenComplete((r, ex) -> {
        try {
            if (ex != null) {
                voidPromise.completeExceptionally(ex);
                return;
            }

            // one slot per node group plus one per nested service; voidPromise
            // resolves when the counter reaches zero
            AtomicInteger slots = new AtomicInteger(r.size());

            for (Map.Entry<CompletableFuture<?>, List<CommandBatchService>> entry : nestedServices.entrySet()) {
                slots.incrementAndGet();
                for (CommandBatchService service : entry.getValue()) {
                    service.executeAsync();
                }

                entry.getKey().whenComplete((res, e) -> {
                    if (e == null) {
                        if (slots.decrementAndGet() == 0) {
                            voidPromise.complete(r);
                        }
                    } else {
                        // cancellation is reported as-is; other failures arrive
                        // wrapped (CompletionException), so unwrap the cause
                        if (entry.getKey().isCancelled()) {
                            voidPromise.completeExceptionally(e);
                        } else {
                            voidPromise.completeExceptionally(e.getCause());
                        }
                    }
                });
            }

            CompletionStage<Void> f = loadScripts(r);
            f.whenComplete((res, ex1) -> {
                try {
                    if (ex1 != null) {
                        voidPromise.completeExceptionally(ex1.getCause());
                        return;
                    }
                    for (Map.Entry<NodeSource, Entry> e : r.entrySet()) {
                        Entry entry = e.getValue();

                        // atomic modes wrap the group in a MULTI ... EXEC transaction
                        if (this.options.getExecutionMode() != ExecutionMode.IN_MEMORY) {
                            BatchCommandData<?, ?> multiCommand = new BatchCommandData<>(RedisCommands.MULTI, new Object[] {}, index.incrementAndGet());
                            entry.addFirstCommand(multiCommand);
                            BatchCommandData<?, ?> execCommand = new BatchCommandData<>(RedisCommands.EXEC, new Object[] {}, index.incrementAndGet());
                            entry.add(execCommand);
                        }

                        // suppress server replies for the whole group when results are skipped
                        if (this.options.isSkipResult()) {
                            BatchCommandData<?, ?> offCommand = new BatchCommandData<>(RedisCommands.CLIENT_REPLY, new Object[] { "OFF" }, index.incrementAndGet());
                            entry.addFirstCommand(offCommand);
                            BatchCommandData<?, ?> onCommand = new BatchCommandData<>(RedisCommands.CLIENT_REPLY, new Object[] { "ON" }, index.incrementAndGet());
                            entry.add(onCommand);
                        }

                        // append WAIT/WAITAOF so replica (and optionally AOF) sync is awaited
                        if (this.options.getSyncSlaves() > 0) {
                            BatchCommandData<?, ?> waitCommand;
                            if (this.options.isSyncAOF()) {
                                waitCommand = new BatchCommandData<>(RedisCommands.WAITAOF,
                                        new Object[]{this.options.getSyncLocals(), this.options.getSyncSlaves(), this.options.getSyncTimeout()}, index.incrementAndGet());
                            } else {
                                waitCommand = new BatchCommandData<>(RedisCommands.WAIT,
                                        new Object[]{this.options.getSyncSlaves(), this.options.getSyncTimeout()}, index.incrementAndGet());
                            }
                            entry.add(waitCommand);
                        }

                        // per-node options; remaining retry budget accounts for
                        // attempts already spent during node resolution
                        BatchOptions options = BatchOptions.defaults()
                                .executionMode(this.options.getExecutionMode())
                                .responseTimeout(this.options.getResponseTimeout(), TimeUnit.MILLISECONDS)
                                .retryAttempts(Math.max(0, retryAttempts - attempt.get()))
                                .retryDelay(retryDelay);
                        if (this.options.isSkipResult()) {
                            options.skipResult();
                        }
                        if (this.options.isSyncAOF()) {
                            options.syncAOF(this.options.getSyncLocals(), this.options.getSyncSlaves(), Duration.ofMillis(this.options.getSyncTimeout()));
                        } else {
                            options.sync(this.options.getSyncSlaves(), Duration.ofMillis(this.options.getSyncTimeout()));
                        }

                        CompletableFuture<Void> mainPromise = new CompletableFuture<>();
                        mainPromise.whenComplete((res1, ex2) -> {
                            if (ex2 != null) {
                                voidPromise.completeExceptionally(ex2);
                                return;
                            }
                            voidPromise.complete(r);
                        });
                        RedisCommonBatchExecutor executor = new RedisCommonBatchExecutor(e.getKey(), mainPromise,
                                connectionManager, options, e.getValue(), slots, referenceType, false);
                        executor.execute();
                    }
                } catch (Exception e) {
                    voidPromise.completeExceptionally(e);
                }
            });
        } catch (Exception e) {
            voidPromise.completeExceptionally(e);
        }
    });
}
/**
 * Pre-loads the Lua scripts of all queued EVAL commands when the script cache
 * is enabled, and rewrites each EVAL into an EVALSHA with the script's SHA-1.
 * <p>
 * Scripts not yet cached on a node are sent via SCRIPT LOAD through a separate
 * batch; the local cache is updated only after that batch succeeds. Returns a
 * completed stage immediately when script caching is disabled.
 *
 * @param r resolved per-node command groups
 * @return stage completed once all SCRIPT LOAD calls finished and caches are updated
 */
private CompletionStage<Void> loadScripts(Map<NodeSource, Entry> r) {
    if (!connectionManager.getServiceManager().getCfg().isUseScriptCache()) {
        return CompletableFuture.completedFuture(null);
    }

    // TODO BatchOptions.defaults().skipResult() for Redis 3.2+
    CommandBatchService bb = new CommandBatchService(connectionManager, BatchOptions.defaults(), objectBuilder, referenceType);
    Map<MasterSlaveEntry, Set<String>> newScripts = new HashMap<>();
    for (Map.Entry<NodeSource, Entry> e : r.entrySet()) {
        for (BatchCommandData<?, ?> data : e.getValue().getEvalCommands()) {
            RedisCommand<?> command = data.getCommand();
            // by convention the script body is always the first parameter of an EVAL command
            String script = (String) data.getParams()[0];

            MasterSlaveEntry entry = e.getKey().getEntry();
            if (!connectionManager.getServiceManager().isCached(entry.getClient().getAddr(), script)) {
                Set<String> newShas = newScripts.computeIfAbsent(entry, k -> new HashSet<>());
                // avoid loading the same script to the same node twice within this batch
                if (newShas.add(script)) {
                    // read-only groups load the script on all nodes of the entry
                    if (e.getValue().isReadOnlyMode()) {
                        bb.executeAllAsync(entry, RedisCommands.SCRIPT_LOAD, script);
                    } else {
                        bb.writeAsync(entry, StringCodec.INSTANCE, RedisCommands.SCRIPT_LOAD, script);
                    }
                }
            }

            // switch the command to EVALSHA and replace the script body with its digest
            RedisCommand cmd = new RedisCommand(command, "EVALSHA");
            data.updateCommand(cmd);

            String sha1 = getServiceManager().calcSHA(script);
            data.getParams()[0] = sha1;
        }
    }
    return bb.executeAsync().thenAccept(res -> {
        // record successfully loaded scripts so later batches skip SCRIPT LOAD
        for (Map.Entry<MasterSlaveEntry, Set<String>> e : newScripts.entrySet()) {
            connectionManager.getServiceManager().cacheScripts(e.getKey().getClient().getAddr(), e.getValue());
        }
    });
}
/**
 * Groups the queued commands by their resolved {@link MasterSlaveEntry}.
 * <p>
 * If the node for some source cannot be resolved yet (e.g. during failover),
 * the whole resolution is rescheduled after a retry delay; once
 * {@code retryAttempts} is exhausted the future fails with a
 * node-not-found exception.
 *
 * @param attempt shared retry counter, incremented on every rescheduling
 * @param future  completed with per-entry groups, commands sorted in submission order
 */
private void resolveCommands(AtomicInteger attempt, CompletableFuture<Map<MasterSlaveEntry, Entry>> future) {
    Map<MasterSlaveEntry, Entry> result = new HashMap<>();
    for (Map.Entry<NodeSource, Entry> e : commands.entrySet()) {
        MasterSlaveEntry entry = getEntry(e.getKey());
        if (entry == null) {
            if (attempt.get() == retryAttempts) {
                future.completeExceptionally(connectionManager.getServiceManager().createNodeNotFoundException(e.getKey()));
                return;
            }

            // retry the whole resolution later; partial 'result' is discarded
            Duration timeout = retryDelay.calcDelay(attempt.get());
            attempt.incrementAndGet();
            connectionManager.getServiceManager().newTimeout(task -> {
                resolveCommands(attempt, future);
            }, timeout.toMillis(), TimeUnit.MILLISECONDS);
            return;
        }
        Entry ee = result.computeIfAbsent(entry, k -> new Entry());
        // a single write command makes the whole merged group non read-only
        if (!e.getValue().isReadOnlyMode()) {
            ee.setReadOnlyMode(false);
        }
        ee.getCommands().addAll(e.getValue().getCommands());
        ee.getEvalCommands().addAll(e.getValue().getEvalCommands());
    }

    for (Entry entry : result.values()) {
        entry.sortCommands();
    }

    future.complete(result);
}
/**
 * Groups the queued commands by {@link NodeSource} (entry plus optional pinned
 * client) for in-memory batch execution.
 * <p>
 * Like {@link #resolveCommands}, unresolved nodes cause the whole resolution to
 * be rescheduled until {@code retryAttempts} is exhausted. A pinned
 * {@link RedisClient} is dropped when it no longer belongs to the entry or is
 * frozen, falling back to entry-level routing.
 *
 * @param attempt shared retry counter, incremented on every rescheduling
 * @param future  completed with per-source groups, commands sorted in submission order
 */
private void resolveCommandsInMemory(AtomicInteger attempt, CompletableFuture<Map<NodeSource, Entry>> future) {
    Map<NodeSource, Entry> result = new HashMap<>();
    for (Map.Entry<NodeSource, Entry> e : commands.entrySet()) {
        MasterSlaveEntry entry = getEntry(e.getKey());
        if (entry == null) {
            if (attempt.get() == retryAttempts) {
                future.completeExceptionally(connectionManager.getServiceManager().createNodeNotFoundException(e.getKey()));
                return;
            }

            // retry the whole resolution later; partial 'result' is discarded
            Duration timeout = retryDelay.calcDelay(attempt.get());
            attempt.incrementAndGet();
            connectionManager.getServiceManager().newTimeout(task -> {
                resolveCommandsInMemory(attempt, future);
            }, timeout.toMillis(), TimeUnit.MILLISECONDS);
            return;
        }

        RedisClient client = e.getKey().getRedisClient();
        if (client != null) {
            // drop the pinned client if it left the entry or is frozen
            ClientConnectionsEntry ce = entry.getEntry(client);
            if (ce == null || ce.isFreezed()) {
                client = null;
            }
        }

        Entry ee = result.computeIfAbsent(new NodeSource(entry, client), k -> new Entry());
        // a single write command makes the whole merged group non read-only
        if (!e.getValue().isReadOnlyMode()) {
            ee.setReadOnlyMode(false);
        }
        ee.getCommands().addAll(e.getValue().getCommands());
        ee.getEvalCommands().addAll(e.getValue().getEvalCommands());
    }

    for (Entry entry : result.values()) {
        entry.sortCommands();
    }

    future.complete(result);
}
/**
 * Resolves the master/slave entry for a node source: by slot when one is set,
 * otherwise the source's own entry.
 *
 * @param source node source to resolve
 * @return resolved entry, possibly {@code null}
 */
private MasterSlaveEntry getEntry(NodeSource source) {
    Integer slot = source.getSlot();
    if (slot == null) {
        return source.getEntry();
    }
    return connectionManager.getWriteEntry(slot);
}
/**
 * Extracts the failure cause of a future, or {@code null} if it completed
 * normally (or is still pending).
 *
 * @param future future to inspect
 * @return unwrapped cause for exceptional completion, the
 *         {@link CancellationException} itself for cancellation, else {@code null}
 */
protected Throwable cause(CompletableFuture<?> future) {
    try {
        future.getNow(null);
    } catch (CompletionException e) {
        return e.getCause();
    } catch (CancellationException e) {
        return e;
    }
    return null;
}
@SuppressWarnings("MethodLength")
/**
 * Executes the batch in a Redis-queued atomic mode (READ/WRITE atomic): each
 * command has already been sent to its node inside a MULTI block, so this
 * method waits for all sends, fires EXEC per node, maps the EXEC reply array
 * back onto the individual command promises, and folds everything into one
 * {@link BatchResult}. A response timeout guards the whole sequence.
 *
 * @param <R> result type (effectively {@code BatchResult<?>})
 * @return future completed with the aggregated result or a {@code RedisTimeoutException}
 */
private <R> RFuture<R> executeRedisBasedQueue() {
    CompletableFuture<R> resultPromise = new CompletableFuture<R>();
    long responseTimeout;
    if (options.getResponseTimeout() > 0) {
        responseTimeout = options.getResponseTimeout();
    } else {
        responseTimeout = connectionManager.getServiceManager().getConfig().getTimeout();
    }

    // fail everything if queued commands don't get responses in time
    Timeout timeout = connectionManager.getServiceManager().newTimeout(new TimerTask() {
        @Override
        public void run(Timeout timeout) throws Exception {
            connections.values().forEach(c -> {
                c.getCancelCallback().run();
            });

            resultPromise.completeExceptionally(new RedisTimeoutException("Response timeout for queued commands " + responseTimeout + ": " +
                    commands.values().stream()
                        .flatMap(e -> e.getCommands().stream().map(d -> d.getCommand()))
                        .collect(Collectors.toList())));
        }
    }, responseTimeout, TimeUnit.MILLISECONDS);

    // wait until every queued command has actually been written to its connection
    CompletableFuture<Void> allFutures = CompletableFuture.allOf(commands.values().stream()
                                                                    .flatMap(m -> m.getCommands()
                                                                    .stream().map(c -> ((BatchPromise) c.getPromise()).getSentPromise()))
                                                                    .toArray(CompletableFuture[]::new));
    allFutures.whenComplete((fr, exc) -> {
        // timeout already fired: the result promise is being failed elsewhere
        if (!timeout.cancel()) {
            return;
        }

        // surface the first send failure, if any
        for (Entry entry : commands.values()) {
            for (BatchCommandData<?, ?> command : entry.getCommands()) {
                if (command.getPromise().isDone() && command.getPromise().isCompletedExceptionally()) {
                    resultPromise.completeExceptionally(cause(command.getPromise()));
                    break;
                }
            }
        }

        if (resultPromise.isDone()) {
            return;
        }

        Map<MasterSlaveEntry, List<Object>> result = new ConcurrentHashMap<>();
        AtomicInteger attempt = new AtomicInteger();
        CompletableFuture<Map<MasterSlaveEntry, Entry>> resolvedEntriesFuture = new CompletableFuture<>();
        resolveCommands(attempt, resolvedEntriesFuture);
        resolvedEntriesFuture.whenComplete((map, ee) -> {
            if (ee != null) {
                resultPromise.completeExceptionally(ee);
                return;
            }

            // run nested batch services first; their futures gate the EXEC phase
            AtomicInteger slots = new AtomicInteger(nestedServices.size());
            CompletableFuture<Void> nestedServicesFuture;
            if (nestedServices.isEmpty()) {
                nestedServicesFuture = CompletableFuture.completedFuture(null);
            } else {
                nestedServicesFuture = new CompletableFuture<>();
            }
            for (Map.Entry<CompletableFuture<?>, List<CommandBatchService>> entry : nestedServices.entrySet()) {
                for (CommandBatchService service : entry.getValue()) {
                    service.executeAsync();
                }

                entry.getKey().whenComplete((res, e) -> {
                    if (e == null) {
                        if (slots.decrementAndGet() == 0) {
                            nestedServicesFuture.complete(null);
                        }
                    } else {
                        // cancellations are reported as-is; wrapped failures are unwrapped
                        if (entry.getKey().isCancelled()) {
                            nestedServicesFuture.completeExceptionally(e);
                        } else {
                            nestedServicesFuture.completeExceptionally(e.getCause());
                        }
                    }
                });
            }

            nestedServicesFuture.whenComplete((r1, exc2) -> {
                if (exc2 != null) {
                    resultPromise.completeExceptionally(exc2);
                    return;
                }

                aggregatedCommands = map;

                // issue EXEC per node and collect the reply arrays
                List<CompletableFuture<Void>> futures = new ArrayList<>(map.size());
                for (Map.Entry<MasterSlaveEntry, Entry> entry : aggregatedCommands.entrySet()) {
                    boolean isReadOnly = options.getExecutionMode() == ExecutionMode.REDIS_READ_ATOMIC;
                    CompletableFuture<List<Object>> execPromise = createPromise();
                    RedisExecutor<List<Object>, List<Object>> executor = new RedisQueuedBatchExecutor<>(isReadOnly, new NodeSource(entry.getKey()), codec,
                            RedisCommands.EXEC, new Object[] {}, execPromise,
                            false, connectionManager, objectBuilder, commands, connections,
                            options, index, executed, referenceType, false, aggregatedCommands);
                    executor.execute();

                    CompletionStage<Void> f = execPromise.thenCompose(r -> {
                        BatchCommandData<?, ?> lastCommand = entry.getValue().getCommands().peekLast();
                        result.put(entry.getKey(), r);

                        // a trailing WAIT must also finish before this node is considered done
                        if (RedisCommands.WAIT.getName().equals(lastCommand.getCommand().getName())) {
                            return lastCommand.getPromise().thenApply(i -> null);
                        }
                        return CompletableFuture.completedFuture(null);
                    });
                    futures.add(f.toCompletableFuture());
                }

                CompletableFuture<Void> future = CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]));
                future.whenComplete((res, ex) -> {
                    executed.set(true);
                    if (ex != null) {
                        resultPromise.completeExceptionally(ex);
                        return;
                    }

                    try {
                        // distribute each node's EXEC reply array back onto the
                        // individual command promises, in order
                        for (java.util.Map.Entry<MasterSlaveEntry, List<Object>> entry : result.entrySet()) {
                            Entry commandEntry = aggregatedCommands.get(entry.getKey());
                            Iterator<Object> resultIter = entry.getValue().iterator();
                            for (BatchCommandData<?, ?> data : commandEntry.getCommands()) {
                                if (data.getCommand().getName().equals(RedisCommands.EXEC.getName())) {
                                    break;
                                }

                                CompletableFuture<Object> promise = (CompletableFuture<Object>) data.getPromise();
                                if (resultIter.hasNext()) {
                                    promise.complete(resultIter.next());
                                } else {
                                    // fix for https://github.com/redisson/redisson/issues/2212
                                    promise.complete(null);
                                }
                            }
                        }

                        List<BatchCommandData> entries = new ArrayList<>();
                        for (Entry e : aggregatedCommands.values()) {
                            entries.addAll(e.getCommands());
                        }
                        // restore original submission order across nodes
                        Collections.sort(entries);
                        List<Object> responses = new ArrayList<>(entries.size());
                        int syncedSlaves = 0;
                        for (BatchCommandData<?, ?> commandEntry : entries) {
                            if (isWaitCommand(commandEntry)) {
                                // WAIT replies with the replica count; WAITAOF with [local, replicas]
                                if (commandEntry.getCommand().getName().equals(RedisCommands.WAIT.getName())) {
                                    syncedSlaves += ((CompletableFuture<Integer>) commandEntry.getPromise()).getNow(0);
                                } else {
                                    List<Integer> list = ((CompletableFuture<List<Integer>>) commandEntry.getPromise()).getNow(Arrays.asList(0, 0));
                                    syncedSlaves += list.get(1);
                                }
                            } else if (!commandEntry.getCommand().getName().equals(RedisCommands.MULTI.getName())
                                    && !commandEntry.getCommand().getName().equals(RedisCommands.EXEC.getName())) {
                                Object entryResult = commandEntry.getPromise().getNow(null);
                                if (objectBuilder != null) {
                                    entryResult = objectBuilder.tryHandleReference(entryResult, referenceType);
                                }
                                responses.add(entryResult);
                            }
                        }
                        if (!nestedServices.isEmpty()) {
                            for (CompletableFuture<?> f : nestedServices.keySet()) {
                                responses.add(f.getNow(null));
                            }
                        }
                        BatchResult<Object> r = new BatchResult<>(responses, syncedSlaves);
                        resultPromise.complete((R) r);
                    } catch (Exception e) {
                        resultPromise.completeExceptionally(e);
                    }
                });
            });
        });
    });
    return new CompletableFutureWrapper<>(resultPromise);
}
/**
 * @return {@code true} if this batch runs in a Redis-queued atomic execution mode
 */
protected boolean isRedisBasedQueue() {
    if (options == null) {
        return false;
    }
    ExecutionMode mode = options.getExecutionMode();
    return mode == ExecutionMode.REDIS_READ_ATOMIC || mode == ExecutionMode.REDIS_WRITE_ATOMIC;
}
/**
 * @param c command to inspect
 * @return {@code true} if the command is WAIT or WAITAOF
 */
protected boolean isWaitCommand(CommandData<?, ?> c) {
    String name = c.getCommand().getName();
    return RedisCommands.WAIT.getName().equals(name)
            || RedisCommands.WAITAOF.getName().equals(name);
}
@Override
// script caching is handled explicitly via loadScripts() for batches,
// so the generic eval-cache path is disabled here
protected boolean isEvalCacheActive() {
    return false;
}
@Override
// sync/WAIT handling must run within this same batch, so reuse this instance
// instead of spawning a nested batch service
protected CommandBatchService createCommandBatchService(int availableSlaves, boolean aofEnabled, long timeout) {
    return this;
}
}
|
Entry
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
|
{
"start": 3859,
"end": 10392
}
|
class ____ extends HtmlBlock {
final CapacitySchedulerLeafQueueInfo lqinfo;
private String nodeLabel;
@Inject LeafQueueInfoBlock(ViewContext ctx, CSQInfo info) {
super(ctx);
lqinfo = (CapacitySchedulerLeafQueueInfo) info.qinfo;
nodeLabel = info.label;
}
@Override
protected void render(Block html) {
if (nodeLabel == null) {
renderLeafQueueInfoWithoutParition(html);
} else {
renderLeafQueueInfoWithPartition(html);
}
}
private void renderLeafQueueInfoWithPartition(Block html) {
String nodeLabelDisplay = nodeLabel.length() == 0
? NodeLabel.DEFAULT_NODE_LABEL_PARTITION : nodeLabel;
// first display the queue's label specific details :
ResponseInfo ri =
info("\'" + lqinfo.getQueuePath()
+ "\' Queue Status for Partition \'" + nodeLabelDisplay + "\'");
renderQueueCapacityInfo(ri, nodeLabel);
html.__(InfoBlock.class);
// clear the info contents so this queue's info doesn't accumulate into
// another queue's info
ri.clear();
// second display the queue specific details :
ri =
info("\'" + lqinfo.getQueuePath() + "\' Queue Status")
.__("Queue State:", lqinfo.getQueueState());
renderCommonLeafQueueInfo(ri);
html.__(InfoBlock.class);
// clear the info contents so this queue's info doesn't accumulate into
// another queue's info
ri.clear();
}
private void renderLeafQueueInfoWithoutParition(Block html) {
ResponseInfo ri =
info("\'" + lqinfo.getQueuePath() + "\' Queue Status")
.__("Queue State:", lqinfo.getQueueState());
renderQueueCapacityInfo(ri, "");
renderCommonLeafQueueInfo(ri);
html.__(InfoBlock.class);
// clear the info contents so this queue's info doesn't accumulate into
// another queue's info
ri.clear();
}
private void renderQueueCapacityInfo(ResponseInfo ri, String label) {
PartitionQueueCapacitiesInfo capacities =
lqinfo.getCapacities().getPartitionQueueCapacitiesInfo(label);
PartitionResourcesInfo resourceUsages =
lqinfo.getResources().getPartitionResourceUsageInfo(label);
// Get UserInfo from first user to calculate AM Resource Limit per user.
ResourceInfo userAMResourceLimit = null;
ArrayList<UserInfo> usersList = lqinfo.getUsers().getUsersList();
if (!usersList.isEmpty()) {
userAMResourceLimit = resourceUsages.getUserAmLimit();
}
// If no users are present or if AM limit per user doesn't exist, retrieve
// AM Limit for that queue.
if (userAMResourceLimit == null) {
userAMResourceLimit = resourceUsages.getAMLimit();
}
ResourceInfo amUsed = (resourceUsages.getAmUsed() == null)
? new ResourceInfo(Resources.none())
: resourceUsages.getAmUsed();
ri.
__("Used Capacity:",
appendPercent(resourceUsages.getUsed(),
capacities.getUsedCapacity() / 100))
.__(capacities.getWeight() != -1 ?
"Configured Weight:" :
"Configured Capacity:",
capacities.getWeight() != -1 ?
capacities.getWeight() :
capacities.getConfiguredMinResource() == null ?
Resources.none().toString() :
capacities.getConfiguredMinResource().toString())
.__("Configured Max Capacity:",
(capacities.getConfiguredMaxResource() == null
|| capacities.getConfiguredMaxResource().getResource()
.equals(Resources.none()))
? "unlimited"
: capacities.getConfiguredMaxResource().toString())
.__("Effective Capacity:",
appendPercent(capacities.getEffectiveMinResource(),
capacities.getCapacity() / 100))
.__("Effective Max Capacity:",
appendPercent(capacities.getEffectiveMaxResource(),
capacities.getMaxCapacity() / 100))
.__("Absolute Used Capacity:",
percent(capacities.getAbsoluteUsedCapacity() / 100))
.__("Absolute Configured Capacity:",
percent(capacities.getAbsoluteCapacity() / 100))
.__("Absolute Configured Max Capacity:",
percent(capacities.getAbsoluteMaxCapacity() / 100))
.__("Used Resources:", resourceUsages.getUsed().toString())
.__("Configured Max Application Master Limit:",
StringUtils.format("%.1f", capacities.getMaxAMLimitPercentage()))
.__("Max Application Master Resources:",
resourceUsages.getAMLimit().toString())
.__("Used Application Master Resources:", amUsed.toString())
.__("Max Application Master Resources Per User:",
userAMResourceLimit.toString());
}
private void renderCommonLeafQueueInfo(ResponseInfo ri) {
ri.
__("Num Schedulable Applications:",
Integer.toString(lqinfo.getNumActiveApplications())).
__("Num Non-Schedulable Applications:",
Integer.toString(lqinfo.getNumPendingApplications())).
__("Num Containers:",
Integer.toString(lqinfo.getNumContainers())).
__("Max Applications:",
Integer.toString(lqinfo.getMaxApplications())).
__("Max Applications Per User:",
Integer.toString(lqinfo.getMaxApplicationsPerUser())).
__("Configured Minimum User Limit Percent:",
lqinfo.getUserLimit() + "%").
__("Configured User Limit Factor:", lqinfo.getUserLimitFactor()).
__("Accessible Node Labels:",
StringUtils.join(",", lqinfo.getNodeLabels())).
__("Ordering Policy: ", lqinfo.getOrderingPolicyDisplayName()).
__("Preemption:",
lqinfo.getPreemptionDisabled() ? "disabled" : "enabled").
__("Intra-queue Preemption:", lqinfo.getIntraQueuePreemptionDisabled()
? "disabled" : "enabled").
__("Default Node Label Expression:",
lqinfo.getDefaultNodeLabelExpression() == null
? NodeLabel.DEFAULT_NODE_LABEL_PARTITION
: lqinfo.getDefaultNodeLabelExpression()).
__("Default Application Priority:",
Integer.toString(lqinfo.getDefaultApplicationPriority()));
}
}
static
|
LeafQueueInfoBlock
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/slm/action/ExecuteSnapshotRetentionAction.java
|
{
"start": 618,
"end": 975
}
|
class ____ extends ActionType<AcknowledgedResponse> {
public static final ExecuteSnapshotRetentionAction INSTANCE = new ExecuteSnapshotRetentionAction();
public static final String NAME = "cluster:admin/slm/execute-retention";
protected ExecuteSnapshotRetentionAction() {
super(NAME);
}
public static
|
ExecuteSnapshotRetentionAction
|
java
|
micronaut-projects__micronaut-core
|
core/src/main/java/io/micronaut/core/execution/ExecutionFlow.java
|
{
"start": 1619,
"end": 9600
}
|
interface ____<T> {
    /**
     * Create a simple flow representing a value.
     *
     * @param value The value
     * @param <K>   The value type
     * @return a new flow
     */
    @NonNull
    static <K> ExecutionFlow<K> just(@Nullable K value) {
        return (ExecutionFlow<K>) new ImperativeExecutionFlowImpl(value, null);
    }

    /**
     * Create a simple flow representing an error.
     *
     * @param e The exception
     * @param <K>   The value type
     * @return a new flow
     */
    @NonNull
    static <K> ExecutionFlow<K> error(@NonNull Throwable e) {
        return (ExecutionFlow<K>) new ImperativeExecutionFlowImpl(null, e);
    }

    /**
     * Create a simple flow representing an empty state.
     *
     * @param <T> The flow value type
     * @return a new flow
     */
    @NonNull
    static <T> ExecutionFlow<T> empty() {
        // empty = neither value nor error
        return (ExecutionFlow<T>) new ImperativeExecutionFlowImpl(null, null);
    }

    /**
     * Create a flow by invoking a supplier asynchronously.
     *
     * @param executor The executor
     * @param supplier The supplier
     * @param <T>      The flow value type
     * @return a new flow
     */
    @NonNull
    static <T> ExecutionFlow<T> async(@NonNull Executor executor, @NonNull Supplier<? extends ExecutionFlow<T>> supplier) {
        DelayedExecutionFlow<T> completableFuture = DelayedExecutionFlow.create();
        // NOTE(review): if supplier.get() throws inside the task, the returned
        // flow is never completed — confirm callers guard against throwing suppliers
        executor.execute(() -> supplier.get().onComplete(completableFuture::complete));
        return completableFuture;
    }

    /**
     * Map a not-empty value.
     *
     * @param transformer The value transformer
     * @param <R>         New value Type
     * @return a new flow
     */
    @NonNull
    <R> ExecutionFlow<R> map(@NonNull Function<? super T, ? extends R> transformer);

    /**
     * Map a not-empty value to a new flow.
     *
     * @param transformer The value transformer
     * @param <R>         New value Type
     * @return a new flow
     */
    @NonNull
    <R> ExecutionFlow<R> flatMap(@NonNull Function<? super T, ? extends ExecutionFlow<? extends R>> transformer);

    /**
     * Supply a new flow after the existing flow value is resolved.
     *
     * @param supplier The supplier
     * @param <R>      New value Type
     * @return a new flow
     */
    @NonNull
    <R> ExecutionFlow<R> then(@NonNull Supplier<? extends ExecutionFlow<? extends R>> supplier);

    /**
     * Supply a new flow if the existing flow is erroneous.
     *
     * @param fallback The fallback
     * @return a new flow
     */
    @NonNull
    ExecutionFlow<T> onErrorResume(@NonNull Function<? super Throwable, ? extends ExecutionFlow<? extends T>> fallback);

    /**
     * Store a contextual value.
     *
     * @param key   The key
     * @param value The value
     * @return a new flow
     */
    @NonNull
    ExecutionFlow<T> putInContext(@NonNull String key, @NonNull Object value);

    /**
     * Store a contextual value if it is absent.
     *
     * @param key   The key
     * @param value The value
     * @return a new flow
     * @since 4.8.0
     */
    @NonNull
    default ExecutionFlow<T> putInContextIfAbsent(@NonNull String key, @NonNull Object value) {
        // default: no context support, flow returned unchanged
        return this;
    }

    /**
     * Invokes a provided function when the flow is resolved, or immediately if it is already done.
     *
     * @param fn The function
     */
    void onComplete(@NonNull BiConsumer<? super T, Throwable> fn);

    /**
     * Completes the flow to the completable future.
     *
     * @param completableFuture The completable future
     * @since 4.8
     */
    void completeTo(@NonNull CompletableFuture<T> completableFuture);

    /**
     * Create a new {@link ExecutionFlow} that either returns the same result or, if the timeout
     * expires before the result is received, a {@link java.util.concurrent.TimeoutException}.
     *
     * @param timeout   The timeout
     * @param scheduler Scheduler to schedule the timeout task
     * @param onDiscard An optional consumer to be called on the value of this flow if the flow
     *                  completes after the timeout has expired and thus the value is discarded
     * @return A new flow that will produce either the same value or a {@link java.util.concurrent.TimeoutException}
     */
    @NonNull
    default ExecutionFlow<T> timeout(@NonNull Duration timeout, @NonNull ScheduledExecutorService scheduler, @Nullable BiConsumer<T, Throwable> onDiscard) {
        DelayedExecutionFlow<T> delayed = DelayedExecutionFlow.create();
        // CAS guard ensures exactly one of {timeout, result} wins the race
        AtomicBoolean completed = new AtomicBoolean(false);
        // schedule the timeout
        ScheduledFuture<?> future = scheduler.schedule(() -> {
            if (completed.compareAndSet(false, true)) {
                cancel();
                delayed.completeExceptionally(new TimeoutException());
            }
        }, timeout.toNanos(), TimeUnit.NANOSECONDS);
        // forward any result
        onComplete((t, throwable) -> {
            if (completed.compareAndSet(false, true)) {
                future.cancel(false);
                if (throwable != null) {
                    delayed.completeExceptionally(throwable);
                } else {
                    delayed.complete(t);
                }
            } else {
                // result arrived after the timeout already fired: hand it to onDiscard
                if (onDiscard != null) {
                    onDiscard.accept(t, throwable);
                }
            }
        });
        // forward cancel from downstream
        delayed.onCancel(this::cancel);
        return delayed;
    }

    /**
     * Create an {@link ImperativeExecutionFlow} from this execution flow, if possible. The flow
     * will have its result immediately available.
     *
     * @return The imperative flow, or {@code null} if this flow is not complete or does not
     * support this operation
     */
    @Nullable
    ImperativeExecutionFlow<T> tryComplete();

    /**
     * Alternative to {@link #tryComplete()} which will unwrap the flow's value.
     *
     * @return The imperative flow then returns its value, or {@code null} if this flow is not complete or does not
     * support this operation
     * @since 4.3
     */
    @Nullable
    default T tryCompleteValue() {
        ImperativeExecutionFlow<T> imperativeFlow = tryComplete();
        if (imperativeFlow != null) {
            return imperativeFlow.getValue();
        }
        return null;
    }

    /**
     * Alternative to {@link #tryComplete()} which will unwrap the flow's error.
     *
     * @return The imperative flow then returns its error, or {@code null} if this flow is not complete or does not
     * support this operation
     * @since 4.3
     */
    @Nullable
    default Throwable tryCompleteError() {
        ImperativeExecutionFlow<T> imperativeFlow = tryComplete();
        if (imperativeFlow != null) {
            return imperativeFlow.getError();
        }
        return null;
    }

    /**
     * Converts the existing flow into the completable future.
     *
     * @return a {@link CompletableFuture} that represents the state if this flow.
     */
    @NonNull
    default CompletableFuture<T> toCompletableFuture() {
        CompletableFuture<T> completableFuture = new CompletableFuture<>();
        completeTo(completableFuture);
        return completableFuture;
    }

    /**
     * Send an optional hint to the upstream producer that the result of this flow is no longer
     * needed and can be discarded. This is an optional operation, and has no effect if the flow
     * has already completed. After a cancellation, a flow might never complete.
     * <p>If this flow contains a resource that needs to be cleaned up (e.g. an
     * {@link java.io.InputStream}), the caller should still add a
     * {@link #onComplete completion listener} for cleanup, in case the upstream producer does not
     * support cancellation or has already submitted the result.
     *
     * @since 4.8.0
     */
    default void cancel() {
        // no-op by default: cancellation is a best-effort hint
    }
}
|
ExecutionFlow
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DeflateCodec.java
|
{
"start": 859,
"end": 938
}
|
class ____ DefaultCodec to enable codec discovery by 'deflate' name.
*/
public
|
for
|
java
|
apache__kafka
|
storage/api/src/main/java/org/apache/kafka/server/log/remote/storage/RemoteLogSegmentMetadata.java
|
{
"start": 1542,
"end": 16963
}
|
class ____ extends RemoteLogMetadata {
/**
* Universally unique remote log segment id.
*/
private final RemoteLogSegmentId remoteLogSegmentId;
/**
* Start offset of this segment.
*/
private final long startOffset;
/**
* End offset of this segment.
*/
private final long endOffset;
/**
* Maximum timestamp in milliseconds in the segment
*/
private final long maxTimestampMs;
/**
* LeaderEpoch vs offset for messages within this segment.
*/
private final NavigableMap<Integer, Long> segmentLeaderEpochs;
/**
* Size of the segment in bytes.
*/
private final int segmentSizeInBytes;
/**
* Custom metadata.
*/
private final Optional<CustomMetadata> customMetadata;
/**
* It indicates the state in which the action is executed on this segment.
*/
private final RemoteLogSegmentState state;
/**
* Indicates whether the transaction index is empty for this segment.
*/
private final boolean txnIdxEmpty;
/**
* Creates an instance with the given metadata of remote log segment.
* <p>
* {@code segmentLeaderEpochs} can not be empty. If all the records in this segment belong to the same leader epoch
* then it should have an entry with epoch mapping to start-offset of this segment.
*
* @param remoteLogSegmentId Universally unique remote log segment id.
* @param startOffset Start offset of this segment (inclusive).
* @param endOffset End offset of this segment (inclusive).
* @param maxTimestampMs Maximum timestamp in milliseconds in this segment.
* @param brokerId Broker id from which this event is generated.
* @param eventTimestampMs Epoch time in milliseconds at which the remote log segment is copied to the remote tier storage.
* @param segmentSizeInBytes Size of this segment in bytes.
* @param customMetadata Custom metadata.
* @param state State of the respective segment of remoteLogSegmentId.
* @param segmentLeaderEpochs leader epochs occurred within this segment.
*/
public RemoteLogSegmentMetadata(RemoteLogSegmentId remoteLogSegmentId,
long startOffset,
long endOffset,
long maxTimestampMs,
int brokerId,
long eventTimestampMs,
int segmentSizeInBytes,
Optional<CustomMetadata> customMetadata,
RemoteLogSegmentState state,
Map<Integer, Long> segmentLeaderEpochs) {
this(remoteLogSegmentId, startOffset, endOffset, maxTimestampMs, brokerId, eventTimestampMs, segmentSizeInBytes,
customMetadata, state, segmentLeaderEpochs, false);
}
/**
* Creates an instance with the given metadata of remote log segment.
* <p>
* {@code segmentLeaderEpochs} can not be empty. If all the records in this segment belong to the same leader epoch
* then it should have an entry with epoch mapping to start-offset of this segment.
*
* @param remoteLogSegmentId Universally unique remote log segment id.
* @param startOffset Start offset of this segment (inclusive).
* @param endOffset End offset of this segment (inclusive).
* @param maxTimestampMs Maximum timestamp in milliseconds in this segment.
* @param brokerId Broker id from which this event is generated.
* @param eventTimestampMs Epoch time in milliseconds at which the remote log segment is copied to the remote tier storage.
* @param segmentSizeInBytes Size of this segment in bytes.
* @param customMetadata Custom metadata.
* @param state State of the respective segment of remoteLogSegmentId.
* @param segmentLeaderEpochs leader epochs occurred within this segment.
* @param txnIdxEmpty True if the transaction index is empty, false otherwise.
*/
public RemoteLogSegmentMetadata(RemoteLogSegmentId remoteLogSegmentId,
long startOffset,
long endOffset,
long maxTimestampMs,
int brokerId,
long eventTimestampMs,
int segmentSizeInBytes,
Optional<CustomMetadata> customMetadata,
RemoteLogSegmentState state,
Map<Integer, Long> segmentLeaderEpochs,
boolean txnIdxEmpty) {
super(brokerId, eventTimestampMs);
this.remoteLogSegmentId = Objects.requireNonNull(remoteLogSegmentId, "remoteLogSegmentId can not be null");
this.state = Objects.requireNonNull(state, "state can not be null");
if (startOffset < 0) {
throw new IllegalArgumentException("Unexpected start offset = " + startOffset + ". StartOffset for a remote segment cannot be negative");
}
this.startOffset = startOffset;
if (endOffset < startOffset) {
throw new IllegalArgumentException("Unexpected end offset = " + endOffset +
". EndOffset for a remote segment cannot be less than startOffset = " + startOffset);
}
this.endOffset = endOffset;
this.maxTimestampMs = maxTimestampMs;
this.segmentSizeInBytes = segmentSizeInBytes;
this.customMetadata = Objects.requireNonNull(customMetadata, "customMetadata can not be null");
if (segmentLeaderEpochs == null || segmentLeaderEpochs.isEmpty()) {
throw new IllegalArgumentException("segmentLeaderEpochs can not be null or empty");
}
this.segmentLeaderEpochs = Collections.unmodifiableNavigableMap(new TreeMap<>(segmentLeaderEpochs));
this.txnIdxEmpty = txnIdxEmpty;
}
/**
* Creates an instance with the given metadata of remote log segment and its state as {@link RemoteLogSegmentState#COPY_SEGMENT_STARTED}.
* <p>
* {@code segmentLeaderEpochs} can not be empty. If all the records in this segment belong to the same leader epoch
* then it should have an entry with epoch mapping to start-offset of this segment.
*
* @param remoteLogSegmentId Universally unique remote log segment id.
* @param startOffset Start offset of this segment (inclusive).
* @param endOffset End offset of this segment (inclusive).
* @param maxTimestampMs Maximum timestamp in this segment
* @param brokerId Broker id from which this event is generated.
* @param eventTimestampMs Epoch time in milliseconds at which the remote log segment is copied to the remote tier storage.
* @param segmentSizeInBytes Size of this segment in bytes.
* @param segmentLeaderEpochs leader epochs occurred within this segment
*/
public RemoteLogSegmentMetadata(RemoteLogSegmentId remoteLogSegmentId,
long startOffset,
long endOffset,
long maxTimestampMs,
int brokerId,
long eventTimestampMs,
int segmentSizeInBytes,
Map<Integer, Long> segmentLeaderEpochs) {
this(remoteLogSegmentId,
startOffset,
endOffset,
maxTimestampMs,
brokerId,
eventTimestampMs, segmentSizeInBytes,
Optional.empty(),
RemoteLogSegmentState.COPY_SEGMENT_STARTED,
segmentLeaderEpochs);
}
/**
* Creates an instance with the given metadata of remote log segment and its state as {@link RemoteLogSegmentState#COPY_SEGMENT_STARTED}.
* <p>
* {@code segmentLeaderEpochs} can not be empty. If all the records in this segment belong to the same leader epoch
* then it should have an entry with epoch mapping to start-offset of this segment.
*
* @param remoteLogSegmentId Universally unique remote log segment id.
* @param startOffset Start offset of this segment (inclusive).
* @param endOffset End offset of this segment (inclusive).
* @param maxTimestampMs Maximum timestamp in this segment
* @param brokerId Broker id from which this event is generated.
* @param eventTimestampMs Epoch time in milliseconds at which the remote log segment is copied to the remote tier storage.
* @param segmentSizeInBytes Size of this segment in bytes.
* @param segmentLeaderEpochs leader epochs occurred within this segment
* @param txnIdxEmpty True if the transaction index is empty, false otherwise.
*/
public RemoteLogSegmentMetadata(RemoteLogSegmentId remoteLogSegmentId,
long startOffset,
long endOffset,
long maxTimestampMs,
int brokerId,
long eventTimestampMs,
int segmentSizeInBytes,
Map<Integer, Long> segmentLeaderEpochs,
boolean txnIdxEmpty) {
this(remoteLogSegmentId, startOffset, endOffset, maxTimestampMs, brokerId, eventTimestampMs, segmentSizeInBytes,
Optional.empty(), RemoteLogSegmentState.COPY_SEGMENT_STARTED, segmentLeaderEpochs, txnIdxEmpty);
}
/**
* @return unique id of this segment.
*/
public RemoteLogSegmentId remoteLogSegmentId() {
return remoteLogSegmentId;
}
/**
* @return Start offset of this segment (inclusive).
*/
public long startOffset() {
return startOffset;
}
/**
* @return End offset of this segment (inclusive).
*/
public long endOffset() {
return endOffset;
}
/**
* @return Total size of this segment in bytes.
*/
public int segmentSizeInBytes() {
return segmentSizeInBytes;
}
/**
* @return Maximum timestamp in milliseconds of a record within this segment.
*/
public long maxTimestampMs() {
return maxTimestampMs;
}
/**
* @return Map of leader epoch vs offset for the records available in this segment.
*/
public NavigableMap<Integer, Long> segmentLeaderEpochs() {
return segmentLeaderEpochs;
}
/**
* @return Custom metadata.
*/
public Optional<CustomMetadata> customMetadata() {
return customMetadata;
}
/**
* Returns the current state of this remote log segment. It can be any of the below
* <ul>
* {@link RemoteLogSegmentState#COPY_SEGMENT_STARTED}
* {@link RemoteLogSegmentState#COPY_SEGMENT_FINISHED}
* {@link RemoteLogSegmentState#DELETE_SEGMENT_STARTED}
* {@link RemoteLogSegmentState#DELETE_SEGMENT_FINISHED}
* </ul>
*/
public RemoteLogSegmentState state() {
return state;
}
/**
* If true indicates that the transaction index is empty.
* @return True if the Transaction index is empty, false otherwise.
*/
public boolean isTxnIdxEmpty() {
return txnIdxEmpty;
}
/**
* Creates a new RemoteLogSegmentMetadata applying the given {@code rlsmUpdate} on this instance. This method will
* not update this instance.
*
* @param rlsmUpdate update to be applied.
* @return a new instance created by applying the given update on this instance.
*/
public RemoteLogSegmentMetadata createWithUpdates(RemoteLogSegmentMetadataUpdate rlsmUpdate) {
if (!remoteLogSegmentId.equals(rlsmUpdate.remoteLogSegmentId())) {
throw new IllegalArgumentException("Given rlsmUpdate does not have this instance's remoteLogSegmentId.");
}
return new RemoteLogSegmentMetadata(remoteLogSegmentId, startOffset,
endOffset, maxTimestampMs, rlsmUpdate.brokerId(), rlsmUpdate.eventTimestampMs(),
segmentSizeInBytes, rlsmUpdate.customMetadata(), rlsmUpdate.state(), segmentLeaderEpochs, txnIdxEmpty);
}
@Override
public TopicIdPartition topicIdPartition() {
return remoteLogSegmentId.topicIdPartition();
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
RemoteLogSegmentMetadata that = (RemoteLogSegmentMetadata) o;
return startOffset == that.startOffset && endOffset == that.endOffset
&& maxTimestampMs == that.maxTimestampMs
&& segmentSizeInBytes == that.segmentSizeInBytes
&& Objects.equals(remoteLogSegmentId, that.remoteLogSegmentId)
&& Objects.equals(segmentLeaderEpochs, that.segmentLeaderEpochs)
&& Objects.equals(customMetadata, that.customMetadata)
&& state == that.state
&& eventTimestampMs() == that.eventTimestampMs()
&& brokerId() == that.brokerId()
&& txnIdxEmpty == that.txnIdxEmpty;
}
@Override
public int hashCode() {
return Objects.hash(remoteLogSegmentId, startOffset, endOffset, brokerId(), maxTimestampMs,
eventTimestampMs(), segmentLeaderEpochs, segmentSizeInBytes, customMetadata, state, txnIdxEmpty);
}
@Override
public String toString() {
return "RemoteLogSegmentMetadata{" +
"remoteLogSegmentId=" + remoteLogSegmentId +
", startOffset=" + startOffset +
", endOffset=" + endOffset +
", brokerId=" + brokerId() +
", maxTimestampMs=" + maxTimestampMs +
", eventTimestampMs=" + eventTimestampMs() +
", segmentLeaderEpochs=" + segmentLeaderEpochs +
", segmentSizeInBytes=" + segmentSizeInBytes +
", customMetadata=" + customMetadata +
", state=" + state +
", txnIdxEmpty=" + txnIdxEmpty +
'}';
}
/**
* Custom metadata from a {@link RemoteStorageManager} plugin.
*
* <p>The content of these metadata is RSM-dependent and is opaque to the broker, i.e.
* it's not interpreted, only stored along with the rest of the remote log segment metadata.
*
* <p>Examples of such metadata are:
* <ol>
* <li>The storage path on the remote storage in case it's nondeterministic or version-dependent.</li>
* <li>The actual size of the all files related to the segment on the remote storage.</li>
* </ol>
*
* <p>The maximum size the broker accepts and stores is controlled by
* the {@code remote.log.metadata.custom.metadata.max.bytes} setting.
*/
public static
|
RemoteLogSegmentMetadata
|
java
|
junit-team__junit5
|
junit-platform-console/src/main/java/org/junit/platform/console/options/SelectorConverter.java
|
{
"start": 2291,
"end": 2443
}
|
class ____ implements ITypeConverter<UriSelector> {
@Override
public UriSelector convert(String value) {
return selectUri(value);
}
}
static
|
Uri
|
java
|
bumptech__glide
|
annotation/compiler/test/src/test/resources/GlideExtensionOptionsTest/StaticMethodName/GlideOptions.java
|
{
"start": 1010,
"end": 15316
}
|
class ____ extends RequestOptions implements Cloneable {
private static GlideOptions fitCenterTransform0;
private static GlideOptions centerInsideTransform1;
private static GlideOptions centerCropTransform2;
private static GlideOptions circleCropTransform3;
private static GlideOptions noTransformation4;
private static GlideOptions noAnimation5;
/**
* @see RequestOptions#sizeMultiplierOf(float)
*/
@CheckResult
@NonNull
public static GlideOptions sizeMultiplierOf(@FloatRange(from = 0.0, to = 1.0) float value) {
return new GlideOptions().sizeMultiplier(value);
}
/**
* @see RequestOptions#diskCacheStrategyOf(DiskCacheStrategy)
*/
@CheckResult
@NonNull
public static GlideOptions diskCacheStrategyOf(@NonNull DiskCacheStrategy strategy) {
return new GlideOptions().diskCacheStrategy(strategy);
}
/**
* @see RequestOptions#priorityOf(Priority)
*/
@CheckResult
@NonNull
public static GlideOptions priorityOf(@NonNull Priority priority) {
return new GlideOptions().priority(priority);
}
/**
* @see RequestOptions#placeholderOf(Drawable)
*/
@CheckResult
@NonNull
public static GlideOptions placeholderOf(@Nullable Drawable drawable) {
return new GlideOptions().placeholder(drawable);
}
/**
* @see RequestOptions#placeholderOf(int)
*/
@CheckResult
@NonNull
public static GlideOptions placeholderOf(@DrawableRes int id) {
return new GlideOptions().placeholder(id);
}
/**
* @see RequestOptions#errorOf(Drawable)
*/
@CheckResult
@NonNull
public static GlideOptions errorOf(@Nullable Drawable drawable) {
return new GlideOptions().error(drawable);
}
/**
* @see RequestOptions#errorOf(int)
*/
@CheckResult
@NonNull
public static GlideOptions errorOf(@DrawableRes int id) {
return new GlideOptions().error(id);
}
/**
* @see RequestOptions#skipMemoryCacheOf(boolean)
*/
@CheckResult
@NonNull
public static GlideOptions skipMemoryCacheOf(boolean skipMemoryCache) {
return new GlideOptions().skipMemoryCache(skipMemoryCache);
}
/**
* @see RequestOptions#overrideOf(int, int)
*/
@CheckResult
@NonNull
public static GlideOptions overrideOf(int width, int height) {
return new GlideOptions().override(width, height);
}
/**
* @see RequestOptions#overrideOf(int)
*/
@CheckResult
@NonNull
public static GlideOptions overrideOf(int size) {
return new GlideOptions().override(size);
}
/**
* @see RequestOptions#signatureOf(Key)
*/
@CheckResult
@NonNull
public static GlideOptions signatureOf(@NonNull Key key) {
return new GlideOptions().signature(key);
}
/**
* @see RequestOptions#fitCenterTransform()
*/
@CheckResult
@NonNull
public static GlideOptions fitCenterTransform() {
if (GlideOptions.fitCenterTransform0 == null) {
GlideOptions.fitCenterTransform0 =
new GlideOptions().fitCenter().autoClone();
}
return GlideOptions.fitCenterTransform0;
}
/**
* @see RequestOptions#centerInsideTransform()
*/
@CheckResult
@NonNull
public static GlideOptions centerInsideTransform() {
if (GlideOptions.centerInsideTransform1 == null) {
GlideOptions.centerInsideTransform1 =
new GlideOptions().centerInside().autoClone();
}
return GlideOptions.centerInsideTransform1;
}
/**
* @see RequestOptions#centerCropTransform()
*/
@CheckResult
@NonNull
public static GlideOptions centerCropTransform() {
if (GlideOptions.centerCropTransform2 == null) {
GlideOptions.centerCropTransform2 =
new GlideOptions().centerCrop().autoClone();
}
return GlideOptions.centerCropTransform2;
}
/**
* @see RequestOptions#circleCropTransform()
*/
@CheckResult
@NonNull
public static GlideOptions circleCropTransform() {
if (GlideOptions.circleCropTransform3 == null) {
GlideOptions.circleCropTransform3 =
new GlideOptions().circleCrop().autoClone();
}
return GlideOptions.circleCropTransform3;
}
/**
* @see RequestOptions#bitmapTransform(Transformation)
*/
@CheckResult
@NonNull
public static GlideOptions bitmapTransform(@NonNull Transformation<Bitmap> transformation) {
return new GlideOptions().transform(transformation);
}
/**
* @see RequestOptions#noTransformation()
*/
@CheckResult
@NonNull
public static GlideOptions noTransformation() {
if (GlideOptions.noTransformation4 == null) {
GlideOptions.noTransformation4 =
new GlideOptions().dontTransform().autoClone();
}
return GlideOptions.noTransformation4;
}
/**
* @see RequestOptions#option(Option, T)
*/
@CheckResult
@NonNull
public static <T> GlideOptions option(@NonNull Option<T> option, @NonNull T t) {
return new GlideOptions().set(option, t);
}
/**
* @see RequestOptions#decodeTypeOf(Class)
*/
@CheckResult
@NonNull
public static GlideOptions decodeTypeOf(@NonNull Class<?> clazz) {
return new GlideOptions().decode(clazz);
}
/**
* @see RequestOptions#formatOf(DecodeFormat)
*/
@CheckResult
@NonNull
public static GlideOptions formatOf(@NonNull DecodeFormat format) {
return new GlideOptions().format(format);
}
/**
* @see RequestOptions#frameOf(long)
*/
@CheckResult
@NonNull
public static GlideOptions frameOf(@IntRange(from = 0) long value) {
return new GlideOptions().frame(value);
}
/**
* @see RequestOptions#downsampleOf(DownsampleStrategy)
*/
@CheckResult
@NonNull
public static GlideOptions downsampleOf(@NonNull DownsampleStrategy strategy) {
return new GlideOptions().downsample(strategy);
}
/**
* @see RequestOptions#timeoutOf(int)
*/
@CheckResult
@NonNull
public static GlideOptions timeoutOf(@IntRange(from = 0) int value) {
return new GlideOptions().timeout(value);
}
/**
* @see RequestOptions#encodeQualityOf(int)
*/
@CheckResult
@NonNull
public static GlideOptions encodeQualityOf(@IntRange(from = 0, to = 100) int value) {
return new GlideOptions().encodeQuality(value);
}
/**
* @see RequestOptions#encodeFormatOf(CompressFormat)
*/
@CheckResult
@NonNull
public static GlideOptions encodeFormatOf(@NonNull Bitmap.CompressFormat format) {
return new GlideOptions().encodeFormat(format);
}
/**
* @see RequestOptions#noAnimation()
*/
@CheckResult
@NonNull
public static GlideOptions noAnimation() {
if (GlideOptions.noAnimation5 == null) {
GlideOptions.noAnimation5 =
new GlideOptions().dontAnimate().autoClone();
}
return GlideOptions.noAnimation5;
}
@Override
@NonNull
@CheckResult
public GlideOptions sizeMultiplier(@FloatRange(from = 0.0, to = 1.0) float value) {
return (GlideOptions) super.sizeMultiplier(value);
}
@Override
@NonNull
@CheckResult
public GlideOptions useUnlimitedSourceGeneratorsPool(boolean flag) {
return (GlideOptions) super.useUnlimitedSourceGeneratorsPool(flag);
}
@Override
@NonNull
@CheckResult
public GlideOptions useAnimationPool(boolean flag) {
return (GlideOptions) super.useAnimationPool(flag);
}
@Override
@NonNull
@CheckResult
public GlideOptions onlyRetrieveFromCache(boolean flag) {
return (GlideOptions) super.onlyRetrieveFromCache(flag);
}
@Override
@NonNull
@CheckResult
public GlideOptions diskCacheStrategy(@NonNull DiskCacheStrategy strategy) {
return (GlideOptions) super.diskCacheStrategy(strategy);
}
@Override
@NonNull
@CheckResult
public GlideOptions priority(@NonNull Priority priority) {
return (GlideOptions) super.priority(priority);
}
@Override
@NonNull
@CheckResult
public GlideOptions placeholder(@Nullable Drawable drawable) {
return (GlideOptions) super.placeholder(drawable);
}
@Override
@NonNull
@CheckResult
public GlideOptions placeholder(@DrawableRes int id) {
return (GlideOptions) super.placeholder(id);
}
@Override
@NonNull
@CheckResult
public GlideOptions fallback(@Nullable Drawable drawable) {
return (GlideOptions) super.fallback(drawable);
}
@Override
@NonNull
@CheckResult
public GlideOptions fallback(@DrawableRes int id) {
return (GlideOptions) super.fallback(id);
}
@Override
@NonNull
@CheckResult
public GlideOptions error(@Nullable Drawable drawable) {
return (GlideOptions) super.error(drawable);
}
@Override
@NonNull
@CheckResult
public GlideOptions error(@DrawableRes int id) {
return (GlideOptions) super.error(id);
}
@Override
@NonNull
@CheckResult
public GlideOptions theme(@Nullable Resources.Theme theme) {
return (GlideOptions) super.theme(theme);
}
@Override
@NonNull
@CheckResult
public GlideOptions skipMemoryCache(boolean skip) {
return (GlideOptions) super.skipMemoryCache(skip);
}
@Override
@NonNull
@CheckResult
public GlideOptions override(int width, int height) {
return (GlideOptions) super.override(width, height);
}
@Override
@NonNull
@CheckResult
public GlideOptions override(int size) {
return (GlideOptions) super.override(size);
}
@Override
@NonNull
@CheckResult
public GlideOptions signature(@NonNull Key key) {
return (GlideOptions) super.signature(key);
}
@Override
@CheckResult
public GlideOptions clone() {
return (GlideOptions) super.clone();
}
@Override
@NonNull
@CheckResult
public <Y> GlideOptions set(@NonNull Option<Y> option, @NonNull Y y) {
return (GlideOptions) super.set(option, y);
}
@Override
@NonNull
@CheckResult
public GlideOptions decode(@NonNull Class<?> clazz) {
return (GlideOptions) super.decode(clazz);
}
@Override
@NonNull
@CheckResult
public GlideOptions encodeFormat(@NonNull Bitmap.CompressFormat format) {
return (GlideOptions) super.encodeFormat(format);
}
@Override
@NonNull
@CheckResult
public GlideOptions encodeQuality(@IntRange(from = 0, to = 100) int value) {
return (GlideOptions) super.encodeQuality(value);
}
@Override
@NonNull
@CheckResult
public GlideOptions frame(@IntRange(from = 0) long value) {
return (GlideOptions) super.frame(value);
}
@Override
@NonNull
@CheckResult
public GlideOptions format(@NonNull DecodeFormat format) {
return (GlideOptions) super.format(format);
}
@Override
@NonNull
@CheckResult
public GlideOptions disallowHardwareConfig() {
return (GlideOptions) super.disallowHardwareConfig();
}
@Override
@NonNull
@CheckResult
public GlideOptions downsample(@NonNull DownsampleStrategy strategy) {
return (GlideOptions) super.downsample(strategy);
}
@Override
@NonNull
@CheckResult
public GlideOptions timeout(@IntRange(from = 0) int value) {
return (GlideOptions) super.timeout(value);
}
@Override
@NonNull
@CheckResult
public GlideOptions optionalCenterCrop() {
return (GlideOptions) super.optionalCenterCrop();
}
@Override
@NonNull
@CheckResult
public GlideOptions centerCrop() {
return (GlideOptions) super.centerCrop();
}
@Override
@NonNull
@CheckResult
public GlideOptions optionalFitCenter() {
return (GlideOptions) super.optionalFitCenter();
}
@Override
@NonNull
@CheckResult
public GlideOptions fitCenter() {
return (GlideOptions) super.fitCenter();
}
@Override
@NonNull
@CheckResult
public GlideOptions optionalCenterInside() {
return (GlideOptions) super.optionalCenterInside();
}
@Override
@NonNull
@CheckResult
public GlideOptions centerInside() {
return (GlideOptions) super.centerInside();
}
@Override
@NonNull
@CheckResult
public GlideOptions optionalCircleCrop() {
return (GlideOptions) super.optionalCircleCrop();
}
@Override
@NonNull
@CheckResult
public GlideOptions circleCrop() {
return (GlideOptions) super.circleCrop();
}
@Override
@NonNull
@CheckResult
public GlideOptions transform(@NonNull Transformation<Bitmap> transformation) {
return (GlideOptions) super.transform(transformation);
}
@Override
@SafeVarargs
@SuppressWarnings("varargs")
@NonNull
@CheckResult
public final GlideOptions transform(@NonNull Transformation<Bitmap>... transformations) {
return (GlideOptions) super.transform(transformations);
}
@Override
@SafeVarargs
@SuppressWarnings("varargs")
@Deprecated
@NonNull
@CheckResult
public final GlideOptions transforms(@NonNull Transformation<Bitmap>... transformations) {
return (GlideOptions) super.transforms(transformations);
}
@Override
@NonNull
@CheckResult
public GlideOptions optionalTransform(@NonNull Transformation<Bitmap> transformation) {
return (GlideOptions) super.optionalTransform(transformation);
}
@Override
@NonNull
@CheckResult
public <Y> GlideOptions optionalTransform(@NonNull Class<Y> clazz,
@NonNull Transformation<Y> transformation) {
return (GlideOptions) super.optionalTransform(clazz, transformation);
}
@Override
@NonNull
@CheckResult
public <Y> GlideOptions transform(@NonNull Class<Y> clazz,
@NonNull Transformation<Y> transformation) {
return (GlideOptions) super.transform(clazz, transformation);
}
@Override
@NonNull
@CheckResult
public GlideOptions dontTransform() {
return (GlideOptions) super.dontTransform();
}
@Override
@NonNull
@CheckResult
public GlideOptions dontAnimate() {
return (GlideOptions) super.dontAnimate();
}
@Override
@NonNull
@CheckResult
public GlideOptions apply(@NonNull BaseRequestOptions<?> options) {
return (GlideOptions) super.apply(options);
}
@Override
@NonNull
public GlideOptions lock() {
return (GlideOptions) super.lock();
}
@Override
@NonNull
public GlideOptions autoClone() {
return (GlideOptions) super.autoClone();
}
/**
* @see Extension#test(BaseRequestOptions)
*/
@SuppressWarnings("unchecked")
@CheckResult
@NonNull
public GlideOptions test() {
return (GlideOptions) Extension.test(this);
}
/**
* @see Extension#test(BaseRequestOptions)
*/
@CheckResult
public static GlideOptions testSomething() {
return new GlideOptions().test();
}
}
|
GlideOptions
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/AbstractS3ACommitter.java
|
{
"start": 4803,
"end": 5019
}
|
class ____ S3A committers; allows for any commonality
* between different architectures.
*
* Although the committer APIs allow for a committer to be created without
* an output path, this is not supported in this
|
for
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.