language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging-amqp/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/amqp/TestResource.java | {
"start": 159,
"end": 291
} | class ____ {
@Inject
ConsumingBean bean;
@GET
public long getLast() {
return bean.get();
}
}
| TestResource |
java | apache__camel | components/camel-flatpack/src/test/java/org/apache/camel/component/flatpack/FlatpackDelimitedDataFormatTest.java | {
"start": 1392,
"end": 4713
} | class ____ extends CamelTestSupport {
@Test
public void testUnmarshal() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:unmarshal");
// by default we get on big message
mock.expectedMessageCount(1);
mock.message(0).body().isInstanceOf(DataSetList.class);
String data = IOConverter.toString(new File("src/test/data/delim/INVENTORY-CommaDelimitedWithQualifier.txt"), null);
template.sendBody("direct:unmarshal", data);
MockEndpoint.assertIsSatisfied(context);
DataSetList list = mock.getExchanges().get(0).getIn().getBody(DataSetList.class);
assertEquals(4, list.size());
Map<?, ?> row = list.get(0);
assertEquals("SOME VALVE", row.get("ITEM_DESC"));
}
@Test
public void testMarshalWithDefinition() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:marshal");
// by default we get on big message
mock.expectedMessageCount(1);
List<Map<String, Object>> data = new ArrayList<>();
Map<String, Object> row = new LinkedHashMap<>();
row.put("ITEM_DESC", "SOME VALVE");
row.put("IN_STOCK", "2");
row.put("PRICE", "5.00");
row.put("LAST_RECV_DT", "20050101");
data.add(row);
Map<String, Object> row2 = new LinkedHashMap<>();
row2.put("ITEM_DESC", "AN ENGINE");
row2.put("IN_STOCK", "100");
row2.put("PRICE", "1000.00");
row2.put("LAST_RECV_DT", "20040601");
data.add(row2);
template.sendBody("direct:marshal", data);
MockEndpoint.assertIsSatisfied(context);
}
@Test
public void testMarshalNoDefinition() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:marshal2");
// by default we get on big message
mock.expectedMessageCount(1);
List<Map<String, Object>> data = new ArrayList<>();
Map<String, Object> row = new LinkedHashMap<>();
row.put("ITEM_DESC", "SOME VALVE");
row.put("IN_STOCK", "2");
row.put("PRICE", "5.00");
row.put("LAST_RECV_DT", "20050101");
data.add(row);
Map<String, Object> row2 = new LinkedHashMap<>();
row2.put("ITEM_DESC", "AN ENGINE");
row2.put("IN_STOCK", "100");
row2.put("PRICE", "1000.00");
row2.put("LAST_RECV_DT", "20040601");
data.add(row2);
template.sendBody("direct:marshal2", data);
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
FlatpackDataFormat df = new FlatpackDataFormat();
df.setDefinition("INVENTORY-Delimited.pzmap.xml");
from("direct:unmarshal").unmarshal(df).to("mock:unmarshal");
// with the definition
from("direct:marshal").marshal(df).convertBodyTo(String.class).to("mock:marshal");
// without the definition (will auto add column names from the received data)
FlatpackDataFormat df2 = new FlatpackDataFormat();
from("direct:marshal2").marshal(df2).convertBodyTo(String.class).to("mock:marshal2");
}
};
}
}
| FlatpackDelimitedDataFormatTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/UnusedCollectionModifiedInPlaceTest.java | {
"start": 876,
"end": 1342
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(UnusedCollectionModifiedInPlace.class, getClass());
@Test
public void collectionsMethodCoverage() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.util.Collections;
import java.util.ArrayList;
import java.util.List;
| UnusedCollectionModifiedInPlaceTest |
java | netty__netty | transport/src/test/java/io/netty/channel/embedded/EmbeddedChannelTest.java | {
"start": 30082,
"end": 30777
} | class ____ extends ChannelOutboundHandlerAdapter {
static final Integer DISCONNECT = 0;
static final Integer CLOSE = 1;
private final Queue<Integer> queue = new ArrayDeque<Integer>();
@Override
public void disconnect(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
queue.add(DISCONNECT);
promise.setSuccess();
}
@Override
public void close(ChannelHandlerContext ctx, ChannelPromise promise) throws Exception {
queue.add(CLOSE);
promise.setSuccess();
}
Integer pollEvent() {
return queue.poll();
}
}
}
| EventOutboundHandler |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/AzureBlobStorageTestAccount.java | {
"start": 32633,
"end": 32861
} | interface ____ this account is backed by a mock.
*
* @return The mock storage, or null if it's backed by a real account.
*/
public MockStorageInterface getMockStorage() {
return mockStorage;
}
public static | if |
java | quarkusio__quarkus | integration-tests/hibernate-search-standalone-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/standalone/elasticsearch/propertyaccess/PrivateFieldAccessEntity.java | {
"start": 442,
"end": 1245
} | class ____ {
@DocumentId
private Long id;
@FullTextField
private String property;
@FullTextField
private String otherProperty;
public PrivateFieldAccessEntity(Long id, String property, String otherProperty) {
this.id = id;
this.property = property;
this.otherProperty = otherProperty;
}
public long id() {
return id;
}
public void id(long id) {
this.id = id;
}
public String property() {
return property;
}
public void property(String property) {
this.property = property;
}
public String otherProperty() {
return otherProperty;
}
public void otherProperty(String otherProperty) {
this.otherProperty = otherProperty;
}
}
| PrivateFieldAccessEntity |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/net/ClientOptionsBase.java | {
"start": 957,
"end": 6194
} | class ____ extends TCPSSLOptions {
/**
* The default value of connect timeout = 60000 (ms)
*/
public static final int DEFAULT_CONNECT_TIMEOUT = 60000;
/**
* The default value of the client metrics = "":
*/
public static final String DEFAULT_METRICS_NAME = "";
private int connectTimeout;
private String metricsName;
private ProxyOptions proxyOptions;
private String localAddress;
private List<String> nonProxyHosts;
/**
* Default constructor
*/
public ClientOptionsBase() {
super();
init();
}
/**
* Copy constructor
*
* @param other the options to copy
*/
public ClientOptionsBase(ClientOptionsBase other) {
super(other);
this.connectTimeout = other.getConnectTimeout();
this.metricsName = other.metricsName;
this.proxyOptions = other.proxyOptions != null ? new ProxyOptions(other.proxyOptions) : null;
this.localAddress = other.localAddress;
this.nonProxyHosts = other.nonProxyHosts != null ? new ArrayList<>(other.nonProxyHosts) : null;
}
/**
* Create options from some JSON
*
* @param json the JSON
*/
public ClientOptionsBase(JsonObject json) {
super(json);
init();
ClientOptionsBaseConverter.fromJson(json, this);
}
/**
* Convert to JSON
*
* @return the JSON
*/
public JsonObject toJson() {
JsonObject json = super.toJson();
ClientOptionsBaseConverter.toJson(this, json);
return json;
}
private void init() {
this.connectTimeout = DEFAULT_CONNECT_TIMEOUT;
this.metricsName = DEFAULT_METRICS_NAME;
this.proxyOptions = null;
this.localAddress = null;
}
@GenIgnore
@Override
public ClientSSLOptions getSslOptions() {
return (ClientSSLOptions) super.getSslOptions();
}
@Override
protected ClientSSLOptions getOrCreateSSLOptions() {
return (ClientSSLOptions) super.getOrCreateSSLOptions();
}
@Override
protected ClientSSLOptions createSSLOptions() {
return new ClientSSLOptions();
}
/**
*
* @return true if all server certificates should be trusted
*/
public boolean isTrustAll() {
ClientSSLOptions o = getSslOptions();
return o != null ? o.isTrustAll() : ClientSSLOptions.DEFAULT_TRUST_ALL;
}
/**
* Set whether all server certificates should be trusted
*
* @param trustAll true if all should be trusted
* @return a reference to this, so the API can be used fluently
*/
public ClientOptionsBase setTrustAll(boolean trustAll) {
getOrCreateSSLOptions().setTrustAll(trustAll);
return this;
}
/**
* @return the value of connect timeout
*/
public int getConnectTimeout() {
return connectTimeout;
}
/**
* Set the connect timeout
*
* @param connectTimeout connect timeout, in ms
* @return a reference to this, so the API can be used fluently
*/
public ClientOptionsBase setConnectTimeout(int connectTimeout) {
if (connectTimeout < 0) {
throw new IllegalArgumentException("connectTimeout must be >= 0");
}
this.connectTimeout = connectTimeout;
return this;
}
/**
* @return the metrics name identifying the reported metrics.
*/
public String getMetricsName() {
return metricsName;
}
/**
* Set the metrics name identifying the reported metrics, useful for grouping metrics
* with the same name.
*
* @param metricsName the metrics name
* @return a reference to this, so the API can be used fluently
*/
public ClientOptionsBase setMetricsName(String metricsName) {
this.metricsName = metricsName;
return this;
}
/**
* Set proxy options for connections via CONNECT proxy (e.g. Squid) or a SOCKS proxy.
*
* @param proxyOptions proxy options object
* @return a reference to this, so the API can be used fluently
*/
public ClientOptionsBase setProxyOptions(ProxyOptions proxyOptions) {
this.proxyOptions = proxyOptions;
return this;
}
/**
* Get proxy options for connections
*
* @return proxy options
*/
public ProxyOptions getProxyOptions() {
return proxyOptions;
}
/**
* @return the list of non proxies hosts
*/
public List<String> getNonProxyHosts() {
return nonProxyHosts;
}
/**
* Set a list of remote hosts that are not proxied when the client is configured to use a proxy. This
* list serves the same purpose than the JVM {@code nonProxyHosts} configuration.
*
* <p> Entries can use the <i>*</i> wildcard character for pattern matching, e.g <i>*.example.com</i> matches
* <i>www.example.com</i>.
*
* @param nonProxyHosts the list of non proxies hosts
* @return a reference to this, so the API can be used fluently
*/
public ClientOptionsBase setNonProxyHosts(List<String> nonProxyHosts) {
this.nonProxyHosts = nonProxyHosts;
return this;
}
/**
* Add a {@code host} to the {@link #getNonProxyHosts()} list.
*
* @param host the added host
* @return a reference to this, so the API can be used fluently
*/
public ClientOptionsBase addNonProxyHost(String host) {
if (nonProxyHosts == null) {
nonProxyHosts = new ArrayList<>();
}
nonProxyHosts.add(host);
return this;
}
/**
* @return the local | ClientOptionsBase |
java | greenrobot__greendao | DaoCore/src/main/java/org/greenrobot/greendao/query/WhereCondition.java | {
"start": 5151,
"end": 5753
} | class ____ extends AbstractCondition {
protected final String string;
public StringCondition(String string) {
this.string = string;
}
public StringCondition(String string, Object value) {
super(value);
this.string = string;
}
public StringCondition(String string, Object... values) {
super(values);
this.string = string;
}
@Override
public void appendTo(StringBuilder builder, String tableAlias) {
builder.append(string);
}
}
}
| StringCondition |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/master/IsAcknowledgedSupplier.java | {
"start": 1358,
"end": 1425
} | interface ____ {
boolean isAcknowledged();
}
| IsAcknowledgedSupplier |
java | netty__netty | transport/src/main/java/io/netty/channel/DefaultMessageSizeEstimator.java | {
"start": 1085,
"end": 2306
} | class ____ implements Handle {
private final int unknownSize;
private HandleImpl(int unknownSize) {
this.unknownSize = unknownSize;
}
@Override
public int size(Object msg) {
if (msg instanceof ByteBuf) {
return ((ByteBuf) msg).readableBytes();
}
if (msg instanceof ByteBufHolder) {
return ((ByteBufHolder) msg).content().readableBytes();
}
if (msg instanceof FileRegion) {
return 0;
}
return unknownSize;
}
}
/**
* Return the default implementation which returns {@code 8} for unknown messages.
*/
public static final MessageSizeEstimator DEFAULT = new DefaultMessageSizeEstimator(8);
private final Handle handle;
/**
* Create a new instance
*
* @param unknownSize The size which is returned for unknown messages.
*/
public DefaultMessageSizeEstimator(int unknownSize) {
checkPositiveOrZero(unknownSize, "unknownSize");
handle = new HandleImpl(unknownSize);
}
@Override
public Handle newHandle() {
return handle;
}
}
| HandleImpl |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/ApiKeyBoolQueryBuilderTests.java | {
"start": 3314,
"end": 57884
} | class ____ extends ESTestCase {
public void testBuildFromSimpleQuery() {
{
QueryBuilder qb = randomSimpleQuery("name");
List<String> queryFields = new ArrayList<>();
ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(qb, queryFields::add, null);
assertQueryFields(queryFields, qb, null);
assertCommonFilterQueries(apiKeyQb, null);
List<QueryBuilder> mustQueries = apiKeyQb.must();
assertThat(mustQueries, hasSize(1));
assertThat(mustQueries.get(0), equalTo(qb));
assertThat(apiKeyQb.should(), emptyIterable());
assertThat(apiKeyQb.mustNot(), emptyIterable());
}
{
Authentication authentication = AuthenticationTests.randomAuthentication(null, null);
QueryBuilder qb = randomSimpleQuery("name");
List<String> queryFields = new ArrayList<>();
ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(qb, queryFields::add, authentication);
assertQueryFields(queryFields, qb, authentication);
assertCommonFilterQueries(apiKeyQb, authentication);
List<QueryBuilder> mustQueries = apiKeyQb.must();
assertThat(mustQueries, hasSize(1));
assertThat(mustQueries.get(0), equalTo(qb));
assertThat(apiKeyQb.should(), emptyIterable());
assertThat(apiKeyQb.mustNot(), emptyIterable());
}
{
String apiKeyId = randomUUID();
Authentication authentication = AuthenticationTests.randomApiKeyAuthentication(AuthenticationTests.randomUser(), apiKeyId);
QueryBuilder qb = randomSimpleQuery("name");
List<String> queryFields = new ArrayList<>();
ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(qb, queryFields::add, authentication);
assertQueryFields(queryFields, qb, authentication);
assertCommonFilterQueries(apiKeyQb, authentication);
List<QueryBuilder> mustQueries = apiKeyQb.must();
assertThat(mustQueries, hasSize(1));
assertThat(mustQueries.get(0), equalTo(qb));
assertThat(apiKeyQb.should(), emptyIterable());
assertThat(apiKeyQb.mustNot(), emptyIterable());
}
}
public void testPrefixQueryBuilderPropertiesArePreserved() {
Authentication authentication = randomFrom(
AuthenticationTests.randomApiKeyAuthentication(AuthenticationTests.randomUser(), randomUUID()),
AuthenticationTests.randomAuthentication(null, null),
null
);
String fieldName = randomValidFieldName();
PrefixQueryBuilder prefixQueryBuilder = QueryBuilders.prefixQuery(fieldName, randomAlphaOfLengthBetween(0, 4));
if (randomBoolean()) {
prefixQueryBuilder.boost(Math.abs(randomFloat()));
}
if (randomBoolean()) {
prefixQueryBuilder.queryName(randomAlphaOfLengthBetween(0, 4));
}
if (randomBoolean()) {
prefixQueryBuilder.caseInsensitive(randomBoolean());
}
if (randomBoolean()) {
prefixQueryBuilder.rewrite(randomAlphaOfLengthBetween(0, 4));
}
List<String> queryFields = new ArrayList<>();
ApiKeyBoolQueryBuilder apiKeyMatchQueryBuilder = ApiKeyBoolQueryBuilder.build(prefixQueryBuilder, queryFields::add, authentication);
assertThat(queryFields, hasItem(API_KEY_FIELD_NAME_TRANSLATORS.translate(fieldName)));
List<QueryBuilder> mustQueries = apiKeyMatchQueryBuilder.must();
assertThat(mustQueries, hasSize(1));
assertThat(mustQueries.get(0), instanceOf(PrefixQueryBuilder.class));
PrefixQueryBuilder prefixQueryBuilder2 = (PrefixQueryBuilder) mustQueries.get(0);
assertThat(prefixQueryBuilder2.fieldName(), is(API_KEY_FIELD_NAME_TRANSLATORS.translate(prefixQueryBuilder.fieldName())));
assertThat(prefixQueryBuilder2.value(), is(prefixQueryBuilder.value()));
assertThat(prefixQueryBuilder2.boost(), is(prefixQueryBuilder.boost()));
assertThat(prefixQueryBuilder2.queryName(), is(prefixQueryBuilder.queryName()));
assertThat(prefixQueryBuilder2.caseInsensitive(), is(prefixQueryBuilder.caseInsensitive()));
assertThat(prefixQueryBuilder2.rewrite(), is(prefixQueryBuilder.rewrite()));
}
public void testSimpleQueryBuilderWithAllFields() {
SimpleQueryStringBuilder simpleQueryStringBuilder = QueryBuilders.simpleQueryStringQuery(randomAlphaOfLength(4));
if (randomBoolean()) {
if (randomBoolean()) {
simpleQueryStringBuilder.field("*");
} else {
simpleQueryStringBuilder.field("*", Math.abs(randomFloat()));
}
}
if (randomBoolean()) {
simpleQueryStringBuilder.lenient(randomBoolean());
}
List<String> queryFields = new ArrayList<>();
ApiKeyBoolQueryBuilder apiKeyMatchQueryBuilder = ApiKeyBoolQueryBuilder.build(simpleQueryStringBuilder, queryFields::add, null);
List<QueryBuilder> mustQueries = apiKeyMatchQueryBuilder.must();
assertThat(mustQueries, hasSize(1));
assertThat(mustQueries.get(0), instanceOf(SimpleQueryStringBuilder.class));
SimpleQueryStringBuilder simpleQueryStringBuilder2 = (SimpleQueryStringBuilder) mustQueries.get(0);
assertThat(
simpleQueryStringBuilder2.fields().keySet(),
containsInAnyOrder(
"creation_time",
"invalidation_time",
"expiration_time",
"api_key_invalidated",
"creator.principal",
"creator.realm",
"metadata_flattened",
"name",
"runtime_key_type"
)
);
assertThat(simpleQueryStringBuilder2.lenient(), is(true));
assertThat(
queryFields,
containsInAnyOrder(
"doc_type",
"creation_time",
"invalidation_time",
"expiration_time",
"api_key_invalidated",
"creator.principal",
"creator.realm",
"metadata_flattened",
"name",
"runtime_key_type"
)
);
}
public void testSimpleQueryBuilderPropertiesArePreserved() {
SimpleQueryStringBuilder simpleQueryStringBuilder = QueryBuilders.simpleQueryStringQuery(randomAlphaOfLength(4));
if (randomBoolean()) {
simpleQueryStringBuilder.boost(Math.abs(randomFloat()));
}
if (randomBoolean()) {
simpleQueryStringBuilder.queryName(randomAlphaOfLengthBetween(0, 4));
}
if (randomBoolean()) {
simpleQueryStringBuilder.analyzer(randomAlphaOfLength(4));
}
if (randomBoolean()) {
simpleQueryStringBuilder.defaultOperator(randomFrom(Operator.OR, Operator.AND));
}
if (randomBoolean()) {
simpleQueryStringBuilder.minimumShouldMatch(randomAlphaOfLength(4));
}
if (randomBoolean()) {
simpleQueryStringBuilder.analyzeWildcard(randomBoolean());
}
if (randomBoolean()) {
simpleQueryStringBuilder.autoGenerateSynonymsPhraseQuery(randomBoolean());
}
if (randomBoolean()) {
simpleQueryStringBuilder.lenient(randomBoolean());
}
if (randomBoolean()) {
simpleQueryStringBuilder.type(randomFrom(MultiMatchQueryBuilder.Type.values()));
}
if (randomBoolean()) {
simpleQueryStringBuilder.quoteFieldSuffix(randomAlphaOfLength(4));
}
if (randomBoolean()) {
simpleQueryStringBuilder.fuzzyTranspositions(randomBoolean());
}
if (randomBoolean()) {
simpleQueryStringBuilder.fuzzyMaxExpansions(randomIntBetween(1, 10));
}
if (randomBoolean()) {
simpleQueryStringBuilder.fuzzyPrefixLength(randomIntBetween(1, 10));
}
if (randomBoolean()) {
simpleQueryStringBuilder.flags(
randomSubsetOf(randomIntBetween(0, 3), SimpleQueryStringFlag.values()).toArray(new SimpleQueryStringFlag[0])
);
}
// at least one field for this test
int nFields = randomIntBetween(1, 4);
for (int i = 0; i < nFields; i++) {
simpleQueryStringBuilder.field(randomValidFieldName(), Math.abs(randomFloat()));
}
List<String> queryFields = new ArrayList<>();
ApiKeyBoolQueryBuilder apiKeyMatchQueryBuilder = ApiKeyBoolQueryBuilder.build(
simpleQueryStringBuilder,
queryFields::add,
randomFrom(
AuthenticationTests.randomApiKeyAuthentication(AuthenticationTests.randomUser(), randomUUID()),
AuthenticationTests.randomAuthentication(null, null),
null
)
);
List<QueryBuilder> mustQueries = apiKeyMatchQueryBuilder.must();
assertThat(mustQueries, hasSize(1));
assertThat(mustQueries.get(0), instanceOf(SimpleQueryStringBuilder.class));
SimpleQueryStringBuilder simpleQueryStringBuilder2 = (SimpleQueryStringBuilder) mustQueries.get(0);
assertThat(simpleQueryStringBuilder2.value(), is(simpleQueryStringBuilder.value()));
assertThat(simpleQueryStringBuilder2.boost(), is(simpleQueryStringBuilder.boost()));
assertThat(simpleQueryStringBuilder2.queryName(), is(simpleQueryStringBuilder.queryName()));
assertThat(simpleQueryStringBuilder2.fields().size(), is(simpleQueryStringBuilder.fields().size()));
for (Map.Entry<String, Float> fieldEntry : simpleQueryStringBuilder.fields().entrySet()) {
assertThat(
simpleQueryStringBuilder2.fields().get(API_KEY_FIELD_NAME_TRANSLATORS.translate(fieldEntry.getKey())),
is(fieldEntry.getValue())
);
}
for (String field : simpleQueryStringBuilder2.fields().keySet()) {
assertThat(queryFields, hasItem(field));
}
assertThat(simpleQueryStringBuilder2.analyzer(), is(simpleQueryStringBuilder.analyzer()));
assertThat(simpleQueryStringBuilder2.defaultOperator(), is(simpleQueryStringBuilder.defaultOperator()));
assertThat(simpleQueryStringBuilder2.minimumShouldMatch(), is(simpleQueryStringBuilder.minimumShouldMatch()));
assertThat(simpleQueryStringBuilder2.analyzeWildcard(), is(simpleQueryStringBuilder.analyzeWildcard()));
assertThat(
simpleQueryStringBuilder2.autoGenerateSynonymsPhraseQuery(),
is(simpleQueryStringBuilder.autoGenerateSynonymsPhraseQuery())
);
assertThat(simpleQueryStringBuilder2.lenient(), is(simpleQueryStringBuilder.lenient()));
assertThat(simpleQueryStringBuilder2.type(), is(simpleQueryStringBuilder.type()));
assertThat(simpleQueryStringBuilder2.quoteFieldSuffix(), is(simpleQueryStringBuilder.quoteFieldSuffix()));
assertThat(simpleQueryStringBuilder2.fuzzyTranspositions(), is(simpleQueryStringBuilder.fuzzyTranspositions()));
assertThat(simpleQueryStringBuilder2.fuzzyMaxExpansions(), is(simpleQueryStringBuilder.fuzzyMaxExpansions()));
assertThat(simpleQueryStringBuilder2.fuzzyPrefixLength(), is(simpleQueryStringBuilder.fuzzyPrefixLength()));
assertThat(simpleQueryStringBuilder2.flags(), is(simpleQueryStringBuilder.flags()));
}
public void testMatchQueryBuilderPropertiesArePreserved() {
// the match query has many properties, that all must be preserved after limiting for API Key docs only
Authentication authentication = randomFrom(
AuthenticationTests.randomApiKeyAuthentication(AuthenticationTests.randomUser(), randomUUID()),
AuthenticationTests.randomAuthentication(null, null),
null
);
String fieldName = randomValidFieldName();
MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(fieldName, new Object());
if (randomBoolean()) {
matchQueryBuilder.boost(Math.abs(randomFloat()));
}
if (randomBoolean()) {
matchQueryBuilder.queryName(randomAlphaOfLengthBetween(0, 4));
}
if (randomBoolean()) {
matchQueryBuilder.operator(randomFrom(Operator.OR, Operator.AND));
}
if (randomBoolean()) {
matchQueryBuilder.analyzer(randomAlphaOfLength(4));
}
if (randomBoolean()) {
matchQueryBuilder.fuzziness(randomFrom(Fuzziness.ZERO, Fuzziness.ONE, Fuzziness.TWO, Fuzziness.AUTO));
}
if (randomBoolean()) {
matchQueryBuilder.minimumShouldMatch(randomAlphaOfLength(4));
}
if (randomBoolean()) {
matchQueryBuilder.fuzzyRewrite(randomAlphaOfLength(4));
}
if (randomBoolean()) {
matchQueryBuilder.zeroTermsQuery(randomFrom(ZeroTermsQueryOption.NONE, ZeroTermsQueryOption.ALL, ZeroTermsQueryOption.NULL));
}
if (randomBoolean()) {
matchQueryBuilder.prefixLength(randomNonNegativeInt());
}
if (randomBoolean()) {
matchQueryBuilder.maxExpansions(randomIntBetween(1, 100));
}
if (randomBoolean()) {
matchQueryBuilder.fuzzyTranspositions(randomBoolean());
}
if (randomBoolean()) {
matchQueryBuilder.lenient(randomBoolean());
}
if (randomBoolean()) {
matchQueryBuilder.autoGenerateSynonymsPhraseQuery(randomBoolean());
}
List<String> queryFields = new ArrayList<>();
ApiKeyBoolQueryBuilder apiKeyMatchQueryBuilder = ApiKeyBoolQueryBuilder.build(matchQueryBuilder, queryFields::add, authentication);
assertThat(queryFields, hasItem(API_KEY_FIELD_NAME_TRANSLATORS.translate(fieldName)));
List<QueryBuilder> mustQueries = apiKeyMatchQueryBuilder.must();
assertThat(mustQueries, hasSize(1));
assertThat(mustQueries.get(0), instanceOf(MatchQueryBuilder.class));
MatchQueryBuilder matchQueryBuilder2 = (MatchQueryBuilder) mustQueries.get(0);
assertThat(matchQueryBuilder2.fieldName(), is(API_KEY_FIELD_NAME_TRANSLATORS.translate(matchQueryBuilder.fieldName())));
assertThat(matchQueryBuilder2.value(), is(matchQueryBuilder.value()));
assertThat(matchQueryBuilder2.operator(), is(matchQueryBuilder.operator()));
assertThat(matchQueryBuilder2.analyzer(), is(matchQueryBuilder.analyzer()));
assertThat(matchQueryBuilder2.fuzziness(), is(matchQueryBuilder.fuzziness()));
assertThat(matchQueryBuilder2.minimumShouldMatch(), is(matchQueryBuilder.minimumShouldMatch()));
assertThat(matchQueryBuilder2.fuzzyRewrite(), is(matchQueryBuilder.fuzzyRewrite()));
assertThat(matchQueryBuilder2.zeroTermsQuery(), is(matchQueryBuilder.zeroTermsQuery()));
assertThat(matchQueryBuilder2.prefixLength(), is(matchQueryBuilder.prefixLength()));
assertThat(matchQueryBuilder2.maxExpansions(), is(matchQueryBuilder.maxExpansions()));
assertThat(matchQueryBuilder2.fuzzyTranspositions(), is(matchQueryBuilder.fuzzyTranspositions()));
assertThat(matchQueryBuilder2.lenient(), is(matchQueryBuilder.lenient()));
assertThat(matchQueryBuilder2.autoGenerateSynonymsPhraseQuery(), is(matchQueryBuilder.autoGenerateSynonymsPhraseQuery()));
assertThat(matchQueryBuilder2.boost(), is(matchQueryBuilder.boost()));
assertThat(matchQueryBuilder2.queryName(), is(matchQueryBuilder.queryName()));
}
public void testQueryForDomainAuthentication() {
final Authentication authentication = AuthenticationTests.randomAuthentication(null, AuthenticationTests.randomRealmRef(true));
final QueryBuilder query = randomSimpleQuery("name");
final List<String> queryFields = new ArrayList<>();
final ApiKeyBoolQueryBuilder apiKeysQuery = ApiKeyBoolQueryBuilder.build(query, queryFields::add, authentication);
assertQueryFields(queryFields, query, authentication);
assertThat(apiKeysQuery.filter().get(0), is(QueryBuilders.termQuery("doc_type", "api_key")));
assertThat(
apiKeysQuery.filter().get(1),
is(QueryBuilders.termQuery("creator.principal", authentication.getEffectiveSubject().getUser().principal()))
);
if (authentication.getEffectiveSubject().getRealm().getDomain().realms().size() == 1) {
assertThat(
apiKeysQuery.filter().get(2),
is(
QueryBuilders.termQuery(
"creator.realm",
authentication.getEffectiveSubject().getRealm().getDomain().realms().stream().findFirst().get().getName()
)
)
);
} else {
assertThat(apiKeysQuery.filter().get(2), instanceOf(BoolQueryBuilder.class));
assertThat(((BoolQueryBuilder) apiKeysQuery.filter().get(2)).must().size(), is(0));
assertThat(((BoolQueryBuilder) apiKeysQuery.filter().get(2)).mustNot().size(), is(0));
assertThat(((BoolQueryBuilder) apiKeysQuery.filter().get(2)).filter().size(), is(0));
assertThat(((BoolQueryBuilder) apiKeysQuery.filter().get(2)).minimumShouldMatch(), is("1"));
for (RealmConfig.RealmIdentifier realmIdentifier : authentication.getEffectiveSubject().getRealm().getDomain().realms()) {
assertThat(
((BoolQueryBuilder) apiKeysQuery.filter().get(2)).should(),
hasItem(QueryBuilders.termQuery("creator.realm", realmIdentifier.getName()))
);
}
}
}
public void testBuildFromBoolQuery() {
final Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null;
final List<String> queryFields = new ArrayList<>();
final BoolQueryBuilder bq1 = QueryBuilders.boolQuery();
boolean accessesNameField = false;
if (randomBoolean()) {
bq1.must(QueryBuilders.prefixQuery("name", "prod-"));
accessesNameField = true;
}
if (randomBoolean()) {
bq1.should(QueryBuilders.wildcardQuery("name", "*-east-*"));
accessesNameField = true;
}
if (randomBoolean()) {
bq1.filter(
QueryBuilders.termsQuery("name", randomArray(3, 8, String[]::new, () -> "prod-" + randomInt() + "-east-" + randomInt()))
);
accessesNameField = true;
}
if (randomBoolean()) {
bq1.mustNot(QueryBuilders.idsQuery().addIds(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(22))));
}
if (randomBoolean()) {
bq1.minimumShouldMatch(randomIntBetween(1, 2));
}
final ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(bq1, queryFields::add, authentication);
assertCommonFilterQueries(apiKeyQb1, authentication);
assertThat(queryFields, hasItem("doc_type"));
if (accessesNameField) {
assertThat(queryFields, hasItem("name"));
}
if (authentication != null && authentication.isApiKey() == false) {
assertThat(queryFields, hasItem("creator.principal"));
assertThat(queryFields, hasItem("creator.realm"));
}
assertThat(apiKeyQb1.must(), hasSize(1));
assertThat(apiKeyQb1.should(), empty());
assertThat(apiKeyQb1.mustNot(), empty());
assertThat(apiKeyQb1.filter(), hasItem(QueryBuilders.termQuery("doc_type", "api_key")));
assertThat(apiKeyQb1.must().get(0).getClass(), is(BoolQueryBuilder.class));
final BoolQueryBuilder processed = (BoolQueryBuilder) apiKeyQb1.must().get(0);
assertThat(processed.must(), equalTo(bq1.must()));
assertThat(processed.should(), equalTo(bq1.should()));
assertThat(processed.mustNot(), equalTo(bq1.mustNot()));
assertThat(processed.minimumShouldMatch(), equalTo(bq1.minimumShouldMatch()));
assertThat(processed.filter(), equalTo(bq1.filter()));
}
public void testFieldNameTranslation() {
final Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null;
// metadata
{
List<String> queryFields = new ArrayList<>();
final String metadataKey = randomAlphaOfLengthBetween(3, 8);
final TermQueryBuilder q1 = QueryBuilders.termQuery("metadata." + metadataKey, randomAlphaOfLengthBetween(3, 8));
ApiKeyBoolQueryBuilder apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q1, queryFields::add, authentication);
assertThat(queryFields, hasItem("doc_type"));
assertThat(queryFields, hasItem("metadata_flattened." + metadataKey));
if (authentication != null && authentication.isApiKey() == false) {
assertThat(queryFields, hasItem("creator.principal"));
assertThat(queryFields, hasItem("creator.realm"));
}
assertCommonFilterQueries(apiKeyQb1, authentication);
assertThat(apiKeyQb1.must().get(0), equalTo(QueryBuilders.termQuery("metadata_flattened." + metadataKey, q1.value())));
queryFields = new ArrayList<>();
String queryStringQuery = randomAlphaOfLength(8);
SimpleQueryStringBuilder q2 = QueryBuilders.simpleQueryStringQuery(queryStringQuery).field("metadata");
apiKeyQb1 = ApiKeyBoolQueryBuilder.build(q2, queryFields::add, authentication);
assertThat(queryFields, hasItem("doc_type"));
assertThat(queryFields, hasItem("metadata_flattened"));
if (authentication != null && authentication.isApiKey() == false) {
assertThat(queryFields, hasItem("creator.principal"));
assertThat(queryFields, hasItem("creator.realm"));
}
assertCommonFilterQueries(apiKeyQb1, authentication);
assertThat(
apiKeyQb1.must().get(0),
equalTo(QueryBuilders.simpleQueryStringQuery(queryStringQuery).field("metadata_flattened"))
);
}
// username
{
final List<String> queryFields = new ArrayList<>();
final PrefixQueryBuilder q2 = QueryBuilders.prefixQuery("username", randomAlphaOfLength(3));
final ApiKeyBoolQueryBuilder apiKeyQb2 = ApiKeyBoolQueryBuilder.build(q2, queryFields::add, authentication);
assertThat(queryFields, hasItem("doc_type"));
assertThat(queryFields, hasItem("creator.principal"));
if (authentication != null && authentication.isApiKey() == false) {
assertThat(queryFields, hasItem("creator.realm"));
}
assertCommonFilterQueries(apiKeyQb2, authentication);
assertThat(apiKeyQb2.must().get(0), equalTo(QueryBuilders.prefixQuery("creator.principal", q2.value())));
}
// realm name
{
final List<String> queryFields = new ArrayList<>();
final WildcardQueryBuilder q3 = QueryBuilders.wildcardQuery("realm_name", "*" + randomAlphaOfLength(3));
final ApiKeyBoolQueryBuilder apiKeyQb3 = ApiKeyBoolQueryBuilder.build(q3, queryFields::add, authentication);
assertThat(queryFields, hasItem("doc_type"));
assertThat(queryFields, hasItem("creator.realm"));
if (authentication != null && authentication.isApiKey() == false) {
assertThat(queryFields, hasItem("creator.principal"));
}
assertCommonFilterQueries(apiKeyQb3, authentication);
assertThat(apiKeyQb3.must().get(0), equalTo(QueryBuilders.wildcardQuery("creator.realm", q3.value())));
}
// creation_time
{
final List<String> queryFields = new ArrayList<>();
final TermQueryBuilder q4 = QueryBuilders.termQuery("creation", randomLongBetween(0, Long.MAX_VALUE));
final ApiKeyBoolQueryBuilder apiKeyQb4 = ApiKeyBoolQueryBuilder.build(q4, queryFields::add, authentication);
assertThat(queryFields, hasItem("doc_type"));
assertThat(queryFields, hasItem("creation_time"));
if (authentication != null && authentication.isApiKey() == false) {
assertThat(queryFields, hasItem("creator.principal"));
assertThat(queryFields, hasItem("creator.realm"));
}
assertCommonFilterQueries(apiKeyQb4, authentication);
assertThat(apiKeyQb4.must().get(0), equalTo(QueryBuilders.termQuery("creation_time", q4.value())));
}
// expiration_time
{
final List<String> queryFields = new ArrayList<>();
final TermQueryBuilder q5 = QueryBuilders.termQuery("expiration", randomLongBetween(0, Long.MAX_VALUE));
final ApiKeyBoolQueryBuilder apiKeyQb5 = ApiKeyBoolQueryBuilder.build(q5, queryFields::add, authentication);
assertThat(queryFields, hasItem("doc_type"));
assertThat(queryFields, hasItem("expiration_time"));
if (authentication != null && authentication.isApiKey() == false) {
assertThat(queryFields, hasItem("creator.principal"));
assertThat(queryFields, hasItem("creator.realm"));
}
assertCommonFilterQueries(apiKeyQb5, authentication);
assertThat(apiKeyQb5.must().get(0), equalTo(QueryBuilders.termQuery("expiration_time", q5.value())));
}
// type
{
final List<String> queryFields = new ArrayList<>();
float fieldBoost = randomFloat();
final SimpleQueryStringBuilder q5 = QueryBuilders.simpleQueryStringQuery("q=42").field("type", fieldBoost);
final ApiKeyBoolQueryBuilder apiKeyQb5 = ApiKeyBoolQueryBuilder.build(q5, queryFields::add, authentication);
assertThat(queryFields, hasItem("doc_type"));
assertThat(queryFields, hasItem("runtime_key_type")); // "type" translation
if (authentication != null && authentication.isApiKey() == false) {
assertThat(queryFields, hasItem("creator.principal"));
assertThat(queryFields, hasItem("creator.realm"));
}
assertCommonFilterQueries(apiKeyQb5, authentication);
assertThat(
apiKeyQb5.must().get(0),
equalTo(QueryBuilders.simpleQueryStringQuery("q=42").field("runtime_key_type", fieldBoost))
);
}
// test them all together
{
final List<String> queryFields = new ArrayList<>();
final SimpleQueryStringBuilder q6 = QueryBuilders.simpleQueryStringQuery("+OK -NOK maybe~3")
.field("username")
.field("realm_name")
.field("name")
.field("type")
.field("creation")
.field("expiration")
.field("invalidated")
.field("invalidation")
.field("metadata")
.field("metadata.inner");
final ApiKeyBoolQueryBuilder apiKeyQb6 = ApiKeyBoolQueryBuilder.build(q6, queryFields::add, authentication);
assertThat(queryFields, hasItem("doc_type"));
assertThat(queryFields, hasItem("creator.principal"));
assertThat(queryFields, hasItem("creator.realm"));
assertThat(queryFields, hasItem("name"));
assertThat(queryFields, hasItem("runtime_key_type")); // "type" translation
assertThat(queryFields, hasItem("creation_time"));
assertThat(queryFields, hasItem("expiration_time"));
assertThat(queryFields, hasItem("api_key_invalidated"));
assertThat(queryFields, hasItem("invalidation_time"));
assertThat(queryFields, hasItem("metadata_flattened"));
assertThat(queryFields, hasItem("metadata_flattened.inner"));
assertCommonFilterQueries(apiKeyQb6, authentication);
assertThat(
apiKeyQb6.must().get(0),
equalTo(
QueryBuilders.simpleQueryStringQuery("+OK -NOK maybe~3")
.field("creator.principal")
.field("creator.realm")
.field("name")
.field("runtime_key_type")
.field("creation_time")
.field("expiration_time")
.field("api_key_invalidated")
.field("invalidation_time")
.field("metadata_flattened")
.field("metadata_flattened.inner")
)
);
}
}
/**
 * Field names outside the documented allow-list — whether completely unknown or
 * internal index fields such as {@code api_key_hash} — must be rejected, both for a
 * top-level query and when the query is nested inside any bool clause.
 */
public void testAllowListOfFieldNames() {
        final Authentication authn = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null;
        // a random name that is guaranteed not to be on the allow-list
        final String unsupportedRandomName = randomValueOtherThanMany(
            API_KEY_FIELD_NAME_TRANSLATORS::isQueryFieldSupported,
            () -> randomAlphaOfLengthBetween(3, 20)
        );
        // either the random unknown name or one of the known-internal fields
        final String fieldName = randomFrom(
            unsupportedRandomName,
            "api_key_hash",
            "api_key_invalidated",
            "doc_type",
            "role_descriptors",
            "limited_by_role_descriptors",
            "version",
            "creator",
            "creator.metadata"
        );
        // top-level query on the disallowed field
        {
            // ids/match_all are field-less and simple_query_string ignores bad fields,
            // so pick any of the other supported query shapes
            final QueryBuilder query = randomValueOtherThanMany(
                q -> q.getClass() == IdsQueryBuilder.class
                    || q.getClass() == MatchAllQueryBuilder.class
                    || q.getClass() == SimpleQueryStringBuilder.class,
                () -> randomSimpleQuery(fieldName)
            );
            final IllegalArgumentException e = expectThrows(
                IllegalArgumentException.class,
                () -> ApiKeyBoolQueryBuilder.build(query, ignored -> {}, authn)
            );
            assertThat(e.getMessage(), containsString("Field [" + fieldName + "] is not allowed for querying"));
        }
        // the same query wrapped in a randomly chosen bool clause
        {
            final QueryBuilder inner = randomValueOtherThanMany(
                q -> q.getClass() == IdsQueryBuilder.class
                    || q.getClass() == MatchAllQueryBuilder.class
                    || q.getClass() == SimpleQueryStringBuilder.class,
                () -> randomSimpleQuery(fieldName)
            );
            final BoolQueryBuilder outer = QueryBuilders.boolQuery();
            switch (randomIntBetween(0, 3)) {
                case 0 -> outer.filter(inner);
                case 1 -> outer.must(inner);
                case 2 -> outer.should(inner);
                default -> outer.mustNot(inner);
            }
            final IllegalArgumentException e = expectThrows(
                IllegalArgumentException.class,
                () -> ApiKeyBoolQueryBuilder.build(outer, ignored -> {}, authn)
            );
            assertThat(e.getMessage(), containsString("Field [" + fieldName + "] is not allowed for querying"));
        }
    }
/**
 * A terms query driven by a terms lookup (fetching values from another document)
 * must be rejected for API key queries.
 */
public void testTermsLookupIsNotAllowed() {
        final Authentication authn = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null;
        final TermsQueryBuilder lookupQuery = QueryBuilders.termsLookupQuery("name", new TermsLookup("lookup", "1", "names"));
        final IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> ApiKeyBoolQueryBuilder.build(lookupQuery, ignored -> {}, authn)
        );
        assertThat(e.getMessage(), containsString("terms query with terms lookup is not currently supported in this context"));
    }
/**
 * A range query carrying a shape relation (e.g. {@code contains}) must be rejected
 * for API key queries.
 */
public void testRangeQueryWithRelationIsNotAllowed() {
        final Authentication authn = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null;
        final RangeQueryBuilder rangeWithRelation = QueryBuilders.rangeQuery("creation").relation("contains");
        final IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> ApiKeyBoolQueryBuilder.build(rangeWithRelation, ignored -> {}, authn)
        );
        assertThat(e.getMessage(), containsString("range query with relation is not currently supported in this context"));
    }
/**
 * Query types outside the supported subset must be rejected with an
 * {@link IllegalArgumentException} naming the offending type, both when the query is
 * at the top level and when it is nested inside any clause of a bool query.
 */
public void testDisallowedQueryTypes() {
        final Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null;
        // one instance of every unsupported query type; inner queries/arguments are
        // mocks or throwaway strings because only the outer query's type matters here
        final AbstractQueryBuilder<? extends AbstractQueryBuilder<?>> q1 = randomFrom(
            QueryBuilders.constantScoreQuery(mock(QueryBuilder.class)),
            QueryBuilders.boostingQuery(mock(QueryBuilder.class), mock(QueryBuilder.class)),
            QueryBuilders.queryStringQuery("q=a:42"),
            QueryBuilders.combinedFieldsQuery(randomAlphaOfLength(5)),
            QueryBuilders.disMaxQuery(),
            QueryBuilders.distanceFeatureQuery(
                randomAlphaOfLength(5),
                mock(DistanceFeatureQueryBuilder.Origin.class),
                randomAlphaOfLength(5)
            ),
            QueryBuilders.fieldMaskingSpanQuery(mock(SpanQueryBuilder.class), randomAlphaOfLength(5)),
            QueryBuilders.functionScoreQuery(mock(QueryBuilder.class)),
            QueryBuilders.fuzzyQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)),
            QueryBuilders.wrapperQuery(randomAlphaOfLength(5)),
            QueryBuilders.matchBoolPrefixQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)),
            QueryBuilders.matchPhraseQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)),
            QueryBuilders.matchPhrasePrefixQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)),
            QueryBuilders.moreLikeThisQuery(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(5))),
            QueryBuilders.regexpQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)),
            QueryBuilders.spanTermQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)),
            QueryBuilders.spanOrQuery(mock(SpanQueryBuilder.class)),
            QueryBuilders.spanContainingQuery(mock(SpanQueryBuilder.class), mock(SpanQueryBuilder.class)),
            QueryBuilders.spanFirstQuery(mock(SpanQueryBuilder.class), randomIntBetween(1, 3)),
            QueryBuilders.spanMultiTermQueryBuilder(mock(MultiTermQueryBuilder.class)),
            QueryBuilders.spanNotQuery(mock(SpanQueryBuilder.class), mock(SpanQueryBuilder.class)),
            QueryBuilders.scriptQuery(new Script(randomAlphaOfLength(5))),
            QueryBuilders.scriptScoreQuery(mock(QueryBuilder.class), new Script(randomAlphaOfLength(5))),
            QueryBuilders.geoWithinQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)),
            QueryBuilders.geoBoundingBoxQuery(randomAlphaOfLength(5)),
            QueryBuilders.geoDisjointQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)),
            QueryBuilders.geoDistanceQuery(randomAlphaOfLength(5)),
            QueryBuilders.geoIntersectionQuery(randomAlphaOfLength(5), randomAlphaOfLength(5)),
            QueryBuilders.geoShapeQuery(randomAlphaOfLength(5), randomAlphaOfLength(5))
        );
        final IllegalArgumentException e1 = expectThrows(
            IllegalArgumentException.class,
            () -> ApiKeyBoolQueryBuilder.build(q1, ignored -> {}, authentication)
        );
        assertThat(e1.getMessage(), containsString("Query type [" + q1.getName() + "] is not currently supported in this context"));
        // also wrapped in a boolean query
        {
            final BoolQueryBuilder q2 = QueryBuilders.boolQuery();
            // attach the disallowed query to a randomly chosen bool clause
            if (randomBoolean()) {
                if (randomBoolean()) {
                    q2.filter(q1);
                } else {
                    q2.must(q1);
                }
            } else {
                if (randomBoolean()) {
                    q2.should(q1);
                } else {
                    q2.mustNot(q1);
                }
            }
            IllegalArgumentException e2 = expectThrows(
                IllegalArgumentException.class,
                () -> ApiKeyBoolQueryBuilder.build(q2, ignored -> {}, authentication)
            );
            assertThat(e2.getMessage(), containsString("Query type [" + q1.getName() + "] is not currently supported in this context"));
        }
    }
/**
 * Building a lucene query (or rewriting) must install the allowed-field predicate on
 * the {@link SearchExecutionContext}; the predicate itself is validated inside the
 * mocked {@code setAllowedFields} answer via {@link #testAllowedIndexFieldName}.
 */
public void testWillSetAllowedFields() throws IOException {
        final ApiKeyBoolQueryBuilder queryBuilder = ApiKeyBoolQueryBuilder.build(
            randomSimpleQuery("name"),
            ignored -> {},
            randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null
        );
        final SearchExecutionContext context = mock(SearchExecutionContext.class);
        doAnswer(invocation -> {
            @SuppressWarnings("unchecked")
            final Predicate<String> fieldPredicate = (Predicate<String>) invocation.getArguments()[0];
            // the predicate must originate from ApiKeyBoolQueryBuilder itself
            assertTrue(fieldPredicate.getClass().getName().startsWith(ApiKeyBoolQueryBuilder.class.getName()));
            testAllowedIndexFieldName(fieldPredicate);
            return null;
        }).when(context).setAllowedFields(any());
        try {
            // both code paths are expected to install the predicate
            if (randomBoolean()) {
                queryBuilder.doToQuery(context);
            } else {
                queryBuilder.doRewrite(context);
            }
        } catch (Exception e) {
            // superclass logic may fail against the mocked context; only the
            // setAllowedFields interaction matters for this test
        } finally {
            verify(context).setAllowedFields(any());
        }
    }
/**
 * When the caller is authenticated with an API key, the built query must restrict
 * results to that single key: a {@code doc_type} term filter plus an ids filter on
 * the key's own id.
 */
public void testWillFilterForApiKeyId() {
        final String ownKeyId = randomAlphaOfLength(20);
        final Authentication apiKeyAuthn = AuthenticationTests.randomApiKeyAuthentication(
            new User(randomAlphaOfLengthBetween(5, 8)),
            ownKeyId
        );
        // the user-supplied query is optional; the filter must be added either way
        final ApiKeyBoolQueryBuilder queryBuilder = ApiKeyBoolQueryBuilder.build(
            randomFrom(randomSimpleQuery("name"), null),
            ignored -> {},
            apiKeyAuthn
        );
        assertThat(queryBuilder.filter(), hasItem(QueryBuilders.termQuery("doc_type", "api_key")));
        assertThat(queryBuilder.filter(), hasItem(QueryBuilders.idsQuery().addIds(ownKeyId)));
    }
/**
 * Verifies how {@code simple_query_string} field patterns are translated to index-level
 * field names: no field (or {@code *}) expands to every allowed field, wildcard
 * patterns expand to the allowed fields they match (boosts multiply when several
 * patterns hit the same field), and unknown or disallowed field names are silently
 * dropped — degrading to a {@code match_none} query when nothing remains.
 */
public void testSimpleQueryStringFieldPatternTranslation() {
        String queryStringQuery = randomAlphaOfLength(8);
        Authentication authentication = randomBoolean() ? AuthenticationTests.randomAuthentication(null, null) : null;
        // no field translates to all the allowed fields
        {
            List<String> queryFields = new ArrayList<>();
            SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery);
            ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication);
            // the nine queryable index fields are reported first, in unspecified order ...
            assertThat(
                queryFields.subList(0, 9),
                containsInAnyOrder(
                    "creator.principal",
                    "creator.realm",
                    "name",
                    "runtime_key_type",
                    "creation_time",
                    "expiration_time",
                    "api_key_invalidated",
                    "invalidation_time",
                    "metadata_flattened"
                )
            );
            // ... followed by the implicit doc_type filter field
            assertThat(queryFields.get(9), is("doc_type"));
            // lenient(true) is forced, so per-field type mismatches do not fail the query
            assertThat(
                apiKeyQb.must().get(0),
                equalTo(
                    QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                        .field("creator.principal")
                        .field("creator.realm")
                        .field("name")
                        .field("runtime_key_type")
                        .field("creation_time")
                        .field("expiration_time")
                        .field("api_key_invalidated")
                        .field("invalidation_time")
                        .field("metadata_flattened")
                        .lenient(true)
                )
            );
        }
        // * matches all fields
        {
            List<String> queryFields = new ArrayList<>();
            float fieldBoost = Math.abs(randomFloat());
            SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery).field("*", fieldBoost);
            ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication);
            assertThat(
                queryFields.subList(0, 9),
                containsInAnyOrder(
                    "creator.principal",
                    "creator.realm",
                    "name",
                    "runtime_key_type",
                    "creation_time",
                    "expiration_time",
                    "api_key_invalidated",
                    "invalidation_time",
                    "metadata_flattened"
                )
            );
            assertThat(queryFields.get(9), is("doc_type"));
            // the boost attached to the * pattern is carried over to every expanded field
            assertThat(
                apiKeyQb.must().get(0),
                equalTo(
                    QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                        .field("creator.principal", fieldBoost)
                        .field("creator.realm", fieldBoost)
                        .field("name", fieldBoost)
                        .field("runtime_key_type", fieldBoost)
                        .field("creation_time", fieldBoost)
                        .field("expiration_time", fieldBoost)
                        .field("api_key_invalidated", fieldBoost)
                        .field("invalidation_time", fieldBoost)
                        .field("metadata_flattened", fieldBoost)
                        .lenient(true)
                )
            );
        }
        // pattern that matches a subset of fields
        {
            List<String> queryFields = new ArrayList<>();
            float fieldBoost = Math.abs(randomFloat());
            boolean lenient = randomBoolean();
            // "i*" matches the API-level names "invalidated" and "invalidation"
            SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery).field("i*", fieldBoost).lenient(lenient);
            ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication);
            assertThat(queryFields.subList(0, 2), containsInAnyOrder("api_key_invalidated", "invalidation_time"));
            assertThat(queryFields.get(2), is("doc_type"));
            // for explicit patterns the caller's lenient setting is preserved as-is
            assertThat(
                apiKeyQb.must().get(0),
                equalTo(
                    QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                        .field("api_key_invalidated", fieldBoost)
                        .field("invalidation_time", fieldBoost)
                        .lenient(lenient)
                )
            );
        }
        // multi pattern that matches a subset of fields
        {
            List<String> queryFields = new ArrayList<>();
            float boost1 = randomFrom(2.0f, 4.0f, 8.0f);
            float boost2 = randomFrom(2.0f, 4.0f, 8.0f);
            float boost3 = randomFrom(2.0f, 4.0f, 8.0f);
            // "u*" and "user*" both match "username", so creator.principal is reported
            // twice and its boosts are multiplied (boost2 * boost3)
            SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                .field("i*", boost1)
                .field("u*", boost2)
                .field("user*", boost3);
            ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication);
            // four field callbacks are recorded (creator.principal appears twice); the
            // first three distinct ones are checked here, doc_type follows at index 4
            assertThat(queryFields.subList(0, 3), containsInAnyOrder("creator.principal", "api_key_invalidated", "invalidation_time"));
            assertThat(queryFields.get(4), is("doc_type"));
            assertThat(
                apiKeyQb.must().get(0),
                equalTo(
                    QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                        .field("api_key_invalidated", boost1)
                        .field("invalidation_time", boost1)
                        .field("creator.principal", boost2 * boost3)
                        .lenient(false)
                )
            );
            // wildcards don't expand under metadata.*
            queryFields = new ArrayList<>();
            q = QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                .field("rea*", boost1)
                .field("t*", boost1)
                .field("ty*", boost2)
                .field("me*", boost2)
                .field("metadata.*", boost3)
                .field("metadata.x*", boost3);
            apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication);
            // runtime_key_type is listed twice because both "t*" and "ty*" match "type";
            // the metadata.* patterns contribute nothing
            assertThat(
                queryFields.subList(0, 4),
                containsInAnyOrder("creator.realm", "runtime_key_type", "metadata_flattened", "runtime_key_type")
            );
            assertThat(queryFields.get(4), is("doc_type"));
            assertThat(
                apiKeyQb.must().get(0),
                equalTo(
                    QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                        .field("creator.realm", boost1)
                        .field("runtime_key_type", boost1 * boost2)
                        .field("metadata_flattened", boost2)
                        .lenient(false)
                )
            );
        }
        // patterns that don't match anything
        {
            List<String> queryFields = new ArrayList<>();
            float boost1 = randomFrom(2.0f, 4.0f, 8.0f);
            float boost2 = randomFrom(2.0f, 4.0f, 8.0f);
            float boost3 = randomFrom(2.0f, 4.0f, 8.0f);
            SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                .field("field_that_does_not*", boost1)
                .field("what*", boost2)
                .field("aiaiaiai*", boost3);
            ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication);
            // only the implicit filter fields are reported: doc_type always, plus the
            // owner-restriction fields when there is an authenticated user
            // NOTE(review): unlike earlier sections this does not exclude API-key
            // authentications — presumably randomAuthentication(null, null) never
            // yields one here; confirm
            assertThat(queryFields.get(0), is("doc_type"));
            if (authentication != null) {
                assertThat(queryFields.get(1), is("creator.principal"));
                assertThat(queryFields.get(2), is("creator.realm"));
                assertThat(queryFields.size(), is(3));
            } else {
                assertThat(queryFields.size(), is(1));
            }
            // with no surviving fields the query degrades to match_none
            assertThat(apiKeyQb.must().get(0), equalTo(new MatchNoneQueryBuilder()));
        }
        // disallowed or unknown field is silently ignored
        {
            List<String> queryFields = new ArrayList<>();
            float boost1 = randomFrom(2.0f, 4.0f, 8.0f);
            float boost2 = randomFrom(2.0f, 4.0f, 8.0f);
            SimpleQueryStringBuilder q = QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                .field("field_that_does_not*", boost1)
                .field("unknown_field", boost2);
            ApiKeyBoolQueryBuilder apiKeyQb = ApiKeyBoolQueryBuilder.build(q, queryFields::add, authentication);
            assertThat(queryFields.get(0), is("doc_type"));
            if (authentication != null) {
                assertThat(queryFields.get(1), is("creator.principal"));
                assertThat(queryFields.get(2), is("creator.realm"));
                assertThat(queryFields.size(), is(3));
            } else {
                assertThat(queryFields.size(), is(1));
            }
            assertThat(apiKeyQb.must().get(0), equalTo(new MatchNoneQueryBuilder()));
            // translated field
            // index-level (already-translated) names are not accepted as query input
            // either: callers must use the API-level names, so this also yields match_none
            queryFields = new ArrayList<>();
            String translatedField = randomFrom(
                "creator.principal",
                "creator.realm",
                "runtime_key_type",
                "creation_time",
                "expiration_time",
                "api_key_invalidated",
                "invalidation_time",
                "metadata_flattened"
            );
            SimpleQueryStringBuilder q2 = QueryBuilders.simpleQueryStringQuery(queryStringQuery)
                .field(translatedField, boost1)
                .field("field_that_does_not*", boost2);
            apiKeyQb = ApiKeyBoolQueryBuilder.build(q2, queryFields::add, authentication);
            assertThat(queryFields.get(0), is("doc_type"));
            if (authentication != null) {
                assertThat(queryFields.get(1), is("creator.principal"));
                assertThat(queryFields.get(2), is("creator.realm"));
                assertThat(queryFields.size(), is(3));
            } else {
                assertThat(queryFields.size(), is(1));
            }
            assertThat(apiKeyQb.must().get(0), equalTo(new MatchNoneQueryBuilder()));
        }
    }
/**
 * Asserts that the given allowed-field predicate accepts a sample of allow-listed
 * index-level names and rejects near-misses derived from them (a random prefix
 * prepended, or the first character stripped).
 */
private void testAllowedIndexFieldName(Predicate<String> predicate) {
        final String allowedField = randomFrom(
            "doc_type",
            "name",
            "type",
            TransportQueryApiKeyAction.API_KEY_TYPE_RUNTIME_MAPPING_FIELD,
            "api_key_invalidated",
            "creation_time",
            "expiration_time",
            "metadata_flattened." + randomAlphaOfLengthBetween(1, 10),
            "creator.principal",
            "creator.realm"
        );
        assertThat(predicate, trueWith(allowedField));
        // mangle the allowed name so it is no longer on the allow-list
        final String mangledField;
        if (randomBoolean()) {
            mangledField = randomAlphaOfLengthBetween(1, 3) + allowedField;
        } else {
            mangledField = allowedField.substring(1);
        }
        assertThat(predicate, falseWith(mangledField));
    }
/**
 * Asserts the filter clauses every built query must carry: the {@code doc_type} term
 * filter always; for an API-key caller, an ids filter on the key's own id; for any
 * other authenticated caller, owner-restriction term filters on principal and realm.
 */
private void assertCommonFilterQueries(ApiKeyBoolQueryBuilder qb, Authentication authentication) {
        final List<TermQueryBuilder> termFilters = qb.filter()
            .stream()
            .filter(q -> q.getClass() == TermQueryBuilder.class)
            .map(TermQueryBuilder.class::cast)
            .toList();
        assertThat(termFilters, hasItem(QueryBuilders.termQuery("doc_type", "api_key")));
        if (authentication == null) {
            // unauthenticated builds carry no owner restriction
            return;
        }
        if (authentication.isApiKey()) {
            // API-key callers are restricted to exactly their own key
            final List<IdsQueryBuilder> idsFilters = qb.filter()
                .stream()
                .filter(q -> q.getClass() == IdsQueryBuilder.class)
                .map(IdsQueryBuilder.class::cast)
                .toList();
            assertThat(idsFilters, iterableWithSize(1));
            final String ownKeyId = (String) authentication.getAuthenticatingSubject()
                .getMetadata()
                .get(AuthenticationField.API_KEY_ID_KEY);
            assertThat(idsFilters.get(0), equalTo(QueryBuilders.idsQuery().addIds(ownKeyId)));
        } else {
            // regular users only see keys they created, scoped to their realm
            assertThat(
                termFilters,
                hasItem(QueryBuilders.termQuery("creator.principal", authentication.getEffectiveSubject().getUser().principal()))
            );
            assertThat(
                termFilters,
                hasItem(QueryBuilders.termQuery("creator.realm", ApiKeyService.getCreatorRealmName(authentication)))
            );
        }
    }
/**
 * Returns a random instance of one of the query types that
 * {@code ApiKeyBoolQueryBuilder} accepts, targeting the given field name (except the
 * ids and match_all cases, which are field-less).
 */
private QueryBuilder randomSimpleQuery(String fieldName) {
        final int queryKind = randomIntBetween(0, 9);
        switch (queryKind) {
            case 0:
                return QueryBuilders.termQuery(fieldName, randomAlphaOfLengthBetween(3, 8))
                    .boost(Math.abs(randomFloat()))
                    .queryName(randomAlphaOfLength(4));
            case 1:
                return QueryBuilders.termsQuery(fieldName, randomArray(1, 3, String[]::new, () -> randomAlphaOfLengthBetween(3, 8)))
                    .boost(Math.abs(randomFloat()))
                    .queryName(randomAlphaOfLength(4));
            case 2:
                return QueryBuilders.idsQuery().addIds(randomArray(1, 3, String[]::new, () -> randomAlphaOfLength(22)));
            case 3:
                return QueryBuilders.prefixQuery(fieldName, "prod-");
            case 4:
                return QueryBuilders.wildcardQuery(fieldName, "prod-*-east-*");
            case 5:
                return QueryBuilders.matchAllQuery();
            case 6:
                return QueryBuilders.existsQuery(fieldName).boost(Math.abs(randomFloat())).queryName(randomAlphaOfLength(4));
            case 7:
                return QueryBuilders.rangeQuery(fieldName)
                    .from(Instant.now().minus(1, ChronoUnit.DAYS).toEpochMilli(), randomBoolean())
                    .to(Instant.now().toEpochMilli(), randomBoolean());
            case 8:
                return QueryBuilders.simpleQueryStringQuery("+rest key*")
                    .field(fieldName)
                    .lenient(randomBoolean())
                    .analyzeWildcard(randomBoolean())
                    .fuzzyPrefixLength(randomIntBetween(1, 10))
                    .fuzzyMaxExpansions(randomIntBetween(1, 10))
                    .fuzzyTranspositions(randomBoolean());
            case 9:
                return QueryBuilders.matchQuery(fieldName, randomAlphaOfLengthBetween(3, 8))
                    .operator(randomFrom(Operator.OR, Operator.AND))
                    .lenient(randomBoolean())
                    .maxExpansions(randomIntBetween(1, 100))
                    .analyzer(randomFrom(randomAlphaOfLength(4), null));
            default:
                throw new IllegalStateException("illegal switch case");
        }
    }
/**
 * Asserts the field-name callbacks recorded while building a query: {@code doc_type}
 * always; {@code name} for every query type that targets a field; and the
 * owner-restriction fields for non-API-key authenticated callers.
 */
private void assertQueryFields(List<String> actualQueryFields, QueryBuilder queryBuilder, Authentication authentication) {
        assertThat(actualQueryFields, hasItem("doc_type"));
        // ids and match_all queries are field-less, so "name" is only reported otherwise
        final boolean fieldless = queryBuilder instanceof IdsQueryBuilder || queryBuilder instanceof MatchAllQueryBuilder;
        if (fieldless == false) {
            assertThat(actualQueryFields, hasItem("name"));
        }
        if (authentication != null && authentication.isApiKey() == false) {
            assertThat(actualQueryFields, hasItem("creator.principal"));
            assertThat(actualQueryFields, hasItem("creator.realm"));
        }
    }
/**
 * Returns a random API-level field name that is valid for querying API keys,
 * including a dotted {@code metadata} sub-field.
 */
private static String randomValidFieldName() {
        final String[] queryableNames = {
            "username",
            "realm_name",
            "name",
            "type",
            "creation",
            "expiration",
            "invalidated",
            "invalidation",
            "metadata",
            "metadata.what.ever" };
        return randomFrom(queryableNames);
    }
}
| ApiKeyBoolQueryBuilderTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/SelectUnknownEnumLiteralTest.java | {
"start": 1662,
"end": 1727
} | enum ____ {
TRANSACTION, DIRECT_DEBIT_GROUP, DIRECT_DEBIT
}
}
| Type |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customexceptions/ImplClassExceptionMapperTest.java | {
"start": 2870,
"end": 3014
} | interface ____ {
@GET
@Path("error")
@Produces("text/plain")
String throwsException();
}
}
| GlobalCustomResource |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/ProcessContext.java | {
"start": 5334,
"end": 6027
} | class ____ implements ProcessState {
@Override
public boolean setRunning(ProcessContext processContext, AutodetectCommunicator autodetectCommunicator) {
processContext.setAutodetectCommunicator(autodetectCommunicator);
processContext.setState(new ProcessRunningState());
return true;
}
@Override
public boolean setDying(ProcessContext processContext) {
processContext.setState(new ProcessDyingState());
return true;
}
@Override
public ProcessStateName getName() {
return ProcessStateName.NOT_RUNNING;
}
}
private static | ProcessNotRunningState |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/TypeReference.java | {
"start": 8982,
"end": 9290
} | class ____ method type parameter bound.
*/
public static TypeReference newTypeParameterBoundReference(
final int sort, final int paramIndex, final int boundIndex) {
return new TypeReference((sort << 24) | (paramIndex << 16) | (boundIndex << 8));
}
/**
* Returns a reference to the super | or |
java | google__guava | android/guava/src/com/google/common/collect/ImmutableMultimap.java | {
"start": 3160,
"end": 4019
} | class ____<K, V> extends BaseImmutableMultimap<K, V>
implements Serializable {
/**
* Returns an empty multimap.
*
* <p><b>Performance note:</b> the instance returned is a singleton.
*/
public static <K, V> ImmutableMultimap<K, V> of() {
return ImmutableListMultimap.of();
}
/** Returns an immutable multimap containing a single entry. */
public static <K, V> ImmutableMultimap<K, V> of(K k1, V v1) {
return ImmutableListMultimap.of(k1, v1);
}
/** Returns an immutable multimap containing the given entries, in order. */
public static <K, V> ImmutableMultimap<K, V> of(K k1, V v1, K k2, V v2) {
return ImmutableListMultimap.of(k1, v1, k2, v2);
}
/**
* Returns an immutable multimap containing the given entries, in the "key-grouped" insertion
* order described in the <a href="#iteration"> | ImmutableMultimap |
java | netty__netty | common/src/test/java/io/netty/util/internal/StringUtilTest.java | {
"start": 18490,
"end": 21973
} | class ____ { }
@Test
public void testEndsWith() {
assertFalse(StringUtil.endsWith("", 'u'));
assertTrue(StringUtil.endsWith("u", 'u'));
assertTrue(StringUtil.endsWith("-u", 'u'));
assertFalse(StringUtil.endsWith("-", 'u'));
assertFalse(StringUtil.endsWith("u-", 'u'));
}
@Test
public void trimOws() {
assertSame("", StringUtil.trimOws(""));
assertEquals("", StringUtil.trimOws(" \t "));
assertSame("a", StringUtil.trimOws("a"));
assertEquals("a", StringUtil.trimOws(" a"));
assertEquals("a", StringUtil.trimOws("a "));
assertEquals("a", StringUtil.trimOws(" a "));
assertSame("abc", StringUtil.trimOws("abc"));
assertEquals("abc", StringUtil.trimOws("\tabc"));
assertEquals("abc", StringUtil.trimOws("abc\t"));
assertEquals("abc", StringUtil.trimOws("\tabc\t"));
assertSame("a\t b", StringUtil.trimOws("a\t b"));
assertEquals("", StringUtil.trimOws("\t ").toString());
assertEquals("a b", StringUtil.trimOws("\ta b \t").toString());
}
@Test
public void testJoin() {
assertEquals("",
StringUtil.join(",", Collections.<CharSequence>emptyList()).toString());
assertEquals("a",
StringUtil.join(",", Collections.singletonList("a")).toString());
assertEquals("a,b",
StringUtil.join(",", Arrays.asList("a", "b")).toString());
assertEquals("a,b,c",
StringUtil.join(",", Arrays.asList("a", "b", "c")).toString());
assertEquals("a,b,c,null,d",
StringUtil.join(",", Arrays.asList("a", "b", "c", null, "d")).toString());
}
@Test
public void testIsNullOrEmpty() {
assertTrue(isNullOrEmpty(null));
assertTrue(isNullOrEmpty(""));
assertTrue(isNullOrEmpty(StringUtil.EMPTY_STRING));
assertFalse(isNullOrEmpty(" "));
assertFalse(isNullOrEmpty("\t"));
assertFalse(isNullOrEmpty("\n"));
assertFalse(isNullOrEmpty("foo"));
assertFalse(isNullOrEmpty(NEWLINE));
}
@Test
public void testIndexOfWhiteSpace() {
assertEquals(-1, indexOfWhiteSpace("", 0));
assertEquals(0, indexOfWhiteSpace(" ", 0));
assertEquals(-1, indexOfWhiteSpace(" ", 1));
assertEquals(0, indexOfWhiteSpace("\n", 0));
assertEquals(-1, indexOfWhiteSpace("\n", 1));
assertEquals(0, indexOfWhiteSpace("\t", 0));
assertEquals(-1, indexOfWhiteSpace("\t", 1));
assertEquals(3, indexOfWhiteSpace("foo\r\nbar", 1));
assertEquals(-1, indexOfWhiteSpace("foo\r\nbar", 10));
assertEquals(7, indexOfWhiteSpace("foo\tbar\r\n", 6));
assertEquals(-1, indexOfWhiteSpace("foo\tbar\r\n", Integer.MAX_VALUE));
}
@Test
public void testIndexOfNonWhiteSpace() {
assertEquals(-1, indexOfNonWhiteSpace("", 0));
assertEquals(-1, indexOfNonWhiteSpace(" ", 0));
assertEquals(-1, indexOfNonWhiteSpace(" \t", 0));
assertEquals(-1, indexOfNonWhiteSpace(" \t\r\n", 0));
assertEquals(2, indexOfNonWhiteSpace(" \tfoo\r\n", 0));
assertEquals(2, indexOfNonWhiteSpace(" \tfoo\r\n", 1));
assertEquals(4, indexOfNonWhiteSpace(" \tfoo\r\n", 4));
assertEquals(-1, indexOfNonWhiteSpace(" \tfoo\r\n", 10));
assertEquals(-1, indexOfNonWhiteSpace(" \tfoo\r\n", Integer.MAX_VALUE));
}
}
| TestClass |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/metamodel/MappingMetamodel.java | {
"start": 3185,
"end": 5322
} | interface ____ the entity. This method accounts for both, preferring the
* direct entity name.
*
* @throws org.hibernate.UnknownEntityTypeException If a matching EntityPersister cannot be located
*
* @deprecated No longer used
*/
@Deprecated(forRemoval = true, since = "7")
EntityPersister locateEntityDescriptor(Class<?> byClass);
String getImportedName(String name);
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Collection descriptors
/**
* Visit the mapping descriptors for all collections defined in the model
*/
void forEachCollectionDescriptor(Consumer<CollectionPersister> action);
@Deprecated(forRemoval = true, since = "7")
Stream<CollectionPersister> streamCollectionDescriptors();
/**
* Get a collection mapping descriptor based on its role
*
* @throws IllegalArgumentException if the role does not refer to a collection
*
* @see #findCollectionDescriptor
*/
CollectionPersister getCollectionDescriptor(String role);
/**
* Get a collection mapping descriptor based on its role
*
* @throws IllegalArgumentException if the role does not refer to a collection
*
* @see #findCollectionDescriptor
*/
CollectionPersister getCollectionDescriptor(NavigableRole role);
/**
* Find a collection mapping descriptor based on its role. Returns
* {@code null} if the role does not refer to a collection
*
* @see #findCollectionDescriptor
*/
CollectionPersister findCollectionDescriptor(NavigableRole role);
/**
* Find a collection mapping descriptor based on its role. Returns
* {@code null} if the role does not refer to a collection
*
* @see #findCollectionDescriptor
*/
CollectionPersister findCollectionDescriptor(String role);
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// SQM model -> Mapping model
// TODO Layer breaker used in SQM to SQL translation.
// Consider moving to QueryEngine or collaborators.
@Internal
MappingModelExpressible<?> resolveMappingExpressible(
SqmExpressible<?> sqmExpressible,
Function<NavigablePath, TableGroup> tableGroupLocator);
}
| for |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/decorators/validation/DecoratorWithAsyncObserverTest.java | {
"start": 1261,
"end": 1558
} | class ____ implements Converter<Number> {
@Inject
@Delegate
Converter<Number> delegate;
@Override
public Number convert(String value) {
return null;
}
void observeAsync(@ObservesAsync String ignored) {
}
}
}
| MyDecorator |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/target/SingletonTargetSource.java | {
"start": 1186,
"end": 1465
} | class ____ serializable. However, the actual serializability of a
* SingletonTargetSource will depend on whether the target is serializable.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @see org.springframework.aop.framework.AdvisedSupport#setTarget(Object)
*/
public | is |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/OverrideStandardJavaTypeTest.java | {
"start": 1415,
"end": 3237
} | class ____ {
@AfterEach
void tearDown(SessionFactoryScope factoryScope) {
factoryScope.dropData();
}
@Test
public void verifyMappings(SessionFactoryScope scope) {
final MappingMetamodelImplementor mappingMetamodel = scope.getSessionFactory()
.getRuntimeMetamodels()
.getMappingMetamodel();
final EntityPersister entityDescriptor = mappingMetamodel.findEntityDescriptor( SampleEntity.class );
final var languageJavaTypeDescriptor = entityDescriptor.findAttributeMapping( "language" )
.getSingleJdbcMapping().getJavaTypeDescriptor();
assertInstanceOf( LocaleAsLanguageTagType.class, languageJavaTypeDescriptor );
}
@Test
public void validateJpa(SessionFactoryScope scope) {
final var id = scope.fromTransaction(
session -> {
final var entity = new SampleEntity();
entity.language = Locale.forLanguageTag( "en-Latn" );
session.persist( entity );
return entity.id;
}
);
scope.inSession(
session -> assertEquals(
Locale.forLanguageTag( "en-Latn" ),
session.find( SampleEntity.class, id ).language
)
);
}
@Test
public void validateNative(SessionFactoryScope scope) {
final var id = scope.fromTransaction(
session -> {
final var entity = new SampleEntity();
entity.language = Locale.forLanguageTag( "en-Latn" );
session.persist( entity );
return entity.id;
}
);
scope.inSession( session -> {
String quotedLanguage = session.getDialect().toQuotedIdentifier( "language" );
assertEquals(
"en-Latn",
session.createNativeQuery(
"select " + quotedLanguage + " from locale_as_language_tag where id=:id", String.class )
.setParameter( "id", id )
.getSingleResult()
);
} );
}
@Entity
@Table(name = "locale_as_language_tag")
public static | OverrideStandardJavaTypeTest |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 9939,
"end": 10174
} | class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return dialect.getLimitHandler().supportsLimit() && dialect.getLimitHandler().supportsLimitOffset();
}
}
public static | SupportLimitAndOffsetCheck |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/sort/FloatBucketedSort.java | {
"start": 1164,
"end": 13938
} | class ____ implements Releasable {
private final BigArrays bigArrays;
private final SortOrder order;
private final int bucketSize;
/**
* {@code true} if the bucket is in heap mode, {@code false} if
* it is still gathering.
*/
private final BitArray heapMode;
/**
* An array containing all the values on all buckets. The structure is as follows:
* <p>
* For each bucket, there are bucketSize elements, based on the bucket id (0, 1, 2...).
* Then, for each bucket, it can be in 2 states:
* </p>
* <ul>
* <li>
* Gather mode: All buckets start in gather mode, and remain here while they have less than bucketSize elements.
* In gather mode, the elements are stored in the array from the highest index to the lowest index.
* The lowest index contains the offset to the next slot to be filled.
* <p>
* This allows us to insert elements in O(1) time.
* </p>
* <p>
* When the bucketSize-th element is collected, the bucket transitions to heap mode, by heapifying its contents.
* </p>
* </li>
* <li>
* Heap mode: The bucket slots are organized as a min heap structure.
* <p>
* The root of the heap is the minimum value in the bucket,
* which allows us to quickly discard new values that are not in the top N.
* </p>
* </li>
* </ul>
*/
private FloatArray values;
public FloatBucketedSort(BigArrays bigArrays, SortOrder order, int bucketSize) {
this.bigArrays = bigArrays;
this.order = order;
this.bucketSize = bucketSize;
heapMode = new BitArray(0, bigArrays);
boolean success = false;
try {
values = bigArrays.newFloatArray(0, false);
success = true;
} finally {
if (success == false) {
close();
}
}
}
/**
* Collects a {@code value} into a {@code bucket}.
* <p>
* It may or may not be inserted in the heap, depending on if it is better than the current root.
* </p>
*/
public void collect(float value, int bucket) {
long rootIndex = (long) bucket * bucketSize;
if (inHeapMode(bucket)) {
if (betterThan(value, values.get(rootIndex))) {
values.set(rootIndex, value);
downHeap(rootIndex, 0, bucketSize);
}
return;
}
// Gathering mode
long requiredSize = rootIndex + bucketSize;
if (values.size() < requiredSize) {
grow(bucket);
}
int next = getNextGatherOffset(rootIndex);
assert 0 <= next && next < bucketSize
: "Expected next to be in the range of valid buckets [0 <= " + next + " < " + bucketSize + "]";
long index = next + rootIndex;
values.set(index, value);
if (next == 0) {
heapMode.set(bucket);
heapify(rootIndex, bucketSize);
} else {
setNextGatherOffset(rootIndex, next - 1);
}
}
/**
* The order of the sort.
*/
public SortOrder getOrder() {
return order;
}
/**
* The number of values to store per bucket.
*/
public int getBucketSize() {
return bucketSize;
}
/**
* Get the first and last indexes (inclusive, exclusive) of the values for a bucket.
* Returns [0, 0] if the bucket has never been collected.
*/
private Tuple<Long, Long> getBucketValuesIndexes(int bucket) {
long rootIndex = (long) bucket * bucketSize;
if (rootIndex >= values.size()) {
// We've never seen this bucket.
return Tuple.tuple(0L, 0L);
}
long start = inHeapMode(bucket) ? rootIndex : (rootIndex + getNextGatherOffset(rootIndex) + 1);
long end = rootIndex + bucketSize;
return Tuple.tuple(start, end);
}
/**
* Merge the values from {@code other}'s {@code otherGroupId} into {@code groupId}.
*/
public void merge(int groupId, FloatBucketedSort other, int otherGroupId) {
var otherBounds = other.getBucketValuesIndexes(otherGroupId);
// TODO: This can be improved for heapified buckets by making use of the heap structures
for (long i = otherBounds.v1(); i < otherBounds.v2(); i++) {
collect(other.values.get(i), groupId);
}
}
/**
* Creates a block with the values from the {@code selected} groups.
*/
public Block toBlock(BlockFactory blockFactory, IntVector selected) {
// Check if the selected groups are all empty, to avoid allocating extra memory
if (allSelectedGroupsAreEmpty(selected)) {
return blockFactory.newConstantNullBlock(selected.getPositionCount());
}
try (var builder = blockFactory.newFloatBlockBuilder(selected.getPositionCount())) {
for (int s = 0; s < selected.getPositionCount(); s++) {
int bucket = selected.getInt(s);
var bounds = getBucketValuesIndexes(bucket);
var rootIndex = bounds.v1();
var size = bounds.v2() - bounds.v1();
if (size == 0) {
builder.appendNull();
continue;
}
if (size == 1) {
builder.appendFloat(values.get(rootIndex));
continue;
}
// If we are in the gathering mode, we need to heapify before sorting.
if (inHeapMode(bucket) == false) {
heapify(rootIndex, (int) size);
}
heapSort(rootIndex, (int) size);
builder.beginPositionEntry();
for (int i = 0; i < size; i++) {
builder.appendFloat(values.get(rootIndex + i));
}
builder.endPositionEntry();
}
return builder.build();
}
}
/**
* Checks if the selected groups are all empty.
*/
private boolean allSelectedGroupsAreEmpty(IntVector selected) {
return IntStream.range(0, selected.getPositionCount()).map(selected::getInt).noneMatch(bucket -> {
var bounds = this.getBucketValuesIndexes(bucket);
var size = bounds.v2() - bounds.v1();
return size > 0;
});
}
/**
* Is this bucket a min heap {@code true} or in gathering mode {@code false}?
*/
private boolean inHeapMode(int bucket) {
return heapMode.get(bucket);
}
/**
* Get the next index that should be "gathered" for a bucket rooted
* at {@code rootIndex}.
*/
private int getNextGatherOffset(long rootIndex) {
return (int) values.get(rootIndex);
}
/**
* Set the next index that should be "gathered" for a bucket rooted
* at {@code rootIndex}.
*/
private void setNextGatherOffset(long rootIndex, int offset) {
values.set(rootIndex, offset);
}
/**
* {@code true} if the entry at index {@code lhs} is "better" than
* the entry at {@code rhs}. "Better" in this means "lower" for
* {@link SortOrder#ASC} and "higher" for {@link SortOrder#DESC}.
*/
private boolean betterThan(float lhs, float rhs) {
int res = Float.compare(lhs, rhs);
return getOrder().reverseMul() * res < 0;
}
/**
* Swap the data at two indices.
*/
private void swap(long lhs, long rhs) {
var tmp = values.get(lhs);
values.set(lhs, values.get(rhs));
values.set(rhs, tmp);
}
/**
* Allocate storage for more buckets and store the "next gather offset"
* for those new buckets. We always grow the storage by whole bucket's
* worth of slots at a time. We never allocate space for partial buckets.
*/
private void grow(int bucket) {
long oldMax = values.size();
assert oldMax % bucketSize == 0;
long newSize = BigArrays.overSize(((long) bucket + 1) * bucketSize, PageCacheRecycler.FLOAT_PAGE_SIZE, Float.BYTES);
// Round up to the next full bucket.
newSize = (newSize + bucketSize - 1) / bucketSize;
values = bigArrays.resize(values, newSize * bucketSize);
// Set the next gather offsets for all newly allocated buckets.
fillGatherOffsets(oldMax);
}
/**
* Maintain the "next gather offsets" for newly allocated buckets.
*/
private void fillGatherOffsets(long startingAt) {
int nextOffset = getBucketSize() - 1;
for (long bucketRoot = startingAt; bucketRoot < values.size(); bucketRoot += getBucketSize()) {
setNextGatherOffset(bucketRoot, nextOffset);
}
}
/**
* Heapify a bucket whose entries are in random order.
* <p>
* This works by validating the heap property on each node, iterating
* "upwards", pushing any out of order parents "down". Check out the
* <a href="https://en.wikipedia.org/w/index.php?title=Binary_heap&oldid=940542991#Building_a_heap">wikipedia</a>
* entry on binary heaps for more about this.
* </p>
* <p>
* While this *looks* like it could easily be {@code O(n * log n)}, it is
* a fairly well studied algorithm attributed to Floyd. There's
* been a bunch of work that puts this at {@code O(n)}, close to 1.88n worst
* case.
* </p>
* <ul>
* <li>Hayward, Ryan; McDiarmid, Colin (1991).
* <a href="https://web.archive.org/web/20160205023201/http://www.stats.ox.ac.uk/__data/assets/pdf_file/0015/4173/heapbuildjalg.pdf">
* Average Case Analysis of Heap Building byRepeated Insertion</a> J. Algorithms.
* <li>D.E. Knuth, ”The Art of Computer Programming, Vol. 3, Sorting and Searching”</li>
* </ul>
* @param rootIndex the index the start of the bucket
*/
private void heapify(long rootIndex, int heapSize) {
int maxParent = heapSize / 2 - 1;
for (int parent = maxParent; parent >= 0; parent--) {
downHeap(rootIndex, parent, heapSize);
}
}
/**
* Sorts all the values in the heap using heap sort algorithm.
* This runs in {@code O(n log n)} time.
* @param rootIndex index of the start of the bucket
* @param heapSize Number of values that belong to the heap.
* Can be less than bucketSize.
* In such a case, the remaining values in range
* (rootIndex + heapSize, rootIndex + bucketSize)
* are *not* considered part of the heap.
*/
private void heapSort(long rootIndex, int heapSize) {
while (heapSize > 0) {
swap(rootIndex, rootIndex + heapSize - 1);
heapSize--;
downHeap(rootIndex, 0, heapSize);
}
}
/**
* Correct the heap invariant of a parent and its children. This
* runs in {@code O(log n)} time.
* @param rootIndex index of the start of the bucket
* @param parent Index within the bucket of the parent to check.
* For example, 0 is the "root".
* @param heapSize Number of values that belong to the heap.
* Can be less than bucketSize.
* In such a case, the remaining values in range
* (rootIndex + heapSize, rootIndex + bucketSize)
* are *not* considered part of the heap.
*/
private void downHeap(long rootIndex, int parent, int heapSize) {
while (true) {
long parentIndex = rootIndex + parent;
int worst = parent;
long worstIndex = parentIndex;
int leftChild = parent * 2 + 1;
long leftIndex = rootIndex + leftChild;
if (leftChild < heapSize) {
if (betterThan(values.get(worstIndex), values.get(leftIndex))) {
worst = leftChild;
worstIndex = leftIndex;
}
int rightChild = leftChild + 1;
long rightIndex = rootIndex + rightChild;
if (rightChild < heapSize && betterThan(values.get(worstIndex), values.get(rightIndex))) {
worst = rightChild;
worstIndex = rightIndex;
}
}
if (worst == parent) {
break;
}
swap(worstIndex, parentIndex);
parent = worst;
}
}
@Override
public final void close() {
Releasables.close(values, heapMode);
}
}
| FloatBucketedSort |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/zip/ByteArrayDataBlock.java | {
"start": 810,
"end": 1763
} | class ____ implements CloseableDataBlock {
private final byte[] bytes;
private final int maxReadSize;
/**
* Create a new {@link ByteArrayDataBlock} backed by the given bytes.
* @param bytes the bytes to use
*/
ByteArrayDataBlock(byte... bytes) {
this(bytes, -1);
}
ByteArrayDataBlock(byte[] bytes, int maxReadSize) {
this.bytes = bytes;
this.maxReadSize = maxReadSize;
}
@Override
public long size() throws IOException {
return this.bytes.length;
}
@Override
public int read(ByteBuffer dst, long pos) throws IOException {
return read(dst, (int) pos);
}
private int read(ByteBuffer dst, int pos) {
int remaining = dst.remaining();
int length = Math.min(this.bytes.length - pos, remaining);
if (this.maxReadSize > 0 && length > this.maxReadSize) {
length = this.maxReadSize;
}
dst.put(this.bytes, pos, length);
return length;
}
@Override
public void close() throws IOException {
}
}
| ByteArrayDataBlock |
java | apache__dubbo | dubbo-configcenter/dubbo-configcenter-apollo/src/main/java/org/apache/dubbo/configcenter/support/apollo/ApolloDynamicConfigurationFactory.java | {
"start": 1114,
"end": 1553
} | class ____ extends AbstractDynamicConfigurationFactory {
private ApplicationModel applicationModel;
public ApolloDynamicConfigurationFactory(ApplicationModel applicationModel) {
this.applicationModel = applicationModel;
}
@Override
protected DynamicConfiguration createDynamicConfiguration(URL url) {
return new ApolloDynamicConfiguration(url, applicationModel);
}
}
| ApolloDynamicConfigurationFactory |
java | google__gson | gson/src/test/java/com/google/gson/common/TestTypes.java | {
"start": 1597,
"end": 1814
} | class ____ extends Base {
public static final String SUB_NAME = Sub.class.getSimpleName();
public static final String SUB_FIELD_KEY = "subName";
public final String subName = SUB_NAME;
}
public static | Sub |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/websocketx/WebSocketHandshakeException.java | {
"start": 740,
"end": 1041
} | class ____ extends RuntimeException {
private static final long serialVersionUID = 1L;
public WebSocketHandshakeException(String s) {
super(s);
}
public WebSocketHandshakeException(String s, Throwable throwable) {
super(s, throwable);
}
}
| WebSocketHandshakeException |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/mappedBy/ManyToManyMappedByTypeTest.java | {
"start": 4769,
"end": 4911
} | class ____ {
@Id
private Long id;
@ManyToMany
private List<EntityC> parents;
}
@Entity( name = "EntityC" )
public static | EntityBWrong |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/main/java/io/quarkus/websockets/next/deployment/CallbackArgument.java | {
"start": 1514,
"end": 2796
} | interface ____ {
/**
*
* @return the callback target
*/
Target callbackTarget();
/**
*
* @return the endpoint path or {@code null} for global error handlers
*/
String endpointPath();
/**
*
* @return the index that can be used to inspect parameter types
*/
IndexView index();
/**
*
* @return the callback marker annotation
*/
AnnotationInstance callbackAnnotation();
/**
*
* @return the Java method parameter
*/
MethodParameterInfo parameter();
/**
*
* @return the set of parameter annotations, potentially transformed
*/
Set<AnnotationInstance> parameterAnnotations();
default boolean acceptsMessage() {
return WebSocketDotNames.ON_BINARY_MESSAGE.equals(callbackAnnotation().name())
|| WebSocketDotNames.ON_TEXT_MESSAGE.equals(callbackAnnotation().name())
|| WebSocketDotNames.ON_PING_MESSAGE.equals(callbackAnnotation().name())
|| WebSocketDotNames.ON_PONG_MESSAGE.equals(callbackAnnotation().name());
}
}
| ParameterContext |
java | quarkusio__quarkus | extensions/container-image/container-image-buildpack/deployment/src/main/java/io/quarkus/container/image/buildpack/deployment/BuildpackLogger.java | {
"start": 99,
"end": 752
} | class ____ implements dev.snowdrop.buildpack.Logger {
private static final Logger bplog = Logger.getLogger("buildpack");
private String trim(String message) {
if (message.endsWith("\n")) {
message = message.substring(0, message.length() - 1);
}
if (message.endsWith("\r")) {
message = message.substring(0, message.length() - 1);
}
return message;
}
@Override
public void stdout(String message) {
bplog.info(trim(prepare(message)));
}
@Override
public void stderr(String message) {
bplog.error(trim(prepare(message)));
}
}
| BuildpackLogger |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByCheckerTest.java | {
"start": 19850,
"end": 20005
} | class ____ {
final Object lock = new Object();
@GuardedBy("this")
boolean flag = false;
}
| A |
java | netty__netty | codec-http2/src/test/java/io/netty/handler/codec/http2/HpackStaticTableTest.java | {
"start": 810,
"end": 2370
} | class ____ {
@Test
public void testEmptyHeaderName() {
assertEquals(-1, HpackStaticTable.getIndex(""));
}
@Test
public void testMissingHeaderName() {
assertEquals(-1, HpackStaticTable.getIndex("missing"));
}
@Test
public void testExistingHeaderName() {
assertEquals(6, HpackStaticTable.getIndex(":scheme"));
}
@Test
public void testMissingHeaderNameAndValue() {
assertEquals(-1, HpackStaticTable.getIndexInsensitive("missing", "value"));
}
@Test
public void testMissingHeaderNameButValueExists() {
assertEquals(-1, HpackStaticTable.getIndexInsensitive("missing", "https"));
}
@Test
public void testExistingHeaderNameAndValueFirstMatch() {
assertEquals(6, HpackStaticTable.getIndexInsensitive(":scheme", "http"));
}
@Test
public void testExistingHeaderNameAndValueSecondMatch() {
assertEquals(7, HpackStaticTable.getIndexInsensitive(
AsciiString.cached(":scheme"), AsciiString.cached("https")));
}
@Test
public void testExistingHeaderNameAndEmptyValueMismatch() {
assertEquals(-1, HpackStaticTable.getIndexInsensitive(":scheme", ""));
}
@Test
public void testExistingHeaderNameAndEmptyValueMatch() {
assertEquals(27, HpackStaticTable.getIndexInsensitive("content-language", ""));
}
@Test
public void testExistingHeaderNameButMissingValue() {
assertEquals(-1, HpackStaticTable.getIndexInsensitive(":scheme", "missing"));
}
}
| HpackStaticTableTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/write/staticinsert/SimpleNullabilityTest.java | {
"start": 656,
"end": 1182
} | class ____ {
@Test
@DomainModel(annotatedClasses = Tester.class)
void checkIt(DomainModelScope scope) {
final PersistentClass entityBinding = scope.getEntityBinding( Tester.class );
final Property descriptionProperty = entityBinding.getProperty( "description" );
assertThat( descriptionProperty.isOptional() ).isTrue();
assertThat( descriptionProperty.getColumns().get( 0 ).isNullable() ).isTrue();
}
@Entity(name="Tester")
@Table(name="Tester")
@SecondaryTable(name="Tester2")
public static | SimpleNullabilityTest |
java | apache__camel | core/camel-core-processor/src/main/java/org/apache/camel/processor/DefaultDeferServiceFactory.java | {
"start": 1118,
"end": 1542
} | class ____ implements DeferServiceFactory {
@Override
public Producer createProducer(Endpoint endpoint) throws Exception {
Producer producer = new DeferProducer(endpoint);
producer = new UnitOfWorkProducer(producer);
producer = new EventNotifierProducer(producer);
endpoint.getCamelContext().deferStartService(producer, true);
return producer;
}
}
| DefaultDeferServiceFactory |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/typeRef/TypeReferenceTest7.java | {
"start": 703,
"end": 959
} | class ____ {
private String id;
private A a;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public A getA() {
return a;
}
public void setA(A a) {
this.a = a;
}
}
public static | Entity |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/reactive/server/WebTestClient.java | {
"start": 34457,
"end": 34698
} | interface ____ extends Consumer<ResponseSpec> {
}
}
/**
* Spec for expectations on the response body decoded to a single Object.
*
* @param <S> a self reference to the spec type
* @param <B> the body type
*/
| ResponseSpecConsumer |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/internals/VersionedKeyValueToBytesStoreAdapter.java | {
"start": 2375,
"end": 7026
} | class ____ implements VersionedBytesStore {
private static final Serde<ValueAndTimestamp<byte[]>> VALUE_AND_TIMESTAMP_SERDE
= new ValueAndTimestampSerde<>(new ByteArraySerde());
private static final Serializer<ValueAndTimestamp<byte[]>> VALUE_AND_TIMESTAMP_SERIALIZER
= VALUE_AND_TIMESTAMP_SERDE.serializer();
final VersionedKeyValueStore<Bytes, byte[]> inner;
public VersionedKeyValueToBytesStoreAdapter(final VersionedKeyValueStore<Bytes, byte[]> inner) {
this.inner = Objects.requireNonNull(inner);
}
@Override
public long put(final Bytes key, final byte[] value, final long timestamp) {
return inner.put(key, value, timestamp);
}
@Override
public byte[] get(final Bytes key) {
final VersionedRecord<byte[]> versionedRecord = inner.get(key);
return serializeAsBytes(versionedRecord);
}
@Override
public byte[] get(final Bytes key, final long asOfTimestamp) {
final VersionedRecord<byte[]> versionedRecord = inner.get(key, asOfTimestamp);
return serializeAsBytes(versionedRecord);
}
@Override
public byte[] delete(final Bytes key, final long timestamp) {
final VersionedRecord<byte[]> versionedRecord = inner.delete(key, timestamp);
return serializeAsBytes(versionedRecord);
}
@Override
public String name() {
return inner.name();
}
@Override
public void init(final StateStoreContext stateStoreContext, final StateStore root) {
inner.init(stateStoreContext, root);
}
@Override
public void flush() {
inner.flush();
}
@Override
public void close() {
inner.close();
}
@Override
public boolean persistent() {
return inner.persistent();
}
@Override
public boolean isOpen() {
return inner.isOpen();
}
@Override
public <R> QueryResult<R> query(final Query<R> query, final PositionBound positionBound, final QueryConfig config) {
return inner.query(query, positionBound, config);
}
@Override
public Position getPosition() {
return inner.getPosition();
}
@Override
public void put(final Bytes key, final byte[] rawValueAndTimestamp) {
throw new UnsupportedOperationException("Versioned key-value stores should use put(key, value, timestamp) instead");
}
@Override
public byte[] putIfAbsent(final Bytes key, final byte[] value) {
throw new UnsupportedOperationException("Versioned key-value stores do not support putIfAbsent(key, value)");
}
@Override
public void putAll(final List<KeyValue<Bytes, byte[]>> entries) {
throw new UnsupportedOperationException("Versioned key-value stores do not support putAll(entries)");
}
@Override
public byte[] delete(final Bytes key) {
throw new UnsupportedOperationException("Versioned key-value stores do not support delete(key). Use delete(key, timestamp) instead.");
}
@Override
public KeyValueIterator<Bytes, byte[]> range(final Bytes from, final Bytes to) {
throw new UnsupportedOperationException("Versioned key-value stores do not support range(from, to)");
}
@Override
public KeyValueIterator<Bytes, byte[]> reverseRange(final Bytes from, final Bytes to) {
throw new UnsupportedOperationException("Versioned key-value stores do not support reverseRange(from, to)");
}
@Override
public KeyValueIterator<Bytes, byte[]> all() {
throw new UnsupportedOperationException("Versioned key-value stores do not support all()");
}
@Override
public KeyValueIterator<Bytes, byte[]> reverseAll() {
throw new UnsupportedOperationException("Versioned key-value stores do not support reverseAll()");
}
@Override
public <PS extends Serializer<P>, P> KeyValueIterator<Bytes, byte[]> prefixScan(final P prefix, final PS prefixKeySerializer) {
throw new UnsupportedOperationException("Versioned key-value stores do not support prefixScan(prefix, prefixKeySerializer)");
}
@Override
public long approximateNumEntries() {
throw new UnsupportedOperationException("Versioned key-value stores do not support approximateNumEntries()");
}
private static byte[] serializeAsBytes(final VersionedRecord<byte[]> versionedRecord) {
if (versionedRecord == null) {
return null;
}
return VALUE_AND_TIMESTAMP_SERIALIZER.serialize(
null,
ValueAndTimestamp.make(versionedRecord.value(), versionedRecord.timestamp()));
}
} | VersionedKeyValueToBytesStoreAdapter |
java | apache__camel | components/camel-google/camel-google-mail/src/generated/java/org/apache/camel/component/google/mail/internal/GmailUsersMessagesAttachmentsApiMethod.java | {
"start": 697,
"end": 1743
} | enum ____ implements ApiMethod {
GET(
com.google.api.services.gmail.Gmail.Users.Messages.Attachments.Get.class,
"get",
arg("userId", String.class),
arg("messageId", String.class),
arg("id", String.class));
private final ApiMethod apiMethod;
GmailUsersMessagesAttachmentsApiMethod(Class<?> resultType, String name, ApiMethodArg... args) {
this.apiMethod = new ApiMethodImpl(Attachments.class, resultType, name, args);
}
@Override
public String getName() { return apiMethod.getName(); }
@Override
public Class<?> getResultType() { return apiMethod.getResultType(); }
@Override
public List<String> getArgNames() { return apiMethod.getArgNames(); }
@Override
public List<String> getSetterArgNames() { return apiMethod.getSetterArgNames(); }
@Override
public List<Class<?>> getArgTypes() { return apiMethod.getArgTypes(); }
@Override
public Method getMethod() { return apiMethod.getMethod(); }
}
| GmailUsersMessagesAttachmentsApiMethod |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/buffer/ChannelBuffers.java | {
"start": 887,
"end": 5388
} | class ____ {
public static final ChannelBuffer EMPTY_BUFFER = new HeapChannelBuffer(0);
public static final int DEFAULT_CAPACITY = 256;
private ChannelBuffers() {}
public static ChannelBuffer dynamicBuffer() {
return dynamicBuffer(DEFAULT_CAPACITY);
}
public static ChannelBuffer dynamicBuffer(int capacity) {
return new DynamicChannelBuffer(capacity);
}
public static ChannelBuffer dynamicBuffer(int capacity, ChannelBufferFactory factory) {
return new DynamicChannelBuffer(capacity, factory);
}
public static ChannelBuffer buffer(int capacity) {
if (capacity < 0) {
throw new IllegalArgumentException("capacity can not be negative");
}
if (capacity == 0) {
return EMPTY_BUFFER;
}
return new HeapChannelBuffer(capacity);
}
public static ChannelBuffer wrappedBuffer(byte[] array, int offset, int length) {
if (array == null) {
throw new NullPointerException("array == null");
}
byte[] dest = new byte[length];
System.arraycopy(array, offset, dest, 0, length);
return wrappedBuffer(dest);
}
public static ChannelBuffer wrappedBuffer(byte[] array) {
if (array == null) {
throw new NullPointerException("array == null");
}
if (array.length == 0) {
return EMPTY_BUFFER;
}
return new HeapChannelBuffer(array);
}
public static ChannelBuffer wrappedBuffer(ByteBuffer buffer) {
if (!buffer.hasRemaining()) {
return EMPTY_BUFFER;
}
if (buffer.hasArray()) {
return wrappedBuffer(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining());
} else {
return new ByteBufferBackedChannelBuffer(buffer);
}
}
public static ChannelBuffer directBuffer(int capacity) {
if (capacity == 0) {
return EMPTY_BUFFER;
}
ChannelBuffer buffer = new ByteBufferBackedChannelBuffer(ByteBuffer.allocateDirect(capacity));
buffer.clear();
return buffer;
}
public static boolean equals(ChannelBuffer bufferA, ChannelBuffer bufferB) {
final int aLen = bufferA.readableBytes();
if (aLen != bufferB.readableBytes()) {
return false;
}
final int byteCount = aLen & 7;
int aIndex = bufferA.readerIndex();
int bIndex = bufferB.readerIndex();
for (int i = byteCount; i > 0; i--) {
if (bufferA.getByte(aIndex) != bufferB.getByte(bIndex)) {
return false;
}
aIndex++;
bIndex++;
}
return true;
}
// prefix
public static boolean prefixEquals(ChannelBuffer bufferA, ChannelBuffer bufferB, int count) {
final int aLen = bufferA.readableBytes();
final int bLen = bufferB.readableBytes();
if (aLen < count || bLen < count) {
return false;
}
int aIndex = bufferA.readerIndex();
int bIndex = bufferB.readerIndex();
for (int i = count; i > 0; i--) {
if (bufferA.getByte(aIndex) != bufferB.getByte(bIndex)) {
return false;
}
aIndex++;
bIndex++;
}
return true;
}
public static int hasCode(ChannelBuffer buffer) {
final int aLen = buffer.readableBytes();
final int byteCount = aLen & 7;
int hashCode = 1;
int arrayIndex = buffer.readerIndex();
for (int i = byteCount; i > 0; i--) {
hashCode = 31 * hashCode + buffer.getByte(arrayIndex++);
}
if (hashCode == 0) {
hashCode = 1;
}
return hashCode;
}
public static int compare(ChannelBuffer bufferA, ChannelBuffer bufferB) {
final int aLen = bufferA.readableBytes();
final int bLen = bufferB.readableBytes();
final int minLength = Math.min(aLen, bLen);
int aIndex = bufferA.readerIndex();
int bIndex = bufferB.readerIndex();
for (int i = minLength; i > 0; i--) {
byte va = bufferA.getByte(aIndex);
byte vb = bufferB.getByte(bIndex);
if (va > vb) {
return 1;
} else if (va < vb) {
return -1;
}
aIndex++;
bIndex++;
}
return aLen - bLen;
}
}
| ChannelBuffers |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/DefaultListableBeanFactoryTests.java | {
"start": 145162,
"end": 145241
} | class ____ implements PriorityService {
}
@Priority(3)
static | PriorityService2B |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/v2/internal/InternalAppendingState.java | {
"start": 955,
"end": 1386
} | interface ____ appending state.
*
* @param <K> The type of key the state is associated to.
* @param <N> The namespace type.
* @param <IN> The type of the values that are added into the state.
* @param <SV> The type of the intermediate state.
* @param <OUT> The type of the values that are returned from the state.
* @param <SYNCOUT> Type of the value that can be retrieved from the state by synchronous interface.
*/
public | for |
java | apache__kafka | tools/tools-api/src/main/java/org/apache/kafka/tools/api/RecordReader.java | {
"start": 1348,
"end": 2154
} | interface ____ extends Closeable, Configurable {
default void configure(Map<String, ?> configs) {}
/**
* read byte array from input stream and then generate an iterator of producer record
* @param inputStream {@link InputStream} of messages. the implementation does not need to close the input stream.
* @return an iterator of producer record. It should implement following rules. 1) the hasNext() method must be idempotent.
* 2) the convert error should be thrown by next() method.
*/
Iterator<ProducerRecord<byte[], byte[]>> readRecords(InputStream inputStream);
/**
* Closes this reader.
* This method is invoked if the iterator from readRecords either has no more records or throws exception.
*/
default void close() {}
}
| RecordReader |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/junit/jupiter/nested/ContextConfigurationTestMethodScopedExtensionContextNestedTests.java | {
"start": 2753,
"end": 3139
} | class ____ {
@Autowired(required = false)
@Qualifier("foo")
String localFoo;
@Autowired
String bar;
@Test
void test() {
assertThat(foo).as("foo bean should not be present").isNull();
assertThat(this.localFoo).as("local foo bean should not be present").isNull();
assertThat(this.bar).isEqualTo(BAR);
}
}
@Nested
@NestedTestConfiguration(INHERIT)
| NestedTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/nodelabels/AttributeExpressionOperation.java | {
"start": 930,
"end": 988
} | enum ____ {
LT, GT, IN, NOTIN
}
| AttributeExpressionOperation |
java | apache__dubbo | dubbo-plugin/dubbo-rest-jaxrs/src/test/java/org/apache/dubbo/rpc/protocol/tri/rest/support/jaxrs/compatible/DemoServiceImpl.java | {
"start": 1316,
"end": 4443
} | class ____ implements DemoService {
private static Map<String, Object> context;
private boolean called;
@POST
@Path("/say")
@Consumes({MediaType.TEXT_PLAIN})
@Override
public String sayHello(String name) {
called = true;
return "Hello, " + name;
}
@Override
public Long testFormBody(Long number) {
return number;
}
public boolean isCalled() {
return called;
}
@Override
public int primitiveInt(int a, int b) {
return a + b;
}
@Override
public long primitiveLong(long a, Long b) {
return a + b;
}
@Override
public long primitiveByte(byte a, Long b) {
return a + b;
}
@Override
public long primitiveShort(short a, Long b, int c) {
return a + b;
}
@Override
public void request(DefaultFullHttpRequest defaultFullHttpRequest) {}
@Override
public String testMapParam(Map<String, String> params) {
return params.get("param");
}
@Override
public String testMapHeader(Map<String, String> headers) {
return headers.get("header");
}
@Override
public List<String> testMapForm(MultivaluedMap<String, String> params) {
return params.get("form");
}
@Override
public String header(String header) {
return header;
}
@Override
public int headerInt(int header) {
return header;
}
@Override
public String noStringParam(String param) {
return param;
}
@Override
public String noStringHeader(String header) {
return header;
}
@POST
@Path("/noIntHeader")
@Consumes({MediaType.TEXT_PLAIN})
@Override
public int noIntHeader(@HeaderParam("header") int header) {
return header;
}
@POST
@Path("/noIntParam")
@Consumes({MediaType.TEXT_PLAIN})
@Override
public int noIntParam(@QueryParam("header") int header) {
return header;
}
@Override
public User noBodyArg(User user) {
return user;
}
@GET
@Path("/hello")
@Override
public Integer hello(@QueryParam("a") Integer a, @QueryParam("b") Integer b) {
context = RpcContext.getServerAttachment().getObjectAttachments();
return a + b;
}
@GET
@Path("/error")
@Override
public String error() {
throw new RuntimeException("test error");
}
public static Map<String, Object> getAttachments() {
return context;
}
@Override
public List<User> list(List<User> users) {
return users;
}
@Override
public Set<User> set(Set<User> users) {
return users;
}
@Override
public User[] array(User[] users) {
return users;
}
@Override
public Map<String, User> stringMap(Map<String, User> userMap) {
return userMap;
}
@Override
public Map<User, User> userMap(Map<User, User> userMap) {
return userMap;
}
@Override
public User formBody(User user) {
user.setName("formBody");
return user;
}
}
| DemoServiceImpl |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AtomicIntegerFieldUpdaterAssert.java | {
"start": 1035,
"end": 1570
} | class ____<OBJECT> extends
AbstractAtomicFieldUpdaterAssert<AtomicIntegerFieldUpdaterAssert<OBJECT>, Integer, AtomicIntegerFieldUpdater<OBJECT>, OBJECT> {
public AtomicIntegerFieldUpdaterAssert(AtomicIntegerFieldUpdater<OBJECT> actual) {
super(actual, AtomicIntegerFieldUpdaterAssert.class, false);
}
/**
* Verifies that the actual atomic field updater contains the given value at the given object.
* <p>
* Example:
* <pre><code class='java'> // person is an instance of a Person | AtomicIntegerFieldUpdaterAssert |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/test/java/io/quarkus/devtools/project/update/rewrite/QuarkusUpdatesRepositoryTest.java | {
"start": 1211,
"end": 5695
} | class ____ {
@ParameterizedTest
@CsvFileSource(resources = "/should_apply_recipe_test_cases.csv", numLinesToSkip = 1)
void testShouldApplyRecipeWithCSV(String recipeVersion, String currentVersion, String targetVersion,
boolean expectedResult) {
boolean result = shouldApplyRecipe(recipeVersion, currentVersion, targetVersion);
assertEquals(expectedResult, result);
}
@Test
void testShouldLoadRecipesFromTheDirectory() throws IOException {
Map<String, VersionUpdate> recipeDirectoryNames = new LinkedHashMap<>();
recipeDirectoryNames.put("core", new VersionUpdate("2.7", "3.1"));
recipeDirectoryNames.put("org.apache.camel.quarkus:camel-quarkus-core", new VersionUpdate("2.7", "3.0"));
ClassPathResourceLoader resourceLoader = new ClassPathResourceLoader();
List<String> recipes = fetchUpdateRecipes(MessageWriter.info(), resourceLoader, "dir/quarkus-update",
recipeDirectoryNames);
int noOfRecipes = recipes.size();
assertEquals(3, noOfRecipes);
}
@Test
void testShouldLoadRecipesFromTheDirectoryWithWildcard() throws IOException {
Map<String, VersionUpdate> recipeDirectoryNames = new LinkedHashMap<>();
recipeDirectoryNames.put("core", new VersionUpdate("2.7", "3.1"));
recipeDirectoryNames.put("org.apache.camel.quarkus:camel-quarkus-file", new VersionUpdate("2.7", "3.0"));
ClassPathResourceLoader resourceLoader = new ClassPathResourceLoader();
List<String> recipes = fetchUpdateRecipes(MessageWriter.info(), resourceLoader, "dir/quarkus-update",
recipeDirectoryNames);
int noOfRecipes = recipes.size();
assertEquals(3, noOfRecipes);
}
@Test
void testShouldLoadDuplicatedRecipesFromTheDirectoryWithWildcard() throws IOException {
Map<String, VersionUpdate> recipeDirectoryNames = new LinkedHashMap<>();
recipeDirectoryNames.put("core", new VersionUpdate("2.7", "3.1"));
recipeDirectoryNames.put("org.apache.camel.quarkus:camel-quarkus-file", new VersionUpdate("2.7", "3.1"));
recipeDirectoryNames.put("org.apache.camel.quarkus:camel-quarkus-ftp", new VersionUpdate("2.7", "3.1"));
recipeDirectoryNames.put("org.apache.camel.quarkus:camel-quarkus-fhir", new VersionUpdate("2.7", "3.1"));
ClassPathResourceLoader resourceLoader = new ClassPathResourceLoader();
List<String> recipes = fetchUpdateRecipes(MessageWriter.info(), resourceLoader, "dir/quarkus-update",
recipeDirectoryNames);
int noOfRecipes = recipes.size();
assertEquals(3, noOfRecipes);
}
@Test
void testToKey() {
String key = toKey("target/classes/quarkus-updates/org.apache.camel.quarkus.camel-quarkus");
assertEquals("target:classes:quarkus-updates:org.apache.camel.quarkus.camel-quarkus", key);
key = toKey("../app/target/classes/quarkus-updates/org.apache.camel.quarkus.camel-quarkus");
assertEquals("..:app:target:classes:quarkus-updates:org.apache.camel.quarkus.camel-quarkus", key);
}
@Test
@EnabledOnOs({ OS.WINDOWS })
void testToKeyWindows() {
String key = toKey("..\\a\\b\\quarkus-updates\\org.apache.camel.quarkus.camel-quarkus\\");
assertEquals("..:a:b:quarkus-updates:org.apache.camel.quarkus.camel-quarkus", key);
}
@Test
void testResolveVersionsForRecipesDir() {
Map<String, VersionUpdate> recipeDirectoryNames = new LinkedHashMap<>();
recipeDirectoryNames.put("core", new VersionUpdate("2.7", "3.1"));
recipeDirectoryNames.put("org.apache.camel.quarkus:camel-something1", new VersionUpdate("2.7", "3.1"));
recipeDirectoryNames.put("org.apache.camel.quarkus:camel-quarkus-file", new VersionUpdate("2.7", "3.3"));
recipeDirectoryNames.put("org.apache.camel.quarkus:camel-quarkus-ftp", new VersionUpdate("2.7", "3.4"));
recipeDirectoryNames.put("org.apache.camel.quarkus:camel-quarkus-fhir", new VersionUpdate("2.7", "3.6"));
Optional<RecipeDirectory> versions = resolveVersionsForRecipesDir("dir", "org.apache.camel.quarkus:camel-quarkus",
recipeDirectoryNames);
assertEquals(3, versions.get().versions().size());
versions = resolveVersionsForRecipesDir("dir", "org.apache.camel.quarkus:camel", recipeDirectoryNames);
assertEquals(4, versions.get().versions().size());
}
}
| QuarkusUpdatesRepositoryTest |
java | google__guava | android/guava-tests/test/com/google/common/collect/AbstractImmutableMapMapInterfaceTest.java | {
"start": 1002,
"end": 2057
} | class ____<K, V> extends MapInterfaceTest<K, V> {
AbstractImmutableMapMapInterfaceTest() {
super(false, false, false, false, false);
}
@Override
protected Map<K, V> makeEmptyMap() {
throw new UnsupportedOperationException();
}
private static final Joiner JOINER = Joiner.on(", ");
@Override
protected final void assertMoreInvariants(Map<K, V> map) {
// TODO: can these be moved to MapInterfaceTest?
for (Entry<K, V> entry : map.entrySet()) {
assertEquals(entry.getKey() + "=" + entry.getValue(), entry.toString());
}
assertEquals("{" + JOINER.join(map.entrySet()) + "}", map.toString());
assertEquals("[" + JOINER.join(map.entrySet()) + "]", map.entrySet().toString());
assertEquals("[" + JOINER.join(map.keySet()) + "]", map.keySet().toString());
assertEquals("[" + JOINER.join(map.values()) + "]", map.values().toString());
assertEquals(MinimalSet.from(map.entrySet()), map.entrySet());
assertEquals(new HashSet<>(map.keySet()), map.keySet());
}
}
| AbstractImmutableMapMapInterfaceTest |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configuration/AuthorizationManagerWebInvocationPrivilegeEvaluatorConfigTests.java | {
"start": 2680,
"end": 2887
} | class ____ {
@Bean
HttpServletRequestTransformer httpServletRequestTransformer() {
return mock(HttpServletRequestTransformer.class);
}
}
@Configuration
@EnableWebSecurity
static | TransformerConfig |
java | apache__camel | components/camel-twilio/src/test/java/org/apache/camel/component/twilio/AccountIT.java | {
"start": 1672,
"end": 4138
} | class ____ extends AbstractTwilioTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(AccountIT.class);
private static final String PATH_PREFIX = TwilioApiCollection.getCollection().getApiName(AccountApiMethod.class).getName();
@Test
public void testFetcher() {
final Account result = requestBody("direct://FETCHER", null);
assertNotNull(result, "fetcher result not null");
assertNotNull(result.getSid(), "fetcher result sid not null");
LOG.debug("fetcher: {}", result);
}
@Test
public void testFetcherWithPathSid() {
final Account result = requestBodyAndHeaders("direct://FETCHER", null,
headers("CamelTwilioPathSid", ((TwilioComponent) context().getComponent("twilio")).getAccountSid()));
assertNotNull(result, "fetcher result not null");
assertNotNull(result.getSid(), "fetcher result sid not null");
LOG.debug("fetcher: {}", result);
}
@Test
public void testReader() {
final ResourceSet<Account> result = requestBody("direct://READER", null);
assertNotNull(result, "reader result not null");
result.forEach(account -> {
assertNotNull(account, "reader result account not null");
LOG.debug("reader: {}", account);
});
}
@Test
public void testReaderWithStatusActive() {
final ResourceSet<Account> result = requestBodyAndHeaders("direct://READER", null,
headers("CamelTwilioStatus", "active"));
assertNotNull(result, "reader result not null");
result.forEach(account -> {
assertEquals(Account.Status.ACTIVE, account.getStatus(), "reader result account active");
LOG.debug("reader: {}", account);
});
}
private static Map<String, Object> headers(String name, Object value) {
Map<String, Object> headers = new HashMap<>();
headers.put(name, value);
return headers;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// test route for fetcher
from("direct://FETCHER")
.to("twilio://" + PATH_PREFIX + "/fetch");
// test route for reader
from("direct://READER")
.to("twilio://" + PATH_PREFIX + "/read");
}
};
}
}
| AccountIT |
java | netty__netty | transport/src/main/java/io/netty/channel/pool/FixedChannelPool.java | {
"start": 17279,
"end": 17808
} | class ____ extends AcquireListener {
final Promise<Channel> promise;
final long expireNanoTime = System.nanoTime() + acquireTimeoutNanos;
ScheduledFuture<?> timeoutFuture;
AcquireTask(Promise<Channel> promise) {
super(promise);
// We need to create a new promise as we need to ensure the AcquireListener runs in the correct
// EventLoop.
this.promise = executor.<Channel>newPromise().addListener(this);
}
}
private abstract | AcquireTask |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/CsrfConfigurerTests.java | {
"start": 40927,
"end": 41259
} | class ____ implements DeferredCsrfToken {
private final CsrfToken csrfToken;
private TestDeferredCsrfToken(CsrfToken csrfToken) {
this.csrfToken = csrfToken;
}
@Override
public CsrfToken get() {
return this.csrfToken;
}
@Override
public boolean isGenerated() {
return false;
}
}
}
| TestDeferredCsrfToken |
java | netty__netty | codec-http/src/main/java/io/netty/handler/codec/http/HttpClientCodec.java | {
"start": 2771,
"end": 11738
} | class ____ extends CombinedChannelDuplexHandler<HttpResponseDecoder, HttpRequestEncoder>
implements HttpClientUpgradeHandler.SourceCodec {
public static final boolean DEFAULT_FAIL_ON_MISSING_RESPONSE = false;
public static final boolean DEFAULT_PARSE_HTTP_AFTER_CONNECT_REQUEST = false;
/** A queue that is used for correlating a request and a response. */
private final Queue<HttpMethod> queue = new ArrayDeque<HttpMethod>();
private final boolean parseHttpAfterConnectRequest;
/** If true, decoding stops (i.e. pass-through) */
private boolean done;
private final AtomicLong requestResponseCounter = new AtomicLong();
private final boolean failOnMissingResponse;
/**
* Creates a new instance with the default decoder options
* ({@code maxInitialLineLength (4096)}, {@code maxHeaderSize (8192)}, and
* {@code maxChunkSize (8192)}).
*/
public HttpClientCodec() {
this(new HttpDecoderConfig(),
DEFAULT_PARSE_HTTP_AFTER_CONNECT_REQUEST,
DEFAULT_FAIL_ON_MISSING_RESPONSE);
}
/**
* Creates a new instance with the specified decoder options.
*/
public HttpClientCodec(int maxInitialLineLength, int maxHeaderSize, int maxChunkSize) {
this(new HttpDecoderConfig()
.setMaxInitialLineLength(maxInitialLineLength)
.setMaxHeaderSize(maxHeaderSize)
.setMaxChunkSize(maxChunkSize),
DEFAULT_PARSE_HTTP_AFTER_CONNECT_REQUEST,
DEFAULT_FAIL_ON_MISSING_RESPONSE);
}
/**
* Creates a new instance with the specified decoder options.
*/
public HttpClientCodec(
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean failOnMissingResponse) {
this(new HttpDecoderConfig()
.setMaxInitialLineLength(maxInitialLineLength)
.setMaxHeaderSize(maxHeaderSize)
.setMaxChunkSize(maxChunkSize),
DEFAULT_PARSE_HTTP_AFTER_CONNECT_REQUEST,
failOnMissingResponse);
}
/**
* Creates a new instance with the specified decoder options.
*
* @deprecated Prefer the {@link #HttpClientCodec(int, int, int, boolean)} constructor,
* to always enable header validation.
*/
@Deprecated
public HttpClientCodec(
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean failOnMissingResponse,
boolean validateHeaders) {
this(new HttpDecoderConfig()
.setMaxInitialLineLength(maxInitialLineLength)
.setMaxHeaderSize(maxHeaderSize)
.setMaxChunkSize(maxChunkSize)
.setValidateHeaders(validateHeaders),
DEFAULT_PARSE_HTTP_AFTER_CONNECT_REQUEST,
failOnMissingResponse);
}
/**
* Creates a new instance with the specified decoder options.
*
* @deprecated Prefer the {@link #HttpClientCodec(HttpDecoderConfig, boolean, boolean)} constructor,
* to always enable header validation.
*/
@Deprecated
public HttpClientCodec(
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean failOnMissingResponse,
boolean validateHeaders, boolean parseHttpAfterConnectRequest) {
this(new HttpDecoderConfig()
.setMaxInitialLineLength(maxInitialLineLength)
.setMaxHeaderSize(maxHeaderSize)
.setMaxChunkSize(maxChunkSize)
.setValidateHeaders(validateHeaders),
parseHttpAfterConnectRequest,
failOnMissingResponse);
}
/**
* Creates a new instance with the specified decoder options.
*
* @deprecated Prefer the {@link #HttpClientCodec(HttpDecoderConfig, boolean, boolean)} constructor,
* to always enable header validation.
*/
@Deprecated
public HttpClientCodec(
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean failOnMissingResponse,
boolean validateHeaders, int initialBufferSize) {
this(new HttpDecoderConfig()
.setMaxInitialLineLength(maxInitialLineLength)
.setMaxHeaderSize(maxHeaderSize)
.setMaxChunkSize(maxChunkSize)
.setValidateHeaders(validateHeaders)
.setInitialBufferSize(initialBufferSize),
DEFAULT_PARSE_HTTP_AFTER_CONNECT_REQUEST,
failOnMissingResponse);
}
/**
* Creates a new instance with the specified decoder options.
*
* @deprecated Prefer the {@link #HttpClientCodec(HttpDecoderConfig, boolean, boolean)} constructor,
* to always enable header validation.
*/
@Deprecated
public HttpClientCodec(
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean failOnMissingResponse,
boolean validateHeaders, int initialBufferSize, boolean parseHttpAfterConnectRequest) {
this(new HttpDecoderConfig()
.setMaxInitialLineLength(maxInitialLineLength)
.setMaxHeaderSize(maxHeaderSize)
.setMaxChunkSize(maxChunkSize)
.setValidateHeaders(validateHeaders)
.setInitialBufferSize(initialBufferSize),
parseHttpAfterConnectRequest,
failOnMissingResponse);
}
/**
* Creates a new instance with the specified decoder options.
*
* @deprecated Prefer the {@link #HttpClientCodec(HttpDecoderConfig, boolean, boolean)} constructor,
* to always enable header validation.
*/
@Deprecated
public HttpClientCodec(
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean failOnMissingResponse,
boolean validateHeaders, int initialBufferSize, boolean parseHttpAfterConnectRequest,
boolean allowDuplicateContentLengths) {
this(new HttpDecoderConfig()
.setMaxInitialLineLength(maxInitialLineLength)
.setMaxHeaderSize(maxHeaderSize)
.setMaxChunkSize(maxChunkSize)
.setValidateHeaders(validateHeaders)
.setInitialBufferSize(initialBufferSize)
.setAllowDuplicateContentLengths(allowDuplicateContentLengths),
parseHttpAfterConnectRequest,
failOnMissingResponse);
}
/**
* Creates a new instance with the specified decoder options.
*
* @deprecated Prefer the {@link #HttpClientCodec(HttpDecoderConfig, boolean, boolean)}
* constructor, to always enable header validation.
*/
@Deprecated
public HttpClientCodec(
int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean failOnMissingResponse,
boolean validateHeaders, int initialBufferSize, boolean parseHttpAfterConnectRequest,
boolean allowDuplicateContentLengths, boolean allowPartialChunks) {
this(new HttpDecoderConfig()
.setMaxInitialLineLength(maxInitialLineLength)
.setMaxHeaderSize(maxHeaderSize)
.setMaxChunkSize(maxChunkSize)
.setValidateHeaders(validateHeaders)
.setInitialBufferSize(initialBufferSize)
.setAllowDuplicateContentLengths(allowDuplicateContentLengths)
.setAllowPartialChunks(allowPartialChunks),
parseHttpAfterConnectRequest,
failOnMissingResponse);
}
/**
* Creates a new instance with the specified decoder options.
*/
public HttpClientCodec(
HttpDecoderConfig config, boolean parseHttpAfterConnectRequest, boolean failOnMissingResponse) {
init(new Decoder(config), new Encoder());
this.parseHttpAfterConnectRequest = parseHttpAfterConnectRequest;
this.failOnMissingResponse = failOnMissingResponse;
}
/**
* Prepares to upgrade to another protocol from HTTP. Disables the {@link Encoder}.
*/
@Override
public void prepareUpgradeFrom(ChannelHandlerContext ctx) {
((Encoder) outboundHandler()).upgraded = true;
}
/**
* Upgrades to another protocol from HTTP. Removes the {@link Decoder} and {@link Encoder} from
* the pipeline.
*/
@Override
public void upgradeFrom(ChannelHandlerContext ctx) {
final ChannelPipeline p = ctx.pipeline();
p.remove(this);
}
public void setSingleDecode(boolean singleDecode) {
inboundHandler().setSingleDecode(singleDecode);
}
public boolean isSingleDecode() {
return inboundHandler().isSingleDecode();
}
private final | HttpClientCodec |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/shuffle/ShuffleServiceOptions.java | {
"start": 1352,
"end": 1765
} | class ____ of the shuffle service factory implementation to be used by the cluster.
*/
public static final ConfigOption<String> SHUFFLE_SERVICE_FACTORY_CLASS =
ConfigOptions.key("shuffle-service-factory.class")
.stringType()
.defaultValue(NETTY_SHUFFLE_SERVICE_FACTORY_CLASS)
.withDescription(
"The full | name |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/internal/invocation/mockref/MockStrongReference.java | {
"start": 216,
"end": 781
} | class ____<T> implements MockReference<T> {
private final T ref;
private final boolean deserializeAsWeakRef;
public MockStrongReference(T ref, boolean deserializeAsWeakRef) {
this.ref = ref;
this.deserializeAsWeakRef = deserializeAsWeakRef;
}
@Override
public T get() {
return ref;
}
private Object readResolve() throws ObjectStreamException {
if (deserializeAsWeakRef) {
return new MockWeakReference<T>(ref);
} else {
return this;
}
}
}
| MockStrongReference |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/ops/NumberedNode.java | {
"start": 239,
"end": 1378
} | class ____ {
private long id;
private String name;
private NumberedNode parent;
private Set children = new HashSet();
private String description;
private Date created;
public NumberedNode() {
super();
}
public NumberedNode(String name) {
this.name = name;
created = new Date();
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public Set getChildren() {
return children;
}
public void setChildren(Set children) {
this.children = children;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public NumberedNode getParent() {
return parent;
}
public void setParent(NumberedNode parent) {
this.parent = parent;
}
public NumberedNode addChild(NumberedNode child) {
children.add(child);
child.setParent(this);
return this;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Date getCreated() {
return created;
}
public void setCreated(Date created) {
this.created = created;
}
}
| NumberedNode |
java | apache__rocketmq | proxy/src/main/java/org/apache/rocketmq/proxy/service/LocalServiceManager.java | {
"start": 5830,
"end": 6286
} | class ____ implements StartAndShutdown {
@Override
public void start() throws Exception {
LocalServiceManager.this.scheduledExecutorService.scheduleWithFixedDelay(channelManager::scanAndCleanChannels, 5, 5, TimeUnit.MINUTES);
}
@Override
public void shutdown() throws Exception {
LocalServiceManager.this.scheduledExecutorService.shutdown();
}
}
}
| LocalServiceManagerStartAndShutdown |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/authentication/OAuth2DeviceCodeAuthenticationProvider.java | {
"start": 3533,
"end": 12160
} | class ____ implements AuthenticationProvider {
private static final String DEFAULT_ERROR_URI = "https://datatracker.ietf.org/doc/html/rfc6749#section-5.2";
private static final String DEVICE_ERROR_URI = "https://datatracker.ietf.org/doc/html/rfc8628#section-3.5";
static final OAuth2TokenType DEVICE_CODE_TOKEN_TYPE = new OAuth2TokenType(OAuth2ParameterNames.DEVICE_CODE);
static final String EXPIRED_TOKEN = "expired_token";
static final String AUTHORIZATION_PENDING = "authorization_pending";
private final Log logger = LogFactory.getLog(getClass());
private final OAuth2AuthorizationService authorizationService;
private final OAuth2TokenGenerator<? extends OAuth2Token> tokenGenerator;
/**
* Constructs an {@code OAuth2DeviceCodeAuthenticationProvider} using the provided
* parameters.
* @param authorizationService the authorization service
* @param tokenGenerator the token generator
*/
public OAuth2DeviceCodeAuthenticationProvider(OAuth2AuthorizationService authorizationService,
OAuth2TokenGenerator<? extends OAuth2Token> tokenGenerator) {
Assert.notNull(authorizationService, "authorizationService cannot be null");
Assert.notNull(tokenGenerator, "tokenGenerator cannot be null");
this.authorizationService = authorizationService;
this.tokenGenerator = tokenGenerator;
}
@Override
public Authentication authenticate(Authentication authentication) throws AuthenticationException {
OAuth2DeviceCodeAuthenticationToken deviceCodeAuthentication = (OAuth2DeviceCodeAuthenticationToken) authentication;
OAuth2ClientAuthenticationToken clientPrincipal = OAuth2AuthenticationProviderUtils
.getAuthenticatedClientElseThrowInvalidClient(deviceCodeAuthentication);
RegisteredClient registeredClient = clientPrincipal.getRegisteredClient();
if (this.logger.isTraceEnabled()) {
this.logger.trace("Retrieved registered client");
}
OAuth2Authorization authorization = this.authorizationService
.findByToken(deviceCodeAuthentication.getDeviceCode(), DEVICE_CODE_TOKEN_TYPE);
if (authorization == null) {
throw new OAuth2AuthenticationException(OAuth2ErrorCodes.INVALID_GRANT);
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Retrieved authorization with device code");
}
OAuth2Authorization.Token<OAuth2UserCode> userCode = authorization.getToken(OAuth2UserCode.class);
OAuth2Authorization.Token<OAuth2DeviceCode> deviceCode = authorization.getToken(OAuth2DeviceCode.class);
if (!registeredClient.getId().equals(authorization.getRegisteredClientId())) {
if (!deviceCode.isInvalidated()) {
// Invalidate the device code given that a different client is attempting
// to use it
authorization = OAuth2Authorization.from(authorization).invalidate(deviceCode.getToken()).build();
this.authorizationService.save(authorization);
if (this.logger.isWarnEnabled()) {
this.logger.warn(LogMessage.format("Invalidated device code used by registered client '%s'",
authorization.getRegisteredClientId()));
}
}
throw new OAuth2AuthenticationException(OAuth2ErrorCodes.INVALID_GRANT);
}
// In https://www.rfc-editor.org/rfc/rfc8628.html#section-3.5,
// the following error codes are defined:
// expired_token
// The "device_code" has expired, and the device authorization
// session has concluded. The client MAY commence a new device
// authorization request but SHOULD wait for user interaction before
// restarting to avoid unnecessary polling.
if (deviceCode.isExpired()) {
if (!deviceCode.isInvalidated()) {
// Invalidate the device code
authorization = OAuth2Authorization.from(authorization).invalidate(deviceCode.getToken()).build();
this.authorizationService.save(authorization);
if (this.logger.isWarnEnabled()) {
this.logger.warn(LogMessage.format("Invalidated device code used by registered client '%s'",
authorization.getRegisteredClientId()));
}
}
OAuth2Error error = new OAuth2Error(EXPIRED_TOKEN, null, DEVICE_ERROR_URI);
throw new OAuth2AuthenticationException(error);
}
// authorization_pending
// The authorization request is still pending as the end user hasn't
// yet completed the user-interaction steps (Section 3.3). The
// client SHOULD repeat the access token request to the token
// endpoint (a process known as polling). Before each new request,
// the client MUST wait at least the number of seconds specified by
// the "interval" parameter of the device authorization response (see
// Section 3.2), or 5 seconds if none was provided, and respect any
// increase in the polling interval required by the "slow_down"
// error.
if (!userCode.isInvalidated()) {
OAuth2Error error = new OAuth2Error(AUTHORIZATION_PENDING, null, DEVICE_ERROR_URI);
throw new OAuth2AuthenticationException(error);
}
// slow_down
// A variant of "authorization_pending", the authorization request is
// still pending and polling should continue, but the interval MUST
// be increased by 5 seconds for this and all subsequent requests.
// NOTE: This error is not handled in the framework.
// access_denied
// The authorization request was denied.
if (deviceCode.isInvalidated()) {
OAuth2Error error = new OAuth2Error(OAuth2ErrorCodes.ACCESS_DENIED, null, DEVICE_ERROR_URI);
throw new OAuth2AuthenticationException(error);
}
// Verify the DPoP Proof (if available)
Jwt dPoPProof = DPoPProofVerifier.verifyIfAvailable(deviceCodeAuthentication);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Validated device token request parameters");
}
// @formatter:off
DefaultOAuth2TokenContext.Builder tokenContextBuilder = DefaultOAuth2TokenContext.builder()
.registeredClient(registeredClient)
.principal(authorization.getAttribute(Principal.class.getName()))
.authorizationServerContext(AuthorizationServerContextHolder.getContext())
.authorization(authorization)
.authorizedScopes(authorization.getAuthorizedScopes())
.authorizationGrantType(AuthorizationGrantType.DEVICE_CODE)
.authorizationGrant(deviceCodeAuthentication);
// @formatter:on
if (dPoPProof != null) {
tokenContextBuilder.put(OAuth2TokenContext.DPOP_PROOF_KEY, dPoPProof);
}
// @formatter:off
OAuth2Authorization.Builder authorizationBuilder = OAuth2Authorization.from(authorization)
// Invalidate the device code as it can only be used (successfully) once
.invalidate(deviceCode.getToken());
// @formatter:on
// ----- Access token -----
OAuth2TokenContext tokenContext = tokenContextBuilder.tokenType(OAuth2TokenType.ACCESS_TOKEN).build();
OAuth2Token generatedAccessToken = this.tokenGenerator.generate(tokenContext);
if (generatedAccessToken == null) {
OAuth2Error error = new OAuth2Error(OAuth2ErrorCodes.SERVER_ERROR,
"The token generator failed to generate the access token.", DEFAULT_ERROR_URI);
throw new OAuth2AuthenticationException(error);
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Generated access token");
}
OAuth2AccessToken accessToken = OAuth2AuthenticationProviderUtils.accessToken(authorizationBuilder,
generatedAccessToken, tokenContext);
// ----- Refresh token -----
OAuth2RefreshToken refreshToken = null;
if (registeredClient.getAuthorizationGrantTypes().contains(AuthorizationGrantType.REFRESH_TOKEN)) {
tokenContext = tokenContextBuilder.tokenType(OAuth2TokenType.REFRESH_TOKEN).build();
OAuth2Token generatedRefreshToken = this.tokenGenerator.generate(tokenContext);
if (!(generatedRefreshToken instanceof OAuth2RefreshToken)) {
OAuth2Error error = new OAuth2Error(OAuth2ErrorCodes.SERVER_ERROR,
"The token generator failed to generate the refresh token.", DEFAULT_ERROR_URI);
throw new OAuth2AuthenticationException(error);
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Generated refresh token");
}
refreshToken = (OAuth2RefreshToken) generatedRefreshToken;
authorizationBuilder.refreshToken(refreshToken);
}
authorization = authorizationBuilder.build();
this.authorizationService.save(authorization);
if (this.logger.isTraceEnabled()) {
this.logger.trace("Saved authorization");
}
if (this.logger.isTraceEnabled()) {
this.logger.trace("Authenticated device token request");
}
return new OAuth2AccessTokenAuthenticationToken(registeredClient, clientPrincipal, accessToken, refreshToken);
}
@Override
public boolean supports(Class<?> authentication) {
return OAuth2DeviceCodeAuthenticationToken.class.isAssignableFrom(authentication);
}
}
| OAuth2DeviceCodeAuthenticationProvider |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDbIndexedTimeOrderedWindowBytesStoreSupplier.java | {
"start": 1355,
"end": 7030
} | enum ____ {
DEFAULT_WINDOW_STORE,
INDEXED_WINDOW_STORE
}
private final String name;
private final long retentionPeriod;
private final long segmentInterval;
private final long windowSize;
private final boolean retainDuplicates;
private final WindowStoreTypes windowStoreType;
/**
 * Creates a supplier after validating the retention period and window size.
 *
 * <p>The segment interval defaults to half the retention period with a floor of one
 * minute.
 *
 * @param name store name; must not be {@code null}
 * @param retentionPeriod how long entries are retained; must be non-negative and no
 *     smaller than the window size
 * @param windowSize the window size; must be non-negative
 * @param retainDuplicates whether duplicates are retained
 * @param hasIndex whether the created store keeps an extra index
 * @return a new supplier
 * @throws IllegalArgumentException if any validation fails
 */
public static RocksDbIndexedTimeOrderedWindowBytesStoreSupplier create(final String name,
                                                                       final Duration retentionPeriod,
                                                                       final Duration windowSize,
                                                                       final boolean retainDuplicates,
                                                                       final boolean hasIndex) {
    Objects.requireNonNull(name, "name cannot be null");
    final String rpMsgPrefix = prepareMillisCheckFailMsgPrefix(retentionPeriod, "retentionPeriod");
    final long retentionMs = validateMillisecondDuration(retentionPeriod, rpMsgPrefix);
    final String wsMsgPrefix = prepareMillisCheckFailMsgPrefix(windowSize, "windowSize");
    final long windowSizeMs = validateMillisecondDuration(windowSize, wsMsgPrefix);
    if (retentionMs < 0L) {
        throw new IllegalArgumentException("retentionPeriod cannot be negative");
    }
    if (windowSizeMs < 0L) {
        throw new IllegalArgumentException("windowSize cannot be negative");
    }
    if (windowSizeMs > retentionMs) {
        throw new IllegalArgumentException("The retention period of the window store "
            + name + " must be no smaller than its window size. Got size=["
            + windowSizeMs + "], retention=[" + retentionMs + "]");
    }
    // Half the retention period, but never less than one minute. Math.max(..., 60_000L)
    // guarantees a positive value, so the former "segmentInterval cannot be zero or
    // negative" check was unreachable and has been removed.
    final long defaultSegmentInterval = Math.max(retentionMs / 2, 60_000L);
    return new RocksDbIndexedTimeOrderedWindowBytesStoreSupplier(name, retentionMs,
        defaultSegmentInterval, windowSizeMs, retainDuplicates, hasIndex);
}
/**
 * Convenience constructor that maps the {@code withIndex} flag to the corresponding
 * {@link WindowStoreTypes} value.
 */
public RocksDbIndexedTimeOrderedWindowBytesStoreSupplier(final String name,
                                                         final long retentionPeriod,
                                                         final long segmentInterval,
                                                         final long windowSize,
                                                         final boolean retainDuplicates,
                                                         final boolean withIndex) {
    this(name, retentionPeriod, segmentInterval, windowSize, retainDuplicates,
        withIndex
            ? WindowStoreTypes.INDEXED_WINDOW_STORE
            : WindowStoreTypes.DEFAULT_WINDOW_STORE);
}
/**
 * Fully specified constructor.
 *
 * @param name store name
 * @param retentionPeriod retention period in milliseconds
 * @param segmentInterval segment interval in milliseconds
 * @param windowSize window size in milliseconds
 * @param retainDuplicates whether duplicates are retained
 * @param windowStoreType which store variant {@link #get()} creates
 */
public RocksDbIndexedTimeOrderedWindowBytesStoreSupplier(final String name,
                                                         final long retentionPeriod,
                                                         final long segmentInterval,
                                                         final long windowSize,
                                                         final boolean retainDuplicates,
                                                         final WindowStoreTypes windowStoreType) {
    this.name = name;
    this.retentionPeriod = retentionPeriod;
    this.segmentInterval = segmentInterval;
    this.windowSize = windowSize;
    this.retainDuplicates = retainDuplicates;
    this.windowStoreType = windowStoreType;
}
/** @return the store name */
@Override
public String name() {
    return name;
}
/**
 * Builds the window store. Both store types share the exact same construction; the only
 * difference is whether the underlying segmented bytes store maintains an index, so the
 * duplicated switch branches were collapsed into a single construction path.
 *
 * @return a new {@link RocksDBTimeOrderedWindowStore}
 * @throws IllegalArgumentException if the store type is unknown
 */
@Override
public WindowStore<Bytes, byte[]> get() {
    final boolean hasIndex;
    switch (windowStoreType) {
        case DEFAULT_WINDOW_STORE:
            hasIndex = false;
            break;
        case INDEXED_WINDOW_STORE:
            hasIndex = true;
            break;
        default:
            throw new IllegalArgumentException("invalid window store type: " + windowStoreType);
    }
    return new RocksDBTimeOrderedWindowStore(
        new RocksDBTimeOrderedWindowSegmentedBytesStore(
            name,
            metricsScope(),
            retentionPeriod,
            segmentInterval,
            hasIndex),
        retainDuplicates,
        windowSize);
}
/** @return the scope name used for this store's metrics */
@Override
public String metricsScope() {
    return "rocksdb-window";
}
/** @return the segment interval in milliseconds */
@Override
public long segmentIntervalMs() {
    return segmentInterval;
}
/** @return the window size in milliseconds */
@Override
public long windowSize() {
    return windowSize;
}
/** @return whether duplicates are retained */
@Override
public boolean retainDuplicates() {
    return retainDuplicates;
}
/** @return the retention period in milliseconds */
@Override
public long retentionPeriod() {
    return retentionPeriod;
}
/**
 * Human-readable description of this supplier's configuration.
 */
@Override
public String toString() {
    final StringBuilder description =
        new StringBuilder("RocksDbIndexedTimeOrderedWindowBytesStoreSupplier{");
    description.append("name='").append(name).append('\'');
    description.append(", retentionPeriod=").append(retentionPeriod);
    description.append(", segmentInterval=").append(segmentInterval);
    description.append(", windowSize=").append(windowSize);
    description.append(", retainDuplicates=").append(retainDuplicates);
    description.append(", windowStoreType=").append(windowStoreType);
    description.append('}');
    return description.toString();
}
}
| WindowStoreTypes |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/zip/FileDataBlock.java | {
"start": 4862,
"end": 8789
} | class ____ {
// Size of the shared read buffer (10 KiB).
static final int BUFFER_SIZE = 1024 * 10;
// The file being read.
private final Path path;
// Number of callers currently holding this file open; resources are released at zero.
private int referenceCount;
// Primary channel used for reads.
private FileChannel fileChannel;
// Set when the channel was closed by thread interruption and must be reopened.
private boolean fileChannelInterrupted;
// Lazily created fallback used while the current thread is interrupted.
private RandomAccessFile randomAccessFile;
// Cache of the most recently read region of the file.
private ByteBuffer buffer;
// File offset of the first byte cached in buffer, or -1 if nothing is cached.
private long bufferPosition = -1;
// Number of valid bytes in buffer (may be <= 0 after a short/EOF read).
private int bufferSize;
// Guards all mutable state above.
private final Object lock = new Object();
/**
 * Creates file access for the given path.
 *
 * @param path the file to read; must be an existing regular file
 * @throws IllegalArgumentException if the path is not a regular file
 */
FileAccess(Path path) {
    if (!Files.isRegularFile(path)) {
        throw new IllegalArgumentException(path + " must be a regular file");
    }
    this.path = path;
}
/**
 * Reads bytes from the given absolute file position into {@code dst}, serving the read
 * from the internal buffer and refilling it on a miss.
 *
 * @param dst the destination buffer
 * @param position the absolute file position to read from
 * @return the number of bytes copied, or a value {@code <= 0} if the fill hit EOF
 * @throws IOException on read failure
 */
int read(ByteBuffer dst, long position) throws IOException {
    synchronized (this.lock) {
        // Refill when the requested position lies outside the cached region.
        if (position < this.bufferPosition || position >= this.bufferPosition + this.bufferSize) {
            fillBuffer(position);
        }
        if (this.bufferSize <= 0) {
            return this.bufferSize;
        }
        int offset = (int) (position - this.bufferPosition);
        int length = Math.min(this.bufferSize - offset, dst.remaining());
        dst.put(dst.position(), this.buffer, offset, length);
        dst.position(dst.position() + length);
        return length;
    }
}
/**
 * Fills the internal buffer starting at {@code position}. Reads via the
 * {@link FileChannel} when possible and falls back to a {@link RandomAccessFile} when
 * the current thread is interrupted, since a channel read would then fail and close the
 * channel.
 */
private void fillBuffer(long position) throws IOException {
    if (Thread.currentThread().isInterrupted()) {
        fillBufferUsingRandomAccessFile(position);
        return;
    }
    try {
        if (this.fileChannelInterrupted) {
            // A previous read was interrupted and closed the channel; reopen it first.
            repairFileChannel();
            this.fileChannelInterrupted = false;
        }
        this.buffer.clear();
        this.bufferSize = this.fileChannel.read(this.buffer, position);
        this.bufferPosition = position;
    }
    catch (ClosedByInterruptException ex) {
        // The channel is now closed; remember to repair it on the next uninterrupted
        // read and satisfy this one via the RandomAccessFile fallback.
        this.fileChannelInterrupted = true;
        fillBufferUsingRandomAccessFile(position);
    }
}
/**
 * Fallback fill path that reads through a lazily created {@link RandomAccessFile},
 * which (unlike {@link FileChannel}) is not closed by thread interruption.
 */
private void fillBufferUsingRandomAccessFile(long position) throws IOException {
    if (this.randomAccessFile == null) {
        this.randomAccessFile = new RandomAccessFile(this.path.toFile(), "r");
        tracker.openedFileChannel(this.path);
    }
    byte[] bytes = new byte[BUFFER_SIZE];
    this.randomAccessFile.seek(position);
    int len = this.randomAccessFile.read(bytes);
    this.buffer.clear();
    if (len > 0) {
        this.buffer.put(bytes, 0, len);
    }
    this.bufferSize = len;
    this.bufferPosition = position;
}
/**
 * Reopens the file channel after it was closed by an interrupted read, keeping the
 * open/close tracker balanced.
 */
private void repairFileChannel() throws IOException {
    tracker.closedFileChannel(this.path);
    this.fileChannel = FileChannel.open(this.path, StandardOpenOption.READ);
    tracker.openedFileChannel(this.path);
}
/**
 * Opens this file access, incrementing the reference count. The underlying channel and
 * buffer are created on the first open only.
 *
 * @throws IOException if the file cannot be opened
 */
void open() throws IOException {
    synchronized (this.lock) {
        if (this.referenceCount == 0) {
            debug.log("Opening '%s'", this.path);
            this.fileChannel = FileChannel.open(this.path, StandardOpenOption.READ);
            this.buffer = ByteBuffer.allocateDirect(BUFFER_SIZE);
            tracker.openedFileChannel(this.path);
        }
        this.referenceCount++;
        debug.log("Reference count for '%s' incremented to %s", this.path, this.referenceCount);
    }
}
/**
 * Closes this file access, decrementing the reference count. Underlying resources are
 * released only when the count reaches zero; calls on an already-closed instance are
 * no-ops.
 *
 * @throws IOException if closing the underlying files fails
 */
void close() throws IOException {
    synchronized (this.lock) {
        if (this.referenceCount == 0) {
            // Already fully closed.
            return;
        }
        this.referenceCount--;
        if (this.referenceCount == 0) {
            debug.log("Closing '%s'", this.path);
            this.buffer = null;
            this.bufferPosition = -1;
            this.bufferSize = 0;
            this.fileChannel.close();
            tracker.closedFileChannel(this.path);
            this.fileChannel = null;
            // The fallback reader is only present if an interrupted read occurred.
            if (this.randomAccessFile != null) {
                this.randomAccessFile.close();
                tracker.closedFileChannel(this.path);
                this.randomAccessFile = null;
            }
        }
        debug.log("Reference count for '%s' decremented to %s", this.path, this.referenceCount);
    }
}
/**
 * Throws the supplied exception if this file access is not currently open.
 *
 * @param <E> the exception type
 * @param exceptionSupplier supplies the exception to throw
 * @throws E if the reference count is zero
 */
<E extends Exception> void ensureOpen(Supplier<E> exceptionSupplier) throws E {
    synchronized (this.lock) {
        if (this.referenceCount == 0) {
            throw exceptionSupplier.get();
        }
    }
}
@Override
public String toString() {
    // Identify this access by the file it reads.
    return this.path.toString();
}
}
/**
* Internal tracker used to check open and closing of files in tests.
*/
| FileAccess |
java | apache__camel | components/camel-minio/src/main/java/org/apache/camel/component/minio/MinioEndpoint.java | {
"start": 1844,
"end": 9478
} | class ____ extends ScheduledPollEndpoint implements EndpointServiceLocation {
private static final Logger LOG = LoggerFactory.getLogger(MinioEndpoint.class);
// Client used for all MinIO operations; resolved in doStart() (configured instance or newly built).
private MinioClient minioClient;
// Endpoint configuration bound from the URI parameters.
@UriParam
private MinioConfiguration configuration;
/**
 * Creates a MinIO endpoint.
 *
 * @param uri the endpoint URI
 * @param component the owning component
 * @param configuration the endpoint configuration
 */
public MinioEndpoint(String uri, Component component, MinioConfiguration configuration) {
    super(uri, component);
    this.configuration = configuration;
}
@Override
public String getServiceUrl() {
    // The remote service location is the configured MinIO endpoint URL.
    return configuration.getEndpoint();
}
@Override
public String getServiceProtocol() {
    // MinIO is accessed through its REST API.
    return "rest";
}
/**
 * Creates a polling consumer for this endpoint.
 *
 * @param processor the processor invoked for each polled exchange
 * @return the configured consumer
 * @throws Exception if the consumer cannot be configured
 */
@Override
public Consumer createConsumer(Processor processor) throws Exception {
    MinioConsumer minioConsumer = new MinioConsumer(this, processor);
    configureConsumer(minioConsumer);
    minioConsumer.setMaxMessagesPerPoll(configuration.getMaxMessagesPerPoll());
    return minioConsumer;
}
/** Creates a producer for sending exchanges to MinIO. */
@Override
public Producer createProducer() {
    return new MinioProducer(this);
}
/**
 * Starts the endpoint: obtains (or builds) the MinIO client and, unless a specific
 * object name is configured, checks that the target bucket exists, optionally creating
 * it, and applies any configured bucket policy.
 *
 * @throws Exception if client creation or a bucket operation fails
 */
@Override
public void doStart() throws Exception {
    super.doStart();
    // Prefer an explicitly supplied client; otherwise build one from the configuration.
    minioClient
        = isNotEmpty(getConfiguration().getMinioClient()) ? getConfiguration().getMinioClient() : createMinioClient();
    String objectName = getConfiguration().getObjectName();
    if (isNotEmpty(objectName)) {
        // A single object is addressed directly, so no bucket management is needed.
        LOG.trace("Object name {} requested, so skipping bucket check...", objectName);
        return;
    }
    String bucketName = getConfiguration().getBucketName();
    LOG.trace("Querying whether bucket {} already exists...", bucketName);
    if (bucketExists(bucketName)) {
        LOG.trace("Bucket {} already exists", bucketName);
    } else {
        if (getConfiguration().isAutoCreateBucket()) {
            LOG.trace("AutoCreateBucket set to true, Creating bucket {}...", bucketName);
            makeBucket(bucketName);
            LOG.trace("Bucket created");
        }
    }
    if (isNotEmpty(getConfiguration().getPolicy())) {
        LOG.trace("Updating bucket {} with policy {}", bucketName, configuration.getPolicy());
        setBucketPolicy(bucketName);
        LOG.trace("Bucket policy updated");
    }
}
/** Stops the endpoint; no MinIO-specific cleanup is performed here. */
@Override
public void doStop() throws Exception {
    super.doStop();
}
/** @return the endpoint configuration */
public MinioConfiguration getConfiguration() {
    return configuration;
}
/** @param configuration the endpoint configuration to use */
public void setConfiguration(MinioConfiguration configuration) {
    this.configuration = configuration;
}
/** @return the MinIO client in use (may be null before doStart()) */
public MinioClient getMinioClient() {
    return minioClient;
}
/** @param minioClient the MinIO client to use */
public void setMinioClient(MinioClient minioClient) {
    this.minioClient = minioClient;
}
/**
 * Builds a {@link MinioClient} from the endpoint configuration. Optional settings
 * (proxy port, credentials, region, custom HTTP client) are applied only when present.
 *
 * @return the configured client
 * @throws IllegalArgumentException if no endpoint URL is configured
 */
MinioClient createMinioClient() {
    // Fail fast when the mandatory endpoint URL is missing.
    if (!isNotEmpty(configuration.getEndpoint())) {
        throw new IllegalArgumentException("Endpoint must be specified");
    }
    MinioClient.Builder builder = MinioClient.builder();
    if (isNotEmpty(configuration.getProxyPort())) {
        builder.endpoint(configuration.getEndpoint(), configuration.getProxyPort(),
            configuration.isSecure());
    } else {
        builder.endpoint(configuration.getEndpoint());
    }
    if (isNotEmpty(configuration.getAccessKey()) && isNotEmpty(configuration.getSecretKey())) {
        builder.credentials(configuration.getAccessKey(), configuration.getSecretKey());
    }
    if (isNotEmpty(configuration.getRegion())) {
        builder.region(configuration.getRegion());
    }
    if (isNotEmpty(configuration.getCustomHttpClient())) {
        builder.httpClient(configuration.getCustomHttpClient());
    }
    return builder.build();
}
/**
 * @param bucketName the bucket to check
 * @return whether the bucket exists on the server
 */
private boolean bucketExists(String bucketName) throws Exception {
    return minioClient.bucketExists(BucketExistsArgs.builder().bucket(bucketName).build());
}
/**
 * Creates the given bucket, propagating the configured object-lock flag and, when set,
 * the region.
 */
private void makeBucket(String bucketName) throws Exception {
    MakeBucketArgs.Builder makeBucketRequest
        = MakeBucketArgs.builder().bucket(bucketName).objectLock(getConfiguration().isObjectLock());
    if (isNotEmpty(getConfiguration().getRegion())) {
        makeBucketRequest.region(getConfiguration().getRegion());
    }
    minioClient.makeBucket(makeBucketRequest.build());
}
/** Applies the configured access policy to the given bucket. */
private void setBucketPolicy(String bucketName) throws Exception {
    LOG.trace("Updating bucket {} with policy...", bucketName);
    minioClient.setBucketPolicy(
        SetBucketPolicyArgs.builder().bucket(bucketName).config(getConfiguration().getPolicy()).build());
    LOG.trace("Bucket policy updated");
}
/**
 * Stats the given object and copies the response metadata into message headers.
 * Optional request conditions from the configuration (SSE-C key, offset/length,
 * version id, etag matchers, modification-time matchers) are applied first.
 *
 * @param objectName the object to stat
 * @param message the Camel message that receives the metadata headers
 * @throws Exception if the stat request fails
 */
void getObjectStat(String objectName, Message message) throws Exception {
    String bucketName = getConfiguration().getBucketName();
    StatObjectArgs.Builder statObjectRequest = StatObjectArgs.builder().bucket(bucketName).object(objectName);
    // Apply only the optional request parameters that are actually configured.
    MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getServerSideEncryptionCustomerKey,
        statObjectRequest::ssec);
    MinioChecks.checkLengthAndSetConfig(getConfiguration()::getOffset, statObjectRequest::offset);
    MinioChecks.checkLengthAndSetConfig(getConfiguration()::getLength, statObjectRequest::length);
    MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getVersionId, statObjectRequest::versionId);
    MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getMatchETag, statObjectRequest::matchETag);
    MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getNotMatchETag,
        statObjectRequest::notMatchETag);
    MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getModifiedSince,
        statObjectRequest::modifiedSince);
    MinioChecks.checkIfConfigIsNotEmptyAndSetAndConfig(getConfiguration()::getUnModifiedSince,
        statObjectRequest::unmodifiedSince);
    StatObjectResponse stat = minioClient.statObject(statObjectRequest.build());
    // set all stat as message headers
    message.setHeader(MinioConstants.OBJECT_NAME, stat.object());
    message.setHeader(MinioConstants.BUCKET_NAME, stat.bucket());
    message.setHeader(MinioConstants.E_TAG, stat.etag());
    message.setHeader(MinioConstants.LAST_MODIFIED, stat.headers().get("last-modified"));
    message.setHeader(MinioConstants.VERSION_ID, stat.headers().get("x-amz-version-id"));
    message.setHeader(MinioConstants.CONTENT_TYPE, stat.contentType());
    message.setHeader(MinioConstants.CONTENT_LENGTH, stat.size());
    message.setHeader(MinioConstants.CONTENT_ENCODING, stat.headers().get("content-encoding"));
    message.setHeader(MinioConstants.CONTENT_DISPOSITION, stat.headers().get("content-disposition"));
    message.setHeader(MinioConstants.CACHE_CONTROL, stat.headers().get("cache-control"));
    message.setHeader(MinioConstants.SERVER_SIDE_ENCRYPTION, stat.headers().get("x-amz-server-side-encryption"));
    message.setHeader(MinioConstants.EXPIRATION_TIME, stat.headers().get("x-amz-expiration"));
    message.setHeader(MinioConstants.REPLICATION_STATUS, stat.headers().get("x-amz-replication-status"));
    message.setHeader(MinioConstants.STORAGE_CLASS, stat.headers().get("x-amz-storage-class"));
}
}
| MinioEndpoint |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/LogicalTypesTest.java | {
"start": 35872,
"end": 36091
} | class ____
assertThatThrownBy(() -> StructuredType.newBuilder("@#$%^&*").build())
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Invalid | name |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/stubbing/CallingRealMethodTest.java | {
"start": 328,
"end": 417
} | class ____ extends TestBase {
@Mock TestedObject mock;
static | CallingRealMethodTest |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/functions/sink/filesystem/LocalStreamingFileSinkTest.java | {
"start": 1948,
"end": 26643
} | class ____ {
// Per-class temporary directory used as the sink output location in all tests.
@TempDir private static java.nio.file.Path tempFolder;
/** Opening and closing the sink without sending any input must not fail. */
@Test
void testClosingWithoutInput() throws Exception {
    final File outDir = TempDirUtils.newFolder(tempFolder);
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
            TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 124L); ) {
        testHarness.setup();
        testHarness.open();
    }
}
/** Closing a sink that was set up but never had its state initialized must not fail. */
@Test
void testClosingWithoutInitializingStateShouldNotFail() throws Exception {
    final File outDir = TempDirUtils.newFolder(tempFolder);
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
            TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 124L)) {
        testHarness.setup();
    }
}
/**
 * Verifies that, after restoring from a checkpoint, the in-progress part file is
 * truncated back to its checkpointed offset and finalized, while a stale in-progress
 * file written after the checkpoint is ignored and eventually overwritten.
 */
@Test
void testTruncateAfterRecoveryAndOverwrite() throws Exception {
    final File outDir = TempDirUtils.newFolder(tempFolder);
    OperatorSubtaskState snapshot;
    // we set the max bucket size to small so that we can know when it rolls
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
            TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 10L)) {
        testHarness.setup();
        testHarness.open();
        // this creates a new bucket "test1" and part-0-0
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 1), 1L));
        TestUtils.checkLocalFs(outDir, 1, 0);
        // we take a checkpoint so that we keep the in-progress file offset.
        snapshot = testHarness.snapshot(1L, 1L);
        // these will close part-0-0 and open part-0-1
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 2), 2L));
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 3), 3L));
        TestUtils.checkLocalFs(outDir, 2, 0);
        Map<File, String> contents = TestUtils.getFileContentByPath(outDir);
        int fileCounter = 0;
        for (Map.Entry<File, String> fileContents : contents.entrySet()) {
            if (fileContents.getKey().getName().contains(".part-0-0.inprogress")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@2\n");
            } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@3\n");
            }
        }
        assertThat(fileCounter).isEqualTo(2L);
    }
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
            TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 10L)) {
        testHarness.setup();
        testHarness.initializeState(snapshot);
        testHarness.open();
        // the in-progress is the not cleaned up one and the pending is truncated and finalized
        TestUtils.checkLocalFs(outDir, 2, 0);
        // now we go back to the first checkpoint so it should truncate part-0-0 and restart
        // part-0-1
        int fileCounter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            if (fileContents.getKey().getName().contains(".part-0-0.inprogress")) {
                // truncated
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@1\n");
            } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) {
                // ignored for now as we do not clean up. This will be overwritten.
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@3\n");
            }
        }
        assertThat(fileCounter).isEqualTo(2L);
        // the first closes part-0-0 and the second will open part-0-1
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 4), 4L));
        fileCounter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            if (fileContents.getKey().getName().contains(".part-0-0.inprogress")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@4\n");
            } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) {
                // ignored for now as we do not clean up. This will be overwritten.
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@3\n");
            }
        }
        assertThat(fileCounter).isEqualTo(2L);
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 5), 5L));
        TestUtils.checkLocalFs(
            outDir, 3,
            0); // the previous part-0-1 in progress is simply ignored (random extension)
        testHarness.snapshot(2L, 2L);
        // this will close the new part-0-1
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 6), 6L));
        TestUtils.checkLocalFs(outDir, 3, 0);
        fileCounter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            if (fileContents.getKey().getName().contains(".part-0-0.inprogress")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@4\n");
            } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) {
                if (fileContents.getValue().equals("test1@5\ntest1@6\n")
                        || fileContents.getValue().equals("test1@3\n")) {
                    fileCounter++;
                }
            }
        }
        assertThat(fileCounter).isEqualTo(3L);
        // this will publish part-0-0
        testHarness.notifyOfCompletedCheckpoint(2L);
        TestUtils.checkLocalFs(outDir, 2, 1);
        fileCounter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            if (fileContents.getKey().getName().equals("part-0-0")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@4\n");
            } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) {
                if (fileContents.getValue().equals("test1@5\ntest1@6\n")
                        || fileContents.getValue().equals("test1@3\n")) {
                    fileCounter++;
                }
            }
        }
        assertThat(fileCounter).isEqualTo(3L);
    }
}
/**
 * Verifies that part files staged across several checkpoints are committed in checkpoint
 * order when the completion notifications arrive (including a notification that covers
 * multiple checkpoints at once).
 */
@Test
void testCommitStagedFilesInCorrectOrder() throws Exception {
    final File outDir = TempDirUtils.newFolder(tempFolder);
    // we set the max bucket size to small so that we can know when it rolls
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
            TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 10L)) {
        testHarness.setup();
        testHarness.open();
        testHarness.setProcessingTime(0L);
        // these 2 create a new bucket "test1", with a .part-0-0.inprogress and also fill it
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 1), 1L));
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 2), 2L));
        TestUtils.checkLocalFs(outDir, 1, 0);
        // this will open .part-0-1.inprogress
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 3), 3L));
        TestUtils.checkLocalFs(outDir, 2, 0);
        // we take a checkpoint so that we keep the in-progress file offset.
        testHarness.snapshot(1L, 1L);
        // this will close .part-0-1.inprogress
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 4), 4L));
        // and open and fill .part-0-2.inprogress
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 5), 5L));
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 6), 6L));
        TestUtils.checkLocalFs(outDir, 3, 0); // nothing committed yet
        testHarness.snapshot(2L, 2L);
        // open .part-0-3.inprogress
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 7), 7L));
        TestUtils.checkLocalFs(outDir, 4, 0);
        // this will close the part file (time)
        testHarness.setProcessingTime(101L);
        testHarness.snapshot(3L, 3L);
        testHarness.notifyOfCompletedCheckpoint(
            1L); // the pending for checkpoint 1 are committed
        TestUtils.checkLocalFs(outDir, 3, 1);
        int fileCounter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            if (fileContents.getKey().getName().equals("part-0-0")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@2\n");
            } else if (fileContents.getKey().getName().contains(".part-0-1.inprogress")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@3\ntest1@4\n");
            } else if (fileContents.getKey().getName().contains(".part-0-2.inprogress")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@5\ntest1@6\n");
            } else if (fileContents.getKey().getName().contains(".part-0-3.inprogress")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@7\n");
            }
        }
        assertThat(fileCounter).isEqualTo(4L);
        testHarness.notifyOfCompletedCheckpoint(
            3L); // all the pending for checkpoint 2 and 3 are committed
        TestUtils.checkLocalFs(outDir, 0, 4);
        fileCounter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            if (fileContents.getKey().getName().equals("part-0-0")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@1\ntest1@2\n");
            } else if (fileContents.getKey().getName().equals("part-0-1")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@3\ntest1@4\n");
            } else if (fileContents.getKey().getName().equals("part-0-2")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@5\ntest1@6\n");
            } else if (fileContents.getKey().getName().equals("part-0-3")) {
                fileCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@7\n");
            }
        }
        assertThat(fileCounter).isEqualTo(4L);
    }
}
/**
 * Verifies that buckets rolled by the inactivity timeout stay pending until the matching
 * checkpoint-complete notification arrives, even when notifications arrive late and out
 * of step with new activity.
 */
@Test
void testInactivityPeriodWithLateNotify() throws Exception {
    final File outDir = TempDirUtils.newFolder(tempFolder);
    // we set a big bucket size so that it does not close by size, but by timers.
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
            TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 124L)) {
        testHarness.setup();
        testHarness.open();
        testHarness.setProcessingTime(0L);
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 1), 1L));
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test2", 1), 1L));
        TestUtils.checkLocalFs(outDir, 2, 0);
        int bucketCounter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            if (fileContents.getKey().getParentFile().getName().equals("test1")) {
                bucketCounter++;
            } else if (fileContents.getKey().getParentFile().getName().equals("test2")) {
                bucketCounter++;
            }
        }
        assertThat(bucketCounter)
            .isEqualTo(2L); // verifies that we have 2 buckets, "test1" and "test2"
        testHarness.setProcessingTime(101L); // put them in pending
        TestUtils.checkLocalFs(outDir, 2, 0);
        testHarness.snapshot(0L, 0L); // put them in pending for 0
        TestUtils.checkLocalFs(outDir, 2, 0);
        // create another 2 buckets with 1 inprogress file each
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test3", 1), 1L));
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test4", 1), 1L));
        testHarness.setProcessingTime(202L); // put them in pending
        testHarness.snapshot(1L, 0L); // put them in pending for 1
        TestUtils.checkLocalFs(outDir, 4, 0);
        testHarness.notifyOfCompletedCheckpoint(
            0L); // put the pending for 0 to the "committed" state
        TestUtils.checkLocalFs(outDir, 2, 2);
        bucketCounter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            if (fileContents.getKey().getParentFile().getName().equals("test1")) {
                bucketCounter++;
                assertThat(fileContents.getKey().getName()).isEqualTo("part-0-0");
                assertThat(fileContents.getValue()).isEqualTo("test1@1\n");
            } else if (fileContents.getKey().getParentFile().getName().equals("test2")) {
                bucketCounter++;
                assertThat(fileContents.getKey().getName()).isEqualTo("part-0-1");
                assertThat(fileContents.getValue()).isEqualTo("test2@1\n");
            } else if (fileContents.getKey().getParentFile().getName().equals("test3")) {
                bucketCounter++;
            } else if (fileContents.getKey().getParentFile().getName().equals("test4")) {
                bucketCounter++;
            }
        }
        assertThat(bucketCounter).isEqualTo(4L);
        testHarness.notifyOfCompletedCheckpoint(
            1L); // put the pending for 1 to the "committed" state
        TestUtils.checkLocalFs(outDir, 0, 4);
        bucketCounter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            if (fileContents.getKey().getParentFile().getName().equals("test1")) {
                bucketCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test1@1\n");
            } else if (fileContents.getKey().getParentFile().getName().equals("test2")) {
                bucketCounter++;
                assertThat(fileContents.getValue()).isEqualTo("test2@1\n");
            } else if (fileContents.getKey().getParentFile().getName().equals("test3")) {
                bucketCounter++;
                assertThat(fileContents.getKey().getName()).isEqualTo("part-0-2");
                assertThat(fileContents.getValue()).isEqualTo("test3@1\n");
            } else if (fileContents.getKey().getParentFile().getName().equals("test4")) {
                bucketCounter++;
                assertThat(fileContents.getKey().getName()).isEqualTo("part-0-3");
                assertThat(fileContents.getValue()).isEqualTo("test4@1\n");
            }
        }
        assertThat(bucketCounter).isEqualTo(4L);
    }
}
/**
 * Verifies that part files become pending on snapshot and committed on notification, and
 * that closing the sink does not finalize a file created after the last snapshot.
 */
@Test
void testClosingOnSnapshot() throws Exception {
    final File outDir = TempDirUtils.newFolder(tempFolder);
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
            TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 2L)) {
        testHarness.setup();
        testHarness.open();
        testHarness.setProcessingTime(0L);
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 1), 1L));
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test2", 1), 1L));
        TestUtils.checkLocalFs(outDir, 2, 0);
        // this is to check the inactivity threshold
        testHarness.setProcessingTime(101L);
        TestUtils.checkLocalFs(outDir, 2, 0);
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test3", 1), 1L));
        TestUtils.checkLocalFs(outDir, 3, 0);
        testHarness.snapshot(0L, 1L);
        TestUtils.checkLocalFs(outDir, 3, 0);
        testHarness.notifyOfCompletedCheckpoint(0L);
        TestUtils.checkLocalFs(outDir, 0, 3);
        testHarness.snapshot(1L, 0L);
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test4", 10), 10L));
        TestUtils.checkLocalFs(outDir, 1, 3);
    }
    // at close it is not moved to final.
    TestUtils.checkLocalFs(outDir, 1, 3);
}
/**
 * Verifies the sink with a customized bucket assigner, encoder, and rolling policy: each
 * element lands in its own numeric bucket and all files are finalized once the final
 * checkpoint completes and the sink closes.
 */
@Test
void testClosingWithCustomizedBucketer() throws Exception {
    final File outDir = TempDirUtils.newFolder(tempFolder);
    final long partMaxSize = 2L;
    final long inactivityInterval = 100L;
    final RollingPolicy<Tuple2<String, Integer>, Integer> rollingPolicy =
        DefaultRollingPolicy.builder()
            .withMaxPartSize(new MemorySize(partMaxSize))
            .withRolloverInterval(Duration.ofMillis(inactivityInterval))
            .withInactivityInterval(Duration.ofMillis(inactivityInterval))
            .build();
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
            TestUtils.createCustomizedRescalingTestSink(
                outDir,
                1,
                0,
                100L,
                new TupleToIntegerBucketer(),
                new Tuple2Encoder(),
                rollingPolicy,
                new DefaultBucketFactoryImpl<>()); ) {
        testHarness.setup();
        testHarness.open();
        testHarness.setProcessingTime(0L);
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test1", 1), 1L));
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test2", 2), 1L));
        TestUtils.checkLocalFs(outDir, 2, 0);
        // this is to check the inactivity threshold
        testHarness.setProcessingTime(101L);
        TestUtils.checkLocalFs(outDir, 2, 0);
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test3", 3), 1L));
        TestUtils.checkLocalFs(outDir, 3, 0);
        testHarness.snapshot(0L, 1L);
        TestUtils.checkLocalFs(outDir, 3, 0);
        testHarness.notifyOfCompletedCheckpoint(0L);
        TestUtils.checkLocalFs(outDir, 0, 3);
        testHarness.processElement(new StreamRecord<>(Tuple2.of("test4", 4), 10L));
        TestUtils.checkLocalFs(outDir, 1, 3);
        testHarness.snapshot(1L, 0L);
        testHarness.notifyOfCompletedCheckpoint(1L);
    }
    // at close all files moved to final.
    TestUtils.checkLocalFs(outDir, 0, 4);
    // check file content and bucket ID.
    Map<File, String> contents = TestUtils.getFileContentByPath(outDir);
    for (Map.Entry<File, String> fileContents : contents.entrySet()) {
        Integer bucketId = Integer.parseInt(fileContents.getKey().getParentFile().getName());
        assertThat(bucketId).isBetween(1, 4);
        assertThat(fileContents.getValue())
            .isEqualTo(String.format("test%d@%d\n", bucketId, bucketId));
    }
}
/**
 * Verifies scale-down: state from two parallel sink subtasks is snapshotted, repackaged
 * (deliberately in reverse order, shuffling the states), and restored into a single
 * subtask, which must merge both states and commit the pending files correctly.
 */
@Test
void testScalingDownAndMergingOfStates() throws Exception {
    final File outDir = TempDirUtils.newFolder(tempFolder);
    OperatorSubtaskState mergedSnapshot;
    // we set small file size so that the part file rolls on every element.
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness1 =
            TestUtils.createRescalingTestSink(outDir, 2, 0, 100L, 10L);
        OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness2 =
            TestUtils.createRescalingTestSink(outDir, 2, 1, 100L, 10L)) {
        testHarness1.setup();
        testHarness1.open();
        testHarness2.setup();
        testHarness2.open();
        testHarness1.processElement(new StreamRecord<>(Tuple2.of("test1", 0), 0L));
        TestUtils.checkLocalFs(outDir, 1, 0);
        testHarness2.processElement(new StreamRecord<>(Tuple2.of("test1", 1), 1L));
        testHarness2.processElement(new StreamRecord<>(Tuple2.of("test2", 1), 1L));
        // all the files are in-progress
        TestUtils.checkLocalFs(outDir, 3, 0);
        int counter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            final String parentFilename = fileContents.getKey().getParentFile().getName();
            final String inProgressFilename = fileContents.getKey().getName();
            if (parentFilename.equals("test1")
                    && (inProgressFilename.contains(".part-0-0.inprogress")
                        || inProgressFilename.contains(".part-1-0.inprogress"))) {
                counter++;
            } else if (parentFilename.equals("test2")
                    && inProgressFilename.contains(".part-1-1.inprogress")) {
                counter++;
            }
        }
        assertThat(counter).isEqualTo(3L);
        // intentionally we snapshot them in the reverse order so that the states are shuffled
        mergedSnapshot =
            AbstractStreamOperatorTestHarness.repackageState(
                testHarness1.snapshot(1L, 0L), testHarness2.snapshot(1L, 0L));
    }
    final OperatorSubtaskState initState =
        AbstractStreamOperatorTestHarness.repartitionOperatorState(
            mergedSnapshot, TestUtils.MAX_PARALLELISM, 2, 1, 0);
    try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Object> testHarness =
            TestUtils.createRescalingTestSink(outDir, 1, 0, 100L, 10L)) {
        testHarness.setup();
        testHarness.initializeState(initState);
        testHarness.open();
        // still everything in-progress but the in-progress for prev task 1 should be put in
        // pending now
        TestUtils.checkLocalFs(outDir, 3, 0);
        testHarness.snapshot(2L, 2L);
        testHarness.notifyOfCompletedCheckpoint(2L);
        int counter = 0;
        for (Map.Entry<File, String> fileContents :
                TestUtils.getFileContentByPath(outDir).entrySet()) {
            final String parentFilename = fileContents.getKey().getParentFile().getName();
            final String filename = fileContents.getKey().getName();
            if (parentFilename.equals("test1")) {
                // the following is because it depends on the order in which the states are
                // consumed in the initialize state.
                if (filename.contains("-0.inprogress") || filename.endsWith("-0")) {
                    counter++;
                    assertThat(fileContents.getValue()).isIn("test1@1\n", "test1@0\n");
                }
            } else if (parentFilename.equals("test2")
                    && filename.contains(".part-1-1.inprogress")) {
                counter++;
                assertThat(fileContents.getValue()).isEqualTo("test2@1\n");
            }
        }
        assertThat(counter).isEqualTo(3L);
    }
}
}
| LocalStreamingFileSinkTest |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/calcite/rex/RexUtil.java | {
"start": 29404,
"end": 93013
} | class ____ implements RexVisitor<Boolean> {
static final ConstantFinder INSTANCE = new ConstantFinder();
@Override
public Boolean visitLiteral(RexLiteral literal) {
return true;
}
@Override
public Boolean visitInputRef(RexInputRef inputRef) {
return false;
}
@Override
public Boolean visitLocalRef(RexLocalRef localRef) {
return false;
}
@Override
public Boolean visitOver(RexOver over) {
return false;
}
@Override
public Boolean visitSubQuery(RexSubQuery subQuery) {
return false;
}
@Override
public Boolean visitTableInputRef(RexTableInputRef ref) {
return false;
}
@Override
public Boolean visitPatternFieldRef(RexPatternFieldRef fieldRef) {
return false;
}
@Override
public Boolean visitCorrelVariable(RexCorrelVariable correlVariable) {
// Correlating variables change when there is an internal restart.
// Not good enough for our purposes.
return false;
}
@Override
public Boolean visitDynamicParam(RexDynamicParam dynamicParam) {
// Dynamic parameters are constant WITHIN AN EXECUTION, so that's
// good enough.
return true;
}
@Override
public Boolean visitCall(RexCall call) {
// Constant if operator meets the following conditions:
// 1. It is deterministic;
// 2. All its operands are constant.
return call.getOperator().isDeterministic()
&& RexVisitorImpl.visitArrayAnd(this, call.getOperands());
}
@Override
public Boolean visitRangeRef(RexRangeRef rangeRef) {
return false;
}
@Override
public Boolean visitFieldAccess(RexFieldAccess fieldAccess) {
// "<expr>.FIELD" is constant iff "<expr>" is constant.
return fieldAccess.getReferenceExpr().accept(this);
}
}
/**
 * Returns whether node is made up of constants.
 *
 * @param node Node to inspect
 * @return true if node is made up of constants, false otherwise
 */
public static boolean isConstant(RexNode node) {
    // Delegate to the shared, stateless constant-detecting visitor.
    final RexVisitor<Boolean> finder = ConstantFinder.INSTANCE;
    return node.accept(finder);
}
/**
 * Returns whether a given expression is deterministic.
 *
 * @param e Expression
 * @return true if tree result is deterministic, false otherwise
 */
public static boolean isDeterministic(RexNode e) {
    // Visitor aborts the traversal via FoundOne as soon as any
    // non-deterministic operator is encountered.
    final RexVisitor<Void> checker =
            new RexVisitorImpl<Void>(true) {
                @Override
                public Void visitCall(RexCall call) {
                    if (!call.getOperator().isDeterministic()) {
                        throw Util.FoundOne.NULL;
                    }
                    return super.visitCall(call);
                }
            };
    try {
        e.accept(checker);
    } catch (Util.FoundOne found) {
        Util.swallow(found, null);
        return false;
    }
    return true;
}
/**
 * Filters a list of expressions, retaining only those that are deterministic
 * (see {@link #isDeterministic(RexNode)}).
 *
 * @param list expressions to filter
 * @return a new list containing only the deterministic expressions, in order
 */
public static List<RexNode> retainDeterministic(List<RexNode> list) {
    // Renamed the misspelled local "conjuctions"; the inputs are not
    // necessarily conjunctions anyway, just arbitrary expressions.
    final List<RexNode> deterministic = new ArrayList<>();
    for (RexNode x : list) {
        if (isDeterministic(x)) {
            deterministic.add(x);
        }
    }
    return deterministic;
}
/**
 * Returns whether a given node contains a RexCall with a specified operator.
 *
 * @param operator Operator to look for
 * @param node a RexNode tree
 */
public static @Nullable RexCall findOperatorCall(final SqlOperator operator, RexNode node) {
    try {
        // Throwing FoundOne short-circuits the traversal at the first match.
        node.accept(
                new RexVisitorImpl<Void>(true) {
                    @Override
                    public Void visitCall(RexCall call) {
                        if (call.getOperator().equals(operator)) {
                            throw new Util.FoundOne(call);
                        }
                        return super.visitCall(call);
                    }
                });
    } catch (Util.FoundOne found) {
        Util.swallow(found, null);
        return (RexCall) found.getNode();
    }
    return null;
}
/**
 * Returns whether a given tree contains any {@link RexInputRef} nodes.
 *
 * <p>NOTE(review): unlike the description suggests, this copy also returns true when the
 * tree contains a {@code RexTableArgCall} — presumably a Flink-local extension so that
 * table arguments count as input references; confirm intent against upstream Calcite.
 *
 * @param node a RexNode tree
 */
public static boolean containsInputRef(RexNode node) {
    try {
        RexVisitor<Void> visitor =
                new RexVisitorImpl<Void>(true) {
                    @Override
                    public Void visitInputRef(RexInputRef inputRef) {
                        // Found one: abort the traversal via exception.
                        throw new Util.FoundOne(inputRef);
                    }
                    @Override
                    public Void visitCall(RexCall call) {
                        if (call instanceof RexTableArgCall) {
                            // Table-argument calls are treated like input refs.
                            throw new Util.FoundOne(call);
                        }
                        return super.visitCall(call);
                    }
                };
        node.accept(visitor);
        return false;
    } catch (Util.FoundOne e) {
        Util.swallow(e, null);
        return true;
    }
}
/**
 * Returns whether a given tree contains any {@link org.apache.calcite.rex.RexFieldAccess}
 * nodes.
 *
 * @param node a RexNode tree
 */
public static boolean containsFieldAccess(RexNode node) {
    // Visitor throws FoundOne on the first field access, ending the walk early.
    final RexVisitor<Void> finder =
            new RexVisitorImpl<Void>(true) {
                @Override
                public Void visitFieldAccess(RexFieldAccess fieldAccess) {
                    throw new Util.FoundOne(fieldAccess);
                }
            };
    try {
        node.accept(finder);
    } catch (Util.FoundOne found) {
        Util.swallow(found, null);
        return true;
    }
    return false;
}
/**
 * Determines whether a {@link RexCall} requires decimal expansion. It usually requires
 * expansion if it has decimal operands.
 *
 * <p>Exceptions to this rule are:
 *
 * <ul>
 *   <li>isNull doesn't require expansion
 *   <li>It's okay to cast decimals to and from char types
 *   <li>It's okay to cast nulls as decimals
 *   <li>Casts require expansion if their return type is decimal
 *   <li>Reinterpret casts can handle a decimal operand
 * </ul>
 *
 * @param expr expression possibly in need of expansion
 * @param recurse whether to check nested calls
 * @return whether the expression requires expansion
 */
public static boolean requiresDecimalExpansion(RexNode expr, boolean recurse) {
    if (!(expr instanceof RexCall)) {
        // Only calls can require expansion; literals and refs never do.
        return false;
    }
    RexCall call = (RexCall) expr;
    boolean localCheck = true;
    switch (call.getKind()) {
        case REINTERPRET:
        case IS_NULL:
            // These handle decimal operands natively; no expansion needed.
            localCheck = false;
            break;
        case CAST:
            RelDataType lhsType = call.getType();
            RelDataType rhsType = call.operands.get(0).getType();
            if (rhsType.getSqlTypeName() == SqlTypeName.NULL) {
                // Casting NULL to a decimal needs no expansion.
                return false;
            }
            if (SqlTypeUtil.inCharFamily(lhsType) || SqlTypeUtil.inCharFamily(rhsType)) {
                // Decimal <-> char casts are okay without expansion.
                localCheck = false;
            } else if (SqlTypeUtil.isDecimal(lhsType) && (lhsType != rhsType)) {
                // Cast producing a decimal of a different type requires expansion.
                return true;
            }
            break;
        default:
            // Defer to the operator's own declaration.
            localCheck = call.getOperator().requiresDecimalExpansion();
    }
    if (localCheck) {
        if (SqlTypeUtil.isDecimal(call.getType())) {
            // NOTE jvs 27-Mar-2007: Depending on the type factory, the
            // result of a division may be decimal, even though both inputs
            // are integer.
            return true;
        }
        for (int i = 0; i < call.operands.size(); i++) {
            if (SqlTypeUtil.isDecimal(call.operands.get(i).getType())) {
                return true;
            }
        }
    }
    // Optionally descend into operand calls as well.
    return recurse && requiresDecimalExpansion(call.operands, true);
}
/** Determines whether any operand of a set requires decimal expansion. */
public static boolean requiresDecimalExpansion(List<RexNode> operands, boolean recurse) {
    for (RexNode operand : operands) {
        // Non-call operands can never require expansion; skip them.
        if (operand instanceof RexCall
                && requiresDecimalExpansion((RexCall) operand, recurse)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns whether a {@link RexProgram} contains expressions which require decimal expansion.
 */
public static boolean requiresDecimalExpansion(RexProgram program, boolean recurse) {
    // Check every expression in the program's common sub-expression list.
    for (RexNode expr : program.getExprList()) {
        if (requiresDecimalExpansion(expr, recurse)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns whether a REINTERPRET call carries an overflow-check flag.
 *
 * <p>NOTE(review): the presence of a second operand is treated as the overflow
 * indicator — confirm against {@code RexBuilder.makeReinterpretCast}.
 *
 * @param call a call that must be of kind {@link SqlKind#REINTERPRET}
 */
public static boolean canReinterpretOverflow(RexCall call) {
    assert call.isA(SqlKind.REINTERPRET) : "call is not a reinterpret";
    return call.operands.size() > 1;
}
/** Returns whether an array of expressions has any common sub-expressions. */
public static boolean containNoCommonExprs(List<RexNode> exprs, Litmus litmus) {
    // The normalizer throws SubExprExistsException the first time it sees a
    // sub-expression twice; catching it outside the loop ends the scan early.
    final ExpressionNormalizer normalizer = new ExpressionNormalizer(false);
    try {
        for (RexNode expr : exprs) {
            expr.accept(normalizer);
        }
    } catch (ExpressionNormalizer.SubExprExistsException e) {
        Util.swallow(e, null);
        return litmus.fail(null);
    }
    return litmus.succeed();
}
/**
 * Returns whether an array of expressions contains no forward references. That is, if
 * expression #i contains a {@link RexInputRef} referencing field i or greater.
 *
 * @param exprs Array of expressions
 * @param inputRowType Input row type
 * @param litmus What to do if an error is detected (there is a forward reference)
 * @return Whether there is a forward reference
 */
public static boolean containNoForwardRefs(
        List<RexNode> exprs, RelDataType inputRowType, Litmus litmus) {
    // One visitor is reused across all expressions; only its limit changes.
    final ForwardRefFinder visitor = new ForwardRefFinder(inputRowType);
    for (int i = 0; i < exprs.size(); i++) {
        RexNode expr = exprs.get(i);
        visitor.setLimit(i); // field cannot refer to self or later field
        try {
            expr.accept(visitor);
        } catch (ForwardRefFinder.IllegalForwardRefException e) {
            Util.swallow(e, null);
            return litmus.fail("illegal forward reference in {}", expr);
        }
    }
    return litmus.succeed();
}
/**
 * Returns whether an array of expressions contains no aggregate function calls whose
 * arguments are not {@link RexLocalRef}s or {@link RexLiteral}s.
 *
 * <p>NOTE(review): the original javadoc said {@code RexInputRef}, but the check below
 * accepts local refs and literals as "trivial" arguments; doc updated to match the code.
 *
 * @param exprs Expressions
 * @param litmus Whether to assert if there is such a function call
 */
static boolean containNoNonTrivialAggs(List<RexNode> exprs, Litmus litmus) {
    for (RexNode expr : exprs) {
        if (expr instanceof RexCall) {
            RexCall rexCall = (RexCall) expr;
            if (rexCall.getOperator() instanceof SqlAggFunction) {
                for (RexNode operand : rexCall.operands) {
                    // Any operand that is neither a local ref nor a literal
                    // makes the aggregate "non-trivial".
                    if (!(operand instanceof RexLocalRef) && !(operand instanceof RexLiteral)) {
                        return litmus.fail("contains non trivial agg: {}", operand);
                    }
                }
            }
        }
    }
    return litmus.succeed();
}
/**
 * Returns whether a list of expressions contains complex expressions, that is, a call whose
 * arguments are not {@link RexVariable} (or a subtype such as {@link RexInputRef}) or {@link
 * RexLiteral}.
 */
public static boolean containComplexExprs(List<RexNode> exprs) {
    for (RexNode expr : exprs) {
        // Only calls can be "complex"; everything else is atomic by definition.
        if (!(expr instanceof RexCall)) {
            continue;
        }
        for (RexNode operand : ((RexCall) expr).operands) {
            if (!isAtomic(operand)) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Returns whether any of the given expression trees contains a {@link RexTableInputRef}
 * node.
 *
 * @param nodes a list of RexNode trees
 * @return true if at least one was found, otherwise false
 */
public static boolean containsTableInputRef(List<RexNode> nodes) {
    // Delegate per-tree detection to the single-node overload.
    return nodes.stream().anyMatch(e -> containsTableInputRef(e) != null);
}
/**
 * Returns whether a given tree contains any {@link RexTableInputRef} nodes.
 *
 * @param node a RexNode tree
 * @return first such node found or null if it there is no such node
 */
public static @Nullable RexTableInputRef containsTableInputRef(RexNode node) {
    // The visitor throws FoundOne carrying the first table input ref it sees.
    final RexVisitor<Void> finder =
            new RexVisitorImpl<Void>(true) {
                @Override
                public Void visitTableInputRef(RexTableInputRef inputRef) {
                    throw new Util.FoundOne(inputRef);
                }
            };
    try {
        node.accept(finder);
    } catch (Util.FoundOne found) {
        Util.swallow(found, null);
        return (RexTableInputRef) found.getNode();
    }
    return null;
}
/** Returns whether an expression is atomic: a literal or a variable reference. */
public static boolean isAtomic(RexNode expr) {
    return (expr instanceof RexLiteral) || (expr instanceof RexVariable);
}
/**
 * Returns whether a {@link RexNode node} is a {@link RexCall call} to a given {@link
 * SqlOperator operator}.
 */
public static boolean isCallTo(RexNode expr, SqlOperator op) {
    if (!(expr instanceof RexCall)) {
        return false;
    }
    // Reference equality is intentional: operators are singletons.
    return ((RexCall) expr).getOperator() == op;
}
/**
* Creates a record type with anonymous field names.
*
* @param typeFactory Type factory
* @param exprs Expressions
* @return Record type
*/
public static RelDataType createStructType(
RelDataTypeFactory typeFactory, final List<RexNode> exprs) {
return createStructType(typeFactory, exprs, null, null);
}
/**
* Creates a record type with specified field names.
*
* <p>The array of field names may be null, or any of the names within it can be null. We
* recommend using explicit names where possible, because it makes it much easier to figure out
* the intent of fields when looking at planner output.
*
* @param typeFactory Type factory
* @param exprs Expressions
* @param names Field names, may be null, or elements may be null
* @param suggester Generates alternative names if {@code names} is not null and its elements
* are not unique
* @return Record type
*/
public static RelDataType createStructType(
RelDataTypeFactory typeFactory,
final List<? extends RexNode> exprs,
@Nullable List<? extends @Nullable String> names,
SqlValidatorUtil.Suggester suggester) {
if (names != null && suggester != null) {
names =
SqlValidatorUtil.uniquify(
names, suggester, typeFactory.getTypeSystem().isSchemaCaseSensitive());
}
final RelDataTypeFactory.Builder builder = typeFactory.builder();
for (int i = 0; i < exprs.size(); i++) {
String name;
if (names == null || (name = names.get(i)) == null) {
name = "$f" + i;
}
builder.add(name, exprs.get(i).getType());
}
return builder.build();
}
@Deprecated // to be removed before 2.0
public static RelDataType createStructType(
RelDataTypeFactory typeFactory,
final List<? extends RexNode> exprs,
List<String> names) {
return createStructType(typeFactory, exprs, names, null);
}
/**
* Returns whether the type of an array of expressions is compatible with a struct type.
*
* @param exprs Array of expressions
* @param type Type
* @param litmus What to do if an error is detected (there is a mismatch)
* @return Whether every expression has the same type as the corresponding member of the struct
* type
* @see RelOptUtil#eq(String, RelDataType, String, RelDataType, org.apache.calcite.util.Litmus)
*/
public static boolean compatibleTypes(List<RexNode> exprs, RelDataType type, Litmus litmus) {
final List<RelDataTypeField> fields = type.getFieldList();
if (exprs.size() != fields.size()) {
return litmus.fail("rowtype mismatches expressions");
}
for (int i = 0; i < fields.size(); i++) {
final RelDataType exprType = exprs.get(i).getType();
final RelDataType fieldType = fields.get(i).getType();
if (!RelOptUtil.eq("type1", exprType, "type2", fieldType, litmus)) {
return litmus.fail(null);
}
}
return litmus.succeed();
}
/**
 * Creates a key for {@link RexNode} which is the same as another key of another RexNode only
 * if the two have both the same type and textual representation. For example, "10" integer
 * and "10" bigint result in different keys.
 */
public static Pair<RexNode, String> makeKey(RexNode expr) {
    // Include the full type string so equal text with different types differs.
    final String typeKey = expr.getType().getFullTypeString();
    return Pair.of(expr, typeKey);
}
/**
* Returns whether the leading edge of a given array of expressions is wholly {@link
* RexInputRef} objects with types corresponding to the underlying datatype.
*/
public static boolean containIdentity(
List<? extends RexNode> exprs, RelDataType rowType, Litmus litmus) {
final List<RelDataTypeField> fields = rowType.getFieldList();
if (exprs.size() < fields.size()) {
return litmus.fail("exprs/rowType length mismatch");
}
for (int i = 0; i < fields.size(); i++) {
if (!(exprs.get(i) instanceof RexInputRef)) {
return litmus.fail("expr[{}] is not a RexInputRef", i);
}
RexInputRef inputRef = (RexInputRef) exprs.get(i);
if (inputRef.getIndex() != i) {
return litmus.fail("expr[{}] has ordinal {}", i, inputRef.getIndex());
}
if (!RelOptUtil.eq(
"type1", exprs.get(i).getType(), "type2", fields.get(i).getType(), litmus)) {
return litmus.fail(null);
}
}
return litmus.succeed();
}
/** Returns whether a list of expressions projects the incoming fields. */
public static boolean isIdentity(List<? extends RexNode> exps, RelDataType inputRowType) {
    // Sizes must match exactly; then every expr must be the matching input ref.
    if (exps.size() != inputRowType.getFieldCount()) {
        return false;
    }
    return containIdentity(exps, inputRowType, Litmus.IGNORE);
}
/** As {@link #composeConjunction(RexBuilder, Iterable, boolean)} but never returns null. */
public static RexNode composeConjunction(
        RexBuilder rexBuilder, Iterable<? extends @Nullable RexNode> nodes) {
    // With nullOnEmpty=false the 3-arg overload never yields null.
    return requireNonNull(composeConjunction(rexBuilder, nodes, false), "e");
}
/**
* Converts a collection of expressions into an AND. If there are zero expressions, returns
* TRUE. If there is one expression, returns just that expression. If any of the expressions are
* FALSE, returns FALSE. Removes expressions that always evaluate to TRUE. Returns null only if
* {@code nullOnEmpty} and expression is TRUE.
*/
public static @Nullable RexNode composeConjunction(
RexBuilder rexBuilder,
Iterable<? extends @Nullable RexNode> nodes,
boolean nullOnEmpty) {
ImmutableList<RexNode> list = flattenAnd(nodes);
switch (list.size()) {
case 0:
return nullOnEmpty ? null : rexBuilder.makeLiteral(true);
case 1:
return list.get(0);
default:
if (containsFalse(list)) {
return rexBuilder.makeLiteral(false);
}
return rexBuilder.makeCall(SqlStdOperatorTable.AND, list);
}
}
/**
 * Flattens a list of AND nodes.
 *
 * <p>Treats null nodes as literal TRUE (i.e. ignores them).
 *
 * @param nodes expressions to flatten; nested ANDs are expanded in place
 * @return flattened, de-duplicated operand list (always-true operands dropped)
 */
public static ImmutableList<RexNode> flattenAnd(Iterable<? extends @Nullable RexNode> nodes) {
    // Fix: use a bounded wildcard instead of the raw Collection cast, which
    // produced a raw-type/unchecked warning. Behavior is unchanged.
    if (nodes instanceof Collection && ((Collection<?>) nodes).isEmpty()) {
        // Optimize common case
        return ImmutableList.of();
    }
    final ImmutableList.Builder<RexNode> builder = ImmutableList.builder();
    final Set<RexNode> set = new HashSet<>(); // to eliminate duplicates
    for (RexNode node : nodes) {
        if (node != null) {
            addAnd(builder, set, node);
        }
    }
    return builder.build();
}
/** Recursively adds {@code node} (expanding nested ANDs) to {@code builder},
 * skipping always-true operands and duplicates tracked in {@code digests}. */
private static void addAnd(
        ImmutableList.Builder<RexNode> builder, Set<RexNode> digests, RexNode node) {
    if (node.getKind() == SqlKind.AND) {
        // Flatten nested conjunctions.
        for (RexNode operand : ((RexCall) node).getOperands()) {
            addAnd(builder, digests, operand);
        }
        return;
    }
    // TRUE is the AND identity; drop it. Set.add dedupes.
    if (!node.isAlwaysTrue() && digests.add(node)) {
        builder.add(node);
    }
}
/**
 * Converts a collection of expressions into an OR. If there are zero expressions, returns
 * FALSE. If there is one expression, returns just that expression. If any of the expressions
 * are TRUE, returns TRUE. Removes expressions that always evaluate to FALSE. Flattens
 * expressions that are ORs.
 */
public static RexNode composeDisjunction(
        RexBuilder rexBuilder, Iterable<? extends RexNode> nodes) {
    // With nullOnEmpty=false the 3-arg overload never yields null.
    return requireNonNull(composeDisjunction(rexBuilder, nodes, false), "e");
}
/**
* Converts a collection of expressions into an OR, optionally returning null if the list is
* empty.
*/
public static @Nullable RexNode composeDisjunction(
RexBuilder rexBuilder, Iterable<? extends RexNode> nodes, boolean nullOnEmpty) {
ImmutableList<RexNode> list = flattenOr(nodes);
switch (list.size()) {
case 0:
return nullOnEmpty ? null : rexBuilder.makeLiteral(false);
case 1:
return list.get(0);
default:
if (containsTrue(list)) {
return rexBuilder.makeLiteral(true);
}
return rexBuilder.makeCall(SqlStdOperatorTable.OR, list);
}
}
/**
 * Flattens a list of OR nodes.
 *
 * @param nodes expressions to flatten; nested ORs are expanded in place
 * @return flattened, de-duplicated operand list (always-false operands dropped)
 */
public static ImmutableList<RexNode> flattenOr(Iterable<? extends RexNode> nodes) {
    // Fix: use a bounded wildcard instead of the raw Collection cast, which
    // produced a raw-type/unchecked warning. Behavior is unchanged.
    if (nodes instanceof Collection && ((Collection<?>) nodes).isEmpty()) {
        // Optimize common case
        return ImmutableList.of();
    }
    final ImmutableList.Builder<RexNode> builder = ImmutableList.builder();
    final Set<RexNode> set = new HashSet<>(); // to eliminate duplicates
    for (RexNode node : nodes) {
        addOr(builder, set, node);
    }
    return builder.build();
}
/** Recursively adds {@code node} (expanding nested ORs) to {@code builder},
 * skipping always-false operands and duplicates tracked in {@code set}. */
private static void addOr(
        ImmutableList.Builder<RexNode> builder, Set<RexNode> set, RexNode node) {
    if (node.getKind() == SqlKind.OR) {
        // Flatten nested disjunctions.
        for (RexNode operand : ((RexCall) node).getOperands()) {
            addOr(builder, set, operand);
        }
        return;
    }
    // FALSE is the OR identity; drop it. Set.add dedupes.
    if (!node.isAlwaysFalse() && set.add(node)) {
        builder.add(node);
    }
}
/**
* Applies a mapping to a collation list.
*
* @param mapping Mapping
* @param collationList Collation list
* @return collation list with mapping applied to each field
*/
public static List<RelCollation> apply(
Mappings.TargetMapping mapping, List<RelCollation> collationList) {
final List<RelCollation> newCollationList = new ArrayList<>();
for (RelCollation collation : collationList) {
final List<RelFieldCollation> newFieldCollationList = new ArrayList<>();
for (RelFieldCollation fieldCollation : collation.getFieldCollations()) {
final RelFieldCollation newFieldCollation = apply(mapping, fieldCollation);
if (newFieldCollation == null) {
// This field is not mapped. Stop here. The leading edge
// of the collation is still valid (although it's useless
// if it's empty).
break;
}
newFieldCollationList.add(newFieldCollation);
}
// Truncation to collations to their leading edge creates empty
// and duplicate collations. Ignore these.
if (!newFieldCollationList.isEmpty()) {
final RelCollation newCollation = RelCollations.of(newFieldCollationList);
if (!newCollationList.contains(newCollation)) {
newCollationList.add(newCollation);
}
}
}
// REVIEW: There might be redundant collations in the list. For example,
// in {(x), (x, y)}, (x) is redundant because it is a leading edge of
// another collation in the list. Could remove redundant collations.
return newCollationList;
}
/**
* Applies a mapping to a collation.
*
* @param mapping Mapping
* @param collation Collation
* @return collation with mapping applied
*/
public static RelCollation apply(Mappings.TargetMapping mapping, RelCollation collation) {
List<RelFieldCollation> fieldCollations =
applyFields(mapping, collation.getFieldCollations());
return fieldCollations.equals(collation.getFieldCollations())
? collation
: RelCollations.of(fieldCollations);
}
/**
* Applies a mapping to a field collation.
*
* <p>If the field is not mapped, returns null.
*
* @param mapping Mapping
* @param fieldCollation Field collation
* @return collation with mapping applied
*/
public static @Nullable RelFieldCollation apply(
Mappings.TargetMapping mapping, RelFieldCollation fieldCollation) {
final int target = mapping.getTargetOpt(fieldCollation.getFieldIndex());
if (target < 0) {
return null;
}
return fieldCollation.withFieldIndex(target);
}
/**
* Applies a mapping to a list of field collations.
*
* @param mapping Mapping
* @param fieldCollations Field collations
* @return collations with mapping applied
*/
public static List<RelFieldCollation> applyFields(
Mappings.TargetMapping mapping, List<RelFieldCollation> fieldCollations) {
final List<RelFieldCollation> newFieldCollations = new ArrayList<>();
for (RelFieldCollation fieldCollation : fieldCollations) {
RelFieldCollation newFieldCollation = apply(mapping, fieldCollation);
if (newFieldCollation == null) {
break;
}
newFieldCollations.add(newFieldCollation);
}
return newFieldCollations;
}
/** Applies a mapping to an expression. */
public static RexNode apply(Mappings.TargetMapping mapping, RexNode node) {
    // The shuttle rewrites every input ref according to the mapping.
    final RexShuttle shuttle = RexPermuteInputsShuttle.of(mapping);
    return node.accept(shuttle);
}
/** Applies a mapping to an iterable over expressions. */
public static List<RexNode> apply(
Mappings.TargetMapping mapping, Iterable<? extends RexNode> nodes) {
return RexPermuteInputsShuttle.of(mapping).visitList(nodes);
}
/**
* Applies a shuttle to an array of expressions. Creates a copy first.
*
* @param shuttle Shuttle
* @param exprs Array of expressions
*/
public static <T extends RexNode> T[] apply(RexVisitor<T> shuttle, T[] exprs) {
T[] newExprs = exprs.clone();
for (int i = 0; i < newExprs.length; i++) {
final RexNode expr = newExprs[i];
if (expr != null) {
newExprs[i] = expr.accept(shuttle);
}
}
return newExprs;
}
/**
* Applies a visitor to an array of expressions and, if specified, a single expression.
*
* @param visitor Visitor
* @param exprs Array of expressions
* @param expr Single expression, may be null
*/
public static void apply(RexVisitor<Void> visitor, RexNode[] exprs, @Nullable RexNode expr) {
for (RexNode e : exprs) {
e.accept(visitor);
}
if (expr != null) {
expr.accept(visitor);
}
}
/**
* Applies a visitor to a list of expressions and, if specified, a single expression.
*
* @param visitor Visitor
* @param exprs List of expressions
* @param expr Single expression, may be null
*/
public static void apply(
RexVisitor<Void> visitor, List<? extends RexNode> exprs, @Nullable RexNode expr) {
for (RexNode e : exprs) {
e.accept(visitor);
}
if (expr != null) {
expr.accept(visitor);
}
}
/**
* Flattens an expression.
*
* <p>Returns the same expression if it is already flat.
*/
public static RexNode flatten(RexBuilder rexBuilder, RexNode node) {
if (node instanceof RexCall) {
RexCall call = (RexCall) node;
final SqlOperator op = call.getOperator();
final List<RexNode> flattenedOperands = flatten(call.getOperands(), op);
if (!isFlat(call.getOperands(), op)) {
return rexBuilder.makeCall(call.getType(), op, flattenedOperands);
}
}
return node;
}
/**
* Converts a list of operands into a list that is flat with respect to the given operator. The
* operands are assumed to be flat already.
*/
public static List<RexNode> flatten(List<? extends RexNode> exprs, SqlOperator op) {
if (isFlat(exprs, op)) {
//noinspection unchecked
return (List) exprs;
}
final List<RexNode> list = new ArrayList<>();
flattenRecurse(list, exprs, op);
return list;
}
/**
* Returns whether a call to {@code op} with {@code exprs} as arguments would be considered
* "flat".
*
* <p>For example, {@code isFlat([w, AND[x, y], z, AND)} returns false; {@code isFlat([w, x, y,
* z], AND)} returns true.
*/
private static boolean isFlat(List<? extends RexNode> exprs, final SqlOperator op) {
return !isAssociative(op)
|| !exists(exprs, (Predicate1<RexNode>) expr -> isCallTo(expr, op));
}
/**
* Returns false if the expression can be optimized by flattening calls to an associative
* operator such as AND and OR.
*/
public static boolean isFlat(RexNode expr) {
if (!(expr instanceof RexCall)) {
return true;
}
final RexCall call = (RexCall) expr;
return isFlat(call.getOperands(), call.getOperator())
&& all(call.getOperands(), RexUtil::isFlat);
}
private static void flattenRecurse(
List<RexNode> list, List<? extends RexNode> exprs, SqlOperator op) {
for (RexNode expr : exprs) {
if (expr instanceof RexCall && ((RexCall) expr).getOperator() == op) {
flattenRecurse(list, ((RexCall) expr).getOperands(), op);
} else {
list.add(expr);
}
}
}
/**
* Returns whether the input is a 'loss-less' cast, that is, a cast from which the original
* value of the field can be certainly recovered.
*
* <p>For instance, int → bigint is loss-less (as you can cast back to int without loss of
* information), but bigint → int is not loss-less.
*
* <p>The implementation of this method does not return false positives. However, it is not
* complete.
*
* @param node input node to verify if it represents a loss-less cast
* @return true iff the node is a loss-less cast
*/
public static boolean isLosslessCast(RexNode node) {
if (!node.isA(SqlKind.CAST)) {
return false;
}
return isLosslessCast(((RexCall) node).getOperands().get(0).getType(), node.getType());
}
/**
* Returns whether the conversion from {@code source} to {@code target} type is a 'loss-less'
* cast, that is, a cast from which the original value of the field can be certainly recovered.
*
* <p>For instance, int → bigint is loss-less (as you can cast back to int without loss of
* information), but bigint → int is not loss-less.
*
* <p>The implementation of this method does not return false positives. However, it is not
* complete.
*
* @param source source type
* @param target target type
* @return true iff the conversion is a loss-less cast
*/
@API(since = "1.22", status = API.Status.EXPERIMENTAL)
public static boolean isLosslessCast(RelDataType source, RelDataType target) {
final SqlTypeName sourceSqlTypeName = source.getSqlTypeName();
final SqlTypeName targetSqlTypeName = target.getSqlTypeName();
// 1) Both INT numeric types
if (SqlTypeFamily.INTEGER.getTypeNames().contains(sourceSqlTypeName)
&& SqlTypeFamily.INTEGER.getTypeNames().contains(targetSqlTypeName)) {
return targetSqlTypeName.compareTo(sourceSqlTypeName) >= 0;
}
// 2) Both CHARACTER types: it depends on the precision (length)
if (SqlTypeFamily.CHARACTER.getTypeNames().contains(sourceSqlTypeName)
&& SqlTypeFamily.CHARACTER.getTypeNames().contains(targetSqlTypeName)) {
return targetSqlTypeName.compareTo(sourceSqlTypeName) >= 0
&& source.getPrecision() <= target.getPrecision();
}
// 3) From NUMERIC family to CHARACTER family: it depends on the precision/scale
if (sourceSqlTypeName.getFamily() == SqlTypeFamily.NUMERIC
&& targetSqlTypeName.getFamily() == SqlTypeFamily.CHARACTER) {
int sourceLength = source.getPrecision() + 1; // include sign
if (source.getScale() != -1 && source.getScale() != 0) {
sourceLength += source.getScale() + 1; // include decimal mark
}
return target.getPrecision() >= sourceLength;
}
// Return FALSE by default
return false;
}
/**
* Converts an expression to conjunctive normal form (CNF).
*
* <p>The following expression is in CNF:
*
* <blockquote>
*
* (a OR b) AND (c OR d)
*
* </blockquote>
*
* <p>The following expression is not in CNF:
*
* <blockquote>
*
* (a AND b) OR c
*
* </blockquote>
*
* <p>but can be converted to CNF:
*
* <blockquote>
*
* (a OR c) AND (b OR c)
*
* </blockquote>
*
* <p>The following expression is not in CNF:
*
* <blockquote>
*
* NOT (a OR NOT b)
*
* </blockquote>
*
* <p>but can be converted to CNF by applying de Morgan's theorem:
*
* <blockquote>
*
* NOT a AND b
*
* </blockquote>
*
* <p>Expressions not involving AND, OR or NOT at the top level are in CNF.
*/
public static RexNode toCnf(RexBuilder rexBuilder, RexNode rex) {
return new CnfHelper(rexBuilder, -1).toCnf(rex);
}
/**
* Similar to {@link #toCnf(RexBuilder, RexNode)}; however, it lets you specify a threshold in
* the number of nodes that can be created out of the conversion.
*
* <p>If the number of resulting nodes exceeds that threshold, stops conversion and returns the
* original expression.
*
* <p>If the threshold is negative it is ignored.
*
* <p>Leaf nodes in the expression do not count towards the threshold.
*/
public static RexNode toCnf(RexBuilder rexBuilder, int maxCnfNodeCount, RexNode rex) {
return new CnfHelper(rexBuilder, maxCnfNodeCount).toCnf(rex);
}
/**
* Converts an expression to disjunctive normal form (DNF).
*
* <p>DNF: It is a form of logical formula which is disjunction of conjunctive clauses.
*
* <p>All logical formulas can be converted into DNF.
*
* <p>The following expression is in DNF:
*
* <blockquote>
*
* (a AND b) OR (c AND d)
*
* </blockquote>
*
* <p>The following expression is not in CNF:
*
* <blockquote>
*
* (a OR b) AND c
*
* </blockquote>
*
* <p>but can be converted to DNF:
*
* <blockquote>
*
* (a AND c) OR (b AND c)
*
* </blockquote>
*
* <p>The following expression is not in CNF:
*
* <blockquote>
*
* NOT (a OR NOT b)
*
* </blockquote>
*
* <p>but can be converted to DNF by applying de Morgan's theorem:
*
* <blockquote>
*
* NOT a AND b
*
* </blockquote>
*
* <p>Expressions not involving AND, OR or NOT at the top level are in DNF.
*/
public static RexNode toDnf(RexBuilder rexBuilder, RexNode rex) {
return new DnfHelper(rexBuilder).toDnf(rex);
}
/**
 * Returns whether an operator is associative. AND is associative, which means that "(x AND
 * y) and z" is equivalent to "x AND (y AND z)". We might well flatten the tree, and write
 * "AND(x, y, z)".
 */
private static boolean isAssociative(SqlOperator op) {
    final SqlKind kind = op.getKind();
    return kind == SqlKind.AND || kind == SqlKind.OR;
}
/** Returns whether there is an element in {@code list} for which {@code predicate} is true. */
public static <E> boolean exists(List<? extends E> list, Predicate1<E> predicate) {
    // Indexed scan with early exit on the first match.
    for (int i = 0; i < list.size(); i++) {
        if (predicate.apply(list.get(i))) {
            return true;
        }
    }
    return false;
}
/** Returns whether {@code predicate} is true for all elements of {@code list}. */
public static <E> boolean all(List<? extends E> list, Predicate1<E> predicate) {
    // Indexed scan with early exit on the first failure.
    for (int i = 0; i < list.size(); i++) {
        if (!predicate.apply(list.get(i))) {
            return false;
        }
    }
    return true;
}
/** Shifts every {@link RexInputRef} in an expression by {@code offset}. */
public static RexNode shift(RexNode node, final int offset) {
    // Zero shift is the identity; avoid building a shuttle for it.
    return offset == 0 ? node : node.accept(new RexShiftShuttle(offset));
}
/** Shifts every {@link RexInputRef} in an expression by {@code offset}. */
public static List<RexNode> shift(Iterable<RexNode> nodes, int offset) {
    // One shuttle instance rewrites every expression in the iterable.
    final RexShiftShuttle shuttle = new RexShiftShuttle(offset);
    return shuttle.visitList(nodes);
}
/**
* Shifts every {@link RexInputRef} in an expression higher than {@code start} by {@code
* offset}.
*/
public static RexNode shift(RexNode node, final int start, final int offset) {
return node.accept(
new RexShuttle() {
@Override
public RexNode visitInputRef(RexInputRef input) {
final int index = input.getIndex();
if (index < start) {
return input;
}
return new RexInputRef(index + offset, input.getType());
}
});
}
/**
* Creates an equivalent version of a node where common factors among ORs are pulled up.
*
* <p>For example,
*
* <blockquote>
*
* (a AND b) OR (a AND c AND d)
*
* </blockquote>
*
* <p>becomes
*
* <blockquote>
*
* a AND (b OR (c AND d))
*
* </blockquote>
*
* <p>Note that this result is not in CNF (see {@link #toCnf(RexBuilder, RexNode)}) because
* there is an AND inside an OR.
*
* <p>This form is useful if, say, {@code a} contains columns from only the left-hand side of a
* join, and can be pushed to the left input.
*
* @param rexBuilder Rex builder
* @param node Expression to transform
* @return Equivalent expression with common factors pulled up
*/
public static RexNode pullFactors(RexBuilder rexBuilder, RexNode node) {
return new CnfHelper(rexBuilder, -1).pull(node);
}
@Deprecated // to be removed before 2.0
public static List<RexNode> fixUp(
final RexBuilder rexBuilder, List<RexNode> nodes, final RelDataType rowType) {
final List<RelDataType> typeList = RelOptUtil.getFieldTypeList(rowType);
return fixUp(rexBuilder, nodes, typeList);
}
/**
* Fixes up the type of all {@link RexInputRef}s in an expression to match differences in
* nullability.
*
* <p>Such differences in nullability occur when expressions are moved through outer joins.
*
* <p>Throws if there any greater inconsistencies of type.
*/
public static List<RexNode> fixUp(
final RexBuilder rexBuilder, List<RexNode> nodes, final List<RelDataType> fieldTypes) {
return new FixNullabilityShuttle(rexBuilder, fieldTypes).apply(nodes);
}
/** Transforms a list of expressions into a list of their types. */
public static List<RelDataType> types(List<? extends RexNode> nodes) {
return Util.transform(nodes, RexNode::getType);
}
public static List<RelDataTypeFamily> families(List<RelDataType> types) {
return Util.transform(types, RelDataType::getFamily);
}
/**
* Removes all expressions from a list that are equivalent to a given expression. Returns
* whether any were removed.
*/
public static boolean removeAll(List<RexNode> targets, RexNode e) {
int count = 0;
Iterator<RexNode> iterator = targets.iterator();
while (iterator.hasNext()) {
RexNode next = iterator.next();
if (next.equals(e)) {
++count;
iterator.remove();
}
}
return count > 0;
}
/**
* Returns whether two {@link RexNode}s are structurally equal.
*
* <p>This method considers structure, not semantics. 'x < y' is not equivalent to 'y >
* x'.
*/
@Deprecated // use e1.equals(e2)
public static boolean eq(RexNode e1, RexNode e2) {
return e1 == e2 || e1.toString().equals(e2.toString());
}
/**
* Simplifies a boolean expression, always preserving its type and its nullability.
*
* <p>This is useful if you are simplifying expressions in a {@link Project}.
*
* @deprecated Use {@link RexSimplify#simplifyPreservingType(RexNode)}, which allows you to
* specify an {@link RexExecutor}.
*/
@Deprecated // to be removed before 2.0
public static RexNode simplifyPreservingType(RexBuilder rexBuilder, RexNode e) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR)
.simplifyPreservingType(e);
}
/**
* Simplifies a boolean expression, leaving UNKNOWN values as UNKNOWN, and using the default
* executor.
*
* @deprecated Create a {@link RexSimplify}, then call its {@link RexSimplify#simplify(RexNode,
* RexUnknownAs)} method.
*/
@Deprecated // to be removed before 2.0
public static RexNode simplify(RexBuilder rexBuilder, RexNode e) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR).simplify(e);
}
/**
* Simplifies a boolean expression, using the default executor.
*
* <p>In particular:
*
* <ul>
* <li>{@code simplify(x = 1 AND y = 2 AND NOT x = 1)} returns {@code y = 2}
* <li>{@code simplify(x = 1 AND FALSE)} returns {@code FALSE}
* </ul>
*
* <p>If the expression is a predicate in a WHERE clause, UNKNOWN values have the same effect as
* FALSE. In situations like this, specify {@code unknownAsFalse = true}, so and we can switch
* from 3-valued logic to simpler 2-valued logic and make more optimizations.
*
* @param rexBuilder Rex builder
* @param e Expression to simplify
* @param unknownAsFalse Whether to convert UNKNOWN values to FALSE
* @deprecated Create a {@link RexSimplify}, then call its {@link RexSimplify#simplify(RexNode,
* RexUnknownAs)} method.
*/
@Deprecated // to be removed before 2.0
public static RexNode simplify(RexBuilder rexBuilder, RexNode e, boolean unknownAsFalse) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR)
.simplifyUnknownAs(e, RexUnknownAs.falseIf(unknownAsFalse));
}
/**
* Simplifies a conjunction of boolean expressions.
*
* @deprecated Use {@link RexSimplify#simplifyAnds(Iterable, RexUnknownAs)}.
*/
@Deprecated // to be removed before 2.0
public static RexNode simplifyAnds(RexBuilder rexBuilder, Iterable<? extends RexNode> nodes) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR)
.simplifyAnds(nodes, RexUnknownAs.UNKNOWN);
}
@Deprecated // to be removed before 2.0
public static RexNode simplifyAnds(
RexBuilder rexBuilder, Iterable<? extends RexNode> nodes, boolean unknownAsFalse) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR)
.simplifyAnds(nodes, RexUnknownAs.falseIf(unknownAsFalse));
}
/** Negates a logical expression by adding or removing a NOT. */
public static RexNode not(RexNode e) {
switch (e.getKind()) {
case NOT:
return ((RexCall) e).getOperands().get(0);
default:
return addNot(e);
}
}
private static RexNode addNot(RexNode e) {
return new RexCall(e.getType(), SqlStdOperatorTable.NOT, ImmutableList.of(e));
}
@API(since = "1.27.0", status = API.Status.EXPERIMENTAL)
public static SqlOperator op(SqlKind kind) {
switch (kind) {
case IS_FALSE:
return SqlStdOperatorTable.IS_FALSE;
case IS_TRUE:
return SqlStdOperatorTable.IS_TRUE;
case IS_UNKNOWN:
return SqlStdOperatorTable.IS_UNKNOWN;
case IS_NULL:
return SqlStdOperatorTable.IS_NULL;
case IS_NOT_FALSE:
return SqlStdOperatorTable.IS_NOT_FALSE;
case IS_NOT_TRUE:
return SqlStdOperatorTable.IS_NOT_TRUE;
case IS_NOT_NULL:
return SqlStdOperatorTable.IS_NOT_NULL;
case IS_DISTINCT_FROM:
return SqlStdOperatorTable.IS_DISTINCT_FROM;
case IS_NOT_DISTINCT_FROM:
return SqlStdOperatorTable.IS_NOT_DISTINCT_FROM;
case EQUALS:
return SqlStdOperatorTable.EQUALS;
case NOT_EQUALS:
return SqlStdOperatorTable.NOT_EQUALS;
case LESS_THAN:
return SqlStdOperatorTable.LESS_THAN;
case GREATER_THAN:
return SqlStdOperatorTable.GREATER_THAN;
case LESS_THAN_OR_EQUAL:
return SqlStdOperatorTable.LESS_THAN_OR_EQUAL;
case GREATER_THAN_OR_EQUAL:
return SqlStdOperatorTable.GREATER_THAN_OR_EQUAL;
case AND:
return SqlStdOperatorTable.AND;
case OR:
return SqlStdOperatorTable.OR;
case COALESCE:
return SqlStdOperatorTable.COALESCE;
default:
throw new AssertionError(kind);
}
}
@Deprecated // to be removed before 2.0
public static RexNode simplifyAnd(RexBuilder rexBuilder, RexCall e, boolean unknownAsFalse) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR)
.simplifyAnd(e, RexUnknownAs.falseIf(unknownAsFalse));
}
@Deprecated // to be removed before 2.0
public static RexNode simplifyAnd2(
RexBuilder rexBuilder, List<RexNode> terms, List<RexNode> notTerms) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR)
.simplifyAnd2(terms, notTerms);
}
@Deprecated // to be removed before 2.0
public static RexNode simplifyAnd2ForUnknownAsFalse(
RexBuilder rexBuilder, List<RexNode> terms, List<RexNode> notTerms) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR)
.simplifyAnd2ForUnknownAsFalse(terms, notTerms);
}
public static @Nullable RexNode negate(RexBuilder rexBuilder, RexCall call) {
switch (call.getKind()) {
case EQUALS:
case NOT_EQUALS:
case LESS_THAN:
case GREATER_THAN:
case LESS_THAN_OR_EQUAL:
case GREATER_THAN_OR_EQUAL:
final SqlOperator op = op(call.getKind().negateNullSafe());
return rexBuilder.makeCall(op, call.getOperands());
default:
break;
}
return null;
}
public static @Nullable RexNode invert(RexBuilder rexBuilder, RexCall call) {
switch (call.getKind()) {
case EQUALS:
case NOT_EQUALS:
case LESS_THAN:
case GREATER_THAN:
case LESS_THAN_OR_EQUAL:
case GREATER_THAN_OR_EQUAL:
final SqlOperator op = requireNonNull(call.getOperator().reverse());
return rexBuilder.makeCall(op, Lists.reverse(call.getOperands()));
default:
return null;
}
}
@Deprecated // to be removed before 2.0
public static RexNode simplifyOr(RexBuilder rexBuilder, RexCall call) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR)
.simplifyUnknownAs(call, RexUnknownAs.UNKNOWN);
}
@Deprecated // to be removed before 2.0
public static RexNode simplifyOrs(RexBuilder rexBuilder, List<RexNode> terms) {
return new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, EXECUTOR)
.simplifyUnknownAs(
RexUtil.composeDisjunction(rexBuilder, terms), RexUnknownAs.UNKNOWN);
}
/** Creates the expression {@code e1 AND NOT notTerm1 AND NOT notTerm2 ...}. */
public static RexNode andNot(RexBuilder rexBuilder, RexNode e, RexNode... notTerms) {
return andNot(rexBuilder, e, Arrays.asList(notTerms));
}
/**
* Creates the expression {@code e1 AND NOT notTerm1 AND NOT notTerm2 ...}.
*
* <p>Examples:
*
* <ul>
* <li>andNot(p) returns "p"
* <li>andNot(p, n1, n2) returns "p AND NOT n1 AND NOT n2"
* <li>andNot(x = 10, x = 20, y = 30, x = 30) returns "x = 10 AND NOT (y = 30)"
* </ul>
*/
public static RexNode andNot(
final RexBuilder rexBuilder, RexNode e, Iterable<? extends RexNode> notTerms) {
// If "e" is of the form "x = literal", remove all "x = otherLiteral"
// terms from notTerms.
switch (e.getKind()) {
case EQUALS:
final RexCall call = (RexCall) e;
if (call.getOperands().get(1) instanceof RexLiteral) {
notTerms =
Util.filter(
notTerms,
e2 -> {
switch (e2.getKind()) {
case EQUALS:
RexCall call2 = (RexCall) e2;
if (call2.getOperands()
.get(0)
.equals(call.getOperands().get(0))
&& call2.getOperands().get(1)
instanceof RexLiteral
&& !call.getOperands()
.get(1)
.equals(
call2.getOperands()
.get(1))) {
return false;
}
break;
default:
break;
}
return true;
});
}
break;
default:
break;
}
return composeConjunction(
rexBuilder,
Iterables.concat(
ImmutableList.of(e), Util.transform(notTerms, e2 -> not(rexBuilder, e2))));
}
/**
* Returns whether a given operand of a CASE expression is a predicate.
*
* <p>A switched case (CASE x WHEN x1 THEN v1 ... ELSE e END) has an even number of arguments
* and odd-numbered arguments are predicates.
*
* <p>A condition case (CASE WHEN p1 THEN v1 ... ELSE e END) has an odd number of arguments and
* even-numbered arguments are predicates, except for the last argument.
*/
public static boolean isCasePredicate(RexCall call, int i) {
assert call.getKind() == SqlKind.CASE;
return i < call.operands.size() - 1 && (call.operands.size() - i) % 2 == 1;
}
private static boolean containsFalse(Iterable<RexNode> nodes) {
for (RexNode node : nodes) {
if (node.isAlwaysFalse()) {
return true;
}
}
return false;
}
private static boolean containsTrue(Iterable<RexNode> nodes) {
for (RexNode node : nodes) {
if (node.isAlwaysTrue()) {
return true;
}
}
return false;
}
/**
* Returns a function that applies NOT to its argument.
*
* @deprecated Use {@link #not}
*/
@SuppressWarnings("Guava")
@Deprecated // to be removed before 2.0
public static com.google.common.base.Function<RexNode, RexNode> notFn(
final RexBuilder rexBuilder) {
return e -> not(rexBuilder, e);
}
/**
* Applies NOT to an expression.
*
* <p>Unlike {@link #not}, may strengthen the type from {@code BOOLEAN} to {@code BOOLEAN NOT
* NULL}.
*/
static RexNode not(final RexBuilder rexBuilder, RexNode input) {
return input.isAlwaysTrue()
? rexBuilder.makeLiteral(false)
: input.isAlwaysFalse()
? rexBuilder.makeLiteral(true)
: input.getKind() == SqlKind.NOT
? ((RexCall) input).operands.get(0)
: rexBuilder.makeCall(SqlStdOperatorTable.NOT, input);
}
/** Returns whether an expression contains a {@link RexCorrelVariable}. */
public static boolean containsCorrelation(RexNode condition) {
try {
condition.accept(CorrelationFinder.INSTANCE);
return false;
} catch (Util.FoundOne e) {
return true;
}
}
/**
* Given an expression, it will swap the table references contained in its {@link
* RexTableInputRef} using the contents in the map.
*/
public static RexNode swapTableReferences(
final RexBuilder rexBuilder,
final RexNode node,
final Map<RelTableRef, RelTableRef> tableMapping) {
return swapTableColumnReferences(rexBuilder, node, tableMapping, null);
}
/**
* Given an expression, it will swap its column references {@link RexTableInputRef} using the
* contents in the map (in particular, the first element of the set in the map value).
*/
public static RexNode swapColumnReferences(
final RexBuilder rexBuilder,
final RexNode node,
final Map<RexTableInputRef, Set<RexTableInputRef>> ec) {
return swapTableColumnReferences(rexBuilder, node, null, ec);
}
/**
* Given an expression, it will swap the table references contained in its {@link
* RexTableInputRef} using the contents in the first map, and then it will swap the column
* references {@link RexTableInputRef} using the contents in the second map (in particular, the
* first element of the set in the map value).
*/
public static RexNode swapTableColumnReferences(
final RexBuilder rexBuilder,
final RexNode node,
final @Nullable Map<RelTableRef, RelTableRef> tableMapping,
final @Nullable Map<RexTableInputRef, Set<RexTableInputRef>> ec) {
RexShuttle visitor =
new RexShuttle() {
@Override
public RexNode visitTableInputRef(RexTableInputRef inputRef) {
if (tableMapping != null) {
RexTableInputRef inputRefFinal = inputRef;
inputRef =
RexTableInputRef.of(
requireNonNull(
tableMapping.get(inputRef.getTableRef()),
() ->
"tableMapping.get(...) for "
+ inputRefFinal.getTableRef()),
inputRef.getIndex(),
inputRef.getType());
}
if (ec != null) {
Set<RexTableInputRef> s = ec.get(inputRef);
if (s != null) {
inputRef = s.iterator().next();
}
}
return inputRef;
}
};
return visitor.apply(node);
}
/**
* Given an expression, it will swap the column references {@link RexTableInputRef} using the
* contents in the first map (in particular, the first element of the set in the map value), and
* then it will swap the table references contained in its {@link RexTableInputRef} using the
* contents in the second map.
*/
public static RexNode swapColumnTableReferences(
final RexBuilder rexBuilder,
final RexNode node,
final Map<RexTableInputRef, ? extends @Nullable Set<RexTableInputRef>> ec,
final @Nullable Map<RelTableRef, RelTableRef> tableMapping) {
RexShuttle visitor =
new RexShuttle() {
@Override
public RexNode visitTableInputRef(RexTableInputRef inputRef) {
if (ec != null) {
Set<RexTableInputRef> s = ec.get(inputRef);
if (s != null) {
inputRef = s.iterator().next();
}
}
if (tableMapping != null) {
RexTableInputRef inputRefFinal = inputRef;
inputRef =
RexTableInputRef.of(
requireNonNull(
tableMapping.get(inputRef.getTableRef()),
() ->
"tableMapping.get(...) for "
+ inputRefFinal.getTableRef()),
inputRef.getIndex(),
inputRef.getType());
}
return inputRef;
}
};
return visitor.apply(node);
}
/**
* Gather all table references in input expressions.
*
* @param nodes expressions
* @return set of table references
*/
public static Set<RelTableRef> gatherTableReferences(final List<RexNode> nodes) {
final Set<RelTableRef> occurrences = new HashSet<>();
new RexVisitorImpl<Void>(true) {
@Override
public Void visitTableInputRef(RexTableInputRef ref) {
occurrences.add(ref.getTableRef());
return super.visitTableInputRef(ref);
}
}.visitEach(nodes);
return occurrences;
}
/** Given some expressions, gets the indices of the non-constant ones. */
public static ImmutableBitSet getNonConstColumns(List<RexNode> expressions) {
ImmutableBitSet cols = ImmutableBitSet.range(0, expressions.size());
return getNonConstColumns(cols, expressions);
}
/** Given some expressions and columns, gets the indices of the non-constant ones. */
public static ImmutableBitSet getNonConstColumns(
ImmutableBitSet columns, List<RexNode> expressions) {
ImmutableBitSet.Builder nonConstCols = ImmutableBitSet.builder();
for (int col : columns) {
if (!isLiteral(expressions.get(col), true)) {
nonConstCols.set(col);
}
}
return nonConstCols.build();
}
// ~ Inner Classes ----------------------------------------------------------
/** Walks over expressions and builds a bank of common sub-expressions. */
private static | ConstantFinder |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/servlet/samples/client/standalone/ViewResolutionTests.java | {
"start": 6397,
"end": 6614
} | class ____ {
@GetMapping("/person/{name}")
String show(@PathVariable String name, Model model) {
Person person = new Person(name);
model.addAttribute(person);
return "person/show";
}
}
}
| PersonController |
java | apache__spark | core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java | {
"start": 994,
"end": 1538
} | enum ____ {
ID,
INCREASING_RUNTIME("runtime"),
DECREASING_RUNTIME("-runtime");
private final Set<String> alternateNames;
TaskSorting(String... names) {
alternateNames = new HashSet<>();
Collections.addAll(alternateNames, names);
}
public static TaskSorting fromString(String str) {
String lower = str.toLowerCase(Locale.ROOT);
for (TaskSorting t: values()) {
if (t.alternateNames.contains(lower)) {
return t;
}
}
return EnumUtil.parseIgnoreCase(TaskSorting.class, str);
}
}
| TaskSorting |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/asyncprocessing/AsyncExecutor.java | {
"start": 1168,
"end": 2341
} | interface ____<REQUEST extends AsyncRequest<?>> {
/**
* Execute a batch of async requests.
*
* @param asyncRequestContainer The AsyncRequestContainer which holds the given batch of
* processing requests.
* @return A future can determine whether execution has completed.
*/
CompletableFuture<Void> executeBatchRequests(
AsyncRequestContainer<REQUEST> asyncRequestContainer);
/**
* Create a {@link AsyncRequestContainer} which is used to hold the batched {@link
* AsyncRequest}.
*/
AsyncRequestContainer<REQUEST> createRequestContainer();
/**
* Execute a single async request *synchronously*. This is for synchronous APIs.
*
* @param asyncRequest the request to run.
*/
void executeRequestSync(REQUEST asyncRequest);
/**
* Check if this executor is fully loaded. Will be invoked to determine whether to give more
* requests to run or wait for a while.
*
* @return the count.
*/
boolean fullyLoaded();
/** Shutdown the StateExecutor, and new committed state execution requests will be rejected. */
void shutdown();
}
| AsyncExecutor |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/AbstractManagedParentQueue.java | {
"start": 1326,
"end": 1476
} | class ____ automatically created child leaf queues.
* From the user perspective this is equivalent to a LeafQueue,
* but functionality wise is a sub- | for |
java | apache__logging-log4j2 | log4j-1.2-api/src/test/java/org/apache/log4j/PropertyConfiguratorTest.java | {
"start": 5996,
"end": 13220
} | class ____ implements OptionHandler {
private boolean activated = false;
public TriggeringPolicy() {}
@Override
public void activateOptions() {
activated = true;
}
public final boolean isActivated() {
return activated;
}
}
private static final String BAD_ESCAPE_PROPERTIES = "/PropertyConfiguratorTest/badEscape.properties";
private static final String FILTER_PROPERTIES = "/PropertyConfiguratorTest/filter.properties";
private static final String CAT_A_NAME = "categoryA";
private static final String CAT_B_NAME = "categoryB";
private static final String CAT_C_NAME = "categoryC";
@AfterEach
void cleanup() {
LogManager.resetConfiguration();
}
/**
* Test for bug 40944. Did not catch IllegalArgumentException on Properties.load and close input stream.
*
* @throws IOException if IOException creating properties file.
*/
@Test
void testBadUnicodeEscape() throws IOException {
try (final InputStream is = PropertyConfiguratorTest.class.getResourceAsStream(BAD_ESCAPE_PROPERTIES)) {
PropertyConfigurator.configure(is);
}
}
/**
* Tests configuring Log4J from an InputStream.
*
* @since 1.2.17
*/
@Test
void testInputStream() throws IOException {
try (final InputStream inputStream = PropertyConfiguratorTest.class.getResourceAsStream(FILTER_PROPERTIES)) {
PropertyConfigurator.configure(inputStream);
final Logger rootLogger = Logger.getRootLogger();
assertThat(rootLogger.getLevel(), is(equalTo(Level.INFO)));
assertThat(rootLogger.getAppender("CONSOLE"), notNullValue());
final Logger logger = Logger.getLogger("org.apache.log4j.PropertyConfiguratorTest");
assertThat(logger.getLevel(), is(equalTo(Level.DEBUG)));
assertThat(logger.getAppender("ROLLING"), notNullValue());
}
}
/**
* Test for bug 47465. configure(URL) did not close opened JarURLConnection.
*
* @throws IOException if IOException creating properties jar.
*/
@Test
void testJarURL() throws IOException {
final File dir = new File("output");
dir.mkdirs();
final File file = new File("output/properties.jar");
try (final ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(file))) {
zos.putNextEntry(new ZipEntry(LogManager.DEFAULT_CONFIGURATION_FILE));
zos.write("log4j.rootLogger=debug".getBytes());
zos.closeEntry();
}
final URL url = new URL("jar:" + file.toURI().toURL() + "!/" + LogManager.DEFAULT_CONFIGURATION_FILE);
PropertyConfigurator.configure(url);
assertTrue(file.delete());
assertFalse(file.exists());
}
@Test
void testLocalVsGlobal() {
LoggerRepository repos1, repos2;
final Logger catA = Logger.getLogger(CAT_A_NAME);
final Logger catB = Logger.getLogger(CAT_B_NAME);
final Logger catC = Logger.getLogger(CAT_C_NAME);
final Properties globalSettings = new Properties();
globalSettings.put("log4j.logger." + CAT_A_NAME, Level.WARN.toString());
globalSettings.put("log4j.logger." + CAT_B_NAME, Level.WARN.toString());
globalSettings.put("log4j.logger." + CAT_C_NAME, Level.DEBUG.toString());
PropertyConfigurator.configure(globalSettings);
assertEquals(Level.WARN, catA.getLevel());
assertEquals(Level.WARN, catB.getLevel());
assertEquals(Level.DEBUG, catC.getLevel());
assertEquals(
Level.WARN, catA.getLoggerRepository().getLogger(CAT_A_NAME).getLevel());
assertEquals(
Level.WARN, catB.getLoggerRepository().getLogger(CAT_B_NAME).getLevel());
assertEquals(
Level.DEBUG, catC.getLoggerRepository().getLogger(CAT_C_NAME).getLevel());
final Properties repos1Settings = new Properties();
repos1Settings.put("log4j.logger." + CAT_A_NAME, Level.DEBUG.toString());
repos1Settings.put("log4j.logger." + CAT_B_NAME, Level.INFO.toString());
repos1 = new Hierarchy(new RootLogger(Level.OFF));
new PropertyConfigurator().doConfigure(repos1Settings, repos1);
assertEquals(Level.DEBUG, repos1.getLogger(CAT_A_NAME).getLevel());
assertEquals(Level.INFO, repos1.getLogger(CAT_B_NAME).getLevel());
final Properties repos2Settings = new Properties();
repos2Settings.put("log4j.logger." + CAT_A_NAME, Level.INFO.toString());
repos2Settings.put("log4j.logger." + CAT_B_NAME, Level.DEBUG.toString());
repos2 = new Hierarchy(new RootLogger(Level.OFF));
new PropertyConfigurator().doConfigure(repos2Settings, repos2);
assertEquals(Level.INFO, repos2.getLogger(CAT_A_NAME).getLevel());
assertEquals(Level.DEBUG, repos2.getLogger(CAT_B_NAME).getLevel());
}
/**
* Test processing of log4j.reset property, see bug 17531.
*/
@Test
void testReset() {
final VectorAppender appender = new VectorAppender();
appender.setName("A1");
Logger.getRootLogger().addAppender(appender);
final Properties properties = new Properties();
properties.put("log4j.reset", "true");
PropertyConfigurator.configure(properties);
assertNull(Logger.getRootLogger().getAppender("A1"));
}
/**
* Test for bug 40944. configure(URL) never closed opened stream.
*
* @throws IOException if IOException creating properties file.
*/
@Test
void testURL() throws IOException {
final File file = new File("target/unclosed.properties");
try (final FileWriter writer = new FileWriter(file)) {
writer.write("log4j.rootLogger=debug");
}
final URL url = file.toURI().toURL();
PropertyConfigurator.configure(url);
assertTrue(file.delete());
assertFalse(file.exists());
}
/**
* Test for bug 40944. configure(URL) did not catch IllegalArgumentException and did not close stream.
*
*/
@Test
void testURLBadEscape() {
final URL configURL = PropertyConfiguratorTest.class.getResource(BAD_ESCAPE_PROPERTIES);
PropertyConfigurator.configure(configURL);
}
@Test
@SetTestProperty(key = "log4j1.compatibility", value = "false")
void when_compatibility_disabled_configurator_is_no_op() throws IOException {
final Logger rootLogger = Logger.getRootLogger();
final Logger logger = Logger.getLogger("org.apache.log4j.PropertyConfiguratorTest");
assertThat(logger.getLevel(), nullValue());
try (final InputStream inputStream = PropertyConfiguratorTest.class.getResourceAsStream(FILTER_PROPERTIES)) {
PropertyConfigurator.configure(inputStream);
assertThat(rootLogger.getAppender("CONSOLE"), nullValue());
assertThat(rootLogger.getLevel(), is(not(equalTo(Level.INFO))));
assertThat(logger.getAppender("ROLLING"), nullValue());
assertThat(logger.getLevel(), nullValue());
}
}
}
| TriggeringPolicy |
java | apache__camel | components/camel-http/src/test/java/org/apache/camel/component/http/HttpProducerSelectMethodTest.java | {
"start": 1390,
"end": 6070
} | class ____ extends BaseHttpTest {
private HttpServer localServer;
private String baseUrl;
private Exchange exchange;
@Override
public void setupResources() throws Exception {
localServer = ServerBootstrap.bootstrap()
.setCanonicalHostName("localhost").setHttpProcessor(getBasicHttpProcessor())
.setConnectionReuseStrategy(getConnectionReuseStrategy()).setResponseFactory(getHttpResponseFactory())
.setSslContext(getSSLContext())
.register("/myget", new BasicValidationHandler(GET.name(), null, null, getExpectedContent()))
.register("/mypost", new BasicValidationHandler(POST.name(), null, null, getExpectedContent()))
.register("/myget2", new BasicValidationHandler(GET.name(), "q=Camel", null, getExpectedContent()))
.create();
localServer.start();
baseUrl = "http://localhost:" + localServer.getLocalPort();
}
@Override
public void cleanupResources() throws Exception {
if (localServer != null) {
localServer.stop();
}
}
@Test
public void noDataDefaultIsGet() throws Exception {
HttpProducer producer = createProducer("/myget");
exchange = producer.createExchange();
exchange.getIn().setBody(null);
assertDoesNotThrow(() -> runProducer(producer));
assertExchange(exchange);
}
private void runProducer(HttpProducer producer) throws Exception {
producer.start();
producer.process(exchange);
producer.stop();
}
@Test
public void dataDefaultIsPost() throws Exception {
HttpProducer producer = createProducer("/mypost");
exchange = producer.createExchange();
exchange.getIn().setBody("This is some data to post");
assertDoesNotThrow(() -> runProducer(producer));
assertExchange(exchange);
}
@Test
public void withMethodPostInHeader() throws Exception {
HttpProducer producer = createProducer("/mypost");
exchange = producer.createExchange();
exchange.getIn().setBody("");
exchange.getIn().setHeader(Exchange.HTTP_METHOD, POST);
assertDoesNotThrow(() -> runProducer(producer));
}
@Test
public void withMethodGetInHeader() throws Exception {
HttpProducer producer = createProducer("/myget");
exchange = producer.createExchange();
exchange.getIn().setBody("");
exchange.getIn().setHeader(Exchange.HTTP_METHOD, GET);
assertDoesNotThrow(() -> runProducer(producer));
}
private HttpProducer createProducer(String path) throws Exception {
HttpComponent component = context.getComponent("http", HttpComponent.class);
HttpEndpoint endpoint = (HttpEndpoint) component.createEndpoint(baseUrl + path);
return new HttpProducer(endpoint);
}
@Test
public void withMethodCommonHttpGetInHeader() throws Exception {
HttpProducer producer = createProducer("/myget");
exchange = producer.createExchange();
exchange.getIn().setBody("");
exchange.getIn().setHeader(Exchange.HTTP_METHOD, org.apache.camel.http.common.HttpMethods.GET);
assertDoesNotThrow(() -> runProducer(producer));
}
@Test
public void withEndpointQuery() throws Exception {
HttpProducer producer = createProducer("/myget2?q=Camel");
exchange = producer.createExchange();
exchange.getIn().setBody("");
assertDoesNotThrow(() -> runProducer(producer));
}
@Test
public void withQueryInHeader() throws Exception {
HttpProducer producer = createProducer("/myget2");
exchange = producer.createExchange();
exchange.getIn().setBody("");
exchange.getIn().setHeader(Exchange.HTTP_QUERY, "q=Camel");
assertDoesNotThrow(() -> runProducer(producer));
}
@Test
public void withHttpURIInHeader() throws Exception {
HttpProducer producer = createProducer("/myget2");
exchange = producer.createExchange();
exchange.getIn().setBody("");
exchange.getIn().setHeader(Exchange.HTTP_URI, baseUrl + "/myget2?q=Camel");
assertDoesNotThrow(() -> runProducer(producer));
}
@Test
public void withQueryInHeaderOverrideEndpoint() throws Exception {
HttpProducer producer = createProducer("/myget2?q=Donkey");
exchange = producer.createExchange();
exchange.getIn().setBody("");
exchange.getIn().setHeader(Exchange.HTTP_QUERY, "q=Camel");
assertDoesNotThrow(() -> runProducer(producer));
}
}
| HttpProducerSelectMethodTest |
java | apache__camel | components/camel-vertx/camel-vertx-http/src/test/java/org/apache/camel/component/vertx/http/VertxHttpSSLGlobalConfigurationTest.java | {
"start": 1355,
"end": 4058
} | class ____ extends VertxHttpTestSupport {
@Test
public void testGlobalSSLContextParameters() {
String result = template.requestBody(getProducerUri(), null, String.class);
assertEquals("Hello World", result);
}
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
KeyStoreParameters keystoreParameters = new KeyStoreParameters();
keystoreParameters.setResource("server.jks");
keystoreParameters.setPassword("security");
KeyStoreParameters truststoreParameters = new KeyStoreParameters();
truststoreParameters.setResource("client.jks");
truststoreParameters.setPassword("storepass");
TrustManagersParameters clientSSLTrustManagers = new TrustManagersParameters();
clientSSLTrustManagers.setKeyStore(truststoreParameters);
SSLContextParameters clientSSLParameters = new SSLContextParameters();
clientSSLParameters.setTrustManagers(clientSSLTrustManagers);
KeyManagersParameters clientAuthClientSSLKeyManagers = new KeyManagersParameters();
clientAuthClientSSLKeyManagers.setKeyPassword("security");
clientAuthClientSSLKeyManagers.setKeyStore(keystoreParameters);
clientSSLParameters.setKeyManagers(clientAuthClientSSLKeyManagers);
camelContext.setSSLContextParameters(clientSSLParameters);
VertxHttpComponent component = new VertxHttpComponent();
component.setUseGlobalSslContextParameters(true);
camelContext.addComponent("vertx-http", component);
return camelContext;
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(getTestServerUri() + "?sslContextParameters=#serverSSLParameters")
.setBody(constant("Hello World"));
}
};
}
@Override
protected void bindToRegistry(Registry registry) {
SSLContextParameters serverSSLParameters = new SSLContextParameters();
KeyStoreParameters keystoreParameters = new KeyStoreParameters();
keystoreParameters.setResource("server.jks");
keystoreParameters.setPassword("security");
KeyManagersParameters serviceSSLKeyManagers = new KeyManagersParameters();
serviceSSLKeyManagers.setKeyPassword("security");
serviceSSLKeyManagers.setKeyStore(keystoreParameters);
serverSSLParameters.setKeyManagers(serviceSSLKeyManagers);
registry.bind("serverSSLParameters", serverSSLParameters);
}
}
| VertxHttpSSLGlobalConfigurationTest |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/oidc/OidcClientRegistration.java | {
"start": 3128,
"end": 8828
} | class ____ extends AbstractBuilder<OidcClientRegistration, Builder> {
private Builder() {
}
/**
* Add the post logout redirection {@code URI} used by the Client, OPTIONAL. The
* {@code post_logout_redirect_uri} parameter is used by the client when
* requesting that the End-User's User Agent be redirected to after a logout has
* been performed.
* @param postLogoutRedirectUri the post logout redirection {@code URI} used by
* the Client
* @return the {@link Builder} for further configuration
*/
public Builder postLogoutRedirectUri(String postLogoutRedirectUri) {
addClaimToClaimList(OidcClientMetadataClaimNames.POST_LOGOUT_REDIRECT_URIS, postLogoutRedirectUri);
return this;
}
/**
* A {@code Consumer} of the post logout redirection {@code URI} values used by
* the Client, allowing the ability to add, replace, or remove, OPTIONAL.
* @param postLogoutRedirectUrisConsumer a {@code Consumer} of the post logout
* redirection {@code URI} values used by the Client
* @return the {@link Builder} for further configuration
*/
public Builder postLogoutRedirectUris(Consumer<List<String>> postLogoutRedirectUrisConsumer) {
acceptClaimValues(OidcClientMetadataClaimNames.POST_LOGOUT_REDIRECT_URIS, postLogoutRedirectUrisConsumer);
return this;
}
/**
* Sets the {@link JwsAlgorithm JWS} algorithm that must be used for signing the
* {@link Jwt JWT} used to authenticate the Client at the Token Endpoint for the
* {@link ClientAuthenticationMethod#PRIVATE_KEY_JWT private_key_jwt} and
* {@link ClientAuthenticationMethod#CLIENT_SECRET_JWT client_secret_jwt}
* authentication methods, OPTIONAL.
* @param authenticationSigningAlgorithm the {@link JwsAlgorithm JWS} algorithm
* that must be used for signing the {@link Jwt JWT} used to authenticate the
* Client at the Token Endpoint
* @return the {@link Builder} for further configuration
*/
public Builder tokenEndpointAuthenticationSigningAlgorithm(String authenticationSigningAlgorithm) {
return claim(OidcClientMetadataClaimNames.TOKEN_ENDPOINT_AUTH_SIGNING_ALG, authenticationSigningAlgorithm);
}
/**
* Sets the {@link SignatureAlgorithm JWS} algorithm required for signing the
* {@link OidcIdToken ID Token} issued to the Client, OPTIONAL.
* @param idTokenSignedResponseAlgorithm the {@link SignatureAlgorithm JWS}
* algorithm required for signing the {@link OidcIdToken ID Token} issued to the
* Client
* @return the {@link Builder} for further configuration
*/
public Builder idTokenSignedResponseAlgorithm(String idTokenSignedResponseAlgorithm) {
return claim(OidcClientMetadataClaimNames.ID_TOKEN_SIGNED_RESPONSE_ALG, idTokenSignedResponseAlgorithm);
}
/**
* Sets the Registration Access Token that can be used at the Client Configuration
* Endpoint, OPTIONAL.
* @param registrationAccessToken the Registration Access Token that can be used
* at the Client Configuration Endpoint
* @return the {@link Builder} for further configuration
*/
public Builder registrationAccessToken(String registrationAccessToken) {
return claim(OidcClientMetadataClaimNames.REGISTRATION_ACCESS_TOKEN, registrationAccessToken);
}
/**
* Sets the {@code URL} of the Client Configuration Endpoint where the
* Registration Access Token can be used, OPTIONAL.
* @param registrationClientUrl the {@code URL} of the Client Configuration
* Endpoint where the Registration Access Token can be used
* @return the {@link Builder} for further configuration
*/
public Builder registrationClientUrl(String registrationClientUrl) {
return claim(OidcClientMetadataClaimNames.REGISTRATION_CLIENT_URI, registrationClientUrl);
}
/**
* Validate the claims and build the {@link OidcClientRegistration}.
* <p>
* The following claims are REQUIRED: {@code client_id}, {@code redirect_uris}.
* @return the {@link OidcClientRegistration}
*/
@Override
public OidcClientRegistration build() {
validate();
return new OidcClientRegistration(getClaims());
}
@Override
protected void validate() {
super.validate();
Assert.notNull(getClaims().get(OidcClientMetadataClaimNames.REDIRECT_URIS), "redirect_uris cannot be null");
Assert.isInstanceOf(List.class, getClaims().get(OidcClientMetadataClaimNames.REDIRECT_URIS),
"redirect_uris must be of type List");
Assert.notEmpty((List<?>) getClaims().get(OidcClientMetadataClaimNames.REDIRECT_URIS),
"redirect_uris cannot be empty");
if (getClaims().get(OidcClientMetadataClaimNames.POST_LOGOUT_REDIRECT_URIS) != null) {
Assert.isInstanceOf(List.class, getClaims().get(OidcClientMetadataClaimNames.POST_LOGOUT_REDIRECT_URIS),
"post_logout_redirect_uris must be of type List");
Assert.notEmpty((List<?>) getClaims().get(OidcClientMetadataClaimNames.POST_LOGOUT_REDIRECT_URIS),
"post_logout_redirect_uris cannot be empty");
}
}
@SuppressWarnings("unchecked")
private void addClaimToClaimList(String name, String value) {
Assert.hasText(name, "name cannot be empty");
Assert.notNull(value, "value cannot be null");
getClaims().computeIfAbsent(name, (k) -> new LinkedList<String>());
((List<String>) getClaims().get(name)).add(value);
}
@SuppressWarnings("unchecked")
private void acceptClaimValues(String name, Consumer<List<String>> valuesConsumer) {
Assert.hasText(name, "name cannot be empty");
Assert.notNull(valuesConsumer, "valuesConsumer cannot be null");
getClaims().computeIfAbsent(name, (k) -> new LinkedList<String>());
List<String> values = (List<String>) getClaims().get(name);
valuesConsumer.accept(values);
}
}
}
| Builder |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/jdbc/IsolatedTransactionModeSqlScriptsTests.java | {
"start": 1319,
"end": 1725
} | class ____ extends AbstractTransactionalTests {
@BeforeTransaction
void beforeTransaction() {
assertNumUsers(0);
}
@Test
@SqlGroup(@Sql(scripts = "data-add-dogbert.sql", config = @SqlConfig(transactionMode = TransactionMode.ISOLATED)))
void methodLevelScripts() {
assertNumUsers(1);
}
@AfterTransaction
void afterTransaction() {
assertNumUsers(1);
}
}
| IsolatedTransactionModeSqlScriptsTests |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/DynamicIntroductionAdvice.java | {
"start": 1614,
"end": 1752
} | interface ____ check
* @return whether the advice implements the specified interface
*/
boolean implementsInterface(Class<?> intf);
}
| to |
java | google__guava | guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java | {
"start": 25949,
"end": 26051
} | class ____ {
@Keep
public void oneArg(String s) {}
}
private static | BaseClassThatFailsToThrow |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/DeltaWriteBuilder.java | {
"start": 992,
"end": 1081
} | interface ____ building a {@link DeltaWrite}.
*
* @since 3.4.0
*/
@Experimental
public | for |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/io/ClassLoaderWrapper.java | {
"start": 5327,
"end": 6278
} | class ____ load
* @param classLoader
* - the group of classloaders to examine
*
* @return the class
*
* @throws ClassNotFoundException
* - Remember the wisdom of Judge Smails: Well, the world needs ditch diggers, too.
*/
Class<?> classForName(String name, ClassLoader[] classLoader) throws ClassNotFoundException {
for (ClassLoader cl : classLoader) {
if (null != cl) {
try {
return Class.forName(name, true, cl);
} catch (ClassNotFoundException e) {
// we'll ignore this until all classloaders fail to locate the class
}
}
}
throw new ClassNotFoundException("Cannot find class: " + name);
}
ClassLoader[] getClassLoaders(ClassLoader classLoader) {
return new ClassLoader[] { classLoader, defaultClassLoader, Thread.currentThread().getContextClassLoader(),
getClass().getClassLoader(), systemClassLoader };
}
}
| to |
java | apache__hadoop | hadoop-common-project/hadoop-registry/src/main/java/org/apache/hadoop/registry/client/types/ServiceRecord.java | {
"start": 1552,
"end": 8576
} | class ____ implements Cloneable {
/**
* A type string which MUST be in the serialized json. This permits
* fast discarding of invalid entries
*/
public static final String RECORD_TYPE = "JSONServiceRecord";
/**
* The type field. This must be the string {@link #RECORD_TYPE}
*/
public String type = RECORD_TYPE;
/**
* Description string
*/
public String description;
/**
* map to handle unknown attributes.
*/
private Map<String, String> attributes = new HashMap<String, String>(4);
/**
* List of endpoints intended for use to external callers
*/
public List<Endpoint> external = new ArrayList<Endpoint>();
/**
* List of endpoints for use <i>within</i> an application.
*/
public List<Endpoint> internal = new ArrayList<Endpoint>();
/**
* Create a service record with no ID, description or registration time.
* Endpoint lists are set to empty lists.
*/
public ServiceRecord() {
}
/**
* Deep cloning constructor
* @param that service record source
*/
public ServiceRecord(ServiceRecord that) {
this.description = that.description;
// others
Map<String, String> thatAttrs = that.attributes;
for (Map.Entry<String, String> entry : thatAttrs.entrySet()) {
attributes.put(entry.getKey(), entry.getValue());
}
// endpoints
List<Endpoint> src = that.internal;
if (src != null) {
internal = new ArrayList<Endpoint>(src.size());
for (Endpoint endpoint : src) {
internal.add(new Endpoint(endpoint));
}
}
src = that.external;
if (src != null) {
external = new ArrayList<Endpoint>(src.size());
for (Endpoint endpoint : src) {
external.add(new Endpoint(endpoint));
}
}
}
/**
* Add an external endpoint
* @param endpoint endpoint to set
*/
public void addExternalEndpoint(Endpoint endpoint) {
Preconditions.checkArgument(endpoint != null);
endpoint.validate();
external.add(endpoint);
}
/**
* Add an internal endpoint
* @param endpoint endpoint to set
*/
public void addInternalEndpoint(Endpoint endpoint) {
Preconditions.checkArgument(endpoint != null);
endpoint.validate();
internal.add(endpoint);
}
/**
* Look up an internal endpoint
* @param api API
* @return the endpoint or null if there was no match
*/
public Endpoint getInternalEndpoint(String api) {
return findByAPI(internal, api);
}
/**
* Look up an external endpoint
* @param api API
* @return the endpoint or null if there was no match
*/
public Endpoint getExternalEndpoint(String api) {
return findByAPI(external, api);
}
/**
* Handle unknown attributes by storing them in the
* {@link #attributes} map
* @param key attribute name
* @param value attribute value.
*/
@JsonAnySetter
public void set(String key, Object value) {
attributes.put(key, value.toString());
}
/**
* The map of "other" attributes set when parsing. These
* are not included in the JSON value of this record when it
* is generated.
* @return a map of any unknown attributes in the deserialized JSON.
*/
@JsonAnyGetter
public Map<String, String> attributes() {
return attributes;
}
/**
* Get the "other" attribute with a specific key
* @param key key to look up
* @return the value or null
*/
public String get(String key) {
return attributes.get(key);
}
/**
* Get the "other" attribute with a specific key.
* @param key key to look up
* @param defVal default value
* @return the value as a string,
* or <code>defval</code> if the value was not present
*/
public String get(String key, String defVal) {
String val = attributes.get(key);
return val != null ? val: defVal;
}
/**
* Find an endpoint by its API
* @param list list
* @param api api name
* @return the endpoint or null if there was no match
*/
private Endpoint findByAPI(List<Endpoint> list, String api) {
for (Endpoint endpoint : list) {
if (endpoint.api.equals(api)) {
return endpoint;
}
}
return null;
}
@Override
public String toString() {
final StringBuilder sb =
new StringBuilder("ServiceRecord{");
sb.append("description='").append(description).append('\'');
sb.append("; external endpoints: {");
for (Endpoint endpoint : external) {
sb.append(endpoint).append("; ");
}
sb.append("}; internal endpoints: {");
for (Endpoint endpoint : internal) {
sb.append(endpoint != null ? endpoint.toString() : "NULL ENDPOINT");
sb.append("; ");
}
sb.append('}');
if (!attributes.isEmpty()) {
sb.append(", attributes: {");
for (Map.Entry<String, String> attr : attributes.entrySet()) {
sb.append("\"").append(attr.getKey()).append("\"=\"")
.append(attr.getValue()).append("\" ");
}
} else {
sb.append(", attributes: {");
}
sb.append('}');
sb.append('}');
return sb.toString();
}
/**
* Shallow clone: all endpoints will be shared across instances
* @return a clone of the instance
* @throws CloneNotSupportedException
*/
@Override
protected Object clone() throws CloneNotSupportedException {
return super.clone();
}
@Override
public int hashCode() {
// Generated by eclipse
final int prime = 31;
int result = 1;
result =
prime * result + ((attributes == null) ? 0 : attributes.hashCode());
result =
prime * result + ((description == null) ? 0 : description.hashCode());
result = prime * result + ((external == null) ? 0 : external.hashCode());
result = prime * result + ((internal == null) ? 0 : internal.hashCode());
result = prime * result + ((type == null) ? 0 : type.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
ServiceRecord other = (ServiceRecord) obj;
if (attributes == null) {
if (other.attributes != null) {
return false;
}
} else if (!attributes.equals(other.attributes)) {
return false;
}
if (description == null) {
if (other.description != null) {
return false;
}
} else if (!description.equals(other.description)) {
return false;
}
if (external == null) {
if (other.external != null) {
return false;
}
} else if (!external.equals(other.external)) {
return false;
}
if (internal == null) {
if (other.internal != null) {
return false;
}
} else if (!internal.equals(other.internal)) {
return false;
}
if (type == null) {
if (other.type != null) {
return false;
}
} else if (!type.equals(other.type)) {
return false;
}
return true;
}
}
| ServiceRecord |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/channel/ChannelStateWriterImplTest.java | {
"start": 2006,
"end": 14784
} | class ____ {
private static final long CHECKPOINT_ID = 42L;
private static final String TASK_NAME = "test";
private static final JobID JOB_ID = new JobID();
private static final JobVertexID JOB_VERTEX_ID = new JobVertexID();
private static final int SUBTASK_INDEX = 0;
private static final CheckpointStorage CHECKPOINT_STORAGE = new JobManagerCheckpointStorage();
@Test
void testAddEventBuffer() throws Exception {
NetworkBuffer dataBuf = getBuffer();
NetworkBuffer eventBuf = getBuffer();
eventBuf.setDataType(Buffer.DataType.EVENT_BUFFER);
executeCallbackWithSyncWorker(
(writer, worker) -> {
callStart(writer);
callAddInputData(writer, eventBuf, dataBuf);
assertThatThrownBy(worker::processAllRequests)
.isInstanceOf(IllegalArgumentException.class);
});
assertThat(dataBuf.isRecycled()).isTrue();
}
@Test
void testResultCompletion() throws IOException {
ChannelStateWriteResult result;
try (ChannelStateWriterImpl writer = openWriter()) {
callStart(writer);
result = writer.getAndRemoveWriteResult(CHECKPOINT_ID);
assertThat(result.resultSubpartitionStateHandles).isNotDone();
assertThat(result.inputChannelStateHandles).isNotDone();
}
assertThat(result.inputChannelStateHandles).isDone();
assertThat(result.resultSubpartitionStateHandles).isDone();
}
@Test
void testAbort() throws Exception {
NetworkBuffer buffer = getBuffer();
executeCallbackWithSyncWorker(
(writer, worker) -> {
callStart(writer);
ChannelStateWriteResult result = writer.getAndRemoveWriteResult(CHECKPOINT_ID);
callAddInputData(writer, buffer);
callAbort(writer);
worker.processAllRequests();
assertThat(result.isDone()).isTrue();
assertThat(buffer.isRecycled()).isTrue();
});
}
@Test
void testAbortClearsResults() throws Exception {
executeCallbackWithSyncWorker(
(writer, worker) -> {
callStart(writer);
writer.abort(CHECKPOINT_ID, new TestException(), true);
assertThatThrownBy(() -> writer.getAndRemoveWriteResult(CHECKPOINT_ID))
.isInstanceOf(IllegalArgumentException.class);
});
}
@Test
void testAbortDoesNotClearsResults() throws Exception {
executeCallbackWithSyncWorker(
(writer, worker) -> {
callStart(writer);
callAbort(writer);
worker.processAllRequests();
writer.getAndRemoveWriteResult(CHECKPOINT_ID);
});
}
@Test
void testAbortIgnoresMissing() throws Exception {
executeCallbackAndProcessWithSyncWorker(this::callAbort);
}
@Test
void testAbortOldAndStartNewCheckpoint() throws Exception {
executeCallbackWithSyncWorker(
(writer, worker) -> {
int checkpoint42 = 42;
int checkpoint43 = 43;
writer.start(
checkpoint42, CheckpointOptions.forCheckpointWithDefaultLocation());
writer.abort(checkpoint42, new TestException(), false);
writer.start(
checkpoint43, CheckpointOptions.forCheckpointWithDefaultLocation());
worker.processAllRequests();
ChannelStateWriteResult result42 = writer.getAndRemoveWriteResult(checkpoint42);
assertThat(result42.isDone()).isTrue();
assertThatThrownBy(() -> result42.getInputChannelStateHandles().get())
.as("The result should have failed.")
.hasCauseInstanceOf(TestException.class);
ChannelStateWriteResult result43 = writer.getAndRemoveWriteResult(checkpoint43);
assertThat(result43.isDone()).isFalse();
});
}
@Test
void testBuffersRecycledOnError() {
NetworkBuffer buffer = getBuffer();
ChannelStateWriterImpl writer =
new ChannelStateWriterImpl(
JOB_VERTEX_ID,
TASK_NAME,
SUBTASK_INDEX,
new ConcurrentHashMap<>(),
failingWorker(),
5);
assertThatThrownBy(() -> callAddInputData(writer, buffer))
.isInstanceOf(RuntimeException.class)
.hasCauseInstanceOf(TestException.class);
assertThat(buffer.isRecycled()).isTrue();
}
@Test
void testBuffersRecycledOnClose() throws Exception {
NetworkBuffer buffer = getBuffer();
executeCallbackAndProcessWithSyncWorker(
writer -> {
callStart(writer);
callAddInputData(writer, buffer);
assertThat(buffer.isRecycled()).isFalse();
});
assertThat(buffer.isRecycled()).isTrue();
}
@Test
void testNoAddDataAfterFinished() throws Exception {
executeCallbackWithSyncWorker(
(writer, worker) -> {
callStart(writer);
callFinish(writer);
worker.processAllRequests();
callAddInputData(writer);
assertThatThrownBy(worker::processAllRequests)
.isInstanceOf(IllegalArgumentException.class);
});
}
@Test
void testAddDataNotStarted() {
assertThatThrownBy(() -> executeCallbackAndProcessWithSyncWorker(this::callAddInputData))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testFinishNotStarted() {
assertThatThrownBy(() -> executeCallbackAndProcessWithSyncWorker(this::callFinish))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testRethrowOnClose() {
assertThatThrownBy(
() ->
executeCallbackAndProcessWithSyncWorker(
writer -> {
try {
callFinish(writer);
} catch (IllegalArgumentException e) {
// ignore here - should rethrow in
// close
}
}))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void testRethrowOnNextCall() {
SyncChannelStateWriteRequestExecutor worker =
new SyncChannelStateWriteRequestExecutor(JOB_ID);
ChannelStateWriterImpl writer =
new ChannelStateWriterImpl(
JOB_VERTEX_ID,
TASK_NAME,
SUBTASK_INDEX,
new ConcurrentHashMap<>(),
worker,
5);
worker.registerSubtask(JOB_VERTEX_ID, SUBTASK_INDEX);
worker.setThrown(new TestException());
assertThatThrownBy(() -> callStart(writer)).hasCauseInstanceOf(TestException.class);
}
@Test
void testLimit() throws IOException {
int maxCheckpoints = 3;
try (ChannelStateWriterImpl writer =
new ChannelStateWriterImpl(
JOB_VERTEX_ID,
TASK_NAME,
SUBTASK_INDEX,
() -> CHECKPOINT_STORAGE.createCheckpointStorage(JOB_ID),
maxCheckpoints,
new ChannelStateWriteRequestExecutorFactory(JOB_ID),
5)) {
for (int i = 0; i < maxCheckpoints; i++) {
writer.start(i, CheckpointOptions.forCheckpointWithDefaultLocation());
}
assertThatThrownBy(
() ->
writer.start(
maxCheckpoints,
CheckpointOptions.forCheckpointWithDefaultLocation()))
.isInstanceOf(IllegalStateException.class);
}
}
@Test
void testNoStartAfterClose() throws IOException {
ChannelStateWriterImpl writer = openWriter();
writer.close();
assertThatThrownBy(
() ->
writer.start(
42, CheckpointOptions.forCheckpointWithDefaultLocation()))
.hasCauseInstanceOf(IllegalStateException.class);
}
@Test
void testNoAddDataAfterClose() throws IOException {
ChannelStateWriterImpl writer = openWriter();
callStart(writer);
writer.close();
assertThatThrownBy(() -> callAddInputData(writer))
.hasCauseInstanceOf(IllegalStateException.class);
}
private NetworkBuffer getBuffer() {
return new NetworkBuffer(
MemorySegmentFactory.allocateUnpooledSegment(123, null),
FreeingBufferRecycler.INSTANCE);
}
private ChannelStateWriteRequestExecutor failingWorker() {
return new ChannelStateWriteRequestExecutor() {
@Override
public void submit(ChannelStateWriteRequest e) {
throw new TestException();
}
@Override
public void submitPriority(ChannelStateWriteRequest e) {
throw new TestException();
}
@Override
public void start() throws IllegalStateException {}
@Override
public void registerSubtask(JobVertexID jobVertexID, int subtaskIndex) {}
@Override
public void releaseSubtask(JobVertexID jobVertexID, int subtaskIndex) {}
};
}
private void executeCallbackAndProcessWithSyncWorker(
Consumer<ChannelStateWriter> writerConsumer) throws Exception {
executeCallbackWithSyncWorker(
(channelStateWriter, syncChannelStateWriterWorker) -> {
writerConsumer.accept(channelStateWriter);
syncChannelStateWriterWorker.processAllRequests();
});
}
private void executeCallbackWithSyncWorker(
BiConsumerWithException<
ChannelStateWriter, SyncChannelStateWriteRequestExecutor, Exception>
testFn)
throws Exception {
SyncChannelStateWriteRequestExecutor worker =
new SyncChannelStateWriteRequestExecutor(JOB_ID);
try (ChannelStateWriterImpl writer =
new ChannelStateWriterImpl(
JOB_VERTEX_ID,
TASK_NAME,
SUBTASK_INDEX,
new ConcurrentHashMap<>(),
worker,
5)) {
worker.registerSubtask(JOB_VERTEX_ID, SUBTASK_INDEX);
testFn.accept(writer, worker);
} finally {
worker.releaseSubtask(JOB_VERTEX_ID, SUBTASK_INDEX);
}
}
private ChannelStateWriterImpl openWriter() throws IOException {
return new ChannelStateWriterImpl(
JOB_VERTEX_ID,
TASK_NAME,
SUBTASK_INDEX,
() -> CHECKPOINT_STORAGE.createCheckpointStorage(JOB_ID),
new ChannelStateWriteRequestExecutorFactory(JOB_ID),
5);
}
private void callStart(ChannelStateWriter writer) {
writer.start(CHECKPOINT_ID, CheckpointOptions.forCheckpointWithDefaultLocation());
}
private void callAddInputData(ChannelStateWriter writer, NetworkBuffer... buffer) {
writer.addInputData(
CHECKPOINT_ID,
new InputChannelInfo(1, 1),
1,
ofElements(Buffer::recycleBuffer, buffer));
}
private void callAbort(ChannelStateWriter writer) {
writer.abort(CHECKPOINT_ID, new TestException(), false);
}
private void callFinish(ChannelStateWriter writer) {
writer.finishInput(CHECKPOINT_ID);
writer.finishOutput(CHECKPOINT_ID);
}
}
| ChannelStateWriterImplTest |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/JUnitAmbiguousTestClass.java | {
"start": 1212,
"end": 1348
} | class ____ from JUnit 3's TestCase but has JUnit 4 @Test or @RunWith"
+ " annotations.",
severity = WARNING)
public | inherits |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ParameterMissingNullableTest.java | {
"start": 13324,
"end": 13618
} | class ____ {
void bar(Foo foo) {
foo.foo(null);
}
}
""")
.doTest();
}
@Test
public void negativeCallVarargs() {
conservativeHelper
.addSourceLines(
"Foo.java",
"""
| Bar |
java | quarkusio__quarkus | core/runtime/src/main/java/io/quarkus/runtime/ExecutorRecorder.java | {
"start": 10716,
"end": 11034
} | class ____ implements IntSupplier {
private static final MaxThreadsCalculator INSTANCE = new MaxThreadsCalculator();
private MaxThreadsCalculator() {
}
@Override
public int getAsInt() {
return Holder.CALCULATION;
}
private static | MaxThreadsCalculator |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLCostStatement.java | {
"start": 204,
"end": 744
} | class ____ extends SQLStatementImpl {
protected SQLStatement statement;
public SQLStatement getStatement() {
return statement;
}
public void setStatement(SQLStatement statement) {
if (statement != null) {
statement.setParent(this);
}
this.statement = statement;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, statement);
}
visitor.endVisit(this);
}
}
| SQLCostStatement |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/graph/StreamConfig.java | {
"start": 35054,
"end": 36144
} | class ____ implements InputConfig {
private final TypeSerializer<?> typeSerializer;
private final InputRequirement inputRequirement;
private int inputGateIndex;
public NetworkInputConfig(TypeSerializer<?> typeSerializer, int inputGateIndex) {
this(typeSerializer, inputGateIndex, InputRequirement.PASS_THROUGH);
}
public NetworkInputConfig(
TypeSerializer<?> typeSerializer,
int inputGateIndex,
InputRequirement inputRequirement) {
this.typeSerializer = typeSerializer;
this.inputGateIndex = inputGateIndex;
this.inputRequirement = inputRequirement;
}
public TypeSerializer<?> getTypeSerializer() {
return typeSerializer;
}
public int getInputGateIndex() {
return inputGateIndex;
}
public InputRequirement getInputRequirement() {
return inputRequirement;
}
}
/** A serialized representation of an input. */
public static | NetworkInputConfig |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonMultimap.java | {
"start": 14671,
"end": 16157
} | class ____ extends AbstractSet<K> {
@Override
public Iterator<K> iterator() {
return new RedissonBaseMapIterator<K>() {
@Override
protected K getValue(java.util.Map.Entry<Object, Object> entry) {
return (K) entry.getKey();
}
@Override
protected Object put(Entry<Object, Object> entry, Object value) {
return RedissonMultimap.this.put((K) entry.getKey(), (V) value);
}
@Override
protected ScanResult<Entry<Object, Object>> iterator(RedisClient client, String nextIterPos) {
return RedissonMultimap.this.scanIterator(client, nextIterPos);
}
@Override
protected void remove(Entry<Object, Object> value) {
RedissonMultimap.this.fastRemove((K) value.getKey());
}
};
}
@Override
public boolean contains(Object o) {
return RedissonMultimap.this.containsKey(o);
}
@Override
public boolean remove(Object o) {
return RedissonMultimap.this.fastRemove((K) o) == 1;
}
@Override
public int size() {
return RedissonMultimap.this.keySize();
}
@Override
public void clear() {
RedissonMultimap.this.clear();
}
}
final | KeySet |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/EntryPointAssertionsBaseTest.java | {
"start": 750,
"end": 896
} | class ____ {
protected static final WithAssertions withAssertions = mock(WithAssertions.class, CALLS_REAL_METHODS);
}
| EntryPointAssertionsBaseTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/aggregate/OracleAggregateSupport.java | {
"start": 33662,
"end": 37491
} | class ____ implements XmlWriteExpression {
private final SelectableMapping selectableMapping;
private final String columnDefinition;
private final LinkedHashMap<String, XmlWriteExpression> subExpressions = new LinkedHashMap<>();
private AggregateXmlWriteExpression(SelectableMapping selectableMapping, String columnDefinition) {
this.selectableMapping = selectableMapping;
this.columnDefinition = columnDefinition;
}
protected void initializeSubExpressions(SelectableMapping aggregateColumn, SelectableMapping[] columns, TypeConfiguration typeConfiguration) {
for ( SelectableMapping column : columns ) {
final SelectablePath selectablePath = column.getSelectablePath();
final SelectablePath[] parts = selectablePath.getParts();
AggregateXmlWriteExpression currentAggregate = this;
for ( int i = 1; i < parts.length - 1; i++ ) {
final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) currentAggregate.selectableMapping.getJdbcMapping().getJdbcType();
final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
final int selectableIndex = embeddableMappingType.getSelectableIndex( parts[i].getSelectableName() );
currentAggregate = (AggregateXmlWriteExpression) currentAggregate.subExpressions.computeIfAbsent(
parts[i].getSelectableName(),
k -> new AggregateXmlWriteExpression( embeddableMappingType.getJdbcValueSelectable( selectableIndex ), columnDefinition )
);
}
final String customWriteExpression = column.getWriteExpression();
currentAggregate.subExpressions.put(
parts[parts.length - 1].getSelectableName(),
new BasicXmlWriteExpression(
column,
xmlCustomWriteExpression( customWriteExpression, column.getJdbcMapping(), typeConfiguration )
)
);
}
passThroughUnsetSubExpressions( aggregateColumn );
}
protected void passThroughUnsetSubExpressions(SelectableMapping aggregateColumn) {
final AggregateJdbcType aggregateJdbcType = (AggregateJdbcType) aggregateColumn.getJdbcMapping().getJdbcType();
final EmbeddableMappingType embeddableMappingType = aggregateJdbcType.getEmbeddableMappingType();
final int jdbcValueCount = embeddableMappingType.getJdbcValueCount();
for ( int i = 0; i < jdbcValueCount; i++ ) {
final SelectableMapping selectableMapping = embeddableMappingType.getJdbcValueSelectable( i );
final XmlWriteExpression xmlWriteExpression = subExpressions.get( selectableMapping.getSelectableName() );
if ( xmlWriteExpression == null ) {
subExpressions.put(
selectableMapping.getSelectableName(),
new PassThroughXmlWriteExpression( selectableMapping )
);
}
else if ( xmlWriteExpression instanceof AggregateXmlWriteExpression writeExpression ) {
writeExpression.passThroughUnsetSubExpressions( selectableMapping );
}
}
}
protected String getTagName() {
return selectableMapping.getSelectableName();
}
@Override
public void append(
SqlAppender sb,
String path,
SqlAstTranslator<?> translator,
AggregateColumnWriteExpression expression) {
sb.append( "xmlelement(" );
sb.appendDoubleQuoteEscapedString( getTagName() );
sb.append( ",xmlconcat" );
char separator = '(';
for ( Map.Entry<String, XmlWriteExpression> entry : subExpressions.entrySet() ) {
sb.append( separator );
final XmlWriteExpression value = entry.getValue();
if ( value instanceof AggregateXmlWriteExpression ) {
final String subPath = "xmlquery(" + xmlExtractArguments( path, entry.getKey() ) + ")";
value.append( sb, subPath, translator, expression );
}
else {
value.append( sb, path, translator, expression );
}
separator = ',';
}
sb.append( "))" );
}
}
private static | AggregateXmlWriteExpression |
java | apache__camel | components/camel-braintree/src/generated/java/org/apache/camel/component/braintree/SubscriptionGatewayEndpointConfigurationConfigurer.java | {
"start": 748,
"end": 9711
} | class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("AccessToken", java.lang.String.class);
map.put("Amount", java.math.BigDecimal.class);
map.put("ApiName", org.apache.camel.component.braintree.internal.BraintreeApiName.class);
map.put("CustomerId", java.lang.String.class);
map.put("Environment", java.lang.String.class);
map.put("HttpLogLevel", java.lang.String.class);
map.put("HttpLogName", java.lang.String.class);
map.put("HttpReadTimeout", java.lang.Integer.class);
map.put("Id", java.lang.String.class);
map.put("LogHandlerEnabled", boolean.class);
map.put("MerchantId", java.lang.String.class);
map.put("MethodName", java.lang.String.class);
map.put("PrivateKey", java.lang.String.class);
map.put("ProxyHost", java.lang.String.class);
map.put("ProxyPort", java.lang.Integer.class);
map.put("PublicKey", java.lang.String.class);
map.put("Request", com.braintreegateway.SubscriptionRequest.class);
map.put("SearchRequest", com.braintreegateway.SubscriptionSearchRequest.class);
map.put("SubmitForSettlement", java.lang.Boolean.class);
map.put("SubscriptionId", java.lang.String.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.component.braintree.SubscriptionGatewayEndpointConfiguration target = (org.apache.camel.component.braintree.SubscriptionGatewayEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": target.setAccessToken(property(camelContext, java.lang.String.class, value)); return true;
case "amount": target.setAmount(property(camelContext, java.math.BigDecimal.class, value)); return true;
case "apiname":
case "apiName": target.setApiName(property(camelContext, org.apache.camel.component.braintree.internal.BraintreeApiName.class, value)); return true;
case "customerid":
case "customerId": target.setCustomerId(property(camelContext, java.lang.String.class, value)); return true;
case "environment": target.setEnvironment(property(camelContext, java.lang.String.class, value)); return true;
case "httploglevel":
case "httpLogLevel": target.setHttpLogLevel(property(camelContext, java.lang.String.class, value)); return true;
case "httplogname":
case "httpLogName": target.setHttpLogName(property(camelContext, java.lang.String.class, value)); return true;
case "httpreadtimeout":
case "httpReadTimeout": target.setHttpReadTimeout(property(camelContext, java.lang.Integer.class, value)); return true;
case "id": target.setId(property(camelContext, java.lang.String.class, value)); return true;
case "loghandlerenabled":
case "logHandlerEnabled": target.setLogHandlerEnabled(property(camelContext, boolean.class, value)); return true;
case "merchantid":
case "merchantId": target.setMerchantId(property(camelContext, java.lang.String.class, value)); return true;
case "methodname":
case "methodName": target.setMethodName(property(camelContext, java.lang.String.class, value)); return true;
case "privatekey":
case "privateKey": target.setPrivateKey(property(camelContext, java.lang.String.class, value)); return true;
case "proxyhost":
case "proxyHost": target.setProxyHost(property(camelContext, java.lang.String.class, value)); return true;
case "proxyport":
case "proxyPort": target.setProxyPort(property(camelContext, java.lang.Integer.class, value)); return true;
case "publickey":
case "publicKey": target.setPublicKey(property(camelContext, java.lang.String.class, value)); return true;
case "request": target.setRequest(property(camelContext, com.braintreegateway.SubscriptionRequest.class, value)); return true;
case "searchrequest":
case "searchRequest": target.setSearchRequest(property(camelContext, com.braintreegateway.SubscriptionSearchRequest.class, value)); return true;
case "submitforsettlement":
case "submitForSettlement": target.setSubmitForSettlement(property(camelContext, java.lang.Boolean.class, value)); return true;
case "subscriptionid":
case "subscriptionId": target.setSubscriptionId(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": return java.lang.String.class;
case "amount": return java.math.BigDecimal.class;
case "apiname":
case "apiName": return org.apache.camel.component.braintree.internal.BraintreeApiName.class;
case "customerid":
case "customerId": return java.lang.String.class;
case "environment": return java.lang.String.class;
case "httploglevel":
case "httpLogLevel": return java.lang.String.class;
case "httplogname":
case "httpLogName": return java.lang.String.class;
case "httpreadtimeout":
case "httpReadTimeout": return java.lang.Integer.class;
case "id": return java.lang.String.class;
case "loghandlerenabled":
case "logHandlerEnabled": return boolean.class;
case "merchantid":
case "merchantId": return java.lang.String.class;
case "methodname":
case "methodName": return java.lang.String.class;
case "privatekey":
case "privateKey": return java.lang.String.class;
case "proxyhost":
case "proxyHost": return java.lang.String.class;
case "proxyport":
case "proxyPort": return java.lang.Integer.class;
case "publickey":
case "publicKey": return java.lang.String.class;
case "request": return com.braintreegateway.SubscriptionRequest.class;
case "searchrequest":
case "searchRequest": return com.braintreegateway.SubscriptionSearchRequest.class;
case "submitforsettlement":
case "submitForSettlement": return java.lang.Boolean.class;
case "subscriptionid":
case "subscriptionId": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.component.braintree.SubscriptionGatewayEndpointConfiguration target = (org.apache.camel.component.braintree.SubscriptionGatewayEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": return target.getAccessToken();
case "amount": return target.getAmount();
case "apiname":
case "apiName": return target.getApiName();
case "customerid":
case "customerId": return target.getCustomerId();
case "environment": return target.getEnvironment();
case "httploglevel":
case "httpLogLevel": return target.getHttpLogLevel();
case "httplogname":
case "httpLogName": return target.getHttpLogName();
case "httpreadtimeout":
case "httpReadTimeout": return target.getHttpReadTimeout();
case "id": return target.getId();
case "loghandlerenabled":
case "logHandlerEnabled": return target.isLogHandlerEnabled();
case "merchantid":
case "merchantId": return target.getMerchantId();
case "methodname":
case "methodName": return target.getMethodName();
case "privatekey":
case "privateKey": return target.getPrivateKey();
case "proxyhost":
case "proxyHost": return target.getProxyHost();
case "proxyport":
case "proxyPort": return target.getProxyPort();
case "publickey":
case "publicKey": return target.getPublicKey();
case "request": return target.getRequest();
case "searchrequest":
case "searchRequest": return target.getSearchRequest();
case "submitforsettlement":
case "submitForSettlement": return target.getSubmitForSettlement();
case "subscriptionid":
case "subscriptionId": return target.getSubscriptionId();
default: return null;
}
}
}
| SubscriptionGatewayEndpointConfigurationConfigurer |
java | apache__kafka | connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/WorkerCoordinator.java | {
"start": 21893,
"end": 22369
} | class ____ {
private final String worker;
private final Collection<String> connectors;
private final Collection<ConnectorTaskId> tasks;
private WorkerLoad(
String worker,
Collection<String> connectors,
Collection<ConnectorTaskId> tasks
) {
this.worker = worker;
this.connectors = connectors;
this.tasks = tasks;
}
public static | WorkerLoad |
java | qos-ch__slf4j | log4j-over-slf4j/src/main/java/org/apache/log4j/Appender.java | {
"start": 906,
"end": 3678
} | interface ____ {
/**
* Add a filter to the end of the filter list.
*
* @since 0.9.0
*/
void addFilter(Filter newFilter);
/**
* Returns the head Filter. The Filters are organized in a linked list
* and so all Filters on this Appender are available through the result.
*
* @return the head Filter or null, if no Filters are present
* @since 1.1
*/
public Filter getFilter();
/**
* Clear the list of filters by removing all the filters in it.
*
* @since 0.9.0
*/
public void clearFilters();
/**
* Release any resources allocated within the appender such as file
* handles, network connections, etc.
*
* <p>It is a programming error to append to a closed appender.
*
* @since 0.8.4
*/
public void close();
/**
* Log in <code>Appender</code> specific way. When appropriate,
* Loggers will call the <code>doAppend</code> method of appender
* implementations in order to log.
*/
public void doAppend(LoggingEvent event);
/**
* Get the name of this appender. The name uniquely identifies the
* appender.
*/
public String getName();
/**
* Set the {@link ErrorHandler} for this appender.
*
* @since 0.9.0
*/
public void setErrorHandler(ErrorHandler errorHandler);
/**
* Returns the {@link ErrorHandler} for this appender.
*
* @since 1.1
*/
public ErrorHandler getErrorHandler();
/**
* Set the {@link Layout} for this appender.
*
* @since 0.8.1
*/
public void setLayout(Layout layout);
/**
* Returns this appenders layout.
*
* @since 1.1
*/
public Layout getLayout();
/**
* Set the name of this appender. The name is used by other
* components to identify this appender.
*
* @since 0.8.1
*/
public void setName(String name);
/**
* Configurators call this method to determine if the appender
* requires a layout. If this method returns <code>true</code>,
* meaning that layout is required, then the configurator will
* configure a layout using the configuration information at its
* disposal. If this method returns <code>false</code>, meaning that
* a layout is not required, then layout configuration will be
* skipped even if there is available layout configuration
* information at the disposal of the configurator.
*
* <p>In the rather exceptional case, where the appender
* implementation admits a layout but can also work without it, then
* the appender should return <code>true</code>.
*
* @since 0.8.4
*/
public boolean requiresLayout();
}
| Appender |
java | apache__camel | components/camel-openstack/src/main/java/org/apache/camel/component/openstack/neutron/producer/PortProducer.java | {
"start": 1546,
"end": 5164
} | class ____ extends AbstractOpenstackProducer {
public PortProducer(NeutronEndpoint endpoint, OSClient client) {
super(endpoint, client);
}
@Override
public void process(Exchange exchange) throws Exception {
final String operation = getOperation(exchange);
switch (operation) {
case OpenstackConstants.CREATE:
doCreate(exchange);
break;
case OpenstackConstants.GET:
doGet(exchange);
break;
case OpenstackConstants.GET_ALL:
doGetAll(exchange);
break;
case OpenstackConstants.UPDATE:
doUpdate(exchange);
break;
case OpenstackConstants.DELETE:
doDelete(exchange);
break;
default:
throw new IllegalArgumentException("Unsupported operation " + operation);
}
}
private void doCreate(Exchange exchange) {
final Port in = messageToPort(exchange.getIn());
final Port out = os.networking().port().create(in);
exchange.getIn().setBody(out);
}
private void doGet(Exchange exchange) {
final Message msg = exchange.getIn();
final String id
= msg.getHeader(OpenstackConstants.ID, msg.getHeader(NeutronConstants.PORT_ID, String.class), String.class);
StringHelper.notEmpty(id, "Port ID");
final Port result = os.networking().port().get(id);
msg.setBody(result);
}
private void doGetAll(Exchange exchange) {
final List<? extends Port> out = os.networking().port().list();
exchange.getIn().setBody(out);
}
private void doUpdate(Exchange exchange) {
final Message msg = exchange.getIn();
final Port port = messageToPort(msg);
final Port updatedPort = os.networking().port().update(port);
msg.setBody(updatedPort);
}
private void doDelete(Exchange exchange) {
final Message msg = exchange.getIn();
final String id
= msg.getHeader(OpenstackConstants.ID, msg.getHeader(NeutronConstants.PORT_ID, String.class), String.class);
StringHelper.notEmpty(id, "Port ID");
final ActionResponse response = os.networking().port().delete(id);
checkFailure(response, exchange, "Delete port with ID " + id);
}
private Port messageToPort(Message message) {
Port port = message.getBody(Port.class);
if (port == null) {
Map headers = message.getHeaders();
PortBuilder builder = Builders.port();
StringHelper.notEmpty(message.getHeader(OpenstackConstants.NAME, String.class), "Name");
builder.name(message.getHeader(OpenstackConstants.NAME, String.class));
if (headers.containsKey(NeutronConstants.TENANT_ID)) {
builder.tenantId(message.getHeader(NeutronConstants.TENANT_ID, String.class));
}
if (headers.containsKey(NeutronConstants.NETWORK_ID)) {
builder.networkId(message.getHeader(NeutronConstants.NETWORK_ID, String.class));
}
if (headers.containsKey(NeutronConstants.DEVICE_ID)) {
builder.deviceId(message.getHeader(NeutronConstants.DEVICE_ID, String.class));
}
if (headers.containsKey(NeutronConstants.MAC_ADDRESS)) {
builder.macAddress(message.getHeader(NeutronConstants.MAC_ADDRESS, String.class));
}
port = builder.build();
}
return port;
}
}
| PortProducer |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/support/HeaderAssertionTests.java | {
"start": 10491,
"end": 10949
} | class ____ extends AbstractHeaderAssertions<TestExchangeResult, Object> {
TestHeaderAssertions(HttpHeaders headers) {
super(new TestExchangeResult(headers), "");
}
@Override
protected HttpHeaders getResponseHeaders() {
return getExchangeResult().headers();
}
@Override
protected void assertWithDiagnostics(Runnable assertion) {
assertion.run();
}
}
private record TestExchangeResult(HttpHeaders headers) {}
}
| TestHeaderAssertions |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/index/IndexResolution.java | {
"start": 377,
"end": 2304
} | class ____ {
public static IndexResolution valid(EsIndex index) {
Objects.requireNonNull(index, "index must not be null if it was found");
return new IndexResolution(index, null);
}
public static IndexResolution invalid(String invalid) {
Objects.requireNonNull(invalid, "invalid must not be null to signal that the index is invalid");
return new IndexResolution(null, invalid);
}
public static IndexResolution notFound(String name) {
Objects.requireNonNull(name, "name must not be null");
return invalid("Unknown index [" + name + "]");
}
private final EsIndex index;
@Nullable
private final String invalid;
private IndexResolution(EsIndex index, @Nullable String invalid) {
this.index = index;
this.invalid = invalid;
}
public boolean matches(String indexName) {
return isValid() && this.index.name().equals(indexName);
}
/**
* Get the {@linkplain EsIndex}
* @throws MappingException if the index is invalid for use with ql
*/
public EsIndex get() {
if (invalid != null) {
throw new MappingException(invalid);
}
return index;
}
/**
* Is the index valid for use with ql? Returns {@code false} if the
* index wasn't found.
*/
public boolean isValid() {
return invalid == null;
}
@Override
public boolean equals(Object obj) {
if (obj == null || obj.getClass() != getClass()) {
return false;
}
IndexResolution other = (IndexResolution) obj;
return Objects.equals(index, other.index) && Objects.equals(invalid, other.invalid);
}
@Override
public int hashCode() {
return Objects.hash(index, invalid);
}
@Override
public String toString() {
return invalid != null ? invalid : index.name();
}
}
| IndexResolution |
java | google__dagger | javatests/dagger/internal/codegen/FullBindingGraphValidationTest.java | {
"start": 9018,
"end": 9669
} | interface ____");
});
}
@Test
public void moduleIncludingModuleWithCombinedErrors_validationTypeWarning() {
CompilerTests.daggerCompiler(A_MODULE, COMBINED_WITH_A_MODULE_HAS_ERRORS)
.withProcessingOptions(ImmutableMap.of("dagger.fullBindingGraphValidation", "WARNING"))
.compile(
subject -> {
subject.hasErrorCount(0);
subject.hasWarningCount(1);
subject.hasWarningContainingMatch(COMBINED_WITH_A_MODULE_HAS_ERRORS_MESSAGE.pattern())
.onSource(COMBINED_WITH_A_MODULE_HAS_ERRORS)
.onLineContaining(" | CombinedWithAModuleHasErrors |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringMulticastNoStopOnExceptionTest.java | {
"start": 1051,
"end": 1351
} | class ____ extends MulticastNoStopOnExceptionTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/SpringMulticastNoStopOnExceptionTest.xml");
}
}
| SpringMulticastNoStopOnExceptionTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java | {
"start": 1236,
"end": 1335
} | class ____ {
private static final String SUIT =
"""
| StatementSwitchToExpressionSwitchTest |
java | quarkusio__quarkus | extensions/mongodb-client/runtime/src/main/java/io/quarkus/mongodb/reactive/ReactiveMongoDatabase.java | {
"start": 15156,
"end": 16287
} | class ____ decode each document into
* @param <T> the target document type of the iterable
* @param options the stream options
* @return the stream of change events.
*/
<T> Multi<ChangeStreamDocument<T>> watch(List<? extends Bson> pipeline, Class<T> clazz,
ChangeStreamOptions options);
/**
* Creates a change stream for this database.
*
* @param clientSession the client session with which to associate this operation
* @return the stream of change events.
*/
Multi<ChangeStreamDocument<Document>> watch(ClientSession clientSession);
/**
* Creates a change stream for this database.
*
* @param clientSession the client session with which to associate this operation
* @param options the stream options
* @return the stream of change events.
*/
Multi<ChangeStreamDocument<Document>> watch(ClientSession clientSession, ChangeStreamOptions options);
/**
* Creates a change stream for this database.
*
* @param clientSession the client session with which to associate this operation
* @param clazz the | to |
java | google__truth | extensions/proto/src/main/java/com/google/common/truth/extensions/proto/ProtoSubjectBuilder.java | {
"start": 1172,
"end": 1289
} | class ____ extends CustomSubjectBuilder {
/** Factory for ProtoSubjectBuilder. */
private static | ProtoSubjectBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/foreignkeys/disabled/InheritanceManyToManyForeignKeyTest.java | {
"start": 2688,
"end": 2883
} | class ____ {
@Id
@GeneratedValue
private Long id;
@Column(name = "START_DATE", nullable = false)
private LocalDate startDate;
}
@MappedSuperclass
public static abstract | LocalDateEvent |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.