language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-netty-http/src/main/java/org/apache/camel/component/netty/http/NettyHttpEndpoint.java | {
"start": 2447,
"end": 10544
} | class ____ extends NettyEndpoint implements AsyncEndpoint, HeaderFilterStrategyAware {
private static final Logger LOG = LoggerFactory.getLogger(NettyHttpEndpoint.class);
static final String PROXY_NOT_SUPPORTED_MESSAGE = "Netty Http Producer does not support proxy mode";
@UriParam
private NettyHttpConfiguration configuration;
@UriParam(label = "advanced", name = "configuration",
javaType = "org.apache.camel.component.netty.http.NettyHttpConfiguration",
description = "To use a custom configured NettyHttpConfiguration for configuring this endpoint.")
private Object httpConfiguration; // to include in component docs as NettyHttpConfiguration is a @UriParams class
@UriParam(label = "advanced")
private NettyHttpBinding nettyHttpBinding;
@UriParam(label = "advanced")
private HeaderFilterStrategy headerFilterStrategy;
@UriParam(label = "consumer,advanced")
private boolean traceEnabled;
@UriParam(label = "consumer,advanced")
private String httpMethodRestrict;
@UriParam(label = "consumer,advanced")
private NettySharedHttpServer nettySharedHttpServer;
@UriParam(label = "consumer,security")
private NettyHttpSecurityConfiguration securityConfiguration;
@UriParam(label = "consumer,security", prefix = "securityConfiguration.", multiValue = true)
private Map<String, Object> securityOptions; // to include in component docs
@UriParam(label = "producer")
private CookieHandler cookieHandler;
public NettyHttpEndpoint(String endpointUri, NettyHttpComponent component, NettyConfiguration configuration) {
super(endpointUri, component, configuration);
}
@Override
public NettyHttpComponent getComponent() {
return (NettyHttpComponent) super.getComponent();
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
NettyHttpConsumer answer = new NettyHttpConsumer(this, processor, getConfiguration());
configureConsumer(answer);
if (nettySharedHttpServer != null) {
answer.setNettyServerBootstrapFactory(nettySharedHttpServer.getServerBootstrapFactory());
LOG.info("NettyHttpConsumer: {} is using NettySharedHttpServer on port: {}", answer,
nettySharedHttpServer.getPort());
} else {
// reuse pipeline factory for the same address
HttpServerBootstrapFactory factory = getComponent().getOrCreateHttpNettyServerBootstrapFactory(answer);
// force using our server bootstrap factory
answer.setNettyServerBootstrapFactory(factory);
LOG.debug("Created NettyHttpConsumer: {} using HttpServerBootstrapFactory: {}", answer, factory);
}
return answer;
}
@Override
public Producer createProducer() throws Exception {
if (isProxyProtocol()) {
doFail(new IllegalArgumentException(PROXY_NOT_SUPPORTED_MESSAGE));
}
Producer answer = new NettyHttpProducer(this, getConfiguration());
if (getConfiguration().isSynchronous()) {
return new SynchronousDelegateProducer(answer);
} else {
return answer;
}
}
@Override
public PollingConsumer createPollingConsumer() throws Exception {
throw new UnsupportedOperationException("This component does not support polling consumer");
}
@Override
public boolean isLenientProperties() {
// true to allow dynamic URI options to be configured and passed to external system for eg. the HttpProducer
return true;
}
@Override
public void setConfiguration(NettyConfiguration configuration) {
super.setConfiguration(configuration);
this.configuration = (NettyHttpConfiguration) configuration;
}
@Override
public NettyHttpConfiguration getConfiguration() {
return (NettyHttpConfiguration) super.getConfiguration();
}
public NettyHttpBinding getNettyHttpBinding() {
return nettyHttpBinding;
}
/**
* To use a custom org.apache.camel.component.netty.http.NettyHttpBinding for binding to/from Netty and Camel
* Message API.
*/
public void setNettyHttpBinding(NettyHttpBinding nettyHttpBinding) {
this.nettyHttpBinding = nettyHttpBinding;
}
@Override
public HeaderFilterStrategy getHeaderFilterStrategy() {
return headerFilterStrategy;
}
/**
* To use a custom org.apache.camel.spi.HeaderFilterStrategy to filter headers.
*/
@Override
public void setHeaderFilterStrategy(HeaderFilterStrategy headerFilterStrategy) {
this.headerFilterStrategy = headerFilterStrategy;
getNettyHttpBinding().setHeaderFilterStrategy(headerFilterStrategy);
}
public boolean isTraceEnabled() {
return traceEnabled;
}
/**
* Specifies whether to enable HTTP TRACE for this Netty HTTP consumer. By default TRACE is turned off.
*/
public void setTraceEnabled(boolean traceEnabled) {
this.traceEnabled = traceEnabled;
}
public String getHttpMethodRestrict() {
return httpMethodRestrict;
}
/**
* To disable HTTP methods on the Netty HTTP consumer. You can specify multiple separated by comma.
*/
public void setHttpMethodRestrict(String httpMethodRestrict) {
this.httpMethodRestrict = httpMethodRestrict;
}
public NettySharedHttpServer getNettySharedHttpServer() {
return nettySharedHttpServer;
}
/**
* To use a shared Netty HTTP server. See Netty HTTP Server Example for more details.
*/
public void setNettySharedHttpServer(NettySharedHttpServer nettySharedHttpServer) {
this.nettySharedHttpServer = nettySharedHttpServer;
}
public NettyHttpSecurityConfiguration getSecurityConfiguration() {
return securityConfiguration;
}
/**
* Refers to a org.apache.camel.component.netty.http.NettyHttpSecurityConfiguration for configuring secure web
* resources.
*/
public void setSecurityConfiguration(NettyHttpSecurityConfiguration securityConfiguration) {
this.securityConfiguration = securityConfiguration;
}
public Map<String, Object> getSecurityOptions() {
return securityOptions;
}
/**
* To configure NettyHttpSecurityConfiguration using key/value pairs from the map
*/
public void setSecurityOptions(Map<String, Object> securityOptions) {
this.securityOptions = securityOptions;
}
public CookieHandler getCookieHandler() {
return cookieHandler;
}
/**
* Configure a cookie handler to maintain a HTTP session
*/
public void setCookieHandler(CookieHandler cookieHandler) {
this.cookieHandler = cookieHandler;
}
@Override
protected void doInit() throws Exception {
super.doInit();
ObjectHelper.notNull(nettyHttpBinding, "nettyHttpBinding", this);
ObjectHelper.notNull(headerFilterStrategy, "headerFilterStrategy", this);
if (securityConfiguration != null) {
StringHelper.notEmpty(securityConfiguration.getRealm(), "realm", securityConfiguration);
StringHelper.notEmpty(securityConfiguration.getConstraint(), "restricted", securityConfiguration);
if (securityConfiguration.getSecurityAuthenticator() == null) {
// setup default JAAS authenticator if none was configured
JAASSecurityAuthenticator jaas = new JAASSecurityAuthenticator();
jaas.setName(securityConfiguration.getRealm());
LOG.info("No SecurityAuthenticator configured, using JAASSecurityAuthenticator as authenticator: {}", jaas);
securityConfiguration.setSecurityAuthenticator(jaas);
}
}
}
private boolean isProxyProtocol() {
URI baseUri = URI.create(getEndpointBaseUri());
String protocol = baseUri.getScheme();
return protocol != null && protocol.equalsIgnoreCase("proxy");
}
}
| NettyHttpEndpoint |
java | elastic__elasticsearch | server/src/internalClusterTest/java/org/elasticsearch/action/support/master/TransportMasterNodeActionIT.java | {
"start": 11829,
"end": 12200
} | class ____ extends MasterNodeRequest<TestRequest> {
TestRequest() {
super(TEST_REQUEST_TIMEOUT);
}
TestRequest(StreamInput in) throws IOException {
super(in);
}
@Override
public ActionRequestValidationException validate() {
return null;
}
}
public static final | TestRequest |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/proxy/TrustedXForwarderMultipleProxiesFailureTest.java | {
"start": 180,
"end": 546
} | class ____ extends AbstractTrustedXForwarderProxiesTest {
@RegisterExtension
static final QuarkusUnitTest config = createTrustedProxyUnitTest("1.2.3.4", "quarkus.io", "154.5.128.0/17",
"::ffff:154.6.99.64/123");
@Test
public void testHeadersAreIgnored() {
assertRequestFailure();
}
}
| TrustedXForwarderMultipleProxiesFailureTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemamanager/SchemaManagerResyncSequencesPooledLoTest.java | {
"start": 1552,
"end": 1815
} | class ____ {
@Id
@GeneratedValue
@SequenceGenerator(name = "TheSequence", allocationSize = 20)
Long id;
String name;
EntityWithSequence(Long id, String name) {
this.id = id;
this.name = name;
}
EntityWithSequence() {
}
}
}
| EntityWithSequence |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java | {
"start": 65377,
"end": 69538
} | class ____<R> extends NamedBuilder<FieldBuilder<R>> {
private final FieldAssembler<R> fields;
private Schema.Field.Order order = Schema.Field.Order.ASCENDING;
private boolean validatingDefaults = true;
private FieldBuilder(FieldAssembler<R> fields, NameContext names, String name) {
super(names, name);
this.fields = fields;
}
/** Set this field to have ascending order. Ascending is the default **/
public FieldBuilder<R> orderAscending() {
order = Schema.Field.Order.ASCENDING;
return self();
}
/** Set this field to have descending order. Descending is the default **/
public FieldBuilder<R> orderDescending() {
order = Schema.Field.Order.DESCENDING;
return self();
}
/** Set this field to ignore order. **/
public FieldBuilder<R> orderIgnore() {
order = Schema.Field.Order.IGNORE;
return self();
}
/**
* Validate field default value during {@link #completeField(Schema, JsonNode)}.
**/
public FieldBuilder<R> validatingDefaults() {
validatingDefaults = true;
return self();
}
/**
* Skip field default value validation during
* {@link #completeField(Schema, JsonNode)}}
**/
public FieldBuilder<R> notValidatingDefaults() {
validatingDefaults = false;
return self();
}
/**
* Final step in configuring this field, finalizing name, namespace, alias, and
* order.
*
* @return A builder for the field's type and default value.
*/
public FieldTypeBuilder<R> type() {
return new FieldTypeBuilder<>(this);
}
/**
* Final step in configuring this field, finalizing name, namespace, alias, and
* order. Sets the field's type to the provided schema, returns a
* {@link GenericDefault}.
*/
public GenericDefault<R> type(Schema type) {
return new GenericDefault<>(this, type);
}
/**
* Final step in configuring this field, finalizing name, namespace, alias, and
* order. Sets the field's type to the schema by name reference.
* <p/>
* The name must correspond with a named schema that has already been created in
* the context of this builder. The name may be a fully qualified name, or a
* short name. If it is a short name, the namespace context of this builder will
* be used.
* <p/>
* The name and namespace context rules are the same as the Avro schema JSON
* specification.
*/
public GenericDefault<R> type(String name) {
return type(name, null);
}
/**
* Final step in configuring this field, finalizing name, namespace, alias, and
* order. Sets the field's type to the schema by name reference.
* <p/>
* The name must correspond with a named schema that has already been created in
* the context of this builder. The name may be a fully qualified name, or a
* short name. If it is a full name, the namespace is ignored. If it is a short
* name, the namespace provided is used. If the namespace provided is null, the
* namespace context of this builder will be used.
* <p/>
* The name and namespace context rules are the same as the Avro schema JSON
* specification.
*/
public GenericDefault<R> type(String name, String namespace) {
Schema schema = names().get(name, namespace);
return type(schema);
}
private FieldAssembler<R> completeField(Schema schema, Object defaultVal) {
JsonNode defaultNode = defaultVal == null ? NullNode.getInstance() : toJsonNode(defaultVal);
return completeField(schema, defaultNode);
}
private FieldAssembler<R> completeField(Schema schema) {
return completeField(schema, (JsonNode) null);
}
private FieldAssembler<R> completeField(Schema schema, JsonNode defaultVal) {
Field field = new Field(name(), schema, doc(), defaultVal, validatingDefaults, order);
addPropsTo(field);
addAliasesTo(field);
return fields.addField(field);
}
@Override
protected FieldBuilder<R> self() {
return this;
}
}
/** Abstract base | FieldBuilder |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/bindings/conflicting/ConflictingTransitiveBindingOnBeanTest.java | {
"start": 1594,
"end": 1685
} | interface ____ {
}
@FooBinding(1)
@BarBinding
@Dependent
static | BarBinding |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/autowired/Autowired.java | {
"start": 1495,
"end": 2288
} | interface ____ {
/**
* Specify whether the injection point is required.
*
* <p>Setting to {@code false} results in the injection point being optional
* and therefore will silently proceed without failure if there are missing
* beans that don't satisfy the requirements of the injection point.</p>
*
* <p>Setting to {@code true} (the default behaviour) will result in a {@link io.micronaut.context.exceptions.NoSuchBeanException}
* being thrown when the bean is first retrieved.</p>
*
* @return True if it is required.
* @see io.micronaut.context.exceptions.NoSuchBeanException
*/
@AliasFor(
annotation = Inject.class,
member = AnnotationUtil.MEMBER_REQUIRED
)
boolean required() default true;
}
| Autowired |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/DifferentNameButSameTest.java | {
"start": 7696,
"end": 7944
} | interface ____ {
Foo.Builder a();
Builder b();
}
""")
.addOutputLines(
"Test.java",
"""
package pkg;
import pkg.Foo.Builder;
| Test |
java | apache__kafka | group-coordinator/src/test/java/org/apache/kafka/coordinator/group/assignor/UniformHeterogeneousAssignmentBuilderTest.java | {
"start": 2488,
"end": 3391
} | class ____ {
private final UniformAssignor assignor = new UniformAssignor();
private final Uuid topic1Uuid = Uuid.fromString("T1-A4s3VTwiI5CTbEp6POw");
private final Uuid topic2Uuid = Uuid.fromString("T2-B4s3VTwiI5YHbPp6YUe");
private final Uuid topic3Uuid = Uuid.fromString("T3-CU8fVTLCz5YMkLoDQsa");
private final Uuid topic4Uuid = Uuid.fromString("T4-Tw9fVTLCz5HbPp6YQsa");
private final String topic1Name = "topic1";
private final String topic2Name = "topic2";
private final String topic3Name = "topic3";
private final String topic4Name = "topic4";
private final String memberA = "A";
private final String memberB = "B";
private final String memberC = "C";
/**
* A GroupSpec implementation that returns members in sorted order, so that assignor output is
* deterministic.
*/
private static | UniformHeterogeneousAssignmentBuilderTest |
java | quarkusio__quarkus | extensions/oidc/runtime/src/test/java/io/quarkus/oidc/runtime/OidcProviderTest.java | {
"start": 1163,
"end": 14006
} | class ____ {
@Test
public void testAlgorithmCustomizer() throws Exception {
RsaJsonWebKey rsaJsonWebKey = RsaJwkGenerator.generateJwk(2048);
rsaJsonWebKey.setKeyId("k1");
final String token = Jwt.issuer("http://keycloak/realm").jws().keyId("k1").sign(rsaJsonWebKey.getPrivateKey());
final String newToken = replaceAlgorithm(token, "ES256");
JsonWebKeySet jwkSet = new JsonWebKeySet("{\"keys\": [" + rsaJsonWebKey.toJson() + "]}");
OidcTenantConfig oidcConfig = new OidcTenantConfig();
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet)) {
try {
provider.verifyJwtToken(newToken, false, false, null);
fail("InvalidJwtException expected");
} catch (InvalidJwtException ex) {
// continue
}
}
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet, new TokenCustomizer() {
@Override
public JsonObject customizeHeaders(JsonObject headers) {
return Json.createObjectBuilder(headers).add("alg", "RS256").build();
}
}, null)) {
TokenVerificationResult result = provider.verifyJwtToken(newToken, false, false, null);
assertEquals("http://keycloak/realm", result.localVerificationResult.getString("iss"));
}
}
@Test
public void testTokenWithoutKidSingleRsaJwkWithoutKid() throws Exception {
RsaJsonWebKey rsaJsonWebKey = RsaJwkGenerator.generateJwk(2048);
EllipticCurveJsonWebKey ecJsonWebKey = EcJwkGenerator.generateJwk(EllipticCurves.P256);
JsonWebKeySet jwkSet = new JsonWebKeySet("{\"keys\": [" + rsaJsonWebKey.toJson() + "," + ecJsonWebKey.toJson() + "]}");
final String token = Jwt.issuer("http://keycloak/realm").sign(rsaJsonWebKey.getPrivateKey());
try (OidcProvider provider = new OidcProvider(null, new OidcTenantConfig(), jwkSet)) {
TokenVerificationResult result = provider.verifyJwtToken(token, false, false, null);
assertEquals("http://keycloak/realm", result.localVerificationResult.getString("iss"));
}
}
@Test
public void testTokenWithoutKidMultipleRSAJwkWithoutKid() throws Exception {
RsaJsonWebKey rsaJsonWebKey1 = RsaJwkGenerator.generateJwk(2048);
RsaJsonWebKey rsaJsonWebKey2 = RsaJwkGenerator.generateJwk(2048);
JsonWebKeySet jwkSet = new JsonWebKeySet(
"{\"keys\": [" + rsaJsonWebKey1.toJson() + "," + rsaJsonWebKey2.toJson() + "]}");
final String token = Jwt.issuer("http://keycloak/realm").sign(rsaJsonWebKey1.getPrivateKey());
try (OidcProvider provider = new OidcProvider(null, new OidcTenantConfig(), jwkSet)) {
try {
provider.verifyJwtToken(token, false, false, null);
fail("InvalidJwtException expected");
} catch (InvalidJwtException ex) {
assertTrue(ex.getCause() instanceof UnresolvableKeyException);
}
}
}
@Test
public void testTokenWithoutKidMultipleRSAJwkWithoutKidTryAll() throws Exception {
RsaJsonWebKey rsaJsonWebKey1 = RsaJwkGenerator.generateJwk(2048);
RsaJsonWebKey rsaJsonWebKey2 = RsaJwkGenerator.generateJwk(2048);
JsonWebKeySet jwkSet = new JsonWebKeySet(
"{\"keys\": [" + rsaJsonWebKey1.toJson() + "," + rsaJsonWebKey2.toJson() + "]}");
final String token = Jwt.issuer("http://keycloak/realm").sign(rsaJsonWebKey2.getPrivateKey());
final OidcTenantConfig config = new OidcTenantConfig();
config.jwks.tryAll = true;
try (OidcProvider provider = new OidcProvider(null, config, jwkSet)) {
TokenVerificationResult result = provider.verifyJwtToken(token, false, false, null);
assertEquals("http://keycloak/realm", result.localVerificationResult.getString("iss"));
}
}
@Test
public void testTokenWithoutKidMultipleRSAJwkWithoutKidTryAllNoMatching() throws Exception {
RsaJsonWebKey rsaJsonWebKey1 = RsaJwkGenerator.generateJwk(2048);
RsaJsonWebKey rsaJsonWebKey2 = RsaJwkGenerator.generateJwk(2048);
RsaJsonWebKey rsaJsonWebKey3 = RsaJwkGenerator.generateJwk(2048);
JsonWebKeySet jwkSet = new JsonWebKeySet(
"{\"keys\": [" + rsaJsonWebKey1.toJson() + "," + rsaJsonWebKey2.toJson() + "]}");
final String token = Jwt.issuer("http://keycloak/realm").sign(rsaJsonWebKey3.getPrivateKey());
final OidcTenantConfig config = new OidcTenantConfig();
config.jwks.tryAll = true;
try (OidcProvider provider = new OidcProvider(null, config, jwkSet)) {
try {
provider.verifyJwtToken(token, false, false, null);
fail("InvalidJwtException expected");
} catch (InvalidJwtException ex) {
assertTrue(ex.getCause() instanceof UnresolvableKeyException);
}
}
}
private static String replaceAlgorithm(String token, String algorithm) {
io.vertx.core.json.JsonObject headers = OidcUtils.decodeJwtHeaders(token);
headers.put("alg", algorithm);
String newHeaders = new String(
Base64.getUrlEncoder().withoutPadding().encode(headers.toString().getBytes()),
StandardCharsets.UTF_8);
int dotIndex = token.indexOf('.');
return newHeaders + token.substring(dotIndex);
}
@Test
public void testSubject() throws Exception {
RsaJsonWebKey rsaJsonWebKey = RsaJwkGenerator.generateJwk(2048);
rsaJsonWebKey.setKeyId("k1");
JsonWebKeySet jwkSet = new JsonWebKeySet("{\"keys\": [" + rsaJsonWebKey.toJson() + "]}");
OidcTenantConfig oidcConfig = new OidcTenantConfig();
oidcConfig.token.subjectRequired = true;
final String tokenWithSub = Jwt.subject("subject").jws().keyId("k1").sign(rsaJsonWebKey.getPrivateKey());
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet)) {
TokenVerificationResult result = provider.verifyJwtToken(tokenWithSub, false, true, null);
assertEquals("subject", result.localVerificationResult.getString(Claims.sub.name()));
}
final String tokenWithoutSub = Jwt.claims().jws().keyId("k1").sign(rsaJsonWebKey.getPrivateKey());
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet)) {
try {
provider.verifyJwtToken(tokenWithoutSub, false, true, null);
fail("InvalidJwtException expected");
} catch (InvalidJwtException ex) {
assertTrue(ex.getMessage().contains("No Subject (sub) claim is present"));
}
}
}
@Test
public void testNonce() throws Exception {
RsaJsonWebKey rsaJsonWebKey = RsaJwkGenerator.generateJwk(2048);
rsaJsonWebKey.setKeyId("k1");
JsonWebKeySet jwkSet = new JsonWebKeySet("{\"keys\": [" + rsaJsonWebKey.toJson() + "]}");
OidcTenantConfig oidcConfig = new OidcTenantConfig();
oidcConfig.authentication.nonceRequired = true;
final String tokenWithNonce = Jwt.claim("nonce", "123456").jws().keyId("k1").sign(rsaJsonWebKey.getPrivateKey());
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet)) {
TokenVerificationResult result = provider.verifyJwtToken(tokenWithNonce, false, false, "123456");
assertEquals("123456", result.localVerificationResult.getString(Claims.nonce.name()));
}
final String tokenWithoutNonce = Jwt.claims().jws().keyId("k1").sign(rsaJsonWebKey.getPrivateKey());
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet)) {
try {
provider.verifyJwtToken(tokenWithoutNonce, false, false, "123456");
fail("InvalidJwtException expected");
} catch (InvalidJwtException ex) {
assertTrue(ex.getMessage().contains("claim nonce is missing"));
}
}
}
@Test
public void testAge() throws Exception {
String tokenPayload = "{\n" +
" \"exp\": " + Instant.now().plusSeconds(1000).getEpochSecond() + "\n" +
"}";
JsonWebSignature jws = new JsonWebSignature();
jws.setPayload(tokenPayload);
jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256);
RsaJsonWebKey rsaJsonWebKey = RsaJwkGenerator.generateJwk(2048);
jws.setKey(rsaJsonWebKey.getPrivateKey());
String token = jws.getCompactSerialization();
JsonWebKeySet jwkSet = new JsonWebKeySet("{\"keys\": [" + rsaJsonWebKey.toJson() + "]}");
OidcTenantConfig oidcConfig = new OidcTenantConfig();
oidcConfig.token.issuedAtRequired = false;
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet)) {
TokenVerificationResult result = provider.verifyJwtToken(token, false, false, null);
assertNull(result.localVerificationResult.getString(Claims.iat.name()));
}
OidcTenantConfig oidcConfigRequireAge = new OidcTenantConfig();
oidcConfigRequireAge.token.issuedAtRequired = true;
try (OidcProvider provider = new OidcProvider(null, oidcConfigRequireAge, jwkSet)) {
try {
provider.verifyJwtToken(token, false, false, null);
fail("InvalidJwtException expected");
} catch (InvalidJwtException ex) {
assertTrue(ex.getMessage().contains("No Issued At (iat) claim present."));
}
}
}
@Test
public void testJwtValidators() throws Exception {
RsaJsonWebKey rsaJsonWebKey = RsaJwkGenerator.generateJwk(2048);
rsaJsonWebKey.setKeyId("k1");
JsonWebKeySet jwkSet = new JsonWebKeySet("{\"keys\": [" + rsaJsonWebKey.toJson() + "]}");
OidcTenantConfig oidcConfig = new OidcTenantConfig();
String token = Jwt.claim("claim1", "claimValue1").claim("claim2", "claimValue2").jws().keyId("k1")
.sign(rsaJsonWebKey.getPrivateKey());
// no validators
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet, null, null)) {
TokenVerificationResult result = provider.verifyJwtToken(token, false, false, null);
assertEquals("claimValue1", result.localVerificationResult.getString("claim1"));
assertEquals("claimValue2", result.localVerificationResult.getString("claim2"));
}
// one validator
Validator validator1 = new Validator() {
@Override
public String validate(JwtContext jwtContext) throws MalformedClaimException {
if (jwtContext.getJwtClaims().hasClaim("claim1")) {
return "Claim1 is not allowed!";
}
return null;
}
};
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet, null, List.of(validator1))) {
try {
provider.verifyJwtToken(token, false, false, null);
fail("InvalidJwtException expected");
} catch (InvalidJwtException ex) {
assertTrue(ex.getMessage().contains("Claim1 is not allowed!"));
}
}
// two validators
Validator validator2 = new Validator() {
@Override
public String validate(JwtContext jwtContext) throws MalformedClaimException {
if (jwtContext.getJwtClaims().hasClaim("claim2")) {
return "Claim2 is not allowed!";
}
return null;
}
};
// check the first validator is still run
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet, null, List.of(validator1, validator2))) {
try {
provider.verifyJwtToken(token, false, false, null);
fail("InvalidJwtException expected");
} catch (InvalidJwtException ex) {
assertTrue(ex.getMessage().contains("Claim1 is not allowed!"));
}
}
// check the second validator is applied
token = Jwt.claim("claim2", "claimValue2").jws().keyId("k1").sign(rsaJsonWebKey.getPrivateKey());
try (OidcProvider provider = new OidcProvider(null, oidcConfig, jwkSet, null, List.of(validator1, validator2))) {
try {
provider.verifyJwtToken(token, false, false, null);
fail("InvalidJwtException expected");
} catch (InvalidJwtException ex) {
assertTrue(ex.getMessage().contains("Claim2 is not allowed!"));
}
}
}
}
| OidcProviderTest |
java | grpc__grpc-java | core/src/main/java/io/grpc/internal/WritableBuffer.java | {
"start": 638,
"end": 831
} | interface ____ a byte buffer that can only be written to.
* {@link WritableBuffer}s are a generic way to transfer bytes to
* the concrete network transports, like Netty and OkHttp.
*/
public | for |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/cors/DefaultCorsProcessorTests.java | {
"start": 1371,
"end": 27285
} | class ____ {
private MockHttpServletRequest request;
private MockHttpServletResponse response;
private DefaultCorsProcessor processor;
private CorsConfiguration conf;
@BeforeEach
void setup() {
this.request = new MockHttpServletRequest();
this.request.setRequestURI("/test.html");
this.request.setServerName("domain1.example");
this.conf = new CorsConfiguration();
this.response = new MockHttpServletResponse();
this.response.setStatus(HttpServletResponse.SC_OK);
this.processor = new DefaultCorsProcessor();
}
@Test
void requestWithoutOriginHeader() throws Exception {
this.request.setMethod(HttpMethod.GET.name());
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isFalse();
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void sameOriginRequest() throws Exception {
this.request.setMethod(HttpMethod.GET.name());
this.request.addHeader(HttpHeaders.ORIGIN, "http://domain1.example");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isFalse();
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void actualRequestWithOriginHeader() throws Exception {
this.request.setMethod(HttpMethod.GET.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isFalse();
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_FORBIDDEN);
}
@Test
void actualRequestWithOriginHeaderAndNullConfig() throws Exception {
this.request.setMethod(HttpMethod.GET.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.processor.processRequest(null, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isFalse();
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void actualRequestWithOriginHeaderAndAllowedOrigin() throws Exception {
this.request.setMethod(HttpMethod.GET.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.conf.addAllowedOrigin("*");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo("*");
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_MAX_AGE)).isFalse();
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS)).isFalse();
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void actualRequestCredentials() throws Exception {
this.request.setMethod(HttpMethod.GET.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.conf.addAllowedOrigin("https://domain1.com");
this.conf.addAllowedOrigin("https://domain2.com");
this.conf.addAllowedOrigin("http://domain3.example");
this.conf.setAllowCredentials(true);
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo("https://domain2.com");
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS)).isEqualTo("true");
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void actualRequestCredentialsWithWildcardOrigin() throws Exception {
this.request.setMethod(HttpMethod.GET.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.conf.addAllowedOrigin("*");
this.conf.setAllowCredentials(true);
assertThatIllegalArgumentException()
.isThrownBy(() -> this.processor.processRequest(this.conf, this.request, this.response));
this.conf.setAllowedOrigins(null);
this.conf.addAllowedOriginPattern("*");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo("https://domain2.com");
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS)).isEqualTo("true");
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void actualRequestCaseInsensitiveOriginMatch() throws Exception {
    // Origin matching must ignore case: the request uses lower-case while the
    // configuration registers the same host in upper-case.
    String requestOrigin = "https://domain2.com";
    this.request.setMethod(HttpMethod.GET.name());
    this.request.addHeader(HttpHeaders.ORIGIN, requestOrigin);
    this.conf.addAllowedOrigin("https://DOMAIN2.com");
    this.processor.processRequest(this.conf, this.request, this.response);
    assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
    assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
}
@Test // gh-26892
void actualRequestTrailingSlashOriginMatch() throws Exception {
    // An Origin value carrying a trailing slash must still match the configured
    // origin that has none. Package-private visibility for consistency with the
    // other JUnit 5 test methods in this class.
    this.request.setMethod(HttpMethod.GET.name());
    this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com/");
    this.conf.addAllowedOrigin("https://domain2.com");
    this.processor.processRequest(this.conf, this.request, this.response);
    assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
    assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
}
@Test // gh-33682
void actualRequestMalformedOriginRejected() throws Exception {
    // A syntactically invalid Origin header must be rejected outright: the processor
    // returns false, sends 403, and emits no Access-Control-Allow-Origin header.
    // Package-private visibility for consistency with the other JUnit 5 tests here.
    this.request.setMethod(HttpMethod.GET.name());
    this.request.addHeader(HttpHeaders.ORIGIN, "http://*@:;");
    this.conf.addAllowedOrigin("https://domain2.com");
    boolean result = this.processor.processRequest(this.conf, this.request, this.response);
    assertThat(result).isFalse();
    assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isFalse();
    assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
            HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
    assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_FORBIDDEN);
}
@Test
void actualRequestExposedHeaders() throws Exception {
    // Each configured exposed header must be echoed back via Access-Control-Expose-Headers.
    String origin = "https://domain2.com";
    this.request.setMethod(HttpMethod.GET.name());
    this.request.addHeader(HttpHeaders.ORIGIN, origin);
    this.conf.addExposedHeader("header1");
    this.conf.addExposedHeader("header2");
    this.conf.addAllowedOrigin(origin);
    this.processor.processRequest(this.conf, this.request, this.response);
    assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
    assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo(origin);
    assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS)).isTrue();
    String exposed = this.response.getHeader(HttpHeaders.ACCESS_CONTROL_EXPOSE_HEADERS);
    assertThat(exposed).contains("header1");
    assertThat(exposed).contains("header2");
    assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
            HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
    assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestAllOriginsAllowed() throws Exception {
// A wildcard allowed-origin accepts the preflight: expect 200 and the CORS Vary headers.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.conf.addAllowedOrigin("*");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestWrongAllowedMethod() throws Exception {
// DELETE is not among the allowed methods for this config, so the preflight must be forbidden.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "DELETE");
this.conf.addAllowedOrigin("*");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_FORBIDDEN);
}
@Test
void preflightRequestMatchedAllowedMethod() throws Exception {
// GET is permitted; the Allow-Methods response lists "GET,HEAD" for this configuration.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.conf.addAllowedOrigin("*");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS)).isEqualTo("GET,HEAD");
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
}
@Test
void preflightRequestTestWithOriginButWithoutOtherHeaders() throws Exception {
// OPTIONS with an Origin but no Access-Control-Request-Method, against an empty config:
// rejected with 403 and no Allow-Origin header, but Vary headers are still added.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isFalse();
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_FORBIDDEN);
}
@Test
void preflightRequestWithoutRequestMethod() throws Exception {
// Request headers are announced but the Access-Control-Request-Method header is
// missing: the request must be rejected with 403 and no Allow-Origin header.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Header1");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isFalse();
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_FORBIDDEN);
}
@Test
void preflightRequestWithRequestAndMethodHeaderButNoConfig() throws Exception {
// A well-formed preflight against a config with no allowed origins is still rejected.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Header1");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isFalse();
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_FORBIDDEN);
}
@Test
void preflightRequestValidRequestAndConfig() throws Exception {
// Fully configured preflight (no credentials): origin echoed literally as "*",
// configured methods listed verbatim, and no Max-Age since none was configured.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Header1");
this.conf.addAllowedOrigin("*");
this.conf.addAllowedMethod("GET");
this.conf.addAllowedMethod("PUT");
this.conf.addAllowedHeader("header1");
this.conf.addAllowedHeader("header2");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo("*");
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS)).isEqualTo("GET,PUT");
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_MAX_AGE)).isFalse();
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestCredentials() throws Exception {
// With credentials enabled and explicit origins, the matching request origin is
// echoed back exactly and Allow-Credentials is set to "true".
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Header1");
this.conf.addAllowedOrigin("https://domain1.com");
this.conf.addAllowedOrigin("https://domain2.com");
this.conf.addAllowedOrigin("http://domain3.example");
this.conf.addAllowedHeader("Header1");
this.conf.setAllowCredentials(true);
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo("https://domain2.com");
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_CREDENTIALS)).isEqualTo("true");
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestCredentialsWithWildcardOrigin() throws Exception {
// Phase 1: a literal "*" among allowed origins with credentials enabled must be rejected.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Header1");
this.conf.setAllowedOrigins(Arrays.asList("https://domain1.com", "*", "http://domain3.example"));
this.conf.addAllowedHeader("Header1");
this.conf.setAllowCredentials(true);
assertThatIllegalArgumentException().isThrownBy(() ->
this.processor.processRequest(this.conf, this.request, this.response));
// Phase 2: switching to a "*" origin *pattern* is accepted; the concrete origin is echoed.
this.conf.setAllowedOrigins(null);
this.conf.addAllowedOriginPattern("*");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo("https://domain2.com");
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestPrivateNetworkWithWildcardOrigin() throws Exception {
// Phase 1: literal "*" among allowed origins with allowPrivateNetwork=true is rejected.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Header1");
this.request.addHeader(DefaultCorsProcessor.ACCESS_CONTROL_REQUEST_PRIVATE_NETWORK, "true");
this.conf.setAllowedOrigins(Arrays.asList("https://domain1.com", "*", "http://domain3.example"));
this.conf.addAllowedHeader("Header1");
this.conf.setAllowPrivateNetwork(true);
assertThatIllegalArgumentException().isThrownBy(() ->
this.processor.processRequest(this.conf, this.request, this.response));
// Phase 2: with a "*" origin pattern the request passes and the private-network
// permission is acknowledged in the response.
this.conf.setAllowedOrigins(null);
this.conf.addAllowedOriginPattern("*");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.containsHeader(DefaultCorsProcessor.ACCESS_CONTROL_ALLOW_PRIVATE_NETWORK)).isTrue();
assertThat(this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo("https://domain2.com");
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestAllowedHeaders() throws Exception {
    // Only the headers that were both requested and configured are echoed back;
    // a configured-but-unrequested header (Header3) must not appear.
    String origin = "https://domain2.com";
    this.request.setMethod(HttpMethod.OPTIONS.name());
    this.request.addHeader(HttpHeaders.ORIGIN, origin);
    this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
    this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Header1, Header2");
    this.conf.addAllowedHeader("Header1");
    this.conf.addAllowedHeader("Header2");
    this.conf.addAllowedHeader("Header3");
    this.conf.addAllowedOrigin(origin);
    this.processor.processRequest(this.conf, this.request, this.response);
    assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
    assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS)).isTrue();
    String allowedHeaders = this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS);
    assertThat(allowedHeaders).contains("Header1");
    assertThat(allowedHeaders).contains("Header2");
    assertThat(allowedHeaders).doesNotContain("Header3");
    assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
            HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
    assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestAllowsAllHeaders() throws Exception {
    // With a "*" allowed-header config the response lists the concrete requested
    // headers rather than echoing the wildcard itself.
    String origin = "https://domain2.com";
    this.request.setMethod(HttpMethod.OPTIONS.name());
    this.request.addHeader(HttpHeaders.ORIGIN, origin);
    this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
    this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Header1, Header2");
    this.conf.addAllowedHeader("*");
    this.conf.addAllowedOrigin(origin);
    this.processor.processRequest(this.conf, this.request, this.response);
    assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
    assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS)).isTrue();
    String allowedHeaders = this.response.getHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS);
    assertThat(allowedHeaders).contains("Header1");
    assertThat(allowedHeaders).contains("Header2");
    assertThat(allowedHeaders).doesNotContain("*");
    assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
            HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
    assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestWithEmptyHeaders() throws Exception {
// An empty Access-Control-Request-Headers value yields no Allow-Headers response
// header, but the preflight itself still succeeds.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "");
this.conf.addAllowedHeader("*");
this.conf.addAllowedOrigin("https://domain2.com");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS)).isFalse();
assertThat(this.response.getHeaders(HttpHeaders.VARY)).contains(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestWithNullConfig() throws Exception {
// Passing a null config: no CORS headers are written and the request is let through (200).
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.conf.addAllowedOrigin("*");
this.processor.processRequest(null, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isFalse();
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS)).isFalse();
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preventDuplicatedVaryHeaders() throws Exception {
// Vary values already present on the response must not be added a second time.
this.request.setMethod(HttpMethod.GET.name());
this.response.addHeader(HttpHeaders.VARY, HttpHeaders.ORIGIN);
this.response.addHeader(HttpHeaders.VARY, HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD);
this.response.addHeader(HttpHeaders.VARY, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.getHeaders(HttpHeaders.VARY)).containsOnlyOnce(HttpHeaders.ORIGIN,
HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS);
}
@Test
void preflightRequestWithoutAccessControlRequestPrivateNetwork() throws Exception {
// No private-network request header sent: the private-network response header is absent.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.conf.addAllowedHeader("*");
this.conf.addAllowedOrigin("https://domain2.com");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.containsHeader(DefaultCorsProcessor.ACCESS_CONTROL_ALLOW_PRIVATE_NETWORK)).isFalse();
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestWithAccessControlRequestPrivateNetworkNotAllowed() throws Exception {
// Private network requested by the client but not allowed by the config:
// the preflight still succeeds, but without the private-network response header.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.request.addHeader(DefaultCorsProcessor.ACCESS_CONTROL_REQUEST_PRIVATE_NETWORK, "true");
this.conf.addAllowedHeader("*");
this.conf.addAllowedOrigin("https://domain2.com");
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.containsHeader(DefaultCorsProcessor.ACCESS_CONTROL_ALLOW_PRIVATE_NETWORK)).isFalse();
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
@Test
void preflightRequestWithAccessControlRequestPrivateNetworkAllowed() throws Exception {
// Private network requested and permitted by the config: the response carries
// the Access-Control-Allow-Private-Network header.
this.request.setMethod(HttpMethod.OPTIONS.name());
this.request.addHeader(HttpHeaders.ORIGIN, "https://domain2.com");
this.request.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD, "GET");
this.request.addHeader(DefaultCorsProcessor.ACCESS_CONTROL_REQUEST_PRIVATE_NETWORK, "true");
this.conf.addAllowedHeader("*");
this.conf.addAllowedOrigin("https://domain2.com");
this.conf.setAllowPrivateNetwork(true);
this.processor.processRequest(this.conf, this.request, this.response);
assertThat(this.response.containsHeader(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN)).isTrue();
assertThat(this.response.containsHeader(DefaultCorsProcessor.ACCESS_CONTROL_ALLOW_PRIVATE_NETWORK)).isTrue();
assertThat(this.response.getStatus()).isEqualTo(HttpServletResponse.SC_OK);
}
}
| DefaultCorsProcessorTests |
java | apache__camel | core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedBacklogDebugger.java | {
"start": 1806,
"end": 17836
} | class ____ implements ManagedBacklogDebuggerMBean {
private final CamelContext camelContext;
private final DefaultBacklogDebugger backlogDebugger;
public ManagedBacklogDebugger(CamelContext camelContext, DefaultBacklogDebugger backlogDebugger) {
this.camelContext = camelContext;
this.backlogDebugger = backlogDebugger;
}
public void init(ManagementStrategy strategy) {
// do nothing
}
public CamelContext getContext() {
return camelContext;
}
public DefaultBacklogDebugger getBacklogDebugger() {
return backlogDebugger;
}
@Override
public String getCamelId() {
return camelContext.getName();
}
@Override
public String getCamelManagementName() {
return camelContext.getManagementName();
}
@Override
public String getLoggingLevel() {
return backlogDebugger.getLoggingLevel();
}
@Override
public void setLoggingLevel(String level) {
backlogDebugger.setLoggingLevel(level);
}
@Override
public boolean isEnabled() {
return backlogDebugger.isEnabled();
}
@Override
public boolean isStandby() {
return backlogDebugger.isStandby();
}
@Override
public void enableDebugger() {
backlogDebugger.enableDebugger();
}
@Override
public void disableDebugger() {
backlogDebugger.disableDebugger();
}
@Override
public void addBreakpoint(String nodeId) {
backlogDebugger.addBreakpoint(nodeId);
}
@Override
public void addConditionalBreakpoint(String nodeId, String language, String predicate) {
backlogDebugger.addConditionalBreakpoint(nodeId, language, predicate);
}
@Override
public void removeBreakpoint(String nodeId) {
backlogDebugger.removeBreakpoint(nodeId);
}
@Override
public void removeAllBreakpoints() {
backlogDebugger.removeAllBreakpoints();
}
@Override
public Set<String> breakpoints() {
return backlogDebugger.getBreakpoints();
}
@Override
public void resumeBreakpoint(String nodeId) {
backlogDebugger.resumeBreakpoint(nodeId);
}
@Override
public void setMessageBodyOnBreakpoint(String nodeId, Object body) {
backlogDebugger.setMessageBodyOnBreakpoint(nodeId, body);
}
@Override
public void setMessageBodyOnBreakpoint(String nodeId, Object body, String type) {
try {
Class<?> classType = camelContext.getClassResolver().resolveMandatoryClass(type);
backlogDebugger.setMessageBodyOnBreakpoint(nodeId, body, classType);
} catch (ClassNotFoundException e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
@Override
public void removeMessageBodyOnBreakpoint(String nodeId) {
backlogDebugger.removeMessageBodyOnBreakpoint(nodeId);
}
@Override
public void setMessageHeaderOnBreakpoint(String nodeId, String headerName, Object value) {
try {
backlogDebugger.setMessageHeaderOnBreakpoint(nodeId, headerName, value);
} catch (NoTypeConversionAvailableException e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
@Override
public void setMessageHeaderOnBreakpoint(String nodeId, String headerName, Object value, String type) {
try {
Class<?> classType = camelContext.getClassResolver().resolveMandatoryClass(type);
backlogDebugger.setMessageHeaderOnBreakpoint(nodeId, headerName, value, classType);
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
@Override
public void removeMessageHeaderOnBreakpoint(String nodeId, String headerName) {
backlogDebugger.removeMessageHeaderOnBreakpoint(nodeId, headerName);
}
@Override
public void resumeAll() {
backlogDebugger.resumeAll();
}
@Override
public void stepBreakpoint(String nodeId) {
backlogDebugger.stepBreakpoint(nodeId);
}
@Override
public boolean isSuspendedMode() {
return backlogDebugger.isSuspendMode();
}
@Override
public boolean isSingleStepMode() {
return backlogDebugger.isSingleStepMode();
}
@Override
public void step() {
backlogDebugger.step();
}
@Override
public void stepOver() {
backlogDebugger.stepOver();
}
@Override
public void skipOver() {
backlogDebugger.skipOver();
}
@Override
public Set<String> suspendedBreakpointNodeIds() {
return backlogDebugger.getSuspendedBreakpointNodeIds();
}
@Override
public Set<String> suspendedBreakpointExchangeIds() {
return backlogDebugger.getSuspendedExchangeIds();
}
@Override
public void disableBreakpoint(String nodeId) {
backlogDebugger.disableBreakpoint(nodeId);
}
@Override
public void enableBreakpoint(String nodeId) {
backlogDebugger.enableBreakpoint(nodeId);
}
@Override
public int getBodyMaxChars() {
return backlogDebugger.getBodyMaxChars();
}
@Override
public void setBodyMaxChars(int bodyMaxChars) {
backlogDebugger.setBodyMaxChars(bodyMaxChars);
}
@Override
public boolean isIncludeExchangeProperties() {
return backlogDebugger.isIncludeExchangeProperties();
}
@Override
public void setIncludeExchangeProperties(boolean includeExchangeProperties) {
backlogDebugger.setIncludeExchangeProperties(includeExchangeProperties);
}
@Override
public boolean isIncludeExchangeVariables() {
return backlogDebugger.isIncludeExchangeVariables();
}
@Override
public void setIncludeExchangeVariables(boolean includeExchangeVariables) {
backlogDebugger.setIncludeExchangeVariables(includeExchangeVariables);
}
@Override
public boolean isBodyIncludeStreams() {
return backlogDebugger.isBodyIncludeStreams();
}
@Override
public void setBodyIncludeStreams(boolean bodyIncludeStreams) {
backlogDebugger.setBodyIncludeStreams(bodyIncludeStreams);
}
@Override
public boolean isBodyIncludeFiles() {
return backlogDebugger.isBodyIncludeFiles();
}
@Override
public void setBodyIncludeFiles(boolean bodyIncludeFiles) {
backlogDebugger.setBodyIncludeFiles(bodyIncludeFiles);
}
@Override
public String dumpTracedMessagesAsXml(String nodeId) {
return backlogDebugger.dumpTracedMessagesAsXml(nodeId);
}
@Override
@Deprecated(since = "4.2.0")
public String dumpTracedMessagesAsXml(String nodeId, boolean includeExchangeProperties) {
return dumpTracedMessagesAsXml(nodeId);
}
@Override
public String dumpTracedMessagesAsJSon(String nodeId) {
return backlogDebugger.dumpTracedMessagesAsJSon(nodeId);
}
@Override
public long getDebugCounter() {
return backlogDebugger.getDebugCounter();
}
@Override
public void resetDebugCounter() {
backlogDebugger.resetDebugCounter();
}
@Override
public String validateConditionalBreakpoint(String language, String predicate) {
Language lan = null;
try {
lan = camelContext.resolveLanguage(language);
lan.createPredicate(predicate);
return null;
} catch (Exception e) {
if (lan == null) {
return e.getMessage();
} else {
return "Invalid syntax " + predicate + " due: " + e.getMessage();
}
}
}
@Override
public long getFallbackTimeout() {
return backlogDebugger.getFallbackTimeout();
}
@Override
public void setFallbackTimeout(long fallbackTimeout) {
backlogDebugger.setFallbackTimeout(fallbackTimeout);
}
@Override
public String evaluateExpressionAtBreakpoint(String nodeId, String language, String expression) {
return evaluateExpressionAtBreakpoint(nodeId, language, expression, "java.lang.String").toString();
}
@Override
public void setExchangePropertyOnBreakpoint(String nodeId, String exchangePropertyName, Object value) {
try {
backlogDebugger.setExchangePropertyOnBreakpoint(nodeId, exchangePropertyName, value);
} catch (NoTypeConversionAvailableException e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
@Override
public void setExchangePropertyOnBreakpoint(String nodeId, String exchangePropertyName, Object value, String type) {
try {
Class<?> classType = camelContext.getClassResolver().resolveMandatoryClass(type);
backlogDebugger.setExchangePropertyOnBreakpoint(nodeId, exchangePropertyName, value, classType);
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
@Override
public void removeExchangePropertyOnBreakpoint(String nodeId, String exchangePropertyName) {
backlogDebugger.removeExchangePropertyOnBreakpoint(nodeId, exchangePropertyName);
}
@Override
public void setExchangeVariableOnBreakpoint(String nodeId, String variableName, Object value) {
try {
backlogDebugger.setExchangeVariableOnBreakpoint(nodeId, variableName, value);
} catch (NoTypeConversionAvailableException e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
@Override
public void setExchangeVariableOnBreakpoint(String nodeId, String variableName, Object value, String type) {
try {
Class<?> classType = camelContext.getClassResolver().resolveMandatoryClass(type);
backlogDebugger.setExchangeVariableOnBreakpoint(nodeId, variableName, value, classType);
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
}
}
@Override
public void removeExchangeVariableOnBreakpoint(String nodeId, String variableName) {
backlogDebugger.removeExchangeVariableOnBreakpoint(nodeId, variableName);
}
@Override
public Object evaluateExpressionAtBreakpoint(String nodeId, String language, String expression, String resultType) {
Exchange suspendedExchange;
try {
Language lan = camelContext.resolveLanguage(language);
suspendedExchange = backlogDebugger.getSuspendedExchange(nodeId);
if (suspendedExchange != null) {
Object result;
Class<?> resultClass = camelContext.getClassResolver().resolveMandatoryClass(resultType);
if (!Boolean.class.isAssignableFrom(resultClass)) {
Expression expr = lan.createExpression(expression);
expr.init(camelContext);
result = expr.evaluate(suspendedExchange, resultClass);
} else {
Predicate pred = lan.createPredicate(expression);
pred.init(camelContext);
result = pred.matches(suspendedExchange);
}
//Test if result is serializable
if (!isSerializable(result)) {
String resultStr = suspendedExchange.getContext().getTypeConverter().tryConvertTo(String.class, result);
if (resultStr != null) {
result = resultStr;
}
}
return result;
}
} catch (Exception e) {
return e.getMessage();
}
return null;
}
@Override
public String messageHistoryOnBreakpointAsXml(String nodeId) {
StringBuilder messageHistoryBuilder = new StringBuilder();
messageHistoryBuilder.append("<messageHistory>\n");
Exchange suspendedExchange = backlogDebugger.getSuspendedExchange(nodeId);
if (suspendedExchange != null) {
List<MessageHistory> list = suspendedExchange.getProperty(ExchangePropertyKey.MESSAGE_HISTORY, List.class);
if (list != null) {
// add incoming origin of message on the top
String routeId = suspendedExchange.getFromRouteId();
Route route = suspendedExchange.getContext().getRoute(routeId);
String loc = route != null ? route.getSourceLocationShort() : "";
String id = routeId;
String label = "";
if (suspendedExchange.getFromEndpoint() != null) {
label = "from["
+ URISupport
.sanitizeUri(
StringHelper.limitLength(suspendedExchange.getFromEndpoint().getEndpointUri(), 100))
+ "]";
}
long elapsed = suspendedExchange.getClock().elapsed();
messageHistoryBuilder
.append(" <messageHistoryEntry")
.append(" location=\"").append(StringHelper.xmlEncode(loc)).append("\"")
.append(" routeId=\"").append(StringHelper.xmlEncode(routeId)).append("\"")
.append(" processorId=\"").append(StringHelper.xmlEncode(id)).append("\"")
.append(" processor=\"").append(StringHelper.xmlEncode(label)).append("\"")
.append(" elapsed=\"").append(elapsed).append("\"")
.append("/>\n");
for (MessageHistory history : list) {
// and then each history
loc = LoggerHelper.getLineNumberLoggerName(history.getNode());
if (loc == null) {
loc = "";
}
routeId = history.getRouteId() != null ? history.getRouteId() : "";
id = history.getNode().getId();
// we need to avoid leak the sensible information here
// the sanitizeUri takes a very long time for very long string
// and the format cuts this to
// 78 characters, anyway. Cut this to 100 characters. This will
// give enough space for removing
// characters in the sanitizeUri method and will be reasonably
// fast
label = URISupport.sanitizeUri(StringHelper.limitLength(history.getNode().getLabel(), 100));
elapsed = history.getElapsed();
messageHistoryBuilder
.append(" <messageHistoryEntry")
.append(" location=\"").append(StringHelper.xmlEncode(loc)).append("\"")
.append(" routeId=\"").append(StringHelper.xmlEncode(routeId)).append("\"")
.append(" processorId=\"").append(StringHelper.xmlEncode(id)).append("\"")
.append(" processor=\"").append(StringHelper.xmlEncode(label)).append("\"")
.append(" elapsed=\"").append(elapsed).append("\"")
.append("/>\n");
}
}
}
messageHistoryBuilder.append("</messageHistory>\n");
return messageHistoryBuilder.toString();
}
@Override
public void attach() {
backlogDebugger.attach();
}
@Override
public void detach() {
backlogDebugger.detach();
}
private static boolean isSerializable(Object obj) {
final ByteArrayOutputStream baos = new ByteArrayOutputStream(512);
try (ObjectOutputStream out = new ObjectOutputStream(baos)) {
out.writeObject(obj);
return true;
} catch (Exception e) {
return false;
}
}
}
| ManagedBacklogDebugger |
java | apache__spark | common/network-common/src/test/java/org/apache/spark/network/TransportConfSuite.java | {
"start": 1136,
"end": 3420
} | class ____ {

  // Conf for the "shuffle" module backed by the shared sample SSL RPC configuration.
  private TransportConf transportConf =
      new TransportConf(
          "shuffle", SslSampleConfigs.createDefaultConfigProviderForRpcNamespace());

  @Test
  public void testKeyStorePath() {
    File expected = new File(SslSampleConfigs.keyStorePath);
    assertEquals(expected, transportConf.sslRpcKeyStore());
  }

  @Test
  public void testPrivateKeyPath() {
    File expected = new File(SslSampleConfigs.privateKeyPath);
    assertEquals(expected, transportConf.sslRpcPrivateKey());
  }

  @Test
  public void testCertChainPath() {
    File expected = new File(SslSampleConfigs.certChainPath);
    assertEquals(expected, transportConf.sslRpcCertChain());
  }

  @Test
  public void testTrustStorePath() {
    File expected = new File(SslSampleConfigs.trustStorePath);
    assertEquals(expected, transportConf.sslRpcTrustStore());
  }

  @Test
  public void testTrustStoreReloadingEnabled() {
    assertFalse(transportConf.sslRpcTrustStoreReloadingEnabled());
  }

  @Test
  public void testOpenSslEnabled() {
    assertFalse(transportConf.sslRpcOpenSslEnabled());
  }

  @Test
  public void testSslRpcEnabled() {
    assertTrue(transportConf.sslRpcEnabled());
  }

  @Test
  public void testSslKeyStorePassword() {
    assertEquals("password", transportConf.sslRpcKeyStorePassword());
  }

  @Test
  public void testSslKeyPassword() {
    assertEquals("password", transportConf.sslRpcKeyPassword());
  }

  @Test
  public void testSslTrustStorePassword() {
    assertEquals("password", transportConf.sslRpcTrustStorePassword());
  }

  @Test
  public void testSsltrustStoreReloadIntervalMs() {
    assertEquals(10000, transportConf.sslRpctrustStoreReloadIntervalMs());
  }

  @Test
  public void testDefaultIOMode() {
    // With nothing configured the IO mode falls back to AUTO.
    TransportConf unset = new TransportConf("m1", new MapConfigProvider(Map.of()));
    assertEquals("AUTO", unset.ioMode());
    // A global default applies to every module namespace...
    TransportConf defaulted = new TransportConf("m1",
        new MapConfigProvider(Map.of("spark.io.mode.default", "KQUEUE")));
    assertEquals("KQUEUE", defaulted.ioMode());
    TransportConf otherModule = new TransportConf("m2",
        new MapConfigProvider(Map.of("spark.io.mode.default", "KQUEUE")));
    assertEquals("KQUEUE", otherModule.ioMode());
    // ...but a module-specific setting takes precedence over the global default.
    TransportConf overridden = new TransportConf("m3",
        new MapConfigProvider(Map.of(
            "spark.io.mode.default", "KQUEUE",
            "spark.m3.io.mode", "EPOLL")));
    assertEquals("EPOLL", overridden.ioMode());
  }
}
| TransportConfSuite |
java | spring-projects__spring-boot | build-plugin/spring-boot-gradle-plugin/src/test/java/org/springframework/boot/gradle/tasks/bundling/DockerSpecTests.java | {
"start": 1511,
"end": 11830
} | class ____ {

	private DockerSpec dockerSpec;

	@BeforeEach
	void prepareDockerSpec(@TempDir File temp) {
		// Build the spec through Gradle's object factory so managed properties are wired up.
		this.dockerSpec = GradleProjectBuilder.builder()
			.withProjectDir(temp)
			.build()
			.getObjects()
			.newInstance(DockerSpec.class);
	}

	@Test
	void asDockerConfigurationWithDefaults() {
		BuilderDockerConfiguration configuration = this.dockerSpec.asDockerConfiguration();
		assertThat(configuration.connection()).isNull();
		assertBuilderAuthHeaderIsNull(configuration);
		assertPublishAuthHeaderIsEmpty(configuration);
	}

	@Test
	void asDockerConfigurationWithHostConfiguration() {
		this.dockerSpec.getHost().set("docker.example.com");
		this.dockerSpec.getTlsVerify().set(true);
		this.dockerSpec.getCertPath().set("/tmp/ca-cert");
		BuilderDockerConfiguration configuration = this.dockerSpec.asDockerConfiguration();
		DockerConnectionConfiguration.Host host = (DockerConnectionConfiguration.Host) configuration.connection();
		assertThat(host).isNotNull();
		assertThat(host.address()).isEqualTo("docker.example.com");
		assertThat(host.secure()).isTrue();
		assertThat(host.certificatePath()).isEqualTo("/tmp/ca-cert");
		assertThat(configuration.bindHostToBuilder()).isFalse();
		assertBuilderAuthHeaderIsNull(configuration);
		assertPublishAuthHeaderIsEmpty(configuration);
	}

	@Test
	void asDockerConfigurationWithHostConfigurationNoTlsVerify() {
		this.dockerSpec.getHost().set("docker.example.com");
		BuilderDockerConfiguration configuration = this.dockerSpec.asDockerConfiguration();
		DockerConnectionConfiguration.Host host = (DockerConnectionConfiguration.Host) configuration.connection();
		assertThat(host).isNotNull();
		assertThat(host.address()).isEqualTo("docker.example.com");
		// TLS verification was never enabled, so the connection must be insecure with no cert path.
		assertThat(host.secure()).isFalse();
		assertThat(host.certificatePath()).isNull();
		assertThat(configuration.bindHostToBuilder()).isFalse();
		assertBuilderAuthHeaderIsNull(configuration);
		assertPublishAuthHeaderIsEmpty(configuration);
	}

	@Test
	void asDockerConfigurationWithContextConfiguration() {
		this.dockerSpec.getContext().set("test-context");
		BuilderDockerConfiguration configuration = this.dockerSpec.asDockerConfiguration();
		DockerConnectionConfiguration.Context host = (DockerConnectionConfiguration.Context) configuration
			.connection();
		assertThat(host).isNotNull();
		assertThat(host.context()).isEqualTo("test-context");
		assertThat(configuration.bindHostToBuilder()).isFalse();
		assertBuilderAuthHeaderIsNull(configuration);
		assertPublishAuthHeaderIsEmpty(configuration);
	}

	@Test
	void asDockerConfigurationWithHostAndContextFails() {
		// Host and context are mutually exclusive ways of selecting the Docker daemon.
		this.dockerSpec.getContext().set("test-context");
		this.dockerSpec.getHost().set("docker.example.com");
		assertThatExceptionOfType(GradleException.class).isThrownBy(this.dockerSpec::asDockerConfiguration)
			.withMessageContaining("Invalid Docker configuration");
	}

	@Test
	void asDockerConfigurationWithBindHostToBuilder() {
		this.dockerSpec.getHost().set("docker.example.com");
		this.dockerSpec.getBindHostToBuilder().set(true);
		BuilderDockerConfiguration configuration = this.dockerSpec.asDockerConfiguration();
		DockerConnectionConfiguration.Host host = (DockerConnectionConfiguration.Host) configuration.connection();
		assertThat(host).isNotNull();
		assertThat(host.address()).isEqualTo("docker.example.com");
		assertThat(host.secure()).isFalse();
		assertThat(host.certificatePath()).isNull();
		assertThat(configuration.bindHostToBuilder()).isTrue();
		assertBuilderAuthHeaderIsNull(configuration);
		assertPublishAuthHeaderIsEmpty(configuration);
	}

	@Test
	void asDockerConfigurationWithUserAuth() {
		this.dockerSpec.builderRegistry((registry) -> {
			registry.getUsername().set("user1");
			registry.getPassword().set("secret1");
			registry.getUrl().set("https://docker1.example.com");
			registry.getEmail().set("docker1@example.com");
		});
		this.dockerSpec.publishRegistry((registry) -> {
			registry.getUsername().set("user2");
			registry.getPassword().set("secret2");
			registry.getUrl().set("https://docker2.example.com");
			registry.getEmail().set("docker2@example.com");
		});
		BuilderDockerConfiguration configuration = this.dockerSpec.asDockerConfiguration();
		DockerRegistryAuthentication builderAuth = configuration.builderRegistryAuthentication();
		assertThat(builderAuth).isNotNull();
		assertThat(decoded(builderAuth.getAuthHeader())).contains("\"username\" : \"user1\"")
			.contains("\"password\" : \"secret1\"")
			.contains("\"email\" : \"docker1@example.com\"")
			.contains("\"serveraddress\" : \"https://docker1.example.com\"");
		DockerRegistryAuthentication publishAuth = configuration.publishRegistryAuthentication();
		assertThat(publishAuth).isNotNull();
		assertThat(decoded(publishAuth.getAuthHeader())).contains("\"username\" : \"user2\"")
			.contains("\"password\" : \"secret2\"")
			.contains("\"email\" : \"docker2@example.com\"")
			.contains("\"serveraddress\" : \"https://docker2.example.com\"");
		assertThat(configuration.connection()).isNull();
	}

	@Test
	void asDockerConfigurationWithIncompleteBuilderUserAuthFails() {
		// Username without a password is an incomplete credential set.
		this.dockerSpec.builderRegistry((registry) -> {
			registry.getUsername().set("user1");
			registry.getUrl().set("https://docker1.example.com");
			registry.getEmail().set("docker1@example.com");
		});
		assertThatExceptionOfType(GradleException.class).isThrownBy(this.dockerSpec::asDockerConfiguration)
			.withMessageContaining("Invalid Docker builder registry configuration");
	}

	@Test
	void asDockerConfigurationWithIncompletePublishUserAuthFails() {
		// Same incomplete-credential rule applies to the publish registry.
		this.dockerSpec.publishRegistry((registry) -> {
			registry.getUsername().set("user2");
			registry.getUrl().set("https://docker2.example.com");
			registry.getEmail().set("docker2@example.com");
		});
		assertThatExceptionOfType(GradleException.class).isThrownBy(this.dockerSpec::asDockerConfiguration)
			.withMessageContaining("Invalid Docker publish registry configuration");
	}

	@Test
	void asDockerConfigurationWithTokenAuth() {
		this.dockerSpec.builderRegistry((registry) -> registry.getToken().set("token1"));
		this.dockerSpec.publishRegistry((registry) -> registry.getToken().set("token2"));
		BuilderDockerConfiguration configuration = this.dockerSpec.asDockerConfiguration();
		DockerRegistryAuthentication builderAuth = configuration.builderRegistryAuthentication();
		assertThat(builderAuth).isNotNull();
		assertThat(decoded(builderAuth.getAuthHeader())).contains("\"identitytoken\" : \"token1\"");
		DockerRegistryAuthentication publishAuth = configuration.publishRegistryAuthentication();
		assertThat(publishAuth).isNotNull();
		assertThat(decoded(publishAuth.getAuthHeader())).contains("\"identitytoken\" : \"token2\"");
	}

	@Test
	void asDockerConfigurationWithUserAndTokenAuthFails() {
		// User/password auth and token auth cannot be combined on one registry.
		this.dockerSpec.builderRegistry((registry) -> {
			registry.getUsername().set("user");
			registry.getPassword().set("secret");
			registry.getToken().set("token");
		});
		assertThatExceptionOfType(GradleException.class).isThrownBy(this.dockerSpec::asDockerConfiguration)
			.withMessageContaining("Invalid Docker builder registry configuration");
	}

	// Asserts that builder registry authentication exists but carries no auth header.
	private void assertBuilderAuthHeaderIsNull(BuilderDockerConfiguration configuration) {
		DockerRegistryAuthentication auth = configuration.builderRegistryAuthentication();
		assertThat(auth).isNotNull();
		assertThat(auth.getAuthHeader()).isNull();
	}

	// Asserts that publish registry authentication exists and its header holds empty credentials.
	private void assertPublishAuthHeaderIsEmpty(BuilderDockerConfiguration configuration) {
		DockerRegistryAuthentication auth = configuration.publishRegistryAuthentication();
		assertThat(auth).isNotNull();
		assertThat(decoded(auth.getAuthHeader())).contains("\"username\" : \"\"")
			.contains("\"password\" : \"\"")
			.contains("\"email\" : \"\"")
			.contains("\"serveraddress\" : \"\"");
	}

	// Decodes a Base64 auth header, passing null through untouched.
	@Nullable String decoded(@Nullable String value) {
		if (value == null) {
			return null;
		}
		return new String(Base64.getDecoder().decode(value));
	}
}
| DockerSpecTests |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java | {
"start": 723,
"end": 46981
} | class ____ implements EsqlBaseParserListener {
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStatements(EsqlBaseParser.StatementsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStatements(EsqlBaseParser.StatementsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSingleStatement(EsqlBaseParser.SingleStatementContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSingleStatement(EsqlBaseParser.SingleStatementContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCompositeQuery(EsqlBaseParser.CompositeQueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCompositeQuery(EsqlBaseParser.CompositeQueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSingleCommandQuery(EsqlBaseParser.SingleCommandQueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSingleCommandQuery(EsqlBaseParser.SingleCommandQueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSourceCommand(EsqlBaseParser.SourceCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSourceCommand(EsqlBaseParser.SourceCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterProcessingCommand(EsqlBaseParser.ProcessingCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitProcessingCommand(EsqlBaseParser.ProcessingCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitWhereCommand(EsqlBaseParser.WhereCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterToDataType(EsqlBaseParser.ToDataTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitToDataType(EsqlBaseParser.ToDataTypeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRowCommand(EsqlBaseParser.RowCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRowCommand(EsqlBaseParser.RowCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFields(EsqlBaseParser.FieldsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFields(EsqlBaseParser.FieldsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterField(EsqlBaseParser.FieldContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitField(EsqlBaseParser.FieldContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRerankFields(EsqlBaseParser.RerankFieldsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRerankFields(EsqlBaseParser.RerankFieldsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRerankField(EsqlBaseParser.RerankFieldContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRerankField(EsqlBaseParser.RerankFieldContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFromCommand(EsqlBaseParser.FromCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFromCommand(EsqlBaseParser.FromCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTimeSeriesCommand(EsqlBaseParser.TimeSeriesCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTimeSeriesCommand(EsqlBaseParser.TimeSeriesCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIndexPatternAndMetadataFields(EsqlBaseParser.IndexPatternAndMetadataFieldsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIndexPatternAndMetadataFields(EsqlBaseParser.IndexPatternAndMetadataFieldsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIndexPatternOrSubquery(EsqlBaseParser.IndexPatternOrSubqueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIndexPatternOrSubquery(EsqlBaseParser.IndexPatternOrSubqueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSubquery(EsqlBaseParser.SubqueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSubquery(EsqlBaseParser.SubqueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIndexPattern(EsqlBaseParser.IndexPatternContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIndexPattern(EsqlBaseParser.IndexPatternContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterClusterString(EsqlBaseParser.ClusterStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitClusterString(EsqlBaseParser.ClusterStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSelectorString(EsqlBaseParser.SelectorStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSelectorString(EsqlBaseParser.SelectorStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterUnquotedIndexString(EsqlBaseParser.UnquotedIndexStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitUnquotedIndexString(EsqlBaseParser.UnquotedIndexStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIndexString(EsqlBaseParser.IndexStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIndexString(EsqlBaseParser.IndexStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMetadata(EsqlBaseParser.MetadataContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMetadata(EsqlBaseParser.MetadataContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEvalCommand(EsqlBaseParser.EvalCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEvalCommand(EsqlBaseParser.EvalCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStatsCommand(EsqlBaseParser.StatsCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterAggFields(EsqlBaseParser.AggFieldsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitAggFields(EsqlBaseParser.AggFieldsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterAggField(EsqlBaseParser.AggFieldContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitAggField(EsqlBaseParser.AggFieldContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterQualifiedName(EsqlBaseParser.QualifiedNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitQualifiedName(EsqlBaseParser.QualifiedNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFieldName(EsqlBaseParser.FieldNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFieldName(EsqlBaseParser.FieldNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFieldNamePattern(EsqlBaseParser.FieldNamePatternContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFieldNamePattern(EsqlBaseParser.FieldNamePatternContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterQualifiedNamePatterns(EsqlBaseParser.QualifiedNamePatternsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitQualifiedNamePatterns(EsqlBaseParser.QualifiedNamePatternsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIdentifier(EsqlBaseParser.IdentifierContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIdentifier(EsqlBaseParser.IdentifierContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInputParam(EsqlBaseParser.InputParamContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInputParam(EsqlBaseParser.InputParamContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInputNamedOrPositionalParam(EsqlBaseParser.InputNamedOrPositionalParamContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInputNamedOrPositionalParam(EsqlBaseParser.InputNamedOrPositionalParamContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInputDoubleParams(EsqlBaseParser.InputDoubleParamsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInputDoubleParams(EsqlBaseParser.InputDoubleParamsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInputNamedOrPositionalDoubleParams(EsqlBaseParser.InputNamedOrPositionalDoubleParamsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInputNamedOrPositionalDoubleParams(EsqlBaseParser.InputNamedOrPositionalDoubleParamsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIdentifierOrParameter(EsqlBaseParser.IdentifierOrParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIdentifierOrParameter(EsqlBaseParser.IdentifierOrParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStringOrParameter(EsqlBaseParser.StringOrParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStringOrParameter(EsqlBaseParser.StringOrParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLimitCommand(EsqlBaseParser.LimitCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLimitCommand(EsqlBaseParser.LimitCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSortCommand(EsqlBaseParser.SortCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSortCommand(EsqlBaseParser.SortCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterOrderExpression(EsqlBaseParser.OrderExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitOrderExpression(EsqlBaseParser.OrderExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterKeepCommand(EsqlBaseParser.KeepCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitKeepCommand(EsqlBaseParser.KeepCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDropCommand(EsqlBaseParser.DropCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDropCommand(EsqlBaseParser.DropCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRenameCommand(EsqlBaseParser.RenameCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRenameCommand(EsqlBaseParser.RenameCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRenameClause(EsqlBaseParser.RenameClauseContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRenameClause(EsqlBaseParser.RenameClauseContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDissectCommand(EsqlBaseParser.DissectCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDissectCommand(EsqlBaseParser.DissectCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDissectCommandOptions(EsqlBaseParser.DissectCommandOptionsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDissectCommandOptions(EsqlBaseParser.DissectCommandOptionsContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDissectCommandOption(EsqlBaseParser.DissectCommandOptionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDissectCommandOption(EsqlBaseParser.DissectCommandOptionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCommandNamedParameters(EsqlBaseParser.CommandNamedParametersContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCommandNamedParameters(EsqlBaseParser.CommandNamedParametersContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterGrokCommand(EsqlBaseParser.GrokCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitGrokCommand(EsqlBaseParser.GrokCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExplainCommand(EsqlBaseParser.ExplainCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExplainCommand(EsqlBaseParser.ExplainCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSubqueryExpression(EsqlBaseParser.SubqueryExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSubqueryExpression(EsqlBaseParser.SubqueryExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterShowInfo(EsqlBaseParser.ShowInfoContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitShowInfo(EsqlBaseParser.ShowInfoContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEnrichPolicyName(EsqlBaseParser.EnrichPolicyNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEnrichPolicyName(EsqlBaseParser.EnrichPolicyNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSampleCommand(EsqlBaseParser.SampleCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSampleCommand(EsqlBaseParser.SampleCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterChangePointCommand(EsqlBaseParser.ChangePointCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitChangePointCommand(EsqlBaseParser.ChangePointCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterForkCommand(EsqlBaseParser.ForkCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitForkCommand(EsqlBaseParser.ForkCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterForkSubQueries(EsqlBaseParser.ForkSubQueriesContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitForkSubQueries(EsqlBaseParser.ForkSubQueriesContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterForkSubQuery(EsqlBaseParser.ForkSubQueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitForkSubQuery(EsqlBaseParser.ForkSubQueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSingleForkSubQueryCommand(EsqlBaseParser.SingleForkSubQueryCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSingleForkSubQueryCommand(EsqlBaseParser.SingleForkSubQueryCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCompositeForkSubQuery(EsqlBaseParser.CompositeForkSubQueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCompositeForkSubQuery(EsqlBaseParser.CompositeForkSubQueryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterForkSubQueryProcessingCommand(EsqlBaseParser.ForkSubQueryProcessingCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitForkSubQueryProcessingCommand(EsqlBaseParser.ForkSubQueryProcessingCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRerankCommand(EsqlBaseParser.RerankCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRerankCommand(EsqlBaseParser.RerankCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCompletionCommand(EsqlBaseParser.CompletionCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCompletionCommand(EsqlBaseParser.CompletionCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInlineStatsCommand(EsqlBaseParser.InlineStatsCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInlineStatsCommand(EsqlBaseParser.InlineStatsCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFuseCommand(EsqlBaseParser.FuseCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFuseCommand(EsqlBaseParser.FuseCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFuseConfiguration(EsqlBaseParser.FuseConfigurationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFuseConfiguration(EsqlBaseParser.FuseConfigurationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLookupCommand(EsqlBaseParser.LookupCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInsistCommand(EsqlBaseParser.InsistCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInsistCommand(EsqlBaseParser.InsistCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSetCommand(EsqlBaseParser.SetCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSetCommand(EsqlBaseParser.SetCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterSetField(EsqlBaseParser.SetFieldContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitSetField(EsqlBaseParser.SetFieldContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMatchExpression(EsqlBaseParser.MatchExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMatchExpression(EsqlBaseParser.MatchExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLogicalNot(EsqlBaseParser.LogicalNotContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLogicalNot(EsqlBaseParser.LogicalNotContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBooleanDefault(EsqlBaseParser.BooleanDefaultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBooleanDefault(EsqlBaseParser.BooleanDefaultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIsNull(EsqlBaseParser.IsNullContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIsNull(EsqlBaseParser.IsNullContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRegexExpression(EsqlBaseParser.RegexExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRegexExpression(EsqlBaseParser.RegexExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLogicalIn(EsqlBaseParser.LogicalInContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLogicalIn(EsqlBaseParser.LogicalInContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLogicalBinary(EsqlBaseParser.LogicalBinaryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLogicalBinary(EsqlBaseParser.LogicalBinaryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLikeExpression(EsqlBaseParser.LikeExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLikeExpression(EsqlBaseParser.LikeExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRlikeExpression(EsqlBaseParser.RlikeExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRlikeExpression(EsqlBaseParser.RlikeExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLikeListExpression(EsqlBaseParser.LikeListExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLikeListExpression(EsqlBaseParser.LikeListExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterRlikeListExpression(EsqlBaseParser.RlikeListExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitRlikeListExpression(EsqlBaseParser.RlikeListExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMatchBooleanExpression(EsqlBaseParser.MatchBooleanExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterValueExpressionDefault(EsqlBaseParser.ValueExpressionDefaultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitValueExpressionDefault(EsqlBaseParser.ValueExpressionDefaultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterComparison(EsqlBaseParser.ComparisonContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitComparison(EsqlBaseParser.ComparisonContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterOperatorExpressionDefault(EsqlBaseParser.OperatorExpressionDefaultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitOperatorExpressionDefault(EsqlBaseParser.OperatorExpressionDefaultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterArithmeticBinary(EsqlBaseParser.ArithmeticBinaryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitArithmeticBinary(EsqlBaseParser.ArithmeticBinaryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterArithmeticUnary(EsqlBaseParser.ArithmeticUnaryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitArithmeticUnary(EsqlBaseParser.ArithmeticUnaryContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDereference(EsqlBaseParser.DereferenceContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDereference(EsqlBaseParser.DereferenceContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInlineCast(EsqlBaseParser.InlineCastContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInlineCast(EsqlBaseParser.InlineCastContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitConstantDefault(EsqlBaseParser.ConstantDefaultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitParenthesizedExpression(EsqlBaseParser.ParenthesizedExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunction(EsqlBaseParser.FunctionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunction(EsqlBaseParser.FunctionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionName(EsqlBaseParser.FunctionNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMapExpression(EsqlBaseParser.MapExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMapExpression(EsqlBaseParser.MapExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEntryExpression(EsqlBaseParser.EntryExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEntryExpression(EsqlBaseParser.EntryExpressionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMapValue(EsqlBaseParser.MapValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMapValue(EsqlBaseParser.MapValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterNullLiteral(EsqlBaseParser.NullLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitNullLiteral(EsqlBaseParser.NullLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterQualifiedIntegerLiteral(EsqlBaseParser.QualifiedIntegerLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitQualifiedIntegerLiteral(EsqlBaseParser.QualifiedIntegerLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDecimalLiteral(EsqlBaseParser.DecimalLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDecimalLiteral(EsqlBaseParser.DecimalLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIntegerLiteral(EsqlBaseParser.IntegerLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIntegerLiteral(EsqlBaseParser.IntegerLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBooleanLiteral(EsqlBaseParser.BooleanLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBooleanLiteral(EsqlBaseParser.BooleanLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterInputParameter(EsqlBaseParser.InputParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitInputParameter(EsqlBaseParser.InputParameterContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStringLiteral(EsqlBaseParser.StringLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStringLiteral(EsqlBaseParser.StringLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterNumericArrayLiteral(EsqlBaseParser.NumericArrayLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitNumericArrayLiteral(EsqlBaseParser.NumericArrayLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBooleanArrayLiteral(EsqlBaseParser.BooleanArrayLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBooleanArrayLiteral(EsqlBaseParser.BooleanArrayLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStringArrayLiteral(EsqlBaseParser.StringArrayLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStringArrayLiteral(EsqlBaseParser.StringArrayLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBooleanValue(EsqlBaseParser.BooleanValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBooleanValue(EsqlBaseParser.BooleanValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterNumericValue(EsqlBaseParser.NumericValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitNumericValue(EsqlBaseParser.NumericValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDecimalValue(EsqlBaseParser.DecimalValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDecimalValue(EsqlBaseParser.DecimalValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIntegerValue(EsqlBaseParser.IntegerValueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterString(EsqlBaseParser.StringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitString(EsqlBaseParser.StringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterComparisonOperator(EsqlBaseParser.ComparisonOperatorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitComparisonOperator(EsqlBaseParser.ComparisonOperatorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitJoinCommand(EsqlBaseParser.JoinCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitJoinTarget(EsqlBaseParser.JoinTargetContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitJoinCondition(EsqlBaseParser.JoinConditionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPromqlCommand(EsqlBaseParser.PromqlCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPromqlCommand(EsqlBaseParser.PromqlCommandContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPromqlParam(EsqlBaseParser.PromqlParamContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPromqlParam(EsqlBaseParser.PromqlParamContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPromqlParamContent(EsqlBaseParser.PromqlParamContentContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPromqlParamContent(EsqlBaseParser.PromqlParamContentContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPromqlQueryPart(EsqlBaseParser.PromqlQueryPartContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPromqlQueryPart(EsqlBaseParser.PromqlQueryPartContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEveryRule(ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEveryRule(ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitTerminal(TerminalNode node) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitErrorNode(ErrorNode node) { }
}
| EsqlBaseParserBaseListener |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/StaticGuardedByInstanceTest.java | {
"start": 3322,
"end": 3710
} | class ____ {
static boolean init = false;
void m() {
synchronized (getClass()) {
init = true;
}
}
}
""")
.doTest();
}
@Test
public void negative_nested() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CometdEndpointBuilderFactory.java | {
"start": 12572,
"end": 18381
} | interface ____
extends
EndpointConsumerBuilder {
default CometdEndpointConsumerBuilder basic() {
return (CometdEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedCometdEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedCometdEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedCometdEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedCometdEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedCometdEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedCometdEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
}
/**
* Builder for endpoint producers for the CometD component.
*/
public | AdvancedCometdEndpointConsumerBuilder |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest_having.java | {
"start": 837,
"end": 1467
} | class ____ extends TestCase {
public void test_having() throws Exception {
assertFalse(WallUtils.isValidateMySql(//
"select id, count(*) from t group by id having 1 = 1"));
}
public void test_having_true_first() throws Exception {
assertTrue(WallUtils.isValidateMySql(//
"select id, count(*) from t group by id having 1 = 1 AND count(*) > 2"));
}
public void test_having_false() throws Exception {
assertFalse(WallUtils.isValidateMySql(//
"select id, count(*) from t group by id having count(*) > 2 OR 1 = 1"));
}
}
| MySqlWallTest_having |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockReplacement.java | {
"start": 2714,
"end": 2796
} | class ____ if block replacement request to data nodes work correctly.
*/
public | tests |
java | square__javapoet | src/test/java/com/squareup/javapoet/TypeSpecTest.java | {
"start": 3748,
"end": 6106
} | class ____ {\n"
+ " List<?> extendsObject;\n"
+ "\n"
+ " List<? extends Serializable> extendsSerializable;\n"
+ "\n"
+ " List<? super String> superString;\n"
+ "}\n");
}
@Test public void anonymousInnerClass() throws Exception {
ClassName foo = ClassName.get(tacosPackage, "Foo");
ClassName bar = ClassName.get(tacosPackage, "Bar");
ClassName thingThang = ClassName.get(tacosPackage, "Thing", "Thang");
TypeName thingThangOfFooBar = ParameterizedTypeName.get(thingThang, foo, bar);
ClassName thung = ClassName.get(tacosPackage, "Thung");
ClassName simpleThung = ClassName.get(tacosPackage, "SimpleThung");
TypeName thungOfSuperBar = ParameterizedTypeName.get(thung, WildcardTypeName.supertypeOf(bar));
TypeName thungOfSuperFoo = ParameterizedTypeName.get(thung, WildcardTypeName.supertypeOf(foo));
TypeName simpleThungOfBar = ParameterizedTypeName.get(simpleThung, bar);
ParameterSpec thungParameter = ParameterSpec.builder(thungOfSuperFoo, "thung")
.addModifiers(Modifier.FINAL)
.build();
TypeSpec aSimpleThung = TypeSpec.anonymousClassBuilder(CodeBlock.of("$N", thungParameter))
.superclass(simpleThungOfBar)
.addMethod(MethodSpec.methodBuilder("doSomething")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.addParameter(bar, "bar")
.addCode("/* code snippets */\n")
.build())
.build();
TypeSpec aThingThang = TypeSpec.anonymousClassBuilder("")
.superclass(thingThangOfFooBar)
.addMethod(MethodSpec.methodBuilder("call")
.addAnnotation(Override.class)
.addModifiers(Modifier.PUBLIC)
.returns(thungOfSuperBar)
.addParameter(thungParameter)
.addCode("return $L;\n", aSimpleThung)
.build())
.build();
TypeSpec taco = TypeSpec.classBuilder("Taco")
.addField(FieldSpec.builder(thingThangOfFooBar, "NAME")
.addModifiers(Modifier.STATIC, Modifier.FINAL, Modifier.FINAL)
.initializer("$L", aThingThang)
.build())
.build();
assertThat(toString(taco)).isEqualTo(""
+ "package com.squareup.tacos;\n"
+ "\n"
+ "import java.lang.Override;\n"
+ "\n"
+ " | Taco |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/RedundantOverrideTest.java | {
"start": 3216,
"end": 3554
} | class ____ extends A {
@Override
public void swap(int a, int b) {
super.swap(b, a);
}
}
""")
.doTest();
}
@Test
public void wideningVisibilityNoMatch() {
testHelper
.addSourceLines(
"A.java",
"""
| B |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/AutoConfigurationPackage.java | {
"start": 1429,
"end": 1956
} | interface ____ {
/**
* Base packages that should be registered with {@link AutoConfigurationPackages}.
* <p>
* Use {@link #basePackageClasses} for a type-safe alternative to String-based package
* names.
* @return the back package names
* @since 2.3.0
*/
String[] basePackages() default {};
/**
* Type-safe alternative to {@link #basePackages} for specifying the packages to be
* registered with {@link AutoConfigurationPackages}.
* <p>
* Consider creating a special no-op marker | AutoConfigurationPackage |
java | elastic__elasticsearch | modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java | {
"start": 1080,
"end": 3541
} | class ____ extends AbstractTokenFilterFactory {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(EdgeNGramTokenFilterFactory.class);
private final int minGram;
private final int maxGram;
public static final int SIDE_FRONT = 1;
public static final int SIDE_BACK = 2;
private final int side;
private final boolean preserveOriginal;
private static final String PRESERVE_ORIG_KEY = "preserve_original";
EdgeNGramTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name);
this.minGram = settings.getAsInt("min_gram", 1);
this.maxGram = settings.getAsInt("max_gram", 2);
if (settings.get("side") != null) {
deprecationLogger.critical(
DeprecationCategory.ANALYSIS,
"edge_ngram_side_deprecated",
"The [side] parameter is deprecated and will be removed. Use a [reverse] before and after the [edge_ngram] instead."
);
}
this.side = parseSide(settings.get("side", "front"));
this.preserveOriginal = settings.getAsBoolean(PRESERVE_ORIG_KEY, false);
}
static int parseSide(String side) {
return switch (side) {
case "front" -> SIDE_FRONT;
case "back" -> SIDE_BACK;
default -> throw new IllegalArgumentException("invalid side: " + side);
};
}
@Override
public TokenStream create(TokenStream tokenStream) {
TokenStream result = tokenStream;
// side=BACK is not supported anymore but applying ReverseStringFilter up-front and after the token filter has the same effect
if (side == SIDE_BACK) {
result = new ReverseStringFilter(result);
}
result = new EdgeNGramTokenFilter(result, minGram, maxGram, preserveOriginal);
// side=BACK is not supported anymore but applying ReverseStringFilter up-front and after the token filter has the same effect
if (side == SIDE_BACK) {
result = new ReverseStringFilter(result);
}
return result;
}
@Override
public boolean breaksFastVectorHighlighter() {
return true;
}
@Override
public TokenFilterFactory getSynonymFilter() {
throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms");
}
}
| EdgeNGramTokenFilterFactory |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/message/ParameterizedNoReferenceMessageFactory.java | {
"start": 1449,
"end": 1652
} | class ____ <em>not</em> implement any {@link MessageFactory2} methods and lets the superclass funnel those calls
* through {@link #newMessage(String, Object...)}.
* </p>
* @since 2.5
*/
public final | does |
java | apache__camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/format/factories/EnumFormatFactory.java | {
"start": 981,
"end": 1446
} | class ____ extends AbstractFormatFactory {
@Override
public boolean canBuild(FormattingOptions formattingOptions) {
return formattingOptions.getClazz().isEnum();
}
@Override
public Format<?> build(FormattingOptions formattingOptions) {
@SuppressWarnings({ "rawtypes", "unchecked" })
EnumFormat enumFormat = new EnumFormat(formattingOptions.getClazz());
return enumFormat;
}
private static | EnumFormatFactory |
java | quarkusio__quarkus | extensions/oidc-client-registration/runtime/src/main/java/io/quarkus/oidc/client/registration/runtime/OidcClientRegistrationImpl.java | {
"start": 1597,
"end": 12045
} | class ____ implements OidcClientRegistration {
private static final Logger LOG = Logger.getLogger(OidcClientRegistrationImpl.class);
private static final String APPLICATION_JSON = "application/json";
private static final String AUTHORIZATION_HEADER = String.valueOf(HttpHeaders.AUTHORIZATION);
private static final String DEFAULT_ID = "Default";
private final WebClient client;
private final long connectionDelayInMillisecs;
private final String registrationUri;
private final OidcClientRegistrationConfig oidcConfig;
private final Map<OidcEndpoint.Type, List<OidcRequestFilter>> requestFilters;
private final Map<OidcEndpoint.Type, List<OidcResponseFilter>> responseFilters;
private final RegisteredClient registeredClient;
private volatile boolean closed;
public OidcClientRegistrationImpl(WebClient client, long connectionDelayInMillisecs,
String registrationUri,
OidcClientRegistrationConfig oidcConfig, RegisteredClient registeredClient,
Map<Type, List<OidcRequestFilter>> oidcRequestFilters,
Map<Type, List<OidcResponseFilter>> oidcResponseFilters) {
this.client = client;
this.connectionDelayInMillisecs = connectionDelayInMillisecs;
this.registrationUri = registrationUri;
this.oidcConfig = oidcConfig;
this.requestFilters = oidcRequestFilters;
this.responseFilters = oidcResponseFilters;
this.registeredClient = registeredClient;
}
@Override
public Uni<RegisteredClient> registeredClient() {
if (registeredClient != null) {
return Uni.createFrom().item(registeredClient);
} else if (oidcConfig.registerEarly()) {
return Uni.createFrom().nullItem();
} else {
ClientMetadata metadata = createMetadata(oidcConfig.metadata());
if (metadata.getJsonObject().isEmpty()) {
LOG.debugf("%s client registration is skipped because its metadata is not configured",
oidcConfig.id().orElse(DEFAULT_ID));
return Uni.createFrom().nullItem();
} else {
return registerClient(client, registrationUri,
oidcConfig, requestFilters, responseFilters, metadata.getMetadataString())
.onFailure(OidcCommonUtils.oidcEndpointNotAvailable())
.retry()
.withBackOff(OidcCommonUtils.CONNECTION_BACKOFF_DURATION,
OidcCommonUtils.CONNECTION_BACKOFF_DURATION)
.expireIn(connectionDelayInMillisecs);
}
}
}
@Override
public Uni<RegisteredClient> registerClient(ClientMetadata metadata) {
LOG.debugf("Register client metadata: %s", metadata.getMetadataString());
checkClosed();
OidcRequestContextProperties requestProps = getRequestProps();
return postRequest(requestProps, client, registrationUri, oidcConfig, requestFilters, metadata.getMetadataString())
.transform(resp -> newRegisteredClient(resp, client, registrationUri, oidcConfig, requestFilters,
responseFilters, requestProps));
}
@Override
public Multi<RegisteredClient> registerClients(List<ClientMetadata> metadataList) {
LOG.debugf("Register clients");
checkClosed();
OidcRequestContextProperties requestProps = getRequestProps();
return Multi.createFrom().emitter(new Consumer<MultiEmitter<? super RegisteredClient>>() {
@Override
public void accept(MultiEmitter<? super RegisteredClient> multiEmitter) {
try {
AtomicInteger emitted = new AtomicInteger();
for (ClientMetadata metadata : metadataList) {
postRequest(requestProps, client, registrationUri, oidcConfig, requestFilters,
metadata.getMetadataString())
.transform(
resp -> newRegisteredClient(resp, client, registrationUri, oidcConfig, requestFilters,
responseFilters, requestProps))
.subscribe().with(new Consumer<RegisteredClient>() {
@Override
public void accept(RegisteredClient client) {
multiEmitter.emit(client);
if (emitted.incrementAndGet() == metadataList.size()) {
multiEmitter.complete();
}
}
});
}
} catch (Exception ex) {
multiEmitter.fail(ex);
}
}
});
}
private OidcRequestContextProperties getRequestProps() {
return requestFilters.isEmpty() && responseFilters.isEmpty() ? null : new OidcRequestContextProperties();
}
static Uni<RegisteredClient> registerClient(WebClient client,
String registrationUri,
OidcClientRegistrationConfig oidcConfig,
Map<Type, List<OidcRequestFilter>> requestFilters,
Map<Type, List<OidcResponseFilter>> responseFilters,
String clientRegJson) {
OidcRequestContextProperties requestProps = requestFilters == null && responseFilters.isEmpty() ? null
: new OidcRequestContextProperties();
return postRequest(requestProps, client, registrationUri, oidcConfig, requestFilters, clientRegJson)
.transform(resp -> newRegisteredClient(resp, client, registrationUri, oidcConfig, requestFilters,
responseFilters, requestProps));
}
static UniOnItem<HttpResponse<Buffer>> postRequest(OidcRequestContextProperties requestProps,
WebClient client, String registrationUri,
OidcClientRegistrationConfig oidcConfig,
Map<Type, List<OidcRequestFilter>> filters, String clientRegJson) {
HttpRequest<Buffer> request = client.postAbs(registrationUri);
request.putHeader(HttpHeaders.CONTENT_TYPE.toString(), APPLICATION_JSON);
request.putHeader(HttpHeaders.ACCEPT.toString(), APPLICATION_JSON);
if (oidcConfig.initialToken().orElse(null) != null) {
request.putHeader(AUTHORIZATION_HEADER, OidcConstants.BEARER_SCHEME + " " + oidcConfig.initialToken().get());
}
// Retry up to three times with a one-second delay between the retries if the connection is closed
Buffer buffer = Buffer.buffer(clientRegJson);
Uni<HttpResponse<Buffer>> response = filterHttpRequest(requestProps, request, filters, buffer)
.sendBuffer(OidcCommonUtils.getRequestBuffer(requestProps, buffer))
.onFailure(SocketException.class)
.retry()
.atMost(oidcConfig.connectionRetryCount())
.onFailure().transform(t -> {
LOG.warn("OIDC Server is not available:", t.getCause() != null ? t.getCause() : t);
// don't wrap it to avoid information leak
return new OidcClientRegistrationException("OIDC Server is not available");
});
return response.onItem();
}
static private HttpRequest<Buffer> filterHttpRequest(OidcRequestContextProperties requestProps,
HttpRequest<Buffer> request,
Map<Type, List<OidcRequestFilter>> filters,
Buffer body) {
if (!filters.isEmpty()) {
OidcRequestContext context = new OidcRequestContext(request, body, requestProps);
for (OidcRequestFilter filter : OidcCommonUtils.getMatchingOidcRequestFilters(filters,
OidcEndpoint.Type.CLIENT_REGISTRATION)) {
filter.filter(context);
}
}
return request;
}
static private RegisteredClient newRegisteredClient(HttpResponse<Buffer> resp,
WebClient client, String registrationUri, OidcClientRegistrationConfig oidcConfig,
Map<Type, List<OidcRequestFilter>> requestFilters,
Map<Type, List<OidcResponseFilter>> responseFilters,
OidcRequestContextProperties requestProps) {
Buffer buffer = OidcCommonUtils.filterHttpResponse(requestProps, resp, responseFilters,
OidcEndpoint.Type.CLIENT_REGISTRATION);
if (resp.statusCode() == 200 || resp.statusCode() == 201) {
JsonObject json = buffer.toJsonObject();
LOG.debugf("Client has been succesfully registered: %s", json.toString());
String registrationClientUri = (String) json.remove(OidcConstants.REGISTRATION_CLIENT_URI);
String registrationToken = (String) json.remove(OidcConstants.REGISTRATION_ACCESS_TOKEN);
ClientMetadata metadata = new ClientMetadata(json.toString());
return new RegisteredClientImpl(client, oidcConfig, requestFilters, responseFilters, metadata,
registrationClientUri, registrationToken);
} else {
String errorMessage = buffer.toString();
LOG.errorf("Client registeration has failed: status: %d, error message: %s", resp.statusCode(),
errorMessage);
throw new OidcClientRegistrationException(errorMessage);
}
}
@Override
public Uni<RegisteredClient> readClient(String registrationUri, String registrationToken) {
@SuppressWarnings("resource")
RegisteredClient newClient = new RegisteredClientImpl(client, oidcConfig,
requestFilters, responseFilters, createMetadata(oidcConfig.metadata()), registrationUri, registrationToken);
return newClient.read();
}
@Override
public void close() throws IOException {
if (!closed) {
try {
client.close();
} catch (Exception ex) {
LOG.debug("Failed to close the client", ex);
}
closed = true;
}
}
private void checkClosed() {
if (closed) {
throw new IllegalStateException("OIDC Client Registration is closed");
}
}
static | OidcClientRegistrationImpl |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/type/descriptor/jdbc/JsonJdbcType.java | {
"start": 1001,
"end": 5153
} | class ____ implements AggregateJdbcType {
/**
* Singleton access
*/
public static final JsonJdbcType INSTANCE = new JsonJdbcType( null );
private final EmbeddableMappingType embeddableMappingType;
protected JsonJdbcType(EmbeddableMappingType embeddableMappingType) {
this.embeddableMappingType = embeddableMappingType;
}
@Override
public int getJdbcTypeCode() {
return SqlTypes.VARCHAR;
}
@Override
public int getDefaultSqlTypeCode() {
return SqlTypes.JSON;
}
@Override
public String toString() {
return "JsonJdbcType";
}
@Override
public <T> JdbcLiteralFormatter<T> getJdbcLiteralFormatter(JavaType<T> javaType) {
// No literal support for now
return null;
}
@Override
public AggregateJdbcType resolveAggregateJdbcType(
EmbeddableMappingType mappingType,
String sqlType,
RuntimeModelCreationContext creationContext) {
return new JsonJdbcType( mappingType );
}
@Override
public EmbeddableMappingType getEmbeddableMappingType() {
return embeddableMappingType;
}
protected <X> X fromString(String string, JavaType<X> javaType, WrapperOptions options) throws SQLException {
if ( string == null ) {
return null;
}
if ( embeddableMappingType != null ) {
return JsonHelper.deserialize(
embeddableMappingType,
new StringJsonDocumentReader(string),
javaType.getJavaTypeClass() != Object[].class,
options
);
}
return options.getJsonFormatMapper().fromString( string, javaType, options );
}
@Override
public Object createJdbcValue(Object domainValue, WrapperOptions options) throws SQLException {
assert embeddableMappingType != null;
final StringJsonDocumentWriter writer = new StringJsonDocumentWriter();
try {
JsonGeneratingVisitor.INSTANCE.visit( embeddableMappingType, domainValue, options, writer );
return writer.getJson();
}
catch (IOException e) {
throw new SQLException( e );
}
}
@Override
public Object[] extractJdbcValues(Object rawJdbcValue, WrapperOptions options) throws SQLException {
assert embeddableMappingType != null;
return JsonHelper.deserialize( embeddableMappingType, new StringJsonDocumentReader( (String) rawJdbcValue ), false, options );
}
protected <X> String toString(X value, JavaType<X> javaType, WrapperOptions options) {
if ( embeddableMappingType != null ) {
try {
final StringJsonDocumentWriter writer = new StringJsonDocumentWriter();
JsonGeneratingVisitor.INSTANCE.visit( embeddableMappingType, value, options, writer );
return writer.getJson();
}
catch (IOException e) {
throw new RuntimeException("Failed to serialize JSON mapping", e );
}
}
return options.getJsonFormatMapper().toString( value, javaType, options );
}
@Override
public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
return new BasicBinder<>( javaType, this ) {
@Override
protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
throws SQLException {
final String json = ( (JsonJdbcType) getJdbcType() ).toString( value, getJavaType(), options );
st.setString( index, json );
}
@Override
protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
throws SQLException {
final String json = ( (JsonJdbcType) getJdbcType() ).toString( value, getJavaType(), options );
st.setString( name, json );
}
};
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return fromString( rs.getString( paramIndex ), getJavaType(), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return fromString( statement.getString( index ), getJavaType(), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options) throws SQLException {
return fromString( statement.getString( name ), getJavaType(), options );
}
};
}
}
| JsonJdbcType |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java | {
"start": 602,
"end": 2795
} | class ____ implements IntVector.FixedBuilder {
private final BlockFactory blockFactory;
private final int[] values;
private final long preAdjustedBytes;
/**
* The next value to write into. {@code -1} means the vector has already
* been built.
*/
private int nextIndex;
private boolean closed;
IntVectorFixedBuilder(int size, BlockFactory blockFactory) {
preAdjustedBytes = ramBytesUsed(size);
blockFactory.adjustBreaker(preAdjustedBytes);
this.blockFactory = blockFactory;
this.values = new int[size];
}
@Override
public IntVectorFixedBuilder appendInt(int value) {
values[nextIndex++] = value;
return this;
}
@Override
public IntVectorFixedBuilder appendInt(int idx, int value) {
values[idx] = value;
return this;
}
private static long ramBytesUsed(int size) {
return size == 1
? ConstantIntVector.RAM_BYTES_USED
: IntArrayVector.BASE_RAM_BYTES_USED + RamUsageEstimator.alignObjectSize(
(long) RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) size * Integer.BYTES
);
}
@Override
public long estimatedBytes() {
return ramBytesUsed(values.length);
}
@Override
public IntVector build() {
if (closed) {
throw new IllegalStateException("already closed");
}
closed = true;
IntVector vector;
if (values.length == 1) {
vector = blockFactory.newConstantIntBlockWith(values[0], 1, preAdjustedBytes).asVector();
} else {
vector = blockFactory.newIntArrayVector(values, values.length, preAdjustedBytes);
}
assert vector.ramBytesUsed() == preAdjustedBytes : "fixed Builders should estimate the exact ram bytes used";
return vector;
}
@Override
public void close() {
if (closed == false) {
// If nextIndex < 0 we've already built the vector
closed = true;
blockFactory.adjustBreaker(-preAdjustedBytes);
}
}
public boolean isReleased() {
return closed;
}
}
| IntVectorFixedBuilder |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/reflection/ReflectorTest.java | {
"start": 8655,
"end": 10207
} | class ____ {
public Integer getProp1() {
return 1;
}
public int getProp2() {
return 0;
}
public int isProp2() {
return 0;
}
}
ReflectorFactory reflectorFactory = new DefaultReflectorFactory();
Reflector reflector = reflectorFactory.findForClass(BeanClass.class);
List<String> getableProps = Arrays.asList(reflector.getGetablePropertyNames());
assertTrue(getableProps.contains("prop1"));
assertTrue(getableProps.contains("prop2"));
assertEquals("prop1", reflector.findPropertyName("PROP1"));
assertEquals("prop2", reflector.findPropertyName("PROP2"));
assertEquals(Integer.class, reflector.getGetterType("prop1"));
Invoker getInvoker = reflector.getGetInvoker("prop1");
assertEquals(Integer.valueOf(1), getInvoker.invoke(new BeanClass(), null));
Class<?> paramType = reflector.getGetterType("prop2");
assertEquals(int.class, paramType);
Invoker ambiguousInvoker = reflector.getGetInvoker("prop2");
when(() -> ambiguousInvoker.invoke(new BeanClass(), new Integer[] { 1 }));
then(caughtException()).isInstanceOf(ReflectionException.class)
.hasMessageContaining("Illegal overloaded getter method with ambiguous type for property 'prop2' in class '"
+ BeanClass.class.getName()
+ "'. This breaks the JavaBeans specification and can cause unpredictable results.");
}
@Test
void shouldTwoGettersWithDifferentTypesThrowException() throws Exception {
@SuppressWarnings("unused")
| BeanClass |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/GuiceNestedCombineTest.java | {
"start": 2803,
"end": 3285
} | class ____ extends AbstractModule {}
public void test() {
foo(new ModuleA(), Modules.combine(new ModuleB(), new ModuleC()));
}
public void foo(Module... xs) {}
}
""")
.addOutputLines(
"Test.java",
"""
import com.google.inject.AbstractModule;
import com.google.inject.Module;
import com.google.inject.util.Modules;
| ModuleC |
java | apache__camel | components/camel-joor/src/main/java/org/apache/camel/language/joor/JavaLanguage.java | {
"start": 1581,
"end": 7618
} | class ____ extends TypedLanguageSupport implements ScriptingLanguage, StaticService {
private static final Logger LOG = LoggerFactory.getLogger(JavaLanguage.class);
private final JoorCompiler compiler;
private final JoorScriptingCompiler scriptingCompiler;
private final Set<String> imports = new TreeSet<>();
private final Map<String, String> aliases = new HashMap<>();
private String configResource = "classpath:camel-joor.properties?optional=true";
private boolean preCompile = true;
private boolean singleQuotes = true;
public JavaLanguage() {
this(new JoorCompiler(), new JoorScriptingCompiler());
}
public JavaLanguage(JoorCompiler compiler, JoorScriptingCompiler scriptingCompiler) {
this.compiler = compiler;
this.scriptingCompiler = scriptingCompiler;
}
public JoorCompiler getCompiler() {
return compiler;
}
public JoorScriptingCompiler getScriptingCompiler() {
return scriptingCompiler;
}
public String getConfigResource() {
return configResource;
}
public void setConfigResource(String configResource) {
this.configResource = configResource;
// trigger configuration to be re-loaded
loadConfiguration();
}
public boolean isPreCompile() {
return preCompile;
}
public void setPreCompile(boolean preCompile) {
this.preCompile = preCompile;
}
public boolean isSingleQuotes() {
return singleQuotes;
}
public void setSingleQuotes(boolean singleQuotes) {
this.singleQuotes = singleQuotes;
}
@Override
@SuppressWarnings("unchecked")
public <T> T evaluate(String script, Map<String, Object> bindings, Class<T> resultType) {
Object out;
JoorScriptingMethod target = scriptingCompiler.compile(getCamelContext(), script, bindings, singleQuotes);
try {
out = target.evaluate(bindings);
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeException(e);
}
if (out != null && resultType != null) {
return getCamelContext().getTypeConverter().convertTo(resultType, out);
} else {
return (T) out;
}
}
@Override
public Predicate createPredicate(String expression) {
return ExpressionToPredicateAdapter.toPredicate(createExpression(expression));
}
@Override
public Expression createExpression(String expression) {
return createExpression(expression, null);
}
@Override
public Predicate createPredicate(String expression, Object[] properties) {
return ExpressionToPredicateAdapter.toPredicate(createExpression(expression, properties));
}
@Override
public Expression createExpression(String expression, Object[] properties) {
JoorExpression answer = new JoorExpression(expression);
answer.setCompiler(compiler);
answer.setResultType(property(Class.class, properties, 0, null));
answer.setPreCompile(property(boolean.class, properties, 1, preCompile));
answer.setSingleQuotes(property(boolean.class, properties, 2, singleQuotes));
if (getCamelContext() != null) {
answer.init(getCamelContext());
}
return answer;
}
@Override
public void init() {
// attempt to load optional configuration from classpath
loadConfiguration();
CamelContextAware.trySetCamelContext(compiler, getCamelContext());
CamelContextAware.trySetCamelContext(scriptingCompiler, getCamelContext());
}
@Override
public void start() {
ServiceHelper.startService(compiler, scriptingCompiler);
}
@Override
public void stop() {
ServiceHelper.stopService(compiler, scriptingCompiler);
}
private void loadConfiguration() {
// attempt to load configuration
String loaded = ScriptHelper.resolveOptionalExternalScript(getCamelContext(), "resource:" + configResource);
int counter1 = 0;
int counter2 = 0;
if (loaded != null) {
String[] lines = loaded.split("\n");
for (String line : lines) {
line = line.trim();
// skip comments
if (line.startsWith("#")) {
continue;
}
// imports
if (line.startsWith("import ")) {
imports.add(line);
counter1++;
continue;
}
// aliases as key=value
String key = StringHelper.before(line, "=");
String value = StringHelper.after(line, "=");
if (key != null) {
key = key.trim();
}
if (value != null) {
value = value.trim();
}
if (key != null && value != null) {
this.aliases.put(key, value);
counter2++;
}
}
}
if (counter1 > 0 || counter2 > 0) {
LOG.info("Loaded jOOR language imports: {} and aliases: {} from configuration: {}", counter1, counter2,
configResource);
}
if (compiler.getAliases() == null) {
compiler.setAliases(aliases);
} else {
compiler.getAliases().putAll(aliases);
}
if (compiler.getImports() == null) {
compiler.setImports(imports);
} else {
compiler.getImports().addAll(imports);
}
if (scriptingCompiler.getAliases() == null) {
scriptingCompiler.setAliases(aliases);
} else {
scriptingCompiler.getAliases().putAll(aliases);
}
if (scriptingCompiler.getImports() == null) {
scriptingCompiler.setImports(imports);
} else {
scriptingCompiler.getImports().addAll(imports);
}
}
}
| JavaLanguage |
java | elastic__elasticsearch | x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunner.java | {
"start": 4547,
"end": 40683
} | class ____ {
private static final Logger logger = LogManager.getLogger(EnrichPolicyRunner.class);
static final String ENRICH_POLICY_NAME_FIELD_NAME = "enrich_policy_name";
static final String ENRICH_POLICY_TYPE_FIELD_NAME = "enrich_policy_type";
static final String ENRICH_MATCH_FIELD_NAME = "enrich_match_field";
static final String ENRICH_README_FIELD_NAME = "enrich_readme";
public static final String ENRICH_MIN_NUMBER_OF_REPLICAS_NAME = "enrich.min_number_of_replicas";
static final String ENRICH_INDEX_README_TEXT = "This index is managed by Elasticsearch and should not be modified in any way.";
/**
* Timeout for enrich-related requests that interact with the master node. Possibly this should be longer and/or configurable.
*/
static final TimeValue ENRICH_MASTER_REQUEST_TIMEOUT = TimeValue.THIRTY_SECONDS;
private final ProjectId projectId;
private final String policyName;
private final EnrichPolicy policy;
private final ExecuteEnrichPolicyTask task;
private final ClusterService clusterService;
private final IndicesService indicesService;
private final Client client;
private final IndexNameExpressionResolver indexNameExpressionResolver;
private final String enrichIndexName;
private final int fetchSize;
private final int maxForceMergeAttempts;
EnrichPolicyRunner(
ProjectId projectId,
String policyName,
EnrichPolicy policy,
ExecuteEnrichPolicyTask task,
ClusterService clusterService,
IndicesService indicesService,
Client client,
IndexNameExpressionResolver indexNameExpressionResolver,
String enrichIndexName,
int fetchSize,
int maxForceMergeAttempts
) {
this.projectId = projectId;
this.policyName = Objects.requireNonNull(policyName);
this.policy = Objects.requireNonNull(policy);
this.task = Objects.requireNonNull(task);
this.clusterService = Objects.requireNonNull(clusterService);
this.indicesService = indicesService;
this.client = wrapClient(client, policyName, task, clusterService);
this.indexNameExpressionResolver = Objects.requireNonNull(indexNameExpressionResolver);
this.enrichIndexName = enrichIndexName;
this.fetchSize = fetchSize;
this.maxForceMergeAttempts = maxForceMergeAttempts;
}
public void run(ActionListener<ExecuteEnrichPolicyStatus> listener) {
logger.info("Policy [{}]: Running enrich policy", policyName);
task.setStatus(new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.RUNNING));
SubscribableListener
.<GetIndexResponse>newForked(l -> {
// Collect the source index information
final String[] sourceIndices = policy.getIndices().toArray(new String[0]);
logger.debug("Policy [{}]: Checking source indices [{}]", policyName, sourceIndices);
GetIndexRequest getIndexRequest = new GetIndexRequest(ENRICH_MASTER_REQUEST_TIMEOUT).indices(sourceIndices);
// This call does not set the origin to ensure that the user executing the policy has permission to access the source index
client.admin().indices().getIndex(getIndexRequest, l);
})
.<CreateIndexResponse>andThen((l, getIndexResponse) -> {
validateMappings(getIndexResponse);
prepareAndCreateEnrichIndex(toMappings(getIndexResponse), clusterService.getSettings(), l);
})
.andThen(this::prepareReindexOperation)
.andThen(this::transferDataToEnrichIndex)
.andThen(this::forceMergeEnrichIndex)
.andThen(this::setIndexReadOnly)
.andThen(this::waitForIndexGreen)
.andThen(this::updateEnrichPolicyAlias)
.andThenApply(r -> {
logger.info("Policy [{}]: Policy execution complete", policyName);
ExecuteEnrichPolicyStatus completeStatus = new ExecuteEnrichPolicyStatus(ExecuteEnrichPolicyStatus.PolicyPhases.COMPLETE);
task.setStatus(completeStatus);
return completeStatus;
})
.addListener(listener);
}
private static List<Map<String, Object>> toMappings(GetIndexResponse response) {
return response.mappings().values().stream().map(MappingMetadata::getSourceAsMap).collect(Collectors.toList());
}
private Map<String, Object> getMappings(final GetIndexResponse getIndexResponse, final String sourceIndexName) {
Map<String, MappingMetadata> mappings = getIndexResponse.mappings();
MappingMetadata indexMapping = mappings.get(sourceIndexName);
if (MappingMetadata.EMPTY_MAPPINGS.equals(indexMapping)) {
throw new ElasticsearchException(
"Enrich policy execution for [{}] failed. No mapping available on source [{}] included in [{}]",
policyName,
sourceIndexName,
policy.getIndices()
);
}
return indexMapping.sourceAsMap();
}
private void validateMappings(final GetIndexResponse getIndexResponse) {
String[] sourceIndices = getIndexResponse.getIndices();
logger.debug("Policy [{}]: Validating [{}] source mappings", policyName, sourceIndices);
for (String sourceIndex : sourceIndices) {
Map<String, Object> mapping = getMappings(getIndexResponse, sourceIndex);
validateMappings(policyName, policy, sourceIndex, mapping);
}
}
static void validateMappings(
final String policyName,
final EnrichPolicy policy,
final String sourceIndex,
final Map<String, Object> mapping
) {
// First ensure mapping is set
if (mapping.get("properties") == null) {
throw new ElasticsearchException(
"Enrich policy execution for [{}] failed. Could not read mapping for source [{}] included by pattern [{}]",
policyName,
sourceIndex,
policy.getIndices()
);
}
// Validate the key and values
try {
validateAndGetMappingTypeAndFormat(mapping, policy.getMatchField(), true);
for (String valueFieldName : policy.getEnrichFields()) {
validateAndGetMappingTypeAndFormat(mapping, valueFieldName, false);
}
} catch (ElasticsearchException e) {
throw new ElasticsearchException(
"Enrich policy execution for [{}] failed while validating field mappings for index [{}]",
e,
policyName,
sourceIndex
);
}
}
private record MappingTypeAndFormat(String type, String format) {}
private static MappingTypeAndFormat validateAndGetMappingTypeAndFormat(
String fieldName,
EnrichPolicy policy,
boolean strictlyRequired,
List<Map<String, Object>> sourceMappings
) {
var fieldMappings = sourceMappings.stream()
.map(mapping -> validateAndGetMappingTypeAndFormat(mapping, fieldName, strictlyRequired))
.filter(Objects::nonNull)
.toList();
Set<String> types = fieldMappings.stream().map(tf -> tf.type).collect(Collectors.toSet());
if (types.size() > 1) {
if (strictlyRequired) {
throw new ElasticsearchException(
"Multiple distinct mapping types for field '{}' - indices({}) types({})",
fieldName,
Strings.collectionToCommaDelimitedString(policy.getIndices()),
Strings.collectionToCommaDelimitedString(types)
);
}
return null;
}
if (types.isEmpty()) {
return null;
}
Set<String> formats = fieldMappings.stream().map(tf -> tf.format).filter(Objects::nonNull).collect(Collectors.toSet());
if (formats.size() > 1) {
if (strictlyRequired) {
throw new ElasticsearchException(
"Multiple distinct formats specified for field '{}' - indices({}) format entries({})",
policy.getMatchField(),
Strings.collectionToCommaDelimitedString(policy.getIndices()),
Strings.collectionToCommaDelimitedString(formats)
);
}
return null;
}
return new MappingTypeAndFormat(Iterables.get(types, 0), formats.isEmpty() ? null : Iterables.get(formats, 0));
}
@SuppressWarnings("unchecked")
private static <T> T extractValues(Map<String, Object> properties, String path) {
return (T) properties.get(path);
}
    /**
     * Walks one index's mapping {@code properties} along the dotted {@code fieldName} path and returns the
     * leaf field's type/format.
     *
     * @param properties    the mapping "properties" map of a single source index
     * @param fieldName     dotted field path, e.g. {@code a.b.c}; must be non-empty
     * @param fieldRequired when {@code true}, a broken or missing path throws; otherwise {@code null} is returned
     * @return the leaf type/format (type defaults to "object" when unspecified), or {@code null} when absent
     * @throws ElasticsearchException when the path crosses a non-object field, or (if required) cannot be followed
     */
    private static MappingTypeAndFormat validateAndGetMappingTypeAndFormat(
        Map<String, Object> properties,
        String fieldName,
        boolean fieldRequired
    ) {
        assert Strings.isEmpty(fieldName) == false : "Field name cannot be null or empty";
        String[] fieldParts = fieldName.split("\\.");
        // Tracks the dotted path of the already-traversed parent, for error messages only.
        StringBuilder parent = new StringBuilder();
        Map<String, Object> currentField = properties;
        boolean onRoot = true;
        for (String fieldPart : fieldParts) {
            // Ensure that the current field is of object type only (not a nested type or a non compound field)
            Object type = currentField.get("type");
            if (type != null && "object".equals(type) == false) {
                throw new ElasticsearchException(
                    "Could not traverse mapping to field [{}]. The [{}] field must be regular object but was [{}].",
                    fieldName,
                    onRoot ? "root" : parent.toString(),
                    type
                );
            }
            // Descend into the sub-field map; absence means the path cannot be followed further.
            Map<String, Object> currentProperties = extractValues(currentField, "properties");
            if (currentProperties == null) {
                if (fieldRequired) {
                    throw new ElasticsearchException(
                        "Could not traverse mapping to field [{}]. Expected the [{}] field to have sub fields but none were configured.",
                        fieldName,
                        onRoot ? "root" : parent.toString()
                    );
                } else {
                    return null;
                }
            }
            currentField = extractValues(currentProperties, fieldPart);
            if (currentField == null) {
                if (fieldRequired) {
                    throw new ElasticsearchException(
                        "Could not traverse mapping to field [{}]. Could not find the [{}] field under [{}]",
                        fieldName,
                        fieldPart,
                        onRoot ? "root" : parent.toString()
                    );
                } else {
                    return null;
                }
            }
            // Extend the parent path only after the first (root-level) segment has been consumed.
            if (onRoot) {
                onRoot = false;
            } else {
                parent.append(".");
            }
            parent.append(fieldPart);
        }
        if (currentField == null) {
            return null;
        }
        // A mapping entry without an explicit "type" is an object field.
        final String type = (String) currentField.getOrDefault("type", "object");
        final String format = (String) currentField.get("format");
        return new MappingTypeAndFormat(type, format);
    }
    // Mapping types accepted for the match field of a "range" enrich policy.
    static final Set<String> RANGE_TYPES = Set.of("integer_range", "float_range", "long_range", "double_range", "ip_range", "date_range");
    /**
     * Builds the enrich-index mapping entry for the policy's match field, based on the policy type and the
     * field's type/format in the source indices.
     *
     * @throws ElasticsearchException when the field's mapping is absent/inconsistent, when a range policy's
     *         match field is not a range type, or when the policy type is unknown
     */
    static Map<String, Object> mappingForMatchField(EnrichPolicy policy, List<Map<String, Object>> sourceMappings) {
        // Strict validation: the match field must exist with one consistent type across all source indices.
        MappingTypeAndFormat typeAndFormat = validateAndGetMappingTypeAndFormat(policy.getMatchField(), policy, true, sourceMappings);
        if (typeAndFormat == null) {
            throw new ElasticsearchException(
                "Match field '{}' doesn't have a correct mapping type for policy type '{}'",
                policy.getMatchField(),
                policy.getType()
            );
        }
        return switch (policy.getType()) {
            // doc_values are disabled: the match field is only searched, never aggregated/sorted on.
            case EnrichPolicy.MATCH_TYPE -> Map.of("type", "keyword", "doc_values", false);
            case EnrichPolicy.GEO_MATCH_TYPE -> Map.of("type", "geo_shape");
            case EnrichPolicy.RANGE_TYPE -> {
                if (RANGE_TYPES.contains(typeAndFormat.type) == false) {
                    throw new ElasticsearchException(
                        "Field '{}' has type [{}] which doesn't appear to be a range type",
                        policy.getMatchField(),
                        typeAndFormat.type
                    );
                }
                Map<String, Object> mapping = Maps.newMapWithExpectedSize(3);
                mapping.put("type", typeAndFormat.type);
                mapping.put("doc_values", false);
                // Preserve the source format (e.g. a date_range's date format) so values parse identically.
                if (typeAndFormat.format != null) {
                    mapping.put("format", typeAndFormat.format);
                }
                yield mapping;
            }
            default -> throw new ElasticsearchException("Unrecognized enrich policy type [{}]", policy.getType());
        };
    }
    /**
     * Renders the full mapping for the new enrich index: the match field, each enrich field, a disabled
     * dynamic mapping, an enabled _source, and _meta bookkeeping fields identifying the policy.
     *
     * @param sourceMappings one mapping (as a nested map) per source index
     * @return the mapping as an {@link XContentBuilder}
     */
    private XContentBuilder createEnrichMapping(List<Map<String, Object>> sourceMappings) {
        Map<String, Map<String, Object>> fieldMappings = new HashMap<>();
        Map<String, Object> mappingForMatchField = mappingForMatchField(policy, sourceMappings);
        // Throw-away MapperService used only to probe whether field types accept "index": false.
        MapperService mapperService = createMapperServiceForValidation(indicesService, enrichIndexName);
        for (String enrichField : policy.getEnrichFields()) {
            if (enrichField.equals(policy.getMatchField())) {
                // The match field is also an enrich field: copy the mapping and re-enable doc_values
                // (removing the explicit "doc_values": false restores the default).
                mappingForMatchField = new HashMap<>(mappingForMatchField);
                mappingForMatchField.remove("doc_values"); // enable doc_values
            } else {
                // Non-strict: enrich fields that are missing or inconsistent are simply left unmapped.
                var typeAndFormat = validateAndGetMappingTypeAndFormat(enrichField, policy, false, sourceMappings);
                if (typeAndFormat != null) {
                    Map<String, Object> mapping = Maps.newMapWithExpectedSize(3);
                    mapping.put("type", typeAndFormat.type);
                    if (typeAndFormat.format != null) {
                        mapping.put("format", typeAndFormat.format);
                    }
                    // Enrich fields are only returned, never searched, so disable indexing where supported.
                    if (isIndexableField(mapperService, enrichField, typeAndFormat.type, mapping)) {
                        mapping.put("index", false);
                    }
                    fieldMappings.put(enrichField, mapping);
                }
            }
        }
        fieldMappings.put(policy.getMatchField(), mappingForMatchField);
        // Enable _source on enrich index. Explicitly mark key mapping type.
        try {
            XContentBuilder builder = JsonXContent.contentBuilder();
            builder.startObject();
            {
                builder.startObject(MapperService.SINGLE_MAPPING_NAME);
                {
                    // Reject unexpected fields rather than dynamically mapping them.
                    builder.field("dynamic", false);
                    builder.startObject("_source");
                    {
                        builder.field("enabled", true);
                    }
                    builder.endObject();
                    builder.startObject("properties");
                    {
                        builder.mapContents(fieldMappings);
                    }
                    builder.endObject();
                    // _meta records which policy produced this index, verified again before promotion.
                    builder.startObject("_meta");
                    {
                        builder.field(ENRICH_README_FIELD_NAME, ENRICH_INDEX_README_TEXT);
                        builder.field(ENRICH_POLICY_NAME_FIELD_NAME, policyName);
                        builder.field(ENRICH_MATCH_FIELD_NAME, policy.getMatchField());
                        builder.field(ENRICH_POLICY_TYPE_FIELD_NAME, policy.getType());
                    }
                    builder.endObject();
                }
                builder.endObject();
            }
            builder.endObject();
            return builder;
        } catch (IOException ioe) {
            throw new UncheckedIOException("Could not render enrich mapping", ioe);
        }
    }
private static MapperService createMapperServiceForValidation(IndicesService indicesService, String index) {
try {
final Settings idxSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())
.build();
IndexMetadata indexMetadata = IndexMetadata.builder(index).settings(idxSettings).numberOfShards(1).numberOfReplicas(0).build();
return indicesService.createIndexMapperServiceForValidation(indexMetadata);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
    /**
     * Probes whether the mapper for {@code type} accepts the {@code "index": false} parameter.
     * The type parser removes parameters it consumes from the supplied map, so an absent "index" key after
     * parsing means the parameter was accepted; a parse failure means the type rejects it.
     *
     * @return {@code true} when indexing can be disabled for this field type
     */
    static boolean isIndexableField(MapperService mapperService, String field, String type, Map<String, Object> properties) {
        // Work on a copy so the caller's mapping map is not mutated by the parser.
        var withIndexParameter = new HashMap<>(properties);
        withIndexParameter.put("index", false);
        Mapper.TypeParser parser = mapperService.getMapperRegistry().getMapperParser(type, IndexVersion.current());
        try {
            parser.parse(field, withIndexParameter, mapperService.parserContext());
            return withIndexParameter.containsKey("index") == false;
        } catch (MapperParsingException e) {
            // hitting the mapper parsing exception means this field doesn't accept `index:false`.
            assert e.getMessage().contains("unknown parameter [index]") : e;
            return false;
        }
    }
    /**
     * Creates the new enrich index with its rendered mapping and tuned settings, notifying {@code listener}
     * with the creation response.
     *
     * @param mappings the source-index mappings used to derive the enrich-index mapping
     * @param settings cluster/plugin settings; consulted for the minimum replica count
     * @param listener receives the create-index response or failure
     */
    private void prepareAndCreateEnrichIndex(
        List<Map<String, Object>> mappings,
        Settings settings,
        ActionListener<CreateIndexResponse> listener
    ) {
        int numberOfReplicas = settings.getAsInt(ENRICH_MIN_NUMBER_OF_REPLICAS_NAME, 0);
        Settings enrichIndexSettings = Settings.builder()
            .put("index.number_of_shards", 1)
            .put("index.number_of_replicas", numberOfReplicas)
            // No changes will be made to an enrich index after policy execution, so disable automatic refreshes:
            .put("index.refresh_interval", -1)
            // This disables eager global ordinals loading for all fields:
            .put("index.warmer.enabled", false)
            .build();
        CreateIndexRequest createEnrichIndexRequest = new CreateIndexRequest(enrichIndexName, enrichIndexSettings);
        createEnrichIndexRequest.mapping(createEnrichMapping(mappings));
        logger.debug("Policy [{}]: Creating new enrich index [{}]", policyName, enrichIndexName);
        enrichOriginClient().admin().indices().create(createEnrichIndexRequest, listener);
    }
private void prepareReindexOperation(ActionListener<AcknowledgedResponse> listener) {
// Check to make sure that the enrich pipeline exists, and create it if it is missing.
if (EnrichPolicyReindexPipeline.exists(clusterService.state().getMetadata().getProject(projectId))) {
listener.onResponse(null);
} else {
EnrichPolicyReindexPipeline.create(enrichOriginClient(), listener);
}
}
    /**
     * Reindexes the policy's source data into the new enrich index, keeping only the match and enrich
     * fields. Any bulk or search failure during the reindex fails the whole run.
     *
     * @param listener completed with {@code null} on success, or a failure describing the reindex problem
     */
    private void transferDataToEnrichIndex(ActionListener<Void> listener) {
        logger.debug("Policy [{}]: Transferring source data to new enrich index [{}]", policyName, enrichIndexName);
        // Filter down the source fields to just the ones required by the policy
        final Set<String> retainFields = new HashSet<>();
        retainFields.add(policy.getMatchField());
        retainFields.addAll(policy.getEnrichFields());
        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
        searchSourceBuilder.size(fetchSize);
        searchSourceBuilder.fetchSource(retainFields.toArray(new String[0]), new String[0]);
        // Optional policy query narrows which source documents are transferred.
        if (policy.getQuery() != null) {
            searchSourceBuilder.query(QueryBuilders.wrapperQuery(policy.getQuery().getQuery()));
        }
        ReindexRequest reindexRequest = new ReindexRequest().setDestIndex(enrichIndexName)
            .setSourceIndices(policy.getIndices().toArray(new String[0]));
        reindexRequest.getSearchRequest().source(searchSourceBuilder);
        reindexRequest.getDestination().source(new BytesArray(new byte[0]), XContentType.SMILE);
        reindexRequest.getDestination().routing("discard");
        // The pipeline set up by prepareReindexOperation() rewrites documents for the enrich index.
        reindexRequest.getDestination().setPipeline(EnrichPolicyReindexPipeline.pipelineName());
        client.execute(EnrichReindexAction.INSTANCE, reindexRequest, new DelegatingActionListener<>(listener) {
            @Override
            public void onResponse(BulkByScrollResponse bulkByScrollResponse) {
                // Do we want to fail the request if there were failures during the reindex process?
                if (bulkByScrollResponse.getBulkFailures().size() > 0) {
                    logger.warn(
                        "Policy [{}]: encountered [{}] bulk failures. Turn on DEBUG logging for details.",
                        policyName,
                        bulkByScrollResponse.getBulkFailures().size()
                    );
                    if (logger.isDebugEnabled()) {
                        for (BulkItemResponse.Failure failure : bulkByScrollResponse.getBulkFailures()) {
                            logger.debug(
                                () -> format(
                                    "Policy [%s]: bulk index failed for index [%s], id [%s]",
                                    policyName,
                                    failure.getIndex(),
                                    failure.getId()
                                ),
                                failure.getCause()
                            );
                        }
                    }
                    delegate.onFailure(new ElasticsearchException("Encountered bulk failures during reindex process"));
                } else if (bulkByScrollResponse.getSearchFailures().size() > 0) {
                    logger.warn(
                        "Policy [{}]: encountered [{}] search failures. Turn on DEBUG logging for details.",
                        policyName,
                        bulkByScrollResponse.getSearchFailures().size()
                    );
                    if (logger.isDebugEnabled()) {
                        for (ScrollableHitSource.SearchFailure failure : bulkByScrollResponse.getSearchFailures()) {
                            logger.debug(
                                () -> format(
                                    "Policy [%s]: search failed for index [%s], shard [%s] on node [%s]",
                                    policyName,
                                    failure.getIndex(),
                                    failure.getShardId(),
                                    failure.getNodeId()
                                ),
                                failure.getReason()
                            );
                        }
                    }
                    delegate.onFailure(new ElasticsearchException("Encountered search failures during reindex process"));
                } else {
                    // Clean run: report how many documents landed in the enrich index.
                    logger.info(
                        "Policy [{}]: Transferred [{}] documents to enrich index [{}]",
                        policyName,
                        bulkByScrollResponse.getCreated(),
                        enrichIndexName
                    );
                    delegate.onResponse(null);
                }
            }
        });
    }
    /** Force merges the enrich index down to a single segment, starting at attempt 1. */
    private void forceMergeEnrichIndex(ActionListener<Void> listener) {
        forceMergeEnrichIndexOrRetry(1, listener);
    }
private void forceMergeEnrichIndexOrRetry(final int attempt, ActionListener<Void> listener) {
logger.debug(
"Policy [{}]: Force merging newly created enrich index [{}] (Attempt {}/{})",
policyName,
enrichIndexName,
attempt,
maxForceMergeAttempts
);
SubscribableListener
.<BroadcastResponse>newForked(
l -> enrichOriginClient().admin().indices().forceMerge(new ForceMergeRequest(enrichIndexName).maxNumSegments(1), l)
)
.andThen(this::refreshEnrichIndex)
.andThen(this::afterRefreshEnrichIndex)
.andThen(this::getSegments)
.andThenApply(this::getSegmentCount)
.addListener(
// delegateFailureAndWrap() rather than andThen().addListener() to avoid building unnecessary O(#retries) listener chain
listener.delegateFailureAndWrap((l, segmentCount) -> {
if (segmentCount > 1) {
int nextAttempt = attempt + 1;
if (nextAttempt > maxForceMergeAttempts) {
throw new ElasticsearchException(
"Force merging index [{}] attempted [{}] times but did not result in one segment.",
enrichIndexName,
attempt,
maxForceMergeAttempts
);
} else {
logger.debug(
"Policy [{}]: Force merge result contains more than one segment [{}], retrying (attempt {}/{})",
policyName,
segmentCount,
nextAttempt,
maxForceMergeAttempts
);
// TransportForceMergeAction always forks so no risk of stack overflow from this recursion
forceMergeEnrichIndexOrRetry(nextAttempt, l);
}
} else {
l.onResponse(null);
}
})
);
}
private void refreshEnrichIndex(ActionListener<BroadcastResponse> listener) {
logger.debug("Policy [{}]: Refreshing enrich index [{}]", policyName, enrichIndexName);
enrichOriginClient().admin().indices().refresh(new RefreshRequest(enrichIndexName), listener);
}
    // hook to allow testing force-merge retries
    /** No-op extension point invoked between the refresh and the segment check; overridden in tests. */
    protected void afterRefreshEnrichIndex(ActionListener<Void> listener) {
        listener.onResponse(null);
    }
private void getSegments(ActionListener<IndicesSegmentResponse> listener) {
enrichOriginClient().admin().indices().segments(new IndicesSegmentsRequest(enrichIndexName), listener);
}
    /**
     * Extracts the segment count of the enrich index's single primary shard from a segments response,
     * logging shard-level failures along the way.
     *
     * @return the number of segments in the primary shard
     * @throws ElasticsearchException when segment information for the index cannot be located
     */
    private int getSegmentCount(IndicesSegmentResponse indicesSegmentResponse) {
        int failedShards = indicesSegmentResponse.getFailedShards();
        if (failedShards > 0) {
            // Encountered a problem while querying the segments for the enrich index. Try and surface the problem in the log.
            logger.warn(
                "Policy [{}]: Encountered [{}] shard level failures while querying the segments for enrich index [{}]. "
                    + "Turn on DEBUG logging for details.",
                policyName,
                failedShards,
                enrichIndexName
            );
            if (logger.isDebugEnabled()) {
                DefaultShardOperationFailedException[] shardFailures = indicesSegmentResponse.getShardFailures();
                int failureNumber = 1;
                String logPrefix = "Policy [" + policyName + "]: Encountered shard failure [";
                String logSuffix = " of "
                    + shardFailures.length
                    + "] while querying segments for enrich index ["
                    + enrichIndexName
                    + "]. Shard [";
                for (DefaultShardOperationFailedException shardFailure : shardFailures) {
                    logger.debug(
                        logPrefix + failureNumber + logSuffix + shardFailure.index() + "][" + shardFailure.shardId() + "]",
                        shardFailure.getCause()
                    );
                    failureNumber++;
                }
            }
        }
        IndexSegments indexSegments = indicesSegmentResponse.getIndices().get(enrichIndexName);
        if (indexSegments == null) {
            // Distinguish "no info at all" from "info missing because a shard failed".
            if (indicesSegmentResponse.getShardFailures().length == 0) {
                throw new ElasticsearchException("Could not locate segment information for newly created index [{}]", enrichIndexName);
            } else {
                DefaultShardOperationFailedException shardFailure = indicesSegmentResponse.getShardFailures()[0];
                throw new ElasticsearchException(
                    "Could not obtain segment information for newly created index [{}]; shard info [{}][{}]",
                    shardFailure.getCause(),
                    enrichIndexName,
                    shardFailure.index(),
                    shardFailure.shardId()
                );
            }
        }
        // The enrich index is created with exactly one shard and zero replicas (see prepareAndCreateEnrichIndex).
        Map<Integer, IndexShardSegments> indexShards = indexSegments.getShards();
        assert indexShards.size() == 1 : "Expected enrich index to contain only one shard";
        ShardSegments[] shardSegments = indexShards.get(0).shards();
        assert shardSegments.length == 1 : "Expected enrich index to contain no replicas at this point";
        ShardSegments primarySegments = shardSegments[0];
        return primarySegments.getSegments().size();
    }
private void setIndexReadOnly(ActionListener<AcknowledgedResponse> listener) {
logger.debug("Policy [{}]: Setting new enrich index [{}] to be read only", policyName, enrichIndexName);
UpdateSettingsRequest request = new UpdateSettingsRequest(enrichIndexName).setPreserveExisting(true)
.settings(Settings.builder().put("index.auto_expand_replicas", "0-all").put("index.blocks.write", "true"));
enrichOriginClient().admin().indices().updateSettings(request, listener);
}
private void waitForIndexGreen(ActionListener<ClusterHealthResponse> listener) {
ClusterHealthRequest request = new ClusterHealthRequest(ENRICH_MASTER_REQUEST_TIMEOUT, enrichIndexName).waitForGreenStatus();
enrichOriginClient().admin().cluster().health(request, listener);
}
    /**
     * Ensures that the index we are about to promote at the end of a policy execution exists, is intact, and has not been damaged
     * during the policy execution. In some cases, it is possible for the index being constructed to be deleted during the policy execution
     * and recreated with invalid mappings/data. We validate that the mapping exists and that it contains the expected meta fields on it to
     * guard against accidental removal and recreation during policy execution.
     *
     * @param destinationIndexName the enrich index about to be promoted
     * @param project              project metadata used to look up the index and its mapping
     * @throws IndexNotFoundException    when the index no longer exists
     * @throws ResourceNotFoundException when the index has no mapping
     * @throws ElasticsearchException    when the _meta policy-name marker is missing or mismatched
     */
    private void validateIndexBeforePromotion(String destinationIndexName, ProjectMetadata project) {
        IndexMetadata destinationIndex = project.index(destinationIndexName);
        if (destinationIndex == null) {
            throw new IndexNotFoundException(
                "was not able to promote it as part of executing enrich policy [" + policyName + "]",
                destinationIndexName
            );
        }
        MappingMetadata mapping = destinationIndex.mapping();
        if (mapping == null) {
            throw new ResourceNotFoundException(
                "Could not locate mapping for enrich index [{}] while completing [{}] policy run",
                destinationIndexName,
                policyName
            );
        }
        // The _meta block written by createEnrichMapping() must still name this policy.
        Map<String, Object> mappingSource = mapping.sourceAsMap();
        Object meta = mappingSource.get("_meta");
        if (meta instanceof Map<?, ?> metaMap) {
            Object policyNameMetaField = metaMap.get(ENRICH_POLICY_NAME_FIELD_NAME);
            if (policyNameMetaField == null) {
                throw new ElasticsearchException(
                    "Could not verify enrich index [{}] metadata before completing [{}] policy run: policy name meta field missing",
                    destinationIndexName,
                    policyName
                );
            } else if (policyName.equals(policyNameMetaField) == false) {
                throw new ElasticsearchException(
                    "Could not verify enrich index [{}] metadata before completing [{}] policy run: policy name meta field does not "
                        + "match expected value of [{}], was [{}]",
                    destinationIndexName,
                    policyName,
                    policyName,
                    policyNameMetaField.toString()
                );
            }
        } else {
            throw new ElasticsearchException(
                "Could not verify enrich index [{}] metadata before completing [{}] policy run: mapping meta field missing",
                destinationIndexName,
                policyName
            );
        }
    }
    /**
     * Promotes the new enrich index by atomically swinging the policy's base alias: removes the alias from
     * any previously promoted indices and adds it to the new index in a single aliases request.
     *
     * @param listener receives the aliases response or failure
     */
    private void updateEnrichPolicyAlias(ActionListener<IndicesAliasesResponse> listener) {
        String enrichIndexBase = EnrichPolicy.getBaseName(policyName);
        logger.debug("Policy [{}]: Promoting new enrich index [{}] to alias [{}]", policyName, enrichIndexName, enrichIndexBase);
        GetAliasesRequest aliasRequest = new GetAliasesRequest(ENRICH_MASTER_REQUEST_TIMEOUT, enrichIndexBase);
        final var project = clusterService.state().metadata().getProject(projectId);
        // Fail fast if the index was deleted/recreated mid-run before touching any aliases.
        validateIndexBeforePromotion(enrichIndexName, project);
        String[] concreteIndices = indexNameExpressionResolver.concreteIndexNamesWithSystemIndexAccess(project, aliasRequest);
        String[] aliases = aliasRequest.aliases();
        IndicesAliasesRequest aliasToggleRequest = new IndicesAliasesRequest(ENRICH_MASTER_REQUEST_TIMEOUT, ENRICH_MASTER_REQUEST_TIMEOUT);
        String[] indices = project.findAliases(aliases, concreteIndices).keySet().toArray(new String[0]);
        // Remove the alias from any older enrich indices so the swap is atomic within one request.
        if (indices.length > 0) {
            aliasToggleRequest.addAliasAction(IndicesAliasesRequest.AliasActions.remove().indices(indices).alias(enrichIndexBase));
        }
        aliasToggleRequest.addAliasAction(IndicesAliasesRequest.AliasActions.add().index(enrichIndexName).alias(enrichIndexBase));
        enrichOriginClient().admin().indices().aliases(aliasToggleRequest, listener);
    }
    /**
     * Use this client to access information at the access level of the Enrich plugin, rather than at the access level of the user.
     * For example, use this client to access system indices (such as `.enrich*` indices).
     *
     * @return the wrapped client carrying the {@code ENRICH_ORIGIN} header
     */
    private Client enrichOriginClient() {
        return new OriginSettingClient(client, ENRICH_ORIGIN);
    }
    /**
     * Wraps {@code in} so every action executed on behalf of a policy run checks for task cancellation and
     * is parented under the policy's task (propagating cancellation to child requests such as reindex).
     */
    private static Client wrapClient(Client in, String policyName, ExecuteEnrichPolicyTask task, ClusterService clusterService) {
        // Filter client in order to:
        // 1) Check on transport action call that policy runner does whether the task has been cancelled
        // 2) Set the enrich policy task as parent task, so if other API calls (e.g. reindex) are cancellable then
        // the corresponding tasks of these API calls get cancelled as well.
        return new FilterClient(in) {
            @Override
            protected <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
                ActionType<Response> action,
                Request request,
                ActionListener<Response> listener
            ) {
                // Record the request class as the task's current step for status reporting.
                String requestStep = request.getClass().getSimpleName();
                task.setStep(requestStep);
                if (task.isCancelled()) {
                    String message = "cancelled policy execution [" + policyName + "], status [" + Strings.toString(task.getStatus()) + "]";
                    listener.onFailure(new TaskCancelledException(message));
                    return;
                }
                request.setParentTask(clusterService.localNode().getId(), task.getId());
                super.doExecute(action, request, listener);
            }
        };
    }
}
| EnrichPolicyRunner |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/StringUtilsSubstringTest.java | {
"start": 1112,
"end": 18013
} | class ____ extends AbstractLangTest {
    // Shared fixture strings used across the substring/left/right/mid test methods.
    private static final String FOO = "foo";
    private static final String BAR = "bar";
    private static final String BAZ = "baz";
    private static final String FOOBAR = "foobar";
    private static final String SENTENCE = "foo bar baz";
    /** Tests {@code StringUtils.countMatches(CharSequence, char)}: null input and repeated characters. */
    @Test
    void testCountMatches_char() {
        assertEquals(0, StringUtils.countMatches(null, 'D'));
        assertEquals(5, StringUtils.countMatches("one long someone sentence of one", ' '));
        assertEquals(6, StringUtils.countMatches("one long someone sentence of one", 'o'));
        // NOTE(review): this call uses the String overload, not the char one — presumably intentional
        // coverage of non-overlapping "ooo" matches, but confirm it belongs in the char test.
        assertEquals(4, StringUtils.countMatches("oooooooooooo", "ooo"));
    }
    /** Tests {@code StringUtils.countMatches(CharSequence, CharSequence)}: nulls, empties, and non-overlapping matches. */
    @Test
    void testCountMatches_String() {
        assertEquals(0, StringUtils.countMatches(null, null));
        assertEquals(0, StringUtils.countMatches("blah", null));
        assertEquals(0, StringUtils.countMatches(null, "DD"));
        assertEquals(0, StringUtils.countMatches("x", ""));
        assertEquals(0, StringUtils.countMatches("", ""));
        assertEquals(3,
                StringUtils.countMatches("one long someone sentence of one", "one"));
        assertEquals(0,
                StringUtils.countMatches("one long someone sentence of one", "two"));
        assertEquals(4,
                StringUtils.countMatches("oooooooooooo", "ooo"));
        assertEquals(0, StringUtils.countMatches(null, "?"));
        assertEquals(0, StringUtils.countMatches("", "?"));
        assertEquals(0, StringUtils.countMatches("abba", null));
        assertEquals(0, StringUtils.countMatches("abba", ""));
        assertEquals(2, StringUtils.countMatches("abba", "a"));
        assertEquals(1, StringUtils.countMatches("abba", "ab"));
        assertEquals(0, StringUtils.countMatches("abba", "xxx"));
        // Matches do not overlap: only one "aba" is counted in "ababa".
        assertEquals(1, StringUtils.countMatches("ababa", "aba"));
    }
    /** Tests {@code StringUtils.left}: null passthrough, negative/zero lengths, and lengths beyond the string. */
    @Test
    void testLeft_String() {
        assertSame(null, StringUtils.left(null, -1));
        assertSame(null, StringUtils.left(null, 0));
        assertSame(null, StringUtils.left(null, 2));
        assertEquals("", StringUtils.left("", -1));
        assertEquals("", StringUtils.left("", 0));
        assertEquals("", StringUtils.left("", 2));
        assertEquals("", StringUtils.left(FOOBAR, -1));
        assertEquals("", StringUtils.left(FOOBAR, 0));
        assertEquals(FOO, StringUtils.left(FOOBAR, 3));
        // Over-long length returns the same instance, not a copy.
        assertSame(FOOBAR, StringUtils.left(FOOBAR, 80));
    }
    /** Tests {@code StringUtils.mid}: null passthrough, out-of-range positions, and negative positions clamped to 0. */
    @Test
    void testMid_String() {
        assertSame(null, StringUtils.mid(null, -1, 0));
        assertSame(null, StringUtils.mid(null, 0, -1));
        assertSame(null, StringUtils.mid(null, 3, 0));
        assertSame(null, StringUtils.mid(null, 3, 2));
        assertEquals("", StringUtils.mid("", 0, -1));
        assertEquals("", StringUtils.mid("", 0, 0));
        assertEquals("", StringUtils.mid("", 0, 2));
        assertEquals("", StringUtils.mid(FOOBAR, 3, -1));
        assertEquals("", StringUtils.mid(FOOBAR, 3, 0));
        assertEquals("b", StringUtils.mid(FOOBAR, 3, 1));
        assertEquals(FOO, StringUtils.mid(FOOBAR, 0, 3));
        assertEquals(BAR, StringUtils.mid(FOOBAR, 3, 3));
        assertEquals(FOOBAR, StringUtils.mid(FOOBAR, 0, 80));
        assertEquals(BAR, StringUtils.mid(FOOBAR, 3, 80));
        assertEquals("", StringUtils.mid(FOOBAR, 9, 3));
        // A negative position is treated as 0.
        assertEquals(FOO, StringUtils.mid(FOOBAR, -1, 3));
    }
    /** Tests {@code StringUtils.right}: null passthrough, negative/zero lengths, and lengths beyond the string. */
    @Test
    void testRight_String() {
        assertSame(null, StringUtils.right(null, -1));
        assertSame(null, StringUtils.right(null, 0));
        assertSame(null, StringUtils.right(null, 2));
        assertEquals("", StringUtils.right("", -1));
        assertEquals("", StringUtils.right("", 0));
        assertEquals("", StringUtils.right("", 2));
        assertEquals("", StringUtils.right(FOOBAR, -1));
        assertEquals("", StringUtils.right(FOOBAR, 0));
        assertEquals(BAR, StringUtils.right(FOOBAR, 3));
        // Over-long length returns the same instance, not a copy.
        assertSame(FOOBAR, StringUtils.right(FOOBAR, 80));
    }
    /** Tests {@code StringUtils.substring(String, int)}: negative starts count from the end; out-of-range is clamped. */
    @Test
    void testSubstring_StringInt() {
        assertNull(StringUtils.substring(null, 0));
        assertEquals("", StringUtils.substring("", 0));
        assertEquals("", StringUtils.substring("", 2));
        assertEquals("", StringUtils.substring(SENTENCE, 80));
        assertEquals(BAZ, StringUtils.substring(SENTENCE, 8));
        // Negative start counts back from the end of the string.
        assertEquals(BAZ, StringUtils.substring(SENTENCE, -3));
        assertEquals(SENTENCE, StringUtils.substring(SENTENCE, 0));
        assertEquals("abc", StringUtils.substring("abc", -4));
        assertEquals("abc", StringUtils.substring("abc", -3));
        assertEquals("bc", StringUtils.substring("abc", -2));
        assertEquals("c", StringUtils.substring("abc", -1));
        assertEquals("abc", StringUtils.substring("abc", 0));
        assertEquals("bc", StringUtils.substring("abc", 1));
        assertEquals("c", StringUtils.substring("abc", 2));
        assertEquals("", StringUtils.substring("abc", 3));
        assertEquals("", StringUtils.substring("abc", 4));
    }
    /** Tests {@code StringUtils.substring(String, int, int)}: negative bounds, inverted ranges, and clamping. */
    @Test
    void testSubstring_StringIntInt() {
        assertNull(StringUtils.substring(null, 0, 0));
        assertNull(StringUtils.substring(null, 1, 2));
        assertEquals("", StringUtils.substring("", 0, 0));
        assertEquals("", StringUtils.substring("", 1, 2));
        assertEquals("", StringUtils.substring("", -2, -1));
        // start > end yields an empty string rather than throwing.
        assertEquals("", StringUtils.substring(SENTENCE, 8, 6));
        assertEquals(FOO, StringUtils.substring(SENTENCE, 0, 3));
        assertEquals("o", StringUtils.substring(SENTENCE, -9, 3));
        assertEquals(FOO, StringUtils.substring(SENTENCE, 0, -8));
        assertEquals("o", StringUtils.substring(SENTENCE, -9, -8));
        assertEquals(SENTENCE, StringUtils.substring(SENTENCE, 0, 80));
        assertEquals("", StringUtils.substring(SENTENCE, 2, 2));
        assertEquals("b", StringUtils.substring("abc", -2, -1));
    }
    /** Tests {@code StringUtils.substringAfter(String, int)}: text after the first occurrence of a character. */
    @Test
    void testSubstringAfter_StringInt() {
        assertNull(StringUtils.substringAfter(null, 0));
        assertNull(StringUtils.substringAfter(null, 'X'));
        assertEquals("", StringUtils.substringAfter("", 0));
        assertEquals("", StringUtils.substringAfter("", 'X'));
        assertEquals("", StringUtils.substringAfter("foo", 0));
        assertEquals("ot", StringUtils.substringAfter("foot", 'o'));
        assertEquals("bc", StringUtils.substringAfter("abc", 'a'));
        assertEquals("cba", StringUtils.substringAfter("abcba", 'b'));
        assertEquals("", StringUtils.substringAfter("abc", 'c'));
        // A separator not present in the string yields the empty string.
        assertEquals("", StringUtils.substringAfter("abc", 'd'));
    }
    /** Tests {@code StringUtils.substringAfter(String, String)}: text after the first occurrence of a separator. */
    @Test
    void testSubstringAfter_StringString() {
        assertEquals("barXXbaz", StringUtils.substringAfter("fooXXbarXXbaz", "XX"));
        assertNull(StringUtils.substringAfter(null, null));
        assertNull(StringUtils.substringAfter(null, ""));
        assertNull(StringUtils.substringAfter(null, "XX"));
        assertEquals("", StringUtils.substringAfter("", null));
        assertEquals("", StringUtils.substringAfter("", ""));
        assertEquals("", StringUtils.substringAfter("", "XX"));
        assertEquals("", StringUtils.substringAfter("foo", null));
        assertEquals("ot", StringUtils.substringAfter("foot", "o"));
        assertEquals("bc", StringUtils.substringAfter("abc", "a"));
        assertEquals("cba", StringUtils.substringAfter("abcba", "b"));
        assertEquals("", StringUtils.substringAfter("abc", "c"));
        // Empty separator matches at position 0, returning the whole string.
        assertEquals("abc", StringUtils.substringAfter("abc", ""));
        assertEquals("", StringUtils.substringAfter("abc", "d"));
    }
    /** Tests {@code StringUtils.substringAfterLast(String, int)}: text after the last occurrence of a character. */
    @Test
    void testSubstringAfterLast_StringInt() {
        assertNull(StringUtils.substringAfterLast(null, 0));
        assertNull(StringUtils.substringAfterLast(null, 'X'));
        assertEquals("", StringUtils.substringAfterLast("", 0));
        assertEquals("", StringUtils.substringAfterLast("", 'a'));
        assertEquals("", StringUtils.substringAfterLast("foo", 0));
        assertEquals("", StringUtils.substringAfterLast("foo", 'b'));
        assertEquals("t", StringUtils.substringAfterLast("foot", 'o'));
        assertEquals("bc", StringUtils.substringAfterLast("abc", 'a'));
        assertEquals("a", StringUtils.substringAfterLast("abcba", 'b'));
        assertEquals("", StringUtils.substringAfterLast("abc", 'c'));
        assertEquals("", StringUtils.substringAfterLast("", 'd'));
    }
    /** Tests {@code StringUtils.substringAfterLast(String, String)}: text after the last occurrence of a separator. */
    @Test
    void testSubstringAfterLast_StringString() {
        assertEquals("baz", StringUtils.substringAfterLast("fooXXbarXXbaz", "XX"));
        assertNull(StringUtils.substringAfterLast(null, null));
        assertNull(StringUtils.substringAfterLast(null, ""));
        assertNull(StringUtils.substringAfterLast(null, "XX"));
        assertEquals("", StringUtils.substringAfterLast("", null));
        assertEquals("", StringUtils.substringAfterLast("", ""));
        assertEquals("", StringUtils.substringAfterLast("", "a"));
        assertEquals("", StringUtils.substringAfterLast("foo", null));
        assertEquals("", StringUtils.substringAfterLast("foo", "b"));
        assertEquals("t", StringUtils.substringAfterLast("foot", "o"));
        assertEquals("bc", StringUtils.substringAfterLast("abc", "a"));
        assertEquals("a", StringUtils.substringAfterLast("abcba", "b"));
        assertEquals("", StringUtils.substringAfterLast("abc", "c"));
        assertEquals("", StringUtils.substringAfterLast("", "d"));
        assertEquals("", StringUtils.substringAfterLast("abc", ""));
    }
    /** Tests {@code StringUtils.substringBefore(String, int)}: text before the first occurrence of a character. */
    @Test
    void testSubstringBefore_StringInt() {
        assertEquals("foo", StringUtils.substringBefore("fooXXbarXXbaz", 'X'));
        assertNull(StringUtils.substringBefore(null, 0));
        assertNull(StringUtils.substringBefore(null, 'X'));
        assertEquals("", StringUtils.substringBefore("", 0));
        assertEquals("", StringUtils.substringBefore("", 'X'));
        // A separator not present in the string returns the whole input.
        assertEquals("foo", StringUtils.substringBefore("foo", 0));
        assertEquals("foo", StringUtils.substringBefore("foo", 'b'));
        assertEquals("f", StringUtils.substringBefore("foot", 'o'));
        assertEquals("", StringUtils.substringBefore("abc", 'a'));
        assertEquals("a", StringUtils.substringBefore("abcba", 'b'));
        assertEquals("ab", StringUtils.substringBefore("abc", 'c'));
        assertEquals("abc", StringUtils.substringBefore("abc", 0));
    }
    /** Tests {@code StringUtils.substringBefore(String, String)}: text before the first occurrence of a separator. */
    @Test
    void testSubstringBefore_StringString() {
        assertEquals("foo", StringUtils.substringBefore("fooXXbarXXbaz", "XX"));
        assertNull(StringUtils.substringBefore(null, null));
        assertNull(StringUtils.substringBefore(null, ""));
        assertNull(StringUtils.substringBefore(null, "XX"));
        assertEquals("", StringUtils.substringBefore("", null));
        assertEquals("", StringUtils.substringBefore("", ""));
        assertEquals("", StringUtils.substringBefore("", "XX"));
        // A null or absent separator returns the whole input.
        assertEquals("foo", StringUtils.substringBefore("foo", null));
        assertEquals("foo", StringUtils.substringBefore("foo", "b"));
        assertEquals("f", StringUtils.substringBefore("foot", "o"));
        assertEquals("", StringUtils.substringBefore("abc", "a"));
        assertEquals("a", StringUtils.substringBefore("abcba", "b"));
        assertEquals("ab", StringUtils.substringBefore("abc", "c"));
        assertEquals("", StringUtils.substringBefore("abc", ""));
        assertEquals("abc", StringUtils.substringBefore("abc", "X"));
    }
    /** Tests {@code StringUtils.substringBeforeLast}: text before the last occurrence of a separator. */
    @Test
    void testSubstringBeforeLast_StringString() {
        assertEquals("fooXXbar", StringUtils.substringBeforeLast("fooXXbarXXbaz", "XX"));
        assertNull(StringUtils.substringBeforeLast(null, null));
        assertNull(StringUtils.substringBeforeLast(null, ""));
        assertNull(StringUtils.substringBeforeLast(null, "XX"));
        assertEquals("", StringUtils.substringBeforeLast("", null));
        assertEquals("", StringUtils.substringBeforeLast("", ""));
        assertEquals("", StringUtils.substringBeforeLast("", "XX"));
        // A null, empty, or absent separator returns the whole input.
        assertEquals("foo", StringUtils.substringBeforeLast("foo", null));
        assertEquals("foo", StringUtils.substringBeforeLast("foo", "b"));
        assertEquals("fo", StringUtils.substringBeforeLast("foo", "o"));
        assertEquals("abc\r\n", StringUtils.substringBeforeLast("abc\r\n", "d"));
        assertEquals("abc", StringUtils.substringBeforeLast("abcdabc", "d"));
        assertEquals("abcdabc", StringUtils.substringBeforeLast("abcdabcd", "d"));
        assertEquals("a", StringUtils.substringBeforeLast("abc", "b"));
        assertEquals("abc ", StringUtils.substringBeforeLast("abc \n", "\n"));
        assertEquals("a", StringUtils.substringBeforeLast("a", null));
        assertEquals("a", StringUtils.substringBeforeLast("a", ""));
        assertEquals("", StringUtils.substringBeforeLast("a", "a"));
    }
    /** Tests {@code StringUtils.substringBetween(String, String)}: text between two instances of the same tag. */
    @Test
    void testSubstringBetween_StringString() {
        assertNull(StringUtils.substringBetween(null, "tag"));
        assertEquals("", StringUtils.substringBetween("", ""));
        assertNull(StringUtils.substringBetween("", "abc"));
        assertEquals("", StringUtils.substringBetween(" ", " "));
        assertNull(StringUtils.substringBetween("abc", null));
        assertEquals("", StringUtils.substringBetween("abc", ""));
        // A single occurrence of the tag cannot delimit a substring.
        assertNull(StringUtils.substringBetween("abc", "a"));
        assertEquals("bc", StringUtils.substringBetween("abca", "a"));
        assertEquals("bc", StringUtils.substringBetween("abcabca", "a"));
        assertEquals("bar", StringUtils.substringBetween("\nbar\n", "\n"));
    }
    /** Tests {@code StringUtils.substringBetween(String, String, String)}: text between distinct open/close tags. */
    @Test
    void testSubstringBetween_StringStringString() {
        assertNull(StringUtils.substringBetween(null, "", ""));
        assertNull(StringUtils.substringBetween("", null, ""));
        assertNull(StringUtils.substringBetween("", "", null));
        assertEquals("", StringUtils.substringBetween("", "", ""));
        assertEquals("", StringUtils.substringBetween("foo", "", ""));
        assertNull(StringUtils.substringBetween("foo", "", "]"));
        assertNull(StringUtils.substringBetween("foo", "[", "]"));
        assertEquals("", StringUtils.substringBetween(" ", " ", " "));
        assertEquals("bar", StringUtils.substringBetween("<foo>bar</foo>", "<foo>", "</foo>"));
        // Only the first delimited region is returned.
        assertEquals("abc", StringUtils.substringBetween("yabczyabcz", "y", "z"));
    }
/**
* Tests the substringsBetween method that returns a String Array of substrings.
*/
@Test
void testSubstringsBetween_StringStringString() {
String[] results = StringUtils.substringsBetween("[one], [two], [three]", "[", "]");
assertEquals(3, results.length);
assertEquals("one", results[0]);
assertEquals("two", results[1]);
assertEquals("three", results[2]);
results = StringUtils.substringsBetween("[one], [two], three", "[", "]");
assertEquals(2, results.length);
assertEquals("one", results[0]);
assertEquals("two", results[1]);
results = StringUtils.substringsBetween("[one], [two], three]", "[", "]");
assertEquals(2, results.length);
assertEquals("one", results[0]);
assertEquals("two", results[1]);
results = StringUtils.substringsBetween("[one], two], three]", "[", "]");
assertEquals(1, results.length);
assertEquals("one", results[0]);
results = StringUtils.substringsBetween("one], two], [three]", "[", "]");
assertEquals(1, results.length);
assertEquals("three", results[0]);
// 'ab hello ba' will match, but 'ab non ba' won't
// this is because the 'a' is shared between the two and can't be matched twice
results = StringUtils.substringsBetween("aabhellobabnonba", "ab", "ba");
assertEquals(1, results.length);
assertEquals("hello", results[0]);
results = StringUtils.substringsBetween("one, two, three", "[", "]");
assertNull(results);
results = StringUtils.substringsBetween("[one, two, three", "[", "]");
assertNull(results);
results = StringUtils.substringsBetween("one, two, three]", "[", "]");
assertNull(results);
results = StringUtils.substringsBetween("[one], [two], [three]", "[", null);
assertNull(results);
results = StringUtils.substringsBetween("[one], [two], [three]", null, "]");
assertNull(results);
results = StringUtils.substringsBetween("[one], [two], [three]", "", "");
assertNull(results);
results = StringUtils.substringsBetween(null, "[", "]");
assertNull(results);
results = StringUtils.substringsBetween("", "[", "]");
assertEquals(0, results.length);
}
}
| StringUtilsSubstringTest |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/builder/FlexibleAggregationStrategy.java | {
"start": 2252,
"end": 11862
} | class ____<E> implements AggregationStrategy {
private static final Logger LOG = LoggerFactory.getLogger(FlexibleAggregationStrategy.class);
private Expression pickExpression = ExpressionBuilder.bodyExpression();
private Predicate conditionPredicate;
@SuppressWarnings("rawtypes")
private Class<? extends Collection> collectionType;
@SuppressWarnings("unchecked")
private Class<E> castAs = (Class<E>) Object.class;
private boolean storeNulls;
private boolean ignoreInvalidCasts;
private FlexibleAggregationStrategyInjector injector = new BodyInjector(castAs);
private TimeoutAwareMixin timeoutMixin;
private CompletionAwareMixin completionMixin;
/**
* Initializes a new instance with {@link Object} as the {@link FlexibleAggregationStrategy#castAs} type.
*/
public FlexibleAggregationStrategy() {
}
/**
* Initializes a new instance with the specified type as the {@link FlexibleAggregationStrategy#castAs} type.
*
* @param type The castAs type.
*/
public FlexibleAggregationStrategy(Class<E> type) {
this.castAs = type;
}
/**
* Set an expression to extract the element to be aggregated from the incoming {@link Exchange}. All results are
* cast to the {@link FlexibleAggregationStrategy#castAs} type (or the type specified in the constructor).
* <p/>
* By default, it picks the full IN message body of the incoming exchange.
*
* @param expression The picking expression.
* @return This instance.
*/
public FlexibleAggregationStrategy<E> pick(Expression expression) {
this.pickExpression = expression;
return this;
}
/**
* Set a filter condition such as only results satisfying it will be aggregated. By default, all picked values will
* be processed.
*
* @param predicate The condition.
* @return This instance.
*/
public FlexibleAggregationStrategy<E> condition(Predicate predicate) {
this.conditionPredicate = predicate;
return this;
}
/**
* Accumulate the result of the <i>pick expression</i> in a collection of the designated type. No <tt>null</tt>s
* will stored unless the {@link FlexibleAggregationStrategy#storeNulls()} option is enabled.
*
* @param collectionType The type of the Collection to aggregate into.
* @return This instance.
*/
@SuppressWarnings("rawtypes")
public FlexibleAggregationStrategy<E> accumulateInCollection(Class<? extends Collection> collectionType) {
this.collectionType = collectionType;
return this;
}
/**
* Store the result of this Aggregation Strategy (whether an atomic element or a Collection) in a property with the
* designated name.
*
* @param propertyName The property name.
* @return This instance.
*/
public FlexibleAggregationStrategy<E> storeInProperty(String propertyName) {
this.injector = new PropertyInjector(castAs, propertyName);
return this;
}
/**
* Store the result of this Aggregation Strategy (whether an atomic element or a Collection) in a variable with the
* designated name.
*
* @param variableName The variable name.
* @return This instance.
*/
public FlexibleAggregationStrategy<E> storeInVariable(String variableName) {
this.injector = new VariableInjector(castAs, variableName);
return this;
}
/**
* Store the result of this Aggregation Strategy (whether an atomic element or a Collection) in an IN message header
* with the designated name.
*
* @param headerName The header name.
* @return This instance.
*/
public FlexibleAggregationStrategy<E> storeInHeader(String headerName) {
this.injector = new HeaderInjector(castAs, headerName);
return this;
}
/**
* Store the result of this Aggregation Strategy (whether an atomic element or a Collection) in the body of the IN
* message.
*
* @return This instance.
*/
public FlexibleAggregationStrategy<E> storeInBody() {
this.injector = new BodyInjector(castAs);
return this;
}
/**
* Cast the result of the <i>pick expression</i> to this type.
*
* @param castAs Type for the cast.
* @return This instance.
*/
public FlexibleAggregationStrategy<E> castAs(Class<E> castAs) {
this.castAs = castAs;
injector.setType(castAs);
return this;
}
/**
* Enables storing null values in the resulting collection. By default, this aggregation strategy will drop null
* values.
*
* @return This instance.
*/
public FlexibleAggregationStrategy<E> storeNulls() {
this.storeNulls = true;
return this;
}
/**
* Ignores invalid casts instead of throwing an exception if the <i>pick expression</i> result cannot be casted to
* the specified type. By default, this aggregation strategy will throw an exception if an invalid cast occurs.
*
* @return This instance.
*/
public FlexibleAggregationStrategy<E> ignoreInvalidCasts() {
this.ignoreInvalidCasts = true;
return this;
}
/**
* Plugs in logic to execute when a timeout occurs.
*
* @param timeoutMixin custom logic on timeout
* @return This instance.
*/
public FlexibleAggregationStrategy<E> timeoutAware(TimeoutAwareMixin timeoutMixin) {
this.timeoutMixin = timeoutMixin;
return this;
}
/**
* Plugs in logic to execute when an aggregation batch completes.
*
* @param completionMixin custom logic on completion
* @return This instance.
*/
public FlexibleAggregationStrategy<E> completionAware(CompletionAwareMixin completionMixin) {
this.completionMixin = completionMixin;
return this;
}
@Override
public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
Exchange exchange = oldExchange;
if (exchange == null) {
exchange = ExchangeHelper.createCorrelatedCopy(newExchange, true);
injector.prepareAggregationExchange(exchange);
}
// 1. Apply the condition and reject the aggregation if unmatched
if (conditionPredicate != null && !conditionPredicate.matches(newExchange)) {
LOG.trace("Dropped exchange {} from aggregation as predicate {} was not matched", newExchange, conditionPredicate);
return exchange;
}
// 2. Pick the appropriate element of the incoming message, casting it
// to the specified class
// If null, act accordingly based on storeNulls
E picked = null;
try {
picked = pickExpression.evaluate(newExchange, castAs);
} catch (TypeConversionException exception) {
if (!ignoreInvalidCasts) {
throw exception;
}
}
if (picked == null && !storeNulls) {
LOG.trace("Dropped exchange {} from aggregation as pick expression returned null and storing nulls is not enabled",
newExchange);
return exchange;
}
if (collectionType == null) {
injectAsRawValue(exchange, picked);
} else {
injectAsCollection(exchange, picked, collectionType);
}
return exchange;
}
@Override
public void timeout(Exchange oldExchange, int index, int total, long timeout) {
if (timeoutMixin == null) {
return;
}
timeoutMixin.timeout(oldExchange, index, total, timeout);
}
@Override
public void onCompletion(Exchange exchange) {
if (completionMixin == null) {
return;
}
completionMixin.onCompletion(exchange);
}
private void injectAsRawValue(Exchange oldExchange, E picked) {
injector.setValue(oldExchange, picked);
}
private void injectAsCollection(Exchange oldExchange, E picked, Class<? extends Collection> collectionType) {
Collection<E> col = injector.getValueAsCollection(oldExchange, collectionType);
col = safeInsertIntoCollection(oldExchange, col, picked);
injector.setValueAsCollection(oldExchange, col);
}
@SuppressWarnings("unchecked")
private Collection<E> safeInsertIntoCollection(Exchange oldExchange, Collection<E> oldValue, E toInsert) {
Collection<E> collection = null;
try {
if (oldValue == null || oldExchange.getProperty(Exchange.AGGREGATED_COLLECTION_GUARD, Boolean.class) == null) {
try {
collection = collectionType.getDeclaredConstructor().newInstance();
} catch (Exception e) {
LOG.warn("Could not instantiate collection of type {}. Aborting aggregation.", collectionType);
throw CamelExecutionException.wrapCamelExecutionException(oldExchange, e);
}
oldExchange.setProperty(Exchange.AGGREGATED_COLLECTION_GUARD, Boolean.FALSE);
} else {
collection = collectionType.cast(oldValue);
}
collection.add(toInsert);
} catch (ClassCastException exception) {
if (!ignoreInvalidCasts) {
throw exception;
}
}
return collection;
}
public | FlexibleAggregationStrategy |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/annotations/ConstructorArgs.java | {
"start": 1025,
"end": 1517
} | interface ____ {
* @ConstructorArgs({ @Arg(column = "id", javaType = int.class, id = true),
* @Arg(column = "name", javaType = String.class),
* @Arg(javaType = UserEmail.class, select = "selectUserEmailById", column = "id") })
* @Select("SELECT id, name FROM users WHERE id = #{id}")
* User selectById(int id);
* }
* }</pre>
*
* @author Clinton Begin
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @ | UserMapper |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/allocator/ReservedSlots.java | {
"start": 1154,
"end": 1731
} | class ____ {
private final Map<ExecutionVertexID, LogicalSlot> slotPerExecutionVertex;
private ReservedSlots(Map<ExecutionVertexID, LogicalSlot> slotPerExecutionVertex) {
this.slotPerExecutionVertex = slotPerExecutionVertex;
}
public LogicalSlot getSlotFor(ExecutionVertexID executionVertexId) {
return Preconditions.checkNotNull(slotPerExecutionVertex.get(executionVertexId));
}
public static ReservedSlots create(Map<ExecutionVertexID, LogicalSlot> assignedSlots) {
return new ReservedSlots(assignedSlots);
}
}
| ReservedSlots |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/extensions/CollectionTemplateExtensionsTest.java | {
"start": 419,
"end": 2920
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot(root -> root
.addAsResource(new StringAsset(
"{@java.util.List<Boolean> list}{list.0.booleanValue}={list[0]}"),
"templates/getByIndex.html")
.addAsResource(new StringAsset(
"{@java.util.List<String> list}{#each list.reversed}{it.toUpperCase}::{/each}"),
"templates/reversed.html")
.addAsResource(new StringAsset(
"{@java.util.List<String> list}{#each list.take(2)}{it}::{/each}"),
"templates/take.html")
.addAsResource(new StringAsset(
"{@java.util.List<String> list}{#each list.takeLast(2)}{it}::{/each}"),
"templates/takeLast.html")
.addAsResource(new StringAsset(
"{@java.util.List<String> list}{list.first.toUpperCase}"),
"templates/first.html")
.addAsResource(new StringAsset(
"{@java.util.List<String> list}{list.last.toUpperCase}"),
"templates/last.html")
);
@Inject
Engine engine;
@Test
public void testListGetByIndex() {
assertEquals("true=true",
engine.getTemplate("getByIndex").data("list", Collections.singletonList(true)).render());
}
@Test
public void testListReversed() {
assertEquals("CHARLIE::BRAVO::ALPHA::", engine.getTemplate("reversed").data("list", listOfNames()).render());
}
@Test
public void testTake() {
assertEquals("alpha::bravo::", engine.getTemplate("take").data("list", listOfNames()).render());
}
@Test
public void testTakeLast() {
assertEquals("bravo::charlie::", engine.getTemplate("takeLast").data("list", listOfNames()).render());
}
@Test
public void testFirst() {
assertEquals("ALPHA", engine.getTemplate("first").data("list", listOfNames()).render());
}
@Test
public void testLast() {
assertEquals("CHARLIE", engine.getTemplate("last").data("list", listOfNames()).render());
}
private List<String> listOfNames() {
return List.of("alpha", "bravo", "charlie");
}
}
| CollectionTemplateExtensionsTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/erroneous/ambiguousannotatedfactorymethod/AmbiguousBarFactory.java | {
"start": 296,
"end": 424
} | class ____ {
@ObjectFactory
public Bar createBar( Foo foo ) {
return new Bar( "BAR" );
}
}
| AmbiguousBarFactory |
java | google__gson | gson/src/test/java/com/google/gson/JsonParserParameterizedTest.java | {
"start": 1000,
"end": 1815
} | class ____ {
@Parameters
public static Iterable<String> data() {
return Arrays.asList(
"[]",
"{}",
"null",
"1.0",
"true",
"\"string\"",
"[true,1.0,null,{},2.0,{\"a\":[false]},[3.0,\"test\"],4.0]",
"{\"\":1.0,\"a\":true,\"b\":null,\"c\":[],\"d\":{\"a1\":2.0,\"b2\":[true,{\"a3\":3.0}]},\"e\":[{\"f\":4.0},\"test\"]}");
}
private final TypeAdapter<JsonElement> adapter = JsonElementTypeAdapter.ADAPTER;
@Parameter public String json;
@Test
public void testParse() {
JsonElement deserialized = JsonParser.parseString(json);
String actualSerialized = adapter.toJson(deserialized);
// Serialized JsonElement should be the same as original JSON
assertThat(actualSerialized).isEqualTo(json);
}
}
| JsonParserParameterizedTest |
java | netty__netty | buffer/src/main/java/io/netty/buffer/search/MultiSearchProcessorFactory.java | {
"start": 671,
"end": 874
} | interface ____ extends SearchProcessorFactory {
/**
* Returns a new {@link MultiSearchProcessor}.
*/
@Override
MultiSearchProcessor newSearchProcessor();
}
| MultiSearchProcessorFactory |
java | quarkusio__quarkus | extensions/smallrye-graphql/deployment/src/main/java/io/quarkus/smallrye/graphql/deployment/SmallRyeGraphQLBuildItem.java | {
"start": 104,
"end": 644
} | class ____ extends SimpleBuildItem {
private final String graphqlUiFinalDestination;
private final String graphqlUiPath;
public SmallRyeGraphQLBuildItem(String graphqlUiFinalDestination, String graphqlUiPath) {
this.graphqlUiFinalDestination = graphqlUiFinalDestination;
this.graphqlUiPath = graphqlUiPath;
}
public String getGraphqlUiFinalDestination() {
return graphqlUiFinalDestination;
}
public String getGraphqlUiPath() {
return graphqlUiPath;
}
} | SmallRyeGraphQLBuildItem |
java | apache__dubbo | dubbo-common/src/main/java/org/apache/dubbo/common/config/configcenter/ConfigChangeType.java | {
"start": 899,
"end": 1096
} | enum ____ {
/**
* A config is created.
*/
ADDED,
/**
* A config is updated.
*/
MODIFIED,
/**
* A config is deleted.
*/
DELETED
}
| ConfigChangeType |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregator.java | {
"start": 4049,
"end": 5759
} | class ____ implements AggregatorState {
private final BytesRefHash values;
private SingleState(BigArrays bigArrays) {
values = new BytesRefHash(1, bigArrays);
}
@Override
public void toIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
blocks[offset] = toBlock(driverContext.blockFactory());
}
Block toBlock(BlockFactory blockFactory) {
if (values.size() == 0) {
return blockFactory.newConstantNullBlock(1);
}
BytesRef scratch = new BytesRef();
if (values.size() == 1) {
return blockFactory.newConstantBytesRefBlockWith(BytesRef.deepCopyOf(values.get(0, scratch)), 1);
}
try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder((int) values.size())) {
builder.beginPositionEntry();
for (int id = 0; id < values.size(); id++) {
builder.appendBytesRef(values.get(id, scratch));
}
builder.endPositionEntry();
return builder.build();
}
}
@Override
public void close() {
values.close();
}
}
/**
* Values after the first in each group are collected in a hash, keyed by the pair of groupId and value.
* When emitting the output, we need to iterate the hash one group at a time to build the output block,
* which would require O(N^2). To avoid this, we compute the counts for each group and remap the hash id
* to an array, allowing us to build the output in O(N) instead.
*/
private static | SingleState |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/reactive/RedisPublisherVerification.java | {
"start": 1045,
"end": 2685
} | class ____ extends PublisherVerification<String> {
private static RedisClient client;
private static StatefulRedisConnection<String, String> connection;
public RedisPublisherVerification() {
super(new TestEnvironment(1000));
}
@BeforeClass
private static void beforeClass() {
client = RedisClient.create(TestClientResources.get(), RedisURI.create(TestSettings.host(), TestSettings.port()));
connection = client.connect();
connection.sync().flushall();
}
@AfterClass
private static void afterClass() {
connection.close();
FastShutdown.shutdown(client);
}
@Override
public Publisher<String> createPublisher(long elements) {
RedisCommands<String, String> sync = connection.sync();
if (elements == Long.MAX_VALUE) {
return null;
}
String id = UUID.randomUUID().toString();
String key = "PublisherVerification-" + id;
for (int i = 0; i < elements; i++) {
sync.lpush(key, "element-" + i);
}
Supplier<Command<String, String, List<String>>> supplier = () -> {
CommandArgs<String, String> args = new CommandArgs<>(StringCodec.UTF8).addKey(key).add(0).add(-1);
return new Command<>(LRANGE, new ValueListOutput<>(StringCodec.UTF8), args);
};
return new TestRedisPublisher(supplier, connection, true);
}
@Override
public long maxElementsFromPublisher() {
return 100;
}
@Override
public Publisher<String> createFailedPublisher() {
return null;
}
}
| RedisPublisherVerification |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/operators/CompilerHints.java | {
"start": 1019,
"end": 1387
} | class ____ compiler hints describing the behavior of the user function. If set, the
* optimizer will use them to estimate the sizes of the intermediate results. Note that these values
* are optional hints, the optimizer will always generate a valid plan without them as well. The
* hints may help, however, to improve the plan choice.
*/
@Internal
public | encapsulating |
java | apache__camel | core/camel-xml-io/src/generated/java/org/apache/camel/xml/in/ModelParser.java | {
"start": 2055,
"end": 2204
} | class ____ internally by the framework.
// Same for unchecked type safety warnings.
@SuppressWarnings({"deprecation","rawtypes","unchecked"})
public | used |
java | elastic__elasticsearch | libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Similarities.java | {
"start": 718,
"end": 2558
} | class ____ {
static final VectorSimilarityFunctions DISTANCE_FUNCS = NativeAccess.instance()
.getVectorSimilarityFunctions()
.orElseThrow(AssertionError::new);
static final MethodHandle DOT_PRODUCT_7U = DISTANCE_FUNCS.dotProductHandle7u();
static final MethodHandle SQUARE_DISTANCE_7U = DISTANCE_FUNCS.squareDistanceHandle7u();
static final MethodHandle DOT_PRODUCT_7U_BULK = DISTANCE_FUNCS.dotProductHandle7uBulk();
static int dotProduct7u(MemorySegment a, MemorySegment b, int length) {
try {
return (int) DOT_PRODUCT_7U.invokeExact(a, b, length);
} catch (Throwable e) {
if (e instanceof Error err) {
throw err;
} else if (e instanceof RuntimeException re) {
throw re;
} else {
throw new RuntimeException(e);
}
}
}
static int squareDistance7u(MemorySegment a, MemorySegment b, int length) {
try {
return (int) SQUARE_DISTANCE_7U.invokeExact(a, b, length);
} catch (Throwable e) {
if (e instanceof Error err) {
throw err;
} else if (e instanceof RuntimeException re) {
throw re;
} else {
throw new RuntimeException(e);
}
}
}
static void dotProduct7uBulk(MemorySegment a, MemorySegment b, int length, int count, MemorySegment scores) {
try {
DOT_PRODUCT_7U_BULK.invokeExact(a, b, length, count, scores);
} catch (Throwable e) {
if (e instanceof Error err) {
throw err;
} else if (e instanceof RuntimeException re) {
throw re;
} else {
throw new RuntimeException(e);
}
}
}
}
| Similarities |
java | apache__dubbo | dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/router/affinity/config/AffinityProviderAppStateRouterFactory.java | {
"start": 1165,
"end": 1474
} | class ____ extends CacheableStateRouterFactory {
public static final String NAME = "affinity-provider-app";
@Override
protected <T> StateRouter<T> createRouter(Class<T> interfaceClass, URL url) {
return new AffinityProviderAppStateRouter<>(url);
}
}
| AffinityProviderAppStateRouterFactory |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/RemoteClusterPermissionGroup.java | {
"start": 1159,
"end": 6538
} | class ____ implements NamedWriteable, ToXContentObject {
public static final String NAME = "remote_cluster_permission_group";
private final String[] clusterPrivileges;
private final String[] remoteClusterAliases;
private final StringMatcher remoteClusterAliasMatcher;
public RemoteClusterPermissionGroup(StreamInput in) throws IOException {
clusterPrivileges = in.readStringArray();
remoteClusterAliases = in.readStringArray();
remoteClusterAliasMatcher = StringMatcher.of(remoteClusterAliases);
}
public RemoteClusterPermissionGroup(Map<String, List<String>> remoteClusterGroup) {
assert remoteClusterGroup.get(PRIVILEGES.getPreferredName()) != null : "privileges must be non-null";
assert remoteClusterGroup.get(CLUSTERS.getPreferredName()) != null : "clusters must be non-null";
clusterPrivileges = remoteClusterGroup.get(PRIVILEGES.getPreferredName()).toArray(new String[0]);
remoteClusterAliases = remoteClusterGroup.get(CLUSTERS.getPreferredName()).toArray(new String[0]);
remoteClusterAliasMatcher = StringMatcher.of(remoteClusterAliases);
}
/**
* @param clusterPrivileges The list of cluster privileges that are allowed for the remote cluster. must not be null or empty.
* @param remoteClusterAliases The list of remote clusters that the privileges apply to. must not be null or empty.
*/
public RemoteClusterPermissionGroup(String[] clusterPrivileges, String[] remoteClusterAliases) {
if (clusterPrivileges == null
|| remoteClusterAliases == null
|| clusterPrivileges.length <= 0
|| remoteClusterAliases.length <= 0) {
throw new IllegalArgumentException("remote cluster groups must not be null or empty");
}
if (Arrays.stream(clusterPrivileges).anyMatch(s -> Strings.hasText(s) == false)) {
throw new IllegalArgumentException(
"remote_cluster privileges must contain valid non-empty, non-null values " + Arrays.toString(clusterPrivileges)
);
}
if (Arrays.stream(remoteClusterAliases).anyMatch(s -> Strings.hasText(s) == false)) {
throw new IllegalArgumentException(
"remote_cluster clusters aliases must contain valid non-empty, non-null values " + Arrays.toString(remoteClusterAliases)
);
}
this.clusterPrivileges = clusterPrivileges;
this.remoteClusterAliases = remoteClusterAliases;
this.remoteClusterAliasMatcher = StringMatcher.of(remoteClusterAliases);
}
/**
* @param remoteClusterAlias The remote cluster alias to check to see if has privileges defined in this group.
* @return true if the remote cluster alias has privileges defined in this group, false otherwise.
*/
public boolean hasPrivileges(final String remoteClusterAlias) {
return remoteClusterAliasMatcher.test(remoteClusterAlias);
}
/**
* @return A copy of the cluster privileges.
*/
public String[] clusterPrivileges() {
return Arrays.copyOf(clusterPrivileges, clusterPrivileges.length);
}
/**
* @return A copy of the cluster aliases.
*/
public String[] remoteClusterAliases() {
return Arrays.copyOf(remoteClusterAliases, remoteClusterAliases.length);
}
/**
* Converts the group to a map representation.
* @return A map representation of the group.
*/
public Map<String, List<String>> toMap() {
return Map.of(
PRIVILEGES.getPreferredName(),
Arrays.asList(clusterPrivileges),
CLUSTERS.getPreferredName(),
Arrays.asList(remoteClusterAliases)
);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.array(PRIVILEGES.getPreferredName(), clusterPrivileges);
builder.array(CLUSTERS.getPreferredName(), remoteClusterAliases);
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeStringArray(clusterPrivileges);
out.writeStringArray(remoteClusterAliases);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RemoteClusterPermissionGroup that = (RemoteClusterPermissionGroup) o;
// remoteClusterAliasMatcher property is intentionally omitted
return Arrays.equals(clusterPrivileges, that.clusterPrivileges) && Arrays.equals(remoteClusterAliases, that.remoteClusterAliases);
}
@Override
public int hashCode() {
// remoteClusterAliasMatcher property is intentionally omitted
int result = Arrays.hashCode(clusterPrivileges);
result = 31 * result + Arrays.hashCode(remoteClusterAliases);
return result;
}
@Override
public String toString() {
return "RemoteClusterPermissionGroup{"
+ "privileges="
+ Arrays.toString(clusterPrivileges)
+ ", clusters="
+ Arrays.toString(remoteClusterAliases)
+ '}';
}
@Override
public String getWriteableName() {
return NAME;
}
}
| RemoteClusterPermissionGroup |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/invoker/basic/InvokerVisibilityTest.java | {
"start": 709,
"end": 2744
} | class ____ {
@RegisterExtension
public ArcTestContainer container = ArcTestContainer.builder()
.beanClasses(MyPublicService.class, MyProtectedService.class, MyPackagePrivateService.class)
.beanRegistrars(new BeanRegistrar() {
@Override
public void register(RegistrationContext context) {
Map<String, InvokerInfo> invokers = new LinkedHashMap<>();
for (Class<?> clazz : List.of(MyPublicService.class, MyProtectedService.class,
MyPackagePrivateService.class)) {
BeanInfo bean = context.beans().withBeanClass(clazz).firstResult().orElseThrow();
for (MethodInfo method : bean.getImplClazz().methods()) {
if (method.isConstructor()) {
continue;
}
invokers.put(clazz.getSimpleName() + "_" + method.name(),
context.getInvokerFactory().createInvoker(bean, method).build());
}
}
InvokerHelperRegistrar.synthesizeInvokerHelper(context, invokers);
}
})
.build();
@Test
public void test() throws Exception {
InvokerHelper helper = Arc.container().instance(InvokerHelper.class).get();
for (Class<?> clazz : List.of(MyPublicService.class, MyProtectedService.class, MyPackagePrivateService.class)) {
InstanceHandle<?> service = Arc.container().instance(clazz);
for (String method : List.of("hello", "helloProtected", "helloPackagePrivate",
"helloStatic", "helloProtectedStatic", "helloPackagePrivateStatic")) {
String id = clazz.getSimpleName() + "_" + method;
assertEquals(id, helper.getInvoker(id).invoke(service.get(), null));
}
}
}
@Singleton
public static | InvokerVisibilityTest |
java | elastic__elasticsearch | x-pack/plugin/fleet/src/test/java/org/elasticsearch/xpack/fleet/action/PostSecretResponseTests.java | {
"start": 425,
"end": 1042
} | class ____ extends AbstractWireSerializingTestCase<PostSecretResponse> {
@Override
protected Writeable.Reader<PostSecretResponse> instanceReader() {
return PostSecretResponse::new;
}
@Override
protected PostSecretResponse createTestInstance() {
return new PostSecretResponse(randomAlphaOfLengthBetween(2, 10));
}
@Override
protected PostSecretResponse mutateInstance(PostSecretResponse instance) {
String id = randomValueOtherThan(instance.id(), () -> randomAlphaOfLengthBetween(2, 10));
return new PostSecretResponse(id);
}
}
| PostSecretResponseTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequest.java | {
"start": 1717,
"end": 1846
} | class ____ swap index under an alias or increment data stream generation upon satisfying conditions
* <p>
* Note: there is a new | to |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/hierarchies/standard/TestHierarchyLevelTwoWithSingleLevelContextHierarchyTests.java | {
"start": 1509,
"end": 1668
} | class ____ extends
TestHierarchyLevelOneWithSingleLevelContextHierarchyTests {
@Configuration
static | TestHierarchyLevelTwoWithSingleLevelContextHierarchyTests |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/MvcUriComponentsBuilder.java | {
"start": 32587,
"end": 34503
} | class ____ {
private final Class<?> controllerType;
private final Method method;
private final Object[] argumentValues;
private final UriComponentsBuilder baseUrl;
/**
* Create a new {@link MethodArgumentBuilder} instance.
* @since 4.2
*/
public MethodArgumentBuilder(Class<?> controllerType, Method method) {
this(null, controllerType, method);
}
/**
* Create a new {@link MethodArgumentBuilder} instance.
* @since 4.2
*/
public MethodArgumentBuilder(@Nullable UriComponentsBuilder baseUrl, Class<?> controllerType, Method method) {
Assert.notNull(controllerType, "'controllerType' is required");
Assert.notNull(method, "'method' is required");
this.baseUrl = (baseUrl != null ? baseUrl : UriComponentsBuilder.fromPath(getPath()));
this.controllerType = controllerType;
this.method = method;
this.argumentValues = new Object[method.getParameterCount()];
}
private static String getPath() {
UriComponentsBuilder builder = ServletUriComponentsBuilder.fromCurrentServletMapping();
String path = builder.build().getPath();
return (path != null ? path : "");
}
public MethodArgumentBuilder arg(int index, Object value) {
this.argumentValues[index] = value;
return this;
}
/**
* Use this method only if you need to apply strong encoding to expanded
* URI variables by quoting all characters with reserved meaning.
* @since 5.0.8
*/
public MethodArgumentBuilder encode() {
this.baseUrl.encode();
return this;
}
public String build() {
return fromMethodInternal(this.baseUrl, this.controllerType, this.method, this.argumentValues)
.build().encode().toUriString();
}
public String buildAndExpand(Object... uriVars) {
return fromMethodInternal(this.baseUrl, this.controllerType, this.method, this.argumentValues)
.buildAndExpand(uriVars).encode().toString();
}
}
}
| MethodArgumentBuilder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/search/QueryPhaseResultConsumer.java | {
"start": 3157,
"end": 30245
} | class ____ extends ArraySearchPhaseResults<SearchPhaseResult> {
private static final Logger logger = LogManager.getLogger(QueryPhaseResultConsumer.class);
private final Executor executor;
private final CircuitBreaker circuitBreaker;
private final SearchProgressListener progressListener;
private final AggregationReduceContext.Builder aggReduceContextBuilder;
private final QueryPhaseRankCoordinatorContext queryPhaseRankCoordinatorContext;
private final int topNSize;
private final boolean hasTopDocs;
private final boolean hasAggs;
private final boolean performFinalReduce;
private final Consumer<Exception> onPartialMergeFailure;
private final int batchReduceSize;
private List<QuerySearchResult> buffer = new ArrayList<>();
private List<SearchShard> emptyResults = new ArrayList<>();
// the memory that is accounted in the circuit breaker for this consumer
private volatile long circuitBreakerBytes;
// the memory that is currently used in the buffer
private volatile long aggsCurrentBufferSize;
private volatile long maxAggsCurrentBufferSize = 0;
private final ArrayDeque<MergeTask> queue = new ArrayDeque<>();
private final AtomicReference<MergeTask> runningTask = new AtomicReference<>();
final AtomicReference<Exception> failure = new AtomicReference<>();
final TopDocsStats topDocsStats;
private volatile MergeResult mergeResult;
private volatile boolean hasPartialReduce;
// Note: at this time, numReducePhases does not count reductions that occur on the data node as part of batched query execution.
private volatile int numReducePhases;
/**
* Creates a {@link QueryPhaseResultConsumer} that incrementally reduces aggregation results
* as shard results are consumed.
*/
public QueryPhaseResultConsumer(
SearchRequest request,
Executor executor,
CircuitBreaker circuitBreaker,
SearchPhaseController controller,
Supplier<Boolean> isCanceled,
SearchProgressListener progressListener,
int expectedResultSize,
Consumer<Exception> onPartialMergeFailure
) {
super(expectedResultSize);
this.executor = executor;
this.circuitBreaker = circuitBreaker;
this.progressListener = progressListener;
this.topNSize = getTopDocsSize(request);
this.performFinalReduce = request.isFinalReduce();
this.onPartialMergeFailure = onPartialMergeFailure;
SearchSourceBuilder source = request.source();
int size = source == null || source.size() == -1 ? SearchService.DEFAULT_SIZE : source.size();
int from = source == null || source.from() == -1 ? SearchService.DEFAULT_FROM : source.from();
this.queryPhaseRankCoordinatorContext = source == null || source.rankBuilder() == null
? null
: source.rankBuilder().buildQueryPhaseCoordinatorContext(size, from);
this.hasTopDocs = (source == null || size != 0) && queryPhaseRankCoordinatorContext == null;
this.hasAggs = source != null && source.aggregations() != null;
this.aggReduceContextBuilder = hasAggs ? controller.getReduceContext(isCanceled, source.aggregations()) : null;
batchReduceSize = (hasAggs || hasTopDocs) ? Math.min(request.getBatchedReduceSize(), expectedResultSize) : expectedResultSize;
topDocsStats = new TopDocsStats(request.resolveTrackTotalHitsUpTo());
}
@Override
protected synchronized void doClose() {
assert assertFailureAndBreakerConsistent();
releaseBuffer();
circuitBreaker.addWithoutBreaking(-circuitBreakerBytes);
circuitBreakerBytes = 0;
if (hasPendingMerges()) {
// This is a theoretically unreachable exception.
throw new IllegalStateException("Attempted to close with partial reduce in-flight");
}
}
private boolean assertFailureAndBreakerConsistent() {
boolean hasFailure = failure.get() != null;
if (hasFailure) {
assert circuitBreakerBytes == 0;
} else {
assert circuitBreakerBytes >= 0;
}
return true;
}
@Override
public void consumeResult(SearchPhaseResult result, Runnable next) {
super.consumeResult(result, () -> {});
QuerySearchResult querySearchResult = result.queryResult();
progressListener.notifyQueryResult(querySearchResult.getShardIndex(), querySearchResult);
consume(querySearchResult, next);
}
private final ArrayDeque<Tuple<TopDocsStats, MergeResult>> batchedResults = new ArrayDeque<>();
/**
* Unlinks partial merge results from this instance and returns them as a partial merge result to be sent to the coordinating node.
*
* @return the partial MergeResult for all shards queried on this data node.
*/
MergeResult consumePartialMergeResultDataNode() {
var mergeResult = this.mergeResult;
this.mergeResult = null;
assert runningTask.get() == null;
final List<QuerySearchResult> buffer;
synchronized (this) {
buffer = this.buffer;
}
if (buffer != null && buffer.isEmpty() == false) {
this.buffer = null;
buffer.sort(RESULT_COMPARATOR);
mergeResult = partialReduce(buffer, emptyResults, topDocsStats, mergeResult, 0);
emptyResults = null;
}
return mergeResult;
}
void addBatchedPartialResult(TopDocsStats topDocsStats, MergeResult mergeResult) {
synchronized (batchedResults) {
batchedResults.add(new Tuple<>(topDocsStats, mergeResult));
}
}
@Override
public SearchPhaseController.ReducedQueryPhase reduce() throws Exception {
if (hasPendingMerges()) {
throw new AssertionError("partial reduce in-flight");
}
Exception f = failure.get();
if (f != null) {
throw f;
}
List<QuerySearchResult> buffer;
synchronized (this) {
// final reduce, we're done with the buffer so we just null it out and continue with a local variable to
// save field references. The synchronized block is never contended but needed to have a memory barrier and sync buffer's
// contents with all the previous writers to it
buffer = this.buffer;
buffer = buffer == null ? Collections.emptyList() : buffer;
this.buffer = null;
}
// ensure consistent ordering
buffer.sort(RESULT_COMPARATOR);
final TopDocsStats topDocsStats = this.topDocsStats;
var mergeResult = this.mergeResult;
final ArrayDeque<Tuple<TopDocsStats, MergeResult>> batchedResults;
synchronized (this.batchedResults) {
batchedResults = this.batchedResults;
}
final int resultSize = buffer.size() + (mergeResult == null ? 0 : 1) + batchedResults.size();
final boolean hasBatchedResults = batchedResults.isEmpty() == false;
final List<TopDocs> topDocsList = hasTopDocs ? new ArrayList<>(resultSize) : null;
final Deque<DelayableWriteable<InternalAggregations>> aggsList = hasAggs ? new ArrayDeque<>(resultSize) : null;
SearchPhaseController.ReducedQueryPhase reducePhase;
long breakerSize = circuitBreakerBytes;
final InternalAggregations aggs;
try {
// consume partial merge result from the un-batched execution path that is used for BwC, shard-level retries, and shard level
// execution for shards on the coordinating node itself
if (mergeResult != null) {
consumePartialMergeResult(mergeResult, topDocsList, aggsList);
breakerSize = addEstimateAndMaybeBreak(mergeResult.estimatedSize);
}
Tuple<TopDocsStats, MergeResult> batchedResult;
while ((batchedResult = batchedResults.poll()) != null) {
topDocsStats.add(batchedResult.v1());
consumePartialMergeResult(batchedResult.v2(), topDocsList, aggsList);
// Add the estimate of the agg size
breakerSize = addEstimateAndMaybeBreak(batchedResult.v2().estimatedSize);
}
for (QuerySearchResult result : buffer) {
topDocsStats.add(result.topDocs(), result.searchTimedOut(), result.terminatedEarly());
if (topDocsList != null) {
TopDocsAndMaxScore topDocs = result.consumeTopDocs();
setShardIndex(topDocs.topDocs, result.getShardIndex());
topDocsList.add(topDocs.topDocs);
}
}
if (aggsList != null) {
// Add an estimate of the final reduce size
breakerSize = addEstimateAndMaybeBreak(estimateRamBytesUsedForReduce(circuitBreakerBytes));
AggregationReduceContext aggReduceContext = performFinalReduce
? aggReduceContextBuilder.forFinalReduction()
: aggReduceContextBuilder.forPartialReduction();
aggReduceContext.setHasBatchedResult(hasBatchedResults);
aggs = aggregate(buffer.iterator(), new Iterator<>() {
@Override
public boolean hasNext() {
return aggsList.isEmpty() == false;
}
@Override
public DelayableWriteable<InternalAggregations> next() {
return aggsList.pollFirst();
}
}, resultSize, aggReduceContext);
} else {
aggs = null;
}
reducePhase = SearchPhaseController.reducedQueryPhase(
results.asList(),
aggs,
topDocsList == null ? Collections.emptyList() : topDocsList,
topDocsStats,
numReducePhases,
false,
queryPhaseRankCoordinatorContext
);
buffer = null;
} finally {
// Buffer is non-null on exception
if (buffer != null) {
releaseAggs(buffer);
if (aggsList != null) {
Releasables.close(aggsList);
}
}
}
if (hasAggs
// reduced aggregations can be null if all shards failed
&& aggs != null) {
// Update the circuit breaker to replace the estimation with the serialized size of the newly reduced result
long finalSize = DelayableWriteable.getSerializedSize(reducePhase.aggregations()) - breakerSize;
addWithoutBreaking(finalSize);
logger.trace("aggs final reduction [{}] max [{}]", aggsCurrentBufferSize, maxAggsCurrentBufferSize);
}
if (progressListener != SearchProgressListener.NOOP) {
progressListener.notifyFinalReduce(
SearchProgressListener.buildSearchShards(results.asList()),
reducePhase.totalHits(),
reducePhase.aggregations(),
reducePhase.numReducePhases()
);
}
return reducePhase;
}
private static void consumePartialMergeResult(
MergeResult partialResult,
List<TopDocs> topDocsList,
Collection<DelayableWriteable<InternalAggregations>> aggsList
) {
if (topDocsList != null) {
addTopDocsToList(partialResult, topDocsList);
}
if (aggsList != null) {
addAggsToList(partialResult, aggsList);
}
}
private static void addTopDocsToList(MergeResult partialResult, List<TopDocs> topDocsList) {
if (partialResult.reducedTopDocs != null) {
topDocsList.add(partialResult.reducedTopDocs);
}
}
private static void addAggsToList(MergeResult partialResult, Collection<DelayableWriteable<InternalAggregations>> aggsList) {
var aggs = partialResult.reducedAggs;
if (aggs != null) {
aggsList.add(aggs);
}
}
private static final Comparator<QuerySearchResult> RESULT_COMPARATOR = Comparator.comparingInt(QuerySearchResult::getShardIndex);
/**
* Called on both the coordinating- and data-node. Both types of nodes use this to partially reduce the merge result once
* {@link #batchReduceSize} shard responses have accumulated. Data nodes also do a final partial reduce before sending query phase
* results back to the coordinating node.
*/
private MergeResult partialReduce(
List<QuerySearchResult> toConsume,
List<SearchShard> processedShards,
TopDocsStats topDocsStats,
@Nullable MergeResult lastMerge,
int numReducePhases
) {
// ensure consistent ordering
toConsume.sort(RESULT_COMPARATOR);
final TopDocs newTopDocs;
final int resultSetSize = toConsume.size() + (lastMerge != null ? 1 : 0);
List<TopDocs> topDocsList;
if (hasTopDocs) {
topDocsList = new ArrayList<>(resultSetSize);
if (lastMerge != null) {
addTopDocsToList(lastMerge, topDocsList);
}
} else {
topDocsList = null;
}
final InternalAggregations newAggs;
try {
for (QuerySearchResult result : toConsume) {
topDocsStats.add(result.topDocs(), result.searchTimedOut(), result.terminatedEarly());
SearchShardTarget target = result.getSearchShardTarget();
processedShards.add(new SearchShard(target.getClusterAlias(), target.getShardId()));
if (topDocsList != null) {
TopDocsAndMaxScore topDocs = result.consumeTopDocs();
setShardIndex(topDocs.topDocs, result.getShardIndex());
topDocsList.add(topDocs.topDocs);
}
}
// we have to merge here in the same way we collect on a shard
newTopDocs = topDocsList == null ? null : mergeTopDocs(topDocsList, topNSize, 0);
newAggs = hasAggs
? aggregate(
toConsume.iterator(),
lastMerge == null ? Collections.emptyIterator() : Iterators.single(lastMerge.reducedAggs),
resultSetSize,
aggReduceContextBuilder.forPartialReduction()
)
: null;
for (QuerySearchResult querySearchResult : toConsume) {
querySearchResult.markAsPartiallyReduced();
}
toConsume = null;
} finally {
releaseAggs(toConsume);
}
if (lastMerge != null) {
processedShards.addAll(lastMerge.processedShards);
}
if (progressListener != SearchProgressListener.NOOP) {
progressListener.notifyPartialReduce(processedShards, topDocsStats.getTotalHits(), newAggs, numReducePhases);
}
// we leave the results un-serialized because serializing is slow but we compute the serialized
// size as an estimate of the memory used by the newly reduced aggregations.
return new MergeResult(
processedShards,
newTopDocs,
newAggs != null ? DelayableWriteable.referencing(newAggs) : null,
newAggs != null ? DelayableWriteable.getSerializedSize(newAggs) : 0
);
}
private static InternalAggregations aggregate(
Iterator<QuerySearchResult> toConsume,
Iterator<DelayableWriteable<InternalAggregations>> partialResults,
int resultSetSize,
AggregationReduceContext reduceContext
) {
try {
Iterator<InternalAggregations> aggsIter = Iterators.map(toConsume, r -> {
try (var res = r.consumeAggs()) {
return res.expand();
}
});
return InternalAggregations.topLevelReduce(partialResults.hasNext() ? Iterators.concat(Iterators.map(partialResults, r -> {
try (r) {
return r.expand();
}
}), aggsIter) : aggsIter, resultSetSize, reduceContext);
} finally {
toConsume.forEachRemaining(QuerySearchResult::releaseAggs);
partialResults.forEachRemaining(Releasable::close);
}
}
public int getNumReducePhases() {
return numReducePhases;
}
private boolean hasFailure() {
return failure.get() != null;
}
private boolean hasPendingMerges() {
return queue.isEmpty() == false || runningTask.get() != null;
}
private synchronized void addWithoutBreaking(long size) {
circuitBreaker.addWithoutBreaking(size);
circuitBreakerBytes += size;
maxAggsCurrentBufferSize = Math.max(maxAggsCurrentBufferSize, circuitBreakerBytes);
}
private synchronized long addEstimateAndMaybeBreak(long estimatedSize) {
circuitBreaker.addEstimateBytesAndMaybeBreak(estimatedSize, "<reduce_aggs>");
circuitBreakerBytes += estimatedSize;
maxAggsCurrentBufferSize = Math.max(maxAggsCurrentBufferSize, circuitBreakerBytes);
return circuitBreakerBytes;
}
/**
* Returns the size of the serialized aggregation that is contained in the
* provided {@link QuerySearchResult}.
*/
private long ramBytesUsedQueryResult(QuerySearchResult result) {
return hasAggs ? result.aggregations().getSerializedSize() : 0;
}
/**
* Returns an estimation of the size that a reduce of the provided size
* would take on memory.
* This size is estimated as roughly 1.5 times the size of the serialized
* aggregations that need to be reduced.
* This method expects an already accounted size, so only an extra 0.5x is returned.
* This estimation can be completely off for some aggregations
* but it is corrected with the real size after the reduce completes.
*/
private static long estimateRamBytesUsedForReduce(long size) {
return Math.round(0.5d * size);
}
private void consume(QuerySearchResult result, Runnable next) {
if (hasFailure()) {
result.consumeAll();
next.run();
} else if (result.isNull() || result.isPartiallyReduced()) {
SearchShardTarget target = result.getSearchShardTarget();
SearchShard searchShard = new SearchShard(target.getClusterAlias(), target.getShardId());
synchronized (this) {
emptyResults.add(searchShard);
}
next.run();
} else {
final long aggsSize = ramBytesUsedQueryResult(result);
boolean executeNextImmediately = true;
boolean hasFailure = false;
synchronized (this) {
if (hasFailure()) {
hasFailure = true;
} else {
if (hasAggs) {
try {
addEstimateAndMaybeBreak(aggsSize);
} catch (Exception exc) {
releaseBuffer();
onMergeFailure(exc);
hasFailure = true;
}
}
if (hasFailure == false) {
var b = buffer;
aggsCurrentBufferSize += aggsSize;
// add one if a partial merge is pending
int size = b.size() + (hasPartialReduce ? 1 : 0);
if (size >= batchReduceSize) {
hasPartialReduce = true;
executeNextImmediately = false;
MergeTask task = new MergeTask(b, aggsCurrentBufferSize, emptyResults, next);
b = buffer = new ArrayList<>();
emptyResults = new ArrayList<>();
aggsCurrentBufferSize = 0;
queue.add(task);
tryExecuteNext();
}
b.add(result);
}
}
}
if (hasFailure) {
result.consumeAll();
}
if (executeNextImmediately) {
next.run();
}
}
}
private void releaseBuffer() {
var b = buffer;
if (b != null) {
this.buffer = null;
for (QuerySearchResult querySearchResult : b) {
querySearchResult.releaseAggs();
}
}
synchronized (this.batchedResults) {
Tuple<TopDocsStats, MergeResult> batchedResult;
while ((batchedResult = batchedResults.poll()) != null) {
Releasables.close(batchedResult.v2().reducedAggs());
}
}
}
private synchronized void onMergeFailure(Exception exc) {
if (failure.compareAndSet(null, exc) == false) {
assert circuitBreakerBytes == 0;
return;
}
assert circuitBreakerBytes >= 0;
if (circuitBreakerBytes > 0) {
// make sure that we reset the circuit breaker
circuitBreaker.addWithoutBreaking(-circuitBreakerBytes);
circuitBreakerBytes = 0;
}
onPartialMergeFailure.accept(exc);
final MergeTask task = runningTask.getAndSet(null);
if (task != null) {
task.cancel();
}
MergeTask mergeTask;
while ((mergeTask = queue.pollFirst()) != null) {
mergeTask.cancel();
}
mergeResult = null;
}
private void tryExecuteNext() {
assert Thread.holdsLock(this);
final MergeTask task;
if (hasFailure() || runningTask.get() != null) {
return;
}
task = queue.poll();
runningTask.set(task);
if (task == null) {
return;
}
executor.execute(new AbstractRunnable() {
@Override
protected void doRun() {
MergeTask mergeTask = task;
List<QuerySearchResult> toConsume = mergeTask.consumeBuffer();
while (mergeTask != null) {
final MergeResult thisMergeResult = mergeResult;
long estimatedTotalSize = (thisMergeResult != null ? thisMergeResult.estimatedSize : 0) + mergeTask.aggsBufferSize;
final MergeResult newMerge;
try {
long estimatedMergeSize = estimateRamBytesUsedForReduce(estimatedTotalSize);
addEstimateAndMaybeBreak(estimatedMergeSize);
estimatedTotalSize += estimatedMergeSize;
++numReducePhases;
newMerge = partialReduce(toConsume, mergeTask.emptyResults, topDocsStats, thisMergeResult, numReducePhases);
} catch (Exception t) {
QueryPhaseResultConsumer.releaseAggs(toConsume);
onMergeFailure(t);
return;
}
synchronized (QueryPhaseResultConsumer.this) {
if (hasFailure()) {
return;
}
mergeResult = newMerge;
if (hasAggs) {
// Update the circuit breaker to remove the size of the source aggregations
// and replace the estimation with the serialized size of the newly reduced result.
long newSize = mergeResult.estimatedSize - estimatedTotalSize;
addWithoutBreaking(newSize);
if (logger.isTraceEnabled()) {
logger.trace(
"aggs partial reduction [{}->{}] max [{}]",
estimatedTotalSize,
mergeResult.estimatedSize,
maxAggsCurrentBufferSize
);
}
}
}
Runnable r = mergeTask.consumeListener();
synchronized (QueryPhaseResultConsumer.this) {
while (true) {
mergeTask = queue.poll();
runningTask.set(mergeTask);
if (mergeTask == null) {
break;
}
toConsume = mergeTask.consumeBuffer();
if (toConsume != null) {
break;
}
}
}
if (r != null) {
r.run();
}
}
}
@Override
public void onFailure(Exception exc) {
onMergeFailure(exc);
}
});
}
private static void releaseAggs(@Nullable List<QuerySearchResult> toConsume) {
if (toConsume != null) {
for (QuerySearchResult result : toConsume) {
result.releaseAggs();
}
}
}
record MergeResult(
List<SearchShard> processedShards,
@Nullable TopDocs reducedTopDocs,
@Nullable DelayableWriteable<InternalAggregations> reducedAggs,
long estimatedSize
) implements Writeable {
private static final TransportVersion BATCHED_QUERY_EXECUTION_DELAYABLE_WRITEABLE = TransportVersion.fromName(
"batched_query_execution_delayable_writeable"
);
static MergeResult readFrom(StreamInput in) throws IOException {
return new MergeResult(List.of(), Lucene.readTopDocsIncludingShardIndex(in), in.readOptionalWriteable(i -> {
if (i.getTransportVersion().supports(BATCHED_QUERY_EXECUTION_DELAYABLE_WRITEABLE)) {
return DelayableWriteable.delayed(InternalAggregations::readFrom, i);
} else {
return DelayableWriteable.referencing(InternalAggregations.readFrom(i));
}
}), in.readVLong());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
Lucene.writeTopDocsIncludingShardIndex(out, reducedTopDocs);
out.writeOptionalWriteable(
reducedAggs == null
? null
: (out.getTransportVersion().supports(BATCHED_QUERY_EXECUTION_DELAYABLE_WRITEABLE) ? reducedAggs : reducedAggs.expand())
);
out.writeVLong(estimatedSize);
}
}
private static | QueryPhaseResultConsumer |
java | dropwizard__dropwizard | dropwizard-request-logging/src/main/java/io/dropwizard/request/logging/filter/UriFilterFactory.java | {
"start": 504,
"end": 1196
} | class ____ implements FilterFactory<IAccessEvent> {
@NotNull
private Set<String> uris = Collections.emptySet();
@JsonProperty
public Set<String> getUris() {
return uris;
}
@JsonProperty
public void setUris(final Set<String> uris) {
this.uris = uris;
}
@Override
public Filter<IAccessEvent> build() {
return new Filter<>() {
@Override
public FilterReply decide(final IAccessEvent event) {
if (uris.contains(event.getRequestURI())) {
return FilterReply.DENY;
}
return FilterReply.NEUTRAL;
}
};
}
}
| UriFilterFactory |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/AlterShareGroupOffsetsRequest.java | {
"start": 1478,
"end": 3584
} | class ____ extends AbstractRequest.Builder<AlterShareGroupOffsetsRequest> {
private final AlterShareGroupOffsetsRequestData data;
public Builder(AlterShareGroupOffsetsRequestData data) {
super(ApiKeys.ALTER_SHARE_GROUP_OFFSETS);
this.data = data;
}
@Override
public AlterShareGroupOffsetsRequest build(short version) {
return new AlterShareGroupOffsetsRequest(data, version);
}
@Override
public String toString() {
return data.toString();
}
}
@Override
public AlterShareGroupOffsetsResponse getErrorResponse(int throttleTimeMs, Throwable e) {
return getErrorResponse(throttleTimeMs, Errors.forException(e));
}
public AlterShareGroupOffsetsResponse getErrorResponse(int throttleTimeMs, Errors error) {
return getErrorResponse(throttleTimeMs, error.code(), error.message());
}
public AlterShareGroupOffsetsResponse getErrorResponse(int throttleTimeMs, short errorCode, String message) {
return new AlterShareGroupOffsetsResponse(
new AlterShareGroupOffsetsResponseData()
.setThrottleTimeMs(throttleTimeMs)
.setErrorCode(errorCode)
.setErrorMessage(message)
);
}
public static AlterShareGroupOffsetsResponseData getErrorResponseData(Errors error) {
return getErrorResponseData(error, null);
}
public static AlterShareGroupOffsetsResponseData getErrorResponseData(Errors error, String errorMessage) {
return new AlterShareGroupOffsetsResponseData()
.setErrorCode(error.code())
.setErrorMessage(errorMessage == null ? error.message() : errorMessage);
}
public static AlterShareGroupOffsetsRequest parse(Readable readable, short version) {
return new AlterShareGroupOffsetsRequest(
new AlterShareGroupOffsetsRequestData(readable, version),
version
);
}
@Override
public AlterShareGroupOffsetsRequestData data() {
return data;
}
}
| Builder |
java | quarkusio__quarkus | extensions/smallrye-graphql/deployment/src/test/java/io/quarkus/smallrye/graphql/deployment/ExcludeNullFieldsInResponseTest.java | {
"start": 566,
"end": 2588
} | class ____ extends AbstractGraphQLTest {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(TestApi.class, Book.class, Author.class)
.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml")
.addAsResource(
new StringAsset(
"quarkus.smallrye-graphql.exclude-null-fields-in-responses=true"),
"application.properties"));
@Test
void testExcludeNullFieldsInResponse() {
final String request = getPayload("""
{
books {
name
pages
author {
firstName
lastName
}
}
}""");
given()
.when()
.accept(MEDIATYPE_JSON)
.contentType(MEDIATYPE_JSON)
.body(request)
.post("/graphql")
.then()
.assertThat()
.statusCode(OK)
.and()
.body(containsString("{\"data\":{" +
"\"books\":[{" +
"\"name\":\"The Hobbit\"," +
// missing null field
"\"author\":{" +
"\"firstName\":\"J.R.R.\"" +
// missing null field
"}" +
"},{" +
"\"name\":\"The Lord of the Rings\"," +
"\"pages\":1178," +
"\"author\":{" +
"\"firstName\":\"J.R.R.\"," +
"\"lastName\":\"Tolkien\"" +
"}" +
"}]" +
"}}"));
}
@GraphQLApi
public static | ExcludeNullFieldsInResponseTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/KubernetesCronJobEndpointBuilderFactory.java | {
"start": 1433,
"end": 1579
} | interface ____ {
/**
* Builder for endpoint for the Kubernetes Cronjob component.
*/
public | KubernetesCronJobEndpointBuilderFactory |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/inject/MoreThanOneScopeAnnotationOnClass.java | {
"start": 2145,
"end": 2918
} | class ____ extends BugChecker implements ClassTreeMatcher {
private static final MultiMatcher<Tree, AnnotationTree> SCOPE_ANNOTATION_MATCHER =
annotations(AT_LEAST_ONE, IS_SCOPING_ANNOTATION);
@Override
public final Description matchClass(ClassTree classTree, VisitorState state) {
MultiMatchResult<AnnotationTree> scopeAnnotationResult =
SCOPE_ANNOTATION_MATCHER.multiMatchResult(classTree, state);
if (scopeAnnotationResult.matches() && !IS_DAGGER_COMPONENT.matches(classTree, state)) {
ImmutableList<AnnotationTree> scopeAnnotations = scopeAnnotationResult.matchingNodes();
if (scopeAnnotations.size() > 1) {
return buildDescription(classTree)
.setMessage(
"This | MoreThanOneScopeAnnotationOnClass |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineDataToRetrieve.java | {
"start": 3530,
"end": 6810
} | class ____ {
private TimelineFilterList confsToRetrieve;
private TimelineFilterList metricsToRetrieve;
private EnumSet<Field> fieldsToRetrieve;
private Integer metricsLimit;
private Long metricsTimeBegin;
private Long metricsTimeEnd;
private static final long DEFAULT_METRICS_BEGIN_TIME = 0L;
private static final long DEFAULT_METRICS_END_TIME = Long.MAX_VALUE;
/**
* Default limit of number of metrics to return.
*/
public static final Integer DEFAULT_METRICS_LIMIT = 1;
public TimelineDataToRetrieve() {
this(null, null, null, null, null, null);
}
public TimelineDataToRetrieve(TimelineFilterList confs,
TimelineFilterList metrics, EnumSet<Field> fields,
Integer limitForMetrics, Long metricTimeBegin, Long metricTimeEnd) {
this.confsToRetrieve = confs;
this.metricsToRetrieve = metrics;
this.fieldsToRetrieve = fields;
if (limitForMetrics == null || limitForMetrics < 1) {
this.metricsLimit = DEFAULT_METRICS_LIMIT;
} else {
this.metricsLimit = limitForMetrics;
}
if (this.fieldsToRetrieve == null) {
this.fieldsToRetrieve = EnumSet.noneOf(Field.class);
}
if (metricTimeBegin == null || metricTimeBegin < 0) {
this.metricsTimeBegin = DEFAULT_METRICS_BEGIN_TIME;
} else {
this.metricsTimeBegin = metricTimeBegin;
}
if (metricTimeEnd == null || metricTimeEnd < 0) {
this.metricsTimeEnd = DEFAULT_METRICS_END_TIME;
} else {
this.metricsTimeEnd = metricTimeEnd;
}
if (this.metricsTimeBegin > this.metricsTimeEnd) {
throw new IllegalArgumentException("metricstimebegin should not be " +
"greater than metricstimeend");
}
}
public TimelineFilterList getConfsToRetrieve() {
return confsToRetrieve;
}
public void setConfsToRetrieve(TimelineFilterList confs) {
this.confsToRetrieve = confs;
}
public TimelineFilterList getMetricsToRetrieve() {
return metricsToRetrieve;
}
public void setMetricsToRetrieve(TimelineFilterList metrics) {
this.metricsToRetrieve = metrics;
}
public EnumSet<Field> getFieldsToRetrieve() {
return fieldsToRetrieve;
}
public void setFieldsToRetrieve(EnumSet<Field> fields) {
this.fieldsToRetrieve = fields;
}
/**
* Adds configs and metrics fields to fieldsToRetrieve(if they are not
* present) if confsToRetrieve and metricsToRetrieve are specified.
*/
public void addFieldsBasedOnConfsAndMetricsToRetrieve() {
if (!fieldsToRetrieve.contains(Field.CONFIGS) && confsToRetrieve != null &&
!confsToRetrieve.getFilterList().isEmpty()) {
fieldsToRetrieve.add(Field.CONFIGS);
}
if (!fieldsToRetrieve.contains(Field.METRICS) &&
metricsToRetrieve != null &&
!metricsToRetrieve.getFilterList().isEmpty()) {
fieldsToRetrieve.add(Field.METRICS);
}
}
public Integer getMetricsLimit() {
return metricsLimit;
}
public Long getMetricsTimeBegin() {
return this.metricsTimeBegin;
}
public Long getMetricsTimeEnd() {
return metricsTimeEnd;
}
public void setMetricsLimit(Integer limit) {
if (limit == null || limit < 1) {
this.metricsLimit = DEFAULT_METRICS_LIMIT;
} else {
this.metricsLimit = limit;
}
}
}
| TimelineDataToRetrieve |
java | spring-projects__spring-boot | core/spring-boot-test/src/test/java/org/springframework/boot/test/context/SpringBootTestUseMainMethodWithPropertiesTests.java | {
"start": 1288,
"end": 1600
} | class ____ {
@Autowired
private ApplicationContext applicationContext;
@Test
void propertyIsSet() {
assertThat(this.applicationContext.getEnvironment().getProperty("test")).isEqualTo("123");
}
@SpringBootConfiguration(proxyBeanMethods = false)
public static | SpringBootTestUseMainMethodWithPropertiesTests |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/NNHAServiceTarget.java | {
"start": 1723,
"end": 7319
} | class ____ extends HAServiceTarget {
// Keys added to the fencing script environment
private static final String NAMESERVICE_ID_KEY = "nameserviceid";
private static final String NAMENODE_ID_KEY = "namenodeid";
private final InetSocketAddress addr;
private final InetSocketAddress lifelineAddr;
private InetSocketAddress zkfcAddr;
private NodeFencer fencer;
private BadFencingConfigurationException fenceConfigError;
private HdfsConfiguration targetConf;
private String nnId;
private String nsId;
private boolean autoFailoverEnabled;
/**
* Create a NNHAServiceTarget for a namenode.
* Look up addresses from configuration.
*
* @param conf HDFS configuration.
* @param nsId nsId of this nn.
* @param nnId nnId of this nn.
*/
public NNHAServiceTarget(Configuration conf,
String nsId, String nnId) {
initializeNnConfig(conf, nsId, nnId);
String serviceAddr =
DFSUtil.getNamenodeServiceAddr(targetConf, nsId, nnId);
if (serviceAddr == null) {
throw new IllegalArgumentException(
"Unable to determine service address for namenode '" + nnId + "'");
}
this.addr = NetUtils.createSocketAddr(serviceAddr,
HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT);
String lifelineAddrStr =
DFSUtil.getNamenodeLifelineAddr(targetConf, nsId, nnId);
this.lifelineAddr = (lifelineAddrStr != null) ?
NetUtils.createSocketAddr(lifelineAddrStr) : null;
initializeFailoverConfig();
}
/**
* Create a NNHAServiceTarget for a namenode.
* Addresses are provided so we don't need to lookup the config.
*
* @param conf HDFS configuration.
* @param nsId nsId of this nn.
* @param nnId nnId of this nn.
* @param addr Provided service address.
* @param lifelineAddr Provided lifeline address.
*/
public NNHAServiceTarget(Configuration conf,
String nsId, String nnId,
String addr, String lifelineAddr) {
initializeNnConfig(conf, nsId, nnId);
this.addr = NetUtils.createSocketAddr(addr);
this.lifelineAddr = NetUtils.createSocketAddr(lifelineAddr);
initializeFailoverConfig();
}
private void initializeNnConfig(Configuration conf,
String providedNsId, String providedNnId) {
Preconditions.checkNotNull(providedNnId);
if (providedNsId == null) {
providedNsId = DFSUtil.getOnlyNameServiceIdOrNull(conf);
if (providedNsId == null) {
String errorString = "Unable to determine the name service ID.";
String[] dfsNames = conf.getStrings(DFS_NAMESERVICES);
if ((dfsNames != null) && (dfsNames.length > 1)) {
errorString = "Unable to determine the name service ID. " +
"This is an HA configuration with multiple name services " +
"configured. " + DFS_NAMESERVICES + " is set to " +
Arrays.toString(dfsNames) + ". Please re-run with the -ns option.";
}
throw new IllegalArgumentException(errorString);
}
}
// Make a copy of the conf, and override configs based on the
// target node -- not the node we happen to be running on.
this.targetConf = new HdfsConfiguration(conf);
NameNode.initializeGenericKeys(targetConf, providedNsId, providedNnId);
this.nsId = providedNsId;
this.nnId = providedNnId;
}
private void initializeFailoverConfig() {
this.autoFailoverEnabled = targetConf.getBoolean(
DFSConfigKeys.DFS_HA_AUTO_FAILOVER_ENABLED_KEY,
DFSConfigKeys.DFS_HA_AUTO_FAILOVER_ENABLED_DEFAULT);
if (autoFailoverEnabled) {
int port = DFSZKFailoverController.getZkfcPort(targetConf);
if (port != 0) {
setZkfcPort(port);
}
}
try {
this.fencer = NodeFencer.create(targetConf,
DFSConfigKeys.DFS_HA_FENCE_METHODS_KEY);
} catch (BadFencingConfigurationException e) {
this.fenceConfigError = e;
}
}
/**
* @return the NN's IPC address.
*/
@Override
public InetSocketAddress getAddress() {
return addr;
}
@Override
public InetSocketAddress getHealthMonitorAddress() {
return lifelineAddr;
}
@Override
public InetSocketAddress getZKFCAddress() {
Preconditions.checkState(autoFailoverEnabled,
"ZKFC address not relevant when auto failover is off");
assert zkfcAddr != null;
return zkfcAddr;
}
void setZkfcPort(int port) {
assert autoFailoverEnabled;
this.zkfcAddr = new InetSocketAddress(addr.getAddress(), port);
}
@Override
public void checkFencingConfigured() throws BadFencingConfigurationException {
if (fenceConfigError != null) {
throw fenceConfigError;
}
if (fencer == null) {
throw new BadFencingConfigurationException(
"No fencer configured for " + this);
}
}
@Override
public NodeFencer getFencer() {
return fencer;
}
@Override
public String toString() {
return "NameNode at " + (lifelineAddr != null ? lifelineAddr : addr);
}
public String getNameServiceId() {
return this.nsId;
}
public String getNameNodeId() {
return this.nnId;
}
@Override
protected void addFencingParameters(Map<String, String> ret) {
super.addFencingParameters(ret);
ret.put(NAMESERVICE_ID_KEY, getNameServiceId());
ret.put(NAMENODE_ID_KEY, getNameNodeId());
}
@Override
public boolean isAutoFailoverEnabled() {
return autoFailoverEnabled;
}
@Override
public boolean supportObserver() {
return true;
}
}
| NNHAServiceTarget |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/impl/SpringDefaultCamelContextAllowUseOriginalMessageTrueTest.java | {
"start": 1072,
"end": 1451
} | class ____
extends DefaultCamelContextAllowUseOriginalMessageTrueTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this,
"org/apache/camel/spring/impl/SpringDefaultCamelContextAllowUseOriginalMessageTrueTest.xml");
}
}
| SpringDefaultCamelContextAllowUseOriginalMessageTrueTest |
java | google__guava | guava-gwt/src-super/com/google/common/util/concurrent/super/com/google/common/util/concurrent/Platform.java | {
"start": 970,
"end": 2008
} | class ____ {
static boolean isInstanceOfThrowableClass(Throwable t, Class<? extends Throwable> expectedClass) {
/*
* This method is used only by CatchingFuture, and CatchingFuture accepts only Throwable.class
* under GWT.
*/
return true;
}
static void restoreInterruptIfIsInterruptedException(Throwable t) {}
static void interruptCurrentThread() {}
static void rethrowIfErrorOtherThanStackOverflow(Throwable t) {
if (t instanceof Error) {
// There is no StackOverflowError under GWT/J2CL.
throw (Error) t;
}
}
static <V extends @Nullable Object> V get(AbstractFuture<V> future)
throws InterruptedException, ExecutionException {
return future.getFromAlreadyDoneTrustedFuture();
}
static <V extends @Nullable Object> V get(AbstractFuture<V> future, long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
checkNotNull(unit);
return future.getFromAlreadyDoneTrustedFuture();
}
private Platform() {}
}
| Platform |
java | quarkusio__quarkus | integration-tests/test-extension/extension/deployment/src/main/java/io/quarkus/extest/deployment/MapBuildTimeConfigBuildStep.java | {
"start": 450,
"end": 1480
} | class ____ {
public static final String TEST_MAP_CONFIG_MARKER = "test-map-config";
public static final String INVOKED = "the test was invoked";
@BuildStep
void validate(BuildProducer<ConfigPropertyBuildItem> configProperties, TestMappingBuildTimeRunTime mapConfig) {
Optional<String> pathToMarkerFile = ConfigProvider.getConfig().getOptionalValue("test-map-config", String.class);
if (pathToMarkerFile.isPresent()) {
assert mapConfig.mapMap().get("main-profile") != null;
assert mapConfig.mapMap().get("main-profile").get("property") != null;
assert mapConfig.mapMap().get("test-profile") != null;
assert mapConfig.mapMap().get("test-profile").get("property") != null;
try {
Files.write(Paths.get(pathToMarkerFile.get()), INVOKED.getBytes());
} catch (IOException e) {
throw new RuntimeException("Unable to write to the marker file.");
}
}
}
}
| MapBuildTimeConfigBuildStep |
java | processing__processing4 | java/libraries/io/src/processing/io/I2C.java | {
"start": 2658,
"end": 4355
} | interface ____
* @see list
* @webref I2C
*/
public I2C(String dev) {
NativeInterface.loadLibrary();
this.dev = dev;
if (NativeInterface.isSimulated()) {
return;
}
handle = NativeInterface.openDevice("/dev/" + dev);
if (handle < 0) {
throw new RuntimeException(NativeInterface.getError(handle));
}
}
/**
* Begins a transmission to an attached device.<br/>
* <br/>
* This function expects the address in the lower 7 bits, the same way as in
* Arduino's Wire library, and as shown in the output of the i2cdetect tool. If
* the address provided in a datasheet is greater than 127 (hex 0x7f) or there
* are separate addresses for read and write operations listed, which vary
* exactly by one, then you want to shift the this number by one bit to the
* right before passing it as an argument to this function.
*
* @see write
* @see read
* @see endTransmission
* @webref I2C
* @webBrief Begins a transmission to an attached device
*/
public void beginTransmission(int slave) {
// addresses 120 (0x78) to 127 are additionally reserved
if (0x78 <= slave) {
System.err.println("beginTransmission expects a 7 bit address, try shifting one bit to the right");
throw new IllegalArgumentException("Illegal address");
}
this.slave = slave;
transmitting = true;
out = null;
}
/**
* Closes the I2C device<br/>
* </br>
* It is normally not necessary to explicitly close I2C interfaces, as they are
* closed automatically by the operating system when the sketch exits.</br>
* </br>
* Note: It is possible to have two or more object using the same | name |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/ComponentRequirementRequestRepresentation.java | {
"start": 2350,
"end": 2871
} | class ____ {
abstract ComponentRequirementRequestRepresentation create(
ContributionBinding binding, ComponentRequirement componentRequirement);
final ComponentRequirementRequestRepresentation create(BoundInstanceBinding binding) {
return create(binding, ComponentRequirement.forBoundInstance(binding));
}
final ComponentRequirementRequestRepresentation create(ComponentDependencyBinding binding) {
return create(binding, ComponentRequirement.forDependency(binding));
}
}
}
| Factory |
java | quarkusio__quarkus | extensions/scheduler/deployment/src/test/java/io/quarkus/scheduler/test/DuplicateJobIdentityTest.java | {
"start": 780,
"end": 1014
} | class ____ {
@Scheduled(cron = "0/1 * * * * ?", identity = "identity")
void firstMethod() {
}
@Scheduled(cron = "0/1 * * * * ?", identity = "identity")
void secondMethod() {
}
}
}
| Jobs |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/field/vectors/BitKnnDenseVectorDocValuesField.java | {
"start": 685,
"end": 1063
} | class ____ extends ByteKnnDenseVectorDocValuesField {
public BitKnnDenseVectorDocValuesField(@Nullable ByteVectorValues input, String name, int dims) {
super(input, name, dims / 8, DenseVectorFieldMapper.ElementType.BIT);
}
@Override
protected DenseVector getVector() {
return new BitKnnDenseVector(vector);
}
}
| BitKnnDenseVectorDocValuesField |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/asm/ClassWriter.java | {
"start": 34660,
"end": 34848
} | class ____ build. Does nothing if the
* constant pool already contains a similar item. <i>This method is intended for {@link Attribute}
* sub classes, and is normally not needed by | being |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/Schema.java | {
"start": 41297,
"end": 41409
} | class ____ extends Schema {
public LongSchema() {
super(Type.LONG);
}
}
private static | LongSchema |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/EnrichWithUnitOfWorkTest.java | {
"start": 1139,
"end": 3671
} | class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testEnrichWith() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:routeA").enrichWith("direct:routeB", true, false).body((a, b) -> b);
from("direct:routeB").enrichWith("direct:routeC", true, false).body((a, b) -> b);
from("direct:routeC").setBody(constant("Bye World"));
}
});
context.start();
Exchange out = template.request("direct:routeA", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getMessage().setBody("Hello World");
exchange.getExchangeExtension().addOnCompletion(new SynchronizationAdapter() {
@Override
public void onDone(Exchange exchange) {
exchange.getMessage().setBody("Done " + exchange.getMessage().getBody());
}
});
}
});
Assertions.assertFalse(out.isFailed());
Assertions.assertEquals("Done Bye World", out.getMessage().getBody());
}
@Test
public void testEnrichWithShareUnitOfWork() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:routeA").enrichWith("direct:routeB", true, true).body((a, b) -> b);
from("direct:routeB").enrichWith("direct:routeC", true, true).body((a, b) -> b);
from("direct:routeC").setBody(constant("Bye World"));
}
});
context.start();
Exchange out = template.request("direct:routeA", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getMessage().setBody("Hello World");
exchange.getExchangeExtension().addOnCompletion(new SynchronizationAdapter() {
@Override
public void onDone(Exchange exchange) {
exchange.getMessage().setBody("Done " + exchange.getMessage().getBody());
}
});
}
});
Assertions.assertFalse(out.isFailed());
Assertions.assertEquals("Done Bye World", out.getMessage().getBody());
}
}
| EnrichWithUnitOfWorkTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TypeNameShadowingTest.java | {
"start": 8562,
"end": 8772
} | class ____ {}
<T, BadParameterName> void f(T t, BadParameterName u) {}
}
""")
.addOutputLines(
"out/Foo.java",
"""
| BadParameterName |
java | playframework__playframework | core/play/src/main/java/play/http/HttpFilters.java | {
"start": 336,
"end": 614
} | interface ____ {
/** @return the list of filters that should filter every request. */
List<EssentialFilter> getFilters();
/** @return a Scala HttpFilters object */
default play.api.http.HttpFilters asScala() {
return new JavaHttpFiltersAdapter(this);
}
}
| HttpFilters |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/UntypedDeserializationTest.java | {
"start": 4311,
"end": 6412
} | class ____ the target.
*/
Object root = MAPPER.readValue(JSON, Object.class);
assertInstanceOf(Map.class, root);
Map<?,?> rootMap = (Map<?,?>) root;
assertEquals(1, rootMap.size());
Map.Entry<?,?> rootEntry = rootMap.entrySet().iterator().next();
assertEquals("Image", rootEntry.getKey());
Object image = rootEntry.getValue();
assertInstanceOf(Map.class, image);
Map<?,?> imageMap = (Map<?,?>) image;
assertEquals(5, imageMap.size());
Object value = imageMap.get("Width");
assertInstanceOf(Integer.class, value);
assertEquals(Integer.valueOf(SAMPLE_SPEC_VALUE_WIDTH), value);
value = imageMap.get("Height");
assertInstanceOf(Integer.class, value);
assertEquals(Integer.valueOf(SAMPLE_SPEC_VALUE_HEIGHT), value);
assertEquals(SAMPLE_SPEC_VALUE_TITLE, imageMap.get("Title"));
// Another Object, "thumbnail"
value = imageMap.get("Thumbnail");
assertInstanceOf(Map.class, value);
Map<?,?> tnMap = (Map<?,?>) value;
assertEquals(3, tnMap.size());
assertEquals(Integer.valueOf(SAMPLE_SPEC_VALUE_TN_HEIGHT), tnMap.get("Height"));
// for some reason, width is textual, not numeric...
assertEquals(SAMPLE_SPEC_VALUE_TN_WIDTH, tnMap.get("Width"));
assertEquals(SAMPLE_SPEC_VALUE_TN_URL, tnMap.get("Url"));
// And then number list, "IDs"
value = imageMap.get("IDs");
assertInstanceOf(List.class, value);
List<Object> ids = (List<Object>) value;
assertEquals(4, ids.size());
assertEquals(Integer.valueOf(SAMPLE_SPEC_VALUE_TN_ID1), ids.get(0));
assertEquals(Integer.valueOf(SAMPLE_SPEC_VALUE_TN_ID2), ids.get(1));
assertEquals(Integer.valueOf(SAMPLE_SPEC_VALUE_TN_ID3), ids.get(2));
assertEquals(Integer.valueOf(SAMPLE_SPEC_VALUE_TN_ID4), ids.get(3));
// and that's all folks!
}
@Test
public void testUntypedMap() throws Exception
{
// to get "untyped" default map-to-map, pass Object. | as |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/LazyCollectionLoadingTest.java | {
"start": 5921,
"end": 6176
} | class ____ {
@Id
@GeneratedValue( strategy = GenerationType.AUTO )
Long id;
@ManyToOne( cascade = CascadeType.ALL, fetch = FetchType.LAZY )
Parent parent;
String name;
Child() {
}
Child(String name) {
this.name = name;
}
}
}
| Child |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/beanparam/BeanParamTest.java | {
"start": 1403,
"end": 2644
} | class ____ extends Top {
@HeaderParam("headerParam")
private String headerParam = "headerParam";
@CookieParam("cookieParam")
private String cookieParam = "cookieParam";
@FormParam("formParam")
private String formParam = "formParam";
@QueryParam("queryParam")
private String queryParam = "queryParam";
// FIXME: Matrix not supported
public String getHeaderParam() {
return headerParam;
}
public void setHeaderParam(String headerParam) {
this.headerParam = headerParam;
}
public String getCookieParam() {
return cookieParam;
}
public void setCookieParam(String cookieParam) {
this.cookieParam = cookieParam;
}
public String getFormParam() {
return formParam;
}
public void setFormParam(String formParam) {
this.formParam = formParam;
}
public String getQueryParam() {
return queryParam;
}
public void setQueryParam(String queryParam) {
this.queryParam = queryParam;
}
}
@Path("/")
public static | MyBeanParamWithFieldsAndProperties |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/codec/xml/Jaxb2Helper.java | {
"start": 1329,
"end": 1401
} | class ____ JAXB2.
*
* @author Arjen Poutsma
* @since 6.1
*/
abstract | for |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/SharedMetadataReaderFactoryContextInitializer.java | {
"start": 6250,
"end": 7204
} | class ____ implements Supplier<Object> {
private final ConfigurableApplicationContext context;
private final Supplier<?> instanceSupplier;
ConfigurationClassPostProcessorCustomizingSupplier(ConfigurableApplicationContext context,
Supplier<?> instanceSupplier) {
this.context = context;
this.instanceSupplier = instanceSupplier;
}
@Override
public Object get() {
Object instance = this.instanceSupplier.get();
if (instance instanceof ConfigurationClassPostProcessor postProcessor) {
configureConfigurationClassPostProcessor(postProcessor);
}
return instance;
}
private void configureConfigurationClassPostProcessor(ConfigurationClassPostProcessor instance) {
instance.setMetadataReaderFactory(this.context.getBean(BEAN_NAME, MetadataReaderFactory.class));
}
}
/**
* {@link FactoryBean} to create the shared {@link MetadataReaderFactory}.
*/
static | ConfigurationClassPostProcessorCustomizingSupplier |
java | google__truth | core/src/main/java/com/google/common/truth/OptionalDoubleSubject.java | {
"start": 1035,
"end": 3763
} | class ____ extends Subject {
private final @Nullable OptionalDouble actual;
private OptionalDoubleSubject(FailureMetadata failureMetadata, @Nullable OptionalDouble actual) {
super(failureMetadata, actual);
this.actual = actual;
}
/** Checks that the actual {@link OptionalDouble} contains a value. */
public void isPresent() {
if (actual == null) {
failWithActual(simpleFact("expected present optional"));
} else if (!actual.isPresent()) {
failWithoutActual(simpleFact("expected to be present"));
}
}
/** Checks that the actual {@link OptionalDouble} does not contain a value. */
public void isEmpty() {
if (actual == null) {
failWithActual(simpleFact("expected empty optional"));
} else if (actual.isPresent()) {
failWithoutActual(
simpleFact("expected to be empty"),
fact("but was present with value", doubleToString(actual.getAsDouble())));
}
}
/**
* Checks that the actual {@link OptionalDouble} contains the given value. This method is
* <i>not</i> recommended when the code under test is doing any kind of arithmetic, since the
* exact result of floating point arithmetic is sensitive to apparently trivial changes. More
* sophisticated comparisons can be done using {@code assertThat(optional.getAsDouble())…}. This
* method is recommended when the code under test is specified as either copying a value without
* modification from its input or returning a well-defined literal or constant value.
*/
public void hasValue(double expected) {
if (actual == null) {
failWithActual("expected an optional with value", expected);
} else if (!actual.isPresent()) {
failWithoutActual(
fact("expected to have value", doubleToString(expected)), simpleFact("but was absent"));
} else {
checkNoNeedToDisplayBothValues("getAsDouble()")
.that(actual.getAsDouble())
.isEqualTo(expected);
}
}
/**
* Obsolete factory instance. This factory was previously necessary for assertions like {@code
* assertWithMessage(...).about(optionalDoubles()).that(optional)....}. Now, you can perform
* assertions like that without the {@code about(...)} call.
*
* @deprecated Instead of {@code about(optionalDoubles()).that(...)}, use just {@code that(...)}.
* Similarly, instead of {@code assertAbout(optionalDoubles()).that(...)}, use just {@code
* assertThat(...)}.
*/
@Deprecated
@SuppressWarnings("InlineMeSuggester") // We want users to remove the surrounding call entirely.
public static Factory<OptionalDoubleSubject, OptionalDouble> optionalDoubles() {
return OptionalDoubleSubject::new;
}
}
| OptionalDoubleSubject |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/datatiers/NodesDataTiersUsageTransportAction.java | {
"start": 7563,
"end": 8144
} | class ____ extends AbstractTransportRequest {
public NodeRequest(StreamInput in) throws IOException {
super(in);
}
public NodeRequest() {
}
@Override
public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
return new CancellableTask(id, type, action, "", parentTaskId, headers);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
}
}
public static | NodeRequest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/BeanEndpointBuilderFactory.java | {
"start": 1565,
"end": 5317
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedBeanEndpointBuilder advanced() {
return (AdvancedBeanEndpointBuilder) this;
}
/**
* Sets the name of the method to invoke on the bean.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*
* @param method the value to set
* @return the dsl builder
*/
default BeanEndpointBuilder method(String method) {
doSetProperty("method", method);
return this;
}
/**
* Scope of bean. When using singleton scope (default) the bean is
* created or looked up only once and reused for the lifetime of the
* endpoint. The bean should be thread-safe in case concurrent threads
* is calling the bean at the same time. When using request scope the
* bean is created or looked up once per request (exchange). This can be
* used if you want to store state on a bean while processing a request
* and you want to call the same bean instance multiple times while
* processing the request. The bean does not have to be thread-safe as
* the instance is only called from the same request. When using
* prototype scope, then the bean will be looked up or created per call.
* However in case of lookup then this is delegated to the bean registry
* such as Spring or CDI (if in use), which depends on their
* configuration can act as either singleton or prototype scope. so when
* using prototype then this depends on the delegated registry.
*
* The option is a: <code>org.apache.camel.BeanScope</code> type.
*
* Default: Singleton
* Group: common
*
* @param scope the value to set
* @return the dsl builder
*/
default BeanEndpointBuilder scope(org.apache.camel.BeanScope scope) {
doSetProperty("scope", scope);
return this;
}
/**
* Scope of bean. When using singleton scope (default) the bean is
* created or looked up only once and reused for the lifetime of the
* endpoint. The bean should be thread-safe in case concurrent threads
* is calling the bean at the same time. When using request scope the
* bean is created or looked up once per request (exchange). This can be
* used if you want to store state on a bean while processing a request
* and you want to call the same bean instance multiple times while
* processing the request. The bean does not have to be thread-safe as
* the instance is only called from the same request. When using
* prototype scope, then the bean will be looked up or created per call.
* However in case of lookup then this is delegated to the bean registry
* such as Spring or CDI (if in use), which depends on their
* configuration can act as either singleton or prototype scope. so when
* using prototype then this depends on the delegated registry.
*
* The option will be converted to a
* <code>org.apache.camel.BeanScope</code> type.
*
* Default: Singleton
* Group: common
*
* @param scope the value to set
* @return the dsl builder
*/
default BeanEndpointBuilder scope(String scope) {
doSetProperty("scope", scope);
return this;
}
}
/**
* Advanced builder for endpoint for the Bean component.
*/
public | BeanEndpointBuilder |
java | google__gson | extras/src/test/java/com/google/gson/graph/GraphAdapterBuilderTest.java | {
"start": 8603,
"end": 8830
} | class ____ {
final String name;
final Company company;
Employee(String name, Company company) {
this.name = name;
this.company = company;
this.company.employees.add(this);
}
}
static | Employee |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/io/statistics/BaseStatistics.java | {
"start": 1055,
"end": 2177
} | interface ____ {
/** Constant indicating that the input size is unknown. */
@PublicEvolving public static final long SIZE_UNKNOWN = -1;
/** Constant indicating that the number of records is unknown; */
@PublicEvolving public static final long NUM_RECORDS_UNKNOWN = -1;
/** Constant indicating that average record width is unknown. */
@PublicEvolving public static final float AVG_RECORD_BYTES_UNKNOWN = -1.0f;
// --------------------------------------------------------------------------------------------
/**
* Gets the total size of the input.
*
* @return The total size of the input, in bytes.
*/
@PublicEvolving
public long getTotalInputSize();
/**
* Gets the number of records in the input (= base cardinality).
*
* @return The number of records in the input.
*/
@PublicEvolving
public long getNumberOfRecords();
/**
* Gets the average width of a record, in bytes.
*
* @return The average width of a record in bytes.
*/
@PublicEvolving
public float getAverageRecordWidth();
}
| BaseStatistics |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/metamodel/generics/embeddable/SomeString.java | {
"start": 216,
"end": 365
} | class ____ extends AbstractValueObject<String> {
protected SomeString() {
}
public SomeString(final String value) {
super( value );
}
}
| SomeString |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/onetoone/bidirectional/BidirectionalOneToOneWithIdClassesTest.java | {
"start": 1158,
"end": 1869
} | class ____ {
@Test
public void test(SessionFactoryScope scope) {
StreamSupport.stream( scope.getMetadataImplementor().getDatabase().getNamespaces().spliterator(), false )
.flatMap( namespace -> namespace.getTables().stream() )
.forEach( t -> {
if ( t.getName().equals( "Product" ) ) {
assertThat( t.getColumns().stream().map( Column::getName ) ).contains( "productId" );
}
else if ( t.getName().equals( "Price" ) ) {
assertThat( t.getColumns().stream().map( Column::getName ) ).contains(
"operator_operatorId",
"price",
"product_productId"
);
}
} );
}
@Entity( name = "Operator" )
public static | BidirectionalOneToOneWithIdClassesTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InstanceOfAndCastMatchWrongTypeTest.java | {
"start": 14502,
"end": 15011
} | class ____ {
void foo() {
Object[] values = null;
if (values[0] instanceof Integer) {
int x = (Integer) values[0];
} else if (values[0] instanceof Long) {
long y = (Long) values[0];
}
}
}
""")
.doTest();
}
@Test
public void handlesArrayAccessOnIdentifier() {
compilationHelper
.addSourceLines(
"Foo.java",
" | Foo |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/PartitionFieldExtractor.java | {
"start": 1230,
"end": 2126
} | interface ____<T extends FileSourceSplit> extends Serializable {
Object extract(T split, String fieldName, LogicalType fieldType);
static PartitionFieldExtractor<FileSourceSplit> forFileSystem(String defaultPartValue) {
return (split, fieldName, fieldType) -> {
LinkedHashMap<String, String> partitionSpec =
PartitionPathUtils.extractPartitionSpecFromPath(split.path());
if (!partitionSpec.containsKey(fieldName)) {
throw new RuntimeException(
"Cannot find the partition value from path for partition: " + fieldName);
}
String valueStr = partitionSpec.get(fieldName);
valueStr = valueStr.equals(defaultPartValue) ? null : valueStr;
return RowPartitionComputer.restorePartValueFromType(valueStr, fieldType);
};
}
}
| PartitionFieldExtractor |
java | apache__flink | flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/history/HistoryServerArchiveFetcher.java | {
"start": 3183,
"end": 3293
} | class ____ {
/** Possible job archive operations in history-server. */
public | HistoryServerArchiveFetcher |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/groovy/GroovyBeanDefinitionReaderTests.java | {
"start": 31159,
"end": 31227
} | class ____ {
public SomeClass(List<SomeOtherClass> soc) {}
}
| SomeClass |
java | elastic__elasticsearch | x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/TDigestFieldMapper.java | {
"start": 7985,
"end": 21009
} | class ____ extends MappedFieldType {
public TDigestFieldType(String name, Map<String, String> meta) {
super(name, IndexType.docValuesOnly(), false, meta);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public BlockLoader blockLoader(BlockLoaderContext blContext) {
DoublesBlockLoader minimaLoader = new DoublesBlockLoader(valuesMinSubFieldName(name()), NumericUtils::sortableLongToDouble);
DoublesBlockLoader maximaLoader = new DoublesBlockLoader(valuesMaxSubFieldName(name()), NumericUtils::sortableLongToDouble);
DoublesBlockLoader sumsLoader = new DoublesBlockLoader(valuesSumSubFieldName(name()), NumericUtils::sortableLongToDouble);
LongsBlockLoader valueCountsLoader = new LongsBlockLoader(valuesCountSubFieldName(name()));
BytesRefsFromBinaryBlockLoader digestLoader = new BytesRefsFromBinaryBlockLoader(name());
// TODO: We're constantly passing around this set of 5 things. It would be nice to make a container for that.
return new TDigestBlockLoader(digestLoader, minimaLoader, maximaLoader, sumsLoader, valueCountsLoader);
}
@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
return SourceValueFetcher.identity(name(), context, format);
}
@Override
public boolean isSearchable() {
return false;
}
@Override
public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) {
failIfNoDocValues();
// TODO - This needs to be changed to a custom values source type
return (cache, breakerService) -> new IndexHistogramFieldData(name(), AnalyticsValuesSourceType.HISTOGRAM) {
@Override
public LeafHistogramFieldData load(LeafReaderContext context) {
return new LeafHistogramFieldData() {
@Override
public HistogramValues getHistogramValues() throws IOException {
try {
final BinaryDocValues values = DocValues.getBinary(context.reader(), fieldName);
final InternalTDigestValue value = new InternalTDigestValue();
return new HistogramValues() {
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
public HistogramValue histogram() throws IOException {
try {
value.reset(values.binaryValue());
return value;
} catch (IOException e) {
throw new IOException("Cannot load doc value", e);
}
}
};
} catch (IOException e) {
throw new IOException("Cannot load doc values", e);
}
}
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
throw new UnsupportedOperationException("The [" + CONTENT_TYPE + "] field does not " + "support scripts");
}
@Override
public FormattedDocValues getFormattedValues(DocValueFormat format) {
try {
final BinaryDocValues values = DocValues.getBinary(context.reader(), fieldName);
final InternalTDigestValue value = new InternalTDigestValue();
return new FormattedDocValues() {
@Override
public boolean advanceExact(int docId) throws IOException {
return values.advanceExact(docId);
}
@Override
public int docValueCount() {
return 1;
}
@Override
public Object nextValue() throws IOException {
value.reset(values.binaryValue());
return value;
}
};
} catch (IOException e) {
throw new UncheckedIOException("Unable to loead histogram doc values", e);
}
}
@Override
public SortedBinaryDocValues getBytesValues() {
throw new UnsupportedOperationException(
"String representation of doc values " + "for [" + CONTENT_TYPE + "] fields is not supported"
);
}
@Override
public long ramBytesUsed() {
return 0; // Unknown
}
};
}
@Override
public LeafHistogramFieldData loadDirect(LeafReaderContext context) {
return load(context);
}
@Override
public SortField sortField(Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) {
throw new IllegalArgumentException("can't sort on the [" + CONTENT_TYPE + "] field");
}
@Override
public BucketedSort newBucketedSort(
BigArrays bigArrays,
Object missingValue,
MultiValueMode sortMode,
Nested nested,
SortOrder sortOrder,
DocValueFormat format,
int bucketSize,
BucketedSort.ExtraData extra
) {
throw new IllegalArgumentException("can't sort on the [" + CONTENT_TYPE + "] field");
}
};
}
@Override
public Query termQuery(Object value, SearchExecutionContext context) {
throw new IllegalArgumentException(
"[" + CONTENT_TYPE + "] field do not support searching, " + "use dedicated aggregations instead: [" + name() + "]"
);
}
}
@Override
protected boolean supportsParsingObject() {
return true;
}
@Override
public void parse(DocumentParserContext context) throws IOException {
context.path().add(leafName());
boolean shouldStoreMalformedDataForSyntheticSource = context.mappingLookup().isSourceSynthetic() && ignoreMalformed();
XContentParser.Token token;
XContentSubParser subParser = null;
XContentBuilder malformedDataForSyntheticSource = null;
try {
token = context.parser().currentToken();
if (token == XContentParser.Token.VALUE_NULL) {
context.path().remove();
return;
}
// should be an object
ensureExpectedToken(XContentParser.Token.START_OBJECT, token, context.parser());
if (shouldStoreMalformedDataForSyntheticSource) {
var copyingParser = new CopyingXContentParser(context.parser());
malformedDataForSyntheticSource = copyingParser.getBuilder();
subParser = new XContentSubParser(copyingParser);
} else {
subParser = new XContentSubParser(context.parser());
}
subParser.nextToken();
// TODO: Here we should build a t-digest out of the input, based on the settings on the field
TDigestParser.ParsedTDigest parsedTDigest = TDigestParser.parse(fullPath(), subParser);
BytesStreamOutput streamOutput = new BytesStreamOutput();
for (int i = 0; i < parsedTDigest.centroids().size(); i++) {
long count = parsedTDigest.counts().get(i);
assert count >= 0;
// we do not add elements with count == 0
if (count > 0) {
streamOutput.writeVLong(count);
streamOutput.writeDouble(parsedTDigest.centroids().get(i));
}
}
BytesRef docValue = streamOutput.bytes().toBytesRef();
Field digestField = new BinaryDocValuesField(fullPath(), docValue);
// Add numeric doc values fields for the summary data
NumericDocValuesField maxField = null;
if (Double.isNaN(parsedTDigest.max()) == false) {
maxField = new NumericDocValuesField(
valuesMaxSubFieldName(fullPath()),
NumericUtils.doubleToSortableLong(parsedTDigest.max())
);
}
NumericDocValuesField minField = null;
if (Double.isNaN(parsedTDigest.min()) == false) {
minField = new NumericDocValuesField(
valuesMinSubFieldName(fullPath()),
NumericUtils.doubleToSortableLong(parsedTDigest.min())
);
}
NumericDocValuesField countField = new NumericDocValuesField(valuesCountSubFieldName(fullPath()), parsedTDigest.count());
NumericDocValuesField sumField = null;
if (Double.isNaN(parsedTDigest.sum()) == false) {
sumField = new NumericDocValuesField(
valuesSumSubFieldName(fullPath()),
NumericUtils.doubleToSortableLong(parsedTDigest.sum())
);
}
if (context.doc().getByKey(fieldType().name()) != null) {
throw new IllegalArgumentException(
"Field ["
+ fullPath()
+ "] of type ["
+ typeName()
+ "] doesn't support indexing multiple values for the same field in the same document"
);
}
context.doc().addWithKey(fieldType().name(), digestField);
context.doc().add(countField);
if (sumField != null) {
context.doc().add(sumField);
}
if (maxField != null) {
context.doc().add(maxField);
}
if (minField != null) {
context.doc().add(minField);
}
} catch (Exception ex) {
if (ignoreMalformed.value() == false) {
throw new DocumentParsingException(
context.parser().getTokenLocation(),
"failed to parse field [" + fieldType().name() + "] of type [" + fieldType().typeName() + "]",
ex
);
}
if (subParser != null) {
// close the subParser so we advance to the end of the object
subParser.close();
} else if (shouldStoreMalformedDataForSyntheticSource) {
// We have a malformed value, but it's not an object given that `subParser` is null.
// So we just remember whatever it is.
malformedDataForSyntheticSource = XContentBuilder.builder(context.parser().contentType().xContent())
.copyCurrentStructure(context.parser());
}
if (malformedDataForSyntheticSource != null) {
context.doc().add(IgnoreMalformedStoredValues.storedField(fullPath(), malformedDataForSyntheticSource));
}
context.addIgnoredField(fieldType().name());
}
context.path().remove();
}
private static String valuesCountSubFieldName(String fullPath) {
return fullPath + "._values_count";
}
private static String valuesSumSubFieldName(String fullPath) {
return fullPath + "._values_sum";
}
private static String valuesMinSubFieldName(String fullPath) {
return fullPath + "._values_min";
}
private static String valuesMaxSubFieldName(String fullPath) {
return fullPath + "._values_max";
}
/** re-usable {@link HistogramValue} implementation */
static | TDigestFieldType |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/sentinel/SentinelCommandBuilder.java | {
"start": 1686,
"end": 7044
} | class ____<K, V> extends BaseRedisCommandBuilder<K, V> {
public SentinelCommandBuilder(RedisCodec<K, V> codec) {
super(codec);
}
public Command<K, V, SocketAddress> getMasterAddrByKey(K key) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add("get-master-addr-by-name").addKey(key);
return createCommand(SENTINEL, new SocketAddressOutput<>(codec), args);
}
public Command<K, V, List<Map<K, V>>> masters() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add("masters");
return createCommand(SENTINEL, new ListOfMapsOutput<>(codec), args);
}
public Command<K, V, Map<K, V>> master(K key) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add("master").addKey(key);
return createCommand(SENTINEL, new MapOutput<>(codec), args);
}
public Command<K, V, List<Map<K, V>>> slaves(K key) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SLAVES).addKey(key);
return createCommand(SENTINEL, new ListOfMapsOutput<>(codec), args);
}
public Command<K, V, List<Map<K, V>>> replicas(K key) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(REPLICAS).addKey(key);
return createCommand(SENTINEL, new ListOfMapsOutput<>(codec), args);
}
public Command<K, V, Long> reset(K key) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(RESET).addKey(key);
return createCommand(SENTINEL, new IntegerOutput<>(codec), args);
}
public Command<K, V, String> failover(K key) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(FAILOVER).addKey(key);
return createCommand(SENTINEL, new StatusOutput<>(codec), args);
}
public Command<K, V, String> monitor(K key, String ip, int port, int quorum) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(MONITOR).addKey(key).add(ip).add(port).add(quorum);
return createCommand(SENTINEL, new StatusOutput<>(codec), args);
}
public Command<K, V, String> set(K key, String option, V value) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SET).addKey(key).add(option).addValue(value);
return createCommand(SENTINEL, new StatusOutput<>(codec), args);
}
public Command<K, V, K> clientGetname() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(GETNAME);
return createCommand(CLIENT, new KeyOutput<>(codec), args);
}
public Command<K, V, String> clientSetname(K name) {
LettuceAssert.notNull(name, "Name must not be null");
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SETNAME).addKey(name);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
public Command<K, V, String> clientSetinfo(String key, String value) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SETINFO).add(key).add(value);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
public Command<K, V, String> clientKill(String addr) {
LettuceAssert.notNull(addr, "Addr must not be null");
LettuceAssert.notEmpty(addr, "Addr must not be empty");
CommandArgs<K, V> args = new CommandArgs<>(codec).add(KILL).add(addr);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
public Command<K, V, Long> clientKill(KillArgs killArgs) {
LettuceAssert.notNull(killArgs, "KillArgs must not be null");
CommandArgs<K, V> args = new CommandArgs<>(codec).add(KILL);
killArgs.build(args);
return createCommand(CLIENT, new IntegerOutput<>(codec), args);
}
public Command<K, V, String> clientPause(long timeout) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(PAUSE).add(timeout);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
public Command<K, V, String> clientList() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(LIST);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
public Command<K, V, String> clientList(ClientListArgs clientListArgs) {
LettuceAssert.notNull(clientListArgs, "ClientListArgs must not be null");
CommandArgs<K, V> args = new CommandArgs<>(codec).add(LIST);
clientListArgs.build(args);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
public Command<K, V, String> clientInfo() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(CommandKeyword.INFO);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
public Command<K, V, String> info() {
return createCommand(CommandType.INFO, new StatusOutput<>(codec));
}
public Command<K, V, String> info(String section) {
LettuceAssert.notNull(section, "Section must not be null");
CommandArgs<K, V> args = new CommandArgs<>(codec).add(section);
return createCommand(CommandType.INFO, new StatusOutput<>(codec), args);
}
public Command<K, V, String> ping() {
return createCommand(PING, new StatusOutput<>(codec));
}
public Command<K, V, String> remove(K key) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(CommandKeyword.REMOVE).addKey(key);
return createCommand(SENTINEL, new StatusOutput<>(codec), args);
}
}
| SentinelCommandBuilder |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/ReflectionSupport.java | {
"start": 32648,
"end": 32959
} | class ____ be searched; never {@code null}
* @param predicate the predicate against which the list of nested classes is
* checked; never {@code null}
* @return a stream of all such classes found; never {@code null}
* but potentially empty
* @throws JUnitException if a cycle is detected within an inner | to |
java | apache__camel | components/camel-wordpress/src/test/java/org/apache/camel/component/wordpress/WordpressComponentTestSupport.java | {
"start": 1105,
"end": 1543
} | class ____ extends CamelTestSupport {
@BeforeAll
public static void beforeClass() throws IOException {
WordpressMockServerTestSupport.setUpMockServer();
}
@AfterAll
public static void afterClass() {
WordpressMockServerTestSupport.tearDownMockServer();
}
protected String getServerBaseUrl() {
return WordpressMockServerTestSupport.getServerBaseUrl();
}
}
| WordpressComponentTestSupport |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/authentication/configuration/EnableGlobalAuthenticationTests.java | {
"start": 3427,
"end": 3465
} | class ____ {
Child() {
}
}
}
| Child |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DirectBufferPool.java | {
"start": 1266,
"end": 1662
} | class ____ pooling direct ByteBuffers. This is necessary
* because Direct Byte Buffers do not take up much space on the heap,
* and hence will not trigger GCs on their own. However, they do take
* native memory, and thus can cause high memory usage if not pooled.
* The pooled instances are referred to only via weak references, allowing
* them to be collected when a GC does run.
*
* This | for |
java | spring-projects__spring-framework | spring-aop/src/main/java/org/springframework/aop/Advisor.java | {
"start": 1156,
"end": 1379
} | interface ____ support for different types of advice,
* such as <b>before</b> and <b>after</b> advice, which need not be
* implemented using interception.
*
* @author Rod Johnson
* @author Juergen Hoeller
*/
public | allows |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/async/RouterAsyncSnapshot.java | {
"start": 2487,
"end": 12040
} | class ____ extends RouterSnapshot {
/** RPC server to receive client calls. */
private final RouterRpcServer rpcServer;
/** RPC clients to connect to the Namenodes. */
private final RouterRpcClient rpcClient;
/** Find generic locations. */
private final ActiveNamenodeResolver namenodeResolver;
public RouterAsyncSnapshot(RouterRpcServer server) {
super(server);
this.rpcServer = server;
this.rpcClient = this.rpcServer.getRPCClient();
this.namenodeResolver = rpcServer.getNamenodeResolver();
}
/**
* Asynchronously creates a snapshot with the given root and name.
* This method checks the operation category and then invokes the createSnapshot
* method concurrently across all namespaces, returning the first successful response.
*
* @param snapshotRoot The root path of the snapshot.
* @param snapshotName The name of the snapshot.
* @return The path of the created snapshot.
* @throws IOException If an I/O error occurs.
*/
@Override
public String createSnapshot(String snapshotRoot, String snapshotName) throws IOException {
rpcServer.checkOperation(NameNode.OperationCategory.WRITE);
final List<RemoteLocation> locations =
rpcServer.getLocationsForPath(snapshotRoot, true, false);
RemoteMethod method = new RemoteMethod("createSnapshot",
new Class<?>[] {String.class, String.class}, new RemoteParam(),
snapshotName);
if (rpcServer.isInvokeConcurrent(snapshotRoot)) {
rpcClient.invokeConcurrent(locations, method, String.class);
asyncApply((ApplyFunction<Map<RemoteLocation, String>, String>)
results -> {
Map.Entry<RemoteLocation, String> firstelement =
results.entrySet().iterator().next();
RemoteLocation loc = firstelement.getKey();
String result = firstelement.getValue();
return result.replaceFirst(loc.getDest(), loc.getSrc());
});
} else {
rpcClient.invokeSequential(method, locations, String.class, null);
asyncApply((ApplyFunction<RemoteResult<RemoteLocation, String>, String>)
response -> {
RemoteLocation loc = response.getLocation();
String invokedResult = response.getResult();
return invokedResult.replaceFirst(loc.getDest(), loc.getSrc());
});
}
return asyncReturn(String.class);
}
/**
* Asynchronously get an array of snapshottable directory listings.
* This method checks the operation category and then invokes the
* getSnapshottableDirListing method concurrently across all namespaces, merging
* the results into a single array.
*
* @return Array of SnapshottableDirectoryStatus.
* @throws IOException If an I/O error occurs.
*/
@Override
public SnapshottableDirectoryStatus[] getSnapshottableDirListing() throws IOException {
rpcServer.checkOperation(NameNode.OperationCategory.READ);
RemoteMethod method = new RemoteMethod("getSnapshottableDirListing");
Set<FederationNamespaceInfo> nss = namenodeResolver.getNamespaces();
rpcClient.invokeConcurrent(
nss, method, true, false, SnapshottableDirectoryStatus[].class);
asyncApply((ApplyFunction<Map<FederationNamespaceInfo, SnapshottableDirectoryStatus[]>,
SnapshottableDirectoryStatus[]>)
ret -> RouterRpcServer.merge(ret, SnapshottableDirectoryStatus.class));
return asyncReturn(SnapshottableDirectoryStatus[].class);
}
/**
* Asynchronously get an array of snapshot listings for the given snapshot root.
* This method checks the operation category and then invokes the
* getSnapshotListing method, either sequentially or concurrently based on the
* configuration, and returns the merged results.
*
* @param snapshotRoot The root path of the snapshots.
* @return Array of SnapshotStatus.
* @throws IOException If an I/O error occurs.
*/
@Override
public SnapshotStatus[] getSnapshotListing(String snapshotRoot) throws IOException {
rpcServer.checkOperation(NameNode.OperationCategory.READ);
final List<RemoteLocation> locations =
rpcServer.getLocationsForPath(snapshotRoot, true, false);
RemoteMethod remoteMethod = new RemoteMethod("getSnapshotListing",
new Class<?>[]{String.class},
new RemoteParam());
if (rpcServer.isInvokeConcurrent(snapshotRoot)) {
rpcClient.invokeConcurrent(
locations, remoteMethod, true, false, SnapshotStatus[].class);
asyncApply((ApplyFunction<Map<RemoteLocation, SnapshotStatus[]>, SnapshotStatus[]>)
ret -> {
SnapshotStatus[] response = ret.values().iterator().next();
String src = ret.keySet().iterator().next().getSrc();
String dst = ret.keySet().iterator().next().getDest();
for (SnapshotStatus s : response) {
String mountPath = DFSUtil.bytes2String(s.getParentFullPath()).
replaceFirst(src, dst);
s.setParentFullPath(DFSUtil.string2Bytes(mountPath));
}
return response;
});
} else {
rpcClient
.invokeSequential(remoteMethod, locations, SnapshotStatus[].class,
null);
asyncApply((ApplyFunction<RemoteResult<RemoteLocation, SnapshotStatus[]>, SnapshotStatus[]>)
invokedResponse -> {
RemoteLocation loc = invokedResponse.getLocation();
SnapshotStatus[] response = invokedResponse.getResult();
for (SnapshotStatus s : response) {
String mountPath = DFSUtil.bytes2String(s.getParentFullPath()).
replaceFirst(loc.getDest(), loc.getSrc());
s.setParentFullPath(DFSUtil.string2Bytes(mountPath));
}
return response;
});
}
return asyncReturn(SnapshotStatus[].class);
}
/**
* Asynchronously get a snapshot diff report for the given root and snapshot names.
* This method checks the operation category and then invokes the
* getSnapshotDiffReport method, either sequentially or concurrently based on the
* configuration, and returns the result.
*
* @param snapshotRoot The root path of the snapshot.
* @param earlierSnapshotName The name of the earlier snapshot.
* @param laterSnapshotName The name of the later snapshot.
* @return SnapshotDiffReport for the snapshots.
* @throws IOException If an I/O error occurs.
*/
@Override
public SnapshotDiffReport getSnapshotDiffReport(
String snapshotRoot, String earlierSnapshotName,
String laterSnapshotName) throws IOException {
rpcServer.checkOperation(NameNode.OperationCategory.READ);
final List<RemoteLocation> locations =
rpcServer.getLocationsForPath(snapshotRoot, true, false);
RemoteMethod remoteMethod = new RemoteMethod("getSnapshotDiffReport",
new Class<?>[] {String.class, String.class, String.class},
new RemoteParam(), earlierSnapshotName, laterSnapshotName);
if (rpcServer.isInvokeConcurrent(snapshotRoot)) {
rpcClient.invokeConcurrent(
locations, remoteMethod, true, false, SnapshotDiffReport.class);
asyncApply((ApplyFunction<Map<RemoteLocation, SnapshotDiffReport>, SnapshotDiffReport>)
ret -> ret.values().iterator().next());
return asyncReturn(SnapshotDiffReport.class);
} else {
return rpcClient.invokeSequential(
locations, remoteMethod, SnapshotDiffReport.class, null);
}
}
/**
* Asynchronously get a snapshot diff report listing for the given root and snapshot names.
* This method checks the operation category and then invokes the
* getSnapshotDiffReportListing method, either sequentially or concurrently based
* on the configuration, and returns the result.
*
* @param snapshotRoot The root path of the snapshot.
* @param earlierSnapshotName The name of the earlier snapshot.
* @param laterSnapshotName The name of the later snapshot.
* @param startPath The starting path for the diff report.
* @param index The index for the diff report listing.
* @return SnapshotDiffReportListing for the snapshots.
* @throws IOException If an I/O error occurs.
*/
@Override
public SnapshotDiffReportListing getSnapshotDiffReportListing(
String snapshotRoot, String earlierSnapshotName, String laterSnapshotName,
byte[] startPath, int index) throws IOException {
rpcServer.checkOperation(NameNode.OperationCategory.READ);
final List<RemoteLocation> locations =
rpcServer.getLocationsForPath(snapshotRoot, true, false);
Class<?>[] params = new Class<?>[] {
String.class, String.class, String.class,
byte[].class, int.class};
RemoteMethod remoteMethod = new RemoteMethod(
"getSnapshotDiffReportListing", params,
new RemoteParam(), earlierSnapshotName, laterSnapshotName,
startPath, index);
if (rpcServer.isInvokeConcurrent(snapshotRoot)) {
rpcClient.invokeConcurrent(locations, remoteMethod, false, false,
SnapshotDiffReportListing.class);
asyncApply((ApplyFunction<Map<RemoteLocation, SnapshotDiffReportListing>,
SnapshotDiffReportListing>) ret -> {
Collection<SnapshotDiffReportListing> listings = ret.values();
SnapshotDiffReportListing listing0 = listings.iterator().next();
return listing0;
});
return asyncReturn(SnapshotDiffReportListing.class);
} else {
return rpcClient.invokeSequential(
locations, remoteMethod, SnapshotDiffReportListing.class, null);
}
}
}
| RouterAsyncSnapshot |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.