language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/json/DelegateJsonValue.java | {
"start": 564,
"end": 2892
} | class ____ implements JsonValue {
protected JsonNode node;
protected final ObjectMapper objectMapper;
DelegateJsonValue(JsonNode node, ObjectMapper objectMapper) {
this.node = node;
this.objectMapper = objectMapper;
}
@Override
public String toString() {
return node.toString();
}
@Override
public ByteBuffer asByteBuffer() {
byte[] result = node.toString().getBytes();
return ByteBuffer.wrap(result);
}
@Override
public boolean isJsonArray() {
return node.isArray();
}
@Override
public JsonArray asJsonArray() {
return null;
}
@Override
public boolean isJsonObject() {
return node.isObject();
}
@Override
public JsonObject asJsonObject() {
return null;
}
@Override
public boolean isString() {
return node.isTextual();
}
@Override
public String asString() {
return node.isTextual() ? node.asText() : null;
}
@Override
public boolean isNumber() {
return node.isNumber();
}
@Override
public Boolean asBoolean() {
return node.isBoolean() ? node.asBoolean() : null;
}
@Override
public boolean isBoolean() {
return node.isBoolean();
}
public boolean isNull() {
return node.isNull();
}
@Override
public Number asNumber() {
if (node.isNull()) {
return null;
}
return node.numberValue();
}
protected JsonNode getNode() {
return node;
}
@Override
public <T> T toObject(Class<T> type) {
try {
return objectMapper.treeToValue(node, type);
} catch (IllegalArgumentException | JsonProcessingException e) {
throw new RedisJsonException("Unable to map the provided JsonValue to " + type.getName(), e);
}
}
static JsonValue wrap(JsonNode root, ObjectMapper objectMapper) {
LettuceAssert.notNull(root, "Root must not be null");
if (root.isObject()) {
return new DelegateJsonObject(root, objectMapper);
} else if (root.isArray()) {
return new DelegateJsonArray(root, objectMapper);
}
return new DelegateJsonValue(root, objectMapper);
}
}
| DelegateJsonValue |
java | square__javapoet | src/main/java/com/squareup/javapoet/ClassName.java | {
"start": 1785,
"end": 3664
} | class ____ like "java.util.Map.Entry". */
final String canonicalName;
private ClassName(String packageName, ClassName enclosingClassName, String simpleName) {
this(packageName, enclosingClassName, simpleName, Collections.emptyList());
}
private ClassName(String packageName, ClassName enclosingClassName, String simpleName,
List<AnnotationSpec> annotations) {
super(annotations);
this.packageName = Objects.requireNonNull(packageName, "packageName == null");
this.enclosingClassName = enclosingClassName;
this.simpleName = simpleName;
this.canonicalName = enclosingClassName != null
? (enclosingClassName.canonicalName + '.' + simpleName)
: (packageName.isEmpty() ? simpleName : packageName + '.' + simpleName);
}
@Override public ClassName annotated(List<AnnotationSpec> annotations) {
return new ClassName(packageName, enclosingClassName, simpleName,
concatAnnotations(annotations));
}
@Override public ClassName withoutAnnotations() {
if (!isAnnotated()) return this;
ClassName resultEnclosingClassName = enclosingClassName != null
? enclosingClassName.withoutAnnotations()
: null;
return new ClassName(packageName, resultEnclosingClassName, simpleName);
}
@Override public boolean isAnnotated() {
return super.isAnnotated() || (enclosingClassName != null && enclosingClassName.isAnnotated());
}
/**
* Returns the package name, like {@code "java.util"} for {@code Map.Entry}. Returns the empty
* string for the default package.
*/
public String packageName() {
return packageName;
}
/**
* Returns the enclosing class, like {@link Map} for {@code Map.Entry}. Returns null if this class
* is not nested in another class.
*/
public ClassName enclosingClassName() {
return enclosingClassName;
}
/**
* Returns the top | name |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableOuterJoin.java | {
"start": 6387,
"end": 8693
} | class ____ implements KTableValueGetter<K, VOut> {
private final KTableValueGetter<K, V1> valueGetter1;
private final KTableValueGetter<K, V2> valueGetter2;
KTableKTableOuterJoinValueGetter(final KTableValueGetter<K, V1> valueGetter1,
final KTableValueGetter<K, V2> valueGetter2) {
this.valueGetter1 = valueGetter1;
this.valueGetter2 = valueGetter2;
}
@Override
public void init(final ProcessorContext<?, ?> context) {
valueGetter1.init(context);
valueGetter2.init(context);
}
@Override
public ValueAndTimestamp<VOut> get(final K key) {
VOut newValue = null;
final ValueAndTimestamp<V1> valueAndTimestamp1 = valueGetter1.get(key);
final V1 value1;
final long timestamp1;
if (valueAndTimestamp1 == null) {
value1 = null;
timestamp1 = UNKNOWN;
} else {
value1 = valueAndTimestamp1.value();
timestamp1 = valueAndTimestamp1.timestamp();
}
final ValueAndTimestamp<V2> valueAndTimestamp2 = valueGetter2.get(key);
final V2 value2;
final long timestamp2;
if (valueAndTimestamp2 == null) {
value2 = null;
timestamp2 = UNKNOWN;
} else {
value2 = valueAndTimestamp2.value();
timestamp2 = valueAndTimestamp2.timestamp();
}
if (value1 != null || value2 != null) {
newValue = joiner.apply(value1, value2);
}
return ValueAndTimestamp.make(newValue, Math.max(timestamp1, timestamp2));
}
@Override
public boolean isVersioned() {
// even though we can derive a proper versioned result (assuming both parent value
// getters are versioned), we choose not to since the output of a join of two
// versioned tables today is not considered versioned (cf KIP-914)
return false;
}
@Override
public void close() {
valueGetter1.close();
valueGetter2.close();
}
}
}
| KTableKTableOuterJoinValueGetter |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/NestedJsonEmbeddableTest.java | {
"start": 17780,
"end": 18448
} | class ____ {
private String stringField;
private SimpleEmbeddable simpleEmbeddable;
@JdbcTypeCode(SqlTypes.JSON)
private EmbeddableAggregate nested;
public TheJson() {
}
public TheJson(String stringField, Integer integerField, String leaf, EmbeddableAggregate nested) {
this.stringField = stringField;
this.simpleEmbeddable = new SimpleEmbeddable( integerField, leaf );
this.nested = nested;
}
public TheJson(String stringField, SimpleEmbeddable simpleEmbeddable, EmbeddableAggregate nested) {
this.stringField = stringField;
this.simpleEmbeddable = simpleEmbeddable;
this.nested = nested;
}
}
@Embeddable
public static | TheJson |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/CxfEndpointBuilderFactory.java | {
"start": 34031,
"end": 48649
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedCxfEndpointProducerBuilder advanced() {
return (AdvancedCxfEndpointProducerBuilder) this;
}
/**
* The data type messages supported by the CXF endpoint.
*
* The option is a:
* <code>org.apache.camel.component.cxf.common.DataFormat</code> type.
*
* Default: POJO
* Group: common
*
* @param dataFormat the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder dataFormat(org.apache.camel.component.cxf.common.DataFormat dataFormat) {
doSetProperty("dataFormat", dataFormat);
return this;
}
/**
* The data type messages supported by the CXF endpoint.
*
* The option will be converted to a
* <code>org.apache.camel.component.cxf.common.DataFormat</code> type.
*
* Default: POJO
* Group: common
*
* @param dataFormat the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder dataFormat(String dataFormat) {
doSetProperty("dataFormat", dataFormat);
return this;
}
/**
* The WSDL style that describes how parameters are represented in the
* SOAP body. If the value is false, CXF will chose the document-literal
* unwrapped style, If the value is true, CXF will chose the
* document-literal wrapped style.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: false
* Group: common
*
* @param wrappedStyle the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder wrappedStyle(Boolean wrappedStyle) {
doSetProperty("wrappedStyle", wrappedStyle);
return this;
}
/**
* The WSDL style that describes how parameters are represented in the
* SOAP body. If the value is false, CXF will chose the document-literal
* unwrapped style, If the value is true, CXF will chose the
* document-literal wrapped style.
*
* The option will be converted to a <code>java.lang.Boolean</code>
* type.
*
* Default: false
* Group: common
*
* @param wrappedStyle the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder wrappedStyle(String wrappedStyle) {
doSetProperty("wrappedStyle", wrappedStyle);
return this;
}
/**
* Configure a cookie handler to maintain a HTTP session.
*
* The option is a:
* <code>org.apache.camel.http.base.cookie.CookieHandler</code> type.
*
* Group: producer
*
* @param cookieHandler the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder cookieHandler(org.apache.camel.http.base.cookie.CookieHandler cookieHandler) {
doSetProperty("cookieHandler", cookieHandler);
return this;
}
/**
* Configure a cookie handler to maintain a HTTP session.
*
* The option will be converted to a
* <code>org.apache.camel.http.base.cookie.CookieHandler</code> type.
*
* Group: producer
*
* @param cookieHandler the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder cookieHandler(String cookieHandler) {
doSetProperty("cookieHandler", cookieHandler);
return this;
}
/**
* This option will set the default operationName that will be used by
* the CxfProducer which invokes the remote service.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param defaultOperationName the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder defaultOperationName(String defaultOperationName) {
doSetProperty("defaultOperationName", defaultOperationName);
return this;
}
/**
* This option will set the default operationNamespace that will be used
* by the CxfProducer which invokes the remote service.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param defaultOperationNamespace the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder defaultOperationNamespace(String defaultOperationNamespace) {
doSetProperty("defaultOperationNamespace", defaultOperationNamespace);
return this;
}
/**
* The hostname verifier to be used. Use the # notation to reference a
* HostnameVerifier from the registry.
*
* The option is a: <code>javax.net.ssl.HostnameVerifier</code> type.
*
* Group: producer
*
* @param hostnameVerifier the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder hostnameVerifier(javax.net.ssl.HostnameVerifier hostnameVerifier) {
doSetProperty("hostnameVerifier", hostnameVerifier);
return this;
}
/**
* The hostname verifier to be used. Use the # notation to reference a
* HostnameVerifier from the registry.
*
* The option will be converted to a
* <code>javax.net.ssl.HostnameVerifier</code> type.
*
* Group: producer
*
* @param hostnameVerifier the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder hostnameVerifier(String hostnameVerifier) {
doSetProperty("hostnameVerifier", hostnameVerifier);
return this;
}
/**
* The Camel SSL setting reference. Use the # notation to reference the
* SSL Context.
*
* The option is a:
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: producer
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder sslContextParameters(org.apache.camel.support.jsse.SSLContextParameters sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* The Camel SSL setting reference. Use the # notation to reference the
* SSL Context.
*
* The option will be converted to a
* <code>org.apache.camel.support.jsse.SSLContextParameters</code> type.
*
* Group: producer
*
* @param sslContextParameters the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder sslContextParameters(String sslContextParameters) {
doSetProperty("sslContextParameters", sslContextParameters);
return this;
}
/**
* Which kind of operation that CXF endpoint producer will invoke.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param wrapped the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder wrapped(boolean wrapped) {
doSetProperty("wrapped", wrapped);
return this;
}
/**
* Which kind of operation that CXF endpoint producer will invoke.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer
*
* @param wrapped the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder wrapped(String wrapped) {
doSetProperty("wrapped", wrapped);
return this;
}
/**
* This option enables CXF Logging Feature which writes inbound and
* outbound SOAP messages to log.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: logging
*
* @param loggingFeatureEnabled the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder loggingFeatureEnabled(boolean loggingFeatureEnabled) {
doSetProperty("loggingFeatureEnabled", loggingFeatureEnabled);
return this;
}
/**
* This option enables CXF Logging Feature which writes inbound and
* outbound SOAP messages to log.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: logging
*
* @param loggingFeatureEnabled the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder loggingFeatureEnabled(String loggingFeatureEnabled) {
doSetProperty("loggingFeatureEnabled", loggingFeatureEnabled);
return this;
}
/**
* To limit the total size of number of bytes the logger will output
* when logging feature has been enabled and -1 for no limit.
*
* The option is a: <code>int</code> type.
*
* Default: 49152
* Group: logging
*
* @param loggingSizeLimit the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder loggingSizeLimit(int loggingSizeLimit) {
doSetProperty("loggingSizeLimit", loggingSizeLimit);
return this;
}
/**
* To limit the total size of number of bytes the logger will output
* when logging feature has been enabled and -1 for no limit.
*
* The option will be converted to a <code>int</code> type.
*
* Default: 49152
* Group: logging
*
* @param loggingSizeLimit the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder loggingSizeLimit(String loggingSizeLimit) {
doSetProperty("loggingSizeLimit", loggingSizeLimit);
return this;
}
/**
* This option controls whether the PhaseInterceptorChain skips logging
* the Fault that it catches.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: logging
*
* @param skipFaultLogging the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder skipFaultLogging(boolean skipFaultLogging) {
doSetProperty("skipFaultLogging", skipFaultLogging);
return this;
}
/**
* This option controls whether the PhaseInterceptorChain skips logging
* the Fault that it catches.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: logging
*
* @param skipFaultLogging the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder skipFaultLogging(String skipFaultLogging) {
doSetProperty("skipFaultLogging", skipFaultLogging);
return this;
}
/**
* This option is used to set the basic authentication information of
* password for the CXF client.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param password the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder password(String password) {
doSetProperty("password", password);
return this;
}
/**
* This option is used to set the basic authentication information of
* username for the CXF client.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: security
*
* @param username the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder username(String username) {
doSetProperty("username", username);
return this;
}
/**
* The bindingId for the service model to use.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: service
*
* @param bindingId the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder bindingId(String bindingId) {
doSetProperty("bindingId", bindingId);
return this;
}
/**
* The endpoint name this service is implementing, it maps to the
* wsdl:portname. In the format of ns:PORT_NAME where ns is a namespace
* prefix valid at this scope.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: service
*
* @param portName the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder portName(String portName) {
doSetProperty("portName", portName);
return this;
}
/**
* This option can override the endpointUrl that published from the WSDL
* which can be accessed with service address url plus wsd.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: service
*
* @param publishedEndpointUrl the value to set
* @return the dsl builder
*/
default CxfEndpointProducerBuilder publishedEndpointUrl(String publishedEndpointUrl) {
doSetProperty("publishedEndpointUrl", publishedEndpointUrl);
return this;
}
/**
* The | CxfEndpointProducerBuilder |
java | spring-projects__spring-security | oauth2/oauth2-resource-server/src/main/java/org/springframework/security/oauth2/server/resource/OAuth2ProtectedResourceMetadataClaimAccessor.java | {
"start": 1244,
"end": 4236
} | interface ____ extends ClaimAccessor {
/**
* Returns the {@code URL} the protected resource asserts as its resource identifier
* {@code (resource)}.
* @return the {@code URL} the protected resource asserts as its resource identifier
*/
default URL getResource() {
return getClaimAsURL(OAuth2ProtectedResourceMetadataClaimNames.RESOURCE);
}
/**
* Returns a list of {@code issuer} identifier {@code URL}'s, for authorization
* servers that can be used with this protected resource
* {@code (authorization_servers)}.
* @return a list of {@code issuer} identifier {@code URL}'s, for authorization
* servers that can be used with this protected resource
*/
default List<URL> getAuthorizationServers() {
List<String> authorizationServers = getClaimAsStringList(
OAuth2ProtectedResourceMetadataClaimNames.AUTHORIZATION_SERVERS);
List<URL> urls = new ArrayList<>();
authorizationServers.forEach((authorizationServer) -> {
try {
urls.add(new URI(authorizationServer).toURL());
}
catch (Exception ex) {
throw new IllegalArgumentException("Failed to convert authorization_server to URL", ex);
}
});
return urls;
}
/**
* Returns a list of {@code scope} values supported, that are used in authorization
* requests to request access to this protected resource {@code (scopes_supported)}.
* @return a list of {@code scope} values supported, that are used in authorization
* requests to request access to this protected resource
*/
default List<String> getScopes() {
return getClaimAsStringList(OAuth2ProtectedResourceMetadataClaimNames.SCOPES_SUPPORTED);
}
/**
* Returns a list of the supported methods for sending an OAuth 2.0 bearer token to
* the protected resource. Defined values are "header", "body" and "query".
* {@code (bearer_methods_supported)}.
* @return a list of the supported methods for sending an OAuth 2.0 bearer token to
* the protected resource
*/
default List<String> getBearerMethodsSupported() {
return getClaimAsStringList(OAuth2ProtectedResourceMetadataClaimNames.BEARER_METHODS_SUPPORTED);
}
/**
* Returns the name of the protected resource intended for display to the end user
* {@code (resource_name)}.
* @return the name of the protected resource intended for display to the end user
*/
default String getResourceName() {
return getClaimAsString(OAuth2ProtectedResourceMetadataClaimNames.RESOURCE_NAME);
}
/**
* Returns {@code true} to indicate protected resource support for mutual-TLS client
* certificate-bound access tokens
* {@code (tls_client_certificate_bound_access_tokens)}.
* @return {@code true} to indicate protected resource support for mutual-TLS client
* certificate-bound access tokens
*/
default boolean isTlsClientCertificateBoundAccessTokens() {
return Boolean.TRUE.equals(getClaimAsBoolean(
OAuth2ProtectedResourceMetadataClaimNames.TLS_CLIENT_CERTIFICATE_BOUND_ACCESS_TOKENS));
}
}
| OAuth2ProtectedResourceMetadataClaimAccessor |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/xml/internal/attr/CommonPluralAttributeProcessing.java | {
"start": 1989,
"end": 9628
} | class ____ {
public static void applyPluralAttributeStructure(
JaxbPluralAttribute jaxbPluralAttribute,
MutableMemberDetails memberDetails,
XmlDocumentContext xmlDocumentContext) {
final ModelsContext buildingContext = xmlDocumentContext.getModelBuildingContext();
final ClassDetailsRegistry classDetailsRegistry = buildingContext.getClassDetailsRegistry();
if ( jaxbPluralAttribute.getFetchMode() != null ) {
final FetchAnnotation fetchAnn = (FetchAnnotation) memberDetails.applyAnnotationUsage(
HibernateAnnotations.FETCH,
buildingContext
);
fetchAnn.value( interpretFetchMode( jaxbPluralAttribute.getFetchMode() ) );
}
if ( jaxbPluralAttribute.getClassification() != null ) {
if ( jaxbPluralAttribute.getClassification() == LimitedCollectionClassification.BAG ) {
memberDetails.applyAnnotationUsage( HibernateAnnotations.BAG, buildingContext );
}
else {
XmlAnnotationHelper.applyCollectionClassification(
jaxbPluralAttribute.getClassification(),
memberDetails,
xmlDocumentContext
);
}
}
if ( jaxbPluralAttribute.getBatchSize() != null ) {
final BatchSizeAnnotation batchSizeAnnotation = (BatchSizeAnnotation) memberDetails.applyAnnotationUsage(
HibernateAnnotations.BATCH_SIZE,
buildingContext
);
batchSizeAnnotation.size( jaxbPluralAttribute.getBatchSize() );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// collection-structure
XmlAnnotationHelper.applyCollectionUserType( jaxbPluralAttribute.getCollectionType(), memberDetails, xmlDocumentContext );
XmlAnnotationHelper.applyCollectionId( jaxbPluralAttribute.getCollectionId(), memberDetails, xmlDocumentContext );
if ( StringHelper.isNotEmpty( jaxbPluralAttribute.getOrderBy() ) ) {
final OrderByJpaAnnotation orderByAnn = (OrderByJpaAnnotation) memberDetails.applyAnnotationUsage(
JpaAnnotations.ORDER_BY,
buildingContext
);
orderByAnn.value( jaxbPluralAttribute.getOrderBy() );
}
applyOrderColumn( jaxbPluralAttribute, memberDetails, xmlDocumentContext );
if ( StringHelper.isNotEmpty( jaxbPluralAttribute.getSort() ) ) {
final SortComparatorAnnotation sortAnn = (SortComparatorAnnotation) memberDetails.applyAnnotationUsage(
HibernateAnnotations.SORT_COMPARATOR,
buildingContext
);
final ClassDetails comparatorClassDetails = classDetailsRegistry.resolveClassDetails( jaxbPluralAttribute.getSort() );
sortAnn.value( comparatorClassDetails.toJavaClass() );
}
if ( jaxbPluralAttribute.getSortNatural() != null ) {
memberDetails.applyAnnotationUsage( HibernateAnnotations.SORT_NATURAL, buildingContext );
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// map-key
if ( jaxbPluralAttribute.getMapKey() != null ) {
final MapKeyJpaAnnotation mapKeyAnn = (MapKeyJpaAnnotation) memberDetails.applyAnnotationUsage(
JpaAnnotations.MAP_KEY,
buildingContext
);
if ( jaxbPluralAttribute.getMapKey() != null && StringHelper.isNotEmpty( jaxbPluralAttribute.getMapKey().getName() ) ) {
mapKeyAnn.name( jaxbPluralAttribute.getMapKey().getName() );
}
}
if ( jaxbPluralAttribute.getMapKeyClass() != null ) {
final String className = xmlDocumentContext.resolveClassName( jaxbPluralAttribute.getMapKeyClass().getClazz() );
final ClassDetails mapKeyClass = classDetailsRegistry.resolveClassDetails( className );
final MapKeyClassJpaAnnotation mapKeyClassAnn = (MapKeyClassJpaAnnotation) memberDetails.applyAnnotationUsage(
JpaAnnotations.MAP_KEY_CLASS,
buildingContext
);
mapKeyClassAnn.value( mapKeyClass.toJavaClass() );
}
if ( jaxbPluralAttribute.getMapKeyTemporal() != null ) {
final MapKeyTemporalJpaAnnotation mapKeyTemporalAnn = (MapKeyTemporalJpaAnnotation) memberDetails.applyAnnotationUsage(
JpaAnnotations.MAP_KEY_TEMPORAL,
buildingContext
);
mapKeyTemporalAnn.value( jaxbPluralAttribute.getMapKeyTemporal() );
}
if ( jaxbPluralAttribute.getMapKeyEnumerated() != null ) {
final MapKeyEnumeratedJpaAnnotation mapKeyEnumeratedAnn = (MapKeyEnumeratedJpaAnnotation) memberDetails.applyAnnotationUsage(
JpaAnnotations.MAP_KEY_ENUMERATED,
buildingContext
);
mapKeyEnumeratedAnn.value( jaxbPluralAttribute.getMapKeyEnumerated() );
}
XmlAnnotationHelper.applyConverts(
jaxbPluralAttribute.getMapKeyConverts(),
"key",
memberDetails,
xmlDocumentContext
);
if ( jaxbPluralAttribute.getMapKeyColumn() != null ) {
final MapKeyColumnJpaAnnotation columnAnn = (MapKeyColumnJpaAnnotation) memberDetails.applyAnnotationUsage(
JpaAnnotations.MAP_KEY_COLUMN,
xmlDocumentContext.getModelBuildingContext()
);
columnAnn.apply( jaxbPluralAttribute.getMapKeyColumn(), xmlDocumentContext );
}
if ( jaxbPluralAttribute.getMapKeyType() != null ) {
XmlAnnotationHelper.applyMapKeyUserType( jaxbPluralAttribute.getMapKeyType(), memberDetails, xmlDocumentContext );
}
JoinColumnProcessing.applyMapKeyJoinColumns(
jaxbPluralAttribute.getMapKeyJoinColumns(),
memberDetails,
xmlDocumentContext
);
ForeignKeyProcessing.applyForeignKey( jaxbPluralAttribute.getMapKeyForeignKey(), memberDetails, xmlDocumentContext );
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// filters and custom sql
XmlAnnotationHelper.applyFilters( jaxbPluralAttribute.getFilters(), memberDetails, xmlDocumentContext );
XmlAnnotationHelper.applySqlRestriction( jaxbPluralAttribute.getSqlRestriction(), memberDetails, xmlDocumentContext );
XmlAnnotationHelper.applyCustomSql(
jaxbPluralAttribute.getSqlInsert(),
memberDetails,
HibernateAnnotations.SQL_INSERT,
xmlDocumentContext
);
XmlAnnotationHelper.applyCustomSql(
jaxbPluralAttribute.getSqlUpdate(),
memberDetails,
HibernateAnnotations.SQL_UPDATE,
xmlDocumentContext
);
XmlAnnotationHelper.applyCustomSql(
jaxbPluralAttribute.getSqlDelete(),
memberDetails,
HibernateAnnotations.SQL_DELETE,
xmlDocumentContext
);
XmlAnnotationHelper.applyCustomSql(
jaxbPluralAttribute.getSqlDeleteAll(),
memberDetails,
HibernateAnnotations.SQL_DELETE_ALL,
xmlDocumentContext
);
}
private static FetchMode interpretFetchMode(JaxbPluralFetchModeImpl fetchMode) {
return switch ( fetchMode ) {
case JOIN -> FetchMode.JOIN;
case SELECT, SUBSELECT -> FetchMode.SELECT;
};
}
private static void applyOrderColumn(
JaxbPluralAttribute jaxbPluralAttribute,
MutableMemberDetails memberDetails,
XmlDocumentContext xmlDocumentContext) {
final JaxbOrderColumnImpl jaxbOrderColumn = jaxbPluralAttribute.getOrderColumn();
final Integer listIndexBase = jaxbPluralAttribute.getListIndexBase();
if ( jaxbOrderColumn != null
|| listIndexBase != null
|| jaxbPluralAttribute.getClassification() == LimitedCollectionClassification.LIST ) {
// apply @OrderColumn in any of these cases
final OrderColumnJpaAnnotation orderColumnAnn = (OrderColumnJpaAnnotation) memberDetails.applyAnnotationUsage(
JpaAnnotations.ORDER_COLUMN,
xmlDocumentContext.getModelBuildingContext()
);
if ( jaxbOrderColumn != null ) {
// apply any explicit config
orderColumnAnn.apply( jaxbOrderColumn, xmlDocumentContext );
}
}
if ( listIndexBase != null ) {
final ListIndexBaseAnnotation annUsage = (ListIndexBaseAnnotation) memberDetails.applyAnnotationUsage(
HibernateAnnotations.LIST_INDEX_BASE,
xmlDocumentContext.getModelBuildingContext()
);
annUsage.value( listIndexBase );
}
}
}
| CommonPluralAttributeProcessing |
java | quarkusio__quarkus | extensions/smallrye-openapi/runtime/src/main/java/io/quarkus/smallrye/openapi/runtime/filter/AutoBasicSecurityFilter.java | {
"start": 254,
"end": 1246
} | class ____ extends AutoSecurityFilter {
private String basicSecuritySchemeValue;
public AutoBasicSecurityFilter() {
super();
}
public AutoBasicSecurityFilter(String securitySchemeName, String securitySchemeDescription,
Map<String, String> securitySchemeExtensions,
String basicSecuritySchemeValue) {
super(securitySchemeName, securitySchemeDescription, securitySchemeExtensions);
this.basicSecuritySchemeValue = basicSecuritySchemeValue;
}
public String getBasicSecuritySchemeValue() {
return basicSecuritySchemeValue;
}
public void setBasicSecuritySchemeValue(String basicSecuritySchemeValue) {
this.basicSecuritySchemeValue = basicSecuritySchemeValue;
}
@Override
protected void updateSecurityScheme(SecurityScheme securityScheme) {
securityScheme.setType(SecurityScheme.Type.HTTP);
securityScheme.setScheme(basicSecuritySchemeValue);
}
} | AutoBasicSecurityFilter |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java | {
"start": 1411,
"end": 3023
} | class ____ {
private final ExchangeBuffer buffer;
private final Executor fetchExecutor;
private final PendingInstances outstandingSinks;
private final PendingInstances outstandingSources;
// Track if this exchange source should abort. There is no need to track the actual failure since the actual failure
// should be notified via #addRemoteSink(RemoteSink, boolean, Runnable, int, ActionListener).
private volatile boolean aborted = false;
private final AtomicInteger nextSinkId = new AtomicInteger();
private final Map<Integer, RemoteSink> remoteSinks = ConcurrentCollections.newConcurrentMap();
/**
* Creates a new ExchangeSourceHandler.
*
* @param maxBufferSize the maximum size of the exchange buffer. A larger buffer reduces ``pauses`` but uses more memory,
* which could otherwise be allocated for other purposes.
* @param fetchExecutor the executor used to fetch pages.
*/
public ExchangeSourceHandler(int maxBufferSize, Executor fetchExecutor) {
this.buffer = new ExchangeBuffer(maxBufferSize);
this.fetchExecutor = fetchExecutor;
this.outstandingSinks = new PendingInstances(() -> buffer.finish(false));
this.outstandingSources = new PendingInstances(() -> finishEarly(true, ActionListener.noop()));
}
public boolean isFinished() {
return buffer.isFinished();
}
private void checkFailure() {
if (aborted) {
throw new TaskCancelledException("remote sinks failed");
}
}
private | ExchangeSourceHandler |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NonCanonicalTypeTest.java | {
"start": 3635,
"end": 4008
} | class ____ {
void test() {
Map.Entry<?, ?> entry = null;
}
}
""")
.doTest();
}
@Test
public void qualifiedName_inLambdaParameter_cantFix() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.util.function.Function;
| Test |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/UnmodifiableMultiValueMap.java | {
"start": 15903,
"end": 17298
} | class ____<T> implements Spliterator<List<T>> {
private final Spliterator<List<T>> delegate;
public UnmodifiableValueSpliterator(Spliterator<List<T>> delegate) {
this.delegate = delegate;
}
@Override
public boolean tryAdvance(Consumer<? super List<T>> action) {
return this.delegate.tryAdvance(l -> action.accept(Collections.unmodifiableList(l)));
}
@Override
public void forEachRemaining(Consumer<? super List<T>> action) {
this.delegate.forEachRemaining(l -> action.accept(Collections.unmodifiableList(l)));
}
@Override
public @Nullable Spliterator<List<T>> trySplit() {
Spliterator<List<T>> split = this.delegate.trySplit();
if (split != null) {
return new UnmodifiableValueSpliterator<>(split);
}
else {
return null;
}
}
@Override
public long estimateSize() {
return this.delegate.estimateSize();
}
@Override
public long getExactSizeIfKnown() {
return this.delegate.getExactSizeIfKnown();
}
@Override
public int characteristics() {
return this.delegate.characteristics();
}
@Override
public boolean hasCharacteristics(int characteristics) {
return this.delegate.hasCharacteristics(characteristics);
}
@Override
public Comparator<? super List<T>> getComparator() {
return this.delegate.getComparator();
}
}
}
}
| UnmodifiableValueSpliterator |
java | grpc__grpc-java | okhttp/third_party/okhttp/main/java/io/grpc/okhttp/internal/OptionalMethod.java | {
"start": 1213,
"end": 6593
} | class ____<T> {
/** The return type of the method. null means "don't care". */
private final Class<?> returnType;
private final String methodName;
@SuppressWarnings("rawtypes")
private final Class[] methodParams;
/**
* Creates an optional method.
*
* @param returnType the return type to required, null if it does not matter
* @param methodName the name of the method
* @param methodParams the method parameter types
*/
@SuppressWarnings("rawtypes")
public OptionalMethod(Class<?> returnType, String methodName, Class... methodParams) {
this.returnType = returnType;
this.methodName = methodName;
this.methodParams = methodParams;
}
/**
* Returns true if the method exists on the supplied {@code target}.
*/
public boolean isSupported(T target) {
return getMethod(target.getClass()) != null;
}
/**
* Invokes the method on {@code target} with {@code args}. If the method does not exist or is not
* public then {@code null} is returned. See also
* {@link #invokeOptionalWithoutCheckedException(Object, Object...)}.
*
* @throws IllegalArgumentException if the arguments are invalid
* @throws InvocationTargetException if the invocation throws an exception
*/
public Object invokeOptional(T target, Object... args) throws InvocationTargetException {
Method m = getMethod(target.getClass());
if (m == null) {
return null;
}
try {
return m.invoke(target, args);
} catch (IllegalAccessException e) {
return null;
}
}
/**
* Invokes the method on {@code target}. If the method does not exist or is not
* public then {@code null} is returned. Any RuntimeException thrown by the method is thrown,
* checked exceptions are wrapped in an {@link AssertionError}.
*
* @throws IllegalArgumentException if the arguments are invalid
*/
public Object invokeOptionalWithoutCheckedException(T target, Object... args) {
try {
return invokeOptional(target, args);
} catch (InvocationTargetException e) {
Throwable targetException = e.getTargetException();
if (targetException instanceof RuntimeException) {
throw (RuntimeException) targetException;
}
AssertionError error = new AssertionError("Unexpected exception");
error.initCause(targetException);
throw error;
}
}
/**
* Invokes the method on {@code target} with {@code args}. Throws an error if the method is not
* supported. See also {@link #invokeWithoutCheckedException(Object, Object...)}.
*
* @throws IllegalArgumentException if the arguments are invalid
* @throws InvocationTargetException if the invocation throws an exception
*/
public Object invoke(T target, Object... args) throws InvocationTargetException {
Method m = getMethod(target.getClass());
if (m == null) {
throw new AssertionError("Method " + methodName + " not supported for object " + target);
}
try {
return m.invoke(target, args);
} catch (IllegalAccessException e) {
// Method should be public: we checked.
AssertionError error = new AssertionError("Unexpectedly could not call: " + m);
error.initCause(e);
throw error;
}
}
/**
* Invokes the method on {@code target}. Throws an error if the method is not supported. Any
* RuntimeException thrown by the method is thrown, checked exceptions are wrapped in
* an {@link AssertionError}.
*
* @throws IllegalArgumentException if the arguments are invalid
*/
public Object invokeWithoutCheckedException(T target, Object... args) {
try {
return invoke(target, args);
} catch (InvocationTargetException e) {
Throwable targetException = e.getTargetException();
if (targetException instanceof RuntimeException) {
throw (RuntimeException) targetException;
}
AssertionError error = new AssertionError("Unexpected exception");
error.initCause(targetException);
throw error;
}
}
/**
* Perform a lookup for the method. No caching.
* In order to return a method the method name and arguments must match those specified when
* the {@link OptionalMethod} was created. If the return type is specified (i.e. non-null) it
* must also be compatible. The method must also be public.
*/
private Method getMethod(Class<?> clazz) {
Method method = null;
if (methodName != null) {
method = getPublicMethod(clazz, methodName, methodParams);
if (method != null
&& returnType != null
&& !returnType.isAssignableFrom(method.getReturnType())) {
// If the return type is non-null it must be compatible.
method = null;
}
}
return method;
}
@SuppressWarnings("rawtypes")
private static Method getPublicMethod(Class<?> clazz, String methodName, Class[] parameterTypes) {
Method method = null;
try {
if (clazz == null) {
return null;
}
if ((clazz.getModifiers() & Modifier.PUBLIC) == 0) {
return getPublicMethod(clazz.getSuperclass(), methodName, parameterTypes);
}
method = clazz.getMethod(methodName, parameterTypes);
if ((method.getModifiers() & Modifier.PUBLIC) == 0) {
method = null;
}
} catch (NoSuchMethodException e) {
// None.
}
return method;
}
}
| OptionalMethod |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/introspect/POJOPropertyBuilder.java | {
"start": 674,
"end": 15628
} | class ____
extends BeanPropertyDefinition
implements Comparable<POJOPropertyBuilder>
{
/**
* Marker value used to denote that no reference-property information found for
* this property
*/
private final static AnnotationIntrospector.ReferenceProperty NOT_REFEFERENCE_PROP =
AnnotationIntrospector.ReferenceProperty.managed("");
/**
* Whether property is being composed for serialization
* (true) or deserialization (false)
*/
protected final boolean _forSerialization;
protected final MapperConfig<?> _config;
protected final AnnotationIntrospector _annotationIntrospector;
/**
* External name of logical property; may change with
* renaming (by new instance being constructed using
* a new name)
*/
protected final PropertyName _name;
/**
* Original internal name, derived from accessor, of this
* property. Will not be changed by renaming.
*/
protected final PropertyName _internalName;
protected Linked<AnnotatedField> _fields;
protected Linked<AnnotatedParameter> _ctorParameters;
protected Linked<AnnotatedMethod> _getters;
protected Linked<AnnotatedMethod> _setters;
protected transient PropertyMetadata _metadata;
/**
* Lazily accessed information about this property iff it is a forward or
* back reference.
*
* @since 2.9
*/
protected transient AnnotationIntrospector.ReferenceProperty _referenceInfo;
public POJOPropertyBuilder(MapperConfig<?> config, AnnotationIntrospector ai,
boolean forSerialization, PropertyName internalName) {
this(config, ai, forSerialization, internalName, internalName);
}
protected POJOPropertyBuilder(MapperConfig<?> config, AnnotationIntrospector ai,
boolean forSerialization, PropertyName internalName, PropertyName name)
{
_config = config;
_annotationIntrospector = ai;
_internalName = internalName;
_name = name;
_forSerialization = forSerialization;
}
protected POJOPropertyBuilder(POJOPropertyBuilder src, PropertyName newName)
{
_config = src._config;
_annotationIntrospector = src._annotationIntrospector;
_internalName = src._internalName;
_name = newName;
_fields = src._fields;
_ctorParameters = src._ctorParameters;
_getters = src._getters;
_setters = src._setters;
_forSerialization = src._forSerialization;
}
/*
/**********************************************************
/* Mutant factory methods
/**********************************************************
*/
@Override
public POJOPropertyBuilder withName(PropertyName newName) {
return new POJOPropertyBuilder(this, newName);
}
@Override
public POJOPropertyBuilder withSimpleName(String newSimpleName)
{
PropertyName newName = _name.withSimpleName(newSimpleName);
return (newName == _name) ? this : new POJOPropertyBuilder(this, newName);
}
/*
/**********************************************************
/* Comparable implementation: sort alphabetically, except
/* that properties with constructor parameters sorted
/* before other properties
/**********************************************************
*/
@Override
public int compareTo(POJOPropertyBuilder other)
{
// first, if one has ctor params, that should come first:
if (_ctorParameters != null) {
if (other._ctorParameters == null) {
return -1;
}
} else if (other._ctorParameters != null) {
return 1;
}
// otherwise sort by external name (including sorting of ctor parameters)
return getName().compareTo(other.getName());
}
/*
/**********************************************************
/* BeanPropertyDefinition implementation, name/type
/**********************************************************
*/
@Override
public String getName() {
return (_name == null) ? null : _name.getSimpleName();
}
@Override
public PropertyName getFullName() {
return _name;
}
@Override
public boolean hasName(PropertyName name) {
return _name.equals(name);
}
@Override
public String getInternalName() { return _internalName.getSimpleName(); }
@Override
public PropertyName getWrapperName() {
/* 13-Mar-2013, tatu: Accessing via primary member SHOULD work,
* due to annotation merging. However, I have seen some problems
* with this access (for other annotations)... so if this should
* occur, try commenting out full traversal code
*/
AnnotatedMember member = getPrimaryMember();
return (member == null) ? null
: _annotationIntrospector.findWrapperName(_config, member);
/*
return fromMemberAnnotations(new WithMember<PropertyName>() {
@Override
public PropertyName withMember(AnnotatedMember member) {
return _annotationIntrospector.findWrapperName(member);
}
});
*/
}
@Override
public boolean isExplicitlyIncluded() {
return _anyExplicits(_fields)
|| _anyExplicits(_getters)
|| _anyExplicits(_setters)
// 16-Jan-2016, tatu: Creator names are special, in that name should exist too;
// reason for this is [databind#1317]. Let's hope this works well, may need
// to tweak further if this lowers visibility
// || _anyExplicits(_ctorParameters)
|| _anyExplicitNames(_ctorParameters)
;
}
@Override
public boolean isExplicitlyNamed() {
return _anyExplicitNames(_fields)
|| _anyExplicitNames(_getters)
|| _anyExplicitNames(_setters)
|| _anyExplicitNames(_ctorParameters)
;
}
/*
/**********************************************************
/* Simple metadata
/**********************************************************
*/
@Override
public PropertyMetadata getMetadata() {
if (_metadata == null) {
// 20-Jun-2020, tatu: Unfortunately there may be issues if validity
// checks for accessor/mutator ambiguity is checked when we get
// this info (see [databind#2757] so...
final AnnotatedMember prim = getPrimaryMemberUnchecked();
if (prim == null) { // can this occur?
_metadata = PropertyMetadata.STD_REQUIRED_OR_OPTIONAL;
} else {
final Boolean b;
final String desc;
final Integer idx;
final String def;
b = _annotationIntrospector.hasRequiredMarker(_config, prim);
desc = _annotationIntrospector.findPropertyDescription(_config, prim);
idx = _annotationIntrospector.findPropertyIndex(_config, prim);
def = _annotationIntrospector.findPropertyDefaultValue(_config, prim);
if (b == null && idx == null && def == null) {
_metadata = (desc == null) ? PropertyMetadata.STD_REQUIRED_OR_OPTIONAL
: PropertyMetadata.STD_REQUIRED_OR_OPTIONAL.withDescription(desc);
} else {
_metadata = PropertyMetadata.construct(b, desc, idx, def);
}
if (!_forSerialization) {
_metadata = _getSetterInfo(_metadata, prim);
}
}
}
return _metadata;
}
/**
* Helper method that contains logic for accessing and merging all setter
* information that we needed, regarding things like possible merging
* of property value, and handling of incoming nulls.
* Only called for deserialization purposes.
*/
protected PropertyMetadata _getSetterInfo(PropertyMetadata metadata,
AnnotatedMember primary)
{
boolean needMerge = true;
Nulls valueNulls = null;
Nulls contentNulls = null;
// Slightly confusing: first, annotations should be accessed via primary member
// (mutator); but accessor is needed for actual merge operation. So
// 20-Jun-2020, tatu: Unfortunately strict checks lead to [databind#2757]
// so we will need to try to avoid them at this point
AnnotatedMember acc = getAccessor();
if (primary != null) {
// Ok, first: does property itself have something to say?
if (_annotationIntrospector != null) {
if (acc != null) {
Boolean b = _annotationIntrospector.findMergeInfo(_config, primary);
if (b != null) {
needMerge = false;
if (b.booleanValue()) {
metadata = metadata.withMergeInfo(PropertyMetadata.MergeInfo.createForPropertyOverride(acc));
}
}
}
JsonSetter.Value setterInfo = _annotationIntrospector.findSetterInfo(_config, primary);
if (setterInfo != null) {
valueNulls = setterInfo.nonDefaultValueNulls();
contentNulls = setterInfo.nonDefaultContentNulls();
}
}
// If not, config override?
// 25-Oct-2016, tatu: Either this, or type of accessor...
if (needMerge || (valueNulls == null) || (contentNulls == null)) {
// 20-Jun-2020, tatu: Related to [databind#2757], need to find type
// but keeping mind that type for setters is trickier; and that
// generic typing gets tricky as well.
Class<?> rawType = _rawTypeOf(primary);
ConfigOverride co = _config.getConfigOverride(rawType);
JsonSetter.Value nullHandling = co.getNullHandling();
if (nullHandling != null) {
if (valueNulls == null) {
valueNulls = nullHandling.nonDefaultValueNulls();
}
if (contentNulls == null) {
contentNulls = nullHandling.nonDefaultContentNulls();
}
}
if (needMerge && (acc != null)) {
Boolean b = co.getMergeable();
if (b != null) {
needMerge = false;
if (b.booleanValue()) {
metadata = metadata.withMergeInfo(PropertyMetadata.MergeInfo.createForTypeOverride(acc));
}
}
}
}
}
if (needMerge || (valueNulls == null) || (contentNulls == null)) {
JsonSetter.Value setterInfo = _config.getDefaultNullHandling();
if (valueNulls == null) {
valueNulls = setterInfo.nonDefaultValueNulls();
}
if (contentNulls == null) {
contentNulls = setterInfo.nonDefaultContentNulls();
}
if (needMerge) {
Boolean b = _config.getDefaultMergeable();
if (Boolean.TRUE.equals(b) && (acc != null)) {
metadata = metadata.withMergeInfo(PropertyMetadata.MergeInfo.createForDefaults(acc));
}
}
}
if ((valueNulls != null) || (contentNulls != null)) {
metadata = metadata.withNulls(valueNulls, contentNulls);
}
return metadata;
}
/**
* Type determined from the primary member for the property being built,
* considering precedence according to whether we are processing serialization
* or deserialization.
*/
@Override
public JavaType getPrimaryType() {
if (_forSerialization) {
AnnotatedMember m = getGetter();
if (m == null) {
m = getField();
if (m == null) {
// 09-Feb-2017, tatu: Not sure if this or `null` but...
return TypeFactory.unknownType();
}
}
return m.getType();
}
AnnotatedMember m = getConstructorParameter();
if (m == null) {
m = getSetter();
// Important: can't try direct type access for setter; what we need is
// type of the first parameter
if (m != null) {
return ((AnnotatedMethod) m).getParameterType(0);
}
m = getField();
}
// for setterless properties, however, can further try getter
if (m == null) {
m = getGetter();
if (m == null) {
return TypeFactory.unknownType();
}
}
return m.getType();
}
@Override
public Class<?> getRawPrimaryType() {
return getPrimaryType().getRawClass();
}
/*
/**********************************************************
/* BeanPropertyDefinition implementation, accessor access
/**********************************************************
*/
@Override
public boolean hasGetter() { return _getters != null; }
@Override
public boolean hasSetter() { return _setters != null; }
@Override
public boolean hasField() { return _fields != null; }
// @since 2.20 additional accessor
public boolean hasFieldAndNothingElse() {
return (_fields != null)
&& ((_getters == null) && (_setters == null) && (_ctorParameters == null));
}
@Override
public boolean hasConstructorParameter() { return _ctorParameters != null; }
@Override
public boolean couldDeserialize() {
return (_ctorParameters != null)
|| (_setters != null)
|| ((_fields != null)
// [databind#736] Since 2.17: Fix `REQUIRE_SETTERS_FOR_GETTERS` taking no effect
&& (_anyVisible(_fields)));
}
@Override
public boolean couldSerialize() {
return (_getters != null) || (_fields != null);
}
@Override
public AnnotatedMethod getGetter()
{
// Easy with zero or one getters...
Linked<AnnotatedMethod> curr = _getters;
if (curr == null) {
return null;
}
Linked<AnnotatedMethod> next = curr.next;
if (next == null) {
return curr.value;
}
// But if multiple, verify that they do not conflict...
for (; next != null; next = next.next) {
// Allow masking, i.e. do not report exception if one
// is in super- | POJOPropertyBuilder |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/signatures/SubscriberSignatureTest.java | {
"start": 7484,
"end": 8022
} | class ____ {
AtomicBoolean completed = new AtomicBoolean();
AtomicReference<Throwable> failure = new AtomicReference<>();
List<Integer> items = new CopyOnWriteArrayList<>();
List<Message<Integer>> messages = new CopyOnWriteArrayList<>();
public boolean hasCompleted() {
return completed.get();
}
public List<Integer> getItems() {
return items;
}
public List<Message<Integer>> getMessages() {
return messages;
}
}
}
| Spy |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/MapWithGenericValuesDeserTest.java | {
"start": 1698,
"end": 1782
} | class ____<KEY> extends HashMap<KEY,StringWrapper> { }
static | StringWrapperValueMap |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/customtype/ExtendedEnumTypeTest.java | {
"start": 2512,
"end": 4542
} | enum ____ {
ARCHIVED,
DRAFT
}
Widget() {
}
Widget(Status status) {
this.status = status;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
this.status = status;
}
public Status getStatus2() {
return status2;
}
public void setStatus2(Status status2) {
this.status2 = status2;
}
}
private Integer widgetId;
@BeforeClassTemplate
public void initData(EntityManagerFactoryScope scope) {
// Revision 1 - insert
this.widgetId = scope.fromTransaction( entityManager -> {
final Widget widget = new Widget( Widget.Status.DRAFT );
entityManager.persist( widget );
return widget.getId();
} );
// Revision 2 - update
scope.inTransaction( entityManager -> {
final Widget widget = entityManager.find( Widget.class, this.widgetId );
widget.setStatus( Widget.Status.ARCHIVED );
entityManager.merge( widget );
} );
// Revision 3 - delete
scope.inTransaction( entityManager -> {
final Widget widget = entityManager.find( Widget.class, this.widgetId );
entityManager.remove( widget );
} );
}
@Test
public void testRevisionHistory(EntityManagerFactoryScope scope) {
scope.inEntityManager( em -> {
final var auditReader = AuditReaderFactory.get( em );
List revisions = auditReader.getRevisions( Widget.class, this.widgetId );
assertEquals( Arrays.asList( 1, 2, 3 ), revisions );
final Widget rev1 = auditReader.find( Widget.class, this.widgetId, 1 );
assertEquals( Widget.Status.DRAFT, rev1.getStatus() );
final Widget rev2 = auditReader.find( Widget.class, this.widgetId, 2 );
assertEquals( Widget.Status.ARCHIVED, rev2.getStatus() );
final Widget rev3 = auditReader.find( Widget.class, this.widgetId, 3 );
assertNull( rev3 );
} );
}
@Test
public void testEnumPropertyStorageType(EntityManagerFactoryScope scope) {
// test that property 'status' translates to an | Status |
java | google__auto | value/src/it/functional/src/test/java/com/google/auto/value/AutoBuilderKotlinTest.java | {
"start": 2910,
"end": 5510
} | class ____ {
static KotlinDataWithDefaultsBuilder builder() {
return new AutoBuilder_AutoBuilderKotlinTest_KotlinDataWithDefaultsBuilder();
}
abstract KotlinDataWithDefaultsBuilder setAnInt(int x);
abstract int getAnInt();
abstract ImmutableList.Builder<String> anImmutableListBuilder();
abstract KotlinDataWithDefaultsBuilder setNotDefaulted(long x);
abstract long getNotDefaulted();
abstract KotlinDataWithDefaultsBuilder setAString(String x);
abstract String getAString();
abstract KotlinDataWithDefaults build();
}
@Test
public void kotlinWithDefaults_explicit() {
KotlinDataWithDefaultsBuilder builder =
KotlinDataWithDefaultsBuilder.builder()
.setAString("answer")
.setNotDefaulted(100L)
.setAnInt(42);
builder.anImmutableListBuilder().add("bar");
KotlinDataWithDefaults x = builder.build();
assertThat(x.getAString()).isEqualTo("answer");
assertThat(x.getAnImmutableList()).containsExactly("bar");
assertThat(x.getNotDefaulted()).isEqualTo(100L);
assertThat(x.getAnInt()).isEqualTo(42);
}
@Test
public void kotlinWithDefaults_defaulted() {
KotlinDataWithDefaults x =
KotlinDataWithDefaultsBuilder.builder().setNotDefaulted(100L).build();
assertThat(x.getAnInt()).isEqualTo(23);
assertThat(x.getAnImmutableList()).containsExactly("foo");
assertThat(x.getAString()).isEqualTo("skidoo");
assertThat(x.getNotDefaulted()).isEqualTo(100L);
KotlinDataWithDefaults copy =
new AutoBuilder_AutoBuilderKotlinTest_KotlinDataWithDefaultsBuilder(x).build();
assertThat(copy).isEqualTo(x);
assertThat(copy).isNotSameInstanceAs(x);
KotlinDataWithDefaults modified =
new AutoBuilder_AutoBuilderKotlinTest_KotlinDataWithDefaultsBuilder(x).setAnInt(17).build();
assertThat(modified.getAnInt()).isEqualTo(17);
}
@Test
public void kotlinWithDefaults_getter() {
KotlinDataWithDefaultsBuilder builder = KotlinDataWithDefaultsBuilder.builder();
assertThrows(IllegalStateException.class, builder::getAnInt);
builder.setAnInt(42);
assertThat(builder.getAnInt()).isEqualTo(42);
assertThrows(IllegalStateException.class, builder::getNotDefaulted);
builder.setNotDefaulted(100L);
assertThat(builder.getNotDefaulted()).isEqualTo(100L);
assertThrows(IllegalStateException.class, builder::getAString);
builder.setAString("answer");
assertThat(builder.getAString()).isEqualTo("answer");
}
@AutoBuilder(ofClass = KotlinDataEightDefaults.class)
| KotlinDataWithDefaultsBuilder |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/jdbc/SqlScriptsTestExecutionListenerTests.java | {
"start": 6325,
"end": 6428
} | class ____ {
@Sql
public void foo() {
}
}
static | MissingValueAndScriptsAndStatementsAtMethodLevel |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/multipart/MultipartFilenameTest.java | {
"start": 17779,
"end": 17922
} | class ____ {
@FormParam("myFile")
@PartType(APPLICATION_OCTET_STREAM)
public File file;
}
public static | ClientForm |
java | google__guice | extensions/throwingproviders/test/com/google/inject/throwingproviders/CheckedProviderTest.java | {
"start": 50025,
"end": 51196
} | interface ____<T> extends CheckedProvider<T> {
T bar();
String baz();
}
public void testResultSerializes() throws Exception {
Result result = Result.forValue("foo");
result = Asserts.reserialize(result);
assertEquals("foo", result.getOrThrow());
}
public void testResultExceptionSerializes() throws Exception {
Result result = Result.forException(new Exception("boo"));
result = Asserts.reserialize(result);
try {
result.getOrThrow();
fail();
} catch (Exception ex) {
assertEquals("boo", ex.getMessage());
}
}
public void testEarlyBindingError() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
ThrowingProviderBinder.create(binder())
.bind(StringRemoteProvider.class, String.class)
.to(FailingProvider.class);
}
});
fail();
} catch (CreationException ce) {
assertContains(
ce.getMessage(),
"No injectable constructor for type CheckedProviderTest$FailingProvider.");
}
}
private static | ManyMethods |
java | quarkusio__quarkus | extensions/hibernate-orm/runtime/src/main/java/io/quarkus/hibernate/orm/runtime/boot/FastBootEntityManagerFactoryBuilder.java | {
"start": 2374,
"end": 9454
} | class ____ implements EntityManagerFactoryBuilder {
protected final QuarkusPersistenceUnitDescriptor puDescriptor;
protected final PrevalidatedQuarkusMetadata metadata;
protected final StandardServiceRegistry standardServiceRegistry;
private final RuntimeSettings runtimeSettings;
private final Object validatorFactory;
private final Object cdiBeanManager;
private final BuiltinFormatMapperBehaviour builtinFormatMapperBehaviour;
private final JsonFormatterCustomizationCheck jsonFormatterCustomizationCheck;
protected final MultiTenancyStrategy multiTenancyStrategy;
protected final boolean shouldApplySchemaMigration;
public FastBootEntityManagerFactoryBuilder(
QuarkusPersistenceUnitDescriptor puDescriptor,
PrevalidatedQuarkusMetadata metadata,
StandardServiceRegistry standardServiceRegistry, RuntimeSettings runtimeSettings, Object validatorFactory,
Object cdiBeanManager, MultiTenancyStrategy multiTenancyStrategy, boolean shouldApplySchemaMigration,
BuiltinFormatMapperBehaviour builtinFormatMapperBehaviour,
JsonFormatterCustomizationCheck jsonFormatterCustomizationCheck) {
this.puDescriptor = puDescriptor;
this.metadata = metadata;
this.standardServiceRegistry = standardServiceRegistry;
this.runtimeSettings = runtimeSettings;
this.validatorFactory = validatorFactory;
this.cdiBeanManager = cdiBeanManager;
this.multiTenancyStrategy = multiTenancyStrategy;
this.shouldApplySchemaMigration = shouldApplySchemaMigration;
this.builtinFormatMapperBehaviour = builtinFormatMapperBehaviour;
this.jsonFormatterCustomizationCheck = jsonFormatterCustomizationCheck;
}
@Override
public EntityManagerFactoryBuilder withValidatorFactory(Object validatorFactory) {
return null;
}
@Override
public EntityManagerFactoryBuilder withDataSource(DataSource dataSource) {
return null;
}
@Override
public EntityManagerFactory build() {
try {
final SessionFactoryOptionsBuilder optionsBuilder = metadata.buildSessionFactoryOptionsBuilder();
populate(puDescriptor.getName(), optionsBuilder, standardServiceRegistry);
return new SessionFactoryImpl(metadata, optionsBuilder.buildOptions(),
metadata.getTypeConfiguration().getMetadataBuildingContext().getBootstrapContext());
} catch (Exception e) {
throw persistenceException("Unable to build Hibernate SessionFactory", e);
}
}
@Override
public void cancel() {
// nothing?
}
@Override
public void generateSchema() {
throw new UnsupportedOperationException(
"This isn't used for schema generation - see SessionFactoryObserverForSchemaExport instead");
}
protected PersistenceException persistenceException(String message, Exception cause) {
// Provide a comprehensible message if there is an issue with SSL support
Throwable t = cause;
while (t != null) {
if (t instanceof NoSuchAlgorithmException) {
message += "Unable to enable SSL support. You might be in the case where you used the `quarkus.ssl.native=false` configuration"
+ " and SSL was not disabled automatically for your driver.";
break;
}
if (t instanceof CommandAcceptanceException) {
message = "Invalid import file. Make sure your statements are valid and properly separated by a semi-colon.";
break;
}
t = t.getCause();
}
return new PersistenceException(getExceptionHeader() + message, cause);
}
private String getExceptionHeader() {
return "[PersistenceUnit: " + puDescriptor.getName() + "] ";
}
protected void populate(String persistenceUnitName, SessionFactoryOptionsBuilder options, StandardServiceRegistry ssr) {
// will use user override value or default to false if not supplied to follow
// JPA spec.
final boolean jtaTransactionAccessEnabled = runtimeSettings.getBoolean(
org.hibernate.cfg.AvailableSettings.ALLOW_JTA_TRANSACTION_ACCESS);
if (!jtaTransactionAccessEnabled) {
options.disableJtaTransactionAccess();
}
//Check for use of deprecated org.hibernate.jpa.AvailableSettings.SESSION_FACTORY_OBSERVER
final Object legacyObserver = runtimeSettings.get("hibernate.ejb.session_factory_observer");
if (legacyObserver != null) {
throw new HibernateException("Legacy setting being used: 'hibernate.ejb.session_factory_observer' was replaced by '"
+ org.hibernate.cfg.AvailableSettings.SESSION_FACTORY_OBSERVER + "'. Please update your configuration.");
}
// Locate and apply any requested SessionFactoryObserver
final Object sessionFactoryObserverSetting = runtimeSettings
.get(org.hibernate.cfg.AvailableSettings.SESSION_FACTORY_OBSERVER);
if (sessionFactoryObserverSetting != null) {
final StrategySelector strategySelector = ssr.getService(StrategySelector.class);
final SessionFactoryObserver suppliedSessionFactoryObserver = strategySelector
.resolveStrategy(SessionFactoryObserver.class, sessionFactoryObserverSetting);
options.addSessionFactoryObservers(suppliedSessionFactoryObserver);
}
options.addSessionFactoryObservers(new ServiceRegistryCloser());
//New in ORM 6.2:
options.addSessionFactoryObservers(new SessionFactoryObserverForNamedQueryValidation(metadata));
// We should avoid running schema migrations multiple times
if (shouldApplySchemaMigration) {
options.addSessionFactoryObservers(new SessionFactoryObserverForSchemaExport(metadata));
}
//Vanilla ORM registers this one as well; we don't:
//options.addSessionFactoryObservers( new SessionFactoryObserverForRegistration() );
// This one is specific to Quarkus
options.addSessionFactoryObservers(new QuarkusSessionFactoryObserverForDbVersionCheck());
options.applyEntityNotFoundDelegate(new JpaEntityNotFoundDelegate());
// This is necessary for Hibernate Reactive, see https://github.com/quarkusio/quarkus/issues/15814
// This is also necessary for Hibernate ORM if we want to prevent calls to getters on initialized entities
// outside of sessions from throwing exceptions, see https://github.com/quarkusio/quarkus/discussions/27657
options.enableCollectionInDefaultFetchGroup(true);
if (this.validatorFactory != null) {
options.applyValidatorFactory(validatorFactory);
}
if (this.cdiBeanManager != null) {
options.applyBeanManager(cdiBeanManager);
}
//Small memory optimisations: ensure the | FastBootEntityManagerFactoryBuilder |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/debugging/VerboseLoggingOfInvocationsOnMockTest.java | {
"start": 1019,
"end": 4737
} | class ____ {
private ByteArrayOutputStream output;
private PrintStream original;
@Mock UnrelatedClass unrelatedMock;
@Before
public void setUp() {
original = System.out;
output = new ByteArrayOutputStream();
System.setOut(new PrintStream(output));
}
@After
public void tearDown() {
System.setOut(original);
}
@Test
public void shouldNotPrintInvocationOnMockWithoutSetting() {
// given
Foo foo = mock(Foo.class, withSettings().verboseLogging());
// when
foo.giveMeSomeString("Klipsch");
unrelatedMock.unrelatedMethod("Apple");
// then
Assertions.assertThat(printed())
.doesNotContain(mockName(unrelatedMock))
.doesNotContain("unrelatedMethod")
.doesNotContain("Apple");
}
@Test
public void shouldPrintUnstubbedInvocationOnMockToStdOut() {
// given
Foo foo = mock(Foo.class, withSettings().verboseLogging());
// when
foo.doSomething("Klipsch");
// then
Assertions.assertThat(printed())
.contains(getClass().getName())
.contains(mockName(foo))
.contains("doSomething")
.contains("Klipsch");
}
@Test
public void shouldPrintStubbedInvocationOnMockToStdOut() {
// given
Foo foo = mock(Foo.class, withSettings().verboseLogging());
given(foo.giveMeSomeString("Klipsch")).willReturn("earbuds");
// when
foo.giveMeSomeString("Klipsch");
// then
Assertions.assertThat(printed())
.contains(getClass().getName())
.contains(mockName(foo))
.contains("giveMeSomeString")
.contains("Klipsch")
.contains("earbuds");
}
@Test
public void shouldPrintThrowingInvocationOnMockToStdOut() {
// given
Foo foo = mock(Foo.class, withSettings().verboseLogging());
doThrow(new ThirdPartyException()).when(foo).doSomething("Klipsch");
try {
// when
foo.doSomething("Klipsch");
fail("Exception excepted.");
} catch (ThirdPartyException e) {
// then
Assertions.assertThat(printed())
.contains(getClass().getName())
.contains(mockName(foo))
.contains("doSomething")
.contains("Klipsch")
.contains(ThirdPartyException.class.getName());
}
}
@Test
public void shouldPrintRealInvocationOnSpyToStdOut() {
// given
FooImpl fooSpy =
mock(FooImpl.class, withSettings().spiedInstance(new FooImpl()).verboseLogging());
doCallRealMethod().when(fooSpy).doSomething("Klipsch");
// when
fooSpy.doSomething("Klipsch");
// then
Assertions.assertThat(printed())
.contains(getClass().getName())
.contains(mockName(fooSpy))
.contains("doSomething")
.contains("Klipsch");
}
@Test
public void usage() {
// given
Foo foo = mock(Foo.class, withSettings().verboseLogging());
given(foo.giveMeSomeString("Apple")).willReturn("earbuds");
// when
foo.giveMeSomeString("Shure");
foo.giveMeSomeString("Apple");
foo.doSomething("Klipsch");
}
private String printed() {
return output.toString();
}
private String mockName(Object mock) {
return MockUtil.getMockName(mock).toString();
}
private static | VerboseLoggingOfInvocationsOnMockTest |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/state/v2/adaptor/AsyncKeyedStateBackendAdaptor.java | {
"start": 3153,
"end": 8748
} | class ____<K> implements AsyncKeyedStateBackend<K> {
private final CheckpointableKeyedStateBackend<K> keyedStateBackend;
public AsyncKeyedStateBackendAdaptor(CheckpointableKeyedStateBackend<K> keyedStateBackend) {
this.keyedStateBackend = keyedStateBackend;
}
@Override
public void setup(@Nonnull StateRequestHandler stateRequestHandler) {}
@Override
public <N, S extends State, SV> S getOrCreateKeyedState(
N defaultNamespace,
TypeSerializer<N> namespaceSerializer,
StateDescriptor<SV> stateDesc)
throws Exception {
return createStateInternal(defaultNamespace, namespaceSerializer, stateDesc);
}
@Nonnull
@Override
@SuppressWarnings({"rawtypes", "unchecked"})
public <N, S extends InternalKeyedState, SV> S createStateInternal(
@Nonnull N defaultNamespace,
@Nonnull TypeSerializer<N> namespaceSerializer,
@Nonnull StateDescriptor<SV> stateDesc)
throws Exception {
org.apache.flink.api.common.state.StateDescriptor rawStateDesc =
StateDescriptorUtils.transformFromV2ToV1(stateDesc);
org.apache.flink.api.common.state.State rawState =
keyedStateBackend.getPartitionedState(
defaultNamespace, namespaceSerializer, rawStateDesc);
switch (rawStateDesc.getType()) {
case VALUE:
return (S) new ValueStateAdaptor((InternalValueState) rawState);
case LIST:
return (S) new ListStateAdaptor<>((InternalListState) rawState);
case REDUCING:
return (S) new ReducingStateAdaptor<>((InternalReducingState) rawState);
case AGGREGATING:
return (S) new AggregatingStateAdaptor<>((InternalAggregatingState) rawState);
case MAP:
return (S) new MapStateAdaptor<>((InternalMapState) rawState);
default:
throw new UnsupportedOperationException(
String.format("Unsupported state type: %s", rawStateDesc.getType()));
}
}
@Nonnull
@Override
public StateExecutor createStateExecutor() {
return new InvalidStateExecutor();
}
@Override
public KeyGroupRange getKeyGroupRange() {
return keyedStateBackend.getKeyGroupRange();
}
@Override
public void switchContext(@Nullable RecordContext<K> context) {
if (context != null) {
keyedStateBackend.setCurrentKeyAndKeyGroup(context.getKey(), context.getKeyGroup());
}
}
@Override
public void dispose() {}
@Override
public String getBackendTypeIdentifier() {
return keyedStateBackend.getBackendTypeIdentifier();
}
@Override
public void close() throws IOException {}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
if (keyedStateBackend instanceof CheckpointListener) {
((CheckpointListener) keyedStateBackend).notifyCheckpointComplete(checkpointId);
}
}
@Override
public void notifyCheckpointAborted(long checkpointId) throws Exception {
if (keyedStateBackend instanceof CheckpointListener) {
((CheckpointListener) keyedStateBackend).notifyCheckpointAborted(checkpointId);
}
}
@Override
public void notifyCheckpointSubsumed(long checkpointId) throws Exception {
if (keyedStateBackend instanceof InternalCheckpointListener) {
((InternalCheckpointListener) keyedStateBackend).notifyCheckpointSubsumed(checkpointId);
}
}
@Nonnull
@Override
public RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot(
long checkpointId,
long timestamp,
@Nonnull CheckpointStreamFactory streamFactory,
@Nonnull CheckpointOptions checkpointOptions)
throws Exception {
return keyedStateBackend.snapshot(
checkpointId, timestamp, streamFactory, checkpointOptions);
}
@Nonnull
@Override
public <T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
KeyGroupedInternalPriorityQueue<T> create(
@Nonnull String stateName,
@Nonnull TypeSerializer<T> byteOrderedElementSerializer) {
return keyedStateBackend.create(stateName, byteOrderedElementSerializer);
}
@Override
public <T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
KeyGroupedInternalPriorityQueue<T> create(
@Nonnull String stateName,
@Nonnull TypeSerializer<T> byteOrderedElementSerializer,
boolean allowFutureMetadataUpdates) {
return keyedStateBackend.create(
stateName, byteOrderedElementSerializer, allowFutureMetadataUpdates);
}
@Override
public boolean requiresLegacySynchronousTimerSnapshots(SnapshotType checkpointType) {
if (keyedStateBackend instanceof AbstractKeyedStateBackend) {
return ((AbstractKeyedStateBackend) keyedStateBackend)
.requiresLegacySynchronousTimerSnapshots(checkpointType);
}
return false;
}
@Override
public boolean isSafeToReuseKVState() {
return keyedStateBackend.isSafeToReuseKVState();
}
public CheckpointableKeyedStateBackend<K> getKeyedStateBackend() {
return keyedStateBackend;
}
private static | AsyncKeyedStateBackendAdaptor |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timeline-pluginstorage/src/main/java/org/apache/hadoop/yarn/server/timeline/EntityGroupFSTimelineStore.java | {
"start": 38646,
"end": 47496
} | class ____ implements Runnable {
@Override
public void run() {
LOG.debug("Cleaner starting");
long startTime = Time.monotonicNow();
try {
cleanLogs(doneRootPath, logRetainMillis);
} catch (Exception e) {
Throwable t = extract(e);
if (t instanceof InterruptedException) {
LOG.info("Cleaner interrupted");
} else {
LOG.error("Error cleaning files", e);
}
} finally {
metrics.addLogCleanTime(Time.monotonicNow() - startTime);
}
LOG.debug("Cleaner finished");
}
}
@InterfaceAudience.Private
@VisibleForTesting
void setFs(FileSystem incomingFs) {
this.fs = incomingFs;
}
@InterfaceAudience.Private
@VisibleForTesting
void setCachedLogs(TimelineEntityGroupId groupId, EntityCacheItem cacheItem) {
cachedLogs.put(groupId, cacheItem);
}
private List<TimelineStore> getTimelineStoresFromCacheIds(
Set<TimelineEntityGroupId> groupIds, String entityType,
List<EntityCacheItem> cacheItems)
throws IOException {
List<TimelineStore> stores = new LinkedList<TimelineStore>();
// For now we just handle one store in a context. We return the first
// non-null storage for the group ids.
for (TimelineEntityGroupId groupId : groupIds) {
TimelineStore storeForId = getCachedStore(groupId, cacheItems);
if (storeForId != null) {
LOG.debug("Adding {} as a store for the query", storeForId.getName());
stores.add(storeForId);
metrics.incrGetEntityToDetailOps();
}
}
if (stores.size() == 0) {
LOG.debug("Using summary store for {}", entityType);
stores.add(this.summaryStore);
metrics.incrGetEntityToSummaryOps();
}
return stores;
}
protected List<TimelineStore> getTimelineStoresForRead(String entityId,
String entityType, List<EntityCacheItem> cacheItems)
throws IOException {
Set<TimelineEntityGroupId> groupIds = new HashSet<TimelineEntityGroupId>();
for (TimelineEntityGroupPlugin cacheIdPlugin : cacheIdPlugins) {
LOG.debug("Trying plugin {} for id {} and type {}",
cacheIdPlugin.getClass().getName(), entityId, entityType);
Set<TimelineEntityGroupId> idsFromPlugin
= cacheIdPlugin.getTimelineEntityGroupId(entityId, entityType);
if (idsFromPlugin == null) {
LOG.debug("Plugin returned null " + cacheIdPlugin.getClass().getName());
} else {
LOG.debug("Plugin returned ids: " + idsFromPlugin);
}
if (idsFromPlugin != null) {
groupIds.addAll(idsFromPlugin);
LOG.debug("plugin {} returns a non-null value on query",
cacheIdPlugin.getClass().getName());
}
}
return getTimelineStoresFromCacheIds(groupIds, entityType, cacheItems);
}
private List<TimelineStore> getTimelineStoresForRead(String entityType,
NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
List<EntityCacheItem> cacheItems) throws IOException {
Set<TimelineEntityGroupId> groupIds = new HashSet<TimelineEntityGroupId>();
for (TimelineEntityGroupPlugin cacheIdPlugin : cacheIdPlugins) {
Set<TimelineEntityGroupId> idsFromPlugin =
cacheIdPlugin.getTimelineEntityGroupId(entityType, primaryFilter,
secondaryFilters);
if (idsFromPlugin != null) {
LOG.debug("plugin {} returns a non-null value on query {}",
cacheIdPlugin.getClass().getName(), idsFromPlugin);
groupIds.addAll(idsFromPlugin);
}
}
return getTimelineStoresFromCacheIds(groupIds, entityType, cacheItems);
}
// find a cached timeline store or null if it cannot be located
private TimelineStore getCachedStore(TimelineEntityGroupId groupId,
List<EntityCacheItem> cacheItems) throws IOException {
EntityCacheItem cacheItem;
synchronized (this.cachedLogs) {
// Note that the content in the cache log storage may be stale.
cacheItem = this.cachedLogs.get(groupId);
if (cacheItem == null) {
LOG.debug("Set up new cache item for id {}", groupId);
cacheItem = new EntityCacheItem(groupId, getConfig());
AppLogs appLogs = getAndSetAppLogs(groupId.getApplicationId());
if (appLogs != null) {
LOG.debug("Set applogs {} for group id {}", appLogs, groupId);
cacheItem.setAppLogs(appLogs);
this.cachedLogs.put(groupId, cacheItem);
} else {
LOG.warn("AppLogs for groupId {} is set to null!", groupId);
}
}
}
TimelineStore store = null;
if (cacheItem.getAppLogs() != null) {
AppLogs appLogs = cacheItem.getAppLogs();
LOG.debug("try refresh cache {} {}", groupId, appLogs.getAppId());
cacheItems.add(cacheItem);
store = cacheItem.refreshCache(aclManager, metrics);
} else {
LOG.warn("AppLogs for group id {} is null", groupId);
}
return store;
}
@Override
public TimelineEntities getEntities(String entityType, Long limit,
Long windowStart, Long windowEnd, String fromId, Long fromTs,
NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
EnumSet<Field> fieldsToRetrieve, CheckAcl checkAcl) throws IOException {
LOG.debug("getEntities type={} primary={}", entityType, primaryFilter);
List<EntityCacheItem> relatedCacheItems = new ArrayList<>();
List<TimelineStore> stores = getTimelineStoresForRead(entityType,
primaryFilter, secondaryFilters, relatedCacheItems);
TimelineEntities returnEntities = new TimelineEntities();
for (TimelineStore store : stores) {
LOG.debug("Try timeline store {} for the request", store.getName());
TimelineEntities entities = store.getEntities(entityType, limit,
windowStart, windowEnd, fromId, fromTs, primaryFilter,
secondaryFilters, fieldsToRetrieve, checkAcl);
if (entities != null) {
returnEntities.addEntities(entities.getEntities());
}
}
return returnEntities;
}
@Override
public TimelineEntity getEntity(String entityId, String entityType,
EnumSet<Field> fieldsToRetrieve) throws IOException {
LOG.debug("getEntity type={} id={}", entityType, entityId);
List<EntityCacheItem> relatedCacheItems = new ArrayList<>();
List<TimelineStore> stores = getTimelineStoresForRead(entityId, entityType,
relatedCacheItems);
for (TimelineStore store : stores) {
LOG.debug("Try timeline store {}:{} for the request", store.getName(),
store.toString());
TimelineEntity e =
store.getEntity(entityId, entityType, fieldsToRetrieve);
if (e != null) {
return e;
}
}
LOG.debug("getEntity: Found nothing");
return null;
}
@Override
public TimelineEvents getEntityTimelines(String entityType,
SortedSet<String> entityIds, Long limit, Long windowStart,
Long windowEnd, Set<String> eventTypes) throws IOException {
LOG.debug("getEntityTimelines type={} ids={}", entityType, entityIds);
TimelineEvents returnEvents = new TimelineEvents();
List<EntityCacheItem> relatedCacheItems = new ArrayList<>();
if (entityIds == null || entityIds.isEmpty()) {
return returnEvents;
}
for (String entityId : entityIds) {
LOG.debug("getEntityTimeline type={} id={}", entityType, entityId);
List<TimelineStore> stores
= getTimelineStoresForRead(entityId, entityType, relatedCacheItems);
for (TimelineStore store : stores) {
LOG.debug("Try timeline store {}:{} for the request", store.getName(),
store.toString());
SortedSet<String> entityIdSet = new TreeSet<>();
entityIdSet.add(entityId);
TimelineEvents events =
store.getEntityTimelines(entityType, entityIdSet, limit,
windowStart, windowEnd, eventTypes);
if (events != null) {
returnEvents.addEvents(events.getAllEvents());
}
}
}
return returnEvents;
}
@Override
public TimelineDomain getDomain(String domainId) throws IOException {
return summaryStore.getDomain(domainId);
}
@Override
public TimelineDomains getDomains(String owner) throws IOException {
return summaryStore.getDomains(owner);
}
@Override
public TimelinePutResponse put(TimelineEntities data) throws IOException {
return summaryStore.put(data);
}
@Override
public void put(TimelineDomain domain) throws IOException {
summaryStore.put(domain);
}
/**
* This is a special remote iterator whose {@link #hasNext()} method
* returns false if {@link #stopExecutors} is true.
*
* This provides an implicit shutdown of all iterative file list and scan
* operations without needing to implement it in the while loops themselves.
*/
private | EntityLogCleaner |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/util/ConcurrentReferenceHashMap.java | {
"start": 18267,
"end": 18552
} | enum ____ {
/** Use {@link SoftReference SoftReferences}. */
SOFT,
/** Use {@link WeakReference WeakReferences}. */
WEAK
}
/**
* A single segment used to divide the map to allow better concurrent performance.
*/
@SuppressWarnings("serial")
protected final | ReferenceType |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/profile/query/CollectorResultTests.java | {
"start": 1145,
"end": 5669
} | class ____ extends AbstractXContentSerializingTestCase<CollectorResult> {
public static CollectorResult createTestItem(int depth) {
String name = randomAlphaOfLengthBetween(5, 10);
String reason = randomAlphaOfLengthBetween(5, 10);
long time = randomNonNegativeLong();
if (randomBoolean()) {
// also often use relatively "small" values, otherwise we will mostly test huge longs
time = time % 100000;
}
int size = randomIntBetween(0, 5);
List<CollectorResult> children = new ArrayList<>(size);
if (depth > 0) {
for (int i = 0; i < size; i++) {
children.add(createTestItem(depth - 1));
}
}
return new CollectorResult(name, reason, time, children);
}
@Override
protected CollectorResult createTestInstance() {
return createTestItem(1);
}
@Override
protected CollectorResult mutateInstance(CollectorResult instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
@Override
protected CollectorResult doParseInstance(XContentParser parser) throws IOException {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
CollectorResult result = SearchResponseUtils.parseCollectorResult(parser);
ensureExpectedToken(null, parser.nextToken(), parser);
return result;
}
@Override
protected Reader<CollectorResult> instanceReader() {
return CollectorResult::new;
}
public void testToXContent() throws IOException {
List<CollectorResult> children = new ArrayList<>();
children.add(new CollectorResult("child1", "reason1", 100L, Collections.emptyList()));
children.add(new CollectorResult("child2", "reason1", 123356L, Collections.emptyList()));
CollectorResult result = new CollectorResult("collectorName", "some reason", 123456L, children);
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("""
{
"name" : "collectorName",
"reason" : "some reason",
"time_in_nanos" : 123456,
"children" : [
{
"name" : "child1",
"reason" : "reason1",
"time_in_nanos" : 100
},
{
"name" : "child2",
"reason" : "reason1",
"time_in_nanos" : 123356
}
]
}""", Strings.toString(builder));
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("""
{
"name" : "collectorName",
"reason" : "some reason",
"time" : "123.4micros",
"time_in_nanos" : 123456,
"children" : [
{
"name" : "child1",
"reason" : "reason1",
"time" : "100nanos",
"time_in_nanos" : 100
},
{
"name" : "child2",
"reason" : "reason1",
"time" : "123.3micros",
"time_in_nanos" : 123356
}
]
}""", Strings.toString(builder));
result = new CollectorResult("collectorName", "some reason", 12345678L, Collections.emptyList());
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("""
{
"name" : "collectorName",
"reason" : "some reason",
"time" : "12.3ms",
"time_in_nanos" : 12345678
}""", Strings.toString(builder));
result = new CollectorResult("collectorName", "some reason", 1234567890L, Collections.emptyList());
builder = XContentFactory.jsonBuilder().prettyPrint().humanReadable(true);
result.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("""
{
"name" : "collectorName",
"reason" : "some reason",
"time" : "1.2s",
"time_in_nanos" : 1234567890
}""", Strings.toString(builder));
}
}
| CollectorResultTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/replication/TransportWriteAction.java | {
"start": 23494,
"end": 24565
} | class ____ extends ReplicasProxy {
@Override
public void failShardIfNeeded(
ShardRouting replica,
long primaryTerm,
String message,
Exception exception,
ActionListener<Void> listener
) {
if (TransportActions.isShardNotAvailableException(exception) == false) {
logger.warn(() -> format("[%s] %s", replica.shardId(), message), exception);
}
shardStateAction.remoteShardFailed(
replica.shardId(),
replica.allocationId().getId(),
primaryTerm,
true,
message,
exception,
listener
);
}
@Override
public void markShardCopyAsStaleIfNeeded(ShardId shardId, String allocationId, long primaryTerm, ActionListener<Void> listener) {
shardStateAction.remoteShardFailed(shardId, allocationId, primaryTerm, true, "mark copy as stale", null, listener);
}
}
}
| WriteActionReplicasProxy |
java | spring-projects__spring-boot | cli/spring-boot-cli/src/test/java/org/springframework/boot/cli/command/CommandRunnerTests.java | {
"start": 5813,
"end": 5880
} | enum ____ {
SHOW_USAGE, ERROR_MESSAGE, PRINT_STACK_TRACE
}
}
| Call |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/Sample.java | {
"start": 867,
"end": 964
} | class ____ NOT immutable (to optimize memory) which means its associations need to be managed.
*/
| is |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmnode/RMNodeReconnectEvent.java | {
"start": 1070,
"end": 1828
} | class ____ extends RMNodeEvent {
private RMNode reconnectedNode;
private List<ApplicationId> runningApplications;
private List<NMContainerStatus> containerStatuses;
public RMNodeReconnectEvent(NodeId nodeId, RMNode newNode,
List<ApplicationId> runningApps, List<NMContainerStatus> containerReports) {
super(nodeId, RMNodeEventType.RECONNECTED);
reconnectedNode = newNode;
runningApplications = runningApps;
containerStatuses = containerReports;
}
public RMNode getReconnectedNode() {
return reconnectedNode;
}
public List<ApplicationId> getRunningApplications() {
return runningApplications;
}
public List<NMContainerStatus> getNMContainerStatuses() {
return containerStatuses;
}
}
| RMNodeReconnectEvent |
java | quarkusio__quarkus | extensions/hibernate-reactive/deployment/src/test/java/io/quarkus/hibernate/reactive/compatibility/ORMReactiveCompatbilityDefaultBothUnitTest.java | {
"start": 503,
"end": 2415
} | class ____ extends CompatibilityUnitTestBase {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(Hero.class)
.addAsResource("complexMultilineImports.sql", "import.sql"))
.setForcedDependencies(List.of(
Dependency.of("io.quarkus", "quarkus-jdbc-postgresql-deployment", Version.getVersion()) // this triggers Agroal
))
.withConfigurationResource("application-unittest-both.properties")
.overrideConfigKey("quarkus.hibernate-orm.schema-management.strategy", SCHEMA_MANAGEMENT_STRATEGY)
.overrideConfigKey("quarkus.datasource.reactive", "true")
.overrideConfigKey("quarkus.datasource.db-kind", POSTGRES_KIND)
.overrideConfigKey("quarkus.datasource.username", USERNAME_PWD)
.overrideConfigKey("quarkus.datasource.password", USERNAME_PWD)
.overrideConfigKey("quarkus.hibernate-orm.log.format-sql", "false")
.overrideConfigKey("quarkus.hibernate-orm.log.highlight-sql", "false")
.overrideConfigKey("quarkus.log.category.\"org.hibernate.SQL\".level", "DEBUG")
.setLogRecordPredicate(record -> "org.hibernate.SQL".equals(record.getLoggerName()))
.assertLogRecords(
records -> // When using both blocking and reactive we don't want migration to be applied twice
assertThat(records.stream().map(l -> l.getMessage()))
.containsOnlyOnce("create sequence hero_SEQ start with 1 increment by 50"));
@Test
@RunOnVertxContext
public void testReactive(UniAsserter asserter) {
testReactiveWorks(asserter);
}
@Test
public void testBlocking() {
testBlockingWorks();
}
}
| ORMReactiveCompatbilityDefaultBothUnitTest |
java | elastic__elasticsearch | x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TextEmbeddingQueryIT.java | {
"start": 2024,
"end": 28438
} | class ____ extends PyTorchModelRestTestCase {
static final String BASE_64_ENCODED_MODEL = "UEsDBAAACAgAAAAAAAAAAAAAAAAAAAAAAAAUAA4Ac2ltcGxlbW9kZWwvZGF0YS5wa2xGQgoAWl"
+ "paWlpaWlpaWoACY19fdG9yY2hfXwpUaW55VGV4dEVtYmVkZGluZwpxACmBfShYCAAAAHRy"
+ "YWluaW5ncQGJWBYAAABfaXNfZnVsbF9iYWNrd2FyZF9ob29rcQJOdWJxAy5QSwcIsFTQsF"
+ "gAAABYAAAAUEsDBBQACAgIAAAAAAAAAAAAAAAAAAAAAAAdAB0Ac2ltcGxlbW9kZWwvY29k"
+ "ZS9fX3RvcmNoX18ucHlGQhkAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWoWPMWvDMBCF9/"
+ "yKGy1IQ7Ia0q1j2yWbMYdsnWphWWd0Em3+fS3bBEopXd99j/dd77UI3Fy43+grvUwdGePC"
+ "R/XKJntS9QEAcdZRT5QoCiJcoWnXtMvW/ohS1C4sZaihY/YFcoI2e4+d7sdPHQ0OzONyf5"
+ "+T46B9U8DSNWTBcixMJeRtvQwkjv2AePpld1wKAC7MOaEzUsONgnDc4sQjBUz3mbbbY2qD"
+ "2usbB9rQmcWV47/gOiVIReAvUsHT8y5S7yKL/mnSIWuPQmSqLRm0DJWkWD0eUEqtjUgpx7"
+ "AXow6mai5HuJzPrTp8A1BLBwiD/6yJ6gAAAKkBAABQSwMEFAAICAgAAAAAAAAAAAAAAAAA"
+ "AAAAACcAQQBzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weS5kZWJ1Z19wa2xGQj0AWl"
+ "paWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpa"
+ "WlpaWlpaWo2Qz0rDQBDGk/5RmjfwlmMCbWivBZ9gWL0IFkRCdLcmmOwmuxu0N08O3r2rCO"
+ "rdx9CDgm/hWUUQMdugzUk6LCwzv++bGeak5YE1saoorNgCCwsbzFc9sm1PvivQo2zqToU8"
+ "iiT1FEunfadXRcLzUocJVWN3i3ElZF3W4pDxUM9yVrPNXCeCR+lOLdp1190NwVktzoVKDF"
+ "5COh+nQpbtsX+0/tjpOWYJuR8HMuJUZEEW8TJKQ8UY9eJIxZ7S0vvb3vf9yiCZLiV3Fz5v"
+ "1HdHw6HvFK3JWnUElWR5ygbz8TThB4NMUJYG+axowyoWHbiHBwQbSWbHHXiEJ4QWkmOTPM"
+ "MLQhvJaZOgSX49Z3a8uPq5Ia/whtBBctEkl4a8wwdCF8lVk1wb8glfCCtIbprkttntrkF0"
+ "0Q1+AFBLBwi4BIswOAEAAP0BAABQSwMEAAAICAAAAAAAAAAAAAAAAAAAAAAAABkAQQBzaW"
+ "1wbGVtb2RlbC9jb25zdGFudHMucGtsRkI9AFpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpa"
+ "WlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlqAAikuUEsHCG0vCVcEAAAABA"
+ "AAAFBLAwQAAAgIAAAAAAAAAAAAAAAAAAAAAAAAEwA7AHNpbXBsZW1vZGVsL3ZlcnNpb25G"
+ "QjcAWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWlpaWl"
+ "paWlpaWjMKUEsHCNGeZ1UCAAAAAgAAAFBLAQIAAAAACAgAAAAAAACwVNCwWAAAAFgAAAAU"
+ "AAAAAAAAAAAAAAAAAAAAAABzaW1wbGVtb2RlbC9kYXRhLnBrbFBLAQIAABQACAgIAAAAAA"
+ "CD/6yJ6gAAAKkBAAAdAAAAAAAAAAAAAAAAAKgAAABzaW1wbGVtb2RlbC9jb2RlL19fdG9y"
+ "Y2hfXy5weVBLAQIAABQACAgIAAAAAAC4BIswOAEAAP0BAAAnAAAAAAAAAAAAAAAAAPoBAA"
+ "BzaW1wbGVtb2RlbC9jb2RlL19fdG9yY2hfXy5weS5kZWJ1Z19wa2xQSwECAAAAAAgIAAAA"
+ "AAAAbS8JVwQAAAAEAAAAGQAAAAAAAAAAAAAAAADIAwAAc2ltcGxlbW9kZWwvY29uc3Rhbn"
+ "RzLnBrbFBLAQIAAAAACAgAAAAAAADRnmdVAgAAAAIAAAATAAAAAAAAAAAAAAAAAFQEAABz"
+ "aW1wbGVtb2RlbC92ZXJzaW9uUEsGBiwAAAAAAAAAHgMtAAAAAAAAAAAABQAAAAAAAAAFAA"
+ "AAAAAAAGoBAAAAAAAA0gQAAAAAAABQSwYHAAAAADwGAAAAAAAAAQAAAFBLBQYAAAAABQAFAGoBAADSBAAAAAA=";
static final long RAW_MODEL_SIZE; // size of the model before base64 encoding
static {
RAW_MODEL_SIZE = Base64.getDecoder().decode(BASE_64_ENCODED_MODEL).length;
}
private static final String TOP_LEVEL_KNN_TEMPLATE = """
{
"_source": {
"exclude_vectors": false
},
"knn": {
"field": "%s",
"k": 5,
"num_candidates": 10,
"query_vector_builder": {
"text_embedding": {
"model_id": "%s",
"model_text": "%s"
}
}
}
}""";
private static final String TOP_LEVEL_KNN_FILTER_TEMPLATE = """
{
"_source": {
"exclude_vectors": false
},
"knn": {
"field": "%s",
"k": 5,
"num_candidates": 10,
"filter": %s,
"query_vector_builder": {
"text_embedding": {
"model_id": "%s",
"model_text": "%s"
}
}
}
}""";
private static final String TOP_LEVEL_KNN_HYBRID_ALL = """
{
"_source": {
"exclude_vectors": false
},
"knn": {
"field": "embedding",
"k": 3,
"num_candidates": 10,
"boost": 10.0,
"query_vector_builder": {
"text_embedding": {
"model_id": "%s",
"model_text": "my words"
}
}
},
"query": {"match_all": {}},
"size": 7
}""";
private static final String TOP_LEVEL_KNN_HYBRID_MATCH = """
{
"_source": {
"exclude_vectors": false
},
"knn": {
"field": "embedding",
"k": 3,
"num_candidates": 10,
"boost": 10.0,
"query_vector_builder": {
"text_embedding": {
"model_id": "%s",
"model_text": "my words"
}
}
},
"query": {"match": {"source_text": {"query": "apricot unrelated"}}}
}""";
private static final String QUERY_DSL_KNN_TEMPLATE = """
{
"_source": {
"exclude_vectors": false
},
"query": {
"knn" : {
"field": "%s",
"num_candidates": 10,
"query_vector_builder": {
"text_embedding": {
"model_id": "%s",
"model_text": "%s"
}
}
}
}
}""";
private static final String QUERY_DSL_KNN_FILTER_TEMPLATE = """
{
"_source": {
"exclude_vectors": false
},
"query": {
"knn" : {
"field": "%s",
"num_candidates": 10,
"filter": %s,
"query_vector_builder": {
"text_embedding": {
"model_id": "%s",
"model_text": "%s"
}
}
}
}
}""";
private static final String QUERY_DSL_KNN_HYBRID_ALL = """
{
"_source": {
"exclude_vectors": false
},
"query": {
"bool": {
"should": [
{
"match_all": {}
},
{
"knn": {
"field": "embedding",
"query_vector_builder": {
"text_embedding": {
"model_id": "%s",
"model_text": "my words"
}
},
"num_candidates": 10,
"boost": 10
}
}
]
}
},
"size": 7
}""";
private static final String QUERY_DSL_KNN_HYBRID_MATCH = """
{
"_source": {
"exclude_vectors": false
},
"query": {
"bool": {
"should": [
{
"match": {
"source_text": {
"query": "apricot unrelated",
"boost": 1
}
}
},
{
"knn": {
"field": "embedding",
"query_vector_builder": {
"text_embedding": {
"model_id": "%s",
"model_text": "my words"
}
},
"num_candidates": 10,
"boost": 10
}
}
]
}
},
"size": 7
}""";
@SuppressWarnings("unchecked")
public void testTextEmbeddingQuery() throws IOException {
String modelId = "text-embedding-test";
String indexName = modelId + "-index";
createTextEmbeddingModel(modelId);
putModelDefinition(modelId, BASE_64_ENCODED_MODEL, RAW_MODEL_SIZE);
putVocabulary(
List.of("these", "are", "my", "words", "the", "washing", "machine", "is", "leaking", "octopus", "comforter", "smells"),
modelId
);
startDeployment(modelId);
List<String> inputs = List.of(
"my words",
"the machine is leaking",
"washing machine",
"these are my words",
"the octopus comforter smells",
"the octopus comforter is leaking",
"washing machine smells"
);
List<String> filters = List.of("foo", "bar", "baz", "foo", "bar", "baz", "foo");
List<List<Double>> embeddings = new ArrayList<>();
// Generate the text embeddings via the inference API
// then index them for search
for (var input : inputs) {
Response inference = infer(input, modelId);
List<Map<String, Object>> responseMap = (List<Map<String, Object>>) entityAsMap(inference).get("inference_results");
Map<String, Object> inferenceResult = responseMap.get(0);
List<Double> embedding = (List<Double>) inferenceResult.get("predicted_value");
embeddings.add(embedding);
}
// index dense vectors
createVectorSearchIndex(indexName);
bulkIndexDocs(inputs, filters, embeddings, indexName);
forceMergeIndex(indexName);
// Test text embedding search against the indexed vectors
for (int i = 0; i < 5; i++) {
int randomInput = randomIntBetween(0, inputs.size() - 1);
for (String template : new String[] { TOP_LEVEL_KNN_TEMPLATE, QUERY_DSL_KNN_TEMPLATE }) {
var textEmbeddingSearchResponse = textEmbeddingSearch(indexName, inputs.get(randomInput), modelId, "embedding", template);
assertOkWithErrorMessage(textEmbeddingSearchResponse);
Map<String, Object> responseMap = responseAsMap(textEmbeddingSearchResponse);
List<Map<String, Object>> hits = (List<Map<String, Object>>) MapHelper.dig("hits.hits", responseMap);
Map<String, Object> topHit = hits.get(0);
String sourceText = (String) MapHelper.dig("_source.source_text", topHit);
assertEquals(inputs.get(randomInput), sourceText);
}
}
// Test text embedding search with filters
{
for (String template : new String[] { TOP_LEVEL_KNN_FILTER_TEMPLATE, QUERY_DSL_KNN_FILTER_TEMPLATE }) {
var textEmbeddingSearchResponse = textEmbeddingSearchWithTermsFilter(
indexName,
inputs.get(0),
"foo",
modelId,
"embedding",
template
);
assertOkWithErrorMessage(textEmbeddingSearchResponse);
Map<String, Object> responseMap = responseAsMap(textEmbeddingSearchResponse);
List<Map<String, Object>> hits = (List<Map<String, Object>>) MapHelper.dig("hits.hits", responseMap);
assertThat(hits, hasSize(3));
for (var hit : hits) {
String filter = (String) MapHelper.dig("_source.filter_field", hit);
assertEquals("foo", filter);
}
}
}
{
for (String template : new String[] { TOP_LEVEL_KNN_FILTER_TEMPLATE, QUERY_DSL_KNN_FILTER_TEMPLATE }) {
var textEmbeddingSearchResponse = textEmbeddingSearchWithTermsFilter(
indexName,
inputs.get(2),
"baz",
modelId,
"embedding",
template
);
assertOkWithErrorMessage(textEmbeddingSearchResponse);
Map<String, Object> responseMap = responseAsMap(textEmbeddingSearchResponse);
List<Map<String, Object>> hits = (List<Map<String, Object>>) MapHelper.dig("hits.hits", responseMap);
assertThat(hits, hasSize(2));
for (var hit : hits) {
String filter = (String) MapHelper.dig("_source.filter_field", hit);
assertEquals("baz", filter);
}
}
}
}
@SuppressWarnings("unchecked")
public void testHybridSearch() throws IOException {
String modelId = "hybrid-semantic-search-test";
String indexName = modelId + "-index";
createTextEmbeddingModel(modelId);
putModelDefinition(modelId, BASE_64_ENCODED_MODEL, RAW_MODEL_SIZE);
putVocabulary(
List.of("these", "are", "my", "words", "the", "washing", "machine", "is", "leaking", "octopus", "comforter", "smells"),
modelId
);
startDeployment(modelId);
List<String> inputs = List.of(
"my words",
"the machine is leaking",
"washing machine",
"these are my words",
"the octopus comforter smells",
"the octopus comforter is leaking",
"washing machine smells"
);
List<String> filters = List.of("foo", "bar", "baz", "foo", "bar", "baz", "foo");
List<List<Double>> embeddings = new ArrayList<>();
// Generate the text embeddings via the inference API
// then index them for search
for (var input : inputs) {
Response inference = infer(input, modelId);
List<Map<String, Object>> responseMap = (List<Map<String, Object>>) entityAsMap(inference).get("inference_results");
Map<String, Object> inferenceResult = responseMap.get(0);
List<Double> embedding = (List<Double>) inferenceResult.get("predicted_value");
embeddings.add(embedding);
}
// index dense vectors
createVectorSearchIndex(indexName);
bulkIndexDocs(inputs, filters, embeddings, indexName);
forceMergeIndex(indexName);
{
for (String template : new String[] { TOP_LEVEL_KNN_HYBRID_ALL, QUERY_DSL_KNN_HYBRID_ALL }) {
// combined query should return size documents where size > k
Request request = new Request("GET", indexName + "/_search");
request.setJsonEntity(Strings.format(template, modelId));
var semanticSearchResponse = client().performRequest(request);
assertOkWithErrorMessage(semanticSearchResponse);
Map<String, Object> responseMap = responseAsMap(semanticSearchResponse);
int hitCount = (Integer) MapHelper.dig("hits.total.value", responseMap);
assertEquals(7, hitCount);
}
}
{
for (String template : new String[] { TOP_LEVEL_KNN_HYBRID_MATCH, QUERY_DSL_KNN_HYBRID_MATCH }) {
// boost the knn score, as the query is an exact match the unboosted
// score should be close to 1.0. Use an unrelated query so scores are
// not combined
Request request = new Request("GET", indexName + "/_search");
request.setJsonEntity(Strings.format(template, modelId));
var semanticSearchResponse = client().performRequest(request);
assertOkWithErrorMessage(semanticSearchResponse);
Map<String, Object> responseMap = responseAsMap(semanticSearchResponse);
List<Map<String, Object>> hits = (List<Map<String, Object>>) MapHelper.dig("hits.hits", responseMap);
boolean found = false;
for (var hit : hits) {
String source = (String) MapHelper.dig("_source.source_text", hit);
if (source.equals("my words")) {
assertThat((Double) MapHelper.dig("_score", hit), closeTo(10.0, 0.01));
found = true;
}
}
assertTrue("should have found hit for string 'my words'", found);
}
}
}
public void testSearchWithMissingModel() throws IOException {
String modelId = "missing-model";
String indexName = modelId + "-index";
createVectorSearchIndex(indexName);
for (String template : new String[] { TOP_LEVEL_KNN_TEMPLATE, QUERY_DSL_KNN_TEMPLATE }) {
var e = expectThrows(
ResponseException.class,
() -> textEmbeddingSearch(indexName, "the machine is leaking", modelId, "embedding", template)
);
assertThat(e.getMessage(), containsString("[missing-model] is not an inference service model or a deployed ml model"));
}
}
@SuppressWarnings("unchecked")
public void testModelWithPrefixStrings() throws IOException {
String modelId = "model-with-prefix-strings";
String ingestPrefix = "passage: ";
String searchPrefix = "query: ";
createTextEmbeddingModelWithPrefixString(modelId, searchPrefix, ingestPrefix);
putModelDefinition(modelId, BASE_64_ENCODED_MODEL, RAW_MODEL_SIZE);
putVocabulary(
List.of(
"these",
"are",
"my",
"words",
"the",
"washing",
"machine",
"is",
"leaking",
"octopus",
"comforter",
"smells",
ingestPrefix,
searchPrefix
),
modelId
);
startDeployment(modelId);
String pipelineDefinition = Strings.format("""
{
"processors": [
{
"inference": {
"model_id": "%s",
"input_output": {
"input_field": "source_text",
"output_field": "embedding"
},
"inference_config": {
"text_embedding": {
}
}
}
}
]
}
""", modelId);
String docSource = """
[
{"_source": {
"source_text": "the washing machine is leaking"}}
]
""";
// At ingest the prefix is automatically added
var simulateResponse = simulatePipeline(pipelineDefinition, docSource);
var simulateResponseMap = entityAsMap(simulateResponse);
var simulatedDocs = (List<Map<String, Object>>) simulateResponseMap.get("docs");
List<Double> pipelineEmbedding = (List<Double>) MapHelper.dig("doc._source.embedding", simulatedDocs.get(0));
assertNotNull(simulateResponseMap.toString(), pipelineEmbedding);
// Create the embedding for the same input text used in
// simulate pipeline ingest. Here the ingest prefix is
// manually added, the resulting embeddings should be
// the same.
var inferenceResponse = infer(ingestPrefix + "the washing machine is leaking", modelId);
Map<String, Object> inferenceResult = ((List<Map<String, Object>>) entityAsMap(inferenceResponse).get("inference_results")).get(0);
List<Double> inferenceEmbedding = (List<Double>) inferenceResult.get("predicted_value");
assertNotNull(inferenceResult.toString(), inferenceEmbedding);
// embeddings are exactly equal
assertEquals(inferenceEmbedding, pipelineEmbedding);
// Now check the search prefix
List<String> inputs = List.of(
searchPrefix + "my words",
"the machine is leaking",
"washing machine",
"these are my words",
"the octopus comforter smells"
);
List<String> filters = List.of("foo", "bar", "baz", "foo", "bar");
List<List<Double>> embeddings = new ArrayList<>();
// Generate the text embeddings via the inference API
// then index them for search
for (var input : inputs) {
Response inference = infer(input, modelId);
List<Map<String, Object>> responseMap = (List<Map<String, Object>>) entityAsMap(inference).get("inference_results");
List<Double> embedding = (List<Double>) responseMap.get(0).get("predicted_value");
embeddings.add(embedding);
}
// index dense vectors
String indexName = modelId + "_index";
createVectorSearchIndex(indexName);
bulkIndexDocs(inputs, filters, embeddings, indexName);
forceMergeIndex(indexName);
for (String template : new String[] { TOP_LEVEL_KNN_TEMPLATE, QUERY_DSL_KNN_TEMPLATE }) {
// the input "my words" should be prefixed with searchPrefix
var textEmbeddingSearchResponse = textEmbeddingSearch(indexName, "my words", modelId, "embedding", template);
assertOkWithErrorMessage(textEmbeddingSearchResponse);
Map<String, Object> responseMap = responseAsMap(textEmbeddingSearchResponse);
List<Map<String, Object>> hits = (List<Map<String, Object>>) MapHelper.dig("hits.hits", responseMap);
Map<String, Object> topHit = hits.get(0);
String sourceText = (String) MapHelper.dig("_source.source_text", topHit);
// The top hit should have the search prefix
assertEquals(searchPrefix + "my words", sourceText);
List<Double> foundEmbedding = (List<Double>) MapHelper.dig("_source.embedding", topHit);
var expectedEmbeddings = embeddings.get(0);
assertThat(foundEmbedding.size(), equalTo(expectedEmbeddings.size()));
for (int i = 0; i < foundEmbedding.size(); i++) {
assertEquals(expectedEmbeddings.get(i), foundEmbedding.get(i), 0.01f);
}
}
}
protected Response textEmbeddingSearch(
String index,
String modelText,
String modelId,
String denseVectorFieldName,
String queryTemplate
) throws IOException {
Request request = new Request("GET", index + "/_search?error_trace=true");
request.setJsonEntity(Strings.format(queryTemplate, denseVectorFieldName, modelId, modelText));
return client().performRequest(request);
}
protected Response textEmbeddingSearchWithTermsFilter(
String index,
String modelText,
String filter,
String modelId,
String denseVectorFieldName,
String queryTemplate
) throws IOException {
Request request = new Request("GET", index + "/_search?error_trace=true");
String termsFilter = Strings.format("""
{"term": {"filter_field": "%s"}}
""", filter);
request.setJsonEntity(Strings.format(queryTemplate, denseVectorFieldName, termsFilter, modelId, modelText));
return client().performRequest(request);
}
private void createVectorSearchIndex(String indexName) throws IOException {
Request createIndex = new Request("PUT", "/" + indexName);
createIndex.setJsonEntity("""
{
"mappings": {
"properties": {
"source_text": {
"type": "text"
},
"filter_field": {
"type": "keyword"
},
"embedding": {
"type": "dense_vector",
"dims": 100,
"index": true,
"similarity": "cosine"
}
}
}
}""");
var response = client().performRequest(createIndex);
assertOkWithErrorMessage(response);
}
private void bulkIndexDocs(List<String> sourceText, List<String> filters, List<List<Double>> embeddings, String indexName)
throws IOException {
String createAction = "{\"create\": {\"_index\": \"" + indexName + "\"}}\n";
StringBuilder bulkBuilder = new StringBuilder();
for (int i = 0; i < sourceText.size(); i++) {
bulkBuilder.append(createAction);
bulkBuilder.append("{\"source_text\": \"")
.append(sourceText.get(i))
.append("\", \"filter_field\":\"")
.append(filters.get(i))
.append("\", \"embedding\":")
.append(embeddings.get(i))
.append("}\n");
}
Request bulkRequest = new Request("POST", "/_bulk");
bulkRequest.setJsonEntity(bulkBuilder.toString());
bulkRequest.addParameter("refresh", "true");
var bulkResponse = client().performRequest(bulkRequest);
assertOkWithErrorMessage(bulkResponse);
}
protected void createTextEmbeddingModelWithPrefixString(String modelId, String searchPrefix, String ingestPrefix) throws IOException {
Request request = new Request("PUT", "/_ml/trained_models/" + modelId);
request.setJsonEntity(Strings.format("""
{
"description": "a text embedding model",
"model_type": "pytorch",
"inference_config": {
"text_embedding": {
"tokenization": {
"bert": {
"with_special_tokens": false
}
}
}
},
"prefix_strings": {
"search": "%s",
"ingest": "%s"
}
}""", searchPrefix, ingestPrefix));
client().performRequest(request);
}
}
| TextEmbeddingQueryIT |
java | quarkusio__quarkus | extensions/arc/runtime/src/main/java/io/quarkus/arc/runtime/ConfigStaticInitCheckInterceptor.java | {
"start": 1059,
"end": 3841
} | class ____ {
private static final Logger LOG = Logger.getLogger(ConfigStaticInitCheckInterceptor.class);
@Inject
ConfigStaticInitValues configValues;
@AroundInvoke
Object aroundInvoke(InvocationContext context) throws Exception {
InjectionPoint injectionPoint = null;
for (Object parameter : context.getParameters()) {
if (parameter instanceof InjectionPoint) {
injectionPoint = (InjectionPoint) parameter;
break;
}
}
recordConfigValue(injectionPoint, configValues);
return context.proceed();
}
static void recordConfigValue(InjectionPoint injectionPoint, ConfigStaticInitValues configValues) {
if (ExecutionMode.current() != ExecutionMode.STATIC_INIT) {
// No-op for any other execution mode
return;
}
if (injectionPoint == null) {
throw new IllegalStateException("No current injection point found");
}
ConfigProperty configProperty = null;
for (Annotation qualifier : injectionPoint.getQualifiers()) {
if (qualifier instanceof ConfigProperty) {
configProperty = ((ConfigProperty) qualifier);
}
}
if (configProperty == null
|| injectionPoint.getAnnotated().isAnnotationPresent(StaticInitSafe.class)) {
return;
}
String propertyName = configProperty.name();
SmallRyeConfig config = ConfigProvider.getConfig().unwrap(SmallRyeConfig.class);
String value = config.getConfigValue(propertyName).getValue();
if (value == null) {
value = getDefaultValue(injectionPoint, configProperty);
}
if (value == null) {
LOG.debugf("No config value found for %s - recording <null> value", propertyName);
}
if (configValues == null) {
configValues = Arc.requireContainer().instance(ConfigStaticInitValues.class).get();
}
configValues.recordConfigValue(injectionPoint, propertyName, value);
}
private static String getDefaultValue(InjectionPoint injectionPoint, ConfigProperty configProperty) {
String str = configProperty.defaultValue();
if (!ConfigProperty.UNCONFIGURED_VALUE.equals(str)) {
return str;
}
if (injectionPoint.getType() instanceof Class && ((Class<?>) injectionPoint.getType()).isPrimitive()) {
if (injectionPoint.getType() == char.class) {
return null;
} else if (injectionPoint.getType() == boolean.class) {
return "false";
} else {
return "0";
}
}
return null;
}
}
| ConfigStaticInitCheckInterceptor |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/hamlet2/HamletSpec.java | {
"start": 44117,
"end": 44210
} | interface ____ extends Attrs, _Child {
}
/**
*
*/
@Element(endTag=false)
public | HR |
java | elastic__elasticsearch | x-pack/qa/multi-project/core-rest-tests-with-multiple-projects/src/yamlRestTest/java/org/elasticsearch/multiproject/test/CoreWithMultipleProjectsClientYamlTestSuiteIT.java | {
"start": 874,
"end": 2632
} | class ____ extends MultipleProjectsClientYamlSuiteTestCase {
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.module("constant-keyword")
.module("mapper-extras")
.module("rank-eval")
.module("x-pack-ilm")
.module("x-pack-stack")
.module("ingest-common")
.module("reindex")
.module("wildcard")
.module("analysis-common")
.module("health-shards-availability")
.module("data-streams")
.module("test-multi-project")
.module("lang-mustache")
.module("parent-join")
.module("streams")
.setting("test.multi_project.enabled", "true")
.setting("xpack.security.enabled", "true")
.setting("xpack.watcher.enabled", "false")
.setting("xpack.ml.enabled", "false")
.setting("xpack.license.self_generated.type", "trial")
.setting("xpack.security.autoconfiguration.enabled", "false")
// disable ILM history, since it disturbs tests using _all
.setting("indices.lifecycle.history_index_enabled", "false")
.user(USER, PASS)
.systemProperty("es.queryable_built_in_roles_enabled", "false")
.feature(FeatureFlag.TIME_SERIES_MODE)
.nodes(2)
.build();
public CoreWithMultipleProjectsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}
| CoreWithMultipleProjectsClientYamlTestSuiteIT |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/char2darray/Char2DArrayAssert_isEmpty_Test.java | {
"start": 941,
"end": 1377
} | class ____ extends Char2DArrayAssertBaseTest {
@Override
protected Char2DArrayAssert invoke_api_method() {
assertions.isEmpty();
return null;
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertEmpty(getInfo(assertions), getActual(assertions));
}
@Override
@Test
public void should_return_this() {
// Disable this test because isEmpty is void
}
}
| Char2DArrayAssert_isEmpty_Test |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterQuotaUpdateService.java | {
"start": 2049,
"end": 10634
} | class ____ extends PeriodicService {
private static final Logger LOG =
LoggerFactory.getLogger(RouterQuotaUpdateService.class);
private MountTableStore mountTableStore;
private RouterRpcServer rpcServer;
/** Router using this Service. */
private final Router router;
/** Router Quota manager. */
private RouterQuotaManager quotaManager;
public RouterQuotaUpdateService(final Router router) throws IOException {
super(RouterQuotaUpdateService.class.getName());
this.router = router;
this.rpcServer = router.getRpcServer();
this.quotaManager = router.getQuotaManager();
if (this.quotaManager == null) {
throw new IOException("Router quota manager is not initialized.");
}
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
this.setIntervalMs(conf.getTimeDuration(
RBFConfigKeys.DFS_ROUTER_QUOTA_CACHE_UPDATE_INTERVAL,
RBFConfigKeys.DFS_ROUTER_QUOTA_CACHE_UPDATE_INTERVAL_DEFAULT,
TimeUnit.MILLISECONDS));
super.serviceInit(conf);
}
@Override
protected void periodicInvoke() {
LOG.debug("Start to update quota cache.");
try {
List<MountTable> mountTables = getQuotaSetMountTables();
Map<RemoteLocation, QuotaUsage> remoteQuotaUsage = new HashMap<>();
for (MountTable entry : mountTables) {
String src = entry.getSourcePath();
RouterQuotaUsage oldQuota = entry.getQuota();
long nsQuota = oldQuota.getQuota();
long ssQuota = oldQuota.getSpaceQuota();
long[] typeQuota = new long[StorageType.values().length];
Quota.eachByStorageType(
t -> typeQuota[t.ordinal()] = oldQuota.getTypeQuota(t));
QuotaUsage currentQuotaUsage = null;
// Check whether destination path exists in filesystem. When the
// mtime is zero, the destination is not present and reset the usage.
// This is because mount table does not have mtime.
// For other mount entry get current quota usage
HdfsFileStatus ret = this.rpcServer.getFileInfo(src);
if (rpcServer.isAsync()) {
ret = syncReturn(HdfsFileStatus.class);
}
if (ret == null || ret.getModificationTime() == 0) {
long[] zeroConsume = new long[StorageType.values().length];
currentQuotaUsage =
new RouterQuotaUsage.Builder().fileAndDirectoryCount(0)
.quota(nsQuota).spaceConsumed(0).spaceQuota(ssQuota)
.typeConsumed(zeroConsume)
.typeQuota(typeQuota).build();
} else {
// Call RouterRpcServer#getQuotaUsage for getting current quota usage.
// If any exception occurs catch it and proceed with other entries.
try {
Quota quotaModule = this.rpcServer.getQuotaModule();
Map<RemoteLocation, QuotaUsage> usageMap =
quotaModule.getEachQuotaUsage(src);
if (this.rpcServer.isAsync()) {
usageMap = (Map<RemoteLocation, QuotaUsage>)syncReturn(Map.class);
}
currentQuotaUsage = quotaModule.aggregateQuota(src, usageMap);
remoteQuotaUsage.putAll(usageMap);
} catch (IOException ioe) {
LOG.error("Unable to get quota usage for " + src, ioe);
continue;
}
}
RouterQuotaUsage newQuota = generateNewQuota(oldQuota,
currentQuotaUsage);
this.quotaManager.put(src, newQuota);
entry.setQuota(newQuota);
}
// Fix inconsistent quota.
for (Entry<RemoteLocation, QuotaUsage> en : remoteQuotaUsage
.entrySet()) {
RemoteLocation remoteLocation = en.getKey();
QuotaUsage currentQuota = en.getValue();
fixGlobalQuota(remoteLocation, currentQuota);
}
} catch (IOException e) {
LOG.error("Quota cache updated error.", e);
} catch (Exception e) {
LOG.error(e.toString());
}
}
private void fixGlobalQuota(RemoteLocation location, QuotaUsage remoteQuota)
throws IOException {
QuotaUsage gQuota =
this.rpcServer.getQuotaModule().getGlobalQuota(location.getSrc());
if (remoteQuota.getQuota() != gQuota.getQuota()
|| remoteQuota.getSpaceQuota() != gQuota.getSpaceQuota()) {
this.rpcServer.getQuotaModule()
.setQuotaInternal(location.getSrc(), Arrays.asList(location),
gQuota.getQuota(), gQuota.getSpaceQuota(), null);
LOG.info("[Fix Quota] src={} dst={} oldQuota={}/{} newQuota={}/{}",
location.getSrc(), location, remoteQuota.getQuota(),
remoteQuota.getSpaceQuota(), gQuota.getQuota(),
gQuota.getSpaceQuota());
}
for (StorageType t : StorageType.values()) {
if (remoteQuota.getTypeQuota(t) != gQuota.getTypeQuota(t)) {
this.rpcServer.getQuotaModule()
.setQuotaInternal(location.getSrc(), Arrays.asList(location),
HdfsConstants.QUOTA_DONT_SET, gQuota.getTypeQuota(t), t);
LOG.info("[Fix Quota] src={} dst={} type={} oldQuota={} newQuota={}",
location.getSrc(), location, t, remoteQuota.getTypeQuota(t),
gQuota.getTypeQuota(t));
}
}
}
/**
* Get mount table store management interface.
* @return MountTableStore instance.
* @throws IOException
*/
private MountTableStore getMountTableStore() throws IOException {
if (this.mountTableStore == null) {
this.mountTableStore = router.getStateStore().getRegisteredRecordStore(
MountTableStore.class);
if (this.mountTableStore == null) {
throw new IOException("Mount table state store is not available.");
}
}
return this.mountTableStore;
}
/**
* Get all the existing mount tables.
* @return List of mount tables.
* @throws IOException
*/
private List<MountTable> getMountTableEntries() throws IOException {
// scan mount tables from root path
GetMountTableEntriesRequest getRequest = GetMountTableEntriesRequest
.newInstance("/");
GetMountTableEntriesResponse getResponse = getMountTableStore()
.getMountTableEntries(getRequest);
return getResponse.getEntries();
}
/**
* Get mount tables which quota was set.
* During this time, the quota usage cache will also be updated by
* quota manager:
* 1. Stale paths (entries) will be removed.
* 2. Existing entries will be overridden and updated.
* @return List of mount tables which quota was set.
* @throws IOException
*/
private List<MountTable> getQuotaSetMountTables() throws IOException {
List<MountTable> mountTables = getMountTableEntries();
Set<String> allPaths = this.quotaManager.getAll();
Set<String> stalePaths = new HashSet<>(allPaths);
List<MountTable> neededMountTables = new LinkedList<>();
for (MountTable entry : mountTables) {
// select mount tables which is quota set
if (isQuotaSet(entry)) {
neededMountTables.add(entry);
}
// update mount table entries info in quota cache
String src = entry.getSourcePath();
this.quotaManager.updateQuota(src, entry.getQuota());
stalePaths.remove(src);
}
// remove stale paths that currently cached
for (String stalePath : stalePaths) {
this.quotaManager.remove(stalePath);
}
return neededMountTables;
}
/**
* Check if the quota was set in given MountTable.
* @param mountTable Mount table entry.
*/
private boolean isQuotaSet(MountTable mountTable) {
if (mountTable != null) {
return this.quotaManager.isQuotaSet(mountTable.getQuota());
}
return false;
}
/**
* Generate a new quota based on old quota and current quota usage value.
* @param oldQuota Old quota stored in State Store.
* @param currentQuotaUsage Current quota usage value queried from
* subcluster.
* @return A new RouterQuotaUsage.
*/
private RouterQuotaUsage generateNewQuota(RouterQuotaUsage oldQuota,
QuotaUsage currentQuotaUsage) {
RouterQuotaUsage.Builder newQuotaBuilder = new RouterQuotaUsage.Builder()
.fileAndDirectoryCount(currentQuotaUsage.getFileAndDirectoryCount())
.quota(oldQuota.getQuota())
.spaceConsumed(currentQuotaUsage.getSpaceConsumed())
.spaceQuota(oldQuota.getSpaceQuota());
Quota.eachByStorageType(t -> {
newQuotaBuilder.typeQuota(t, oldQuota.getTypeQuota(t));
newQuotaBuilder.typeConsumed(t, currentQuotaUsage.getTypeConsumed(t));
});
return newQuotaBuilder.build();
}
}
| RouterQuotaUpdateService |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafeCheckerTest.java | {
"start": 1421,
"end": 2148
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(ThreadSafeChecker.class, getClass());
private final BugCheckerRefactoringTestHelper refactoringHelper =
BugCheckerRefactoringTestHelper.newInstance(ThreadSafeChecker.class, getClass());
@Test
public void basicFields() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.collect.ImmutableList;
import com.google.errorprone.annotations.ThreadSafe;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
@ThreadSafe
| ThreadSafeCheckerTest |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/oscar/visitor/OscarStatementParser.java | {
"start": 1351,
"end": 22583
} | class ____ extends SQLStatementParser {
public static final String TIME_ZONE = "TIME ZONE";
public static final String TIME = "TIME";
public static final String LOCAL = "LOCAL";
public OscarStatementParser(OscarExprParser parser) {
super(parser);
}
public OscarStatementParser(String sql) {
super(new OscarExprParser(sql));
}
public OscarStatementParser(String sql, SQLParserFeature... features) {
super(new OscarExprParser(sql, features));
}
public OscarStatementParser(Lexer lexer) {
super(new OscarExprParser(lexer));
}
public OscarSelectParser createSQLSelectParser() {
return new OscarSelectParser(this.exprParser, selectListCache);
}
public SQLUpdateStatement parseUpdateStatement() {
accept(Token.UPDATE);
OscarUpdateStatement updateStatement = new OscarUpdateStatement();
SQLSelectParser selectParser = this.exprParser.createSelectParser();
SQLTableSource tableSource = selectParser.parseTableSource();
updateStatement.setTableSource(tableSource);
parseUpdateSet(updateStatement);
if (lexer.token() == Token.FROM) {
lexer.nextToken();
SQLTableSource from = selectParser.parseTableSource();
updateStatement.setFrom(from);
}
if (lexer.token() == (Token.WHERE)) {
lexer.nextToken();
updateStatement.setWhere(this.exprParser.expr());
}
if (lexer.token() == Token.RETURNING) {
lexer.nextToken();
for (;;) {
updateStatement.getReturning().add(this.exprParser.expr());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
}
return updateStatement;
}
public OscarInsertStatement parseInsert() {
OscarInsertStatement stmt = new OscarInsertStatement();
if (lexer.token() == Token.INSERT) {
lexer.nextToken();
accept(Token.INTO);
SQLName tableName = this.exprParser.name();
stmt.setTableName(tableName);
if (lexer.token() == Token.AS) {
lexer.nextToken();
stmt.setAlias(lexer.stringVal());
lexer.nextToken();
} else if (lexer.token() == Token.IDENTIFIER) {
stmt.setAlias(lexer.stringVal());
lexer.nextToken();
}
}
if (lexer.token() == Token.DEFAULT) {
lexer.nextToken();
accept(Token.VALUES);
stmt.setDefaultValues(true);
}
if (lexer.token() == (Token.LPAREN)) {
lexer.nextToken();
this.exprParser.exprList(stmt.getColumns(), stmt);
accept(Token.RPAREN);
}
if (lexer.token() == (Token.VALUES)) {
lexer.nextToken();
for (;;) {
accept(Token.LPAREN);
SQLInsertStatement.ValuesClause valuesCaluse = new SQLInsertStatement.ValuesClause();
this.exprParser.exprList(valuesCaluse.getValues(), valuesCaluse);
stmt.addValueCause(valuesCaluse);
accept(Token.RPAREN);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
} else if (lexer.token() == (Token.SELECT)) {
SQLQueryExpr queryExpr = (SQLQueryExpr) this.exprParser.expr();
stmt.setQuery(queryExpr.getSubQuery());
}
if (lexer.token() == Token.ON) {
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.CONFLICT)) {
lexer.nextToken();
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
List<SQLExpr> onConflictTarget = new ArrayList<SQLExpr>();
this.exprParser.exprList(onConflictTarget, stmt);
stmt.setOnConflictTarget(onConflictTarget);
accept(Token.RPAREN);
}
if (lexer.token() == Token.ON) {
lexer.nextToken();
accept(Token.CONSTRAINT);
SQLName constraintName = this.exprParser.name();
stmt.setOnConflictConstraint(constraintName);
}
if (lexer.token() == Token.WHERE) {
lexer.nextToken();
SQLExpr where = this.exprParser.expr();
stmt.setOnConflictWhere(where);
}
if (lexer.token() == Token.DO) {
lexer.nextToken();
if (lexer.identifierEquals(FnvHash.Constants.NOTHING)) {
lexer.nextToken();
stmt.setOnConflictDoNothing(true);
} else {
accept(Token.UPDATE);
accept(Token.SET);
for (;;) {
SQLUpdateSetItem item = this.exprParser.parseUpdateSetItem();
stmt.addConflicUpdateItem(item);
if (lexer.token() != Token.COMMA) {
break;
}
lexer.nextToken();
}
if (lexer.token() == Token.WHERE) {
lexer.nextToken();
SQLExpr where = this.exprParser.expr();
stmt.setOnConflictUpdateWhere(where);
}
}
}
}
}
if (lexer.token() == Token.RETURNING) {
lexer.nextToken();
SQLExpr returning = this.exprParser.expr();
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
SQLListExpr list = new SQLListExpr();
list.addItem(returning);
this.exprParser.exprList(list.getItems(), list);
returning = list;
}
stmt.setReturning(returning);
}
return stmt;
}
public OscarCreateSchemaStatement parseCreateSchema() {
accept(Token.CREATE);
accept(Token.SCHEMA);
OscarCreateSchemaStatement stmt = new OscarCreateSchemaStatement();
if (lexer.token() == Token.IF) {
lexer.nextToken();
accept(Token.NOT);
accept(Token.EXISTS);
stmt.setIfNotExists(true);
}
if (lexer.token() == Token.IDENTIFIER) {
if (lexer.identifierEquals("AUTHORIZATION")) {
lexer.nextToken();
stmt.setAuthorization(true);
SQLIdentifierExpr userName = (SQLIdentifierExpr) this.exprParser.expr();
stmt.setUserName(userName);
} else {
SQLIdentifierExpr schemaName = (SQLIdentifierExpr) this.exprParser.expr();
stmt.setSchemaName(schemaName);
if (lexer.identifierEquals("AUTHORIZATION")) {
lexer.nextToken();
stmt.setAuthorization(true);
SQLIdentifierExpr userName = (SQLIdentifierExpr) this.exprParser.expr();
stmt.setUserName(userName);
}
}
} else {
throw new ParserException("TODO " + lexer.info());
}
return stmt;
}
protected SQLStatement alterSchema() {
accept(Token.ALTER);
accept(Token.SCHEMA);
OscarAlterSchemaStatement stmt = new OscarAlterSchemaStatement();
stmt.setSchemaName(this.exprParser.identifier());
if (lexer.identifierEquals(FnvHash.Constants.RENAME)) {
lexer.nextToken();
accept(Token.TO);
stmt.setNewName(this.exprParser.identifier());
} else if (lexer.identifierEquals(FnvHash.Constants.OWNER)) {
lexer.nextToken();
accept(Token.TO);
stmt.setNewOwner(this.exprParser.identifier());
}
return stmt;
}
public OscarDropSchemaStatement parseDropSchema(boolean physical) {
OscarDropSchemaStatement stmt = new OscarDropSchemaStatement();
if (lexer.token() == Token.SCHEMA) {
lexer.nextToken();
} else {
accept(Token.DATABASE);
}
if (lexer.token() == Token.IF) {
lexer.nextToken();
accept(Token.EXISTS);
stmt.setIfExists(true);
}
SQLIdentifierExpr name = this.exprParser.identifier();
stmt.setSchemaName(name);
if (lexer.identifierEquals(FnvHash.Constants.RESTRICT)) {
lexer.nextToken();
stmt.setRestrict(true);
} else if (lexer.token() == Token.CASCADE || lexer.identifierEquals(FnvHash.Constants.CASCADE)) {
lexer.nextToken();
stmt.setCascade(true);
} else {
stmt.setCascade(false);
}
return stmt;
}
public OscarDeleteStatement parseDeleteStatement() {
lexer.nextToken();
OscarDeleteStatement deleteStatement = new OscarDeleteStatement();
if (lexer.token() == (Token.FROM)) {
lexer.nextToken();
}
if (lexer.token() == (Token.ONLY)) {
lexer.nextToken();
deleteStatement.setOnly(true);
}
SQLName tableName = exprParser.name();
deleteStatement.setTableName(tableName);
if (lexer.token() == Token.AS) {
accept(Token.AS);
}
if (lexer.token() == Token.IDENTIFIER) {
deleteStatement.setAlias(lexer.stringVal());
lexer.nextToken();
}
if (lexer.token() == Token.USING) {
lexer.nextToken();
SQLTableSource tableSource = createSQLSelectParser().parseTableSource();
deleteStatement.setUsing(tableSource);
}
if (lexer.token() == (Token.WHERE)) {
lexer.nextToken();
if (lexer.token() == Token.CURRENT) {
lexer.nextToken();
accept(Token.OF);
SQLName cursorName = this.exprParser.name();
SQLExpr where = new SQLCurrentOfCursorExpr(cursorName);
deleteStatement.setWhere(where);
} else {
SQLExpr where = this.exprParser.expr();
deleteStatement.setWhere(where);
}
}
if (lexer.token() == Token.RETURNING) {
lexer.nextToken();
accept(Token.STAR);
deleteStatement.setReturning(true);
}
return deleteStatement;
}
public boolean parseStatementListDialect(List<SQLStatement> statementList) {
switch (lexer.token()) {
case BEGIN:
case START: {
OscarStartTransactionStatement stmt = parseBegin();
statementList.add(stmt);
return true;
}
case WITH:
statementList.add(parseWith());
return true;
default:
if (lexer.identifierEquals(FnvHash.Constants.CONNECT)) {
SQLStatement stmt = parseConnectTo();
statementList.add(stmt);
return true;
}
break;
}
return false;
}
protected OscarStartTransactionStatement parseBegin() {
OscarStartTransactionStatement stmt = new OscarStartTransactionStatement();
if (lexer.token() == Token.START) {
lexer.nextToken();
acceptIdentifier("TRANSACTION");
} else {
accept(Token.BEGIN);
}
return stmt;
}
public SQLStatement parseConnectTo() {
acceptIdentifier("CONNECT");
accept(Token.TO);
OscarConnectToStatement stmt = new OscarConnectToStatement();
SQLName target = this.exprParser.name();
stmt.setTarget(target);
return stmt;
}
public OscarSelectStatement parseSelect() {
OscarSelectParser selectParser = createSQLSelectParser();
SQLSelect select = selectParser.select();
return new OscarSelectStatement(select);
}
public SQLStatement parseWith() {
SQLWithSubqueryClause with = this.parseWithQuery();
// OscarWithClause with = this.parseWithClause();
if (lexer.token() == Token.INSERT) {
OscarInsertStatement stmt = this.parseInsert();
stmt.setWith(with);
return stmt;
}
if (lexer.token() == Token.SELECT) {
OscarSelectStatement stmt = this.parseSelect();
stmt.getSelect().setWithSubQuery(with);
return stmt;
}
if (lexer.token() == Token.DELETE) {
OscarDeleteStatement stmt = this.parseDeleteStatement();
stmt.setWith(with);
return stmt;
}
if (lexer.token() == Token.UPDATE) {
OscarUpdateStatement stmt = (OscarUpdateStatement) this.parseUpdateStatement();
stmt.setWith(with);
return stmt;
}
throw new ParserException("TODO. " + lexer.info());
}
protected SQLAlterTableAlterColumn parseAlterColumn() {
if (lexer.token() == Token.COLUMN) {
lexer.nextToken();
}
SQLColumnDefinition column = this.exprParser.parseColumn();
SQLAlterTableAlterColumn alterColumn = new SQLAlterTableAlterColumn();
alterColumn.setColumn(column);
if (column.getDataType() == null && column.getConstraints().isEmpty()) {
if (lexer.token() == Token.SET) {
lexer.nextToken();
if (lexer.token() == Token.NOT) {
lexer.nextToken();
accept(Token.NULL);
alterColumn.setSetNotNull(true);
} else {
accept(Token.DEFAULT);
SQLExpr defaultValue = this.exprParser.expr();
alterColumn.setSetDefault(defaultValue);
}
} else if (lexer.token() == Token.DROP) {
lexer.nextToken();
if (lexer.token() == Token.NOT) {
lexer.nextToken();
accept(Token.NULL);
alterColumn.setDropNotNull(true);
} else {
accept(Token.DEFAULT);
alterColumn.setDropDefault(true);
}
}
}
return alterColumn;
}
public SQLStatement parseShow() {
accept(Token.SHOW);
OscarShowStatement stmt = new OscarShowStatement();
switch (lexer.token()) {
case ALL:
stmt.setExpr(new SQLIdentifierExpr(Token.ALL.name()));
lexer.nextToken();
break;
default:
stmt.setExpr(this.exprParser.expr());
break;
}
return stmt;
}
@Override
public SQLStatement parseCommit() {
SQLCommitStatement stmt = new SQLCommitStatement();
stmt.setDbType(this.dbType);
lexer.nextToken();
return stmt;
}
@Override
public SQLStatement parseSet() {
accept(Token.SET);
Token token = lexer.token();
String range = "";
SQLSetStatement.Option option = null;
if (token == Token.SESSION) {
lexer.nextToken();
range = Token.SESSION.name();
option = SQLSetStatement.Option.SESSION;
} else if (token == Token.IDENTIFIER && LOCAL.equalsIgnoreCase(lexer.stringVal())) {
range = LOCAL;
option = SQLSetStatement.Option.LOCAL;
lexer.nextToken();
}
long hash = lexer.hashLCase();
String parameter = lexer.stringVal();
SQLExpr paramExpr;
List<SQLExpr> values = new ArrayList<SQLExpr>();
if (hash == FnvHash.Constants.TIME) {
lexer.nextToken();
acceptIdentifier("ZONE");
paramExpr = new SQLIdentifierExpr("TIME ZONE");
String value = lexer.stringVal();
if (lexer.token() == Token.IDENTIFIER) {
values.add(new SQLIdentifierExpr(value.toUpperCase()));
} else {
values.add(new SQLCharExpr(value));
}
lexer.nextToken();
// return new PGSetStatement(range, TIME_ZONE, exprs);
} else if (hash == FnvHash.Constants.ROLE) {
paramExpr = new SQLIdentifierExpr(parameter);
lexer.nextToken();
values.add(this.exprParser.primary());
lexer.nextToken();
} else {
paramExpr = new SQLIdentifierExpr(parameter);
lexer.nextToken();
while (!lexer.isEOF()) {
lexer.nextToken();
if (lexer.token() == Token.LITERAL_CHARS) {
values.add(new SQLCharExpr(lexer.stringVal()));
} else if (lexer.token() == Token.LITERAL_INT) {
values.add(new SQLIdentifierExpr(lexer.numberString()));
} else if (lexer.identifierEquals(FnvHash.Constants.JSON_SET)
|| lexer.identifierEquals(FnvHash.Constants.JSONB_SET)) {
SQLExpr json_set = this.exprParser.expr();
values.add(json_set);
} else {
values.add(new SQLIdentifierExpr(lexer.stringVal()));
}
// skip comma
lexer.nextToken();
}
}
// value | 'value' | DEFAULT
SQLExpr valueExpr;
if (values.size() == 1) {
valueExpr = values.get(0);
} else {
SQLListExpr listExpr = new SQLListExpr();
for (SQLExpr value : values) {
listExpr.addItem(value);
}
valueExpr = listExpr;
}
SQLSetStatement stmt = new SQLSetStatement(paramExpr, valueExpr, dbType);
stmt.setOption(option);
return stmt;
}
public SQLCreateIndexStatement parseCreateIndex() {
accept(Token.CREATE);
SQLCreateIndexStatement stmt = new SQLCreateIndexStatement(getDbType());
if (lexer.token() == Token.UNIQUE) {
lexer.nextToken();
if (lexer.identifierEquals("CLUSTERED")) {
lexer.nextToken();
stmt.setType("UNIQUE CLUSTERED");
} else {
stmt.setType("UNIQUE");
}
} else if (lexer.identifierEquals("FULLTEXT")) {
stmt.setType("FULLTEXT");
lexer.nextToken();
} else if (lexer.identifierEquals("NONCLUSTERED")) {
stmt.setType("NONCLUSTERED");
lexer.nextToken();
}
accept(Token.INDEX);
if (lexer.token() == Token.IF) {
lexer.nextToken();
accept(Token.NOT);
accept(Token.EXISTS);
stmt.setIfNotExists(true);
}
if (lexer.identifierEquals(FnvHash.Constants.CONCURRENTLY)) {
lexer.nextToken();
stmt.setConcurrently(true);
}
if (lexer.token() != Token.ON) {
stmt.setName(this.exprParser.name());
}
accept(Token.ON);
stmt.setTable(this.exprParser.name());
if (lexer.token() == Token.USING) {
lexer.nextToken();
String using = lexer.stringVal();
accept(Token.IDENTIFIER);
stmt.setUsing(using);
}
accept(Token.LPAREN);
for (;;) {
SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
item.setParent(stmt);
stmt.addItem(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
if (lexer.token() == Token.WITH) {
lexer.nextToken();
accept(Token.LPAREN);
for (;;) {
String optionName = lexer.stringVal();
accept(Token.IDENTIFIER);
accept(Token.EQ);
SQLExpr option = this.exprParser.expr();
option.setParent(stmt);
stmt.addOption(optionName, option);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
if (lexer.token() == Token.TABLESPACE) {
lexer.nextToken();
SQLName tablespace = this.exprParser.name();
stmt.setTablespace(tablespace);
}
return stmt;
}
public SQLCreateTableParser getSQLCreateTableParser() {
return new OscarCreateTableParser(this.exprParser);
}
}
| OscarStatementParser |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/Clamp.java | {
"start": 1543,
"end": 6548
} | class ____ extends EsqlScalarFunction implements SurrogateExpression {
private final Expression field;
private final Expression min;
private final Expression max;
private DataType resolvedType;
@FunctionInfo(
returnType = { "double", "integer", "long", "double", "unsigned_long", "keyword", "ip", "boolean", "date", "version" },
description = "Clamps the values of all samples to have a lower limit of min and an upper limit of max.",
examples = { @Example(file = "k8s-timeseries-clamp", tag = "clamp") }
)
public Clamp(
Source source,
@Param(
name = "field",
type = { "double", "integer", "long", "double", "unsigned_long", "keyword", "ip", "boolean", "date", "version" },
description = "Numeric expression. If `null`, the function returns `null`."
) Expression field,
@Param(
name = "min",
type = { "double", "integer", "long", "double", "unsigned_long", "keyword", "ip", "boolean", "date", "version" },
description = "The min value to clamp data into."
) Expression min,
@Param(
name = "max",
type = { "double", "integer", "long", "double", "unsigned_long", "keyword", "ip", "boolean", "date", "version" },
description = "The max value to clamp data into."
) Expression max
) {
super(source, List.of(field, min, max));
this.field = field;
this.min = min;
this.max = max;
}
@Override
public String getWriteableName() {
throw new UnsupportedOperationException("Clamp does not support serialization.");
}
@Override
protected TypeResolution resolveType() {
if (childrenResolved() == false) {
return new TypeResolution("Unresolved children");
}
var field = children().get(0);
var max = children().get(1);
var min = children().get(2);
var fieldDataType = field.dataType().noText();
TypeResolution resolution = TypeResolutions.isType(
field,
t -> t.isNumeric() || t == DataType.BOOLEAN || t.isDate() || DataType.isString(t) || t == DataType.IP || t == DataType.VERSION,
sourceText(),
TypeResolutions.ParamOrdinal.FIRST,
fieldDataType.typeName()
);
if (resolution.unresolved()) {
return resolution;
}
if (fieldDataType == NULL) {
return new TypeResolution("'field' must not be null in clamp()");
}
resolution = TypeResolutions.isType(
max,
t -> t.isNumeric() ? fieldDataType.isNumeric() : t.noText() == fieldDataType.noText(),
sourceText(),
TypeResolutions.ParamOrdinal.SECOND,
fieldDataType.typeName()
);
if (resolution.unresolved()) {
return resolution;
}
resolution = TypeResolutions.isType(
min,
t -> t.isNumeric() ? fieldDataType.isNumeric() : t.noText() == fieldDataType.noText(),
sourceText(),
TypeResolutions.ParamOrdinal.THIRD,
fieldDataType.typeName()
);
if (resolution.unresolved()) {
return resolution;
}
if (fieldDataType.isNumeric() == false) {
resolvedType = fieldDataType;
} else {
// When the types are equally wide, prefer rational numbers
resolvedType = Stream.of(fieldDataType, max.dataType(), min.dataType())
.sorted(Comparator.comparingInt(DataType::estimatedSize).thenComparing(DataType::isRationalNumber))
.toList()
.getLast();
}
return TypeResolution.TYPE_RESOLVED;
}
@Override
public DataType dataType() {
if (resolvedType == null && resolveType().resolved() == false) {
throw new EsqlIllegalArgumentException("Unable to resolve data type for clamp_max");
}
return resolvedType;
}
@Override
public Expression replaceChildren(List<Expression> newChildren) {
return new Clamp(source(), newChildren.get(0), newChildren.get(1), newChildren.get(2));
}
@Override
protected NodeInfo<? extends Expression> info() {
return NodeInfo.create(this, Clamp::new, field, children().get(1), children().get(2));
}
@Override
public void writeTo(StreamOutput out) throws IOException {
throw new UnsupportedOperationException("Clamp does not support serialization.");
}
@Override
public ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) {
throw new UnsupportedOperationException(
"Clamp should have been replaced by ClampMin and ClampMax. Something went wrong in the compute engine."
);
}
@Override
public Expression surrogate() {
return new ClampMax(source(), new ClampMin(source(), field, min), max);
}
}
| Clamp |
java | grpc__grpc-java | binder/src/test/java/io/grpc/binder/internal/IntentNameResolverProviderTest.java | {
"start": 1713,
"end": 4331
} | class ____ {
private final Application appContext = ApplicationProvider.getApplicationContext();
private final SynchronizationContext syncContext = newSynchronizationContext();
private final NameResolver.Args args = newNameResolverArgs();
private NameResolverProvider provider;
@Rule public MockitoTestRule mockitoTestRule = MockitoJUnit.testRule(this);
@Mock public NameResolver.Listener2 mockListener;
@Captor public ArgumentCaptor<ResolutionResult> resultCaptor;
@Before
public void setUp() {
provider = new IntentNameResolverProvider();
}
@Test
public void testProviderScheme_returnsIntentScheme() throws Exception {
assertThat(provider.getDefaultScheme())
.isEqualTo(IntentNameResolverProvider.ANDROID_INTENT_SCHEME);
}
@Test
public void testNoResolverForUnknownScheme_returnsNull() throws Exception {
assertThat(provider.newNameResolver(new URI("random://uri"), args)).isNull();
}
@Test
public void testResolutionWithBadUri_throwsIllegalArg() throws Exception {
assertThrows(
IllegalArgumentException.class,
() -> provider.newNameResolver(new URI("intent:xxx#Intent;e.x=1;end;"), args));
}
@Test
public void testResolverForIntentScheme_returnsResolver() throws Exception {
URI uri = new URI("intent://authority/path#Intent;action=action;scheme=scheme;end");
NameResolver resolver = provider.newNameResolver(uri, args);
assertThat(resolver).isNotNull();
assertThat(resolver.getServiceAuthority()).isEqualTo("localhost");
syncContext.execute(() -> resolver.start(mockListener));
shadowOf(getMainLooper()).idle();
verify(mockListener).onResult2(resultCaptor.capture());
assertThat(resultCaptor.getValue().getAddressesOrError()).isNotNull();
syncContext.execute(resolver::shutdown);
shadowOf(getMainLooper()).idle();
}
/** Returns a new test-specific {@link NameResolver.Args} instance. */
private NameResolver.Args newNameResolverArgs() {
return NameResolver.Args.newBuilder()
.setDefaultPort(-1)
.setProxyDetector((target) -> null) // No proxies here.
.setSynchronizationContext(syncContext)
.setOffloadExecutor(ContextCompat.getMainExecutor(appContext))
.setServiceConfigParser(mock(ServiceConfigParser.class))
.setArg(ApiConstants.SOURCE_ANDROID_CONTEXT, appContext)
.build();
}
private static SynchronizationContext newSynchronizationContext() {
return new SynchronizationContext(
(thread, exception) -> {
throw new AssertionError(exception);
});
}
}
| IntentNameResolverProviderTest |
java | apache__flink | flink-formats/flink-protobuf/src/test/java/org/apache/flink/formats/protobuf/MapRowToProtoTest.java | {
"start": 1335,
"end": 2904
} | class ____ {
@Test
public void testSimple() throws Exception {
Map<StringData, StringData> map1 = new HashMap<>();
map1.put(StringData.fromString("a"), StringData.fromString("b"));
Map<StringData, RowData> map2 = new HashMap<>();
map2.put(StringData.fromString("c"), GenericRowData.of(1, 2L));
Map<StringData, byte[]> map3 = new HashMap<>();
map3.put(StringData.fromString("e"), new byte[] {1, 2, 3});
RowData row =
GenericRowData.of(
1,
new GenericMapData(map1),
new GenericMapData(map2),
new GenericMapData(map3));
byte[] bytes = ProtobufTestHelper.rowToPbBytes(row, MapTest.class);
MapTest mapTest = MapTest.parseFrom(bytes);
assertEquals(1, mapTest.getA());
assertEquals("b", mapTest.getMap1Map().get("a"));
MapTest.InnerMessageTest innerMessageTest = mapTest.getMap2Map().get("c");
assertEquals(1, innerMessageTest.getA());
assertEquals(2L, innerMessageTest.getB());
assertEquals(ByteString.copyFrom(new byte[] {1, 2, 3}), mapTest.getMap3Map().get("e"));
}
@Test
public void testNull() throws Exception {
RowData row = GenericRowData.of(1, null, null, null);
byte[] bytes = ProtobufTestHelper.rowToPbBytes(row, MapTest.class);
MapTest mapTest = MapTest.parseFrom(bytes);
Map<String, String> map = mapTest.getMap1Map();
assertEquals(0, map.size());
}
}
| MapRowToProtoTest |
java | apache__maven | its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng4955LocalVsRemoteSnapshotResolutionTest.java | {
"start": 1125,
"end": 2552
} | class ____ extends AbstractMavenIntegrationTestCase {
/**
* Verify that dependency resolution prefers newer local snapshots over outdated remote snapshots that use the new
* metadata format.
*
* @throws Exception in case of failure
*/
@Test
public void testit() throws Exception {
File testDir = extractResources("/mng-4955");
Verifier verifier = newVerifier(new File(testDir, "dep").getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.deleteArtifacts("org.apache.maven.its.mng4955");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
verifier = newVerifier(testDir.getAbsolutePath());
verifier.setAutoclean(false);
verifier.deleteDirectory("target");
verifier.addCliArgument("-s");
verifier.addCliArgument("settings.xml");
verifier.filterFile("settings-template.xml", "settings.xml");
verifier.addCliArgument("validate");
verifier.execute();
verifier.verifyErrorFreeLog();
List<String> classpath = verifier.loadLines("target/classpath.txt");
File jarFile = new File(classpath.get(1).toString());
assertEquals("eeff09b1b80e823eeb2a615b1d4b09e003e86fd3", ItUtils.calcHash(jarFile, "SHA-1"));
}
}
| MavenITmng4955LocalVsRemoteSnapshotResolutionTest |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/userguide/WireTransferPayment.java | {
"start": 274,
"end": 363
} | class ____ extends Payment {
}
//end::hql-examples-domain-model-example[]
| WireTransferPayment |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/hql/EmbeddableAsParameterTest.java | {
"start": 3417,
"end": 3725
} | class ____ {
@Id
private Long id;
private String name;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
@Embeddable
public static | EntityTest |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/BeanWrapperImpl.java | {
"start": 3480,
"end": 5087
} | class ____ instantiate and wrap
*/
public BeanWrapperImpl(Class<?> clazz) {
super(clazz);
}
/**
* Create a new BeanWrapperImpl for the given object,
* registering a nested path that the object is in.
* @param object the object wrapped by this BeanWrapper
* @param nestedPath the nested path of the object
* @param rootObject the root object at the top of the path
*/
public BeanWrapperImpl(Object object, String nestedPath, Object rootObject) {
super(object, nestedPath, rootObject);
}
/**
* Create a new BeanWrapperImpl for the given object,
* registering a nested path that the object is in.
* @param object the object wrapped by this BeanWrapper
* @param nestedPath the nested path of the object
* @param parent the containing BeanWrapper (must not be {@code null})
*/
private BeanWrapperImpl(Object object, String nestedPath, BeanWrapperImpl parent) {
super(object, nestedPath, parent);
}
/**
* Set a bean instance to hold, without any unwrapping of {@link java.util.Optional}.
* @param object the actual target object
* @since 4.3
* @see #setWrappedInstance(Object)
*/
public void setBeanInstance(Object object) {
this.wrappedObject = object;
this.rootObject = object;
this.typeConverterDelegate = new TypeConverterDelegate(this, this.wrappedObject);
setIntrospectionClass(object.getClass());
}
@Override
public void setWrappedInstance(Object object, @Nullable String nestedPath, @Nullable Object rootObject) {
super.setWrappedInstance(object, nestedPath, rootObject);
setIntrospectionClass(getWrappedClass());
}
/**
* Set the | to |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/proxy/Log4jFilterTest.java | {
"start": 11000,
"end": 11383
} | class ____ extends Logger {
private boolean enable;
public FakeLogger(String name, boolean enable) {
super(name);
this.enable = enable;
}
public boolean isDebugEnabled() {
return this.enable;
}
public boolean isEnabledFor(Priority level) {
return this.enable;
}
}
}
| FakeLogger |
java | elastic__elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailTemplate.java | {
"start": 974,
"end": 8969
} | class ____ implements ToXContentObject {
final TextTemplate from;
final TextTemplate[] replyTo;
final TextTemplate priority;
final TextTemplate[] to;
final TextTemplate[] cc;
final TextTemplate[] bcc;
final TextTemplate subject;
final TextTemplate textBody;
final TextTemplate htmlBody;
public EmailTemplate(
TextTemplate from,
TextTemplate[] replyTo,
TextTemplate priority,
TextTemplate[] to,
TextTemplate[] cc,
TextTemplate[] bcc,
TextTemplate subject,
TextTemplate textBody,
TextTemplate htmlBody
) {
this.from = from;
this.replyTo = replyTo;
this.priority = priority;
this.to = to;
this.cc = cc;
this.bcc = bcc;
this.subject = subject;
this.textBody = textBody;
this.htmlBody = htmlBody;
}
public TextTemplate from() {
return from;
}
public TextTemplate[] replyTo() {
return replyTo;
}
public TextTemplate priority() {
return priority;
}
public TextTemplate[] to() {
return to;
}
public TextTemplate[] cc() {
return cc;
}
public TextTemplate[] bcc() {
return bcc;
}
public TextTemplate subject() {
return subject;
}
public TextTemplate textBody() {
return textBody;
}
public TextTemplate htmlBody() {
return htmlBody;
}
public Email.Builder render(
TextTemplateEngine engine,
Map<String, Object> model,
HtmlSanitizer htmlSanitizer,
Map<String, Attachment> attachments
) throws AddressException {
Email.Builder builder = Email.builder();
if (from != null) {
builder.from(engine.render(from, model));
}
if (replyTo != null) {
Email.AddressList addresses = templatesToAddressList(engine, replyTo, model);
builder.replyTo(addresses);
}
if (priority != null) {
builder.priority(Email.Priority.resolve(engine.render(priority, model)));
}
if (to != null) {
Email.AddressList addresses = templatesToAddressList(engine, to, model);
builder.to(addresses);
}
if (cc != null) {
Email.AddressList addresses = templatesToAddressList(engine, cc, model);
builder.cc(addresses);
}
if (bcc != null) {
Email.AddressList addresses = templatesToAddressList(engine, bcc, model);
builder.bcc(addresses);
}
if (subject != null) {
builder.subject(engine.render(subject, model));
}
Set<String> warnings = Sets.newHashSetWithExpectedSize(1);
if (attachments != null) {
for (Attachment attachment : attachments.values()) {
builder.attach(attachment);
warnings.addAll(attachment.getWarnings());
}
}
String htmlWarnings = "";
String textWarnings = "";
if (warnings.isEmpty() == false) {
StringBuilder textWarningBuilder = new StringBuilder();
StringBuilder htmlWarningBuilder = new StringBuilder();
warnings.forEach(w -> {
if (Strings.isNullOrEmpty(w) == false) {
textWarningBuilder.append(w).append("\n");
htmlWarningBuilder.append(w).append("<br>");
}
});
textWarningBuilder.append("\n");
htmlWarningBuilder.append("<br>");
htmlWarnings = htmlWarningBuilder.toString();
textWarnings = textWarningBuilder.toString();
}
if (textBody != null) {
builder.textBody(textWarnings + engine.render(textBody, model));
}
if (htmlBody != null) {
String renderedHtml = htmlWarnings + engine.render(htmlBody, model);
renderedHtml = htmlSanitizer.sanitize(renderedHtml);
builder.htmlBody(renderedHtml);
}
if (htmlBody == null && textBody == null && Strings.isNullOrEmpty(textWarnings) == false) {
builder.textBody(textWarnings);
}
return builder;
}
private static Email.AddressList templatesToAddressList(TextTemplateEngine engine, TextTemplate[] templates, Map<String, Object> model)
throws AddressException {
List<Email.Address> addresses = new ArrayList<>(templates.length);
for (TextTemplate template : templates) {
Email.AddressList.parse(engine.render(template, model)).forEach(addresses::add);
}
return new Email.AddressList(addresses);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
EmailTemplate that = (EmailTemplate) o;
return Objects.equals(from, that.from)
&& Arrays.equals(replyTo, that.replyTo)
&& Objects.equals(priority, that.priority)
&& Arrays.equals(to, that.to)
&& Arrays.equals(cc, that.cc)
&& Arrays.equals(bcc, that.bcc)
&& Objects.equals(subject, that.subject)
&& Objects.equals(textBody, that.textBody)
&& Objects.equals(htmlBody, that.htmlBody);
}
@Override
public int hashCode() {
return Objects.hash(
from,
Arrays.hashCode(replyTo),
priority,
Arrays.hashCode(to),
Arrays.hashCode(cc),
Arrays.hashCode(bcc),
subject,
textBody,
htmlBody
);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
xContentBody(builder, params);
return builder.endObject();
}
public XContentBuilder xContentBody(XContentBuilder builder, Params params) throws IOException {
if (from != null) {
builder.field(Email.Field.FROM.getPreferredName(), from, params);
}
if (replyTo != null) {
builder.startArray(Email.Field.REPLY_TO.getPreferredName());
for (TextTemplate template : replyTo) {
template.toXContent(builder, params);
}
builder.endArray();
}
if (priority != null) {
builder.field(Email.Field.PRIORITY.getPreferredName(), priority, params);
}
if (to != null) {
builder.startArray(Email.Field.TO.getPreferredName());
for (TextTemplate template : to) {
template.toXContent(builder, params);
}
builder.endArray();
}
if (cc != null) {
builder.startArray(Email.Field.CC.getPreferredName());
for (TextTemplate template : cc) {
template.toXContent(builder, params);
}
builder.endArray();
}
if (bcc != null) {
builder.startArray(Email.Field.BCC.getPreferredName());
for (TextTemplate template : bcc) {
template.toXContent(builder, params);
}
builder.endArray();
}
if (subject != null) {
builder.field(Email.Field.SUBJECT.getPreferredName(), subject, params);
}
if (textBody != null || htmlBody != null) {
builder.startObject(Email.Field.BODY.getPreferredName());
if (textBody != null) {
builder.field(Email.Field.BODY_TEXT.getPreferredName(), textBody, params);
}
if (htmlBody != null) {
builder.field(Email.Field.BODY_HTML.getPreferredName(), htmlBody, params);
}
builder.endObject();
}
return builder;
}
public static Builder builder() {
return new Builder();
}
public static | EmailTemplate |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/context/ConfigurableWebApplicationContext.java | {
"start": 1522,
"end": 3796
} | interface ____ extends WebApplicationContext, ConfigurableApplicationContext {
/**
* Prefix for ApplicationContext ids that refer to context path and/or servlet name.
*/
String APPLICATION_CONTEXT_ID_PREFIX = WebApplicationContext.class.getName() + ":";
/**
* Name of the ServletConfig environment bean in the factory.
* @see jakarta.servlet.ServletConfig
*/
String SERVLET_CONFIG_BEAN_NAME = "servletConfig";
/**
* Set the ServletContext for this web application context.
* <p>Does not cause an initialization of the context: refresh needs to be
* called after the setting of all configuration properties.
* @see #refresh()
*/
void setServletContext(@Nullable ServletContext servletContext);
/**
* Set the ServletConfig for this web application context.
* Only called for a WebApplicationContext that belongs to a specific Servlet.
* @see #refresh()
*/
void setServletConfig(@Nullable ServletConfig servletConfig);
/**
* Return the ServletConfig for this web application context, if any.
*/
@Nullable ServletConfig getServletConfig();
/**
* Set the namespace for this web application context,
* to be used for building a default context config location.
* The root web application context does not have a namespace.
*/
void setNamespace(@Nullable String namespace);
/**
* Return the namespace for this web application context, if any.
*/
@Nullable String getNamespace();
/**
* Set the config locations for this web application context in init-param style,
* i.e. with distinct locations separated by commas, semicolons or whitespace.
* <p>If not set, the implementation is supposed to use a default for the
* given namespace or the root web application context, as appropriate.
*/
void setConfigLocation(String configLocation);
/**
* Set the config locations for this web application context.
* <p>If not set, the implementation is supposed to use a default for the
* given namespace or the root web application context, as appropriate.
*/
void setConfigLocations(String... configLocations);
/**
* Return the config locations for this web application context,
* or {@code null} if none specified.
*/
String @Nullable [] getConfigLocations();
}
| ConfigurableWebApplicationContext |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/mapper/CompositeRuntimeField.java | {
"start": 1221,
"end": 6133
} | class ____ implements RuntimeField {
public static final String CONTENT_TYPE = "composite";
public static final Parser PARSER = new Parser(name -> new RuntimeField.Builder(name) {
private final FieldMapper.Parameter<Script> script = new FieldMapper.Parameter<>(
"script",
false,
() -> null,
RuntimeField::parseScript,
RuntimeField.initializerNotSupported(),
XContentBuilder::field,
Objects::toString
).addValidator(s -> {
if (s == null) {
throw new IllegalArgumentException("composite runtime field [" + name + "] must declare a [script]");
}
});
private final FieldMapper.Parameter<OnScriptError> onScriptError = FieldMapper.Parameter.onScriptErrorParam(
m -> m.builderParams.onScriptError(),
script
);
private final FieldMapper.Parameter<Map<String, Object>> fields = new FieldMapper.Parameter<Map<String, Object>>(
"fields",
false,
Collections::emptyMap,
(f, p, o) -> parseFields(f, o),
RuntimeField.initializerNotSupported(),
XContentBuilder::field,
Objects::toString
).addValidator(objectMap -> {
if (objectMap == null || objectMap.isEmpty()) {
throw new IllegalArgumentException("composite runtime field [" + name + "] must declare its [fields]");
}
});
@Override
protected List<FieldMapper.Parameter<?>> getParameters() {
List<FieldMapper.Parameter<?>> parameters = new ArrayList<>(super.getParameters());
parameters.add(script);
parameters.add(fields);
parameters.add(onScriptError);
return Collections.unmodifiableList(parameters);
}
@Override
protected RuntimeField createChildRuntimeField(
MappingParserContext parserContext,
String parent,
Function<SearchLookup, CompositeFieldScript.LeafFactory> parentScriptFactory,
OnScriptError onScriptError
) {
throw new IllegalArgumentException("Composite field [" + name + "] cannot be a child of composite field [" + parent + "]");
}
@Override
protected RuntimeField createRuntimeField(MappingParserContext parserContext) {
CompositeFieldScript.Factory factory = parserContext.scriptCompiler().compile(script.get(), CompositeFieldScript.CONTEXT);
Function<RuntimeField.Builder, RuntimeField> builder = b -> b.createChildRuntimeField(
parserContext,
name,
lookup -> factory.newFactory(name, script.get().getParams(), lookup, onScriptError.get()),
onScriptError.get()
);
Map<String, RuntimeField> runtimeFields = RuntimeField.parseRuntimeFields(
new HashMap<>(fields.getValue()),
parserContext,
builder,
false
);
return new CompositeRuntimeField(name, getParameters(), runtimeFields.values());
}
});
private final String name;
private final List<FieldMapper.Parameter<?>> parameters;
private final Collection<RuntimeField> subfields;
CompositeRuntimeField(String name, List<FieldMapper.Parameter<?>> parameters, Collection<RuntimeField> subfields) {
this.name = name;
this.parameters = parameters;
this.subfields = subfields;
}
@Override
public String name() {
return name;
}
@Override
public Stream<MappedFieldType> asMappedFieldTypes() {
return subfields.stream().flatMap(RuntimeField::asMappedFieldTypes);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(name);
builder.field("type", "composite");
boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
for (FieldMapper.Parameter<?> parameter : parameters) {
parameter.toXContent(builder, includeDefaults);
}
builder.endObject();
return builder;
}
private static Map<String, Object> parseFields(String name, Object fieldsObject) {
if (fieldsObject instanceof Map == false) {
throw new MapperParsingException(
"[fields] must be an object, got "
+ fieldsObject.getClass().getSimpleName()
+ "["
+ fieldsObject
+ "] for field ["
+ name
+ "]"
);
}
@SuppressWarnings("unchecked")
Map<String, Object> fields = (Map<String, Object>) fieldsObject;
return fields;
}
}
| CompositeRuntimeField |
java | grpc__grpc-java | api/src/context/java/io/grpc/Context.java | {
"start": 36221,
"end": 37753
} | class ____ {
/**
* Implements {@link io.grpc.Context#attach}.
*
* <p>Caution: {@link Context#attach()} interprets a return value of {@code null} to mean
* the same thing as {@link Context#ROOT}.
*
* <p>See also: {@link #current()}.
* @param toAttach the context to be attached
* @return A {@link Context} that should be passed back into {@link #detach(Context, Context)}
* as the {@code toRestore} parameter. {@code null} is a valid return value, but see
* caution note.
*/
public abstract Context doAttach(Context toAttach);
/**
* Implements {@link io.grpc.Context#detach}.
*
* @param toDetach the context to be detached. Should be, or be equivalent to, the current
* context of the current scope
* @param toRestore the context to be the current. Should be, or be equivalent to, the context
* of the outer scope
*/
public abstract void detach(Context toDetach, Context toRestore);
/**
* Implements {@link io.grpc.Context#current}.
*
* <p>Caution: {@link Context} interprets a return value of {@code null} to mean the same
* thing as {@link Context#ROOT}.
*
* <p>See also {@link #doAttach(Context)}.
*
* @return The context of the current scope. {@code null} is a valid return value, but see
* caution note.
*/
public abstract Context current();
}
/**
* Stores listener and executor pair.
*/
private static final | Storage |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/SchemaUpdateSQLServerTest.java | {
"start": 8729,
"end": 8998
} | class ____ extends InheritanceRootEntity {
}
@Entity(name = "InheritanceSecondChildEntity")
@Table(name = "InheritanceSecondChildEntity", catalog = "hibernate_orm_test_collation", schema = "dbo")
@PrimaryKeyJoinColumn(name = "ID")
public static | InheritanceChildEntity |
java | spring-projects__spring-security | acl/src/test/java/org/springframework/security/acls/TargetObjectWithUUID.java | {
"start": 721,
"end": 802
} | class ____ a {@link UUID} for the Id.
*
* @author Luke Taylor
*/
public final | with |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/casting/CastRulesTest.java | {
"start": 4901,
"end": 5064
} | class ____ unit tests of {@link CastRule} implementations. For IT test cases, check out the
* {@link CastFunctionITCase}
*/
@Execution(ExecutionMode.CONCURRENT)
| runs |
java | redisson__redisson | redisson/src/main/java/org/redisson/mapreduce/Collector.java | {
"start": 1040,
"end": 2341
} | class ____<K, V> implements RCollector<K, V> {
private RedissonClient client;
private String name;
private int parts;
private Codec codec;
private long timeout;
private BitSet expirationsBitSet = new BitSet();
public Collector(Codec codec, RedissonClient client, String name, int parts, long timeout) {
super();
this.client = client;
this.name = name;
this.parts = parts;
this.codec = codec;
this.timeout = timeout;
expirationsBitSet = new BitSet(parts);
}
@Override
public void emit(K key, V value) {
try {
ByteBuf encodedKey = codec.getValueEncoder().encode(key);
long hash = Hash.hash64(encodedKey);
encodedKey.release();
int part = (int) Math.abs(hash % parts);
String partName = name + ":" + part;
RListMultimap<K, V> multimap = client.getListMultimap(partName, codec);
multimap.put(key, value);
if (timeout > 0 && !expirationsBitSet.get(part)) {
multimap.expire(Duration.ofMillis(timeout));
expirationsBitSet.set(part);
}
} catch (IOException e) {
throw new IllegalArgumentException(e);
}
}
}
| Collector |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/ASTHelpersFindSuperMethodsTest.java | {
"start": 2236,
"end": 2543
} | class ____ extends Baz {
public void foo(String string) {
System.out.println("I am not an override! " + string);
}
public int bar(int x, int y) {
return x * y;
}
}
""");
writeFile(
"Norf.java",
"""
| Quux |
java | elastic__elasticsearch | x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/GeoShapeValuesSourceType.java | {
"start": 1139,
"end": 4446
} | class ____ extends ShapeValuesSourceType {
static GeoShapeValuesSourceType INSTANCE = new GeoShapeValuesSourceType();
public static GeoShapeValuesSourceType instance() {
return INSTANCE;
}
@Override
public ValuesSource getEmpty() {
return GeoShapeValuesSource.EMPTY;
}
@Override
@SuppressWarnings("unchecked")
public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script) {
boolean isPoint = fieldContext.indexFieldData() instanceof IndexGeoPointFieldData;
boolean isShape = fieldContext.indexFieldData() instanceof IndexShapeFieldData;
if (isPoint == false && isShape == false) {
throw new IllegalArgumentException(
"Expected geo_point or geo_shape type on field ["
+ fieldContext.field()
+ "], but got ["
+ fieldContext.fieldType().typeName()
+ "]"
);
}
if (isPoint) {
return new ValuesSource.GeoPoint.Fielddata((IndexGeoPointFieldData) fieldContext.indexFieldData());
}
return new GeoShapeValuesSource.Fielddata((IndexShapeFieldData<GeoShapeValues>) fieldContext.indexFieldData());
}
@Override
public ValuesSource replaceMissing(
ValuesSource valuesSource,
Object rawMissing,
DocValueFormat docValueFormat,
LongSupplier nowInMillis
) {
GeoShapeValuesSource shapeValuesSource = (GeoShapeValuesSource) valuesSource;
final GeoShapeValues.GeoShapeValue missing = GeoShapeValues.EMPTY.missing(rawMissing.toString());
return new GeoShapeValuesSource() {
@Override
public GeoShapeValues shapeValues(LeafReaderContext context) {
GeoShapeValues values = shapeValuesSource.shapeValues(context);
return new GeoShapeValues() {
private boolean exists;
@Override
public boolean advanceExact(int doc) throws IOException {
exists = values.advanceExact(doc);
// always return true because we want to return a value even if
// the document does not have a value
return true;
}
@Override
public ValuesSourceType valuesSourceType() {
return values.valuesSourceType();
}
@Override
public GeoShapeValue value() throws IOException {
return exists ? values.value() : missing;
}
@Override
public String toString() {
return "anon MultiShapeValues of [" + super.toString() + "]";
}
};
}
@Override
public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
return MissingValues.replaceMissing(valuesSource.bytesValues(context), new BytesRef(missing.toString()));
}
};
}
@Override
public String typeName() {
return "geoshape";
}
}
| GeoShapeValuesSourceType |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/OverriddenSQLSelectsAnnotation.java | {
"start": 757,
"end": 1927
} | class ____
implements DialectOverride.SQLSelects, RepeatableContainer<DialectOverride.SQLSelect> {
private DialectOverride.SQLSelect[] value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public OverriddenSQLSelectsAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public OverriddenSQLSelectsAnnotation(SQLRestrictions annotation, ModelsContext modelContext) {
this.value = extractJdkValue(
annotation,
DialectOverrideAnnotations.DIALECT_OVERRIDE_SQL_RESTRICTIONS,
"value",
modelContext
);
}
/**
* Used in creating annotation instances from Jandex variant
*/
public OverriddenSQLSelectsAnnotation(
Map<String, Object> attributeValues,
ModelsContext modelContext) {
this.value = (DialectOverride.SQLSelect[]) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return SQLRestrictions.class;
}
@Override
public DialectOverride.SQLSelect[] value() {
return value;
}
public void value(DialectOverride.SQLSelect[] value) {
this.value = value;
}
}
| OverriddenSQLSelectsAnnotation |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/internal/util/collections/ArrayHelper.java | {
"start": 549,
"end": 9195
} | class ____ {
public static <T> boolean contains(T[] array, T object) {
return indexOf( array, object ) > -1;
}
public static <T> boolean containsAll(T[] array, T[] elements) {
for ( T element : elements ) {
if ( !contains( array, element ) ) {
return false;
}
}
return true;
}
public static boolean contains(int[] array, int value) {
//noinspection ForLoopReplaceableByForEach
for ( int i = 0; i < array.length; i++ ) {
if ( array[i] == value ) {
return true;
}
}
return false;
}
public static int indexOf(Object[] array, Object object) {
return indexOf( array, array.length, object );
}
public static int indexOf(Object[] array, int end, Object object) {
for ( int i = 0; i < end; i++ ) {
if ( object.equals( array[i] ) ) {
return i;
}
}
return -1;
}
@SuppressWarnings("unchecked")
@AllowReflection
public static <T> T[] filledArray(T value, Class<T> valueJavaType, int size) {
final T[] array = (T[]) Array.newInstance( valueJavaType, size );
Arrays.fill( array, value );
return array;
}
public static String[] toStringArray(Object[] objects) {
int length = objects.length;
String[] result = new String[length];
for ( int i = 0; i < length; i++ ) {
result[i] = objects[i].toString();
}
return result;
}
public static String[] toStringArray(Collection<String> coll) {
return coll.toArray( EMPTY_STRING_ARRAY );
}
public static Object[] toObjectArray(Collection<Object> coll) {
return coll.toArray( EMPTY_OBJECT_ARRAY );
}
public static String[][] to2DStringArray(Collection<String[]> coll) {
return coll.toArray( new String[0][] );
}
public static int[][] to2DIntArray(Collection<int[]> coll) {
return coll.toArray( new int[0][] );
}
public static Type[] toTypeArray(Collection<Type> coll) {
return coll.toArray( EMPTY_TYPE_ARRAY );
}
public static int[] toIntArray(Collection<Integer> coll) {
final var iter = coll.iterator();
final int[] arr = new int[coll.size()];
int i = 0;
while ( iter.hasNext() ) {
arr[i++] = iter.next();
}
return arr;
}
public static boolean[] toBooleanArray(Collection<Boolean> coll) {
final var iter = coll.iterator();
final boolean[] arr = new boolean[coll.size()];
int i = 0;
while ( iter.hasNext() ) {
arr[i++] = iter.next();
}
return arr;
}
public static String[] slice(String[] strings, int begin, int length) {
final var result = new String[length];
System.arraycopy( strings, begin, result, 0, length );
return result;
}
public static Object[] slice(Object[] objects, int begin, int length) {
final var result = new Object[length];
System.arraycopy( objects, begin, result, 0, length );
return result;
}
public static String[] join(String[] x, String[] y) {
final var result = new String[x.length + y.length];
System.arraycopy( x, 0, result, 0, x.length );
System.arraycopy( y, 0, result, x.length, y.length );
return result;
}
public static String[] join(String[] x, String[] y, boolean[] use) {
final var result = new String[x.length + countTrue( use )];
System.arraycopy( x, 0, result, 0, x.length );
int k = x.length;
for ( int i = 0; i < y.length; i++ ) {
if ( use[i] ) {
result[k++] = y[i];
}
}
return result;
}
public static int[] join(int[] x, int[] y) {
final var result = new int[x.length + y.length];
System.arraycopy( x, 0, result, 0, x.length );
System.arraycopy( y, 0, result, x.length, y.length );
return result;
}
@SuppressWarnings("unchecked")
@AllowReflection
public static <T> T[] join(T[] x, T... y) {
final T[] result = (T[]) Array.newInstance( x.getClass().getComponentType(), x.length + y.length );
System.arraycopy( x, 0, result, 0, x.length );
System.arraycopy( y, 0, result, x.length, y.length );
return result;
}
@SuppressWarnings("unchecked")
@AllowReflection
public static <T> T[] add(T[] x, T y) {
final T[] result = (T[]) Array.newInstance( x.getClass().getComponentType(), x.length + 1 );
System.arraycopy( x, 0, result, 0, x.length );
result[x.length] = y;
return result;
}
public static final boolean[] TRUE = {true};
public static final boolean[] FALSE = {false};
private ArrayHelper() {
}
public static String toString(Object[] array) {
final var string = new StringBuilder();
string.append( "[" );
for ( int i = 0; i < array.length; i++ ) {
string.append( array[i] );
if ( i < array.length - 1 ) {
string.append( "," );
}
}
string.append( "]" );
return string.toString();
}
public static boolean isAllNegative(int[] array) {
for ( int element : array ) {
if ( element >= 0 ) {
return false;
}
}
return true;
}
public static boolean isAllTrue(boolean... array) {
for ( boolean element : array ) {
if ( !element ) {
return false;
}
}
return true;
}
public static int countTrue(boolean... array) {
int result = 0;
for ( boolean element : array ) {
if ( element ) {
result++;
}
}
return result;
}
public static boolean isAllFalse(boolean... array) {
for ( boolean element : array ) {
if ( element ) {
return false;
}
}
return true;
}
public static boolean isAnyTrue(boolean... values) {
for ( boolean value : values ) {
if ( value ) {
return true;
}
}
return false;
}
public static boolean[] negate(boolean[] valueNullness) {
final var result = new boolean[valueNullness.length];
for (int i = 0; i < valueNullness.length; i++) {
result[i] = !valueNullness[i];
}
return result;
}
public static <T> void addAll(Collection<T> collection, T[] array) {
collection.addAll( asList( array ) );
}
public static final String[] EMPTY_STRING_ARRAY = {};
public static final int[] EMPTY_INT_ARRAY = {};
public static final boolean[] EMPTY_BOOLEAN_ARRAY = {};
public static final Class<?>[] EMPTY_CLASS_ARRAY = {};
public static final Object[] EMPTY_OBJECT_ARRAY = {};
public static final Type[] EMPTY_TYPE_ARRAY = {};
public static final byte[] EMPTY_BYTE_ARRAY = {};
/**
* Reverse the elements of the incoming array
*
* @return New array with all elements in reversed order
*/
public static String[] reverse(String[] source) {
final int length = source.length;
final var destination = new String[length];
for ( int i = 0; i < length; i++ ) {
destination[length - i - 1] = source[i];
}
return destination;
}
/**
* Reverse the first n elements of the incoming array
*
* @return New array with the first n elements in reversed order
*/
public static String[] reverseFirst(String[] objects, int n) {
final int length = objects.length;
final var destination = new String[length];
for ( int i = 0; i < n; i++ ) {
destination[i] = objects[n - i - 1];
}
for ( int i = n; i < length; i++ ) {
destination[i] = objects[i];
}
return destination;
}
/**
* Reverse the first n elements of the incoming array
*
* @return New array with the first n elements in reversed order
*/
public static String[][] reverseFirst(String[][] objects, int n) {
final int length = objects.length;
final var destination = new String[length][];
for ( int i = 0; i < n; i++ ) {
destination[i] = objects[n - i - 1];
}
for ( int i = n; i < length; i++ ) {
destination[i] = objects[i];
}
return destination;
}
public static int[] trim(int[] from, int length) {
final var trimmed = new int[length];
System.arraycopy( from, 0, trimmed, 0, length );
return trimmed;
}
public static Object[] toObjectArray(Object array) {
if ( array instanceof Object[] objects ) {
return objects;
}
else {
final int arrayLength = getLength( array );
final var result = new Object[arrayLength];
for ( int i = 0; i < arrayLength; ++i ) {
result[i] = get( array, i );
}
return result;
}
}
public static <T> List<T> toExpandableList(T[] values) {
return values == null ? new ArrayList<>() : asList( values );
}
public static boolean isEmpty(Object[] array) {
return array == null || array.length == 0;
}
public static <T> int size(T[] array) {
return array == null ? 0 : array.length;
}
public static <T> void forEach(T[] array, Consumer<T> consumer) {
if ( array != null ) {
//noinspection ForLoopReplaceableByForEach
for ( int i = 0; i < array.length; i++ ) {
consumer.accept( array[i] );
}
}
}
/**
* @deprecated Use {@link Array#newInstance(Class, int)} instead.
*/
@Deprecated
@SuppressWarnings("unchecked")
@AllowReflection
public static <T> T[] newInstance(Class<T> elementType, int length) {
return (T[]) Array.newInstance( elementType, length );
}
}
| ArrayHelper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/duplicatedgenerator/Flight.java | {
"start": 374,
"end": 415
} | class ____ {
@Id
public String id;
}
| Flight |
java | apache__maven | compat/maven-compat/src/main/java/org/apache/maven/repository/metadata/MetadataResolutionRequestTypeEnum.java | {
"start": 919,
"end": 1265
} | enum ____ {
tree(1),
graph(2),
classpathCompile(3),
classpathTest(4),
classpathRuntime(5),
versionedGraph(6),
scopedGraph(7);
private int id;
// Constructor
MetadataResolutionRequestTypeEnum(int id) {
this.id = id;
}
int getId() {
return id;
}
}
| MetadataResolutionRequestTypeEnum |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/FunctionCatalog.java | {
"start": 1204,
"end": 2291
} | interface ____ extends CatalogPlugin {
/**
* List the functions in a namespace from the catalog.
* <p>
* If there are no functions in the namespace, implementations should return an empty array.
*
* @param namespace a multi-part namespace
* @return an array of Identifiers for functions
* @throws NoSuchNamespaceException If the namespace does not exist (optional).
*/
Identifier[] listFunctions(String[] namespace) throws NoSuchNamespaceException;
/**
* Load a function by {@link Identifier identifier} from the catalog.
*
* @param ident a function identifier
* @return an unbound function instance
* @throws NoSuchFunctionException If the function doesn't exist
*/
UnboundFunction loadFunction(Identifier ident) throws NoSuchFunctionException;
/**
* Returns true if the function exists, false otherwise.
*
* @since 3.3.0
*/
default boolean functionExists(Identifier ident) {
try {
loadFunction(ident);
return true;
} catch (NoSuchFunctionException e) {
return false;
}
}
}
| FunctionCatalog |
java | quarkusio__quarkus | integration-tests/oidc-token-propagation-reactive/src/main/java/io/quarkus/it/keycloak/ProtectedResource.java | {
"start": 364,
"end": 619
} | class ____ {
@Inject
JsonWebToken jwt;
@GET
@Produces("text/plain")
@RolesAllowed("user")
public Uni<String> principalName() {
return Uni.createFrom().item(jwt.getClaim("typ") + ":" + jwt.getName());
}
}
| ProtectedResource |
java | netty__netty | transport/src/main/java/io/netty/channel/socket/DatagramChannel.java | {
"start": 2049,
"end": 2430
} | interface ____ notifies the {@link ChannelFuture}
* once the operation completes.
*
* The given {@link ChannelFuture} will be notified and also returned.
*/
ChannelFuture joinGroup(
InetSocketAddress multicastAddress, NetworkInterface networkInterface, ChannelPromise future);
/**
* Joins the specified multicast group at the specified | and |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/util/Instantiator.java | {
"start": 3750,
"end": 3960
} | class ____ to instantiate
* @return a list of instantiated instances
*/
public List<T> instantiate(Collection<String> names) {
return instantiate(null, names);
}
/**
* Instantiate the given set of | names |
java | spring-projects__spring-boot | module/spring-boot-mongodb/src/test/java/org/springframework/boot/mongodb/autoconfigure/MongoReactiveAutoConfigurationTests.java | {
"start": 11784,
"end": 12157
} | class ____ {
private static final TransportSettings transportSettings = TransportSettings.nettyBuilder().build();
@Bean
MongoClientSettingsBuilderCustomizer customizer() {
return (clientSettingsBuilder) -> clientSettingsBuilder.applicationName("custom-transport-settings")
.transportSettings(transportSettings);
}
}
}
| SimpleTransportSettingsCustomizerConfig |
java | netty__netty | testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketMultipleConnectTest.java | {
"start": 1422,
"end": 3547
} | class ____ extends AbstractSocketTest {
@Test
@Timeout(value = 30000, unit = TimeUnit.MILLISECONDS)
public void testMultipleConnect(TestInfo testInfo) throws Throwable {
run(testInfo, new Runner<ServerBootstrap, Bootstrap>() {
@Override
public void run(ServerBootstrap serverBootstrap, Bootstrap bootstrap) throws Throwable {
testMultipleConnect(serverBootstrap, bootstrap);
}
});
}
public void testMultipleConnect(ServerBootstrap sb, Bootstrap cb) throws Exception {
Channel sc = null;
Channel cc = null;
try {
sb.childHandler(new ChannelInboundHandlerAdapter());
sc = sb.bind(NetUtil.LOCALHOST, 0).syncUninterruptibly().channel();
cb.handler(new ChannelInboundHandlerAdapter());
cc = cb.register().syncUninterruptibly().channel();
cc.connect(sc.localAddress()).syncUninterruptibly();
ChannelFuture connectFuture2 = cc.connect(sc.localAddress()).await();
assertTrue(connectFuture2.cause() instanceof AlreadyConnectedException);
} finally {
if (cc != null) {
cc.close();
}
if (sc != null) {
sc.close();
}
}
}
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> newFactories() {
List<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>> factories
= new ArrayList<TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap>>();
for (TestsuitePermutation.BootstrapComboFactory<ServerBootstrap, Bootstrap> comboFactory
: SocketTestPermutation.INSTANCE.socketWithFastOpen()) {
EventLoopGroup group = comboFactory.newClientInstance().config().group();
if (group instanceof IoEventLoopGroup && ((IoEventLoopGroup) group).isIoType(NioIoHandler.class)) {
factories.add(comboFactory);
}
}
return factories;
}
}
| SocketMultipleConnectTest |
java | google__auto | value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java | {
"start": 153503,
"end": 154422
} | class ____ {",
" public abstract String string();",
"}");
AutoValueProcessor autoValueProcessor = new AutoValueProcessor();
Compilation compilation =
javac()
.withProcessors(autoValueProcessor)
.withOptions("-Xlint:-processing", "-implicit:none")
.compile(test, metadata);
assertThat(compilation).succeededWithoutWarnings();
assertThat(compilation)
.generatedSourceFile("foo.bar.AutoValue_Test")
.contentsAsUtf8String()
.doesNotContain("kotlin.Metadata");
}
@Test
public void autoValueBuilderNullableSetterPrimitiveGetter() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"",
"@AutoValue",
"public abstract | Test |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/search/arguments/SpellCheckArgs.java | {
"start": 4992,
"end": 5634
} | enum ____ {
INCLUDE, EXCLUDE
}
private final Type type;
private final K dictionary;
private final V[] terms;
@SafeVarargs
TermsClause(Type type, K dictionary, V... terms) {
this.type = type;
this.dictionary = dictionary;
this.terms = terms;
}
void build(CommandArgs<K, V> args) {
args.add(CommandKeyword.TERMS).add(type.name()).addKey(dictionary);
if (terms != null) {
for (V term : terms) {
args.addValue(term);
}
}
}
}
}
| Type |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestRackResolver.java | {
"start": 2945,
"end": 3061
} | class ____ to test the resolve method which accepts a list of hosts
* in RackResolver.
*/
public static final | is |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/IdempotentConsumerCompletionEagerTest.java | {
"start": 1334,
"end": 5255
} | class ____ extends ContextTestSupport {
protected Endpoint startEndpoint;
protected MockEndpoint resultEndpoint;
protected MockEndpoint a;
protected MockEndpoint b;
protected MockEndpoint dead;
protected IdempotentRepository repo;
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testCompletionEager() throws Exception {
repo = MemoryIdempotentRepository.memoryIdempotentRepository(200);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
errorHandler(deadLetterChannel("mock:dead"));
from("direct:start").idempotentConsumer(header("messageId"), repo).completionEager(true).to("log:a", "mock:a")
.to("log:b", "mock:b").end()
.filter(simple("${header.messageId} == '2'")).throwException(new IllegalArgumentException("Forced"))
.end().to("log:result", "mock:result");
}
});
context.start();
// we are on block only scope as "two" was success in the block, and
// then "two" failed afterwards does not matter
// the idempotent consumer will not receive "two" again
a.expectedBodiesReceived("one", "two", "three");
b.expectedBodiesReceived("one", "two", "three");
dead.expectedBodiesReceived("two", "two");
resultEndpoint.expectedBodiesReceived("one", "one", "one", "three");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("3", "three");
assertMockEndpointsSatisfied();
}
@Test
public void testNotCompletionEager() throws Exception {
repo = MemoryIdempotentRepository.memoryIdempotentRepository(200);
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
errorHandler(deadLetterChannel("mock:dead"));
from("direct:start").idempotentConsumer(header("messageId"), repo).completionEager(false).to("log:a", "mock:a")
.to("log:b", "mock:b").end()
.filter(simple("${header.messageId} == '2'")).throwException(new IllegalArgumentException("Forced"))
.end().to("log:result", "mock:result");
}
});
context.start();
// we are on completion scope so the "two" will rollback and therefore
// the idempotent consumer receives those again
a.expectedBodiesReceived("one", "two", "two", "three");
b.expectedBodiesReceived("one", "two", "two", "three");
dead.expectedBodiesReceived("two", "two");
resultEndpoint.expectedBodiesReceived("one", "one", "one", "three");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("2", "two");
sendMessage("1", "one");
sendMessage("3", "three");
assertMockEndpointsSatisfied();
}
protected void sendMessage(final Object messageId, final Object body) {
template.send(startEndpoint, new Processor() {
public void process(Exchange exchange) {
// now lets fire in a message
Message in = exchange.getIn();
in.setBody(body);
in.setHeader("messageId", messageId);
}
});
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
startEndpoint = resolveMandatoryEndpoint("direct:start");
resultEndpoint = getMockEndpoint("mock:result");
a = getMockEndpoint("mock:a");
b = getMockEndpoint("mock:b");
dead = getMockEndpoint("mock:dead");
}
}
| IdempotentConsumerCompletionEagerTest |
java | quarkusio__quarkus | extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/pathparams/HttpPathParamLimitWithProgrammaticRoutes400Test.java | {
"start": 2792,
"end": 3453
} | class ____ {
void registerProgrammaticRoutes(@Observes Router router) {
router.get("/programmatic").handler(rc -> {
rc.response().end("hello");
});
router.get("/programmatic/:message").handler(rc -> {
rc.response().end("hello " + rc.pathParam("message"));
});
router.get("/bad").handler(rc -> {
rc.response().setStatusCode(400).end("hello");
});
router.get("/bad/:message").handler(rc -> {
rc.response().setStatusCode(400).end("hello " + rc.pathParam("message"));
});
}
}
}
| Resource |
java | spring-projects__spring-security | access/src/test/java/org/springframework/security/access/annotation/Jsr250MethodSecurityMetadataSourceTests.java | {
"start": 7643,
"end": 7813
} | class ____ {
public void noRoleMethod() {
}
@RolesAllowed("ADMIN")
public void adminMethod() {
}
}
// JSR-250 Spec
@RolesAllowed("IPARENT")
| UserAllowedClass |
java | mockito__mockito | mockito-core/src/test/java/org/mockitousage/annotation/AnnotationsTest.java | {
"start": 3221,
"end": 3368
} | class ____ {
@Mock private IMethods mock;
public IMethods getSuperBaseMock() {
return mock;
}
}
| SuperBase |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java | {
"start": 4377,
"end": 4742
} | class ____ extends CreateOpts {
private final FsPermission permissions;
protected Perms(FsPermission perm) {
if(perm == null) {
throw new IllegalArgumentException("Permissions must not be null");
}
permissions = perm;
}
public FsPermission getValue() { return permissions; }
}
public static | Perms |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ReturnMissingNullableTest.java | {
"start": 66932,
"end": 67435
} | class ____ {
java.lang.@UnderInitialization Object method(boolean b) {
if (b) {
return null;
} else {
return null;
}
}
}
""")
.addOutputLines(
"out/Test.java",
"""
import org.checkerframework.checker.initialization.qual.UnderInitialization;
import org.checkerframework.checker.nullness.qual.Nullable;
| T |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/convert/CoerceContainersTest.java | {
"start": 458,
"end": 7018
} | class ____
{
private final String JSON_EMPTY = q("");
private final ObjectMapper VANILLA_MAPPER = sharedMapper();
private final ObjectMapper COERCING_MAPPER = jsonMapperBuilder()
.withCoercionConfigDefaults(cfg ->
cfg.setCoercion(CoercionInputShape.EmptyString, CoercionAction.AsEmpty))
.build();
/*
/********************************************************
/* Tests for collections
/********************************************************
*/
@Test
public void testScalarCollections() throws Exception
{
final JavaType listType = VANILLA_MAPPER.getTypeFactory()
.constructType(new TypeReference<List<Double>>() { });
// 03-Aug-2022, tatu: Due to [databind#3418] message changed; not
// 100% sure how it should work but let's try this
// _verifyNoCoercion(listType);
try {
VANILLA_MAPPER.readerFor(listType).readValue(JSON_EMPTY);
fail("Should not pass");
} catch (DatabindException e) {
// verifyException(e, "Cannot coerce empty String");
verifyException(e, "Cannot deserialize value of type");
verifyException(e, "from String value");
}
List<Double> result = _readWithCoercion(listType);
assertNotNull(result);
assertEquals(0, result.size());
}
@Test
public void testStringCollections() throws Exception
{
final JavaType listType = VANILLA_MAPPER.getTypeFactory()
.constructType(new TypeReference<List<String>>() { });
_verifyNoCoercion(listType);
List<String> result = _readWithCoercion(listType);
assertNotNull(result);
assertEquals(0, result.size());
}
/*
/********************************************************
/* Tests for Maps
/********************************************************
*/
@Test
public void testScalarMap() throws Exception
{
final JavaType mapType = VANILLA_MAPPER.getTypeFactory()
.constructType(new TypeReference<Map<Long, Boolean>>() { });
_verifyNoCoercion(mapType);
Map<?,?> result = _readWithCoercion(mapType);
assertNotNull(result);
assertEquals(0, result.size());
}
@Test
public void testEnumMap() throws Exception
{
final JavaType mapType = VANILLA_MAPPER.getTypeFactory()
.constructType(new TypeReference<EnumMap<ABC, Boolean>>() { });
_verifyNoCoercion(mapType);
Map<?,?> result = _readWithCoercion(mapType);
assertNotNull(result);
assertEquals(0, result.size());
}
/*
/********************************************************
/* Tests for arrays
/********************************************************
*/
@Test
public void testObjectArray() throws Exception
{
final JavaType arrayType = VANILLA_MAPPER.getTypeFactory()
.constructType(new TypeReference<Object[]>() { });
_verifyNoCoercion(arrayType);
Object[] result = _readWithCoercion(arrayType);
assertNotNull(result);
assertEquals(0, result.length);
}
@Test
public void testStringArray() throws Exception
{
final JavaType arrayType = VANILLA_MAPPER.getTypeFactory()
.constructType(new TypeReference<String[]>() { });
_verifyNoCoercion(arrayType);
String[] result = _readWithCoercion(arrayType);
assertNotNull(result);
assertEquals(0, result.length);
}
@Test
public void testBooleanArray() throws Exception
{
_verifyNoCoercion(boolean[].class);
boolean[] result = _readWithCoercion(boolean[].class);
assertNotNull(result);
assertEquals(0, result.length);
}
@Test
public void testIntArray() throws Exception
{
_verifyNoCoercion(int[].class);
int[] result = _readWithCoercion(int[].class);
assertNotNull(result);
assertEquals(0, result.length);
}
@Test
public void testLongArray() throws Exception
{
_verifyNoCoercion(long[].class);
long[] result = _readWithCoercion(long[].class);
assertNotNull(result);
assertEquals(0, result.length);
}
@Test
public void testFloatArray() throws Exception
{
// 06-Aug-2025, tatu: with [databind#5242] will coerce empty String
// as empty Base64 array, so no exception here
//_verifyNoCoercion(float[].class);
float[] result = _readWithCoercion(float[].class);
assertNotNull(result);
assertEquals(0, result.length);
}
@Test
public void testDoubleArray() throws Exception
{
// 06-Aug-2025, tatu: with [databind#5242] will coerce empty String
// as empty Base64 array, so no exception here
//_verifyNoCoercion(double[].class);
double[] result = _readWithCoercion(double[].class);
assertNotNull(result);
assertEquals(0, result.length);
}
@Test
public void testPOJOArray() throws Exception
{
_verifyNoCoercion(StringWrapper[].class);
StringWrapper[] result = _readWithCoercion(StringWrapper[].class);
assertNotNull(result);
assertEquals(0, result.length);
}
/*
/********************************************************
/* Helper methods
/********************************************************
*/
private void _verifyNoCoercion(Class<?> targetType) throws Exception {
_verifyNoCoercion(VANILLA_MAPPER.constructType(targetType));
}
private void _verifyNoCoercion(JavaType targetType) throws Exception {
try {
VANILLA_MAPPER.readerFor(targetType).readValue(JSON_EMPTY);
fail("Should not pass");
} catch (DatabindException e) {
// 06-Nov-2020, tatu: tests for failure get rather fragile unfortunately,
// but this seems to be what we should be getting
verifyException(e, "Cannot coerce empty String");
// verifyException(e, "Cannot deserialize value of type");
}
}
private <T> T _readWithCoercion(Class<?> targetType) throws Exception {
return COERCING_MAPPER.readerFor(targetType).readValue(JSON_EMPTY);
}
private <T> T _readWithCoercion(JavaType targetType) throws Exception {
return COERCING_MAPPER.readerFor(targetType).readValue(JSON_EMPTY);
}
}
| CoerceContainersTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/api/TestGetApplicationsRequest.java | {
"start": 1366,
"end": 3996
} | class ____ {
@Test
void testGetApplicationsRequest() {
GetApplicationsRequest request = GetApplicationsRequest.newInstance();
EnumSet<YarnApplicationState> appStates =
EnumSet.of(YarnApplicationState.ACCEPTED);
request.setApplicationStates(appStates);
Set<String> tags = new HashSet<String>();
tags.add("tag1");
request.setApplicationTags(tags);
Set<String> types = new HashSet<String>();
types.add("type1");
request.setApplicationTypes(types);
long startBegin = System.currentTimeMillis();
long startEnd = System.currentTimeMillis() + 1;
request.setStartRange(startBegin, startEnd);
long finishBegin = System.currentTimeMillis() + 2;
long finishEnd = System.currentTimeMillis() + 3;
request.setFinishRange(finishBegin, finishEnd);
long limit = 100L;
request.setLimit(limit);
Set<String> queues = new HashSet<String>();
queues.add("queue1");
request.setQueues(queues);
Set<String> users = new HashSet<String>();
users.add("user1");
request.setUsers(users);
ApplicationsRequestScope scope = ApplicationsRequestScope.ALL;
request.setScope(scope);
GetApplicationsRequest requestFromProto = new GetApplicationsRequestPBImpl(
((GetApplicationsRequestPBImpl) request).getProto());
// verify the whole record equals with original record
assertEquals(requestFromProto, request);
// verify all properties are the same as original request
assertEquals(requestFromProto.getApplicationStates(), appStates,
"ApplicationStates from proto is not the same with original request");
assertEquals(requestFromProto.getApplicationTags(), tags,
"ApplicationTags from proto is not the same with original request");
assertEquals(requestFromProto.getApplicationTypes(), types,
"ApplicationTypes from proto is not the same with original request");
assertEquals(requestFromProto.getStartRange(), Range.between(startBegin, startEnd),
"StartRange from proto is not the same with original request");
assertEquals(requestFromProto.getFinishRange(), Range.between(finishBegin, finishEnd),
"FinishRange from proto is not the same with original request");
assertEquals(requestFromProto.getLimit(), limit,
"Limit from proto is not the same with original request");
assertEquals(requestFromProto.getQueues(), queues,
"Queues from proto is not the same with original request");
assertEquals(requestFromProto.getUsers(), users,
"Users from proto is not the same with original request");
}
}
| TestGetApplicationsRequest |
java | apache__dubbo | dubbo-plugin/dubbo-rest-openapi/src/main/java/org/apache/dubbo/rpc/protocol/tri/rest/openapi/DefinitionResolver.java | {
"start": 18287,
"end": 18587
} | class ____ extends AbstractContext implements OperationContext {
OperationContextImpl(OpenAPI openAPI, SchemaResolver schemaResolver, ExtensionFactory extensionFactory) {
super(openAPI, schemaResolver, extensionFactory);
}
}
private static final | OperationContextImpl |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/method/configuration/ReactiveMethodSecurityConfigurationTests.java | {
"start": 17404,
"end": 17692
} | class ____ {
MethodAuthorizationDeniedHandler handler = mock(MethodAuthorizationDeniedHandler.class);
@Bean
MethodAuthorizationDeniedHandler methodAuthorizationDeniedHandler() {
return this.handler;
}
}
@Configuration
@EnableReactiveMethodSecurity
static | CustomResultConfig |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/connector/source/VectorSearchRuntimeProviderContext.java | {
"start": 1533,
"end": 2825
} | class ____
implements VectorSearchTableSource.VectorSearchContext {
private final int[][] searchColumns;
private final ReadableConfig runtimeConfig;
public VectorSearchRuntimeProviderContext(int[][] searchColumns, ReadableConfig runtimeConfig) {
this.searchColumns = searchColumns;
this.runtimeConfig = runtimeConfig;
}
@Override
public int[][] getSearchColumns() {
return searchColumns;
}
@Override
public ReadableConfig runtimeConfig() {
return runtimeConfig;
}
@Override
public <T> TypeInformation<T> createTypeInformation(DataType producedDataType) {
validateInputDataType(producedDataType);
return InternalTypeInfo.of(producedDataType.getLogicalType());
}
@Override
public <T> TypeInformation<T> createTypeInformation(LogicalType producedLogicalType) {
return InternalTypeInfo.of(producedLogicalType);
}
@Override
public DynamicTableSource.DataStructureConverter createDataStructureConverter(
DataType producedDataType) {
validateInputDataType(producedDataType);
return new DataStructureConverterWrapper(
DataStructureConverters.getConverter(producedDataType));
}
}
| VectorSearchRuntimeProviderContext |
java | netty__netty | example/src/main/java/io/netty/example/factorial/FactorialServer.java | {
"start": 1225,
"end": 1993
} | class ____ {
static final int PORT = Integer.parseInt(System.getProperty("port", "8322"));
public static void main(String[] args) throws Exception {
// Configure SSL.
final SslContext sslCtx = ServerUtil.buildSslContext();
EventLoopGroup group = new MultiThreadIoEventLoopGroup(NioIoHandler.newFactory());
try {
ServerBootstrap b = new ServerBootstrap();
b.group(group)
.channel(NioServerSocketChannel.class)
.handler(new LoggingHandler(LogLevel.INFO))
.childHandler(new FactorialServerInitializer(sslCtx));
b.bind(PORT).sync().channel().closeFuture().sync();
} finally {
group.shutdownGracefully();
}
}
}
| FactorialServer |
java | apache__rocketmq | tools/src/test/java/org/apache/rocketmq/tools/command/consumer/GetConsumerConfigSubCommandTest.java | {
"start": 1614,
"end": 4105
} | class ____ {
private ServerResponseMocker brokerMocker;
private ServerResponseMocker nameServerMocker;
@Before
public void before() {
brokerMocker = startOneBroker();
nameServerMocker = startNameServer();
}
@After
public void after() {
brokerMocker.shutdown();
nameServerMocker.shutdown();
}
@Test
public void testExecute() throws SubCommandException {
GetConsumerConfigSubCommand cmd = new GetConsumerConfigSubCommand();
Options options = ServerUtil.buildCommandlineOptions(new Options());
String[] subargs = new String[] {"-g group_test", String.format("-n localhost:%d", nameServerMocker.listenPort())};
final CommandLine commandLine =
ServerUtil.parseCmdLine("mqadmin " + cmd.commandName(), subargs,
cmd.buildCommandlineOptions(options),
new DefaultParser());
cmd.execute(commandLine, options, null);
}
private ServerResponseMocker startNameServer() {
ClusterInfo clusterInfo = new ClusterInfo();
HashMap<String, BrokerData> brokerAddressTable = new HashMap<>();
BrokerData brokerData = new BrokerData();
brokerData.setBrokerName("mockBrokerName");
HashMap<Long, String> brokerAddress = new HashMap<>();
brokerAddress.put(1L, "127.0.0.1:" + brokerMocker.listenPort());
brokerData.setBrokerAddrs(brokerAddress);
brokerData.setCluster("mockCluster");
brokerAddressTable.put("mockBrokerName", brokerData);
clusterInfo.setBrokerAddrTable(brokerAddressTable);
HashMap<String, Set<String>> clusterAddressTable = new HashMap<>();
Set<String> brokerNames = new HashSet<>();
brokerNames.add("mockBrokerName");
clusterAddressTable.put("mockCluster", brokerNames);
clusterInfo.setClusterAddrTable(clusterAddressTable);
// start name server
return ServerResponseMocker.startServer(clusterInfo.encode());
}
private ServerResponseMocker startOneBroker() {
ConsumerConnection consumerConnection = new ConsumerConnection();
HashSet<Connection> connectionSet = new HashSet<>();
Connection connection = mock(Connection.class);
connectionSet.add(connection);
consumerConnection.setConnectionSet(connectionSet);
// start broker
return ServerResponseMocker.startServer(consumerConnection.encode());
}
}
| GetConsumerConfigSubCommandTest |
java | apache__kafka | jmh-benchmarks/src/main/java/org/apache/kafka/jmh/server/PartitionCreationBench.java | {
"start": 3189,
"end": 8352
} | class ____ {
@Param({"false", "true"})
public boolean useTopicIds;
@Param({"20"})
public int numPartitions;
private final String topicName = "foo";
private Option<Uuid> topicId;
private Scheduler scheduler;
private Metrics metrics;
private Time time;
private KafkaConfig brokerProperties;
private ReplicaManager replicaManager;
private QuotaFactory.QuotaManagers quotaManagers;
private LogDirFailureChannel failureChannel;
private LogManager logManager;
private AlterPartitionManager alterPartitionManager;
private List<TopicPartition> topicPartitions;
@Setup(Level.Invocation)
public void setup() {
if (useTopicIds)
topicId = Option.apply(Uuid.randomUuid());
else
topicId = Option.empty();
this.scheduler = new KafkaScheduler(1, true, "scheduler-thread");
this.brokerProperties = KafkaConfig.fromProps(TestUtils.createBrokerConfig(
0, true, true, 9092, Option.empty(), Option.empty(),
Option.empty(), true, false, 0, false, 0, false, 0, Option.empty(), 1, true, 1,
(short) 1, false));
this.metrics = new Metrics();
this.time = Time.SYSTEM;
this.failureChannel = new LogDirFailureChannel(brokerProperties.logDirs().size());
final BrokerTopicStats brokerTopicStats = new BrokerTopicStats(false);
final List<File> files = brokerProperties.logDirs().stream().map(File::new).toList();
CleanerConfig cleanerConfig = new CleanerConfig(1,
4 * 1024 * 1024L, 0.9d,
1024 * 1024, 32 * 1024 * 1024,
Double.MAX_VALUE, 15 * 1000, true);
ConfigRepository configRepository = new MockConfigRepository();
this.logManager = new LogManagerBuilder().
setLogDirs(files).
setInitialOfflineDirs(List.of()).
setConfigRepository(configRepository).
setInitialDefaultConfig(createLogConfig()).
setCleanerConfig(cleanerConfig).
setRecoveryThreadsPerDataDir(1).
setFlushCheckMs(1000L).
setFlushRecoveryOffsetCheckpointMs(10000L).
setFlushStartOffsetCheckpointMs(10000L).
setRetentionCheckMs(1000L).
setProducerStateManagerConfig(60000, false).
setScheduler(scheduler).
setBrokerTopicStats(brokerTopicStats).
setLogDirFailureChannel(failureChannel).
setTime(Time.SYSTEM).
build();
scheduler.startup();
this.quotaManagers = QuotaFactory.instantiate(this.brokerProperties, this.metrics, this.time, "", "");
this.alterPartitionManager = TestUtils.createAlterIsrManager();
this.replicaManager = new ReplicaManagerBuilder().
setConfig(brokerProperties).
setMetrics(metrics).
setTime(time).
setScheduler(scheduler).
setLogManager(logManager).
setQuotaManagers(quotaManagers).
setBrokerTopicStats(brokerTopicStats).
setMetadataCache(new KRaftMetadataCache(this.brokerProperties.brokerId(), () -> KRAFT_VERSION_1)).
setLogDirFailureChannel(failureChannel).
setAlterPartitionManager(alterPartitionManager).
build();
replicaManager.startup();
replicaManager.checkpointHighWatermarks();
}
@TearDown(Level.Invocation)
public void tearDown() throws Exception {
this.replicaManager.shutdown(false);
logManager.shutdown(-1L);
this.metrics.close();
this.scheduler.shutdown();
this.quotaManagers.shutdown();
for (File dir : CollectionConverters.asJava(logManager.liveLogDirs())) {
Utils.delete(dir);
}
}
private static LogConfig createLogConfig() {
return new LogConfig(new Properties());
}
@Benchmark
@Threads(1)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
public void makeFollower() {
topicPartitions = new ArrayList<>();
for (int partitionNum = 0; partitionNum < numPartitions; partitionNum++) {
topicPartitions.add(new TopicPartition(topicName, partitionNum));
}
int[] replicas = {0, 1, 2};
OffsetCheckpoints checkpoints = (logDir, topicPartition) -> Optional.of(0L);
for (TopicPartition topicPartition : topicPartitions) {
final Partition partition = this.replicaManager.createPartition(topicPartition);
int[] isr = {0, 1, 2};
PartitionRegistration partitionRegistration = new PartitionRegistration.Builder()
.setLeader(0)
.setLeaderRecoveryState(LeaderRecoveryState.RECOVERED)
.setLeaderEpoch(0)
.setIsr(isr)
.setPartitionEpoch(1)
.setReplicas(replicas)
.setDirectories(DirectoryId.unassignedArray(replicas.length))
.build();
partition.makeFollower(partitionRegistration, true, checkpoints, topicId, Option.empty());
}
}
}
| PartitionCreationBench |
java | grpc__grpc-java | api/src/main/java/io/grpc/LoadBalancer.java | {
"start": 60288,
"end": 61030
} | class ____ extends SubchannelPicker {
private final PickResult result;
public FixedResultPicker(PickResult result) {
this.result = Preconditions.checkNotNull(result, "result");
}
@Override
public PickResult pickSubchannel(PickSubchannelArgs args) {
return result;
}
@Override
public String toString() {
return "FixedResultPicker(" + result + ")";
}
@Override
public int hashCode() {
return result.hashCode();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof FixedResultPicker)) {
return false;
}
FixedResultPicker that = (FixedResultPicker) o;
return this.result.equals(that.result);
}
}
}
| FixedResultPicker |
java | resilience4j__resilience4j | resilience4j-retry/src/test/java/io/github/resilience4j/retry/internal/CallableTest.java | {
"start": 754,
"end": 4927
} | class ____ {
private HelloWorldService helloWorldService;
@Before
public void setUp() {
helloWorldService = mock(HelloWorldService.class);
}
@Test
public void shouldPropagateLastExceptionWhenSleepFunctionThrowsException() throws IOException {
willThrow(new HelloWorldException()).given(helloWorldService).returnHelloWorldWithException();
RetryConfig config = RetryConfig.custom()
.intervalFunction((a) -> -1L)
.build();
Retry retry = Retry.of("id", config);
Callable<String> retryableCallable = Retry
.decorateCallable(retry, helloWorldService::returnHelloWorldWithException);
Try<Void> result = Try.run(retryableCallable::call);
then(helloWorldService).should().returnHelloWorldWithException();
assertThat(result.isFailure()).isTrue();
assertThat(result.failed().get()).isInstanceOf(HelloWorldException.class);
}
@Test
public void shouldStopRetryingAndEmitProperEventsIfIntervalFunctionReturnsLessThanZero() throws IOException {
given(helloWorldService.returnHelloWorldWithException())
.willThrow(new HelloWorldException("Exceptional!"));
AtomicInteger numberOfTimesIntervalFunctionCalled = new AtomicInteger(0);
RetryConfig retryConfig = RetryConfig.<String>custom()
.intervalFunction((ignored) -> {
int numTimesCalled = numberOfTimesIntervalFunctionCalled.incrementAndGet();
return numTimesCalled > 1 ? -1L : 0L;
})
.maxAttempts(3)
.build();
AtomicInteger numberOfRetryEvents = new AtomicInteger();
AtomicBoolean onErrorEventOccurred = new AtomicBoolean(false);
Retry retry = Retry.of("retry", retryConfig);
retry.getEventPublisher().onRetry((ignored) -> numberOfRetryEvents.getAndIncrement());
retry.getEventPublisher().onError((ignored) -> onErrorEventOccurred.set(true));
Callable<String> callable = Retry.decorateCallable(
retry,
helloWorldService::returnHelloWorldWithException
);
Try<Void> result = Try.run(callable::call);
assertThat(result.isFailure()).isTrue();
assertThat(result.failed().get())
.isInstanceOf(HelloWorldException.class);
assertThat(numberOfRetryEvents).hasValue(1);
assertThat(onErrorEventOccurred).isTrue();
then(helloWorldService).should(times(2)).returnHelloWorldWithException();
}
@Test
public void shouldContinueRetryingAndEmitProperEventsIfIntervalFunctionReturnsZeroOrMore() throws IOException {
given(helloWorldService.returnHelloWorldWithException())
.willThrow(new HelloWorldException("Exceptional!"));
AtomicInteger numberOfTimesIntervalFunctionCalled = new AtomicInteger(0);
RetryConfig retryConfig = RetryConfig.<String>custom()
.intervalFunction((ignored) -> {
// Returns 0, 1, 2
return (long) numberOfTimesIntervalFunctionCalled.getAndIncrement();
})
.maxAttempts(3)
.build();
AtomicInteger numberOfRetryEvents = new AtomicInteger();
AtomicBoolean onErrorEventOccurred = new AtomicBoolean(false);
Retry retry = Retry.of("retry", retryConfig);
retry.getEventPublisher().onRetry((ignored) -> numberOfRetryEvents.getAndIncrement());
retry.getEventPublisher().onError((ignored) -> onErrorEventOccurred.set(true));
Callable<String> callable = Retry.decorateCallable(
retry,
helloWorldService::returnHelloWorldWithException
);
Try<Void> result = Try.run(callable::call);
assertThat(result.isFailure()).isTrue();
assertThat(result.failed().get())
.isInstanceOf(HelloWorldException.class);
assertThat(numberOfRetryEvents).hasValue(2);
assertThat(onErrorEventOccurred).isTrue();
then(helloWorldService).should(times(3)).returnHelloWorldWithException();
}
}
| CallableTest |
java | quarkusio__quarkus | independent-projects/qute/core/src/main/java/io/quarkus/qute/TemplateExtension.java | {
"start": 6369,
"end": 6802
} | interface ____ {
/**
* Constant value for {@link #value()} indicating that the name of the annotated parameter should be used as-is.
*/
String PARAMETER_NAME = "<<parameter name>>";
/**
*
* @return the key used to obtain the attribute
* @see TemplateInstance#getAttribute(String)
*/
String value() default PARAMETER_NAME;
}
}
| TemplateAttribute |
java | elastic__elasticsearch | x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java | {
"start": 23964,
"end": 25644
} | class ____ extends FilterBlobContainer {
TrackingFilesBlobContainer(BlobContainer delegate) {
super(delegate);
}
@Override
public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException {
return new FilterInputStream(super.readBlob(purpose, blobName, position, length)) {
long bytesRead = 0L;
@Override
public int read() throws IOException {
final int result = in.read();
if (result == -1) {
return result;
}
bytesRead += 1L;
return result;
}
@Override
public int read(byte[] b, int offset, int len) throws IOException {
final int result = in.read(b, offset, len);
if (result == -1) {
return result;
}
bytesRead += len;
return result;
}
@Override
public void close() throws IOException {
files.merge(blobName, bytesRead, Math::addExact);
super.close();
}
};
}
@Override
protected BlobContainer wrapChild(BlobContainer child) {
return new TrackingFilesBlobContainer(child);
}
}
}
}
| TrackingFilesBlobContainer |
java | spring-projects__spring-security | oauth2/oauth2-authorization-server/src/main/java/org/springframework/security/oauth2/server/authorization/jackson/JsonNodeUtils.java | {
"start": 902,
"end": 983
} | class ____ {@code JsonNode}.
*
* @author Joe Grandja
* @since 7.0
*/
abstract | for |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionTaskSettingsTests.java | {
"start": 574,
"end": 1820
} | class ____ extends OpenAiTaskSettingsTests<OpenAiChatCompletionTaskSettings> {
private static final TransportVersion INFERENCE_API_OPENAI_HEADERS = TransportVersion.fromName("inference_api_openai_headers");
@Override
protected Writeable.Reader<OpenAiChatCompletionTaskSettings> instanceReader() {
return OpenAiChatCompletionTaskSettings::new;
}
@Override
protected OpenAiChatCompletionTaskSettings createTestInstance() {
return createRandom();
}
@Override
protected OpenAiChatCompletionTaskSettings mutateInstanceForVersion(
OpenAiChatCompletionTaskSettings instance,
TransportVersion version
) {
if (version.supports(INFERENCE_API_OPENAI_HEADERS)) {
return instance;
}
return create(instance.user(), null);
}
@Override
protected OpenAiChatCompletionTaskSettings create(@Nullable String user, @Nullable Map<String, String> headers) {
return new OpenAiChatCompletionTaskSettings(user, headers);
}
@Override
protected OpenAiChatCompletionTaskSettings createFromMap(@Nullable Map<String, Object> map) {
return new OpenAiChatCompletionTaskSettings(map);
}
}
| OpenAiChatCompletionTaskSettingsTests |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/byte2darray/Byte2DArrayAssert_usingCustomComparator_Test.java | {
"start": 986,
"end": 1615
} | class ____ extends Byte2DArrayAssertBaseTest {
private static final AlwaysEqualComparator<byte[][]> ALWAYS_EQUAL = alwaysEqual();
@Override
protected Byte2DArrayAssert invoke_api_method() {
return assertions.usingComparator(ALWAYS_EQUAL);
}
@Override
protected void verify_internal_effects() {
assertThat(getObjects(assertions).getComparator()).isSameAs(ALWAYS_EQUAL);
}
@Test
void should_honor_comparator() {
assertThat(new byte[][] {}).usingComparator(ALWAYS_EQUAL)
.isEqualTo(new byte[][] { { 1, 2 }, { 3, 4 } });
}
}
| Byte2DArrayAssert_usingCustomComparator_Test |
java | spring-projects__spring-boot | buildpack/spring-boot-buildpack-platform/src/main/java/org/springframework/boot/buildpack/platform/docker/transport/HttpTransport.java | {
"start": 3913,
"end": 4234
} | interface ____ extends Closeable {
/**
* Return the content of the response.
* @return the response content
* @throws IOException on IO error
*/
InputStream getContent() throws IOException;
default @Nullable Header getHeader(String name) {
throw new UnsupportedOperationException();
}
}
}
| Response |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/Spr8954Tests.java | {
"start": 1754,
"end": 3184
} | class ____ {
@Test
void repro() {
AnnotationConfigApplicationContext bf = new AnnotationConfigApplicationContext();
bf.registerBeanDefinition("fooConfig", new RootBeanDefinition(FooConfig.class));
bf.getBeanFactory().addBeanPostProcessor(new PredictingBPP());
bf.refresh();
assertThat(bf.getBean("foo")).isInstanceOf(Foo.class);
assertThat(bf.getBean("&foo")).isInstanceOf(FooFactoryBean.class);
assertThat(bf.isTypeMatch("&foo", FactoryBean.class)).isTrue();
@SuppressWarnings("rawtypes")
Map<String, FactoryBean> fbBeans = bf.getBeansOfType(FactoryBean.class);
assertThat(fbBeans).containsOnlyKeys("&foo");
Map<String, AnInterface> aiBeans = bf.getBeansOfType(AnInterface.class);
assertThat(aiBeans).containsOnlyKeys("&foo");
}
@Test
void findsBeansByTypeIfNotInstantiated() {
AnnotationConfigApplicationContext bf = new AnnotationConfigApplicationContext();
bf.registerBeanDefinition("fooConfig", new RootBeanDefinition(FooConfig.class));
bf.getBeanFactory().addBeanPostProcessor(new PredictingBPP());
bf.refresh();
assertThat(bf.isTypeMatch("&foo", FactoryBean.class)).isTrue();
@SuppressWarnings("rawtypes")
Map<String, FactoryBean> fbBeans = bf.getBeansOfType(FactoryBean.class);
assertThat(fbBeans).containsOnlyKeys("&foo");
Map<String, AnInterface> aiBeans = bf.getBeansOfType(AnInterface.class);
assertThat(aiBeans).containsOnlyKeys("&foo");
}
static | Spr8954Tests |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassParser.java | {
"start": 9521,
"end": 10960
} | class ____ definition");
MetadataReader reader = this.metadataReaderFactory.getMetadataReader(className);
ConfigurationClass configClass = new ConfigurationClass(reader, beanName);
processConfigurationClass(configClass, DEFAULT_EXCLUSION_FILTER);
return configClass;
}
/**
* Validate each {@link ConfigurationClass} object.
* @see ConfigurationClass#validate
*/
void validate() {
for (ConfigurationClass configClass : this.configurationClasses.keySet()) {
configClass.validate(this.problemReporter);
}
}
Set<ConfigurationClass> getConfigurationClasses() {
return this.configurationClasses.keySet();
}
List<PropertySourceDescriptor> getPropertySourceDescriptors() {
return (this.propertySourceRegistry != null ? this.propertySourceRegistry.getDescriptors() :
Collections.emptyList());
}
ImportRegistry getImportRegistry() {
return this.importStack;
}
protected void processConfigurationClass(ConfigurationClass configClass, Predicate<String> filter) {
if (this.conditionEvaluator.shouldSkip(configClass.getMetadata(), ConfigurationPhase.PARSE_CONFIGURATION)) {
return;
}
ConfigurationClass existingClass = this.configurationClasses.get(configClass);
if (existingClass != null) {
if (configClass.isImported()) {
if (existingClass.isImported()) {
existingClass.mergeImportedBy(configClass);
}
// Otherwise ignore new imported config class; existing non-imported | bean |
java | apache__camel | core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/CamelInternalProcessor.java | {
"start": 8908,
"end": 18472
} | class ____ implements CamelInternalTask {
private final Object[] states;
private Exchange exchange;
private AsyncCallback originalCallback;
private AsyncAfterTask(Object[] states) {
this.states = states;
}
@Override
public void prepare(Exchange exchange, AsyncCallback originalCallback) {
this.exchange = exchange;
this.originalCallback = originalCallback;
}
@Override
public Object[] getStates() {
return states;
}
@Override
public void reset() {
Arrays.fill(this.states, null);
this.exchange = null;
this.originalCallback = null;
}
@Override
public void done(boolean doneSync) {
try {
AdviceIterator.runAfterTasks(advices, states, exchange);
} finally {
// ----------------------------------------------------------
// CAMEL END USER - DEBUG ME HERE +++ START +++
// ----------------------------------------------------------
// callback must be called
if (originalCallback != null) {
reactiveExecutor.schedule(originalCallback);
}
// ----------------------------------------------------------
// CAMEL END USER - DEBUG ME HERE +++ END +++
// ----------------------------------------------------------
// task is done so reset
if (taskFactory != null) {
taskFactory.release(this);
}
}
}
}
@Override
public boolean process(Exchange exchange, AsyncCallback originalCallback) {
// ----------------------------------------------------------
// CAMEL END USER - READ ME FOR DEBUGGING TIPS
// ----------------------------------------------------------
// If you want to debug the Camel routing engine, then there is a lot of internal functionality
// the routing engine executes during routing messages. You can skip debugging this internal
// functionality and instead debug where the routing engine continues routing to the next node
// in the routes. The CamelInternalProcessor is a vital part of the routing engine, as its
// being used in between the nodes. As an end user you can just debug the code in this class
// in between the:
// CAMEL END USER - DEBUG ME HERE +++ START +++
// CAMEL END USER - DEBUG ME HERE +++ END +++
// you can see in the code below within the processTransacted or processNonTransacted methods.
// ----------------------------------------------------------
if (processor == null || exchange.isRouteStop()) {
// no processor or we should not continue then we are done
originalCallback.done(true);
return true;
}
if (this instanceof Channel ca && ca.getNextProcessor() instanceof DisabledAware da && da.isDisabled()) {
// skip because the processor is disabled at runtime (in dev mode)
originalCallback.done(true);
return true;
}
if (shutdownStrategy.isForceShutdown()) {
return processShutdown(exchange, originalCallback);
}
Object[] states;
// create internal callback which will execute the advices in reverse order when done
CamelInternalTask afterTask = taskFactory != null ? taskFactory.acquire() : null;
if (afterTask == null) {
states = statefulAdvices > 0 ? new Object[statefulAdvices] : EMPTY_STATES;
afterTask = new AsyncAfterTask(states);
} else {
states = afterTask.getStates();
}
afterTask.prepare(exchange, originalCallback);
// optimise to use object array for states, and only for the number of advices that keep state
// optimise for loop using index access to avoid creating iterator object
for (int i = 0, j = 0; i < advices.size(); i++) {
CamelInternalProcessorAdvice<?> task = advices.get(i);
try {
Object state = task.before(exchange);
if (task.hasState()) {
states[j++] = state;
}
} catch (Exception e) {
return handleException(exchange, originalCallback, e, afterTask);
}
}
// debugger can skip processing the exchange
Object skip = exchange.removeProperty(ExchangePropertyKey.SKIP_OVER);
if (Boolean.TRUE == skip) {
if (LOG.isTraceEnabled()) {
LOG.trace("Skipping exchange for exchangeId: {} -> {}", exchange.getExchangeId(), exchange);
}
List<MessageHistory> list = exchange.getProperty(ExchangePropertyKey.MESSAGE_HISTORY, List.class);
if (list != null && !list.isEmpty()) {
MessageHistory last = list.get(list.size() - 1);
last.setDebugSkipOver(true);
}
// skip because the processor is specially disabled (such as from debugger)
originalCallback.done(true);
return true;
}
if (exchange.isTransacted()) {
return processTransacted(exchange, afterTask);
}
return processNonTransacted(exchange, afterTask);
}
private static boolean processShutdown(Exchange exchange, AsyncCallback originalCallback) {
String msg = "Run not allowed as ShutdownStrategy is forcing shutting down, will reject executing exchange: "
+ exchange;
LOG.debug(msg);
if (exchange.getException() == null) {
exchange.setException(new RejectedExecutionException(msg));
}
// force shutdown so we should not continue
originalCallback.done(true);
return true;
}
private boolean processNonTransacted(Exchange exchange, CamelInternalTask afterTask) {
final AsyncCallback async = beforeProcess(exchange, afterTask);
// ----------------------------------------------------------
// CAMEL END USER - DEBUG ME HERE +++ START +++
// ----------------------------------------------------------
if (LOG.isTraceEnabled()) {
LOG.trace("Processing exchange for exchangeId: {} -> {}", exchange.getExchangeId(), exchange);
}
boolean sync = processor.process(exchange, async);
if (!sync) {
EventHelper.notifyExchangeAsyncProcessingStartedEvent(camelContext, exchange);
}
// ----------------------------------------------------------
// CAMEL END USER - DEBUG ME HERE +++ END +++
// ----------------------------------------------------------
// CAMEL-18255: move uow.afterProcess handling to the callback
if (LOG.isTraceEnabled()) {
logExchangeContinuity(exchange, sync);
}
return sync;
}
private static void logExchangeContinuity(Exchange exchange, boolean sync) {
LOG.trace("Exchange processed and is continued routed {} for exchangeId: {} -> {}",
sync ? "synchronously" : "asynchronously",
exchange.getExchangeId(), exchange);
}
private AsyncCallback beforeProcess(Exchange exchange, CamelInternalTask afterTask) {
final UnitOfWork uow = exchange.getUnitOfWork();
// optimize to only do before uow processing if really needed
if (uow != null && uow.isBeforeAfterProcess()) {
return uow.beforeProcess(processor, exchange, afterTask);
}
return afterTask;
}
private boolean processTransacted(Exchange exchange, CamelInternalTask afterTask) {
// must be synchronized for transacted exchanges
if (LOG.isTraceEnabled()) {
LOG.trace("Transacted Exchange must be routed synchronously for exchangeId: {} -> {}", exchange.getExchangeId(),
exchange);
}
try {
// ----------------------------------------------------------
// CAMEL END USER - DEBUG ME HERE +++ START +++
// ----------------------------------------------------------
processor.process(exchange);
// ----------------------------------------------------------
// CAMEL END USER - DEBUG ME HERE +++ END +++
// ----------------------------------------------------------
} catch (Exception e) {
exchange.setException(e);
} finally {
// processing is done
afterTask.done(true);
}
// we are done synchronously - must return true
return true;
}
private boolean handleException(
Exchange exchange, AsyncCallback originalCallback, Exception e, CamelInternalTask afterTask) {
// error in before so break out
exchange.setException(e);
try {
originalCallback.done(true);
} finally {
// task is done so reset
if (taskFactory != null) {
taskFactory.release(afterTask);
}
}
return true;
}
@Override
public String toString() {
return processor != null ? processor.toString() : super.toString();
}
/**
* Advice to invoke callbacks for before and after routing.
*/
public static | AsyncAfterTask |
java | apache__camel | components/camel-keycloak/src/main/java/org/apache/camel/component/keycloak/KeycloakOperations.java | {
"start": 856,
"end": 3090
/**
 * Enumerates the operations supported by the Keycloak component, grouped by
 * the Keycloak admin resource they act on (realms, users, roles, groups,
 * clients, sessions, scopes, identity providers, authorization resources,
 * attributes, credentials, user actions, secrets, and bulk variants).
 */
enum ____ {
    // Realm operations
    createRealm,
    deleteRealm,
    getRealm,
    updateRealm,
    // User operations
    createUser,
    deleteUser,
    getUser,
    updateUser,
    listUsers,
    searchUsers,
    // Realm role operations
    createRole,
    deleteRole,
    getRole,
    updateRole,
    listRoles,
    assignRoleToUser,
    removeRoleFromUser,
    getUserRoles,
    // Group operations
    createGroup,
    deleteGroup,
    getGroup,
    updateGroup,
    listGroups,
    addUserToGroup,
    removeUserFromGroup,
    listUserGroups,
    // Client operations
    createClient,
    deleteClient,
    getClient,
    updateClient,
    listClients,
    // User password operations
    resetUserPassword,
    // Client role operations
    createClientRole,
    deleteClientRole,
    getClientRole,
    updateClientRole,
    listClientRoles,
    assignClientRoleToUser,
    removeClientRoleFromUser,
    // User session operations
    listUserSessions,
    logoutUser,
    // Client scope operations
    createClientScope,
    deleteClientScope,
    getClientScope,
    updateClientScope,
    listClientScopes,
    // Identity Provider operations
    createIdentityProvider,
    deleteIdentityProvider,
    getIdentityProvider,
    updateIdentityProvider,
    listIdentityProviders,
    // Authorization Services operations
    createResource,
    deleteResource,
    getResource,
    updateResource,
    listResources,
    createResourcePolicy,
    deleteResourcePolicy,
    getResourcePolicy,
    updateResourcePolicy,
    listResourcePolicies,
    createResourcePermission,
    deleteResourcePermission,
    getResourcePermission,
    updateResourcePermission,
    listResourcePermissions,
    evaluatePermission,
    // User Attribute operations
    getUserAttributes,
    setUserAttribute,
    deleteUserAttribute,
    // User Credential operations
    getUserCredentials,
    deleteUserCredential,
    // User Action operations
    sendVerifyEmail,
    sendPasswordResetEmail,
    addRequiredAction,
    removeRequiredAction,
    executeActionsEmail,
    // Client Secret Management
    getClientSecret,
    regenerateClientSecret,
    // Bulk operations
    bulkCreateUsers,
    bulkDeleteUsers,
    bulkAssignRolesToUser,
    bulkAssignRoleToUsers,
    bulkUpdateUsers
}
| KeycloakOperations |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java | {
"start": 15925,
"end": 93180
/**
 * Test-only unary plan node whose output is exactly one {@link FieldAttribute},
 * while reporting an empty reference set — so the optimizer does not treat the
 * field as "used" by this node. Serialization is unsupported because the plan
 * only ever exists inside local-optimizer tests.
 */
class ____ extends UnaryPlan {

    // The single attribute this command exposes as its output.
    public FieldAttribute field;

    public MockFieldAttributeCommand(Source source, LogicalPlan child, FieldAttribute field) {
        super(source, child);
        this.field = field;
    }

    @Override
    protected AttributeSet computeReferences() {
        // Reports no references even though `field` is in the output.
        return AttributeSet.EMPTY;
    }

    // Not an @Override marker in the original; this plan is never serialized.
    public void writeTo(StreamOutput out) {
        throw new UnsupportedOperationException("not serialized");
    }

    @Override
    public String getWriteableName() {
        throw new UnsupportedOperationException("not serialized");
    }

    @Override
    public UnaryPlan replaceChild(LogicalPlan newChild) {
        return new MockFieldAttributeCommand(source(), newChild, field);
    }

    @Override
    public boolean expressionsResolved() {
        return true;
    }

    @Override
    public List<Attribute> output() {
        return List.of(field);
    }

    @Override
    protected NodeInfo<? extends LogicalPlan> info() {
        return NodeInfo.create(this, MockFieldAttributeCommand::new, child(), field);
    }
}
/**
 * A command that outputs a missing field (without referencing it) still gets the
 * field replaced by a null {@code Eval} plus a {@code Project} that preserves the
 * original relation's output names.
 */
public void testMissingFieldInNewCommand() {
    var testStats = statsForMissingField("last_name");
    localPlan(
        new MockFieldAttributeCommand(
            EMPTY,
            new Row(EMPTY, List.of()),
            new FieldAttribute(EMPTY, "last_name", new EsField("last_name", KEYWORD, Map.of(), true, EsField.TimeSeriesFieldType.NONE))
        ),
        testStats
    );
    var plan = plan("""
        from test
        """);
    var initialRelation = plan.collectLeaves().get(0);
    FieldAttribute lastName = null;
    for (Attribute attr : initialRelation.output()) {
        if (attr.name().equals("last_name")) {
            lastName = (FieldAttribute) attr;
        }
    }
    // Expects
    // MockFieldAttributeCommand[last_name{f}#7]
    // \_Project[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12, langu
    // ages{f}#6, last_name{r}#7, long_noidx{f}#13, salary{f}#8]]
    // \_Eval[[null[KEYWORD] AS last_name]]
    // \_Limit[1000[INTEGER],false]
    // \_EsRelation[test][_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..]
    LogicalPlan localPlan = localPlan(new MockFieldAttributeCommand(EMPTY, plan, lastName), testStats);
    var mockCommand = as(localPlan, MockFieldAttributeCommand.class);
    var project = as(mockCommand.child(), Project.class);
    var eval = as(project.child(), Eval.class);
    var limit = asLimit(eval.child(), 1000);
    var relation = as(limit.child(), EsRelation.class);
    assertThat(Expressions.names(eval.fields()), contains("last_name"));
    var literal = as(eval.fields().get(0), Alias.class);
    assertEquals(literal.child(), new Literal(EMPTY, null, KEYWORD));
    assertThat(Expressions.names(relation.output()), not(contains("last_name")));
    assertEquals(Expressions.names(initialRelation.output()), Expressions.names(project.output()));
}
/**
 * A missing field used inside an EVAL folds the whole expression to a typed null.
 * Expects
 * EsqlProject[[x{r}#3]]
 * \_Eval[[null[INTEGER] AS x]]
 * \_Limit[10000[INTEGER]]
 * \_EsRelation[test][_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, !g..]
 */
public void testMissingFieldInEval() {
    var plan = plan("""
        from test
        | eval x = emp_no + 1
        | keep x
        """);
    var testStats = statsForMissingField("emp_no");
    var localPlan = localPlan(plan, testStats);
    var project = as(localPlan, Project.class);
    assertThat(Expressions.names(project.projections()), contains("x"));
    var eval = as(project.child(), Eval.class);
    assertThat(Expressions.names(eval.fields()), contains("x"));
    var alias = as(eval.fields().get(0), Alias.class);
    var literal = as(alias.child(), Literal.class);
    // x = emp_no + 1 collapses to a null INTEGER literal
    assertThat(literal.value(), is(nullValue()));
    assertThat(literal.dataType(), is(INTEGER));
    var limit = as(eval.child(), Limit.class);
    var source = as(limit.child(), EsRelation.class);
}
/**
 * Filtering on a reference to a missing field prunes the whole plan to an
 * empty local relation.
 * Expects
 * LocalRelation[[first_name{f}#4],EMPTY]
 */
public void testMissingFieldInFilterNumericWithReference() {
    var plan = plan("""
        from test
        | eval x = emp_no
        | where x > 10
        | keep first_name
        """);
    var testStats = statsForMissingField("emp_no");
    var localPlan = localPlan(plan, testStats);
    var local = as(localPlan, LocalRelation.class);
    assertThat(Expressions.names(local.output()), contains("first_name"));
}
/**
 * Same as above, but the reference points at an arithmetic EVAL over the
 * missing field; the plan still collapses to an empty local relation.
 * Expects
 * LocalRelation[[first_name{f}#4],EMPTY]
 */
public void testMissingFieldInFilterNumericWithReferenceToEval() {
    var plan = plan("""
        from test
        | eval x = emp_no + 1
        | where x > 10
        | keep first_name
        """);
    var testStats = statsForMissingField("emp_no");
    var localPlan = localPlan(plan, testStats);
    var local = as(localPlan, LocalRelation.class);
    assertThat(Expressions.names(local.output()), contains("first_name"));
}
/**
 * Without a KEEP, pruning on the missing-field filter yields an empty local
 * relation that still carries the full output schema (including x).
 * Expects
 * LocalRelation[[_meta_field{f}#11, emp_no{f}#5, first_name{f}#6, gender{f}#7, languages{f}#8, last_name{f}#9, salary{f}#10, x
 * {r}#3],EMPTY]
 */
public void testMissingFieldInFilterNoProjection() {
    var plan = plan("""
        from test
        | eval x = emp_no
        | where x > 10
        """);
    var testStats = statsForMissingField("emp_no");
    var localPlan = localPlan(plan, testStats);
    var local = as(localPlan, LocalRelation.class);
    assertThat(
        Expressions.names(local.output()),
        contains(
            "_meta_field",
            "emp_no",
            "first_name",
            "gender",
            "hire_date",
            "job",
            "job.raw",
            "languages",
            "last_name",
            "long_noidx",
            "salary",
            "x"
        )
    );
}
/**
 * Builds a 256-field index where the stats say only field000–field004 exist.
 * After `keep field00*` the projection keeps ten names (field000–field009),
 * and the non-existent ones (field005+) are materialized as null Evals.
 */
public void testSparseDocument() throws Exception {
    var query = """
        from large
        | keep field00*
        | limit 10
        """;
    int size = 256;
    Map<String, EsField> large = Maps.newLinkedHashMapWithExpectedSize(size);
    for (int i = 0; i < size; i++) {
        var name = String.format(Locale.ROOT, "field%03d", i);
        large.put(name, new EsField(name, INTEGER, emptyMap(), true, false, EsField.TimeSeriesFieldType.NONE));
    }
    SearchStats searchStats = statsForExistingField("field000", "field001", "field002", "field003", "field004");
    EsIndex index = EsIndexGenerator.esIndex("large", large, Map.of("large", IndexMode.STANDARD));
    var logicalOptimizer = new LogicalPlanOptimizer(unboundLogicalOptimizerContext());
    var analyzer = new Analyzer(
        testAnalyzerContext(
            EsqlTestUtils.TEST_CFG,
            new EsqlFunctionRegistry(),
            indexResolutions(index),
            emptyPolicyResolution(),
            emptyInferenceResolution()
        ),
        TEST_VERIFIER
    );
    var analyzed = analyzer.analyze(parser.createStatement(query));
    var optimized = logicalOptimizer.optimize(analyzed);
    var localContext = new LocalLogicalOptimizerContext(EsqlTestUtils.TEST_CFG, FoldContext.small(), searchStats);
    var plan = new LocalLogicalPlanOptimizer(localContext).localOptimize(optimized);
    var project = as(plan, Project.class);
    assertThat(project.projections(), hasSize(10));
    assertThat(
        Expressions.names(project.projections()),
        contains("field000", "field001", "field002", "field003", "field004", "field005", "field006", "field007", "field008", "field009")
    );
    var eval = as(project.child(), Eval.class);
    var field = eval.fields().get(0);
    // field005 is the first missing field: it folds to null
    assertThat(Expressions.name(field), is("field005"));
    assertThat(Alias.unwrap(field).fold(FoldContext.small()), nullValue());
}
// InferIsNotNull

/** IS NOT NULL directly on a field gains no extra constraints — the filter is unchanged. */
public void testIsNotNullOnIsNullField() {
    EsRelation relation = relation();
    var fieldA = getFieldAttribute("a");
    Expression inn = isNotNull(fieldA);
    Filter f = new Filter(EMPTY, relation, inn);
    assertEquals(f, new InferIsNotNull().apply(f));
}
/** IS NOT NULL on (a + 1) infers an extra IS NOT NULL(a) conjunct. */
public void testIsNotNullOnOperatorWithOneField() {
    EsRelation relation = relation();
    var fieldA = getFieldAttribute("a");
    Expression inn = isNotNull(new Add(EMPTY, fieldA, ONE));
    Filter f = new Filter(EMPTY, relation, inn);
    Filter expected = new Filter(EMPTY, relation, new And(EMPTY, isNotNull(fieldA), inn));
    assertEquals(expected, new InferIsNotNull().apply(f));
}
/** IS NOT NULL on (a + b) infers IS NOT NULL for both referenced fields. */
public void testIsNotNullOnOperatorWithTwoFields() {
    EsRelation relation = relation();
    var fieldA = getFieldAttribute("a");
    var fieldB = getFieldAttribute("b");
    Expression inn = isNotNull(new Add(EMPTY, fieldA, fieldB));
    Filter f = new Filter(EMPTY, relation, inn);
    Filter expected = new Filter(EMPTY, relation, new And(EMPTY, new And(EMPTY, isNotNull(fieldA), isNotNull(fieldB)), inn));
    assertEquals(expected, new InferIsNotNull().apply(f));
}
/** Only the field reachable through the function gets the inferred constraint; constant-only branches add nothing. */
public void testIsNotNullOnFunctionWithOneField() {
    EsRelation relation = relation();
    var fieldA = getFieldAttribute("a");
    var pattern = L("abc");
    Expression inn = isNotNull(new And(EMPTY, new StartsWith(EMPTY, fieldA, pattern), greaterThanOf(new Add(EMPTY, ONE, TWO), THREE)));
    Filter f = new Filter(EMPTY, relation, inn);
    Filter expected = new Filter(EMPTY, relation, new And(EMPTY, isNotNull(fieldA), inn));
    assertEquals(expected, new InferIsNotNull().apply(f));
}
/** Both arguments of the function get inferred IS NOT NULL constraints. */
public void testIsNotNullOnFunctionWithTwoFields() {
    EsRelation relation = relation();
    var fieldA = getFieldAttribute("a");
    var fieldB = getFieldAttribute("b");
    Expression inn = isNotNull(new StartsWith(EMPTY, fieldA, fieldB));
    Filter f = new Filter(EMPTY, relation, inn);
    Filter expected = new Filter(EMPTY, relation, new And(EMPTY, new And(EMPTY, isNotNull(fieldA), isNotNull(fieldB)), inn));
    assertEquals(expected, new InferIsNotNull().apply(f));
}
/** COALESCE blocks the inference: IS NOT NULL(coalesce(...)) stays as-is, with no per-field constraints added. */
public void testIsNotNullOnCoalesce() {
    var plan = localPlan("""
        from test
        | where coalesce(emp_no, salary) is not null
        """);
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var inn = as(filter.condition(), IsNotNull.class);
    var coalesce = as(inn.children().get(0), Coalesce.class);
    assertThat(Expressions.names(coalesce.children()), contains("emp_no", "salary"));
    var source = as(filter.child(), EsRelation.class);
}
/** The inferred IS NOT NULL(emp_no) is pushed below the EVAL that defines x. */
public void testIsNotNullOnExpression() {
    var plan = localPlan("""
        from test
        | eval x = emp_no + 1
        | where x is not null
        """);
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var inn = as(filter.condition(), IsNotNull.class);
    assertThat(Expressions.names(inn.children()), contains("x"));
    var eval = as(filter.child(), Eval.class);
    filter = as(eval.child(), Filter.class);
    inn = as(filter.condition(), IsNotNull.class);
    assertThat(Expressions.names(inn.children()), contains("emp_no"));
    var source = as(filter.child(), EsRelation.class);
}
/** CASE blocks the inference: the condition keeps its original children with no added constraints. */
public void testIsNotNullOnCase() {
    var plan = localPlan("""
        from test
        | where case(emp_no > 10000, "1", salary < 50000, "2", first_name) is not null
        """);
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var inn = as(filter.condition(), IsNotNull.class);
    var caseF = as(inn.children().get(0), Case.class);
    assertThat(Expressions.names(caseF.children()), contains("emp_no > 10000", "\"1\"", "salary < 50000", "\"2\"", "first_name"));
    var source = as(filter.child(), EsRelation.class);
}
/** Same as above, with IS NULL / IS NOT NULL conditions inside the CASE — still no extra constraints. */
public void testIsNotNullOnCase_With_IS_NULL() {
    var plan = localPlan("""
        from test
        | where case(emp_no IS NULL, "1", salary IS NOT NULL, "2", first_name) is not null
        """);
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var inn = as(filter.condition(), IsNotNull.class);
    var caseF = as(inn.children().get(0), Case.class);
    assertThat(Expressions.names(caseF.children()), contains("emp_no IS NULL", "\"1\"", "salary IS NOT NULL", "\"2\"", "first_name"));
    var source = as(filter.child(), EsRelation.class);
}
/*
 * TO_UPPER/TO_LOWER wrappers around the field are stripped and the RLIKE
 * becomes case-insensitive, since the pattern is all upper-case.
 *
 * Limit[1000[INTEGER],false]
 * \_Filter[RLIKE(first_name{f}#4, "VALÜ*", true)]
 * \_EsRelation[test][_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..]
 */
public void testReplaceUpperStringCasingWithInsensitiveRLike() {
    var plan = localPlan("FROM test | WHERE TO_UPPER(TO_LOWER(TO_UPPER(first_name))) RLIKE \"VALÜ*\"");
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var rlike = as(filter.condition(), RLike.class);
    var field = as(rlike.field(), FieldAttribute.class);
    assertThat(field.fieldName().string(), is("first_name"));
    assertThat(rlike.pattern().pattern(), is("VALÜ*"));
    assertThat(rlike.caseInsensitive(), is(true));
    var source = as(filter.child(), EsRelation.class);
}
/*
 * Same rewrite as the single-pattern case, but for a pattern list: both
 * upper-case patterns survive and the match turns case-insensitive.
 *
 * Limit[1000[INTEGER],false]
 * \_Filter[RLikeList(first_name{f}#4, "("VALÜ*", "TEST*")", true)]
 * \_EsRelation[test][_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gen..]
 */
public void testReplaceUpperStringCasingWithInsensitiveRLikeList() {
    var plan = localPlan("FROM test | WHERE TO_UPPER(TO_LOWER(TO_UPPER(first_name))) RLIKE (\"VALÜ*\", \"TEST*\")");
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var rLikeList = as(filter.condition(), RLikeList.class);
    var field = as(rLikeList.field(), FieldAttribute.class);
    assertThat(field.fieldName().string(), is("first_name"));
    assertEquals(2, rLikeList.pattern().patternList().size());
    assertThat(rLikeList.pattern().patternList().get(0).pattern(), is("VALÜ*"));
    assertThat(rLikeList.pattern().patternList().get(1).pattern(), is("TEST*"));
    assertThat(rLikeList.caseInsensitive(), is(true));
    var source = as(filter.child(), EsRelation.class);
}
// same plan as above, but with a lower-case pattern matched through TO_LOWER:
// the casing wrappers are stripped and the RLIKE becomes case-insensitive
public void testReplaceLowerStringCasingWithInsensitiveRLike() {
    var plan = localPlan("FROM test | WHERE TO_LOWER(TO_UPPER(first_name)) RLIKE \"valü*\"");
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var rlike = as(filter.condition(), RLike.class);
    var field = as(rlike.field(), FieldAttribute.class);
    assertThat(field.fieldName().string(), is("first_name"));
    assertThat(rlike.pattern().pattern(), is("valü*"));
    assertThat(rlike.caseInsensitive(), is(true));
    var source = as(filter.child(), EsRelation.class);
}
// same plan as above, but with a list of all lower-case patterns: both survive
// and the match becomes case-insensitive
public void testReplaceLowerStringCasingWithInsensitiveRLikeList() {
    var plan = localPlan("FROM test | WHERE TO_LOWER(TO_UPPER(first_name)) RLIKE (\"valü*\", \"test*\")");
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var rLikeList = as(filter.condition(), RLikeList.class);
    var field = as(rLikeList.field(), FieldAttribute.class);
    assertThat(field.fieldName().string(), is("first_name"));
    assertEquals(2, rLikeList.pattern().patternList().size());
    assertThat(rLikeList.pattern().patternList().get(0).pattern(), is("valü*"));
    assertThat(rLikeList.pattern().patternList().get(1).pattern(), is("test*"));
    assertThat(rLikeList.caseInsensitive(), is(true));
    var source = as(filter.child(), EsRelation.class);
}
// same plan as above, but one pattern is upper-case: since TO_LOWER output can
// never match "TEST*", only the lower-case pattern is kept in the list
public void testReplaceLowerStringCasingWithMixedCaseRLikeList() {
    var plan = localPlan("FROM test | WHERE TO_LOWER(TO_UPPER(first_name)) RLIKE (\"valü*\", \"TEST*\")");
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var rLikeList = as(filter.condition(), RLikeList.class);
    var field = as(rLikeList.field(), FieldAttribute.class);
    assertThat(field.fieldName().string(), is("first_name"));
    assertEquals(1, rLikeList.pattern().patternList().size());
    assertThat(rLikeList.pattern().patternList().get(0).pattern(), is("valü*"));
    assertThat(rLikeList.caseInsensitive(), is(true));
    var source = as(filter.child(), EsRelation.class);
}
/**
 * TO_LOWER output can never match an upper-case pattern, so the whole query
 * folds to an empty local relation.
 *
 * LocalRelation[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12, langu
 * ages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8],EMPTY]
 */
public void testReplaceStringCasingAndRLikeWithLocalRelation() {
    var plan = localPlan("FROM test | WHERE TO_LOWER(TO_UPPER(first_name)) RLIKE \"VALÜ*\"");
    var local = as(plan, LocalRelation.class);
    assertThat(local.supplier(), equalTo(EmptyLocalSupplier.EMPTY));
}
// same plan as in testReplaceUpperStringCasingWithInsensitiveRLike, but with
// LIKE instead of RLIKE (WildcardLike instead of RLike)
public void testReplaceUpperStringCasingWithInsensitiveLike() {
    var plan = localPlan("FROM test | WHERE TO_UPPER(TO_LOWER(TO_UPPER(first_name))) LIKE \"VALÜ*\"");
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var wlike = as(filter.condition(), WildcardLike.class);
    var field = as(wlike.field(), FieldAttribute.class);
    assertThat(field.fieldName().string(), is("first_name"));
    assertThat(wlike.pattern().pattern(), is("VALÜ*"));
    assertThat(wlike.caseInsensitive(), is(true));
    var source = as(filter.child(), EsRelation.class);
}
// same plan as in testReplaceUpperStringCasingWithInsensitiveRLikeList, but
// with LIKE instead of RLIKE (WildcardLikeList instead of RLikeList)
public void testReplaceUpperStringCasingWithInsensitiveLikeList() {
    var plan = localPlan("FROM test | WHERE TO_UPPER(TO_LOWER(TO_UPPER(first_name))) LIKE (\"VALÜ*\", \"TEST*\")");
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var likeList = as(filter.condition(), WildcardLikeList.class);
    var field = as(likeList.field(), FieldAttribute.class);
    assertThat(field.fieldName().string(), is("first_name"));
    assertEquals(2, likeList.pattern().patternList().size());
    assertThat(likeList.pattern().patternList().get(0).pattern(), is("VALÜ*"));
    assertThat(likeList.pattern().patternList().get(1).pattern(), is("TEST*"));
    assertThat(likeList.caseInsensitive(), is(true));
    var source = as(filter.child(), EsRelation.class);
}
// same plan as above, but with a mixed-case pattern list under TO_LOWER:
// patterns containing any upper-case character can never match and are dropped
public void testReplaceLowerStringCasingWithMixedCaseLikeList() {
    var plan = localPlan("FROM test | WHERE TO_LOWER(TO_UPPER(first_name)) LIKE (\"TEST*\", \"valü*\", \"vaLü*\")");
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var likeList = as(filter.condition(), WildcardLikeList.class);
    var field = as(likeList.field(), FieldAttribute.class);
    assertThat(field.fieldName().string(), is("first_name"));
    // only the all lowercase pattern is kept, the mixed case and all uppercase patterns are ignored
    assertEquals(1, likeList.pattern().patternList().size());
    assertThat(likeList.pattern().patternList().get(0).pattern(), is("valü*"));
    assertThat(likeList.caseInsensitive(), is(true));
    var source = as(filter.child(), EsRelation.class);
}
// same plan as above, but a single lower-case pattern: casing wrappers are
// stripped and the LIKE becomes case-insensitive
public void testReplaceLowerStringCasingWithInsensitiveLike() {
    var plan = localPlan("FROM test | WHERE TO_LOWER(TO_UPPER(first_name)) LIKE \"valü*\"");
    var limit = as(plan, Limit.class);
    var filter = as(limit.child(), Filter.class);
    var wlike = as(filter.condition(), WildcardLike.class);
    var field = as(wlike.field(), FieldAttribute.class);
    assertThat(field.fieldName().string(), is("first_name"));
    assertThat(wlike.pattern().pattern(), is("valü*"));
    assertThat(wlike.caseInsensitive(), is(true));
    var source = as(filter.child(), EsRelation.class);
}
/**
 * TO_LOWER output can never match an upper-case LIKE pattern, so the query
 * folds to an empty local relation.
 *
 * LocalRelation[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12, langu
 * ages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8],EMPTY]
 */
public void testReplaceStringCasingAndLikeWithLocalRelation() {
    var plan = localPlan("FROM test | WHERE TO_LOWER(TO_UPPER(first_name)) LIKE \"VALÜ*\"");
    var local = as(plan, LocalRelation.class);
    assertThat(local.supplier(), equalTo(EmptyLocalSupplier.EMPTY));
}
/**
 * When every pattern in the LIKE list is unmatchable under TO_LOWER, the
 * whole query folds to an empty local relation.
 *
 * LocalRelation[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12, langu
 * ages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8],EMPTY]
 */
public void testReplaceStringCasingAndLikeListWithLocalRelation() {
    var plan = localPlan("FROM test | WHERE TO_LOWER(TO_UPPER(first_name)) LIKE (\"VALÜ*\", \"TEST*\")");
    var local = as(plan, LocalRelation.class);
    assertThat(local.supplier(), equalTo(EmptyLocalSupplier.EMPTY));
}
/**
 * When every pattern in the RLIKE list is unmatchable under TO_LOWER, the
 * whole query folds to an empty local relation.
 *
 * LocalRelation[[_meta_field{f}#9, emp_no{f}#3, first_name{f}#4, gender{f}#5, hire_date{f}#10, job{f}#11, job.raw{f}#12, langu
 * ages{f}#6, last_name{f}#7, long_noidx{f}#13, salary{f}#8],EMPTY]
 */
public void testReplaceStringCasingAndRLikeListWithLocalRelation() {
    var plan = localPlan("FROM test | WHERE TO_LOWER(TO_UPPER(first_name)) RLIKE (\"VALÜ*\", \"TEST*\")");
    var local = as(plan, LocalRelation.class);
    assertThat(local.supplier(), equalTo(EmptyLocalSupplier.EMPTY));
}
/**
 * The inferred non-null constraint for a SUM over a union-typed field targets
 * the synthetic converted attribute, whose fieldName still resolves to the
 * original field.
 *
 * Limit[1000[INTEGER],false]
 * \_Aggregate[[],[SUM($$integer_long_field$converted_to$long{f$}#5,true[BOOLEAN]) AS sum(integer_long_field::long)#3]]
 * \_Filter[ISNOTNULL($$integer_long_field$converted_to$long{f$}#5)]
 * \_EsRelation[test*][!integer_long_field, $$integer_long_field$converted..]
 */
public void testUnionTypesInferNonNullAggConstraint() {
    LogicalPlan coordinatorOptimized = plan("FROM test* | STATS sum(integer_long_field::long)", analyzerWithUnionTypeMapping());
    var plan = localPlan(coordinatorOptimized, TEST_SEARCH_STATS);
    var limit = asLimit(plan, 1000);
    var agg = as(limit.child(), Aggregate.class);
    var filter = as(agg.child(), Filter.class);
    var relation = as(filter.child(), EsRelation.class);
    var isNotNull = as(filter.condition(), IsNotNull.class);
    var unionTypeField = as(isNotNull.field(), FieldAttribute.class);
    assertEquals("$$integer_long_field$converted_to$long", unionTypeField.name());
    assertEquals("integer_long_field", unionTypeField.fieldName().string());
}
/**
 * Grouping on two missing fields: both groupings become references to null
 * KEYWORD Evals, with grouping ids matching the Eval alias ids.
 *
 * \_Aggregate[[first_name{r}#7, $$first_name$temp_name$17{r}#18],[SUM(salary{f}#11,true[BOOLEAN]) AS SUM(salary)#5, first_nam
 * e{r}#7, first_name{r}#7 AS last_name#10]]
 * \_Eval[[null[KEYWORD] AS first_name#7, null[KEYWORD] AS $$first_name$temp_name$17#18]]
 * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..]
 */
public void testGroupingByMissingFields() {
    var plan = plan("FROM test | STATS SUM(salary) BY first_name, last_name");
    var testStats = statsForMissingField("first_name", "last_name");
    var localPlan = localPlan(plan, testStats);
    Limit limit = as(localPlan, Limit.class);
    Aggregate aggregate = as(limit.child(), Aggregate.class);
    assertThat(aggregate.groupings(), hasSize(2));
    ReferenceAttribute grouping1 = as(aggregate.groupings().get(0), ReferenceAttribute.class);
    ReferenceAttribute grouping2 = as(aggregate.groupings().get(1), ReferenceAttribute.class);
    Eval eval = as(aggregate.child(), Eval.class);
    assertThat(eval.fields(), hasSize(2));
    Alias eval1 = eval.fields().get(0);
    Literal literal1 = as(eval1.child(), Literal.class);
    assertNull(literal1.value());
    assertThat(literal1.dataType(), is(KEYWORD));
    Alias eval2 = eval.fields().get(1);
    Literal literal2 = as(eval2.child(), Literal.class);
    assertNull(literal2.value());
    assertThat(literal2.dataType(), is(KEYWORD));
    // grouping attributes must point at the null Evals that replaced the fields
    assertThat(grouping1.id(), equalTo(eval1.id()));
    assertThat(grouping2.id(), equalTo(eval2.id()));
    as(eval.child(), EsRelation.class);
}
/**
 * The local optimizer's verifier must reject a plan whose OrderBy references
 * an attribute (salary) that is not in its child's output.
 */
public void testVerifierOnMissingReferences() throws Exception {
    var plan = localPlan("""
        from test
        | stats a = min(salary) by emp_no
        """);
    var limit = as(plan, Limit.class);
    var aggregate = as(limit.child(), Aggregate.class);
    var min = as(Alias.unwrap(aggregate.aggregates().get(0)), Min.class);
    var salary = as(min.field(), NamedExpression.class);
    assertThat(salary.name(), is("salary"));
    // emulate a rule that adds an invalid field
    var invalidPlan = new OrderBy(
        limit.source(),
        limit,
        asList(new Order(limit.source(), salary, Order.OrderDirection.ASC, Order.NullsPosition.FIRST))
    );
    var localContext = new LocalLogicalOptimizerContext(EsqlTestUtils.TEST_CFG, FoldContext.small(), TEST_SEARCH_STATS);
    LocalLogicalPlanOptimizer localLogicalPlanOptimizer = new LocalLogicalPlanOptimizer(localContext);
    IllegalStateException e = expectThrows(IllegalStateException.class, () -> localLogicalPlanOptimizer.localOptimize(invalidPlan));
    assertThat(e.getMessage(), containsString("Plan [OrderBy[[Order[salary"));
    assertThat(e.getMessage(), containsString(" optimized incorrectly due to missing references [salary"));
}
/**
 * Builds a local optimizer whose rule set is exactly the caller-supplied
 * batches, using the standard test configuration and search stats.
 *
 * @param batches the rule batches the optimizer should run
 * @return an optimizer that runs only {@code batches}
 */
private LocalLogicalPlanOptimizer getCustomRulesLocalLogicalPlanOptimizer(List<RuleExecutor.Batch<LogicalPlan>> batches) {
    var context = new LocalLogicalOptimizerContext(EsqlTestUtils.TEST_CFG, FoldContext.small(), TEST_SEARCH_STATS);
    return new LocalLogicalPlanOptimizer(context) {
        @Override
        protected List<Batch<LogicalPlan>> batches() {
            return batches;
        }
    };
}
/**
 * The local optimizer's verifier must reject a custom rule that adds a new
 * attribute to the plan's output (the output schema may not change).
 */
public void testVerifierOnAdditionalAttributeAdded() throws Exception {
    var plan = localPlan("""
        from test
        | stats a = min(salary) by emp_no
        """);
    var limit = as(plan, Limit.class);
    var aggregate = as(limit.child(), Aggregate.class);
    var min = as(Alias.unwrap(aggregate.aggregates().get(0)), Min.class);
    var salary = as(min.field(), NamedExpression.class);
    assertThat(salary.name(), is("salary"));
    Holder<Integer> appliedCount = new Holder<>(0);
    // use a custom rule that adds another output attribute
    var customRuleBatch = new RuleExecutor.Batch<>(
        "CustomRuleBatch",
        RuleExecutor.Limiter.ONCE,
        new OptimizerRules.ParameterizedOptimizerRule<Aggregate, LocalLogicalOptimizerContext>(UP) {
            @Override
            protected LogicalPlan rule(Aggregate plan, LocalLogicalOptimizerContext context) {
                // This rule adds a missing attribute to the plan output
                // We only want to apply it once, so we use a static counter
                if (appliedCount.get() == 0) {
                    appliedCount.set(appliedCount.get() + 1);
                    Literal additionalLiteral = new Literal(EMPTY, "additional literal", INTEGER);
                    return new Eval(plan.source(), plan, List.of(new Alias(EMPTY, "additionalAttribute", additionalLiteral)));
                }
                return plan;
            }
        }
    );
    LocalLogicalPlanOptimizer customRulesLocalLogicalPlanOptimizer = getCustomRulesLocalLogicalPlanOptimizer(List.of(customRuleBatch));
    Exception e = expectThrows(VerificationException.class, () -> customRulesLocalLogicalPlanOptimizer.localOptimize(plan));
    assertThat(e.getMessage(), containsString("Output has changed from"));
    assertThat(e.getMessage(), containsString("additionalAttribute"));
}
/**
 * The local optimizer's verifier must reject a custom rule that changes the
 * data type of an output attribute (here INTEGER -> DATETIME).
 */
public void testVerifierOnAttributeDatatypeChanged() {
    var plan = localPlan("""
        from test
        | stats a = min(salary) by emp_no
        """);
    var limit = as(plan, Limit.class);
    var aggregate = as(limit.child(), Aggregate.class);
    var min = as(Alias.unwrap(aggregate.aggregates().get(0)), Min.class);
    var salary = as(min.field(), NamedExpression.class);
    assertThat(salary.name(), is("salary"));
    Holder<Integer> appliedCount = new Holder<>(0);
    // use a custom rule that changes the datatype of an output attribute
    var customRuleBatch = new RuleExecutor.Batch<>(
        "CustomRuleBatch",
        RuleExecutor.Limiter.ONCE,
        new OptimizerRules.ParameterizedOptimizerRule<LogicalPlan, LocalLogicalOptimizerContext>(DOWN) {
            @Override
            protected LogicalPlan rule(LogicalPlan plan, LocalLogicalOptimizerContext context) {
                // We only want to apply it once, so we use a static counter
                if (appliedCount.get() == 0) {
                    appliedCount.set(appliedCount.get() + 1);
                    Limit limit = as(plan, Limit.class);
                    Limit newLimit = new Limit(plan.source(), limit.limit(), limit.child()) {
                        @Override
                        public List<Attribute> output() {
                            List<Attribute> oldOutput = super.output();
                            List<Attribute> newOutput = new ArrayList<>(oldOutput);
                            newOutput.set(0, oldOutput.get(0).withDataType(DataType.DATETIME));
                            return newOutput;
                        }
                    };
                    return newLimit;
                }
                return plan;
            }
        }
    );
    LocalLogicalPlanOptimizer customRulesLocalLogicalPlanOptimizer = getCustomRulesLocalLogicalPlanOptimizer(List.of(customRuleBatch));
    Exception e = expectThrows(VerificationException.class, () -> customRulesLocalLogicalPlanOptimizer.localOptimize(plan));
    assertThat(e.getMessage(), containsString("Output has changed from"));
}
/**
 * A LEFT lookup join whose matching field is missing is pruned away entirely:
 * the lookup side's attributes become null Evals (keeping their original ids,
 * via aliases where names shadow), and only the left relation remains.
 *
 * Input:
 * Project[[key{f}#2, int{f}#3, field1{f}#7, field2{f}#8]]
 * \_Join[LEFT,[key{f}#2],[key{f}#6],null]
 * |_EsRelation[JLfQlKmn][key{f}#2, int{f}#3, field1{f}#4, field2{f}#5]
 * \_EsRelation[HQtEBOWq][LOOKUP][key{f}#6, field1{f}#7, field2{f}#8]
 *
 * Output:
 * Project[[key{r}#2, int{f}#3, field1{r}#7, field1{r}#7 AS field2#8]]
 * \_Eval[[null[KEYWORD] AS key#2, null[INTEGER] AS field1#7]]
 * \_EsRelation[JLfQlKmn][key{f}#2, int{f}#3, field1{f}#4, field2{f}#5]
 */
public void testPruneLeftJoinOnNullMatchingFieldAndShadowingAttributes() {
    var keyLeft = getFieldAttribute("key", KEYWORD);
    var intFieldLeft = getFieldAttribute("int");
    var fieldLeft1 = getFieldAttribute("field1");
    var fieldLeft2 = getFieldAttribute("field2");
    var leftRelation = EsqlTestUtils.relation(IndexMode.STANDARD)
        .withAttributes(List.of(keyLeft, intFieldLeft, fieldLeft1, fieldLeft2));
    var keyRight = getFieldAttribute("key", KEYWORD);
    var fieldRight1 = getFieldAttribute("field1");
    var fieldRight2 = getFieldAttribute("field2");
    var rightRelation = EsqlTestUtils.relation(IndexMode.LOOKUP).withAttributes(List.of(keyRight, fieldRight1, fieldRight2));
    JoinConfig joinConfig = new JoinConfig(JoinTypes.LEFT, List.of(keyLeft), List.of(keyRight), null);
    var join = new Join(EMPTY, leftRelation, rightRelation, joinConfig);
    var project = new Project(EMPTY, join, List.of(keyLeft, intFieldLeft, fieldRight1, fieldRight2));
    var testStats = statsForMissingField("key");
    var localPlan = localPlan(project, testStats);
    var projectOut = as(localPlan, Project.class);
    var projectionsOut = projectOut.projections();
    assertThat(Expressions.names(projectionsOut), contains("key", "int", "field1", "field2"));
    assertThat(projectionsOut.get(0).id(), is(keyLeft.id()));
    assertThat(projectionsOut.get(1).id(), is(intFieldLeft.id()));
    assertThat(projectionsOut.get(2).id(), is(fieldRight1.id())); // id must remain from the RHS.
    var aliasField2 = as(projectionsOut.get(3), Alias.class); // the projection must contain an alias ...
    assertThat(aliasField2.id(), is(fieldRight2.id())); // ... with the same id as the original field.
    var eval = as(projectOut.child(), Eval.class);
    assertThat(Expressions.names(eval.fields()), contains("key", "field1"));
    var keyEval = as(Alias.unwrap(eval.fields().get(0)), Literal.class);
    assertThat(keyEval.value(), is(nullValue()));
    assertThat(keyEval.dataType(), is(KEYWORD));
    var field1Eval = as(Alias.unwrap(eval.fields().get(1)), Literal.class);
    assertThat(field1Eval.value(), is(nullValue()));
    assertThat(field1Eval.dataType(), is(INTEGER));
    var source = as(eval.child(), EsRelation.class);
}
/**
* Expected:
* EsqlProject[[!alias_integer, boolean{f}#7, byte{f}#8, constant_keyword-foo{f}#9, date{f}#10, date_nanos{f}#11, dense_vector
* {f}#26, double{f}#12, float{f}#13, half_float{f}#14, integer{f}#16, ip{f}#17, keyword{f}#18, long{f}#19, scaled_float{f}#15,
* semantic_text{f}#25, short{f}#21, text{f}#22, unsigned_long{f}#20, version{f}#23, wildcard{f}#24, s{r}#5]]
* \_Eval[[$$dense_vector$V_DOT_PRODUCT$27{f}#27 AS s#5]]
* \_Limit[1000[INTEGER],false,false]
* \_EsRelation[test_all][$$dense_vector$V_DOT_PRODUCT$27{f}#27, !alias_integer,
*/
public void testVectorFunctionsReplaced() {
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    SimilarityFunctionTestCase testCase = SimilarityFunctionTestCase.random("dense_vector");
    String query = String.format(Locale.ROOT, """
        from test_all
        | eval s = %s
        """, testCase.toQuery());
    LogicalPlan plan = localPlan(plan(query, allTypesAnalyzer), TEST_SEARCH_STATS);
    // EsqlProject[[!alias_integer, boolean{f}#7, byte{f}#8, ... s{r}#5]]
    var project = as(plan, EsqlProject.class);
    // Does not contain the extracted field
    assertFalse(Expressions.names(project.projections()).stream().anyMatch(s -> s.startsWith(testCase.toFieldAttrName())));
    // Eval[[$$dense_vector$V_DOT_PRODUCT$27{f}#27 AS s#5]]
    var eval = as(project.child(), Eval.class);
    assertThat(eval.fields(), hasSize(1));
    var alias = as(eval.fields().getFirst(), Alias.class);
    assertThat(alias.name(), equalTo("s"));
    // Check replaced field attribute: the similarity call was rewritten into a synthetic FieldAttribute
    // backed by a FunctionEsField carrying the similarity function and query vector.
    FieldAttribute fieldAttr = (FieldAttribute) alias.child();
    assertThat(fieldAttr.fieldName().string(), equalTo("dense_vector"));
    assertThat(fieldAttr.name(), startsWith(testCase.toFieldAttrName()));
    var field = as(fieldAttr.field(), FunctionEsField.class);
    var blockLoaderFunctionConfig = as(field.functionConfig(), DenseVectorFieldMapper.VectorSimilarityFunctionConfig.class);
    assertThat(blockLoaderFunctionConfig.similarityFunction(), instanceOf(DenseVectorFieldMapper.SimilarityFunction.class));
    assertThat(blockLoaderFunctionConfig.vector(), equalTo(testCase.vector()));
    // Limit[1000[INTEGER],false,false]
    var limit = as(eval.child(), Limit.class);
    assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000));
    // EsRelation[types_all] — the relation must output the synthetic attribute so it can be loaded from the index.
    var esRelation = as(limit.child(), EsRelation.class);
    assertTrue(esRelation.output().contains(fieldAttr));
}
/**
* Expected:
* EsqlProject[[s{r}#4]]
* \_TopN[[Order[s{r}#4,DESC,FIRST]],1[INTEGER]]
* \_Eval[[$$dense_vector$replaced$28{t}#28 AS s#4]]
* \_EsRelation[types][$$dense_vector$replaced$28{t}#28, !alias_integer, b..]
*/
public void testVectorFunctionsReplacedWithTopN() {
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    SimilarityFunctionTestCase testCase = SimilarityFunctionTestCase.random("dense_vector");
    String query = String.format(Locale.ROOT, """
        from test_all
        | eval s = %s
        | sort s desc
        | limit 1
        | keep s
        """, testCase.toQuery());
    LogicalPlan plan = localPlan(plan(query, allTypesAnalyzer), TEST_SEARCH_STATS);
    // EsqlProject[[s{r}#4]]
    var project = as(plan, EsqlProject.class);
    assertThat(Expressions.names(project.projections()), contains("s"));
    // TopN[[Order[s{r}#4,DESC,FIRST]],1[INTEGER]] — sort+limit was fused; ordering keys the evaluated alias.
    var topN = as(project.child(), TopN.class);
    assertThat(topN.limit().fold(FoldContext.small()), equalTo(1));
    assertThat(topN.order().size(), is(1));
    var order = as(topN.order().getFirst(), Order.class);
    assertThat(order.direction(), equalTo(Order.OrderDirection.DESC));
    assertThat(order.nullsPosition(), equalTo(Order.NullsPosition.FIRST));
    assertThat(Expressions.name(order.child()), equalTo("s"));
    // Eval[[$$dense_vector$replaced$28{t}#28 AS s#4]]
    var eval = as(topN.child(), Eval.class);
    assertThat(eval.fields(), hasSize(1));
    var alias = as(eval.fields().getFirst(), Alias.class);
    assertThat(alias.name(), equalTo("s"));
    // Check replaced field attribute — the similarity call became a synthetic FunctionEsField-backed attribute.
    FieldAttribute fieldAttr = (FieldAttribute) alias.child();
    assertThat(fieldAttr.fieldName().string(), equalTo("dense_vector"));
    assertThat(fieldAttr.name(), startsWith(testCase.toFieldAttrName()));
    var field = as(fieldAttr.field(), FunctionEsField.class);
    var blockLoaderFunctionConfig = as(field.functionConfig(), DenseVectorFieldMapper.VectorSimilarityFunctionConfig.class);
    assertThat(blockLoaderFunctionConfig.similarityFunction(), instanceOf(DenseVectorFieldMapper.SimilarityFunction.class));
    assertThat(blockLoaderFunctionConfig.vector(), equalTo(testCase.vector()));
    // EsRelation[types]
    var esRelation = as(eval.child(), EsRelation.class);
    assertTrue(esRelation.output().contains(fieldAttr));
}
/**
 * When search stats report the vector field as not indexed, the similarity call must stay in the plan
 * as a {@code VectorSimilarityFunction} instead of being replaced by a pushed-down field attribute.
 */
public void testVectorFunctionsNotPushedDownWhenNotIndexed() {
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    SimilarityFunctionTestCase testCase = SimilarityFunctionTestCase.random("dense_vector");
    String query = String.format(Locale.ROOT, """
        from test_all
        | eval s = %s
        | sort s desc
        | limit 1
        | keep s
        """, testCase.toQuery());
    LogicalPlan plan = localPlan(plan(query, allTypesAnalyzer), new EsqlTestUtils.TestSearchStats() {
        @Override
        public boolean isIndexed(FieldAttribute.FieldName field) {
            // Report every field as indexed except dense_vector, which disables the pushdown.
            return field.string().equals("dense_vector") == false;
        }
    });
    // EsqlProject[[s{r}#4]]
    var project = as(plan, EsqlProject.class);
    assertThat(Expressions.names(project.projections()), contains("s"));
    // TopN[[Order[s{r}#4,DESC,FIRST]],1[INTEGER]]
    var topN = as(project.child(), TopN.class);
    // Eval[[$$dense_vector$replaced$28{t}#28 AS s#4]]
    var eval = as(topN.child(), Eval.class);
    var alias = as(eval.fields().getFirst(), Alias.class);
    assertThat(alias.name(), equalTo("s"));
    // Check similarity function field attribute is NOT replaced
    as(alias.child(), VectorSimilarityFunction.class);
    // EsRelation does not contain a FunctionEsField
    var esRelation = as(eval.child(), EsRelation.class);
    assertFalse(
        esRelation.output()
            .stream()
            .anyMatch(att -> (att instanceof FieldAttribute fieldAttr) && fieldAttr.field() instanceof FunctionEsField)
    );
}
/**
 * When search stats report the vector field as missing, the whole similarity expression folds to a
 * null literal (DOUBLE) and no {@code FunctionEsField} appears in the relation output.
 */
public void testVectorFunctionsWhenFieldMissing() {
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    SimilarityFunctionTestCase testCase = SimilarityFunctionTestCase.random("dense_vector");
    String query = String.format(Locale.ROOT, """
        from test_all
        | eval s = %s
        | sort s desc
        | limit 1
        | keep s
        """, testCase.toQuery());
    LogicalPlan plan = localPlan(plan(query, allTypesAnalyzer), new EsqlTestUtils.TestSearchStats() {
        @Override
        public boolean exists(FieldAttribute.FieldName field) {
            // Report every field as existing except dense_vector, simulating a missing field.
            return field.string().equals("dense_vector") == false;
        }
    });
    // Project[[s{r}#5]]
    var project = as(plan, Project.class);
    assertThat(Expressions.names(project.projections()), contains("s"));
    // TopN[[Order[s{r}#5,DESC,FIRST]],1[INTEGER],false]
    var topN = as(project.child(), TopN.class);
    assertThat(topN.limit().fold(FoldContext.small()), equalTo(1));
    // Evaluates expression as null, as the field is missing
    var eval = as(topN.child(), Eval.class);
    assertThat(Expressions.names(eval.fields()), contains("s"));
    var alias = as(eval.fields().getFirst(), Alias.class);
    var literal = as(alias.child(), Literal.class);
    assertThat(literal.value(), is(nullValue()));
    assertThat(literal.dataType(), is(DataType.DOUBLE));
    // EsRelation[test_all] - does not contain a FunctionEsField
    var esRelation = as(eval.child(), EsRelation.class);
    assertFalse(
        esRelation.output()
            .stream()
            .anyMatch(att -> (att instanceof FieldAttribute fieldAttr) && fieldAttr.field() instanceof FunctionEsField)
    );
}
/**
 * A similarity call used directly in a WHERE comparison is replaced inside the Filter condition
 * by the pushed-down field attribute.
 */
public void testVectorFunctionsInWhere() {
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    SimilarityFunctionTestCase testCase = SimilarityFunctionTestCase.random("dense_vector");
    String query = String.format(Locale.ROOT, """
        from test_all
        | where %s > 0.5
        | keep dense_vector
        """, testCase.toQuery());
    LogicalPlan plan = localPlan(plan(query, allTypesAnalyzer), TEST_SEARCH_STATS);
    // EsqlProject[[dense_vector{f}#25]]
    var project = as(plan, EsqlProject.class);
    assertThat(Expressions.names(project.projections()), contains("dense_vector"));
    var limit = as(project.child(), Limit.class);
    var filter = as(limit.child(), Filter.class);
    var greaterThan = as(filter.condition(), GreaterThan.class);
    // Check left side is the replaced field attribute
    var fieldAttr = as(greaterThan.left(), FieldAttribute.class);
    assertThat(fieldAttr.fieldName().string(), equalTo("dense_vector"));
    assertThat(fieldAttr.name(), startsWith(testCase.toFieldAttrName()));
    var field = as(fieldAttr.field(), FunctionEsField.class);
    var blockLoaderFunctionConfig = as(field.functionConfig(), DenseVectorFieldMapper.VectorSimilarityFunctionConfig.class);
    assertThat(blockLoaderFunctionConfig.similarityFunction(), instanceOf(DenseVectorFieldMapper.SimilarityFunction.class));
    assertThat(blockLoaderFunctionConfig.vector(), equalTo(testCase.vector()));
    // Check right side is 0.5
    var literal = as(greaterThan.right(), Literal.class);
    assertThat(literal.value(), equalTo(0.5));
    assertThat(literal.dataType(), is(DataType.DOUBLE));
    // EsRelation[test_all][$$dense_vector$V_DOT_PRODUCT$26{f}#26, !alias_integer, ..]
    var esRelation = as(filter.child(), EsRelation.class);
    assertThat(esRelation.indexPattern(), is("test_all"));
    assertTrue(esRelation.output().contains(fieldAttr));
}
/**
 * A similarity call inside a STATS {@code where} filter is extracted into a Filter node below the
 * aggregation, where it is replaced by the pushed-down field attribute.
 */
public void testVectorFunctionsInStats() {
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    SimilarityFunctionTestCase testCase = SimilarityFunctionTestCase.random("dense_vector");
    String query = String.format(Locale.ROOT, """
        from test_all
        | stats count(*) where %s > 0.5
        """, testCase.toQuery());
    LogicalPlan plan = localPlan(plan(query, allTypesAnalyzer), TEST_SEARCH_STATS);
    // Limit[1000[INTEGER],false,false]
    var limit = as(plan, Limit.class);
    assertThat(limit.limit().fold(FoldContext.small()), equalTo(1000));
    // Aggregate[[],[COUNT(*[KEYWORD],true[BOOLEAN]) AS count(*) where v_dot_product(dense_vector, [1.0, 2.0, 3.0]) > 0.5#3]]
    var aggregate = as(limit.child(), Aggregate.class);
    assertThat(aggregate.groupings(), hasSize(0));
    assertThat(aggregate.aggregates(), hasSize(1));
    // Check the Count aggregate: its inline filter was pushed into the Filter node below,
    // so the aggregate itself is left with a trivially-true filter.
    var countAlias = as(aggregate.aggregates().getFirst(), Alias.class);
    var count = as(countAlias.child(), Count.class);
    assertThat(count.filter(), equalTo(Literal.TRUE));
    // Filter[$$dense_vector$V_DOT_PRODUCT$26{f}#26 > 0.5[DOUBLE]]
    var filter = as(aggregate.child(), Filter.class);
    var filterCondition = as(filter.condition(), GreaterThan.class);
    // Check left side is the replaced field attribute
    var fieldAttr = as(filterCondition.left(), FieldAttribute.class);
    assertThat(fieldAttr.fieldName().string(), equalTo("dense_vector"));
    assertThat(fieldAttr.name(), startsWith(testCase.toFieldAttrName()));
    var field = as(fieldAttr.field(), FunctionEsField.class);
    var blockLoaderFunctionConfig = as(field.functionConfig(), DenseVectorFieldMapper.VectorSimilarityFunctionConfig.class);
    assertThat(blockLoaderFunctionConfig.similarityFunction(), instanceOf(DenseVectorFieldMapper.SimilarityFunction.class));
    assertThat(blockLoaderFunctionConfig.vector(), equalTo(testCase.vector()));
    // EsRelation[test_all][$$dense_vector$V_DOT_PRODUCT$26{f}#26, !alias_integer, ..]
    var esRelation = as(filter.child(), EsRelation.class);
    assertTrue(esRelation.output().contains(fieldAttr));
}
/**
 * The pushed-down similarity attribute must flow through intermediate plan nodes (a projection
 * below MV_EXPAND) so it stays reachable from the relation up to the Eval that names it.
 */
public void testVectorFunctionsUpdateIntermediateProjections() {
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    SimilarityFunctionTestCase testCase = SimilarityFunctionTestCase.random("dense_vector");
    String query = String.format(Locale.ROOT, """
        from test_all
        | keep *
        | mv_expand keyword
        | eval similarity = %s
        | sort similarity desc, keyword asc
        | limit 1
        """, testCase.toQuery());
    LogicalPlan plan = localPlan(plan(query, allTypesAnalyzer), TEST_SEARCH_STATS);
    // EsqlProject with all fields including similarity and keyword
    var project = as(plan, EsqlProject.class);
    assertTrue(Expressions.names(project.projections()).contains("similarity"));
    assertTrue(Expressions.names(project.projections()).contains("keyword"));
    var topN = as(project.child(), TopN.class);
    var eval = as(topN.child(), Eval.class);
    assertThat(eval.fields(), hasSize(1));
    var alias = as(eval.fields().getFirst(), Alias.class);
    assertThat(alias.name(), equalTo("similarity"));
    // Check replaced field attribute
    var fieldAttr = as(alias.child(), FieldAttribute.class);
    assertThat(fieldAttr.fieldName().string(), equalTo("dense_vector"));
    assertThat(fieldAttr.name(), startsWith(testCase.toFieldAttrName()));
    var field = as(fieldAttr.field(), FunctionEsField.class);
    var blockLoaderFunctionConfig = as(field.functionConfig(), DenseVectorFieldMapper.VectorSimilarityFunctionConfig.class);
    assertThat(blockLoaderFunctionConfig.similarityFunction(), instanceOf(DenseVectorFieldMapper.SimilarityFunction.class));
    assertThat(blockLoaderFunctionConfig.vector(), equalTo(testCase.vector()));
    // MvExpand[keyword{f}#23,keyword{r}#32]
    var mvExpand = as(eval.child(), MvExpand.class);
    assertThat(Expressions.name(mvExpand.target()), equalTo("keyword"));
    // Inner EsqlProject with the pushed down function — the intermediate projection must have been
    // updated to carry the synthetic attribute through.
    var innerProject = as(mvExpand.child(), EsqlProject.class);
    assertTrue(Expressions.names(innerProject.projections()).contains("keyword"));
    assertTrue(
        innerProject.projections()
            .stream()
            .anyMatch(p -> (p instanceof FieldAttribute fa) && fa.name().startsWith(testCase.toFieldAttrName()))
    );
    // EsRelation[test_all][$$dense_vector$V_COSINE$33{f}#33, !alias_in..]
    var esRelation = as(innerProject.child(), EsRelation.class);
    assertTrue(esRelation.output().contains(fieldAttr));
}
/**
 * Duplicate similarity calls (same function, same vector) must be deduplicated into a single
 * pushed-down attribute, while distinct calls get distinct attributes.
 */
public void testVectorFunctionsWithDuplicateFunctions() {
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    // Generate two random test cases - one for duplicate usage, one for the second set
    SimilarityFunctionTestCase testCase1 = SimilarityFunctionTestCase.random("dense_vector");
    SimilarityFunctionTestCase testCase2 = randomValueOtherThan(testCase1, () -> SimilarityFunctionTestCase.random("dense_vector"));
    SimilarityFunctionTestCase testCase3 = randomValueOtherThanMany(
        tc -> (tc.equals(testCase1) || tc.equals(testCase2)),
        () -> SimilarityFunctionTestCase.random("dense_vector")
    );
    // testCase1 appears three times (s1, s2, filter); testCase2 twice (filter, r2); testCase3 once (r2).
    String query = String.format(
        Locale.ROOT,
        """
            from test_all
            | eval s1 = %s, s2 = %s * 2 / 3
            | where %s + 5 + %s > 0
            | eval r2 = %s + %s
            | keep s1, s2, r2
            """,
        testCase1.toQuery(),
        testCase1.toQuery(),
        testCase1.toQuery(),
        testCase2.toQuery(),
        testCase2.toQuery(),
        testCase3.toQuery()
    );
    LogicalPlan plan = localPlan(plan(query, allTypesAnalyzer), TEST_SEARCH_STATS);
    // EsqlProject[[s1{r}#5, s2{r}#8, r2{r}#14]]
    var project = as(plan, EsqlProject.class);
    assertThat(Expressions.names(project.projections()), contains("s1", "s2", "r2"));
    // Eval with s1, s2, r2
    var evalS1 = as(project.child(), Eval.class);
    assertThat(evalS1.fields(), hasSize(3));
    // Check s1 = $$dense_vector$V_DOT_PRODUCT$...
    var s1Alias = as(evalS1.fields().getFirst(), Alias.class);
    assertThat(s1Alias.name(), equalTo("s1"));
    var s1FieldAttr = as(s1Alias.child(), FieldAttribute.class);
    assertThat(s1FieldAttr.fieldName().string(), equalTo("dense_vector"));
    assertThat(s1FieldAttr.name(), startsWith(testCase1.toFieldAttrName()));
    var s1Field = as(s1FieldAttr.field(), FunctionEsField.class);
    var s1Config = as(s1Field.functionConfig(), DenseVectorFieldMapper.VectorSimilarityFunctionConfig.class);
    assertThat(s1Config.similarityFunction(), instanceOf(DenseVectorFieldMapper.SimilarityFunction.class));
    assertThat(s1Config.vector(), equalTo(testCase1.vector()));
    // Check s2 = $$dense_vector$V_DOT_PRODUCT$1606418432 * 2 / 3 (same field as s1)
    var s2Alias = as(evalS1.fields().get(1), Alias.class);
    assertThat(s2Alias.name(), equalTo("s2"));
    var s2Div = as(s2Alias.child(), Div.class);
    var s2Mul = as(s2Div.left(), Mul.class);
    var s2FieldAttr = as(s2Mul.left(), FieldAttribute.class);
    // Deduplication: the second occurrence of testCase1 reuses the exact same attribute.
    assertThat(s1FieldAttr, is(s2FieldAttr));
    // Check r2 = $$dense_vector$V_L1NORM$... + $$dense_vector$V_HAMMING$... (two different fields)
    var r2Alias = as(evalS1.fields().get(2), Alias.class);
    assertThat(r2Alias.name(), equalTo("r2"));
    var r2Add = as(r2Alias.child(), Add.class);
    // Left side should be testCase2 (L1NORM)
    var r2LeftFieldAttr = as(r2Add.left(), FieldAttribute.class);
    assertThat(r2LeftFieldAttr.fieldName().string(), equalTo("dense_vector"));
    assertThat(r2LeftFieldAttr.name(), startsWith(testCase2.toFieldAttrName()));
    var r2LeftField = as(r2LeftFieldAttr.field(), FunctionEsField.class);
    var r2LeftConfig = as(r2LeftField.functionConfig(), DenseVectorFieldMapper.VectorSimilarityFunctionConfig.class);
    assertThat(r2LeftConfig.similarityFunction(), instanceOf(DenseVectorFieldMapper.SimilarityFunction.class));
    assertThat(r2LeftConfig.vector(), equalTo(testCase2.vector()));
    // Right side should be testCase3 (different HAMMING)
    var r2RightFieldAttr = as(r2Add.right(), FieldAttribute.class);
    assertThat(r2RightFieldAttr.fieldName().string(), equalTo("dense_vector"));
    assertThat(r2RightFieldAttr.name(), startsWith(testCase3.toFieldAttrName()));
    var r2RightField = as(r2RightFieldAttr.field(), FunctionEsField.class);
    var r2RightConfig = as(r2RightField.functionConfig(), DenseVectorFieldMapper.VectorSimilarityFunctionConfig.class);
    assertThat(r2RightConfig.similarityFunction(), instanceOf(DenseVectorFieldMapper.SimilarityFunction.class));
    assertThat(r2RightConfig.vector(), equalTo(testCase3.vector()));
    // Verify the two fields in r2 are different
    assertThat(r2LeftFieldAttr, not(is(r2RightFieldAttr)));
    // Limit[1000[INTEGER],false,false]
    var limit = as(evalS1.child(), Limit.class);
    assertThat(limit.limit(), instanceOf(Literal.class));
    assertThat(((Literal) limit.limit()).value(), equalTo(1000));
    // Filter[testCase1 + 5 + testCase2 > 0] - Filter still has original function calls
    var filter = as(limit.child(), Filter.class);
    var greaterThan = as(filter.condition(), GreaterThan.class);
    assertThat(greaterThan.right(), instanceOf(Literal.class));
    assertThat(((Literal) greaterThan.right()).value(), equalTo(0));
    var filterAdd1 = as(greaterThan.left(), Add.class);
    var filterAdd2 = as(filterAdd1.left(), Add.class);
    // Check the literal 5 in the filter
    assertThat(filterAdd2.right(), instanceOf(Literal.class));
    assertThat(((Literal) filterAdd2.right()).value(), equalTo(5));
    // EsRelation[test_all] - should contain the pushed-down field attributes
    var esRelation = as(filter.child(), EsRelation.class);
    assertTrue(esRelation.output().contains(s1FieldAttr));
    assertTrue(esRelation.output().contains(r2LeftFieldAttr));
    assertTrue(esRelation.output().contains(r2RightFieldAttr));
}
/**
 * Randomized fixture describing one similarity-function invocation: the ES|QL function name,
 * the target field, the query vector, and the upper-case function tag used in synthetic
 * attribute names.
 */
private record SimilarityFunctionTestCase(String esqlFunction, String fieldName, float[] vector, String functionName) {
    // Renders the call, randomly swapping (field, vector) vs (vector, field) argument order.
    public String toQuery() {
        String params = randomBoolean() ? fieldName + ", " + Arrays.toString(vector) : Arrays.toString(vector) + ", " + fieldName;
        return esqlFunction + "(" + params + ")";
    }

    // Prefix of the synthetic pushed-down attribute name: $$<field>$<FUNCTION> (a hash suffix follows).
    public String toFieldAttrName() {
        return "$$" + fieldName + "$" + functionName;
    }

    public static SimilarityFunctionTestCase random(String fieldName) {
        float[] vector = new float[] { randomFloat(), randomFloat(), randomFloat() };
        // Picks uniformly among all five similarity functions.
        // NOTE(review): an earlier comment here claimed only DotProduct/CosineSimilarity were used;
        // the switch below clearly exercises all five cases.
        return switch (randomInt(4)) {
            case 0 -> new SimilarityFunctionTestCase("v_dot_product", fieldName, vector, "V_DOT_PRODUCT");
            case 1 -> new SimilarityFunctionTestCase("v_cosine", fieldName, vector, "V_COSINE");
            case 2 -> new SimilarityFunctionTestCase("v_l1_norm", fieldName, vector, "V_L1NORM");
            case 3 -> new SimilarityFunctionTestCase("v_l2_norm", fieldName, vector, "V_L2NORM");
            case 4 -> new SimilarityFunctionTestCase("v_hamming", fieldName, vector, "V_HAMMING");
            default -> throw new IllegalStateException("Unexpected value");
        };
    }
}
/**
 * LENGTH(field) in an EVAL is replaced by a synthetic attribute loaded straight from the relation.
 */
public void testLengthInEval() {
    // NOTE(review): gates on the vector-similarity pushdown capability — confirm this flag also
    // guards LENGTH pushdown, since all LENGTH tests below use the same gate.
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    String query = """
        FROM test
        | EVAL l = LENGTH(last_name)
        | KEEP l
        """;
    LogicalPlan plan = localPlan(plan(query, analyzer), TEST_SEARCH_STATS);
    var project = as(plan, EsqlProject.class);
    assertThat(Expressions.names(project.projections()), contains("l"));
    var eval = as(project.child(), Eval.class);
    Attribute lAttr = assertLengthPushdown(as(eval.fields().getFirst(), Alias.class).child(), "last_name");
    var limit = as(eval.child(), Limit.class);
    var relation = as(limit.child(), EsRelation.class);
    assertTrue(relation.output().contains(lAttr));
}
/**
 * LENGTH(field) used in a WHERE comparison is replaced by the pushed-down attribute in the Filter.
 */
public void testLengthInWhere() {
    assumeTrue("requires similarity functions", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    String query = """
        FROM test
        | WHERE LENGTH(last_name) > 1
        """;
    LogicalPlan plan = localPlan(plan(query, analyzer), TEST_SEARCH_STATS);
    var project = as(plan, EsqlProject.class);
    var limit = as(project.child(), Limit.class);
    var filter = as(limit.child(), Filter.class);
    Attribute lAttr = assertLengthPushdown(as(filter.condition(), GreaterThan.class).left(), "last_name");
    var relation = as(filter.child(), EsRelation.class);
    assertTrue(relation.output().contains(lAttr));
}
/**
 * LENGTH inside an aggregate argument is pushed below the aggregation: the SUM reads a reference
 * to a pushed-down LENGTH attribute produced by an Eval directly above the relation.
 */
public void testLengthInStats() {
    // NOTE(review): 137382 looks like the tracking issue for this feature; the original message
    // was just the bare number, which tells a reader nothing on a skipped run.
    assumeTrue("requires LENGTH pushdown (issue 137382)", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    String query = """
        FROM test
        | STATS l = SUM(LENGTH(last_name))
        """;
    LogicalPlan plan = localPlan(plan(query, analyzer), TEST_SEARCH_STATS);
    var limit = as(plan, Limit.class);
    var agg = as(limit.child(), Aggregate.class);
    assertThat(agg.aggregates(), hasSize(1));
    // The aggregate is still a SUM, now over the pushed-down reference.
    as(as(agg.aggregates().getFirst(), Alias.class).child(), Sum.class);
    var eval = as(agg.child(), Eval.class);
    Attribute lAttr = assertLengthPushdown(as(eval.fields().getFirst(), Alias.class).child(), "last_name");
    var relation = as(eval.child(), EsRelation.class);
    assertTrue(relation.output().contains(lAttr));
}
/**
 * LENGTH pushdown must see through a chain of EVAL renames and resolve back to the original field.
 */
public void testLengthInEvalAfterManyRenames() {
    assumeTrue("requires push", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    String query = """
        FROM test
        | EVAL l1 = last_name
        | EVAL l2 = l1
        | EVAL l3 = l2
        | EVAL l = LENGTH(l3)
        | KEEP l
        """;
    LogicalPlan plan = localPlan(plan(query, analyzer), TEST_SEARCH_STATS);
    var project = as(plan, EsqlProject.class);
    assertThat(Expressions.names(project.projections()), contains("l"));
    var eval = as(project.child(), Eval.class);
    // The pushdown resolved l3 -> l2 -> l1 -> last_name before building the synthetic attribute.
    Attribute lAttr = assertLengthPushdown(as(eval.fields().getFirst(), Alias.class).child(), "last_name");
    var limit = as(eval.child(), Limit.class);
    var relation = as(limit.child(), EsRelation.class);
    assertTrue(relation.output().contains(lAttr));
}
/**
 * The same LENGTH(field) used in both WHERE and EVAL must be fused into one pushed-down attribute,
 * shared by the Filter condition and the Eval alias.
 */
public void testLengthInWhereAndEval() {
    assumeTrue("requires push", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    String query = """
        FROM test
        | WHERE LENGTH(last_name) > 1
        | EVAL l = LENGTH(last_name)
        """;
    LogicalPlan plan = localPlan(plan(query, analyzer), TEST_SEARCH_STATS);
    var project = as(plan, EsqlProject.class);
    var eval = as(project.child(), Eval.class);
    Attribute lAttr = assertLengthPushdown(as(eval.fields().getFirst(), Alias.class).child(), "last_name");
    var limit = as(eval.child(), Limit.class);
    var filter = as(limit.child(), Filter.class);
    // The filter condition reuses the very same attribute the Eval aliased.
    assertThat(as(filter.condition(), GreaterThan.class).left(), is(lAttr));
    var relation = as(filter.child(), EsRelation.class);
    assertThat(relation.output(), hasItem(lAttr));
}
/**
* Pushed LENGTH to the same field in a <strong>ton</strong> of unique and curious ways. All
* of these pushdowns should be fused to one.
*
* <pre>{@code
* Project[[l{r}#23]]
* \_Eval[[$$SUM$SUM(LENGTH(last>$0{r$}#37 / $$COUNT$$$AVG$SUM(LENGTH(last>$1$1{r$}#41 AS $$AVG$SUM(LENGTH(last>$1#38, $
* $SUM$SUM(LENGTH(last>$0{r$}#37 + $$AVG$SUM(LENGTH(last>$1{r$}#38 + $$SUM$SUM(LENGTH(last>$2{r$}#39 AS l#23]]
* \_Limit[1000[INTEGER],false,false]
* \_Aggregate[[],[SUM($$LENGTH(last_nam>$SUM$0{r$}#35,true[BOOLEAN],PT0S[TIME_DURATION],compensated[KEYWORD]) AS $$SUM$SUM(LE
* NGTH(last>$0#37,
* COUNT(a3{r}#11,true[BOOLEAN],PT0S[TIME_DURATION]) AS $$COUNT$$$AVG$SUM(LENGTH(last>$1$1#41,
* SUM($$LENGTH(first_na>$SUM$1{r$}#36,true[BOOLEAN],PT0S[TIME_DURATION],compensated[KEYWORD]) AS $$SUM$SUM(LENGTH(last>$2#39]]
* \_Eval[[$$last_name$LENGTH$920787299{f$}#42 AS a3#11, $$last_name$LENGTH$920787299{f$}#42 AS $$LENGTH(last_nam>$SUM$0
* #35, $$first_name$LENGTH$920787299{f$}#43 AS $$LENGTH(first_na>$SUM$1#36]]
* \_Filter[$$last_name$LENGTH$920787299{f$}#42 > 1[INTEGER]]
* \_EsRelation[test][_meta_field{f}#30, emp_no{f}#24, first_name{f}#25, ..]
* }</pre>
*/
public void testLengthPushdownZoo() {
    assumeTrue("requires push", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    String query = """
        FROM test
        | EVAL a1 = LENGTH(last_name), a2 = LENGTH(last_name), a3 = LENGTH(last_name),
               a4 = abs(LENGTH(last_name)) + a1 + LENGTH(first_name) * 3
        | WHERE a1 > 1 and LENGTH(last_name) > 1
        | STATS l = SUM(LENGTH(last_name)) + AVG(a3) + SUM(LENGTH(first_name))
        """;
    LogicalPlan plan = localPlan(plan(query, analyzer), TEST_SEARCH_STATS);
    var project = as(plan, Project.class);
    assertThat(Expressions.names(project.projections()), contains("l"));
    // Eval - computes final aggregation result (SUM + AVG + SUM)
    var eval1 = as(project.child(), Eval.class);
    assertThat(eval1.fields(), hasSize(2));
    // The avg is computed as the SUM(LENGTH(last_name)) / COUNT(LENGTH(last_name))
    var avg = eval1.fields().get(0);
    var avgDiv = as(avg.child(), Div.class);
    // SUM(LENGTH(last_name))
    var evalSumLastName = as(avgDiv.left(), ReferenceAttribute.class);
    var evalCountLastName = as(avgDiv.right(), ReferenceAttribute.class);
    var finalAgg = as(eval1.fields().get(1).child(), Add.class);
    var leftFinalAgg = as(finalAgg.left(), Add.class);
    assertThat(leftFinalAgg.left(), equalTo(evalSumLastName));
    assertThat(as(leftFinalAgg.right(), ReferenceAttribute.class).id(), equalTo(avg.id()));
    // SUM(LENGTH(first_name))
    var evalSumFirstName = as(finalAgg.right(), ReferenceAttribute.class);
    // Limit[1000[INTEGER],false,false]
    var limit = as(eval1.child(), Limit.class);
    // Aggregate with 3 aggregates: SUM for last_name, COUNT for last_name
    // (the AVG uses the sum and the count), SUM for first_name
    var agg = as(limit.child(), Aggregate.class);
    assertThat(agg.aggregates(), hasSize(3));
    // Eval - pushdown fields: a3, LENGTH(last_name) for SUM, and LENGTH(first_name) for SUM
    var evalPushdown = as(agg.child(), Eval.class);
    assertThat(evalPushdown.fields(), hasSize(3));
    Alias a3Alias = as(evalPushdown.fields().getFirst(), Alias.class);
    assertThat(a3Alias.name(), equalTo("a3"));
    Attribute lastNamePushDownAttr = assertLengthPushdown(a3Alias.child(), "last_name");
    Alias lastNamePushdownAlias = as(evalPushdown.fields().get(1), Alias.class);
    assertLengthPushdown(lastNamePushdownAlias.child(), "last_name");
    Alias firstNamePushdownAlias = as(evalPushdown.fields().get(2), Alias.class);
    Attribute firstNamePushDownAttr = assertLengthPushdown(firstNamePushdownAlias.child(), "first_name");
    // Verify aggregates reference the pushed down fields
    var sumForLastNameAlias = as(agg.aggregates().get(0), Alias.class);
    var sumForLastName = as(sumForLastNameAlias.child(), Sum.class);
    assertThat(as(sumForLastName.field(), ReferenceAttribute.class).id(), equalTo(lastNamePushdownAlias.id()));
    // Checks that the SUM(LENGTH(last_name)) in the final EVAL is the aggregate result here
    assertThat(evalSumLastName.id(), equalTo(sumForLastNameAlias.id()));
    var countForAvgAlias = as(agg.aggregates().get(1), Alias.class);
    var countForAvg = as(countForAvgAlias.child(), Count.class);
    assertThat(as(countForAvg.field(), ReferenceAttribute.class).id(), equalTo(a3Alias.id()));
    // Checks that the COUNT(LENGTH(last_name)) in the final EVAL is the aggregate result here
    assertThat(evalCountLastName.id(), equalTo(countForAvgAlias.id()));
    var sumForFirstNameAlias = as(agg.aggregates().get(2), Alias.class);
    var sumForFirstName = as(sumForFirstNameAlias.child(), Sum.class);
    assertThat(as(sumForFirstName.field(), ReferenceAttribute.class).id(), equalTo(firstNamePushdownAlias.id()));
    // Checks that the SUM(LENGTH(first_name)) in the final EVAL is the aggregate result here
    assertThat(evalSumFirstName.id(), equalTo(sumForFirstNameAlias.id()));
    // Filter[LENGTH(last_name) > 1]
    var filter = as(evalPushdown.child(), Filter.class);
    assertLengthPushdown(as(filter.condition(), GreaterThan.class).left(), "last_name");
    // EsRelation[test] - should contain the pushed-down field attribute
    var relation = as(filter.child(), EsRelation.class);
    assertThat(relation.output(), hasItem(lastNamePushDownAttr));
    assertThat(relation.output(), hasItem(firstNamePushDownAttr));
    // Fusing check: despite many LENGTH usages, exactly two LENGTH-backed attributes exist in
    // the relation output — one per distinct field (last_name, first_name).
    assertThat(relation.output().stream().filter(a -> {
        if (a instanceof FieldAttribute fa) {
            if (fa.field() instanceof FunctionEsField fef) {
                return fef.functionConfig().function() == BlockLoaderFunctionConfig.Function.LENGTH;
            }
        }
        return false;
    }).toList(), hasSize(2));
}
/**
 * LENGTH used inside two aggregates (SUM and AVG, the latter decomposed to SUM/COUNT) must be
 * pushed down exactly once and referenced by both aggregates.
 */
public void testLengthInStatsTwice() {
    assumeTrue("requires push", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    String query = """
        FROM test
        | STATS l = SUM(LENGTH(last_name)) + AVG(LENGTH(last_name))
        """;
    LogicalPlan plan = localPlan(plan(query, analyzer), TEST_SEARCH_STATS);
    var project = as(plan, Project.class);
    var eval1 = as(project.child(), Eval.class);
    var limit = as(eval1.child(), Limit.class);
    var agg = as(limit.child(), Aggregate.class);
    assertThat(agg.aggregates(), hasSize(2));
    var sum = as(as(agg.aggregates().getFirst(), Alias.class).child(), Sum.class);
    var count = as(as(agg.aggregates().get(1), Alias.class).child(), Count.class);
    var eval2 = as(agg.child(), Eval.class);
    // A single pushed-down LENGTH alias, shared by both aggregates.
    assertThat(eval2.fields(), hasSize(1));
    Alias lAlias = as(eval2.fields().getFirst(), Alias.class);
    Attribute lAttr = assertLengthPushdown(lAlias.child(), "last_name");
    var relation = as(eval2.child(), EsRelation.class);
    assertTrue(relation.output().contains(lAttr));
    assertThat(as(sum.field(), ReferenceAttribute.class).id(), equalTo(lAlias.id()));
    assertThat(as(count.field(), ReferenceAttribute.class).id(), equalTo(lAlias.id()));
}
/**
 * LENGTH over two different fields yields two distinct pushed-down attributes, each referenced
 * by its own aggregate.
 */
public void testLengthTwoFields() {
    assumeTrue("requires push", EsqlCapabilities.Cap.VECTOR_SIMILARITY_FUNCTIONS_PUSHDOWN.isEnabled());
    String query = """
        FROM test
        | STATS last_name = SUM(LENGTH(last_name)), first_name = SUM(LENGTH(first_name))
        """;
    LogicalPlan plan = localPlan(plan(query, analyzer), TEST_SEARCH_STATS);
    var limit = as(plan, Limit.class);
    var agg = as(limit.child(), Aggregate.class);
    assertThat(agg.aggregates(), hasSize(2));
    var sum1 = as(as(agg.aggregates().getFirst(), Alias.class).child(), Sum.class);
    var sum2 = as(as(agg.aggregates().get(1), Alias.class).child(), Sum.class);
    var eval = as(agg.child(), Eval.class);
    assertThat(eval.fields(), hasSize(2));
    Alias lastNameAlias = as(eval.fields().getFirst(), Alias.class);
    Attribute lastNameAttr = assertLengthPushdown(lastNameAlias.child(), "last_name");
    Alias firstNameAlias = as(eval.fields().get(1), Alias.class);
    Attribute firstNameAttr = assertLengthPushdown(firstNameAlias.child(), "first_name");
    var relation = as(eval.child(), EsRelation.class);
    assertThat(relation.output(), hasItems(lastNameAttr, firstNameAttr));
    assertThat(as(sum1.field(), ReferenceAttribute.class).id(), equalTo(lastNameAlias.id()));
    assertThat(as(sum2.field(), ReferenceAttribute.class).id(), equalTo(firstNameAlias.id()));
}
/**
 * Asserts that {@code e} is a pushed-down LENGTH attribute over {@code fieldName} and returns it.
 */
private Attribute assertLengthPushdown(Expression e, String fieldName) {
    // Pushed-down attributes follow the synthetic $$<field>$LENGTH$<suffix> naming scheme.
    var lengthAttr = as(e, FieldAttribute.class);
    String expectedPrefix = "$$" + fieldName + "$LENGTH$";
    assertThat(lengthAttr.name(), startsWith(expectedPrefix));
    // The backing field must be a FunctionEsField configured to compute LENGTH over the original field.
    FunctionEsField functionField = as(lengthAttr.field(), FunctionEsField.class);
    assertThat(functionField.functionConfig().function(), is(BlockLoaderFunctionConfig.Function.LENGTH));
    assertThat(functionField.getName(), equalTo(fieldName));
    assertThat(functionField.getExactInfo().hasExact(), equalTo(false));
    return lengthAttr;
}
/**
 * A full-text function over a missing field is dropped from the disjunction: the missing field is
 * replaced by a null-literal Eval and the Filter keeps only the remaining match.
 */
public void testFullTextFunctionOnMissingField() {
    String functionName = randomFrom("match", "match_phrase");
    var plan = plan(String.format(Locale.ROOT, """
        from test
        | where %s(first_name, "John") or %s(last_name, "Doe")
        """, functionName, functionName));
    var testStats = statsForMissingField("first_name");
    var localPlan = localPlan(plan, testStats);
    var project = as(localPlan, Project.class);
    // Introduces an Eval with first_name as null literal
    var eval = as(project.child(), Eval.class);
    assertThat(Expressions.names(eval.fields()), contains("first_name"));
    var firstNameEval = as(Alias.unwrap(eval.fields().get(0)), Literal.class);
    assertThat(firstNameEval.value(), is(nullValue()));
    assertThat(firstNameEval.dataType(), is(KEYWORD));
    var limit = as(eval.child(), Limit.class);
    // Filter has a single match on last_name only
    var filter = as(limit.child(), Filter.class);
    var fullTextFunction = as(filter.condition(), SingleFieldFullTextFunction.class);
    assertThat(Expressions.name(fullTextFunction.field()), equalTo("last_name"));
}
public void testKnnOnMissingField() {
String query = """
from test_all
| where knn(dense_vector, [0, 1, 2]) or match(text, "Doe")
""";
LogicalPlan plan = localPlan(plan(query, allTypesAnalyzer), TEST_SEARCH_STATS);
var testStats = statsForMissingField("dense_vector");
var localPlan = localPlan(plan, testStats);
var project = as(localPlan, Project.class);
// Introduces an Eval with first_name as null literal
var eval = as(project.child(), Eval.class);
assertThat(Expressions.names(eval.fields()), contains("dense_vector"));
var firstNameEval = as(Alias.unwrap(eval.fields().get(0)), Literal.class);
assertThat(firstNameEval.value(), is(nullValue()));
assertThat(firstNameEval.dataType(), is(DENSE_VECTOR));
var limit = as(eval.child(), Limit.class);
// Filter has a single match on last_name only
var filter = as(limit.child(), Filter.class);
var fullTextFunction = as(filter.condition(), SingleFieldFullTextFunction.class);
assertThat(Expressions.name(fullTextFunction.field()), equalTo("text"));
}
private IsNotNull isNotNull(Expression field) {
return new IsNotNull(EMPTY, field);
}
private LocalRelation asEmptyRelation(Object o) {
var empty = as(o, LocalRelation.class);
assertThat(empty.supplier(), is(EmptyLocalSupplier.EMPTY));
return empty;
}
private LogicalPlan plan(String query, Analyzer analyzer) {
var analyzed = analyzer.analyze(parser.createStatement(query));
return logicalOptimizer.optimize(analyzed);
}
protected LogicalPlan plan(String query) {
return plan(query, analyzer);
}
protected LogicalPlan localPlan(LogicalPlan plan, Configuration configuration, SearchStats searchStats) {
var localContext = new LocalLogicalOptimizerContext(configuration, FoldContext.small(), searchStats);
return new LocalLogicalPlanOptimizer(localContext).localOptimize(plan);
}
protected LogicalPlan localPlan(LogicalPlan plan, SearchStats searchStats) {
return localPlan(plan, EsqlTestUtils.TEST_CFG, searchStats);
}
private LogicalPlan localPlan(String query) {
return localPlan(plan(query), TEST_SEARCH_STATS);
}
private static Analyzer analyzerWithUnionTypeMapping() {
InvalidMappedField unionTypeField = new InvalidMappedField(
"integer_long_field",
Map.of("integer", Set.of("test1"), "long", Set.of("test2"))
);
EsIndex test = EsIndexGenerator.esIndex(
"test*",
Map.of("integer_long_field", unionTypeField),
Map.of("test1", IndexMode.STANDARD, "test2", IndexMode.STANDARD)
);
return new Analyzer(
testAnalyzerContext(
EsqlTestUtils.TEST_CFG,
new EsqlFunctionRegistry(),
indexResolutions(test),
emptyPolicyResolution(),
emptyInferenceResolution()
),
TEST_VERIFIER
);
}
@Override
protected List<String> filteredWarnings() {
return withDefaultLimitWarning(super.filteredWarnings());
}
public static EsRelation relation() {
return EsqlTestUtils.relation(randomFrom(IndexMode.values()));
}
}
| MockFieldAttributeCommand |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/inject/MissingRuntimeRetentionTest.java | {
"start": 3369,
"end": 3623
} | interface ____ {}
/** A qualifier annotation with default retention. */
@BindingAnnotation
@Target({TYPE, METHOD})
// BUG: Diagnostic contains: @Retention(RUNTIME)
public @ | TestAnnotation4 |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/util/clhm/ConcurrentLinkedHashMap.java | {
"start": 53706,
"end": 54965
} | class ____<K, V> implements Serializable {
static final long serialVersionUID = 1;
final EntryWeigher<? super K, ? super V> weigher;
final EvictionListener<K, V> listener;
final int concurrencyLevel;
final Map<K, V> data;
final long capacity;
/**
* Default constructor.
* @param map The map
*/
SerializationProxy(ConcurrentLinkedHashMap<K, V> map) {
concurrencyLevel = map.concurrencyLevel;
data = new HashMap<>(map);
capacity = map.capacity.get();
listener = map.listener;
weigher = map.weigher;
}
/**
* Used for deserialization.
* @return The resolved object
*/
Object readResolve() {
ConcurrentLinkedHashMap<K, V> map = new Builder<K, V>()
.concurrencyLevel(concurrencyLevel)
.maximumWeightedCapacity(capacity)
.listener(listener)
.weigher(weigher)
.build();
map.putAll(data);
return map;
}
}
/**
* Just hold an object.
* @param <T> the type of object
*/
private | SerializationProxy |
java | elastic__elasticsearch | x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openshiftai/completion/OpenShiftAiChatCompletionResponseHandlerTests.java | {
"start": 1368,
"end": 6640
} | class ____ extends ESTestCase {
private static final String URL_VALUE = "http://www.abc.com";
private static final String INFERENCE_ID = "id";
private final OpenShiftAiChatCompletionResponseHandler responseHandler = new OpenShiftAiChatCompletionResponseHandler(
"chat completions",
(a, b) -> mock()
);
public void testFailNotFound() throws IOException {
var responseJson = XContentHelper.stripWhitespace("""
{
"detail": "Not Found"
}
""");
var errorJson = invalidResponseJson(responseJson, 404);
assertThat(errorJson, is(XContentHelper.stripWhitespace(Strings.format("""
{
"error" : {
"code" : "not_found",
"message" : "Resource not found at [%s] for request from inference entity id [%s] \
status [404]. Error message: [{\\"detail\\":\\"Not Found\\"}]",
"type" : "openshift_ai_error"
}
}""", URL_VALUE, INFERENCE_ID))));
}
public void testFailBadRequest() throws IOException {
var responseJson = XContentHelper.stripWhitespace("""
{
"object": "error",
"message": "[{'type': 'missing', 'loc': ('body', 'messages'), 'msg': 'Field required', \
'input': {'model': 'llama-31-8b-instruct', 'messages': [{'role': 'user', 'content': 'What is deep learning?'}], \
'max_tokens': 2, 'stream': True}}]",
"type": "Bad Request",
"param": null,
"code": 400
}
""");
var errorJson = invalidResponseJson(responseJson, 400);
assertThat(errorJson, is(XContentHelper.stripWhitespace(Strings.format("""
{
"error": {
"code": "bad_request",
"message": "Received a bad request status code for request from inference entity id [%s] status [400]. Error message: \
[{\\"object\\":\\"error\\",\\"message\\":\\"[{'type': 'missing', 'loc': ('body', 'messages'), 'msg': 'Field required', \
'input': {'model': 'llama-31-8b-instruct', 'messages': [{'role': 'user', 'content': 'What is deep learning?'}], \
'max_tokens': 2, 'stream': True}}]\\",\\"type\\":\\"Bad Request\\",\\"param\\":null,\\"code\\":400}]",
"type": "openshift_ai_error"
}
}
""", INFERENCE_ID))));
}
public void testFailValidationWithInvalidJson() throws IOException {
var responseJson = """
what? this isn't a json
""";
var errorJson = invalidResponseJson(responseJson, 500);
assertThat(errorJson, is(XContentHelper.stripWhitespace(Strings.format("""
{
"error": {
"code": "bad_request",
"message": "Received a server error status code for request from inference entity id [%s] status [500]. \
Error message: [what? this isn't a json\\n]",
"type": "openshift_ai_error"
}
}
""", INFERENCE_ID))));
}
private String invalidResponseJson(String responseJson, int statusCode) throws IOException {
var exception = invalidResponse(responseJson, statusCode);
assertThat(exception, isA(RetryException.class));
assertThat(unwrapCause(exception), isA(UnifiedChatCompletionException.class));
return toJson((UnifiedChatCompletionException) unwrapCause(exception));
}
private Exception invalidResponse(String responseJson, int statusCode) {
return expectThrows(
RetryException.class,
() -> responseHandler.validateResponse(
mock(),
mock(),
mockRequest(),
new HttpResult(mockErrorResponse(statusCode), responseJson.getBytes(StandardCharsets.UTF_8))
)
);
}
private static Request mockRequest() throws URISyntaxException {
var request = mock(Request.class);
when(request.getInferenceEntityId()).thenReturn(INFERENCE_ID);
when(request.isStreaming()).thenReturn(true);
when(request.getURI()).thenReturn(new URI(URL_VALUE));
return request;
}
private static HttpResponse mockErrorResponse(int statusCode) {
var statusLine = mock(StatusLine.class);
when(statusLine.getStatusCode()).thenReturn(statusCode);
var response = mock(HttpResponse.class);
when(response.getStatusLine()).thenReturn(statusLine);
return response;
}
private String toJson(UnifiedChatCompletionException e) throws IOException {
try (var builder = XContentFactory.jsonBuilder()) {
e.toXContentChunked(EMPTY_PARAMS).forEachRemaining(xContent -> {
try {
xContent.toXContent(builder, EMPTY_PARAMS);
} catch (IOException ex) {
throw new RuntimeException(ex);
}
});
return XContentHelper.convertToJson(BytesReference.bytes(builder), false, builder.contentType());
}
}
}
| OpenShiftAiChatCompletionResponseHandlerTests |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.