language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
spring-projects__spring-boot
|
build-plugin/spring-boot-maven-plugin/src/main/java/org/springframework/boot/maven/CacheInfo.java
|
{
"start": 934,
"end": 2173
}
|
class ____ {
private @Nullable Cache cache;
public CacheInfo() {
}
private CacheInfo(Cache cache) {
this.cache = cache;
}
public void setVolume(VolumeCacheInfo info) {
Assert.state(this.cache == null, "Each image building cache can be configured only once");
String name = info.getName();
Assert.state(name != null, "'name' must not be null");
this.cache = Cache.volume(name);
}
public void setBind(BindCacheInfo info) {
Assert.state(this.cache == null, "Each image building cache can be configured only once");
String source = info.getSource();
Assert.state(source != null, "'source' must not be null");
this.cache = Cache.bind(source);
}
@Nullable Cache asCache() {
return this.cache;
}
static CacheInfo fromVolume(VolumeCacheInfo cacheInfo) {
String name = cacheInfo.getName();
Assert.state(name != null, "'name' must not be null");
return new CacheInfo(Cache.volume(name));
}
static CacheInfo fromBind(BindCacheInfo cacheInfo) {
String source = cacheInfo.getSource();
Assert.state(source != null, "'source' must not be null");
return new CacheInfo(Cache.bind(source));
}
/**
* Encapsulates configuration of an image building cache stored in a volume.
*/
public static
|
CacheInfo
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/jdk8/MaybeFlattenStreamAsFlowable.java
|
{
"start": 1441,
"end": 1968
}
|
class ____<T, R> extends Flowable<R> {
final Maybe<T> source;
final Function<? super T, ? extends Stream<? extends R>> mapper;
public MaybeFlattenStreamAsFlowable(Maybe<T> source, Function<? super T, ? extends Stream<? extends R>> mapper) {
this.source = source;
this.mapper = mapper;
}
@Override
protected void subscribeActual(@NonNull Subscriber<? super R> s) {
source.subscribe(new FlattenStreamMultiObserver<>(s, mapper));
}
static final
|
MaybeFlattenStreamAsFlowable
|
java
|
apache__camel
|
components/camel-thrift/src/test/java/org/apache/camel/component/thrift/generated/InvalidOperation.java
|
{
"start": 13791,
"end": 14058
}
|
class ____ implements org.apache.thrift.scheme.SchemeFactory {
@Override
public InvalidOperationStandardScheme getScheme() {
return new InvalidOperationStandardScheme();
}
}
private static
|
InvalidOperationStandardSchemeFactory
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/core/fs/FileInputSplit.java
|
{
"start": 1121,
"end": 3881
}
|
class ____ extends LocatableInputSplit {
private static final long serialVersionUID = 1L;
/** The path of the file this file split refers to. */
private final Path file;
/** The position of the first byte in the file to process. */
private final long start;
/** The number of bytes in the file to process. */
private final long length;
// --------------------------------------------------------------------------------------------
/**
* Constructs a split with host information.
*
* @param num the number of this input split
* @param file the file name
* @param start the position of the first byte in the file to process
* @param length the number of bytes in the file to process (-1 is flag for "read whole file")
* @param hosts the list of hosts containing the block, possibly <code>null</code>
*/
public FileInputSplit(int num, Path file, long start, long length, String[] hosts) {
super(num, hosts);
this.file = file;
this.start = start;
this.length = length;
}
// --------------------------------------------------------------------------------------------
/**
* Returns the path of the file containing this split's data.
*
* @return the path of the file containing this split's data.
*/
public Path getPath() {
return file;
}
/**
* Returns the position of the first byte in the file to process.
*
* @return the position of the first byte in the file to process
*/
public long getStart() {
return start;
}
/**
* Returns the number of bytes in the file to process.
*
* @return the number of bytes in the file to process
*/
public long getLength() {
return length;
}
// --------------------------------------------------------------------------------------------
@Override
public int hashCode() {
return getSplitNumber() ^ (file == null ? 0 : file.hashCode());
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
} else if (obj instanceof FileInputSplit && super.equals(obj)) {
FileInputSplit other = (FileInputSplit) obj;
return this.start == other.start
&& this.length == other.length
&& (this.file == null
? other.file == null
: (other.file != null && this.file.equals(other.file)));
} else {
return false;
}
}
@Override
public String toString() {
return "[" + getSplitNumber() + "] " + file + ":" + start + "+" + length;
}
}
|
FileInputSplit
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/internal/BindingTypeHelper.java
|
{
"start": 962,
"end": 7714
}
|
class ____ {
private BindingTypeHelper() {
}
public static <T> BindableType<T> resolveTemporalPrecision(
TemporalType precision,
BindableType<T> declaredParameterType,
BindingContext bindingContext) {
if ( precision != null ) {
final TemporalJavaType<T> temporalJtd = getTemporalJavaType( declaredParameterType, bindingContext );
if ( temporalJtd == null || temporalJtd.getPrecision() != precision ) {
final TypeConfiguration typeConfiguration = bindingContext.getTypeConfiguration();
final TemporalJavaType<T> temporalTypeForPrecision =
getTemporalTypeForPrecision( precision, temporalJtd, typeConfiguration );
return typeConfiguration.getBasicTypeRegistry()
.resolve( temporalTypeForPrecision, resolveJdbcTypeCode( precision ) );
}
}
return declaredParameterType;
}
private static <T> TemporalJavaType<T> getTemporalTypeForPrecision(
TemporalType precision, TemporalJavaType<T> temporalJtd, TypeConfiguration typeConfiguration) {
// Special case java.util.Date, because TemporalJavaType#resolveTypeForPrecision doesn't support widening,
// since the main purpose of that method is to determine the final java type based on the reflective type
// + the explicit @Temporal(TemporalType...) configuration
if ( temporalJtd == null || java.util.Date.class.isAssignableFrom( temporalJtd.getJavaTypeClass() ) ) {
final var descriptor =
typeConfiguration.getJavaTypeRegistry()
.resolveDescriptor( resolveJavaTypeClass( precision ) );
//noinspection unchecked
return (TemporalJavaType<T>) descriptor;
}
else {
return temporalJtd.resolveTypeForPrecision( precision, typeConfiguration );
}
}
private static <T> TemporalJavaType<T> getTemporalJavaType(
BindableType<T> declaredParameterType, BindingContext bindingContext) {
if ( declaredParameterType != null ) {
final SqmExpressible<T> sqmExpressible = bindingContext.resolveExpressible( declaredParameterType );
if ( !( JavaTypeHelper.isTemporal( sqmExpressible.getExpressibleJavaType() ) ) ) {
throw new UnsupportedOperationException(
"Cannot treat non-temporal parameter type with temporal precision"
);
}
return (TemporalJavaType<T>) sqmExpressible.getExpressibleJavaType();
}
else {
return null;
}
}
public static JdbcMapping resolveBindType(
Object value,
JdbcMapping baseType,
TypeConfiguration typeConfiguration) {
if ( value == null || !JavaTypeHelper.isTemporal( baseType.getJdbcJavaType() ) ) {
return baseType;
}
else {
final Class<?> javaType = value.getClass();
final TemporalJavaType<?> temporalJavaType = (TemporalJavaType<?>) baseType.getJdbcJavaType();
final TemporalType temporalType = temporalJavaType.getPrecision();
final BindableType<?> bindableType = (BindableType<?>) baseType;
return switch ( temporalType ) {
case TIMESTAMP -> (JdbcMapping) resolveTimestampTemporalTypeVariant( javaType, bindableType, typeConfiguration );
case DATE -> (JdbcMapping) resolveDateTemporalTypeVariant( javaType, bindableType, typeConfiguration );
case TIME -> (JdbcMapping) resolveTimeTemporalTypeVariant( javaType, bindableType, typeConfiguration );
};
}
}
private static BindableType<?> resolveTimestampTemporalTypeVariant(
Class<?> javaType,
BindableType<?> baseType,
TypeConfiguration typeConfiguration) {
if ( baseType.getJavaType().isAssignableFrom( javaType ) ) {
return baseType;
}
else if ( Calendar.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.CALENDAR );
}
else if ( java.util.Date.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.TIMESTAMP );
}
else if ( Instant.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.INSTANT );
}
else if ( OffsetDateTime.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.OFFSET_DATE_TIME );
}
else if ( ZonedDateTime.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.ZONED_DATE_TIME );
}
else if ( OffsetTime.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.OFFSET_TIME );
}
else {
throw new IllegalArgumentException( "Unsure how to handle given Java type ["
+ javaType.getName() + "] as TemporalType#TIMESTAMP" );
}
}
private static BindableType<?> resolveDateTemporalTypeVariant(
Class<?> javaType,
BindableType<?> baseType,
TypeConfiguration typeConfiguration) {
if ( baseType.getJavaType().isAssignableFrom( javaType ) ) {
return baseType;
}
else if ( Calendar.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.CALENDAR_DATE );
}
else if ( java.util.Date.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.DATE );
}
else if ( Instant.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.INSTANT );
}
else if ( OffsetDateTime.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.OFFSET_DATE_TIME );
}
else if ( ZonedDateTime.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.ZONED_DATE_TIME );
}
else {
throw new IllegalArgumentException( "Unsure how to handle given Java type ["
+ javaType.getName() + "] as TemporalType#DATE" );
}
}
private static BindableType<?> resolveTimeTemporalTypeVariant(
Class<?> javaType,
BindableType<?> baseType,
TypeConfiguration typeConfiguration) {
if ( Calendar.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.CALENDAR_TIME );
}
else if ( java.util.Date.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.TIME );
}
else if ( LocalTime.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.LOCAL_TIME );
}
else if ( OffsetTime.class.isAssignableFrom( javaType ) ) {
return typeConfiguration.getBasicTypeRegistry().resolve( StandardBasicTypes.OFFSET_TIME );
}
else {
throw new IllegalArgumentException( "Unsure how to handle given Java type ["
+ javaType.getName() + "] as TemporalType#TIME" );
}
}
}
|
BindingTypeHelper
|
java
|
spring-projects__spring-security
|
ldap/src/main/java/org/springframework/security/ldap/authentication/NullLdapAuthoritiesPopulator.java
|
{
"start": 1038,
"end": 1281
}
|
class ____ implements LdapAuthoritiesPopulator {
@Override
public Collection<GrantedAuthority> getGrantedAuthorities(DirContextOperations userDetails, String username) {
return AuthorityUtils.NO_AUTHORITIES;
}
}
|
NullLdapAuthoritiesPopulator
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/HierarchyOrderTest.java
|
{
"start": 2851,
"end": 3082
}
|
class ____ extends BaseD {
public DerDB(int b) {
this.b = b;
}
public int b;
public DerDB() {
}
}
/*
* Created on 03/12/2024 by Paul Harrison (paul.harrison@manchester.ac.uk).
*/
@Embeddable
public static
|
DerDB
|
java
|
hibernate__hibernate-orm
|
hibernate-jfr/src/main/java/org/hibernate/event/jfr/internal/CollectionRecreateEvent.java
|
{
"start": 559,
"end": 981
}
|
class ____ extends Event implements DiagnosticEvent {
public static final String NAME = "org.hibernate.orm.CollectionRecreateEvent";
@Label("Session Identifier")
public String sessionIdentifier;
@Label("Entity Identifier")
public String id;
@Label("Collection Role")
public String role;
@Label("Success")
public boolean success;
@Override
public String toString() {
return NAME;
}
}
|
CollectionRecreateEvent
|
java
|
quarkusio__quarkus
|
test-framework/common/src/main/java/io/quarkus/test/common/TestResourceManager.java
|
{
"start": 38301,
"end": 39411
}
|
class ____ extends
AbstractTestResourceClassEntryHandler
implements TestResourceClassEntryHandler {
@Override
public boolean appliesTo(AnnotationInstance annotation) {
return QUARKUS_TEST_RESOURCE.equals(annotation.name());
}
@Override
public TestResourceClassEntry produce(AnnotationInstance annotation) {
return new TestResourceClassEntry(lifecycleManager(annotation), args(annotation), null, isParallel(annotation),
scope(annotation));
}
@Override
public TestResourceScope scope(AnnotationInstance annotation) {
TestResourceScope scope = GLOBAL;
AnnotationValue restrict = annotation.value("restrictToAnnotatedClass");
if (restrict != null) {
if (restrict.asBoolean()) {
scope = RESTRICTED_TO_CLASS;
}
}
return scope;
}
}
/**
* Handles {@link WithTestResource}
*/
private static final
|
QuarkusTestResourceTestResourceClassEntryHandler
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/matchers/MatchersTest.java
|
{
"start": 17250,
"end": 17856
}
|
class ____ extends BugChecker
implements MethodInvocationTreeMatcher {
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (Matchers.hasAnnotation("java.lang.Deprecated").matches(tree, state)) {
return describeMatch(tree);
}
return Description.NO_MATCH;
}
}
/** Simple checker to make sure sameArgument doesn't throw IndexOutOfBoundsException. */
@BugPattern(
summary = "Checker that matches invocation if the first argument is repeated",
severity = ERROR)
public static
|
NoAnnotatedCallsChecker
|
java
|
quarkusio__quarkus
|
integration-tests/test-extension/tests/src/main/java/io/quarkus/it/extension/FinalFieldReflectionTestEndpoint.java
|
{
"start": 598,
"end": 2278
}
|
class ____ extends HttpServlet {
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws IOException {
reflectiveSetterInvoke(resp);
resp.getWriter().write("OK");
}
private void reflectiveSetterInvoke(HttpServletResponse resp) throws IOException {
try {
FinalFieldReflectionObject nominalInstance = new FinalFieldReflectionObject();
Field field = nominalInstance.getClass().getDeclaredField("value");
field.setAccessible(true);
field.set(nominalInstance, "OK");
Method getValue = nominalInstance.getClass().getMethod("getValue");
Object value = getValue.invoke(nominalInstance);
if (!"OK".equals(value)) {
final PrintWriter writer = resp.getWriter();
writer.write(format("field incorrectly set, expecting 'OK', got '%s'", value));
writer.append("\n\t");
}
} catch (Exception e) {
reportException(e, resp);
}
}
private void reportException(final Exception e, final HttpServletResponse resp) throws IOException {
reportException(null, e, resp);
}
private void reportException(String errorMessage, final Exception e, final HttpServletResponse resp) throws IOException {
final PrintWriter writer = resp.getWriter();
if (errorMessage != null) {
writer.write(errorMessage);
writer.write(" ");
}
writer.write(e.toString());
writer.append("\n\t");
e.printStackTrace(writer);
writer.append("\n\t");
}
}
|
FinalFieldReflectionTestEndpoint
|
java
|
elastic__elasticsearch
|
modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/CustomMustacheFactory.java
|
{
"start": 1632,
"end": 4348
}
|
class ____ extends DefaultMustacheFactory {
static final String V7_JSON_MEDIA_TYPE_WITH_CHARSET = "application/json; charset=UTF-8";
static final String JSON_MEDIA_TYPE_WITH_CHARSET = "application/json;charset=utf-8";
static final String JSON_MEDIA_TYPE = "application/json";
static final String PLAIN_TEXT_MEDIA_TYPE = "text/plain";
static final String X_WWW_FORM_URLENCODED_MEDIA_TYPE = "application/x-www-form-urlencoded";
private static final String DEFAULT_MEDIA_TYPE = JSON_MEDIA_TYPE;
private static final boolean DEFAULT_DETECT_MISSING_PARAMS = false;
private static final Map<String, Supplier<Encoder>> ENCODERS = Map.of(
V7_JSON_MEDIA_TYPE_WITH_CHARSET,
JsonEscapeEncoder::new,
JSON_MEDIA_TYPE_WITH_CHARSET,
JsonEscapeEncoder::new,
JSON_MEDIA_TYPE,
JsonEscapeEncoder::new,
PLAIN_TEXT_MEDIA_TYPE,
DefaultEncoder::new,
X_WWW_FORM_URLENCODED_MEDIA_TYPE,
UrlEncoder::new
);
private final Encoder encoder;
/**
* Initializes a CustomMustacheFactory object with a specified mediaType.
*
* @deprecated Use {@link #builder()} instead to retrieve a {@link Builder} object that can be used to create a factory.
*/
@Deprecated
public CustomMustacheFactory(String mediaType) {
this(mediaType, DEFAULT_DETECT_MISSING_PARAMS);
}
/**
* Default constructor for the factory.
*
* @deprecated Use {@link #builder()} instead to retrieve a {@link Builder} object that can be used to create a factory.
*/
@Deprecated
public CustomMustacheFactory() {
this(DEFAULT_MEDIA_TYPE, DEFAULT_DETECT_MISSING_PARAMS);
}
private CustomMustacheFactory(String mediaType, boolean detectMissingParams) {
super();
setObjectHandler(new CustomReflectionObjectHandler(detectMissingParams));
this.encoder = createEncoder(mediaType);
}
@Override
public void encode(String value, Writer writer) {
try {
encoder.encode(value, writer);
} catch (IOException e) {
throw new MustacheException("Unable to encode value", e);
}
}
static Encoder createEncoder(String mediaType) {
final Supplier<Encoder> supplier = ENCODERS.get(mediaType);
if (supplier == null) {
throw new IllegalArgumentException("No encoder found for media type [" + mediaType + "]");
}
return supplier.get();
}
@Override
public MustacheVisitor createMustacheVisitor() {
return new CustomMustacheVisitor(this);
}
public static Builder builder() {
return new Builder();
}
|
CustomMustacheFactory
|
java
|
apache__flink
|
flink-python/src/test/java/org/apache/flink/table/runtime/typeutils/serializers/python/TimestampSerializerTest.java
|
{
"start": 1086,
"end": 1689
}
|
class ____ extends SerializerTestBase<Timestamp> {
@Override
protected TypeSerializer<Timestamp> createSerializer() {
return new TimestampSerializer(getPrecision());
}
@Override
protected int getLength() {
return (getPrecision() <= 3) ? 8 : 12;
}
@Override
protected Class<Timestamp> getTypeClass() {
return Timestamp.class;
}
abstract int getPrecision();
@Override
protected Timestamp[] getTestData() {
return new Timestamp[] {Timestamp.valueOf("2018-03-11 03:00:00.123")};
}
static final
|
TimestampSerializerTest
|
java
|
apache__spark
|
sql/api/src/main/java/org/apache/spark/sql/api/java/UDF5.java
|
{
"start": 980,
"end": 1105
}
|
interface ____<T1, T2, T3, T4, T5, R> extends Serializable {
R call(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5) throws Exception;
}
|
UDF5
|
java
|
apache__thrift
|
lib/java/src/main/java/org/apache/thrift/meta_data/FieldValueMetaData.java
|
{
"start": 999,
"end": 1981
}
|
class ____ implements java.io.Serializable {
public final byte type;
private final boolean isTypedefType;
private final String typedefName;
private final boolean isBinary;
public FieldValueMetaData(byte type, boolean binary) {
this.type = type;
this.isTypedefType = false;
this.typedefName = null;
this.isBinary = binary;
}
public FieldValueMetaData(byte type) {
this(type, false);
}
public FieldValueMetaData(byte type, String typedefName) {
this.type = type;
this.isTypedefType = true;
this.typedefName = typedefName;
this.isBinary = false;
}
public boolean isTypedef() {
return isTypedefType;
}
public String getTypedefName() {
return typedefName;
}
public boolean isStruct() {
return type == TType.STRUCT;
}
public boolean isContainer() {
return type == TType.LIST || type == TType.MAP || type == TType.SET;
}
public boolean isBinary() {
return isBinary;
}
}
|
FieldValueMetaData
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SjmsEndpointBuilderFactory.java
|
{
"start": 67383,
"end": 98735
}
|
interface ____ extends EndpointProducerBuilder {
default SjmsEndpointProducerBuilder basic() {
return (SjmsEndpointProducerBuilder) this;
}
/**
* Whether to allow sending messages with no body. If this option is
* false and the message body is null, then an JMSException is thrown.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: producer (advanced)
*
* @param allowNullBody the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder allowNullBody(boolean allowNullBody) {
doSetProperty("allowNullBody", allowNullBody);
return this;
}
/**
* Whether to allow sending messages with no body. If this option is
* false and the message body is null, then an JMSException is thrown.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: producer (advanced)
*
* @param allowNullBody the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder allowNullBody(String allowNullBody) {
doSetProperty("allowNullBody", allowNullBody);
return this;
}
/**
* Use this option to force disabling time to live. For example when you
* do request/reply over JMS, then Camel will by default use the
* requestTimeout value as time to live on the message being sent. The
* problem is that the sender and receiver systems have to have their
* clocks synchronized, so they are in sync. This is not always so easy
* to archive. So you can use disableTimeToLive=true to not set a time
* to live value on the sent message. Then the message will not expire
* on the receiver system. See below in section About time to live for
* more details.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param disableTimeToLive the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder disableTimeToLive(boolean disableTimeToLive) {
doSetProperty("disableTimeToLive", disableTimeToLive);
return this;
}
/**
* Use this option to force disabling time to live. For example when you
* do request/reply over JMS, then Camel will by default use the
* requestTimeout value as time to live on the message being sent. The
* problem is that the sender and receiver systems have to have their
* clocks synchronized, so they are in sync. This is not always so easy
* to archive. So you can use disableTimeToLive=true to not set a time
* to live value on the sent message. Then the message will not expire
* on the receiver system. See below in section About time to live for
* more details.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param disableTimeToLive the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder disableTimeToLive(String disableTimeToLive) {
doSetProperty("disableTimeToLive", disableTimeToLive);
return this;
}
/**
* Set if the deliveryMode, priority or timeToLive qualities of service
* should be used when sending messages. This option is based on
* Spring's JmsTemplate. The deliveryMode, priority and timeToLive
* options are applied to the current endpoint. This contrasts with the
* preserveMessageQos option, which operates at message granularity,
* reading QoS properties exclusively from the Camel In message headers.
*
* The option is a: <code>java.lang.Boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param explicitQosEnabled the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder explicitQosEnabled(Boolean explicitQosEnabled) {
doSetProperty("explicitQosEnabled", explicitQosEnabled);
return this;
}
/**
* Set if the deliveryMode, priority or timeToLive qualities of service
* should be used when sending messages. This option is based on
* Spring's JmsTemplate. The deliveryMode, priority and timeToLive
* options are applied to the current endpoint. This contrasts with the
* preserveMessageQos option, which operates at message granularity,
* reading QoS properties exclusively from the Camel In message headers.
*
* The option will be converted to a <code>java.lang.Boolean</code>
* type.
*
* Default: false
* Group: producer (advanced)
*
* @param explicitQosEnabled the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder explicitQosEnabled(String explicitQosEnabled) {
doSetProperty("explicitQosEnabled", explicitQosEnabled);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Set to true, if you want to send message using the QoS settings
* specified on the message, instead of the QoS settings on the JMS
* endpoint. The following three headers are considered JMSPriority,
* JMSDeliveryMode, and JMSExpiration. You can provide all or only some
* of them. If not provided, Camel will fall back to use the values from
* the endpoint instead. So, when using this option, the headers
* override the values from the endpoint. The explicitQosEnabled option,
* by contrast, will only use options set on the endpoint, and not
* values from the message header.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param preserveMessageQos the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder preserveMessageQos(boolean preserveMessageQos) {
doSetProperty("preserveMessageQos", preserveMessageQos);
return this;
}
/**
* Set to true, if you want to send message using the QoS settings
* specified on the message, instead of the QoS settings on the JMS
* endpoint. The following three headers are considered JMSPriority,
* JMSDeliveryMode, and JMSExpiration. You can provide all or only some
* of them. If not provided, Camel will fall back to use the values from
* the endpoint instead. So, when using this option, the headers
* override the values from the endpoint. The explicitQosEnabled option,
* by contrast, will only use options set on the endpoint, and not
* values from the message header.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param preserveMessageQos the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder preserveMessageQos(String preserveMessageQos) {
doSetProperty("preserveMessageQos", preserveMessageQos);
return this;
}
/**
* Whether to startup the consumer message listener asynchronously, when
* starting a route. For example if a JmsConsumer cannot get a
* connection to a remote JMS broker, then it may block while retrying
* and/or fail over. This will cause Camel to block while starting
* routes. By setting this option to true, you will let routes startup,
* while the JmsConsumer connects to the JMS broker using a dedicated
* thread in asynchronous mode. If this option is used, then beware that
* if the connection could not be established, then an exception is
* logged at WARN level, and the consumer will not be able to receive
* messages; You can then restart the route to retry.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param asyncStartListener the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder asyncStartListener(boolean asyncStartListener) {
doSetProperty("asyncStartListener", asyncStartListener);
return this;
}
/**
* Whether to startup the consumer message listener asynchronously, when
* starting a route. For example if a JmsConsumer cannot get a
* connection to a remote JMS broker, then it may block while retrying
* and/or fail over. This will cause Camel to block while starting
* routes. By setting this option to true, you will let routes startup,
* while the JmsConsumer connects to the JMS broker using a dedicated
* thread in asynchronous mode. If this option is used, then beware that
* if the connection could not be established, then an exception is
* logged at WARN level, and the consumer will not be able to receive
* messages; You can then restart the route to retry.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param asyncStartListener the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder asyncStartListener(String asyncStartListener) {
doSetProperty("asyncStartListener", asyncStartListener);
return this;
}
/**
* Whether to stop the consumer message listener asynchronously, when
* stopping a route.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param asyncStopListener the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder asyncStopListener(boolean asyncStopListener) {
doSetProperty("asyncStopListener", asyncStopListener);
return this;
}
/**
* Whether to stop the consumer message listener asynchronously, when
* stopping a route.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param asyncStopListener the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder asyncStopListener(String asyncStopListener) {
doSetProperty("asyncStopListener", asyncStopListener);
return this;
}
/**
* To use a custom DestinationCreationStrategy.
*
* The option is a:
* <code>org.apache.camel.component.sjms.jms.DestinationCreationStrategy</code> type.
*
* Group: advanced
*
* @param destinationCreationStrategy the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder destinationCreationStrategy(org.apache.camel.component.sjms.jms.DestinationCreationStrategy destinationCreationStrategy) {
doSetProperty("destinationCreationStrategy", destinationCreationStrategy);
return this;
}
/**
* To use a custom DestinationCreationStrategy.
*
* The option will be converted to a
* <code>org.apache.camel.component.sjms.jms.DestinationCreationStrategy</code> type.
*
* Group: advanced
*
* @param destinationCreationStrategy the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder destinationCreationStrategy(String destinationCreationStrategy) {
doSetProperty("destinationCreationStrategy", destinationCreationStrategy);
return this;
}
/**
* Specifies the JMS Exception Listener that is to be notified of any
* underlying JMS exceptions.
*
* The option is a: <code>jakarta.jms.ExceptionListener</code> type.
*
* Group: advanced
*
* @param exceptionListener the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder exceptionListener(jakarta.jms.ExceptionListener exceptionListener) {
doSetProperty("exceptionListener", exceptionListener);
return this;
}
/**
* Specifies the JMS Exception Listener that is to be notified of any
* underlying JMS exceptions.
*
* The option will be converted to a
* <code>jakarta.jms.ExceptionListener</code> type.
*
* Group: advanced
*
* @param exceptionListener the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder exceptionListener(String exceptionListener) {
doSetProperty("exceptionListener", exceptionListener);
return this;
}
/**
* To use a custom HeaderFilterStrategy to filter header to and from
* Camel message.
*
* The option is a:
* <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
*
* Group: advanced
*
* @param headerFilterStrategy the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder headerFilterStrategy(org.apache.camel.spi.HeaderFilterStrategy headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
/**
* To use a custom HeaderFilterStrategy to filter header to and from
* Camel message.
*
* The option will be converted to a
* <code>org.apache.camel.spi.HeaderFilterStrategy</code> type.
*
* Group: advanced
*
* @param headerFilterStrategy the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder headerFilterStrategy(String headerFilterStrategy) {
doSetProperty("headerFilterStrategy", headerFilterStrategy);
return this;
}
/**
* Whether to include all JMSXxxx properties when mapping from JMS to
* Camel Message. Setting this to true will include properties such as
* JMSXAppID, and JMSXUserID etc. Note: If you are using a custom
* headerFilterStrategy then this option does not apply.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param includeAllJMSXProperties the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder includeAllJMSXProperties(boolean includeAllJMSXProperties) {
doSetProperty("includeAllJMSXProperties", includeAllJMSXProperties);
return this;
}
/**
* Whether to include all JMSXxxx properties when mapping from JMS to
* Camel Message. Setting this to true will include properties such as
* JMSXAppID, and JMSXUserID etc. Note: If you are using a custom
* headerFilterStrategy then this option does not apply.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param includeAllJMSXProperties the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder includeAllJMSXProperties(String includeAllJMSXProperties) {
doSetProperty("includeAllJMSXProperties", includeAllJMSXProperties);
return this;
}
/**
* Pluggable strategy for encoding and decoding JMS keys so they can be
* compliant with the JMS specification. Camel provides two
* implementations out of the box: default and passthrough. The default
* strategy will safely marshal dots and hyphens (. and -). The
* passthrough strategy leaves the key as is. Can be used for JMS
* brokers which do not care whether JMS header keys contain illegal
* characters. You can provide your own implementation of the
* org.apache.camel.component.jms.JmsKeyFormatStrategy and refer to it
* using the # notation.
*
* The option is a:
* <code>org.apache.camel.component.sjms.jms.JmsKeyFormatStrategy</code>
* type.
*
* Group: advanced
*
* @param jmsKeyFormatStrategy the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder jmsKeyFormatStrategy(org.apache.camel.component.sjms.jms.JmsKeyFormatStrategy jmsKeyFormatStrategy) {
doSetProperty("jmsKeyFormatStrategy", jmsKeyFormatStrategy);
return this;
}
/**
* Pluggable strategy for encoding and decoding JMS keys so they can be
* compliant with the JMS specification. Camel provides two
* implementations out of the box: default and passthrough. The default
* strategy will safely marshal dots and hyphens (. and -). The
* passthrough strategy leaves the key as is. Can be used for JMS
* brokers which do not care whether JMS header keys contain illegal
* characters. You can provide your own implementation of the
* org.apache.camel.component.jms.JmsKeyFormatStrategy and refer to it
* using the # notation.
*
* The option will be converted to a
* <code>org.apache.camel.component.sjms.jms.JmsKeyFormatStrategy</code>
* type.
*
* Group: advanced
*
* @param jmsKeyFormatStrategy the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder jmsKeyFormatStrategy(String jmsKeyFormatStrategy) {
doSetProperty("jmsKeyFormatStrategy", jmsKeyFormatStrategy);
return this;
}
/**
* Allows you to force the use of a specific jakarta.jms.Message
* implementation for sending JMS messages. Possible values are: Bytes,
* Map, Object, Stream, Text. By default, Camel would determine which
* JMS message type to use from the In body type. This option allows you
* to specify it.
*
* The option is a:
* <code>org.apache.camel.component.sjms.jms.JmsMessageType</code> type.
*
* Group: advanced
*
* @param jmsMessageType the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder jmsMessageType(org.apache.camel.component.sjms.jms.JmsMessageType jmsMessageType) {
doSetProperty("jmsMessageType", jmsMessageType);
return this;
}
/**
* Allows you to force the use of a specific jakarta.jms.Message
* implementation for sending JMS messages. Possible values are: Bytes,
* Map, Object, Stream, Text. By default, Camel would determine which
* JMS message type to use from the In body type. This option allows you
* to specify it.
*
* The option will be converted to a
* <code>org.apache.camel.component.sjms.jms.JmsMessageType</code> type.
*
* Group: advanced
*
* @param jmsMessageType the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder jmsMessageType(String jmsMessageType) {
doSetProperty("jmsMessageType", jmsMessageType);
return this;
}
/**
* Specifies whether Camel should auto map the received JMS message to a
* suited payload type, such as jakarta.jms.TextMessage to a String etc.
* See section about how mapping works below for more details.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param mapJmsMessage the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder mapJmsMessage(boolean mapJmsMessage) {
doSetProperty("mapJmsMessage", mapJmsMessage);
return this;
}
/**
* Specifies whether Camel should auto map the received JMS message to a
* suited payload type, such as jakarta.jms.TextMessage to a String etc.
* See section about how mapping works below for more details.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param mapJmsMessage the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder mapJmsMessage(String mapJmsMessage) {
doSetProperty("mapJmsMessage", mapJmsMessage);
return this;
}
/**
* To use the given MessageCreatedStrategy which are invoked when Camel
* creates new instances of jakarta.jms.Message objects when Camel is
* sending a JMS message.
*
* The option is a:
* <code>org.apache.camel.component.sjms.jms.MessageCreatedStrategy</code> type.
*
* Group: advanced
*
* @param messageCreatedStrategy the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder messageCreatedStrategy(org.apache.camel.component.sjms.jms.MessageCreatedStrategy messageCreatedStrategy) {
doSetProperty("messageCreatedStrategy", messageCreatedStrategy);
return this;
}
/**
* To use the given MessageCreatedStrategy which are invoked when Camel
* creates new instances of jakarta.jms.Message objects when Camel is
* sending a JMS message.
*
* The option will be converted to a
* <code>org.apache.camel.component.sjms.jms.MessageCreatedStrategy</code> type.
*
* Group: advanced
*
* @param messageCreatedStrategy the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder messageCreatedStrategy(String messageCreatedStrategy) {
doSetProperty("messageCreatedStrategy", messageCreatedStrategy);
return this;
}
/**
* Specifies the interval between recovery attempts, i.e. when a
* connection is being refreshed, in milliseconds. The default is 5000
* ms, that is, 5 seconds.
*
* The option is a: <code>long</code> type.
*
* Default: 5000
* Group: advanced
*
* @param recoveryInterval the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder recoveryInterval(long recoveryInterval) {
doSetProperty("recoveryInterval", recoveryInterval);
return this;
}
/**
* Specifies the interval between recovery attempts, i.e. when a
* connection is being refreshed, in milliseconds. The default is 5000
* ms, that is, 5 seconds.
*
* The option will be converted to a <code>long</code> type.
*
* Default: 5000
* Group: advanced
*
* @param recoveryInterval the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder recoveryInterval(String recoveryInterval) {
doSetProperty("recoveryInterval", recoveryInterval);
return this;
}
/**
* Sets whether synchronous processing should be strictly used.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param synchronous the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder synchronous(boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param synchronous the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder synchronous(String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* If enabled and you are using Request Reply messaging (InOut) and an
* Exchange failed on the consumer side, then the caused Exception will
* be send back in response as a jakarta.jms.ObjectMessage. If the
* client is Camel, the returned Exception is rethrown. This allows you
* to use Camel JMS as a bridge in your routing - for example, using
* persistent queues to enable robust routing. Notice that if you also
* have transferExchange enabled, this option takes precedence. The
* caught exception is required to be serializable. The original
* Exception on the consumer side can be wrapped in an outer exception
* such as org.apache.camel.RuntimeCamelException when returned to the
* producer. Use this with caution as the data is using Java Object
* serialization and requires the received to be able to deserialize the
* data at Class level, which forces a strong coupling between the
* producers and consumer!.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param transferException the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder transferException(boolean transferException) {
doSetProperty("transferException", transferException);
return this;
}
/**
* If enabled and you are using Request Reply messaging (InOut) and an
* Exchange failed on the consumer side, then the caused Exception will
* be send back in response as a jakarta.jms.ObjectMessage. If the
* client is Camel, the returned Exception is rethrown. This allows you
* to use Camel JMS as a bridge in your routing - for example, using
* persistent queues to enable robust routing. Notice that if you also
* have transferExchange enabled, this option takes precedence. The
* caught exception is required to be serializable. The original
* Exception on the consumer side can be wrapped in an outer exception
* such as org.apache.camel.RuntimeCamelException when returned to the
* producer. Use this with caution as the data is using Java Object
* serialization and requires the received to be able to deserialize the
* data at Class level, which forces a strong coupling between the
* producers and consumer!.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*
* @param transferException the value to set
* @return the dsl builder
*/
default AdvancedSjmsEndpointProducerBuilder transferException(String transferException) {
doSetProperty("transferException", transferException);
return this;
}
}
/**
* Builder for endpoint for the Simple JMS component.
*/
public
|
AdvancedSjmsEndpointProducerBuilder
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeAdminManager.java
|
{
"start": 3806,
"end": 16711
}
|
class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(DatanodeAdminManager.class);
private final Namesystem namesystem;
private final BlockManager blockManager;
private final HeartbeatManager hbManager;
private final ScheduledExecutorService executor;
private DatanodeAdminMonitorInterface monitor = null;
DatanodeAdminManager(final Namesystem namesystem,
final BlockManager blockManager, final HeartbeatManager hbManager) {
this.namesystem = namesystem;
this.blockManager = blockManager;
this.hbManager = hbManager;
executor = Executors.newScheduledThreadPool(1,
new ThreadFactoryBuilder().setNameFormat("DatanodeAdminMonitor-%d")
.setDaemon(true).build());
}
/**
* Start the DataNode admin monitor thread.
* @param conf
*/
void activate(Configuration conf) {
final int intervalSecs = (int) conf.getTimeDuration(
DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY,
DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_DEFAULT,
TimeUnit.SECONDS);
Preconditions.checkArgument(intervalSecs >= 0, "Cannot set a negative " +
"value for " + DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY);
Class cls = null;
try {
cls = conf.getClass(
DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_MONITOR_CLASS,
Class.forName(DFSConfigKeys
.DFS_NAMENODE_DECOMMISSION_MONITOR_CLASS_DEFAULT));
monitor =
(DatanodeAdminMonitorInterface)ReflectionUtils.newInstance(cls, conf);
monitor.setBlockManager(blockManager);
monitor.setNameSystem(namesystem);
monitor.setDatanodeAdminManager(this);
} catch (Exception e) {
throw new RuntimeException("Unable to create the Decommission monitor " +
"from "+cls, e);
}
executor.scheduleWithFixedDelay(monitor, intervalSecs, intervalSecs,
TimeUnit.SECONDS);
LOG.debug("Activating DatanodeAdminManager with interval {} seconds.", intervalSecs);
}
/**
* Stop the admin monitor thread, waiting briefly for it to terminate.
*/
void close() {
executor.shutdownNow();
try {
executor.awaitTermination(3000, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {}
}
/**
* Start decommissioning the specified datanode.
* @param node
*/
@VisibleForTesting
public void startDecommission(DatanodeDescriptor node) {
if (!node.isDecommissionInProgress() && !node.isDecommissioned()) {
// Update DN stats maintained by HeartbeatManager
hbManager.startDecommission(node);
// Update cluster's emptyRack
blockManager.getDatanodeManager().getNetworkTopology().decommissionNode(node);
// hbManager.startDecommission will set dead node to decommissioned.
if (node.isDecommissionInProgress()) {
for (DatanodeStorageInfo storage : node.getStorageInfos()) {
LOG.info("Starting decommission of {} {} with {} blocks",
node, storage, storage.numBlocks());
}
node.getLeavingServiceStatus().setStartTime(monotonicNow());
monitor.startTrackingNode(node);
}
} else {
LOG.trace("startDecommission: Node {} in {}, nothing to do.",
node, node.getAdminState());
}
}
/**
* Stop decommissioning the specified datanode.
* @param node
*/
@VisibleForTesting
public void stopDecommission(DatanodeDescriptor node) {
if (node.isDecommissionInProgress() || node.isDecommissioned()) {
// Update DN stats maintained by HeartbeatManager
hbManager.stopDecommission(node);
// Update cluster's emptyRack
blockManager.getDatanodeManager().getNetworkTopology().recommissionNode(node);
// extra redundancy blocks will be detected and processed when
// the dead node comes back and send in its full block report.
if (node.isAlive()) {
blockManager.processExtraRedundancyBlocksOnInService(node);
}
// Remove from tracking in DatanodeAdminManager
monitor.stopTrackingNode(node);
} else {
LOG.trace("stopDecommission: Node {} in {}, nothing to do.",
node, node.getAdminState());
}
}
/**
* Start maintenance of the specified datanode.
* @param node
*/
@VisibleForTesting
public void startMaintenance(DatanodeDescriptor node,
long maintenanceExpireTimeInMS) {
// Even if the node is already in maintenance, we still need to adjust
// the expiration time.
node.setMaintenanceExpireTimeInMS(maintenanceExpireTimeInMS);
if (!node.isMaintenance()) {
// Update DN stats maintained by HeartbeatManager
hbManager.startMaintenance(node);
// hbManager.startMaintenance will set dead node to IN_MAINTENANCE.
if (node.isEnteringMaintenance()) {
for (DatanodeStorageInfo storage : node.getStorageInfos()) {
LOG.info("Starting maintenance of {} {} with {} blocks",
node, storage, storage.numBlocks());
}
node.getLeavingServiceStatus().setStartTime(monotonicNow());
}
// Track the node regardless whether it is ENTERING_MAINTENANCE or
// IN_MAINTENANCE to support maintenance expiration.
monitor.startTrackingNode(node);
} else {
LOG.trace("startMaintenance: Node {} in {}, nothing to do.",
node, node.getAdminState());
}
}
/**
* Stop maintenance of the specified datanode.
* @param node
*/
@VisibleForTesting
public void stopMaintenance(DatanodeDescriptor node) {
if (node.isMaintenance()) {
// Update DN stats maintained by HeartbeatManager
hbManager.stopMaintenance(node);
// extra redundancy blocks will be detected and processed when
// the dead node comes back and send in its full block report.
if (!node.isAlive()) {
// The node became dead when it was in maintenance, at which point
// the replicas weren't removed from block maps.
// When the node leaves maintenance, the replicas should be removed
// from the block maps to trigger the necessary replication to
// maintain the safety property of "# of live replicas + maintenance
// replicas" >= the expected redundancy.
blockManager.removeBlocksAssociatedTo(node);
} else {
// Even though putting nodes in maintenance node doesn't cause live
// replicas to match expected replication factor, it is still possible
// to have over replicated when the node leaves maintenance node.
// First scenario:
// a. Node became dead when it is at AdminStates.NORMAL, thus
// block is replicated so that 3 replicas exist on other nodes.
// b. Admins put the dead node into maintenance mode and then
// have the node rejoin the cluster.
// c. Take the node out of maintenance mode.
// Second scenario:
// a. With replication factor 3, set one replica to maintenance node,
// thus block has 1 maintenance replica and 2 live replicas.
// b. Change the replication factor to 2. The block will still have
// 1 maintenance replica and 2 live replicas.
// c. Take the node out of maintenance mode.
blockManager.processExtraRedundancyBlocksOnInService(node);
}
// Remove from tracking in DatanodeAdminManager
monitor.stopTrackingNode(node);
} else {
LOG.trace("stopMaintenance: Node {} in {}, nothing to do.",
node, node.getAdminState());
}
}
protected void setDecommissioned(DatanodeDescriptor dn) {
dn.setDecommissioned();
LOG.info("Decommissioning complete for node {}", dn);
}
protected void setInMaintenance(DatanodeDescriptor dn) {
dn.setInMaintenance();
LOG.info("Node {} has entered maintenance mode.", dn);
}
/**
* Checks whether a block is sufficiently replicated/stored for
* DECOMMISSION_INPROGRESS or ENTERING_MAINTENANCE datanodes. For replicated
* blocks or striped blocks, full-strength replication or storage is not
* always necessary, hence "sufficient".
* @return true if sufficient, else false.
*/
protected boolean isSufficient(BlockInfo block, BlockCollection bc,
NumberReplicas numberReplicas,
boolean isDecommission,
boolean isMaintenance) {
if (blockManager.hasEnoughEffectiveReplicas(block, numberReplicas, 0)) {
// Block has enough replica, skip
LOG.trace("Block {} does not need replication.", block);
return true;
}
final int numExpected = blockManager.getExpectedLiveRedundancyNum(block,
numberReplicas);
final int numLive = numberReplicas.liveReplicas();
// Block is under-replicated
LOG.trace("Block {} numExpected={}, numLive={}", block, numExpected,
numLive);
if (isDecommission && numExpected > numLive) {
if (bc.isUnderConstruction() && block.equals(bc.getLastBlock())) {
// Can decom a UC block as long as there will still be minReplicas
if (blockManager.hasMinStorage(block, numLive)) {
LOG.trace("UC block {} sufficiently-replicated since numLive ({}) "
+ ">= minR ({})", block, numLive,
blockManager.getMinStorageNum(block));
return true;
} else {
LOG.trace("UC block {} insufficiently-replicated since numLive "
+ "({}) < minR ({})", block, numLive,
blockManager.getMinStorageNum(block));
}
} else {
// Can decom a non-UC as long as the default replication is met
if (numLive >= blockManager.getDefaultStorageNum(block)) {
return true;
}
}
}
if (isMaintenance && numLive >= blockManager.getMinMaintenanceStorageNum(block)) {
return true;
}
return false;
}
protected void logBlockReplicationInfo(BlockInfo block,
BlockCollection bc,
DatanodeDescriptor srcNode, NumberReplicas num,
Iterable<DatanodeStorageInfo> storages) {
if (!NameNode.blockStateChangeLog.isInfoEnabled()) {
return;
}
int curReplicas = num.liveReplicas();
int curExpectedRedundancy = blockManager.getExpectedRedundancyNum(block);
StringBuilder nodeList = new StringBuilder();
for (DatanodeStorageInfo storage : storages) {
final DatanodeDescriptor node = storage.getDatanodeDescriptor();
nodeList.append(node).append(' ');
}
NameNode.blockStateChangeLog.info(
"Block: " + block + ", Expected Replicas: "
+ curExpectedRedundancy + ", live replicas: " + curReplicas
+ ", corrupt replicas: " + num.corruptReplicas()
+ ", decommissioned replicas: " + num.decommissioned()
+ ", decommissioning replicas: " + num.decommissioning()
+ ", maintenance replicas: " + num.maintenanceReplicas()
+ ", live entering maintenance replicas: "
+ num.liveEnteringMaintenanceReplicas()
+ ", replicas on stale nodes: " + num.replicasOnStaleNodes()
+ ", readonly replicas: " + num.readOnlyReplicas()
+ ", excess replicas: " + num.excessReplicas()
+ ", Is Open File: " + bc.isUnderConstruction()
+ ", Datanodes having this block: " + nodeList + ", Current Datanode: "
+ srcNode + ", Is current datanode decommissioning: "
+ srcNode.isDecommissionInProgress() +
", Is current datanode entering maintenance: "
+ srcNode.isEnteringMaintenance());
}
@VisibleForTesting
public int getNumPendingNodes() {
return monitor.getPendingNodeCount();
}
@VisibleForTesting
public int getNumTrackedNodes() {
return monitor.getTrackedNodeCount();
}
@VisibleForTesting
public int getNumNodesChecked() {
return monitor.getNumNodesChecked();
}
@VisibleForTesting
public Queue<DatanodeDescriptor> getPendingNodes() {
return monitor.getPendingNodes();
}
@VisibleForTesting
void runMonitorForTest() throws ExecutionException, InterruptedException {
executor.submit(monitor).get();
}
public void refreshPendingRepLimit(int pendingRepLimit, String key) {
ensurePositiveInt(pendingRepLimit, key);
this.monitor.setPendingRepLimit(pendingRepLimit);
}
@VisibleForTesting
public int getPendingRepLimit() {
return this.monitor.getPendingRepLimit();
}
public void refreshBlocksPerLock(int blocksPerLock, String key) {
ensurePositiveInt(blocksPerLock, key);
this.monitor.setBlocksPerLock(blocksPerLock);
}
@VisibleForTesting
public int getBlocksPerLock() {
return this.monitor.getBlocksPerLock();
}
private void ensurePositiveInt(int val, String key) {
Preconditions.checkArgument(
(val > 0),
key + " = '" + val + "' is invalid. " +
"It should be a positive, non-zero integer value.");
}
}
|
DatanodeAdminManager
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/queue/event/FullEventListener.java
|
{
"start": 705,
"end": 797
}
|
interface ____ triggered when the queue is full.
*
* @author Nikita Koksharov
*
*/
public
|
is
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/typeutils/BinaryRowDataSerializer.java
|
{
"start": 1788,
"end": 12808
}
|
class ____ extends AbstractRowDataSerializer<BinaryRowData> {
private static final long serialVersionUID = 1L;
public static final int LENGTH_SIZE_IN_BYTES = 4;
private final int numFields;
private final int fixedLengthPartSize;
public BinaryRowDataSerializer(int numFields) {
this.numFields = numFields;
this.fixedLengthPartSize = BinaryRowData.calculateFixPartSizeInBytes(numFields);
}
@Override
public boolean isImmutableType() {
return false;
}
@Override
public TypeSerializer<BinaryRowData> duplicate() {
return new BinaryRowDataSerializer(numFields);
}
@Override
public BinaryRowData createInstance() {
return new BinaryRowData(numFields);
}
@Override
public BinaryRowData copy(BinaryRowData from) {
return copy(from, new BinaryRowData(numFields));
}
@Override
public BinaryRowData copy(BinaryRowData from, BinaryRowData reuse) {
return from.copy(reuse);
}
@Override
public int getLength() {
return -1;
}
@Override
public void serialize(BinaryRowData record, DataOutputView target) throws IOException {
target.writeInt(record.getSizeInBytes());
if (target instanceof MemorySegmentWritable) {
serializeWithoutLength(record, (MemorySegmentWritable) target);
} else {
BinarySegmentUtils.copyToView(
record.getSegments(), record.getOffset(), record.getSizeInBytes(), target);
}
}
@Override
public BinaryRowData deserialize(DataInputView source) throws IOException {
BinaryRowData row = new BinaryRowData(numFields);
int length = source.readInt();
byte[] bytes = new byte[length];
source.readFully(bytes);
row.pointTo(MemorySegmentFactory.wrap(bytes), 0, length);
return row;
}
@Override
public BinaryRowData deserialize(BinaryRowData reuse, DataInputView source) throws IOException {
MemorySegment[] segments = reuse.getSegments();
checkArgument(
segments == null || (segments.length == 1 && reuse.getOffset() == 0),
"Reuse BinaryRowData should have no segments or only one segment and offset start at 0.");
int length = source.readInt();
if (segments == null || segments[0].size() < length) {
segments = new MemorySegment[] {MemorySegmentFactory.wrap(new byte[length])};
}
source.readFully(segments[0].getArray(), 0, length);
reuse.pointTo(segments, 0, length);
return reuse;
}
@Override
public int getArity() {
return numFields;
}
@Override
public BinaryRowData toBinaryRow(BinaryRowData rowData) throws IOException {
return rowData;
}
// ============================ Page related operations ===================================
@Override
public int serializeToPages(BinaryRowData record, AbstractPagedOutputView headerLessView)
throws IOException {
checkArgument(headerLessView.getHeaderLength() == 0);
int skip = checkSkipWriteForFixLengthPart(headerLessView);
headerLessView.writeInt(record.getSizeInBytes());
serializeWithoutLength(record, headerLessView);
return skip;
}
private static void serializeWithoutLength(BinaryRowData record, MemorySegmentWritable writable)
throws IOException {
if (record.getSegments().length == 1) {
writable.write(record.getSegments()[0], record.getOffset(), record.getSizeInBytes());
} else {
serializeWithoutLengthSlow(record, writable);
}
}
public static void serializeWithoutLengthSlow(BinaryRowData record, MemorySegmentWritable out)
throws IOException {
int remainSize = record.getSizeInBytes();
int posInSegOfRecord = record.getOffset();
int segmentSize = record.getSegments()[0].size();
for (MemorySegment segOfRecord : record.getSegments()) {
int nWrite = Math.min(segmentSize - posInSegOfRecord, remainSize);
assert nWrite > 0;
out.write(segOfRecord, posInSegOfRecord, nWrite);
// next new segment.
posInSegOfRecord = 0;
remainSize -= nWrite;
if (remainSize == 0) {
break;
}
}
checkArgument(remainSize == 0);
}
@Override
public BinaryRowData deserializeFromPages(AbstractPagedInputView headerLessView)
throws IOException {
return deserializeFromPages(createInstance(), headerLessView);
}
@Override
public BinaryRowData deserializeFromPages(
BinaryRowData reuse, AbstractPagedInputView headerLessView) throws IOException {
checkArgument(headerLessView.getHeaderLength() == 0);
checkSkipReadForFixLengthPart(headerLessView);
return deserialize(reuse, headerLessView);
}
@Override
public BinaryRowData mapFromPages(BinaryRowData reuse, AbstractPagedInputView headerLessView)
throws IOException {
checkArgument(headerLessView.getHeaderLength() == 0);
checkSkipReadForFixLengthPart(headerLessView);
pointTo(headerLessView.readInt(), reuse, headerLessView);
return reuse;
}
@Override
public void skipRecordFromPages(AbstractPagedInputView headerLessView) throws IOException {
checkArgument(headerLessView.getHeaderLength() == 0);
checkSkipReadForFixLengthPart(headerLessView);
headerLessView.skipBytes(headerLessView.readInt());
}
/**
* Copy a binaryRow which stored in paged input view to output view.
*
* @param source source paged input view where the binary row stored
* @param target the target output view.
*/
public void copyFromPagesToView(AbstractPagedInputView source, DataOutputView target)
throws IOException {
checkSkipReadForFixLengthPart(source);
int length = source.readInt();
target.writeInt(length);
target.write(source, length);
}
/**
* Point row to memory segments with offset(in the AbstractPagedInputView) and length.
*
* @param length row length.
* @param reuse reuse BinaryRowData object.
* @param headerLessView source memory segments container.
*/
public void pointTo(int length, BinaryRowData reuse, AbstractPagedInputView headerLessView)
throws IOException {
checkArgument(headerLessView.getHeaderLength() == 0);
if (length < 0) {
throw new IOException(
String.format(
"Read unexpected bytes in source of positionInSegment[%d] and limitInSegment[%d]",
headerLessView.getCurrentPositionInSegment(),
headerLessView.getCurrentSegmentLimit()));
}
int remainInSegment =
headerLessView.getCurrentSegmentLimit()
- headerLessView.getCurrentPositionInSegment();
MemorySegment currSeg = headerLessView.getCurrentSegment();
int currPosInSeg = headerLessView.getCurrentPositionInSegment();
if (remainInSegment >= length) {
// all in one segment, that's good.
reuse.pointTo(currSeg, currPosInSeg, length);
headerLessView.skipBytesToRead(length);
} else {
pointToMultiSegments(
reuse, headerLessView, length, length - remainInSegment, currSeg, currPosInSeg);
}
}
private void pointToMultiSegments(
BinaryRowData reuse,
AbstractPagedInputView source,
int sizeInBytes,
int remainLength,
MemorySegment currSeg,
int currPosInSeg)
throws IOException {
int segmentSize = currSeg.size();
int div = remainLength / segmentSize;
int remainder = remainLength - segmentSize * div; // equal to p % q
int varSegSize = remainder == 0 ? div : div + 1;
MemorySegment[] segments = new MemorySegment[varSegSize + 1];
segments[0] = currSeg;
for (int i = 1; i <= varSegSize; i++) {
source.advance();
segments[i] = source.getCurrentSegment();
}
// The remaining is 0. There is no next Segment at this time. The current Segment is
// all the data of this row, so we need to skip segmentSize bytes to read. We can't
// jump directly to the next Segment. Because maybe there are no segment in later.
int remainLenInLastSeg = remainder == 0 ? segmentSize : remainder;
source.skipBytesToRead(remainLenInLastSeg);
reuse.pointTo(segments, currPosInSeg, sizeInBytes);
}
/**
* We need skip bytes to write when the remain bytes of current segment is not enough to write
* binary row fixed part. See {@link BinaryRowData}.
*/
private int checkSkipWriteForFixLengthPart(AbstractPagedOutputView out) throws IOException {
// skip if there is no enough size.
int available = out.getSegmentSize() - out.getCurrentPositionInSegment();
if (available < getSerializedRowFixedPartLength()) {
out.advance();
return available;
}
return 0;
}
/**
* We need skip bytes to read when the remain bytes of current segment is not enough to write
* binary row fixed part. See {@link BinaryRowData}.
*/
public void checkSkipReadForFixLengthPart(AbstractPagedInputView source) throws IOException {
// skip if there is no enough size.
// Note: Use currentSegmentLimit instead of segmentSize.
int available = source.getCurrentSegmentLimit() - source.getCurrentPositionInSegment();
if (available < getSerializedRowFixedPartLength()) {
source.advance();
}
}
/** Return fixed part length to serialize one row. */
public int getSerializedRowFixedPartLength() {
return getFixedLengthPartSize() + LENGTH_SIZE_IN_BYTES;
}
public int getFixedLengthPartSize() {
return fixedLengthPartSize;
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
int length = source.readInt();
target.writeInt(length);
target.write(source, length);
}
@Override
public boolean equals(Object obj) {
return obj instanceof BinaryRowDataSerializer
&& numFields == ((BinaryRowDataSerializer) obj).numFields;
}
@Override
public int hashCode() {
return Integer.hashCode(numFields);
}
@Override
public TypeSerializerSnapshot<BinaryRowData> snapshotConfiguration() {
return new BinaryRowDataSerializerSnapshot(numFields);
}
/** {@link TypeSerializerSnapshot} for {@link BinaryRowDataSerializer}. */
public static final
|
BinaryRowDataSerializer
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/Discount.java
|
{
"start": 579,
"end": 1159
}
|
class ____ implements Serializable {
private Long id;
private double discount;
private Customer owner;
@Column(precision = 5)
public double getDiscount() {
return discount;
}
@Id
@GeneratedValue
public Long getId() {
return id;
}
public void setDiscount(double i) {
discount = i;
}
public void setId(Long long1) {
id = long1;
}
@ManyToOne(cascade = {CascadeType.ALL}, fetch = FetchType.LAZY)
@JoinColumn(name = "CUSTOMER_ID")
public Customer getOwner() {
return owner;
}
public void setOwner(Customer customer) {
owner = customer;
}
}
|
Discount
|
java
|
micronaut-projects__micronaut-core
|
management/src/main/java/io/micronaut/management/endpoint/stop/ServerStopEndpoint.java
|
{
"start": 1033,
"end": 2069
}
|
class ____ {
private static final long WAIT_BEFORE_STOP = 500L;
private final ApplicationContext context;
private final Map<String, String> message;
/**
* @param context The application context
*/
ServerStopEndpoint(ApplicationContext context) {
this.context = context;
this.message = new LinkedHashMap<>(1);
this.message.put("message", "Server shutdown started");
}
/**
* Stops the server.
*
* @return The message
*/
@Write(consumes = {})
public Object stop() {
try {
return message;
} finally {
Thread thread = new Thread(this::stopServer);
thread.setContextClassLoader(getClass().getClassLoader());
thread.start();
}
}
private void stopServer() {
try {
Thread.sleep(WAIT_BEFORE_STOP);
} catch (InterruptedException ex) {
Thread.currentThread().interrupt();
}
this.context.stop();
}
}
|
ServerStopEndpoint
|
java
|
apache__spark
|
sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/streaming/StreamingDataWriterFactory.java
|
{
"start": 1649,
"end": 2975
}
|
interface ____ extends Serializable {
/**
* Returns a data writer to do the actual writing work. Note that, Spark will reuse the same data
* object instance when sending data to the data writer, for better performance. Data writers
* are responsible for defensive copies if necessary, e.g. copy the data before buffer it in a
* list.
* <p>
* If this method fails (by throwing an exception), the corresponding Spark write task would fail
* and get retried until hitting the maximum retry times.
*
* @param partitionId A unique id of the RDD partition that the returned writer will process.
* Usually Spark processes many RDD partitions at the same time,
* implementations should use the partition id to distinguish writers for
* different partitions.
* @param taskId The task id returned by {@link TaskContext#taskAttemptId()}. Spark may run
* multiple tasks for the same partition (due to speculation or task failures,
* for example).
* @param epochId A monotonically increasing id for streaming queries that are split in to
* discrete periods of execution.
*/
DataWriter<InternalRow> createWriter(int partitionId, long taskId, long epochId);
}
|
StreamingDataWriterFactory
|
java
|
elastic__elasticsearch
|
x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/TransportSLMGetExpiredSnapshotsAction.java
|
{
"start": 12706,
"end": 13421
}
|
class ____ extends ActionResponse {
private final Map<String, List<Tuple<SnapshotId, String>>> snapshotsToDelete;
public Response(Map<String, List<Tuple<SnapshotId, String>>> snapshotsToDelete) {
this.snapshotsToDelete = snapshotsToDelete;
}
public Map<String, List<Tuple<SnapshotId, String>>> snapshotsToDelete() {
return snapshotsToDelete;
}
@Override
public void writeTo(StreamOutput out) {
TransportAction.localOnly();
}
}
private static final Set<SnapshotState> RETAINABLE_STATES = EnumSet.of(
SnapshotState.SUCCESS,
SnapshotState.FAILED,
SnapshotState.PARTIAL
);
}
|
Response
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/FileEncodingApplicationListenerTests.java
|
{
"start": 1410,
"end": 2622
}
|
class ____ {
private final FileEncodingApplicationListener initializer = new FileEncodingApplicationListener();
private final ConfigurableEnvironment environment = new StandardEnvironment();
private final ApplicationEnvironmentPreparedEvent event = new ApplicationEnvironmentPreparedEvent(
new DefaultBootstrapContext(), new SpringApplication(), new String[0], this.environment);
@Test
void testIllegalState() {
TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.environment,
"spring.mandatory_file_encoding=FOO");
ConfigurationPropertySources.attach(this.environment);
assertThatIllegalStateException().isThrownBy(() -> this.initializer.onApplicationEvent(this.event));
}
@Test
void testSunnyDayNothingMandated() {
this.initializer.onApplicationEvent(this.event);
}
@Test
void testSunnyDayMandated() {
assertThat(System.getProperty("file.encoding")).isNotNull();
TestPropertySourceUtils.addInlinedPropertiesToEnvironment(this.environment,
"spring.mandatory_file_encoding:" + System.getProperty("file.encoding"));
ConfigurationPropertySources.attach(this.environment);
this.initializer.onApplicationEvent(this.event);
}
}
|
FileEncodingApplicationListenerTests
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/test/java/org/springframework/web/reactive/function/server/DispatcherHandlerIntegrationTests.java
|
{
"start": 8313,
"end": 8491
}
|
class ____ {
@RequestMapping("/controller")
@ResponseBody
public Mono<Person> controller() {
return Mono.just(new Person("John"));
}
}
private static
|
PersonController
|
java
|
apache__camel
|
components/camel-jcr/src/test/java/org/apache/camel/component/jcr/JcrProducerSubNodeTest.java
|
{
"start": 1190,
"end": 3017
}
|
class ____ extends JcrRouteTestSupport {
@Test
public void testCreateNodeAndSubNode() throws Exception {
Session session = openSession();
try {
// create node
Exchange exchange1 = ExchangeBuilder.anExchange(context)
.withHeader(JcrConstants.JCR_NODE_NAME, "node")
.build();
Exchange out1 = template.send("direct:a", exchange1);
assertNotNull(out1);
String uuidNode = out1.getMessage().getBody(String.class);
Node node = session.getNodeByIdentifier(uuidNode);
assertNotNull(node);
assertEquals("/home/test/node", node.getPath());
// create sub node
Exchange exchange2 = ExchangeBuilder.anExchange(context)
.withHeader(JcrConstants.JCR_NODE_NAME, "node/subnode")
.build();
Exchange out2 = template.send("direct:a", exchange2);
assertNotNull(out2);
String uuidSubNode = out2.getMessage().getBody(String.class);
Node subNode = session.getNodeByIdentifier(uuidSubNode);
assertNotNull(subNode);
assertEquals("/home/test/node/subnode", subNode.getPath());
assertNotNull(subNode.getParent());
assertEquals("/home/test/node", subNode.getParent().getPath());
} finally {
if (session != null && session.isLive()) {
session.logout();
}
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:a")
.to("jcr://user:pass@repository/home/test");
}
};
}
}
|
JcrProducerSubNodeTest
|
java
|
quarkusio__quarkus
|
extensions/devui/deployment/src/main/java/io/quarkus/devui/deployment/menu/TrimmedTestRunResult.java
|
{
"start": 215,
"end": 3213
}
|
class ____ {
/**
* The run id
*/
private final long id;
/**
* If this ran all tests, or just the modified ones
*/
private final boolean full;
private final long started;
private final long completed;
private final Map<String, TestClassResult> currentFailing = new HashMap<>();
private final Map<String, TestClassResult> historicFailing = new HashMap<>();
private final Map<String, TestClassResult> currentPassing = new HashMap<>();
private final Map<String, TestClassResult> historicPassing = new HashMap<>();
private final long passedCount;
private final long failedCount;
private final long skippedCount;
private final long currentPassedCount;
private final long currentFailedCount;
private final long currentSkippedCount;
public TrimmedTestRunResult(TestRunResults testRunResults) {
this.id = testRunResults.getId();
this.full = testRunResults.isFull();
this.started = testRunResults.getStartedTime();
this.completed = testRunResults.getCompletedTime();
this.currentFailing.putAll(testRunResults.getCurrentFailing());
this.historicFailing.putAll(testRunResults.getHistoricFailing());
this.currentPassing.putAll(testRunResults.getCurrentPassing());
this.historicPassing.putAll(testRunResults.getHistoricPassing());
this.passedCount = testRunResults.getPassedCount();
this.failedCount = testRunResults.getFailedCount();
this.skippedCount = testRunResults.getSkippedCount();
this.currentPassedCount = testRunResults.getCurrentPassedCount();
this.currentFailedCount = testRunResults.getCurrentFailedCount();
this.currentSkippedCount = testRunResults.getCurrentSkippedCount();
}
public long getId() {
return id;
}
public boolean isFull() {
return full;
}
public Map<String, TestClassResult> getCurrentFailing() {
return currentFailing;
}
public Map<String, TestClassResult> getHistoricFailing() {
return historicFailing;
}
public Map<String, TestClassResult> getCurrentPassing() {
return currentPassing;
}
public Map<String, TestClassResult> getHistoricPassing() {
return historicPassing;
}
public long getStartedTime() {
return started;
}
public long getCompletedTime() {
return completed;
}
public long getTotalTime() {
return completed - started;
}
public long getPassedCount() {
return passedCount;
}
public long getFailedCount() {
return failedCount;
}
public long getSkippedCount() {
return skippedCount;
}
public long getCurrentPassedCount() {
return currentPassedCount;
}
public long getCurrentFailedCount() {
return currentFailedCount;
}
public long getCurrentSkippedCount() {
return currentSkippedCount;
}
}
|
TrimmedTestRunResult
|
java
|
micronaut-projects__micronaut-core
|
inject/src/main/java/io/micronaut/inject/annotation/AnnotationMetadataReference.java
|
{
"start": 1542,
"end": 1799
}
|
class ____ of the annotation metadata
*/
public String getClassName() {
return className;
}
@Override
public AnnotationMetadata getTargetAnnotationMetadata() {
// Don't unwrap the reference
return this;
}
}
|
name
|
java
|
junit-team__junit5
|
junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/descriptor/TestFactoryTestDescriptor.java
|
{
"start": 2325,
"end": 8871
}
|
class ____ extends TestMethodTestDescriptor implements Filterable {
public static final String SEGMENT_TYPE = "test-factory";
public static final String DYNAMIC_CONTAINER_SEGMENT_TYPE = "dynamic-container";
public static final String DYNAMIC_TEST_SEGMENT_TYPE = "dynamic-test";
private static final ReflectiveInterceptorCall<Method, @Nullable Object> interceptorCall = InvocationInterceptor::interceptTestFactoryMethod;
private static final InterceptingExecutableInvoker executableInvoker = new InterceptingExecutableInvoker();
private final DynamicDescendantFilter dynamicDescendantFilter;
public TestFactoryTestDescriptor(UniqueId uniqueId, Class<?> testClass, Method testMethod,
Supplier<List<Class<?>>> enclosingInstanceTypes, JupiterConfiguration configuration) {
super(uniqueId, testClass, testMethod, enclosingInstanceTypes, configuration);
this.dynamicDescendantFilter = new DynamicDescendantFilter();
}
private TestFactoryTestDescriptor(UniqueId uniqueId, String displayName, Class<?> testClass, Method testMethod,
JupiterConfiguration configuration, DynamicDescendantFilter dynamicDescendantFilter) {
super(uniqueId, displayName, testClass, testMethod, configuration);
this.dynamicDescendantFilter = dynamicDescendantFilter;
}
// --- JupiterTestDescriptor -----------------------------------------------
@Override
protected TestFactoryTestDescriptor withUniqueId(UnaryOperator<UniqueId> uniqueIdTransformer) {
return new TestFactoryTestDescriptor(uniqueIdTransformer.apply(getUniqueId()), getDisplayName(), getTestClass(),
getTestMethod(), this.configuration, this.dynamicDescendantFilter.copy(uniqueIdTransformer));
}
// --- Filterable ----------------------------------------------------------
@Override
public DynamicDescendantFilter getDynamicDescendantFilter() {
return dynamicDescendantFilter;
}
// --- TestDescriptor ------------------------------------------------------
@Override
public Type getType() {
return Type.CONTAINER;
}
@Override
public boolean mayRegisterTests() {
return true;
}
// --- Node ----------------------------------------------------------------
@Override
protected void invokeTestMethod(JupiterEngineExecutionContext context, DynamicTestExecutor dynamicTestExecutor) {
ExtensionContext extensionContext = context.getExtensionContext();
context.getThrowableCollector().execute(() -> {
Object instance = extensionContext.getRequiredTestInstance();
Object testFactoryMethodResult = executableInvoker.<@Nullable Object> invoke(getTestMethod(), instance,
extensionContext, context.getExtensionRegistry(), interceptorCall);
TestSource defaultTestSource = getSource().orElseThrow(
() -> new JUnitException("Illegal state: TestSource must be present"));
try (Stream<DynamicNode> dynamicNodeStream = toDynamicNodeStream(testFactoryMethodResult)) {
int index = 1;
Iterator<DynamicNode> iterator = dynamicNodeStream.iterator();
while (iterator.hasNext()) {
DynamicNode dynamicNode = iterator.next();
Optional<JupiterTestDescriptor> descriptor = createDynamicDescriptor(this, dynamicNode, index,
defaultTestSource, getDynamicDescendantFilter(), configuration);
descriptor.ifPresent(dynamicTestExecutor::execute);
index++;
}
}
catch (ClassCastException ex) {
throw invalidReturnTypeException(ex);
}
dynamicTestExecutor.awaitFinished();
});
}
@SuppressWarnings("unchecked")
private Stream<DynamicNode> toDynamicNodeStream(@Nullable Object testFactoryMethodResult) {
if (testFactoryMethodResult == null) {
throw new JUnitException("@TestFactory method must not return null");
}
if (testFactoryMethodResult instanceof DynamicNode node) {
return Stream.of(node);
}
return (Stream<DynamicNode>) CollectionUtils.toStream(testFactoryMethodResult);
}
private JUnitException invalidReturnTypeException(Throwable cause) {
String message = "Objects produced by @TestFactory method '%s' must be of type %s.".formatted(
getTestMethod().toGenericString(), DynamicNode.class.getName());
return new JUnitException(message, cause);
}
static Optional<JupiterTestDescriptor> createDynamicDescriptor(JupiterTestDescriptor parent, DynamicNode node,
int index, TestSource defaultTestSource, DynamicDescendantFilter dynamicDescendantFilter,
JupiterConfiguration configuration) {
UniqueId uniqueId;
Supplier<JupiterTestDescriptor> descriptorCreator;
Optional<TestSource> customTestSource = node.getTestSourceUri().map(TestFactoryTestDescriptor::fromUri);
TestSource source = customTestSource.orElse(defaultTestSource);
if (node instanceof DynamicTest test) {
uniqueId = parent.getUniqueId().append(DYNAMIC_TEST_SEGMENT_TYPE, "#" + index);
descriptorCreator = () -> new DynamicTestTestDescriptor(uniqueId, index, test, source, configuration);
}
else {
DynamicContainer container = (DynamicContainer) node;
uniqueId = parent.getUniqueId().append(DYNAMIC_CONTAINER_SEGMENT_TYPE, "#" + index);
descriptorCreator = () -> new DynamicContainerTestDescriptor(uniqueId, index, container, source,
dynamicDescendantFilter.withoutIndexFiltering(), configuration);
}
if (dynamicDescendantFilter.test(uniqueId, index - 1)) {
JupiterTestDescriptor descriptor = descriptorCreator.get();
descriptor.setParent(parent);
return Optional.of(descriptor);
}
return Optional.empty();
}
/**
* @since 5.3
*/
static TestSource fromUri(URI uri) {
Preconditions.notNull(uri, "URI must not be null");
if (CLASSPATH_SCHEME.equals(uri.getScheme())) {
return ClasspathResourceSource.from(uri);
}
if (CLASS_SCHEME.equals(uri.getScheme())) {
return ClassSource.from(uri);
}
if (METHOD_SCHEME.equals(uri.getScheme())) {
return MethodSourceSupport.from(uri);
}
return UriSource.from(uri);
}
/**
* Override {@link TestMethodTestDescriptor#nodeSkipped} as a no-op, since
* the {@code TestWatcher} API is not supported for {@code @TestFactory}
* containers.
*
* @since 5.4
*/
@Override
public void nodeSkipped(JupiterEngineExecutionContext context, TestDescriptor descriptor, SkipResult result) {
/* no-op */
}
/**
* Override {@link TestMethodTestDescriptor#nodeFinished} as a no-op, since
* the {@code TestWatcher} API is not supported for {@code @TestFactory}
* containers.
*
* @since 5.4
*/
@Override
public void nodeFinished(JupiterEngineExecutionContext context, TestDescriptor descriptor,
TestExecutionResult result) {
/* no-op */
}
}
|
TestFactoryTestDescriptor
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/exc/InvalidFormatException.java
|
{
"start": 263,
"end": 1572
}
|
class ____
extends MismatchedInputException
{
private static final long serialVersionUID = 1L;
/**
* Underlying value that could not be deserialized into
* target type, if available.
*/
protected final Object _value;
/*
/**********************************************************
/* Life-cycle
/**********************************************************
*/
public InvalidFormatException(JsonParser p,
String msg, Object value, Class<?> targetType)
{
super(p, msg, targetType);
_value = value;
}
public static InvalidFormatException from(JsonParser p, String msg,
Object value, Class<?> targetType)
{
return new InvalidFormatException(p, msg, value, targetType);
}
/*
/**********************************************************
/* Additional accessors
/**********************************************************
*/
/**
* Accessor for checking source value (String, Number usually) that could not
* be deserialized into target type ({@link #getTargetType}).
* Note that value may not be available, depending on who throws the exception
* and when.
*/
public Object getValue() {
return _value;
}
}
|
InvalidFormatException
|
java
|
quarkusio__quarkus
|
extensions/web-dependency-locator/deployment/src/main/java/io/quarkus/webdependency/locator/deployment/devui/WebDependencyLocatorDevUIProcessor.java
|
{
"start": 522,
"end": 3149
}
|
class ____ {
@BuildStep(onlyIf = IsDevelopment.class)
public void createPages(BuildProducer<CardPageBuildItem> cardPageProducer,
List<WebDependencyLibrariesBuildItem> webDependencyLibrariesBuildItems,
Optional<ImportMapBuildItem> importMapBuildItem) {
List<WebDependencyLibrary> webDependencyLibraries = new ArrayList<>();
for (WebDependencyLibrariesBuildItem webDependencyLibrariesBuildItem : webDependencyLibrariesBuildItems) {
webDependencyLibraries.addAll(webDependencyLibrariesBuildItem.getWebDependencyLibraries());
}
CardPageBuildItem cardPageBuildItem = new CardPageBuildItem();
cardPageBuildItem.setLogo("javascript_logo.svg", "javascript_logo.svg");
if (!webDependencyLibraries.isEmpty()) {
// Web Dependency Libraries
cardPageBuildItem.addBuildTimeData("webDependencyLibraries", webDependencyLibraries);
// Web Dependency Asset List
cardPageBuildItem.addPage(Page.webComponentPageBuilder()
.componentLink("qwc-web-dependency-locator-libraries.js")
.title("Web libraries")
.icon("font-awesome-solid:folder-tree")
.staticLabel(String.valueOf(webDependencyLibraries.size())));
if (importMapBuildItem.isPresent()) {
cardPageBuildItem.addBuildTimeData("importMap", importMapBuildItem.get().getImportMap(),
"An importmap that maps all available JavaScript libraries that can be used in the application");
// ImportMap
cardPageBuildItem.addPage(Page.webComponentPageBuilder()
.componentLink("qwc-web-dependency-locator-importmap.js")
.title("Import Map")
.icon("font-awesome-solid:diagram-project"));
}
}
cardPageProducer.produce(cardPageBuildItem);
}
@BuildStep(onlyIf = IsDevelopment.class)
public DynamicWelcomeBuildItem createDynamicWelcomeData() {
return new DynamicWelcomeBuildItem(DYNAMIC_WELCOME);
}
private static final String DYNAMIC_WELCOME = """
<span>Learn how you can <a href="https://quarkus.io/guides/web-dependency-locator" target="_blank">add your own web content</a></span>
<span>Static assets: <code>${devuiState.welcomeData.resourcesDir}/META-INF/resources/</code> OR </span>
<span>Dynamic assets: <code>${devuiState.welcomeData.resourcesDir}/web</code></span>
""";
}
|
WebDependencyLocatorDevUIProcessor
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/EmptyStatementInspector.java
|
{
"start": 242,
"end": 534
}
|
class ____ implements StatementInspector {
/**
* The singleton reference.
*/
public static final StatementInspector INSTANCE = new EmptyStatementInspector();
@Override
public String inspect(String sql) {
return sql;
}
protected EmptyStatementInspector() {}
}
|
EmptyStatementInspector
|
java
|
apache__camel
|
components/camel-leveldb/src/test/java/org/apache/camel/component/leveldb/LevelDBTestSupport.java
|
{
"start": 1754,
"end": 2789
}
|
enum ____ {
JACKSON,
JAVA_SERIALIZATION,
}
@Parameters
public static Collection<Object[]> serializers() {
Object[][] serializers = {
{ SerializerType.JAVA_SERIALIZATION },
{ SerializerType.JACKSON } };
return Arrays.asList(serializers);
}
public SerializerType getSerializerType() {
return serializerType;
}
public LevelDBSerializer getSerializer() {
switch (serializerType) {
case JACKSON:
return new JacksonLevelDBSerializer();
default:
return new DefaultLevelDBSerializer();
}
}
LevelDBAggregationRepository getRepo() {
if (repo == null) {
repo = createRepo();
}
return repo;
}
LevelDBAggregationRepository createRepo() {
repo = new LevelDBAggregationRepository("repo1", "target/data/leveldb.dat");
repo.setSerializer(getSerializer());
return repo;
}
static
|
SerializerType
|
java
|
spring-projects__spring-framework
|
spring-websocket/src/main/java/org/springframework/web/socket/sockjs/transport/handler/AbstractTransportHandler.java
|
{
"start": 1029,
"end": 1147
}
|
class ____ {@link TransportHandler} implementations.
*
* @author Rossen Stoyanchev
* @since 4.0
*/
public abstract
|
for
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/core/Scheduler.java
|
{
"start": 26847,
"end": 29117
}
|
class ____ implements Runnable, SchedulerRunnableIntrospection {
@NonNull
final Runnable decoratedRun;
@NonNull
final SequentialDisposable sd;
final long periodInNanoseconds;
long count;
long lastNowNanoseconds;
long startInNanoseconds;
PeriodicTask(long firstStartInNanoseconds, @NonNull Runnable decoratedRun,
long firstNowNanoseconds, @NonNull SequentialDisposable sd, long periodInNanoseconds) {
this.decoratedRun = decoratedRun;
this.sd = sd;
this.periodInNanoseconds = periodInNanoseconds;
lastNowNanoseconds = firstNowNanoseconds;
startInNanoseconds = firstStartInNanoseconds;
}
@Override
public void run() {
decoratedRun.run();
if (!sd.isDisposed()) {
long nextTick;
long nowNanoseconds = now(TimeUnit.NANOSECONDS);
// If the clock moved in a direction quite a bit, rebase the repetition period
if (nowNanoseconds + CLOCK_DRIFT_TOLERANCE_NANOSECONDS < lastNowNanoseconds
|| nowNanoseconds >= lastNowNanoseconds + periodInNanoseconds + CLOCK_DRIFT_TOLERANCE_NANOSECONDS) {
nextTick = nowNanoseconds + periodInNanoseconds;
/*
* Shift the start point back by the drift as if the whole thing
* started count periods ago.
*/
startInNanoseconds = nextTick - (periodInNanoseconds * (++count));
} else {
nextTick = startInNanoseconds + (++count * periodInNanoseconds);
}
lastNowNanoseconds = nowNanoseconds;
long delay = nextTick - nowNanoseconds;
sd.replace(schedule(this, delay, TimeUnit.NANOSECONDS));
}
}
@Override
public Runnable getWrappedRunnable() {
return this.decoratedRun;
}
}
}
static final
|
PeriodicTask
|
java
|
junit-team__junit5
|
junit-jupiter-params/src/main/java/org/junit/jupiter/params/ParameterizedClass.java
|
{
"start": 12486,
"end": 13025
}
|
class ____ the
* {@link ArgumentsSource}. By default, the additional arguments are
* ignored. {@code argumentCountValidation} allows you to control how
* additional arguments are handled. The default can be configured via the
* {@value ArgumentCountValidator#ARGUMENT_COUNT_VALIDATION_KEY}
* configuration parameter (see the User Guide for details on configuration
* parameters).
*
* @see ArgumentCountValidationMode
*/
ArgumentCountValidationMode argumentCountValidation() default ArgumentCountValidationMode.DEFAULT;
}
|
or
|
java
|
apache__dubbo
|
dubbo-config/dubbo-config-api/src/test/java/org/apache/dubbo/config/integration/multiple/AbstractStorage.java
|
{
"start": 1064,
"end": 2232
}
|
class ____<T> implements Storage<T> {
private Map<String, T> storage = new ConcurrentHashMap<>();
/**
* Generate the key for storage
*
* @param host the host in the register center.
* @param port the port in the register center.
* @return the generated key with the given host and port.
*/
private String generateKey(String host, int port) {
return String.format("%s:%d", host, port);
}
/**
* {@inheritDoc}
*/
@Override
public T get(String host, int port) {
return storage.get(generateKey(host, port));
}
/**
* {@inheritDoc}
*/
@Override
public void put(String host, int port, T value) {
storage.put(generateKey(host, port), value);
}
/**
* {@inheritDoc}
*/
@Override
public boolean contains(String host, int port) {
return storage.containsKey(generateKey(host, port));
}
/**
* {@inheritDoc}
*/
@Override
public int size() {
return storage.size();
}
/**
* {@inheritDoc}
*/
@Override
public void clear() {
storage.clear();
}
}
|
AbstractStorage
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/internal/net/RFC3986.java
|
{
"start": 641,
"end": 10545
}
|
class ____ {
private RFC3986() {
}
/**
* Decodes a segment of a URI encoded by a browser.
*
* <p>The string is expected to be encoded as per RFC 3986, Section 2. This is the encoding used by JavaScript functions
* encodeURI and encodeURIComponent, but not escape. For example in this encoding, é (in Unicode U+00E9 or in
* UTF-8 0xC3 0xA9) is encoded as %C3%A9 or %c3%a9.
*
* <p>Plus signs '+' will be handled as spaces and encoded using the default JDK URLEncoder class.
*
* @param s string to decode
* @return decoded string
*/
public static String decodeURIComponent(String s) {
return decodeURIComponent(s, true);
}
/**
* Decodes a segment of an URI encoded by a browser.
*
* <p>The string is expected to be encoded as per RFC 3986, Section 2. This is the encoding used by JavaScript functions
* encodeURI and encodeURIComponent, but not escape. For example in this encoding, é (in Unicode U+00E9 or in
* UTF-8 0xC3 0xA9) is encoded as %C3%A9 or %c3%a9.
*
* @param s string to decode
* @param plus whether to convert plus char into spaces
*
* @return decoded string
*/
public static String decodeURIComponent(String s, boolean plus) {
Objects.requireNonNull(s);
int i = !plus ? s.indexOf('%') : indexOfPercentOrPlus(s);
if (i == -1) {
return s;
}
// pack the slowest path away
return decodeAndTransformURIComponent(s, i, plus);
}
private static int indexOfPercentOrPlus(String s) {
for (int i = 0, size = s.length(); i < size; i++) {
final char c = s.charAt(i);
if (c == '%' || c == '+') {
return i;
}
}
return -1;
}
private static String decodeAndTransformURIComponent(String s, int i, boolean plus) {
final byte[] buf = s.getBytes(StandardCharsets.UTF_8);
int pos = i; // position in `buf'.
for (int size = s.length(); i < size; i++) {
char c = s.charAt(i);
if (c == '%') {
if (i == size - 1) {
throw new IllegalArgumentException("unterminated escape"
+ " sequence at end of string: " + s);
}
c = s.charAt(++i);
if (c == '%') {
buf[pos++] = '%'; // "%%" -> "%"
break;
}
if (i >= size - 1) {
throw new IllegalArgumentException("partial escape"
+ " sequence at end of string: " + s);
}
c = decodeHexNibble(c);
final char c2 = decodeHexNibble(s.charAt(++i));
if (c == Character.MAX_VALUE || c2 == Character.MAX_VALUE) {
throw new IllegalArgumentException(
"invalid escape sequence `%" + s.charAt(i - 1)
+ s.charAt(i) + "' at index " + (i - 2)
+ " of: " + s);
}
c = (char) (c * 16 + c2);
// shouldn't check for plus since it would be a double decoding
buf[pos++] = (byte) c;
} else {
buf[pos++] = (byte) (plus && c == '+' ? ' ' : c);
}
}
return new String(buf, 0, pos, StandardCharsets.UTF_8);
}
/**
* Helper to decode half of a hexadecimal number from a string.
* @param c The ASCII character of the hexadecimal number to decode.
* Must be in the range {@code [0-9a-fA-F]}.
* @return The hexadecimal value represented in the ASCII character
* given, or {@link Character#MAX_VALUE} if the character is invalid.
*/
private static char decodeHexNibble(final char c) {
if ('0' <= c && c <= '9') {
return (char) (c - '0');
} else if ('a' <= c && c <= 'f') {
return (char) (c - 'a' + 10);
} else if ('A' <= c && c <= 'F') {
return (char) (c - 'A' + 10);
} else {
return Character.MAX_VALUE;
}
}
/**
 * Removes dot segments as per <a href="http://tools.ietf.org/html/rfc3986#section-5.2.4">rfc3986</a>.
 *
 * <p>There is 1 extra transformation that is not part of the spec but kept for backwards compatibility:
 * a double slash // will be converted to a single slash.
 *
 * @param path raw path
 * @return normalized path
 */
public static String removeDotSegments(CharSequence path) {
    Objects.requireNonNull(path);
    // Output buffer; segments are appended as they survive dot-segment removal.
    final StringBuilder obuf = new StringBuilder(path.length());
    int i = 0;
    while (i < path.length()) {
        // remove dots as described in
        // http://tools.ietf.org/html/rfc3986#section-5.2.4
        if (matches(path, i, "./")) {
            // leading "./" — drop it
            i += 2;
        } else if (matches(path, i, "../")) {
            // leading "../" — drop it (nothing above root to pop)
            i += 3;
        } else if (matches(path, i, "/./")) {
            // "/./" collapses to "/"; preserve last slash
            i += 2;
        } else if (matches(path, i,"/.", true)) {
            // trailing "/." — restart the loop on "/" so a trailing slash is kept
            path = "/";
            i = 0;
        } else if (matches(path, i, "/../")) {
            // "/../" pops the previous output segment; preserve last slash
            i += 3;
            int pos = obuf.lastIndexOf("/");
            if (pos != -1) {
                obuf.delete(pos, obuf.length());
            }
        } else if (matches(path, i, "/..", true)) {
            // trailing "/.." — pop the previous segment and keep a trailing slash
            path = "/";
            i = 0;
            int pos = obuf.lastIndexOf("/");
            if (pos != -1) {
                obuf.delete(pos, obuf.length());
            }
        } else if (matches(path, i, ".", true) || matches(path, i, "..", true)) {
            // bare "." or ".." remainder contributes nothing
            break;
        } else {
            if (path.charAt(i) == '/') {
                i++;
                // Not standard!!!
                // but common // -> /
                if (obuf.length() == 0 || obuf.charAt(obuf.length() - 1) != '/') {
                    obuf.append('/');
                }
            }
            // Copy the current segment up to (but excluding) the next slash.
            int pos = indexOfSlash(path, i);
            if (pos != -1) {
                obuf.append(path, i, pos);
                i = pos;
            } else {
                obuf.append(path, i, path.length());
                break;
            }
        }
    }
    return obuf.toString();
}
/**
 * Tests whether {@code what} occurs in {@code path} at offset {@code start}
 * (prefix match; the remainder of {@code path} may be longer than {@code what}).
 */
private static boolean matches(CharSequence path, int start, String what) {
    return matches(path, start, what, false);
}

/**
 * Tests whether {@code what} occurs in {@code path} at offset {@code start}.
 *
 * @param exact when {@code true} the match must consume the entire remainder
 * of {@code path}; when {@code false} a prefix match suffices
 */
private static boolean matches(CharSequence path, int start, String what, boolean exact) {
    final int remaining = path.length() - start;
    if (exact && remaining != what.length()) {
        return false;
    }
    if (remaining < what.length()) {
        return false;
    }
    for (int i = 0; i < what.length(); i++) {
        if (path.charAt(start + i) != what.charAt(i)) {
            return false;
        }
    }
    return true;
}
/**
 * Returns the index of the first {@code '/'} in {@code str} at or after
 * {@code start}, or {@code -1} when no slash remains.
 */
private static int indexOfSlash(CharSequence str, int start) {
    final int len = str.length();
    for (int pos = start; pos < len; pos++) {
        if (str.charAt(pos) == '/') {
            return pos;
        }
    }
    return -1;
}
/**
 * Normalizes a path as per <a href="http://tools.ietf.org/html/rfc3986#section-5.2.4">rfc3986</a>.
 *
 * <p>There are 2 extra transformations that are not part of the spec but kept for backwards
 * compatibility: a double slash // is converted to a single slash, and the result always
 * starts with a slash.
 *
 * @param pathname raw path
 * @return normalized path
 */
public static String normalizePath(String pathname) {
    if (pathname.isEmpty()) {
        // An empty path normalizes to the root path.
        return "/";
    }
    final int firstPercent = pathname.indexOf('%');
    final boolean noEscapes = firstPercent == -1;
    // Fast path: nothing to percent-decode, no dot segments and no double slashes.
    if (noEscapes && pathname.indexOf('.') == -1 && pathname.indexOf("//") == -1) {
        if (pathname.charAt(0) == '/') {
            return pathname;
        }
        // See https://bugs.openjdk.org/browse/JDK-8085796
        return "/" + pathname;
    }
    return normalizePathSlow(pathname, firstPercent);
}
// Slow path of normalizePath(): ensures a leading slash, percent-decodes unreserved
// characters in place, then removes dot segments.
private static String normalizePathSlow(String pathname, int indexOfFirstPercent) {
    final StringBuilder ibuf;
    // Not standard!!!
    // Force a leading slash when the input does not start with one.
    if (pathname.charAt(0) != '/') {
        ibuf = new StringBuilder(pathname.length() + 1);
        ibuf.append('/');
        if (indexOfFirstPercent != -1) {
            // The prepended '/' shifts every index by one, including the first '%'.
            indexOfFirstPercent++;
        }
    } else {
        ibuf = new StringBuilder(pathname.length());
    }
    ibuf.append(pathname);
    if (indexOfFirstPercent != -1) {
        // Decode %XX sequences that encode unreserved characters (rfc3986 section 2.3).
        decodeUnreservedChars(ibuf, indexOfFirstPercent);
    }
    // remove dots as described in
    // http://tools.ietf.org/html/rfc3986#section-5.2.4
    return RFC3986.removeDotSegments(ibuf);
}
/**
 * Percent-decodes, in place, every escape sequence in {@code path} (from
 * {@code start} onwards) that encodes an unreserved character, as described in
 * <a href="http://tools.ietf.org/html/rfc3986#section-2.4">rfc3986 section 2.4</a>.
 */
private static void decodeUnreservedChars(StringBuilder path, int start) {
    // path.length() is re-read each iteration because decodeUnreserved() may
    // shrink the buffer when it collapses a three-character escape.
    for (int i = start; i < path.length(); i++) {
        if (path.charAt(i) == '%') {
            decodeUnreserved(path, i);
        }
    }
}
// Decodes the percent-escape starting at `start` in place, but only when the
// encoded octet is an RFC 3986 unreserved character (ALPHA / DIGIT / "-" / "." /
// "_" / "~"); other escapes are left untouched. Throws IllegalArgumentException
// on a malformed or truncated escape sequence.
private static void decodeUnreserved(StringBuilder path, int start) {
    if (start + 3 <= path.length()) {
        // these are latin chars so there is no danger of falling into some special unicode char that requires more
        // than 1 byte
        final String escapeSequence = path.substring(start + 1, start + 3);
        int unescaped;
        try {
            unescaped = Integer.parseInt(escapeSequence, 16);
            // parseInt accepts a leading '-', e.g. "%-1"; reject such sequences.
            if (unescaped < 0) {
                throw new IllegalArgumentException("Invalid escape sequence: %" + escapeSequence);
            }
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Invalid escape sequence: %" + escapeSequence);
        }
        // validate if the octet is within the allowed ranges
        if (
            // ALPHA
            (unescaped >= 0x41 && unescaped <= 0x5A) ||
            (unescaped >= 0x61 && unescaped <= 0x7A) ||
            // DIGIT
            (unescaped >= 0x30 && unescaped <= 0x39) ||
            // HYPHEN
            (unescaped == 0x2D) ||
            // PERIOD
            (unescaped == 0x2E) ||
            // UNDERSCORE
            (unescaped == 0x5F) ||
            // TILDE
            (unescaped == 0x7E)) {
            // Overwrite '%' with the decoded char, then drop the two hex digits.
            path.setCharAt(start, (char) unescaped);
            path.delete(start + 1, start + 3);
        }
    } else {
        // A '%' without two following characters cannot form a valid escape.
        throw new IllegalArgumentException("Invalid position for escape character: " + start);
    }
}
}
|
RFC3986
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/storage/OffsetStorageWriter.java
|
{
"start": 3304,
"end": 11558
}
|
/**
 * Buffers source offsets in memory and flushes them, one snapshot at a time, to an
 * {@link OffsetBackingStore}. Usage protocol: {@link #offset} accumulates data;
 * {@link #beginFlush} snapshots it; {@link #doFlush} serializes and asynchronously
 * writes the snapshot; {@link #cancelFlush} restores the snapshot after a failure
 * between the two steps. Thread-safe: mutable state is guarded by the instance
 * lock, and at most one flush is in progress at a time.
 */
class ____ {
    private static final Logger log = LoggerFactory.getLogger(OffsetStorageWriter.class);
    private final OffsetBackingStore backingStore;
    private final Converter keyConverter;
    private final Converter valueConverter;
    // Namespace used to scope serialized offset keys (serialized as [namespace, key]).
    private final String namespace;
    // Offset data in Connect format
    private Map<Map<String, Object>, Map<String, Object>> data = new HashMap<>();
    // Snapshot taken by beginFlush(); non-null exactly while a flush is in progress.
    private Map<Map<String, Object>, Map<String, Object>> toFlush = null;
    // Single permit serializing flushes: acquired in beginFlush(), released when the
    // flush completes, fails, or is cancelled.
    private final Semaphore flushInProgress = new Semaphore(1);
    // Unique ID for each flush request to handle callbacks after timeouts
    private long currentFlushId = 0;
    public OffsetStorageWriter(OffsetBackingStore backingStore,
                               String namespace, Converter keyConverter, Converter valueConverter) {
        this.backingStore = backingStore;
        this.namespace = namespace;
        this.keyConverter = keyConverter;
        this.valueConverter = valueConverter;
    }
    /**
     * Set an offset for a partition using Connect data values
     * @param partition the partition to store an offset for
     * @param offset the offset
     */
    @SuppressWarnings("unchecked")
    public synchronized void offset(Map<String, ?> partition, Map<String, ?> offset) {
        data.put((Map<String, Object>) partition, (Map<String, Object>) offset);
    }
    // True while a snapshot exists, i.e. between beginFlush() and doFlush()/cancelFlush().
    private boolean flushing() {
        return toFlush != null;
    }
    /**
     * Performs the first step of a flush operation, snapshotting the current state. This does not
     * actually initiate the flush with the underlying storage. Ensures that any previous flush operations
     * have finished before beginning a new flush.
     *
     * @return true if a flush was initiated, false if no data was available
     * @throws ConnectException if the previous flush is not complete before this method is called
     */
    public boolean beginFlush() {
        try {
            // Zero timeout: fail immediately if another flush still holds the permit.
            return beginFlush(0, TimeUnit.NANOSECONDS);
        } catch (InterruptedException | TimeoutException e) {
            log.error("Invalid call to OffsetStorageWriter beginFlush() while already flushing, the "
                    + "framework should not allow this");
            throw new ConnectException("OffsetStorageWriter is already flushing");
        }
    }
    /**
     * Performs the first step of a flush operation, snapshotting the current state. This does not
     * actually initiate the flush with the underlying storage. Ensures that any previous flush operations
     * have finished before beginning a new flush.
     * <p>If and only if this method returns true, the caller must call {@link #doFlush(Callback)}
     * or {@link #cancelFlush()} to finish the flush operation and allow later calls to complete.
     *
     * @param timeout A maximum duration to wait for previous flushes to finish before giving up on waiting
     * @param timeUnit Units of the timeout argument
     * @return true if a flush was initiated, false if no data was available
     * @throws InterruptedException if this thread was interrupted while waiting for the previous flush to complete
     * @throws TimeoutException if the {@code timeout} elapses before previous flushes are complete.
     */
    public boolean beginFlush(long timeout, TimeUnit timeUnit) throws InterruptedException, TimeoutException {
        // Math.max clamps negative timeouts to "try once without waiting".
        if (flushInProgress.tryAcquire(Math.max(0, timeout), timeUnit)) {
            synchronized (this) {
                if (data.isEmpty()) {
                    // Nothing to flush; give the permit back so later flushes can proceed.
                    flushInProgress.release();
                    return false;
                } else {
                    // Swap the accumulation map out as the snapshot; new offsets go
                    // into a fresh map while the snapshot is being written.
                    toFlush = data;
                    data = new HashMap<>();
                    return true;
                }
            }
        } else {
            throw new TimeoutException("Timed out waiting for previous flush to finish");
        }
    }
    /**
     * Flush the current offsets and clear them from this writer. This is non-blocking: it
     * moves the current set of offsets out of the way, serializes the data, and asynchronously
     * writes the data to the backing store. If no offsets need to be written, the callback is
     * still invoked, but no Future is returned.
     *
     * @return a Future, or null if there are no offsets to commit
     */
    public Future<Void> doFlush(final Callback<Void> callback) {
        final long flushId;
        // Serialize
        final Map<ByteBuffer, ByteBuffer> offsetsSerialized;
        // Serialization runs under the instance lock so it sees a consistent snapshot
        // and a stable flush id; only the store write itself is asynchronous.
        synchronized (this) {
            flushId = currentFlushId;
            try {
                offsetsSerialized = new HashMap<>(toFlush.size());
                for (Map.Entry<Map<String, Object>, Map<String, Object>> entry : toFlush.entrySet()) {
                    // Offsets are specified as schemaless to the converter, using whatever internal schema is appropriate
                    // for that data. The only enforcement of the format is here.
                    OffsetUtils.validateFormat(entry.getKey());
                    OffsetUtils.validateFormat(entry.getValue());
                    // When serializing the key, we add in the namespace information so the key is [namespace, real key]
                    byte[] key = keyConverter.fromConnectData(namespace, null, Arrays.asList(namespace, entry.getKey()));
                    ByteBuffer keyBuffer = (key != null) ? ByteBuffer.wrap(key) : null;
                    byte[] value = valueConverter.fromConnectData(namespace, null, entry.getValue());
                    ByteBuffer valueBuffer = (value != null) ? ByteBuffer.wrap(value) : null;
                    offsetsSerialized.put(keyBuffer, valueBuffer);
                }
            } catch (Throwable t) {
                // Must handle errors properly here or the writer will be left mid-flush forever and be
                // unable to make progress.
                log.error("CRITICAL: Failed to serialize offset data, making it impossible to commit "
                        + "offsets under namespace {}. This likely won't recover unless the "
                        + "unserializable partition or offset information is overwritten.", namespace);
                log.error("Cause of serialization failure:", t);
                callback.onCompletion(t, null);
                return null;
            }
            // And submit the data
            log.debug("Submitting {} entries to backing store. The offsets are: {}", offsetsSerialized.size(), toFlush);
        }
        return backingStore.set(offsetsSerialized, (error, result) -> {
            // Only propagate the callback when this write still belongs to the
            // current flush; stale (timed-out/cancelled) writes are ignored.
            boolean isCurrent = handleFinishWrite(flushId, error, result);
            if (isCurrent && callback != null) {
                callback.onCompletion(error, result);
            }
        });
    }
    /**
     * Cancel a flush that has been initiated by {@link #beginFlush}. This should not be called if
     * {@link #doFlush} has already been invoked. It should be used if an operation performed
     * between beginFlush and doFlush failed.
     */
    public synchronized void cancelFlush() {
        // Verify we're still flushing data to handle a race between cancelFlush() calls from up the
        // call stack and callbacks from the write request to underlying storage
        if (flushing()) {
            // Just recombine the data and place it back in the primary storage.
            // Offsets recorded after beginFlush() overwrite the snapshotted ones,
            // so the newest value for each partition wins.
            toFlush.putAll(data);
            data = toFlush;
            // Bump the id so any in-flight write callback for the cancelled flush is ignored.
            currentFlushId++;
            flushInProgress.release();
            toFlush = null;
        }
    }
    /**
     * Handle completion of a write. Returns true if this callback is for the current flush
     * operation, false if it's for an old operation that should now be ignored.
     */
    private synchronized boolean handleFinishWrite(long flushId, Throwable error, Void result) {
        // Callbacks need to be handled carefully since the flush operation may have already timed
        // out and been cancelled.
        if (flushId != currentFlushId)
            return false;
        if (error != null) {
            // Failed write: put the snapshot back so the offsets are retried later.
            cancelFlush();
        } else {
            // Successful write: discard the snapshot and allow the next flush.
            currentFlushId++;
            flushInProgress.release();
            toFlush = null;
        }
        return true;
    }
}
|
OffsetStorageWriter
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/main/java/org/apache/logging/log4j/core/test/smtp/SimpleSmtpServer.java
|
{
"start": 1367,
"end": 9147
}
|
/**
 * Minimal in-process SMTP server for tests. Runs on a background thread, accepts
 * client connections, drives an SMTP state machine per connection, and records
 * every received message in memory for later inspection via
 * {@link #getReceivedEmail()}.
 */
class ____ implements Runnable {
    /**
     * Stores all of the email received since this instance started up.
     */
    private final List<SmtpMessage> receivedMail;
    /**
     * Default SMTP port is 25.
     */
    public static final int DEFAULT_SMTP_PORT = 25;
    /**
     * Indicates whether this server is stopped or not.
     */
    private volatile boolean stopped = true;
    /**
     * Handle to the server socket this server listens to.
     */
    private ServerSocket serverSocket;
    /**
     * Port the server listens on - set to the default SMTP port initially.
     */
    private int port = DEFAULT_SMTP_PORT;
    /**
     * Timeout listening on server socket, in milliseconds.
     */
    private static final int TIMEOUT = 500;
    /**
     * Constructor.
     *
     * @param port port number
     */
    public SimpleSmtpServer(final int port) {
        receivedMail = new ArrayList<>();
        this.port = port;
    }
    /**
     * Main loop of the SMTP server.
     */
    @Override
    @SuppressFBWarnings("UNENCRYPTED_SERVER_SOCKET")
    public void run() {
        stopped = false;
        try {
            try {
                serverSocket = new ServerSocket(port);
                serverSocket.setSoTimeout(TIMEOUT); // accept() blocks for at most TIMEOUT ms
            } finally {
                synchronized (this) {
                    // Notify when server socket has been created, so start() can return.
                    notifyAll();
                }
            }
            // Server: loop until stopped
            while (!isStopped()) {
                // Start server socket and listen for client connections
                Socket socket = null;
                try {
                    socket = serverSocket.accept();
                } catch (final Exception e) {
                    // NOTE(review): socket is still null when accept() throws, so this
                    // close appears unreachable — confirm before relying on it.
                    if (socket != null) {
                        socket.close();
                    }
                    continue; // Non-blocking socket timeout occurred: try accept() again
                }
                // Get the input and output streams; closing the socket below closes them too.
                final BufferedReader input = new BufferedReader(new InputStreamReader(socket.getInputStream()));
                final PrintWriter out = new PrintWriter(socket.getOutputStream());
                synchronized (this) {
                    /*
                     * We synchronize over the handle method and the list update because the client call completes
                     * inside the handle method and we have to prevent the client from reading the list until we've
                     * updated it. For higher concurrency, we could just change handle to return void and update the
                     * list inside the method to limit the duration that we hold the lock.
                     */
                    final List<SmtpMessage> msgs = handleTransaction(out, input);
                    receivedMail.addAll(msgs);
                }
                socket.close();
            }
        } catch (final Exception e) {
            /** @todo Should throw an appropriate exception here. */
            e.printStackTrace();
        } finally {
            if (serverSocket != null) {
                try {
                    serverSocket.close();
                } catch (final IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
    /**
     * Check if the server has been placed in a stopped state. Allows another thread to
     * stop the server safely.
     *
     * @return true if the server has been sent a stop signal, false otherwise
     */
    public synchronized boolean isStopped() {
        return stopped;
    }
    /**
     * Stops the server. Server is shutdown after processing of the current request is complete.
     */
    public synchronized void stop() {
        // Mark us closed
        stopped = true;
        try {
            // Kick the server accept loop
            serverSocket.close();
        } catch (final IOException e) {
            // Ignore
        }
    }
    /**
     * Handle an SMTP transaction, i.e. all activity between initial connect and QUIT command.
     *
     * @param out output stream
     * @param input input stream
     * @return List of SmtpMessage
     * @throws IOException
     */
    private List<SmtpMessage> handleTransaction(final PrintWriter out, final BufferedReader input) throws IOException {
        // Initialize the state machine
        SmtpState smtpState = SmtpState.CONNECT;
        final SmtpRequest smtpRequest = new SmtpRequest(SmtpActionType.CONNECT, Strings.EMPTY, smtpState);
        // Execute the connection request
        final SmtpResponse smtpResponse = smtpRequest.execute();
        // Send initial response
        sendResponse(out, smtpResponse);
        smtpState = smtpResponse.getNextState();
        final List<SmtpMessage> msgList = new ArrayList<>();
        SmtpMessage msg = new SmtpMessage();
        // Loop until the state machine returns to CONNECT or the client disconnects.
        while (smtpState != SmtpState.CONNECT) {
            final String line = input.readLine();
            if (line == null) {
                // Client closed the connection mid-transaction.
                break;
            }
            // Create request from client input and current state
            final SmtpRequest request = SmtpRequest.createRequest(line, smtpState);
            // Execute request and create response object
            final SmtpResponse response = request.execute();
            // Move to next internal state
            smtpState = response.getNextState();
            // Send response to client
            sendResponse(out, response);
            // Store input in message
            final String params = request.getParams();
            msg.store(response, params);
            // If message reception is complete save it
            if (smtpState == SmtpState.QUIT) {
                msgList.add(msg);
                msg = new SmtpMessage();
            }
        }
        return msgList;
    }
    /**
     * Send response to client.
     *
     * @param out socket output stream
     * @param smtpResponse response object
     */
    private static void sendResponse(final PrintWriter out, final SmtpResponse smtpResponse) {
        // A non-positive code means "no response should be sent" for this transition.
        if (smtpResponse.getCode() > 0) {
            final int code = smtpResponse.getCode();
            final String message = smtpResponse.getMessage();
            out.print(code + " " + message + "\r\n");
            out.flush();
        }
    }
    /**
     * Get email received by this instance since start up.
     *
     * @return List of String
     */
    public synchronized Iterator<SmtpMessage> getReceivedEmail() {
        return receivedMail.iterator();
    }
    /**
     * Get the number of messages received.
     *
     * @return size of received email list
     */
    public synchronized int getReceivedEmailSize() {
        return receivedMail.size();
    }
    /**
     * Creates an instance of SimpleSmtpServer and starts it. Will listen on the default port.
     *
     * @return a reference to the SMTP server
     */
    public static SimpleSmtpServer start() {
        return start(DEFAULT_SMTP_PORT);
    }
    /**
     * Creates an instance of SimpleSmtpServer and starts it.
     *
     * @param port port number the server should listen to
     * @return a reference to the SMTP server
     */
    public static SimpleSmtpServer start(final int port) {
        final SimpleSmtpServer server = new SimpleSmtpServer(port);
        final Thread t = new Thread(server);
        // Block until the server socket is created; run() calls notifyAll() once
        // the ServerSocket is bound (or binding failed).
        synchronized (server) {
            t.start();
            try {
                server.wait();
            } catch (final InterruptedException e) {
                // Ignore don't care.
            }
        }
        return server;
    }
}
|
SimpleSmtpServer
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java
|
{
"start": 1568,
"end": 4713
}
|
/**
 * Protobuf-backed implementation of {@code FinishApplicationMasterRequest}.
 * Follows the standard YARN PBImpl pattern: {@code proto} holds the immutable
 * protobuf message, {@code builder} holds pending modifications, and
 * {@code viaProto} records which of the two is currently authoritative.
 */
class ____ extends FinishApplicationMasterRequest {
    FinishApplicationMasterRequestProto proto = FinishApplicationMasterRequestProto.getDefaultInstance();
    FinishApplicationMasterRequestProto.Builder builder = null;
    // true when `proto` is authoritative; false while `builder` carries edits.
    boolean viaProto = false;
    public FinishApplicationMasterRequestPBImpl() {
        builder = FinishApplicationMasterRequestProto.newBuilder();
    }
    public FinishApplicationMasterRequestPBImpl(FinishApplicationMasterRequestProto proto) {
        this.proto = proto;
        viaProto = true;
    }
    // Flushes any pending builder state and returns the (now authoritative) proto.
    public FinishApplicationMasterRequestProto getProto() {
        mergeLocalToProto();
        proto = viaProto ? proto : builder.build();
        viaProto = true;
        return proto;
    }
    @Override
    public int hashCode() {
        return getProto().hashCode();
    }
    @Override
    public boolean equals(Object other) {
        if (other == null)
            return false;
        // NOTE(review): isAssignableFrom makes this asymmetric in the presence of
        // subclasses (a.equals(b) may differ from b.equals(a)) — confirm intended.
        if (other.getClass().isAssignableFrom(this.getClass())) {
            return this.getProto().equals(this.getClass().cast(other).getProto());
        }
        return false;
    }
    @Override
    public String toString() {
        return TextFormat.shortDebugString(getProto());
    }
    // No locally cached record types to merge for this request; kept for pattern symmetry.
    private void mergeLocalToBuilder() {
    }
    // Rebuilds `proto` from the builder, making the proto authoritative again.
    private void mergeLocalToProto() {
        if (viaProto)
            maybeInitBuilder();
        mergeLocalToBuilder();
        proto = builder.build();
        viaProto = true;
    }
    // Lazily (re)creates the builder, seeding it from `proto` when needed.
    private void maybeInitBuilder() {
        if (viaProto || builder == null) {
            builder = FinishApplicationMasterRequestProto.newBuilder(proto);
        }
        viaProto = false;
    }
    @Override
    public String getDiagnostics() {
        FinishApplicationMasterRequestProtoOrBuilder p = viaProto ? proto : builder;
        return p.getDiagnostics();
    }
    @Override
    public void setDiagnostics(String diagnostics) {
        maybeInitBuilder();
        // null clears the field rather than storing an empty value.
        if (diagnostics == null) {
            builder.clearDiagnostics();
            return;
        }
        builder.setDiagnostics(diagnostics);
    }
    @Override
    public String getTrackingUrl() {
        FinishApplicationMasterRequestProtoOrBuilder p = viaProto ? proto : builder;
        return p.getTrackingUrl();
    }
    @Override
    public void setTrackingUrl(String url) {
        maybeInitBuilder();
        if (url == null) {
            builder.clearTrackingUrl();
            return;
        }
        builder.setTrackingUrl(url);
    }
    @Override
    public FinalApplicationStatus getFinalApplicationStatus() {
        FinishApplicationMasterRequestProtoOrBuilder p = viaProto ? proto : builder;
        if (!p.hasFinalApplicationStatus()) {
            return null;
        }
        return convertFromProtoFormat(p.getFinalApplicationStatus());
    }
    @Override
    public void setFinalApplicationStatus(FinalApplicationStatus finalState) {
        maybeInitBuilder();
        if (finalState == null) {
            builder.clearFinalApplicationStatus();
            return;
        }
        builder.setFinalApplicationStatus(convertToProtoFormat(finalState));
    }
    private FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto s) {
        return ProtoUtils.convertFromProtoFormat(s);
    }
    private FinalApplicationStatusProto convertToProtoFormat(FinalApplicationStatus s) {
        return ProtoUtils.convertToProtoFormat(s);
    }
}
|
FinishApplicationMasterRequestPBImpl
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TestExecutionExceptionHandlerTests.java
|
{
"start": 7394,
"end": 7806
}
|
/**
 * Test fixture: converts an unchecked exception thrown by a test into a checked
 * {@link IOException}, recording that it ran for later verification.
 */
class ____ implements TestExecutionExceptionHandler {
    // Set to true once handleTestExecutionException has run; inspected by the tests.
    static boolean handleExceptionCalled = false;
    @Override
    public void handleTestExecutionException(ExtensionContext context, Throwable throwable) throws Throwable {
        // The test under observation is expected to have thrown a RuntimeException.
        assertInstanceOf(RuntimeException.class, throwable);
        handleExceptionCalled = true;
        handlerCalls.add("convert");
        // Rethrow as a checked exception so downstream handlers see the converted type.
        throw new IOException("checked");
    }
}
static
|
ConvertException
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/conversion/BigDecimalToStringConversion.java
|
{
"start": 793,
"end": 3335
}
|
class ____ extends AbstractNumberToStringConversion {
public BigDecimalToStringConversion() {
super( true );
}
@Override
public String getToExpression(ConversionContext conversionContext) {
if ( requiresDecimalFormat( conversionContext ) ) {
StringBuilder sb = new StringBuilder();
appendDecimalFormatter( sb, conversionContext );
sb.append( ".format( <SOURCE> )" );
return sb.toString();
}
else {
return "<SOURCE>.toString()";
}
}
@Override
public String getFromExpression(ConversionContext conversionContext) {
if ( requiresDecimalFormat( conversionContext ) ) {
StringBuilder sb = new StringBuilder();
sb.append( "(" ).append( bigDecimal( conversionContext ) ).append( ") " );
appendDecimalFormatter( sb, conversionContext );
sb.append( ".parse( <SOURCE> )" );
return sb.toString();
}
else {
return "new " + bigDecimal( conversionContext ) + "( <SOURCE> )";
}
}
@Override
protected Set<Type> getFromConversionImportTypes(ConversionContext conversionContext) {
return asSet( conversionContext.getTypeFactory().getType( BigDecimal.class ) );
}
@Override
public List<HelperMethod> getRequiredHelperMethods(ConversionContext conversionContext) {
List<HelperMethod> helpers = new ArrayList<>();
if ( conversionContext.getNumberFormat() != null ) {
helpers.add( new CreateDecimalFormat(
conversionContext.getTypeFactory(),
conversionContext.getLocale() != null
) );
}
return helpers;
}
private void appendDecimalFormatter(StringBuilder sb, ConversionContext conversionContext) {
boolean withLocale = conversionContext.getLocale() != null;
sb.append( "createDecimalFormat" );
if ( withLocale ) {
sb.append( "WithLocale" );
}
sb.append( "( " );
if ( conversionContext.getNumberFormat() != null ) {
sb.append( "\"" );
sb.append( conversionContext.getNumberFormat() );
sb.append( "\"" );
}
if ( withLocale ) {
sb.append( ", " ).append( locale( conversionContext ) ).append( ".forLanguageTag( \"" );
sb.append( conversionContext.getLocale() );
sb.append( "\" )" );
}
sb.append( " )" );
}
}
|
BigDecimalToStringConversion
|
java
|
alibaba__nacos
|
api/src/main/java/com/alibaba/nacos/api/ai/model/mcp/McpServiceRef.java
|
{
"start": 818,
"end": 1722
}
|
/**
 * Reference to a Nacos service backing an MCP server: identifies the service by
 * namespace, group and name, and carries the transport protocol used to reach it.
 */
public class McpServiceRef {

    private String namespaceId;

    private String groupName;

    private String serviceName;

    private String transportProtocol;

    /** Returns the namespace id of the referenced service. */
    public String getNamespaceId() {
        return this.namespaceId;
    }

    public void setNamespaceId(String namespaceId) {
        this.namespaceId = namespaceId;
    }

    /** Returns the group name of the referenced service. */
    public String getGroupName() {
        return this.groupName;
    }

    public void setGroupName(String groupName) {
        this.groupName = groupName;
    }

    /** Returns the name of the referenced service. */
    public String getServiceName() {
        return this.serviceName;
    }

    public void setServiceName(String serviceName) {
        this.serviceName = serviceName;
    }

    /** Returns the transport protocol used to reach the service. */
    public String getTransportProtocol() {
        return this.transportProtocol;
    }

    public void setTransportProtocol(String transportProtocol) {
        this.transportProtocol = transportProtocol;
    }
}
|
McpServiceRef
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/CollectionToArraySafeParameterTest.java
|
{
"start": 2959,
"end": 3412
}
|
class ____<T> extends HashSet<T> {}
}\
""")
.doTest();
}
@Test
public void negativeCases() {
compilationHelper
.addSourceLines(
"CollectionToArraySafeParameterNegativeCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
/**
* @author mariasam@google.com (Maria Sam) on 6/27/17.
*/
public
|
Foo
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/internal/ExecutableOperationContextImpl.java
|
{
"start": 1313,
"end": 2839
}
|
class ____ implements ExecutableOperation.Context {
private final CatalogManager catalogManager;
private final FunctionCatalog functionCatalog;
private final ModuleManager moduleManager;
private final ResourceManager resourceManager;
private final TableConfig tableConfig;
private final boolean isStreamingMode;
public ExecutableOperationContextImpl(
CatalogManager catalogManager,
FunctionCatalog functionCatalog,
ModuleManager moduleManager,
ResourceManager resourceManager,
TableConfig tableConfig,
boolean isStreamingMode) {
this.catalogManager = catalogManager;
this.functionCatalog = functionCatalog;
this.moduleManager = moduleManager;
this.resourceManager = resourceManager;
this.tableConfig = tableConfig;
this.isStreamingMode = isStreamingMode;
}
@Override
public CatalogManager getCatalogManager() {
return catalogManager;
}
@Override
public FunctionCatalog getFunctionCatalog() {
return functionCatalog;
}
@Override
public ModuleManager getModuleManager() {
return moduleManager;
}
@Override
public ResourceManager getResourceManager() {
return resourceManager;
}
@Override
public TableConfig getTableConfig() {
return tableConfig;
}
@Override
public boolean isStreamingMode() {
return isStreamingMode;
}
}
|
ExecutableOperationContextImpl
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/Assertions_anyOf_with_ThrowingConsumer_Test.java
|
{
"start": 1193,
"end": 2005
}
|
/**
 * Tests for {@code Assertions.anyOf(ThrowingConsumer...)}: the combined consumer
 * succeeds when at least one delegate passes and fails with a
 * {@link MultipleFailuresError} when all delegates fail.
 */
class ____ {
    // null, "" and "value" each satisfy exactly one of the two delegate consumers.
    @ParameterizedTest
    @NullAndEmptySource
    @ValueSource(strings = "value")
    void should_pass_if_any_consumer_passes(String value) {
        // GIVEN
        ThrowingConsumer<String> underTest = anyOf(e -> assertThat(e).isBlank(),
                                                   e -> assertThat(e).isNotBlank());
        // WHEN/THEN
        underTest.accept(value);
    }
    @Test
    void should_fail_if_all_consumers_fail() {
        // GIVEN
        ThrowingConsumer<String> underTest = anyOf(e -> assertThat(e).isNotNull(),
                                                   e -> assertThat(e).isNotBlank());
        // WHEN: null fails both delegates
        var assertionError = expectAssertionError(() -> underTest.accept(null));
        // THEN
        then(assertionError).isInstanceOf(MultipleFailuresError.class);
    }
}
|
Assertions_anyOf_with_ThrowingConsumer_Test
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/AnnotationUtilsTests.java
|
{
"start": 47824,
"end": 47903
}
|
// Empty marker type used by the meta-annotation-cycle tests; presumably forms a
// cycle with the MetaCycle annotations declared nearby — confirm against the
// surrounding declarations.
interface ____ {
}
@MetaCycle1
@Retention(RetentionPolicy.RUNTIME)
@
|
MetaCycle1
|
java
|
spring-projects__spring-framework
|
spring-orm/src/main/java/org/springframework/orm/jpa/persistenceunit/PersistenceManagedTypesBeanRegistrationAotProcessor.java
|
{
"start": 3449,
"end": 12623
}
|
class ____ extends BeanRegistrationCodeFragmentsDecorator {
private static final List<Class<? extends Annotation>> CALLBACK_TYPES = List.of(PreUpdate.class,
PostUpdate.class, PrePersist.class, PostPersist.class, PreRemove.class, PostRemove.class, PostLoad.class);
private static final ParameterizedTypeName LIST_OF_STRINGS_TYPE = ParameterizedTypeName.get(List.class, String.class);
private final RegisteredBean registeredBean;
private final BindingReflectionHintsRegistrar bindingRegistrar = new BindingReflectionHintsRegistrar();
private JpaManagedTypesBeanRegistrationCodeFragments(BeanRegistrationCodeFragments codeFragments,
RegisteredBean registeredBean) {
super(codeFragments);
this.registeredBean = registeredBean;
}
@Override
public CodeBlock generateInstanceSupplierCode(GenerationContext generationContext,
BeanRegistrationCode beanRegistrationCode, boolean allowDirectSupplierShortcut) {
PersistenceManagedTypes persistenceManagedTypes = this.registeredBean.getBeanFactory()
.getBean(this.registeredBean.getBeanName(), PersistenceManagedTypes.class);
contributeHints(generationContext.getRuntimeHints(),
this.registeredBean.getBeanFactory().getBeanClassLoader(), persistenceManagedTypes.getManagedClassNames());
GeneratedMethod generatedMethod = beanRegistrationCode.getMethods()
.add("getInstance", method -> {
Class<?> beanType = PersistenceManagedTypes.class;
method.addJavadoc("Get the bean instance for '$L'.",
this.registeredBean.getBeanName());
method.addModifiers(Modifier.PRIVATE, Modifier.STATIC);
method.returns(beanType);
method.addStatement("$T managedClassNames = $T.of($L)", LIST_OF_STRINGS_TYPE,
List.class, toCodeBlock(persistenceManagedTypes.getManagedClassNames()));
method.addStatement("$T managedPackages = $T.of($L)", LIST_OF_STRINGS_TYPE,
List.class, toCodeBlock(persistenceManagedTypes.getManagedPackages()));
method.addStatement("return $T.of($L, $L)", beanType, "managedClassNames", "managedPackages");
});
return generatedMethod.toMethodReference().toCodeBlock();
}
private CodeBlock toCodeBlock(List<String> values) {
return CodeBlock.join(values.stream().map(value -> CodeBlock.of("$S", value)).toList(), ", ");
}
private void contributeHints(RuntimeHints hints, @Nullable ClassLoader classLoader, List<String> managedClassNames) {
for (String managedClassName : managedClassNames) {
try {
Class<?> managedClass = ClassUtils.forName(managedClassName, null);
this.bindingRegistrar.registerReflectionHints(hints.reflection(), managedClass);
contributeEntityListenersHints(hints, managedClass);
contributeIdClassHints(hints, managedClass);
contributeConverterHints(hints, managedClass);
contributeCallbackHints(hints, managedClass);
contributeHibernateHints(hints, classLoader, managedClass);
contributePackagePrivateHints(hints, managedClass);
}
catch (ClassNotFoundException ex) {
throw new IllegalArgumentException("Failed to instantiate JPA managed class: " + managedClassName, ex);
}
}
}
private void contributeEntityListenersHints(RuntimeHints hints, Class<?> managedClass) {
EntityListeners entityListeners = AnnotationUtils.findAnnotation(managedClass, EntityListeners.class);
if (entityListeners != null) {
for (Class<?> entityListener : entityListeners.value()) {
hints.reflection().registerType(entityListener,
MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_PUBLIC_METHODS);
}
}
}
private void contributeIdClassHints(RuntimeHints hints, Class<?> managedClass) {
IdClass idClass = AnnotationUtils.findAnnotation(managedClass, IdClass.class);
if (idClass != null) {
this.bindingRegistrar.registerReflectionHints(hints.reflection(), idClass.value());
}
}
private void contributeConverterHints(RuntimeHints hints, Class<?> managedClass) {
Converter converter = AnnotationUtils.findAnnotation(managedClass, Converter.class);
ReflectionHints reflectionHints = hints.reflection();
if (converter != null) {
reflectionHints.registerType(managedClass, MemberCategory.INVOKE_DECLARED_CONSTRUCTORS);
}
Convert convertClassAnnotation = AnnotationUtils.findAnnotation(managedClass, Convert.class);
if (convertClassAnnotation != null) {
reflectionHints.registerType(convertClassAnnotation.converter(),
MemberCategory.INVOKE_DECLARED_CONSTRUCTORS);
}
ReflectionUtils.doWithFields(managedClass, field -> {
Convert convertFieldAnnotation = AnnotationUtils.findAnnotation(field, Convert.class);
if (convertFieldAnnotation != null && convertFieldAnnotation.converter() != AttributeConverter.class) {
reflectionHints.registerType(convertFieldAnnotation.converter(),
MemberCategory.INVOKE_DECLARED_CONSTRUCTORS);
}
});
}
private void contributeCallbackHints(RuntimeHints hints, Class<?> managedClass) {
ReflectionHints reflection = hints.reflection();
ReflectionUtils.doWithMethods(managedClass, method ->
reflection.registerMethod(method, ExecutableMode.INVOKE),
method -> CALLBACK_TYPES.stream().anyMatch(method::isAnnotationPresent));
}
private void contributeHibernateHints(RuntimeHints hints, @Nullable ClassLoader classLoader, Class<?> managedClass) {
ReflectionHints reflection = hints.reflection();
Class<? extends Annotation> embeddableInstantiatorClass =
loadClass("org.hibernate.annotations.EmbeddableInstantiator", classLoader);
if (embeddableInstantiatorClass != null) {
registerForReflection(reflection,
AnnotationUtils.findAnnotation(managedClass, embeddableInstantiatorClass), "value");
ReflectionUtils.doWithFields(managedClass, field -> {
registerForReflection(reflection,
AnnotationUtils.findAnnotation(field, embeddableInstantiatorClass), "value");
registerForReflection(reflection,
AnnotationUtils.findAnnotation(field.getType(), embeddableInstantiatorClass), "value");
});
ReflectionUtils.doWithMethods(managedClass, method -> registerForReflection(reflection,
AnnotationUtils.findAnnotation(method, embeddableInstantiatorClass), "value"));
}
Class<? extends Annotation> valueGenerationTypeClass =
loadClass("org.hibernate.annotations.ValueGenerationType", classLoader);
if (valueGenerationTypeClass != null) {
ReflectionUtils.doWithFields(managedClass, field -> registerForReflection(reflection,
AnnotationUtils.findAnnotation(field, valueGenerationTypeClass), "generatedBy"));
ReflectionUtils.doWithMethods(managedClass, method -> registerForReflection(reflection,
AnnotationUtils.findAnnotation(method, valueGenerationTypeClass), "generatedBy"));
}
Class<? extends Annotation> idGeneratorTypeClass =
loadClass("org.hibernate.annotations.IdGeneratorType", classLoader);
if (idGeneratorTypeClass != null) {
ReflectionUtils.doWithFields(managedClass, field -> registerForReflection(reflection,
AnnotationUtils.findAnnotation(field, idGeneratorTypeClass), "value"));
ReflectionUtils.doWithMethods(managedClass, method -> registerForReflection(reflection,
AnnotationUtils.findAnnotation(method, idGeneratorTypeClass), "value"));
}
Class<? extends Annotation> attributeBinderTypeClass =
loadClass("org.hibernate.annotations.AttributeBinderType", classLoader);
if (attributeBinderTypeClass != null) {
ReflectionUtils.doWithFields(managedClass, field -> registerForReflection(reflection,
AnnotationUtils.findAnnotation(field, attributeBinderTypeClass), "binder"));
ReflectionUtils.doWithMethods(managedClass, method -> registerForReflection(reflection,
AnnotationUtils.findAnnotation(method, attributeBinderTypeClass), "binder"));
}
}
private void contributePackagePrivateHints(RuntimeHints hints, Class<?> managedClass) {
ReflectionHints reflection = hints.reflection();
ReflectionUtils.doWithMethods(managedClass, method ->
reflection.registerMethod(method, ExecutableMode.INVOKE),
method -> {
int modifiers = method.getModifiers();
return !(java.lang.reflect.Modifier.isProtected(modifiers) ||
java.lang.reflect.Modifier.isPrivate(modifiers) ||
java.lang.reflect.Modifier.isPublic(modifiers));
});
}
@SuppressWarnings("unchecked")
private static @Nullable Class<? extends Annotation> loadClass(String className, @Nullable ClassLoader classLoader) {
try {
return (Class<? extends Annotation>) ClassUtils.forName(className, classLoader);
}
catch (ClassNotFoundException ex) {
return null;
}
}
@SuppressWarnings("NullAway") // Not-null assertion performed in ReflectionHints.registerType
private void registerForReflection(ReflectionHints reflection, @Nullable Annotation annotation, String attribute) {
if (annotation == null) {
return;
}
Class<?> type = (Class<?>) AnnotationUtils.getAnnotationAttributes(annotation).get(attribute);
reflection.registerType(type, MemberCategory.INVOKE_DECLARED_CONSTRUCTORS);
}
}
}
|
JpaManagedTypesBeanRegistrationCodeFragments
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/EnhancerImpl.java
|
{
"start": 38701,
"end": 39547
}
|
class ____ implements EnhancementInfo {
private final String version;
private final boolean includesDirtyChecking;
private final boolean includesAssociationManagement;
public EnhancementInfoImpl(boolean includesDirtyChecking, boolean includesAssociationManagement) {
this.version = Version.getVersionString();
this.includesDirtyChecking = includesDirtyChecking;
this.includesAssociationManagement = includesAssociationManagement;
}
@Override
public String version() {
return version;
}
@Override
public boolean includesDirtyChecking() {
return includesDirtyChecking;
}
@Override
public boolean includesAssociationManagement() {
return includesAssociationManagement;
}
@Override
public Class<? extends Annotation> annotationType() {
return EnhancementInfo.class;
}
}
}
|
EnhancementInfoImpl
|
java
|
square__retrofit
|
retrofit/src/main/java/retrofit2/BuiltInConverters.java
|
{
"start": 871,
"end": 1838
}
|
class ____ extends Converter.Factory {
@Override
public @Nullable Converter<ResponseBody, ?> responseBodyConverter(
Type type, Annotation[] annotations, Retrofit retrofit) {
if (type == ResponseBody.class) {
return Utils.isAnnotationPresent(annotations, Streaming.class)
? StreamingResponseBodyConverter.INSTANCE
: BufferingResponseBodyConverter.INSTANCE;
}
if (type == Void.class) {
return VoidResponseBodyConverter.INSTANCE;
}
if (Utils.isUnit(type)) {
return UnitResponseBodyConverter.INSTANCE;
}
return null;
}
@Override
public @Nullable Converter<?, RequestBody> requestBodyConverter(
Type type,
Annotation[] parameterAnnotations,
Annotation[] methodAnnotations,
Retrofit retrofit) {
if (RequestBody.class.isAssignableFrom(Utils.getRawType(type))) {
return RequestBodyConverter.INSTANCE;
}
return null;
}
static final
|
BuiltInConverters
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/ImplicitJoinInSubqueryTest.java
|
{
"start": 2103,
"end": 2232
}
|
class ____ {
@Id
@GeneratedValue
Long id;
@ManyToOne(fetch = FetchType.LAZY)
B b;
}
@Entity(name = "B")
public static
|
A
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/results/internal/ResultSetMappingImpl.java
|
{
"start": 1583,
"end": 12441
}
|
class ____ implements ResultSetMapping {
private final String mappingIdentifier;
private final boolean isDynamic;
private List<ResultBuilder> resultBuilders;
private Map<String, Map<Fetchable, LegacyFetchBuilder>> legacyFetchBuilders;
public ResultSetMappingImpl(String mappingIdentifier) {
this( mappingIdentifier, false );
}
public ResultSetMappingImpl(String mappingIdentifier, boolean isDynamic) {
this.mappingIdentifier = mappingIdentifier;
this.isDynamic = isDynamic;
}
private ResultSetMappingImpl(ResultSetMappingImpl original) {
this.mappingIdentifier = original.mappingIdentifier;
this.isDynamic = original.isDynamic;
if ( !original.isDynamic || original.resultBuilders == null ) {
this.resultBuilders = null;
}
else {
final List<ResultBuilder> resultBuilders = new ArrayList<>( original.resultBuilders.size() );
for ( ResultBuilder resultBuilder : original.resultBuilders ) {
resultBuilders.add( resultBuilder.cacheKeyInstance() );
}
this.resultBuilders = resultBuilders;
}
if ( !original.isDynamic || original.legacyFetchBuilders == null ) {
this.legacyFetchBuilders = null;
}
else {
final Map<String, Map<Fetchable, LegacyFetchBuilder>> builders = new HashMap<>( original.legacyFetchBuilders.size() );
for ( Map.Entry<String, Map<Fetchable, LegacyFetchBuilder>> entry : original.legacyFetchBuilders.entrySet() ) {
final Map<Fetchable, LegacyFetchBuilder> newValue = new HashMap<>( entry.getValue().size() );
for ( Map.Entry<Fetchable, LegacyFetchBuilder> builderEntry : entry.getValue().entrySet() ) {
newValue.put( builderEntry.getKey(), builderEntry.getValue().cacheKeyInstance() );
}
builders.put( entry.getKey(), newValue );
}
this.legacyFetchBuilders = builders;
}
}
@Override
public String getMappingIdentifier(){
return mappingIdentifier;
}
@Override
public boolean isDynamic() {
return isDynamic;
}
@Override
public int getNumberOfResultBuilders() {
return resultBuilders == null ? 0 : resultBuilders.size();
}
public List<ResultBuilder> getResultBuilders() {
if ( resultBuilders == null ) {
return Collections.emptyList();
}
return Collections.unmodifiableList( resultBuilders );
}
@Override
public void visitResultBuilders(BiConsumer<Integer, ResultBuilder> resultBuilderConsumer) {
if ( resultBuilders == null ) {
return;
}
for ( int i = 0; i < resultBuilders.size(); i++ ) {
resultBuilderConsumer.accept( i, resultBuilders.get( i ) );
}
}
@Override
public void visitLegacyFetchBuilders(Consumer<LegacyFetchBuilder> resultBuilderConsumer) {
if ( legacyFetchBuilders == null ) {
return;
}
for ( Map.Entry<String, Map<Fetchable, LegacyFetchBuilder>> entry : legacyFetchBuilders.entrySet() ) {
for ( LegacyFetchBuilder fetchBuilder : entry.getValue().values() ) {
resultBuilderConsumer.accept( fetchBuilder );
}
}
}
@Override
public void addResultBuilder(ResultBuilder resultBuilder) {
if ( resultBuilders == null ) {
resultBuilders = new ArrayList<>();
}
resultBuilders.add( resultBuilder );
}
@Override
public void addLegacyFetchBuilder(LegacyFetchBuilder fetchBuilder) {
final Map<Fetchable, LegacyFetchBuilder> existingFetchBuildersByOwner;
if ( legacyFetchBuilders == null ) {
legacyFetchBuilders = new HashMap<>();
existingFetchBuildersByOwner = null;
}
else {
existingFetchBuildersByOwner = legacyFetchBuilders.get( fetchBuilder.getOwnerAlias() );
}
final Map<Fetchable, LegacyFetchBuilder> fetchBuildersByOwner;
if ( existingFetchBuildersByOwner == null ) {
fetchBuildersByOwner = new HashMap<>();
legacyFetchBuilders.put( fetchBuilder.getOwnerAlias(), fetchBuildersByOwner );
}
else {
fetchBuildersByOwner = existingFetchBuildersByOwner;
}
fetchBuildersByOwner.put( fetchBuilder.getFetchable(), fetchBuilder );
}
@Override
public void addAffectedTableNames(Set<String> affectedTableNames, SessionFactoryImplementor sessionFactory) {
if ( StringHelper.isEmpty( mappingIdentifier ) ) {
return;
}
final EntityPersister entityDescriptor =
sessionFactory.getMappingMetamodel()
.findEntityDescriptor( mappingIdentifier );
if ( entityDescriptor == null ) {
return;
}
Collections.addAll( affectedTableNames, (String[]) entityDescriptor.getQuerySpaces() );
}
@Override
public JdbcValuesMapping resolve(
JdbcValuesMetadata jdbcResultsMetadata,
LoadQueryInfluencers loadQueryInfluencers,
SessionFactoryImplementor sessionFactory) {
final int numberOfResults;
final int rowSize = jdbcResultsMetadata.getColumnCount();
numberOfResults = resultBuilders == null ? rowSize : resultBuilders.size();
final List<SqlSelection> sqlSelections = new ArrayList<>( rowSize );
final List<DomainResult<?>> domainResults = new ArrayList<>( numberOfResults );
final DomainResultCreationStateImpl creationState = new DomainResultCreationStateImpl(
mappingIdentifier,
jdbcResultsMetadata,
legacyFetchBuilders,
sqlSelections::add,
loadQueryInfluencers,
true,
sessionFactory
);
for ( int i = 0; i < numberOfResults; i++ ) {
final ResultBuilder resultBuilder = resultBuilders != null
? resultBuilders.get( i )
: null;
final DomainResult<?> domainResult;
if ( resultBuilder == null ) {
domainResult = makeImplicitDomainResult(
i,
sqlSelections::add,
jdbcResultsMetadata,
sessionFactory
);
}
else {
domainResult = resultBuilder.buildResult(
jdbcResultsMetadata,
domainResults.size(),
creationState
);
}
if ( domainResult.containsAnyNonScalarResults() ) {
creationState.disallowPositionalSelections();
}
domainResults.add( domainResult );
}
// We only need this check when we actually have result builders
// As people should be able to just run native queries and work with tuples
if ( resultBuilders != null ) {
final Set<String> knownDuplicateAliases = new TreeSet<>( String.CASE_INSENSITIVE_ORDER );
if ( resultBuilders.size() == 1 && domainResults.size() == 1 && domainResults.get( 0 ) instanceof EntityResult entityResult ) {
// Special case for result set mappings that just fetch a single polymorphic entity
final EntityPersister persister = entityResult.getReferencedMappingContainer().getEntityPersister();
final boolean polymorphic = persister.isPolymorphic();
// We only need to check for duplicate aliases if we have join fetches,
// otherwise we assume that even if there are duplicate aliases, the values are equivalent.
// If we don't do that, there is no way to fetch joined inheritance entities
if ( polymorphic && ( legacyFetchBuilders == null || legacyFetchBuilders.isEmpty() )
&& !entityResult.hasJoinFetches() ) {
final Set<String> aliases = new TreeSet<>( String.CASE_INSENSITIVE_ORDER );
for ( String[] columns : persister.getConstraintOrderedTableKeyColumnClosure() ) {
addColumns( aliases, knownDuplicateAliases, columns );
}
addColumn( aliases, knownDuplicateAliases, persister.getDiscriminatorColumnName() );
addColumn( aliases, knownDuplicateAliases, persister.getVersionColumnName() );
for (int i = 0; i < persister.countSubclassProperties(); i++ ) {
addColumns(
aliases,
knownDuplicateAliases,
persister.getSubclassPropertyColumnNames( i )
);
}
}
}
final String[] aliases = new String[rowSize];
final Map<String, Boolean> aliasHasDuplicates = new HashMap<>( rowSize );
for ( int i = 0; i < rowSize; i++ ) {
aliasHasDuplicates.compute(
aliases[i] = jdbcResultsMetadata.resolveColumnName( i + 1 ),
(k, v) -> v == null ? Boolean.FALSE : Boolean.TRUE
);
}
// Only check for duplicates for the selections that we actually use
for ( SqlSelection sqlSelection : sqlSelections ) {
final String alias = aliases[sqlSelection.getValuesArrayPosition()];
if ( !knownDuplicateAliases.contains( alias ) && aliasHasDuplicates.get( alias ) == Boolean.TRUE ) {
throw new NonUniqueDiscoveredSqlAliasException(
"Encountered a duplicated sql alias [" + alias + "] during auto-discovery of a native-sql query"
);
}
}
}
return new JdbcValuesMappingImpl(
sqlSelections,
domainResults,
rowSize,
creationState.getRegisteredLockModes()
);
}
private static void addColumns(Set<String> aliases, Set<String> knownDuplicateAliases, String[] columns) {
for ( int i = 0; i < columns.length; i++ ) {
addColumn( aliases, knownDuplicateAliases, columns[i] );
}
}
private static void addColumn(Set<String> aliases, Set<String> knownDuplicateAliases, String column) {
if ( column != null && !aliases.add( column ) ) {
knownDuplicateAliases.add( column );
}
}
private DomainResult<?> makeImplicitDomainResult(
int valuesArrayPosition,
Consumer<SqlSelection> sqlSelectionConsumer,
JdbcValuesMetadata jdbcResultsMetadata,
SessionFactoryImplementor sessionFactory) {
final int jdbcPosition = valuesArrayPosition + 1;
final BasicType<?> jdbcMapping =
jdbcResultsMetadata.resolveType( jdbcPosition, null,
sessionFactory.getTypeConfiguration() );
final String name = jdbcResultsMetadata.resolveColumnName( jdbcPosition );
final ResultSetMappingSqlSelection sqlSelection = new ResultSetMappingSqlSelection( valuesArrayPosition, (BasicValuedMapping) jdbcMapping );
sqlSelectionConsumer.accept( sqlSelection );
return new BasicResult<>(
valuesArrayPosition,
name,
jdbcMapping,
null,
false,
false
);
}
@Override
public NamedResultSetMappingMemento toMemento(String name) {
throw new UnsupportedOperationException();
}
@Override
public ResultSetMapping cacheKeyInstance() {
return new ResultSetMappingImpl( this );
}
@Override
public int hashCode() {
if ( isDynamic ) {
int result = mappingIdentifier != null ? mappingIdentifier.hashCode() : 0;
result = 31 * result + ( resultBuilders != null ? resultBuilders.hashCode() : 0 );
result = 31 * result + ( legacyFetchBuilders != null ? legacyFetchBuilders.hashCode() : 0 );
return result;
}
else {
return mappingIdentifier.hashCode();
}
}
@Override
public boolean equals(Object o) {
if ( this == o ) {
return true;
}
if ( o == null || getClass() != o.getClass() ) {
return false;
}
final ResultSetMappingImpl that = (ResultSetMappingImpl) o;
if ( isDynamic ) {
return that.isDynamic
&& Objects.equals( mappingIdentifier, that.mappingIdentifier )
&& Objects.equals( resultBuilders, that.resultBuilders )
&& Objects.equals( legacyFetchBuilders, that.legacyFetchBuilders );
}
else {
return !that.isDynamic && mappingIdentifier != null && mappingIdentifier.equals( that.mappingIdentifier );
}
}
}
|
ResultSetMappingImpl
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/main/java/io/micronaut/annotation/processing/BeanDefinitionInjectProcessor.java
|
{
"start": 3202,
"end": 6292
}
|
class ____ extends AbstractInjectAnnotationProcessor {
private static final String[] ANNOTATION_STEREOTYPES = new String[] {
AnnotationUtil.POST_CONSTRUCT,
AnnotationUtil.PRE_DESTROY,
"jakarta.annotation.PreDestroy",
"jakarta.annotation.PostConstruct",
"jakarta.inject.Inject",
"jakarta.inject.Qualifier",
"jakarta.inject.Singleton",
"jakarta.inject.Inject",
"jakarta.inject.Qualifier",
"jakarta.inject.Singleton",
"io.micronaut.context.annotation.Bean",
"io.micronaut.context.annotation.Replaces",
"io.micronaut.context.annotation.Value",
"io.micronaut.context.annotation.Property",
"io.micronaut.context.annotation.Executable",
ClassImport.class.getName(),
AnnotationUtil.ANN_AROUND,
AnnotationUtil.ANN_INTERCEPTOR_BINDINGS,
AnnotationUtil.ANN_INTERCEPTOR_BINDING,
AnnotationUtil.ANN_INTRODUCTION
};
private Set<String> beanDefinitions;
private final Set<String> processed = new HashSet<>();
private final Map<String, PostponeToNextRoundException> postponed = new HashMap<>();
@Override
public final synchronized void init(ProcessingEnvironment processingEnv) {
super.init(processingEnv);
this.beanDefinitions = new LinkedHashSet<>();
for (BeanElementVisitor<?> visitor : BeanElementVisitor.VISITORS) {
if (visitor.isEnabled()) {
try {
visitor.start(javaVisitorContext);
} catch (Exception e) {
javaVisitorContext.fail("Error initializing bean element visitor [" + visitor.getClass().getName() + "]: " + e.getMessage(), null);
}
}
}
}
@Override
public final boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
boolean processingOver = roundEnv.processingOver();
if (!processingOver) {
JavaAnnotationMetadataBuilder annotationMetadataBuilder = javaVisitorContext.getAnnotationMetadataBuilder();
annotations = annotations
.stream()
.filter(ann -> {
final String name = ann.getQualifiedName().toString();
String packageName = NameUtils.getPackageName(name);
return !name.equals(AnnotationUtil.KOTLIN_METADATA) && !AnnotationUtil.STEREOTYPE_EXCLUDES.contains(packageName);
})
.filter(ann -> annotationMetadataBuilder.lookupOrBuildForType(ann).hasStereotype(ANNOTATION_STEREOTYPES)
|| isProcessedAnnotation(ann.getQualifiedName().toString()))
.collect(Collectors.toSet());
if (!annotations.isEmpty()) {
TypeElement groovyObjectTypeElement = elementUtils.getTypeElement("groovy.lang.GroovyObject");
TypeMirror groovyObjectType = groovyObjectTypeElement != null ? groovyObjectTypeElement.asType() : null;
// accumulate all the
|
BeanDefinitionInjectProcessor
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/decorators/generics/GenericsDecoratorTest.java
|
{
"start": 1555,
"end": 2030
}
|
class ____ implements Converter<String, Long> {
@Inject
@Delegate
Converter<String, Long> delegate;
@Override
public String convert(List<List<String>> value) {
value = singletonList(singletonList(value.get(0).get(0).trim()));
return delegate.convert(value);
}
@Override
public Long ping(Long value) {
return delegate.ping(value) + 1;
}
}
}
|
TrimConverterDecorator
|
java
|
spring-projects__spring-security
|
test/src/test/java/org/springframework/security/test/web/servlet/request/SecurityMockMvcRequestPostProcessorsOidcLoginTests.java
|
{
"start": 7472,
"end": 8682
}
|
class ____ {
@GetMapping("/name")
String name(@AuthenticationPrincipal OidcUser oidcUser) {
return oidcUser.getName();
}
@GetMapping("/client-name")
String clientName(@RegisteredOAuth2AuthorizedClient OAuth2AuthorizedClient authorizedClient) {
return authorizedClient.getPrincipalName();
}
@GetMapping("/access-token")
String authorizedClient(@RegisteredOAuth2AuthorizedClient OAuth2AuthorizedClient authorizedClient) {
return authorizedClient.getAccessToken().getTokenValue();
}
@GetMapping("/id-token/{claim}")
String idTokenClaim(@AuthenticationPrincipal OidcUser oidcUser, @PathVariable("claim") String claim) {
return oidcUser.getIdToken().getClaim(claim);
}
@GetMapping("/user-info/{claim}")
String userInfoClaim(@AuthenticationPrincipal OidcUser oidcUser, @PathVariable("claim") String claim) {
return oidcUser.getUserInfo().getClaim(claim);
}
@GetMapping("/admin/scopes")
List<String> scopes(
@AuthenticationPrincipal(expression = "authorities") Collection<GrantedAuthority> authorities) {
return authorities.stream().map(GrantedAuthority::getAuthority).collect(Collectors.toList());
}
}
}
}
|
PrincipalController
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/ExtensionTest.java
|
{
"start": 12330,
"end": 13259
}
|
class ____ extends NonFinalExtension {
@Override
public Set<ExecutableElement> consumeMethods(Context context) {
// Find Integer.intValue() and try to consume that.
Elements elementUtils = context.processingEnvironment().getElementUtils();
TypeElement javaLangInteger = elementUtils.getTypeElement(Integer.class.getName());
for (ExecutableElement method :
ElementFilter.methodsIn(javaLangInteger.getEnclosedElements())) {
if (method.getSimpleName().contentEquals("intValue")) {
return ImmutableSet.of(method);
}
}
throw new AssertionError("Could not find Integer.intValue()");
}
}
JavaFileObject impl =
JavaFileObjects.forSourceLines(
"foo.bar.Baz",
"package foo.bar;",
"import com.google.auto.value.AutoValue;",
"@AutoValue public abstract
|
ConsumeBogusMethod
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/cglib/transform/impl/AddStaticInitTransformer.java
|
{
"start": 1172,
"end": 2068
}
|
class ____ extends ClassEmitterTransformer {
private MethodInfo info;
public AddStaticInitTransformer(Method classInit) {
info = ReflectUtils.getMethodInfo(classInit);
if (!TypeUtils.isStatic(info.getModifiers())) {
throw new IllegalArgumentException(classInit + " is not static");
}
Type[] types = info.getSignature().getArgumentTypes();
if (types.length != 1 ||
!types[0].equals(Constants.TYPE_CLASS) ||
!info.getSignature().getReturnType().equals(Type.VOID_TYPE)) {
throw new IllegalArgumentException(classInit + " illegal signature");
}
}
@Override
protected void init() {
if (!TypeUtils.isInterface(getAccess())) {
CodeEmitter e = getStaticHook();
EmitUtils.load_class_this(e);
e.invoke(info);
}
}
}
|
AddStaticInitTransformer
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/MarkerMixInYamlTest.java
|
{
"start": 1014,
"end": 1180
}
|
class ____ extends MarkerMixInTest {
@Override
protected ObjectMapper newObjectMapper() {
return new Log4jYamlObjectMapper();
}
}
|
MarkerMixInYamlTest
|
java
|
apache__camel
|
components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/integration/FromFileToFtpNotStepwiseIT.java
|
{
"start": 871,
"end": 1108
}
|
class ____ extends FromFileToFtpIT {
@Override
protected String getFtpUrl() {
return "ftp://admin@localhost:{{ftp.server.port}}/tmp2/camel?password=admin&initialDelay=3000&stepwise=false";
}
}
|
FromFileToFtpNotStepwiseIT
|
java
|
quarkusio__quarkus
|
independent-projects/tools/devtools-common/src/test/java/io/quarkus/platform/catalog/compatibility/ExtensionCatalogCompatibilityTest.java
|
{
"start": 6092,
"end": 6977
}
|
class ____ {
private final Extension.Mutable e = Extension.builder();
ExtensionBuilder(ArtifactCoords coords) {
e.setArtifact(coords);
}
@SuppressWarnings("unchecked")
ExtensionBuilder addCapability(String cap) {
((Map<String, List<String>>) e.getMetadata().computeIfAbsent("capabilities",
s -> Collections.singletonMap("provides", new ArrayList<>()))).get("provides").add(cap);
return this;
}
@SuppressWarnings("unchecked")
ExtensionBuilder addDependency(ArtifactCoords coords) {
((List<String>) e.getMetadata().computeIfAbsent("extension-dependencies",
s -> new ArrayList<>())).add(coords.getKey().toString());
return this;
}
Extension build() {
return e;
}
}
}
|
ExtensionBuilder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/com/nec/NECVEPlugin.java
|
{
"start": 2078,
"end": 10999
}
|
class ____ implements DevicePlugin, DevicePluginScheduler {
private static final String HADOOP_COMMON_HOME = "HADOOP_COMMON_HOME";
private static final String ENV_SCRIPT_PATH = "NEC_VE_GET_SCRIPT_PATH";
private static final String ENV_SCRIPT_NAME = "NEC_VE_GET_SCRIPT_NAME";
private static final String ENV_USE_UDEV = "NEC_USE_UDEV";
private static final String DEFAULT_SCRIPT_NAME = "nec-ve-get.py";
private static final Logger LOG = LoggerFactory.getLogger(NECVEPlugin.class);
private static final String[] DEFAULT_BINARY_SEARCH_DIRS = new String[]{
"/usr/bin", "/bin", "/opt/nec/ve/bin"};
private String binaryPath;
private boolean useUdev;
private VEDeviceDiscoverer discoverer;
private Function<String[], CommandExecutor>
commandExecutorProvider = this::createCommandExecutor;
public NECVEPlugin() throws ResourceHandlerException {
this(System::getenv, DEFAULT_BINARY_SEARCH_DIRS, new UdevUtil());
}
@VisibleForTesting
NECVEPlugin(Function<String, String> envProvider, String[] scriptPaths,
UdevUtil udev) throws ResourceHandlerException {
if (Boolean.parseBoolean(envProvider.apply(ENV_USE_UDEV))) {
LOG.info("Using libudev to retrieve syspath & device status");
useUdev = true;
udev.init();
discoverer = new VEDeviceDiscoverer(udev);
} else {
scriptBasedInit(envProvider, scriptPaths);
}
}
private void scriptBasedInit(Function<String, String> envProvider,
String[] scriptPaths) throws ResourceHandlerException {
String binaryName = DEFAULT_SCRIPT_NAME;
String envScriptName = envProvider.apply(ENV_SCRIPT_NAME);
if (envScriptName != null) {
binaryName = envScriptName;
}
LOG.info("Use {} as script name.", binaryName);
// Try to find the script based on an environment variable, if set
boolean found = false;
String envBinaryPath = envProvider.apply(ENV_SCRIPT_PATH);
if (envBinaryPath != null) {
this.binaryPath = getScriptFromEnvSetting(envBinaryPath);
found = binaryPath != null;
}
// Try $HADOOP_COMMON_HOME
if (!found) {
// print a warning only if the env variable was defined
if (envBinaryPath != null) {
LOG.warn("Script {} does not exist, falling back " +
"to $HADOOP_COMMON_HOME/sbin/DevicePluginScript/", envBinaryPath);
}
this.binaryPath = getScriptFromHadoopCommon(envProvider, binaryName);
found = binaryPath != null;
}
// Try the default search directories
if (!found) {
LOG.info("Script not found under" +
" $HADOOP_COMMON_HOME/sbin/DevicePluginScript/," +
" falling back to default search directories");
this.binaryPath = getScriptFromSearchDirs(binaryName, scriptPaths);
found = binaryPath != null;
}
// Script not found
if (!found) {
LOG.error("Script not found in "
+ Arrays.toString(scriptPaths));
throw new ResourceHandlerException(
"No binary found for " + NECVEPlugin.class.getName());
}
}
@Override
public DeviceRegisterRequest getRegisterRequestInfo() {
return DeviceRegisterRequest.Builder.newInstance()
.setResourceName("nec.com/ve").build();
}
@Override
public Set<Device> getDevices() {
Set<Device> devices = null;
if (useUdev) {
try {
devices = discoverer.getDevicesFromPath("/dev");
} catch (IOException e) {
LOG.error("Error during scanning devices", e);
}
} else {
CommandExecutor executor =
commandExecutorProvider.apply(new String[]{this.binaryPath});
try {
executor.execute();
String output = executor.getOutput();
devices = parseOutput(output);
} catch (IOException e) {
LOG.error("Error during executing external binary", e);
}
}
if (devices != null) {
LOG.info("Found devices:");
devices.forEach(dev -> LOG.info("{}", dev));
}
return devices;
}
@Override
public DeviceRuntimeSpec onDevicesAllocated(Set<Device> set,
YarnRuntimeType yarnRuntimeType) {
return null;
}
/**
* Parses the output of the external Python script.
*
* Sample line:
* id=0, dev=/dev/ve0, state=ONLINE, busId=0000:65:00.0, major=243, minor=0
*/
private Set<Device> parseOutput(String output) {
Set<Device> devices = new HashSet<>();
LOG.info("Parsing output: {}", output);
String[] lines = output.split("\n");
outer:
for (String line : lines) {
Device.Builder builder = Device.Builder.newInstance();
// map key --> builder calls
Map<String, Consumer<String>> builderInvocations =
getBuilderInvocationsMap(builder);
String[] keyValues = line.trim().split(",");
for (String keyValue : keyValues) {
String[] tokens = keyValue.trim().split("=");
if (tokens.length != 2) {
LOG.error("Unknown format of script output! Skipping this line");
continue outer;
}
final String key = tokens[0];
final String value = tokens[1];
Consumer<String> builderInvocation = builderInvocations.get(key);
if (builderInvocation != null) {
builderInvocation.accept(value);
} else {
LOG.warn("Unknown key {}, ignored", key);
}
}// for key value pairs
Device device = builder.build();
if (device.isHealthy()) {
devices.add(device);
} else {
LOG.warn("Skipping device {} because it's not healthy", device);
}
}
return devices;
}
@Override
public void onDevicesReleased(Set<Device> releasedDevices) {
// nop
}
@Override
public Set<Device> allocateDevices(Set<Device> availableDevices, int count,
Map<String, String> env) {
// Can consider topology, utilization.etc
Set<Device> allocated = new HashSet<>();
int number = 0;
for (Device d : availableDevices) {
allocated.add(d);
number++;
if (number == count) {
break;
}
}
return allocated;
}
private CommandExecutor createCommandExecutor(String[] command) {
return new Shell.ShellCommandExecutor(
command);
}
private String getScriptFromEnvSetting(String envBinaryPath) {
LOG.info("Checking script path: {}", envBinaryPath);
File f = new File(envBinaryPath);
if (!f.exists()) {
LOG.warn("Script {} does not exist", envBinaryPath);
return null;
}
if (f.isDirectory()) {
LOG.warn("Specified path {} is a directory", envBinaryPath);
return null;
}
if (!FileUtil.canExecute(f)) {
LOG.warn("Script {} is not executable", envBinaryPath);
return null;
}
LOG.info("Found script: {}", envBinaryPath);
return envBinaryPath;
}
private String getScriptFromHadoopCommon(
Function<String, String> envProvider, String binaryName) {
String scriptPath = null;
String hadoopCommon = envProvider.apply(HADOOP_COMMON_HOME);
if (hadoopCommon != null) {
String targetPath = hadoopCommon +
"/sbin/DevicePluginScript/" + binaryName;
LOG.info("Checking script {}: ", targetPath);
if (new File(targetPath).exists()) {
LOG.info("Found script: {}", targetPath);
scriptPath = targetPath;
}
} else {
LOG.info("$HADOOP_COMMON_HOME is not set");
}
return scriptPath;
}
private String getScriptFromSearchDirs(String binaryName,
String[] scriptPaths) {
String scriptPath = null;
for (String dir : scriptPaths) {
File f = new File(dir, binaryName);
if (f.exists()) {
LOG.info("Found script: {}", dir);
scriptPath = f.getAbsolutePath();
break;
}
}
return scriptPath;
}
private Map<String, Consumer<String>> getBuilderInvocationsMap(
Device.Builder builder) {
Map<String, Consumer<String>> builderInvocations = new HashMap<>();
builderInvocations.put("id", v -> builder.setId(Integer.parseInt(v)));
builderInvocations.put("dev", v -> builder.setDevPath(v));
builderInvocations.put("state", v -> {
if (v.equals("ONLINE")) {
builder.setHealthy(true);
}
builder.setStatus(v);
});
builderInvocations.put("busId", v -> builder.setBusID(v));
builderInvocations.put("major",
v -> builder.setMajorNumber(Integer.parseInt(v)));
builderInvocations.put("minor",
v -> builder.setMinorNumber(Integer.parseInt(v)));
return builderInvocations;
}
@VisibleForTesting
void setCommandExecutorProvider(
Function<String[], CommandExecutor> provider) {
this.commandExecutorProvider = provider;
}
@VisibleForTesting
void setVeDeviceDiscoverer(VEDeviceDiscoverer veDeviceDiscoverer) {
this.discoverer = veDeviceDiscoverer;
}
@VisibleForTesting
String getBinaryPath() {
return binaryPath;
}
}
|
NECVEPlugin
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/dispatcher/runner/AbstractDispatcherLeaderProcess.java
|
{
"start": 9117,
"end": 9423
}
|
interface ____ extends AutoCloseableAsync {
DispatcherGateway getGateway();
CompletableFuture<Void> onRemovedExecutionPlan(JobID jobId);
CompletableFuture<ApplicationStatus> getShutDownFuture();
CompletableFuture<Void> getTerminationFuture();
}
}
|
DispatcherGatewayService
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/aot/samples/basic/ImportsContextCustomizerFactory.java
|
{
"start": 2055,
"end": 2643
}
|
class ____ implements ContextCustomizer {
private final Class<?> testClass;
ImportsContextCustomizer(Class<?> testClass) {
this.testClass = testClass;
}
@Override
public void customizeContext(ConfigurableApplicationContext context, MergedContextConfiguration mergedConfig) {
AnnotatedBeanDefinitionReader annotatedBeanDefinitionReader =
new AnnotatedBeanDefinitionReader((GenericApplicationContext) context);
Arrays.stream(this.testClass.getAnnotation(Import.class).value())
.forEach(annotatedBeanDefinitionReader::register);
}
}
}
|
ImportsContextCustomizer
|
java
|
playframework__playframework
|
dev-mode/sbt-plugin/src/sbt-test/play-sbt-plugin/evolutions-multiple-databases/app/controllers/GroupsController.java
|
{
"start": 306,
"end": 1083
}
|
class ____ extends Controller {
private final Database db;
@Inject
public GroupsController(@NamedDatabase("groups") Database db) {
this.db = db;
}
public Result list() {
List<Group> groups = db.withConnection(connection -> {
List<Group> result = new ArrayList<>();
PreparedStatement statement = connection.prepareStatement("select id, name from groups");
ResultSet rs = statement.executeQuery();
while(rs.next()) {
Long id = rs.getLong("id");
String name = rs.getString("name");
result.add(new Group(id, name));
}
return result;
});
return ok(views.html.groups.render(groups));
}
}
|
GroupsController
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java
|
{
"start": 1834,
"end": 5336
}
|
class ____ extends AsyncTwoPhaseIndexer<Integer, MockJobStats> {
private final CountDownLatch latch;
// test the execution order
private volatile int step;
private final boolean stoppedBeforeFinished;
private final boolean noIndices;
protected MockIndexer(
ThreadPool threadPool,
AtomicReference<IndexerState> initialState,
Integer initialPosition,
CountDownLatch latch,
boolean stoppedBeforeFinished,
boolean noIndices
) {
super(threadPool, initialState, initialPosition, new MockJobStats());
this.latch = latch;
this.stoppedBeforeFinished = stoppedBeforeFinished;
this.noIndices = noIndices;
}
@Override
protected String getJobId() {
return "mock";
}
@Override
protected IterationResult<Integer> doProcess(SearchResponse searchResponse) {
assertFalse("should not be called as stoppedBeforeFinished is false", stoppedBeforeFinished);
assertThat(step, equalTo(2));
++step;
return new IterationResult<>(Stream.empty(), 3, true);
}
private void awaitForLatch() {
try {
latch.await(10, TimeUnit.SECONDS);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
protected void onStart(long now, ActionListener<Boolean> listener) {
assertThat(step, equalTo(0));
++step;
listener.onResponse(true);
}
@Override
protected void doNextSearch(long waitTimeInNanos, ActionListener<SearchResponse> nextPhase) {
assertThat(step, equalTo(1));
++step;
// block till latch has been counted down, simulating network latency
awaitForLatch();
if (noIndices) {
// simulate no indices being searched due to optimizations
nextPhase.onResponse(null);
return;
}
ActionListener.respondAndRelease(nextPhase, SearchResponseUtils.successfulResponse(SearchHits.EMPTY_WITH_TOTAL_HITS));
}
@Override
protected void doNextBulk(BulkRequest request, ActionListener<BulkResponse> nextPhase) {
fail("should not be called");
}
@Override
protected void doSaveState(IndexerState state, Integer position, Runnable next) {
// for stop before finished we do not know if its stopped before are after the search
if (stoppedBeforeFinished == false) {
assertThat(step, equalTo(noIndices ? 3 : 4));
}
++step;
next.run();
}
@Override
protected void onFailure(Exception exc) {
fail(exc.getMessage());
}
@Override
protected void onFinish(ActionListener<Void> listener) {
assertThat(step, equalTo(noIndices ? 2 : 3));
++step;
listener.onResponse(null);
assertTrue(isFinished.compareAndSet(false, true));
}
@Override
protected void onStop() {
assertTrue(isStopped.compareAndSet(false, true));
}
@Override
protected void onAbort() {}
public int getStep() {
return step;
}
}
private
|
MockIndexer
|
java
|
apache__kafka
|
connect/runtime/src/test/resources/test-plugins/versioned-header-converter/test/plugins/VersionedHeaderConverter.java
|
{
"start": 1297,
"end": 2552
}
|
class ____ implements HeaderConverter, Versioned {
public VersionedHeaderConverter() {
super();
}
@Override
public SchemaAndValue toConnectHeader(String topic, String headerKey, byte[] value) {
return null;
}
@Override
public byte[] fromConnectHeader(String topic, String headerKey, Schema schema, Object value) {
return new byte[0];
}
@Override
public void configure(final Map<String, ?> configs) {
}
@Override
public ConfigDef config() {
return new ConfigDef()
// version specific config will have the defaul value (PLACEHOLDER_FOR_VERSION) replaced with the actual version during plugin compilation
// this will help with testing differnt configdef for different version of header converter
.define("version-specific-config", ConfigDef.Type.STRING, "PLACEHOLDER_FOR_VERSION", ConfigDef.Importance.HIGH, "version specific docs")
.define("other-config", ConfigDef.Type.STRING, "defaultVal", ConfigDef.Importance.HIGH, "other docs");
}
@Override
public String version() {
return "PLACEHOLDER_FOR_VERSION";
}
@Override
public void close() {
}
}
|
VersionedHeaderConverter
|
java
|
google__guava
|
android/guava-testlib/src/com/google/common/testing/SerializableTester.java
|
{
"start": 1374,
"end": 2456
}
|
class ____ {
private SerializableTester() {}
/**
* Serializes and deserializes the specified object.
*
* <p><b>GWT warning:</b> Under GWT, this method simply returns its input, as proper GWT
* serialization tests require more setup. This no-op behavior allows test authors to intersperse
* {@code SerializableTester} calls with other, GWT-compatible tests.
*
* <p>Note that the specified object may not be known by the compiler to be a {@link
* java.io.Serializable} instance, and is thus declared an {@code Object}. For example, it might
* be declared as a {@code List}.
*
* @return the re-serialized object
* @throws RuntimeException if the specified object was not successfully serialized or
* deserialized
*/
@CanIgnoreReturnValue
public static <T> T reserialize(T object) {
return Platform.reserialize(object);
}
/**
* Serializes and deserializes the specified object and verifies that the re-serialized object is
* equal to the provided object, that the hashcodes are identical, and that the
|
SerializableTester
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/uniqueconstraint/CompositePrimaryKeyColumnOrderTest.java
|
{
"start": 2551,
"end": 2645
}
|
class ____ {
@Id
private String b;
@Id
private String a;
}
private static
|
TestEntity
|
java
|
dropwizard__dropwizard
|
dropwizard-core/src/test/java/io/dropwizard/core/VirtualThreadsTest.java
|
{
"start": 1163,
"end": 5006
}
|
class ____ extends Configuration {
}
@Test
void virtualThreadsEnabledWhenRequested() throws Exception {
boolean isVirtualThread = probeVirtualThread(
defaultServerFactory -> defaultServerFactory.setEnableVirtualThreads(true),
this::selectServerThreadPool
);
assertThat(isVirtualThread).isTrue();
}
@Test
void virtualThreadsDisabledWhenNotRequested() throws Exception {
boolean isVirtualThread = probeVirtualThread(
defaultServerFactory -> defaultServerFactory.setEnableVirtualThreads(false),
this::selectServerThreadPool
);
assertThat(isVirtualThread).isFalse();
}
@Test
void virtualAdminThreadsEnabledWhenRequested() throws Exception {
boolean isVirtualThread = probeVirtualThread(
defaultServerFactory -> defaultServerFactory.setEnableAdminVirtualThreads(true),
this::selectAdminThreadPool
);
assertThat(isVirtualThread).isTrue();
}
@Test
void virtualAdminThreadsDisabledWhenNotRequested() throws Exception {
boolean isVirtualThread = probeVirtualThread(
defaultServerFactory -> defaultServerFactory.setEnableAdminVirtualThreads(false),
this::selectAdminThreadPool
);
assertThat(isVirtualThread).isFalse();
}
private boolean probeVirtualThread(Consumer<DefaultServerFactory> defaultServerFactoryConsumer,
Function<Server, ThreadPool> threadPoolSelector) throws Exception {
final AtomicReference<Boolean> isVirtualThread = new AtomicReference<>(null);
Environment environment = new Environment("VirtualThreadsTest", Jackson.newMinimalObjectMapper(),
Validators.newValidatorFactory(), new MetricRegistry(), this.getClass().getClassLoader(),
new HealthCheckRegistry(), new VirtualThreadsConfiguration());
DefaultServerFactory defaultServerFactory = new DefaultServerFactory();
defaultServerFactoryConsumer.accept(defaultServerFactory);
Server server = defaultServerFactory.build(environment);
server.start();
ExecutionStrategy.Producer producer = () -> {
if (isVirtualThread.get() != null) {
return null;
}
return Invocable.from(Invocable.InvocationType.BLOCKING, () -> isVirtualThread.set(VirtualThreads.isVirtualThread()));
};
AdaptiveExecutionStrategy adaptiveExecutionStrategy = new AdaptiveExecutionStrategy(producer, threadPoolSelector.apply(server));
adaptiveExecutionStrategy.start();
try {
adaptiveExecutionStrategy.dispatch();
while (isVirtualThread.get() == null) {
Thread.yield();
}
} finally {
adaptiveExecutionStrategy.stop();
server.stop();
}
if (isVirtualThread.get() == null) {
throw new IllegalStateException("Didn't execute virtual thread probe");
}
return isVirtualThread.get();
}
private ThreadPool selectServerThreadPool(Server server) {
return server.getThreadPool();
}
private ThreadPool selectAdminThreadPool(Server server) {
final int adminPort = 8081;
return Arrays.stream(server.getConnectors())
.filter(ServerConnector.class::isInstance)
.map(ServerConnector.class::cast)
.filter(serverConnector -> serverConnector.getLocalPort() == adminPort)
.map(AbstractConnector::getExecutor)
.filter(ThreadPool.class::isInstance)
.map(ThreadPool.class::cast)
.findFirst()
.orElseThrow(() -> new IllegalStateException("Couldn't find thread pool of admin connector"));
}
}
|
VirtualThreadsConfiguration
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/AbstractKeyedStateBackendBuilder.java
|
{
"start": 1495,
"end": 3843
}
|
class ____<K>
implements StateBackendBuilder<AbstractKeyedStateBackend<K>, BackendBuildingException> {
protected final Logger logger = LoggerFactory.getLogger(getClass());
protected final TaskKvStateRegistry kvStateRegistry;
protected final StateSerializerProvider<K> keySerializerProvider;
protected final ClassLoader userCodeClassLoader;
protected final int numberOfKeyGroups;
protected final KeyGroupRange keyGroupRange;
protected final ExecutionConfig executionConfig;
protected final TtlTimeProvider ttlTimeProvider;
protected final LatencyTrackingStateConfig latencyTrackingStateConfig;
protected final SizeTrackingStateConfig sizeTrackingStateConfig;
protected final StreamCompressionDecorator keyGroupCompressionDecorator;
protected final Collection<KeyedStateHandle> restoreStateHandles;
protected final CloseableRegistry cancelStreamRegistry;
public AbstractKeyedStateBackendBuilder(
TaskKvStateRegistry kvStateRegistry,
TypeSerializer<K> keySerializer,
ClassLoader userCodeClassLoader,
int numberOfKeyGroups,
KeyGroupRange keyGroupRange,
ExecutionConfig executionConfig,
TtlTimeProvider ttlTimeProvider,
LatencyTrackingStateConfig latencyTrackingStateConfig,
SizeTrackingStateConfig sizeTrackingStateConfig,
@Nonnull Collection<KeyedStateHandle> stateHandles,
StreamCompressionDecorator keyGroupCompressionDecorator,
CloseableRegistry cancelStreamRegistry) {
this.kvStateRegistry = kvStateRegistry;
this.keySerializerProvider =
StateSerializerProvider.fromNewRegisteredSerializer(keySerializer);
this.userCodeClassLoader = userCodeClassLoader;
this.numberOfKeyGroups = numberOfKeyGroups;
this.keyGroupRange = keyGroupRange;
this.executionConfig = executionConfig;
this.ttlTimeProvider = ttlTimeProvider;
this.latencyTrackingStateConfig = latencyTrackingStateConfig;
this.sizeTrackingStateConfig = sizeTrackingStateConfig;
this.keyGroupCompressionDecorator = keyGroupCompressionDecorator;
this.restoreStateHandles = stateHandles;
this.cancelStreamRegistry = cancelStreamRegistry;
}
}
|
AbstractKeyedStateBackendBuilder
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/HazelcastMapEndpointBuilderFactory.java
|
{
"start": 29001,
"end": 29348
}
|
class ____ extends AbstractEndpointBuilder implements HazelcastMapEndpointBuilder, AdvancedHazelcastMapEndpointBuilder {
public HazelcastMapEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new HazelcastMapEndpointBuilderImpl(path);
}
}
|
HazelcastMapEndpointBuilderImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java
|
{
"start": 196288,
"end": 196817
}
|
class ____ extends ParserRuleContext {
@SuppressWarnings("this-escape")
public BooleanExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_booleanExpression; }
@SuppressWarnings("this-escape")
public BooleanExpressionContext() { }
public void copyFrom(BooleanExpressionContext ctx) {
super.copyFrom(ctx);
}
}
@SuppressWarnings("CheckReturnValue")
public static
|
BooleanExpressionContext
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-main/src/test/java/org/apache/camel/spring/MyProcessor.java
|
{
"start": 988,
"end": 1495
}
|
class ____ implements Processor {
private List<Exchange> exchanges = new CopyOnWriteArrayList<>();
private String name = "James";
public List<Exchange> getExchanges() {
return exchanges;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
@Override
public void process(Exchange exchange) {
exchange.getIn().setHeader("name", getName());
exchanges.add(exchange);
}
}
|
MyProcessor
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/postgresql/ast/expr/PGExpr.java
|
{
"start": 791,
"end": 841
}
|
interface ____ extends SQLExpr, PGSQLObject {
}
|
PGExpr
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/test/MockValueJoiner.java
|
{
"start": 892,
"end": 1173
}
|
class ____ {
public static final ValueJoiner<Object, Object, String> TOSTRING_JOINER = instance("+");
public static <V1, V2> ValueJoiner<V1, V2, String> instance(final String separator) {
return (value1, value2) -> value1 + separator + value2;
}
}
|
MockValueJoiner
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/rest/messages/LogUrlResponseTest.java
|
{
"start": 1111,
"end": 1456
}
|
class ____ extends RestResponseMarshallingTestBase<LogUrlResponse> {
@Override
protected Class<LogUrlResponse> getTestResponseClass() {
return LogUrlResponse.class;
}
@Override
protected LogUrlResponse getTestResponseInstance() {
return new LogUrlResponse("http://localhost:8081/log");
}
}
|
LogUrlResponseTest
|
java
|
apache__camel
|
components/camel-openapi-java/src/test/java/org/apache/camel/openapi/RestOpenApiReaderTest.java
|
{
"start": 1462,
"end": 12007
}
|
class ____ extends CamelTestSupport {
private Logger log = LoggerFactory.getLogger(getClass());
@BindToRegistry("dummy-rest")
private DummyRestConsumerFactory factory = new DummyRestConsumerFactory();
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
rest("/hello")
.consumes("application/json")
.produces("application/json")
.get("/hi/{name}")
.description("Saying hi")
.param()
.name("name")
.type(RestParamType.path)
.dataType("string")
.description("Who is it")
.example("Donald Duck")
.endParam()
.param()
.name("filter")
.description("Filters to apply to the entity.")
.type(RestParamType.query)
.dataType("array")
.arrayType("date-time")
.endParam()
.to("log:hi")
.get("/bye/{name}")
.description("Saying bye")
.param()
.name("name")
.type(RestParamType.path)
.dataType("string")
.description("Who is it")
.example("Donald Duck")
.endParam()
.responseMessage()
.code(200)
.message("A reply number")
.responseModel(float.class)
.example("success", "123")
.example("error", "-1")
.endResponseMessage()
.to("log:bye")
.get("/array/params")
.description("Array params")
.param()
.name("string_array")
.dataType("array")
.arrayType("string")
.allowableValues("A", "B", "C")
.endParam()
.param()
.name("int_array")
.dataType("array")
.arrayType("int")
.allowableValues("1", "2", "3")
.endParam()
.param()
.name("integer_array")
.dataType("array")
.arrayType("integer")
.allowableValues("1", "2", "3")
.endParam()
.param()
.name("long_array")
.dataType("array")
.arrayType("long")
.allowableValues("1", "2", "3")
.endParam()
.param()
.name("float_array")
.dataType("array")
.arrayType("float")
.allowableValues("1.0", "2.0", "3.0")
.endParam()
.param()
.name("double_array")
.dataType("array")
.arrayType("double")
.allowableValues("1.0", "2.0", "3.0")
.endParam()
.param()
.name("boolean_array")
.dataType("array")
.arrayType("boolean")
.allowableValues("true", "false")
.endParam()
.param()
.name("byte_array")
.dataType("array")
.arrayType("byte")
.allowableValues("1", "2", "3")
.endParam()
.param()
.name("binary_array")
.dataType("array")
.arrayType("binary")
.allowableValues("1", "2", "3")
.endParam()
.param()
.name("date_array")
.dataType("array")
.arrayType("date")
.allowableValues("2023-01-01", "2023-02-02", "2023-03-03")
.endParam()
.param()
.name("datetime_array")
.dataType("array")
.arrayType("date-time")
.allowableValues("2011-12-03T10:15:30+01:00")
.endParam()
.param()
.name("password_array")
.dataType("array")
.arrayType("password")
.allowableValues("foo", "bar", "cheese")
.endParam()
.to("log:array")
.post("/bye")
.description("To update the greeting message")
.consumes("application/xml")
.produces("application/xml")
.outType(String.class)
.param()
.name("greeting")
.type(RestParamType.body)
.dataType("string")
.description("Message to use as greeting")
.example("application/xml", "<hello>Hi</hello>")
.endParam()
.to("log:bye");
rest("/tag")
.get("single")
.tag("Organisation")
.outType(String.class)
.param()
.name("body")
.type(RestParamType.body)
.dataType("string")
.description("Message body")
.endParam()
.to("log:bye");
rest("/tag")
.get("multiple/a")
.tag("Organisation,Group A")
.outType(String.class)
.param()
.name("body")
.type(RestParamType.body)
.dataType("string")
.description("Message body")
.endParam()
.to("log:bye");
rest("/tag")
.get("multiple/b")
.tag("Organisation,Group B")
.outType(String.class)
.param()
.name("body")
.type(RestParamType.body)
.dataType("string")
.description("Message body")
.endParam()
.to("log:bye");
}
};
}
@ParameterizedTest
@ValueSource(strings = { "3.0", "3.1" })
public void testReaderReadV3(String version) throws Exception {
BeanConfig config = new BeanConfig();
config.setHost("localhost:8080");
config.setSchemes(new String[] { "http" });
config.setBasePath("/api");
Info info = new Info();
config.setInfo(info);
config.setVersion(version);
RestOpenApiReader reader = new RestOpenApiReader();
OpenAPI openApi = reader.read(context, context.getRestDefinitions(), config, context.getName(),
new DefaultClassResolver());
assertNotNull(openApi);
String json = RestOpenApiSupport.getJsonFromOpenAPIAsString(openApi, config);
log.info(json);
json = json.replace("\n", " ").replaceAll("\\s+", " ");
assertTrue(json.contains("\"openapi\" : \"" + config.getVersion() + "\""));
assertTrue(json.contains("\"url\" : \"http://localhost:8080/api\""));
assertTrue(json.contains("\"/hello/bye\""));
assertTrue(json.contains("\"summary\" : \"To update the greeting message\""));
assertTrue(json.contains("\"/hello/bye/{name}\""));
assertTrue(json.contains("\"/hello/hi/{name}\""));
assertTrue(json.contains("\"type\" : \"number\""));
assertTrue(json.contains("\"format\" : \"float\""));
assertTrue(json.contains("\"example\" : \"<hello>Hi</hello>\""));
assertTrue(json.contains("\"example\" : \"Donald Duck\""));
assertTrue(json.contains("\"success\" : { \"value\" : \"123\" }"));
assertTrue(json.contains("\"error\" : { \"value\" : \"-1\" }"));
assertTrue(json.contains("\"type\" : \"array\""));
assertTrue(json.contains("\"format\" : \"date-time\""));
assertTrue(json.contains("\"enum\" : [ \"A\", \"B\", \"C\" ]"));
assertTrue(json.contains("\"enum\" : [ 1, 2, 3 ]"));
assertTrue(json.contains("\"enum\" : [ 1.0, 2.0, 3.0 ]"));
assertTrue(json.contains("\"enum\" : [ true, false ]"));
assertTrue(json.contains("\"enum\" : [ \"MQ==\", \"Mg==\", \"Mw==\" ]"));
assertTrue(json.contains("\"enum\" : [ \"2023-01-01\", \"2023-02-02\", \"2023-03-03\" ]"));
assertTrue(json.contains("\"enum\" : [ \"2011-12-03T10:15:30+01:00\" ]"));
assertTrue(json.contains("\"enum\" : [ \"foo\", \"bar\", \"cheese\" ]"));
assertTrue(json.contains("\"/hello/bye/{name}\" : { \"get\" : { \"tags\" : [ \"/hello\" ],"));
assertTrue(json.matches(".*\"/tag/single\" : \\{ \"get\" : .* \"tags\" : \\[ \"Organisation\" ],.*"));
assertTrue(
json.matches(".*\"/tag/multiple/a\" : \\{ \"get\" : .* \"tags\" : \\[ \"Organisation\", \"Group A\" ],.*"));
assertTrue(
json.matches(".*\"/tag/multiple/b\" : \\{ \"get\" : .*\"tags\" : \\[ \"Organisation\", \"Group B\" ],.*"));
assertTrue(json.contains(
"\"tags\" : [ { \"name\" : \"/hello\" }, { \"name\" : \"Organisation\" }, { \"name\" : \"Group A\" }, { \"name\" : \"Group B\" } ]"));
context.stop();
}
}
|
RestOpenApiReaderTest
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/CustomSerializationTest.java
|
{
"start": 4544,
"end": 4701
}
|
class ____ {
@Path("")
public CustomSerializationResource get() {
return new CustomSerializationResource();
}
}
}
|
Locator
|
java
|
apache__flink
|
flink-queryable-state/flink-queryable-state-runtime/src/test/java/org/apache/flink/queryablestate/itcases/AbstractQueryableStateTestBase.java
|
{
"start": 53352,
"end": 53952
}
|
class ____ implements ReduceFunction<Tuple2<Integer, Long>> {
private static final long serialVersionUID = -8651235077342052336L;
@Override
public Tuple2<Integer, Long> reduce(
Tuple2<Integer, Long> value1, Tuple2<Integer, Long> value2) throws Exception {
value1.f1 += value2.f1;
return value1;
}
}
///// General Utility Methods //////
/**
* A wrapper of the job graph that makes sure to cancel the job and wait for termination after
* the execution of every test.
*/
private static
|
SumReduce
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/ExecutionGraphPartitionReleaseTest.java
|
{
"start": 2390,
"end": 13555
}
|
class ____ {
@RegisterExtension
public static final TestExecutorExtension<ScheduledExecutorService> EXECUTOR_EXTENSION =
TestingUtils.defaultExecutorExtension();
@RegisterExtension
public static final TestingComponentMainThreadExecutor.Extension MAIN_THREAD_EXTENSION =
new TestingComponentMainThreadExecutor.Extension();
private final TestingComponentMainThreadExecutor mainThreadExecutor =
MAIN_THREAD_EXTENSION.getComponentMainThreadTestExecutor();
@Test
void testStrategyNotifiedOfFinishedVerticesAndResultsRespected() throws Exception {
// setup a simple pipeline of 3 operators with blocking partitions
final JobVertex sourceVertex = ExecutionGraphTestUtils.createNoOpVertex(1);
final JobVertex operatorVertex = ExecutionGraphTestUtils.createNoOpVertex(1);
final JobVertex sinkVertex = ExecutionGraphTestUtils.createNoOpVertex(1);
connectNewDataSetAsInput(
operatorVertex,
sourceVertex,
DistributionPattern.POINTWISE,
ResultPartitionType.BLOCKING);
connectNewDataSetAsInput(
sinkVertex,
operatorVertex,
DistributionPattern.POINTWISE,
ResultPartitionType.BLOCKING);
// setup partition tracker to intercept partition release calls
final TestingJobMasterPartitionTracker partitionTracker =
new TestingJobMasterPartitionTracker();
final Queue<ResultPartitionID> releasedPartitions = new ArrayDeque<>();
partitionTracker.setStopTrackingAndReleasePartitionsConsumer(
partitionIds -> releasedPartitions.add(partitionIds.iterator().next()));
final SchedulerBase scheduler =
createScheduler(partitionTracker, sourceVertex, operatorVertex, sinkVertex);
final ExecutionGraph executionGraph = scheduler.getExecutionGraph();
// finish vertices one after another, and verify that the appropriate partitions are
// released
mainThreadExecutor.execute(
() -> {
final Execution sourceExecution =
getCurrentExecution(sourceVertex, executionGraph);
scheduler.updateTaskExecutionState(
new TaskExecutionState(
sourceExecution.getAttemptId(), ExecutionState.FINISHED));
assertThat(releasedPartitions).isEmpty();
});
mainThreadExecutor.execute(
() -> {
final Execution sourceExecution =
getCurrentExecution(sourceVertex, executionGraph);
final Execution operatorExecution =
getCurrentExecution(operatorVertex, executionGraph);
scheduler.updateTaskExecutionState(
new TaskExecutionState(
operatorExecution.getAttemptId(), ExecutionState.FINISHED));
assertThat(releasedPartitions).hasSize(1);
assertThat(releasedPartitions.remove())
.isEqualTo(
new ResultPartitionID(
sourceExecution
.getVertex()
.getProducedPartitions()
.keySet()
.iterator()
.next(),
sourceExecution.getAttemptId()));
});
mainThreadExecutor.execute(
() -> {
final Execution operatorExecution =
getCurrentExecution(operatorVertex, executionGraph);
final Execution sinkExecution = getCurrentExecution(sinkVertex, executionGraph);
scheduler.updateTaskExecutionState(
new TaskExecutionState(
sinkExecution.getAttemptId(), ExecutionState.FINISHED));
assertThat(releasedPartitions).hasSize(1);
assertThat(releasedPartitions.remove())
.isEqualTo(
new ResultPartitionID(
operatorExecution
.getVertex()
.getProducedPartitions()
.keySet()
.iterator()
.next(),
operatorExecution.getAttemptId()));
});
}
@Test
void testStrategyNotifiedOfUnFinishedVertices() throws Exception {
// setup a pipeline of 2 failover regions (f1 -> f2), where
// f1 is just a source
// f2 consists of 3 operators (o1,o2,o3), where o1 consumes f1, and o2/o3 consume o1
final JobVertex sourceVertex = ExecutionGraphTestUtils.createNoOpVertex("source", 1);
final JobVertex operator1Vertex = ExecutionGraphTestUtils.createNoOpVertex("operator1", 1);
final JobVertex operator2Vertex = ExecutionGraphTestUtils.createNoOpVertex("operator2", 1);
final JobVertex operator3Vertex = ExecutionGraphTestUtils.createNoOpVertex("operator3", 1);
connectNewDataSetAsInput(
operator1Vertex,
sourceVertex,
DistributionPattern.POINTWISE,
ResultPartitionType.BLOCKING);
connectNewDataSetAsInput(
operator2Vertex,
operator1Vertex,
DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED);
connectNewDataSetAsInput(
operator3Vertex,
operator1Vertex,
DistributionPattern.ALL_TO_ALL,
ResultPartitionType.PIPELINED);
// setup partition tracker to intercept partition release calls
final TestingJobMasterPartitionTracker partitionTracker =
new TestingJobMasterPartitionTracker();
final Queue<ResultPartitionID> releasedPartitions = new ArrayDeque<>();
partitionTracker.setStopTrackingAndReleasePartitionsConsumer(
partitionIds -> releasedPartitions.add(partitionIds.iterator().next()));
final SchedulerBase scheduler =
createScheduler(
partitionTracker,
sourceVertex,
operator1Vertex,
operator2Vertex,
operator3Vertex);
final ExecutionGraph executionGraph = scheduler.getExecutionGraph();
mainThreadExecutor.execute(
() -> {
final Execution sourceExecution =
getCurrentExecution(sourceVertex, executionGraph);
// finish the source; this should not result in any release calls since the
// consumer o1 was not finished
scheduler.updateTaskExecutionState(
new TaskExecutionState(
sourceExecution.getAttemptId(), ExecutionState.FINISHED));
assertThat(releasedPartitions).isEmpty();
});
mainThreadExecutor.execute(
() -> {
final Execution operator1Execution =
getCurrentExecution(operator1Vertex, executionGraph);
// finish o1 and schedule the consumers (o2,o3); this should not result in any
// release calls since not all operators of the pipelined region are finished
scheduler.updateTaskExecutionState(
new TaskExecutionState(
operator1Execution.getAttemptId(), ExecutionState.FINISHED));
assertThat(releasedPartitions).isEmpty();
});
mainThreadExecutor.execute(
() -> {
final Execution operator2Execution =
getCurrentExecution(operator2Vertex, executionGraph);
// finish o2; this should not result in any release calls since o3 was not
// finished
scheduler.updateTaskExecutionState(
new TaskExecutionState(
operator2Execution.getAttemptId(), ExecutionState.FINISHED));
assertThat(releasedPartitions).isEmpty();
});
mainThreadExecutor.execute(
() -> {
final Execution operator2Execution =
getCurrentExecution(operator2Vertex, executionGraph);
// reset o2
operator2Execution.getVertex().resetForNewExecution();
assertThat(releasedPartitions).isEmpty();
});
mainThreadExecutor.execute(
() -> {
final Execution operator3Execution =
getCurrentExecution(operator3Vertex, executionGraph);
// finish o3; this should not result in any release calls since o2 was reset
scheduler.updateTaskExecutionState(
new TaskExecutionState(
operator3Execution.getAttemptId(), ExecutionState.FINISHED));
assertThat(releasedPartitions).isEmpty();
});
}
private static Execution getCurrentExecution(
final JobVertex jobVertex, final ExecutionGraph executionGraph) {
return executionGraph
.getJobVertex(jobVertex.getID())
.getTaskVertices()[0]
.getCurrentExecutionAttempt();
}
private SchedulerBase createScheduler(
final JobMasterPartitionTracker partitionTracker, final JobVertex... vertices)
throws Exception {
final JobGraph jobGraph = JobGraphTestUtils.batchJobGraph(vertices);
final SchedulerBase scheduler =
new DefaultSchedulerBuilder(
jobGraph,
mainThreadExecutor.getMainThreadExecutor(),
EXECUTOR_EXTENSION.getExecutor())
.setExecutionSlotAllocatorFactory(
SchedulerTestingUtils.newSlotSharingExecutionSlotAllocatorFactory())
.setPartitionTracker(partitionTracker)
.build();
mainThreadExecutor.execute(scheduler::startScheduling);
return scheduler;
}
}
|
ExecutionGraphPartitionReleaseTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/binding/annotations/embedded/Country.java
|
{
"start": 474,
"end": 803
}
|
class ____ implements Serializable {
private String iso2;
private String name;
public String getIso2() {
return iso2;
}
public void setIso2(String iso2) {
this.iso2 = iso2;
}
@Column(name = "countryName")
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
|
Country
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/bootstrap/BootstrapChecksTests.java
|
{
"start": 2425,
"end": 14031
}
|
class ____ extends AbstractBootstrapCheckTestCase {
public void testNonProductionMode() throws NodeValidationException {
// nothing should happen since we are in non-production mode
final List<TransportAddress> transportAddresses = new ArrayList<>();
for (int i = 0; i < randomIntBetween(1, 8); i++) {
TransportAddress localTransportAddress = new TransportAddress(InetAddress.getLoopbackAddress(), i);
transportAddresses.add(localTransportAddress);
}
TransportAddress publishAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 0);
BoundTransportAddress boundTransportAddress = mock(BoundTransportAddress.class);
when(boundTransportAddress.boundAddresses()).thenReturn(transportAddresses.toArray(new TransportAddress[0]));
when(boundTransportAddress.publishAddress()).thenReturn(publishAddress);
BootstrapChecks.check(emptyContext, boundTransportAddress, Collections.emptyList());
}
public void testNoLogMessageInNonProductionMode() throws NodeValidationException {
final Logger logger = mock(Logger.class);
BootstrapChecks.check(emptyContext, false, Collections.emptyList(), logger);
verifyNoMoreInteractions(logger);
}
public void testLogMessageInProductionMode() throws NodeValidationException {
final Logger logger = mock(Logger.class);
BootstrapChecks.check(emptyContext, true, Collections.emptyList(), logger);
verify(logger).info("bound or publishing to a non-loopback address, enforcing bootstrap checks");
verifyNoMoreInteractions(logger);
}
public void testEnforceLimitsWhenBoundToNonLocalAddress() {
final List<TransportAddress> transportAddresses = new ArrayList<>();
final TransportAddress nonLocalTransportAddress = buildNewFakeTransportAddress();
transportAddresses.add(nonLocalTransportAddress);
for (int i = 0; i < randomIntBetween(0, 7); i++) {
final TransportAddress randomTransportAddress = randomBoolean()
? buildNewFakeTransportAddress()
: new TransportAddress(InetAddress.getLoopbackAddress(), i);
transportAddresses.add(randomTransportAddress);
}
final TransportAddress publishAddress = randomBoolean()
? buildNewFakeTransportAddress()
: new TransportAddress(InetAddress.getLoopbackAddress(), 0);
final BoundTransportAddress boundTransportAddress = mock(BoundTransportAddress.class);
Collections.shuffle(transportAddresses, random());
when(boundTransportAddress.boundAddresses()).thenReturn(transportAddresses.toArray(new TransportAddress[0]));
when(boundTransportAddress.publishAddress()).thenReturn(publishAddress);
final String discoveryType = randomFrom(MULTI_NODE_DISCOVERY_TYPE, SINGLE_NODE_DISCOVERY_TYPE);
assertEquals(
BootstrapChecks.enforceLimits(boundTransportAddress, discoveryType, FALSE::booleanValue),
SINGLE_NODE_DISCOVERY_TYPE.equals(discoveryType) == false
);
}
public void testEnforceLimitsWhenPublishingToNonLocalAddress() {
final List<TransportAddress> transportAddresses = new ArrayList<>();
for (int i = 0; i < randomIntBetween(1, 8); i++) {
final TransportAddress randomTransportAddress = buildNewFakeTransportAddress();
transportAddresses.add(randomTransportAddress);
}
final TransportAddress publishAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 0);
final BoundTransportAddress boundTransportAddress = mock(BoundTransportAddress.class);
when(boundTransportAddress.boundAddresses()).thenReturn(transportAddresses.toArray(new TransportAddress[0]));
when(boundTransportAddress.publishAddress()).thenReturn(publishAddress);
final String discoveryType = randomFrom(MULTI_NODE_DISCOVERY_TYPE, SINGLE_NODE_DISCOVERY_TYPE);
assertEquals(
BootstrapChecks.enforceLimits(boundTransportAddress, discoveryType, FALSE::booleanValue),
SINGLE_NODE_DISCOVERY_TYPE.equals(discoveryType) == false
);
}
public void testDoNotEnforceLimitsWhenSnapshotBuild() {
final List<TransportAddress> transportAddresses = new ArrayList<>();
for (int i = 0; i < randomIntBetween(1, 8); i++) {
final TransportAddress randomTransportAddress = buildNewFakeTransportAddress();
transportAddresses.add(randomTransportAddress);
}
final TransportAddress publishAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 0);
final BoundTransportAddress boundTransportAddress = mock(BoundTransportAddress.class);
when(boundTransportAddress.boundAddresses()).thenReturn(transportAddresses.toArray(new TransportAddress[0]));
when(boundTransportAddress.publishAddress()).thenReturn(publishAddress);
assertThat(BootstrapChecks.enforceLimits(boundTransportAddress, MULTI_NODE_DISCOVERY_TYPE, TRUE::booleanValue), is(false));
}
public void testExceptionAggregation() {
final List<BootstrapCheck> checks = Arrays.asList(new BootstrapCheck() {
@Override
public BootstrapCheckResult check(BootstrapContext context) {
return BootstrapCheck.BootstrapCheckResult.failure("first");
}
@Override
public ReferenceDocs referenceDocs() {
return ReferenceDocs.BOOTSTRAP_CHECKS;
}
}, new BootstrapCheck() {
@Override
public BootstrapCheckResult check(BootstrapContext context) {
return BootstrapCheck.BootstrapCheckResult.failure("second");
}
@Override
public ReferenceDocs referenceDocs() {
return ReferenceDocs.BOOTSTRAP_CHECKS;
}
});
final NodeValidationException e = expectThrows(
NodeValidationException.class,
() -> BootstrapChecks.check(emptyContext, true, checks)
);
assertThat(
e,
hasToString(
allOf(
containsString("[2] bootstrap checks failed"),
containsString("You must address the points described in the following [2] lines before starting Elasticsearch"),
containsString("bootstrap check failure [1] of [2]:"),
containsString("first"),
containsString("bootstrap check failure [2] of [2]:"),
containsString("second"),
containsString("For more information see [https://www.elastic.co/docs/")
)
)
);
final Throwable[] suppressed = e.getSuppressed();
assertThat(suppressed.length, equalTo(2));
assertThat(suppressed[0], instanceOf(IllegalStateException.class));
assertThat(suppressed[0], hasToString(containsString("first")));
assertThat(suppressed[1], instanceOf(IllegalStateException.class));
assertThat(suppressed[1], hasToString(containsString("second")));
}
public void testHeapSizeCheck() throws NodeValidationException {
final int initial = randomIntBetween(0, Integer.MAX_VALUE - 1);
final int max = randomIntBetween(initial + 1, Integer.MAX_VALUE);
final AtomicLong initialHeapSize = new AtomicLong(initial);
final AtomicLong maxHeapSize = new AtomicLong(max);
final boolean isMemoryLocked = randomBoolean();
final BootstrapChecks.HeapSizeCheck check = new BootstrapChecks.HeapSizeCheck() {
@Override
long getInitialHeapSize() {
return initialHeapSize.get();
}
@Override
long getMaxHeapSize() {
return maxHeapSize.get();
}
@Override
boolean isMemoryLocked() {
return isMemoryLocked;
}
};
final NodeValidationException e = expectThrows(
NodeValidationException.class,
() -> BootstrapChecks.check(emptyContext, true, Collections.singletonList(check))
);
assertThat(
e.getMessage(),
containsString(
"initial heap size [" + initialHeapSize.get() + "] " + "not equal to maximum heap size [" + maxHeapSize.get() + "]"
)
);
assertThat(e.getMessage(), containsString("; for more information see [https://www.elastic.co/docs/"));
final String memoryLockingMessage = "and prevents memory locking from locking the entire heap";
final Matcher<String> memoryLockingMatcher;
if (isMemoryLocked) {
memoryLockingMatcher = containsString(memoryLockingMessage);
} else {
memoryLockingMatcher = not(containsString(memoryLockingMessage));
}
assertThat(e.getMessage(), memoryLockingMatcher);
initialHeapSize.set(maxHeapSize.get());
BootstrapChecks.check(emptyContext, true, Collections.singletonList(check));
// nothing should happen if the initial heap size or the max
// heap size is not available
if (randomBoolean()) {
initialHeapSize.set(0);
} else {
maxHeapSize.set(0);
}
BootstrapChecks.check(emptyContext, true, Collections.singletonList(check));
}
public void testFileDescriptorLimits() throws NodeValidationException {
final boolean osX = randomBoolean(); // simulates OS X versus non-OS X
final int limit = osX ? 10240 : 65535;
final AtomicLong maxFileDescriptorCount = new AtomicLong(randomIntBetween(1, limit - 1));
final BootstrapChecks.FileDescriptorCheck check;
if (osX) {
check = new BootstrapChecks.OsXFileDescriptorCheck() {
@Override
long getMaxFileDescriptorCount() {
return maxFileDescriptorCount.get();
}
};
} else {
check = new BootstrapChecks.FileDescriptorCheck() {
@Override
long getMaxFileDescriptorCount() {
return maxFileDescriptorCount.get();
}
};
}
final NodeValidationException e = expectThrows(
NodeValidationException.class,
() -> BootstrapChecks.check(emptyContext, true, Collections.singletonList(check))
);
assertThat(e.getMessage(), containsString("max file descriptors"));
assertThat(e.getMessage(), containsString("; for more information see [https://www.elastic.co/docs/"));
maxFileDescriptorCount.set(randomIntBetween(limit + 1, Integer.MAX_VALUE));
BootstrapChecks.check(emptyContext, true, Collections.singletonList(check));
// nothing should happen if current file descriptor count is
// not available
maxFileDescriptorCount.set(-1);
BootstrapChecks.check(emptyContext, true, Collections.singletonList(check));
}
public void testFileDescriptorLimitsThrowsOnInvalidLimit() {
final IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> new BootstrapChecks.FileDescriptorCheck(-randomIntBetween(0, Integer.MAX_VALUE))
);
assertThat(e.getMessage(), containsString("limit must be positive but was"));
}
public void testMlockallCheck() throws NodeValidationException {
|
BootstrapChecksTests
|
java
|
alibaba__fastjson
|
src/main/java/com/alibaba/fastjson/parser/deserializer/Jdk8DateCodec.java
|
{
"start": 965,
"end": 27596
}
|
class ____ extends ContextObjectDeserializer implements ObjectSerializer, ContextObjectSerializer, ObjectDeserializer {
public static final Jdk8DateCodec instance = new Jdk8DateCodec();
private final static String defaultPatttern = "yyyy-MM-dd HH:mm:ss";
private final static DateTimeFormatter defaultFormatter = DateTimeFormatter.ofPattern(defaultPatttern);
private final static DateTimeFormatter defaultFormatter_23 = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");
private final static DateTimeFormatter formatter_dt19_tw = DateTimeFormatter.ofPattern("yyyy/MM/dd HH:mm:ss");
private final static DateTimeFormatter formatter_dt19_cn = DateTimeFormatter.ofPattern("yyyy年M月d日 HH:mm:ss");
private final static DateTimeFormatter formatter_dt19_cn_1 = DateTimeFormatter.ofPattern("yyyy年M月d日 H时m分s秒");
private final static DateTimeFormatter formatter_dt19_kr = DateTimeFormatter.ofPattern("yyyy년M월d일 HH:mm:ss");
private final static DateTimeFormatter formatter_dt19_us = DateTimeFormatter.ofPattern("MM/dd/yyyy HH:mm:ss");
private final static DateTimeFormatter formatter_dt19_eur = DateTimeFormatter.ofPattern("dd/MM/yyyy HH:mm:ss");
private final static DateTimeFormatter formatter_dt19_de = DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm:ss");
private final static DateTimeFormatter formatter_dt19_in = DateTimeFormatter.ofPattern("dd-MM-yyyy HH:mm:ss");
private final static DateTimeFormatter formatter_d8 = DateTimeFormatter.ofPattern("yyyyMMdd");
private final static DateTimeFormatter formatter_d10_tw = DateTimeFormatter.ofPattern("yyyy/MM/dd");
private final static DateTimeFormatter formatter_d10_cn = DateTimeFormatter.ofPattern("yyyy年M月d日");
private final static DateTimeFormatter formatter_d10_kr = DateTimeFormatter.ofPattern("yyyy년M월d일");
private final static DateTimeFormatter formatter_d10_us = DateTimeFormatter.ofPattern("MM/dd/yyyy");
private final static DateTimeFormatter formatter_d10_eur = DateTimeFormatter.ofPattern("dd/MM/yyyy");
private final static DateTimeFormatter formatter_d10_de = DateTimeFormatter.ofPattern("dd.MM.yyyy");
private final static DateTimeFormatter formatter_d10_in = DateTimeFormatter.ofPattern("dd-MM-yyyy");
private final static DateTimeFormatter ISO_FIXED_FORMAT =
DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final static String formatter_iso8601_pattern = "yyyy-MM-dd'T'HH:mm:ss";
private final static String formatter_iso8601_pattern_23 = "yyyy-MM-dd'T'HH:mm:ss.SSS";
private final static String formatter_iso8601_pattern_29 = "yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS";
private final static DateTimeFormatter formatter_iso8601 = DateTimeFormatter.ofPattern(formatter_iso8601_pattern);
@SuppressWarnings("unchecked")
public <T> T deserialze(DefaultJSONParser parser, Type type, Object fieldName, String format, int feature) {
JSONLexer lexer = parser.lexer;
if (lexer.token() == JSONToken.NULL){
lexer.nextToken();
return null;
}
if (lexer.token() == JSONToken.LITERAL_STRING) {
String text = lexer.stringVal();
lexer.nextToken();
DateTimeFormatter formatter = null;
if (format != null) {
if (defaultPatttern.equals(format)) {
formatter = defaultFormatter;
} else {
formatter = DateTimeFormatter.ofPattern(format);
}
}
if ("".equals(text)) {
return null;
}
if (type == LocalDateTime.class) {
LocalDateTime localDateTime;
if (text.length() == 10 || text.length() == 8) {
LocalDate localDate = parseLocalDate(text, format, formatter);
localDateTime = LocalDateTime.of(localDate, LocalTime.MIN);
} else {
localDateTime = parseDateTime(text, formatter);
}
return (T) localDateTime;
} else if (type == LocalDate.class) {
LocalDate localDate;
if (text.length() == 23) {
LocalDateTime localDateTime = LocalDateTime.parse(text);
localDate = LocalDate.of(localDateTime.getYear(), localDateTime.getMonthValue(),
localDateTime.getDayOfMonth());
} else {
localDate = parseLocalDate(text, format, formatter);
}
return (T) localDate;
} else if (type == LocalTime.class) {
LocalTime localTime;
if (text.length() == 23) {
LocalDateTime localDateTime = LocalDateTime.parse(text);
localTime = LocalTime.of(localDateTime.getHour(), localDateTime.getMinute(),
localDateTime.getSecond(), localDateTime.getNano());
} else {
boolean digit = true;
for (int i = 0; i < text.length(); ++i) {
char ch = text.charAt(i);
if (ch < '0' || ch > '9') {
digit = false;
break;
}
}
if (digit && text.length() > 8 && text.length() < 19) {
long epochMillis = Long.parseLong(text);
localTime = LocalDateTime
.ofInstant(
Instant.ofEpochMilli(epochMillis),
JSON.defaultTimeZone.toZoneId())
.toLocalTime();
} else {
localTime = LocalTime.parse(text);
}
}
return (T) localTime;
} else if (type == ZonedDateTime.class) {
if (formatter == defaultFormatter) {
formatter = ISO_FIXED_FORMAT;
}
if (formatter == null) {
if (text.length() <= 19) {
JSONScanner s = new JSONScanner(text);
TimeZone timeZone = parser.lexer.getTimeZone();
s.setTimeZone(timeZone);
boolean match = s.scanISO8601DateIfMatch(false);
if (match) {
Date date = s.getCalendar().getTime();
return (T) ZonedDateTime.ofInstant(date.toInstant(), timeZone.toZoneId());
}
}
}
ZonedDateTime zonedDateTime = parseZonedDateTime(text, formatter);
return (T) zonedDateTime;
} else if (type == OffsetDateTime.class) {
OffsetDateTime offsetDateTime = OffsetDateTime.parse(text);
return (T) offsetDateTime;
} else if (type == OffsetTime.class) {
OffsetTime offsetTime = OffsetTime.parse(text);
return (T) offsetTime;
} else if (type == ZoneId.class) {
ZoneId offsetTime = ZoneId.of(text);
return (T) offsetTime;
} else if (type == Period.class) {
Period period = Period.parse(text);
return (T) period;
} else if (type == Duration.class) {
Duration duration = Duration.parse(text);
return (T) duration;
} else if (type == Instant.class) {
boolean digit = true;
for (int i = 0; i < text.length(); ++i) {
char ch = text.charAt(i);
if (ch < '0' || ch > '9') {
digit = false;
break;
}
}
if (digit && text.length() > 8 && text.length() < 19) {
long epochMillis = Long.parseLong(text);
return (T) Instant.ofEpochMilli(epochMillis);
}
Instant instant = Instant.parse(text);
return (T) instant;
}
} else if (lexer.token() == JSONToken.LITERAL_INT) {
long millis = lexer.longValue();
lexer.nextToken();
if ("unixtime".equals(format)) {
millis *= 1000;
} else if ("yyyyMMddHHmmss".equals(format)) {
int yyyy = (int) (millis / 10000000000L);
int MM = (int) ((millis / 100000000L) % 100);
int dd = (int) ((millis / 1000000L) % 100);
int HH = (int) ((millis / 10000L) % 100);
int mm = (int) ((millis / 100L) % 100);
int ss = (int) (millis % 100);
if (type == LocalDateTime.class) {
return (T) LocalDateTime.of(yyyy, MM, dd, HH, mm, ss);
}
}
if (type == LocalDateTime.class) {
return (T) LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), JSON.defaultTimeZone.toZoneId());
}
if (type == LocalDate.class) {
return (T) LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), JSON.defaultTimeZone.toZoneId()).toLocalDate();
}
if (type == LocalTime.class) {
return (T) LocalDateTime.ofInstant(Instant.ofEpochMilli(millis), JSON.defaultTimeZone.toZoneId()).toLocalTime();
}
if (type == ZonedDateTime.class) {
return (T) ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), JSON.defaultTimeZone.toZoneId());
}
if (type == Instant.class) {
return (T) Instant.ofEpochMilli(millis);
}
throw new UnsupportedOperationException();
} else if (lexer.token() == JSONToken.LBRACE) {
JSONObject object = parser.parseObject();
if (type == Instant.class) {
Object epochSecond = object.get("epochSecond");
Object nano = object.get("nano");
if (epochSecond instanceof Number && nano instanceof Number) {
return (T) Instant.ofEpochSecond(
TypeUtils.longExtractValue((Number) epochSecond)
, TypeUtils.longExtractValue((Number) nano));
}
if (epochSecond instanceof Number) {
return (T) Instant.ofEpochSecond(
TypeUtils.longExtractValue((Number) epochSecond));
}
} else if (type == Duration.class) {
Long seconds = object.getLong("seconds");
if (seconds != null) {
long nanos = object.getLongValue("nano");
return (T) Duration.ofSeconds(seconds, nanos);
}
}
} else {
throw new UnsupportedOperationException();
}
return null;
}
protected LocalDateTime parseDateTime(String text, DateTimeFormatter formatter) {
if (formatter == null) {
if (text.length() == 19) {
char c4 = text.charAt(4);
char c7 = text.charAt(7);
char c10 = text.charAt(10);
char c13 = text.charAt(13);
char c16 = text.charAt(16);
if (c13 == ':' && c16 == ':') {
if (c4 == '-' && c7 == '-') {
if (c10 == 'T') {
formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
} else if (c10 == ' ') {
formatter = defaultFormatter;
}
} else if (c4 == '/' && c7 == '/') { // tw yyyy/mm/dd
formatter = formatter_dt19_tw;
} else {
char c0 = text.charAt(0);
char c1 = text.charAt(1);
char c2 = text.charAt(2);
char c3 = text.charAt(3);
char c5 = text.charAt(5);
if (c2 == '/' && c5 == '/') { // mm/dd/yyyy or mm/dd/yyyy
int v0 = (c0 - '0') * 10 + (c1 - '0');
int v1 = (c3 - '0') * 10 + (c4 - '0');
if (v0 > 12) {
formatter = formatter_dt19_eur;
} else if (v1 > 12) {
formatter = formatter_dt19_us;
} else {
String country = Locale.getDefault().getCountry();
if (country.equals("US")) {
formatter = formatter_dt19_us;
} else if (country.equals("BR") //
|| country.equals("AU")) {
formatter = formatter_dt19_eur;
}
}
} else if (c2 == '.' && c5 == '.') { // dd.mm.yyyy
formatter = formatter_dt19_de;
} else if (c2 == '-' && c5 == '-') { // dd-mm-yyyy
formatter = formatter_dt19_in;
}
}
}
} else if (text.length() == 23) {
char c4 = text.charAt(4);
char c7 = text.charAt(7);
char c10 = text.charAt(10);
char c13 = text.charAt(13);
char c16 = text.charAt(16);
char c19 = text.charAt(19);
if (c13 == ':'
&& c16 == ':'
&& c4 == '-'
&& c7 == '-'
&& c10 == ' '
&& c19 == '.'
) {
formatter = defaultFormatter_23;
}
}
if (text.length() >= 17) {
char c4 = text.charAt(4);
if (c4 == '年') {
if (text.charAt(text.length() - 1) == '秒') {
formatter = formatter_dt19_cn_1;
} else {
formatter = formatter_dt19_cn;
}
} else if (c4 == '년') {
formatter = formatter_dt19_kr;
}
}
}
if (formatter == null) {
JSONScanner dateScanner = new JSONScanner(text);
if (dateScanner.scanISO8601DateIfMatch(false)) {
Instant instant = dateScanner.getCalendar().toInstant();
return LocalDateTime.ofInstant(instant, ZoneId.systemDefault());
}
boolean digit = true;
for (int i = 0; i < text.length(); ++i) {
char ch = text.charAt(i);
if (ch < '0' || ch > '9') {
digit = false;
break;
}
}
if (digit && text.length() > 8 && text.length() < 19) {
long epochMillis = Long.parseLong(text);
return LocalDateTime.ofInstant(Instant.ofEpochMilli(epochMillis), JSON.defaultTimeZone.toZoneId());
}
}
return formatter == null ? //
LocalDateTime.parse(text) //
: LocalDateTime.parse(text, formatter);
}
protected LocalDate parseLocalDate(String text, String format, DateTimeFormatter formatter) {
if (formatter == null) {
if (text.length() == 8) {
formatter = formatter_d8;
}
if (text.length() == 10) {
char c4 = text.charAt(4);
char c7 = text.charAt(7);
if (c4 == '/' && c7 == '/') { // tw yyyy/mm/dd
formatter = formatter_d10_tw;
}
char c0 = text.charAt(0);
char c1 = text.charAt(1);
char c2 = text.charAt(2);
char c3 = text.charAt(3);
char c5 = text.charAt(5);
if (c2 == '/' && c5 == '/') { // mm/dd/yyyy or mm/dd/yyyy
int v0 = (c0 - '0') * 10 + (c1 - '0');
int v1 = (c3 - '0') * 10 + (c4 - '0');
if (v0 > 12) {
formatter = formatter_d10_eur;
} else if (v1 > 12) {
formatter = formatter_d10_us;
} else {
String country = Locale.getDefault().getCountry();
if (country.equals("US")) {
formatter = formatter_d10_us;
} else if (country.equals("BR") //
|| country.equals("AU")) {
formatter = formatter_d10_eur;
}
}
} else if (c2 == '.' && c5 == '.') { // dd.mm.yyyy
formatter = formatter_d10_de;
} else if (c2 == '-' && c5 == '-') { // dd-mm-yyyy
formatter = formatter_d10_in;
}
}
if (text.length() >= 9) {
char c4 = text.charAt(4);
if (c4 == '年') {
formatter = formatter_d10_cn;
} else if (c4 == '년') {
formatter = formatter_d10_kr;
}
}
boolean digit = true;
for (int i = 0; i < text.length(); ++i) {
char ch = text.charAt(i);
if (ch < '0' || ch > '9') {
digit = false;
break;
}
}
if (digit && text.length() > 8 && text.length() < 19) {
long epochMillis = Long.parseLong(text);
return LocalDateTime
.ofInstant(
Instant.ofEpochMilli(epochMillis),
JSON.defaultTimeZone.toZoneId())
.toLocalDate();
}
}
return formatter == null ? //
LocalDate.parse(text) //
: LocalDate.parse(text, formatter);
}
protected ZonedDateTime parseZonedDateTime(String text, DateTimeFormatter formatter) {
if (formatter == null) {
if (text.length() == 19) {
char c4 = text.charAt(4);
char c7 = text.charAt(7);
char c10 = text.charAt(10);
char c13 = text.charAt(13);
char c16 = text.charAt(16);
if (c13 == ':' && c16 == ':') {
if (c4 == '-' && c7 == '-') {
if (c10 == 'T') {
formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME;
} else if (c10 == ' ') {
formatter = defaultFormatter;
}
} else if (c4 == '/' && c7 == '/') { // tw yyyy/mm/dd
formatter = formatter_dt19_tw;
} else {
char c0 = text.charAt(0);
char c1 = text.charAt(1);
char c2 = text.charAt(2);
char c3 = text.charAt(3);
char c5 = text.charAt(5);
if (c2 == '/' && c5 == '/') { // mm/dd/yyyy or mm/dd/yyyy
int v0 = (c0 - '0') * 10 + (c1 - '0');
int v1 = (c3 - '0') * 10 + (c4 - '0');
if (v0 > 12) {
formatter = formatter_dt19_eur;
} else if (v1 > 12) {
formatter = formatter_dt19_us;
} else {
String country = Locale.getDefault().getCountry();
if (country.equals("US")) {
formatter = formatter_dt19_us;
} else if (country.equals("BR") //
|| country.equals("AU")) {
formatter = formatter_dt19_eur;
}
}
} else if (c2 == '.' && c5 == '.') { // dd.mm.yyyy
formatter = formatter_dt19_de;
} else if (c2 == '-' && c5 == '-') { // dd-mm-yyyy
formatter = formatter_dt19_in;
}
}
}
}
if (text.length() >= 17) {
char c4 = text.charAt(4);
if (c4 == '年') {
if (text.charAt(text.length() - 1) == '秒') {
formatter = formatter_dt19_cn_1;
} else {
formatter = formatter_dt19_cn;
}
} else if (c4 == '년') {
formatter = formatter_dt19_kr;
}
}
boolean digit = true;
for (int i = 0; i < text.length(); ++i) {
char ch = text.charAt(i);
if (ch < '0' || ch > '9') {
digit = false;
break;
}
}
if (digit && text.length() > 8 && text.length() < 19) {
long epochMillis = Long.parseLong(text);
return ZonedDateTime.ofInstant(Instant.ofEpochMilli(epochMillis), JSON.defaultTimeZone.toZoneId());
}
}
return formatter == null ? //
ZonedDateTime.parse(text) //
: ZonedDateTime.parse(text, formatter);
}
public int getFastMatchToken() {
return JSONToken.LITERAL_STRING;
}
public void write(JSONSerializer serializer, Object object, Object fieldName, Type fieldType,
int features) throws IOException {
SerializeWriter out = serializer.out;
if (object == null) {
out.writeNull();
} else {
if (fieldType == null) {
fieldType = object.getClass();
}
if (fieldType == LocalDateTime.class) {
final int mask = SerializerFeature.UseISO8601DateFormat.getMask();
LocalDateTime dateTime = (LocalDateTime) object;
String format = serializer.getDateFormatPattern();
if (format == null) {
if ((features & mask) != 0 || serializer.isEnabled(SerializerFeature.UseISO8601DateFormat)) {
format = formatter_iso8601_pattern;
} else if (serializer.isEnabled(SerializerFeature.WriteDateUseDateFormat)) {
if (serializer.getFastJsonConfigDateFormatPattern() != null &&
serializer.getFastJsonConfigDateFormatPattern().length() > 0){
format = serializer.getFastJsonConfigDateFormatPattern();
}else{
format = JSON.DEFFAULT_DATE_FORMAT;
}
} else {
int nano = dateTime.getNano();
if (nano == 0) {
format = formatter_iso8601_pattern;
} else if (nano % 1000000 == 0) {
format = formatter_iso8601_pattern_23;
} else {
format = formatter_iso8601_pattern_29;
}
}
}
if (format != null) {
write(out, dateTime, format);
} else {
out.writeLong(dateTime.atZone(JSON.defaultTimeZone.toZoneId()).toInstant().toEpochMilli());
}
} else {
out.writeString(object.toString());
}
}
}
public void write(JSONSerializer serializer, Object object, BeanContext context) throws IOException {
SerializeWriter out = serializer.out;
String format = context.getFormat();
write(out, (TemporalAccessor) object, format);
}
private void write(SerializeWriter out, TemporalAccessor object, String format) {
DateTimeFormatter formatter;
if ("unixtime".equals(format)) {
Instant instant = null;
if (object instanceof ChronoZonedDateTime) {
long seconds = ((ChronoZonedDateTime) object).toEpochSecond();
out.writeInt((int) seconds);
return;
}
if (object instanceof LocalDateTime) {
long seconds = ((LocalDateTime) object).atZone(JSON.defaultTimeZone.toZoneId()).toEpochSecond();
out.writeInt((int) seconds);
return;
}
}
if ("millis".equals(format)) {
Instant instant = null;
if (object instanceof ChronoZonedDateTime) {
instant = ((ChronoZonedDateTime) object).toInstant();
} else if (object instanceof LocalDateTime) {
instant = ((LocalDateTime) object).atZone(JSON.defaultTimeZone.toZoneId()).toInstant();
}
if (instant != null) {
long millis = instant.toEpochMilli();
out.writeLong(millis);
return;
}
}
if (format == formatter_iso8601_pattern) {
formatter = formatter_iso8601;
} else {
formatter = DateTimeFormatter.ofPattern(format);
}
String text = formatter.format((TemporalAccessor) object);
out.writeString(text);
}
public static Object castToLocalDateTime(Object value, String format) {
if (value == null) {
return null;
}
if (format == null) {
format = "yyyy-MM-dd HH:mm:ss";
}
DateTimeFormatter df = DateTimeFormatter.ofPattern(format);
return LocalDateTime.parse(value.toString(), df);
}
}
|
Jdk8DateCodec
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/threadpool/support/eager/EagerThreadPool.java
|
{
"start": 2037,
"end": 3077
}
|
class ____ implements ThreadPool {
@Override
public Executor getExecutor(URL url) {
String name =
url.getParameter(THREAD_NAME_KEY, (String) url.getAttribute(THREAD_NAME_KEY, DEFAULT_THREAD_NAME));
int cores = url.getParameter(CORE_THREADS_KEY, DEFAULT_CORE_THREADS);
int threads = url.getParameter(THREADS_KEY, Integer.MAX_VALUE);
int queues = url.getParameter(QUEUES_KEY, DEFAULT_QUEUES);
int alive = url.getParameter(ALIVE_KEY, DEFAULT_ALIVE);
// init queue and executor
TaskQueue<Runnable> taskQueue = new TaskQueue<>(queues <= 0 ? 1 : queues);
EagerThreadPoolExecutor executor = new EagerThreadPoolExecutor(
cores,
threads,
alive,
TimeUnit.MILLISECONDS,
taskQueue,
new NamedInternalThreadFactory(name, true),
new AbortPolicyWithReport(name, url));
taskQueue.setExecutor(executor);
return executor;
}
}
|
EagerThreadPool
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/filter/JsonIncludeTest.java
|
{
"start": 1874,
"end": 2231
}
|
class ____
{
String _a = "a", _b = "b";
MixedBean() { }
public String getA() { return _a; }
@JsonInclude(JsonInclude.Include.NON_NULL)
public String getB() { return _b; }
}
// to ensure that default values work for collections as well
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
static
|
MixedBean
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsAnalyzeTableTest.java
|
{
"start": 121,
"end": 610
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "analyze table t partition(pt='1') compute statistics";
assertEquals("ANALYZE TABLE t PARTITION (pt = '1') COMPUTE STATISTICS", SQLUtils.formatOdps(sql));
}
public void test_no_partition() throws Exception {
String sql = "analyze table t compute statistics";
assertEquals("ANALYZE TABLE t COMPUTE STATISTICS", SQLUtils.formatOdps(sql));
}
}
|
OdpsAnalyzeTableTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/interceptor/merge/MergeAuditingInterceptorTest.java
|
{
"start": 779,
"end": 1335
}
|
class ____ {
@Test
void test(EntityManagerFactoryScope scope) {
Thing t = scope.fromTransaction( em -> {
Thing thing = new Thing();
thing.name = "Hibernate";
em.persist( thing );
return thing;
} );
scope.inTransaction( em -> {
t.name = "Hibernate ORM";
Thing thing = em.merge( t );
assertEquals( 1, MergeAuditingInterceptor.auditTrail.size() );
assertEquals( "name changed from Hibernate to Hibernate ORM for " + t.id,
MergeAuditingInterceptor.auditTrail.get( 0 ) );
} );
}
@Entity
static
|
MergeAuditingInterceptorTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/json/JsonPathValueAssertTests.java
|
{
"start": 7987,
"end": 10281
}
|
class ____ {
@Test
void isEmptyWithEmptyString() {
assertThat(forValue("")).isEmpty();
}
@Test
void isEmptyWithNull() {
assertThat(forValue(null)).isEmpty();
}
@Test
void isEmptyWithEmptyArray() {
assertThat(forValue(Collections.emptyList())).isEmpty();
}
@Test
void isEmptyWithEmptyObject() {
assertThat(forValue(Collections.emptyMap())).isEmpty();
}
@Test
void isEmptyWithWhitespace() {
AssertProvider<JsonPathValueAssert> actual = forValue(" ");
assertThatExceptionOfType(AssertionError.class)
.isThrownBy(() -> assertThat(actual).isEmpty())
.satisfies(hasFailedEmptyCheck(" "));
}
@Test
void isNotEmptyWithString() {
assertThat(forValue("test")).isNotEmpty();
}
@Test
void isNotEmptyWithArray() {
assertThat(forValue(List.of("test"))).isNotEmpty();
}
@Test
void isNotEmptyWithObject() {
assertThat(forValue(Map.of("test", "value"))).isNotEmpty();
}
private Consumer<AssertionError> hasFailedEmptyCheck(Object actual) {
return error -> assertThat(error.getMessage()).containsSubsequence("Expected value at JSON path \"$.test\":",
"" + StringUtils.quoteIfString(actual), "To be empty");
}
}
private Consumer<AssertionError> hasFailedToBeOfType(Object actual, String expectedDescription) {
return error -> assertThat(error.getMessage()).containsSubsequence("Expected value at JSON path \"$.test\":",
"" + StringUtils.quoteIfString(actual), "To be " + expectedDescription, "But was:", actual.getClass().getName());
}
private Consumer<AssertionError> hasFailedToBeOfTypeWhenNull(String expectedDescription) {
return error -> assertThat(error.getMessage()).containsSubsequence("Expected value at JSON path \"$.test\":", "null",
"To be " + expectedDescription);
}
private Consumer<AssertionError> hasFailedToConvertToType(Object actual, Class<?> targetType) {
return error -> assertThat(error.getMessage()).containsSubsequence("Expected value at JSON path \"$.test\":",
"" + StringUtils.quoteIfString(actual), "To convert successfully to:", targetType.getTypeName(), "But it failed:");
}
private AssertProvider<JsonPathValueAssert> forValue(@Nullable Object actual) {
return () -> new JsonPathValueAssert(actual, "$.test", null);
}
}
|
EmptyNotEmptyTests
|
java
|
quarkusio__quarkus
|
extensions/undertow/spi/src/main/java/io/quarkus/undertow/deployment/UndertowStaticResourcesBuildStep.java
|
{
"start": 1163,
"end": 5985
}
|
class ____ {
protected static final String META_INF_RESOURCES_SLASH = "META-INF/resources/";
protected static final String META_INF_RESOURCES = "META-INF/resources";
@BuildStep
void handleGeneratedWebResources(Capabilities capabilities, BuildProducer<GeneratedResourceBuildItem> generatedResources,
List<GeneratedWebResourceBuildItem> generatedWebResources) throws Exception {
if (!capabilities.isPresent(Capability.SERVLET)) {
return;
}
for (GeneratedWebResourceBuildItem genResource : generatedWebResources) {
generatedResources.produce(new GeneratedResourceBuildItem(META_INF_RESOURCES_SLASH + genResource.getName(),
genResource.getClassData()));
}
}
@BuildStep
void scanStaticResources(Capabilities capabilities, ApplicationArchivesBuildItem applicationArchivesBuildItem,
BuildProducer<GeneratedResourceBuildItem> generatedResources,
BuildProducer<KnownPathsBuildItem> knownPathsBuilds,
List<GeneratedWebResourceBuildItem> generatedWebResources,
LaunchModeBuildItem launchModeBuildItem) throws Exception {
if (!capabilities.isPresent(Capability.SERVLET)) {
return;
}
//we need to check for web resources in order to get welcome files to work
//this kinda sucks
final Set<String> knownFiles = new HashSet<>();
final Set<String> knownDirectories = new HashSet<>();
for (ApplicationArchive i : applicationArchivesBuildItem.getAllApplicationArchives()) {
i.accept(tree -> {
Path resource = tree.getPath(META_INF_RESOURCES);
if (resource != null && Files.exists(resource)) {
collectKnownPaths(resource, knownFiles, knownDirectories);
}
});
}
for (ClassPathElement e : QuarkusClassLoader.getElements(META_INF_RESOURCES, false)) {
if (e.isRuntime()) {
e.apply(tree -> {
collectKnownPaths(tree.getPath(META_INF_RESOURCES), knownFiles, knownDirectories);
return null;
});
}
}
for (GeneratedWebResourceBuildItem genResource : generatedWebResources) {
String sub = genResource.getName();
if (sub.startsWith("/")) {
sub = sub.substring(1);
}
if (!sub.isEmpty()) {
knownFiles.add(sub);
for (int i = 0; i < sub.length(); ++i) {
if (sub.charAt(i) == '/') {
knownDirectories.add(sub.substring(0, i));
}
}
}
}
if (launchModeBuildItem.getLaunchMode() == LaunchMode.DEVELOPMENT) {
//we don't need knownPaths in development mode
//we serve directly from the project dir
knownPathsBuilds.produce(new KnownPathsBuildItem(Collections.emptySet(), Collections.emptySet()));
} else {
knownPathsBuilds.produce(new KnownPathsBuildItem(knownFiles, knownDirectories));
}
}
private void collectKnownPaths(Path resource, Set<String> knownFiles, Set<String> knownDirectories) {
try {
Files.walkFileTree(resource, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path path, BasicFileAttributes attrs)
throws IOException {
knownFiles.add(normalizePath(resource.relativize(path).toString()));
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult preVisitDirectory(Path path, BasicFileAttributes attrs)
throws IOException {
knownDirectories.add(normalizePath(resource.relativize(path).toString()));
return FileVisitResult.CONTINUE;
}
private String normalizePath(String path) {
if (OS.WINDOWS.isCurrent()) {
path = path.replace('\\', '/');
}
return path;
}
});
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
@BuildStep
void nativeImageResources(Capabilities capabilities, KnownPathsBuildItem paths,
BuildProducer<NativeImageResourceBuildItem> nativeImage) {
if (!capabilities.isPresent(Capability.SERVLET)) {
return;
}
for (String i : paths.knownFiles) {
nativeImage.produce(new NativeImageResourceBuildItem(META_INF_RESOURCES_SLASH + i));
}
}
}
|
UndertowStaticResourcesBuildStep
|
java
|
apache__camel
|
dsl/camel-java-joor-dsl/src/test/java/org/apache/camel/dsl/java/joor/HelperTest.java
|
{
"start": 1053,
"end": 1561
}
|
class ____ {
@Test
public void testImports() throws Exception {
List<String> list = Helper.determineImports(
IOHelper.loadText(new FileInputStream("src/test/java/org/apache/camel/dsl/java/joor/DummyRoute.java")));
Collections.sort(list);
Assertions.assertEquals(2, list.size());
Assertions.assertEquals("org.apache.camel.CamelContext", list.get(0));
Assertions.assertEquals("org.apache.camel.builder.RouteBuilder", list.get(1));
}
}
|
HelperTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/lucene/util/CombinedBitsTests.java
|
{
"start": 775,
"end": 4187
}
|
class ____ extends ESTestCase {
public void testEmpty() {
for (float percent : new float[] { 0f, 0.1f, 0.5f, 0.9f, 1f }) {
testCase(randomIntBetween(1, 10000), 0f, percent);
testCase(randomIntBetween(1, 10000), percent, 0f);
}
}
public void testSparse() {
for (float percent : new float[] { 0f, 0.1f, 0.5f, 0.9f, 1f }) {
testCase(randomIntBetween(1, 10000), 0.1f, percent);
testCase(randomIntBetween(1, 10000), percent, 0.1f);
}
}
public void testDense() {
for (float percent : new float[] { 0f, 0.1f, 0.5f, 0.9f, 1f }) {
testCase(randomIntBetween(1, 10000), 0.9f, percent);
testCase(randomIntBetween(1, 10000), percent, 0.9f);
}
}
public void testRandom() {
int iterations = atLeast(10);
for (int i = 0; i < iterations; i++) {
testCase(randomIntBetween(1, 10000), randomFloat(), randomFloat());
}
}
private void testCase(int numBits, float percent1, float percent2) {
BitSet first = randomSet(numBits, percent1);
BitSet second = randomSet(numBits, percent2);
CombinedBits actual = new CombinedBits(first, second);
FixedBitSet expected = new FixedBitSet(numBits);
or(expected, first);
and(expected, second);
assertEquals(expected, actual, numBits);
}
private void or(BitSet set1, BitSet set2) {
int next = 0;
while (next < set2.length() && (next = set2.nextSetBit(next)) != DocIdSetIterator.NO_MORE_DOCS) {
set1.set(next);
next += 1;
}
}
private void and(BitSet set1, BitSet set2) {
int next = 0;
while (next < set1.length() && (next = set1.nextSetBit(next)) != DocIdSetIterator.NO_MORE_DOCS) {
if (set2.get(next) == false) {
set1.clear(next);
}
next += 1;
}
}
private void assertEquals(Bits set1, Bits set2, int maxDoc) {
for (int i = 0; i < maxDoc; ++i) {
assertEquals("Different at " + i, set1.get(i), set2.get(i));
}
FixedBitSet bitSet1 = new FixedBitSet(100);
FixedBitSet bitSet2 = new FixedBitSet(100);
for (int from = 0; from < maxDoc; from += bitSet1.length()) {
bitSet1.set(0, bitSet1.length());
bitSet2.set(0, bitSet1.length());
if (from + bitSet1.length() > maxDoc) {
bitSet1.clear(maxDoc - from, bitSet1.length());
bitSet2.clear(maxDoc - from, bitSet1.length());
}
set1.applyMask(bitSet1, from);
set2.applyMask(bitSet2, from);
assertEquals(bitSet1, bitSet2);
}
}
private BitSet randomSet(int numBits, float percentSet) {
return randomSet(numBits, (int) (percentSet * numBits));
}
private BitSet randomSet(int numBits, int numBitsSet) {
assert numBitsSet <= numBits;
final BitSet set = randomBoolean() ? new SparseFixedBitSet(numBits) : new FixedBitSet(numBits);
for (int i = 0; i < numBitsSet; ++i) {
while (true) {
final int o = random().nextInt(numBits);
if (set.get(o) == false) {
set.set(o);
break;
}
}
}
return set;
}
}
|
CombinedBitsTests
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/ProjectWindowTableFunctionTransposeRule.java
|
{
"start": 2599,
"end": 11115
}
|
class ____ extends RelOptRule {
public static final ProjectWindowTableFunctionTransposeRule INSTANCE =
new ProjectWindowTableFunctionTransposeRule();
public ProjectWindowTableFunctionTransposeRule() {
super(
operand(LogicalProject.class, operand(LogicalTableFunctionScan.class, any())),
"ProjectWindowTableFunctionTransposeRule");
}
@Override
public boolean matches(RelOptRuleCall call) {
LogicalTableFunctionScan scan = call.rel(1);
return WindowUtil.isWindowTableFunctionCall(scan.getCall());
}
@Override
public void onMatch(RelOptRuleCall call) {
LogicalProject project = call.rel(0);
LogicalTableFunctionScan scan = call.rel(1);
RelNode scanInput = scan.getInput(0);
TimeAttributeWindowingStrategy windowingStrategy =
WindowUtil.convertToWindowingStrategy((RexCall) scan.getCall(), scanInput);
// 1. get fields to push down
ImmutableBitSet projectFields = RelOptUtil.InputFinder.bits(project.getProjects(), null);
int scanInputFieldCount = scanInput.getRowType().getFieldCount();
ImmutableBitSet toPushFields =
ImmutableBitSet.range(0, scanInputFieldCount)
.intersect(projectFields)
.set(windowingStrategy.getTimeAttributeIndex());
// partition keys in session window tvf also need be pushed down
if (windowingStrategy.getWindow() instanceof SessionWindowSpec) {
SessionWindowSpec sessionWindowSpec = (SessionWindowSpec) windowingStrategy.getWindow();
int[] partitionKeyIndices = sessionWindowSpec.getPartitionKeyIndices();
toPushFields = toPushFields.union(ImmutableBitSet.of(partitionKeyIndices));
}
if (toPushFields.cardinality() == scanInputFieldCount) {
return;
}
// 2. create new input of window table function scan
RelBuilder relBuilder = call.builder();
RelNode newScanInput = createInnerProject(relBuilder, scanInput, toPushFields);
// mapping origin field index to new field index, used to rewrite WindowTableFunction and
// top project
Map<Integer, Integer> mapping =
getFieldMapping(
scan.getRowType().getFieldCount(), scanInputFieldCount, toPushFields);
// 3. create new window table function scan
LogicalTableFunctionScan newScan =
createNewTableFunctionScan(
relBuilder,
scan,
windowingStrategy.getTimeAttributeType(),
newScanInput,
mapping,
toPushFields);
// 4. create top project
RelNode topProject = createTopProject(relBuilder, project, newScan, mapping);
call.transformTo(topProject);
}
private Map<Integer, Integer> getFieldMapping(
int scanFieldCount, int scanInputFieldCount, ImmutableBitSet toPushFields) {
int toPushFieldCount = toPushFields.cardinality();
Map<Integer, Integer> mapping = new HashMap<>();
IntStream.range(0, scanFieldCount)
.forEach(
idx -> {
int newPosition;
if (idx < scanInputFieldCount) {
newPosition = toPushFields.indexOf(idx);
} else {
newPosition = toPushFieldCount + idx - scanInputFieldCount;
}
mapping.put(idx, newPosition);
});
return mapping;
}
private RelNode createInnerProject(
RelBuilder relBuilder, RelNode scanInput, ImmutableBitSet toPushFields) {
relBuilder.push(scanInput);
List<RexInputRef> newProjects =
toPushFields.toList().stream().map(relBuilder::field).collect(Collectors.toList());
return relBuilder.project(newProjects).build();
}
private LogicalTableFunctionScan createNewTableFunctionScan(
RelBuilder relBuilder,
LogicalTableFunctionScan oldScan,
LogicalType timeAttributeType,
RelNode newInput,
Map<Integer, Integer> mapping,
ImmutableBitSet toPushFields) {
relBuilder.push(newInput);
RelOptCluster cluster = oldScan.getCluster();
FlinkTypeFactory typeFactory = (FlinkTypeFactory) cluster.getTypeFactory();
RelDataType newScanOutputType =
SqlWindowTableFunction.inferRowType(
typeFactory,
newInput.getRowType(),
typeFactory.createFieldTypeFromLogicalType(timeAttributeType));
RexNode newCall =
rewriteWindowCall(
(RexCall) oldScan.getCall(),
mapping,
newScanOutputType,
relBuilder,
toPushFields);
return LogicalTableFunctionScan.create(
cluster,
new ArrayList<>(Collections.singleton(newInput)),
newCall,
oldScan.getElementType(),
newScanOutputType,
oldScan.getColumnMappings());
}
private RexNode rewriteWindowCall(
RexCall windowCall,
Map<Integer, Integer> mapping,
RelDataType newScanOutputType,
RelBuilder relBuilder,
ImmutableBitSet toPushFields) {
final RelDataTypeFactory typeFactory = relBuilder.getTypeFactory();
final List<RexNode> newOperands = new ArrayList<>();
for (RexNode next : windowCall.getOperands()) {
newOperands.add(adjustInputRef(next, mapping));
}
final RexTableArgCall tableArgCall = (RexTableArgCall) windowCall.operands.get(0);
// Preserves the field names in the table arg to power the descriptor (which works
// name-based)
final List<RelDataTypeField> newInputFields =
tableArgCall.getType().getFieldList().stream()
.filter(f -> toPushFields.get(f.getIndex()))
.collect(Collectors.toList());
final RelDataType newTableType =
typeFactory.createStructType(
newInputFields.stream()
.map(RelDataTypeField::getType)
.collect(Collectors.toList()),
newInputFields.stream()
.map(RelDataTypeField::getName)
.collect(Collectors.toList()));
final int[] newPartitionKeys =
Arrays.stream(tableArgCall.getPartitionKeys()).map(mapping::get).toArray();
final int[] newOrderKeys =
Arrays.stream(tableArgCall.getOrderKeys()).map(mapping::get).toArray();
final RexTableArgCall projectedTableArgCall =
tableArgCall.copy(newTableType, newPartitionKeys, newOrderKeys);
newOperands.set(0, projectedTableArgCall);
return relBuilder
.getRexBuilder()
.makeCall(newScanOutputType, windowCall.getOperator(), newOperands);
}
private RelNode createTopProject(
RelBuilder relBuilder,
LogicalProject oldProject,
LogicalTableFunctionScan newInput,
Map<Integer, Integer> mapping) {
List<Pair<RexNode, String>> newTopProjects =
oldProject.getNamedProjects().stream()
.map(r -> Pair.of(adjustInputRef(r.left, mapping), r.right))
.collect(Collectors.toList());
return relBuilder
.push(newInput)
.project(Pair.left(newTopProjects), Pair.right(newTopProjects))
.build();
}
private RexNode adjustInputRef(RexNode expr, Map<Integer, Integer> mapping) {
return expr.accept(
new RexShuttle() {
@Override
public RexNode visitInputRef(RexInputRef inputRef) {
Integer newIndex = mapping.get(inputRef.getIndex());
return new RexInputRef(newIndex, inputRef.getType());
}
});
}
}
|
ProjectWindowTableFunctionTransposeRule
|
java
|
apache__camel
|
components/camel-http/src/test/java/org/apache/camel/component/http/HttpProducerTwoParametersWithSameKeyTest.java
|
{
"start": 1406,
"end": 3321
}
|
class ____ extends BaseHttpTest {
private HttpServer localServer;
@Override
public void setupResources() throws Exception {
localServer = ServerBootstrap.bootstrap()
.setCanonicalHostName("localhost").setHttpProcessor(getBasicHttpProcessor())
.setConnectionReuseStrategy(getConnectionReuseStrategy()).setResponseFactory(getHttpResponseFactory())
.setSslContext(getSSLContext())
.register("/myapp", (request, response, context) -> {
String uri = request.getRequestUri();
assertEquals("/myapp?from=me&to=foo&to=bar", uri);
response.setHeader("bar", "yes");
response.addHeader("foo", "123");
response.addHeader("foo", "456");
response.setEntity(new StringEntity("OK", StandardCharsets.US_ASCII));
response.setCode(HttpStatus.SC_OK);
}).create();
localServer.start();
}
@Override
public void cleanupResources() throws Exception {
if (localServer != null) {
localServer.stop();
}
}
@Test
public void testTwoParametersWithSameKey() {
String endpointUri = "http://localhost:" + localServer.getLocalPort()
+ "/myapp?from=me&to=foo&to=bar";
Exchange out = template.request(endpointUri, null);
assertNotNull(out);
assertFalse(out.isFailed(), "Should not fail");
assertEquals("OK", out.getMessage().getBody(String.class));
assertEquals("yes", out.getMessage().getHeader("bar"));
List<?> foo = out.getMessage().getHeader("foo", List.class);
assertNotNull(foo);
assertEquals(2, foo.size());
assertEquals("123", foo.get(0));
assertEquals("456", foo.get(1));
}
}
|
HttpProducerTwoParametersWithSameKeyTest
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/inject/writer/BeanDefinitionVisitor.java
|
{
"start": 1545,
"end": 1925
}
|
interface ____ extends OriginatingElements, Toggleable {
/**
* The suffix use for generated AOP intercepted types.
*/
String PROXY_SUFFIX = "$Intercepted";
/**
* @return The element where the bean definition originated from.
*/
@Nullable
Element getOriginatingElement();
/**
* <p>In the case where the produced
|
BeanDefinitionVisitor
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/config/CustomThreadPoolFactoryTest.java
|
{
"start": 1428,
"end": 1988
}
|
class ____ extends SpringRunWithTestSupport {
@Autowired
protected CamelContext context;
@Test
public void testCustomThreadPoolFactory() throws Exception {
context.getExecutorServiceManager().newSingleThreadExecutor(this, "foo");
MyCustomThreadPoolFactory factory = assertIsInstanceOf(MyCustomThreadPoolFactory.class,
context.getExecutorServiceManager().getThreadPoolFactory());
assertTrue(factory.isInvoked(), "Should use custom thread pool factory");
}
public static
|
CustomThreadPoolFactoryTest
|
java
|
apache__camel
|
components/camel-cm-sms/src/test/java/org/apache/camel/component/cm/test/CMTest.java
|
{
"start": 3480,
"end": 13133
}
|
class ____ extends CamelTestConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(CMTest.class);
private SecureRandom random = new SecureRandom();
private final PhoneNumberUtil pnu = PhoneNumberUtil.getInstance();
private String validNumber;
@Produce("direct:sms")
private CMProxy cmProxy;
@EndpointInject("mock:test")
private MockEndpoint mock;
@BeforeEach
public void beforeTest() throws Exception {
mock.reset();
context.getRouteController().startRoute(CamelTestConfiguration.SIMPLE_ROUTE_ID);
validNumber = pnu.format(pnu.getExampleNumber("ES"), PhoneNumberFormat.E164);
}
@AfterEach
public void afterTest() {
try {
context.getRouteController().stopRoute(CamelTestConfiguration.SIMPLE_ROUTE_ID);
} catch (Exception e) {
LOGGER.error("Exception trying to stop de routes", e);
}
}
/*
* 1. Invalid URI
*/
@Test
public void testNotRequiredProductToken() {
String schemedUri
= "cm-sms://sgw01.cm.nl/gateway.ashx?defaultFrom=MyBusiness&defaultMaxNumberOfParts=8&testConnectionOnStartup=true";
assertThrows(ResolveEndpointFailedException.class,
() -> context.getEndpoint(schemedUri));
}
@Test
public void testHostUnavailableException() throws Exception {
// cm-sms://sgw01.cm.nl/gateway.ashx?defaultFrom=MyBusiness&defaultMaxNumberOfParts=8&productToken=ea723fd7-da81-4826-89bc-fa7144e71c40&testConnectionOnStartup=true
String schemedUri
= "cm-sms://dummy.sgw01.cm.nl/gateway.ashx?defaultFrom=MyBusiness&defaultMaxNumberOfParts=8&productToken=ea723fd7-da81-4826-89bc-fa7144e71c40&testConnectionOnStartup=true";
Service service = context.getEndpoint(schemedUri).createProducer();
assertThrows(HostUnavailableException.class,
() -> service.start());
}
@Test
public void testInvalidHostDuplicateScheme() {
// cm-sms://sgw01.cm.nl/gateway.ashx?defaultFrom=MyBusiness&defaultMaxNumberOfParts=8&productToken=ea723fd7-da81-4826-89bc-fa7144e71c40&testConnectionOnStartup=true
String schemedUri = "cm-sms://https://demo.com";
assertThrows(ResolveEndpointFailedException.class,
() -> context.getEndpoint(schemedUri));
}
/*
* 2. Invalid Payload
*/
@Test
public void testNullPayload() {
assertThrows(RuntimeException.class,
() -> cmProxy.send(null));
}
@Test
public void testAsPartOfARoute() {
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateUnicodeMessage(), validNumber, null);
assertThrows(NoAccountFoundForProductTokenException.class,
() -> cmProxy.send(smsMessage));
}
@Test
public void testNoAccountFoundForProductTokenException() throws Exception {
// Change sending strategy
CMEndpoint endpoint
= (CMEndpoint) context.getEndpoint(getUri());
CMProducer producer = endpoint.createProducer();
producer.setSender(new NoAccountFoundForProductTokenExceptionSender());
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateGSM0338Message(), validNumber, null);
assertThrows(NoAccountFoundForProductTokenException.class,
() -> send(producer, smsMessage));
}
/*
* 3. CM Responses (Faking Exceptions)
*/
@Test
public void testCMResponseException() throws Exception {
// Change sending strategy
CMEndpoint endpoint
= (CMEndpoint) context.getEndpoint(getUri());
CMProducer producer = endpoint.createProducer();
producer.setSender(new CMResponseExceptionSender());
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateUnicodeMessage(), validNumber, null);
assertThrows(CMResponseException.class,
() -> send(producer, smsMessage));
}
@Test
public void testInsufficientBalanceException() throws Exception {
// Change sending strategy
CMEndpoint endpoint
= (CMEndpoint) context.getEndpoint(getUri());
CMProducer producer = endpoint.createProducer();
producer.setSender(new InsufficientBalanceExceptionSender());
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateGSM0338Message(), validNumber, null);
assertThrows(InsufficientBalanceException.class,
() -> send(producer, smsMessage));
}
@Test
public void testInvalidMSISDNException() throws Exception {
// Change sending strategy
CMEndpoint endpoint
= (CMEndpoint) context.getEndpoint(getUri());
CMProducer producer = endpoint.createProducer();
producer.setSender(new InvalidMSISDNExceptionSender());
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateUnicodeMessage(), validNumber, null);
assertThrows(InvalidMSISDNException.class,
() -> send(producer, smsMessage));
}
@Test
public void testInvalidProductTokenException() throws Exception {
// Change sending strategy
CMEndpoint endpoint
= (CMEndpoint) context.getEndpoint(getUri());
CMProducer producer = endpoint.createProducer();
producer.setSender(new InvalidProductTokenExceptionSender());
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateUnicodeMessage(), validNumber, null);
assertThrows(InvalidProductTokenException.class,
() -> send(producer, smsMessage));
}
@Test
public void testNoMessageException() throws Exception {
// Change sending strategy
CMEndpoint endpoint
= (CMEndpoint) context.getEndpoint(getUri());
CMProducer producer = endpoint.createProducer();
producer.setSender(new NoMessageExceptionSender());
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateGSM0338Message(), validNumber, null);
assertThrows(NoMessageException.class,
() -> send(producer, smsMessage));
}
@Test
public void testNotPhoneNumberFoundException() throws Exception {
// Change sending strategy
CMEndpoint endpoint = (CMEndpoint) context.getEndpoint(getUri());
CMProducer producer = endpoint.createProducer();
producer.setSender(new NotPhoneNumberFoundExceptionSender());
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateUnicodeMessage(), validNumber, null);
assertThrows(NotPhoneNumberFoundException.class,
() -> send(producer, smsMessage));
}
@Test
public void testUnknownErrorException() throws Exception {
// Change sending strategy
CMEndpoint endpoint = (CMEndpoint) context.getEndpoint(getUri());
CMProducer producer = endpoint.createProducer();
producer.setSender(new UnknownErrorExceptionSender());
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateGSM0338Message(), validNumber, null);
assertThrows(UnknownErrorException.class,
() -> send(producer, smsMessage));
}
@Test
public void testUnroutableMessageException() throws Exception {
// Change sending strategy
CMEndpoint endpoint = (CMEndpoint) context.getEndpoint(getUri());
CMProducer producer = endpoint.createProducer();
producer.setSender(new UnroutableMessageExceptionSender());
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateUnicodeMessage(), validNumber, null);
assertThrows(UnroutableMessageException.class,
() -> send(producer, smsMessage));
}
@Test
public void testCMEndpointIsForProducing() {
// Change sending strategy
CMEndpoint endpoint = (CMEndpoint) context.getEndpoint(getUri());
assertThrows(UnsupportedOperationException.class,
() -> endpoint.createConsumer(null));
}
@Test
public void testSendInvalidPayload() {
// Body
final SMSMessage smsMessage = new SMSMessage(generateIdAsString(), generateGSM0338Message(), null, null);
assertThrows(InvalidPayloadRuntimeException.class,
() -> cmProxy.send(smsMessage));
}
/*
* CMMessages
*/
private String generateUnicodeMessage() {
String ch = "\uF400";
return generateRandomLengthMessageByChar(ch);
}
private String generateGSM0338Message() {
String ch = "a";
return generateRandomLengthMessageByChar(ch);
}
private String generateRandomLengthMessageByChar(String ch) {
// random Length
int msgLength = (int) (Math.random() * 2000);
StringBuilder sb = new StringBuilder();
for (int index = 0; index < msgLength; index++) {
sb.append(ch);
}
return sb.toString();
}
private String generateIdAsString() {
return new BigInteger(130, random).toString(32);
}
private static void send(CMProducer producer, SMSMessage smsMessage) throws Exception {
Exchange exchange = producer.getEndpoint().createExchange();
exchange.getIn().setBody(smsMessage);
producer.process(exchange);
}
}
|
CMTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketTests.java
|
{
"start": 973,
"end": 2918
}
|
class ____ extends AbstractBucketMetricsTestCase<MinBucketPipelineAggregationBuilder> {
@Override
protected MinBucketPipelineAggregationBuilder doCreateTestAggregatorFactory(String name, String bucketsPath) {
return new MinBucketPipelineAggregationBuilder(name, bucketsPath);
}
public void testValidate() {
AggregationBuilder singleBucketAgg = new GlobalAggregationBuilder("global");
AggregationBuilder multiBucketAgg = new TermsAggregationBuilder("terms").userValueTypeHint(ValueType.STRING);
final Set<AggregationBuilder> aggBuilders = new HashSet<>();
aggBuilders.add(singleBucketAgg);
aggBuilders.add(multiBucketAgg);
// First try to point to a non-existent agg
assertThat(
validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "invalid_agg>metric")),
equalTo(
"Validation Failed: 1: "
+ PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " aggregation does not exist for aggregation [name]: invalid_agg>metric;"
)
);
// Now try to point to a single bucket agg
assertThat(
validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "global>metric")),
equalTo(
"Validation Failed: 1: Unable to find unqualified multi-bucket aggregation in "
+ PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ ". Path must include a multi-bucket aggregation for aggregation [name] found :"
+ GlobalAggregationBuilder.class.getName()
+ " for buckets path: global>metric;"
)
);
// Now try to point to a valid multi-bucket agg
assertThat(validate(aggBuilders, new MinBucketPipelineAggregationBuilder("name", "terms>metric")), nullValue());
}
}
|
MinBucketTests
|
java
|
alibaba__nacos
|
plugin/control/src/main/java/com/alibaba/nacos/plugin/control/tps/barrier/creator/LocalSimpleCountBarrierCreator.java
|
{
"start": 943,
"end": 1574
}
|
class ____ implements RuleBarrierCreator {
private static final LocalSimpleCountBarrierCreator INSTANCE = new LocalSimpleCountBarrierCreator();
public LocalSimpleCountBarrierCreator() {
}
public static final LocalSimpleCountBarrierCreator getInstance() {
return INSTANCE;
}
@Override
public RuleBarrier createRuleBarrier(String pointName, String ruleName, TimeUnit period) {
return new LocalSimpleCountRuleBarrier(pointName, ruleName, period);
}
@Override
public String name() {
return "localsimplecountor";
}
}
|
LocalSimpleCountBarrierCreator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/VectorTileUtils.java
|
{
"start": 759,
"end": 3268
}
|
class ____ {
private VectorTileUtils() {
// no instances
}
/**
* Creates a vector layer builder with the provided name and extent.
*/
public static VectorTile.Tile.Layer.Builder createLayerBuilder(String layerName, int extent) {
final VectorTile.Tile.Layer.Builder layerBuilder = VectorTile.Tile.Layer.newBuilder();
layerBuilder.setVersion(2);
layerBuilder.setName(layerName);
layerBuilder.setExtent(extent);
return layerBuilder;
}
/**
* Adds the flatten elements of toXContent into the feature as tags.
*/
public static void addToXContentToFeature(VectorTile.Tile.Feature.Builder feature, MvtLayerProps layerProps, ToXContent toXContent)
throws IOException {
final Map<String, Object> map = Maps.flatten(
XContentHelper.convertToMap(XContentHelper.toXContent(toXContent, XContentType.CBOR, false), true, XContentType.CBOR).v2(),
true,
true
);
for (Map.Entry<String, Object> entry : map.entrySet()) {
if (entry.getValue() != null) {
addPropertyToFeature(feature, layerProps, entry.getKey(), entry.getValue());
}
}
}
/**
* Adds the provided key / value pair into the feature as tags.
*/
public static void addPropertyToFeature(VectorTile.Tile.Feature.Builder feature, MvtLayerProps layerProps, String key, Object value) {
if (value == null) {
// guard for null values
return;
}
if (value instanceof Byte || value instanceof Short) {
// mvt does not support byte and short data types
value = ((Number) value).intValue();
}
feature.addTags(layerProps.addKey(key));
int valIndex = layerProps.addValue(value);
if (valIndex < 0) {
throw new IllegalArgumentException("Unsupported vector tile type for field [" + key + "] : " + value.getClass().getName());
}
feature.addTags(valIndex);
}
/**
* Adds the given properties to the provided layer.
*/
public static void addPropertiesToLayer(VectorTile.Tile.Layer.Builder layer, MvtLayerProps layerProps) {
// Add keys
layer.addAllKeys(layerProps.getKeys());
// Add values
final Iterable<Object> values = layerProps.getVals();
for (Object value : values) {
layer.addValues(MvtValue.toValue(value));
}
}
}
|
VectorTileUtils
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.