language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/cloudevents/CloudEventAttributes.java
|
{
"start": 948,
"end": 2013
}
|
class ____ {
// attributes for all 1.0.x specs
public static final Collection<CloudEvent.Attribute> V1_0_ATTRIBUTES = Arrays.asList(
CloudEvent.Attribute.simple(CloudEvent.CAMEL_CLOUD_EVENT_ID, "ce-id", "id"),
CloudEvent.Attribute.simple(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE, "ce-source", "source"),
CloudEvent.Attribute.simple(CloudEvent.CAMEL_CLOUD_EVENT_VERSION, "ce-specversion", "specversion"),
CloudEvent.Attribute.simple(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "ce-type", "type"),
CloudEvent.Attribute.simple(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_TYPE, "ce-datacontenttype",
"datacontenttype"),
CloudEvent.Attribute.simple(CloudEvent.CAMEL_CLOUD_EVENT_SCHEMA_URL, "ce-dataschema", "dataschema"),
CloudEvent.Attribute.simple(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT, "ce-subject", "subject"),
CloudEvent.Attribute.simple(CloudEvent.CAMEL_CLOUD_EVENT_TIME, "ce-time", "time"));
private CloudEventAttributes() {
}
}
|
CloudEventAttributes
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 418736,
"end": 420562
}
|
class ____ extends YamlDeserializerBase<GzipDeflaterDataFormat> {
public GzipDeflaterDataFormatDeserializer() {
super(GzipDeflaterDataFormat.class);
}
@Override
protected GzipDeflaterDataFormat newInstance() {
return new GzipDeflaterDataFormat();
}
@Override
protected boolean setProperty(GzipDeflaterDataFormat target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "id": {
String val = asText(node);
target.setId(val);
break;
}
default: {
return false;
}
}
return true;
}
}
@YamlType(
nodes = "hl7",
types = org.apache.camel.model.dataformat.HL7DataFormat.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "HL7",
description = "Marshal and unmarshal HL7 (Health Care) model objects using the HL7 MLLP codec.",
deprecated = false,
properties = {
@YamlProperty(name = "id", type = "string", description = "The id of this node", displayName = "Id"),
@YamlProperty(name = "parser", type = "string", description = "To use a custom HL7 parser", displayName = "Parser"),
@YamlProperty(name = "validate", type = "boolean", defaultValue = "true", description = "Whether to validate the HL7 message Is by default true.", displayName = "Validate")
}
)
public static
|
GzipDeflaterDataFormatDeserializer
|
java
|
google__dagger
|
hilt-compiler/main/java/dagger/hilt/processor/internal/aggregateddeps/PkgPrivateEntryPointGenerator.java
|
{
"start": 2004,
"end": 3108
}
|
class ____ extends MyEntryPoint {
// }
void generate() throws IOException {
TypeSpec.Builder entryPointInterfaceBuilder =
JavaPoetExtKt.addOriginatingElement(
TypeSpec.interfaceBuilder(metadata.generatedClassName().simpleName()),
metadata.getTypeElement())
.addAnnotation(Processors.getOriginatingElementAnnotation(metadata.getTypeElement()))
.addModifiers(Modifier.PUBLIC)
.addSuperinterface(metadata.baseClassName())
.addAnnotation(metadata.getAnnotation());
Processors.addGeneratedAnnotation(entryPointInterfaceBuilder, env, getClass());
if (metadata.getOptionalInstallInAnnotation().isPresent()) {
entryPointInterfaceBuilder.addAnnotation(
XAnnotations.getAnnotationSpec(metadata.getOptionalInstallInAnnotation().get()));
}
env.getFiler()
.write(
JavaFile.builder(
metadata.generatedClassName().packageName(), entryPointInterfaceBuilder.build())
.build(),
Mode.Isolating);
}
}
|
HiltWrapper_MyEntryPoint
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/type/AbstractPostgreSQLStructJdbcType.java
|
{
"start": 2657,
"end": 45913
}
|
class ____ implements StructuredJdbcType {
private static final DateTimeFormatter LOCAL_DATE_TIME;
static {
LOCAL_DATE_TIME = new DateTimeFormatterBuilder()
.parseCaseInsensitive()
.append(DateTimeFormatter.ISO_LOCAL_DATE)
.appendLiteral(' ')
.append(DateTimeFormatter.ISO_LOCAL_TIME)
.optionalStart()
.appendOffset( "+HH:mm", "+00" )
.toFormatter();
}
// Need a custom formatter for parsing what PostgresPlus/EDB produces
private static final DateTimeFormatter LOCAL_DATE;
static {
LOCAL_DATE = new DateTimeFormatterBuilder()
.parseCaseInsensitive()
.append(DateTimeFormatter.ISO_LOCAL_DATE)
.optionalStart()
.appendLiteral(' ')
.append(DateTimeFormatter.ISO_LOCAL_TIME)
.optionalStart()
.appendOffset( "+HH:mm", "+00" )
.toFormatter();
}
private final String typeName;
private final int[] orderMapping;
private final int[] inverseOrderMapping;
private final EmbeddableMappingType embeddableMappingType;
protected AbstractPostgreSQLStructJdbcType(
EmbeddableMappingType embeddableMappingType,
String typeName,
int[] orderMapping) {
this.typeName = typeName;
this.embeddableMappingType = embeddableMappingType;
this.orderMapping = orderMapping;
if ( orderMapping == null ) {
this.inverseOrderMapping = null;
}
else {
final int[] inverseOrderMapping = new int[orderMapping.length];
for ( int i = 0; i < orderMapping.length; i++ ) {
inverseOrderMapping[orderMapping[i]] = i;
}
this.inverseOrderMapping = inverseOrderMapping;
}
}
@Override
public int getJdbcTypeCode() {
return SqlTypes.STRUCT;
}
@Override
public String getStructTypeName() {
return typeName;
}
@Override
public EmbeddableMappingType getEmbeddableMappingType() {
return embeddableMappingType;
}
@Override
public <T> JavaType<T> getJdbcRecommendedJavaTypeMapping(
Integer precision,
Integer scale,
TypeConfiguration typeConfiguration) {
if ( embeddableMappingType == null ) {
return typeConfiguration.getJavaTypeRegistry().getDescriptor( Object[].class );
}
else {
//noinspection unchecked
return (JavaType<T>) embeddableMappingType.getMappedJavaType();
}
}
@Override
public <X> ValueExtractor<X> getExtractor(JavaType<X> javaType) {
return new BasicExtractor<>( javaType, this ) {
@Override
protected X doExtract(ResultSet rs, int paramIndex, WrapperOptions options) throws SQLException {
return getObject( rs.getObject( paramIndex ), options );
}
@Override
protected X doExtract(CallableStatement statement, int index, WrapperOptions options) throws SQLException {
return getObject( statement.getObject( index ), options );
}
@Override
protected X doExtract(CallableStatement statement, String name, WrapperOptions options)
throws SQLException {
return getObject( statement.getObject( name ), options );
}
private X getObject(Object object, WrapperOptions options) throws SQLException {
if ( object == null ) {
return null;
}
return ( (AbstractPostgreSQLStructJdbcType) getJdbcType() ).fromString(
object.toString(),
getJavaType(),
options
);
}
};
}
protected <X> X fromString(String string, JavaType<X> javaType, WrapperOptions options) throws SQLException {
if ( string == null ) {
return null;
}
final boolean returnEmbeddable = javaType.getJavaTypeClass() != Object[].class;
final int end;
final Object[] array;
if ( embeddableMappingType == null ) {
assert !returnEmbeddable;
final ArrayList<Object> values = new ArrayList<>( 8 );
end = deserializeStruct( string, 0, string.length() - 1, values );
array = values.toArray();
}
else {
array = new Object[embeddableMappingType.getJdbcValueCount() + ( embeddableMappingType.isPolymorphic() ? 1 : 0 )];
end = deserializeStruct( string, 0, 0, array, returnEmbeddable, options );
}
assert end == string.length();
if ( returnEmbeddable ) {
final StructAttributeValues attributeValues = getAttributeValues( embeddableMappingType, orderMapping, array, options );
//noinspection unchecked
return (X) instantiate( embeddableMappingType, attributeValues );
}
else if ( inverseOrderMapping != null ) {
StructHelper.orderJdbcValues( embeddableMappingType, inverseOrderMapping, array.clone(), array );
}
//noinspection unchecked
return (X) array;
}
private int deserializeStruct(
String string,
int begin,
int end,
ArrayList<Object> values) {
int column = 0;
boolean inQuote = false;
boolean hasEscape = false;
assert string.charAt( begin ) == '(';
int start = begin + 1;
int element = 1;
for ( int i = start; i < string.length(); i++ ) {
final char c = string.charAt( i );
switch ( c ) {
case '"':
if ( inQuote ) {
if ( i + 1 != end && string.charAt( i + 1 ) == '"' ) {
// Skip double quotes as that will be unescaped later
i++;
hasEscape = true;
continue;
}
if ( hasEscape ) {
values.add( unescape( string, start, i ) );
}
else {
values.add( string.substring( start, i ) );
}
column++;
inQuote = false;
}
else {
inQuote = true;
}
hasEscape = false;
start = i + 1;
break;
case ',':
if ( !inQuote ) {
if ( column < element ) {
if ( start == i ) {
values.add( null );
}
else {
values.add( string.substring( start, i ) );
}
column++;
}
start = i + 1;
element++;
}
break;
case ')':
if ( !inQuote ) {
if ( column < element ) {
if ( start == i ) {
values.add( null );
}
else {
values.add( string.substring( start, i ) );
}
}
return i + 1;
}
break;
}
}
throw new IllegalArgumentException( "Struct not properly formed: " + string.subSequence( start, end ) );
}
private int deserializeStruct(
String string,
int begin,
int quotes,
Object[] values,
boolean returnEmbeddable,
WrapperOptions options) throws SQLException {
int column = 0;
boolean inQuote = false;
StringBuilder escapingSb = null;
assert string.charAt( begin ) == '(';
int start = begin + 1;
for ( int i = start; i < string.length(); i++ ) {
final char c = string.charAt( i );
switch ( c ) {
case '\\':
if ( inQuote ) {
final int expectedQuoteCount = 1 << quotes;
if ( repeatsChar( string, i, expectedQuoteCount, '\\' ) ) {
if ( isDoubleQuote( string, i + expectedQuoteCount, expectedQuoteCount ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '"' );
// Move forward to the last quote
i += expectedQuoteCount + expectedQuoteCount - 1;
start = i + 1;
continue;
}
else {
assert repeatsChar( string, i + expectedQuoteCount, expectedQuoteCount, '\\' );
// Don't create an escaping string builder for binary literals
if ( i != start || !isBinary( column ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '\\' );
start = i + expectedQuoteCount + expectedQuoteCount;
}
// Move forward to the last backslash
i += expectedQuoteCount + expectedQuoteCount - 1;
continue;
}
}
}
// Fall-through since a backslash is an escaping mechanism for a start quote within arrays
case '"':
if ( inQuote ) {
if ( isDoubleQuote( string, i, 1 << ( quotes + 1 ) ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '"' );
// Move forward to the last quote
i += ( 1 << ( quotes + 1 ) ) - 1;
start = i + 1;
continue;
}
assert isDoubleQuote( string, i, 1 << quotes );
final JdbcMapping jdbcMapping = getJdbcValueSelectable( column ).getJdbcMapping();
switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.DATE:
values[column] = fromRawObject(
jdbcMapping,
parseDate(
CharSequenceHelper.subSequence(
string,
start,
i
)
),
options
);
break;
case SqlTypes.TIME:
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
values[column] = fromRawObject(
jdbcMapping,
parseTime(
CharSequenceHelper.subSequence(
string,
start,
i
)
),
options
);
break;
case SqlTypes.TIMESTAMP:
values[column] = fromRawObject(
jdbcMapping,
parseTimestamp(
CharSequenceHelper.subSequence(
string,
start,
i
),
jdbcMapping.getJdbcJavaType()
),
options
);
break;
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
values[column] = fromRawObject(
jdbcMapping,
parseTimestampWithTimeZone(
CharSequenceHelper.subSequence(
string,
start,
i
),
jdbcMapping.getJdbcJavaType()
),
options
);
break;
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
final int backslashes = 1 << ( quotes + 1 );
assert repeatsChar( string, start, backslashes, '\\' );
final int xCharPosition = start + backslashes;
assert string.charAt( xCharPosition ) == 'x';
values[column] = fromString(
jdbcMapping,
string,
xCharPosition + 1,
i
);
break;
default:
if ( escapingSb == null || escapingSb.length() == 0 ) {
values[column] = fromString(
jdbcMapping,
string,
start,
i
);
}
else {
escapingSb.append( string, start, i );
values[column] = fromString(
jdbcMapping,
escapingSb,
0,
escapingSb.length()
);
escapingSb.setLength( 0 );
}
break;
}
column++;
inQuote = false;
// move forward the index by 2 ^ quoteLevel to point to the next char after the quote
i += 1 << quotes;
if ( string.charAt( i ) == ')' ) {
// Return the end position if this is the last element
assert column == values.length;
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
}
else {
// This is a start quote, so move forward the index to the last quote
final int expectedQuotes = 1 << quotes;
assert isDoubleQuote( string, i, expectedQuotes );
i += expectedQuotes - 1;
if ( string.charAt( i + 1 ) == '(' ) {
// This could be a nested struct
final JdbcMapping jdbcMapping = getJdbcValueSelectable( column ).getJdbcMapping();
if ( jdbcMapping.getJdbcType() instanceof AbstractPostgreSQLStructJdbcType structJdbcType ) {
final Object[] subValues = new Object[structJdbcType.embeddableMappingType.getJdbcValueCount()];
final int subEnd = structJdbcType.deserializeStruct(
string,
i + 1,
quotes + 1,
subValues,
returnEmbeddable,
options
);
if ( returnEmbeddable ) {
final StructAttributeValues attributeValues = structJdbcType.getAttributeValues(
structJdbcType.embeddableMappingType,
structJdbcType.orderMapping,
subValues,
options
);
values[column] = instantiate( structJdbcType.embeddableMappingType, attributeValues );
}
else {
if ( structJdbcType.inverseOrderMapping != null ) {
StructHelper.orderJdbcValues(
structJdbcType.embeddableMappingType,
structJdbcType.inverseOrderMapping,
subValues.clone(),
subValues
);
}
values[column] = subValues;
}
column++;
// The subEnd points to the first character after the ')',
// so move forward the index to point to the next char after quotes
assert isDoubleQuote( string, subEnd, expectedQuotes );
i = subEnd + expectedQuotes;
if ( string.charAt( i ) == ')' ) {
// Return the end position if this is the last element
assert column == values.length;
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
}
else {
inQuote = true;
}
}
else if ( string.charAt( i + 1 ) == '{' ) {
// This could be a quoted array
final JdbcMapping jdbcMapping = getJdbcValueSelectable( column ).getJdbcMapping();
if ( jdbcMapping instanceof BasicPluralType<?, ?> pluralType ) {
final ArrayList<Object> arrayList = new ArrayList<>();
//noinspection unchecked
final int subEnd = deserializeArray(
string,
i + 1,
quotes + 1,
arrayList,
(BasicType<Object>) pluralType.getElementType(),
options
);
assert string.charAt( subEnd - 1 ) == '}';
values[column] = pluralType.getJdbcJavaType().wrap( arrayList, options );
column++;
// The subEnd points to the first character after the ')',
// so move forward the index to point to the next char after quotes
assert isDoubleQuote( string, subEnd, expectedQuotes );
i = subEnd + expectedQuotes;
if ( string.charAt( i ) == ')' ) {
// Return the end position if this is the last element
assert column == values.length;
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
}
else {
inQuote = true;
}
}
else {
inQuote = true;
}
}
start = i + 1;
break;
case ',':
if ( !inQuote ) {
if ( start == i ) {
values[column] = null;
}
else {
final JdbcMapping jdbcMapping = getJdbcValueSelectable( column ).getJdbcMapping();
if ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.BOOLEAN ) {
values[column] = fromRawObject(
jdbcMapping,
string.charAt( start ) == 't',
options
);
}
else if ( jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass().isEnum()
&& jdbcMapping.getJdbcType().isInteger() ) {
values[column] = fromRawObject(
jdbcMapping,
IntegerJavaType.INSTANCE.fromEncodedString( string, start, i ),
options
);
}
else {
values[column] = fromString(
jdbcMapping,
string,
start,
i
);
}
}
column++;
start = i + 1;
}
break;
case ')':
if ( !inQuote ) {
if ( column < values.length ) {
if ( start == i ) {
values[column] = null;
}
else {
final JdbcMapping jdbcMapping = getJdbcValueSelectable( column ).getJdbcMapping();
if ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.BOOLEAN ) {
values[column] = fromRawObject(
jdbcMapping,
string.charAt( start ) == 't',
options
);
}
else if ( jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass().isEnum()
&& jdbcMapping.getJdbcType().isInteger() ) {
values[column] = fromRawObject(
jdbcMapping,
IntegerJavaType.INSTANCE.fromEncodedString( string, start, i ),
options
);
}
else {
values[column] = fromString(
jdbcMapping,
string,
start,
i
);
}
}
}
return i + 1;
}
break;
case '{':
if ( !inQuote ) {
final BasicPluralType<?, ?> pluralType = (BasicPluralType<?, ?>) getJdbcValueSelectable( column ).getJdbcMapping();
final ArrayList<Object> arrayList = new ArrayList<>();
//noinspection unchecked
i = deserializeArray(
string,
i,
quotes + 1,
arrayList,
(BasicType<Object>) pluralType.getElementType(),
options
);
assert string.charAt( i - 1 ) == '}';
values[column] = pluralType.getJdbcJavaType().wrap( arrayList, options );
column++;
if ( string.charAt( i ) == ')' ) {
// Return the end position if this is the last element
assert column == values.length;
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
start = i + 1;
}
break;
}
}
throw new IllegalArgumentException( "Struct not properly formed: " + string.substring( start ) );
}
private boolean isBinary(int column) {
return isBinary( getJdbcValueSelectable( column ).getJdbcMapping() );
}
private static boolean isBinary(JdbcMapping jdbcMapping) {
switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
return true;
}
return false;
}
private int deserializeArray(
String string,
int begin,
int quotes,
ArrayList<Object> values,
BasicType<Object> elementType,
WrapperOptions options) throws SQLException {
boolean inQuote = false;
StringBuilder escapingSb = null;
assert string.charAt( begin ) == '{';
int start = begin + 1;
for ( int i = start; i < string.length(); i++ ) {
final char c = string.charAt( i );
switch ( c ) {
case '\\':
if ( inQuote ) {
final int expectedQuoteCount = 1 << quotes;
if ( repeatsChar( string, i, expectedQuoteCount, '\\' ) ) {
if ( isDoubleQuote( string, i + expectedQuoteCount, expectedQuoteCount ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '"' );
// Move forward to the last quote
i += expectedQuoteCount + expectedQuoteCount - 1;
start = i + 1;
continue;
}
else {
assert repeatsChar( string, i + expectedQuoteCount, expectedQuoteCount, '\\' );
// Don't create an escaping string builder for binary literals
if ( i != start || !isBinary( elementType ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '\\' );
start = i + expectedQuoteCount + expectedQuoteCount;
}
// Move forward to the last backslash
i += expectedQuoteCount + expectedQuoteCount - 1;
continue;
}
}
}
// Fall-through since a backslash is an escaping mechanism for a start quote within arrays
case '"':
if ( inQuote ) {
if ( isDoubleQuote( string, i, 1 << ( quotes + 1 ) ) ) {
// Skip quote escaping as that will be unescaped later
if ( escapingSb == null ) {
escapingSb = new StringBuilder();
}
escapingSb.append( string, start, i );
escapingSb.append( '"' );
// Move forward to the last quote
i += ( 1 << ( quotes + 1 ) ) - 1;
start = i + 1;
continue;
}
assert isDoubleQuote( string, i, 1 << quotes );
switch ( elementType.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.DATE:
values.add(
fromRawObject(
elementType,
parseDate(
CharSequenceHelper.subSequence(
string,
start,
i
)
),
options
)
);
break;
case SqlTypes.TIME:
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
values.add(
fromRawObject(
elementType,
parseTime(
CharSequenceHelper.subSequence(
string,
start,
i
)
),
options
)
);
break;
case SqlTypes.TIMESTAMP:
values.add(
fromRawObject(
elementType,
parseTimestamp(
CharSequenceHelper.subSequence(
string,
start,
i
),
elementType.getJdbcJavaType()
),
options
)
);
break;
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
values.add(
fromRawObject(
elementType,
parseTimestampWithTimeZone(
CharSequenceHelper.subSequence(
string,
start,
i
),
elementType.getJdbcJavaType()
),
options
)
);
break;
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
final int backslashes = 1 << ( quotes + 1 );
assert repeatsChar( string, start, backslashes, '\\' );
final int xCharPosition = start + backslashes;
assert string.charAt( xCharPosition ) == 'x';
values.add(
fromString(
elementType,
string,
xCharPosition + 1,
i
)
);
break;
default:
if ( escapingSb == null || escapingSb.length() == 0 ) {
values.add(
fromString(
elementType,
string,
start,
i
)
);
}
else {
escapingSb.append( string, start, i );
values.add(
fromString(
elementType,
escapingSb,
0,
escapingSb.length()
)
);
escapingSb.setLength( 0 );
}
break;
}
inQuote = false;
// move forward the index by 2 ^ quotes to point to the next char after the quote
i += 1 << quotes;
if ( string.charAt( i ) == '}' ) {
// Return the end position if this is the last element
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
}
else {
// This is a start quote, so move forward the index to the last quote
final int expectedQuotes = 1 << quotes;
assert isDoubleQuote( string, i, expectedQuotes );
i += expectedQuotes - 1;
if ( string.charAt( i + 1 ) == '(' ) {
// This could be a nested struct
if ( elementType.getJdbcType() instanceof AbstractPostgreSQLStructJdbcType structJdbcType ) {
final Object[] subValues = new Object[structJdbcType.embeddableMappingType.getJdbcValueCount()];
final int subEnd = structJdbcType.deserializeStruct(
string,
i + 1,
quotes + 1,
subValues,
true,
options
);
final StructAttributeValues attributeValues = structJdbcType.getAttributeValues(
structJdbcType.embeddableMappingType,
structJdbcType.orderMapping,
subValues,
options
);
values.add( instantiate( structJdbcType.embeddableMappingType, attributeValues ) );
// The subEnd points to the first character after the '}',
// so move forward the index to point to the next char after quotes
assert isDoubleQuote( string, subEnd, expectedQuotes );
i = subEnd + expectedQuotes;
if ( string.charAt( i ) == '}' ) {
// Return the end position if this is the last element
return i + 1;
}
// at this point, we must see a comma to indicate the next element
assert string.charAt( i ) == ',';
}
else {
inQuote = true;
}
}
else {
inQuote = true;
}
}
start = i + 1;
switch ( elementType.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
// Skip past the backslashes in the binary literal, this will be handled later
final int backslashes = 1 << ( quotes + 1 );
assert repeatsChar( string, start, backslashes, '\\' );
i += backslashes;
break;
}
break;
case ',':
if ( !inQuote ) {
if ( start == i ) {
values.add( null );
}
else {
if ( elementType.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.BOOLEAN ) {
values.add(
fromRawObject(
elementType,
string.charAt( start ) == 't',
options
)
);
}
else if ( elementType.getJavaTypeDescriptor().getJavaTypeClass().isEnum()
&& elementType.getJdbcType().isInteger() ) {
values.add(
fromRawObject(
elementType,
IntegerJavaType.INSTANCE.fromEncodedString( string, start, i ),
options
)
);
}
else {
values.add(
fromString(
elementType,
string,
start,
i
)
);
}
}
start = i + 1;
}
break;
case '}':
if ( !inQuote ) {
if ( start == i ) {
values.add( null );
}
else {
if ( elementType.getJdbcType().getDefaultSqlTypeCode() == SqlTypes.BOOLEAN ) {
values.add(
fromRawObject(
elementType,
string.charAt( start ) == 't',
options
)
);
}
else if ( elementType.getJavaTypeDescriptor().getJavaTypeClass().isEnum()
&& elementType.getJdbcType().isInteger() ) {
values.add(
fromRawObject(
elementType,
IntegerJavaType.INSTANCE.fromEncodedString( string, start, i ),
options
)
);
}
else {
values.add(
fromString(
elementType,
string,
start,
i
)
);
}
}
return i + 1;
}
break;
}
}
throw new IllegalArgumentException( "Array not properly formed: " + string.substring( start ) );
}
private SelectableMapping getJdbcValueSelectable(int jdbcValueSelectableIndex) {
return embeddableMappingType.getJdbcValueSelectable(
orderMapping != null ? orderMapping[jdbcValueSelectableIndex] : jdbcValueSelectableIndex );
}
private static boolean repeatsChar(String string, int start, int times, char expectedChar) {
final int end = start + times;
if ( end < string.length() ) {
for ( ; start < end; start++ ) {
if ( string.charAt( start ) != expectedChar ) {
return false;
}
}
return true;
}
return false;
}
private static boolean isDoubleQuote(String string, int start, int escapes) {
if ( escapes == 1 ) {
return string.charAt( start ) == '"';
}
assert ( escapes & 1 ) == 0 : "Only an even number of escapes allowed";
final int end = start + escapes;
if ( end < string.length() ) {
for ( ; start < end; start += 2 ) {
final char c1 = string.charAt( start );
final char c2 = string.charAt( start + 1 );
switch ( c1 ) {
case '\\':
// After a backslash, another backslash or a double quote may follow
if ( c2 != '\\' && c2 != '"' ) {
return false;
}
break;
case '"':
// After a double quote, only another double quote may follow
if ( c2 != '"' ) {
return false;
}
break;
default:
return false;
}
}
return string.charAt( end - 1 ) == '"';
}
return false;
}
private Object fromString(
int selectableIndex,
String string,
int start,
int end) {
return fromString(
getJdbcValueSelectable( selectableIndex ).getJdbcMapping(),
string,
start,
end
);
}
private static Object fromString(JdbcMapping jdbcMapping, CharSequence charSequence, int start, int end) {
return jdbcMapping.getJdbcJavaType().fromEncodedString(
charSequence,
start,
end
);
}
private static Object fromRawObject(JdbcMapping jdbcMapping, Object raw, WrapperOptions options) {
return jdbcMapping.getJdbcJavaType().wrap(
raw,
options
);
}
private Object parseDate(CharSequence subSequence) {
return LOCAL_DATE.parse( subSequence, LocalDate::from );
}
private Object parseTime(CharSequence subSequence) {
return DateTimeFormatter.ISO_LOCAL_TIME.parse( subSequence, LocalTime::from );
}
private Object parseTimestamp(CharSequence subSequence, JavaType<?> jdbcJavaType) {
final TemporalAccessor temporalAccessor = LOCAL_DATE_TIME.parse( subSequence );
final LocalDateTime localDateTime = LocalDateTime.from( temporalAccessor );
final Timestamp timestamp = Timestamp.valueOf( localDateTime );
timestamp.setNanos( temporalAccessor.get( ChronoField.NANO_OF_SECOND ) );
return timestamp;
}
private Object parseTimestampWithTimeZone(CharSequence subSequence, JavaType<?> jdbcJavaType) {
final TemporalAccessor temporalAccessor = LOCAL_DATE_TIME.parse( subSequence );
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
if ( jdbcJavaType.getJavaTypeClass() == Instant.class ) {
return Instant.from( temporalAccessor );
}
else {
return OffsetDateTime.from( temporalAccessor );
}
}
return LocalDateTime.from( temporalAccessor );
}
private static String unescape(CharSequence string, int start, int end) {
StringBuilder sb = new StringBuilder( end - start );
for ( int i = start; i < end; i++ ) {
final char c = string.charAt( i );
if ( c == '\\' || c == '"' ) {
i++;
sb.append( string.charAt( i ) );
continue;
}
sb.append( c );
}
return sb.toString();
}
@Override
public Object createJdbcValue(Object domainValue, WrapperOptions options) throws SQLException {
assert embeddableMappingType != null;
final StringBuilder sb = new StringBuilder();
serializeStructTo( new PostgreSQLAppender( sb ), domainValue, options );
return sb.toString();
}
@Override
public Object[] extractJdbcValues(Object rawJdbcValue, WrapperOptions options) throws SQLException {
assert embeddableMappingType != null;
final Object[] array = new Object[embeddableMappingType.getJdbcValueCount()];
final String struct = getRawStructFromJdbcValue( rawJdbcValue );
if ( struct == null ) {
return null;
}
deserializeStruct( struct, 0, 0, array, true, options );
if ( inverseOrderMapping != null ) {
StructHelper.orderJdbcValues( embeddableMappingType, inverseOrderMapping, array.clone(), array );
}
return array;
}
protected String getRawStructFromJdbcValue(Object rawJdbcValue) {
return rawJdbcValue.toString();
}
protected <X> String toString(X value, JavaType<X> javaType, WrapperOptions options) throws SQLException {
if ( value == null ) {
return null;
}
final StringBuilder sb = new StringBuilder();
serializeStructTo( new PostgreSQLAppender( sb ), value, options );
return sb.toString();
}
private void serializeStructTo(PostgreSQLAppender appender, Object value, WrapperOptions options) throws SQLException {
serializeDomainValueTo( appender, options, value, '(' );
appender.append( ')' );
}
private void serializeDomainValueTo(
PostgreSQLAppender appender,
WrapperOptions options,
Object domainValue,
char separator) throws SQLException {
serializeJdbcValuesTo(
appender,
options,
StructHelper.getJdbcValues( embeddableMappingType, orderMapping, domainValue, options ),
separator
);
}
private void serializeJdbcValuesTo(
PostgreSQLAppender appender,
WrapperOptions options,
Object[] jdbcValues,
char separator) throws SQLException {
for ( int i = 0; i < jdbcValues.length; i++ ) {
appender.append( separator );
separator = ',';
final Object jdbcValue = jdbcValues[i];
if ( jdbcValue == null ) {
continue;
}
final SelectableMapping selectableMapping = orderMapping == null ?
embeddableMappingType.getJdbcValueSelectable( i ) :
embeddableMappingType.getJdbcValueSelectable( orderMapping[i] );
final JdbcMapping jdbcMapping = selectableMapping.getJdbcMapping();
if ( jdbcMapping.getJdbcType() instanceof AbstractPostgreSQLStructJdbcType structJdbcType ) {
appender.quoteStart();
structJdbcType.serializeJdbcValuesTo(
appender,
options,
(Object[]) jdbcValue,
'('
);
appender.append( ')' );
appender.quoteEnd();
}
else {
serializeConvertedBasicTo( appender, options, jdbcMapping, jdbcValue );
}
}
}
private void serializeConvertedBasicTo(
PostgreSQLAppender appender,
WrapperOptions options,
JdbcMapping jdbcMapping,
Object subValue) throws SQLException {
//noinspection unchecked
final JavaType<Object> jdbcJavaType = (JavaType<Object>) jdbcMapping.getJdbcJavaType();
switch ( jdbcMapping.getJdbcType().getDefaultSqlTypeCode() ) {
case SqlTypes.TINYINT:
case SqlTypes.SMALLINT:
case SqlTypes.INTEGER:
if ( subValue instanceof Boolean booleanValue ) {
// BooleanJavaType has this as an implicit conversion
appender.append( booleanValue ? '1' : '0' );
break;
}
if ( subValue instanceof Enum<?> enumValue ) {
appender.appendSql( enumValue.ordinal() );
break;
}
case SqlTypes.BOOLEAN:
case SqlTypes.BIT:
case SqlTypes.BIGINT:
case SqlTypes.FLOAT:
case SqlTypes.REAL:
case SqlTypes.DOUBLE:
case SqlTypes.DECIMAL:
case SqlTypes.NUMERIC:
case SqlTypes.DURATION:
appender.append( subValue.toString() );
break;
case SqlTypes.CHAR:
case SqlTypes.NCHAR:
case SqlTypes.VARCHAR:
case SqlTypes.NVARCHAR:
if ( subValue instanceof Boolean booleanValue ) {
// BooleanJavaType has this as an implicit conversion
appender.append( booleanValue ? 'Y' : 'N' );
break;
}
case SqlTypes.LONGVARCHAR:
case SqlTypes.LONGNVARCHAR:
case SqlTypes.LONG32VARCHAR:
case SqlTypes.LONG32NVARCHAR:
case SqlTypes.ENUM:
case SqlTypes.NAMED_ENUM:
appender.quoteStart();
appender.append( (String) subValue );
appender.quoteEnd();
break;
case SqlTypes.DATE:
case SqlTypes.TIME:
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
case SqlTypes.TIMESTAMP:
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
appendTemporal( appender, jdbcMapping, subValue, options );
break;
case SqlTypes.BINARY:
case SqlTypes.VARBINARY:
case SqlTypes.LONGVARBINARY:
case SqlTypes.LONG32VARBINARY:
final byte[] bytes = jdbcJavaType.unwrap(
subValue,
byte[].class,
options
);
appender.ensureCanFit( appender.quote + 1 + ( bytes.length << 1 ) );
appender.append( '\\' );
appender.append( '\\' );
appender.append( 'x' );
PrimitiveByteArrayJavaType.INSTANCE.appendString(
appender,
bytes
);
break;
case SqlTypes.UUID:
appender.append( subValue.toString() );
break;
case SqlTypes.ARRAY:
if ( subValue != null ) {
final int length = Array.getLength( subValue );
if ( length == 0 ) {
appender.append( "{}" );
}
else {
//noinspection unchecked
final BasicType<Object> elementType = ((BasicPluralType<?, Object>) jdbcMapping).getElementType();
appender.quoteStart();
appender.append( '{' );
Object arrayElement = Array.get( subValue, 0 );
if ( arrayElement == null ) {
appender.appendNull();
}
else {
serializeConvertedBasicTo( appender, options, elementType, arrayElement );
}
for ( int i = 1; i < length; i++ ) {
arrayElement = Array.get( subValue, i );
appender.append( ',' );
if ( arrayElement == null ) {
appender.appendNull();
}
else {
serializeConvertedBasicTo( appender, options, elementType, arrayElement );
}
}
appender.append( '}' );
appender.quoteEnd();
}
}
break;
case SqlTypes.STRUCT:
if ( subValue != null ) {
final AbstractPostgreSQLStructJdbcType structJdbcType = (AbstractPostgreSQLStructJdbcType) jdbcMapping.getJdbcType();
appender.quoteStart();
structJdbcType.serializeJdbcValuesTo( appender, options, (Object[]) subValue, '(' );
appender.append( ')' );
appender.quoteEnd();
}
break;
default:
throw new UnsupportedOperationException( "Unsupported JdbcType nested in struct: " + jdbcMapping.getJdbcType() );
}
}
private StructAttributeValues getAttributeValues(
EmbeddableMappingType embeddableMappingType,
int[] orderMapping,
Object[] rawJdbcValues,
WrapperOptions options) throws SQLException {
final int numberOfAttributeMappings = embeddableMappingType.getNumberOfAttributeMappings();
final int size = numberOfAttributeMappings + ( embeddableMappingType.isPolymorphic() ? 1 : 0 );
final StructAttributeValues attributeValues = new StructAttributeValues(
numberOfAttributeMappings,
orderMapping != null ?
null :
rawJdbcValues
);
int jdbcIndex = 0;
for ( int i = 0; i < size; i++ ) {
final int attributeIndex;
if ( orderMapping == null ) {
attributeIndex = i;
}
else {
attributeIndex = orderMapping[i];
}
jdbcIndex += injectAttributeValue(
getSubPart( embeddableMappingType, attributeIndex ),
attributeValues,
attributeIndex,
rawJdbcValues,
jdbcIndex,
options
);
}
return attributeValues;
}
private int injectAttributeValue(
ValuedModelPart modelPart,
StructAttributeValues attributeValues,
int attributeIndex,
Object[] rawJdbcValues,
int jdbcIndex,
WrapperOptions options) throws SQLException {
final MappingType mappedType = modelPart.getMappedType();
final int jdbcValueCount;
final Object rawJdbcValue = rawJdbcValues[jdbcIndex];
if ( mappedType instanceof EmbeddableMappingType embeddableMappingType ) {
if ( embeddableMappingType.getAggregateMapping() != null ) {
jdbcValueCount = 1;
attributeValues.setAttributeValue( attributeIndex, rawJdbcValue );
}
else {
jdbcValueCount = embeddableMappingType.getJdbcValueCount();
final Object[] subJdbcValues = new Object[jdbcValueCount];
System.arraycopy( rawJdbcValues, jdbcIndex, subJdbcValues, 0, subJdbcValues.length );
final StructAttributeValues subValues = getAttributeValues(
embeddableMappingType,
null,
subJdbcValues,
options
);
attributeValues.setAttributeValue( attributeIndex, instantiate( embeddableMappingType, subValues ) );
}
}
else {
assert modelPart.getJdbcTypeCount() == 1;
jdbcValueCount = 1;
final JdbcMapping jdbcMapping = modelPart.getSingleJdbcMapping();
final Object jdbcValue = jdbcMapping.getJdbcJavaType().wrap(
rawJdbcValue,
options
);
attributeValues.setAttributeValue( attributeIndex, jdbcMapping.convertToDomainValue( jdbcValue ) );
}
return jdbcValueCount;
}
private void appendTemporal(SqlAppender appender, JdbcMapping jdbcMapping, Object value, WrapperOptions options) {
final TimeZone jdbcTimeZone = getJdbcTimeZone( options );
//noinspection unchecked
final JavaType<Object> javaType = (JavaType<Object>) jdbcMapping.getJdbcJavaType();
appender.append( '"' );
switch ( jdbcMapping.getJdbcType().getJdbcTypeCode() ) {
case SqlTypes.DATE:
if ( value instanceof java.util.Date date ) {
appendAsDate( appender, date );
}
else if ( value instanceof java.util.Calendar calendar ) {
appendAsDate( appender, calendar );
}
else if ( value instanceof TemporalAccessor temporalAccessor ) {
appendAsDate( appender, temporalAccessor );
}
else {
appendAsDate(
appender,
javaType.unwrap( value, java.util.Date.class, options )
);
}
break;
case SqlTypes.TIME:
case SqlTypes.TIME_WITH_TIMEZONE:
case SqlTypes.TIME_UTC:
if ( value instanceof java.util.Date date ) {
appendAsTime( appender, date, jdbcTimeZone );
}
else if ( value instanceof java.util.Calendar calendar ) {
appendAsTime( appender, calendar, jdbcTimeZone );
}
else if ( value instanceof TemporalAccessor temporalAccessor ) {
if ( temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ) ) {
appendAsTime( appender, temporalAccessor, true, jdbcTimeZone );
}
else {
appendAsLocalTime( appender, temporalAccessor );
}
}
else {
appendAsTime(
appender,
javaType.unwrap( value, java.sql.Time.class, options ),
jdbcTimeZone
);
}
break;
case SqlTypes.TIMESTAMP:
case SqlTypes.TIMESTAMP_WITH_TIMEZONE:
case SqlTypes.TIMESTAMP_UTC:
if ( value instanceof java.util.Date date ) {
appendAsTimestampWithMicros( appender, date, jdbcTimeZone );
}
else if ( value instanceof java.util.Calendar calendar ) {
appendAsTimestampWithMillis( appender, calendar, jdbcTimeZone );
}
else if ( value instanceof TemporalAccessor temporalAccessor ) {
appendAsTimestampWithMicros( appender, temporalAccessor, temporalAccessor.isSupported( ChronoField.OFFSET_SECONDS ), jdbcTimeZone );
}
else {
appendAsTimestampWithMicros(
appender,
javaType.unwrap( value, java.util.Date.class, options ),
jdbcTimeZone
);
}
break;
default:
throw new IllegalArgumentException();
}
appender.append( '"' );
}
private static TimeZone getJdbcTimeZone(WrapperOptions options) {
return options == null || options.getJdbcTimeZone() == null
? TimeZone.getDefault()
: options.getJdbcTimeZone();
}
protected <X> Object getBindValue(X value, WrapperOptions options) throws SQLException {
return StructHelper.getJdbcValues( embeddableMappingType, orderMapping, value, options );
}
private static
|
AbstractPostgreSQLStructJdbcType
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldBeAssignableFrom.java
|
{
"start": 755,
"end": 824
}
|
class ____ assignable from.
*
* @author William Delanoue
*/
public
|
is
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/annotation/web/builders/HttpSecurity.java
|
{
"start": 10585,
"end": 11163
}
|
class ____ {
*
* @Bean
* public SecurityFilterChain securityFilterChain(HttpSecurity http) {
* http
* .headers((headers) ->
* headers
* .contentTypeOptions(withDefaults())
* .xssProtection(withDefaults())
* .cacheControl(withDefaults())
* .httpStrictTransportSecurity(withDefaults())
* .frameOptions(withDefaults()
* );
* return http.build();
* }
* }
* </pre>
*
* You can disable the headers using the following:
*
* <pre>
* @Configuration
* @EnableWebSecurity
* public
|
CsrfSecurityConfig
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsComposedOnSingleAnnotatedElementTests.java
|
{
"start": 7476,
"end": 7704
}
|
interface ____ {
@AliasFor(annotation = Cacheable.class)
String key() default "";
}
@Cacheable("noninheritedCache2")
@Target({ ElementType.METHOD, ElementType.TYPE })
@Retention(RetentionPolicy.RUNTIME)
@
|
NoninheritedCache1
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/http/converter/smile/MappingJackson2SmileHttpMessageConverter.java
|
{
"start": 1865,
"end": 3100
}
|
class ____ extends AbstractJackson2HttpMessageConverter {
/**
* Construct a new {@code MappingJackson2SmileHttpMessageConverter} using default configuration
* provided by {@code Jackson2ObjectMapperBuilder}.
*/
public MappingJackson2SmileHttpMessageConverter() {
this(Jackson2ObjectMapperBuilder.smile().build());
}
/**
* Construct a new {@code MappingJackson2SmileHttpMessageConverter} with a custom {@link ObjectMapper}
* (must be configured with a {@code SmileFactory} instance).
* You can use {@link Jackson2ObjectMapperBuilder} to build it easily.
* @see Jackson2ObjectMapperBuilder#smile()
*/
public MappingJackson2SmileHttpMessageConverter(ObjectMapper objectMapper) {
super(objectMapper, new MediaType("application", "x-jackson-smile"));
Assert.isInstanceOf(SmileFactory.class, objectMapper.getFactory(), "SmileFactory required");
}
/**
* {@inheritDoc}
* <p>The {@code ObjectMapper} must be configured with a {@code SmileFactory} instance.
*/
@Override
public void setObjectMapper(ObjectMapper objectMapper) {
Assert.isInstanceOf(SmileFactory.class, objectMapper.getFactory(), "SmileFactory required");
super.setObjectMapper(objectMapper);
}
}
|
MappingJackson2SmileHttpMessageConverter
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/inference/strategies/ObjectOfInputTypeStrategyTest.java
|
{
"start": 6909,
"end": 7282
}
|
class ____", OBJECT_OF_INPUT_STRATEGY)
.calledWithArgumentTypes(DataTypes.INT())
.calledWithLiteralAt(0, 72)
.expectArgumentTypes(DataTypes.INT())
.expectErrorMessage(
"The first argument must be a non-nullable character string representing the
|
name
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/DefaultFiltersTests.java
|
{
"start": 7859,
"end": 8162
}
|
class ____ {
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
TestHttpSecurities.disableDefaults(http);
http.formLogin(withDefaults());
return http.build();
}
}
@Configuration
@EnableWebSecurity
@EnableWebMvc
static
|
NullWebInvocationPrivilegeEvaluatorConfig
|
java
|
google__truth
|
core/src/test/java/com/google/common/truth/FailureAssertions.java
|
{
"start": 755,
"end": 1256
}
|
class ____ {
static void assertFailureKeys(AssertionError e, String... keys) {
assertThat(e).factKeys().containsExactlyElementsIn(keys).inOrder();
}
static void assertFailureValue(AssertionError e, String key, String value) {
assertThat(e).factValue(key).isEqualTo(value);
}
static void assertFailureValueIndexed(AssertionError e, String key, int index, String value) {
assertThat(e).factValue(key, index).isEqualTo(value);
}
private FailureAssertions() {}
}
|
FailureAssertions
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/utils/ServiceUtil.java
|
{
"start": 14508,
"end": 15056
}
|
interface ____ {
/**
* Do customized filtering.
*
* @param filteredResult result with instances already been filtered cluster/enabled/healthy
* @param allInstances all instances filtered by cluster/enabled
* @param healthyCount healthy instances count filtered by cluster/enabled
*/
void doFilter(ServiceInfo filteredResult,
List<com.alibaba.nacos.api.naming.pojo.Instance> allInstances,
long healthyCount);
}
}
|
InstancesFilter
|
java
|
apache__camel
|
components/camel-pqc/src/test/java/org/apache/camel/component/pqc/PQCSignatureSPHINCSPLUSNoAutowiredTest.java
|
{
"start": 1462,
"end": 2877
}
|
class ____ extends CamelTestSupport {
@EndpointInject("mock:sign")
protected MockEndpoint resultSign;
@EndpointInject("mock:verify")
protected MockEndpoint resultVerify;
@Produce("direct:sign")
protected ProducerTemplate templateSign;
public PQCSignatureSPHINCSPLUSNoAutowiredTest() throws NoSuchAlgorithmException {
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:sign").to("pqc:sign?operation=sign&signatureAlgorithm=SPHINCSPLUS").to("mock:sign")
.to("pqc:verify?operation=verify&signatureAlgorithm=SPHINCSPLUS")
.to("mock:verify");
}
};
}
@BeforeAll
public static void startup() throws Exception {
Security.addProvider(new BouncyCastleProvider());
Security.addProvider(new BouncyCastlePQCProvider());
}
@Test
void testSignAndVerify() throws Exception {
resultSign.expectedMessageCount(1);
resultVerify.expectedMessageCount(1);
templateSign.sendBody("Hello");
resultSign.assertIsSatisfied();
resultVerify.assertIsSatisfied();
assertTrue(resultVerify.getExchanges().get(0).getMessage().getHeader(PQCConstants.VERIFY, Boolean.class));
}
}
|
PQCSignatureSPHINCSPLUSNoAutowiredTest
|
java
|
square__retrofit
|
retrofit/java-test/src/test/java/retrofit2/RequestFactoryTest.java
|
{
"start": 83663,
"end": 84131
}
|
class ____ {
@FormUrlEncoded //
@POST("/foo") //
Call<ResponseBody> method(@FieldMap Map<String, Object> fieldMap) {
return null;
}
}
Map<String, Object> fieldMap = new LinkedHashMap<>();
fieldMap.put("kit", "kat");
fieldMap.put("ping", "pong");
Request request = buildRequest(Example.class, fieldMap);
assertBody(request.body(), "kit=kat&ping=pong");
}
@Test
public void fieldMapRejectsNull() {
|
Example
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/filter/wall/mysql/MySqlWallTest4.java
|
{
"start": 837,
"end": 1111
}
|
class ____ extends TestCase {
public void test_stuff() throws Exception {
assertFalse(WallUtils.isValidateMySql(//
"SSELECT a.*,b.name FROM vote_info a left join vote_item b on a.item_id=b.id where a.id<10 or 1=1 limit 1,10"));
}
}
|
MySqlWallTest4
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/filesystem/FileBasedStateOutputStream.java
|
{
"start": 1778,
"end": 5259
}
|
class ____ extends CheckpointStateOutputStream {
private static final Logger LOG = LoggerFactory.getLogger(FileBasedStateOutputStream.class);
// ------------------------------------------------------------------------
private final FSDataOutputStream out;
private final Path path;
private final FileSystem fileSystem;
private volatile boolean closed;
public FileBasedStateOutputStream(FileSystem fileSystem, Path path) throws IOException {
this.fileSystem = checkNotNull(fileSystem);
this.path = checkNotNull(path);
this.out = fileSystem.create(path, WriteMode.NO_OVERWRITE);
}
// ------------------------------------------------------------------------
// I/O
// ------------------------------------------------------------------------
@Override
public final void write(int b) throws IOException {
out.write(b);
}
@Override
public final void write(@Nonnull byte[] b, int off, int len) throws IOException {
out.write(b, off, len);
}
@Override
public long getPos() throws IOException {
return out.getPos();
}
@Override
public void flush() throws IOException {
out.flush();
}
@Override
public void sync() throws IOException {
out.sync();
}
// ------------------------------------------------------------------------
// Closing
// ------------------------------------------------------------------------
public boolean isClosed() {
return closed;
}
@Override
public void close() {
if (!closed) {
closed = true;
try {
out.close();
fileSystem.delete(path, false);
} catch (Throwable t) {
LOG.warn("Could not close the state stream for {}.", path, t);
}
}
}
@Nullable
@Override
public FileStateHandle closeAndGetHandle() throws IOException {
synchronized (this) {
if (!closed) {
try {
// make a best effort attempt to figure out the size
long size = 0;
try {
size = out.getPos();
} catch (Exception ignored) {
}
// close and return
out.close();
return new FileStateHandle(path, size);
} catch (Exception e) {
try {
fileSystem.delete(path, false);
} catch (Exception deleteException) {
LOG.warn(
"Could not delete the checkpoint stream file {}.",
path,
deleteException);
}
throw new IOException(
"Could not flush and close the file system "
+ "output stream to "
+ path
+ " in order to obtain the "
+ "stream state handle",
e);
} finally {
closed = true;
}
} else {
throw new IOException("Stream has already been closed and discarded.");
}
}
}
}
|
FileBasedStateOutputStream
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/parallel/ResourceLocksProvider.java
|
{
"start": 809,
"end": 1409
}
|
class ____ its test methods dynamically at runtime.
*
* <p>Each shared resource is represented by an instance of {@link Lock}.
*
* <p>Adding shared resources via this API has the same semantics as declaring
* them declaratively via {@link ResourceLock @ResourceLock(value, mode)}, but for
* some use cases the programmatic approach may be more flexible and less verbose.
*
* <p>Implementations must provide a no-args constructor.
*
* @since 5.12
* @see ResourceLock#providers()
* @see Resources
* @see ResourceAccessMode
* @see Lock
*/
@API(status = MAINTAINED, since = "5.13.3")
public
|
or
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SftpComponentBuilderFactory.java
|
{
"start": 6698,
"end": 7848
}
|
class ____
extends AbstractComponentBuilder<SftpComponent>
implements SftpComponentBuilder {
@Override
protected SftpComponent buildConcreteComponent() {
return new SftpComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((SftpComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "lazyStartProducer": ((SftpComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((SftpComponent) component).setAutowiredEnabled((boolean) value); return true;
case "healthCheckConsumerEnabled": ((SftpComponent) component).setHealthCheckConsumerEnabled((boolean) value); return true;
case "healthCheckProducerEnabled": ((SftpComponent) component).setHealthCheckProducerEnabled((boolean) value); return true;
default: return false;
}
}
}
}
|
SftpComponentBuilderImpl
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java
|
{
"start": 29472,
"end": 30002
}
|
class ____ extends EmailService {
public NoopEmailService() {
super(
Settings.EMPTY,
null,
mock(SSLService.class),
new ClusterSettings(Settings.EMPTY, new HashSet<>(EmailService.getSettings()))
);
}
@Override
public EmailSent send(Email email, Authentication auth, Profile profile, String accountName) {
return new EmailSent(accountName, email);
}
}
protected static
|
NoopEmailService
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/event/spi/EnversListener.java
|
{
"start": 149,
"end": 246
}
|
interface ____ Envers listeners for duplication handling.
*
* @author Steve Ebersole
*/
public
|
for
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/errors/DelegationTokenOwnerMismatchException.java
|
{
"start": 847,
"end": 1191
}
|
class ____ extends ApiException {
private static final long serialVersionUID = 1L;
public DelegationTokenOwnerMismatchException(String message) {
super(message);
}
public DelegationTokenOwnerMismatchException(String message, Throwable cause) {
super(message, cause);
}
}
|
DelegationTokenOwnerMismatchException
|
java
|
apache__maven
|
compat/maven-compat/src/main/java/org/apache/maven/toolchain/ToolchainManagerFactory.java
|
{
"start": 1717,
"end": 6278
}
|
class ____ {
private final Lookup lookup;
private final Logger logger;
@Inject
ToolchainManagerFactory(Lookup lookup) {
this(lookup, null);
}
protected ToolchainManagerFactory(Lookup lookup, Logger logger) {
this.lookup = lookup;
this.logger = logger;
}
@Provides
@Typed({ToolchainManager.class, ToolchainManagerPrivate.class})
@Named // qualifier is required for SiduDIBridge to work
DefaultToolchainManagerV3 v3Manager() {
return new DefaultToolchainManagerV3();
}
@Provides
@Priority(10)
@Typed(org.apache.maven.api.services.ToolchainManager.class)
DefaultToolchainManagerV4 v4Manager() {
return new DefaultToolchainManagerV4();
}
@Provides
@Typed(ToolchainFactory.class)
@Named("jdk")
ToolchainFactory jdkFactory() {
return createV3FactoryBridge("jdk");
}
/**
* Creates a v3 ToolchainFactory bridge that wraps a v4 ToolchainFactory.
*/
public ToolchainFactory createV3FactoryBridge(String type) {
try {
org.apache.maven.api.services.ToolchainFactory v4Factory =
lookup.lookup(org.apache.maven.api.services.ToolchainFactory.class, type);
if (v4Factory == null) {
return null;
}
return createV3FactoryBridgeForV4Factory(v4Factory);
} catch (Exception e) {
// If lookup fails, no v4 factory exists for this type
return null;
}
}
/**
* Creates a v3 ToolchainFactory bridge that wraps a specific v4 ToolchainFactory instance.
*/
public ToolchainFactory createV3FactoryBridgeForV4Factory(
org.apache.maven.api.services.ToolchainFactory v4Factory) {
return new ToolchainFactory() {
@Override
public ToolchainPrivate createToolchain(ToolchainModel model) throws MisconfiguredToolchainException {
try {
org.apache.maven.api.Toolchain v4Toolchain = v4Factory.createToolchain(model.getDelegate());
return getToolchainV3(v4Toolchain);
} catch (ToolchainFactoryException e) {
throw new MisconfiguredToolchainException(e.getMessage(), e);
}
}
@Override
public ToolchainPrivate createDefaultToolchain() {
try {
return v4Factory
.createDefaultToolchain()
.map(ToolchainManagerFactory.this::getToolchainV3)
.orElse(null);
} catch (ToolchainFactoryException e) {
return null;
}
}
};
}
private org.apache.maven.impl.DefaultToolchainManager getDelegate() {
Map<String, ToolchainFactory> v3Factories = lookup.lookupMap(ToolchainFactory.class);
Map<String, org.apache.maven.api.services.ToolchainFactory> v4Factories =
lookup.lookupMap(org.apache.maven.api.services.ToolchainFactory.class);
Map<String, org.apache.maven.api.services.ToolchainFactory> allFactories = new HashMap<>();
for (Map.Entry<String, ToolchainFactory> entry : v3Factories.entrySet()) {
ToolchainFactory v3Factory = entry.getValue();
allFactories.put(entry.getKey(), new org.apache.maven.api.services.ToolchainFactory() {
@Nonnull
@Override
public org.apache.maven.api.Toolchain createToolchain(
@Nonnull org.apache.maven.api.toolchain.ToolchainModel model) throws ToolchainFactoryException {
try {
return getToolchainV4(v3Factory.createToolchain(new ToolchainModel(model)));
} catch (MisconfiguredToolchainException e) {
throw new RuntimeException(e);
}
}
@Nonnull
@Override
public Optional<org.apache.maven.api.Toolchain> createDefaultToolchain()
throws ToolchainFactoryException {
return Optional.ofNullable(v3Factory.createDefaultToolchain())
.map(ToolchainManagerFactory.this::getToolchainV4);
}
});
}
allFactories.putAll(v4Factories);
return new org.apache.maven.impl.DefaultToolchainManager(allFactories, logger) {};
}
public
|
ToolchainManagerFactory
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/ComponentsProviderGenerator.java
|
{
"start": 21237,
"end": 31899
}
|
class
____.returning(void.class);
ParamVar rtRemovedBeans = mc.parameter("removedBeans", List.class);
ParamVar typeCacheMap = mc.parameter("typeCache", Map.class);
mc.body(b0 -> {
LocalVar tccl = b0.localVar("tccl",
b0.invokeVirtual(MethodDescs.THREAD_GET_TCCL, b0.currentThread()));
Map<AnnotationInstanceEquivalenceProxy, LocalVar> sharedQualifers = new HashMap<>();
for (BeanInfo btRemovedBean : group.removedBeans()) {
// Bean types
LocalVar rtTypes = b0.localVar("types", b0.new_(HashSet.class));
for (Type btType : btRemovedBean.getTypes()) {
if (DotNames.OBJECT.equals(btType.name())) {
// Skip java.lang.Object
continue;
}
b0.try_(tc -> {
tc.body(b1 -> {
try {
LocalVar rtType = RuntimeTypeCreator.of(b1)
.withCache(typeCacheMap)
.withTCCL(tccl)
.create(btType);
b1.withSet(rtTypes).add(rtType);
} catch (IllegalArgumentException e) {
throw new IllegalStateException("Unable to construct type for " + btRemovedBean
+ ": " + e.getMessage());
}
});
tc.catch_(Throwable.class, "e", (b1, e) -> {
b1.invokeStatic(MethodDescs.COMPONENTS_PROVIDER_UNABLE_TO_LOAD_REMOVED_BEAN_TYPE,
Const.of(btType.toString()), e);
});
});
}
// Qualifiers
LocalVar rtQualifiers;
if (btRemovedBean.hasDefaultQualifiers() || btRemovedBean.getQualifiers().isEmpty()) {
// No qualifiers or default qualifiers (@Any, @Default)
rtQualifiers = b0.localVar("qualifiers", Const.ofNull(Set.class));
} else {
rtQualifiers = b0.localVar("qualifiers", b0.new_(HashSet.class));
for (AnnotationInstance btQualifier : btRemovedBean.getQualifiers()) {
if (DotNames.ANY.equals(btQualifier.name())) {
// Skip @Any
continue;
}
BuiltinQualifier btBuiltinQualifier = BuiltinQualifier.of(btQualifier);
if (btBuiltinQualifier != null) {
// Use the literal instance for built-in qualifiers
b0.withSet(rtQualifiers).add(btBuiltinQualifier.getLiteralInstance());
} else {
LocalVar rtSharedQualifier = sharedQualifers.get(btQualifier.createEquivalenceProxy());
if (rtSharedQualifier == null) {
// Create annotation literal first
ClassInfo btQualifierClass = btRemovedBean.getDeployment().getQualifier(
btQualifier.name());
LocalVar rtQualifier = b0.localVar("qualifier",
annotationLiterals.create(b0, btQualifierClass, btQualifier));
b0.withSet(rtQualifiers).add(rtQualifier);
sharedQualifers.put(btQualifier.createEquivalenceProxy(), rtQualifier);
} else {
b0.withSet(rtQualifiers).add(rtSharedQualifier);
}
}
}
}
InjectableBean.Kind kind;
String description = null;
if (btRemovedBean.isClassBean()) {
// This is the default
kind = null;
} else if (btRemovedBean.isProducerField()) {
kind = InjectableBean.Kind.PRODUCER_FIELD;
description = btRemovedBean.getTarget().get().asField().declaringClass().name() + "#"
+ btRemovedBean.getTarget().get().asField().name();
} else if (btRemovedBean.isProducerMethod()) {
kind = InjectableBean.Kind.PRODUCER_METHOD;
description = btRemovedBean.getTarget().get().asMethod().declaringClass().name() + "#"
+ btRemovedBean.getTarget().get().asMethod().name() + "()";
} else {
// unused interceptors/decorators are removed, but they are not treated
// as unused beans and do not appear here
kind = InjectableBean.Kind.SYNTHETIC;
}
Expr rtKind = kind != null
? Expr.staticField(FieldDesc.of(InjectableBean.Kind.class, kind.name()))
: Const.ofNull(InjectableBean.Kind.class);
Expr rtRemovedBean = b0.new_(MethodDescs.REMOVED_BEAN_IMPL, rtKind,
description != null ? Const.of(description) : Const.ofNull(String.class),
rtTypes, rtQualifiers);
b0.withList(rtRemovedBeans).add(rtRemovedBean);
}
b0.return_();
});
});
}
}
record BeanGroup(int id, List<BeanInfo> beans) {
}
record ObserverGroup(int id, List<ObserverInfo> observers) {
}
record RemovedBeanGroup(int id, List<BeanInfo> removedBeans) {
}
record CodeGenInfo(List<BeanGroup> beanGroups, List<ObserverGroup> observerGroups,
List<RemovedBeanGroup> removedBeans) {
}
private CodeGenInfo preprocess(BeanDeployment deployment) {
List<BeanInfo> beans = preprocessBeans(deployment);
List<BeanGroup> beanGroups = Grouping.of(beans, 30, BeanGroup::new);
List<ObserverInfo> observers = orderedObservers(deployment.getObservers());
List<ObserverGroup> observerGroups = Grouping.of(observers, 30, ObserverGroup::new);
List<BeanInfo> removedBeans = orderedBeans(deployment.getRemovedBeans());
List<RemovedBeanGroup> removedBeanGroups = Grouping.of(removedBeans, 5, RemovedBeanGroup::new);
return new CodeGenInfo(beanGroups, observerGroups, removedBeanGroups);
}
private List<BeanInfo> preprocessBeans(BeanDeployment deployment) {
Map<BeanInfo, List<BeanInfo>> dependencyMap = initBeanDependencyMap(deployment);
// - iterate over dependencyMap entries and process beans for which all dependencies were already processed
// - when a bean is processed the map entry is removed
// - if we're stuck and the map is not empty, we found a circular dependency (and throw an ISE)
Predicate<BeanInfo> isNotDependencyPredicate = new Predicate<BeanInfo>() {
@Override
public boolean test(BeanInfo b) {
return !isDependency(b, dependencyMap);
}
};
Predicate<BeanInfo> isNormalScopedOrNotDependencyPredicate = new Predicate<BeanInfo>() {
@Override
public boolean test(BeanInfo b) {
return b.getScope().isNormal() || !isDependency(b, dependencyMap);
}
};
Predicate<BeanInfo> isNotProducerOrNormalScopedOrNotDependencyPredicate = new Predicate<BeanInfo>() {
@Override
public boolean test(BeanInfo b) {
// Try to process non-producer beans first, including declaring beans of producers
if (b.isProducer()) {
return false;
}
return b.getScope().isNormal() || !isDependency(b, dependencyMap);
}
};
List<BeanInfo> result = new ArrayList<>();
Set<BeanInfo> processed = new HashSet<>();
boolean stuck = false;
while (!dependencyMap.isEmpty()) {
if (stuck) {
throw circularDependenciesNotSupportedException(dependencyMap);
}
stuck = true;
// First try to process beans that are not dependencies
stuck = addBeans(result, dependencyMap, processed, isNotDependencyPredicate);
if (stuck) {
// It seems we're stuck but we can try to process normal scoped beans that can prevent a circular dependency
stuck = addBeans(result, dependencyMap, processed, isNotProducerOrNormalScopedOrNotDependencyPredicate);
if (stuck) {
stuck = addBeans(result, dependencyMap, processed, isNormalScopedOrNotDependencyPredicate);
}
}
}
// Finally process beans and interceptors that are not dependencies
// We need to iterate in a deterministic order for build time reproducibility
for (BeanInfo bean : orderedBeans(deployment.getBeans())) {
if (!processed.contains(bean)) {
result.add(bean);
}
}
for (BeanInfo interceptor : orderedInterceptors(deployment.getInterceptors())) {
if (!processed.contains(interceptor)) {
result.add(interceptor);
}
}
for (BeanInfo decorator : orderedDecorators(deployment.getDecorators())) {
if (!processed.contains(decorator)) {
result.add(decorator);
}
}
return result;
}
/**
* Returns a dependency map for bean instantiation. Say the following beans exist:
*
* <pre>
*
|
mc
|
java
|
apache__camel
|
components/camel-consul/src/main/java/org/apache/camel/component/consul/endpoint/ConsulKeyValueActions.java
|
{
"start": 863,
"end": 1207
}
|
interface ____ {
String PUT = "PUT";
String GET_VALUE = "GET_VALUE";
String GET_VALUES = "GET_VALUES";
String GET_KEYS = "GET_KEYS";
String GET_SESSIONS = "GET_SESSIONS";
String DELETE_KEY = "DELETE_KEY";
String DELETE_KEYS = "DELETE_KEYS";
String LOCK = "LOCK";
String UNLOCK = "UNLOCK";
}
|
ConsulKeyValueActions
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/views/DefaultViewTest.java
|
{
"start": 356,
"end": 452
}
|
class ____ extends DatabindTestUtil
{
// Classes that represent views
static
|
DefaultViewTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/EhcacheEndpointBuilderFactory.java
|
{
"start": 8363,
"end": 17408
}
|
interface ____
extends
EndpointConsumerBuilder {
default EhcacheEndpointConsumerBuilder basic() {
return (EhcacheEndpointConsumerBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* The default cache configuration to be used to create caches.
*
* The option is a: <code>org.ehcache.config.CacheConfiguration</code>
* type.
*
* Group: advanced
*
* @param configuration the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder configuration(org.ehcache.config.CacheConfiguration configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* The default cache configuration to be used to create caches.
*
* The option will be converted to a
* <code>org.ehcache.config.CacheConfiguration</code> type.
*
* Group: advanced
*
* @param configuration the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder configuration(String configuration) {
doSetProperty("configuration", configuration);
return this;
}
/**
* A map of cache configuration to be used to create caches.
*
* The option is a: <code>java.util.Map<java.lang.String,
* org.ehcache.config.CacheConfiguration></code> type.
*
* Group: advanced
*
* @param configurations the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder configurations(Map<java.lang.String, org.ehcache.config.CacheConfiguration> configurations) {
doSetProperty("configurations", configurations);
return this;
}
/**
* A map of cache configuration to be used to create caches.
*
* The option will be converted to a
* <code>java.util.Map<java.lang.String,
* org.ehcache.config.CacheConfiguration></code> type.
*
* Group: advanced
*
* @param configurations the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder configurations(String configurations) {
doSetProperty("configurations", configurations);
return this;
}
/**
* The cache key type, default java.lang.Object.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*
* @param keyType the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder keyType(String keyType) {
doSetProperty("keyType", keyType);
return this;
}
/**
* The cache value type, default java.lang.Object.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*
* @param valueType the value to set
* @return the dsl builder
*/
default AdvancedEhcacheEndpointConsumerBuilder valueType(String valueType) {
doSetProperty("valueType", valueType);
return this;
}
}
/**
* Builder for endpoint producers for the Ehcache component.
*/
public
|
AdvancedEhcacheEndpointConsumerBuilder
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableGauge.java
|
{
"start": 1188,
"end": 1599
}
|
class ____ extends MutableMetric {
private final MetricsInfo info;
protected MutableGauge(MetricsInfo info) {
this.info = checkNotNull(info, "metric info");
}
protected MetricsInfo info() {
return info;
}
/**
* Increment the value of the metric by 1
*/
public abstract void incr();
/**
* Decrement the value of the metric by 1
*/
public abstract void decr();
}
|
MutableGauge
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/caching/BasicUnstructuredCachingOfConvertedValueTest.java
|
{
"start": 1403,
"end": 4076
}
|
class ____ {
public static final int postalAreaAttributeIndex = 0;
@Test
@JiraKey( value = "HHH-9615" )
public void basicCacheStructureTest(SessionFactoryScope scope) {
EntityPersister persister = scope.getSessionFactory()
.getMappingMetamodel()
.getEntityDescriptor( Address.class.getName() );
final DomainDataRegion region = persister.getCacheAccessStrategy().getRegion();
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// test during store...
PostalAreaConverter.clearCounts();
scope.inTransaction(
(session) -> {
session.persist( new Address( 1, "123 Main St.", null, PostalArea._78729 ) );
}
);
scope.inTransaction(
(session) -> {
final EntityDataAccess entityDataAccess = region.getEntityDataAccess( persister.getNavigableRole() );
final Object cacheKey = entityDataAccess.generateCacheKey(
1,
persister,
scope.getSessionFactory(),
null
);
final Object cachedItem = entityDataAccess.get( session, cacheKey );
final StandardCacheEntryImpl state = (StandardCacheEntryImpl) cachedItem;
// this is the point of the Jira.. that this "should be" the converted value
assertThat( state.getDisassembledState()[ postalAreaAttributeIndex ], instanceOf( PostalArea.class ) );
}
);
assertThat( PostalAreaConverter.toDatabaseCallCount, is( 1 ) );
assertThat( PostalAreaConverter.toDomainCallCount, is( 0 ) );
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// test during load...
PostalAreaConverter.clearCounts();
scope.getSessionFactory().getCache().evictAll();
scope.inTransaction(
(session) -> session.get( Address.class, 1 )
);
scope.inTransaction(
(session) -> {
final EntityDataAccess entityDataAccess = region.getEntityDataAccess( persister.getNavigableRole() );
final Object cacheKey = entityDataAccess.generateCacheKey(
1,
persister,
scope.getSessionFactory(),
null
);
final Object cachedItem = entityDataAccess.get( session, cacheKey );
final StandardCacheEntryImpl state = (StandardCacheEntryImpl) cachedItem;
// this is the point of the Jira.. that this "should be" the converted value
assertThat( state.getDisassembledState()[ postalAreaAttributeIndex ], instanceOf( PostalArea.class ) );
}
);
assertThat( PostalAreaConverter.toDatabaseCallCount, is( 0 ) );
assertThat( PostalAreaConverter.toDomainCallCount, is( 1 ) );
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
}
|
BasicUnstructuredCachingOfConvertedValueTest
|
java
|
netty__netty
|
resolver-dns/src/main/java/io/netty/resolver/dns/DnsResolveContext.java
|
{
"start": 2618,
"end": 6338
}
|
class ____<T> {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(DnsResolveContext.class);
private static final String PROP_TRY_FINAL_CNAME_ON_ADDRESS_LOOKUPS =
"io.netty.resolver.dns.tryCnameOnAddressLookups";
static boolean TRY_FINAL_CNAME_ON_ADDRESS_LOOKUPS;
static {
TRY_FINAL_CNAME_ON_ADDRESS_LOOKUPS =
SystemPropertyUtil.getBoolean(PROP_TRY_FINAL_CNAME_ON_ADDRESS_LOOKUPS, false);
if (logger.isDebugEnabled()) {
logger.debug("-D{}: {}", PROP_TRY_FINAL_CNAME_ON_ADDRESS_LOOKUPS, TRY_FINAL_CNAME_ON_ADDRESS_LOOKUPS);
}
}
private static final RuntimeException NXDOMAIN_QUERY_FAILED_EXCEPTION =
DnsResolveContextException.newStatic("No answer found and NXDOMAIN response code returned",
DnsResolveContext.class, "onResponse(..)");
private static final RuntimeException CNAME_NOT_FOUND_QUERY_FAILED_EXCEPTION =
DnsResolveContextException.newStatic("No matching CNAME record found",
DnsResolveContext.class, "onResponseCNAME(..)");
private static final RuntimeException NO_MATCHING_RECORD_QUERY_FAILED_EXCEPTION =
DnsResolveContextException.newStatic("No matching record type found",
DnsResolveContext.class, "onResponseAorAAAA(..)");
private static final RuntimeException UNRECOGNIZED_TYPE_QUERY_FAILED_EXCEPTION =
DnsResolveContextException.newStatic("Response type was unrecognized",
DnsResolveContext.class, "onResponse(..)");
private static final RuntimeException NAME_SERVERS_EXHAUSTED_EXCEPTION =
DnsResolveContextException.newStatic("No name servers returned an answer",
DnsResolveContext.class, "tryToFinishResolve(..)");
private static final RuntimeException SERVFAIL_QUERY_FAILED_EXCEPTION =
DnsErrorCauseException.newStatic("Query failed with SERVFAIL", SERVFAIL,
DnsResolveContext.class, "onResponse(..)");
private static final RuntimeException NXDOMAIN_CAUSE_QUERY_FAILED_EXCEPTION =
DnsErrorCauseException.newStatic("Query failed with NXDOMAIN", NXDOMAIN,
DnsResolveContext.class, "onResponse(..)");
final DnsNameResolver parent;
private final Channel channel;
private final Promise<?> originalPromise;
private final DnsServerAddressStream nameServerAddrs;
private final String hostname;
private final int dnsClass;
private final DnsRecordType[] expectedTypes;
final DnsRecord[] additionals;
private final Set<Future<AddressedEnvelope<DnsResponse, InetSocketAddress>>> queriesInProgress =
Collections.newSetFromMap(
new IdentityHashMap<Future<AddressedEnvelope<DnsResponse, InetSocketAddress>>, Boolean>());
private List<T> finalResult;
private int allowedQueries;
private boolean triedCNAME;
private boolean completeEarly;
DnsResolveContext(DnsNameResolver parent, Channel channel,
Promise<?> originalPromise, String hostname, int dnsClass, DnsRecordType[] expectedTypes,
DnsRecord[] additionals, DnsServerAddressStream nameServerAddrs, int allowedQueries) {
assert expectedTypes.length > 0;
this.parent = parent;
this.channel = channel;
this.originalPromise = originalPromise;
this.hostname = hostname;
this.dnsClass = dnsClass;
this.expectedTypes = expectedTypes;
this.additionals = additionals;
this.nameServerAddrs = ObjectUtil.checkNotNull(nameServerAddrs, "nameServerAddrs");
this.allowedQueries = allowedQueries;
}
static final
|
DnsResolveContext
|
java
|
quarkusio__quarkus
|
extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/applicationfieldaccess/PublicFieldAccessAssociationsTest.java
|
{
"start": 5005,
"end": 5864
}
|
class ____ {
@Id
@GeneratedValue
public long id;
@OneToOne
public ContainedEntity oneToOne;
@ManyToOne
public ContainedEntity manyToOne;
@OneToMany
@JoinTable(name = "containing_oneToMany")
public List<ContainedEntity> oneToMany = new ArrayList<>();
@ManyToMany
@JoinTable(name = "containing_manyToMany")
public List<ContainedEntity> manyToMany = new ArrayList<>();
@OneToOne(mappedBy = "oneToOne")
public ContainedEntity oneToOneMappedBy;
@OneToMany(mappedBy = "manyToOne")
public List<ContainedEntity> oneToManyMappedBy = new ArrayList<>();
@ManyToMany(mappedBy = "manyToMany")
public List<ContainedEntity> manyToManyMappedBy = new ArrayList<>();
}
@Entity
public static
|
ContainingEntity
|
java
|
apache__camel
|
core/camel-core-languages/src/main/java/org/apache/camel/language/variable/VariableLanguage.java
|
{
"start": 1200,
"end": 1799
}
|
class ____ extends LanguageSupport {
public static Expression variable(String name) {
return ExpressionBuilder.variableExpression(name);
}
@Override
public Predicate createPredicate(String expression) {
return ExpressionToPredicateAdapter.toPredicate(createExpression(expression));
}
@Override
public Expression createExpression(String expression) {
if (expression != null && isStaticResource(expression)) {
expression = loadResource(expression);
}
return VariableLanguage.variable(expression);
}
}
|
VariableLanguage
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-client-jackson/deployment/src/test/java/io/quarkus/rest/client/reactive/jackson/test/BadRequestNotPropagatedTestCase.java
|
{
"start": 2481,
"end": 2805
}
|
interface ____ extends BadClient {
@ClientExceptionMapper
static RuntimeException toException(Response response) {
if (response.getStatus() == 400) {
return new WebApplicationException(999);
}
return null;
}
}
static
|
BadClientWithCustomMapper
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ClickUpEndpointBuilderFactory.java
|
{
"start": 3346,
"end": 9532
}
|
interface ____
extends
EndpointConsumerBuilder {
default ClickUpEndpointBuilder basic() {
return (ClickUpEndpointBuilder) this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedClickUpEndpointBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer (advanced)
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default AdvancedClickUpEndpointBuilder bridgeErrorHandler(String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedClickUpEndpointBuilder exceptionHandler(org.apache.camel.spi.ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*
* @param exceptionHandler the value to set
* @return the dsl builder
*/
default AdvancedClickUpEndpointBuilder exceptionHandler(String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedClickUpEndpointBuilder exchangePattern(org.apache.camel.ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*
* @param exchangePattern the value to set
* @return the dsl builder
*/
default AdvancedClickUpEndpointBuilder exchangePattern(String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Can be used to set an alternative base URL, e.g. when you want to
* test the component against a mock ClickUp API.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: advanced
*
* @param baseUrl the value to set
* @return the dsl builder
*/
default AdvancedClickUpEndpointBuilder baseUrl(String baseUrl) {
doSetProperty("baseUrl", baseUrl);
return this;
}
}
public
|
AdvancedClickUpEndpointBuilder
|
java
|
netty__netty
|
codec-http3/src/test/java/io/netty/handler/codec/http3/Http3ControlStreamInboundHandlerTest.java
|
{
"start": 1730,
"end": 12479
}
|
class ____ extends
AbstractHttp3FrameTypeValidationHandlerTest<Http3ControlStreamFrame> {
private QpackEncoder qpackEncoder;
private Http3ControlStreamOutboundHandler remoteControlStreamHandler;
public Http3ControlStreamInboundHandlerTest() {
super(QuicStreamType.UNIDIRECTIONAL, false, false);
}
static Collection<Object[]> testData() {
List<Object[]> config = new ArrayList<>();
for (int a = 0; a < 2; a++) {
for (int b = 0; b < 2; b++) {
config.add(new Object[] { a == 0, b == 0 });
}
}
return config;
}
@Override
protected void setUp(boolean server) {
super.setUp(server);
qpackEncoder = new QpackEncoder();
remoteControlStreamHandler = new Http3ControlStreamOutboundHandler(server, new DefaultHttp3SettingsFrame(),
new ChannelInboundHandlerAdapter());
}
@Override
protected void afterSettingsFrameRead(Http3SettingsFrame settingsFrame) {
if (!qpackAttributes.dynamicTableDisabled()) {
// settings frame initialize QPACK streams
readAndReleaseStreamHeader(qPACKEncoderStream());
readAndReleaseStreamHeader(qPACKDecoderStream());
}
}
@Override
protected ChannelHandler newHandler(boolean server) {
return new Http3ControlStreamInboundHandler(server, new ChannelInboundHandlerAdapter(), qpackEncoder,
remoteControlStreamHandler);
}
@Override
protected List<Http3ControlStreamFrame> newValidFrames() {
return Arrays.asList(new DefaultHttp3SettingsFrame(), new DefaultHttp3GoAwayFrame(0),
new DefaultHttp3MaxPushIdFrame(0), new DefaultHttp3CancelPushFrame(0));
}
@Override
protected List<Http3Frame> newInvalidFrames() {
return Arrays.asList(Http3TestUtils.newHttp3RequestStreamFrame(), Http3TestUtils.newHttp3PushStreamFrame());
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testInvalidFirstFrameHttp3GoAwayFrame(boolean server, boolean forwardControlFrames) throws Exception {
setUp(server);
testInvalidFirstFrame(server, forwardControlFrames, new DefaultHttp3GoAwayFrame(0));
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testInvalidFirstFrameHttp3MaxPushIdFrame(boolean server, boolean forwardControlFrames)
throws Exception {
setUp(server);
testInvalidFirstFrame(server, forwardControlFrames, new DefaultHttp3MaxPushIdFrame(0));
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testInvalidFirstFrameHttp3CancelPushFrame(boolean server, boolean forwardControlFrames)
throws Exception {
setUp(server);
testInvalidFirstFrame(server, forwardControlFrames, new DefaultHttp3CancelPushFrame(0));
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testInvalidFirstFrameNonControlFrame(boolean server, boolean forwardControlFrames) throws Exception {
setUp(server);
testInvalidFirstFrame(server, forwardControlFrames, () -> 9999);
}
private void testInvalidFirstFrame(boolean server, boolean forwardControlFrames, Http3Frame frame)
throws Exception {
final EmbeddedQuicStreamChannel channel = newStream(QuicStreamType.BIDIRECTIONAL,
new Http3ControlStreamInboundHandler(server,
forwardControlFrames ? new ChannelInboundHandlerAdapter() : null,
qpackEncoder, remoteControlStreamHandler));
writeInvalidFrame(forwardControlFrames, Http3ErrorCode.H3_MISSING_SETTINGS, channel, frame);
verifyClose(Http3ErrorCode.H3_MISSING_SETTINGS, parent);
assertFalse(channel.finish());
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testValidGoAwayFrame(boolean server, boolean forwardControlFrames) throws Exception {
setUp(server);
EmbeddedChannel channel = newStream(server, forwardControlFrames);
writeValidFrame(forwardControlFrames, channel, new DefaultHttp3GoAwayFrame(0));
writeValidFrame(forwardControlFrames, channel, new DefaultHttp3GoAwayFrame(0));
assertFalse(channel.finish());
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testSecondGoAwayFrameFailsWithHigherId(boolean server, boolean forwardControlFrames) throws Exception {
setUp(server);
EmbeddedChannel channel = newStream(server, forwardControlFrames);
writeValidFrame(forwardControlFrames, channel, new DefaultHttp3GoAwayFrame(0));
writeInvalidFrame(forwardControlFrames, Http3ErrorCode.H3_ID_ERROR, channel, new DefaultHttp3GoAwayFrame(4));
verifyClose(Http3ErrorCode.H3_ID_ERROR, parent);
assertFalse(channel.finish());
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testGoAwayFrameIdNonRequestStream(boolean server, boolean forwardControlFrames) throws Exception {
setUp(server);
EmbeddedChannel channel = newStream(server, forwardControlFrames);
if (server) {
writeValidFrame(forwardControlFrames, channel, new DefaultHttp3GoAwayFrame(3));
} else {
writeInvalidFrame(forwardControlFrames, Http3ErrorCode.H3_FRAME_UNEXPECTED, channel,
new DefaultHttp3GoAwayFrame(3));
verifyClose(Http3ErrorCode.H3_FRAME_UNEXPECTED, parent);
}
assertFalse(channel.finish());
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testHttp3MaxPushIdFrames(boolean server, boolean forwardControlFrames) throws Exception {
setUp(server);
EmbeddedChannel channel = newStream(server, forwardControlFrames);
if (server) {
writeValidFrame(forwardControlFrames, channel, new DefaultHttp3MaxPushIdFrame(0));
writeValidFrame(forwardControlFrames, channel, new DefaultHttp3MaxPushIdFrame(4));
} else {
writeInvalidFrame(forwardControlFrames, Http3ErrorCode.H3_FRAME_UNEXPECTED, channel,
new DefaultHttp3MaxPushIdFrame(4));
verifyClose(Http3ErrorCode.H3_FRAME_UNEXPECTED, parent);
}
assertFalse(channel.finish());
}
@ParameterizedTest(name = "{index}: forwardControlFrames = {0}")
@ValueSource(booleans = { true, false })
public void testSecondHttp3MaxPushIdFrameFailsWithSmallerId(boolean forwardControlFrames)
throws Exception {
setUp(true);
EmbeddedChannel channel = newStream(true, forwardControlFrames);
writeValidFrame(forwardControlFrames, channel, new DefaultHttp3MaxPushIdFrame(4));
writeInvalidFrame(forwardControlFrames, Http3ErrorCode.H3_ID_ERROR, channel, new DefaultHttp3MaxPushIdFrame(0));
verifyClose(Http3ErrorCode.H3_ID_ERROR, parent);
assertFalse(channel.finish());
}
private EmbeddedQuicStreamChannel newStream(boolean server, boolean forwardControlFrames) throws Exception {
EmbeddedQuicStreamChannel channel = newStream(QuicStreamType.UNIDIRECTIONAL,
new Http3ControlStreamInboundHandler(server,
forwardControlFrames ? new ChannelInboundHandlerAdapter() : null,
qpackEncoder, remoteControlStreamHandler));
// We always need to start with a settings frame.
Http3SettingsFrame settingsFrame = new DefaultHttp3SettingsFrame();
assertEquals(forwardControlFrames, channel.writeInbound(settingsFrame));
if (forwardControlFrames) {
assertFrameSame(settingsFrame, channel.readInbound());
} else {
assertFrameReleased(settingsFrame);
}
Object streamType = qPACKEncoderStream().readOutbound();
assertNotNull(streamType);
ReferenceCountUtil.release(streamType);
streamType = qPACKDecoderStream().readOutbound();
assertNotNull(streamType);
ReferenceCountUtil.release(streamType);
return channel;
}
private void writeValidFrame(boolean forwardControlFrames, EmbeddedChannel channel,
Http3ControlStreamFrame controlStreamFrame) {
assertEquals(forwardControlFrames, channel.writeInbound(controlStreamFrame));
if (forwardControlFrames) {
assertFrameSame(controlStreamFrame, channel.readInbound());
} else {
assertFrameReleased(controlStreamFrame);
}
}
private void writeInvalidFrame(boolean forwardControlFrames, Http3ErrorCode expectedCode, EmbeddedChannel channel,
Http3Frame frame) {
if (forwardControlFrames) {
Exception e = assertThrows(Exception.class, () -> channel.writeInbound(frame));
assertException(expectedCode, e);
} else {
assertFalse(channel.writeInbound(frame));
}
assertFrameReleased(frame);
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testSecondSettingsFrameFails(boolean server, boolean forwardControlFrames) throws Exception {
setUp(server);
EmbeddedChannel channel = newStream(server, forwardControlFrames);
writeInvalidFrame(forwardControlFrames, Http3ErrorCode.H3_FRAME_UNEXPECTED, channel,
new DefaultHttp3SettingsFrame());
verifyClose(Http3ErrorCode.H3_FRAME_UNEXPECTED, parent);
assertFalse(channel.finish());
}
@ParameterizedTest(name = "{index}: server = {0}, forwardControlFrames = {1}")
@MethodSource("testData")
public void testControlStreamClosed(boolean server, boolean forwardControlFrames) throws Exception {
setUp(server);
EmbeddedQuicStreamChannel channel = newStream(server, forwardControlFrames);
channel.writeInboundFin();
verifyClose(Http3ErrorCode.H3_CLOSED_CRITICAL_STREAM, parent);
assertFalse(channel.finish());
}
@Override
protected Http3ErrorCode inboundErrorCodeInvalid() {
return Http3ErrorCode.H3_MISSING_SETTINGS;
}
}
|
Http3ControlStreamInboundHandlerTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpUtilsTests.java
|
{
"start": 1043,
"end": 3287
}
|
class ____ extends ESTestCase {
public void testCheckForFailureStatusCode_ThrowsWhenStatusCodeIs300() {
var statusLine = mock(StatusLine.class);
when(statusLine.getStatusCode()).thenReturn(300);
var httpResponse = mock(HttpResponse.class);
when(httpResponse.getStatusLine()).thenReturn(statusLine);
var result = new HttpResult(httpResponse, new byte[0]);
var thrownException = expectThrows(
IllegalStateException.class,
() -> checkForFailureStatusCode(mockThrottlerManager(), mock(Logger.class), mockRequest("id"), result)
);
assertThat(thrownException.getMessage(), is("Unhandled redirection for request from inference entity id [id] status [300]"));
}
public void testCheckForFailureStatusCode_DoesNotThrowWhenStatusCodeIs200() {
var statusLine = mock(StatusLine.class);
when(statusLine.getStatusCode()).thenReturn(200);
var httpResponse = mock(HttpResponse.class);
when(httpResponse.getStatusLine()).thenReturn(statusLine);
var result = new HttpResult(httpResponse, new byte[0]);
checkForFailureStatusCode(mockThrottlerManager(), mock(Logger.class), mock(Request.class), result);
}
public void testCheckForEmptyBody_DoesNotThrowWhenTheBodyIsNotEmpty() {
var httpResponse = mock(HttpResponse.class);
when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class));
var result = new HttpResult(httpResponse, new byte[] { 'a' });
checkForEmptyBody(mockThrottlerManager(), mock(Logger.class), mock(Request.class), result);
}
public void testCheckForEmptyBody_ThrowsWhenTheBodyIsEmpty() {
var httpResponse = mock(HttpResponse.class);
when(httpResponse.getStatusLine()).thenReturn(mock(StatusLine.class));
var result = new HttpResult(httpResponse, new byte[0]);
var thrownException = expectThrows(
IllegalStateException.class,
() -> checkForEmptyBody(mockThrottlerManager(), mock(Logger.class), mockRequest("id"), result)
);
assertThat(thrownException.getMessage(), is("Response body was empty for request from inference entity id [id]"));
}
}
|
HttpUtilsTests
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/resteasy/async/filters/AsyncResponseFilter2.java
|
{
"start": 176,
"end": 312
}
|
class ____ extends AsyncResponseFilter {
public AsyncResponseFilter2() {
super("ResponseFilter2");
}
}
|
AsyncResponseFilter2
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/annotation/ComposedRepeatableAnnotationsTests.java
|
{
"start": 11373,
"end": 11487
}
|
class ____ {
}
@ForPetesSake("C")
@PeteRepeats(@PeteRepeat("A"))
@PeteRepeat("B")
static
|
ComposedRepeatableClass
|
java
|
apache__flink
|
flink-formats/flink-orc/src/main/java/org/apache/flink/orc/writer/OrcBulkWriterFactory.java
|
{
"start": 1757,
"end": 4791
}
|
class ____<T> implements BulkWriter.Factory<T> {
private final Vectorizer<T> vectorizer;
private final Properties writerProperties;
private final Map<String, String> confMap;
private OrcFile.WriterOptions writerOptions;
/**
* Creates a new OrcBulkWriterFactory using the provided Vectorizer implementation.
*
* @param vectorizer The vectorizer implementation to convert input record to a
* VectorizerRowBatch.
*/
public OrcBulkWriterFactory(Vectorizer<T> vectorizer) {
this(vectorizer, new Configuration());
}
/**
* Creates a new OrcBulkWriterFactory using the provided Vectorizer, Hadoop Configuration.
*
* @param vectorizer The vectorizer implementation to convert input record to a
* VectorizerRowBatch.
*/
public OrcBulkWriterFactory(Vectorizer<T> vectorizer, Configuration configuration) {
this(vectorizer, null, configuration);
}
/**
* Creates a new OrcBulkWriterFactory using the provided Vectorizer, Hadoop Configuration, ORC
* writer properties.
*
* @param vectorizer The vectorizer implementation to convert input record to a
* VectorizerRowBatch.
* @param writerProperties Properties that can be used in ORC WriterOptions.
*/
public OrcBulkWriterFactory(
Vectorizer<T> vectorizer, Properties writerProperties, Configuration configuration) {
this.vectorizer = checkNotNull(vectorizer);
this.writerProperties = writerProperties;
this.confMap = new HashMap<>();
// Todo: Replace the Map based approach with a better approach
for (Map.Entry<String, String> entry : configuration) {
confMap.put(entry.getKey(), entry.getValue());
}
}
@Override
public BulkWriter<T> create(FSDataOutputStream out) throws IOException {
OrcFile.WriterOptions opts = getWriterOptions();
opts.physicalWriter(new PhysicalWriterImpl(out, opts));
// The path of the Writer is not used to indicate the destination file
// in this case since we have used a dedicated physical writer to write
// to the give output stream directly. However, the path would be used as
// the key of writer in the ORC memory manager, thus we need to make it unique.
Path unusedPath = new Path(UUID.randomUUID().toString());
return new OrcBulkWriter<>(vectorizer, new WriterImpl(null, unusedPath, opts));
}
@VisibleForTesting
protected OrcFile.WriterOptions getWriterOptions() {
if (null == writerOptions) {
Configuration conf = new ThreadLocalClassLoaderConfiguration();
for (Map.Entry<String, String> entry : confMap.entrySet()) {
conf.set(entry.getKey(), entry.getValue());
}
writerOptions = OrcFile.writerOptions(writerProperties, conf);
writerOptions.setSchema(this.vectorizer.getSchema());
}
return writerOptions;
}
}
|
OrcBulkWriterFactory
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-resource-server/src/test/java/org/springframework/security/oauth2/server/resource/web/server/BearerTokenServerAuthenticationEntryPointTests.java
|
{
"start": 1512,
"end": 4690
}
|
class ____ {
private BearerTokenServerAuthenticationEntryPoint entryPoint = new BearerTokenServerAuthenticationEntryPoint();
private MockServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("/"));
@Test
public void commenceWhenNotOAuth2AuthenticationExceptionThenBearer() {
this.entryPoint.commence(this.exchange, new BadCredentialsException("")).block();
assertThat(getResponse().getHeaders().getFirst(HttpHeaders.WWW_AUTHENTICATE)).isEqualTo("Bearer");
assertThat(getResponse().getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
public void commenceWhenRealmNameThenHasRealmName() {
this.entryPoint.setRealmName("Realm");
this.entryPoint.commence(this.exchange, new BadCredentialsException("")).block();
assertThat(getResponse().getHeaders().getFirst(HttpHeaders.WWW_AUTHENTICATE))
.isEqualTo("Bearer realm=\"Realm\"");
assertThat(getResponse().getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
public void commenceWhenOAuth2AuthenticationExceptionThenContainsErrorInformation() {
OAuth2Error oauthError = new OAuth2Error(OAuth2ErrorCodes.INVALID_REQUEST);
OAuth2AuthenticationException exception = new OAuth2AuthenticationException(oauthError);
this.entryPoint.commence(this.exchange, exception).block();
assertThat(getResponse().getHeaders().getFirst(HttpHeaders.WWW_AUTHENTICATE))
.isEqualTo("Bearer error=\"invalid_request\"");
assertThat(getResponse().getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
public void commenceWhenOAuth2ErrorCompleteThenContainsErrorInformation() {
OAuth2Error oauthError = new OAuth2Error(OAuth2ErrorCodes.INVALID_REQUEST, "Oops", "https://example.com");
OAuth2AuthenticationException exception = new OAuth2AuthenticationException(oauthError);
this.entryPoint.commence(this.exchange, exception).block();
assertThat(getResponse().getHeaders().getFirst(HttpHeaders.WWW_AUTHENTICATE)).isEqualTo(
"Bearer error=\"invalid_request\", error_description=\"Oops\", error_uri=\"https://example.com\"");
assertThat(getResponse().getStatusCode()).isEqualTo(HttpStatus.UNAUTHORIZED);
}
@Test
public void commenceWhenBearerTokenThenErrorInformation() {
OAuth2Error oauthError = new BearerTokenError(OAuth2ErrorCodes.INVALID_REQUEST, HttpStatus.BAD_REQUEST, "Oops",
"https://example.com");
OAuth2AuthenticationException exception = new OAuth2AuthenticationException(oauthError);
this.entryPoint.commence(this.exchange, exception).block();
assertThat(getResponse().getHeaders().getFirst(HttpHeaders.WWW_AUTHENTICATE)).isEqualTo(
"Bearer error=\"invalid_request\", error_description=\"Oops\", error_uri=\"https://example.com\"");
assertThat(getResponse().getStatusCode()).isEqualTo(HttpStatus.BAD_REQUEST);
}
@Test
public void commenceWhenNoSubscriberThenNothingHappens() {
this.entryPoint.commence(this.exchange, new BadCredentialsException(""));
assertThat(getResponse().getHeaders().headerNames()).isEmpty();
assertThat(getResponse().getStatusCode()).isNull();
}
private MockServerHttpResponse getResponse() {
return this.exchange.getResponse();
}
}
|
BearerTokenServerAuthenticationEntryPointTests
|
java
|
apache__camel
|
components/camel-smpp/src/main/java/org/apache/camel/component/smpp/SmppConstants.java
|
{
"start": 941,
"end": 22296
}
|
interface ____ {
@Metadata(label = "producer", description = "*For SubmitSm, SubmitMulti and ReplaceSm* The data\n" +
"coding according to the SMPP 3.4 specification, section 5.2.19. Use the\n" +
"URI option `alphabet` settings above.",
javaType = "Byte")
String ALPHABET = "CamelSmppAlphabet";
@Metadata(description = "The command", javaType = "String")
String COMMAND = "CamelSmppCommand";
@Metadata(label = "consumer", description = "*only for AlertNotification, DeliverSm and DataSm* The command id field\n" +
"identifies the particular SMPP PDU. For the complete list of defined\n" +
"values see chapter 5.1.2.1 in the smpp specification v3.4.",
javaType = "Integer")
String COMMAND_ID = "CamelSmppCommandId";
@Metadata(label = "consumer", description = "*only for DataSm* The Command status of the message.", javaType = "Integer")
String COMMAND_STATUS = "CamelSmppCommandStatus";
@Metadata(label = "producer", description = "*For SubmitSm, SubmitMulti and ReplaceSm* The data\n" +
"coding according to the SMPP 3.4 specification, section 5.2.19. Use the\n" +
"URI option `alphabet` settings above.",
javaType = "Byte")
String DATA_CODING = "CamelSmppDataCoding";
@Metadata(label = "producer", description = "The splitter", javaType = "org.apache.camel.component.smpp.SmppSplitter")
String DATA_SPLITTER = "CamelSmppSplitter";
@Metadata(label = "consumer", description = "*only for smsc DeliveryReceipt* Number of short messages delivered. This\n" +
"is only relevant where the original message was submitted to a\n" +
"distribution list.The value is padded with leading zeros if necessary.",
javaType = "Integer")
String DELIVERED = "CamelSmppDelivered";
@Metadata(description = "*Producer:* *only for SubmitSm, SubmitMulti, CancelSm and DataSm* Defines the\n" +
"destination SME address(es). For mobile terminated messages, this is the\n" +
"directory number of the recipient MS. It must be a `List<String>` for\n" +
"SubmitMulti and a `String` otherwise.\n" +
"*Consumer:* *only for DeliverSm and DataSm:* Defines the destination SME address.\n" +
"For mobile terminated messages, this is the directory number of the\n" +
"recipient MS.",
javaType = "List or String")
String DEST_ADDR = "CamelSmppDestAddr";
@Metadata(description = "*Producer:* *only for SubmitSm, SubmitMulti, CancelSm and DataSm* Defines the\n" +
"numeric plan indicator (NPI) to be used in the SME destination address\n" +
"parameters. Use the URI option `sourceAddrNpi` values defined above.\n" +
"*Consumer:* *only for DataSm* Defines the numeric plan indicator (NPI) in the\n" +
"destination address parameters. Use the URI option `sourceAddrNpi`\n" +
"values defined above.",
javaType = "Byte")
String DEST_ADDR_NPI = "CamelSmppDestAddrNpi";
@Metadata(description = "*Producer:* *only for SubmitSm, SubmitMulti, CancelSm and DataSm* Defines the type\n" +
"of number (TON) to be used in the SME destination address parameters.\n" +
"Use the `sourceAddrTon` URI option values defined above.\n" +
"*Consumer:* *only for DataSm* Defines the type of number (TON) in the destination\n" +
"address parameters. Use the `sourceAddrTon` URI option values defined\n" +
"above.",
javaType = "Byte")
String DEST_ADDR_TON = "CamelSmppDestAddrTon";
@Metadata(label = "consumer", description = "*only for smsc DeliveryReceipt* The time and date at which the short\n" +
"message reached it's final state. The format is as follows: YYMMDDhhmm.",
javaType = "Date")
String DONE_DATE = "CamelSmppDoneDate";
@Metadata(label = "producer", description = "*only for SubmitSm,\n" +
"SubmitMulti and DataSm*. Specifies the encoding (character set name) of\n" +
"the bytes in the message body. If the message body is a string then\n" +
"this is not relevant because Java Strings are always Unicode. If the\n" +
"body is a byte array then this header can be used to indicate that it is\n" +
"ISO-8859-1 or some other value. Default value is specified by the\n" +
"endpoint configuration parameter _encoding_",
javaType = "String")
String ENCODING = "CamelSmppEncoding";
@Metadata(description = "*Producer:* *only for SubmitMultiSm* The errors which\n" +
"occurred by sending the short message(s) the form `Map<String, List<Map<String, Object>>>` (messageID : (destAddr :\n"
+
"address, error : errorCode)).\n" +
"*Consumer:* *only for smsc DeliveryReceipt* Where appropriate this may hold a\n" +
"Network specific error code or an SMSC error code for the attempted\n" +
"delivery of the message. These errors are Network or SMSC specific and\n" +
"are not included here.",
javaType = "String or Map<String, List<Map<String, Object>>>")
String ERROR = "CamelSmppError";
@Metadata(label = "producer", description = "the ASM class", javaType = "org.jsmpp.bean.ESMClass")
String ESM_CLASS = "CamelSmppClass";
@Metadata(label = "consumer", description = "*only for AlertNotification* Defines the destination ESME address. For\n" +
"mobile terminated messages, this is the directory number of the\n" +
"recipient MS.",
javaType = "String")
String ESME_ADDR = "CamelSmppEsmeAddr";
@Metadata(label = "consumer", description = "*only for AlertNotification* Defines the numeric plan indicator (NPI) to\n" +
"be used in the ESME originator address parameters. Use the URI option\n" +
"`sourceAddrNpi` values defined above.",
javaType = "Byte")
String ESME_ADDR_NPI = "CamelSmppEsmeAddrNpi";
@Metadata(label = "consumer", description = "*only for AlertNotification* Defines the type of number (TON) to be used\n" +
"in the ESME originator address parameters. Use the `sourceAddrTon` URI\n" +
"option values defined above.",
javaType = "Byte")
String ESME_ADDR_TON = "CamelSmppEsmeAddrTon";
@Metadata(label = "producer", description = "The final date", javaType = "java.util.Date")
String FINAL_DATE = "CamelSmppFinalDate";
@Metadata(label = "consumer", description = "*only for smsc DeliveryReceipt:* The final status of the message.",
javaType = "org.jsmpp.util.DeliveryReceiptState")
String FINAL_STATUS = "CamelSmppStatus";
@Metadata(description = "*Producer:* The id to identify the submitted short message(s) for later use.\n" +
"In case of a ReplaceSm, QuerySm, CancelSm and DataSm this\n" +
"header value is a `String`. In case of a SubmitSm or SubmitMultiSm this\n" +
"header value is a `List<String>`.\n" +
"*Consumer:* *only for smsc DeliveryReceipt and DataSm* The message ID allocated to\n" +
"the message by the SMSC when originally submitted.",
javaType = "String or List<String>")
String ID = "CamelSmppId";
@Metadata(label = "producer", description = "The message date", javaType = "String")
String MESSAGE_STATE = "CamelSmppMessageState";
@Metadata(label = "consumer", description = "Identifies the type of an incoming message:\n" +
"`AlertNotification`: an SMSC alert notification,\n" +
"`DataSm`: an SMSC data short message,\n" +
"`DeliveryReceipt`: an SMSC delivery receipt,\n" +
"`DeliverSm`: an SMSC deliver short message",
javaType = "String")
String MESSAGE_TYPE = "CamelSmppMessageType";
@Metadata(label = "producer", description = "*only for SubmitSm and SubmitMulti* Allows the originating SME to assign\n" +
"a priority level to the short message. Use the URI option `priorityFlag`\n" +
"settings above.",
javaType = "Byte")
String PRIORITY_FLAG = "CamelSmppPriorityFlag";
@Metadata(label = "producer", description = "The protocol id", javaType = "Byte")
String PROTOCOL_ID = "CamelSmppProtocolId";
@Metadata(description = "*Producer:* *only for SubmitSm, ReplaceSm, SubmitMulti and DataSm* Is used to\n" +
"request an SMSC delivery receipt and/or SME originated acknowledgements.\n" +
"Use the URI option `registeredDelivery` settings above.\n" +
"*Consumer:* *only for DataSm* Is used to request an delivery receipt and/or SME\n" +
"originated acknowledgements. Same values as in Producer header list\n" +
"above.",
javaType = "Byte")
String REGISTERED_DELIVERY = "CamelSmppRegisteredDelivery";
@Metadata(label = "producer", description = "*only for SubmitSm, SubmitMulti* Is used to\n" +
"request the SMSC delivery receipt only on the last segment of multi-segment (long) messages.\n"
+
"Use the URI option `singleDLR` settings above.",
javaType = "Boolean")
String SINGLE_DLR = "CamelSmppSingleDLR";
@Metadata(label = "producer", description = "*only for SubmitSm and SubmitMulti* The replace if present flag\n" +
"parameter is used to request the SMSC to replace a previously submitted\n" +
"message, that is still pending delivery. The SMSC will replace an\n" +
"existing message provided that the source address, destination address\n" +
"and service type match the same fields in the new message. The following\n" +
"values are defined: `0`, Don't replace and `1`, Replace",
javaType = "Boolean")
String REPLACE_IF_PRESENT_FLAG = "CamelSmppReplaceIfPresentFlag";
@Metadata(description = "*Producer:* only for SubmitSm, SubmitMulti and ReplaceSm* This parameter specifies\n" +
"the scheduled time at which the message delivery should be first\n" +
"attempted. It defines either the absolute date and time or relative time\n" +
"from the current SMSC time at which delivery of this message will be\n" +
"attempted by the SMSC. It can be specified in either absolute time\n" +
"format or relative time format. The encoding of a time format is\n" +
"specified in chapter 7.1.1. in the smpp specification v3.4.\n" +
"*Consumer:* *only for DeliverSm:* This parameter specifies the scheduled time at\n" +
"which the message delivery should be first attempted. It defines either\n" +
"the absolute date and time or relative time from the current SMSC time\n" +
"at which delivery of this message will be attempted by the SMSC. It can\n" +
"be specified in either absolute time format or relative time format. The\n" +
"encoding of a time format is specified in Section 7.1.1. in the smpp\n" +
"specification v3.4.",
javaType = "Date")
String SCHEDULE_DELIVERY_TIME = "CamelSmppScheduleDeliveryTime";
@Metadata(label = "producer", description = "*only for SubmitSm and SubmitMultiSm* The total\n" +
"number of messages which has been sent.",
javaType = "Integer")
String SENT_MESSAGE_COUNT = "CamelSmppSentMessageCount";
@Metadata(label = "consumer", description = "*only for AlertNotification, DeliverSm and DataSm* A sequence number\n" +
"allows a response PDU to be correlated with a request PDU. The\n" +
"associated SMPP response PDU must preserve this field.",
javaType = "int")
String SEQUENCE_NUMBER = "CamelSmppSequenceNumber";
@Metadata(description = "*Producer:* The service type parameter can be used to indicate the SMS Application\n" +
"service associated with the message. Use the URI option `serviceType`\n" +
"settings above.\n" +
"*Consumer:* *only for DeliverSm and DataSm* The service type parameter indicates the\n" +
"SMS Application service associated with the message.",
javaType = "String")
String SERVICE_TYPE = "CamelSmppServiceType";
@Metadata(description = "*Producer:* Defines the address of SME (Short Message Entity) which originated this message.\n" +
"*Consumer:* *Only for AlertNotification, DeliverSm and DataSm* Defines the address\n" +
"of SME (Short Message Entity) which originated this message.",
javaType = "String")
String SOURCE_ADDR = "CamelSmppSourceAddr";
@Metadata(description = "*Producer:* Defines the numeric plan indicator (NPI) to be used in the SME\n" +
"originator address parameters. Use the URI option `sourceAddrNpi` values\n" +
"defined above.\n" +
"*Consumer:* *only for AlertNotification and DataSm* Defines the numeric plan\n" +
"indicator (NPI) to be used in the SME originator address parameters. Use\n" +
"the URI option `sourceAddrNpi` values defined above.",
javaType = "Byte")
String SOURCE_ADDR_NPI = "CamelSmppSourceAddrNpi";
@Metadata(description = "*Producer:* Defines the type of number (TON) to be used in the SME originator\n" +
"address parameters. Use the `sourceAddrTon` URI option values defined\n" +
"above.\n" +
"*Consumer:* *only for AlertNotification and DataSm* Defines the type of number (TON)\n" +
"to be used in the SME originator address parameters. Use the\n" +
"`sourceAddrTon` URI option values defined above.",
javaType = "Byte")
String SOURCE_ADDR_TON = "CamelSmppSourceAddrTon";
@Metadata(label = "consumer", description = "*only for smsc DeliveryReceipt* Number of short messages originally\n" +
"submitted. This is only relevant when the original message was submitted\n" +
"to a distribution list.The value is padded with leading zeros if\n" +
"necessary.",
javaType = "Integer")
String SUBMITTED = "CamelSmppSubmitted";
@Metadata(label = "consumer", description = "*only for smsc DeliveryReceipt* The time and date at which the short\n" +
"message was submitted. In the case of a message which has been replaced,\n" +
"this is the date that the original message was replaced. The format is\n" +
"as follows: YYMMDDhhmm.",
javaType = "Date")
String SUBMIT_DATE = "CamelSmppSubmitDate";
@Metadata(label = "producer", description = "The system id.", javaType = "String")
String SYSTEM_ID = "CamelSmppSystemId";
@Metadata(label = "producer", description = "The password.", javaType = "String")
String PASSWORD = "CamelSmppPassword";
@Metadata(description = "*Producer:* *only for SubmitSm, SubmitMulti and ReplaceSm* The validity period\n" +
"parameter indicates the SMSC expiration time, after which the message\n" +
"should be discarded if not delivered to the destination. If it's\n" +
"provided as `Date`, it's interpreted as absolute time or relative time\n" +
"format if you provide it as `String` as specified in chapter 7.1.1 in\n" +
"the smpp specification v3.4.\n" +
"*Consumer:* *only for DeliverSm* The validity period parameter indicates the SMSC\n" +
"expiration time, after which the message should be discarded if not\n" +
"delivered to the destination. It can be defined in absolute time format\n" +
"or relative time format. The encoding of absolute and relative time\n" +
"format is specified in Section 7.1.1 in the smpp specification v3.4.",
javaType = "String or Date")
String VALIDITY_PERIOD = "CamelSmppValidityPeriod";
@Metadata(label = "consumer", description = "The optional parameters by name.", javaType = "Map<String, Object>",
deprecationNote = "Use CamelSmppOptionalParameter instead")
String OPTIONAL_PARAMETERS = "CamelSmppOptionalParameters";
@Metadata(description = "*Producer:* *only for SubmitSm, SubmitMulti and\n" +
"DataSm* The optional parameter which are send to the SMSC. The value is\n" +
"converted in the following way: `String` -> `org.jsmpp.bean.OptionalParameter.COctetString`, \n" +
"`byte[]` -> `org.jsmpp.bean.OptionalParameter.OctetString`, \n" +
"`Byte` -> `org.jsmpp.bean.OptionalParameter.Byte`,\n" +
"`Integer` -> `org.jsmpp.bean.OptionalParameter.Int`,\n" +
"`Short` -> `org.jsmpp.bean.OptionalParameter.Short`, \n" +
"`null` -> `org.jsmpp.bean.OptionalParameter.Null`\n" +
"*Consumer:* *only for DeliverSm* The optional\n" +
"parameters send back by the SMSC. The key is the `Short` code for the\n" +
"optional parameter. The value is converted in the following way: \n" +
"`org.jsmpp.bean.OptionalParameter.COctetString` -> `String`,\n" +
"`org.jsmpp.bean.OptionalParameter.OctetString` -> `byte[]`,\n" +
"`org.jsmpp.bean.OptionalParameter.Byte` -> `Byte`,\n" +
"`org.jsmpp.bean.OptionalParameter.Int` -> `Integer`,\n" +
"`org.jsmpp.bean.OptionalParameter.Short` -> `Short`,\n" +
"`org.jsmpp.bean.OptionalParameter.Null` -> `null`",
javaType = "Map<Short, Object>")
String OPTIONAL_PARAMETER = "CamelSmppOptionalParameter";
@Metadata(label = "producer", description = "*only for SubmitSm,\n" +
"SubmitMulti and DataSm*. Specifies the policy for message splitting for\n" +
"this exchange. Possible values are described in the endpoint\n" +
"configuration parameter _splittingPolicy_",
javaType = "String")
String SPLITTING_POLICY = "CamelSmppSplittingPolicy";
byte UNKNOWN_ALPHABET = -1;
}
|
SmppConstants
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/output/committer/manifest/ManifestCommitterTestSupport.java
|
{
"start": 13443,
"end": 15617
}
|
class ____ {
/** Job ID; will be created uniquely for each instance. */
private final String jobId;
/**
* Store the details as strings; generate
* IDs on demand.
*/
private final String[][] taskAttempts;
/**
* Constructor.
* @param tasks number of tasks.
* @param attempts number of attempts.
*/
public JobAndTaskIDsForTests(int tasks, int attempts) {
this(randomJobId(), tasks, attempts);
}
public JobAndTaskIDsForTests(final String jobId,
int tasks, int attempts) {
this.jobId = jobId;
this.taskAttempts = new String[tasks][attempts];
for (int i = 0; i < tasks; i++) {
for (int j = 0; j < attempts; j++) {
String a = String.format(ATTEMPT_STRING,
jobId, i, j);
this.taskAttempts[i][j] = a;
}
}
}
/**
* Get the job ID.
* @return job ID string.
*/
public String getJobId() {
return jobId;
}
/**
* Get the job ID as the MR type.
* @return job ID type.
*/
public JobID getJobIdType() {
return getTaskIdType(0).getJobID();
}
/**
* Get a task attempt ID.
* @param task task index
* @param attempt attempt number.
* @return the task attempt.
*/
public String getTaskAttempt(int task, int attempt) {
return taskAttempts[task][attempt];
}
/**
* Get task attempt ID as the MR type.
* @param task task index
* @param attempt attempt number.
* @return the task attempt type
*/
public TaskAttemptID getTaskAttemptIdType(int task, int attempt) {
return TaskAttemptID.forName(getTaskAttempt(task, attempt));
}
/**
* Get task ID as the MR type.
* @param task task index
* @return the task ID type
*/
public TaskID getTaskIdType(int task) {
return TaskAttemptID.forName(getTaskAttempt(task, 0)).getTaskID();
}
/**
* Get task ID as a string.
* @param task task index
* @return the task ID
*/
public String getTaskId(int task) {
return getTaskIdType(task).toString();
}
}
}
|
JobAndTaskIDsForTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/model/internal/EntityBinder.java
|
{
"start": 34761,
"end": 47159
}
|
class ____'re processing is the root of the hierarchy, so
// let's see if we had a discriminator column (it's perfectly
// valid for joined inheritance to not have a discriminator)
if ( discriminatorColumn != null ) {
// we do have a discriminator column
if ( state.hasSiblings() || !discriminatorColumn.isImplicit() ) {
final var rootClass = (RootClass) persistentClass;
bindDiscriminatorColumnToRootPersistentClass( rootClass, discriminatorColumn, holder );
if ( context.getBuildingOptions().shouldImplicitlyForceDiscriminatorInSelect() ) {
rootClass.setForceDiscriminator( true );
}
}
}
}
}
private void checkNoJoinColumns(ClassDetails annotatedClass) {
if ( annotatedClass.hasAnnotationUsage( PrimaryKeyJoinColumns.class, modelsContext() )
|| annotatedClass.hasAnnotationUsage( PrimaryKeyJoinColumn.class, modelsContext() ) ) {
throw new AnnotationException( "Entity class '" + annotatedClass.getName()
+ "' may not specify a '@PrimaryKeyJoinColumn'" );
}
}
private void checkNoOnDelete(ClassDetails annotatedClass) {
if ( annotatedClass.hasAnnotationUsage( PrimaryKeyJoinColumns.class, modelsContext() )
|| annotatedClass.hasAnnotationUsage( PrimaryKeyJoinColumn.class, modelsContext() ) ) {
throw new AnnotationException( "Entity class '" + annotatedClass.getName() + "' may not be annotated '@OnDelete'" );
}
}
private static void handleForeignKey(ClassDetails clazzToProcess, MetadataBuildingContext context, DependantValue key) {
final var foreignKey = clazzToProcess.getDirectAnnotationUsage( ForeignKey.class );
handleForeignKeyConstraint( key, foreignKey, nestedForeignKey( clazzToProcess ), context );
}
private static ForeignKey nestedForeignKey(ClassDetails clazzToProcess) {
final var pkJoinColumn = clazzToProcess.getDirectAnnotationUsage( PrimaryKeyJoinColumn.class );
final var pkJoinColumns = clazzToProcess.getDirectAnnotationUsage( PrimaryKeyJoinColumns.class );
if ( pkJoinColumn != null ) {
return pkJoinColumn.foreignKey();
}
else if ( pkJoinColumns != null ) {
return pkJoinColumns.foreignKey();
}
else {
return null;
}
}
private void bindDiscriminatorColumnToRootPersistentClass(
RootClass rootClass,
AnnotatedDiscriminatorColumn discriminatorColumn,
PropertyHolder holder) {
if ( rootClass.getDiscriminator() == null ) {
if ( discriminatorColumn == null ) {
throw new AssertionFailure( "discriminator column should have been built" );
}
final var columns = new AnnotatedColumns();
columns.setPropertyHolder( holder );
columns.setBuildingContext( context );
columns.setJoins( secondaryTables );
// discriminatorColumn.setJoins( secondaryTables );
// discriminatorColumn.setPropertyHolder( holder );
discriminatorColumn.setParent( columns );
final var discriminatorColumnBinding = new BasicValue( context, rootClass.getTable() );
rootClass.setDiscriminator( discriminatorColumnBinding );
discriminatorColumn.linkWithValue( discriminatorColumnBinding );
discriminatorColumnBinding.setTypeName( discriminatorColumn.getDiscriminatorTypeName() );
rootClass.setPolymorphic( true );
getMetadataCollector()
.addSecondPass( new DiscriminatorColumnSecondPass( rootClass.getEntityName(),
getDatabase().getDialect() ) );
}
}
/**
* Process all discriminator-related metadata per rules for "single table" inheritance
*/
private AnnotatedDiscriminatorColumn processSingleTableDiscriminatorProperties(InheritanceState inheritanceState) {
final var discriminatorColumn = annotatedClass.getAnnotationUsage( DiscriminatorColumn.class, modelsContext() );
final var discriminatorFormula = getOverridableAnnotation( annotatedClass, DiscriminatorFormula.class, context );
if ( !inheritanceState.hasParents()
|| annotatedClass.hasAnnotationUsage( Inheritance.class, modelsContext() ) ) {
return buildDiscriminatorColumn( discriminatorColumn, discriminatorFormula,
null, DEFAULT_DISCRIMINATOR_COLUMN_NAME, context );
}
else {
// not a root entity
if ( discriminatorColumn != null ) {
throw new AnnotationException( "Entity class '" + annotatedClass.getName()
+ "' is annotated '@DiscriminatorColumn' but it is not the root of the entity inheritance hierarchy");
}
if ( discriminatorFormula != null ) {
throw new AnnotationException( "Entity class '" + annotatedClass.getName()
+ "' is annotated '@DiscriminatorFormula' but it is not the root of the entity inheritance hierarchy");
}
return null;
}
}
/**
* Process all discriminator-related metadata per rules for "joined" inheritance, taking
* into account {@value AvailableSettings#IMPLICIT_DISCRIMINATOR_COLUMNS_FOR_JOINED_SUBCLASS}
* and {@value AvailableSettings#IGNORE_EXPLICIT_DISCRIMINATOR_COLUMNS_FOR_JOINED_SUBCLASS}.
*/
private AnnotatedDiscriminatorColumn processJoinedDiscriminatorProperties(InheritanceState inheritanceState) {
final var modelContext = modelsContext();
if ( annotatedClass.hasAnnotationUsage( DiscriminatorFormula.class, modelContext ) ) {
throw new AnnotationException( "Entity class '" + annotatedClass.getName()
+ "' has 'JOINED' inheritance and is annotated '@DiscriminatorFormula'" );
}
final var discriminatorColumn = annotatedClass.getAnnotationUsage( DiscriminatorColumn.class, modelContext );
if ( !inheritanceState.hasParents()
|| annotatedClass.hasAnnotationUsage( Inheritance.class, modelContext ) ) {
return useDiscriminatorColumnForJoined( discriminatorColumn )
? buildDiscriminatorColumn( discriminatorColumn, null, null, DEFAULT_DISCRIMINATOR_COLUMN_NAME, context )
: null;
}
else {
// not a root entity
if ( discriminatorColumn != null ) {
throw new AnnotationException( "Entity class '" + annotatedClass.getName()
+ "' is annotated '@DiscriminatorColumn' but it is not the root of the entity inheritance hierarchy");
}
return null;
}
}
/**
* We want to process the discriminator column if either:
* <ol>
* <li>there is an explicit {@link DiscriminatorColumn} annotation and we are not told to ignore it
* via {@value AvailableSettings#IGNORE_EXPLICIT_DISCRIMINATOR_COLUMNS_FOR_JOINED_SUBCLASS}, or
* <li>there is no explicit {@link DiscriminatorColumn} annotation but we are told to create it
* implicitly via {@value AvailableSettings#IMPLICIT_DISCRIMINATOR_COLUMNS_FOR_JOINED_SUBCLASS}.
* </ol>
*/
private boolean useDiscriminatorColumnForJoined(DiscriminatorColumn discriminatorColumn) {
final var buildingOptions = context.getBuildingOptions();
if ( discriminatorColumn != null ) {
final boolean ignore = buildingOptions.ignoreExplicitDiscriminatorsForJoinedInheritance();
if ( ignore ) {
if ( BOOT_LOGGER.isTraceEnabled() ) {
BOOT_LOGGER.ignoringExplicitDiscriminatorForJoined( annotatedClass.getName() );
}
}
return !ignore;
}
else {
final boolean createImplicit = buildingOptions.createImplicitDiscriminatorsForJoinedInheritance();
if ( createImplicit ) {
if ( BOOT_LOGGER.isTraceEnabled() ) {
BOOT_LOGGER.inferringImplicitDiscriminatorForJoined( annotatedClass.getName() );
}
}
return createImplicit;
}
}
private void processIdPropertiesIfNotAlready(
PersistentClass persistentClass,
InheritanceState inheritanceState,
MetadataBuildingContext context,
PropertyHolder propertyHolder,
Set<String> idPropertiesIfIdClass,
ElementsToProcess elementsToProcess,
Map<ClassDetails, InheritanceState> inheritanceStates) {
final Set<String> missingIdProperties = new HashSet<>( idPropertiesIfIdClass );
final Set<String> missingEntityProperties = new HashSet<>();
for ( var propertyAnnotatedElement : elementsToProcess.getElements() ) {
final String propertyName = propertyAnnotatedElement.getPropertyName();
if ( !idPropertiesIfIdClass.contains( propertyName ) ) {
final var memberDetails = propertyAnnotatedElement.getAttributeMember();
final boolean hasIdAnnotation = hasIdAnnotation( memberDetails );
if ( !idPropertiesIfIdClass.isEmpty() && !isIgnoreIdAnnotations() && hasIdAnnotation ) {
missingEntityProperties.add( propertyName );
}
else {
final boolean subclassAndSingleTableStrategy =
inheritanceState.getType() == SINGLE_TABLE
&& inheritanceState.hasParents();
if ( !hasIdAnnotation && memberDetails.hasAnnotationUsage( GeneratedValue.class, modelsContext() ) ) {
throw new AnnotationException( "Property '" + getPath( propertyHolder, propertyAnnotatedElement )
+ "' is annotated '@GeneratedValue' but is not part of an identifier" );
}
processElementAnnotations(
propertyHolder,
subclassAndSingleTableStrategy
? Nullability.FORCED_NULL
: Nullability.NO_CONSTRAINT,
propertyAnnotatedElement,
this,
false,
false,
false,
context,
inheritanceStates
);
}
}
else {
missingIdProperties.remove( propertyName );
}
}
if ( !missingIdProperties.isEmpty() ) {
throw new AnnotationException( "Entity '" + persistentClass.getEntityName()
+ "' has an '@IdClass' with properties " + getMissingPropertiesString( missingIdProperties )
+ " which do not match properties of the entity class" );
}
else if ( !missingEntityProperties.isEmpty() ) {
throw new AnnotationException( "Entity '" + persistentClass.getEntityName()
+ "' has '@Id' annotated properties " + getMissingPropertiesString( missingEntityProperties )
+ " which do not match properties of the specified '@IdClass'" );
}
}
private static String getMissingPropertiesString(Set<String> propertyNames) {
final var missingProperties = new StringBuilder();
for ( String propertyName : propertyNames ) {
if ( !missingProperties.isEmpty() ) {
missingProperties.append( ", " );
}
missingProperties.append( "'" ).append( propertyName ).append( "'" );
}
return missingProperties.toString();
}
private static PersistentClass makePersistentClass(
InheritanceState inheritanceState,
PersistentClass superEntity,
MetadataBuildingContext metadataBuildingContext) {
//we now know what kind of persistent entity it is
if ( !inheritanceState.hasParents() ) {
return new RootClass( metadataBuildingContext );
}
else {
return switch ( inheritanceState.getType() ) {
case SINGLE_TABLE -> new SingleTableSubclass( superEntity, metadataBuildingContext );
case JOINED -> new JoinedSubclass( superEntity, metadataBuildingContext );
case TABLE_PER_CLASS -> new UnionSubclass( superEntity, metadataBuildingContext );
};
}
}
private static AnnotatedJoinColumns subclassJoinColumns(
ClassDetails clazzToProcess,
PersistentClass superEntity,
MetadataBuildingContext context) {
//@Inheritance(JOINED) subclass need to link back to the super entity
final var joinColumns = new AnnotatedJoinColumns();
joinColumns.setBuildingContext( context );
final var modelsContext = context.getBootstrapContext().getModelsContext();
if ( clazzToProcess.hasAnnotationUsage( PrimaryKeyJoinColumn.class, modelsContext ) ) {
final var columns = clazzToProcess.getRepeatedAnnotationUsages( PrimaryKeyJoinColumn.class, modelsContext );
if ( columns.length == 0 ) {
// PrimaryKeyJoinColumns must not be empty according to Javadoc
throw new AnnotationException( "Empty '@PrimaryKeyJoinColumns' annotation" );
}
for ( var column : columns ) {
buildInheritanceJoinColumn(
column,
null,
superEntity.getIdentifier(),
joinColumns,
context
);
}
}
else {
buildInheritanceJoinColumn(
null,
null,
superEntity.getIdentifier(),
joinColumns,
context
);
}
return joinColumns;
}
private static PersistentClass getSuperEntity(
ClassDetails clazzToProcess,
Map<ClassDetails, InheritanceState> inheritanceStates,
MetadataBuildingContext context,
InheritanceState inheritanceState) {
final var superState = getInheritanceStateOfSuperEntity( clazzToProcess, inheritanceStates );
if ( superState == null ) {
return null;
}
else {
final var superEntity =
context.getMetadataCollector()
.getEntityBinding( superState.getClassDetails().getName() );
//check if superclass is not a potential persistent
|
we
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/sources/TableSourceValidation.java
|
{
"start": 1964,
"end": 9630
}
|
class ____ {
/**
* Validates a TableSource.
*
* <ul>
* <li>checks that all fields of the schema can be resolved
* <li>checks that resolved fields have the correct type
* <li>checks that the time attributes are correctly configured.
* </ul>
*
* @param tableSource The {@link TableSource} for which the time attributes are checked.
*/
public static void validateTableSource(TableSource<?> tableSource, TableSchema schema) {
List<RowtimeAttributeDescriptor> rowtimeAttributes = getRowtimeAttributes(tableSource);
Optional<String> proctimeAttribute = getProctimeAttribute(tableSource);
validateNoGeneratedColumns(schema);
validateSingleRowtimeAttribute(rowtimeAttributes);
validateRowtimeAttributesExistInSchema(rowtimeAttributes, schema);
validateProctimeAttributesExistInSchema(proctimeAttribute, schema);
validateLogicalToPhysicalMapping(tableSource, schema);
validateTimestampExtractorArguments(rowtimeAttributes, tableSource);
validateNotOverlapping(rowtimeAttributes, proctimeAttribute);
}
/**
* Checks if the given {@link TableSource} defines a rowtime attribute.
*
* @param tableSource The table source to check.
* @return true if the given table source defines rowtime attribute
*/
public static boolean hasRowtimeAttribute(TableSource<?> tableSource) {
return !getRowtimeAttributes(tableSource).isEmpty();
}
/**
* Checks if the given {@link TableSource} defines a proctime attribute.
*
* @param tableSource The table source to check.
* @return true if the given table source defines proctime attribute.
*/
public static boolean hasProctimeAttribute(TableSource<?> tableSource) {
return getProctimeAttribute(tableSource).isPresent();
}
private static void validateSingleRowtimeAttribute(
List<RowtimeAttributeDescriptor> rowtimeAttributes) {
if (rowtimeAttributes.size() > 1) {
throw new ValidationException(
"Currently, only a single rowtime attribute is supported. "
+ "Please remove all but one RowtimeAttributeDescriptor.");
}
}
private static void validateRowtimeAttributesExistInSchema(
List<RowtimeAttributeDescriptor> rowtimeAttributes, TableSchema tableSchema) {
rowtimeAttributes.forEach(
r -> {
if (!tableSchema.getFieldDataType(r.getAttributeName()).isPresent()) {
throw new ValidationException(
String.format(
"Found a rowtime attribute for field '%s' but it does not exist in the Table. TableSchema: %s",
r.getAttributeName(), tableSchema));
}
});
}
private static void validateProctimeAttributesExistInSchema(
Optional<String> proctimeAttribute, TableSchema tableSchema) {
proctimeAttribute.ifPresent(
r -> {
if (!tableSchema.getFieldDataType(r).isPresent()) {
throw new ValidationException(
String.format(
"Found a proctime attribute for field '%s' but it does not exist in the Table. TableSchema: %s",
r, tableSchema));
}
});
}
private static void validateNotOverlapping(
List<RowtimeAttributeDescriptor> rowtimeAttributes,
Optional<String> proctimeAttribute) {
proctimeAttribute.ifPresent(
proctime -> {
if (rowtimeAttributes.stream()
.anyMatch(
rowtimeAttribute ->
rowtimeAttribute.getAttributeName().equals(proctime))) {
throw new ValidationException(
String.format(
"Field '%s' must not be processing time and rowtime attribute at the same time.",
proctime));
}
});
}
private static void validateLogicalToPhysicalMapping(
TableSource<?> tableSource, TableSchema schema) {
final Function<String, String> fieldMapping = getNameMappingFunction(tableSource);
// if we can
TypeMappingUtils.computePhysicalIndicesOrTimeAttributeMarkers(
tableSource,
schema.getTableColumns(),
true, // this makes no difference for validation, we don't care about the returned
// indices
fieldMapping);
}
private static Function<String, String> getNameMappingFunction(TableSource<?> tableSource) {
final Function<String, String> fieldMapping;
if (tableSource instanceof DefinedFieldMapping
&& ((DefinedFieldMapping) tableSource).getFieldMapping() != null) {
Map<String, String> fieldsMap = ((DefinedFieldMapping) tableSource).getFieldMapping();
if (fieldsMap != null) {
fieldMapping = fieldsMap::get;
} else {
fieldMapping = Function.identity();
}
} else {
fieldMapping = Function.identity();
}
return fieldMapping;
}
private static void validateTimestampExtractorArguments(
List<RowtimeAttributeDescriptor> descriptors, TableSource<?> tableSource) {
if (descriptors.size() == 1) {
TimestampExtractor extractor = descriptors.get(0).getTimestampExtractor();
TypeInformation<?>[] types =
Arrays.stream(
TimestampExtractorUtils.getAccessedFields(
extractor,
tableSource.getProducedDataType(),
getNameMappingFunction(tableSource)))
.map(ResolvedFieldReference::resultType)
.toArray(TypeInformation<?>[]::new);
extractor.validateArgumentFields(types);
}
}
private static void validateNoGeneratedColumns(TableSchema tableSchema) {
if (!TableSchemaUtils.containsPhysicalColumnsOnly(tableSchema)) {
throw new ValidationException(
"TableSource#getTableSchema should only contain physical columns, schema: \n"
+ tableSchema);
}
}
/** Returns a list with all rowtime attribute descriptors of the {@link TableSource}. */
private static List<RowtimeAttributeDescriptor> getRowtimeAttributes(
TableSource<?> tableSource) {
if (tableSource instanceof DefinedRowtimeAttributes) {
return ((DefinedRowtimeAttributes) tableSource).getRowtimeAttributeDescriptors();
}
return Collections.emptyList();
}
/** Returns the proctime attribute of the {@link TableSource} if it is defined. */
private static Optional<String> getProctimeAttribute(TableSource<?> tableSource) {
if (tableSource instanceof DefinedProctimeAttribute) {
return Optional.ofNullable(
((DefinedProctimeAttribute) tableSource).getProctimeAttribute());
}
return Optional.empty();
}
private TableSourceValidation() {}
}
|
TableSourceValidation
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/engine/internal/MutableEntityEntryFactory.java
|
{
"start": 636,
"end": 1374
}
|
class ____ implements EntityEntryFactory {
/**
* Singleton access
*/
public static final MutableEntityEntryFactory INSTANCE = new MutableEntityEntryFactory();
private MutableEntityEntryFactory() {
}
@Override
public EntityEntry createEntityEntry(
Status status,
Object[] loadedState,
Object rowId,
Object id,
Object version,
LockMode lockMode,
boolean existsInDatabase,
EntityPersister persister,
boolean disableVersionIncrement,
PersistenceContext persistenceContext) {
return new EntityEntryImpl(
status,
loadedState,
rowId,
id,
version,
lockMode,
existsInDatabase,
persister,
disableVersionIncrement,
persistenceContext
);
}
}
|
MutableEntityEntryFactory
|
java
|
apache__camel
|
components/camel-quartz/src/test/java/org/apache/camel/component/quartz/SpringQuartzConsumerTwoAppsClusteredRecoveryTest.java
|
{
"start": 4558,
"end": 4969
}
|
class ____ implements Predicate {
private final String expectedPayload;
ClusteringPredicate(boolean pings) {
expectedPayload = pings ? "clustering PINGS!" : "clustering PONGS!";
}
@Override
public boolean matches(Exchange exchange) {
return exchange.getIn().getBody().equals(expectedPayload);
}
}
public static
|
ClusteringPredicate
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/scripting/xmltags/DynamicContext.java
|
{
"start": 5248,
"end": 6191
}
|
class ____ implements PropertyAccessor {
@Override
public Object getProperty(OgnlContext context, Object target, Object name) {
Map map = (Map) target;
Object result = map.get(name);
if (map.containsKey(name) || result != null) {
return result;
}
Object parameterObject = map.get(PARAMETER_OBJECT_KEY);
if (parameterObject instanceof Map) {
return ((Map) parameterObject).get(name);
}
return null;
}
@Override
public void setProperty(OgnlContext context, Object target, Object name, Object value) {
Map<Object, Object> map = (Map<Object, Object>) target;
map.put(name, value);
}
@Override
public String getSourceAccessor(OgnlContext arg0, Object arg1, Object arg2) {
return null;
}
@Override
public String getSourceSetter(OgnlContext arg0, Object arg1, Object arg2) {
return null;
}
}
}
|
ContextAccessor
|
java
|
apache__maven
|
impl/maven-core/src/test/java/org/apache/maven/lifecycle/internal/stub/ProjectDependencyGraphStub.java
|
{
"start": 2419,
"end": 5519
}
|
class ____ implements ProjectDependencyGraph {
public static final MavenProject A = new MavenProject();
public static final MavenProject B = new MavenProject();
public static final MavenProject C = new MavenProject();
public static final MavenProject X = new MavenProject();
public static final MavenProject Y = new MavenProject();
public static final MavenProject Z = new MavenProject();
public static final MavenProject UNKNOWN = new MavenProject();
static {
A.setArtifactId("A");
B.setArtifactId("B");
C.setArtifactId("C");
X.setArtifactId("X");
Y.setArtifactId("Y");
Z.setArtifactId("Z");
}
// This should probably be moved to a separate stub
public static ProjectBuildList getProjectBuildList(MavenSession session)
throws InvalidPluginDescriptorException, PluginVersionResolutionException, PluginDescriptorParsingException,
NoPluginFoundForPrefixException, MojoNotFoundException, PluginNotFoundException,
PluginResolutionException, LifecyclePhaseNotFoundException, LifecycleNotFoundException {
final List<ProjectSegment> list = getProjectBuilds(session);
return new ProjectBuildList(list);
}
public static List<ProjectSegment> getProjectBuilds(MavenSession session)
throws InvalidPluginDescriptorException, PluginVersionResolutionException, PluginDescriptorParsingException,
NoPluginFoundForPrefixException, PluginNotFoundException, MojoNotFoundException,
PluginResolutionException, LifecyclePhaseNotFoundException, LifecycleNotFoundException {
List<ProjectSegment> projectBuilds = new ArrayList<>();
TaskSegment segment = createTaskSegment();
projectBuilds.add(createProjectBuild(A, session, segment));
projectBuilds.add(createProjectBuild(B, session, segment));
projectBuilds.add(createProjectBuild(C, session, segment));
projectBuilds.add(createProjectBuild(X, session, segment));
projectBuilds.add(createProjectBuild(Y, session, segment));
projectBuilds.add(createProjectBuild(Z, session, segment));
return projectBuilds;
}
private static ProjectSegment createProjectBuild(
MavenProject project, MavenSession session, TaskSegment taskSegment)
throws InvalidPluginDescriptorException, PluginVersionResolutionException, PluginDescriptorParsingException,
NoPluginFoundForPrefixException, MojoNotFoundException, PluginNotFoundException,
PluginResolutionException, LifecyclePhaseNotFoundException, LifecycleNotFoundException {
final MavenSession session1 = session.clone();
return new ProjectSegment(project, taskSegment, session1);
}
private static TaskSegment createTaskSegment() {
TaskSegment result = new TaskSegment(false);
result.getTasks().add(new GoalTask("t1"));
result.getTasks().add(new GoalTask("t2"));
return result;
}
|
ProjectDependencyGraphStub
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/DescriptorType.java
|
{
"start": 1707,
"end": 2987
}
|
class ____ extends LogicalType {
private static final long serialVersionUID = 1L;
private static final String FORMAT = "DESCRIPTOR";
private static final Class<?> INPUT_OUTPUT_CONVERSION = ColumnList.class;
private static final Class<?> DEFAULT_CONVERSION = ColumnList.class;
public DescriptorType(boolean isNullable) {
super(isNullable, LogicalTypeRoot.DESCRIPTOR);
}
public DescriptorType() {
this(true);
}
@Override
public LogicalType copy(boolean isNullable) {
return new DescriptorType(isNullable);
}
@Override
public String asSerializableString() {
return withNullability(FORMAT);
}
@Override
public boolean supportsInputConversion(Class<?> clazz) {
return INPUT_OUTPUT_CONVERSION == clazz;
}
@Override
public boolean supportsOutputConversion(Class<?> clazz) {
return INPUT_OUTPUT_CONVERSION == clazz;
}
@Override
public Class<?> getDefaultConversion() {
return DEFAULT_CONVERSION;
}
@Override
public List<LogicalType> getChildren() {
return Collections.emptyList();
}
@Override
public <R> R accept(LogicalTypeVisitor<R> visitor) {
return visitor.visit(this);
}
}
|
DescriptorType
|
java
|
netty__netty
|
codec-native-quic/src/test/java/io/netty/handler/codec/quic/QuicChannelDatagramTest.java
|
{
"start": 1680,
"end": 12243
}
|
class ____ extends AbstractQuicTest {
private static final Random random = new Random();
static final byte[] data = new byte[512];
static {
random.nextBytes(data);
}
@ParameterizedTest
@MethodSource("newSslTaskExecutors")
public void testDatagramFlushInChannelRead(Executor executor) throws Throwable {
testDatagram(executor, false);
}
@ParameterizedTest
@MethodSource("newSslTaskExecutors")
public void testDatagramFlushInChannelReadComplete(Executor executor) throws Throwable {
testDatagram(executor, true);
}
private void testDatagram(Executor executor, boolean flushInReadComplete) throws Throwable {
AtomicReference<QuicDatagramExtensionEvent> serverEventRef = new AtomicReference<>();
QuicChannelValidationHandler serverHandler = new QuicChannelValidationHandler() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
if (msg instanceof ByteBuf) {
final ChannelFuture future;
if (!flushInReadComplete) {
future = ctx.writeAndFlush(msg);
} else {
future = ctx.write(msg);
}
future.addListener(ChannelFutureListener.CLOSE);
} else {
ctx.fireChannelRead(msg);
}
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
if (flushInReadComplete) {
ctx.flush();
}
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof QuicDatagramExtensionEvent) {
serverEventRef.set((QuicDatagramExtensionEvent) evt);
}
super.userEventTriggered(ctx, evt);
}
};
Channel server = QuicTestUtils.newServer(QuicTestUtils.newQuicServerBuilder(executor)
.datagram(10, 10),
InsecureQuicTokenHandler.INSTANCE, serverHandler , new ChannelInboundHandlerAdapter());
InetSocketAddress address = (InetSocketAddress) server.localAddress();
Promise<ByteBuf> receivedBuffer = ImmediateEventExecutor.INSTANCE.newPromise();
AtomicReference<QuicDatagramExtensionEvent> clientEventRef = new AtomicReference<>();
Channel channel = QuicTestUtils.newClient(QuicTestUtils.newQuicClientBuilder(executor)
.datagram(10, 10));
QuicChannelValidationHandler clientHandler = new QuicChannelValidationHandler() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
if (!receivedBuffer.trySuccess((ByteBuf) msg)) {
ReferenceCountUtil.release(msg);
}
}
@Override
public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
if (evt instanceof QuicDatagramExtensionEvent) {
clientEventRef.set((QuicDatagramExtensionEvent) evt);
}
super.userEventTriggered(ctx, evt);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
receivedBuffer.tryFailure(cause);
super.exceptionCaught(ctx, cause);
}
};
try {
QuicChannel quicChannel = QuicTestUtils.newQuicChannelBootstrap(channel)
.handler(clientHandler)
.remoteAddress(address)
.connect()
.get();
quicChannel.writeAndFlush(Unpooled.copiedBuffer(data)).sync();
ByteBuf buffer = receivedBuffer.get();
ByteBuf expected = Unpooled.wrappedBuffer(data);
assertEquals(expected, buffer);
buffer.release();
expected.release();
assertNotEquals(0, serverEventRef.get().maxLength());
assertNotEquals(0, clientEventRef.get().maxLength());
quicChannel.close().sync();
serverHandler.assertState();
clientHandler.assertState();
} finally {
server.close().sync();
// Close the parent Datagram channel as well.
channel.close().sync();
shutdown(executor);
}
}
@ParameterizedTest
@MethodSource("newSslTaskExecutors")
public void testDatagramNoAutoReadMaxMessagesPerRead1(Executor executor) throws Throwable {
testDatagramNoAutoRead(executor, 1, false);
}
@ParameterizedTest
@MethodSource("newSslTaskExecutors")
public void testDatagramNoAutoReadMaxMessagesPerRead3(Executor executor) throws Throwable {
testDatagramNoAutoRead(executor, 3, false);
}
@ParameterizedTest
@MethodSource("newSslTaskExecutors")
public void testDatagramNoAutoReadMaxMessagesPerRead1OutSideEventLoop(Executor executor) throws Throwable {
testDatagramNoAutoRead(executor, 1, true);
}
@ParameterizedTest
@MethodSource("newSslTaskExecutors")
public void testDatagramNoAutoReadMaxMessagesPerRead3OutSideEventLoop(Executor executor) throws Throwable {
testDatagramNoAutoRead(executor, 3, true);
}
private void testDatagramNoAutoRead(Executor executor, int maxMessagesPerRead, boolean readLater) throws Throwable {
Promise<Void> serverPromise = ImmediateEventExecutor.INSTANCE.newPromise();
Promise<ByteBuf> clientPromise = ImmediateEventExecutor.INSTANCE.newPromise();
int numDatagrams = 5;
AtomicInteger serverReadCount = new AtomicInteger();
CountDownLatch latch = new CountDownLatch(numDatagrams);
QuicChannelValidationHandler serverHandler = new QuicChannelValidationHandler() {
private int readPerLoop;
@Override
public void channelActive(ChannelHandlerContext ctx) {
super.channelActive(ctx);
ctx.read();
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
if (msg instanceof ByteBuf) {
readPerLoop++;
ctx.writeAndFlush(msg).addListener(future -> {
if (future.isSuccess()) {
latch.countDown();
}
});
if (serverReadCount.incrementAndGet() == numDatagrams) {
serverPromise.trySuccess(null);
}
} else {
ctx.fireChannelRead(msg);
}
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
if (readPerLoop > maxMessagesPerRead) {
ctx.close();
serverPromise.tryFailure(new AssertionError(
"Read more then " + maxMessagesPerRead + " time per read loop"));
return;
}
readPerLoop = 0;
if (serverReadCount.get() < numDatagrams) {
if (readLater) {
ctx.executor().execute(ctx::read);
} else {
ctx.read();
}
}
}
};
Channel server = QuicTestUtils.newServer(QuicTestUtils.newQuicServerBuilder(executor)
.option(ChannelOption.AUTO_READ, false)
.option(ChannelOption.MAX_MESSAGES_PER_READ, maxMessagesPerRead)
.datagram(10, 10),
InsecureQuicTokenHandler.INSTANCE, serverHandler, new ChannelInboundHandlerAdapter());
InetSocketAddress address = (InetSocketAddress) server.localAddress();
Channel channel = QuicTestUtils.newClient(QuicTestUtils.newQuicClientBuilder(executor)
.datagram(10, 10));
AtomicInteger clientReadCount = new AtomicInteger();
QuicChannelValidationHandler clientHandler = new QuicChannelValidationHandler() {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
if (msg instanceof ByteBuf) {
if (clientReadCount.incrementAndGet() == numDatagrams) {
if (!clientPromise.trySuccess((ByteBuf) msg)) {
ReferenceCountUtil.release(msg);
}
} else {
ReferenceCountUtil.release(msg);
}
} else {
ctx.fireChannelRead(msg);
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
clientPromise.tryFailure(cause);
}
};
try {
QuicChannel quicChannel = QuicTestUtils.newQuicChannelBootstrap(channel)
.handler(clientHandler)
.remoteAddress(address)
.connect()
.get();
for (int i = 0; i < numDatagrams; i++) {
quicChannel.writeAndFlush(Unpooled.copiedBuffer(data)).sync();
// Let's add some sleep in between as this is UDP so we may loose some data otherwise.
Thread.sleep(50);
}
assertTrue(serverPromise.await(3000), "Server received: " + serverReadCount.get() +
", Client received: " + clientReadCount.get());
serverPromise.sync();
assertTrue(clientPromise.await(3000), "Server received: " + serverReadCount.get() +
", Client received: " + clientReadCount.get());
ByteBuf buffer = clientPromise.get();
ByteBuf expected = Unpooled.wrappedBuffer(data);
assertEquals(expected, buffer);
buffer.release();
expected.release();
quicChannel.close().sync();
serverHandler.assertState();
clientHandler.assertState();
} finally {
server.close().sync();
// Close the parent Datagram channel as well.
channel.close().sync();
shutdown(executor);
}
}
}
|
QuicChannelDatagramTest
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/ssl/SslServerWithPemWithSniMatchingSanDNSTest.java
|
{
"start": 1428,
"end": 3757
}
|
class ____ {
@TestHTTPResource(value = "/ssl", ssl = true)
URL url;
private static final String configuration = """
# Enable SSL, configure the key store
quarkus.http.ssl.certificate.files=server-cert.pem,alias.crt,alias-2.crt
quarkus.http.ssl.certificate.key-files=server-key.pem,alias.key,alias-2.key
# Test that server starts with this option
# See https://github.com/quarkusio/quarkus/issues/8336
quarkus.http.insecure-requests=disabled
quarkus.http.ssl.sni=true
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyBean.class)
.addAsResource(new StringAsset((configuration)), "application.properties")
.addAsResource(new File("target/certs/ssl-test-sni.key"), "server-key.pem")
.addAsResource(new File("target/certs/ssl-test-sni.crt"), "server-cert.pem")
.addAsResource(new File("target/certs/alias.key"), "alias.key")
.addAsResource(new File("target/certs/alias.crt"), "alias.crt")
.addAsResource(new File("target/certs/alias-2.key"), "alias-2.key")
.addAsResource(new File("target/certs/alias-2.crt"), "alias-2.crt"));
@Inject
Vertx vertx;
@Test
public void testSslServerWithPkcs12() {
// Cannot use RESTAssured as it does not validate the certificate names (even when forced.)
WebClientOptions options = new WebClientOptions()
.setSsl(true)
.setTrustOptions(new io.vertx.core.net.JksOptions()
.setPath("target/certs/ssl-test-sni-truststore.jks")
.setPassword("secret"))
.setForceSni(true);
WebClient client = WebClient.create(vertx, options);
HttpResponse<Buffer> response = client.getAbs(url.toExternalForm()).send().toCompletionStage().toCompletableFuture()
.join();
Assertions.assertThat(response.statusCode()).isEqualTo(200);
Assertions.assertThat(response.bodyAsString()).isEqualTo("ssl");
}
@ApplicationScoped
static
|
SslServerWithPemWithSniMatchingSanDNSTest
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/logging/logback/Extractor.java
|
{
"start": 1126,
"end": 2095
}
|
class ____ {
private final @Nullable StackTracePrinter stackTracePrinter;
private final ThrowableProxyConverter throwableProxyConverter;
Extractor(@Nullable StackTracePrinter stackTracePrinter, ThrowableProxyConverter throwableProxyConverter) {
this.stackTracePrinter = stackTracePrinter;
this.throwableProxyConverter = throwableProxyConverter;
}
String messageAndStackTrace(ILoggingEvent event) {
return event.getFormattedMessage() + "\n\n" + stackTrace(event);
}
String stackTrace(ILoggingEvent event) {
if (this.stackTracePrinter != null) {
IThrowableProxy throwableProxy = event.getThrowableProxy();
Assert.state(throwableProxy instanceof ThrowableProxy,
"Instance must be a ThrowableProxy in order to print exception");
Throwable throwable = ((ThrowableProxy) throwableProxy).getThrowable();
return this.stackTracePrinter.printStackTraceToString(throwable);
}
return this.throwableProxyConverter.convert(event);
}
}
|
Extractor
|
java
|
apache__flink
|
flink-formats/flink-protobuf/src/main/java/org/apache/flink/formats/protobuf/PbConstant.java
|
{
"start": 902,
"end": 1709
}
|
class ____ {
public static final String PB_METHOD_GET_DESCRIPTOR = "getDescriptor";
public static final String PB_METHOD_PARSE_FROM = "parseFrom";
public static final String GENERATED_DECODE_METHOD = "decode";
public static final String GENERATED_ENCODE_METHOD = "encode";
public static final String PB_MAP_KEY_NAME = "key";
public static final String PB_MAP_VALUE_NAME = "value";
public static final String PB_OUTER_CLASS_SUFFIX = "OuterClass";
/**
* JIT optimizer threshold is 8K, unicode encode one char use 1byte, so use 4K as
* codegen_spilt_threshold,A conservative threshold is selected to prevent multiple element code
* segments in RowType from being combined to exceed 8K.
*/
public static final int CODEGEN_SPLIT_THRESHOLD = 4000;
}
|
PbConstant
|
java
|
apache__camel
|
components/camel-ldif/src/main/java/org/apache/camel/component/ldif/LdifEndpoint.java
|
{
"start": 1421,
"end": 2521
}
|
class ____ extends DefaultEndpoint {
@UriPath
@Metadata(required = true)
private String ldapConnectionName;
protected LdifEndpoint(String endpointUri, String remaining, LdifComponent component) {
super(endpointUri, component);
this.ldapConnectionName = remaining;
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
throw new UnsupportedOperationException("An LDIF Consumer would be the LDAP server itself! No such support here");
}
@Override
public Producer createProducer() throws Exception {
return new LdifProducer(this, ldapConnectionName);
}
public String getLdapConnectionName() {
return ldapConnectionName;
}
/**
* The name of the LdapConnection bean to pull from the registry. Note that this must be of scope "prototype" to
* avoid it being shared among threads or using a connection that has timed out.
*/
public void setLdapConnectionName(String ldapConnectionName) {
this.ldapConnectionName = ldapConnectionName;
}
}
|
LdifEndpoint
|
java
|
netty__netty
|
codec-http3/src/main/java/io/netty/handler/codec/http3/Http3ControlStreamFrame.java
|
{
"start": 857,
"end": 914
}
|
interface ____ extends Http3Frame {
}
|
Http3ControlStreamFrame
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/RedissonCountDownLatchTest.java
|
{
"start": 521,
"end": 5998
}
|
class ____ extends RedisDockerTest {
@Test
public void testAwaitTimeout() throws InterruptedException {
ExecutorService executor = Executors.newFixedThreadPool(2);
final RCountDownLatch latch = redisson.getCountDownLatch("latch1");
Assertions.assertTrue(latch.trySetCount(1));
executor.execute(() -> {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
Assertions.fail();
}
latch.countDown();
});
executor.execute(() -> {
try {
Assertions.assertEquals(1, latch.getCount());
boolean res = latch.await(550, TimeUnit.MILLISECONDS);
Assertions.assertTrue(res);
} catch (InterruptedException e) {
Assertions.fail();
}
});
executor.shutdown();
Assertions.assertTrue(executor.awaitTermination(10, TimeUnit.SECONDS));
}
@Test
public void testAwaitTimeoutFail() throws InterruptedException {
ExecutorService executor = Executors.newFixedThreadPool(2);
final RCountDownLatch latch = redisson.getCountDownLatch("latch1");
Assertions.assertTrue(latch.trySetCount(1));
executor.execute(() -> {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Assertions.fail();
}
latch.countDown();
});
executor.execute(() -> {
try {
Assertions.assertEquals(1, latch.getCount());
boolean res = latch.await(500, TimeUnit.MILLISECONDS);
Assertions.assertFalse(res);
} catch (InterruptedException e) {
Assertions.fail();
}
});
executor.shutdown();
Assertions.assertTrue(executor.awaitTermination(10, TimeUnit.SECONDS));
}
@Test
public void testMultiAwait() throws InterruptedException {
RCountDownLatch latch = redisson.getCountDownLatch("latch");
latch.trySetCount(5);
AtomicInteger counter = new AtomicInteger();
for (int i = 0; i < 5; i++) {
Thread t = new Thread() {
@Override
public void run() {
RCountDownLatch latch2 = redisson.getCountDownLatch("latch");
latch2.awaitAsync(10L, TimeUnit.SECONDS).thenAccept(r -> {
if (r) {
counter.incrementAndGet();
}
});
}
};
t.start();
}
ScheduledExecutorService ee = Executors.newScheduledThreadPool(1);
for (int i = 0; i < 5; i++) {
ee.schedule(() -> {
RCountDownLatch latch2 = redisson.getCountDownLatch("latch");
latch2.countDown();
}, 1, TimeUnit.SECONDS);
}
Awaitility.await().atMost(Duration.ofSeconds(7)).until(() -> {
return latch.getCount() == 0;
});
assertThat(counter.get()).isEqualTo(5);
}
@Test
public void testCountDown() throws InterruptedException {
RCountDownLatch latch = redisson.getCountDownLatch("latch");
latch.trySetCount(2);
Assertions.assertEquals(2, latch.getCount());
latch.countDown();
Assertions.assertEquals(1, latch.getCount());
latch.countDown();
Assertions.assertEquals(0, latch.getCount());
latch.await();
latch.countDown();
Assertions.assertEquals(0, latch.getCount());
latch.await();
latch.countDown();
Assertions.assertEquals(0, latch.getCount());
latch.await();
RCountDownLatch latch1 = redisson.getCountDownLatch("latch1");
latch1.trySetCount(1);
latch1.countDown();
Assertions.assertEquals(0, latch.getCount());
latch1.countDown();
Assertions.assertEquals(0, latch.getCount());
latch1.await();
RCountDownLatch latch2 = redisson.getCountDownLatch("latch2");
latch2.trySetCount(1);
latch2.countDown();
latch2.await();
latch2.await();
RCountDownLatch latch3 = redisson.getCountDownLatch("latch3");
Assertions.assertEquals(0, latch.getCount());
latch3.await();
RCountDownLatch latch4 = redisson.getCountDownLatch("latch4");
Assertions.assertEquals(0, latch.getCount());
latch4.countDown();
Assertions.assertEquals(0, latch.getCount());
latch4.await();
}
@Test
public void testDelete() throws Exception {
RCountDownLatch latch = redisson.getCountDownLatch("latch");
latch.trySetCount(1);
Assertions.assertTrue(latch.delete());
}
@Test
public void testDeleteFailed() throws Exception {
RCountDownLatch latch = redisson.getCountDownLatch("latch");
Assertions.assertFalse(latch.delete());
}
@Test
public void testTrySetCount() throws Exception {
RCountDownLatch latch = redisson.getCountDownLatch("latch");
assertThat(latch.trySetCount(1)).isTrue();
assertThat(latch.trySetCount(2)).isFalse();
}
@Test
public void testCount() {
RCountDownLatch latch = redisson.getCountDownLatch("latch");
assertThat(latch.getCount()).isEqualTo(0);
}
}
|
RedissonCountDownLatchTest
|
java
|
spring-projects__spring-security
|
core/src/test/java/org/springframework/security/core/parameters/AnnotationParameterNameDiscovererTests.java
|
{
"start": 4381,
"end": 4501
}
|
class ____ {
public String findMessageByIdNoAnnotation(@P("id") String id) {
return null;
}
}
static
|
BaseDaoImpl
|
java
|
elastic__elasticsearch
|
x-pack/qa/src/main/java/org/elasticsearch/xpack/test/rest/XPackRestTestHelper.java
|
{
"start": 775,
"end": 3694
}
|
class ____ {
private XPackRestTestHelper() {}
/**
* For each template name wait for the template to be created.
*
* @param client The rest client
* @param expectedTemplates Names of the templates to wait for
* @throws InterruptedException If the wait is interrupted
*/
@SuppressWarnings("unchecked")
public static void waitForTemplates(RestClient client, List<String> expectedTemplates) throws Exception {
// TODO: legacy support can be removed once all X-Pack plugins use only composable
// templates in the oldest version we test upgrades from
assertBusy(() -> {
Map<String, Object> response;
final Request request = new Request("GET", "_index_template");
request.addParameter("error_trace", "true");
String string = EntityUtils.toString(client.performRequest(request).getEntity());
List<Map<String, Object>> templateList = (List<Map<String, Object>>) XContentHelper.convertToMap(
JsonXContent.jsonXContent,
string,
false
).get("index_templates");
response = templateList.stream().collect(Collectors.toMap(m -> (String) m.get("name"), m -> m.get("index_template")));
final Set<String> templates = new TreeSet<>(response.keySet());
final Request legacyRequest = new Request("GET", "_template");
legacyRequest.addParameter("error_trace", "true");
string = EntityUtils.toString(client.performRequest(legacyRequest).getEntity());
Map<String, Object> legacyResponse = XContentHelper.convertToMap(JsonXContent.jsonXContent, string, false);
final Set<String> legacyTemplates = new TreeSet<>(legacyResponse.keySet());
final List<String> missingTemplates = expectedTemplates.stream()
.filter(each -> templates.contains(each) == false)
.filter(each -> legacyTemplates.contains(each) == false)
.toList();
// While it's possible to use a Hamcrest matcher for this, the failure is much less legible.
if (missingTemplates.isEmpty() == false) {
fail(
"Some expected templates are missing: "
+ missingTemplates
+ ". The composable templates that exist are: "
+ templates
+ ". The legacy templates that exist are: "
+ legacyTemplates
);
}
});
}
public static String resultsWriteAlias(String jobId) {
// ".write" rather than simply "write" to avoid the danger of clashing
// with the read alias of a job whose name begins with "write-"
return XPackRestTestConstants.RESULTS_INDEX_PREFIX + ".write-" + jobId;
}
}
|
XPackRestTestHelper
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader-tools/src/main/java/org/springframework/boot/loader/tools/layer/ApplicationContentFilter.java
|
{
"start": 947,
"end": 1355
}
|
class ____ implements ContentFilter<String> {
private static final AntPathMatcher MATCHER = new AntPathMatcher();
private final String pattern;
public ApplicationContentFilter(String pattern) {
Assert.hasText(pattern, "'pattern' must not be empty");
this.pattern = pattern;
}
@Override
public boolean matches(String path) {
return MATCHER.match(this.pattern, path);
}
}
|
ApplicationContentFilter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/action/SubGroupTests.java
|
{
"start": 733,
"end": 2913
}
|
class ____ extends ESTestCase {
public void testToXContent() throws IOException {
XContentType contentType = randomFrom(XContentType.values());
// tag::noformat
XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType)
.startObject()
.startObject("transaction.name")
.startObject("basket")
.field("count", 7L)
.endObject()
.endObject()
.endObject();
// end::noformat
XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType);
actualRequest.startObject();
SubGroup g = SubGroup.root("transaction.name").addCount("basket", 7L);
g.toXContent(actualRequest, ToXContent.EMPTY_PARAMS);
actualRequest.endObject();
assertToXContentEquivalent(BytesReference.bytes(expectedRequest), BytesReference.bytes(actualRequest), contentType);
}
public void testMergeNoCommonRoot() {
SubGroup root1 = SubGroup.root("transaction.name");
SubGroup root2 = SubGroup.root("service.name");
SubGroup toMerge = root1.copy();
toMerge.merge(root2);
assertEquals(root1, toMerge);
}
public void testMergeIdenticalTree() {
SubGroup g = SubGroup.root("transaction.name");
g.addCount("basket", 5L);
g.addCount("checkout", 7L);
SubGroup g2 = g.copy();
g.merge(g2);
assertEquals(Long.valueOf(10L), g.getCount("basket"));
assertEquals(Long.valueOf(14L), g.getCount("checkout"));
}
public void testMergeMixedTree() {
SubGroup g1 = SubGroup.root("transaction.name");
g1.addCount("basket", 5L);
g1.addCount("checkout", 7L);
SubGroup g2 = SubGroup.root("transaction.name");
g2.addCount("catalog", 8L);
g2.addCount("basket", 5L);
g2.addCount("checkout", 2L);
g1.merge(g2);
assertEquals(Long.valueOf(8L), g1.getCount("catalog"));
assertEquals(Long.valueOf(10L), g1.getCount("basket"));
assertEquals(Long.valueOf(9L), g1.getCount("checkout"));
}
}
|
SubGroupTests
|
java
|
apache__camel
|
catalog/camel-catalog/src/main/java/org/apache/camel/catalog/CamelCatalog.java
|
{
"start": 3612,
"end": 3865
}
|
class ____ for the component class
*/
void addComponent(String name, String className);
/**
* Adds a 3rd party component to this catalog.
*
* @param name the component name
* @param className the fully qualified
|
name
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/state/internals/RocksDBVersionedStore.java
|
{
"start": 20610,
"end": 21150
}
|
interface ____ {
/**
* @return segment id
*/
long id();
void put(Bytes key, byte[] value);
byte[] get(Bytes key);
}
/**
* Extracts all operations required for writing to the versioned store (via
* {@link #put(Bytes, byte[], long)}) into a generic client interface, so that the same
* {@code put(...)} logic can be shared during regular store operation and during restore.
*
* @param <T> the segment type used by this client
*/
|
VersionedStoreSegment
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/BooleanArraysBaseTest.java
|
{
"start": 1421,
"end": 1897
}
|
class ____ {
protected static final WritableAssertionInfo INFO = someInfo();
protected boolean[] actual;
protected Failures failures;
protected BooleanArrays arrays;
@BeforeEach
public void setUp() {
actual = arrayOf(true, false);
failures = spy(Failures.instance());
arrays = new BooleanArrays();
arrays.failures = failures;
}
protected void setArrays(Arrays internalArrays) {
arrays.setArrays(internalArrays);
}
}
|
BooleanArraysBaseTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java
|
{
"start": 1251,
"end": 9565
}
|
class ____ extends SearchPhaseResult {
private static final Term[] EMPTY_TERMS = new Term[0];
private static final TermStatistics[] EMPTY_TERM_STATS = new TermStatistics[0];
private Term[] terms;
private TermStatistics[] termStatistics;
private Map<String, CollectionStatistics> fieldStatistics = new HashMap<>();
private List<DfsKnnResults> knnResults;
private int maxDoc;
private SearchProfileDfsPhaseResult searchProfileDfsPhaseResult;
public DfsSearchResult(StreamInput in) throws IOException {
contextId = new ShardSearchContextId(in);
int termsSize = in.readVInt();
if (termsSize == 0) {
terms = EMPTY_TERMS;
} else {
terms = new Term[termsSize];
for (int i = 0; i < terms.length; i++) {
terms[i] = new Term(in.readString(), in.readSlicedBytesReference().toBytesRef());
}
}
this.termStatistics = readTermStats(in, terms);
fieldStatistics = readFieldStats(in);
maxDoc = in.readVInt();
setShardSearchRequest(in.readOptionalWriteable(ShardSearchRequest::new));
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) {
knnResults = in.readOptionalCollectionAsList(DfsKnnResults::new);
} else {
DfsKnnResults results = in.readOptionalWriteable(DfsKnnResults::new);
knnResults = results != null ? List.of(results) : List.of();
}
}
if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_6_0)) {
searchProfileDfsPhaseResult = in.readOptionalWriteable(SearchProfileDfsPhaseResult::new);
}
}
public DfsSearchResult(ShardSearchContextId contextId, SearchShardTarget shardTarget, ShardSearchRequest shardSearchRequest) {
this.setSearchShardTarget(shardTarget);
this.contextId = contextId;
setShardSearchRequest(shardSearchRequest);
}
public DfsSearchResult maxDoc(int maxDoc) {
this.maxDoc = maxDoc;
return this;
}
public int maxDoc() {
return maxDoc;
}
public DfsSearchResult termsStatistics(Term[] terms, TermStatistics[] termStatistics) {
this.terms = terms;
this.termStatistics = termStatistics;
return this;
}
public DfsSearchResult fieldStatistics(Map<String, CollectionStatistics> fieldStatistics) {
this.fieldStatistics = fieldStatistics;
return this;
}
public DfsSearchResult knnResults(List<DfsKnnResults> knnResults) {
this.knnResults = knnResults;
return this;
}
public DfsSearchResult profileResult(SearchProfileDfsPhaseResult searchProfileDfsPhaseResult) {
this.searchProfileDfsPhaseResult = searchProfileDfsPhaseResult;
return this;
}
public Term[] terms() {
return terms;
}
public TermStatistics[] termStatistics() {
return termStatistics;
}
public Map<String, CollectionStatistics> fieldStatistics() {
return fieldStatistics;
}
public List<DfsKnnResults> knnResults() {
return knnResults;
}
public SearchProfileDfsPhaseResult searchProfileDfsPhaseResult() {
return searchProfileDfsPhaseResult;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
contextId.writeTo(out);
out.writeArray((o, term) -> {
o.writeString(term.field());
o.writeBytesRef(term.bytes());
}, terms);
writeTermStats(out, termStatistics);
writeFieldStats(out, fieldStatistics);
out.writeVInt(maxDoc);
out.writeOptionalWriteable(getShardSearchRequest());
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_4_0)) {
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_7_0)) {
out.writeOptionalCollection(knnResults);
} else {
if (knnResults != null && knnResults.size() > 1) {
throw new IllegalArgumentException(
"Cannot serialize multiple KNN results to nodes using previous transport version ["
+ out.getTransportVersion().toReleaseVersion()
+ "], minimum required transport version is ["
+ TransportVersions.V_8_7_0.toReleaseVersion()
+ "]"
);
}
out.writeOptionalWriteable(knnResults == null || knnResults.isEmpty() ? null : knnResults.get(0));
}
}
if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_6_0)) {
out.writeOptionalWriteable(searchProfileDfsPhaseResult);
}
}
public static void writeFieldStats(StreamOutput out, Map<String, CollectionStatistics> fieldStatistics) throws IOException {
out.writeMap(fieldStatistics, (o, statistics) -> {
assert statistics.maxDoc() >= 0;
o.writeVLong(statistics.maxDoc());
// stats are always positive numbers
o.writeVLong(statistics.docCount());
o.writeVLong(statistics.sumTotalTermFreq());
o.writeVLong(statistics.sumDocFreq());
});
}
public static void writeTermStats(StreamOutput out, TermStatistics[] termStatistics) throws IOException {
out.writeArray(DfsSearchResult::writeSingleTermStats, termStatistics);
}
public static void writeSingleTermStats(StreamOutput out, TermStatistics termStatistic) throws IOException {
if (termStatistic != null) {
assert termStatistic.docFreq() > 0;
out.writeVLong(termStatistic.docFreq());
out.writeVLong(addOne(termStatistic.totalTermFreq()));
} else {
out.writeVLong(0);
out.writeVLong(0);
}
}
static Map<String, CollectionStatistics> readFieldStats(StreamInput in) throws IOException {
final int numFieldStatistics = in.readVInt();
Map<String, CollectionStatistics> fieldStatistics = new HashMap<>(numFieldStatistics);
for (int i = 0; i < numFieldStatistics; i++) {
final String field = in.readString();
assert field != null;
final long maxDoc = in.readVLong();
// stats are always positive numbers
final long docCount = in.readVLong();
final long sumTotalTermFreq = in.readVLong();
final long sumDocFreq = in.readVLong();
CollectionStatistics stats = new CollectionStatistics(field, maxDoc, docCount, sumTotalTermFreq, sumDocFreq);
fieldStatistics.put(field, stats);
}
return fieldStatistics;
}
static TermStatistics[] readTermStats(StreamInput in, Term[] terms) throws IOException {
int termsStatsSize = in.readVInt();
final TermStatistics[] termStatistics;
if (termsStatsSize == 0) {
termStatistics = EMPTY_TERM_STATS;
} else {
termStatistics = new TermStatistics[termsStatsSize];
assert terms.length == termsStatsSize;
for (int i = 0; i < termStatistics.length; i++) {
BytesRef term = terms[i].bytes();
final long docFreq = in.readVLong();
assert docFreq >= 0;
final long totalTermFreq = subOne(in.readVLong());
if (docFreq == 0) {
continue;
}
termStatistics[i] = new TermStatistics(term, docFreq, totalTermFreq);
}
}
return termStatistics;
}
/*
* optional statistics are set to -1 in lucene by default.
* Since we are using var longs to encode values we add one to each value
* to ensure we don't waste space and don't add negative values.
*/
public static long addOne(long value) {
assert value + 1 >= 0;
return value + 1;
}
/*
* See #addOne this just subtracting one and asserts that the actual value
* is positive.
*/
public static long subOne(long value) {
assert value >= 0;
return value - 1;
}
}
|
DfsSearchResult
|
java
|
spring-projects__spring-boot
|
cli/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/shell/ForkProcessCommand.java
|
{
"start": 1184,
"end": 2275
}
|
class ____ extends RunProcessCommand {
private static final String MAIN_CLASS = "org.springframework.boot.loader.launch.JarLauncher";
private final Command command;
ForkProcessCommand(Command command) {
super(new JavaExecutable().toString());
this.command = command;
}
@Override
public String getName() {
return this.command.getName();
}
@Override
public String getDescription() {
return this.command.getDescription();
}
@Override
public @Nullable String getUsageHelp() {
return this.command.getUsageHelp();
}
@Override
public @Nullable String getHelp() {
return this.command.getHelp();
}
@Override
public Collection<OptionHelp> getOptionsHelp() {
return this.command.getOptionsHelp();
}
@Override
public ExitStatus run(String... args) throws Exception {
List<String> fullArgs = new ArrayList<>();
fullArgs.add("-cp");
fullArgs.add(System.getProperty("java.class.path"));
fullArgs.add(MAIN_CLASS);
fullArgs.add(this.command.getName());
fullArgs.addAll(Arrays.asList(args));
run(fullArgs);
return ExitStatus.OK;
}
}
|
ForkProcessCommand
|
java
|
spring-projects__spring-framework
|
spring-tx/src/main/java/org/springframework/dao/TransientDataAccessException.java
|
{
"start": 1078,
"end": 1626
}
|
class ____ extends DataAccessException {
/**
* Constructor for TransientDataAccessException.
* @param msg the detail message
*/
public TransientDataAccessException(@Nullable String msg) {
super(msg);
}
/**
* Constructor for TransientDataAccessException.
* @param msg the detail message
* @param cause the root cause (usually from using an underlying
* data access API such as JDBC)
*/
public TransientDataAccessException(@Nullable String msg, @Nullable Throwable cause) {
super(msg, cause);
}
}
|
TransientDataAccessException
|
java
|
apache__kafka
|
metadata/src/main/java/org/apache/kafka/controller/errors/EventHandlerExceptionInfo.java
|
{
"start": 1425,
"end": 5018
}
|
class ____ {
/**
* True if this exception should be treated as a fault, and tracked via the metadata errors
* metric.
*/
private final boolean isFault;
/**
* True if this exception should cause a controller failover.
* All faults cause failover
*/
private final boolean causesFailover;
/**
* The internal exception.
*/
private final Throwable internalException;
/**
* The exception to present to RPC callers, or Optional.empty if the internal exception should
* be presented directly.
*/
private final Optional<Throwable> externalException;
/**
* Create an EventHandlerExceptionInfo object from an internal exception.
*
* @param internal The internal exception.
* @param latestControllerSupplier A function we can call to obtain the latest leader id.
*
* @return The new immutable info object.
*/
public static EventHandlerExceptionInfo fromInternal(
Throwable internal,
Supplier<OptionalInt> latestControllerSupplier
) {
if (internal instanceof ApiException) {
// This exception is a standard API error response from the controller, which can pass
// through without modification.
return new EventHandlerExceptionInfo(false, false, internal);
} else if (internal instanceof NotLeaderException) {
// The controller has lost leadership.
return new EventHandlerExceptionInfo(false, true, internal,
ControllerExceptions.newWrongControllerException(latestControllerSupplier.get()));
} else if (internal instanceof RejectedExecutionException) {
// The controller event queue is shutting down.
return new EventHandlerExceptionInfo(false, false, internal,
new TimeoutException("The controller is shutting down.", internal));
} else if (internal instanceof BoundedListTooLongException) {
// The operation could not be performed because it would have created an overly large
// batch.
return new EventHandlerExceptionInfo(false, false, internal,
new PolicyViolationException("Unable to perform excessively large batch " +
"operation."));
} else if (internal instanceof PeriodicControlTaskException) {
// This exception is a periodic task which failed.
return new EventHandlerExceptionInfo(true, false, internal);
} else if (internal instanceof InterruptedException) {
// The controller event queue has been interrupted. This normally only happens during
// a JUnit test that has hung. The test framework sometimes sends an InterruptException
// to all threads to try to get them to shut down. This isn't the correct way to shut
// the test, but it may happen if something hung.
return new EventHandlerExceptionInfo(true, true, internal,
new UnknownServerException("The controller was interrupted."));
} else {
// This is the catch-all case for things that aren't supposed to happen. Null pointer
// exceptions, illegal argument exceptions, etc. They get translated into an
// UnknownServerException and a controller failover.
return new EventHandlerExceptionInfo(true, true, internal,
new UnknownServerException(internal));
}
}
/**
* Returns true if the
|
EventHandlerExceptionInfo
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/GuiceNestedCombineTest.java
|
{
"start": 8679,
"end": 9277
}
|
class ____ extends AbstractModule {}
public void test() {
foo(new ModuleA(), new ModuleB(), new ModuleC(), new ModuleB(), new ModuleC());
}
public void foo(Module a, Module... b) {}
}
""")
.doTest();
}
@Test
public void noCombine_noFinding() {
refactoringTestHelper
.addInputLines(
"Test.java",
"""
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.util.Modules;
|
ModuleC
|
java
|
apache__flink
|
flink-annotations/src/main/java/org/apache/flink/annotation/docs/ConfigGroup.java
|
{
"start": 942,
"end": 1193
}
|
class ____ specifies a group of config options. The name of the group will be used as the basis
* for the filename of the generated html file, as defined in {@link ConfigOptionsDocGenerator}.
*
* @see ConfigGroups
*/
@Target({})
@Internal
public @
|
that
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java
|
{
"start": 5751,
"end": 6182
}
|
interface ____ script will implement.
*/
private final Class<?> scriptClass;
/**
* The whitelist the script will use.
*/
private final PainlessLookup painlessLookup;
/**
* Classes that do not exist in the lookup, but are needed by the script factories.
*/
private final Map<String, Class<?>> additionalClasses;
/**
* Standard constructor.
* @param scriptClass The class/
|
the
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMCriticalThreadUncaughtExceptionHandler.java
|
{
"start": 1108,
"end": 1466
}
|
class ____ shuts down {@link ResourceManager} or transitions the
* {@link ResourceManager} to standby state if a critical thread throws an
* uncaught exception. It is intended to be installed by calling
* {@code setUncaughtExceptionHandler(Thread.UncaughtExceptionHandler)}
* in the thread entry point or after creation of threads.
*/
@Private
public
|
either
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/simple/ApplicationWithBlockingTest.java
|
{
"start": 1728,
"end": 1821
}
|
class ____ extends Application {
}
@Path("tname")
public static
|
BlockingApplication
|
java
|
elastic__elasticsearch
|
libs/logstash-bridge/src/main/java/org/elasticsearch/logstashbridge/geoip/AbstractExternalIpDatabaseBridge.java
|
{
"start": 1034,
"end": 1512
}
|
class ____ implements IpDatabaseBridge {
private ProxyExternal internalDatabase;
@Override
public IpDatabase toInternal() {
if (internalDatabase == null) {
internalDatabase = new ProxyExternal();
}
return internalDatabase;
}
/**
* An implementation of {@link IpDatabase} that proxies calls to
* a bridged {@link AbstractExternalIpDatabaseBridge} instance.
*/
private final
|
AbstractExternalIpDatabaseBridge
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/NotEqualsMillisNanosEvaluator.java
|
{
"start": 1200,
"end": 5036
}
|
class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(NotEqualsMillisNanosEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator lhs;
private final EvalOperator.ExpressionEvaluator rhs;
private final DriverContext driverContext;
private Warnings warnings;
public NotEqualsMillisNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs,
EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) {
this.source = source;
this.lhs = lhs;
this.rhs = rhs;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (LongBlock lhsBlock = (LongBlock) lhs.eval(page)) {
try (LongBlock rhsBlock = (LongBlock) rhs.eval(page)) {
LongVector lhsVector = lhsBlock.asVector();
if (lhsVector == null) {
return eval(page.getPositionCount(), lhsBlock, rhsBlock);
}
LongVector rhsVector = rhsBlock.asVector();
if (rhsVector == null) {
return eval(page.getPositionCount(), lhsBlock, rhsBlock);
}
return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock();
}
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += lhs.baseRamBytesUsed();
baseRamBytesUsed += rhs.baseRamBytesUsed();
return baseRamBytesUsed;
}
public BooleanBlock eval(int positionCount, LongBlock lhsBlock, LongBlock rhsBlock) {
try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (lhsBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
switch (rhsBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
long lhs = lhsBlock.getLong(lhsBlock.getFirstValueIndex(p));
long rhs = rhsBlock.getLong(rhsBlock.getFirstValueIndex(p));
result.appendBoolean(NotEquals.processMillisNanos(lhs, rhs));
}
return result.build();
}
}
public BooleanVector eval(int positionCount, LongVector lhsVector, LongVector rhsVector) {
try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
long lhs = lhsVector.getLong(p);
long rhs = rhsVector.getLong(p);
result.appendBoolean(p, NotEquals.processMillisNanos(lhs, rhs));
}
return result.build();
}
}
@Override
public String toString() {
return "NotEqualsMillisNanosEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(lhs, rhs);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static
|
NotEqualsMillisNanosEvaluator
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/GenericAssociationOrderColumnTest.java
|
{
"start": 2638,
"end": 3011
}
|
class ____ {
@Id
public Long id;
@ManyToOne
public ParentEntity parent;
public ChildEntity() {
}
public ChildEntity(Long id, ParentEntity parent) {
this.id = id;
this.parent = parent;
}
public Long getId() {
return id;
}
public ParentEntity getParent() {
return parent;
}
}
@Entity( name = "ParentEntity" )
public static
|
ChildEntity
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregation.java
|
{
"start": 1566,
"end": 1632
}
|
class ____ all the single bucket aggregations.
*/
public abstract
|
for
|
java
|
apache__camel
|
components/camel-stax/src/test/java/org/apache/camel/language/xtokenizer/XMLTokenizeLanguageStreamingTest.java
|
{
"start": 956,
"end": 1369
}
|
class ____ extends XMLTokenizeLanguageTest {
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
Namespaces ns = new Namespaces("C", "urn:c");
public void configure() {
from("direct:start").split().xtokenize("//C:child", ns).streaming().to("mock:result").end();
}
};
}
}
|
XMLTokenizeLanguageStreamingTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/SelfAlwaysReturnsThisTest.java
|
{
"start": 7532,
"end": 7917
}
|
class ____ {
public Builder self(int foo) {
return new Builder();
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void self_abstract() {
helper
.addInputLines(
"Builder.java",
"""
package com.google.frobber;
public abstract
|
Builder
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestUniformQueueResourceCalculation.java
|
{
"start": 1196,
"end": 8877
}
|
class ____ extends CapacitySchedulerQueueCalculationTestBase {
private static final Resource QUEUE_A_RES = Resource.newInstance(80 * GB,
10);
private static final Resource QUEUE_B_RES = Resource.newInstance(170 * GB,
30);
private static final Resource QUEUE_A1_RES = Resource.newInstance(50 * GB,
4);
private static final Resource QUEUE_A2_RES = Resource.newInstance(30 * GB,
6);
private static final Resource QUEUE_A11_RES = Resource.newInstance(40 * GB,
2);
private static final Resource QUEUE_A12_RES = Resource.newInstance(10 * GB,
2);
private static final Resource UPDATE_RES = Resource.newInstance(250 * GB, 40);
private static final Resource PERCENTAGE_ALL_RES = Resource.newInstance(10 * GB, 20);
public static final double A_CAPACITY = 0.3;
public static final double B_CAPACITY = 0.7;
public static final double A1_CAPACITY = 0.17;
public static final double A11_CAPACITY = 0.25;
public static final double A12_CAPACITY = 0.75;
public static final double A2_CAPACITY = 0.83;
public static final float A_WEIGHT = 3;
public static final float B_WEIGHT = 6;
public static final float A1_WEIGHT = 2;
public static final float A11_WEIGHT = 5;
public static final float A12_WEIGHT = 8;
public static final float A2_WEIGHT = 3;
public static final double A_NORMALIZED_WEIGHT = A_WEIGHT / (A_WEIGHT + B_WEIGHT);
public static final double B_NORMALIZED_WEIGHT = B_WEIGHT / (A_WEIGHT + B_WEIGHT);
public static final double A1_NORMALIZED_WEIGHT = A1_WEIGHT / (A1_WEIGHT + A2_WEIGHT);
public static final double A2_NORMALIZED_WEIGHT = A2_WEIGHT / (A1_WEIGHT + A2_WEIGHT);
public static final double A11_NORMALIZED_WEIGHT = A11_WEIGHT / (A11_WEIGHT + A12_WEIGHT);
public static final double A12_NORMALIZED_WEIGHT = A12_WEIGHT / (A11_WEIGHT + A12_WEIGHT);
@Test
public void testWeightResourceCalculation() throws IOException {
csConf.setNonLabeledQueueWeight(A, A_WEIGHT);
csConf.setNonLabeledQueueWeight(B, B_WEIGHT);
csConf.setNonLabeledQueueWeight(A1, A1_WEIGHT);
csConf.setNonLabeledQueueWeight(A11, A11_WEIGHT);
csConf.setNonLabeledQueueWeight(A12, A12_WEIGHT);
csConf.setNonLabeledQueueWeight(A2, A2_WEIGHT);
QueueAssertionBuilder queueAssertionBuilder = createAssertionBuilder()
.withQueue(A)
.assertEffectiveMinResource(ResourceUtils.multiplyRound(UPDATE_RES, A_NORMALIZED_WEIGHT))
.assertAbsoluteCapacity(A_NORMALIZED_WEIGHT)
.withQueue(B)
.assertEffectiveMinResource(ResourceUtils.multiplyRound(UPDATE_RES, B_NORMALIZED_WEIGHT))
.assertAbsoluteCapacity(B_NORMALIZED_WEIGHT)
.withQueue(A1)
.assertEffectiveMinResource(ResourceUtils.multiplyRound(UPDATE_RES,
A_NORMALIZED_WEIGHT * A1_NORMALIZED_WEIGHT))
.assertAbsoluteCapacity(A_NORMALIZED_WEIGHT * A1_NORMALIZED_WEIGHT)
.withQueue(A2)
.assertEffectiveMinResource(ResourceUtils.multiplyRound(UPDATE_RES,
A_NORMALIZED_WEIGHT * A2_NORMALIZED_WEIGHT))
.assertAbsoluteCapacity(A_NORMALIZED_WEIGHT * A2_NORMALIZED_WEIGHT)
.withQueue(A11)
.assertEffectiveMinResource(ResourceUtils.multiplyRound(UPDATE_RES,
A_NORMALIZED_WEIGHT * A1_NORMALIZED_WEIGHT * A11_NORMALIZED_WEIGHT))
.assertAbsoluteCapacity(A_NORMALIZED_WEIGHT * A1_NORMALIZED_WEIGHT * A11_NORMALIZED_WEIGHT)
.withQueue(A12)
.assertEffectiveMinResource(ResourceUtils.multiplyRound(UPDATE_RES,
A_NORMALIZED_WEIGHT * A1_NORMALIZED_WEIGHT * A12_NORMALIZED_WEIGHT))
.assertAbsoluteCapacity(A_NORMALIZED_WEIGHT * A1_NORMALIZED_WEIGHT * A12_NORMALIZED_WEIGHT)
.build();
update(queueAssertionBuilder, UPDATE_RES);
}
/**
 * Verifies resource calculation when queue capacities are configured as
 * percentages: configured values (0..1 fractions scaled to 0..100) yield
 * absolute capacities by multiplying the fractions down the hierarchy, with
 * effective min resources floored via ResourceUtils.multiplyFloor.
 */
@Test
public void testPercentageResourceCalculation() throws IOException {
// setCapacity expects percentages, so scale the 0..1 fractions by 100.
csConf.setCapacity(A, (float) (A_CAPACITY * 100));
csConf.setCapacity(B, (float) (B_CAPACITY * 100));
csConf.setCapacity(A1, (float) (A1_CAPACITY * 100));
csConf.setCapacity(A11, (float) (A11_CAPACITY * 100));
csConf.setCapacity(A12, (float) (A12_CAPACITY * 100));
csConf.setCapacity(A2, (float) (A2_CAPACITY * 100));
// Each queue's own capacity is the configured fraction; its absolute capacity
// is the product of fractions along its path from the root.
QueueAssertionBuilder queueAssertionBuilder = createAssertionBuilder()
.withQueue(A)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(PERCENTAGE_ALL_RES, A_CAPACITY))
.assertCapacity(A_CAPACITY)
.assertAbsoluteCapacity(A_CAPACITY)
.withQueue(B)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(PERCENTAGE_ALL_RES, B_CAPACITY))
.assertCapacity(B_CAPACITY)
.assertAbsoluteCapacity(B_CAPACITY)
.withQueue(A1)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(PERCENTAGE_ALL_RES,
A_CAPACITY * A1_CAPACITY))
.assertCapacity(A1_CAPACITY)
.assertAbsoluteCapacity(A_CAPACITY * A1_CAPACITY)
.withQueue(A2)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(PERCENTAGE_ALL_RES,
A_CAPACITY * A2_CAPACITY))
.assertCapacity(A2_CAPACITY)
.assertAbsoluteCapacity(A_CAPACITY * A2_CAPACITY)
.withQueue(A11)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(PERCENTAGE_ALL_RES,
A11_CAPACITY * A_CAPACITY * A1_CAPACITY))
.assertCapacity(A11_CAPACITY)
.assertAbsoluteCapacity(A11_CAPACITY * A_CAPACITY * A1_CAPACITY)
.withQueue(A12)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(PERCENTAGE_ALL_RES,
A12_CAPACITY * A_CAPACITY * A1_CAPACITY))
.assertCapacity(A12_CAPACITY)
.assertAbsoluteCapacity(A12_CAPACITY * A_CAPACITY * A1_CAPACITY)
.build();
// Apply the cluster resource update and run all queued assertions.
update(queueAssertionBuilder, PERCENTAGE_ALL_RES);
}
/**
 * Verifies resource calculation when queue capacities are configured as
 * absolute resources. With a large enough cluster the effective min resource
 * equals the configured amount; when the cluster shrinks to half, each queue's
 * effective min resource is scaled down proportionally (floored).
 */
@Test
public void testAbsoluteResourceCalculation() throws IOException {
// Configure fixed (absolute) minimum resources on the default ("") label.
csConf.setMinimumResourceRequirement("", A, QUEUE_A_RES);
csConf.setMinimumResourceRequirement("", B, QUEUE_B_RES);
csConf.setMinimumResourceRequirement("", A1, QUEUE_A1_RES);
csConf.setMinimumResourceRequirement("", A2, QUEUE_A2_RES);
csConf.setMinimumResourceRequirement("", A11, QUEUE_A11_RES);
csConf.setMinimumResourceRequirement("", A12, QUEUE_A12_RES);
// Full cluster: every queue should get exactly its configured resource.
QueueAssertionBuilder queueAssertionBuilder = createAssertionBuilder()
.withQueue(A)
.assertEffectiveMinResource(QUEUE_A_RES)
.withQueue(B)
.assertEffectiveMinResource(QUEUE_B_RES)
.withQueue(A1)
.assertEffectiveMinResource(QUEUE_A1_RES)
.withQueue(A2)
.assertEffectiveMinResource(QUEUE_A2_RES)
.withQueue(A11)
.assertEffectiveMinResource(QUEUE_A11_RES)
.withQueue(A12)
.assertEffectiveMinResource(QUEUE_A12_RES)
.build();
update(queueAssertionBuilder, UPDATE_RES);
// Half cluster: expected effective mins scale by 0.5 — presumably because
// configured absolutes are downscaled when they exceed the cluster resource
// (NOTE(review): confirm against the scheduler's downscaling logic).
QueueAssertionBuilder queueAssertionHalfClusterResource = createAssertionBuilder()
.withQueue(A)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(QUEUE_A_RES, 0.5f))
.withQueue(B)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(QUEUE_B_RES, 0.5f))
.withQueue(A1)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(QUEUE_A1_RES, 0.5f))
.withQueue(A2)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(QUEUE_A2_RES, 0.5f))
.withQueue(A11)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(QUEUE_A11_RES, 0.5f))
.withQueue(A12)
.assertEffectiveMinResource(ResourceUtils.multiplyFloor(QUEUE_A12_RES, 0.5f))
.build();
update(queueAssertionHalfClusterResource, ResourceUtils.multiplyFloor(UPDATE_RES, 0.5f));
}
}
|
TestUniformQueueResourceCalculation
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeErrorCallable.java
|
{
"start": 1008,
"end": 1667
}
|
/**
 * A Maybe source that signals an error produced lazily by a Supplier at
 * subscription time. No value is ever emitted.
 *
 * @param <T> the value type (unused; this source only terminates with onError)
 */
class ____<T> extends Maybe<T> {
// Supplies the Throwable to signal; invoked once per subscription.
final Supplier<? extends Throwable> errorSupplier;
public MaybeErrorCallable(Supplier<? extends Throwable> errorSupplier) {
this.errorSupplier = errorSupplier;
}
@Override
protected void subscribeActual(MaybeObserver<? super T> observer) {
// Protocol: onSubscribe must be delivered before any terminal event; an
// already-disposed Disposable is handed out since nothing can be cancelled.
observer.onSubscribe(Disposable.disposed());
Throwable ex;
try {
// A null Throwable from the supplier is itself converted into an error.
ex = ExceptionHelper.nullCheck(errorSupplier.get(), "Supplier returned a null Throwable.");
} catch (Throwable ex1) {
// Fatal throwables (see Exceptions.throwIfFatal) are rethrown; anything
// else becomes the signalled error.
Exceptions.throwIfFatal(ex1);
ex = ex1;
}
observer.onError(ex);
}
}
|
MaybeErrorCallable
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/aot/hint/ResourcePatternHint.java
|
{
"start": 2235,
"end": 3562
}
|
/**
 * A hint describing a glob-style resource pattern, optionally conditioned on a
 * reachable type. Matching is delegated to an {@link AntPathMatcher}.
 */
class ____ implements ConditionalHint {
private static final AntPathMatcher PATH_MATCHER = new AntPathMatcher();
private final String pattern;
private final @Nullable TypeReference reachableType;
ResourcePatternHint(String pattern, @Nullable TypeReference reachableType) {
// Only the root directory ("/") may start with a slash; all other
// patterns must be relative.
boolean acceptable = "/".equals(pattern) || !pattern.startsWith("/");
Assert.isTrue(acceptable,
() -> "Resource pattern [%s] must not start with a '/' unless it is the root directory"
.formatted(pattern));
this.pattern = pattern;
this.reachableType = reachableType;
}
/**
 * Return the pattern to use for identifying the resources to match.
 */
public String getPattern() {
return this.pattern;
}
/**
 * Whether the given path matches the current glob pattern.
 * @param path the path to match against
 */
public boolean matches(String path) {
return PATH_MATCHER.match(this.pattern, path);
}
@Override
public @Nullable TypeReference getReachableType() {
return this.reachableType;
}
@Override
public boolean equals(@Nullable Object other) {
if (this == other) {
return true;
}
if (!(other instanceof ResourcePatternHint that)) {
return false;
}
return this.pattern.equals(that.pattern)
&& Objects.equals(this.reachableType, that.reachableType);
}
@Override
public int hashCode() {
return Objects.hash(this.pattern, this.reachableType);
}
}
|
ResourcePatternHint
|
java
|
google__auto
|
factory/src/test/resources/bad/ProvidedButNoAutoFactory.java
|
{
"start": 654,
"end": 749
}
|
// Negative compile-test fixture: a @Provided constructor parameter on a class
// that lacks the factory annotation — presumably expected to make the annotation
// processor report an error (NOTE(review): confirm against the processor tests).
class ____ {
ProvidedButNoAutoFactory(Object a, @Provided Object b) {}
}
|
ProvidedButNoAutoFactory
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/mapping/RuntimeResource.java
|
{
"start": 778,
"end": 4716
}
|
class ____ {
private final String httpMethod;
private final URITemplate path;
private final URITemplate classPath;
private final ServerMediaType produces;
private final List<MediaType> consumes;
private final EndpointInvoker invoker;
private final BeanFactory<Object> endpointFactory;
private final ServerRestHandler[] handlerChain;
private final String javaMethodName;
private final Class<?>[] parameterTypes;
private final Type returnType;
private final boolean blocking;
private final boolean runOnVirtualThread;
private final Class<?> resourceClass;
private final ResteasyReactiveResourceInfo lazyMethod;
private final Map<String, Integer> pathParameterIndexes;
private final Map<ScoreSystem.Category, List<ScoreSystem.Diagnostic>> score;
private final MediaType streamElementType;
private final Map<Class<? extends Throwable>, ResourceExceptionMapper<? extends Throwable>> classExceptionMappers;
public RuntimeResource(String httpMethod, URITemplate path, URITemplate classPath, ServerMediaType produces,
List<MediaType> consumes,
EndpointInvoker invoker,
BeanFactory<Object> endpointFactory, ServerRestHandler[] handlerChain, String javaMethodName,
Class<?>[] parameterTypes,
Type returnType, boolean blocking, boolean runOnVirtualThread, Class<?> resourceClass,
ResteasyReactiveResourceInfo lazyMethod,
Map<String, Integer> pathParameterIndexes, Map<ScoreSystem.Category, List<ScoreSystem.Diagnostic>> score,
MediaType streamElementType,
Map<Class<? extends Throwable>, ResourceExceptionMapper<? extends Throwable>> classExceptionMappers) {
this.httpMethod = httpMethod;
this.path = path;
this.classPath = classPath;
this.produces = produces;
this.consumes = consumes;
this.invoker = invoker;
this.endpointFactory = endpointFactory;
this.handlerChain = handlerChain;
this.javaMethodName = javaMethodName;
this.parameterTypes = parameterTypes;
this.returnType = returnType;
this.blocking = blocking;
this.runOnVirtualThread = runOnVirtualThread;
this.resourceClass = resourceClass;
this.lazyMethod = lazyMethod;
this.pathParameterIndexes = pathParameterIndexes;
this.score = score;
this.streamElementType = streamElementType;
this.classExceptionMappers = classExceptionMappers;
}
public ServerRestHandler[] getHandlerChain() {
return handlerChain;
}
public String getJavaMethodName() {
return javaMethodName;
}
public Class<?>[] getParameterTypes() {
return parameterTypes;
}
public Type getReturnType() {
return returnType;
}
public String getHttpMethod() {
return httpMethod;
}
public URITemplate getPath() {
return path;
}
public ServerMediaType getProduces() {
return produces;
}
public List<MediaType> getConsumes() {
return consumes;
}
public EndpointInvoker getInvoker() {
return invoker;
}
public boolean isBlocking() {
return blocking;
}
public boolean isRunOnVirtualThread() {
return runOnVirtualThread;
}
public Class<?> getResourceClass() {
return resourceClass;
}
public BeanFactory<Object> getEndpointFactory() {
return endpointFactory;
}
public ResteasyReactiveResourceInfo getLazyMethod() {
return lazyMethod;
}
public SimpleResourceInfo getSimplifiedResourceInfo() {
return new ResteasyReactiveSimplifiedResourceInfo(javaMethodName, resourceClass, parameterTypes);
}
public MediaType getStreamElementType() {
return streamElementType;
}
/**
* The @Path that is present on the
|
RuntimeResource
|
java
|
apache__camel
|
components/camel-sql/src/main/java/org/apache/camel/processor/aggregate/jdbc/ClusteredPostgresAggregationRepository.java
|
{
"start": 1273,
"end": 3684
}
|
/**
 * PostgreSQL-specific clustered aggregation repository. Overrides the insert to
 * append {@code ON CONFLICT DO NOTHING}, so a concurrent insert of the same key
 * yields an update count of zero instead of a database error.
 */
class ____ extends ClusteredJdbcAggregationRepository {
/**
 * Creates an aggregation repository
 */
public ClusteredPostgresAggregationRepository() {
}
/**
 * Creates an aggregation repository with the three mandatory parameters
 */
public ClusteredPostgresAggregationRepository(PlatformTransactionManager transactionManager, String repositoryName,
DataSource dataSource) {
super(transactionManager, repositoryName, dataSource);
}
/**
 * Inserts a new record into the given repository table
 *
 * @param camelContext the current CamelContext
 * @param correlationId the correlation key
 * @param exchange the aggregated exchange
 * @param repositoryName The name of the table
 */
@Override
protected void insert(
final CamelContext camelContext, final String correlationId, final Exchange exchange, String repositoryName,
final Long version, final boolean completed)
throws Exception {
// The default totalParameterIndex is 2 for ID and Exchange. Depending on logic this will be increased
int totalParameterIndex = 2;
// Column list starts with the two mandatory columns and grows with the
// optional body/header text columns; totalParameterIndex tracks how many
// '?' placeholders are needed.
StringBuilder queryBuilder = new StringBuilder(256)
.append("INSERT INTO ").append(repositoryName)
.append('(')
.append(EXCHANGE).append(", ")
.append(ID);
if (isStoreBodyAsText()) {
queryBuilder.append(", ").append(BODY);
totalParameterIndex++;
}
if (hasHeadersToStoreAsText()) {
for (String headerName : getHeadersToStoreAsText()) {
queryBuilder.append(", ").append(headerName);
totalParameterIndex++;
}
}
// Emit one "?, " per parameter except the last, which is closed with "?)".
queryBuilder.append(") VALUES (");
queryBuilder.append("?, ".repeat(Math.max(0, totalParameterIndex - 1)));
queryBuilder.append("?)");
queryBuilder.append(" ON CONFLICT DO NOTHING");
String sql = queryBuilder.toString();
int updateCount = insertHelper(camelContext, correlationId, exchange, sql, 1L, completed);
// A zero update count on the main repository table means the row already
// existed (conflict was silently skipped); surface it as a data violation
// so the caller can handle the concurrent insert — NOTE(review): confirm
// the caller's retry semantics.
if (updateCount == 0 && getRepositoryName().equals(repositoryName)) {
throw new DataIntegrityViolationException("No row was inserted due to data violation");
}
}
}
|
ClusteredPostgresAggregationRepository
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java
|
{
"start": 5548,
"end": 5642
}
|
class ____ extends BaseTasksResponse implements ToXContentObject {
public static
|
Response
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/MergedContextConfiguration.java
|
{
"start": 1487,
"end": 1881
}
|
class ____ all of its superclasses and
* enclosing classes via {@link ContextConfiguration @ContextConfiguration},
* {@link ActiveProfiles @ActiveProfiles}, and
* {@link TestPropertySource @TestPropertySource}.
*
* <p>Merged context resource locations, annotated classes, active profiles,
* property resource locations, and in-lined properties represent all declared
* values in the test
|
and
|
java
|
spring-projects__spring-security
|
buildSrc/src/main/java/lock/GlobalLockTask.java
|
{
"start": 272,
"end": 1031
}
|
/**
 * Gradle task that regenerates dependency lock state for this task's project
 * and all of its subprojects by resolving every resolvable configuration.
 * <p>
 * Gradle only writes lock files when the build was started with the
 * {@code --write-locks} flag, so the task fails fast when it is absent.
 */
class ____ extends DefaultTask {
@TaskAction
public void lock() {
Project taskProject = getProject();
// Without --write-locks, resolving configurations would silently skip
// writing lock files; fail with a clear, actionable message instead.
// (Original message "You just specify --write-locks argument" was
// ungrammatical and confusing.)
if (!taskProject.getGradle().getStartParameter().isWriteDependencyLocks()) {
throw new IllegalStateException("You must specify the --write-locks argument");
}
writeLocksFor(taskProject);
taskProject.getSubprojects().forEach(this::writeLocksFor);
}
/**
 * Resolves every resolvable configuration of the given project so Gradle
 * records its lock state.
 */
private void writeLocksFor(Project project) {
project.getConfigurations().configureEach(configuration -> {
if (configuration.isCanBeResolved()) {
configuration.resolve();
}
});
}
}
|
GlobalLockTask
|
java
|
apache__camel
|
core/camel-management/src/main/java/org/apache/camel/management/mbean/ManagedTransformer.java
|
{
"start": 1157,
"end": 1981
}
|
/**
 * JMX managed bean for a Transform processor: exposes the transform's
 * expression (and its language) plus the declared from/to types, all read
 * straight from the underlying {@link TransformDefinition}.
 */
class ____ extends ManagedProcessor implements ManagedTransformMBean {
public ManagedTransformer(CamelContext context, Processor processor, TransformDefinition definition) {
super(context, processor, definition);
}
@Override
public TransformDefinition getDefinition() {
// Narrow the base class definition to the transform-specific type.
return (TransformDefinition) super.getDefinition();
}
@Override
public String getExpressionLanguage() {
return getDefinition().getExpression().getLanguage();
}
@Override
public String getExpression() {
return getDefinition().getExpression().getExpression();
}
@Override
public String getFromType() {
return getDefinition().getFromType();
}
@Override
public String getToType() {
return getDefinition().getToType();
}
}
|
ManagedTransformer
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/core/src/main/java/io/quarkus/bootstrap/classloading/QuarkusClassLoader.java
|
{
"start": 37015,
"end": 37754
}
|
class ____
* is reset.
* <p>
* If this is not explicitly added to the elements list then it will be automatically
* added as the highest priority element.
*
* @param resettableElement The element
* @return This builder
*/
public Builder setResettableElement(MemoryClassPathElement resettableElement) {
this.resettableElement = resettableElement;
return this;
}
/**
* Adds an element that contains classes that will always be loaded in a parent first manner.
* <p>
* Note that this does not mean that the parent will always have this class, it is possible that
* in some cases the
|
loader
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/QueuesTest.java
|
{
"start": 11744,
"end": 12418
}
|
/**
 * Test helper that puts a fixed number of elements into a blocking queue.
 * Two latches let the test coordinate with the producer: {@code beganProducing}
 * opens as soon as the task starts, and {@code doneProducing} opens when it
 * finishes (normally or via interruption).
 */
class ____ implements Callable<@Nullable Void> {
final BlockingQueue<Object> q;
final int elements;
final CountDownLatch beganProducing = new CountDownLatch(1);
final CountDownLatch doneProducing = new CountDownLatch(1);
Producer(BlockingQueue<Object> q, int elements) {
this.q = q;
this.elements = elements;
}
@Override
public @Nullable Void call() throws InterruptedException {
try {
// Signal start before the first (potentially blocking) put.
beganProducing.countDown();
for (int i = 0; i < elements; i++) {
q.put(new Object());
}
return null;
} finally {
// Always signal completion, even if put() was interrupted.
doneProducing.countDown();
}
}
}
private static
|
Producer
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/collection/adder/_target/TargetWithoutSetter.java
|
{
"start": 297,
"end": 608
}
|
/**
 * Mapping target that deliberately exposes its collection only through a
 * getter and an adder method (no setter), so the mapper must use the adder.
 */
class ____ {
// Lazily created on first add; remains null until then.
private List<Long> pets;
public List<Long> getPets() {
return pets;
}
public void addPet(Long pet) {
// Record adder usage so tests can verify the adder path was taken.
AdderUsageObserver.setUsed( true );
List<Long> target = pets;
if ( target == null ) {
target = new ArrayList<>();
pets = target;
}
target.add( pet );
}
}
|
TargetWithoutSetter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java
|
{
"start": 2399,
"end": 22350
}
|
class ____ {
private final RestClient client;
private final RequestOptions options;
public TestSecurityClient(RestClient client) {
this(client, RequestOptions.DEFAULT);
}
public TestSecurityClient(RestClient client, RequestOptions options) {
this.client = client;
this.options = options;
}
/**
* Uses the REST API to retrieve the currently authenticated user.
* @see User.Fields
* @see org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction
*/
public Map<String, Object> authenticate() throws IOException {
final String endpoint = "/_security/_authenticate";
final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
return entityAsMap(execute(request));
}
/**
* Uses the REST API to create a new user in the native realm.
* @see org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction
*/
public void putUser(User user, SecureString password) throws IOException {
final String endpoint = "/_security/user/" + user.principal();
final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
final Map<String, Object> map = XContentTestUtils.convertToMap(user);
if (password != null) {
map.put("password", password.toString());
}
final String body = toJson(map);
request.setJsonEntity(body);
request.addParameters(Map.of("refresh", "true"));
execute(request);
}
/**
* Uses the REST API to delete a user from the native realm.
* @see org.elasticsearch.xpack.security.rest.action.user.RestDeleteUserAction
*/
public void deleteUser(String username) throws IOException {
final String endpoint = "/_security/user/" + username;
final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
request.addParameters(Map.of("refresh", "true"));
execute(request);
}
/**
* Uses the REST API to change the password of a user in the native/reserverd realms.
* @see org.elasticsearch.xpack.security.rest.action.user.RestChangePasswordAction
*/
public void changePassword(String username, SecureString password) throws IOException {
final String endpoint = "/_security/user/" + username + "/_password";
final Request request = new Request(HttpPost.METHOD_NAME, endpoint);
final String body = String.format(Locale.ROOT, """
{
"password": "%s"
}
""", password.toString());
request.setJsonEntity(body);
execute(request);
}
/**
* Uses the REST API to enable or disable a user in the native/reserved realm.
* @see org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction
*/
public void setUserEnabled(String username, boolean enabled) throws IOException {
final String endpoint = "/_security/user/" + username + "/" + (enabled ? "_enable" : "_disable");
final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
execute(request);
}
/**
* Uses the REST API to retrieve an API Key.
* @see org.elasticsearch.xpack.security.rest.action.apikey.RestGetApiKeyAction
*/
public ApiKey getApiKey(String id) throws IOException {
final String endpoint = "/_security/api_key/";
final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
request.addParameter("id", id);
final Response response = execute(request);
try (XContentParser parser = getParser(response)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
XContentParserUtils.ensureFieldName(parser, parser.nextToken(), "api_keys");
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.nextToken(), parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
final ApiKey apiKey = ApiKey.fromXContent(parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.currentToken(), parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_ARRAY, parser.nextToken(), parser);
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser);
return apiKey;
}
}
/**
* Uses the REST API to invalidate an API Key.
* @see org.elasticsearch.xpack.security.rest.action.apikey.RestInvalidateApiKeyAction
*/
public void invalidateApiKeysForUser(String username) throws IOException {
final String endpoint = "/_security/api_key/";
final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
request.setJsonEntity(String.format(Locale.ROOT, """
{
"username":"%s"
}
""", username));
execute(request);
}
/**
* Uses the REST API to invalidate API Keys given their IDs.
* @see org.elasticsearch.xpack.security.rest.action.apikey.RestInvalidateApiKeyAction
*/
public void invalidateApiKeys(final String... apiKeyIds) throws IOException {
final var endpoint = "/_security/api_key/";
final var request = new Request(HttpDelete.METHOD_NAME, endpoint);
request.setJsonEntity(XContentTestUtils.convertToXContent(Map.of("ids", apiKeyIds), XContentType.JSON).utf8ToString());
execute(request);
}
/**
* Uses the REST API to get a Role descriptor
* @see org.elasticsearch.xpack.security.rest.action.role.RestGetRolesAction
*/
public RoleDescriptor getRoleDescriptor(String roleName) throws IOException {
if (Strings.isNullOrEmpty(roleName) || roleName.contains("*") || roleName.contains(",")) {
throw new IllegalArgumentException("Provided role name must be for a single role (not [" + roleName + "])");
}
final Map<String, RoleDescriptor> descriptors = getRoleDescriptors(roleName);
final RoleDescriptor descriptor = descriptors.get(roleName);
if (descriptor == null) {
throw new IllegalStateException("Did not find role [" + roleName + "]");
}
return descriptor;
}
public Map<String, RoleDescriptor> getRoleDescriptors(String[] roles) throws IOException {
return getRoleDescriptors(Strings.arrayToCommaDelimitedString(roles));
}
private Map<String, RoleDescriptor> getRoleDescriptors(String roleParameter) throws IOException {
final String endpoint = "/_security/role/" + roleParameter;
final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
final Response response = execute(request);
final Map<String, RoleDescriptor> roles = new LinkedHashMap<>();
try (XContentParser parser = getParser(response)) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser);
final String roleName = parser.currentName();
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
final RoleDescriptor role = RoleDescriptor.parserBuilder().allowDescription(true).build().parse(roleName, parser);
roles.put(roleName, role);
}
}
return roles;
}
/**
* Uses the REST API to create a new role in the native store.
* @see org.elasticsearch.xpack.security.rest.action.role.RestPutRoleAction
*/
public DocWriteResponse.Result putRole(RoleDescriptor descriptor) throws IOException {
final String endpoint = "/_security/role/" + descriptor.getName();
final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
final String body = toJson(descriptor);
request.setJsonEntity(body);
request.addParameters(Map.of("refresh", "true"));
final Map<String, Object> response = entityAsMap(execute(request));
final String createdFieldName = "role.created";
final Object created = ObjectPath.eval(createdFieldName, response);
if (Boolean.TRUE.equals(created)) {
return DocWriteResponse.Result.CREATED;
} else if (Boolean.FALSE.equals(created)) {
return DocWriteResponse.Result.UPDATED;
} else {
throw new IllegalStateException(
"Expected boolean for [" + createdFieldName + "] flag in [" + response + "], but was [" + created + "]"
);
}
}
/**
* Uses the REST API to delete a role from the native store.
* @see org.elasticsearch.xpack.security.rest.action.role.RestDeleteRoleAction
*/
public boolean deleteRole(String roleName) throws IOException {
final String endpoint = "/_security/role/" + roleName;
final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
final Map<String, Object> response = entityAsMap(execute(request));
final Object found = response.get("found");
if (found instanceof Boolean b) {
return b;
} else {
throw new IllegalStateException("Expected boolean [found], but was [" + found + "]");
}
}
/**
* Uses the REST API to add a role-mapping to the native store.
* @see org.elasticsearch.xpack.security.rest.action.rolemapping.RestPutRoleMappingAction
*/
public void putRoleMapping(String mappingName, Map<String, Object> mappingBody) throws IOException {
putRoleMapping(mappingName, toJson(mappingBody));
}
/**
* Uses the REST API to add a role-mapping to the native store.
* @see org.elasticsearch.xpack.security.rest.action.rolemapping.RestPutRoleMappingAction
*/
public void putRoleMapping(String mappingName, String mappingJson) throws IOException {
final String endpoint = "/_security/role_mapping/" + mappingName;
final Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setJsonEntity(mappingJson);
execute(request);
}
/**
* Uses the REST API to delete a role-mapping from the native store.
* @see org.elasticsearch.xpack.security.rest.action.rolemapping.RestDeleteRoleMappingAction
*/
public void deleteRoleMapping(String mappingName) throws IOException {
final String endpoint = "/_security/role_mapping/" + mappingName;
final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
execute(request);
}
/**
* Uses the REST API to create a new access token via a password grant
* @see org.elasticsearch.xpack.security.rest.action.oauth2.RestGetTokenAction
*/
public OAuth2Token createToken(UsernamePasswordToken grant) throws IOException {
return createToken(String.format(Locale.ROOT, """
{
"grant_type":"password",
"username":"%s",
"password":"%s"
}
""", grant.principal(), grant.credentials()));
}
/**
* Uses the REST API to create a new access token via a refresh_token grant
* @see org.elasticsearch.xpack.security.rest.action.oauth2.RestGetTokenAction
*/
public OAuth2Token refreshToken(String refreshToken) throws IOException {
return createToken(String.format(Locale.ROOT, """
{
"grant_type":"refresh_token",
"refresh_token":"%s"
}
""", refreshToken));
}
/**
* Uses the REST API to create a new access token via a client_credentials grant
* @see org.elasticsearch.xpack.security.rest.action.oauth2.RestGetTokenAction
*/
public OAuth2Token createTokenWithClientCredentialsGrant() throws IOException {
return createToken("""
{
"grant_type":"client_credentials"
}
""");
}
private OAuth2Token createToken(String requestBody) throws IOException {
final String endpoint = "/_security/oauth2/token";
final Request request = new Request(HttpPost.METHOD_NAME, endpoint);
request.setJsonEntity(requestBody);
final Map<String, Object> responseBody = entityAsMap(execute(request));
return new OAuth2Token(
(String) responseBody.get("access_token"),
Optional.ofNullable((String) responseBody.get("refresh_token")),
ObjectPath.eval("authentication.username", responseBody)
);
}
/**
* Uses the REST API to invalidate an access-token
* @see org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction
*/
public TokenInvalidation invalidateAccessToken(String accessToken) throws IOException {
return invalidateTokens(String.format(Locale.ROOT, """
{
"token":"%s"
}
""", accessToken));
}
/**
* Uses the REST API to invalidate a refresh-token
* @see org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction
*/
public TokenInvalidation invalidateRefreshToken(String refreshToken) throws IOException {
return invalidateTokens(String.format(Locale.ROOT, """
{
"refresh_token":"%s"
}
""", refreshToken));
}
/**
* Uses the REST API to invalidate all tokens owned by a named user
* @see org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction
*/
public TokenInvalidation invalidateTokensForUser(String username) throws IOException {
return invalidateTokens(String.format(Locale.ROOT, """
{
"username":"%s"
}
""", username));
}
/**
* Uses the REST API to invalidate all tokens owned by a named realm
* @see org.elasticsearch.xpack.security.rest.action.oauth2.RestInvalidateTokenAction
*/
public TokenInvalidation invalidateTokensForRealm(String realmName) throws IOException {
return invalidateTokens(String.format(Locale.ROOT, """
{
"realm_name":"%s"
}
""", realmName));
}
@SuppressWarnings("unchecked")
public TokenInvalidation invalidateTokens(String requestBody) throws IOException {
final String endpoint = "/_security/oauth2/token";
final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
// This API returns 404 (with the same body as a 200 response) if there's nothing to delete.
// RestClient will throw an exception on 404, but we don't want that, we want to parse the body and return it
setIgnoredErrorResponseCodes(request, RestStatus.NOT_FOUND);
request.setJsonEntity(requestBody);
final Map<String, Object> responseBody = entityAsMap(execute(request));
final List<Map<String, ?>> errors = (List<Map<String, ?>>) responseBody.get("error_details");
return new TokenInvalidation(
((Number) responseBody.get("invalidated_tokens")).intValue(),
((Number) responseBody.get("previously_invalidated_tokens")).intValue(),
errors == null ? List.of() : errors.stream().map(TestSecurityClient::toException).toList()
);
}
/**
* Uses the REST API to clear the cache for one or more realms
* @see org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction
*/
public void clearRealmCache(String realm) throws IOException {
final String endpoint = "/_security/realm/" + realm + "/_clear_cache";
final Request request = new Request(HttpPost.METHOD_NAME, endpoint);
execute(request);
}
/**
* Uses the REST API to authenticate using delegated PKI
* @see org.elasticsearch.xpack.security.rest.action.RestDelegatePkiAuthenticationAction
* @return A {@code Tuple} of <em>access-token</em> and <em>response-body</em>.
*/
public Tuple<String, Map<String, Object>> delegatePkiAuthentication(List<X509Certificate> certificateChain) throws IOException {
final String endpoint = "/_security/delegate_pki";
final Request request = new Request(HttpPost.METHOD_NAME, endpoint);
final List<String> certificateContent = certificateChain.stream().map(c -> {
try {
return c.getEncoded();
} catch (CertificateEncodingException e) {
throw new RuntimeException("Failed to encode certificate", e);
}
}).map(encoded -> Base64.getEncoder().encodeToString(encoded)).collect(Collectors.toList());
final Map<String, Object> body = Map.of("x509_certificate_chain", certificateContent);
request.setJsonEntity(toJson(body));
final Map<String, Object> response = entityAsMap(execute(request));
return new Tuple<>(Objects.toString(response.get("access_token"), null), response);
}
/**
* Uses the REST API to create an application privilege
* @see org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegesAction
*/
public void putApplicationPrivilege(String applicationName, String privilegeName, String[] actions) throws IOException {
final String endpoint = "/_security/privilege/";
final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
final Map<String, Object> body = Map.of(applicationName, Map.of(privilegeName, Map.of("actions", List.of(actions))));
request.setJsonEntity(toJson(body));
execute(request);
}
private static String toJson(Map<String, ? extends Object> map) throws IOException {
final XContentBuilder builder = XContentFactory.jsonBuilder().map(map);
final BytesReference bytes = BytesReference.bytes(builder);
return bytes.utf8ToString();
}
private static String toJson(ToXContent obj) throws IOException {
final XContentBuilder builder = XContentFactory.jsonBuilder();
if (obj.isFragment()) {
builder.startObject();
obj.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
} else {
obj.toXContent(builder, ToXContent.EMPTY_PARAMS);
}
final BytesReference bytes = BytesReference.bytes(builder);
return bytes.utf8ToString();
}
private XContentParser getParser(Response response) throws IOException {
final byte[] responseBody = EntityUtils.toByteArray(response.getEntity());
return XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, responseBody);
}
private static ElasticsearchException toException(Map<String, ?> map) {
try (var parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, toJson(map))) {
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
return ElasticsearchException.fromXContent(parser);
} catch (IOException e) {
throw new RuntimeIoException(e);
}
}
private Response execute(Request request) throws IOException {
request.setOptions(options);
return this.client.performRequest(request);
}
public record OAuth2Token(String accessToken, Optional<String> refreshToken, String principal) {
@Nullable
public String getRefreshToken() {
return refreshToken.orElse(null);
}
}
public record TokenInvalidation(int invalidated, int previouslyInvalidated, List<ElasticsearchException> errors) {}
}
|
TestSecurityClient
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/runtime/src/main/java/io/quarkus/vertx/http/runtime/QuarkusErrorHandler.java
|
{
"start": 15140,
"end": 18083
}
|
class ____ {
private ContentTypes() {
}
private static final String APPLICATION_JSON = "application/json";
private static final String TEXT_JSON = "text/json";
private static final String TEXT_HTML = "text/html";
private static final String TEXT_PLAIN = "text/plain";
private static final String APPLICATION_XHTML = "application/xhtml+xml";
private static final String APPLICATION_XML = "application/xml";
private static final String TEXT_XML = "text/xml";
// WARNING: The order matters for wildcards: if text/json is before text/html, then text/* will match text/json.
private static final List<MIMEHeader> BASE_HEADERS = List.of(
createParsableMIMEValue(APPLICATION_JSON),
createParsableMIMEValue(TEXT_JSON),
createParsableMIMEValue(TEXT_HTML),
createParsableMIMEValue(APPLICATION_XHTML),
createParsableMIMEValue(APPLICATION_XML),
createParsableMIMEValue(TEXT_XML));
private static final Collection<MIMEHeader> SUPPORTED = createSupported();
private static final Collection<MIMEHeader> SUPPORTED_CURL = createSupportedCurl();
private static Collection<MIMEHeader> createSupported() {
var supported = new ArrayList<MIMEHeader>(BASE_HEADERS.size() + 1);
supported.addAll(BASE_HEADERS);
supported.add(createParsableMIMEValue(TEXT_PLAIN));
return Collections.unmodifiableCollection(supported);
}
private static Collection<MIMEHeader> createSupportedCurl() {
var supportedCurl = new ArrayList<MIMEHeader>(BASE_HEADERS.size() + 1);
supportedCurl.add(createParsableMIMEValue(TEXT_PLAIN));
supportedCurl.addAll(BASE_HEADERS);
return Collections.unmodifiableCollection(supportedCurl);
}
private static ParsableMIMEValue createParsableMIMEValue(String applicationJson) {
return new ParsableMIMEValue(applicationJson).forceParse();
}
static String pickFirstSupportedAndAcceptedContentType(RoutingContext context) {
List<MIMEHeader> acceptableTypes = context.parsedHeaders().accept();
String userAgent = context.request().getHeader("User-Agent");
if (userAgent != null && (userAgent.toLowerCase(Locale.ROOT).startsWith("wget/")
|| userAgent.toLowerCase(Locale.ROOT).startsWith("curl/"))) {
MIMEHeader result = context.parsedHeaders().findBestUserAcceptedIn(acceptableTypes, SUPPORTED_CURL);
return result == null ? null : result.value();
} else {
MIMEHeader result = context.parsedHeaders().findBestUserAcceptedIn(acceptableTypes, SUPPORTED);
return result == null ? null : result.value();
}
}
}
private static
|
ContentTypes
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/ReturnValueIgnoredTest.java
|
{
"start": 25583,
"end": 25840
}
|
class ____ {
void f(Optional<Integer> o) {
o.map(i -> i + 1);
}
}
""")
.addOutputLines(
"Test.java",
"""
import java.util.Optional;
|
Test
|
java
|
apache__camel
|
dsl/camel-xml-io-dsl/src/main/java/org/apache/camel/dsl/xml/io/XmlRoutesBuilderLoader.java
|
{
"start": 2902,
"end": 22458
}
|
class ____ extends RouteBuilderLoaderSupport {
public static final Logger LOG = LoggerFactory.getLogger(XmlRoutesBuilderLoader.class);
public static final String EXTENSION = "xml";
private final Map<String, Boolean> preparseDone = new ConcurrentHashMap<>();
private final Map<String, Resource> resourceCache = new ConcurrentHashMap<>();
private final Map<String, XmlStreamInfo> xmlInfoCache = new ConcurrentHashMap<>();
private final Map<String, BeansDefinition> camelAppCache = new ConcurrentHashMap<>();
private final List<BeanFactoryDefinition<?>> delayedRegistrations = new ArrayList<>();
private final AtomicInteger counter = new AtomicInteger(0);
public XmlRoutesBuilderLoader() {
super(EXTENSION);
}
XmlRoutesBuilderLoader(String extension) {
super(extension);
}
@Override
public void preParseRoute(Resource resource) throws Exception {
// preparsing is done at early stage, so we have a chance to load additional beans and populate
// Camel registry
if (preparseDone.getOrDefault(resource.getLocation(), false)) {
return;
}
XmlStreamInfo xmlInfo = xmlInfo(resource);
if (xmlInfo.isValid()) {
String root = xmlInfo.getRootElementName();
if ("beans".equals(root) || "blueprint".equals(root) || "camel".equals(root)) {
new XmlModelParser(resource, xmlInfo.getRootElementNamespace())
.parseBeansDefinition()
.ifPresent(bd -> {
registerBeans(resource, bd);
camelAppCache.put(resource.getLocation(), bd);
});
}
}
preparseDone.put(resource.getLocation(), true);
}
@Override
public RouteBuilder doLoadRouteBuilder(Resource input) throws Exception {
final Resource resource = resource(input);
XmlStreamInfo xmlInfo = xmlInfo(input);
if (!xmlInfo.isValid()) {
// should be valid, because we checked it before
LOG.warn("Invalid XML document: {}", xmlInfo.getProblem().getMessage());
return null;
}
return new RouteConfigurationBuilder() {
@Override
public void configure() throws Exception {
String resourceLocation = input.getLocation();
try {
switch (xmlInfo.getRootElementName()) {
case "beans", "blueprint", "camel" -> {
BeansDefinition def = camelAppCache.get(resourceLocation);
if (def != null) {
configureCamel(def);
} else {
new XmlModelParser(resource, xmlInfo.getRootElementNamespace())
.parseBeansDefinition()
.ifPresent(this::configureCamel);
}
}
case "dataFormats", "dataFormat" -> new XmlModelParser(resource, xmlInfo.getRootElementNamespace())
.parseDataFormatsDefinition()
.ifPresent(this::addDataFormats);
case "routeTemplate", "routeTemplates" ->
new XmlModelParser(resource, xmlInfo.getRootElementNamespace())
.parseRouteTemplatesDefinition()
.ifPresent(this::addRouteTemplatesCollection);
case "templatedRoutes", "templatedRoute" ->
new XmlModelParser(resource, xmlInfo.getRootElementNamespace())
.parseTemplatedRoutesDefinition()
.ifPresent(this::addTemplatedRoutesCollection);
case "rests", "rest" -> new XmlModelParser(resource, xmlInfo.getRootElementNamespace())
.parseRestsDefinition()
.ifPresent(this::addRests);
case "routes", "route" -> new XmlModelParser(resource, xmlInfo.getRootElementNamespace())
.parseRoutesDefinition()
.ifPresent(this::addRoutes);
default -> {
}
}
} finally {
// knowing this is the last time an XML may have been parsed, we can clear the cache
// (route may get reloaded later)
resourceCache.remove(resourceLocation);
xmlInfoCache.remove(resourceLocation);
camelAppCache.remove(resourceLocation);
preparseDone.remove(resourceLocation);
}
}
@Override
public void configuration() throws Exception {
switch (xmlInfo.getRootElementName()) {
// load any route configuration before that may be nested under camel/spring/blueprint root tag
case "beans", "blueprint", "camel", "routeConfigurations", "routeConfiguration": {
BeansDefinition bp = camelAppCache.get(input.getLocation());
if (bp != null) {
bp.getRouteConfigurations().forEach(rc -> {
rc.setResource(getResource());
List<RouteConfigurationDefinition> list = new ArrayList<>();
list.add(rc);
RouteConfigurationsDefinition def = new RouteConfigurationsDefinition();
def.setResource(getResource());
def.setRouteConfigurations(list);
addConfigurations(def);
});
// remove the configurations we have added
bp.getRouteConfigurations().clear();
}
new XmlModelParser(resource, xmlInfo.getRootElementNamespace())
.parseRouteConfigurationsDefinition()
.ifPresent(this::addConfigurations);
}
default: {
// NO-OP
}
}
}
private void configureCamel(BeansDefinition app) {
if (!delayedRegistrations.isEmpty()) {
// some of the beans were not available yet, so we have to try register them now
for (BeanFactoryDefinition<?> def : delayedRegistrations) {
def.setResource(getResource());
registerBeanDefinition(def, false);
}
delayedRegistrations.clear();
}
// we have access to beans and spring beans, but these are already processed
// in preParseRoute() and possibly registered in
// org.apache.camel.main.BaseMainSupport.postProcessCamelRegistry() (if given Main implementation
// decides to do so)
if (app.getRestConfigurations().size() > 1) {
throw new RuntimeException("There should only be one <restConfiguration>");
}
if (app.getRestConfigurations().size() == 1) {
RestConfigurationDefinition config = app.getRestConfigurations().get(0);
try {
config.asRestConfiguration(getCamelContext(), getCamelContext().getRestConfiguration());
} catch (Exception e) {
throw new RuntimeException(e);
}
}
if (app.getDataFormats() != null) {
app.getDataFormats().forEach(def -> {
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
});
DataFormatsDefinition list = new DataFormatsDefinition();
list.setDataFormats(app.getDataFormats());
addDataFormats(list);
}
if (!app.getRests().isEmpty()) {
app.getRests().forEach(def -> {
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
});
RestsDefinition def = new RestsDefinition();
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
def.setRests(app.getRests());
setRestCollection(def);
}
if (!app.getRouteConfigurations().isEmpty()) {
app.getRouteConfigurations().forEach(def -> {
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
});
RouteConfigurationsDefinition def = new RouteConfigurationsDefinition();
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
def.setRouteConfigurations(app.getRouteConfigurations());
addConfigurations(def);
}
if (!app.getRouteTemplates().isEmpty()) {
app.getRouteTemplates().forEach(def -> {
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
});
RouteTemplatesDefinition def = new RouteTemplatesDefinition();
def.setResource(getResource());
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setRouteTemplates(app.getRouteTemplates());
setRouteTemplateCollection(def);
}
if (!app.getTemplatedRoutes().isEmpty()) {
app.getTemplatedRoutes().forEach(def -> {
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
});
TemplatedRoutesDefinition def = new TemplatedRoutesDefinition();
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
def.setTemplatedRoutes(app.getTemplatedRoutes());
setTemplatedRouteCollection(def);
}
if (!app.getRoutes().isEmpty()) {
app.getRoutes().forEach(def -> {
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
});
RoutesDefinition def = new RoutesDefinition();
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
def.setRoutes(app.getRoutes());
addRoutes(def);
}
}
private void addTemplatedRoutesCollection(TemplatedRoutesDefinition list) {
for (TemplatedRouteDefinition def : list.getTemplatedRoutes()) {
CamelContextAware.trySetCamelContext(def, getCamelContext());
}
setTemplatedRouteCollection(list);
}
private void addRouteTemplatesCollection(RouteTemplatesDefinition list) {
for (RouteTemplateDefinition def : list.getRouteTemplates()) {
CamelContextAware.trySetCamelContext(def, getCamelContext());
}
setRouteTemplateCollection(list);
}
private void addRests(RestsDefinition list) {
for (RestDefinition def : list.getRests()) {
CamelContextAware.trySetCamelContext(def, getCamelContext());
}
setRestCollection(list);
}
private void addRoutes(RoutesDefinition routes) {
// xml routes must be prepared in the same way java-dsl (via RoutesDefinition)
// so create a copy and use the fluent builder to add the route
for (RouteDefinition route : routes.getRoutes()) {
CamelContextAware.trySetCamelContext(route, getCamelContext());
getRouteCollection().route(route);
}
}
private void addConfigurations(RouteConfigurationsDefinition configurations) {
// xml routes must be prepared in the same way java-dsl (via RouteConfigurationDefinition)
// so create a copy and use the fluent builder to add the route
for (RouteConfigurationDefinition config : configurations.getRouteConfigurations()) {
CamelContextAware.trySetCamelContext(config, getCamelContext());
getRouteConfigurationCollection().routeConfiguration(config);
}
}
private void addDataFormats(DataFormatsDefinition dataFormats) {
Model model = getCamelContext().getCamelContextExtension().getContextPlugin(Model.class);
dataFormats.getDataFormats().forEach(def -> {
CamelContextAware.trySetCamelContext(def, getCamelContext());
def.setResource(getResource());
});
model.setDataFormats(dataFormats.asMap());
}
};
}
private Resource resource(Resource resource) {
return resourceCache.computeIfAbsent(resource.getLocation(), l -> new CachedResource(resource));
}
private XmlStreamInfo xmlInfo(Resource resource) {
return xmlInfoCache.computeIfAbsent(resource.getLocation(), l -> {
try {
// instead of parsing the document NxM times (for each namespace x root element combination),
// we preparse it using XmlStreamDetector and then parse it fully knowing what's inside.
// we could even do better, by passing already preparsed information through config file, but
// it's getting complicated when using multiple files.
XmlStreamDetector detector = new XmlStreamDetector(resource.getInputStream());
return detector.information();
} catch (IOException e) {
XmlStreamInfo invalid = new XmlStreamInfo();
invalid.setProblem(e);
return invalid;
}
});
}
private void registerBeans(Resource resource, BeansDefinition app) {
// <component-scan> - discover and register beans directly with Camel injection
Set<String> packagesToScan = new LinkedHashSet<>();
app.getComponentScanning().forEach(cs -> {
packagesToScan.add(cs.getBasePackage());
});
PackageScanHelper.registerBeans(getCamelContext(), packagesToScan);
// <bean>s - register Camel beans directly with Camel injection
for (BeanFactoryDefinition<?> def : app.getBeans()) {
def.setResource(resource);
registerBeanDefinition(def, true);
}
// <s:bean>, <s:beans> and <s:alias> elements - all the elements in single BeansDefinition have
// one parent org.w3c.dom.Document - and this is what we collect from each resource
if (!app.getSpringOrBlueprintBeans().isEmpty()) {
Document doc = app.getSpringOrBlueprintBeans().get(0).getOwnerDocument();
String ns = doc.getDocumentElement().getNamespaceURI();
String id = null;
if ("http://www.springframework.org/schema/beans".equals(ns)) {
id = String.format("camel-xml-io-dsl-spring-xml:%05d:%s", counter.incrementAndGet(), resource.getLocation());
} else if ("http://www.osgi.org/xmlns/blueprint/v1.0.0".equals(ns)) {
id = String.format("camel-xml-io-dsl-blueprint-xml:%05d:%s", counter.incrementAndGet(),
resource.getLocation());
}
if (id != null) {
getCamelContext().getRegistry().bind(id, doc);
}
}
}
/**
* Try to instantiate bean from the definition. Depending on the stage ({@link #preParseRoute} or
* {@link #doLoadRouteBuilder}), a failure may lead to delayed registration.
*/
private void registerBeanDefinition(BeanFactoryDefinition<?> def, boolean delayIfFailed) {
CamelBeanPostProcessor cbpp = PluginHelper.getBeanPostProcessor(getCamelContext());
Predicate<?> lazy = cbpp.getLazyBeanStrategy();
String name = def.getName();
String type = def.getType();
try {
// only do lazy bean on 2nd pass as 1st pass may work
if (!delayIfFailed && lazy != null && lazy.test(null)) {
bindLazyBean(def, name, () -> {
try {
return BeanModelHelper.newInstance(def, getCamelContext());
} catch (Exception e) {
throw new RuntimeException(e);
}
});
} else {
Object target = BeanModelHelper.newInstance(def, getCamelContext());
bindBean(def, name, target);
}
} catch (Exception e) {
if (delayIfFailed) {
delayedRegistrations.add(def);
} else {
String msg
= name != null ? "Error creating bean: " + name + " of type: " + type : "Error creating bean: " + type;
throw new RuntimeException(msg, e);
}
}
}
protected void bindBean(BeanFactoryDefinition<?> def, String name, Object target) throws Exception {
// unbind in case we reload
getCamelContext().getRegistry().unbind(name);
getCamelContext().getRegistry().bind(name, target, def.getInitMethod(), def.getDestroyMethod());
// register bean in model
Model model = getCamelContext().getCamelContextExtension().getContextPlugin(Model.class);
model.addCustomBean(def);
}
protected void bindLazyBean(
BeanFactoryDefinition<?> def,
String name, Supplier<Object> target)
throws Exception {
Class<?> beanType = null;
if (def.getType() != null) {
beanType = getCamelContext().getClassResolver().resolveClass(def.getType());
}
if (beanType == null) {
beanType = Object.class;
}
// unbind in case we reload
getCamelContext().getRegistry().unbind(name);
getCamelContext().getRegistry().bind(name, beanType, target, def.getInitMethod(), def.getDestroyMethod());
// register bean in model
Model model = getCamelContext().getCamelContextExtension().getContextPlugin(Model.class);
model.addCustomBean(def);
}
}
|
XmlRoutesBuilderLoader
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/serde/RexNodeJsonSerdeTest.java
|
{
"start": 22668,
"end": 22865
}
|
class ____ {
private final CatalogPlanCompilation compilation = CatalogPlanCompilation.ALL;
@Nested
@DisplayName("and CatalogPlanRestore == IDENTIFIER")
|
TestCompileAll
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/functions/table/CachingAsyncLookupFunctionTest.java
|
{
"start": 4161,
"end": 5529
}
|
class ____ extends AsyncLookupFunction {
private final transient ConcurrentMap<RowData, Collection<RowData>> data =
new ConcurrentHashMap<>();
private transient AtomicInteger lookupCount;
private transient ExecutorService executor;
@Override
public void open(FunctionContext context) throws Exception {
data.put(KEY_1, VALUE_1);
data.put(KEY_2, VALUE_2);
lookupCount = new AtomicInteger(0);
executor = Executors.newFixedThreadPool(3);
}
@Override
public CompletableFuture<Collection<RowData>> asyncLookup(RowData keyRow) {
return CompletableFuture.supplyAsync(
() -> {
try {
Thread.sleep(ThreadLocalRandom.current().nextInt(0, 10));
Collection<RowData> values = data.get(keyRow);
lookupCount.incrementAndGet();
return values;
} catch (Exception e) {
throw new RuntimeException("Failed to lookup value", e);
}
},
executor);
}
public AtomicInteger getLookupCount() {
return lookupCount;
}
}
}
|
TestingAsyncLookupFunction
|
java
|
apache__flink
|
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dql/SqlShowTables.java
|
{
"start": 1389,
"end": 2401
}
|
class ____ extends SqlShowCall {
private final SqlTableKind kind;
public SqlShowTables(
SqlParserPos pos,
SqlTableKind kind,
String preposition,
SqlIdentifier databaseName,
boolean notLike,
SqlCharStringLiteral likeLiteral) {
// only LIKE currently supported for SHOW TABLES
super(
pos,
preposition,
databaseName,
likeLiteral == null ? null : "LIKE",
likeLiteral,
notLike);
this.kind = kind;
}
@Override
public SqlOperator getOperator() {
return kind.getOperator();
}
@Override
String getOperationName() {
return getOperator().getName();
}
public SqlTableKind getTableKind() {
return kind;
}
/**
* The kind of table. Keep in sync with {@link
* org.apache.flink.table.catalog.CatalogBaseTable.TableKind}.
*/
public
|
SqlShowTables
|
java
|
quarkusio__quarkus
|
extensions/redis-client/deployment/src/main/java/io/quarkus/redis/deployment/client/DevServicesRedisProcessor.java
|
{
"start": 9634,
"end": 11555
}
|
class ____ extends GenericContainer<QuarkusPortRedisContainer> implements Startable {
private final OptionalInt fixedExposedPort;
private final boolean useSharedNetwork;
private final String hostName;
public QuarkusPortRedisContainer(DockerImageName dockerImageName, OptionalInt fixedExposedPort,
String defaultNetworkId, boolean useSharedNetwork) {
super(dockerImageName);
this.fixedExposedPort = fixedExposedPort;
this.useSharedNetwork = useSharedNetwork;
this.hostName = ConfigureUtil.configureNetwork(this, defaultNetworkId, useSharedNetwork, "redis");
}
public QuarkusPortRedisContainer withSharedServiceLabel(LaunchMode launchMode, String serviceName) {
return configureSharedServiceLabel(this, launchMode, DEV_SERVICE_LABEL, serviceName);
}
@Override
protected void configure() {
super.configure();
if (useSharedNetwork) {
return;
}
if (fixedExposedPort.isPresent()) {
addFixedExposedPort(fixedExposedPort.getAsInt(), REDIS_EXPOSED_PORT);
} else {
addExposedPort(REDIS_EXPOSED_PORT);
}
}
public int getPort() {
if (useSharedNetwork) {
return REDIS_EXPOSED_PORT;
}
if (fixedExposedPort.isPresent()) {
return fixedExposedPort.getAsInt();
}
return super.getFirstMappedPort();
}
@Override
public String getHost() {
return useSharedNetwork ? hostName : super.getHost();
}
public void close() {
super.close();
}
@Override
public String getConnectionInfo() {
return getHost() + ":" + getPort();
}
}
}
|
QuarkusPortRedisContainer
|
java
|
apache__kafka
|
metadata/src/main/java/org/apache/kafka/metadata/placement/StripedReplicaPlacer.java
|
{
"start": 8251,
"end": 9954
}
|
class ____ {
private final BrokerList fenced = new BrokerList();
private final BrokerList unfenced = new BrokerList();
/**
* Initialize this rack.
*
* @param random The random number generator.
*/
void initialize(Random random) {
fenced.initialize(random);
unfenced.initialize(random);
}
void shuffle(Random random) {
fenced.shuffle(random);
unfenced.shuffle(random);
}
BrokerList fenced() {
return fenced;
}
BrokerList unfenced() {
return unfenced;
}
/**
* Get the next unfenced broker in this rack, or -1 if there are no more brokers
* to be returned.
*
* @param epoch The current iteration epoch.
*
* @return The broker ID, or -1 if there are no more brokers to be
* returned in this epoch.
*/
int nextUnfenced(int epoch) {
return unfenced.next(epoch);
}
/**
* Get the next broker in this rack, or -1 if there are no more brokers to be
* returned.
*
* @param epoch The current iteration epoch.
*
* @return The broker ID, or -1 if there are no more brokers to be
* returned in this epoch.
*/
int next(int epoch) {
int result = unfenced.next(epoch);
if (result >= 0) return result;
return fenced.next(epoch);
}
}
/**
* A list of racks that we can iterate through.
*/
static
|
Rack
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_2538/GroupDto.java
|
{
"start": 232,
"end": 397
}
|
class ____ {
private final String id;
public GroupDto(String id) {
this.id = id;
}
public String getId() {
return id;
}
}
|
GroupDto
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.