language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-thymeleaf/src/test/java/org/apache/camel/component/thymeleaf/ThymeleafFileResolverTest.java | {
"start": 1558,
"end": 4917
} | class ____ extends ThymeleafAbstractBaseTest {
@Test
public void testThymeleaf() throws InterruptedException {
MockEndpoint mock = getMockEndpoint(MOCK_RESULT);
mock.expectedMessageCount(1);
mock.message(0).body().contains(THANK_YOU_FOR_YOUR_ORDER);
mock.message(0).body().endsWith(SPAZZ_TESTING_SERVICE);
mock.message(0).header(ThymeleafConstants.THYMELEAF_TEMPLATE).isNull();
mock.message(0).header(ThymeleafConstants.THYMELEAF_VARIABLE_MAP).isNull();
mock.message(0).header(FIRST_NAME).isEqualTo(JANE);
template.request(DIRECT_START, basicHeaderProcessor);
mock.assertIsSatisfied();
ThymeleafEndpoint thymeleafEndpoint = context.getEndpoint(
"thymeleaf:src/test/resources/org/apache/camel/component/thymeleaf/letter.html?allowContextMapAll=true&resolver=FILE",
ThymeleafEndpoint.class);
assertAll("properties",
() -> assertNotNull(thymeleafEndpoint),
() -> assertTrue(thymeleafEndpoint.isAllowContextMapAll()),
() -> assertNull(thymeleafEndpoint.getCacheable()),
() -> assertNull(thymeleafEndpoint.getCacheTimeToLive()),
() -> assertNull(thymeleafEndpoint.getCheckExistence()),
() -> assertNull(thymeleafEndpoint.getEncoding()),
() -> assertEquals(ExchangePattern.InOut, thymeleafEndpoint.getExchangePattern()),
() -> assertNull(thymeleafEndpoint.getOrder()),
() -> assertNull(thymeleafEndpoint.getPrefix()),
() -> assertEquals(ThymeleafResolverType.FILE, thymeleafEndpoint.getResolver()),
() -> assertNull(thymeleafEndpoint.getSuffix()),
() -> assertNotNull(thymeleafEndpoint.getTemplateEngine()),
() -> assertNull(thymeleafEndpoint.getTemplateMode()));
assertEquals(1, thymeleafEndpoint.getTemplateEngine().getTemplateResolvers().size());
ITemplateResolver resolver = thymeleafEndpoint.getTemplateEngine().getTemplateResolvers().stream().findFirst().get();
assertTrue(resolver instanceof FileTemplateResolver);
FileTemplateResolver templateResolver = (FileTemplateResolver) resolver;
assertAll("templateResolver",
() -> assertTrue(templateResolver.isCacheable()),
() -> assertNull(templateResolver.getCacheTTLMs()),
() -> assertNull(templateResolver.getCharacterEncoding()),
() -> assertFalse(templateResolver.getCheckExistence()),
() -> assertNull(templateResolver.getOrder()),
() -> assertNull(templateResolver.getPrefix()),
() -> assertNull(templateResolver.getSuffix()),
() -> assertEquals(TemplateMode.HTML, templateResolver.getTemplateMode()));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from(DIRECT_START)
.setBody(simple(SPAZZ_TESTING_SERVICE))
.to("thymeleaf:src/test/resources/org/apache/camel/component/thymeleaf/letter.html?allowContextMapAll=true&resolver=FILE")
.to(MOCK_RESULT);
}
};
}
}
| ThymeleafFileResolverTest |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/ItemSetMapReduceValueSource.java | {
"start": 14774,
"end": 17673
} | class ____ extends ItemSetMapReduceValueSource {
private final ValuesSource.Numeric source;
private final IncludeExclude.LongFilter longFilter;
public NumericValueSource(
ValuesSourceConfig config,
int id,
IncludeExclude includeExclude,
AbstractItemSetMapReducer.OrdinalOptimization unusedOrdinalOptimization,
Optional<LeafReaderContext> unusedCtx
) {
super(config, id, ValueFormatter.LONG);
this.source = (Numeric) config.getValuesSource();
this.longFilter = includeExclude == null ? null : includeExclude.convertToLongFilter(config.format());
}
@Override
ValueCollector getValueCollector(LeafReaderContext ctx) throws IOException {
final SortedNumericLongValues values = source.longValues(ctx);
final LongValues singleton = SortedNumericLongValues.unwrapSingleton(values);
final Field field = getField();
final Tuple<Field, List<Object>> empty = new Tuple<>(field, Collections.emptyList());
return singleton != null ? getValueCollector(singleton, empty, field) : getValueCollector(values, empty, field);
}
private ValueCollector getValueCollector(SortedNumericLongValues values, Tuple<Field, List<Object>> empty, Field field) {
return doc -> {
if (values.advanceExact(doc)) {
final int valuesCount = values.docValueCount();
if (valuesCount == 1) {
final long v = values.nextValue();
if (longFilter == null || longFilter.accept(v)) {
return new Tuple<>(field, Collections.singletonList(v));
}
return empty;
}
final List<Object> objects = new ArrayList<>(valuesCount);
for (int i = 0; i < valuesCount; ++i) {
final long v = values.nextValue();
if (longFilter == null || longFilter.accept(v)) {
objects.add(v);
}
}
return new Tuple<>(field, objects);
}
return empty;
};
}
private ValueCollector getValueCollector(LongValues values, Tuple<Field, List<Object>> empty, Field field) {
return doc -> {
if (values.advanceExact(doc)) {
final long v = values.longValue();
if (longFilter == null || longFilter.accept(v)) {
return new Tuple<>(field, Collections.singletonList(v));
}
return empty;
}
return empty;
};
}
}
}
| NumericValueSource |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/emops/MergeMultipleEntityCopiesCustomTest.java | {
"start": 1346,
"end": 5341
} | class ____ {
@AfterEach
void cleanup(EntityManagerFactoryScope scope) {
scope.getEntityManagerFactory().getSchemaManager().truncate();
}
@Test
public void testMergeMultipleEntityCopiesAllowed(EntityManagerFactoryScope scope) {
Item item1 = new Item();
item1.setName( "item1" );
Hoarder hoarder = new Hoarder();
hoarder.setName( "joe" );
scope.inTransaction(
entityManager -> {
entityManager.persist( item1 );
entityManager.persist( hoarder );
}
);
// Get another representation of the same Item from a different EntityManager.
Item item1_1 = scope.fromTransaction(
entityManager -> entityManager.find( Item.class, item1.getId() )
);
// item1_1 and item1_2 are unmodified representations of the same persistent entity.
assertFalse( item1 == item1_1 );
assertTrue( item1.equals( item1_1 ) );
// Update hoarder (detached) to references both representations.
hoarder.getItems().add( item1 );
hoarder.setFavoriteItem( item1_1 );
scope.inTransaction(
entityManager -> {
// the merge should succeed because it does not have Category copies.
// (CustomEntityCopyObserver does not allow Category copies; it does allow Item copies)
Hoarder _hoarder = entityManager.merge( hoarder );
assertEquals( 1, _hoarder.getItems().size() );
assertSame( _hoarder.getFavoriteItem(), _hoarder.getItems().iterator().next() );
assertEquals( item1.getId(), _hoarder.getFavoriteItem().getId() );
assertEquals( item1.getCategory(), _hoarder.getFavoriteItem().getCategory() );
}
);
scope.inTransaction(
entityManager -> {
Hoarder _hoarder = entityManager.find( Hoarder.class, hoarder.getId() );
assertEquals( 1, _hoarder.getItems().size() );
assertSame( _hoarder.getFavoriteItem(), _hoarder.getItems().iterator().next() );
assertEquals( item1.getId(), _hoarder.getFavoriteItem().getId() );
assertEquals( item1.getCategory(), _hoarder.getFavoriteItem().getCategory() );
}
);
}
@Test
public void testMergeMultipleEntityCopiesAllowedAndDisallowed(EntityManagerFactoryScope scope) {
Item item1 = new Item();
item1.setName( "item1 name" );
Category category = new Category();
category.setName( "category" );
item1.setCategory( category );
category.setExampleItem( item1 );
scope.inTransaction(
entityManager -> {
entityManager.persist( item1 );
}
);
// get another representation of item1
Item item1_1 = scope.fromTransaction(
entityManager -> {
Item _item1_1 = entityManager.find( Item.class, item1.getId() );
// make sure item1_1.category is initialized
Hibernate.initialize( _item1_1.getCategory() );
return _item1_1;
}
);
scope.inEntityManager(
entityManager -> {
try {
entityManager.getTransaction().begin();
Item item1Merged = entityManager.merge( item1 );
// make sure item1Merged.category is also managed
Hibernate.initialize( item1Merged.getCategory() );
item1Merged.setCategory( category );
category.setExampleItem( item1_1 );
// now item1Merged is managed and it has a nested detached item
// and there is multiple managed/detached Category objects
try {
// the following should fail because multiple copies of Category objects is not allowed by
// CustomEntityCopyObserver
entityManager.merge( item1Merged );
fail( "should have failed because CustomEntityCopyObserver does not allow multiple copies of a Category. " );
}
catch (IllegalStateException ex) {
// expected
}
}
finally {
entityManager.getTransaction().rollback();
}
}
);
scope.inTransaction(
entityManager -> {
Item _item1 = entityManager.find( Item.class, item1.getId() );
assertEquals( category.getName(), _item1.getCategory().getName() );
assertSame( _item1, _item1.getCategory().getExampleItem() );
}
);
}
public static | MergeMultipleEntityCopiesCustomTest |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/InvokerBuilder.java | {
"start": 8506,
"end": 10688
} | class ____
* declares the transformer</li>
* </ul>
*
* An output transformer must consume a type that can be produced by the target method.
* Specifically: when {@code Y} is <em>any-type</em>, it is not type checked during deployment.
* Otherwise, it is a deployment problem if {@code Y} is not assignable from the return type of
* the target method in case of return value transformers, or from {@code java.lang.Throwable}
* in case of exception transformers. {@code X} is not type checked during deployment, so that
* output transformers may produce arbitrary types.
* <p>
* When a transformer is registered for given output, it is called after the target method
* is invoked, and the outcome of the transformer is passed back to the caller of the invoker
* instead of the original output produced by the target method.
* <p>
* If the target method returns normally, any registered exception transformer is ignored; only
* the return value transformer is called. The return value transformer may throw, in which case
* the invoker will rethrow the exception. If the invoker is supposed to return normally,
* the return value transformer must return normally.
* <p>
* Similarly, if the target method throws, any registered return value transformer is ignored;
* only the exception transformer is called. The exception transformer may return normally,
* in which case the invoker will return the return value of the exception transformer. If
* the invoker is supposed to throw an exception, the exception transformer must throw.
*
* <h2>Invoker wrapping</h2>
*
* An invoker, possibly utilizing input lookups and input/output transformations, may be wrapped
* by a custom piece of code for maximum flexibility. A wrapper must have the following signature,
* where {@code X}, {@code Y} and {@code Z} are types:
*
* <ul>
* <li>{@code static Z wrap(X instance, Object[] arguments, Invoker<X, Y> invoker)}</li>
* </ul>
*
* A wrapper must operate on a matching instance type. Specifically: when {@code X} is
* <em>any-type</em>, it is not type checked during deployment. Otherwise, it is a deployment
* problem if {@code X} is not assignable from the | that |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/TypeConverterSupport.java | {
"start": 1139,
"end": 1266
} | class ____ {@link BeanWrapperImpl}.
*
* @author Juergen Hoeller
* @since 3.2
* @see SimpleTypeConverter
*/
public abstract | for |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterNodesResponsePBImpl.java | {
"start": 1607,
"end": 5130
} | class ____ extends GetClusterNodesResponse {
GetClusterNodesResponseProto proto =
GetClusterNodesResponseProto.getDefaultInstance();
GetClusterNodesResponseProto.Builder builder = null;
boolean viaProto = false;
List<NodeReport> nodeManagerInfoList;
public GetClusterNodesResponsePBImpl() {
builder = GetClusterNodesResponseProto.newBuilder();
}
public GetClusterNodesResponsePBImpl(GetClusterNodesResponseProto proto) {
this.proto = proto;
viaProto = true;
}
@Override
public List<NodeReport> getNodeReports() {
initLocalNodeManagerInfosList();
return this.nodeManagerInfoList;
}
@Override
public void setNodeReports(List<NodeReport> nodeManagers) {
if (nodeManagers == null) {
builder.clearNodeReports();
}
this.nodeManagerInfoList = nodeManagers;
}
public GetClusterNodesResponseProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private void mergeLocalToBuilder() {
if (this.nodeManagerInfoList != null) {
addLocalNodeManagerInfosToProto();
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetClusterNodesResponseProto.newBuilder(proto);
}
viaProto = false;
}
// Once this is called. containerList will never be null - until a getProto
// is called.
private void initLocalNodeManagerInfosList() {
if (this.nodeManagerInfoList != null) {
return;
}
GetClusterNodesResponseProtoOrBuilder p = viaProto ? proto : builder;
List<NodeReportProto> list = p.getNodeReportsList();
nodeManagerInfoList = new ArrayList<NodeReport>();
for (NodeReportProto a : list) {
nodeManagerInfoList.add(convertFromProtoFormat(a));
}
}
private void addLocalNodeManagerInfosToProto() {
maybeInitBuilder();
builder.clearNodeReports();
if (nodeManagerInfoList == null)
return;
Iterable<NodeReportProto> iterable = new Iterable<NodeReportProto>() {
@Override
public Iterator<NodeReportProto> iterator() {
return new Iterator<NodeReportProto>() {
Iterator<NodeReport> iter = nodeManagerInfoList.iterator();
@Override
public boolean hasNext() {
return iter.hasNext();
}
@Override
public NodeReportProto next() {
return convertToProtoFormat(iter.next());
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
};
builder.addAllNodeReports(iterable);
}
private NodeReportPBImpl convertFromProtoFormat(NodeReportProto p) {
return new NodeReportPBImpl(p);
}
private NodeReportProto convertToProtoFormat(NodeReport t) {
return ((NodeReportPBImpl)t).getProto();
}
}
| GetClusterNodesResponsePBImpl |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableSubscribeOn.java | {
"start": 4591,
"end": 4942
} | class ____ implements Runnable {
final Subscription upstream;
final long n;
Request(Subscription s, long n) {
this.upstream = s;
this.n = n;
}
@Override
public void run() {
upstream.request(n);
}
}
}
}
| Request |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/WallVisitorUtilsTest.java | {
"start": 792,
"end": 3364
} | class ____ extends TestCase {
public void test_isTrue() throws Exception {
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("1 != 2")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("1 != 2 AND 2 = 2")));
assertEquals(Boolean.FALSE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("1 != 2 AND 2 != 2")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("23 = 23")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("NOT 23 != 23")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("f1 like '%'")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("f1 like '%%'")));
assertEquals(null, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("a1 = b1 AND f1 like '%%'")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("a1 = b1 OR f1 like '%%'")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("1 < 2")));
assertEquals(Boolean.FALSE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("2 < 2")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("2 <= 2")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("2 > 1")));
assertEquals(Boolean.FALSE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("2 > 2")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("2 >= 2")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("len('44') > 0")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("len('44') >= 2")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("(select count(*) from t) > 0")));
assertEquals(Boolean.TRUE,
WallVisitorUtils.getValue(SQLUtils.toSQLExpr("(select count(*) from t) >= 0")));
assertEquals(Boolean.FALSE,
WallVisitorUtils.getValue(SQLUtils.toSQLExpr("(select count(*) from t) < 0")));
assertEquals(Boolean.TRUE,
WallVisitorUtils.getValue(SQLUtils.toSQLExpr("NOT (select count(*) from t) < 0")));
//
}
public void test_chr() throws Exception {
assertEquals("CAT", WallVisitorUtils.getValue(SQLUtils.toSQLExpr("CHR(67)||CHR(65)||CHR(84)")));
assertEquals(Boolean.TRUE, WallVisitorUtils.getValue(SQLUtils.toSQLExpr("CHR(67)||CHR(65)||CHR(84) = 'CAT'")));
}
}
| WallVisitorUtilsTest |
java | spring-projects__spring-security | access/src/test/java/org/springframework/security/access/annotation/SecuredAnnotationSecurityMetadataSourceTests.java | {
"start": 8344,
"end": 8438
} | interface ____ {
void doSomething(List<?> param);
}
@AnnotatedAnnotation
public | ReturnVoid |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/BlockMetadataAccumulator.java | {
"start": 903,
"end": 2816
} | class ____ implements Closeable {
private final DelayedOffsetAccumulator blockAddressAcc;
private final DelayedOffsetAccumulator blockDocRangeAcc;
BlockMetadataAccumulator(Directory dir, IOContext context, IndexOutput data, long addressesStart) throws IOException {
boolean success = false;
try {
blockDocRangeAcc = new DelayedOffsetAccumulator(dir, context, data, "block-doc-ranges", 0);
blockAddressAcc = new DelayedOffsetAccumulator(dir, context, data, "block-addresses", addressesStart);
success = true;
} finally {
if (success == false) {
IOUtils.closeWhileHandlingException(this); // self-close because constructor caller can't
}
}
}
public void addDoc(long numDocsInBlock, long blockLenInBytes) throws IOException {
blockDocRangeAcc.addDoc(numDocsInBlock);
blockAddressAcc.addDoc(blockLenInBytes);
}
public void build(IndexOutput meta, IndexOutput data) throws IOException {
long dataAddressesStart = data.getFilePointer();
blockAddressAcc.build(meta, data);
long dataDocRangeStart = data.getFilePointer();
long addressesLength = dataDocRangeStart - dataAddressesStart;
meta.writeLong(addressesLength);
meta.writeLong(dataDocRangeStart);
blockDocRangeAcc.build(meta, data);
long docRangesLen = data.getFilePointer() - dataDocRangeStart;
meta.writeLong(docRangesLen);
}
@Override
public void close() throws IOException {
IOUtils.closeWhileHandlingException(blockAddressAcc, blockDocRangeAcc);
}
/**
* Like OffsetsAccumulator builds offsets and stores in a DirectMonotonicWriter. But write to temp file
* rather than directly to a DirectMonotonicWriter because the number of values is unknown.
*/
static final | BlockMetadataAccumulator |
java | apache__kafka | server-common/src/test/java/org/apache/kafka/server/immutable/pcollections/PCollectionsImmutableNavigableSetTest.java | {
"start": 1826,
"end": 2002
} | class ____ {
private static final TreePSet<Integer> SINGLETON_SET = TreePSet.singleton(new Random().nextInt());
private static final | PCollectionsImmutableNavigableSetTest |
java | apache__flink | flink-formats/flink-protobuf/src/main/java/org/apache/flink/formats/protobuf/serialize/PbRowDataSerializationSchema.java | {
"start": 1595,
"end": 2771
} | class ____ implements SerializationSchema<RowData> {
public static final long serialVersionUID = 1L;
private final RowType rowType;
private final PbFormatConfig pbFormatConfig;
private transient RowToProtoConverter rowToProtoConverter;
public PbRowDataSerializationSchema(RowType rowType, PbFormatConfig pbFormatConfig) {
this.rowType = rowType;
this.pbFormatConfig = pbFormatConfig;
Descriptors.Descriptor descriptor =
PbFormatUtils.getDescriptor(pbFormatConfig.getMessageClassName());
PbSchemaValidationUtils.validate(descriptor, rowType);
}
@Override
public void open(InitializationContext context) throws Exception {
rowToProtoConverter = new RowToProtoConverter(rowType, pbFormatConfig);
}
@VisibleForTesting
public boolean isCodeSplit() {
return rowToProtoConverter.isCodeSplit();
}
@Override
public byte[] serialize(RowData element) {
try {
return rowToProtoConverter.convertRowToProtoBinary(element);
} catch (Exception e) {
throw new FlinkRuntimeException(e);
}
}
}
| PbRowDataSerializationSchema |
java | processing__processing4 | core/src/processing/data/IntList.java | {
"start": 643,
"end": 19780
} | class ____ implements Iterable<Integer> {
protected int count;
protected int[] data;
public IntList() {
data = new int[10];
}
/**
* @nowebref
*/
public IntList(int length) {
data = new int[length];
}
/**
* @nowebref
*/
public IntList(int[] source) {
count = source.length;
data = new int[count];
System.arraycopy(source, 0, data, 0, count);
}
/**
* Construct an IntList from an iterable pile of objects.
* For instance, a float array, an array of strings, who knows.
* Un-parsable or null values will be set to 0.
* @nowebref
*/
public IntList(Iterable<Object> iterable) {
this(10);
for (Object o : iterable) {
if (o == null) {
append(0); // missing value default
} else if (o instanceof Number) {
append(((Number) o).intValue());
} else {
append(PApplet.parseInt(o.toString().trim()));
}
}
crop();
}
/**
* Construct an IntList from a random pile of objects.
* Un-parsable or null values will be set to zero.
*/
public IntList(Object... items) {
final int missingValue = 0; // nuts, can't be last/final/second arg
count = items.length;
data = new int[count];
int index = 0;
for (Object o : items) {
int value = missingValue;
if (o != null) {
if (o instanceof Number) {
value = ((Number) o).intValue();
} else {
value = PApplet.parseInt(o.toString().trim(), missingValue);
}
}
data[index++] = value;
}
}
static public IntList fromRange(int stop) {
return fromRange(0, stop);
}
static public IntList fromRange(int start, int stop) {
int count = stop - start;
IntList newbie = new IntList(count);
for (int i = 0; i < count; i++) {
newbie.set(i, start+i);
}
return newbie;
}
/**
* Improve efficiency by removing allocated but unused entries from the
* internal array used to store the data. Set to private, though it could
* be useful to have this public if lists are frequently making drastic
* size changes (from very large to very small).
*/
private void crop() {
if (count != data.length) {
data = PApplet.subset(data, 0, count);
}
}
/**
* Get the length of the list.
*
* @webref intlist:method
* @webBrief Get the length of the list
*/
public int size() {
return count;
}
public void resize(int length) {
if (length > data.length) {
int[] temp = new int[length];
System.arraycopy(data, 0, temp, 0, count);
data = temp;
} else if (length > count) {
Arrays.fill(data, count, length, 0);
}
count = length;
}
/**
* Remove all entries from the list.
*
* @webref intlist:method
* @webBrief Remove all entries from the list
*/
public void clear() {
count = 0;
}
/**
* Get an entry at a particular index.
*
* @webref intlist:method
* @webBrief Get an entry at a particular index
*/
public int get(int index) {
if (index >= this.count) {
throw new ArrayIndexOutOfBoundsException(index);
}
return data[index];
}
/**
* Set the entry at a particular index.
*
* @webref intlist:method
* @webBrief Set the entry at a particular index
*/
public void set(int index, int what) {
if (index >= count) {
data = PApplet.expand(data, index+1);
for (int i = count; i < index; i++) {
data[i] = 0;
}
count = index+1;
}
data[index] = what;
}
/** Just an alias for append(), but matches pop() */
public void push(int value) {
append(value);
}
public int pop() {
if (count == 0) {
throw new RuntimeException("Can't call pop() on an empty list");
}
int value = get(count-1);
count--;
return value;
}
/**
* Remove an element from the specified index
*
* @webref intlist:method
* @webBrief Remove an element from the specified index
*/
public int remove(int index) {
if (index < 0 || index >= count) {
throw new ArrayIndexOutOfBoundsException(index);
}
int entry = data[index];
// int[] outgoing = new int[count - 1];
// System.arraycopy(data, 0, outgoing, 0, index);
// count--;
// System.arraycopy(data, index + 1, outgoing, 0, count - index);
// data = outgoing;
// For most cases, this actually appears to be faster
// than arraycopy() on an array copying into itself.
for (int i = index; i < count-1; i++) {
data[i] = data[i+1];
}
count--;
return entry;
}
// Remove the first instance of a particular value,
// and return the index at which it was found.
@SuppressWarnings("unused")
public int removeValue(int value) {
int index = index(value);
if (index != -1) {
remove(index);
return index;
}
return -1;
}
// Remove all instances of a particular value,
// and return the number of values found and removed
@SuppressWarnings("unused")
public int removeValues(int value) {
int ii = 0;
for (int i = 0; i < count; i++) {
if (data[i] != value) {
data[ii++] = data[i];
}
}
int removed = count - ii;
count = ii;
return removed;
}
/**
* Add a new entry to the list.
*
* @webref intlist:method
* @webBrief Add a new entry to the list
*/
public void append(int value) {
if (count == data.length) {
data = PApplet.expand(data);
}
data[count++] = value;
}
public void append(int[] values) {
for (int v : values) {
append(v);
}
}
public void append(IntList list) {
for (int v : list.values()) { // will concat the list...
append(v);
}
}
/** Add this value, but only if it's not already in the list. */
@SuppressWarnings("unused")
public void appendUnique(int value) {
if (!hasValue(value)) {
append(value);
}
}
public void insert(int index, int value) {
insert(index, new int[] { value });
}
// same as splice
public void insert(int index, int[] values) {
if (index < 0) {
throw new IllegalArgumentException("insert() index cannot be negative: it was " + index);
}
if (index >= data.length) {
throw new IllegalArgumentException("insert() index " + index + " is past the end of this list");
}
int[] temp = new int[count + values.length];
// Copy the old values, but not more than already exist
System.arraycopy(data, 0, temp, 0, Math.min(count, index));
// Copy the new values into the proper place
System.arraycopy(values, 0, temp, index, values.length);
// if (index < count) {
// The index was inside count, so it's a true splice/insert
System.arraycopy(data, index, temp, index+values.length, count - index);
count = count + values.length;
// } else {
// // The index was past 'count', so the new count is weirder
// count = index + values.length;
// }
data = temp;
}
public void insert(int index, IntList list) {
insert(index, list.values());
}
// below are aborted attempts at more optimized versions of the code
// that are harder to read and debug...
// if (index + values.length >= count) {
// // We're past the current 'count', check to see if we're still allocated
// // index 9, data.length = 10, values.length = 1
// if (index + values.length < data.length) {
// // There's still room for these entries, even though it's past 'count'.
// // First clear out the entries leading up to it, however.
// for (int i = count; i < index; i++) {
// data[i] = 0;
// }
// data[index] =
// }
// if (index >= data.length) {
// int length = index + values.length;
// int[] temp = new int[length];
// System.arraycopy(data, 0, temp, 0, count);
// System.arraycopy(values, 0, temp, index, values.length);
// data = temp;
// count = data.length;
// } else {
//
// }
//
// } else if (count == data.length) {
// int[] temp = new int[count << 1];
// System.arraycopy(data, 0, temp, 0, index);
// temp[index] = value;
// System.arraycopy(data, index, temp, index+1, count - index);
// data = temp;
//
// } else {
// // data[] has room to grow
// // for() loop believed to be faster than System.arraycopy over itself
// for (int i = count; i > index; --i) {
// data[i] = data[i-1];
// }
// data[index] = value;
// count++;
// }
/** Return the first index of a particular value. */
public int index(int what) {
for (int i = 0; i < count; i++) {
if (data[i] == what) {
return i;
}
}
return -1;
}
/**
* Check if a number is a part of the data structure.
*
* @webref intlist:method
* @webBrief Check if a number is a part of the list
*/
public boolean hasValue(int value) {
for (int i = 0; i < count; i++) {
if (data[i] == value) {
return true;
}
}
return false;
}
/**
* Add one to a value.
*
* @webref intlist:method
* @webBrief Add one to a value
*/
public void increment(int index) {
if (count <= index) {
resize(index + 1);
}
data[index]++;
}
private void boundsProblem(int index, String method) {
final String msg = String.format("The list size is %d. " +
"You cannot %s() to element %d.", count, method, index);
throw new ArrayIndexOutOfBoundsException(msg);
}
/**
* Add to a value.
*
* @webref intlist:method
* @webBrief Add to a value
*/
public void add(int index, int amount) {
if (index < count) {
data[index] += amount;
} else {
boundsProblem(index, "add");
}
}
/**
* Subtract from a value.
*
* @webref intlist:method
* @webBrief Subtract from a value
*/
public void sub(int index, int amount) {
if (index < count) {
data[index] -= amount;
} else {
boundsProblem(index, "sub");
}
}
/**
* Multiply a value.
*
* @webref intlist:method
* @webBrief Multiply a value
*/
public void mult(int index, int amount) {
if (index < count) {
data[index] *= amount;
} else {
boundsProblem(index, "mult");
}
}
/**
* Divide a value.
*
* @webref intlist:method
* @webBrief Divide a value
*/
public void div(int index, int amount) {
if (index < count) {
data[index] /= amount;
} else {
boundsProblem(index, "div");
}
}
private void checkMinMax(String functionName) {
if (count == 0) {
String msg =
String.format("Cannot use %s() on an empty %s.",
functionName, getClass().getSimpleName());
throw new RuntimeException(msg);
}
}
/**
* Return the smallest value.
*
* @webref intlist:method
* @webBrief Return the smallest value
*/
public int min() {
checkMinMax("min");
int outgoing = data[0];
for (int i = 1; i < count; i++) {
if (data[i] < outgoing) outgoing = data[i];
}
return outgoing;
}
// returns the index of the minimum value.
// if there are ties, it returns the first one found.
@SuppressWarnings("unused")
public int minIndex() {
checkMinMax("minIndex");
int value = data[0];
int index = 0;
for (int i = 1; i < count; i++) {
if (data[i] < value) {
value = data[i];
index = i;
}
}
return index;
}
/**
* Return the largest value.
*
* @webref intlist:method
* @webBrief Return the largest value
*/
public int max() {
checkMinMax("max");
int outgoing = data[0];
for (int i = 1; i < count; i++) {
if (data[i] > outgoing) outgoing = data[i];
}
return outgoing;
}
// returns the index of the maximum value.
// if there are ties, it returns the first one found.
public int maxIndex() {
checkMinMax("maxIndex");
int value = data[0];
int index = 0;
for (int i = 1; i < count; i++) {
if (data[i] > value) {
value = data[i];
index = i;
}
}
return index;
}
/**
 * Sum of all entries as an int. Delegates to sumLong() and refuses to
 * silently truncate: if the true total does not fit in an int, a
 * RuntimeException directs the caller to sumLong() instead.
 */
public int sum() {
  long total = sumLong();
  if (total > Integer.MAX_VALUE) {
    throw new RuntimeException("sum() exceeds " + Integer.MAX_VALUE + ", use sumLong()");
  } else if (total < Integer.MIN_VALUE) {
    throw new RuntimeException("sum() less than " + Integer.MIN_VALUE + ", use sumLong()");
  }
  return (int) total;
}
/**
 * Sum of all entries, accumulated in a long so intermediate totals of
 * many large ints cannot overflow.
 */
public long sumLong() {
  long total = 0;
  int i = 0;
  while (i < count) {
    total += data[i++];
  }
  return total;
}
/**
 * Sorts the array, lowest to highest.
 *
 * @webref intlist:method
 * @webBrief Sorts the array, lowest to highest
 */
public void sort() {
  // Only the first 'count' slots hold live entries; the tail of the
  // backing array is spare capacity and must not participate in the sort.
  Arrays.sort(data, 0, count);
}
/**
 * A sort in reverse. It's equivalent to running <b>sort()</b> and then
 * <b>reverse()</b>, but is more efficient than running each separately.
 *
 * @webref intlist:method
 * @webBrief Reverse sort, orders values from highest to lowest
 */
public void sortReverse() {
  new Sort() {
    @Override
    public int size() {
      return count;  // sort only the live entries
    }

    @Override
    public int compare(int a, int b) {
      // Integer.compare() replaces the old "data[b] - data[a]" idiom,
      // whose subtraction overflows for extreme values (e.g.
      // Integer.MIN_VALUE vs a positive value) and reports the wrong order.
      return Integer.compare(data[b], data[a]);
    }

    @Override
    public void swap(int a, int b) {
      int temp = data[a];
      data[a] = data[b];
      data[b] = temp;
    }
  }.run();
}
/**
 * Reverse the order of the list.
 *
 * @webref intlist:method
 * @webBrief Reverse the order of the list elements
 */
public void reverse() {
  // Two-pointer swap: walk inward from both ends until the pointers meet.
  int lo = 0;
  int hi = count - 1;
  while (lo < hi) {
    int tmp = data[lo];
    data[lo++] = data[hi];
    data[hi--] = tmp;
  }
}
/**
 * Randomize the order of the list elements.
 *
 * @webref intlist:method
 * @webBrief Randomize the order of the list elements
 */
@SuppressWarnings("unused")
public void shuffle() {
  Random r = new Random();
  // Walk from the last live slot down, swapping each slot with a randomly
  // chosen slot at or below it (same uniform shuffle as before).
  for (int i = count - 1; i > 0; i--) {
    int j = r.nextInt(i + 1);
    int tmp = data[i];
    data[i] = data[j];
    data[j] = tmp;
  }
}
/**
 * Randomize the list order using the random() function from the specified
 * sketch, allowing shuffle() to use its current randomSeed() setting.
 */
@SuppressWarnings("unused")
public void shuffle(PApplet sketch) {
  // Same uniform shuffle as shuffle(), but sourcing randomness from the
  // sketch so randomSeed() is honored.
  for (int i = count - 1; i > 0; i--) {
    int j = (int) sketch.random(i + 1);
    int tmp = data[i];
    data[i] = data[j];
    data[j] = tmp;
  }
}
/**
 * Return a random value from the list.
 */
public int choice() {
  if (count == 0) {
    throw new ArrayIndexOutOfBoundsException("No entries in this IntList");
  }
  // Math.random() is in [0, 1), so the index lands in [0, count).
  int pick = (int) (Math.random() * count);
  return data[pick];
}
// see notes in StringList
// /**
// * Return a random value from the list, using the
// * randomSeed() from the specified sketch object.
// */
// public int choice(PApplet sketch) {
// if (count == 0) {
// throw new ArrayIndexOutOfBoundsException("No entries in this IntList");
// }
// return data[(int) sketch.random(count)];
// }
/**
 * Remove and return a uniformly random entry from the list.
 * Throws ArrayIndexOutOfBoundsException when the list is empty.
 */
public int removeChoice() {
  if (count == 0) {
    throw new ArrayIndexOutOfBoundsException("No entries in this IntList");
  }
  // Pick a random live slot and delegate the actual removal to remove().
  return remove((int) (Math.random() * count));
}
/**
 * Return a new IntList with the same entries as this one.
 */
public IntList copy() {
  // NOTE(review): assumes the IntList(int[]) constructor copies the array
  // rather than aliasing it -- confirm, otherwise the "copy" would share
  // storage with this list.
  IntList outgoing = new IntList(data);
  // Preserve the live-entry count even though data.length may be larger
  // (the backing array can carry spare capacity).
  outgoing.count = count;
  return outgoing;
}
/**
 * Returns the actual array being used to store the data. For advanced users,
 * this is the fastest way to access a large list. Suitable for iterating
 * with a for() loop, but modifying the list will have terrible consequences.
 */
public int[] values() {
  // NOTE(review): crop() is defined elsewhere in this class; presumably it
  // trims the backing array so its length matches count before it is
  // handed out -- confirm against the full class.
  crop();
  return data;
}
@Override
public Iterator<Integer> iterator() {
  // Iterates over the live entries only (indices 0..count-1), boxing each
  // int on the way out.
  return new Iterator<>() {
    int index = -1;  // position of the element most recently returned by next()

    // Removes the element last returned by next() by delegating to the
    // enclosing list, then steps back so iteration continues correctly.
    // NOTE(review): calling remove() before any next() passes index = -1
    // to IntList.remove() -- confirm that remove() rejects negative indices.
    public void remove() {
      IntList.this.remove(index);
      index--;
    }

    // NOTE(review): does not throw NoSuchElementException when exhausted,
    // as the Iterator contract specifies -- reading past the end yields
    // stale data or an ArrayIndexOutOfBoundsException instead.
    public Integer next() {
      return data[++index];
    }

    public boolean hasNext() {
      return index+1 < count;
    }
  };
}
/**
 * @deprecated use {@link #toArray()} instead.
 */
@Deprecated
public int[] array() {
  return toArray();
}
/**
 * Create a new array with a copy of all the values.
 *
 * @return an array sized by the length of the list with each of the values.
 * @webref intlist:method
 * @webBrief Create a new array with a copy of all the values
 */
public int[] toArray() {
  // Passing null makes toArray(int[]) allocate a fresh, exactly-sized array.
  return toArray(null);
}
/**
 * @deprecated use {@link #toArray(int[])} instead.
 */
@Deprecated
public int[] array(int[] array) {
  return toArray(array);
}
/**
 * Copy values into the specified array. If the specified array is
 * null or not the same size, a new array will be allocated.
 */
public int[] toArray(int[] array) {
  // Reuse the caller's buffer only when it exactly matches the live count;
  // otherwise hand back a freshly allocated, exactly-sized array.
  int[] target = (array != null && array.length == count) ? array : new int[count];
  System.arraycopy(data, 0, target, 0, count);
  return target;
}
/**
 * Returns a normalized version of this array. Called getPercent()
 * for consistency with the Dict classes. It's a getter method because
 * it needs to return a new list (because IntList/Dict can't do
 * percentages or normalization in place on int values).
 */
@SuppressWarnings("unused")
public FloatList getPercent() {
  // Accumulate directly over the backing array instead of going through
  // the deprecated array() accessor, which allocated a throwaway copy and
  // rounded each entry through float before summing.
  double sum = 0;
  for (int i = 0; i < count; i++) {
    sum += data[i];
  }
  // NOTE(review): a zero total makes the divisions below produce
  // NaN/Infinity, matching the previous behavior -- confirm callers expect it.
  FloatList outgoing = new FloatList(count);
  for (int i = 0; i < count; i++) {
    outgoing.set(i, (float) (data[i] / sum));
  }
  return outgoing;
}
/**
 * Return a new list holding the entries from <b>start</b> through the end
 * of this list.
 */
@SuppressWarnings("unused")
public IntList getSubset(int start) {
  // Delegate with the remaining length; a start past the end yields a
  // negative length and fails in getSubset(int, int).
  return getSubset(start, count - start);
}
/**
 * Return a new list holding <b>num</b> entries starting at <b>start</b>.
 */
@SuppressWarnings("unused")
public IntList getSubset(int start, int num) {
  // Block-copy the requested range into a fresh buffer; System.arraycopy
  // performs the same bounds checks as before (out-of-range throws).
  int[] chunk = new int[num];
  System.arraycopy(data, start, chunk, 0, num);
  return new IntList(chunk);
}
/**
 * Join the entries into a single String, placing <b>separator</b>
 * between consecutive values. An empty list yields "".
 */
public String join(String separator) {
  if (count == 0) {
    return "";
  }
  StringBuilder joined = new StringBuilder();
  // Prefix every entry after the first with the separator.
  for (int i = 0; i < count; i++) {
    if (i > 0) {
      joined.append(separator);
    }
    joined.append(data[i]);
  }
  return joined.toString();
}
/**
 * Print each entry to standard output as "[index] value", one per line.
 */
public void print() {
  int i = 0;
  while (i < count) {
    System.out.format("[%d] %d%n", i, data[i]);
    i++;
  }
}
/**
 * Save tab-delimited entries to a file (TSV format, UTF-8 encoding)
 */
public void save(File file) {
  // try-with-resources guarantees the writer is closed even if write()
  // throws; the previous version leaked the stream on error.
  try (PrintWriter writer = PApplet.createWriter(file)) {
    write(writer);
  }
}
/**
 * Write entries to a PrintWriter, one per line
 */
public void write(PrintWriter writer) {
  int i = 0;
  while (i < count) {
    writer.println(data[i++]);
  }
  // Push buffered output through so callers see the data immediately.
  writer.flush();
}
/**
 * Return this dictionary as a String in JSON format.
 */
public String toJSON() {
  // Comma-separated entries wrapped in "[ " ... " ]", same as before.
  StringBuilder json = new StringBuilder("[ ");
  json.append(join(", "));
  json.append(" ]");
  return json.toString();
}
@Override
public String toString() {
  // "<ClassName> size=<n> [ ... ]" -- identical text, built incrementally.
  StringBuilder sb = new StringBuilder(getClass().getSimpleName());
  sb.append(" size=").append(size()).append(' ').append(toJSON());
  return sb.toString();
}
}
| IntList |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/observable/ObservableFuseableTest.java | {
"start": 846,
"end": 3054
} | class ____ extends RxJavaTest {
@Test
public void syncRange() {
Observable.range(1, 10)
.to(TestHelper.<Integer>testConsumer(QueueFuseable.ANY, false))
.assertFusionMode(QueueFuseable.SYNC)
.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
.assertNoErrors()
.assertComplete();
}
@Test
public void syncArray() {
Observable.fromArray(new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })
.to(TestHelper.<Integer>testConsumer(QueueFuseable.ANY, false))
.assertFusionMode(QueueFuseable.SYNC)
.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
.assertNoErrors()
.assertComplete();
}
@Test
public void syncIterable() {
Observable.fromIterable(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
.to(TestHelper.<Integer>testConsumer(QueueFuseable.ANY, false))
.assertFusionMode(QueueFuseable.SYNC)
.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
.assertNoErrors()
.assertComplete();
}
@Test
public void syncRangeHidden() {
Observable.range(1, 10).hide()
.to(TestHelper.<Integer>testConsumer(QueueFuseable.ANY, false))
.assertNotFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
.assertNoErrors()
.assertComplete();
}
@Test
public void syncArrayHidden() {
Observable.fromArray(new Integer[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 })
.hide()
.to(TestHelper.<Integer>testConsumer(QueueFuseable.ANY, false))
.assertNotFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
.assertNoErrors()
.assertComplete();
}
@Test
public void syncIterableHidden() {
Observable.fromIterable(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
.hide()
.to(TestHelper.<Integer>testConsumer(QueueFuseable.ANY, false))
.assertNotFuseable()
.assertFusionMode(QueueFuseable.NONE)
.assertValues(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
.assertNoErrors()
.assertComplete();
}
}
| ObservableFuseableTest |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableMergeWithCompletable.java | {
"start": 4091,
"end": 4827
} | class ____ extends AtomicReference<Disposable>
implements CompletableObserver {
private static final long serialVersionUID = -2935427570954647017L;
final MergeWithSubscriber<?> parent;
OtherObserver(MergeWithSubscriber<?> parent) {
this.parent = parent;
}
@Override
public void onSubscribe(Disposable d) {
DisposableHelper.setOnce(this, d);
}
@Override
public void onError(Throwable e) {
parent.otherError(e);
}
@Override
public void onComplete() {
parent.otherComplete();
}
}
}
}
| OtherObserver |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/dialect/mysql/ast/clause/MySqlDeclareStatement.java | {
"start": 968,
"end": 1628
} | class ____ extends MySqlStatementImpl {
private List<SQLDeclareItem> varList = new ArrayList<SQLDeclareItem>();
public List<SQLDeclareItem> getVarList() {
return varList;
}
public void addVar(SQLDeclareItem expr) {
varList.add(expr);
}
public void setVarList(List<SQLDeclareItem> varList) {
this.varList = varList;
}
@Override
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, varList);
}
visitor.endVisit(this);
}
@Override
public List getChildren() {
return varList;
}
}
| MySqlDeclareStatement |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/transaction/ejb/AbstractEjbTxDaoTests.java | {
"start": 1752,
"end": 3002
} | class ____ {
protected static final String TEST_NAME = "test-name";
@EJB
protected TestEntityDao dao;
@Autowired
protected EntityManager em;
// The EntityManagerFactory is not actually used by tests. We only declare it
// to ensure that dependency injection works for it.
@Autowired
protected EntityManagerFactory emf;
@Test
void test1InitialState() {
int count = dao.getCount(TEST_NAME);
assertThat(count).as("New TestEntity should have count=0.").isEqualTo(0);
}
@Test
void test2IncrementCount1() {
int count = dao.incrementCount(TEST_NAME);
assertThat(count).as("Expected count=1 after first increment.").isEqualTo(1);
}
/**
* The default implementation of this method assumes that the transaction
* for {@link #test2IncrementCount1()} was committed. Therefore, it is
* expected that the previous increment has been persisted in the database.
*/
@Test
void test3IncrementCount2() {
int count = dao.getCount(TEST_NAME);
assertThat(count).as("Expected count=1 after test2IncrementCount1().").isEqualTo(1);
count = dao.incrementCount(TEST_NAME);
assertThat(count).as("Expected count=2 now.").isEqualTo(2);
}
@AfterEach
void synchronizePersistenceContext() {
em.flush();
}
}
| AbstractEjbTxDaoTests |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/type/java/OffsetDateTimeDescriptorTest.java | {
"start": 345,
"end": 1082
} | class ____ extends AbstractDescriptorTest<OffsetDateTime> {
final OffsetDateTime original = OffsetDateTime.of(LocalDateTime.of( 2016, 10, 8, 15, 13 ), ZoneOffset.ofHoursMinutes( 2, 0));
final OffsetDateTime copy = OffsetDateTime.of(LocalDateTime.of( 2016, 10, 8, 15, 13 ), ZoneOffset.ofHoursMinutes( 2, 0));
final OffsetDateTime different = OffsetDateTime.of(LocalDateTime.of( 2016, 10, 8, 15, 13 ), ZoneOffset.ofHoursMinutes( 4, 30));
public OffsetDateTimeDescriptorTest() {
super( OffsetDateTimeJavaType.INSTANCE);
}
@Override
protected Data<OffsetDateTime> getTestData() {
return new Data<>( original, copy, different );
}
@Override
protected boolean shouldBeMutable() {
return false;
}
}
| OffsetDateTimeDescriptorTest |
java | alibaba__nacos | api/src/main/java/com/alibaba/nacos/api/model/response/ConnectionInfo.java | {
"start": 762,
"end": 1483
} | class ____ {
private boolean traced = false;
private Map<String, Boolean> abilityTable;
private ConnectionMetaInfo metaInfo;
public boolean isTraced() {
return traced;
}
public void setTraced(boolean traced) {
this.traced = traced;
}
public void setAbilityTable(Map<String, Boolean> abilityTable) {
this.abilityTable = abilityTable;
}
public Map<String, Boolean> getAbilityTable() {
return this.abilityTable;
}
public ConnectionMetaInfo getMetaInfo() {
return metaInfo;
}
public void setMetaInfo(ConnectionMetaInfo metaInfo) {
this.metaInfo = metaInfo;
}
}
| ConnectionInfo |
java | alibaba__nacos | core/src/main/java/com/alibaba/nacos/core/distributed/raft/RaftEvent.java | {
"start": 2436,
"end": 3833
} | class ____ {
private String groupId;
private String leader;
private Long term = null;
private List<String> raftClusterInfo = Collections.emptyList();
private String errMsg = "";
private RaftEventBuilder() {
}
public RaftEventBuilder groupId(String groupId) {
this.groupId = groupId;
return this;
}
public RaftEventBuilder leader(String leader) {
this.leader = leader;
return this;
}
public RaftEventBuilder term(long term) {
this.term = term;
return this;
}
public RaftEventBuilder raftClusterInfo(List<String> raftClusterInfo) {
this.raftClusterInfo = raftClusterInfo;
return this;
}
public RaftEventBuilder errMsg(String errMsg) {
this.errMsg = errMsg;
return this;
}
public RaftEvent build() {
RaftEvent raftEvent = new RaftEvent();
raftEvent.setGroupId(groupId);
raftEvent.setLeader(leader);
raftEvent.setTerm(term);
raftEvent.setRaftClusterInfo(raftClusterInfo);
raftEvent.setErrMsg(errMsg);
return raftEvent;
}
}
}
| RaftEventBuilder |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/discriminator/JoinedInheritanceForceDiscriminatorTest.java | {
"start": 5508,
"end": 5707
} | class ____ extends CommonBase {
public AnotherEntity() {
}
public AnotherEntity(Long id, String name) {
super( id, name );
}
}
@Entity( name = "ElementGroup" )
public static | AnotherEntity |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/aggregate/window/processors/UnsliceSyncStateWindowAggProcessor.java | {
"start": 11460,
"end": 15159
} | class ____ implements Trigger.OnMergeContext {
private TimeWindow window;
private Collection<TimeWindow> mergedWindows;
public void open() throws Exception {
trigger.open(this);
}
@Override
public MetricGroup getMetricGroup() {
return metrics;
}
public boolean onProcessingTime(long time) throws Exception {
return trigger.onProcessingTime(time, window);
}
public boolean onEventTime(long time) throws Exception {
return trigger.onEventTime(time, window);
}
public void onMerge() throws Exception {
trigger.onMerge(window, this);
}
public void setWindow(TimeWindow window) {
this.window = window;
}
public void setMergedWindows(Collection<TimeWindow> mergedWindows) {
this.mergedWindows = mergedWindows;
}
@Override
public long getCurrentProcessingTime() {
return ctx.getTimerService().currentProcessingTime();
}
@Override
public long getCurrentWatermark() {
return ctx.getTimerService().currentWatermark();
}
@Override
public void registerProcessingTimeTimer(long time) {
ctx.getTimerService().registerProcessingTimeTimer(window, time);
}
@Override
public void registerEventTimeTimer(long time) {
ctx.getTimerService().registerEventTimeTimer(window, time);
}
@Override
public void deleteProcessingTimeTimer(long time) {
ctx.getTimerService().deleteProcessingTimeTimer(window, time);
}
@Override
public void deleteEventTimeTimer(long time) {
ctx.getTimerService().deleteEventTimeTimer(window, time);
}
@Override
public ZoneId getShiftTimeZone() {
return shiftTimeZone;
}
public void clear() throws Exception {
trigger.clear(window);
}
@Override
public <S extends MergingState<?, ?>> void mergePartitionedState(
StateDescriptor<S, ?> stateDescriptor) {
if (mergedWindows != null && !mergedWindows.isEmpty()) {
try {
State state =
ctx.getKeyedStateBackend()
.getOrCreateKeyedState(
createWindowSerializer(), stateDescriptor);
if (state instanceof InternalMergingState) {
((InternalMergingState<RowData, TimeWindow, ?, ?, ?>) state)
.mergeNamespaces(window, mergedWindows);
} else {
throw new IllegalArgumentException(
"The given state descriptor does not refer to a mergeable state (MergingState)");
}
} catch (Exception e) {
throw new RuntimeException("Error while merging state.", e);
}
}
}
@Override
public <S extends State> S getPartitionedState(StateDescriptor<S, ?> stateDescriptor) {
try {
return ctx.getKeyedStateBackend()
.getPartitionedState(
VoidNamespace.INSTANCE,
VoidNamespaceSerializer.INSTANCE,
stateDescriptor);
} catch (Exception e) {
throw new RuntimeException("Could not retrieve state", e);
}
}
}
}
| TriggerContextImpl |
java | elastic__elasticsearch | libs/core/src/main/java/org/elasticsearch/core/CheckedRunnable.java | {
"start": 617,
"end": 693
} | interface ____<E extends Exception> {
void run() throws E;
}
| CheckedRunnable |
java | netty__netty | transport/src/main/java/io/netty/channel/ChannelFactory.java | {
"start": 780,
"end": 953
} | interface ____<T extends Channel> extends io.netty.bootstrap.ChannelFactory<T> {
/**
* Creates a new channel.
*/
@Override
T newChannel();
}
| ChannelFactory |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java | {
"start": 1535,
"end": 20037
} | class ____ extends MapperServiceTestCase {
private static MappingParser createMappingParser(Settings settings) {
return createMappingParser(settings, IndexVersion.current(), TransportVersion.current());
}
private static MappingParser createMappingParser(Settings settings, IndexVersion version, TransportVersion transportVersion) {
ScriptService scriptService = new ScriptService(
settings,
Collections.emptyMap(),
Collections.emptyMap(),
() -> 1L,
TestProjectResolvers.singleProject(randomProjectIdOrDefault())
);
IndexSettings indexSettings = createIndexSettings(version, settings);
IndexAnalyzers indexAnalyzers = createIndexAnalyzers();
SimilarityService similarityService = new SimilarityService(indexSettings, scriptService, Collections.emptyMap());
MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, BitsetFilterCache.Listener.NOOP);
Supplier<MappingParserContext> mappingParserContextSupplier = () -> new MappingParserContext(
similarityService::getSimilarity,
type -> mapperRegistry.getMapperParser(type, indexSettings.getIndexVersionCreated()),
mapperRegistry.getRuntimeFieldParsers()::get,
indexSettings.getIndexVersionCreated(),
() -> transportVersion,
() -> {
throw new UnsupportedOperationException();
},
scriptService,
indexAnalyzers,
indexSettings,
indexSettings.getMode().idFieldMapperWithoutFieldData(),
bitsetFilterCache::getBitSetProducer,
null
);
Map<String, MetadataFieldMapper.TypeParser> metadataMapperParsers = mapperRegistry.getMetadataMapperParsers(
indexSettings.getIndexVersionCreated()
);
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappers = new LinkedHashMap<>();
metadataMapperParsers.values().stream().map(parser -> parser.getDefault(mappingParserContextSupplier.get())).forEach(m -> {
if (m != null) {
metadataMappers.put(m.getClass(), m);
}
});
return new MappingParser(
mappingParserContextSupplier,
metadataMapperParsers,
() -> metadataMappers,
type -> MapperService.SINGLE_MAPPING_NAME
);
}
public void testFieldNameWithDotsDisallowed() throws Exception {
XContentBuilder builder = mapping(b -> {
b.startObject("foo.bar").field("type", "text").endObject();
b.startObject("foo.baz").field("type", "keyword").endObject();
});
Mapping mapping = createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)));
Mapper object = mapping.getRoot().getMapper("foo");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
ObjectMapper objectMapper = (ObjectMapper) object;
assertNotNull(objectMapper.getMapper("bar"));
assertNotNull(objectMapper.getMapper("baz"));
}
public void testFieldNameWithDeepDots() throws Exception {
XContentBuilder builder = mapping(b -> {
b.startObject("foo.bar").field("type", "text").endObject();
b.startObject("foo.baz");
{
b.startObject("properties");
{
b.startObject("deep.field").field("type", "keyword").endObject();
}
b.endObject();
}
b.endObject();
});
Mapping mapping = createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)));
MappingLookup mappingLookup = MappingLookup.fromMapping(mapping);
assertNotNull(mappingLookup.getMapper("foo.bar"));
assertNotNull(mappingLookup.getMapper("foo.baz.deep.field"));
assertNotNull(mappingLookup.objectMappers().get("foo"));
}
public void testFieldNameWithDotPrefixDisallowed() throws IOException {
XContentBuilder builder = mapping(b -> {
b.startObject("foo").field("type", "text").endObject();
b.startObject("foo.baz").field("type", "keyword").endObject();
});
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
assertTrue(e.getMessage(), e.getMessage().contains("mapper [foo] cannot be changed from type [text] to [ObjectMapper]"));
}
public void testMultiFieldsWithFieldAlias() throws IOException {
XContentBuilder builder = mapping(b -> {
b.startObject("field");
{
b.field("type", "text");
b.startObject("fields");
{
b.startObject("alias");
{
b.field("type", "alias");
b.field("path", "other-field");
}
b.endObject();
}
b.endObject();
}
b.endObject();
b.startObject("other-field").field("type", "keyword").endObject();
});
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
assertEquals("Type [alias] cannot be used in multi field", e.getMessage());
}
public void testBadMetadataMapper() throws IOException {
XContentBuilder builder = topMapping(b -> { b.field(RoutingFieldMapper.NAME, "required"); });
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
assertEquals("[_routing] config must be an object", e.getMessage());
}
public void testMergeSubfieldWhileParsing() throws Exception {
/*
If we are parsing mappings that hold the definition of the same field twice, the two are merged together. This can happen when
mappings have the same field specified using the object notation as well as the dot notation, as well as when applying index
templates, in which case the two definitions may come from separate index templates that end up in the same map (through
XContentHelper#mergeDefaults, see MetadataCreateIndexService#parseV1Mappings).
We had a bug (https://github.com/elastic/elasticsearch/issues/88573) triggered by this scenario that caused the merged leaf fields
to get the wrong path (missing the first portion).
*/
String mappingAsString = """
{
"_doc": {
"properties": {
"obj": {
"properties": {
"source": {
"properties": {
"geo": {
"properties": {
"location": {
"type": "geo_point"
}
}
}
}
}
}
},
"obj.source.geo.location" : {
"type": "geo_point"
}
}
}
}
""";
Mapping mapping = createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(mappingAsString));
assertEquals(1, mapping.getRoot().mappers.size());
Mapper object = mapping.getRoot().getMapper("obj");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
assertEquals("obj", object.leafName());
assertEquals("obj", object.fullPath());
ObjectMapper objectMapper = (ObjectMapper) object;
assertEquals(1, objectMapper.mappers.size());
object = objectMapper.getMapper("source");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
assertEquals("source", object.leafName());
assertEquals("obj.source", object.fullPath());
objectMapper = (ObjectMapper) object;
assertEquals(1, objectMapper.mappers.size());
object = objectMapper.getMapper("geo");
assertThat(object, CoreMatchers.instanceOf(ObjectMapper.class));
assertEquals("geo", object.leafName());
assertEquals("obj.source.geo", object.fullPath());
objectMapper = (ObjectMapper) object;
assertEquals(1, objectMapper.mappers.size());
Mapper location = objectMapper.getMapper("location");
assertThat(location, CoreMatchers.instanceOf(GeoPointFieldMapper.class));
GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) location;
assertEquals("obj.source.geo.location", geoPointFieldMapper.fullPath());
assertEquals("location", geoPointFieldMapper.leafName());
assertEquals("obj.source.geo.location", geoPointFieldMapper.mappedFieldType.name());
}
private static String randomFieldType() {
return randomBoolean() ? KeywordFieldMapper.CONTENT_TYPE : ObjectMapper.CONTENT_TYPE;
}
public void testFieldStartingWithDot() throws Exception {
XContentBuilder builder = mapping(b -> b.startObject(".foo").field("type", randomFieldType()).endObject());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
// TODO isn't this error misleading?
assertEquals("field name cannot be an empty string", iae.getMessage());
}
public void testFieldEndingWithDot() throws Exception {
XContentBuilder builder = mapping(b -> b.startObject("foo.").field("type", randomFieldType()).endObject());
Mapping mapping = createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)));
// TODO this needs fixing as part of addressing https://github.com/elastic/elasticsearch/issues/28948
assertNotNull(mapping.getRoot().mappers.get("foo"));
assertNull(mapping.getRoot().mappers.get("foo."));
}
public void testFieldTrailingDots() throws Exception {
XContentBuilder builder = mapping(b -> b.startObject("top..foo").field("type", randomFieldType()).endObject());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
// TODO isn't this error misleading?
assertEquals("field name cannot be an empty string", iae.getMessage());
}
public void testDottedFieldEndingWithDot() throws Exception {
XContentBuilder builder = mapping(b -> b.startObject("foo.bar.").field("type", randomFieldType()).endObject());
Mapping mapping = createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)));
// TODO this needs fixing as part of addressing https://github.com/elastic/elasticsearch/issues/28948
assertNotNull(((ObjectMapper) mapping.getRoot().mappers.get("foo")).mappers.get("bar"));
assertNull(((ObjectMapper) mapping.getRoot().mappers.get("foo")).mappers.get("bar."));
}
public void testFieldStartingAndEndingWithDot() throws Exception {
XContentBuilder builder = mapping(b -> b.startObject("foo..bar.").field("type", randomFieldType()).endObject());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
// TODO isn't this error misleading?
assertEquals("field name cannot be an empty string", iae.getMessage());
}
public void testDottedFieldWithTrailingWhitespace() throws Exception {
XContentBuilder builder = mapping(b -> b.startObject("top. .foo").field("type", randomFieldType()).endObject());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
// TODO isn't this error misleading?
assertEquals("field name cannot contain only whitespaces", iae.getMessage());
}
public void testEmptyFieldName() throws Exception {
{
XContentBuilder builder = mapping(b -> b.startObject("").field("type", randomFieldType()).endObject());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
assertEquals("field name cannot be an empty string", iae.getMessage());
}
{
XContentBuilder builder = mappingNoSubobjects(b -> b.startObject("").field("type", randomFieldType()).endObject());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
assertEquals("field name cannot be an empty string", iae.getMessage());
}
}
public void testBlankFieldName() throws Exception {
{
XContentBuilder builder = mapping(b -> b.startObject(" ").field("type", randomFieldType()).endObject());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
assertEquals("field name cannot contain only whitespaces", iae.getMessage());
}
{
XContentBuilder builder = mappingNoSubobjects(b -> b.startObject(" ").field("type", "keyword").endObject());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
assertEquals("field name cannot contain only whitespaces", iae.getMessage());
}
}
public void testBlankFieldNameBefore8_6_0() throws Exception {
IndexVersion version = IndexVersionUtils.randomVersionBetween(
random(),
IndexVersions.MINIMUM_READONLY_COMPATIBLE,
IndexVersions.V_8_5_0
);
TransportVersion transportVersion = TransportVersions.V_8_5_0;
{
XContentBuilder builder = mapping(b -> b.startObject(" ").field("type", randomFieldType()).endObject());
MappingParser mappingParser = createMappingParser(Settings.EMPTY, version, transportVersion);
Mapping mapping = mappingParser.parse("_doc", new CompressedXContent(BytesReference.bytes(builder)));
assertNotNull(mapping.getRoot().getMapper(" "));
}
{
XContentBuilder builder = mapping(b -> b.startObject("top. .foo").field("type", randomFieldType()).endObject());
MappingParser mappingParser = createMappingParser(Settings.EMPTY, version, transportVersion);
Mapping mapping = mappingParser.parse("_doc", new CompressedXContent(BytesReference.bytes(builder)));
assertNotNull(((ObjectMapper) mapping.getRoot().getMapper("top")).getMapper(" "));
}
{
XContentBuilder builder = mappingNoSubobjects(b -> b.startObject(" ").field("type", "keyword").endObject());
MappingParser mappingParser = createMappingParser(Settings.EMPTY, version, transportVersion);
Mapping mapping = mappingParser.parse("_doc", new CompressedXContent(BytesReference.bytes(builder)));
assertNotNull(mapping.getRoot().getMapper(" "));
}
}
public void testFieldNameDotsOnly() throws Exception {
String[] fieldNames = { ".", "..", "..." };
for (String fieldName : fieldNames) {
XContentBuilder builder = mapping(b -> b.startObject(fieldName).field("type", randomFieldType()).endObject());
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> createMappingParser(Settings.EMPTY).parse("_doc", new CompressedXContent(BytesReference.bytes(builder)))
);
assertEquals("field name cannot contain only dots", iae.getMessage());
}
}
public void testDynamicFieldEdgeCaseNamesSubobjectsFalse() throws Exception {
MappingParser mappingParser = createMappingParser(Settings.EMPTY);
for (String fieldName : DocumentParserTests.VALID_FIELD_NAMES_NO_SUBOBJECTS) {
XContentBuilder builder = mappingNoSubobjects(b -> b.startObject(fieldName).field("type", "keyword").endObject());
assertNotNull(mappingParser.parse("_doc", new CompressedXContent(BytesReference.bytes(builder))));
}
}
public void testDynamicFieldEdgeCaseNamesRuntimeSection() throws Exception {
// TODO these combinations are not accepted by default, but they are in the runtime section, though they are not accepted when
// parsing documents with subobjects enabled
MappingParser mappingParser = createMappingParser(Settings.EMPTY);
for (String fieldName : DocumentParserTests.VALID_FIELD_NAMES_NO_SUBOBJECTS) {
XContentBuilder builder = runtimeMapping(b -> b.startObject(fieldName).field("type", "keyword").endObject());
mappingParser.parse("_doc", new CompressedXContent(BytesReference.bytes(builder)));
}
}
}
| MappingParserTests |
java | alibaba__nacos | core/src/test/java/com/alibaba/nacos/core/auth/AuthFilterTest.java | {
"start": 2319,
"end": 12193
} | class ____ {
    // Filter under test; rebuilt before every test in setUp().
    private AuthFilter authFilter;
    // Controls whether auth is enabled and supplies the server identity key/value pair.
    @Mock
    private NacosAuthConfig authConfig;
    // Resolves the controller method for the current request; tests stub it to return a @Secured-annotated method.
    @Mock
    private ControllerMethodsCache methodsCache;
    @Mock
    private InnerApiAuthEnabled innerApiAuthEnabled;
    @Mock
    FilterChain filterChain;
    @Mock
    HttpServletRequest request;
    @Mock
    HttpServletResponse response;
    @BeforeEach
    void setUp() {
        authFilter = new AuthFilter(authConfig, methodsCache, innerApiAuthEnabled);
    }
    @AfterEach
    void tearDown() {
        // Clear the thread-bound request context so state does not leak between tests.
        RequestContextHolder.removeContext();
    }
    @Test
    void testDoFilterDisabledAuth() throws ServletException, IOException {
        // Auth disabled globally: the request passes straight through and no error is sent.
        when(authConfig.isAuthEnabled()).thenReturn(false);
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        verify(response, never()).sendError(anyInt(), anyString());
    }
    // The @Secured annotation on the test methods below is fetched reflectively via
    // methodsCache.getMethod(...), so each test method acts as its own secured handler.
    @Test
    @Secured
    void testDoFilterWithoutServerIdentity() throws ServletException, IOException, NoSuchMethodException {
        // No server identity header configured or sent: the filter must reject with 403.
        when(methodsCache.getMethod(request)).thenReturn(
                this.getClass().getDeclaredMethod("testDoFilterWithoutServerIdentity"));
        when(authConfig.isAuthEnabled()).thenReturn(true);
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain, never()).doFilter(request, response);
        verify(response).sendError(403,
                "Invalid server identity key or value, Please make sure set `nacos.core.auth.server.identity.key`"
                        + " and `nacos.core.auth.server.identity.value`, or open `nacos.core.auth.enable.userAgentAuthWhite`");
    }
    @Test
    @Secured
    void testDoFilterWithServerIdentity() throws ServletException, IOException, NoSuchMethodException {
        // Request carries the configured identity header ("1" -> "2"): it is let through.
        when(methodsCache.getMethod(request)).thenReturn(
                this.getClass().getDeclaredMethod("testDoFilterWithServerIdentity"));
        when(authConfig.isAuthEnabled()).thenReturn(true);
        when(authConfig.getServerIdentityKey()).thenReturn("1");
        when(authConfig.getServerIdentityValue()).thenReturn("2");
        when(request.getHeader("1")).thenReturn("2");
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        verify(response, never()).sendError(anyInt(), anyString());
    }
    @Test
    @Secured
    void testDoFilterWithoutMethod() throws ServletException, IOException {
        // methodsCache resolves no handler method (stub returns null): the filter cannot find
        // a @Secured annotation and passes the request through.
        when(authConfig.isAuthEnabled()).thenReturn(true);
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        verify(response, never()).sendError(anyInt(), anyString());
    }
    @Test
    void testDoFilterWithoutSecured() throws ServletException, IOException, NoSuchMethodException {
        // Handler method has no @Secured annotation: no auth check is performed.
        when(authConfig.isAuthEnabled()).thenReturn(true);
        when(methodsCache.getMethod(request)).thenReturn(
                this.getClass().getDeclaredMethod("testDoFilterWithoutSecured"));
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        verify(response, never()).sendError(anyInt(), anyString());
    }
    @Test
    @Secured
    void testDoFilterWithNoNeedAuthSecured() throws NoSuchMethodException, ServletException, IOException {
        // @Secured is present but the auth plugin reports auth not required for it: pass through.
        when(authConfig.isAuthEnabled()).thenReturn(true);
        when(authConfig.getServerIdentityKey()).thenReturn("1");
        when(authConfig.getServerIdentityValue()).thenReturn("2");
        when(methodsCache.getMethod(request)).thenReturn(
                this.getClass().getDeclaredMethod("testDoFilterWithNoNeedAuthSecured"));
        HttpProtocolAuthService protocolAuthService = injectMockPlugins();
        when(protocolAuthService.enableAuth(any(Secured.class))).thenReturn(false);
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        verify(response, never()).sendError(anyInt(), anyString());
    }
    @Test
    @Secured
    void testDoFilterWithNeedAuthSecuredSuccess()
            throws NoSuchMethodException, ServletException, IOException, AccessException {
        // Happy path: identity and authority validation both succeed, request passes through.
        when(authConfig.isAuthEnabled()).thenReturn(true);
        when(authConfig.getServerIdentityKey()).thenReturn("1");
        when(authConfig.getServerIdentityValue()).thenReturn("2");
        when(methodsCache.getMethod(request)).thenReturn(
                this.getClass().getDeclaredMethod("testDoFilterWithNeedAuthSecuredSuccess"));
        HttpProtocolAuthService protocolAuthService = injectMockPlugins();
        when(protocolAuthService.enableAuth(any(Secured.class))).thenReturn(true);
        doReturn(new IdentityContext()).when(protocolAuthService).parseIdentity(eq(request));
        doReturn(Resource.EMPTY_RESOURCE).when(protocolAuthService).parseResource(eq(request), any(Secured.class));
        when(protocolAuthService.validateIdentity(any(IdentityContext.class), any(Resource.class))).thenReturn(
                AuthResult.successResult());
        when(protocolAuthService.validateAuthority(any(IdentityContext.class), any(Permission.class))).thenReturn(
                AuthResult.successResult());
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        verify(response, never()).sendError(anyInt(), anyString());
    }
    @Test
    @Secured
    void testDoFilterWithNeedAuthSecuredIdentityFailure()
            throws NoSuchMethodException, ServletException, IOException, AccessException {
        // Identity validation fails: request is rejected with the failure's 403 code.
        when(authConfig.isAuthEnabled()).thenReturn(true);
        when(authConfig.getServerIdentityKey()).thenReturn("1");
        when(authConfig.getServerIdentityValue()).thenReturn("2");
        when(methodsCache.getMethod(request)).thenReturn(
                this.getClass().getDeclaredMethod("testDoFilterWithNeedAuthSecuredIdentityFailure"));
        HttpProtocolAuthService protocolAuthService = injectMockPlugins();
        when(protocolAuthService.enableAuth(any(Secured.class))).thenReturn(true);
        doReturn(new IdentityContext()).when(protocolAuthService).parseIdentity(eq(request));
        doReturn(Resource.EMPTY_RESOURCE).when(protocolAuthService).parseResource(eq(request), any(Secured.class));
        when(protocolAuthService.validateIdentity(any(IdentityContext.class), any(Resource.class))).thenReturn(
                AuthResult.failureResult(403, "test"));
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain, never()).doFilter(request, response);
        verify(response).sendError(eq(403), anyString());
    }
    @Test
    @Secured
    void testDoFilterWithNeedAuthSecuredAuthorityFailure()
            throws NoSuchMethodException, ServletException, IOException, AccessException {
        // Identity succeeds but authority validation fails: request is rejected with 403.
        when(authConfig.isAuthEnabled()).thenReturn(true);
        when(authConfig.getServerIdentityKey()).thenReturn("1");
        when(authConfig.getServerIdentityValue()).thenReturn("2");
        when(methodsCache.getMethod(request)).thenReturn(
                this.getClass().getDeclaredMethod("testDoFilterWithNeedAuthSecuredAuthorityFailure"));
        HttpProtocolAuthService protocolAuthService = injectMockPlugins();
        when(protocolAuthService.enableAuth(any(Secured.class))).thenReturn(true);
        doReturn(new IdentityContext()).when(protocolAuthService).parseIdentity(eq(request));
        doReturn(Resource.EMPTY_RESOURCE).when(protocolAuthService).parseResource(eq(request), any(Secured.class));
        when(protocolAuthService.validateIdentity(any(IdentityContext.class), any(Resource.class))).thenReturn(
                AuthResult.successResult());
        when(protocolAuthService.validateAuthority(any(IdentityContext.class), any(Permission.class))).thenReturn(
                AuthResult.failureResult(403, "test"));
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain, never()).doFilter(request, response);
        verify(response).sendError(eq(403), anyString());
    }
    @Test
    @Secured(tags = Constants.Tag.ONLY_IDENTITY)
    void testDoFilterWithNeedAuthSecuredOnlyIdentity()
            throws NoSuchMethodException, ServletException, IOException, AccessException {
        // ONLY_IDENTITY tag: only identity is validated; no authority check is needed for the
        // request to pass (note validateAuthority is never stubbed here).
        when(authConfig.isAuthEnabled()).thenReturn(true);
        when(authConfig.getServerIdentityKey()).thenReturn("1");
        when(authConfig.getServerIdentityValue()).thenReturn("2");
        when(methodsCache.getMethod(request)).thenReturn(
                this.getClass().getDeclaredMethod("testDoFilterWithNeedAuthSecuredOnlyIdentity"));
        HttpProtocolAuthService protocolAuthService = injectMockPlugins();
        when(protocolAuthService.enableAuth(any(Secured.class))).thenReturn(true);
        doReturn(new IdentityContext()).when(protocolAuthService).parseIdentity(eq(request));
        doReturn(Resource.EMPTY_RESOURCE).when(protocolAuthService).parseResource(eq(request), any(Secured.class));
        when(protocolAuthService.validateIdentity(any(IdentityContext.class), any(Resource.class))).thenReturn(
                AuthResult.successResult());
        authFilter.doFilter(request, response, filterChain);
        verify(filterChain).doFilter(request, response);
        verify(response, never()).sendError(anyInt(), anyString());
    }
    // Replaces the filter's internal HttpProtocolAuthService with a Mockito spy so individual
    // tests can stub enableAuth/parseIdentity/parseResource/validate* behavior.
    private HttpProtocolAuthService injectMockPlugins() {
        HttpProtocolAuthService protocolAuthService = new HttpProtocolAuthService(authConfig);
        protocolAuthService.initialize();
        HttpProtocolAuthService spyProtocolAuthService = spy(protocolAuthService);
        // The field is private on AuthFilter, so inject the spy reflectively.
        ReflectionTestUtils.setField(authFilter, "protocolAuthService", spyProtocolAuthService);
        return spyProtocolAuthService;
    }
}
| AuthFilterTest |
java | elastic__elasticsearch | test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalSpecBuilder.java | {
"start": 1147,
"end": 5946
} | interface ____<T extends LocalSpecBuilder<?>> {
    /**
     * Register a {@link SettingsProvider}.
     */
    T settings(SettingsProvider settingsProvider);
    /**
     * Add a new node setting.
     */
    T setting(String setting, String value);
    /**
     * Add a new node setting computed by the given supplier.
     */
    T setting(String setting, Supplier<String> value);
    /**
     * Add a new node setting computed by the given supplier when the given predicate evaluates to {@code true}.
     */
    T setting(String setting, Supplier<String> value, Predicate<LocalNodeSpec> predicate);
    /**
     * Register a {@link EnvironmentProvider}.
     */
    T environment(EnvironmentProvider environmentProvider);
    /**
     * Add a new node environment variable.
     */
    T environment(String key, String value);
    /**
     * Add a new node environment variable computed by the given supplier.
     */
    T environment(String key, Supplier<String> supplier);
    /**
     * Set the cluster {@link DistributionType}. By default, the {@link DistributionType#INTEG_TEST} distribution is used.
     */
    T distribution(DistributionType type);
    /**
     * Ensure module is installed into the distribution when using the {@link DistributionType#INTEG_TEST} distribution. This is ignored
     * when the {@link DistributionType#DEFAULT} is being used.
     */
    T module(String moduleName);
    /**
     * Ensure module is installed into the distribution when using the {@link DistributionType#INTEG_TEST} distribution. This is ignored
     * when the {@link DistributionType#DEFAULT} is being used. The given {@code config} consumer can customize the install
     * (e.g. override bundled entitlement or security policies) via the passed {@link PluginInstallSpec}.
     */
    T module(String moduleName, Consumer<? super PluginInstallSpec> config);
    /**
     * Ensure plugin is installed into the distribution.
     */
    T plugin(String pluginName);
    /**
     * Ensure plugin is installed into the distribution. The given {@code config} consumer can customize the install
     * via the passed {@link PluginInstallSpec}.
     */
    T plugin(String pluginName, Consumer<? super PluginInstallSpec> config);
    /**
     * Require feature to be enabled in the cluster.
     */
    T feature(FeatureFlag feature);
    /**
     * Adds a secure setting to the node keystore.
     */
    T keystore(String key, String value);
    /**
     * Adds a secure file to the node keystore.
     */
    T keystore(String key, Resource file);
    /**
     * Add a secure setting computed by the given supplier.
     */
    T keystore(String key, Supplier<String> supplier);
    /**
     * Add a secure setting computed by the given supplier when the given predicate evaluates to {@code true}.
     */
    T keystore(String key, Supplier<String> supplier, Predicate<LocalNodeSpec> predicate);
    /**
     * Register a {@link SettingsProvider} for keystore settings.
     */
    T keystore(SettingsProvider settingsProvider);
    /**
     * Sets the security setting keystore password.
     */
    T keystorePassword(String password);
    /**
     * Adds a file to the node config directory
     */
    T configFile(String fileName, Resource configFile);
    /**
     * Sets the version of Elasticsearch. Defaults to {@link Version#CURRENT}.
     */
    T version(Version version);
    /**
     * Sets the version of Elasticsearch. Defaults to {@link Version#CURRENT}.
     */
    T version(String version);
    /**
     * Sets the version of Elasticsearch and whether it is a detached version.
     * If not set, then defaults to {@link Version#CURRENT} and {@code false}.
     *
     * @param version the ES cluster version string
     * @param detachedVersion true if using unreleased version of Elasticsearch that is also different from the local current HEAD.
     *                        Defaults to false.
     */
    T version(String version, boolean detachedVersion);
    /**
     * Adds a system property to node JVM arguments.
     */
    T systemProperty(String property, String value);
    /**
     * Adds a system property to node JVM arguments computed by the given supplier
     */
    T systemProperty(String property, Supplier<String> supplier);
    /**
     * Adds a system property to node JVM arguments computed by the given supplier
     * when the given predicate evaluates to {@code true}.
     */
    T systemProperty(String setting, Supplier<String> value, Predicate<LocalNodeSpec> predicate);
    /**
     * Register a {@link SystemPropertyProvider}.
     */
    T systemProperties(SystemPropertyProvider systemPropertyProvider);
    /**
     * Adds an additional command line argument to node JVM arguments.
     */
    T jvmArg(String arg);
    /**
     * Register a supplier to provide the config directory. The default config directory
     * is used when the supplier is null or the return value of the supplier is null.
     */
    T withConfigDir(Supplier<Path> configDirSupplier);
}
| LocalSpecBuilder |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/Float2DArrayAssertBaseTest.java | {
"start": 750,
"end": 843
} | class ____ {@link Float2DArrayAssert} tests.
*
* @author Maciej Wajcht
*/
public abstract | for |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/NestedExceptionUtils.java | {
"start": 1064,
"end": 2858
} | class ____ {
/**
* Build a message for the given base message and root cause.
* @param message the base message
* @param cause the root cause
* @return the full exception message
* @deprecated as of 6.0, in favor of custom exception messages
* with selective inclusion of cause messages
*/
@Deprecated(since = "6.0")
public static @Nullable String buildMessage(@Nullable String message, @Nullable Throwable cause) {
if (cause == null) {
return message;
}
StringBuilder sb = new StringBuilder(64);
if (message != null) {
sb.append(message).append("; ");
}
sb.append("nested exception is ").append(cause);
return sb.toString();
}
/**
* Retrieve the innermost cause of the given exception, if any.
* @param original the original exception to introspect
* @return the innermost exception, or {@code null} if none
* @since 4.3.9
*/
public static @Nullable Throwable getRootCause(@Nullable Throwable original) {
if (original == null) {
return null;
}
Throwable rootCause = null;
Throwable cause = original.getCause();
while (cause != null && cause != rootCause) {
rootCause = cause;
cause = cause.getCause();
}
return rootCause;
}
/**
* Retrieve the most specific cause of the given exception, that is,
* either the innermost cause (root cause) or the exception itself.
* <p>Differs from {@link #getRootCause} in that it falls back
* to the original exception if there is no root cause.
* @param original the original exception to introspect
* @return the most specific cause (never {@code null})
* @since 4.3.9
*/
public static Throwable getMostSpecificCause(Throwable original) {
Throwable rootCause = getRootCause(original);
return (rootCause != null ? rootCause : original);
}
}
| NestedExceptionUtils |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/support/AutoCreateIndexTests.java | {
"start": 1341,
"end": 19665
} | class ____ extends ESTestCase {
private static final String TEST_SYSTEM_INDEX_NAME = ".test-system-index";
public void testParseFailed() {
try {
Settings settings = Settings.builder().put("action.auto_create_index", ",,,").build();
newAutoCreateIndex(settings);
fail("initialization should have failed");
} catch (IllegalArgumentException ex) {
assertEquals(
"Can't parse [,,,] for setting [action.auto_create_index] must be either [true, false, or a "
+ "comma separated list of index patterns]",
ex.getMessage()
);
}
}
public void testParseFailedMissingIndex() {
String prefix = randomFrom("+", "-");
Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), prefix).build();
try {
newAutoCreateIndex(settings);
fail("initialization should have failed");
} catch (IllegalArgumentException ex) {
assertEquals(
"Can't parse [" + prefix + "] for setting [action.auto_create_index] must contain an index name after [" + prefix + "]",
ex.getMessage()
);
}
}
    public void testHandleSpaces() { // see #21449
        // Entries may be separated by "," or ", "; whitespace around each pattern must be trimmed.
        Settings settings = Settings.builder()
            .put(
                AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(),
                randomFrom(
                    ".marvel-, .security, .watches, .triggered_watches, .watcher-history-",
                    ".marvel-,.security,.watches,.triggered_watches,.watcher-history-"
                )
            )
            .build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        List<Tuple<String, Boolean>> expressions = autoCreateIndex.getAutoCreate().getExpressions();
        // Collect (pattern -> allowed) pairs; all five patterns should parse as positive entries.
        Map<String, Boolean> map = new HashMap<>();
        for (Tuple<String, Boolean> t : expressions) {
            map.put(t.v1(), t.v2());
        }
        assertTrue(map.get(".marvel-"));
        assertTrue(map.get(".security"));
        assertTrue(map.get(".watches"));
        assertTrue(map.get(".triggered_watches"));
        assertTrue(map.get(".watcher-history-"));
        assertEquals(5, map.size());
    }
    public void testAutoCreationDisabled() {
        // auto_create_index=false: creating any missing (non-system) index is rejected.
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        String randomIndex = randomAlphaOfLengthBetween(1, 10);
        IndexNotFoundException e = expectThrows(
            IndexNotFoundException.class,
            () -> autoCreateIndex.shouldAutoCreate(randomIndex, buildProjectMetadata())
        );
        assertEquals("no such index [" + randomIndex + "] and [action.auto_create_index] is [false]", e.getMessage());
    }
    public void testSystemIndexWithAutoCreationDisabled() {
        // System indices may always be auto-created, even when the setting is false.
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        assertThat(autoCreateIndex.shouldAutoCreate(TEST_SYSTEM_INDEX_NAME, buildProjectMetadata()), equalTo(true));
    }
    public void testAutoCreationEnabled() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), true).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        assertThat(autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildProjectMetadata()), equalTo(true));
    }
    public void testDefaultAutoCreation() {
        // When the setting is absent it defaults to allowing auto-creation.
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(Settings.EMPTY);
        assertThat(autoCreateIndex.shouldAutoCreate(randomAlphaOfLengthBetween(1, 10), buildProjectMetadata()), equalTo(true));
    }
    public void testExistingIndex() {
        // An index that already exists must never be auto-created, whatever the setting value.
        Settings settings = Settings.builder()
            .put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom(true, false, randomAlphaOfLengthBetween(7, 10)).toString())
            .build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        assertThat(
            autoCreateIndex.shouldAutoCreate(randomFrom("index1", "index2", "index3"), buildProjectMetadata("index1", "index2", "index3")),
            equalTo(false)
        );
    }
    public void testAutoCreationPatternEnabled() {
        // "index*" and "+index*" are equivalent positive patterns.
        Settings settings = Settings.builder()
            .put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom("+index*", "index*"))
            .build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault()).build();
        assertThat(autoCreateIndex.shouldAutoCreate("index" + randomAlphaOfLengthBetween(1, 5), projectMetadata), equalTo(true));
        expectNotMatch(projectMetadata, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }
    public void testAutoCreationPatternDisabled() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "-index*").build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault()).build();
        expectForbidden(projectMetadata, autoCreateIndex, "index" + randomAlphaOfLengthBetween(1, 5), "-index*");
        /* When patterns are specified, even if they are all negative, the default is can't create. So a pure negative pattern is the same
         * as false, really. */
        expectNotMatch(projectMetadata, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }
    public void testAutoCreationSystemIndexPatternDisabled() {
        // A negative pattern cannot forbid auto-creation of a system index.
        Settings settings = Settings.builder()
            .put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "-" + TEST_SYSTEM_INDEX_NAME + "*")
            .build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        assertThat(autoCreateIndex.shouldAutoCreate(TEST_SYSTEM_INDEX_NAME, buildProjectMetadata()), equalTo(true));
    }
    public void testAutoCreationMultiplePatternsWithWildcards() {
        // Mixed positive and negative wildcard patterns.
        Settings settings = Settings.builder()
            .put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomFrom("+test*,-index*", "test*,-index*"))
            .build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault()).build();
        expectForbidden(projectMetadata, autoCreateIndex, "index" + randomAlphaOfLengthBetween(1, 5), "-index*");
        assertThat(autoCreateIndex.shouldAutoCreate("test" + randomAlphaOfLengthBetween(1, 5), projectMetadata), equalTo(true));
        expectNotMatch(projectMetadata, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }
    public void testAutoCreationMultiplePatternsNoWildcards() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "+test1,-index1").build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault()).build();
        assertThat(autoCreateIndex.shouldAutoCreate("test1", projectMetadata), equalTo(true));
        // Exact-name patterns must not match prefixes or unrelated names.
        expectNotMatch(projectMetadata, autoCreateIndex, "index" + randomAlphaOfLengthBetween(1, 5));
        expectNotMatch(projectMetadata, autoCreateIndex, "test" + randomAlphaOfLengthBetween(2, 5));
        expectNotMatch(projectMetadata, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }
    public void testAutoCreationMultipleIndexNames() {
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "test1,test2").build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault()).build();
        assertThat(autoCreateIndex.shouldAutoCreate("test1", projectMetadata), equalTo(true));
        assertThat(autoCreateIndex.shouldAutoCreate("test2", projectMetadata), equalTo(true));
        expectNotMatch(projectMetadata, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }
    public void testAutoCreationConflictingPatternsFirstWins() {
        // When patterns conflict, the first matching entry in the list takes precedence.
        Settings settings = Settings.builder()
            .put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "+test1,-test1,-test2,+test2")
            .build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault()).build();
        assertThat(autoCreateIndex.shouldAutoCreate("test1", projectMetadata), equalTo(true));
        expectForbidden(projectMetadata, autoCreateIndex, "test2", "-test2");
        expectNotMatch(projectMetadata, autoCreateIndex, "does_not_match" + randomAlphaOfLengthBetween(1, 5));
    }
    public void testUpdate() {
        // The auto_create_index setting is dynamic: runtime updates via ClusterSettings must be
        // reflected immediately by AutoCreateIndex.
        boolean value = randomBoolean();
        Settings settings;
        if (value && randomBoolean()) {
            // true is the default, so sometimes exercise the "setting absent" case.
            settings = Settings.EMPTY;
        } else {
            settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), value).build();
        }
        ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
        SystemIndices systemIndices = EmptySystemIndices.INSTANCE;
        AutoCreateIndex autoCreateIndex = new AutoCreateIndex(
            settings,
            clusterSettings,
            TestIndexNameExpressionResolver.newInstance(systemIndices),
            systemIndices
        );
        assertThat(autoCreateIndex.getAutoCreate().isAutoCreateIndex(), equalTo(value));
        // Flip the boolean value and verify the change is picked up.
        Settings newSettings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), value == false).build();
        clusterSettings.applySettings(newSettings);
        assertThat(autoCreateIndex.getAutoCreate().isAutoCreateIndex(), equalTo(value == false));
        // Switch to a pattern-based value and verify the expression list is rebuilt.
        newSettings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), "logs-*").build();
        clusterSettings.applySettings(newSettings);
        assertThat(autoCreateIndex.getAutoCreate().isAutoCreateIndex(), equalTo(true));
        assertThat(autoCreateIndex.getAutoCreate().getExpressions().size(), equalTo(1));
        assertThat(autoCreateIndex.getAutoCreate().getExpressions().get(0).v1(), equalTo("logs-*"));
    }
    /**
     * Check that if a template matches the index to be created, but that template does not have a value
     * for the allow_auto_create setting at all, and the auto_create_index setting matches the index
     * to be created, then the null in the template does not override the auto_create_index logic and the
     * index can be created.
     */
    public void testNullAllowAutoCreateInTemplateDoesNotOverrideMatchingAutoCreateIndexSetting() {
        String randomIndex = randomAlphaOfLengthBetween(2, 10);
        // Template pattern matches on the index name's first character; allowAutoCreate is left unset.
        final ComposableIndexTemplate template = ComposableIndexTemplate.builder()
            .indexPatterns(List.of(randomIndex.charAt(0) + "*"))
            .componentTemplates(List.of())
            .metadata(Map.of())
            .build();
        final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
            .indexTemplates(Map.of("test_template", template))
            .build();
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), randomIndex.charAt(0) + "*").build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        assertTrue(autoCreateIndex.shouldAutoCreate(randomIndex, projectMetadata));
    }
    /**
     * Check that if a template matches the index to be created, but that template does not have a value
     * for the allow_auto_create setting at all, then it does not cause the auto-create logic to trip over
     * on a null value.
     */
    public void testCanHandleNullAutoCreateSettingInTemplate() {
        String randomIndex = randomAlphaOfLengthBetween(2, 10);
        final ComposableIndexTemplate template = ComposableIndexTemplate.builder()
            .indexPatterns(List.of(randomIndex.charAt(0) + "*"))
            .componentTemplates(List.of())
            .metadata(Map.of())
            .build();
        final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
            .indexTemplates(Map.of("test_template", template))
            .build();
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        // With auto-create disabled and no template override, the standard "setting is false" error applies.
        IndexNotFoundException e = expectThrows(
            IndexNotFoundException.class,
            () -> autoCreateIndex.shouldAutoCreate(randomIndex, projectMetadata)
        );
        assertEquals("no such index [" + randomIndex + "] and [action.auto_create_index] is [false]", e.getMessage());
    }
    /**
     * Check that if a template matches the index to be created, but that template has the allow_auto_create
     * setting turned off, then it overrides the global setting.
     */
    public void testDisabledAutoCreateTemplateSettingDoesNotOverride() {
        String randomIndex = randomAlphaOfLengthBetween(2, 10);
        final ComposableIndexTemplate template = ComposableIndexTemplate.builder()
            .indexPatterns(List.of(randomIndex.charAt(0) + "*"))
            .componentTemplates(List.of())
            .metadata(Map.of())
            .allowAutoCreate(false)
            .build();
        final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
            .indexTemplates(Map.of("test_template", template))
            .build();
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        // The error message cites the template (not the global setting) as the source of the refusal.
        IndexNotFoundException e = expectThrows(
            IndexNotFoundException.class,
            () -> autoCreateIndex.shouldAutoCreate(randomIndex, projectMetadata)
        );
        assertEquals("no such index [composable template [" + randomIndex.charAt(0) + "*] forbids index auto creation]", e.getMessage());
    }
    /**
     * Check that if a template matches the index to be created, and that template has the allow_auto_create
     * setting enabled, then it overrides the global setting.
     */
    public void testEnabledAutoCreateTemplateSettingDoesOverride() {
        String randomIndex = randomAlphaOfLengthBetween(2, 10);
        final ComposableIndexTemplate template = ComposableIndexTemplate.builder()
            .indexPatterns(List.of(randomIndex.charAt(0) + "*"))
            .componentTemplates(List.of())
            .metadata(Map.of())
            .allowAutoCreate(true)
            .build();
        final ProjectMetadata projectMetadata = ProjectMetadata.builder(randomProjectIdOrDefault())
            .indexTemplates(Map.of("test_template", template))
            .build();
        Settings settings = Settings.builder().put(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), false).build();
        AutoCreateIndex autoCreateIndex = newAutoCreateIndex(settings);
        assertThat(autoCreateIndex.shouldAutoCreate(randomIndex, projectMetadata), equalTo(true));
    }
// private static ClusterState buildClusterState(String... indices) {
// Metadata.Builder metadata = Metadata.builder();
// for (String index : indices) {
// metadata.put(IndexMetadata.builder(index).settings(settings(IndexVersion.current())).numberOfShards(1).numberOfReplicas(1));
// }
// return ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build();
// }
private static ProjectMetadata buildProjectMetadata(String... indices) {
final ProjectMetadata.Builder builder = ProjectMetadata.builder(randomProjectIdOrDefault());
for (String index : indices) {
builder.put(IndexMetadata.builder(index).settings(settings(IndexVersion.current())).numberOfShards(1).numberOfReplicas(1));
}
return builder.build();
}
    // Builds an AutoCreateIndex whose SystemIndices registry contains a single unmanaged
    // descriptor matching TEST_SYSTEM_INDEX_NAME*, so system-index behavior can be exercised.
    private AutoCreateIndex newAutoCreateIndex(Settings settings) {
        SystemIndices systemIndices = new SystemIndices(
            List.of(
                new SystemIndices.Feature(
                    "plugin",
                    "test feature",
                    List.of(SystemIndexDescriptorUtils.createUnmanaged(TEST_SYSTEM_INDEX_NAME + "*", ""))
                )
            )
        );
        return new AutoCreateIndex(
            settings,
            new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
            TestIndexNameExpressionResolver.newInstance(systemIndices),
            systemIndices
        );
    }
    // Asserts that auto-creating the given index fails because no configured pattern matches it.
    private void expectNotMatch(ProjectMetadata projectMetadata, AutoCreateIndex autoCreateIndex, String index) {
        IndexNotFoundException e = expectThrows(
            IndexNotFoundException.class,
            () -> autoCreateIndex.shouldAutoCreate(index, projectMetadata)
        );
        assertEquals(
            "no such index [" + index + "] and [action.auto_create_index] ([" + autoCreateIndex.getAutoCreate() + "]) doesn't match",
            e.getMessage()
        );
    }
    // Asserts that auto-creating the given index fails because a negative pattern explicitly forbids it.
    private void expectForbidden(ProjectMetadata projectMetadata, AutoCreateIndex autoCreateIndex, String index, String forbiddingPattern) {
        IndexNotFoundException e = expectThrows(
            IndexNotFoundException.class,
            () -> autoCreateIndex.shouldAutoCreate(index, projectMetadata)
        );
        assertEquals(
            "no such index ["
                + index
                + "] and [action.auto_create_index] contains ["
                + forbiddingPattern
                + "] which forbids automatic creation of the index",
            e.getMessage()
        );
    }
}
| AutoCreateIndexTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java | {
"start": 1631,
"end": 4837
} | class ____ extends TransportTasksAction<CancellableTask, CancelTasksRequest, ListTasksResponse, TaskInfo> {
    public static final String NAME = "cluster:admin/tasks/cancel";
    public static final ActionType<ListTasksResponse> TYPE = new ActionType<>(NAME);
    @Inject
    public TransportCancelTasksAction(ClusterService clusterService, TransportService transportService, ActionFilters actionFilters) {
        super(
            NAME,
            clusterService,
            transportService,
            actionFilters,
            CancelTasksRequest::new,
            TaskInfo::from,
            // Cancellation is usually lightweight, and runs on the transport thread if the task didn't even start yet, but some
            // implementations of CancellableTask#onCancelled() are nontrivial so we use GENERIC here. TODO could it be SAME?
            transportService.getThreadPool().executor(ThreadPool.Names.GENERIC)
        );
    }
    @Override
    protected ListTasksResponse newResponse(
        CancelTasksRequest request,
        List<TaskInfo> tasks,
        List<TaskOperationFailure> taskOperationFailures,
        List<FailedNodeException> failedNodeExceptions
    ) {
        // Aggregate the per-task results and per-node failures into a single response.
        return new ListTasksResponse(tasks, taskOperationFailures, failedNodeExceptions);
    }
    // Resolves which cancellable tasks match the request: a single task when a target id is set,
    // otherwise all cancellable tasks matching the request's filters.
    // NOTE(review): this appears to override TransportTasksAction#processTasks — consider adding @Override.
    protected List<CancellableTask> processTasks(CancelTasksRequest request) {
        if (request.getTargetTaskId().isSet()) {
            // we are only checking one task, we can optimize it
            CancellableTask task = taskManager.getCancellableTask(request.getTargetTaskId().getId());
            if (task != null) {
                if (request.match(task)) {
                    return List.of(task);
                } else {
                    throw new IllegalArgumentException("task [" + request.getTargetTaskId() + "] doesn't support this operation");
                }
            } else {
                if (taskManager.getTask(request.getTargetTaskId().getId()) != null) {
                    // The task exists, but doesn't support cancellation
                    throw new IllegalArgumentException("task [" + request.getTargetTaskId() + "] doesn't support cancellation");
                } else {
                    throw new ResourceNotFoundException("task [{}] is not found", request.getTargetTaskId());
                }
            }
        } else {
            // No explicit task id: scan all cancellable tasks and keep those matching the request.
            final var tasks = new ArrayList<CancellableTask>();
            for (CancellableTask task : taskManager.getCancellableTasks().values()) {
                if (request.match(task)) {
                    tasks.add(task);
                }
            }
            return tasks;
        }
    }
    @Override
    protected void taskOperation(
        CancellableTask actionTask,
        CancelTasksRequest request,
        CancellableTask cancellableTask,
        ActionListener<TaskInfo> listener
    ) {
        // Cancel the matched task together with all its descendant (child) tasks; on completion
        // report the cancelled task's TaskInfo (without detailed status) back to the listener.
        String nodeId = clusterService.localNode().getId();
        taskManager.cancelTaskAndDescendants(
            cancellableTask,
            request.getReason(),
            request.waitForCompletion(),
            listener.map(r -> cancellableTask.taskInfo(nodeId, false))
        );
    }
}
| TransportCancelTasksAction |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/Methods.java | {
"start": 19925,
"end": 21152
} | class ____ implements BiFunction<String, ClassVisitor, ClassVisitor> {

    /**
     * Bytecode transformation that strips the {@code final} modifier from a given
     * set of methods so that a generated subclass may override them.
     */
    private final Set<MethodKey> methodsFromWhichToRemoveFinal;

    public RemoveFinalFromMethod(Set<MethodKey> methodsFromWhichToRemoveFinal) {
        this.methodsFromWhichToRemoveFinal = methodsFromWhichToRemoveFinal;
    }

    @Override
    public ClassVisitor apply(String className, ClassVisitor classVisitor) {
        ClassTransformer transformer = new ClassTransformer(className);
        methodsFromWhichToRemoveFinal.forEach(key -> {
            LOGGER.debug("Final modifier removed from method " + key.name + " of class " + className);
            transformer.modifyMethod(MethodDescriptor.of(key.method)).removeModifiers(Opcodes.ACC_FINAL);
        });
        return transformer.applyTo(classVisitor);
    }
}
/**
* This stateful predicate can be used to skip methods that should not be added to the generated subclass.
* <p>
* Don't forget to call {@link SubclassSkipPredicate#startProcessing(ClassInfo, ClassInfo)} before the methods are processed
* and {@link SubclassSkipPredicate#methodsProcessed()} afterwards.
*/
static | RemoveFinalFromMethod |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/examples/HTTPExamples.java | {
"start": 2508,
"end": 22890
} | class ____ {
public void example1(Vertx vertx) {
HttpServer server = vertx.createHttpServer();
}
public void example2(Vertx vertx) {
HttpServerOptions options = new HttpServerOptions().setMaxWebSocketFrameSize(1000000);
HttpServer server = vertx.createHttpServer(options);
}
public void exampleServerLogging(Vertx vertx) {
HttpServerOptions options = new HttpServerOptions().setLogActivity(true);
HttpServer server = vertx.createHttpServer(options);
}
public void example3(Vertx vertx) {
HttpServer server = vertx.createHttpServer();
server.listen();
}
public void example4(Vertx vertx) {
HttpServer server = vertx.createHttpServer();
server.listen(8080, "myhost.com");
}
public void example5(Vertx vertx) {
HttpServer server = vertx.createHttpServer();
server
.listen(8080, "myhost.com")
.onComplete(res -> {
if (res.succeeded()) {
System.out.println("Server is now listening!");
} else {
System.out.println("Failed to bind!");
}
});
}
public void example6(Vertx vertx) {
HttpServer server = vertx.createHttpServer();
server.requestHandler(request -> {
// Handle the request in here
});
}
public void example7(Vertx vertx) {
HttpServer server = vertx.createHttpServer();
server.requestHandler(request -> {
// Handle the request in here
HttpMethod method = request.method();
});
}
public void example7_1(Vertx vertx) {
vertx.createHttpServer().requestHandler(request -> {
request.response().end("Hello world");
}).listen(8080);
}
public void example8(HttpServerRequest request) {
MultiMap headers = request.headers();
// Get the User-Agent:
System.out.println("User agent is " + headers.get("user-agent"));
// You can also do this and get the same result:
System.out.println("User agent is " + headers.get("User-Agent"));
}
public void example9(HttpServerRequest request) {
request.handler(buffer -> {
System.out.println("I have received a chunk of the body of length " + buffer.length());
});
}
public void example10(HttpServerRequest request) {
// Create an empty buffer
Buffer totalBuffer = Buffer.buffer();
request.handler(buffer -> {
System.out.println("I have received a chunk of the body of length " + buffer.length());
totalBuffer.appendBuffer(buffer);
});
request.endHandler(v -> {
System.out.println("Full body received, length = " + totalBuffer.length());
});
}
public void example11(HttpServerRequest request) {
request.bodyHandler(totalBuffer -> {
System.out.println("Full body received, length = " + totalBuffer.length());
});
}
public void example12(HttpServer server) {
server.requestHandler(request -> {
request.setExpectMultipart(true);
request.endHandler(v -> {
// The body has now been fully read, so retrieve the form attributes
MultiMap formAttributes = request.formAttributes();
});
});
}
public void example13(HttpServer server) {
server.requestHandler(request -> {
request.setExpectMultipart(true);
request.uploadHandler(upload -> {
System.out.println("Got a file upload " + upload.name());
});
});
}
public void example14(HttpServerRequest request) {
request.uploadHandler(upload -> {
upload.handler(chunk -> {
System.out.println("Received a chunk of the upload of length " + chunk.length());
});
});
}
public void example15(HttpServerRequest request) {
request.uploadHandler(upload -> {
upload.streamToFileSystem("myuploads_directory/" + upload.filename());
});
}
public void exampleHandlingCookies(HttpServerRequest request) {
Cookie someCookie = request.getCookie("mycookie");
String cookieValue = someCookie.getValue();
// Do something with cookie...
// Add a cookie - this will get written back in the response automatically
request.response().addCookie(Cookie.cookie("othercookie", "somevalue"));
}
public void example16(HttpServerRequest request, Buffer buffer) {
HttpServerResponse response = request.response();
response.write(buffer);
}
public void example17(HttpServerRequest request) {
HttpServerResponse response = request.response();
response.write("hello world!");
}
public void example18(HttpServerRequest request) {
HttpServerResponse response = request.response();
response.write("hello world!", "UTF-16");
}
public void example19(HttpServerRequest request) {
HttpServerResponse response = request.response();
response.write("hello world!");
response.end();
}
public void example20(HttpServerRequest request) {
HttpServerResponse response = request.response();
response.end("hello world!");
}
public void example21(HttpServerRequest request) {
    // MultiMap.set is fluent, so the two headers can be set in one chained statement.
    MultiMap headers = request.response().headers();
    headers.set("content-type", "text/html").set("other-header", "wibble");
}
public void example22(HttpServerRequest request) {
HttpServerResponse response = request.response();
response.putHeader("content-type", "text/html").putHeader("other-header", "wibble");
}
public void example23(HttpServerRequest request) {
HttpServerResponse response = request.response();
response.setChunked(true);
}
public void example24(HttpServerRequest request) {
HttpServerResponse response = request.response();
response.setChunked(true);
MultiMap trailers = response.trailers();
trailers.set("X-wibble", "woobble").set("X-quux", "flooble");
}
public void example25(HttpServerRequest request) {
HttpServerResponse response = request.response();
response.setChunked(true);
response.putTrailer("X-wibble", "woobble").putTrailer("X-quux", "flooble");
}
public void example26(Vertx vertx) {
vertx.createHttpServer().requestHandler(request -> {
String file = "";
if (request.path().equals("/")) {
file = "index.html";
} else if (!request.path().contains("..")) {
file = request.path();
}
request.response().sendFile("web/" + file);
}).listen(8080);
}
public void example26b(Vertx vertx) {
vertx.createHttpServer().requestHandler(request -> {
long offset = 0;
try {
offset = Long.parseLong(request.getParam("start"));
} catch (NumberFormatException e) {
// error handling...
}
long end = Long.MAX_VALUE;
try {
end = Long.parseLong(request.getParam("end"));
} catch (NumberFormatException e) {
// error handling...
}
request.response().sendFile("web/mybigfile.txt", offset, end);
}).listen(8080);
}
public void example26c(Vertx vertx) {
vertx.createHttpServer().requestHandler(request -> {
long offset = 0;
try {
offset = Long.parseLong(request.getParam("start"));
} catch (NumberFormatException e) {
// error handling...
}
request.response().sendFile("web/mybigfile.txt", offset);
}).listen(8080);
}
public void example27(Vertx vertx) {
vertx.createHttpServer().requestHandler(request -> {
HttpServerResponse response = request.response();
if (request.method() == HttpMethod.PUT) {
response.setChunked(true);
request.pipeTo(response);
} else {
response.setStatusCode(400).end();
}
}).listen(8080);
}
public void sendHttpServerResponse(Vertx vertx) {
vertx.createHttpServer().requestHandler(request -> {
HttpServerResponse response = request.response();
if (request.method() == HttpMethod.PUT) {
response.send(request);
} else {
response.setStatusCode(400).end();
}
}).listen(8080);
}
public void example28(Vertx vertx) {
HttpClientAgent client = vertx.createHttpClient();
}
public void example29(Vertx vertx) {
HttpClientOptions options = new HttpClientOptions().setKeepAlive(false);
HttpClientAgent client = vertx.createHttpClient(options);
}
public void examplePoolConfiguration(Vertx vertx) {
PoolOptions options = new PoolOptions().setHttp1MaxSize(10);
HttpClientAgent client = vertx.createHttpClient(options);
}
public void exampleClientLogging(Vertx vertx) {
HttpClientOptions options = new HttpClientOptions().setLogActivity(true);
HttpClientAgent client = vertx.createHttpClient(options);
}
public void exampleClientBuilder01(Vertx vertx, HttpClientOptions options) {
// Pretty much like vertx.createHttpClient(options)
HttpClientAgent build = vertx
.httpClientBuilder()
.with(options)
.build();
}
public void example30(HttpClient client) {
client
.request(HttpMethod.GET, 8080, "myserver.mycompany.com", "/some-uri")
.onComplete(ar1 -> {
if (ar1.succeeded()) {
// Connected to the server
}
});
}
public void example31(Vertx vertx) {
// Set the default host
HttpClientOptions options = new HttpClientOptions().setDefaultHost("wibble.com");
// Can also set default port if you want...
HttpClientAgent client = vertx.createHttpClient(options);
client
.request(HttpMethod.GET, "/some-uri")
.onComplete(ar1 -> {
if (ar1.succeeded()) {
HttpClientRequest request = ar1.result();
request
.send()
.onComplete(ar2 -> {
if (ar2.succeeded()) {
HttpClientResponse response = ar2.result();
System.out.println("Received response with status code " + response.statusCode());
}
});
}
});
}
public void example32(Vertx vertx) {
    HttpClientAgent client = vertx.createHttpClient();

    // Write some headers using the headers multi-map
    MultiMap headers = HttpHeaders.set("content-type", "application/json").set("other-header", "foo");

    client
        .request(HttpMethod.GET, "some-uri")
        .onComplete(ar1 -> {
            // Fixed: the success check was duplicated (the same `ar1.succeeded()` was
            // nested inside itself); a single check is sufficient.
            if (ar1.succeeded()) {
                HttpClientRequest request = ar1.result();
                request.headers().addAll(headers);
                request
                    .send()
                    .onComplete(ar2 -> {
                        HttpClientResponse response = ar2.result();
                        System.out.println("Received response with status code " + response.statusCode());
                    });
            }
        });
}
public void example33(HttpClientRequest request) {
// Write some headers using the putHeader method
request.putHeader("content-type", "application/json")
.putHeader("other-header", "foo");
}
public void sendRequest01(HttpClient client) {
client
.request(HttpMethod.GET, 8080, "myserver.mycompany.com", "/some-uri")
.onComplete(ar1 -> {
if (ar1.succeeded()) {
HttpClientRequest request = ar1.result();
// Send the request and process the response
request
.send()
.onComplete(ar -> {
if (ar.succeeded()) {
HttpClientResponse response = ar.result();
System.out.println("Received response with status code " + response.statusCode());
} else {
System.out.println("Something went wrong " + ar.cause().getMessage());
}
});
}
});
}
public void sendRequest02(HttpClient client) {
client
.request(HttpMethod.GET, 8080, "myserver.mycompany.com", "/some-uri")
.onComplete(ar1 -> {
if (ar1.succeeded()) {
HttpClientRequest request = ar1.result();
// Send the request and process the response
request
.send("Hello World")
.onComplete(ar -> {
if (ar.succeeded()) {
HttpClientResponse response = ar.result();
System.out.println("Received response with status code " + response.statusCode());
} else {
System.out.println("Something went wrong " + ar.cause().getMessage());
}
});
}
});
}
public void sendRequest03(HttpClientRequest request) {
// Send the request and process the response
request
.send(Buffer.buffer("Hello World"))
.onComplete(ar -> {
if (ar.succeeded()) {
HttpClientResponse response = ar.result();
System.out.println("Received response with status code " + response.statusCode());
} else {
System.out.println("Something went wrong " + ar.cause().getMessage());
}
});
}
public void sendRequest04(HttpClientRequest request, ReadStream<Buffer> stream) {
// Send the request and process the response
request
.putHeader(HttpHeaders.CONTENT_LENGTH, "1000")
.send(stream)
.onComplete(ar -> {
if (ar.succeeded()) {
HttpClientResponse response = ar.result();
System.out.println("Received response with status code " + response.statusCode());
} else {
System.out.println("Something went wrong " + ar.cause().getMessage());
}
});
}
public void example34(Vertx vertx, String body) {
HttpClientAgent client = vertx.createHttpClient();
client.request(HttpMethod.POST, "some-uri")
.onSuccess(request -> {
request.response().onSuccess(response -> {
System.out.println("Received response with status code " + response.statusCode());
});
// Now do stuff with the request
request.putHeader("content-length", "1000");
request.putHeader("content-type", "text/plain");
request.write(body);
// Make sure the request is ended when you're done with it
request.end();
});
}
public void example35(HttpClientRequest request) {
// Write string encoded in UTF-8
request.write("some data");
// Write string encoded in specific encoding
request.write("some other data", "UTF-16");
// Write a buffer
Buffer buffer = Buffer.buffer();
buffer.appendInt(123).appendLong(245l);
request.write(buffer);
}
public void example36(HttpClientRequest request) {
// Write string and end the request (send it) in a single call
request.end("some simple data");
// Write buffer and end the request (send it) in a single call
Buffer buffer = Buffer.buffer().appendDouble(12.34d).appendLong(432l);
request.end(buffer);
}
public void example39(HttpClientRequest request) {
request.end();
}
public void example40(HttpClientRequest request) {
// End the request with a string
request.end("some-data");
// End it with a buffer
Buffer buffer = Buffer.buffer().appendFloat(12.3f).appendInt(321);
request.end(buffer);
}
public void example41(HttpClientRequest request) {
request.setChunked(true);
// Write some chunks
for (int i = 0; i < 10; i++) {
request.write("this-is-chunk-" + i);
}
request.end();
}
public void sendForm(HttpClientRequest request) {
ClientForm form = ClientForm.form();
form.attribute("firstName", "Dale");
form.attribute("lastName", "Cooper");
// Submit the form as a form URL encoded body
request
.send(form)
.onSuccess(res -> {
// OK
});
}
public void sendMultipart(HttpClientRequest request) {
ClientForm form = ClientForm.form();
form.attribute("firstName", "Dale");
form.attribute("lastName", "Cooper");
// Submit the form as a multipart form body
request
.putHeader("content-type", "multipart/form-data")
.send(form)
.onSuccess(res -> {
// OK
});
}
public void sendMultipartWithFileUpload(HttpClientRequest request) {
ClientMultipartForm form = ClientMultipartForm.multipartForm()
.attribute("imageDescription", "a very nice image")
.binaryFileUpload(
"imageFile",
"image.jpg",
"/path/to/image",
"image/jpeg");
// Submit the form as a multipart form body
request
.send(form)
.onSuccess(res -> {
// OK
});
}
public void clientIdleTimeout(HttpClient client, int port, String host, String uri, int timeoutMS) {
Future<Buffer> fut = client
.request(new RequestOptions()
.setHost(host)
.setPort(port)
.setURI(uri)
.setIdleTimeout(timeoutMS))
.compose(request -> request.send().compose(HttpClientResponse::body));
}
public void clientConnectTimeout(HttpClient client, int port, String host, String uri, int timeoutMS) {
Future<Buffer> fut = client
.request(new RequestOptions()
.setHost(host)
.setPort(port)
.setURI(uri)
.setConnectTimeout(timeoutMS))
.compose(request -> request.send().compose(HttpClientResponse::body));
}
public void clientTimeout(HttpClient client, int port, String host, String uri, int timeoutMS) {
Future<Buffer> fut = client
.request(new RequestOptions()
.setHost(host)
.setPort(port)
.setURI(uri)
.setTimeout(timeoutMS))
.compose(request -> request.send().compose(HttpClientResponse::body));
}
public void useRequestAsStream(HttpClientRequest request) {
request.setChunked(true);
request.write("chunk-1");
request.write("chunk-2");
request.end();
}
public void setRequestExceptionHandler(HttpClientRequest request) {
request.exceptionHandler(err -> {
System.out.println("Write failure " + err.getMessage());
});
}
public void example44(HttpClientRequest request, AsyncFile file) {
request.setChunked(true);
file.pipeTo(request);
}
public void example45(HttpClientRequest request) {
// Send the request
request
.send()
.onComplete(ar2 -> {
if (ar2.succeeded()) {
HttpClientResponse response = ar2.result();
// the status code - e.g. 200 or 404
System.out.println("Status code is " + response.statusCode());
// the status message e.g. "OK" or "Not Found".
System.out.println("Status message is " + response.statusMessage());
}
});
}
public void example46(HttpClientResponse response) {
    String contentType = response.headers().get("content-type");
    // Fixed: the header name was misspelled "content-lengh", so the lookup always
    // returned null instead of the Content-Length value.
    String contentLength = response.headers().get("content-length");
}
public void example47(HttpClient client) {
client
.request(HttpMethod.GET, "some-uri")
.onComplete(ar1 -> {
if (ar1.succeeded()) {
HttpClientRequest request = ar1.result();
request
.send()
.onComplete(ar2 -> {
HttpClientResponse response = ar2.result();
response.handler(buffer -> {
System.out.println("Received a part of the response body: " + buffer);
});
});
}
});
}
public void example48(HttpClientRequest request) {
request
.send()
.onComplete(ar2 -> {
if (ar2.succeeded()) {
HttpClientResponse response = ar2.result();
// Create an empty buffer
Buffer totalBuffer = Buffer.buffer();
response.handler(buffer -> {
System.out.println("Received a part of the response body: " + buffer.length());
totalBuffer.appendBuffer(buffer);
});
response.endHandler(v -> {
// Now all the body has been read
System.out.println("Total response body length is " + totalBuffer.length());
});
}
});
}
public void example49(HttpClientRequest request) {
request
.send()
.onComplete(ar1 -> {
if (ar1.succeeded()) {
HttpClientResponse response = ar1.result();
response
.body()
.onComplete(ar2 -> {
if (ar2.succeeded()) {
Buffer body = ar2.result();
// Now all the body has been read
System.out.println("Total response body length is " + body.length());
}
});
}
});
}
private | HTTPExamples |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/headers/ReactiveClientHeadersFromBuilderTest.java | {
"start": 3253,
"end": 4090
} | class ____ {

    /** Echoes every incoming request header back to the caller as JSON. */
    @GET
    @Produces("application/json")
    public Map<String, List<String>> returnHeaderValues(@Context HttpHeaders headers) {
        return headers.getRequestHeaders();
    }

    /**
     * Builds a REST client programmatically (so the custom headers factory is wired
     * through the builder) and invokes it against the given base URI.
     */
    @Path("/call-client")
    @POST
    public Map<String, List<String>> callClient(String uri) {
        QuarkusRestClientBuilder builder = QuarkusRestClientBuilder.newBuilder()
                .baseUri(URI.create(uri))
                .clientHeadersFactory(CustomReactiveClientHeadersFactory.class)
                .register(new TestJacksonBasicMessageBodyReader());
        ReactiveClientHeadersFromBuilderTest.Client restClient =
                builder.build(ReactiveClientHeadersFromBuilderTest.Client.class);
        return restClient.getWithHeader(DIRECT_HEADER_PARAM_VAL);
    }
}
@ApplicationScoped
public static | Resource |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/env/EnvironmentCapable.java | {
"start": 838,
"end": 1934
} | interface ____ used primarily
* for performing {@code instanceof} checks in framework methods that accept BeanFactory
* instances that may or may not actually be ApplicationContext instances in order to interact
* with the environment if indeed it is available.
*
* <p>As mentioned, {@link org.springframework.context.ApplicationContext ApplicationContext}
* extends EnvironmentCapable, and thus exposes a {@link #getEnvironment()} method; however,
* {@link org.springframework.context.ConfigurableApplicationContext ConfigurableApplicationContext}
* redefines {@link org.springframework.context.ConfigurableApplicationContext#getEnvironment
* getEnvironment()} and narrows the signature to return a {@link ConfigurableEnvironment}.
* The effect is that an Environment object is 'read-only' until it is being accessed from
* a ConfigurableApplicationContext, at which point it too may be configured.
*
* @author Chris Beams
* @since 3.1
* @see Environment
* @see ConfigurableEnvironment
* @see org.springframework.context.ConfigurableApplicationContext#getEnvironment()
*/
public | is |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/builders/NamespaceHttpTests.java | {
"start": 22914,
"end": 23284
} | class ____ {
/**
 * Java-config equivalent of the XML namespace's disabled servlet-api-provision:
 * every request is permitted and servlet API integration is switched off.
 */
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
    // HttpSecurity is a mutable builder, so configuring it in two separate
    // statements is equivalent to one fluent chain.
    http.authorizeHttpRequests((requests) -> requests.anyRequest().permitAll());
    http.servletApi((api) -> api.disable());
    return http.build();
}
}
@Configuration
@EnableWebSecurity
static | ServletApiProvisionConfig |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/JobSubmitHeaders.java | {
"start": 1499,
"end": 3263
} | class ____
        implements RuntimeMessageHeaders<JobSubmitRequestBody, JobSubmitResponseBody, EmptyMessageParameters> {

    /** REST path this handler is registered under. */
    private static final String URL = "/jobs";

    private static final JobSubmitHeaders INSTANCE = new JobSubmitHeaders();

    /** Singleton; obtain via {@link #getInstance()}. */
    private JobSubmitHeaders() {}

    public static JobSubmitHeaders getInstance() {
        return INSTANCE;
    }

    @Override
    public Class<JobSubmitRequestBody> getRequestClass() {
        return JobSubmitRequestBody.class;
    }

    @Override
    public Class<JobSubmitResponseBody> getResponseClass() {
        return JobSubmitResponseBody.class;
    }

    @Override
    public HttpMethodWrapper getHttpMethod() {
        return HttpMethodWrapper.POST;
    }

    @Override
    public String getTargetRestEndpointURL() {
        return URL;
    }

    @Override
    public HttpResponseStatus getResponseStatusCode() {
        // Submission is asynchronous, hence 202 Accepted rather than 200 OK.
        return HttpResponseStatus.ACCEPTED;
    }

    @Override
    public EmptyMessageParameters getUnresolvedMessageParameters() {
        return EmptyMessageParameters.getInstance();
    }

    @Override
    public boolean acceptsFileUploads() {
        // The JobGraph, jars and distributed-cache artifacts arrive as multipart uploads.
        return true;
    }

    @Override
    public String operationId() {
        return "submitJob";
    }

    @Override
    public String getDescription() {
        return "Submits a job. This call is primarily intended to be used by the Flink client. This call expects a "
                + "multipart/form-data request that consists of file uploads for the serialized JobGraph, jars and "
                + "distributed cache artifacts and an attribute named \""
                + FileUploadHandler.HTTP_ATTRIBUTE_REQUEST
                + "\" for the JSON payload.";
    }
}
| JobSubmitHeaders |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/binding/annotations/embedded/CorpType.java | {
"start": 362,
"end": 674
} | class ____ {
// Simple JPA entity; annotations sit on the getters, so the provider uses
// property (getter/setter) access — keep annotations on accessors, not fields.
private Integer id;
private String type;
// Surrogate primary key, generated by the persistence provider.
@Id
@GeneratedValue
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
// Column name is quoted because "type" is a reserved word in some databases.
@Column(name = "`type`")
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
}
| CorpType |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/support/SecurityQueryTemplateEvaluatorTests.java | {
"start": 1566,
"end": 5843
} | class ____ extends ESTestCase {
// Tests for SecurityQueryTemplateEvaluator: rendering of DLS query templates
// with the authenticated user's properties exposed as the "_user" parameter.
private ScriptService scriptService;
@Before
public void setup() throws Exception {
scriptService = mock(ScriptService.class);
}
public void testTemplating() throws Exception {
User user = new User("_username", new String[] { "role1", "role2" }, "_full_name", "_email", Map.of("key", "value"), true);
// Stubbed template: the rendered text itself is irrelevant; the test only
// verifies the Script handed to the script service.
TemplateScript.Factory compiledTemplate = templateParams -> new TemplateScript(templateParams) {
@Override
public String execute() {
return "rendered_text";
}
};
when(scriptService.compile(any(Script.class), eq(TemplateScript.CONTEXT))).thenReturn(compiledTemplate);
XContentBuilder builder = jsonBuilder();
String query = Strings.toString(new TermQueryBuilder("field", "{{_user.username}}").toXContent(builder, ToXContent.EMPTY_PARAMS));
Script script = new Script(ScriptType.INLINE, "mustache", query, Collections.singletonMap("custom", "value"));
builder = jsonBuilder().startObject().field("template");
script.toXContent(builder, ToXContent.EMPTY_PARAMS);
String querySource = Strings.toString(builder.endObject());
SecurityQueryTemplateEvaluator.evaluateTemplate(querySource, scriptService, user);
// Capture the Script passed to ScriptService.compile and check that the
// original script's properties were preserved and "_user" params were added.
ArgumentCaptor<Script> argument = ArgumentCaptor.forClass(Script.class);
verify(scriptService).compile(argument.capture(), eq(TemplateScript.CONTEXT));
Script usedScript = argument.getValue();
assertThat(usedScript.getIdOrCode(), equalTo(script.getIdOrCode()));
assertThat(usedScript.getType(), equalTo(script.getType()));
assertThat(usedScript.getLang(), equalTo("mustache"));
assertThat(usedScript.getOptions(), equalTo(script.getOptions()));
assertThat(usedScript.getParams().size(), equalTo(2));
assertThat(usedScript.getParams().get("custom"), equalTo("value"));
// The evaluator must expose exactly these user properties under "_user".
Map<String, Object> userModel = new HashMap<>();
userModel.put("username", user.principal());
userModel.put("full_name", user.fullName());
userModel.put("email", user.email());
userModel.put("roles", Arrays.asList(user.roles()));
userModel.put("metadata", user.metadata());
assertThat(usedScript.getParams().get("_user"), equalTo(userModel));
}
public void testDocLevelSecurityTemplateWithOpenIdConnectStyleMetadata() throws Exception {
// Metadata key contains parentheses ("oidc(email)"), as produced by OIDC
// realms; the mustache engine must still resolve it.
User user = new User(
randomAlphaOfLength(8),
generateRandomStringArray(5, 5, false),
randomAlphaOfLength(9),
"sample@example.com",
Map.of("oidc(email)", "sample@example.com"),
true
);
// Delegate compilation to a real MustacheScriptEngine rather than a stub so
// the lookup of the parenthesized key is actually exercised.
final MustacheScriptEngine mustache = new MustacheScriptEngine(Settings.EMPTY);
when(scriptService.compile(any(Script.class), eq(TemplateScript.CONTEXT))).thenAnswer(inv -> {
assertThat(inv.getArguments(), arrayWithSize(2));
Script script = (Script) inv.getArguments()[0];
TemplateScript.Factory factory = mustache.compile(
script.getIdOrCode(),
script.getIdOrCode(),
TemplateScript.CONTEXT,
script.getOptions()
);
return factory;
});
String template = """
{
"template": {
"source": {
"term": {
"field": "{{_user.metadata.oidc(email)}}"
}
}
}
}""";
String evaluated = SecurityQueryTemplateEvaluator.evaluateTemplate(template, scriptService, user);
assertThat(evaluated, equalTo("""
{"term":{"field":"sample@example.com"}}"""));
}
public void testSkipTemplating() throws Exception {
// A query without a "template" wrapper must be returned untouched (same
// instance) and must never hit the script service.
XContentBuilder builder = jsonBuilder();
querySource = Strings.toString(new TermQueryBuilder("field", "value").toXContent(builder, ToXContent.EMPTY_PARAMS));
String result = SecurityQueryTemplateEvaluator.evaluateTemplate(querySource, scriptService, null);
assertThat(result, sameInstance(querySource));
verifyNoMoreInteractions(scriptService);
}
}
| SecurityQueryTemplateEvaluatorTests |
java | spring-projects__spring-security | kerberos/kerberos-core/src/main/java/org/springframework/security/kerberos/authentication/KerberosServiceRequestToken.java | {
"start": 1818,
"end": 7136
} | class ____ extends AbstractAuthenticationToken implements KerberosAuthentication {
// Authentication token carrying a raw Kerberos/SPNEGO ticket. An authenticated
// instance additionally holds the validation result and the JAAS subject.
private static final long serialVersionUID = 395488921064775014L;
private final byte[] token;
private final Object principal;
// transient: the GSS validation result is not serializable and is lost if the token is serialized.
private final transient KerberosTicketValidation ticketValidation;
// NOTE(review): remains null for tokens created via the unauthenticated constructor — callers should check.
private JaasSubjectHolder jaasSubjectHolder;
/**
* Creates an authenticated token, normally used as an output of an authentication
* provider.
* @param principal the user principal (mostly of instance <code>UserDetails</code>)
* @param ticketValidation result of ticket validation
* @param authorities the authorities which are granted to the user
* @param token the Kerberos/SPNEGO token
* @see UserDetails
*/
public KerberosServiceRequestToken(Object principal, KerberosTicketValidation ticketValidation,
Collection<? extends GrantedAuthority> authorities, byte[] token) {
super(authorities);
this.token = token;
this.principal = principal;
this.ticketValidation = ticketValidation;
this.jaasSubjectHolder = new JaasSubjectHolder(ticketValidation.subject(), ticketValidation.username());
super.setAuthenticated(true);
}
/**
* Creates an unauthenticated instance which should then be authenticated by
* <code>KerberosServiceAuthenticationProvider</code>.
* @param token Kerberos/SPNEGO token
* @see KerberosServiceAuthenticationProvider
*/
public KerberosServiceRequestToken(byte[] token) {
super(AuthorityUtils.NO_AUTHORITIES);
this.token = token;
// principal, ticketValidation and jaasSubjectHolder stay null until authentication succeeds.
this.ticketValidation = null;
this.principal = null;
}
/**
* equals() is based only on the Kerberos token
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!super.equals(obj)) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
KerberosServiceRequestToken other = (KerberosServiceRequestToken) obj;
if (!Arrays.equals(this.token, other.token)) {
return false;
}
return true;
}
/**
* Calculates hashcode based on the Kerberos token
*/
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + Arrays.hashCode(this.token);
return result;
}
@Override
public Object getCredentials() {
// The raw ticket is exposed via getToken(), not as credentials.
return null;
}
@Override
public Object getPrincipal() {
return this.principal;
}
/**
* Returns the Kerberos token
* @return the token data
*/
public byte[] getToken() {
return this.token;
}
/**
* Gets the ticket validation
* @return the ticket validation (which will be null if the token is unauthenticated)
*/
public KerberosTicketValidation getTicketValidation() {
return this.ticketValidation;
}
/**
* Determines whether an authenticated token has a response token
* @return whether a response token is available
*/
public boolean hasResponseToken() {
return this.ticketValidation != null && this.ticketValidation.responseToken() != null;
}
/**
* Gets the (Base64) encoded response token assuming one is available.
* @return encoded response token
*/
public String getEncodedResponseToken() {
if (!hasResponseToken()) {
throw new IllegalStateException("Unauthenticated or no response token");
}
return Base64.getEncoder().encodeToString(this.ticketValidation.responseToken());
}
/**
* Unwraps an encrypted message using the gss context
* @param data the data
* @param offset data offset
* @param length data length
* @return the decrypted message
* @throws PrivilegedActionException if jaas throws and error
*/
public byte[] decrypt(final byte[] data, final int offset, final int length) throws PrivilegedActionException {
// Runs under the validated JAAS subject so the GSS context has the required Kerberos credentials.
return Subject.doAs(getTicketValidation().subject(), new PrivilegedExceptionAction<byte[]>() {
public byte[] run() throws Exception {
final GSSContext context = getTicketValidation().getGssContext();
return context.unwrap(data, offset, length, new MessageProp(true));
}
});
}
/**
* Unwraps an encrypted message using the gss context
* @param data the data
* @return the decrypted message
* @throws PrivilegedActionException if jaas throws and error
*/
public byte[] decrypt(final byte[] data) throws PrivilegedActionException {
return decrypt(data, 0, data.length);
}
/**
* Wraps an message using the gss context
* @param data the data
* @param offset data offset
* @param length data length
* @return the encrypted message
* @throws PrivilegedActionException if jaas throws and error
*/
public byte[] encrypt(final byte[] data, final int offset, final int length) throws PrivilegedActionException {
// Same privileged pattern as decrypt(): wrap the payload within the subject's context.
return Subject.doAs(getTicketValidation().subject(), new PrivilegedExceptionAction<byte[]>() {
public byte[] run() throws Exception {
final GSSContext context = getTicketValidation().getGssContext();
return context.wrap(data, offset, length, new MessageProp(true));
}
});
}
/**
* Wraps an message using the gss context
* @param data the data
* @return the encrypted message
* @throws PrivilegedActionException if jaas throws and error
*/
public byte[] encrypt(final byte[] data) throws PrivilegedActionException {
return encrypt(data, 0, data.length);
}
@Override
public JaasSubjectHolder getJaasSubjectHolder() {
return this.jaasSubjectHolder;
}
}
| KerberosServiceRequestToken |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/bloom/RetouchedBloomFilter.java | {
"start": 3735,
"end": 12884
} | class ____ extends BloomFilter
implements RemoveScheme {
/**
* KeyList vector (or ElementList Vector, as defined in the paper) of false positives.
*/
List<Key>[] fpVector;
/**
* KeyList vector of keys recorded in the filter.
*/
List<Key>[] keyVector;
/**
* Ratio vector.
*/
double[] ratio;
private Random rand;
/** Default constructor - use with readFields */
public RetouchedBloomFilter() {}
/**
* Constructor
* @param vectorSize The vector size of <i>this</i> filter.
* @param nbHash The number of hash function to consider.
* @param hashType type of the hashing function (see
* {@link org.apache.hadoop.util.hash.Hash}).
*/
public RetouchedBloomFilter(int vectorSize, int nbHash, int hashType) {
super(vectorSize, nbHash, hashType);
this.rand = null;
createVector();
}
@Override
public void add(Key key) {
if (key == null) {
throw new NullPointerException("key can not be null");
}
int[] h = hash.hash(key);
hash.clear();
for (int i = 0; i < nbHash; i++) {
bits.set(h[i]);
keyVector[h[i]].add(key);
}
}
/**
* Adds a false positive information to <i>this</i> retouched Bloom filter.
* <p>
* <b>Invariant</b>: if the false positive is <code>null</code>, nothing happens.
* @param key The false positive key to add.
*/
public void addFalsePositive(Key key) {
if (key == null) {
throw new NullPointerException("key can not be null");
}
int[] h = hash.hash(key);
hash.clear();
for (int i = 0; i < nbHash; i++) {
fpVector[h[i]].add(key);
}
}
/**
* Adds a collection of false positive information to <i>this</i> retouched Bloom filter.
* @param coll The collection of false positive.
*/
public void addFalsePositive(Collection<Key> coll) {
if (coll == null) {
throw new NullPointerException("Collection<Key> can not be null");
}
for (Key k : coll) {
addFalsePositive(k);
}
}
/**
* Adds a list of false positive information to <i>this</i> retouched Bloom filter.
* @param keys The list of false positive.
*/
public void addFalsePositive(List<Key> keys) {
if (keys == null) {
throw new NullPointerException("ArrayList<Key> can not be null");
}
for (Key k : keys) {
addFalsePositive(k);
}
}
/**
* Adds an array of false positive information to <i>this</i> retouched Bloom filter.
* @param keys The array of false positive.
*/
public void addFalsePositive(Key[] keys) {
if (keys == null) {
throw new NullPointerException("Key[] can not be null");
}
for (int i = 0; i < keys.length; i++) {
addFalsePositive(keys[i]);
}
}
/**
* Performs the selective clearing for a given key.
* @param k The false positive key to remove from <i>this</i> retouched Bloom filter.
* @param scheme The selective clearing scheme to apply.
*/
public void selectiveClearing(Key k, short scheme) {
if (k == null) {
throw new NullPointerException("Key can not be null");
}
if (!membershipTest(k)) {
throw new IllegalArgumentException("Key is not a member");
}
int index = 0;
int[] h = hash.hash(k);
switch(scheme) {
case RANDOM:
index = randomRemove();
break;
case MINIMUM_FN:
index = minimumFnRemove(h);
break;
case MAXIMUM_FP:
index = maximumFpRemove(h);
break;
case RATIO:
index = ratioRemove(h);
break;
default:
throw new AssertionError("Undefined selective clearing scheme");
}
clearBit(index);
}
private int randomRemove() {
if (rand == null) {
rand = new Random();
}
return rand.nextInt(nbHash);
}
/**
* Chooses the bit position that minimizes the number of false negative generated.
* @param h The different bit positions.
* @return The position that minimizes the number of false negative generated.
*/
private int minimumFnRemove(int[] h) {
int minIndex = Integer.MAX_VALUE;
double minValue = Double.MAX_VALUE;
for (int i = 0; i < nbHash; i++) {
double keyWeight = getWeight(keyVector[h[i]]);
if (keyWeight < minValue) {
minIndex = h[i];
minValue = keyWeight;
}
}
return minIndex;
}
/**
* Chooses the bit position that maximizes the number of false positive removed.
* @param h The different bit positions.
* @return The position that maximizes the number of false positive removed.
*/
private int maximumFpRemove(int[] h) {
int maxIndex = Integer.MIN_VALUE;
double maxValue = Double.MIN_VALUE;
for (int i = 0; i < nbHash; i++) {
double fpWeight = getWeight(fpVector[h[i]]);
if (fpWeight > maxValue) {
maxValue = fpWeight;
maxIndex = h[i];
}
}
return maxIndex;
}
/**
* Chooses the bit position that minimizes the number of false negative generated while maximizing.
* the number of false positive removed.
* @param h The different bit positions.
* @return The position that minimizes the number of false negative generated while maximizing.
*/
private int ratioRemove(int[] h) {
computeRatio();
int minIndex = Integer.MAX_VALUE;
double minValue = Double.MAX_VALUE;
for (int i = 0; i < nbHash; i++) {
if (ratio[h[i]] < minValue) {
minValue = ratio[h[i]];
minIndex = h[i];
}
}
return minIndex;
}
/**
* Clears a specified bit in the bit vector and keeps up-to-date the KeyList vectors.
* @param index The position of the bit to clear.
*/
private void clearBit(int index) {
if (index < 0 || index >= vectorSize) {
throw new ArrayIndexOutOfBoundsException(index);
}
List<Key> kl = keyVector[index];
List<Key> fpl = fpVector[index];
// update key list
int listSize = kl.size();
for (int i = 0; i < listSize && !kl.isEmpty(); i++) {
removeKey(kl.get(0), keyVector);
}
kl.clear();
keyVector[index].clear();
//update false positive list
listSize = fpl.size();
for (int i = 0; i < listSize && !fpl.isEmpty(); i++) {
removeKey(fpl.get(0), fpVector);
}
fpl.clear();
fpVector[index].clear();
//update ratio
ratio[index] = 0.0;
//update bit vector
bits.clear(index);
}
/**
* Removes a given key from <i>this</i> filer.
* @param k The key to remove.
* @param vector The counting vector associated to the key.
*/
private void removeKey(Key k, List<Key>[] vector) {
if (k == null) {
throw new NullPointerException("Key can not be null");
}
if (vector == null) {
throw new NullPointerException("ArrayList<Key>[] can not be null");
}
int[] h = hash.hash(k);
hash.clear();
for (int i = 0; i < nbHash; i++) {
vector[h[i]].remove(k);
}
}
/**
* Computes the ratio A/FP.
*/
private void computeRatio() {
for (int i = 0; i < vectorSize; i++) {
double keyWeight = getWeight(keyVector[i]);
double fpWeight = getWeight(fpVector[i]);
if (keyWeight > 0 && fpWeight > 0) {
ratio[i] = keyWeight / fpWeight;
}
}
}
private double getWeight(List<Key> keyList) {
double weight = 0.0;
for (Key k : keyList) {
weight += k.getWeight();
}
return weight;
}
/**
* Creates and initialises the various vectors.
*/
@SuppressWarnings("unchecked")
private void createVector() {
fpVector = new List[vectorSize];
keyVector = new List[vectorSize];
ratio = new double[vectorSize];
for (int i = 0; i < vectorSize; i++) {
fpVector[i] = Collections.synchronizedList(new ArrayList<Key>());
keyVector[i] = Collections.synchronizedList(new ArrayList<Key>());
ratio[i] = 0.0;
}
}
// Writable
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
for (int i = 0; i < fpVector.length; i++) {
List<Key> list = fpVector[i];
out.writeInt(list.size());
for (Key k : list) {
k.write(out);
}
}
for (int i = 0; i < keyVector.length; i++) {
List<Key> list = keyVector[i];
out.writeInt(list.size());
for (Key k : list) {
k.write(out);
}
}
for (int i = 0; i < ratio.length; i++) {
out.writeDouble(ratio[i]);
}
}
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
createVector();
for (int i = 0; i < fpVector.length; i++) {
List<Key> list = fpVector[i];
int size = in.readInt();
for (int j = 0; j < size; j++) {
Key k = new Key();
k.readFields(in);
list.add(k);
}
}
for (int i = 0; i < keyVector.length; i++) {
List<Key> list = keyVector[i];
int size = in.readInt();
for (int j = 0; j < size; j++) {
Key k = new Key();
k.readFields(in);
list.add(k);
}
}
for (int i = 0; i < ratio.length; i++) {
ratio[i] = in.readDouble();
}
}
}
| RetouchedBloomFilter |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/state/TestTaskLocalStateStore.java | {
"start": 1232,
"end": 4757
} | class ____ implements TaskLocalStateStore {
private final SortedMap<Long, TaskStateSnapshot> taskStateSnapshotsByCheckpointID;
private final LocalRecoveryConfig localRecoveryConfig;
private boolean disposed;
public TestTaskLocalStateStore() {
this(TestLocalRecoveryConfig.disabled());
}
public TestTaskLocalStateStore(@Nonnull LocalRecoveryConfig localRecoveryConfig) {
this.localRecoveryConfig = localRecoveryConfig;
this.taskStateSnapshotsByCheckpointID = new TreeMap<>();
this.disposed = false;
}
@Override
public void storeLocalState(long checkpointId, @Nullable TaskStateSnapshot localState) {
Preconditions.checkState(!disposed);
taskStateSnapshotsByCheckpointID.put(checkpointId, localState);
}
@Nullable
@Override
public TaskStateSnapshot retrieveLocalState(long checkpointID) {
Preconditions.checkState(!disposed);
return taskStateSnapshotsByCheckpointID.get(checkpointID);
}
public void dispose() {
if (!disposed) {
disposed = true;
for (TaskStateSnapshot stateSnapshot : taskStateSnapshotsByCheckpointID.values()) {
try {
stateSnapshot.discardState();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
taskStateSnapshotsByCheckpointID.clear();
}
}
@Nonnull
@Override
public LocalRecoveryConfig getLocalRecoveryConfig() {
Preconditions.checkState(!disposed);
return Preconditions.checkNotNull(localRecoveryConfig);
}
@Override
public void confirmCheckpoint(long confirmedCheckpointId) {
Preconditions.checkState(!disposed);
Iterator<Map.Entry<Long, TaskStateSnapshot>> iterator =
taskStateSnapshotsByCheckpointID.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<Long, TaskStateSnapshot> entry = iterator.next();
if (entry.getKey() < confirmedCheckpointId) {
iterator.remove();
try {
entry.getValue().discardState();
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
break;
}
}
}
@Override
public void abortCheckpoint(long abortedCheckpointId) {
Preconditions.checkState(!disposed);
Iterator<Map.Entry<Long, TaskStateSnapshot>> iterator =
taskStateSnapshotsByCheckpointID.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<Long, TaskStateSnapshot> entry = iterator.next();
if (entry.getKey() == abortedCheckpointId) {
iterator.remove();
try {
entry.getValue().discardState();
} catch (Exception e) {
throw new RuntimeException(e);
}
} else if (entry.getKey() > abortedCheckpointId) {
break;
}
}
}
@Override
public void pruneMatchingCheckpoints(LongPredicate matcher) {
taskStateSnapshotsByCheckpointID.keySet().removeIf(matcher::test);
}
public boolean isDisposed() {
return disposed;
}
public SortedMap<Long, TaskStateSnapshot> getTaskStateSnapshotsByCheckpointID() {
return taskStateSnapshotsByCheckpointID;
}
}
| TestTaskLocalStateStore |
java | quarkusio__quarkus | integration-tests/gradle/src/test/java/io/quarkus/gradle/MultiModuleKotlinProjectBuildTest.java | {
"start": 152,
"end": 669
} | class ____ extends QuarkusGradleWrapperTestBase {
@Test
public void testBasicMultiModuleBuild() throws Exception {
final File projectDir = getProjectDir("multi-module-kotlin-project");
final BuildResult build = runGradleWrapper(projectDir, "clean", "build");
assertThat(BuildResult.isSuccessful(build.getTasks().get(":quarkusGenerateCode"))).isTrue();
assertThat(BuildResult.isSuccessful(build.getTasks().get(":compileKotlin"))).isTrue();
}
}
| MultiModuleKotlinProjectBuildTest |
java | spring-projects__spring-framework | spring-beans/src/jmh/java/org/springframework/beans/AbstractPropertyAccessorBenchmark.java | {
"start": 1246,
"end": 1329
} | class ____ {
@State(Scope.Benchmark)
public static | AbstractPropertyAccessorBenchmark |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging/deployment/src/test/java/io/quarkus/smallrye/reactivemessaging/signatures/PublisherSignatureTest.java | {
"start": 4171,
"end": 4558
} | class ____ extends Spy {
@Outgoing("B")
public Publisher<Integer> produce() {
return ReactiveStreams.of(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
.buildRs();
}
@Incoming("B")
public void consume(Integer item) {
items.add(item);
}
}
@ApplicationScoped
public static | BeanProducingAPublisherOfPayload |
java | hibernate__hibernate-orm | hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/lazy/LazyFieldsTest.java | {
"start": 951,
"end": 1529
} | class ____ {
private static final Long ID = 1L;
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
TestEntity testEntity = new TestEntity( ID, "test data", "lazyString", "group A" );
session.persist( testEntity );
}
);
}
@Test
public void testUpdate(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
TestEntity testEntity = session.find( TestEntity.class, ID );
testEntity.setData( "modified test data" );
}
);
}
@Entity(name = "TestEntity")
public static | LazyFieldsTest |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsFormatCommentTest22.java | {
"start": 121,
"end": 809
} | class ____ extends TestCase {
public void test_if() throws Exception {
String sql = "select if(a>0,1, 0) from dual";
assertEquals("SELECT IF(a > 0, 1, 0)"
+ "\nFROM dual", SQLUtils.formatOdps(sql));
}
public void test_coalesce() throws Exception {
String sql = "select coalesce(f1,f2) from dual";
assertEquals("SELECT COALESCE(f1, f2)"
+ "\nFROM dual", SQLUtils.formatOdps(sql));
}
public void test_count() throws Exception {
String sql = "select count(*) from dual";
assertEquals("SELECT count(*)"
+ "\nFROM dual", SQLUtils.formatOdps(sql));
}
}
| OdpsFormatCommentTest22 |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/DynamicRouterEndpointBuilderFactory.java | {
"start": 27339,
"end": 29860
} | interface ____
extends
EndpointProducerBuilder {
default DynamicRouterEndpointBuilder basic() {
return (DynamicRouterEndpointBuilder) this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedDynamicRouterEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer (advanced)
*
* @param lazyStartProducer the value to set
* @return the dsl builder
*/
default AdvancedDynamicRouterEndpointBuilder lazyStartProducer(String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
public | AdvancedDynamicRouterEndpointBuilder |
java | apache__flink | flink-python/src/main/java/org/apache/flink/python/env/embedded/EmbeddedPythonEnvironment.java | {
"start": 1117,
"end": 1580
} | class ____ implements PythonEnvironment {
private final PythonInterpreterConfig config;
private final Map<String, String> env;
public EmbeddedPythonEnvironment(PythonInterpreterConfig config, Map<String, String> env) {
this.config = config;
this.env = env;
}
public PythonInterpreterConfig getConfig() {
return config;
}
public Map<String, String> getEnv() {
return env;
}
}
| EmbeddedPythonEnvironment |
java | apache__thrift | lib/javame/src/org/apache/thrift/TBaseHelper.java | {
"start": 923,
"end": 5878
} | class ____ {
public static int compareTo(boolean a, boolean b) {
return (a == b) ? 0 : (a ? 1 : -1);
}
public static int compareTo(Boolean a, Boolean b) {
return (a.booleanValue() == b.booleanValue()) ? 0 : (a.booleanValue() ? 1 : -1);
}
public static int compareTo(Boolean a, boolean b) {
return (a.booleanValue() == b) ? 0 : (a.booleanValue() ? 1 : -1);
}
public static Boolean booleanValueOf(boolean b) {
return (b ? Boolean.TRUE : Boolean.FALSE);
}
public static int compareTo(byte a, byte b) {
if (a < b) {
return -1;
} else if (b < a) {
return 1;
} else {
return 0;
}
}
public static int compareTo(short a, short b) {
if (a < b) {
return -1;
} else if (b < a) {
return 1;
} else {
return 0;
}
}
public static int compareTo(int a, int b) {
if (a < b) {
return -1;
} else if (b < a) {
return 1;
} else {
return 0;
}
}
public static int compareTo(long a, long b) {
if (a < b) {
return -1;
} else if (b < a) {
return 1;
} else {
return 0;
}
}
public static int compareTo(double a, double b) {
if (a < b) {
return -1;
} else if (b < a) {
return 1;
} else {
return 0;
}
}
public static int compareTo(String a, String b) {
return a.compareTo(b);
}
public static int compareTo(byte[] a, byte[] b) {
int sizeCompare = compareTo(a.length, b.length);
if (sizeCompare != 0) {
return sizeCompare;
}
for (int i = 0; i < a.length; i++) {
int byteCompare = compareTo(a, b);
if (byteCompare != 0) {
return byteCompare;
}
}
return 0;
}
public static int compareTo(Object a, Object b) {
if (a instanceof Vector) {
return compareTo((Vector)a, (Vector)b);
} if (a instanceof Hashtable) {
return compareTo((Hashtable)a, (Hashtable)b);
} else {
return ((TBase)a).compareTo(b);
}
}
public static int compareTo(Vector a, Vector b) {
int lastComparison = compareTo(a.size(), b.size());
if (lastComparison != 0) {
return lastComparison;
}
for (int i = 0; i < a.size(); i++) {
Object oA = a.elementAt(i);
Object oB = b.elementAt(i);
lastComparison = compareTo(oA, oB);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public static int compareTo(Hashtable a, Hashtable b) {
int lastComparison = compareTo(a.size(), b.size());
if (lastComparison != 0) {
return lastComparison;
}
Enumeration enumA = a.keys();
Enumeration enumB = b.keys();
while (lastComparison == 0 && enumA.hasMoreElements()) {
Object keyA = enumA.nextElement();
Object keyB = enumB.nextElement();
lastComparison = compareTo(keyA, keyB);
if (lastComparison == 0) {
lastComparison = compareTo(a.get(keyA), b.get(keyB));
}
}
return lastComparison;
}
public static int compareTo(TEnum a, TEnum b) {
return compareTo(a.getValue(), b.getValue());
}
/*
public static int compareTo(List a, List b) {
int lastComparison = compareTo(a.size(), b.size());
if (lastComparison != 0) {
return lastComparison;
}
for (int i = 0; i < a.size(); i++) {
Object oA = a.get(i);
Object oB = b.get(i);
if (oA instanceof List) {
lastComparison = compareTo((List) oA, (List) oB);
} else {
lastComparison = compareTo((Comparable) oA, (Comparable) oB);
}
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
*/
public static void toString(byte[] bytes, StringBuffer sb) {
toString(bytes, 0, bytes.length, sb);
}
public static void toString(byte[] buf, int arrayOffset, int origLimit, StringBuffer sb) {
int limit = (origLimit - arrayOffset > 128) ? arrayOffset + 128 : origLimit;
for (int i = arrayOffset; i < limit; i++) {
if (i > arrayOffset) {
sb.append(" ");
}
sb.append(paddedByteString(buf[i]));
}
if (origLimit != limit) {
sb.append("...");
}
}
public static String paddedByteString(byte b) {
int extended = (b | 0x100) & 0x1ff;
return Integer.toHexString(extended).toUpperCase().substring(1);
}
}
| TBaseHelper |
java | apache__flink | flink-end-to-end-tests/flink-metrics-reporter-prometheus-test/src/test/java/org/apache/flink/metrics/prometheus/tests/PrometheusReporterEndToEndITCase.java | {
"start": 11644,
"end": 12596
} | class ____ {
private final String jarLocationDescription;
private final Consumer<FlinkResourceSetup.FlinkResourceSetupBuilder> builderSetup;
private TestParams(
String jarLocationDescription,
Consumer<FlinkResourceSetup.FlinkResourceSetupBuilder> builderSetup) {
this.jarLocationDescription = jarLocationDescription;
this.builderSetup = builderSetup;
}
public static TestParams from(
String jarLocationDesription,
Consumer<FlinkResourceSetup.FlinkResourceSetupBuilder> builderSetup) {
return new TestParams(jarLocationDesription, builderSetup);
}
public Consumer<FlinkResourceSetup.FlinkResourceSetupBuilder> getBuilderSetup() {
return builderSetup;
}
@Override
public String toString() {
return jarLocationDescription;
}
}
}
| TestParams |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/BasicDeserializerFactory.java | {
"start": 80033,
"end": 83101
} | class ____ {
// We do some defaulting for abstract Collection classes and
// interfaces, to avoid having to use exact types or annotations in
// cases where the most common concrete Collection will do.
final static HashMap<String, Class<? extends Collection>> _collectionFallbacks;
static {
HashMap<String, Class<? extends Collection>> fallbacks = new HashMap<>();
final Class<? extends Collection> DEFAULT_LIST = ArrayList.class;
final Class<? extends Collection> DEFAULT_SET = HashSet.class;
fallbacks.put(Collection.class.getName(), DEFAULT_LIST);
fallbacks.put(List.class.getName(), DEFAULT_LIST);
fallbacks.put(Set.class.getName(), DEFAULT_SET);
fallbacks.put(SortedSet.class.getName(), TreeSet.class);
fallbacks.put(Queue.class.getName(), LinkedList.class);
// 09-Feb-2019, tatu: How did we miss these? Related in [databind#2251] problem
fallbacks.put(AbstractList.class.getName(), DEFAULT_LIST);
fallbacks.put(AbstractSet.class.getName(), DEFAULT_SET);
// 09-Feb-2019, tatu: And more esoteric types added in JDK6
fallbacks.put(Deque.class.getName(), LinkedList.class);
fallbacks.put(NavigableSet.class.getName(), TreeSet.class);
// Sequenced types added in JDK21
fallbacks.put("java.util.SequencedCollection", DEFAULT_LIST);
fallbacks.put("java.util.SequencedSet", LinkedHashSet.class);
_collectionFallbacks = fallbacks;
}
// We do some defaulting for abstract Map classes and
// interfaces, to avoid having to use exact types or annotations in
// cases where the most common concrete Maps will do.
final static HashMap<String, Class<? extends Map>> _mapFallbacks;
static {
HashMap<String, Class<? extends Map>> fallbacks = new HashMap<>();
final Class<? extends Map> DEFAULT_MAP = LinkedHashMap.class;
fallbacks.put(Map.class.getName(), DEFAULT_MAP);
fallbacks.put(AbstractMap.class.getName(), DEFAULT_MAP);
fallbacks.put(ConcurrentMap.class.getName(), ConcurrentHashMap.class);
fallbacks.put(SortedMap.class.getName(), TreeMap.class);
fallbacks.put(java.util.NavigableMap.class.getName(), TreeMap.class);
fallbacks.put(java.util.concurrent.ConcurrentNavigableMap.class.getName(),
java.util.concurrent.ConcurrentSkipListMap.class);
// Sequenced types added in JDK21
fallbacks.put("java.util.SequencedMap", LinkedHashMap.class);
_mapFallbacks = fallbacks;
}
public static Class<?> findCollectionFallback(JavaType type) {
return _collectionFallbacks.get(type.getRawClass().getName());
}
public static Class<?> findMapFallback(JavaType type) {
return _mapFallbacks.get(type.getRawClass().getName());
}
}
}
| ContainerDefaultMappings |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/TestKeyConverters.java | {
"start": 1277,
"end": 5401
} | class ____ {
@Test
void testAppIdKeyConverter() {
AppIdKeyConverter appIdKeyConverter = new AppIdKeyConverter();
long currentTs = System.currentTimeMillis();
ApplicationId appId1 = ApplicationId.newInstance(currentTs, 1);
ApplicationId appId2 = ApplicationId.newInstance(currentTs, 2);
ApplicationId appId3 = ApplicationId.newInstance(currentTs + 300, 1);
String appIdStr1 = appId1.toString();
String appIdStr2 = appId2.toString();
String appIdStr3 = appId3.toString();
byte[] appIdBytes1 = appIdKeyConverter.encode(appIdStr1);
byte[] appIdBytes2 = appIdKeyConverter.encode(appIdStr2);
byte[] appIdBytes3 = appIdKeyConverter.encode(appIdStr3);
// App ids' should be encoded in a manner wherein descending order
// is maintained.
assertTrue(
Bytes.compareTo(appIdBytes1, appIdBytes2) > 0
&& Bytes.compareTo(appIdBytes1, appIdBytes3) > 0
&& Bytes.compareTo(appIdBytes2, appIdBytes3) > 0,
"Ordering of app ids' is incorrect");
String decodedAppId1 = appIdKeyConverter.decode(appIdBytes1);
String decodedAppId2 = appIdKeyConverter.decode(appIdBytes2);
String decodedAppId3 = appIdKeyConverter.decode(appIdBytes3);
assertEquals(appIdStr1, decodedAppId1);
assertEquals(appIdStr2, decodedAppId2);
assertEquals(appIdStr3, decodedAppId3);
}
@Test
void testEventColumnNameConverter() {
String eventId = "=foo_=eve=nt=";
byte[] valSepBytes = Bytes.toBytes(Separator.VALUES.getValue());
byte[] maxByteArr =
Bytes.createMaxByteArray(Bytes.SIZEOF_LONG - valSepBytes.length);
byte[] ts = Bytes.add(valSepBytes, maxByteArr);
Long eventTs = Bytes.toLong(ts);
byte[] byteEventColName =
new EventColumnName(eventId, eventTs, null).getColumnQualifier();
KeyConverter<EventColumnName> eventColumnNameConverter =
new EventColumnNameConverter();
EventColumnName eventColName =
eventColumnNameConverter.decode(byteEventColName);
assertEquals(eventId, eventColName.getId());
assertEquals(eventTs, eventColName.getTimestamp());
assertNull(eventColName.getInfoKey());
String infoKey = "f=oo_event_in=fo=_key";
byteEventColName =
new EventColumnName(eventId, eventTs, infoKey).getColumnQualifier();
eventColName = eventColumnNameConverter.decode(byteEventColName);
assertEquals(eventId, eventColName.getId());
assertEquals(eventTs, eventColName.getTimestamp());
assertEquals(infoKey, eventColName.getInfoKey());
}
@Test
void testLongKeyConverter() {
LongKeyConverter longKeyConverter = new LongKeyConverter();
confirmLongKeyConverter(longKeyConverter, Long.MIN_VALUE);
confirmLongKeyConverter(longKeyConverter, -1234567890L);
confirmLongKeyConverter(longKeyConverter, -128L);
confirmLongKeyConverter(longKeyConverter, -127L);
confirmLongKeyConverter(longKeyConverter, -1L);
confirmLongKeyConverter(longKeyConverter, 0L);
confirmLongKeyConverter(longKeyConverter, 1L);
confirmLongKeyConverter(longKeyConverter, 127L);
confirmLongKeyConverter(longKeyConverter, 128L);
confirmLongKeyConverter(longKeyConverter, 1234567890L);
confirmLongKeyConverter(longKeyConverter, Long.MAX_VALUE);
}
private void confirmLongKeyConverter(LongKeyConverter longKeyConverter,
Long testValue) {
Long decoded = longKeyConverter.decode(longKeyConverter.encode(testValue));
assertEquals(testValue, decoded);
}
@Test
void testStringKeyConverter() {
StringKeyConverter stringKeyConverter = new StringKeyConverter();
String phrase = "QuackAttack now!";
for (int i = 0; i < phrase.length(); i++) {
String sub = phrase.substring(i, phrase.length());
confirmStrignKeyConverter(stringKeyConverter, sub);
confirmStrignKeyConverter(stringKeyConverter, sub + sub);
}
}
private void confirmStrignKeyConverter(StringKeyConverter stringKeyConverter,
String testValue) {
String decoded =
stringKeyConverter.decode(stringKeyConverter.encode(testValue));
assertEquals(testValue, decoded);
}
}
| TestKeyConverters |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/handler/invocation/HandlerMethodArgumentResolver.java | {
"start": 840,
"end": 1006
} | interface ____ resolving method parameters into argument values
* in the context of a given {@link Message}.
*
* @author Rossen Stoyanchev
* @since 4.0
*/
public | for |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/Spr16179Tests.java | {
"start": 894,
"end": 1898
} | class ____ {
@Test
void repro() {
try (AnnotationConfigApplicationContext bf = new AnnotationConfigApplicationContext(AssemblerConfig.class, AssemblerInjection.class)) {
assertThat(bf.getBean(AssemblerInjection.class).assembler0).isSameAs(bf.getBean("someAssembler"));
assertThat(bf.getBean(AssemblerInjection.class).assembler1).isNull();
assertThat(bf.getBean(AssemblerInjection.class).assembler2).isSameAs(bf.getBean("pageAssembler"));
assertThat(bf.getBean(AssemblerInjection.class).assembler3).isSameAs(bf.getBean("pageAssembler"));
assertThat(bf.getBean(AssemblerInjection.class).assembler4).isSameAs(bf.getBean("pageAssembler"));
assertThat(bf.getBean(AssemblerInjection.class).assembler5).isSameAs(bf.getBean("pageAssembler"));
assertThat(bf.getBean(AssemblerInjection.class).assembler6).isSameAs(bf.getBean("pageAssembler"));
assertThat(bf.getBean(AssemblerInjection.class).assembler7).isSameAs(bf.getBean("pageAssembler"));
}
}
@Configuration
static | Spr16179Tests |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/io/support/PropertySourceProcessorTests.java | {
"start": 2609,
"end": 3470
} | class ____ {
@Test
void processorFailsOnPlaceholderResolutionException() {
assertProcessorFailsOnError(PlaceholderResolutionExceptionPropertySourceFactory.class, PlaceholderResolutionException.class);
}
@Test
void processorFailsOnFileNotFoundException() {
assertProcessorFailsOnError(FileNotFoundExceptionPropertySourceFactory.class, FileNotFoundException.class);
}
private void assertProcessorFailsOnError(
Class<? extends PropertySourceFactory> factoryClass, Class<? extends Throwable> exceptionType) {
PropertySourceDescriptor descriptor =
new PropertySourceDescriptor(List.of(PROPS_FILE), false, null, factoryClass, null);
assertThatExceptionOfType(exceptionType).isThrownBy(() -> processor.processPropertySource(descriptor));
assertThat(environment.getPropertySources()).hasSize(2);
}
}
@Nested
| FailOnErrorTests |
java | apache__camel | components/camel-stream/src/test/java/org/apache/camel/component/stream/StreamHeaderTest.java | {
"start": 2109,
"end": 2282
} | class ____ extends OutputStream {
@Override
public void write(int b) {
sb.append((char) b);
}
}
// END SNIPPET: e1
}
| MyOutputStream |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/ExitCodeEvent.java | {
"start": 876,
"end": 1345
} | class ____ extends ApplicationEvent {
private final int exitCode;
/**
* Create a new {@link ExitCodeEvent} instance.
* @param source the source of the event
* @param exitCode the exit code
*/
public ExitCodeEvent(Object source, int exitCode) {
super(source);
this.exitCode = exitCode;
}
/**
* Return the exit code that will be used to exit the JVM.
* @return the exit code
*/
public int getExitCode() {
return this.exitCode;
}
}
| ExitCodeEvent |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/resourceplugin/deviceframework/TestDeviceMappingManager.java | {
"start": 12828,
"end": 13716
} | class ____ implements Callable<Integer> {
private DeviceResourceHandlerImpl deviceResourceHandler;
private Container container;
private boolean doCleanup;
private int cId;
MyContainerLaunch(DeviceResourceHandlerImpl dri,
Container c, int id, boolean cleanup) {
deviceResourceHandler = dri;
container = c;
doCleanup = cleanup;
cId = id;
}
@Override
public Integer call() throws Exception {
try {
deviceResourceHandler.preStart(container);
if (doCleanup) {
int seconds = new Random().nextInt(5);
LOG.info("sleep " + seconds);
Thread.sleep(seconds * 1000);
deviceResourceHandler.postComplete(getContainerId(cId));
}
} catch (ResourceHandlerException e) {
e.printStackTrace();
}
return 0;
}
}
private static | MyContainerLaunch |
java | apache__camel | components/camel-ai/camel-djl/src/main/java/org/apache/camel/component/djl/model/tabular/ZooLinearRegressionPredictor.java | {
"start": 1014,
"end": 1279
} | class ____ extends AbstractPredictor {
public ZooLinearRegressionPredictor(DJLEndpoint endpoint) {
super(endpoint);
}
@Override
public void process(Exchange exchange) throws Exception {
// TODO: impl
}
}
| ZooLinearRegressionPredictor |
java | quarkusio__quarkus | extensions/resteasy-classic/resteasy-client/deployment/src/test/java/io/quarkus/restclient/basic/ClientWithImplementationsTest.java | {
"start": 409,
"end": 978
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MyClient.class, MyImplementation.class))
.withConfigurationResource("client-with-implementations.properties");
@RestClient
MyClient client;
@Test
public void testClientBeanHasBeenCreated() {
Assertions.assertEquals("hello", client.get());
}
@Path("/client")
@RegisterRestClient(configKey = "my-client")
public | ClientWithImplementationsTest |
java | playframework__playframework | persistence/play-java-jdbc/src/main/java/play/db/DBModule.java | {
"start": 567,
"end": 1861
} | class ____ extends Module {
private static final Logger logger = LoggerFactory.getLogger(DBModule.class);
@Override
public List<Binding<?>> bindings(final Environment environment, final Config config) {
String dbKey = config.getString("play.db.config");
String defaultDb = config.getString("play.db.default");
ImmutableList.Builder<Binding<?>> list = new ImmutableList.Builder<>();
list.add(bindClass(ConnectionPool.class).to(DefaultConnectionPool.class));
list.add(bindClass(DBApi.class).to(DefaultDBApi.class));
try {
Set<String> dbs = config.getConfig(dbKey).root().keySet();
for (String db : dbs) {
list.add(
bindClass(Database.class).qualifiedWith(named(db)).to(new NamedDatabaseProvider(db)));
}
if (dbs.contains(defaultDb)) {
list.add(
bindClass(Database.class)
.to(bindClass(Database.class).qualifiedWith(named(defaultDb))));
}
} catch (com.typesafe.config.ConfigException.Missing ex) {
logger.warn("Configuration not found for database: {}", ex.getMessage());
}
return list.build();
}
private NamedDatabase named(String name) {
return new NamedDatabaseImpl(name);
}
/** Inject provider for named databases. */
public static | DBModule |
java | apache__camel | dsl/camel-endpointdsl/src/test/java/org/apache/camel/builder/endpoint/SedaToDSimpleExpressionTest.java | {
"start": 1029,
"end": 1891
} | class ____ extends BaseEndpointDslTest {
@EndpointInject(value = "mock:result")
private MockEndpoint result;
@Override
protected RouteBuilder createRouteBuilder() throws Exception {
return new EndpointRouteBuilder() {
@Override
public void configure() throws Exception {
from(direct("start")).toD(seda("${exchangeProperty.whereTo}").advanced().blockWhenFull(true));
from("seda:cheese").to("mock:result");
}
};
}
@Test
public void test() throws Exception {
MockEndpoint resultEndpoint = getMockEndpoint("mock:result");
resultEndpoint.expectedMessageCount(1);
template.sendBodyAndProperty("direct:start", "Hello World", "whereTo", "cheese");
MockEndpoint.assertIsSatisfied(context);
}
}
| SedaToDSimpleExpressionTest |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/SystemUtils.java | {
"start": 51576,
"end": 52085
} | class ____ loaded.
* </p>
*
* @since 3.4
*/
public static final boolean IS_OS_MAC_OSX_CHEETAH = getOsMatches("Mac OS X", "10.0");
/**
* The constant {@code true} if this is macOS X Puma.
* <p>
* The value depends on the value of the {@link #OS_NAME} and {@link #OS_VERSION} constants.
* </p>
* <p>
* The value is {@code false} if {@link #OS_NAME} or {@link #OS_VERSION} is {@code null}.
* </p>
* <p>
* This value is initialized when the | is |
java | eclipse-vertx__vert.x | vertx-core/src/main/java/io/vertx/core/http/HttpClientConnection.java | {
"start": 991,
"end": 1538
} | interface ____ extends HttpConnection, HttpClient {
/**
* @return the number of active request/response (streams)
*/
long activeStreams();
/**
* @return the max number of concurrent active streams this connection can handle
*/
long maxActiveStreams();
@Override
default Future<Void> shutdown() {
return HttpConnection.super.shutdown();
}
@Override
Future<Void> shutdown(long timeout, TimeUnit unit);
@Override
default Future<Void> close() {
return HttpConnection.super.close();
}
}
| HttpClientConnection |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/server/reactive/ServerHttpRequestTests.java | {
"start": 9050,
"end": 9735
} | class ____ extends MockHttpServletRequest {
TestHttpServletRequest(URI uri) {
super("GET", uri.getRawPath());
if (uri.getScheme() != null) {
setScheme(uri.getScheme());
}
if (uri.getHost() != null) {
setServerName(uri.getHost());
}
if (uri.getPort() != -1) {
setServerPort(uri.getPort());
}
if (uri.getRawQuery() != null) {
setQueryString(uri.getRawQuery());
}
}
@Override
public ServletInputStream getInputStream() {
return new DelegatingServletInputStream(new ByteArrayInputStream(new byte[0])) {
@Override
public void setReadListener(ReadListener readListener) {
// Ignore
}
};
}
}
}
| TestHttpServletRequest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/MapHost.java | {
"start": 1193,
"end": 2889
} | enum ____ {
IDLE, // No map outputs available
BUSY, // Map outputs are being fetched
PENDING, // Known map outputs which need to be fetched
PENALIZED // Host penalized due to shuffle failures
}
private State state = State.IDLE;
private final String hostName;
private final String baseUrl;
private List<TaskAttemptID> maps = new ArrayList<TaskAttemptID>();
public MapHost(String hostName, String baseUrl) {
this.hostName = hostName;
this.baseUrl = baseUrl;
}
public State getState() {
return state;
}
public String getHostName() {
return hostName;
}
public String getBaseUrl() {
return baseUrl;
}
public synchronized void addKnownMap(TaskAttemptID mapId) {
maps.add(mapId);
if (state == State.IDLE) {
state = State.PENDING;
}
}
public synchronized List<TaskAttemptID> getAndClearKnownMaps() {
List<TaskAttemptID> currentKnownMaps = maps;
maps = new ArrayList<TaskAttemptID>();
return currentKnownMaps;
}
public synchronized void markBusy() {
state = State.BUSY;
}
public synchronized int getNumKnownMapOutputs() {
return maps.size();
}
/**
* Called when the node is done with its penalty or done copying.
* @return the host's new state
*/
public synchronized State markAvailable() {
if (maps.isEmpty()) {
state = State.IDLE;
} else {
state = State.PENDING;
}
return state;
}
@Override
public String toString() {
return hostName;
}
/**
* Mark the host as penalized
*/
public synchronized void penalize() {
state = State.PENALIZED;
}
}
| State |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/short2darray/Short2DArrayAssert_hasNumberOfRows_Test.java | {
"start": 842,
"end": 1188
} | class ____ extends Short2DArrayAssertBaseTest {
@Override
protected Short2DArrayAssert invoke_api_method() {
return assertions.hasNumberOfRows(1);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertNumberOfRows(getInfo(assertions), getActual(assertions), 1);
}
}
| Short2DArrayAssert_hasNumberOfRows_Test |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/mapping/MetaAttributable.java | {
"start": 248,
"end": 437
} | interface ____ {
Map<String, MetaAttribute> getMetaAttributes();
void setMetaAttributes(Map<String, MetaAttribute> metas);
MetaAttribute getMetaAttribute(String name);
}
| MetaAttributable |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/commons/util/ReflectionUtilsTests.java | {
"start": 28413,
"end": 30028
} | class ____ {
@SuppressWarnings("DataFlowIssue")
@Test
void tryToGetResourcePreconditions() {
assertPreconditionViolationFor(() -> ReflectionUtils.tryToGetResources(""));
assertPreconditionViolationFor(() -> ReflectionUtils.tryToGetResources(" "));
assertPreconditionViolationFor(() -> ReflectionUtils.tryToGetResources(null));
assertPreconditionViolationFor(
() -> ReflectionUtils.tryToGetResources("org/junit/platform/commons/example.resource", null));
}
@Test
void tryToGetResource() {
var tryToGetResource = ReflectionUtils.tryToGetResources("org/junit/platform/commons/example.resource");
var resource = assertDoesNotThrow(tryToGetResource::get);
assertAll( //
() -> assertThat(resource).hasSize(1), //
() -> assertThat(resource).extracting(Resource::getName) //
.containsExactly("org/junit/platform/commons/example.resource"));
}
@Test
void tryToGetResourceWithPrefixedSlash() {
var tryToGetResource = ReflectionUtils.tryToGetResources("/org/junit/platform/commons/example.resource");
var resource = assertDoesNotThrow(tryToGetResource::get);
assertAll( //
() -> assertThat(resource).hasSize(1), //
() -> assertThat(resource).extracting(Resource::getName) //
.containsExactly("org/junit/platform/commons/example.resource"));
}
@Test
void tryToGetResourceWhenResourceNotFound() {
var tryToGetResource = ReflectionUtils.tryToGetResources("org/junit/platform/commons/no-such.resource");
var resource = assertDoesNotThrow(tryToGetResource::get);
assertThat(resource).isEmpty();
}
}
@Nested
| ResourceLoadingTests |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java | {
"start": 1602,
"end": 7143
} | class ____ extends ESTestCase {
public void testSerialization() throws Exception {
SearchScrollRequest searchScrollRequest = createSearchScrollRequest();
try (BytesStreamOutput output = new BytesStreamOutput()) {
searchScrollRequest.writeTo(output);
try (StreamInput in = output.bytes().streamInput()) {
SearchScrollRequest deserializedRequest = new SearchScrollRequest(in);
assertEquals(deserializedRequest, searchScrollRequest);
assertEquals(deserializedRequest.hashCode(), searchScrollRequest.hashCode());
assertNotSame(deserializedRequest, searchScrollRequest);
}
}
}
public void testInternalScrollSearchRequestSerialization() throws IOException {
SearchScrollRequest searchScrollRequest = createSearchScrollRequest();
InternalScrollSearchRequest internalScrollSearchRequest = new InternalScrollSearchRequest(
searchScrollRequest,
new ShardSearchContextId(UUIDs.randomBase64UUID(), randomLong())
);
try (BytesStreamOutput output = new BytesStreamOutput()) {
internalScrollSearchRequest.writeTo(output);
try (StreamInput in = output.bytes().streamInput()) {
InternalScrollSearchRequest deserializedRequest = new InternalScrollSearchRequest(in);
assertEquals(deserializedRequest.contextId().getId(), internalScrollSearchRequest.contextId().getId());
assertEquals(deserializedRequest.scroll(), internalScrollSearchRequest.scroll());
assertNotSame(deserializedRequest, internalScrollSearchRequest);
}
}
}
public void testFromXContent() throws Exception {
SearchScrollRequest searchScrollRequest = new SearchScrollRequest();
if (randomBoolean()) {
// test that existing values get overridden
searchScrollRequest = createSearchScrollRequest();
}
try (
XContentParser parser = createParser(
XContentFactory.jsonBuilder().startObject().field("scroll_id", "SCROLL_ID").field("scroll", "1m").endObject()
)
) {
searchScrollRequest.fromXContent(parser);
}
assertEquals("SCROLL_ID", searchScrollRequest.scrollId());
assertEquals(TimeValue.parseTimeValue("1m", null, "scroll"), searchScrollRequest.scroll());
}
public void testFromXContentWithUnknownParamThrowsException() throws Exception {
SearchScrollRequest searchScrollRequest = new SearchScrollRequest();
XContentParser invalidContent = createParser(
XContentFactory.jsonBuilder().startObject().field("scroll_id", "value_2").field("unknown", "keyword").endObject()
);
Exception e = expectThrows(IllegalArgumentException.class, () -> searchScrollRequest.fromXContent(invalidContent));
assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]"));
}
public void testToXContent() throws IOException {
SearchScrollRequest searchScrollRequest = new SearchScrollRequest();
searchScrollRequest.scrollId("SCROLL_ID");
searchScrollRequest.scroll(TimeValue.timeValueMinutes(1));
try (XContentBuilder builder = JsonXContent.contentBuilder()) {
searchScrollRequest.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("""
{"scroll_id":"SCROLL_ID","scroll":"1m"}""", Strings.toString(builder));
}
}
public void testToAndFromXContent() throws IOException {
XContentType xContentType = randomFrom(XContentType.values());
boolean humanReadable = randomBoolean();
SearchScrollRequest originalRequest = createSearchScrollRequest();
BytesReference originalBytes = toShuffledXContent(originalRequest, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
SearchScrollRequest parsedRequest = new SearchScrollRequest();
try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) {
parsedRequest.fromXContent(parser);
}
assertEquals(originalRequest, parsedRequest);
BytesReference parsedBytes = XContentHelper.toXContent(parsedRequest, xContentType, humanReadable);
assertToXContentEquivalent(originalBytes, parsedBytes, xContentType);
}
public void testEqualsAndHashcode() {
checkEqualsAndHashCode(createSearchScrollRequest(), SearchScrollRequestTests::copyRequest, SearchScrollRequestTests::mutate);
}
public static SearchScrollRequest createSearchScrollRequest() {
SearchScrollRequest searchScrollRequest = new SearchScrollRequest(randomAlphaOfLengthBetween(3, 10));
searchScrollRequest.scroll(randomPositiveTimeValue());
return searchScrollRequest;
}
private static SearchScrollRequest copyRequest(SearchScrollRequest searchScrollRequest) {
SearchScrollRequest result = new SearchScrollRequest();
result.scrollId(searchScrollRequest.scrollId());
result.scroll(searchScrollRequest.scroll());
return result;
}
private static SearchScrollRequest mutate(SearchScrollRequest original) {
SearchScrollRequest copy = copyRequest(original);
if (randomBoolean()) {
return copy.scrollId(original.scrollId() + "xyz");
} else {
return copy.scroll(new TimeValue(original.scroll().getMillis() + 1));
}
}
}
| SearchScrollRequestTests |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/web/configurers/AbstractConfigAttributeRequestMatcherRegistry.java | {
"start": 5238,
"end": 5695
} | class ____ {
private final RequestMatcher requestMatcher;
private final Collection<ConfigAttribute> configAttrs;
UrlMapping(RequestMatcher requestMatcher, Collection<ConfigAttribute> configAttrs) {
this.requestMatcher = requestMatcher;
this.configAttrs = configAttrs;
}
RequestMatcher getRequestMatcher() {
return this.requestMatcher;
}
Collection<ConfigAttribute> getConfigAttrs() {
return this.configAttrs;
}
}
}
| UrlMapping |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/packagescan/util/AbstractObjectUtils.java | {
"start": 22589,
"end": 22808
} | class ____ for the given object.
*
* <p>Returns a {@code "null"} String if {@code obj} is {@code null}.
*
* @param obj the object to introspect (may be {@code null})
* @return the corresponding | name |
java | apache__dubbo | dubbo-plugin/dubbo-auth/src/main/java/org/apache/dubbo/auth/filter/ProviderAuthHeaderFilter.java | {
"start": 1366,
"end": 2411
} | class ____ implements HeaderFilter {
private final FrameworkModel frameworkModel;
public ProviderAuthHeaderFilter(FrameworkModel frameworkModel) {
this.frameworkModel = frameworkModel;
}
@Override
public RpcInvocation invoke(Invoker<?> invoker, RpcInvocation invocation) throws RpcException {
URL url = invoker.getUrl();
boolean shouldAuth = url.getParameter(Constants.AUTH_KEY, false);
if (shouldAuth) {
Authenticator authenticator = frameworkModel
.getExtensionLoader(Authenticator.class)
.getExtension(url.getParameter(Constants.AUTHENTICATOR_KEY, Constants.DEFAULT_AUTHENTICATOR));
try {
authenticator.authenticate(invocation, url);
} catch (Exception e) {
throw new RpcException(AUTHORIZATION_EXCEPTION, "No Auth.");
}
invocation.getAttributes().put(Constants.AUTH_SUCCESS, Boolean.TRUE);
}
return invocation;
}
}
| ProviderAuthHeaderFilter |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/AuthorizeHttpRequestsConfigurerTests.java | {
"start": 63399,
"end": 63921
} | class ____ {
@Bean
SecurityFilterChain security(HttpSecurity http) throws Exception {
PathPatternRequestMatcher.Builder mvc = PathPatternRequestMatcher.withDefaults().basePath("/mvc");
// @formatter:off
http
.authorizeHttpRequests((authorize) -> authorize
.requestMatchers(mvc.matcher("/path/**")).hasRole("USER")
)
.httpBasic(withDefaults());
// @formatter:on
return http.build();
}
}
@Configuration
@EnableWebSecurity
@EnableWebMvc
static | PathPatternRequestMatcherBuilderConfig |
java | quarkusio__quarkus | extensions/smallrye-jwt/deployment/src/main/java/io/quarkus/smallrye/jwt/deployment/SmallRyeJwtProcessor.java | {
"start": 2902,
"end": 11000
} | class ____ {
private static final Logger log = Logger.getLogger(SmallRyeJwtProcessor.class.getName());
private static final String CLASSPATH_SCHEME = "classpath:";
static final String MP_JWT_VERIFY_KEY_LOCATION = "mp.jwt.verify.publickey.location";
private static final String MP_JWT_DECRYPT_KEY_LOCATION = "mp.jwt.decrypt.key.location";
private static final DotName CLAIM_NAME = DotName.createSimple(Claim.class.getName());
private static final DotName CLAIMS_NAME = DotName.createSimple(Claims.class.getName());
private static final DotName CLAIM_VALUE_NAME = DotName.createSimple(ClaimValue.class);
private static final DotName REQUEST_SCOPED_NAME = DotName.createSimple(RequestScoped.class);
private static final Set<DotName> ALL_PROVIDER_NAMES = Set.of(DotNames.PROVIDER, DotNames.INSTANCE,
DotNames.INJECTABLE_INSTANCE);
SmallRyeJwtBuildTimeConfig config;
@BuildStep(onlyIf = IsEnabled.class)
ExtensionSslNativeSupportBuildItem enableSslInNative() {
return new ExtensionSslNativeSupportBuildItem(Feature.SMALLRYE_JWT);
}
@BuildStep(onlyIf = IsEnabled.class)
public void provideSecurityInformation(BuildProducer<SecurityInformationBuildItem> securityInformationProducer) {
securityInformationProducer.produce(SecurityInformationBuildItem.JWT());
}
/**
* Register the CDI beans that are needed by the MP-JWT extension
*
* @param additionalBeans - producer for additional bean items
*/
@BuildStep
void registerAdditionalBeans(BuildProducer<AdditionalBeanBuildItem> additionalBeans,
BuildProducer<ReflectiveClassBuildItem> reflectiveClasses) {
if (config.enabled()) {
AdditionalBeanBuildItem.Builder unremovable = AdditionalBeanBuildItem.builder().setUnremovable();
unremovable.addBeanClass(MpJwtValidator.class);
unremovable.addBeanClass(JsonWebTokenCredentialProducer.class);
unremovable.addBeanClass(JWTAuthMechanism.class);
unremovable.addBeanClass(ClaimValueProducer.class);
additionalBeans.produce(unremovable.build());
}
AdditionalBeanBuildItem.Builder removable = AdditionalBeanBuildItem.builder();
removable.addBeanClass(JWTAuthContextInfoProvider.class);
removable.addBeanClass(DefaultJWTParser.class);
removable.addBeanClass(CommonJwtProducer.class);
removable.addBeanClass(RawClaimTypeProducer.class);
removable.addBeanClass(JsonValueProducer.class);
removable.addBeanClass(JwtPrincipalProducer.class);
removable.addBeanClass(JWTCallerPrincipalFactoryProducer.class);
removable.addBeanClass(Claim.class);
additionalBeans.produce(removable.build());
reflectiveClasses.produce(ReflectiveClassBuildItem.builder(SignatureAlgorithm.class, KeyEncryptionAlgorithm.class)
.reason(getClass().getName())
.methods().fields().build());
}
/**
* Register this extension as an MP-JWT feature
*
* @return FeatureBuildItem
*/
@BuildStep
FeatureBuildItem feature() {
return new FeatureBuildItem(Feature.SMALLRYE_JWT);
}
/**
* If the configuration specified a deployment local key resource, register it in native mode
*
* @return NativeImageResourceBuildItem
*/
@BuildStep(onlyIf = NativeOrNativeSourcesBuild.class)
void registerNativeImageResources(BuildProducer<NativeImageResourceBuildItem> nativeImageResource) {
Config config = ConfigProvider.getConfig();
registerKeyLocationResource(config, MP_JWT_VERIFY_KEY_LOCATION, nativeImageResource);
registerKeyLocationResource(config, MP_JWT_DECRYPT_KEY_LOCATION, nativeImageResource);
}
private void registerKeyLocationResource(Config config, String propertyName,
BuildProducer<NativeImageResourceBuildItem> nativeImageResource) {
Optional<String> keyLocation = config.getOptionalValue(propertyName, String.class);
if (keyLocation.isPresent() && keyLocation.get().length() > 1
&& (keyLocation.get().indexOf(':') < 0 || (keyLocation.get().startsWith(CLASSPATH_SCHEME)
&& keyLocation.get().length() > CLASSPATH_SCHEME.length()))) {
log.infof("Adding %s to native image", keyLocation.get());
String location = keyLocation.get();
// It can only be `classpath:` at this point
if (location.startsWith(CLASSPATH_SCHEME)) {
location = location.substring(CLASSPATH_SCHEME.length());
}
if (location.startsWith("/")) {
location = location.substring(1);
}
nativeImageResource.produce(new NativeImageResourceBuildItem(location));
}
}
/**
* Register the SHA256withRSA signature provider
*
* @return JCAProviderBuildItem for SHA256withRSA signature provider
*/
@BuildStep
JCAProviderBuildItem registerRSASigProvider() {
return new JCAProviderBuildItem(config.rsaSigProvider());
}
@BuildStep
void registerOptionalClaimProducer(BeanRegistrationPhaseBuildItem beanRegistrationPhase,
BuildProducer<BeanConfiguratorBuildItem> beanConfigurator) {
Set<Type> additionalTypes = new HashSet<>();
// First analyze all relevant injection points
for (InjectionPointInfo injectionPoint : beanRegistrationPhase.getContext().get(BuildExtension.Key.INJECTION_POINTS)) {
if (injectionPoint.hasDefaultedQualifier()) {
continue;
}
AnnotationInstance claimQualifier = injectionPoint.getRequiredQualifier(CLAIM_NAME);
if (claimQualifier != null) {
Type actualType = injectionPoint.getRequiredType();
Optional<BeanInfo> bean = injectionPoint.getTargetBean();
if (bean.isPresent()) {
DotName scope = bean.get().getScope().getDotName();
if (!REQUEST_SCOPED_NAME.equals(scope)
&& (!ALL_PROVIDER_NAMES.contains(injectionPoint.getType().name())
&& !CLAIM_VALUE_NAME.equals(actualType.name()))) {
String error = String.format(
"%s type can not be used to represent JWT claims in @Singleton or @ApplicationScoped beans"
+ ", make the bean @RequestScoped or wrap this type with org.eclipse.microprofile.jwt.ClaimValue"
+ " or jakarta.inject.Provider or jakarta.enterprise.inject.Instance",
actualType.name());
throw new IllegalStateException(error);
}
}
if (injectionPoint.getType().name().equals(DotNames.PROVIDER) && actualType.name().equals(DotNames.OPTIONAL)) {
additionalTypes.add(actualType);
}
}
}
// Register a custom bean
BeanConfigurator<Optional<?>> configurator = beanRegistrationPhase.getContext().configure(Optional.class);
for (Type type : additionalTypes) {
configurator.addType(type);
}
configurator.scope(BuiltinScope.DEPENDENT.getInfo());
configurator.qualifiers(AnnotationInstance.create(CLAIM_NAME, null,
new AnnotationValue[] { AnnotationValue.createStringValue("value", ""),
AnnotationValue.createEnumValue("standard", CLAIMS_NAME, "UNKNOWN") }));
configurator.creator(RawOptionalClaimCreator.class);
beanConfigurator.produce(new BeanConfiguratorBuildItem(configurator));
}
@BuildStep
List<HttpAuthMechanismAnnotationBuildItem> registerHttpAuthMechanismAnnotation() {
return List.of(
new HttpAuthMechanismAnnotationBuildItem(DotName.createSimple(BearerTokenAuthentication.class), BEARER));
}
public static | SmallRyeJwtProcessor |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/view/InternalResourceView.java | {
"start": 2646,
"end": 9227
} | class ____ extends AbstractUrlBasedView {
private boolean alwaysInclude = false;
private boolean preventDispatchLoop = false;
/**
* Constructor for use as a bean.
* @see #setUrl
* @see #setAlwaysInclude
*/
public InternalResourceView() {
}
/**
* Create a new InternalResourceView with the given URL.
* @param url the URL to forward to
* @see #setAlwaysInclude
*/
public InternalResourceView(String url) {
super(url);
}
/**
* Create a new InternalResourceView with the given URL.
* @param url the URL to forward to
* @param alwaysInclude whether to always include the view rather than forward to it
*/
public InternalResourceView(String url, boolean alwaysInclude) {
super(url);
this.alwaysInclude = alwaysInclude;
}
/**
* Specify whether to always include the view rather than forward to it.
* <p>Default is "false". Switch this flag on to enforce the use of a
* Servlet include, even if a forward would be possible.
* @see jakarta.servlet.RequestDispatcher#forward
* @see jakarta.servlet.RequestDispatcher#include
* @see #useInclude(jakarta.servlet.http.HttpServletRequest, jakarta.servlet.http.HttpServletResponse)
*/
public void setAlwaysInclude(boolean alwaysInclude) {
this.alwaysInclude = alwaysInclude;
}
/**
* Set whether to explicitly prevent dispatching back to the
* current handler path.
* <p>Default is "false". Switch this to "true" for convention-based
* views where a dispatch back to the current handler path is a
* definitive error.
*/
public void setPreventDispatchLoop(boolean preventDispatchLoop) {
this.preventDispatchLoop = preventDispatchLoop;
}
/**
* An ApplicationContext is not strictly required for InternalResourceView.
*/
@Override
protected boolean isContextRequired() {
return false;
}
/**
* Render the internal resource given the specified model.
* This includes setting the model as request attributes.
*/
@Override
protected void renderMergedOutputModel(
Map<String, Object> model, HttpServletRequest request, HttpServletResponse response) throws Exception {
// Expose the model object as request attributes.
exposeModelAsRequestAttributes(model, request);
// Expose helpers as request attributes, if any.
exposeHelpers(request);
// Determine the path for the request dispatcher.
String dispatcherPath = prepareForRendering(request, response);
// Obtain a RequestDispatcher for the target resource (typically a JSP).
RequestDispatcher rd = getRequestDispatcher(request, dispatcherPath);
if (rd == null) {
throw new ServletException("Could not get RequestDispatcher for [" + getUrl() +
"]: Check that the corresponding file exists within your web application archive!");
}
// If already included or response already committed, perform include, else forward.
if (useInclude(request, response)) {
response.setContentType(getContentType());
if (logger.isDebugEnabled()) {
logger.debug("Including [" + getUrl() + "]");
}
rd.include(request, response);
}
else {
// Note: The forwarded resource is supposed to determine the content type itself.
if (logger.isDebugEnabled()) {
logger.debug("Forwarding to [" + getUrl() + "]");
}
rd.forward(request, response);
}
}
/**
* Expose helpers unique to each rendering operation. This is necessary so that
* different rendering operations can't overwrite each other's contexts etc.
* <p>Called by {@link #renderMergedOutputModel(Map, HttpServletRequest, HttpServletResponse)}.
* The default implementation is empty. This method can be overridden to add
* custom helpers as request attributes.
* @param request current HTTP request
* @throws Exception if there's a fatal error while we're adding attributes
* @see #renderMergedOutputModel
* @see JstlView#exposeHelpers
*/
protected void exposeHelpers(HttpServletRequest request) throws Exception {
}
/**
* Prepare for rendering, and determine the request dispatcher path
* to forward to (or to include).
* <p>This implementation simply returns the configured URL.
* Subclasses can override this to determine a resource to render,
* typically interpreting the URL in a different manner.
* @param request current HTTP request
* @param response current HTTP response
* @return the request dispatcher path to use
* @throws Exception if preparations failed
* @see #getUrl()
*/
protected String prepareForRendering(HttpServletRequest request, HttpServletResponse response)
throws Exception {
String path = getUrl();
Assert.state(path != null, "'url' not set");
if (this.preventDispatchLoop) {
String uri = request.getRequestURI();
if (path.startsWith("/") ? uri.equals(path) : uri.equals(StringUtils.applyRelativePath(uri, path))) {
throw new ServletException("Circular view path [" + path + "]: would dispatch back " +
"to the current handler URL [" + uri + "] again. Check your ViewResolver setup! " +
"(Hint: This may be the result of an unspecified view, due to default view name generation.)");
}
}
return path;
}
/**
* Obtain the RequestDispatcher to use for the forward/include.
* <p>The default implementation simply calls
* {@link HttpServletRequest#getRequestDispatcher(String)}.
* Can be overridden in subclasses.
* @param request current HTTP request
* @param path the target URL (as returned from {@link #prepareForRendering})
* @return a corresponding RequestDispatcher
*/
protected @Nullable RequestDispatcher getRequestDispatcher(HttpServletRequest request, String path) {
return request.getRequestDispatcher(path);
}
/**
* Determine whether to use RequestDispatcher's {@code include} or
* {@code forward} method.
* <p>Performs a check whether an include URI attribute is found in the request,
* indicating an include request, and whether the response has already been committed.
* In both cases, an include will be performed, as a forward is not possible anymore.
* @param request current HTTP request
* @param response current HTTP response
* @return {@code true} for include, {@code false} for forward
* @see jakarta.servlet.RequestDispatcher#forward
* @see jakarta.servlet.RequestDispatcher#include
* @see jakarta.servlet.ServletResponse#isCommitted
* @see org.springframework.web.util.WebUtils#isIncludeRequest
*/
protected boolean useInclude(HttpServletRequest request, HttpServletResponse response) {
return (this.alwaysInclude || WebUtils.isIncludeRequest(request) || response.isCommitted());
}
}
| InternalResourceView |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TempDirectoryTests.java | {
"start": 18062,
"end": 19817
} | class ____ {
@Test
@DisplayName("that uses test method name as temp dir name prefix")
void supportsFactoryWithTestMethodNameAsPrefix() {
executeTestsForClass(FactoryWithTestMethodNameAsPrefixTestCase.class).testEvents()//
.assertStatistics(stats -> stats.started(1).succeeded(1));
}
@Test
@DisplayName("that uses custom temp dir parent directory")
void supportsFactoryWithCustomParentDirectory() {
executeTestsForClass(FactoryWithCustomParentDirectoryTestCase.class).testEvents()//
.assertStatistics(stats -> stats.started(1).succeeded(1));
}
@Test
@DisplayName("that uses com.github.marschall:memoryfilesystem")
void supportsFactoryWithMemoryFileSystem() {
executeTestsForClass(FactoryWithMemoryFileSystemTestCase.class).testEvents()//
.assertStatistics(stats -> stats.started(1).succeeded(1));
}
@Test
@DisplayName("that uses com.google.jimfs:jimfs")
void supportsFactoryWithJimfs() {
executeTestsForClass(FactoryWithJimfsTestCase.class).testEvents()//
.assertStatistics(stats -> stats.started(1).succeeded(1));
}
@Test
@DisplayName("that uses annotated element name as temp dir name prefix")
void supportsFactoryWithAnnotatedElementNameAsPrefix() {
executeTestsForClass(FactoryWithAnnotatedElementNameAsPrefixTestCase.class).testEvents()//
.assertStatistics(stats -> stats.started(1).succeeded(1));
}
@Test
@DisplayName("that uses custom meta-annotation")
void supportsFactoryWithCustomMetaAnnotation() {
executeTestsForClass(FactoryWithCustomMetaAnnotationTestCase.class).testEvents()//
.assertStatistics(stats -> stats.started(1).succeeded(1));
}
}
@Nested
@DisplayName("supports default factory")
@TestMethodOrder(OrderAnnotation.class)
| Factory |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/reflect/ConstructorUtils.java | {
"start": 9691,
"end": 10321
} | class ____ invocation of {@link SecurityManager#checkPackageAccess(String)} denies access to the
* package of the class.
* @see #invokeConstructor(Class, Object[], Class[])
*/
public static <T> T invokeConstructor(final Class<T> cls, final Object... args)
throws NoSuchMethodException, IllegalAccessException, InvocationTargetException, InstantiationException {
final Object[] actuals = ArrayUtils.nullToEmpty(args);
return invokeConstructor(cls, actuals, ClassUtils.toClass(actuals));
}
/**
* Returns a new instance of the specified | and |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java | {
"start": 811,
"end": 1494
} | class ____ extends HandledTransportAction<ValidateDetectorAction.Request, AcknowledgedResponse> {
@Inject
public TransportValidateDetectorAction(TransportService transportService, ActionFilters actionFilters) {
super(
ValidateDetectorAction.NAME,
transportService,
actionFilters,
ValidateDetectorAction.Request::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
}
@Override
protected void doExecute(Task task, ValidateDetectorAction.Request request, ActionListener<AcknowledgedResponse> listener) {
listener.onResponse(AcknowledgedResponse.TRUE);
}
}
| TransportValidateDetectorAction |
java | spring-projects__spring-boot | integration-test/spring-boot-server-integration-tests/src/intTest/java/org/springframework/boot/context/embedded/Application.java | {
"start": 786,
"end": 1267
} | class ____ {
private final String packaging;
private final String container;
Application(String packaging, String container) {
this.packaging = packaging;
this.container = container;
}
String getPackaging() {
return this.packaging;
}
String getContainer() {
return this.container;
}
File getArchive() {
return new File("build/spring-boot-server-tests-app/build/libs/spring-boot-server-tests-app-" + this.container
+ "." + this.packaging);
}
}
| Application |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/SerializationOrderTest.java | {
"start": 1940,
"end": 2408
} | class ____ {
private final int a;
private final int b;
@JsonCreator
public BeanForGH311(@JsonProperty("b") int b, @JsonProperty("a") int a) { //b and a are out of order, although alphabetic = true
this.a = a;
this.b = b;
}
public int getA() { return a; }
public int getB() { return b; }
}
// We'll expect ordering of "FUBAR"
@JsonPropertyOrder({ "f" })
static | BeanForGH311 |
java | apache__camel | core/camel-main/src/main/java/org/apache/camel/main/TelemetryDevConfigurationProperties.java | {
"start": 1019,
"end": 3336
} | class ____ implements BootstrapCloseable {
private MainConfigurationProperties parent;
private boolean enabled;
private String excludePatterns;
private boolean traceProcessors;
private String traceFormat;
public TelemetryDevConfigurationProperties(MainConfigurationProperties parent) {
this.parent = parent;
}
public MainConfigurationProperties end() {
return parent;
}
@Override
public void close() {
parent = null;
}
public boolean isEnabled() {
return enabled;
}
/**
* To enable TelemetryDev
*/
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public String getExcludePatterns() {
return excludePatterns;
}
/**
* Adds an exclude pattern that will disable tracing for Camel messages that matches the pattern. Multiple patterns
* can be separated by comma.
*/
public void setExcludePatterns(String excludePatterns) {
this.excludePatterns = excludePatterns;
}
public boolean isTraceProcessors() {
return traceProcessors;
}
/**
* Setting this to true will create new TelemetrySimple Spans for each Camel Processors. Use the excludePattern
* property to filter out Processors.
*/
public void setTraceProcessors(boolean traceProcessors) {
this.traceProcessors = traceProcessors;
}
public String getTraceFormat() {
return traceFormat;
}
/**
* The output format for traces.
*/
public void setTraceFormat(String traceFormat) {
this.traceFormat = traceFormat;
}
public TelemetryDevConfigurationProperties withEnabled(boolean enabled) {
this.enabled = enabled;
return this;
}
public TelemetryDevConfigurationProperties withExcludePatterns(String excludePatterns) {
this.excludePatterns = excludePatterns;
return this;
}
public TelemetryDevConfigurationProperties withTraceProcessors(boolean traceProcessors) {
this.traceProcessors = traceProcessors;
return this;
}
public TelemetryDevConfigurationProperties withTraceFromat(String traceFormat) {
this.traceFormat = traceFormat;
return this;
}
}
| TelemetryDevConfigurationProperties |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/serializecircular/UtilityTester.java | {
"start": 892,
"end": 2142
} | class ____ {
public static void serializeAndDeserializeObject(Object myObject) {
try {
deserialzeObject(serializeObject(myObject));
} catch (IOException e) {
System.out.println("Exception: " + e.toString());
}
}
private static byte[] serializeObject(Object myObject) throws IOException {
try {
ByteArrayOutputStream myByteArrayOutputStream = new ByteArrayOutputStream();
// Serialize to a byte array
try (ObjectOutputStream myObjectOutputStream = new ObjectOutputStream(myByteArrayOutputStream)) {
myObjectOutputStream.writeObject(myObject);
}
return myByteArrayOutputStream.toByteArray();
} catch (Exception anException) {
throw new RuntimeException("Problem serializing: " + anException.toString(), anException);
}
}
private static Object deserialzeObject(byte[] aSerializedObject) {
// Deserialize from a byte array
try (ObjectInputStream myObjectInputStream = new ObjectInputStream(new ByteArrayInputStream(aSerializedObject))) {
return myObjectInputStream.readObject();
} catch (Exception anException) {
throw new RuntimeException("Problem deserializing", anException);
}
}
private UtilityTester() {
}
}
| UtilityTester |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/SchemaBuilder.java | {
"start": 72773,
"end": 73288
} | class ____<R> extends FieldDefault<R, StringDefault<R>> {
private StringDefault(FieldBuilder<R> field) {
super(field);
}
/** Completes this field with the default value provided. Cannot be null. **/
public final FieldAssembler<R> stringDefault(String defaultVal) {
return super.usingDefault(defaultVal);
}
@Override
final StringDefault<R> self() {
return this;
}
}
/** Choose whether to use a default value for the field or not. **/
public static | StringDefault |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/tags/form/CheckboxesTagTests.java | {
"start": 1995,
"end": 38419
} | class ____ extends AbstractFormTagTests {
private CheckboxesTag tag;
private TestBean bean;
@Override
@SuppressWarnings("serial")
protected void onSetUp() {
this.tag = new CheckboxesTag() {
@Override
protected TagWriter createTagWriter() {
return new TagWriter(getWriter());
}
};
this.tag.setPageContext(getPageContext());
}
@Test
void withMultiValueArray() throws Exception {
this.tag.setPath("stringArray");
this.tag.setItems(new Object[] {"foo", "bar", "baz"});
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("foo");
assertThat(spanElement1.getStringValue()).isEqualTo("foo");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("bar");
assertThat(spanElement2.getStringValue()).isEqualTo("bar");
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("baz");
assertThat(spanElement3.getStringValue()).isEqualTo("baz");
}
@Test
void withMultiValueArrayAndDynamicAttributes() throws Exception {
String dynamicAttribute1 = "attr1";
String dynamicAttribute2 = "attr2";
this.tag.setPath("stringArray");
this.tag.setItems(new Object[] {"foo", "bar", "baz"});
this.tag.setDynamicAttribute(null, dynamicAttribute1, dynamicAttribute1);
this.tag.setDynamicAttribute(null, dynamicAttribute2, dynamicAttribute2);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("foo");
assertThat(spanElement1.getStringValue()).isEqualTo("foo");
assertThat(checkboxElement1.attribute(dynamicAttribute1).getValue()).isEqualTo(dynamicAttribute1);
assertThat(checkboxElement1.attribute(dynamicAttribute2).getValue()).isEqualTo(dynamicAttribute2);
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("bar");
assertThat(spanElement2.getStringValue()).isEqualTo("bar");
assertThat(checkboxElement2.attribute(dynamicAttribute1).getValue()).isEqualTo(dynamicAttribute1);
assertThat(checkboxElement2.attribute(dynamicAttribute2).getValue()).isEqualTo(dynamicAttribute2);
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("baz");
assertThat(spanElement3.getStringValue()).isEqualTo("baz");
assertThat(checkboxElement3.attribute(dynamicAttribute1).getValue()).isEqualTo(dynamicAttribute1);
assertThat(checkboxElement3.attribute(dynamicAttribute2).getValue()).isEqualTo(dynamicAttribute2);
}
@Test
void withMultiValueArrayWithDelimiter() throws Exception {
this.tag.setDelimiter("<br/>");
this.tag.setPath("stringArray");
this.tag.setItems(new Object[] {"foo", "bar", "baz"});
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element delimiterElement1 = spanElement1.element("br");
assertThat(delimiterElement1).isNull();
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("foo");
assertThat(spanElement1.getStringValue()).isEqualTo("foo");
Element spanElement2 = document.getRootElement().elements().get(1);
Element delimiterElement2 = spanElement2.elements().get(0);
assertThat(delimiterElement2.getName()).isEqualTo("br");
Element checkboxElement2 = spanElement2.elements().get(1);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("bar");
assertThat(spanElement2.getStringValue()).isEqualTo("bar");
Element spanElement3 = document.getRootElement().elements().get(2);
Element delimiterElement3 = spanElement3.elements().get(0);
assertThat(delimiterElement3.getName()).isEqualTo("br");
Element checkboxElement3 = spanElement3.elements().get(1);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("baz");
assertThat(spanElement3.getStringValue()).isEqualTo("baz");
}
@Test
void withMultiValueMap() throws Exception {
this.tag.setPath("stringArray");
Map m = new LinkedHashMap();
m.put("foo", "FOO");
m.put("bar", "BAR");
m.put("baz", "BAZ");
this.tag.setItems(m);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("foo");
assertThat(spanElement1.getStringValue()).isEqualTo("FOO");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("bar");
assertThat(spanElement2.getStringValue()).isEqualTo("BAR");
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("baz");
assertThat(spanElement3.getStringValue()).isEqualTo("BAZ");
}
@Test
void withPetItemsMap() throws Exception {
this.tag.setPath("someSet");
Map m = new LinkedHashMap();
m.put(new ItemPet("PET1"), "PET1Label");
m.put(new ItemPet("PET2"), "PET2Label");
m.put(new ItemPet("PET3"), "PET3Label");
this.tag.setItems(m);
tag.setItemValue("name");
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("someSet");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("PET1");
assertThat(spanElement1.getStringValue()).isEqualTo("PET1Label");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("someSet");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("PET2");
assertThat(spanElement2.getStringValue()).isEqualTo("PET2Label");
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("someSet");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("PET3");
assertThat(spanElement3.getStringValue()).isEqualTo("PET3Label");
}
@Test
void withMultiValueMapWithDelimiter() throws Exception {
String delimiter = " | ";
this.tag.setDelimiter(delimiter);
this.tag.setPath("stringArray");
Map m = new LinkedHashMap();
m.put("foo", "FOO");
m.put("bar", "BAR");
m.put("baz", "BAZ");
this.tag.setItems(m);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("foo");
assertThat(spanElement1.getStringValue()).isEqualTo("FOO");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("bar");
assertThat(spanElement2.getStringValue()).isEqualTo((delimiter + "BAR"));
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("baz");
assertThat(spanElement3.getStringValue()).isEqualTo((delimiter + "BAZ"));
}
@Test
void withMultiValueWithEditor() throws Exception {
this.tag.setPath("stringArray");
this.tag.setItems(new Object[] {" foo", " bar", " baz"});
BeanPropertyBindingResult bindingResult = new BeanPropertyBindingResult(this.bean, COMMAND_NAME);
MyStringTrimmerEditor editor = new MyStringTrimmerEditor();
bindingResult.getPropertyEditorRegistry().registerCustomEditor(String.class, editor);
getPageContext().getRequest().setAttribute(BindingResult.MODEL_KEY_PREFIX + COMMAND_NAME, bindingResult);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
assertThat(editor.allProcessedValues).hasSize(3);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo(" foo");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo(" bar");
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo(" baz");
}
@Test
void withMultiValueWithReverseEditor() throws Exception {
this.tag.setPath("stringArray");
this.tag.setItems(new Object[] {"FOO", "BAR", "BAZ"});
BeanPropertyBindingResult bindingResult = new BeanPropertyBindingResult(this.bean, COMMAND_NAME);
MyLowerCaseEditor editor = new MyLowerCaseEditor();
bindingResult.getPropertyEditorRegistry().registerCustomEditor(String.class, editor);
getPageContext().getRequest().setAttribute(BindingResult.MODEL_KEY_PREFIX + COMMAND_NAME, bindingResult);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("FOO");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("BAR");
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("BAZ");
}
@Test
void withMultiValueWithFormatter() throws Exception {
this.tag.setPath("stringArray");
this.tag.setItems(new Object[] {" foo", " bar", " baz"});
BeanPropertyBindingResult bindingResult = new BeanPropertyBindingResult(this.bean, COMMAND_NAME);
FormattingConversionService cs = new FormattingConversionService();
cs.addFormatterForFieldType(String.class, new Formatter<String>() {
@Override
public String print(String object, Locale locale) {
return object;
}
@Override
public String parse(String text, Locale locale) {
return text.trim();
}
});
bindingResult.initConversion(cs);
getPageContext().getRequest().setAttribute(BindingResult.MODEL_KEY_PREFIX + COMMAND_NAME, bindingResult);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo(" foo");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo(" bar");
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo(" baz");
}
@Test
void collectionOfPets() throws Exception {
this.tag.setPath("pets");
List allPets = new ArrayList();
allPets.add(new ItemPet("Rudiger"));
allPets.add(new ItemPet("Spot"));
allPets.add(new ItemPet("Checkers"));
allPets.add(new ItemPet("Fluffy"));
allPets.add(new ItemPet("Mufty"));
this.tag.setItems(allPets);
this.tag.setItemValue("name");
this.tag.setItemLabel("label");
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("Rudiger");
assertThat(spanElement1.getStringValue()).isEqualTo("RUDIGER");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("Spot");
assertThat(spanElement2.getStringValue()).isEqualTo("SPOT");
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("Checkers");
assertThat(spanElement3.getStringValue()).isEqualTo("CHECKERS");
Element spanElement4 = document.getRootElement().elements().get(3);
Element checkboxElement4 = spanElement4.elements().get(0);
assertThat(checkboxElement4.getName()).isEqualTo("input");
assertThat(checkboxElement4.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement4.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement4.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement4.attribute("value").getValue()).isEqualTo("Fluffy");
assertThat(spanElement4.getStringValue()).isEqualTo("FLUFFY");
Element spanElement5 = document.getRootElement().elements().get(4);
Element checkboxElement5 = spanElement5.elements().get(0);
assertThat(checkboxElement5.getName()).isEqualTo("input");
assertThat(checkboxElement5.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement5.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement5.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement5.attribute("value").getValue()).isEqualTo("Mufty");
assertThat(spanElement5.getStringValue()).isEqualTo("MUFTY");
}
/**
* Test case where items toString() doesn't fit the item ID
*/
@Test
void collectionOfItemPets() throws Exception {
this.tag.setPath("someSet");
List allPets = new ArrayList();
allPets.add(new ItemPet("PET1"));
allPets.add(new ItemPet("PET2"));
allPets.add(new ItemPet("PET3"));
this.tag.setItems(allPets);
this.tag.setItemValue("name");
this.tag.setItemLabel("label");
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("someSet");
assertThat(checkboxElement1.attribute("checked")).as("should be checked").isNotNull();
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("PET1");
assertThat(spanElement1.getStringValue()).isEqualTo("PET1");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("someSet");
assertThat(checkboxElement2.attribute("checked")).as("should be checked").isNotNull();
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("PET2");
assertThat(spanElement2.getStringValue()).isEqualTo("PET2");
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("someSet");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("PET3");
assertThat(spanElement3.getStringValue()).isEqualTo("PET3");
}
@Test
void collectionOfPetsWithEditor() throws Exception {
this.tag.setPath("pets");
List allPets = new ArrayList();
allPets.add(new ItemPet("Rudiger"));
allPets.add(new ItemPet("Spot"));
allPets.add(new ItemPet("Checkers"));
allPets.add(new ItemPet("Fluffy"));
allPets.add(new ItemPet("Mufty"));
this.tag.setItems(allPets);
this.tag.setItemLabel("label");
this.tag.setId("myId");
BeanPropertyBindingResult bindingResult = new BeanPropertyBindingResult(this.bean, COMMAND_NAME);
PropertyEditorSupport editor = new ItemPet.CustomEditor();
bindingResult.getPropertyEditorRegistry().registerCustomEditor(ItemPet.class, editor);
getPageContext().getRequest().setAttribute(BindingResult.MODEL_KEY_PREFIX + COMMAND_NAME, bindingResult);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement1 = document.getRootElement().elements().get(0);
Element checkboxElement1 = spanElement1.elements().get(0);
assertThat(checkboxElement1.getName()).isEqualTo("input");
assertThat(checkboxElement1.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement1.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement1.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement1.attribute("value").getValue()).isEqualTo("Rudiger");
assertThat(spanElement1.getStringValue()).isEqualTo("RUDIGER");
Element spanElement2 = document.getRootElement().elements().get(1);
Element checkboxElement2 = spanElement2.elements().get(0);
assertThat(checkboxElement2.getName()).isEqualTo("input");
assertThat(checkboxElement2.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement2.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement2.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement2.attribute("value").getValue()).isEqualTo("Spot");
assertThat(spanElement2.getStringValue()).isEqualTo("SPOT");
Element spanElement3 = document.getRootElement().elements().get(2);
Element checkboxElement3 = spanElement3.elements().get(0);
assertThat(checkboxElement3.getName()).isEqualTo("input");
assertThat(checkboxElement3.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement3.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement3.attribute("checked")).as("not checked").isNull();
assertThat(checkboxElement3.attribute("value").getValue()).isEqualTo("Checkers");
assertThat(spanElement3.getStringValue()).isEqualTo("CHECKERS");
Element spanElement4 = document.getRootElement().elements().get(3);
Element checkboxElement4 = spanElement4.elements().get(0);
assertThat(checkboxElement4.getName()).isEqualTo("input");
assertThat(checkboxElement4.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement4.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement4.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement4.attribute("value").getValue()).isEqualTo("Fluffy");
assertThat(spanElement4.getStringValue()).isEqualTo("FLUFFY");
Element spanElement5 = document.getRootElement().elements().get(4);
Element checkboxElement5 = spanElement5.elements().get(0);
assertThat(checkboxElement5.getName()).isEqualTo("input");
assertThat(checkboxElement5.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement5.attribute("name").getValue()).isEqualTo("pets");
assertThat(checkboxElement5.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement5.attribute("value").getValue()).isEqualTo("Mufty");
assertThat(spanElement5.getStringValue()).isEqualTo("MUFTY");
}
@Test
void withNullValue() {
this.tag.setPath("name");
assertThatIllegalArgumentException().as("null value binding to a non-boolean").isThrownBy(
this.tag::doStartTag);
}
@Test
void hiddenElementOmittedOnDisabled() throws Exception {
this.tag.setPath("stringArray");
this.tag.setItems(new Object[] {"foo", "bar", "baz"});
this.tag.setDisabled(true);
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element rootElement = document.getRootElement();
assertThat(rootElement.elements()).as("Both tag and hidden element rendered incorrectly").hasSize(3);
Element spanElement = document.getRootElement().elements().get(0);
Element checkboxElement = spanElement.elements().get(0);
assertThat(checkboxElement.getName()).isEqualTo("input");
assertThat(checkboxElement.attribute("type").getValue()).isEqualTo("checkbox");
assertThat(checkboxElement.attribute("name").getValue()).isEqualTo("stringArray");
assertThat(checkboxElement.attribute("checked").getValue()).isEqualTo("checked");
assertThat(checkboxElement.attribute("disabled").getValue()).isEqualTo("disabled");
assertThat(checkboxElement.attribute("value").getValue()).isEqualTo("foo");
}
@Test
void spanElementCustomizable() throws Exception {
this.tag.setPath("stringArray");
this.tag.setItems(new Object[] {"foo", "bar", "baz"});
this.tag.setElement("element");
int result = this.tag.doStartTag();
assertThat(result).isEqualTo(Tag.SKIP_BODY);
String output = getOutput();
// wrap the output so it is valid XML
output = "<doc>" + output + "</doc>";
SAXReader reader = new SAXReader();
Document document = reader.read(new StringReader(output));
Element spanElement = document.getRootElement().elements().get(0);
assertThat(spanElement.getName()).isEqualTo("element");
}
@Test
void dynamicTypeAttribute() {
assertThatIllegalArgumentException().isThrownBy(() ->
this.tag.setDynamicAttribute(null, "type", "email"))
.withMessage("Attribute type=\"email\" is not allowed");
}
private Date getDate() {
Calendar cal = Calendar.getInstance();
cal.set(Calendar.YEAR, 10);
cal.set(Calendar.MONTH, 10);
cal.set(Calendar.DATE, 10);
cal.set(Calendar.HOUR, 10);
cal.set(Calendar.MINUTE, 10);
cal.set(Calendar.SECOND, 10);
return cal.getTime();
}
@Override
protected TestBean createTestBean() {
List colours = new ArrayList();
colours.add(Colour.BLUE);
colours.add(Colour.RED);
colours.add(Colour.GREEN);
List pets = new ArrayList();
pets.add(new Pet("Rudiger"));
pets.add(new Pet("Spot"));
pets.add(new Pet("Fluffy"));
pets.add(new Pet("Mufty"));
Set someObjects = new HashSet();
someObjects.add(new ItemPet("PET1"));
someObjects.add(new ItemPet("PET2"));
this.bean = new TestBean();
this.bean.setDate(getDate());
this.bean.setName("Rob Harrop");
this.bean.setJedi(true);
this.bean.setSomeBoolean(Boolean.TRUE);
this.bean.setStringArray(new String[] {"bar", "foo"});
this.bean.setSomeIntegerArray(new Integer[] {2, 1});
this.bean.setOtherColours(colours);
this.bean.setPets(pets);
this.bean.setSomeSet(someObjects);
List list = new ArrayList();
list.add("foo");
list.add("bar");
this.bean.setSomeList(list);
return this.bean;
}
private static | CheckboxesTagTests |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/RangeTests.java | {
"start": 1411,
"end": 9431
} | class ____ extends ESTestCase {
public void testAreBoundariesInvalid() {
// value, value type, lower, lower type, lower included, higher, higher type, higher included, boundaries invalid
Object[][] tests = {
// dates
{
d("2021-01-01"),
DATETIME,
"2021-01-01",
randomTextType(),
randomBoolean(),
"2022-01-01",
randomTextType(),
randomBoolean(),
false },
{
d("2021-01-01"),
DATETIME,
"2022-01-01",
randomTextType(),
randomBoolean(),
"2021-01-01",
randomTextType(),
randomBoolean(),
true },
{
d("2021-01-01"),
DATETIME,
"now-10y",
randomTextType(),
randomBoolean(),
"2022-01-01",
randomTextType(),
randomBoolean(),
false },
{
d("2021-01-01"),
DATETIME,
"2021-01-01",
randomTextType(),
randomBoolean(),
"now+10y",
randomTextType(),
randomBoolean(),
false },
{
d("2021-01-01"),
DATETIME,
"2021-01-01",
randomTextType(),
randomBoolean(),
"now-100y",
randomTextType(),
randomBoolean(),
false },
{ d("2021-01-01"), DATETIME, "2021-01-01", randomTextType(), true, "2021-01-01", randomTextType(), true, false },
{ d("2021-01-01"), DATETIME, "2021-01-01", randomTextType(), false, "2021-01-01", randomTextType(), true, true },
{ d("2021-01-01"), DATETIME, "2021-01-01", randomTextType(), true, "2021-01-01", randomTextType(), false, true },
{ d("2021-01-01"), DATETIME, "2021-01-01", randomTextType(), false, "2021-01-01", randomTextType(), false, true },
{
d("2021-01-01"),
DATETIME,
d("2022-01-01"),
DATETIME,
randomBoolean(),
"2021-01-01",
randomTextType(),
randomBoolean(),
true },
{ d("2021-01-01"), DATETIME, d("2021-01-01"), DATETIME, false, "2021-01-01", randomTextType(), false, true },
{ d("2021-01-01"), DATETIME, d("2021-01-01"), DATETIME, false, d("2021-01-01"), DATETIME, false, true },
{ d("2021-01-01"), DATETIME, d("2021-01-01"), DATETIME, true, "2021-01-01", randomTextType(), true, false },
{ d("2021-01-01"), DATETIME, d("2021-01-01"), DATETIME, true, d("2021-01-01"), DATETIME, true, false },
{
randomAlphaOfLength(10),
randomTextType(),
d("2021-01-01"),
DATETIME,
randomBoolean(),
"2022-01-01",
randomTextType(),
randomBoolean(),
false },
{
randomAlphaOfLength(10),
randomTextType(),
"2021-01-01",
randomTextType(),
randomBoolean(),
d("2022-01-01"),
DATETIME,
randomBoolean(),
false },
{
randomAlphaOfLength(10),
randomTextType(),
d("2022-01-01"),
DATETIME,
randomBoolean(),
"2021-01-01",
randomTextType(),
randomBoolean(),
true },
{
randomAlphaOfLength(10),
randomTextType(),
"2022-01-01",
randomTextType(),
randomBoolean(),
d("2021-01-01"),
DATETIME,
randomBoolean(),
true },
{
randomAlphaOfLength(10),
randomTextType(),
d("2022-01-01"),
DATETIME,
randomBoolean(),
d("2021-01-01"),
DATETIME,
randomBoolean(),
true },
{
randomAlphaOfLength(10),
randomTextType(),
"now-10y",
randomTextType(),
randomBoolean(),
d("2022-01-01"),
DATETIME,
randomBoolean(),
false },
{ randomAlphaOfLength(10), randomTextType(), d("2021-01-01"), DATETIME, true, "2021-01-01", randomTextType(), true, false },
{ randomAlphaOfLength(10), randomTextType(), d("2021-01-01"), DATETIME, false, "2021-01-01", randomTextType(), true, true },
{ randomAlphaOfLength(10), randomTextType(), "2021-01-01", randomTextType(), true, d("2021-01-01"), DATETIME, false, true },
{ randomAlphaOfLength(10), randomTextType(), d("2021-01-01"), DATETIME, false, d("2021-01-01"), DATETIME, false, true },
// strings
{
randomAlphaOfLength(10),
randomTextType(),
"a",
randomTextType(),
randomBoolean(),
"b",
randomTextType(),
randomBoolean(),
false },
{
randomAlphaOfLength(10),
randomTextType(),
"b",
randomTextType(),
randomBoolean(),
"a",
randomTextType(),
randomBoolean(),
true },
{ randomAlphaOfLength(10), randomTextType(), "a", randomTextType(), false, "a", randomTextType(), false, true },
// numbers
{ 10, randomNumericType(), 1, randomNumericType(), randomBoolean(), 10, randomNumericType(), randomBoolean(), false },
{ 10, randomNumericType(), 10, randomNumericType(), randomBoolean(), 1, randomNumericType(), randomBoolean(), true },
{ 10, randomNumericType(), 1, randomNumericType(), false, 1, randomNumericType(), randomBoolean(), true },
{ 10, randomNumericType(), 1, randomNumericType(), randomBoolean(), 1, randomNumericType(), false, true },
{ 10, randomNumericType(), 1.0, randomNumericType(), randomBoolean(), 10, randomNumericType(), randomBoolean(), false },
{ 10, randomNumericType(), 1, randomNumericType(), randomBoolean(), 10.D, randomNumericType(), randomBoolean(), false },
{ 10, randomNumericType(), 10.0, randomNumericType(), randomBoolean(), 1, randomNumericType(), randomBoolean(), true },
};
for (int i = 0; i < tests.length; i++) {
Object[] test = tests[i];
Range range = new Range(
Source.EMPTY,
l(test[0], (DataType) test[1]),
l(test[2], (DataType) test[3]),
(Boolean) test[4],
l(test[5], (DataType) test[6]),
(Boolean) test[7],
ZoneId.systemDefault()
);
assertEquals(
"failed on test " + i + ": " + Arrays.toString(test),
test[8],
range.areBoundariesInvalid(range.lower().fold(FoldContext.small()), range.upper().fold(FoldContext.small()))
);
}
}
private static ZonedDateTime d(String date) {
return DateUtils.asDateTime(date);
}
private static DataType randomNumericType() {
return randomFrom(INTEGER, SHORT, LONG, UNSIGNED_LONG, FLOAT, DOUBLE);
}
private static DataType randomTextType() {
return randomFrom(KEYWORD, TEXT);
}
}
| RangeTests |
java | spring-projects__spring-framework | spring-webflux/src/main/java/org/springframework/web/reactive/result/method/annotation/ExtendedWebExchangeDataBinder.java | {
"start": 1427,
"end": 1559
} | class ____
* {@link org.springframework.web.reactive.BindingContext}.
*
* @author Rossen Stoyanchev
* @since 6.2.1
*/
public | within |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java | {
"start": 39152,
"end": 40469
} | class ____ extends FSTestWrapperGlobTest {
TestGlobAccessDenied(boolean useFc) {
super(useFc);
}
void run() throws Exception {
privWrap.mkdir(new Path("/nopermission/val"),
new FsPermission((short)0777), true);
privWrap.mkdir(new Path("/norestrictions/val"),
new FsPermission((short)0777), true);
privWrap.setPermission(new Path("/nopermission"),
new FsPermission((short)0));
try {
wrap.globStatus(new Path("/no*/*"),
new AcceptAllPathFilter());
fail("expected to get an AccessControlException when " +
"globbing through a directory we don't have permissions " +
"to list.");
} catch (AccessControlException ioe) {
}
assertEquals("/norestrictions/val",
TestPath.mergeStatuses(wrap.globStatus(
new Path("/norestrictions/*"),
new AcceptAllPathFilter())));
}
}
@Test
public void testGlobAccessDeniedOnFS() throws Exception {
testOnFileSystem(new TestGlobAccessDenied(false));
}
@Test
public void testGlobAccessDeniedOnFC() throws Exception {
testOnFileContext(new TestGlobAccessDenied(true));
}
/**
* Test that trying to list a reserved path on HDFS via the globber works.
**/
private | TestGlobAccessDenied |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/test/java/org/springframework/security/saml2/provider/service/authentication/TestSaml2PostAuthenticationRequests.java | {
"start": 918,
"end": 1392
} | class ____ {
private TestSaml2PostAuthenticationRequests() {
}
public static Saml2PostAuthenticationRequest create() {
RelyingPartyRegistration registration = TestRelyingPartyRegistrations.relyingPartyRegistration().build();
return Saml2PostAuthenticationRequest.withRelyingPartyRegistration(registration)
.authenticationRequestUri("uri")
.samlRequest("samlRequest")
.id("id")
.relayState("relayState")
.build();
}
}
| TestSaml2PostAuthenticationRequests |
java | google__guice | extensions/servlet/src/com/google/inject/servlet/UriPatternType.java | {
"start": 3991,
"end": 5346
} | class ____ implements UriPatternMatcher {
private final Pattern pattern;
private final String originalPattern;
public RegexUriPatternMatcher(String pattern) {
this.originalPattern = pattern;
try {
this.pattern = Pattern.compile(pattern);
} catch (PatternSyntaxException pse) {
throw new IllegalArgumentException("Invalid regex pattern: " + pse.getMessage());
}
}
@Override
public boolean matches(String uri) {
return null != uri && this.pattern.matcher(getUri(uri)).matches();
}
@Override
public String extractPath(String path) {
Matcher matcher = pattern.matcher(path);
if (matcher.matches() && matcher.groupCount() >= 1) {
// Try to capture the everything before the regex begins to match
// the path. This is a rough approximation to try and get parity
// with the servlet style mapping where the path is a capture of
// the URI before the wildcard.
int end = matcher.start(1);
if (end < path.length()) {
return path.substring(0, end);
}
}
return null;
}
@Override
public UriPatternType getPatternType() {
return UriPatternType.REGEX;
}
@Override
public String getOriginalPattern() {
return originalPattern;
}
}
}
| RegexUriPatternMatcher |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/OpentelemetryMetricsComponentBuilderFactory.java | {
"start": 4608,
"end": 5573
} | class ____
extends AbstractComponentBuilder<OpenTelemetryComponent>
implements OpentelemetryMetricsComponentBuilder {
@Override
protected OpenTelemetryComponent buildConcreteComponent() {
return new OpenTelemetryComponent();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "lazyStartProducer": ((OpenTelemetryComponent) component).setLazyStartProducer((boolean) value); return true;
case "autowiredEnabled": ((OpenTelemetryComponent) component).setAutowiredEnabled((boolean) value); return true;
case "meter": ((OpenTelemetryComponent) component).setMeter((io.opentelemetry.api.metrics.Meter) value); return true;
default: return false;
}
}
}
} | OpentelemetryMetricsComponentBuilderImpl |
java | netty__netty | codec-mqtt/src/main/java/io/netty/handler/codec/mqtt/MqttQoS.java | {
"start": 677,
"end": 1295
} | enum ____ {
AT_MOST_ONCE(0),
AT_LEAST_ONCE(1),
EXACTLY_ONCE(2),
FAILURE(0x80);
private final int value;
MqttQoS(int value) {
this.value = value;
}
public int value() {
return value;
}
public static MqttQoS valueOf(int value) {
switch (value) {
case 0:
return AT_MOST_ONCE;
case 1:
return AT_LEAST_ONCE;
case 2:
return EXACTLY_ONCE;
case 0x80:
return FAILURE;
default:
throw new IllegalArgumentException("invalid QoS: " + value);
}
}
}
| MqttQoS |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TestExceptionCheckerTest.java | {
"start": 3823,
"end": 4310
} | class ____ {
@Test(expected = IOException.class)
public void test() throws Exception {
Files.readAllBytes(Paths.get("NOSUCH"));
}
}
""")
.addOutputLines(
"in/ExceptionTest.java",
"""
import static org.junit.Assert.assertThrows;
import java.io.IOException;
import java.nio.file.*;
import org.junit.Test;
| ExceptionTest |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxIndex.java | {
"start": 4195,
"end": 6573
} | class ____<T, I> implements InnerOperator<T, I>,
ConditionalSubscriber<T> {
final ConditionalSubscriber<? super I> actual;
final BiFunction<? super Long, ? super T, ? extends I> indexMapper;
@SuppressWarnings("NotNullFieldNotInitialized") // s initialized in onSubscribe
Subscription s;
boolean done;
long index;
IndexConditionalSubscriber(
ConditionalSubscriber<? super I> cs,
BiFunction<? super Long, ? super T, ? extends I> indexMapper) {
this.actual = cs;
this.indexMapper = indexMapper;
}
@Override
public void onSubscribe(Subscription s) {
if (Operators.validate(this.s, s)) {
this.s = s;
actual.onSubscribe(this);
}
}
@Override
public boolean tryOnNext(T t) {
if (done) {
Operators.onNextDropped(t, actual.currentContext());
return true;
}
I typedIndex;
long i = this.index;
try {
typedIndex = indexMapper.apply(i, t);
this.index = i + 1L;
}
catch (Throwable e) {
onError(Operators.onOperatorError(s, e, t, actual.currentContext()));
return true;
}
return actual.tryOnNext(typedIndex);
}
@Override
public void onNext(T t) {
if (done) {
Operators.onNextDropped(t, actual.currentContext());
return;
}
long i = this.index;
try {
I typedIndex = indexMapper.apply(i, t);
this.index = i + 1L;
actual.onNext(typedIndex);
}
catch (Throwable e) {
onError(Operators.onOperatorError(s, e, t, actual.currentContext()));
}
}
@Override
public void onError(Throwable throwable) {
if (done) {
Operators.onErrorDropped(throwable, actual.currentContext());
return;
}
done = true;
actual.onError(throwable);
}
@Override
public void onComplete() {
if (done) {
return;
}
done = true;
actual.onComplete();
}
@Override
public CoreSubscriber<? super I> actual() {
return this.actual;
}
@Override
public void request(long n) {
s.request(n);
}
@Override
public void cancel() {
s.cancel();
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return s;
if (key == Attr.TERMINATED) return done;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return InnerOperator.super.scanUnsafe(key);
}
}
static | IndexConditionalSubscriber |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/support/nodes/BaseNodesRequest.java | {
"start": 1082,
"end": 3103
} | class ____ extends LegacyActionRequest {
/**
* Sequence of node specifications that describe the nodes that this request should target. See {@link DiscoveryNodes#resolveNodes} for
* a full description of the options. If set, {@link #concreteNodes} is {@code null} and ignored.
**/
private final String[] nodesIds;
/**
* The exact nodes that this request should target. If set, {@link #nodesIds} is {@code null} and ignored.
**/
private final DiscoveryNode[] concreteNodes;
@Nullable // if no timeout
private TimeValue timeout;
protected BaseNodesRequest(String[] nodesIds) {
this.nodesIds = nodesIds;
this.concreteNodes = null;
}
protected BaseNodesRequest(DiscoveryNode... concreteNodes) {
this.nodesIds = null;
this.concreteNodes = concreteNodes;
}
public final String[] nodesIds() {
return nodesIds;
}
@Nullable
public TimeValue timeout() {
return this.timeout;
}
public final void setTimeout(@Nullable TimeValue timeout) {
this.timeout = timeout;
}
@Override
public ActionRequestValidationException validate() {
return null;
}
@Override
public final void writeTo(StreamOutput out) throws IOException {
// `BaseNodesRequest` is rather heavyweight, especially all those `DiscoveryNodes` objects in larger clusters, and there is no need
// to send it out over the wire. Use a dedicated transport request just for the bits you need.
TransportAction.localOnly();
}
/**
* @return the nodes to which this request should fan out.
*/
DiscoveryNode[] resolveNodes(ClusterState clusterState) {
assert nodesIds == null || concreteNodes == null;
return Objects.requireNonNullElseGet(
concreteNodes,
() -> Arrays.stream(clusterState.nodes().resolveNodes(nodesIds)).map(clusterState.nodes()::get).toArray(DiscoveryNode[]::new)
);
}
}
| BaseNodesRequest |
java | apache__camel | components/camel-xj/src/generated/java/org/apache/camel/component/xj/XJComponentConfigurer.java | {
"start": 735,
"end": 870
} | class ____ extends XsltSaxonComponentConfigurer implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
}
| XJComponentConfigurer |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/io/support/PropertiesLoaderSupport.java | {
"start": 1417,
"end": 6180
} | class ____ {
/** Logger available to subclasses. */
protected final Log logger = LogFactory.getLog(getClass());
protected Properties @Nullable [] localProperties;
protected boolean localOverride = false;
private Resource @Nullable [] locations;
private boolean ignoreResourceNotFound = false;
private @Nullable String fileEncoding;
private PropertiesPersister propertiesPersister = DefaultPropertiesPersister.INSTANCE;
/**
* Set local properties, for example, via the "props" tag in XML bean definitions.
* These can be considered defaults, to be overridden by properties
* loaded from files.
*/
public void setProperties(Properties properties) {
this.localProperties = new Properties[] {properties};
}
/**
* Set local properties, for example, via the "props" tag in XML bean definitions,
* allowing for merging multiple properties sets into one.
*/
public void setPropertiesArray(Properties... propertiesArray) {
this.localProperties = propertiesArray;
}
/**
* Set a location of a properties file to be loaded.
* <p>Can point to a classic properties file or to an XML file
* that follows Java's properties XML format.
*/
public void setLocation(Resource location) {
this.locations = new Resource[] {location};
}
/**
* Set locations of properties files to be loaded.
* <p>Can point to classic properties files or to XML files
* that follow Java's properties XML format.
* <p>Note: Properties defined in later files will override
* properties defined earlier files, in case of overlapping keys.
* Hence, make sure that the most specific files are the last
* ones in the given list of locations.
*/
public void setLocations(Resource... locations) {
this.locations = locations;
}
/**
* Set whether local properties override properties from files.
* <p>Default is "false": Properties from files override local defaults.
* Can be switched to "true" to let local properties override defaults
* from files.
*/
public void setLocalOverride(boolean localOverride) {
this.localOverride = localOverride;
}
/**
* Set if failure to find the property resource should be ignored.
* <p>"true" is appropriate if the properties file is completely optional.
* Default is "false".
*/
public void setIgnoreResourceNotFound(boolean ignoreResourceNotFound) {
this.ignoreResourceNotFound = ignoreResourceNotFound;
}
/**
* Set the encoding to use for parsing properties files.
* <p>Default is none, using the {@code java.util.Properties}
* default encoding.
* <p>Only applies to classic properties files, not to XML files.
* @see org.springframework.util.PropertiesPersister#load
*/
public void setFileEncoding(String encoding) {
this.fileEncoding = encoding;
}
/**
* Set the PropertiesPersister to use for parsing properties files.
* The default is {@code DefaultPropertiesPersister}.
* @see DefaultPropertiesPersister#INSTANCE
*/
public void setPropertiesPersister(@Nullable PropertiesPersister propertiesPersister) {
this.propertiesPersister =
(propertiesPersister != null ? propertiesPersister : DefaultPropertiesPersister.INSTANCE);
}
/**
* Return a merged {@link Properties} instance containing both the
* loaded properties and properties set on this component.
*/
protected Properties mergeProperties() throws IOException {
Properties result = new Properties();
if (this.localOverride) {
// Load properties from file upfront, to let local properties override.
loadProperties(result);
}
if (this.localProperties != null) {
for (Properties localProp : this.localProperties) {
CollectionUtils.mergePropertiesIntoMap(localProp, result);
}
}
if (!this.localOverride) {
// Load properties from file afterwards, to let those properties override.
loadProperties(result);
}
return result;
}
/**
* Load properties into the given instance.
* @param props the Properties instance to load into
* @throws IOException in case of I/O errors
* @see #setLocations
*/
protected void loadProperties(Properties props) throws IOException {
if (this.locations != null) {
for (Resource location : this.locations) {
if (logger.isTraceEnabled()) {
logger.trace("Loading properties file from " + location);
}
try {
PropertiesLoaderUtils.fillProperties(
props, new EncodedResource(location, this.fileEncoding), this.propertiesPersister);
}
catch (FileNotFoundException | UnknownHostException | SocketException ex) {
if (this.ignoreResourceNotFound) {
if (logger.isDebugEnabled()) {
logger.debug("Properties resource not found: " + ex.getMessage());
}
}
else {
throw ex;
}
}
}
}
}
}
| PropertiesLoaderSupport |
java | quarkusio__quarkus | test-framework/junit5-component/src/test/java/io/quarkus/test/component/declarative/ListAllInterfaceUnremovableTest.java | {
"start": 358,
"end": 586
} | class ____ {
@Inject
@All
List<SomeInterface> components;
@Test
public void testComponents() {
assertEquals(1, components.size());
}
@Dependent
public static | ListAllInterfaceUnremovableTest |
java | reactor__reactor-core | reactor-core/src/test/java/reactor/core/publisher/SerializedSubscriberTest.java | {
"start": 1316,
"end": 7700
} | class ____ {
//see https://github.com/reactor/reactor-core/issues/2077
@Test
@Tag("slow")
public void onNextRaceWithCancelDoesNotLeak() {
int loops = 0;
while (loops < 100_000) {
CopyOnWriteArrayList<Object> discarded = new CopyOnWriteArrayList<>();
AssertSubscriber<Integer> consumer = new AssertSubscriber<>(
Operators.enableOnDiscard(Context.empty(), discarded::add),
Long.MAX_VALUE);
SerializedSubscriber<Integer> leaky = new SerializedSubscriber<>(consumer);
leaky.onSubscribe(Operators.emptySubscription());
//we use constant so that if debugging is needed we can identify which step caused an issue
leaky.onNext(1);
RaceTestUtils.race(
() -> {
leaky.onNext(2);
leaky.onNext(4);
},
() -> {
leaky.onNext(3);
leaky.cancel();
}
);
//since we want to test for the race condition, we're not interested in iterations where all gets consumed or all gets discarded
//however, we'll smoke test that in these cases at least the other side (discarded or consumed) sees the total number of elements
if (consumer.values().size() == 4) {
assertThat(discarded).as("when consumed all, none discarded").isEmpty();
continue;
}
if (discarded.size() == 4) {
assertThat(consumer.values()).as("when discarded all, none consumed").isEmpty();
continue;
}
//now the meat of the test is to check that total discarded + total seen by consumer = total produced
assertThat(discarded.size() + consumer.values().size())
.as("elements discarded or passed down in round #%s: <%s> and <%s>", loops, discarded, consumer.values())
.isEqualTo(4);
loops++;
}
}
//adaptation of test case exposed in https://github.com/reactor/reactor-core/issues/2077
//we further attempt to detect double discards, and for now ignore these
@Test
void testLeakWithRetryWhenImmediatelyCancelled() throws InterruptedException {
//let's improve readability by using constants for magic number:
// A given element has been discarded before the current operation
final int STATE_DISCARDED = -10;
// A given element has been seen by end Subscriber before the current operation
final int STATE_SEEN = -1;
AtomicInteger createdCount = new AtomicInteger();
AtomicInteger discardedCount = new AtomicInteger();
AtomicInteger seenCount = new AtomicInteger();
AtomicInteger doubleDiscardedCount = new AtomicInteger();
//unacceptable state: both seen and discarded (in any order)
AtomicInteger unacceptableStateCount = new AtomicInteger();
final CountDownLatch latch = new CountDownLatch(4);
Flux.<AtomicInteger>generate(s -> {
int i = createdCount.incrementAndGet();
if (i == 100_000) {
s.next(new AtomicInteger(i));
s.complete();
}
else {
s.next(new AtomicInteger(i));
}
})
.doFinally(sig -> latch.countDown())
.publishOn(Schedulers.single())
.doFinally(sig -> latch.countDown())
.retryWhen(Retry.from(p -> p.take(3, false)))
.doFinally(sig -> latch.countDown())
.cancelOn(Schedulers.parallel())
.doOnDiscard(AtomicInteger.class, i -> {
discardedCount.incrementAndGet();
int previousStatus = i.getAndSet(STATE_DISCARDED);
//here we could switch to printing stacktraces with System.identityHashcode to identify where double discard happens
if (previousStatus == STATE_DISCARDED) {
doubleDiscardedCount.incrementAndGet();
}
else if (previousStatus == STATE_SEEN) {
unacceptableStateCount.incrementAndGet();
}
//otherwise, positive values represent an unseen but discarded value
})
.doFinally(sig -> latch.countDown())
.subscribeWith(new BaseSubscriber<AtomicInteger>() {
@Override
protected void hookOnNext(AtomicInteger value) {
cancel();
int previousStatus = value.getAndSet(STATE_SEEN);
if (previousStatus >= 0) {
//this is a raw value, hasn't been seen nor discarded yet
seenCount.incrementAndGet();
}
else {
//this element has already been seen or discarded, unacceptable
unacceptableStateCount.incrementAndGet();
}
}
});
assertThat(latch.await(5, TimeUnit.SECONDS)).as("latch 5s").isTrue();
with().pollInterval(50, TimeUnit.MILLISECONDS)
.await().atMost(500, TimeUnit.MILLISECONDS)
.untilAsserted(() -> {
int expected = createdCount.get();
int seen = seenCount.get();
int discarded = discardedCount.get() - doubleDiscardedCount.get();
assertThat(unacceptableStateCount).as("unacceptable").hasValue(0);
assertThat(expected)
.withFailMessage("counter not equal to seen+discarded: Expected <%s>, got <%s+%s>=<%s>",
expected, seen, discarded, seen + discarded)
.isEqualTo(seen + discarded);
});
}
@Test
public void scanSerializedSubscriber() {
LambdaSubscriber<String> actual = new LambdaSubscriber<>(null, e -> { }, null, null);
SerializedSubscriber<String> test = new SerializedSubscriber<>(actual);
Subscription subscription = Operators.emptySubscription();
test.onSubscribe(subscription);
assertThat(test.scan(Scannable.Attr.PARENT)).isSameAs(subscription);
assertThat(test.scan(Scannable.Attr.ACTUAL)).isSameAs(actual);
assertThat(test.scan(Scannable.Attr.BUFFERED)).isZero();
assertThat(test.scan(Scannable.Attr.CAPACITY)).isEqualTo(SerializedSubscriber.LinkedArrayNode.DEFAULT_CAPACITY);
assertThat(test.scan(Scannable.Attr.ERROR)).isNull();
assertThat(test.scan(Scannable.Attr.TERMINATED)).isFalse();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.onError(new IllegalStateException("boom"));
assertThat(test.scan(Scannable.Attr.ERROR)).hasMessage("boom");
assertThat(test.scan(Scannable.Attr.TERMINATED)).isTrue();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isFalse();
test.cancel();
assertThat(test.scan(Scannable.Attr.CANCELLED)).isTrue();
}
@Test
public void scanSerializedSubscriberMaxBuffered() {
LambdaSubscriber<String> actual = new LambdaSubscriber<>(null, e -> { }, null, null);
SerializedSubscriber<String> test = new SerializedSubscriber<>(actual);
test.tail = new SerializedSubscriber.LinkedArrayNode<>("");
test.tail.count = Integer.MAX_VALUE;
assertThat(test.scan(Scannable.Attr.BUFFERED)).isEqualTo(Integer.MAX_VALUE);
assertThat(test.scan(Scannable.Attr.LARGE_BUFFERED)).isNull();
}
}
| SerializedSubscriberTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.