language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/search/geo/GeoDistanceQueryBuilderTestCase.java
|
{
"start": 1442,
"end": 15375
}
|
class ____ extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
protected abstract String getFieldName();
@Override
protected GeoDistanceQueryBuilder doCreateTestQueryBuilder() {
String fieldName = getFieldName();
GeoDistanceQueryBuilder qb = new GeoDistanceQueryBuilder(fieldName);
String distance = "" + randomDouble();
if (randomBoolean()) {
DistanceUnit unit = randomFrom(DistanceUnit.values());
distance = distance + unit.toString();
}
int selector = randomIntBetween(0, 2);
switch (selector) {
case 0 -> qb.distance(randomDouble(), randomFrom(DistanceUnit.values()));
case 1 -> qb.distance(distance, randomFrom(DistanceUnit.values()));
case 2 -> qb.distance(distance);
}
qb.point(new GeoPoint(GeometryTestUtils.randomLat(), GeometryTestUtils.randomLon()));
if (randomBoolean()) {
qb.setValidationMethod(randomFrom(GeoValidationMethod.values()));
}
if (randomBoolean()) {
qb.geoDistance(randomFrom(GeoDistance.values()));
}
if (randomBoolean()) {
qb.ignoreUnmapped(randomBoolean());
}
return qb;
}
public void testIllegalValues() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GeoDistanceQueryBuilder(""));
assertEquals("fieldName must not be null or empty", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> new GeoDistanceQueryBuilder((String) null));
assertEquals("fieldName must not be null or empty", e.getMessage());
GeoDistanceQueryBuilder query = new GeoDistanceQueryBuilder("fieldName");
e = expectThrows(IllegalArgumentException.class, () -> query.distance(""));
assertEquals("distance must not be null or empty", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> query.distance(null));
assertEquals("distance must not be null or empty", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> query.distance("", DistanceUnit.DEFAULT));
assertEquals("distance must not be null or empty", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> query.distance(null, DistanceUnit.DEFAULT));
assertEquals("distance must not be null or empty", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> query.distance("1", null));
assertEquals("distance unit must not be null", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> query.distance(1, null));
assertEquals("distance unit must not be null", e.getMessage());
e = expectThrows(
IllegalArgumentException.class,
() -> query.distance(randomIntBetween(Integer.MIN_VALUE, 0), DistanceUnit.DEFAULT)
);
assertEquals("distance must be greater than zero", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> query.geohash(null));
assertEquals("geohash must not be null or empty", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> query.geohash(""));
assertEquals("geohash must not be null or empty", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> query.geoDistance(null));
assertEquals("geoDistance must not be null", e.getMessage());
}
/**
* Overridden here to ensure the test is only run if at least one type is
* present in the mappings. Geo queries do not execute if the field is not
* explicitly mapped
*/
@Override
public void testToQuery() throws IOException {
super.testToQuery();
}
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/86834")
public void testParsingAndToQueryGeoJSON() throws IOException {
// TODO: GeoJSON support missing for geo_distance query, although all other point formats work
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"12mi",
"%s":{
"type": "Point",
"coordinates": [-70,40]
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
}
public void testParsingAndToQueryWKT() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"12mi",
"%s":"POINT(-70 40)"
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
}
public void testParsingAndToQuery1() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"12mi",
"%s":{
"lat":40,
"lon":-70
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
}
public void testParsingAndToQuery2() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"12mi",
"%s":[-70, 40]
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
}
public void testParsingAndToQuery3() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"12mi",
"%s":"40, -70"
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
}
public void testParsingAndToQuery4() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"12mi",
"%s":"drn5x1g8cu2y"
}
}
""", GEO_POINT_FIELD_NAME);
GeoPoint geoPoint = GeoPoint.fromGeohash("drn5x1g8cu2y");
assertGeoDistanceRangeQuery(query, geoPoint.getLat(), geoPoint.getLon(), 12, DistanceUnit.MILES);
}
public void testParsingAndToQuery5() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":12,
"unit":"mi",
"%s":{
"lat":40,
"lon":-70
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
}
public void testParsingAndToQuery6() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"12",
"unit":"mi",
"%s":{
"lat":40,
"lon":-70
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
}
public void testParsingAndToQuery7() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"19.312128",
"%s":{
"lat":40,
"lon":-70
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.DEFAULT);
}
public void testParsingAndToQuery8() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":19.312128,
"%s":{
"lat":40,
"lon":-70
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.DEFAULT);
}
public void testParsingAndToQuery9() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"19.312128",
"unit":"km",
"%s":{
"lat":40,
"lon":-70
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.KILOMETERS);
}
public void testParsingAndToQuery10() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":19.312128,
"unit":"km",
"%s":{
"lat":40,
"lon":-70
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.KILOMETERS);
}
public void testParsingAndToQuery11() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"19.312128km",
"%s":{
"lat":40,
"lon":-70
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 19.312128, DistanceUnit.KILOMETERS);
}
public void testParsingAndToQuery12() throws IOException {
String query = String.format(java.util.Locale.ROOT, """
{
"geo_distance":{
"distance":"12mi",
"unit":"km",
"%s":{
"lat":40,
"lon":-70
}
}
}
""", GEO_POINT_FIELD_NAME);
assertGeoDistanceRangeQuery(query, 40, -70, 12, DistanceUnit.MILES);
}
private void assertGeoDistanceRangeQuery(String query, double lat, double lon, double distance, DistanceUnit distanceUnit)
throws IOException {
Query parsedQuery = parseQuery(query).toQuery(createSearchExecutionContext());
// The parsedQuery contains IndexOrDocValuesQuery, which wraps LatLonPointDistanceQuery which in turn has default visibility,
// so we cannot access its fields directly to check and have to use toString() here instead.
double qLat = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat));
double qLon = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon));
assertThat(
parsedQuery.toString(),
containsString("mapped_geo_point:" + qLat + "," + qLon + " +/- " + distanceUnit.toMeters(distance) + " meters")
);
}
public void testFromJson() throws IOException {
String json = """
{
"geo_distance" : {
"pin.location" : [ -70.0, 40.0 ],
"distance" : 12000.0,
"distance_type" : "arc",
"validation_method" : "STRICT",
"ignore_unmapped" : false,
"boost" : 1.0
}
}""";
GeoDistanceQueryBuilder parsed = (GeoDistanceQueryBuilder) parseQuery(json);
checkGeneratedJson(json, parsed);
assertEquals(json, -70.0, parsed.point().getLon(), 0.0001);
assertEquals(json, 40.0, parsed.point().getLat(), 0.0001);
assertEquals(json, 12000.0, parsed.distance(), 0.0001);
}
public void testIgnoreUnmapped() throws IOException {
final GeoDistanceQueryBuilder queryBuilder = new GeoDistanceQueryBuilder("unmapped").point(0.0, 0.0).distance("20m");
queryBuilder.ignoreUnmapped(true);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext();
Query query = queryBuilder.toQuery(searchExecutionContext);
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
final GeoDistanceQueryBuilder failingQueryBuilder = new GeoDistanceQueryBuilder("unmapped").point(0.0, 0.0).distance("20m");
failingQueryBuilder.ignoreUnmapped(false);
QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(searchExecutionContext));
assertThat(e.getMessage(), containsString("failed to find geo field [unmapped]"));
}
public void testParseFailsWithMultipleFields() throws IOException {
String json = """
{
"geo_distance" : {
"point1" : {
"lat" : 30, "lon" : 12
},
"point2" : {
"lat" : 30, "lon" : 12
}
}
}""";
ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(json));
assertEquals("[geo_distance] query doesn't support multiple fields, found [point1] and [point2]", e.getMessage());
}
}
|
GeoDistanceQueryBuilderTestCase
|
java
|
quarkusio__quarkus
|
extensions/smallrye-metrics/runtime/src/main/java/io/quarkus/smallrye/metrics/runtime/MetadataHolder.java
|
{
"start": 459,
"end": 2233
}
|
class ____ {
private String name;
private MetricType metricType;
private String description;
private String displayName;
private String unit;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public MetricType getMetricType() {
return metricType;
}
public void setMetricType(MetricType metricType) {
this.metricType = metricType;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getDisplayName() {
return displayName;
}
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
public String getUnit() {
return unit;
}
public void setUnit(String unit) {
this.unit = unit;
}
public static MetadataHolder from(Metadata metadata) {
MetadataHolder result = new MetadataHolder();
result.name = metadata.getName();
result.metricType = metadata.getTypeRaw();
result.description = metadata.getDescription();
result.displayName = metadata.getDisplayName();
result.unit = metadata.getUnit();
return result;
}
public Metadata toMetadata() {
final MetadataBuilder builder = Metadata.builder()
.withName(name);
if (description != null) {
builder.withDescription(description);
}
if (displayName != null) {
builder.withDisplayName(displayName);
}
return builder.withType(metricType)
.withUnit(unit)
.build();
}
}
|
MetadataHolder
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/ForwardingNavigableSetTest.java
|
{
"start": 1680,
"end": 7187
}
|
class ____<T> extends ForwardingNavigableSet<T> {
private final NavigableSet<T> backingSet;
StandardImplForwardingNavigableSet(NavigableSet<T> backingSet) {
this.backingSet = backingSet;
}
@Override
protected NavigableSet<T> delegate() {
return backingSet;
}
@Override
public boolean equals(@Nullable Object object) {
return standardEquals(object);
}
@Override
public int hashCode() {
return standardHashCode();
}
@Override
public boolean addAll(Collection<? extends T> collection) {
return standardAddAll(collection);
}
@Override
public void clear() {
standardClear();
}
@Override
public boolean contains(Object object) {
return standardContains(object);
}
@Override
public boolean containsAll(Collection<?> collection) {
return standardContainsAll(collection);
}
@Override
public boolean remove(Object object) {
return standardRemove(object);
}
@Override
public boolean removeAll(Collection<?> collection) {
return standardRemoveAll(collection);
}
@Override
public boolean retainAll(Collection<?> collection) {
return standardRetainAll(collection);
}
@Override
public Object[] toArray() {
return standardToArray();
}
@Override
public <T> T[] toArray(T[] array) {
return standardToArray(array);
}
@Override
public String toString() {
return standardToString();
}
@Override
public SortedSet<T> subSet(T fromElement, T toElement) {
return standardSubSet(fromElement, toElement);
}
@Override
public @Nullable T lower(T e) {
return standardLower(e);
}
@Override
public @Nullable T floor(T e) {
return standardFloor(e);
}
@Override
public @Nullable T ceiling(T e) {
return standardCeiling(e);
}
@Override
public @Nullable T higher(T e) {
return standardHigher(e);
}
@Override
public @Nullable T pollFirst() {
return standardPollFirst();
}
@Override
public @Nullable T pollLast() {
return standardPollLast();
}
@Override
public SortedSet<T> headSet(T toElement) {
return standardHeadSet(toElement);
}
@Override
public SortedSet<T> tailSet(T fromElement) {
return standardTailSet(fromElement);
}
}
@AndroidIncompatible // test-suite builders
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTestSuite(ForwardingNavigableSetTest.class);
suite.addTest(
SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
protected Set<String> create(String[] elements) {
return new StandardImplForwardingNavigableSet<>(
new SafeTreeSet<String>(asList(elements)));
}
@Override
public List<String> order(List<String> insertionOrder) {
return new ArrayList<>(Sets.newTreeSet(insertionOrder));
}
})
.named("ForwardingNavigableSet[SafeTreeSet] with standard implementations")
.withFeatures(
CollectionSize.ANY,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.GENERAL_PURPOSE)
.createTestSuite());
suite.addTest(
SetTestSuiteBuilder.using(
new TestStringSetGenerator() {
@Override
protected Set<String> create(String[] elements) {
SafeTreeSet<String> set = new SafeTreeSet<>(Ordering.natural().nullsFirst());
Collections.addAll(set, elements);
return new StandardImplForwardingNavigableSet<>(set);
}
@Override
public List<String> order(List<String> insertionOrder) {
return new ArrayList<>(Sets.newTreeSet(insertionOrder));
}
})
.named(
"ForwardingNavigableSet[SafeTreeSet[Ordering.natural.nullsFirst]]"
+ " with standard implementations")
.withFeatures(
CollectionSize.ANY,
CollectionFeature.KNOWN_ORDER,
CollectionFeature.GENERAL_PURPOSE,
CollectionFeature.ALLOWS_NULL_VALUES)
.createTestSuite());
return suite;
}
@SuppressWarnings({"rawtypes", "unchecked"})
public void testForwarding() {
new ForwardingWrapperTester()
.testForwarding(
NavigableSet.class,
new Function<NavigableSet, NavigableSet>() {
@Override
public NavigableSet apply(NavigableSet delegate) {
return wrap(delegate);
}
});
}
public void testEquals() {
NavigableSet<String> set1 = ImmutableSortedSet.of("one");
NavigableSet<String> set2 = ImmutableSortedSet.of("two");
new EqualsTester()
.addEqualityGroup(set1, wrap(set1), wrap(set1))
.addEqualityGroup(set2, wrap(set2))
.testEquals();
}
private static <T> NavigableSet<T> wrap(NavigableSet<T> delegate) {
return new ForwardingNavigableSet<T>() {
@Override
protected NavigableSet<T> delegate() {
return delegate;
}
};
}
}
|
StandardImplForwardingNavigableSet
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/persistence/TestingLongStateHandleHelper.java
|
{
"start": 2689,
"end": 2960
}
|
interface ____ extends Serializable {
void run(int discardIdx) throws Exception;
}
/**
* {@code LongStateHandle} implements {@link StateObject} to monitor the {@link
* StateObject#discardState()} calls.
*/
public static
|
PreDiscardCallback
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/serializer/NotWriteDefaultValueTest.java
|
{
"start": 202,
"end": 1866
}
|
class ____ extends TestCase {
public void test_for_byte() throws Exception {
VO_Byte vo = new VO_Byte();
String text = JSON.toJSONString(vo, SerializerFeature.NotWriteDefaultValue);
Assert.assertEquals("{}", text);
}
public void test_for_short() throws Exception {
VO_Short vo = new VO_Short();
String text = JSON.toJSONString(vo, SerializerFeature.NotWriteDefaultValue);
Assert.assertEquals("{}", text);
}
public void test_for_int() throws Exception {
VO_Int vo = new VO_Int();
String text = JSON.toJSONString(vo, SerializerFeature.NotWriteDefaultValue);
Assert.assertEquals("{}", text);
}
public void test_for_long() throws Exception {
VO_Long vo = new VO_Long();
String text = JSON.toJSONString(vo, SerializerFeature.NotWriteDefaultValue);
Assert.assertEquals("{}", text);
}
public void test_for_float() throws Exception {
VO_Float vo = new VO_Float();
String text = JSON.toJSONString(vo, SerializerFeature.NotWriteDefaultValue);
Assert.assertEquals("{}", text);
}
public void test_for_double() throws Exception {
VO_Double vo = new VO_Double();
String text = JSON.toJSONString(vo, SerializerFeature.NotWriteDefaultValue);
Assert.assertEquals("{}", text);
}
public void test_for_boolean() throws Exception {
VO_Boolean vo = new VO_Boolean();
vo.f1 = true;
String text = JSON.toJSONString(vo, SerializerFeature.NotWriteDefaultValue);
Assert.assertEquals("{\"f1\":true}", text);
}
public static
|
NotWriteDefaultValueTest
|
java
|
elastic__elasticsearch
|
modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/direct/GetDatabaseConfigurationAction.java
|
{
"start": 2141,
"end": 2936
}
|
class ____ extends BaseNodesRequest {
private final String[] databaseIds;
public Request(String... databaseIds) {
super((String[]) null);
this.databaseIds = databaseIds;
}
public String[] getDatabaseIds() {
return databaseIds;
}
@Override
public int hashCode() {
return Arrays.hashCode(databaseIds);
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (obj.getClass() != getClass()) {
return false;
}
Request other = (Request) obj;
return Arrays.equals(databaseIds, other.databaseIds);
}
}
public static
|
Request
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/codec/tsdb/es819/ES819TSDBDocValuesProducer.java
|
{
"start": 97534,
"end": 98085
}
|
class ____ {
long termsDictSize;
DirectMonotonicReader.Meta termsAddressesMeta;
int maxTermLength;
long termsDataOffset;
long termsDataLength;
long termsAddressesOffset;
long termsAddressesLength;
int termsDictIndexShift;
DirectMonotonicReader.Meta termsIndexAddressesMeta;
long termsIndexOffset;
long termsIndexLength;
long termsIndexAddressesOffset;
long termsIndexAddressesLength;
int maxBlockLength;
}
static final
|
TermsDictEntry
|
java
|
netty__netty
|
transport-classes-kqueue/src/main/java/io/netty/channel/kqueue/AbstractKQueueStreamChannel.java
|
{
"start": 2097,
"end": 21164
}
|
class ____ extends AbstractKQueueChannel implements DuplexChannel {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(AbstractKQueueStreamChannel.class);
private static final ChannelMetadata METADATA = new ChannelMetadata(false, 16);
private static final String EXPECTED_TYPES =
" (expected: " + StringUtil.simpleClassName(ByteBuf.class) + ", " +
StringUtil.simpleClassName(DefaultFileRegion.class) + ')';
private WritableByteChannel byteChannel;
private final Runnable flushTask = new Runnable() {
@Override
public void run() {
// Calling flush0 directly to ensure we not try to flush messages that were added via write(...) in the
// meantime.
((AbstractKQueueUnsafe) unsafe()).flush0();
}
};
AbstractKQueueStreamChannel(Channel parent, BsdSocket fd, boolean active) {
super(parent, fd, active);
}
AbstractKQueueStreamChannel(Channel parent, BsdSocket fd, SocketAddress remote) {
super(parent, fd, remote);
}
AbstractKQueueStreamChannel(BsdSocket fd) {
this(null, fd, isSoErrorZero(fd));
}
@Override
protected AbstractKQueueUnsafe newUnsafe() {
return new KQueueStreamUnsafe();
}
@Override
public ChannelMetadata metadata() {
return METADATA;
}
/**
* Write bytes form the given {@link ByteBuf} to the underlying {@link java.nio.channels.Channel}.
* @param in the collection which contains objects to write.
* @param buf the {@link ByteBuf} from which the bytes should be written
* @return The value that should be decremented from the write quantum which starts at
* {@link ChannelConfig#getWriteSpinCount()}. The typical use cases are as follows:
* <ul>
* <li>0 - if no write was attempted. This is appropriate if an empty {@link ByteBuf} (or other empty content)
* is encountered</li>
* <li>1 - if a single call to write data was made to the OS</li>
* <li>{@link ChannelUtils#WRITE_STATUS_SNDBUF_FULL} - if an attempt to write data was made to the OS, but no
* data was accepted</li>
* </ul>
*/
private int writeBytes(ChannelOutboundBuffer in, ByteBuf buf) throws Exception {
int readableBytes = buf.readableBytes();
if (readableBytes == 0) {
in.remove();
return 0;
}
if (buf.hasMemoryAddress() || buf.nioBufferCount() == 1) {
return doWriteBytes(in, buf);
} else {
ByteBuffer[] nioBuffers = buf.nioBuffers();
return writeBytesMultiple(in, nioBuffers, nioBuffers.length, readableBytes,
config().getMaxBytesPerGatheringWrite());
}
}
private void adjustMaxBytesPerGatheringWrite(long attempted, long written, long oldMaxBytesPerGatheringWrite) {
// By default we track the SO_SNDBUF when ever it is explicitly set. However some OSes may dynamically change
// SO_SNDBUF (and other characteristics that determine how much data can be written at once) so we should try
// make a best effort to adjust as OS behavior changes.
if (attempted == written) {
if (attempted << 1 > oldMaxBytesPerGatheringWrite) {
config().setMaxBytesPerGatheringWrite(attempted << 1);
}
} else if (attempted > MAX_BYTES_PER_GATHERING_WRITE_ATTEMPTED_LOW_THRESHOLD && written < attempted >>> 1) {
config().setMaxBytesPerGatheringWrite(attempted >>> 1);
}
}
/**
* Write multiple bytes via {@link IovArray}.
* @param in the collection which contains objects to write.
* @param array The array which contains the content to write.
* @return The value that should be decremented from the write quantum which starts at
* {@link ChannelConfig#getWriteSpinCount()}. The typical use cases are as follows:
* <ul>
* <li>0 - if no write was attempted. This is appropriate if an empty {@link ByteBuf} (or other empty content)
* is encountered</li>
* <li>1 - if a single call to write data was made to the OS</li>
* <li>{@link ChannelUtils#WRITE_STATUS_SNDBUF_FULL} - if an attempt to write data was made to the OS, but
* no data was accepted</li>
* </ul>
* @throws IOException If an I/O exception occurs during write.
*/
private int writeBytesMultiple(ChannelOutboundBuffer in, IovArray array) throws IOException {
final long expectedWrittenBytes = array.size();
assert expectedWrittenBytes != 0;
final int cnt = array.count();
assert cnt != 0;
final long localWrittenBytes = socket.writevAddresses(array.memoryAddress(0), cnt);
if (localWrittenBytes > 0) {
adjustMaxBytesPerGatheringWrite(expectedWrittenBytes, localWrittenBytes, array.maxBytes());
in.removeBytes(localWrittenBytes);
return 1;
}
return WRITE_STATUS_SNDBUF_FULL;
}
/**
* Write multiple bytes via {@link ByteBuffer} array.
* @param in the collection which contains objects to write.
* @param nioBuffers The buffers to write.
* @param nioBufferCnt The number of buffers to write.
* @param expectedWrittenBytes The number of bytes we expect to write.
* @param maxBytesPerGatheringWrite The maximum number of bytes we should attempt to write.
* @return The value that should be decremented from the write quantum which starts at
* {@link ChannelConfig#getWriteSpinCount()}. The typical use cases are as follows:
* <ul>
* <li>0 - if no write was attempted. This is appropriate if an empty {@link ByteBuf} (or other empty content)
* is encountered</li>
* <li>1 - if a single call to write data was made to the OS</li>
* <li>{@link ChannelUtils#WRITE_STATUS_SNDBUF_FULL} - if an attempt to write data was made to the OS, but
* no data was accepted</li>
* </ul>
* @throws IOException If an I/O exception occurs during write.
*/
private int writeBytesMultiple(
ChannelOutboundBuffer in, ByteBuffer[] nioBuffers, int nioBufferCnt, long expectedWrittenBytes,
long maxBytesPerGatheringWrite) throws IOException {
assert expectedWrittenBytes != 0;
if (expectedWrittenBytes > maxBytesPerGatheringWrite) {
expectedWrittenBytes = maxBytesPerGatheringWrite;
}
final long localWrittenBytes = socket.writev(nioBuffers, 0, nioBufferCnt, expectedWrittenBytes);
if (localWrittenBytes > 0) {
adjustMaxBytesPerGatheringWrite(expectedWrittenBytes, localWrittenBytes, maxBytesPerGatheringWrite);
in.removeBytes(localWrittenBytes);
return 1;
}
return WRITE_STATUS_SNDBUF_FULL;
}
/**
* Write a {@link DefaultFileRegion}
* @param in the collection which contains objects to write.
* @param region the {@link DefaultFileRegion} from which the bytes should be written
* @return The value that should be decremented from the write quantum which starts at
* {@link ChannelConfig#getWriteSpinCount()}. The typical use cases are as follows:
* <ul>
* <li>0 - if no write was attempted. This is appropriate if an empty {@link ByteBuf} (or other empty content)
* is encountered</li>
* <li>1 - if a single call to write data was made to the OS</li>
* <li>{@link ChannelUtils#WRITE_STATUS_SNDBUF_FULL} - if an attempt to write data was made to the OS, but
* no data was accepted</li>
* </ul>
*/
private int writeDefaultFileRegion(ChannelOutboundBuffer in, DefaultFileRegion region) throws Exception {
final long regionCount = region.count();
final long offset = region.transferred();
if (offset >= regionCount) {
in.remove();
return 0;
}
final long flushedAmount = socket.sendFile(region, region.position(), offset, regionCount - offset);
if (flushedAmount > 0) {
in.progress(flushedAmount);
if (region.transferred() >= regionCount) {
in.remove();
}
return 1;
} else if (flushedAmount == 0) {
validateFileRegion(region, offset);
}
return WRITE_STATUS_SNDBUF_FULL;
}
/**
* Write a {@link FileRegion}
* @param in the collection which contains objects to write.
* @param region the {@link FileRegion} from which the bytes should be written
* @return The value that should be decremented from the write quantum which starts at
* {@link ChannelConfig#getWriteSpinCount()}. The typical use cases are as follows:
* <ul>
* <li>0 - if no write was attempted. This is appropriate if an empty {@link ByteBuf} (or other empty content)
* is encountered</li>
* <li>1 - if a single call to write data was made to the OS</li>
* <li>{@link ChannelUtils#WRITE_STATUS_SNDBUF_FULL} - if an attempt to write data was made to the OS, but no
* data was accepted</li>
* </ul>
*/
private int writeFileRegion(ChannelOutboundBuffer in, FileRegion region) throws Exception {
if (region.transferred() >= region.count()) {
in.remove();
return 0;
}
if (byteChannel == null) {
byteChannel = new KQueueSocketWritableByteChannel();
}
final long flushedAmount = region.transferTo(byteChannel, region.transferred());
if (flushedAmount > 0) {
in.progress(flushedAmount);
if (region.transferred() >= region.count()) {
in.remove();
}
return 1;
}
return WRITE_STATUS_SNDBUF_FULL;
}
@Override
protected void doWrite(ChannelOutboundBuffer in) throws Exception {
int writeSpinCount = config().getWriteSpinCount();
do {
final int msgCount = in.size();
// Do gathering write if the outbound buffer entries start with more than one ByteBuf.
if (msgCount > 1 && in.current() instanceof ByteBuf) {
writeSpinCount -= doWriteMultiple(in);
} else if (msgCount == 0) {
// Wrote all messages.
writeFilter(false);
// Return here so we don't set the WRITE flag.
return;
} else { // msgCount == 1
writeSpinCount -= doWriteSingle(in);
}
// We do not break the loop here even if the outbound buffer was flushed completely,
// because a user might have triggered another write and flush when we notify his or her
// listeners.
} while (writeSpinCount > 0);
if (writeSpinCount == 0) {
// It is possible that we have set the write filter, woken up by KQUEUE because the socket is writable, and
// then use our write quantum. In this case we no longer want to set the write filter because the socket is
// still writable (as far as we know). We will find out next time we attempt to write if the socket is
// writable and set the write filter if necessary.
writeFilter(false);
// We used our writeSpin quantum, and should try to write again later.
eventLoop().execute(flushTask);
} else {
// Underlying descriptor can not accept all data currently, so set the WRITE flag to be woken up
// when it can accept more data.
writeFilter(true);
}
}
/**
* Attempt to write a single object.
* @param in the collection which contains objects to write.
* @return The value that should be decremented from the write quantum which starts at
* {@link ChannelConfig#getWriteSpinCount()}. The typical use cases are as follows:
* <ul>
* <li>0 - if no write was attempted. This is appropriate if an empty {@link ByteBuf} (or other empty content)
* is encountered</li>
* <li>1 - if a single call to write data was made to the OS</li>
* <li>{@link ChannelUtils#WRITE_STATUS_SNDBUF_FULL} - if an attempt to write data was made to the OS, but no
* data was accepted</li>
* </ul>
* @throws Exception If an I/O error occurs.
*/
protected int doWriteSingle(ChannelOutboundBuffer in) throws Exception {
// The outbound buffer contains only one message or it contains a file region.
Object msg = in.current();
if (msg instanceof ByteBuf) {
return writeBytes(in, (ByteBuf) msg);
} else if (msg instanceof DefaultFileRegion) {
return writeDefaultFileRegion(in, (DefaultFileRegion) msg);
} else if (msg instanceof FileRegion) {
return writeFileRegion(in, (FileRegion) msg);
} else {
// Should never reach here.
throw new Error("Unexpected message type: " + className(msg));
}
}
/**
* Attempt to write multiple {@link ByteBuf} objects.
* @param in the collection which contains objects to write.
* @return The value that should be decremented from the write quantum which starts at
* {@link ChannelConfig#getWriteSpinCount()}. The typical use cases are as follows:
* <ul>
* <li>0 - if no write was attempted. This is appropriate if an empty {@link ByteBuf} (or other empty content)
* is encountered</li>
* <li>1 - if a single call to write data was made to the OS</li>
* <li>{@link ChannelUtils#WRITE_STATUS_SNDBUF_FULL} - if an attempt to write data was made to the OS, but no
* data was accepted</li>
* </ul>
* @throws Exception If an I/O error occurs.
*/
private int doWriteMultiple(ChannelOutboundBuffer in) throws Exception {
final long maxBytesPerGatheringWrite = config().getMaxBytesPerGatheringWrite();
IovArray array = ((NativeArrays) registration().attachment()).cleanIovArray();
array.maxBytes(maxBytesPerGatheringWrite);
in.forEachFlushedMessage(array);
if (array.count() >= 1) {
// TODO: Handle the case where cnt == 1 specially.
return writeBytesMultiple(in, array);
}
// cnt == 0, which means the outbound buffer contained empty buffers only.
in.removeBytes(0);
return 0;
}
@Override
protected Object filterOutboundMessage(Object msg) {
if (msg instanceof ByteBuf) {
ByteBuf buf = (ByteBuf) msg;
return UnixChannelUtil.isBufferCopyNeededForWrite(buf)? newDirectBuffer(buf) : buf;
}
if (msg instanceof FileRegion) {
return msg;
}
throw new UnsupportedOperationException(
"unsupported message type: " + StringUtil.simpleClassName(msg) + EXPECTED_TYPES);
}
@UnstableApi
@Override
protected final void doShutdownOutput() throws Exception {
socket.shutdown(false, true);
}
@Override
public boolean isOutputShutdown() {
return socket.isOutputShutdown();
}
@Override
public boolean isInputShutdown() {
return socket.isInputShutdown();
}
@Override
public boolean isShutdown() {
return socket.isShutdown();
}
@Override
public ChannelFuture shutdownOutput() {
return shutdownOutput(newPromise());
}
@Override
public ChannelFuture shutdownOutput(final ChannelPromise promise) {
EventLoop loop = eventLoop();
if (loop.inEventLoop()) {
((AbstractUnsafe) unsafe()).shutdownOutput(promise);
} else {
loop.execute(new Runnable() {
@Override
public void run() {
((AbstractUnsafe) unsafe()).shutdownOutput(promise);
}
});
}
return promise;
}
@Override
public ChannelFuture shutdownInput() {
return shutdownInput(newPromise());
}
@Override
public ChannelFuture shutdownInput(final ChannelPromise promise) {
EventLoop loop = eventLoop();
if (loop.inEventLoop()) {
shutdownInput0(promise);
} else {
loop.execute(new Runnable() {
@Override
public void run() {
shutdownInput0(promise);
}
});
}
return promise;
}
private void shutdownInput0(ChannelPromise promise) {
try {
socket.shutdown(true, false);
} catch (Throwable cause) {
promise.setFailure(cause);
return;
}
promise.setSuccess();
}
@Override
public ChannelFuture shutdown() {
return shutdown(newPromise());
}
@Override
public ChannelFuture shutdown(final ChannelPromise promise) {
ChannelFuture shutdownOutputFuture = shutdownOutput();
if (shutdownOutputFuture.isDone()) {
shutdownOutputDone(shutdownOutputFuture, promise);
} else {
shutdownOutputFuture.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(final ChannelFuture shutdownOutputFuture) throws Exception {
shutdownOutputDone(shutdownOutputFuture, promise);
}
});
}
return promise;
}
private void shutdownOutputDone(final ChannelFuture shutdownOutputFuture, final ChannelPromise promise) {
ChannelFuture shutdownInputFuture = shutdownInput();
if (shutdownInputFuture.isDone()) {
shutdownDone(shutdownOutputFuture, shutdownInputFuture, promise);
} else {
shutdownInputFuture.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture shutdownInputFuture) throws Exception {
shutdownDone(shutdownOutputFuture, shutdownInputFuture, promise);
}
});
}
}
private static void shutdownDone(ChannelFuture shutdownOutputFuture,
ChannelFuture shutdownInputFuture,
ChannelPromise promise) {
Throwable shutdownOutputCause = shutdownOutputFuture.cause();
Throwable shutdownInputCause = shutdownInputFuture.cause();
if (shutdownOutputCause != null) {
if (shutdownInputCause != null) {
logger.debug("Exception suppressed because a previous exception occurred.",
shutdownInputCause);
}
promise.setFailure(shutdownOutputCause);
} else if (shutdownInputCause != null) {
promise.setFailure(shutdownInputCause);
} else {
promise.setSuccess();
}
}
|
AbstractKQueueStreamChannel
|
java
|
apache__camel
|
tooling/camel-tooling-maven/src/main/java/org/apache/camel/tooling/maven/support/DIRegistry.java
|
{
"start": 3785,
"end": 13480
}
|
class ____ be annotated with {@link Named}. (Maybe supporting
* {@link jakarta.inject.Singleton} soon).
*
* @param key the lookup type
* @param type the actual type (to use when instantiating a bean)
*/
public void bind(Class<?> key, Class<?> type) {
String name = key.getName();
for (Annotation ann : type.getAnnotations()) {
if (isNamedAnnotation(ann)) {
name = getNamedAnnotationValue(type);
if (name == null || name.isBlank()) {
name = key.getName();
}
}
}
Constructor<?> defaultConstructor = null;
Comparator<Constructor<?>> byParamCount = Comparator.<Constructor<?>> comparingInt(Constructor::getParameterCount)
.reversed();
Set<Constructor<?>> constructors = new TreeSet<>(byParamCount);
for (Constructor<?> ctr : type.getDeclaredConstructors()) {
if (ctr.getParameterCount() == 0) {
defaultConstructor = ctr;
} else {
if (hasInjectAnnotation(ctr)) {
constructors.add(ctr);
}
}
}
if (constructors.isEmpty() && defaultConstructor != null) {
// no need to lazy evaluate such bean
try {
Object instance = defaultConstructor.newInstance();
bind(name, key, instance);
return;
} catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
throw new IllegalArgumentException("Problem registering bean of " + type.getName() + " type");
}
}
if (!constructors.isEmpty()) {
Constructor<?> ctr = constructors.iterator().next();
// dependency-cycle alert!
final Type[] parameterTypes = ctr.getGenericParameterTypes();
Supplier<?> lazyCreator = new Supplier<>() {
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public Object get() {
if (underConstruction.contains(this)) {
throw new IllegalStateException(
"Cyclic dependency found when creating bean of "
+ type.getName() + " type");
}
underConstruction.add(this);
try {
final Object[] parameters = new Object[parameterTypes.length];
int pc = 0;
for (Type pt : parameterTypes) {
Class<?> t = null;
Object param = null;
if (pt instanceof ParameterizedType) {
Class<?> rawType = (Class<?>) ((ParameterizedType) pt).getRawType();
// when it's not a collection/map, skip the type parameter part (for now)
Type[] typeArguments = ((ParameterizedType) pt).getActualTypeArguments();
if (Collection.class.isAssignableFrom(rawType)) {
if (typeArguments.length == 1) {
// set or list (for now)
Type vType = typeArguments[0];
t = rawType;
if (Set.class == rawType) {
param = new LinkedHashSet<>();
Map<String, ?> values = findByTypeWithName((Class<?>) vType);
((Set) param).addAll(values.values());
} else if (List.class == rawType) {
param = new ArrayList<>();
Map<String, ?> values = findByTypeWithName((Class<?>) vType);
((List) param).addAll(values.values());
}
}
} else if (Map.class == rawType) {
if (typeArguments.length == 2) {
// first type must be String (name - from @Named or FQCN)
Type vType = typeArguments[1];
t = rawType;
param = new LinkedHashMap<>();
Map<String, ?> values = findByTypeWithName((Class<?>) vType);
((Map) param).putAll(values);
}
} else {
t = rawType;
}
} else if (pt instanceof Class) {
t = (Class<?>) pt;
if (t.isArray()) {
Map<String, ?> values = findByTypeWithName(t.getComponentType());
param = Array.newInstance(t.getComponentType(), values.size());
System.arraycopy(values.values().toArray(), 0, param, 0, values.size());
}
}
if (t == null) {
throw new IllegalArgumentException(
"Can't handle argument of " + pt
+ " type when creating bean of " + type.getName() + " type");
}
if (param == null) {
List<Object> instances = byClass.get(t);
if (instances == null) {
throw new IllegalArgumentException(
"Missing " + t.getName()
+ " instance when creating bean of " + type.getName()
+ " type");
}
if (instances.size() > 1) {
throw new IllegalArgumentException(
"Ambiguous parameter of " + t.getName()
+ " when creating bean of " + type.getName() + " type");
}
param = instances.get(0);
}
// this is where recursion may happen.
parameters[pc++] = param instanceof Supplier ? ((Supplier<?>) param).get() : param;
}
try {
ctr.setAccessible(true);
return ctr.newInstance(parameters);
} catch (InstantiationException | IllegalAccessException
| InvocationTargetException | IllegalArgumentException e) {
throw new IllegalArgumentException(
"Problem instantiating bean of "
+ type.getName() + " type",
e);
}
} finally {
underConstruction.remove(this);
}
}
};
bind(name, key, Suppliers.memorize(lazyCreator));
}
}
/**
* Make an {@code alias} point to the same target bean as existing {@code key}.
*
* @param alias
* @param key
*/
public void alias(Class<?> alias, Class<?> key) {
if (byClass.containsKey(key)) {
List<Object> recipes = byClass.get(key);
byClass.put(alias, recipes);
String id = alias.getName();
if (recipes.size() > 1) {
throw new IllegalArgumentException("Multiple recipes for " + key.getName() + " type");
}
computeIfAbsent(id, k -> new LinkedHashMap<>()).put(alias, recipes.get(0));
}
}
@Override
public void bind(String id, Class<?> type, Object bean) {
byClass.computeIfAbsent(type, c -> new ArrayList<>()).add(bean);
super.bind(id, type, bean);
}
@Override
public void bind(String id, Class<?> type, Supplier<Object> bean) {
byClass.computeIfAbsent(type, c -> new ArrayList<>()).add(bean);
super.bind(id, type, bean);
}
@Override
public void bindAsPrototype(String id, Class<?> type, Supplier<Object> bean) {
byClass.computeIfAbsent(type, c -> new ArrayList<>()).add(bean);
super.bindAsPrototype(id, type, bean);
}
@SuppressWarnings("unchecked")
public <T> T lookupByClass(Class<T> cls) {
List<Object> instances = byClass.get(cls);
if (instances == null) {
return null;
}
if (instances.size() > 1) {
throw new IllegalArgumentException("Multiple beans of " + cls.getName() + " type available");
}
Object instance = instances.get(0);
return (T) (instance instanceof Supplier ? ((Supplier<?>) instance).get() : instance);
}
}
|
may
|
java
|
dropwizard__dropwizard
|
dropwizard-hibernate/src/test/java/io/dropwizard/hibernate/LazyLoadingTest.java
|
{
"start": 3075,
"end": 3827
}
|
class ____ {
private final DropwizardAppExtension<TestConfiguration> appExtension = new DropwizardAppExtension<>(
TestApplicationWithDisabledLazyLoading.class, "hibernate-integration-test.yaml",
new ResourceConfigurationSourceProvider(),
config("dataSource.url", "jdbc:h2:mem:DbTest" + System.nanoTime())
);
@Test
void sendsNullWhenDisabled() {
final Dog raf = appExtension.client().target("http://localhost:" + appExtension.getLocalPort()).path("/dogs/Raf").request(MediaType.APPLICATION_JSON).get(Dog.class);
assertThat(raf.getName()).isEqualTo("Raf");
assertThat(raf.getOwner()).isNull();
}
}
public static
|
LazyLoadingDisabledTest
|
java
|
apache__camel
|
components/camel-google/camel-google-mail/src/generated/java/org/apache/camel/component/google/mail/GmailUsersHistoryEndpointConfigurationConfigurer.java
|
{
"start": 750,
"end": 8155
}
|
class ____ extends org.apache.camel.support.component.PropertyConfigurerSupport implements GeneratedPropertyConfigurer, ExtendedPropertyConfigurerGetter {
private static final Map<String, Object> ALL_OPTIONS;
static {
Map<String, Object> map = new CaseInsensitiveMap();
map.put("AccessToken", java.lang.String.class);
map.put("ApiName", org.apache.camel.component.google.mail.internal.GoogleMailApiName.class);
map.put("ApplicationName", java.lang.String.class);
map.put("ClientId", java.lang.String.class);
map.put("ClientSecret", java.lang.String.class);
map.put("Delegate", java.lang.String.class);
map.put("HistoryTypes", java.util.List.class);
map.put("LabelId", java.lang.String.class);
map.put("MaxResults", java.lang.Long.class);
map.put("MethodName", java.lang.String.class);
map.put("PageToken", java.lang.String.class);
map.put("RefreshToken", java.lang.String.class);
map.put("Scopes", java.lang.String.class);
map.put("ServiceAccountKey", java.lang.String.class);
map.put("StartHistoryId", java.math.BigInteger.class);
map.put("UserId", java.lang.String.class);
ALL_OPTIONS = map;
}
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
org.apache.camel.component.google.mail.GmailUsersHistoryEndpointConfiguration target = (org.apache.camel.component.google.mail.GmailUsersHistoryEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": target.setAccessToken(property(camelContext, java.lang.String.class, value)); return true;
case "apiname":
case "apiName": target.setApiName(property(camelContext, org.apache.camel.component.google.mail.internal.GoogleMailApiName.class, value)); return true;
case "applicationname":
case "applicationName": target.setApplicationName(property(camelContext, java.lang.String.class, value)); return true;
case "clientid":
case "clientId": target.setClientId(property(camelContext, java.lang.String.class, value)); return true;
case "clientsecret":
case "clientSecret": target.setClientSecret(property(camelContext, java.lang.String.class, value)); return true;
case "delegate": target.setDelegate(property(camelContext, java.lang.String.class, value)); return true;
case "historytypes":
case "historyTypes": target.setHistoryTypes(property(camelContext, java.util.List.class, value)); return true;
case "labelid":
case "labelId": target.setLabelId(property(camelContext, java.lang.String.class, value)); return true;
case "maxresults":
case "maxResults": target.setMaxResults(property(camelContext, java.lang.Long.class, value)); return true;
case "methodname":
case "methodName": target.setMethodName(property(camelContext, java.lang.String.class, value)); return true;
case "pagetoken":
case "pageToken": target.setPageToken(property(camelContext, java.lang.String.class, value)); return true;
case "refreshtoken":
case "refreshToken": target.setRefreshToken(property(camelContext, java.lang.String.class, value)); return true;
case "scopes": target.setScopes(property(camelContext, java.lang.String.class, value)); return true;
case "serviceaccountkey":
case "serviceAccountKey": target.setServiceAccountKey(property(camelContext, java.lang.String.class, value)); return true;
case "starthistoryid":
case "startHistoryId": target.setStartHistoryId(property(camelContext, java.math.BigInteger.class, value)); return true;
case "userid":
case "userId": target.setUserId(property(camelContext, java.lang.String.class, value)); return true;
default: return false;
}
}
@Override
public Map<String, Object> getAllOptions(Object target) {
return ALL_OPTIONS;
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": return java.lang.String.class;
case "apiname":
case "apiName": return org.apache.camel.component.google.mail.internal.GoogleMailApiName.class;
case "applicationname":
case "applicationName": return java.lang.String.class;
case "clientid":
case "clientId": return java.lang.String.class;
case "clientsecret":
case "clientSecret": return java.lang.String.class;
case "delegate": return java.lang.String.class;
case "historytypes":
case "historyTypes": return java.util.List.class;
case "labelid":
case "labelId": return java.lang.String.class;
case "maxresults":
case "maxResults": return java.lang.Long.class;
case "methodname":
case "methodName": return java.lang.String.class;
case "pagetoken":
case "pageToken": return java.lang.String.class;
case "refreshtoken":
case "refreshToken": return java.lang.String.class;
case "scopes": return java.lang.String.class;
case "serviceaccountkey":
case "serviceAccountKey": return java.lang.String.class;
case "starthistoryid":
case "startHistoryId": return java.math.BigInteger.class;
case "userid":
case "userId": return java.lang.String.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
org.apache.camel.component.google.mail.GmailUsersHistoryEndpointConfiguration target = (org.apache.camel.component.google.mail.GmailUsersHistoryEndpointConfiguration) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "accesstoken":
case "accessToken": return target.getAccessToken();
case "apiname":
case "apiName": return target.getApiName();
case "applicationname":
case "applicationName": return target.getApplicationName();
case "clientid":
case "clientId": return target.getClientId();
case "clientsecret":
case "clientSecret": return target.getClientSecret();
case "delegate": return target.getDelegate();
case "historytypes":
case "historyTypes": return target.getHistoryTypes();
case "labelid":
case "labelId": return target.getLabelId();
case "maxresults":
case "maxResults": return target.getMaxResults();
case "methodname":
case "methodName": return target.getMethodName();
case "pagetoken":
case "pageToken": return target.getPageToken();
case "refreshtoken":
case "refreshToken": return target.getRefreshToken();
case "scopes": return target.getScopes();
case "serviceaccountkey":
case "serviceAccountKey": return target.getServiceAccountKey();
case "starthistoryid":
case "startHistoryId": return target.getStartHistoryId();
case "userid":
case "userId": return target.getUserId();
default: return null;
}
}
}
|
GmailUsersHistoryEndpointConfigurationConfigurer
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/client/OAuth2LoginConfigurerTests.java
|
{
"start": 39357,
"end": 39956
}
|
class ____ extends CommonSecurityFilterChainConfig {
private final InMemoryClientRegistrationRepository clientRegistrationRepository = new InMemoryClientRegistrationRepository(
GOOGLE_CLIENT_REGISTRATION);
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.oauth2Login((login) -> login
.clientRegistrationRepository(this.clientRegistrationRepository))
.formLogin(withDefaults());
// @formatter:on
return super.configureFilterChain(http);
}
}
@Configuration
@EnableWebSecurity
static
|
OAuth2LoginConfigFormLogin
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/serialization/PreferSerializationProvider.java
|
{
"start": 857,
"end": 936
}
|
interface ____ {
String getPreferSerialization();
}
|
PreferSerializationProvider
|
java
|
netty__netty
|
buffer/src/main/java/io/netty/buffer/AdaptivePoolingAllocator.java
|
{
"start": 74725,
"end": 75214
}
|
interface ____ {
/**
* Allocate a buffer for a chunk. This can be any kind of {@link AbstractByteBuf} implementation.
* @param initialCapacity The initial capacity of the returned {@link AbstractByteBuf}.
* @param maxCapacity The maximum capacity of the returned {@link AbstractByteBuf}.
* @return The buffer that represents the chunk memory.
*/
AbstractByteBuf allocate(int initialCapacity, int maxCapacity);
}
}
|
ChunkAllocator
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/object/ObjectAssert_hasAllNullFieldsOrPropertiesExcept_Test.java
|
{
"start": 964,
"end": 1429
}
|
class ____ extends ObjectAssertBaseTest {
private static final String FIELD_NAME = "color";
@Override
protected ObjectAssert<Jedi> invoke_api_method() {
return assertions.hasAllNullFieldsOrPropertiesExcept(FIELD_NAME);
}
@Override
protected void verify_internal_effects() {
verify(objects).assertHasAllNullFieldsOrPropertiesExcept(getInfo(assertions), getActual(assertions), FIELD_NAME);
}
}
|
ObjectAssert_hasAllNullFieldsOrPropertiesExcept_Test
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/onetoone/bidirectional/BidirectionalOneToOneCascadeRemoveTest.java
|
{
"start": 2175,
"end": 2759
}
|
class ____ {
@Id
protected String id;
@Column( name = "name_col" )
protected String name;
@Column( name = "value_col" )
protected int value;
// ===========================================================
// relationship fields
@OneToOne( cascade = CascadeType.REMOVE )
@JoinColumn( name = "a1_id" )
protected A a1;
// ===========================================================
// constructors
public B() {
}
public B(String id, String name, int value, A a1) {
this.id = id;
this.name = name;
this.value = value;
this.a1 = a1;
}
}
}
|
B
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/test/java/org/elasticsearch/index/query/LegacySemanticKnnVectorQueryRewriteInterceptorTests.java
|
{
"start": 1550,
"end": 8004
}
|
class ____ extends ESTestCase {
private TestThreadPool threadPool;
private NoOpClient client;
private Index index;
private static final String FIELD_NAME = "fieldName";
private static final String INFERENCE_ID = "inferenceId";
private static final String QUERY = "query";
@Before
public void setup() {
threadPool = createThreadPool();
client = new NoOpClient(threadPool);
index = new Index(randomAlphaOfLength(10), randomAlphaOfLength(10));
}
@After
public void cleanup() {
threadPool.close();
}
public void testKnnQueryWithVectorBuilderIsInterceptedAndRewritten() throws IOException {
Map<String, InferenceFieldMetadata> inferenceFields = Map.of(
FIELD_NAME,
new InferenceFieldMetadata(index.getName(), INFERENCE_ID, new String[] { FIELD_NAME }, null)
);
QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
QueryVectorBuilder queryVectorBuilder = new TextEmbeddingQueryVectorBuilder(INFERENCE_ID, QUERY);
KnnVectorQueryBuilder original = new KnnVectorQueryBuilder(FIELD_NAME, queryVectorBuilder, 10, 100, 10f, null);
if (randomBoolean()) {
float boost = randomFloatBetween(1, 10, randomBoolean());
original.boost(boost);
}
if (randomBoolean()) {
String queryName = randomAlphaOfLength(5);
original.queryName(queryName);
}
testRewrittenInferenceQuery(context, original);
}
public void testKnnWithQueryBuilderWithoutInferenceIdIsInterceptedAndRewritten() throws IOException {
Map<String, InferenceFieldMetadata> inferenceFields = Map.of(
FIELD_NAME,
new InferenceFieldMetadata(index.getName(), INFERENCE_ID, new String[] { FIELD_NAME }, null)
);
QueryRewriteContext context = createQueryRewriteContext(inferenceFields);
QueryVectorBuilder queryVectorBuilder = new TextEmbeddingQueryVectorBuilder(null, QUERY);
KnnVectorQueryBuilder original = new KnnVectorQueryBuilder(FIELD_NAME, queryVectorBuilder, 10, 100, 10f, null);
if (randomBoolean()) {
float boost = randomFloatBetween(1, 10, randomBoolean());
original.boost(boost);
}
if (randomBoolean()) {
String queryName = randomAlphaOfLength(5);
original.queryName(queryName);
}
testRewrittenInferenceQuery(context, original);
}
private void testRewrittenInferenceQuery(QueryRewriteContext context, KnnVectorQueryBuilder original) throws IOException {
QueryBuilder rewritten = original.rewrite(context);
assertTrue(
"Expected query to be intercepted, but was [" + rewritten.getClass().getName() + "]",
rewritten instanceof InterceptedQueryBuilderWrapper
);
InterceptedQueryBuilderWrapper intercepted = (InterceptedQueryBuilderWrapper) rewritten;
assertEquals(original.boost(), intercepted.boost(), 0.0f);
assertEquals(original.queryName(), intercepted.queryName());
assertTrue(intercepted.queryBuilder instanceof NestedQueryBuilder);
NestedQueryBuilder nestedQueryBuilder = (NestedQueryBuilder) intercepted.queryBuilder;
assertEquals(original.boost(), nestedQueryBuilder.boost(), 0.0f);
assertEquals(original.queryName(), nestedQueryBuilder.queryName());
assertEquals(SemanticTextField.getChunksFieldName(FIELD_NAME), nestedQueryBuilder.path());
QueryBuilder innerQuery = nestedQueryBuilder.query();
assertTrue(innerQuery instanceof KnnVectorQueryBuilder);
KnnVectorQueryBuilder knnVectorQueryBuilder = (KnnVectorQueryBuilder) innerQuery;
assertEquals(1.0f, knnVectorQueryBuilder.boost(), 0.0f);
assertNull(knnVectorQueryBuilder.queryName());
assertEquals(SemanticTextField.getEmbeddingsFieldName(FIELD_NAME), knnVectorQueryBuilder.getFieldName());
assertTrue(knnVectorQueryBuilder.queryVectorBuilder() instanceof TextEmbeddingQueryVectorBuilder);
TextEmbeddingQueryVectorBuilder textEmbeddingQueryVectorBuilder = (TextEmbeddingQueryVectorBuilder) knnVectorQueryBuilder
.queryVectorBuilder();
assertEquals(QUERY, textEmbeddingQueryVectorBuilder.getModelText());
assertEquals(INFERENCE_ID, textEmbeddingQueryVectorBuilder.getModelId());
}
public void testKnnVectorQueryOnNonInferenceFieldRemainsUnchanged() throws IOException {
QueryRewriteContext context = createQueryRewriteContext(Map.of()); // No inference fields
QueryVectorBuilder queryVectorBuilder = new TextEmbeddingQueryVectorBuilder(null, QUERY);
QueryBuilder original = new KnnVectorQueryBuilder(FIELD_NAME, queryVectorBuilder, 10, 100, 10f, null);
QueryBuilder rewritten = original.rewrite(context);
assertTrue(
"Expected query to remain knn but was [" + rewritten.getClass().getName() + "]",
rewritten instanceof KnnVectorQueryBuilder
);
assertEquals(original, rewritten);
}
private QueryRewriteContext createQueryRewriteContext(Map<String, InferenceFieldMetadata> inferenceFields) {
IndexMetadata indexMetadata = IndexMetadata.builder(index.getName())
.settings(
Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current())
.put(IndexMetadata.SETTING_INDEX_UUID, index.getUUID())
)
.numberOfShards(1)
.numberOfReplicas(0)
.putInferenceFields(inferenceFields)
.build();
ResolvedIndices resolvedIndices = new MockResolvedIndices(
Map.of(),
new OriginalIndices(new String[] { index.getName() }, IndicesOptions.DEFAULT),
Map.of(index, indexMetadata)
);
return new QueryRewriteContext(
null,
client,
null,
TransportVersion.current(),
RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY,
resolvedIndices,
null,
createRewriteInterceptor(),
null
);
}
@SuppressWarnings("deprecation")
private QueryRewriteInterceptor createRewriteInterceptor() {
return new LegacySemanticKnnVectorQueryRewriteInterceptor();
}
}
|
LegacySemanticKnnVectorQueryRewriteInterceptorTests
|
java
|
spring-projects__spring-security
|
access/src/test/java/org/springframework/security/access/annotation/ExpressionProtectedBusinessServiceImpl.java
|
{
"start": 961,
"end": 2033
}
|
class ____ implements BusinessService {
@Override
public void someAdminMethod() {
}
@Override
public int someOther(String s) {
return 0;
}
@Override
public int someOther(int input) {
return 0;
}
@Override
public void someUserAndAdminMethod() {
}
@Override
public void someUserMethod1() {
}
@Override
public void someUserMethod2() {
}
@Override
@PreFilter(filterTarget = "someList", value = "filterObject == authentication.name or filterObject == 'sam'")
@PostFilter("filterObject == 'bob'")
public List<?> methodReturningAList(List<?> someList) {
return someList;
}
@Override
public List<Object> methodReturningAList(String userName, String arg2) {
return new ArrayList<>();
}
@Override
@PostFilter("filterObject == 'bob'")
public Object[] methodReturningAnArray(Object[] someArray) {
return someArray;
}
@PreAuthorize("#x == 'x' and @number.intValue() == 1294 ")
public void methodWithBeanNamePropertyAccessExpression(String x) {
}
@Override
public void rolesAllowedUser() {
}
}
|
ExpressionProtectedBusinessServiceImpl
|
java
|
spring-projects__spring-boot
|
module/spring-boot-micrometer-tracing-opentelemetry/src/main/java/org/springframework/boot/micrometer/tracing/opentelemetry/autoconfigure/OpenTelemetryPropagationConfigurations.java
|
{
"start": 3780,
"end": 3932
}
|
class ____ {
@Bean
@ConditionalOnMissingBean
TextMapPropagator noopTextMapPropagator() {
return TextMapPropagator.noop();
}
}
}
|
NoPropagation
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/TestDFSIO.java
|
{
"start": 10815,
"end": 14200
}
|
class ____ implements Runnable {
private SequenceFile.Writer writer = null;
private String name;
private long nrBytes;
ControlFileCreateTask(SequenceFile.Writer writer, String name,
long nrBytes) {
this.writer = writer;
this.name = name;
this.nrBytes = nrBytes;
}
@Override
public void run() {
try {
writer.append(new Text(name), new LongWritable(nrBytes));
} catch (Exception e) {
LOG.error(e.getLocalizedMessage());
} finally {
if (writer != null) {
try {
writer.close();
} catch (IOException e) {
LOG.error(e.toString());
}
}
writer = null;
}
}
}
@SuppressWarnings("deprecation")
private void createControlFile(FileSystem fs,
long nrBytes, // in bytes
int nrFiles
) throws IOException {
LOG.info("creating control file: " + nrBytes + " bytes, " + nrFiles + " files");
final int maxDirItems = config.getInt(
DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_KEY,
DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_DEFAULT);
Path controlDir = getControlDir(config);
if (nrFiles > maxDirItems) {
final String message = "The directory item limit of " + controlDir +
" is exceeded: limit=" + maxDirItems + " items=" + nrFiles;
throw new IOException(message);
}
fs.delete(controlDir, true);
for (int i = 0; i < nrFiles; i++) {
String name = getFileName(i);
Path controlFile = new Path(controlDir, "in_file_" + name);
SequenceFile.Writer writer = null;
try {
writer = SequenceFile.createWriter(fs, config, controlFile,
Text.class, LongWritable.class,
CompressionType.NONE);
Runnable controlFileCreateTask = new ControlFileCreateTask(writer, name, nrBytes);
completionService.submit(controlFileCreateTask, "success");
} catch(Exception e) {
throw new IOException(e.getLocalizedMessage());
}
}
boolean isSuccess = false;
int count = 0;
for (int i = 0; i < nrFiles; i++) {
try {
// Since control file is quiet small, we use 3 minutes here.
Future<String> future = completionService.poll(3, TimeUnit.MINUTES);
if (future != null) {
future.get(3, TimeUnit.MINUTES);
count++;
} else {
break;
}
} catch (ExecutionException | InterruptedException | TimeoutException e) {
throw new IOException(e);
}
}
if (count == nrFiles) {
isSuccess = true;
}
if (isSuccess) {
LOG.info("created control files for: " + nrFiles + " files");
} else {
throw new IOException("Create control files timeout.");
}
}
private static String getFileName(int fIdx) {
return BASE_FILE_NAME + Integer.toString(fIdx);
}
/**
* Write/Read mapper base class.
* <p>
* Collects the following statistics per task:
* <ul>
* <li>number of tasks completed</li>
* <li>number of bytes written/read</li>
* <li>execution time</li>
* <li>i/o rate</li>
* <li>i/o rate squared</li>
* </ul>
*/
private abstract static
|
ControlFileCreateTask
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/clients/admin/DescribeMetadataQuorumOptions.java
|
{
"start": 938,
"end": 1034
}
|
class ____ extends AbstractOptions<DescribeMetadataQuorumOptions> {
}
|
DescribeMetadataQuorumOptions
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/dependencyinjection/code/javaguide/di/EnglishHello.java
|
{
"start": 205,
"end": 337
}
|
class ____ implements Hello {
public String sayHello(String name) {
return "Hello " + name;
}
}
// #implemented-by
|
EnglishHello
|
java
|
hibernate__hibernate-orm
|
hibernate-vector/src/main/java/org/hibernate/vector/internal/SparseFloatVectorJavaType.java
|
{
"start": 983,
"end": 3765
}
|
class ____ extends AbstractClassJavaType<SparseFloatVector> implements BasicPluralJavaType<Float> {
public static final SparseFloatVectorJavaType INSTANCE = new SparseFloatVectorJavaType();
public SparseFloatVectorJavaType() {
super( SparseFloatVector.class, new SparseVectorMutabilityPlan() );
}
@Override
public JavaType<Float> getElementJavaType() {
return FloatJavaType.INSTANCE;
}
@Override
public BasicType<?> resolveType(TypeConfiguration typeConfiguration, Dialect dialect, BasicType<Float> elementType, ColumnTypeInformation columnTypeInformation, JdbcTypeIndicators stdIndicators) {
final int arrayTypeCode = stdIndicators.getPreferredSqlTypeCodeForArray( elementType.getJdbcType().getDefaultSqlTypeCode() );
final JdbcType arrayJdbcType = typeConfiguration.getJdbcTypeRegistry()
.resolveTypeConstructorDescriptor( arrayTypeCode, elementType, columnTypeInformation );
if ( elementType.getValueConverter() != null ) {
throw new IllegalArgumentException( "Can't convert element type of sparse vector" );
}
return typeConfiguration.getBasicTypeRegistry()
.resolve( this, arrayJdbcType,
() -> new BasicCollectionType<>( elementType, arrayJdbcType, this, "sparse_float_vector" ) );
}
@Override
public JdbcType getRecommendedJdbcType(JdbcTypeIndicators indicators) {
return indicators.getJdbcType( SqlTypes.SPARSE_VECTOR_INT8 );
}
@Override
public <X> X unwrap(SparseFloatVector value, Class<X> type, WrapperOptions options) {
if ( value == null ) {
return null;
}
else if ( type.isInstance( value ) ) {
//noinspection unchecked
return (X) value;
}
else if ( float[].class.isAssignableFrom( type ) ) {
return (X) value.toDenseVector();
}
else if ( Object[].class.isAssignableFrom( type ) ) {
//noinspection unchecked
return (X) value.toArray();
}
else if ( String.class.isAssignableFrom( type ) ) {
//noinspection unchecked
return (X) value.toString();
}
else {
throw unknownUnwrap( type );
}
}
@Override
public <X> SparseFloatVector wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
else if (value instanceof SparseFloatVector vector) {
return vector;
}
else if (value instanceof List<?> list) {
//noinspection unchecked
return new SparseFloatVector( (List<Float>) list );
}
else if (value instanceof Object[] array) {
//noinspection unchecked
return new SparseFloatVector( (List<Float>) (List<?>) Arrays.asList( array ) );
}
else if (value instanceof float[] vector) {
return new SparseFloatVector( vector );
}
else if (value instanceof String vector) {
return new SparseFloatVector( vector );
}
else {
throw unknownWrap( value.getClass() );
}
}
private static
|
SparseFloatVectorJavaType
|
java
|
apache__hadoop
|
hadoop-cloud-storage-project/hadoop-tos/src/main/java/org/apache/hadoop/fs/tosfs/object/PrefixStorage.java
|
{
"start": 1340,
"end": 8256
}
|
class ____ implements DirectoryStorage {
private final ObjectStorage storage;
private final String prefix;
public PrefixStorage(ObjectStorage storage, String prefix) {
this.storage = storage;
this.prefix = prefix;
}
@Override
public String scheme() {
return storage.scheme();
}
@Override
public BucketInfo bucket() {
return storage.bucket();
}
@Override
public void initialize(Configuration conf, String bucket) {
storage.initialize(conf, bucket);
}
@Override
public Configuration conf() {
return storage.conf();
}
@Override
public ObjectContent get(String key, long offset, long limit) {
Preconditions.checkArgument(key != null && key.length() > 0,
"Object key cannot be null or empty.");
return storage.get(prefix + key, offset, limit);
}
@Override
public byte[] put(String key, InputStreamProvider streamProvider, long contentLength) {
Preconditions.checkArgument(key != null && key.length() > 0,
"Object key cannot be null or empty.");
return storage.put(prefix + key, streamProvider, contentLength);
}
@Override
public byte[] append(String key, InputStreamProvider streamProvider, long contentLength) {
Preconditions.checkArgument(key != null && key.length() > 0,
"Object key cannot be null or empty.");
return storage.append(prefix + key, streamProvider, contentLength);
}
@Override
public void delete(String key) {
Preconditions.checkArgument(key != null, "Object key cannot be null or empty.");
storage.delete(prefix + key);
}
@Override
public List<String> batchDelete(List<String> keys) {
return storage.batchDelete(keys.stream().map(key -> prefix + key).collect(Collectors.toList()));
}
@Override
public void deleteAll(String prefixToDelete) {
storage.deleteAll(this.prefix + prefixToDelete);
}
@Override
public ObjectInfo head(String key) {
Preconditions.checkArgument(key != null && key.length() > 0,
"Object key cannot be null or empty.");
return removePrefix(storage.head(prefix + key));
}
private ListObjectsResponse removePrefix(ListObjectsResponse response) {
List<ObjectInfo> objects = response.objects().stream()
.map(this::removePrefix)
.collect(Collectors.toList());
List<String> commonPrefixKeys = response.commonPrefixes().stream()
.map(this::removePrefix)
.collect(Collectors.toList());
return new ListObjectsResponse(objects, commonPrefixKeys);
}
@Override
public Iterable<ListObjectsResponse> list(ListObjectsRequest request) {
String startAfter = Strings.isNullOrEmpty(request.startAfter()) ?
request.startAfter() : prefix + request.startAfter();
ListObjectsRequest newReq = ListObjectsRequest.builder()
.prefix(prefix + request.prefix())
.startAfter(startAfter)
.maxKeys(request.maxKeys())
.delimiter(request.delimiter())
.build();
return Iterables.transform(storage.list(newReq), this::removePrefix);
}
@Override
public MultipartUpload createMultipartUpload(String key) {
Preconditions.checkArgument(key != null && key.length() > 0,
"Object key cannot be null or empty.");
return removePrefix(storage.createMultipartUpload(prefix + key));
}
@Override
public Part uploadPart(
String key, String uploadId, int partNum,
InputStreamProvider streamProvider, long contentLength) {
Preconditions.checkArgument(key != null && key.length() > 0,
"Object key cannot be null or empty.");
return storage.uploadPart(prefix + key, uploadId, partNum, streamProvider, contentLength);
}
@Override
public byte[] completeUpload(String key, String uploadId, List<Part> uploadParts) {
Preconditions.checkArgument(key != null && key.length() > 0,
"Object key cannot be null or empty.");
return storage.completeUpload(prefix + key, uploadId, uploadParts);
}
@Override
public void abortMultipartUpload(String key, String uploadId) {
Preconditions.checkArgument(key != null && key.length() > 0,
"Object key cannot be null or empty.");
storage.abortMultipartUpload(prefix + key, uploadId);
}
@Override
public Iterable<MultipartUpload> listUploads(String keyPrefix) {
return Iterables.transform(storage.listUploads(prefix + keyPrefix), this::removePrefix);
}
@Override
public Part uploadPartCopy(
String srcKey, String dstKey, String uploadId, int partNum, long copySourceRangeStart,
long copySourceRangeEnd) {
return storage.uploadPartCopy(prefix + srcKey, prefix + dstKey, uploadId, partNum,
copySourceRangeStart, copySourceRangeEnd);
}
@Override
public void copy(String srcKey, String dstKey) {
storage.copy(prefix + srcKey, prefix + dstKey);
}
@Override
public void rename(String srcKey, String dstKey) {
storage.rename(prefix + srcKey, prefix + dstKey);
}
private ObjectInfo removePrefix(ObjectInfo o) {
if (o == null) {
return null;
}
return new ObjectInfo(removePrefix(o.key()), o.size(), o.mtime(), o.checksum(), o.isDir());
}
private MultipartUpload removePrefix(MultipartUpload u) {
if (u == null) {
return null;
}
return new MultipartUpload(removePrefix(u.key()), u.uploadId(), u.minPartSize(),
u.maxPartCount());
}
private String removePrefix(String key) {
if (key == null) {
return null;
} else if (key.startsWith(prefix)) {
return key.substring(prefix.length());
} else {
return key;
}
}
@Override
public void putTags(String key, Map<String, String> newTags) {
storage.putTags(prefix + key, newTags);
}
@Override
public Map<String, String> getTags(String key) {
return storage.getTags(prefix + key);
}
@Override
public ObjectInfo objectStatus(String key) {
Preconditions.checkArgument(key != null && !key.isEmpty(),
"Object key cannot be null or empty.");
return removePrefix(storage.objectStatus(prefix + key));
}
@Override
public ChecksumInfo checksumInfo() {
return storage.checksumInfo();
}
@Override
public void close() throws IOException {
storage.close();
}
@Override
public Iterable<ObjectInfo> listDir(String key, boolean recursive) {
Preconditions.checkArgument(storage instanceof DirectoryStorage);
return Iterables.transform(((DirectoryStorage) storage).listDir(prefix + key, recursive),
this::removePrefix);
}
@Override
public void deleteDir(String key, boolean recursive) {
Preconditions.checkArgument(storage instanceof DirectoryStorage);
((DirectoryStorage) storage).deleteDir(prefix + key, recursive);
}
@Override
public boolean isEmptyDir(String key) {
Preconditions.checkArgument(storage instanceof DirectoryStorage);
return ((DirectoryStorage) storage).isEmptyDir(prefix + key);
}
}
|
PrefixStorage
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/errors/InvalidTxnStateException.java
|
{
"start": 847,
"end": 987
}
|
class ____ extends ApiException {
public InvalidTxnStateException(String message) {
super(message);
}
}
|
InvalidTxnStateException
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/support/CrudMethodMetadataPopulatingMethodInterceptorUnitTests.java
|
{
"start": 3822,
"end": 3967
}
|
interface ____ {
@Lock(LockModeType.OPTIMISTIC)
void someMethod();
@Lock(LockModeType.PESSIMISTIC_READ)
void someOtherMethod();
}
}
|
Sample
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/spring/transaction/TransactionalBean2.java
|
{
"start": 279,
"end": 1086
}
|
class ____ {
@Autowired
private RedissonTransactionManager transactionManager;
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void testInNewTransaction() {
RTransaction transaction = transactionManager.getCurrentTransaction();
transaction.getMap("tr2").put("2", "4");
}
@Transactional
public void testPropagationRequired() {
RTransaction transaction = transactionManager.getCurrentTransaction();
transaction.getMap("tr3").put("2", "4");
}
@Transactional
public void testPropagationRequiredWithException() {
RTransaction transaction = transactionManager.getCurrentTransaction();
transaction.getMap("tr5").put("2", "4");
throw new IllegalStateException();
}
}
|
TransactionalBean2
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
|
{
"start": 6632,
"end": 11036
}
|
interface ____ extends KeyProviderExtension.Extension {
/**
* Calls to this method allows the underlying KeyProvider to warm-up any
* implementation specific caches used to store the Encrypted Keys.
* @param keyNames Array of Key Names
* @throws IOException thrown if the key material could not be encrypted.
*/
public void warmUpEncryptedKeys(String... keyNames)
throws IOException;
/**
* Drains the Queue for the provided key.
*
* @param keyName the key to drain the Queue for
*/
public void drain(String keyName);
/**
* Generates a key material and encrypts it using the given key name.
* The generated key material is of the same
* length as the <code>KeyVersion</code> material of the latest key version
* of the key and is encrypted using the same cipher.
* <p>
* NOTE: The generated key is not stored by the <code>KeyProvider</code>
*
* @param encryptionKeyName
* The latest KeyVersion of this key's material will be encrypted.
* @return EncryptedKeyVersion with the generated key material, the version
* name is 'EEK' (for Encrypted Encryption Key)
* @throws IOException
* thrown if the key material could not be generated
* @throws GeneralSecurityException
* thrown if the key material could not be encrypted because of a
* cryptographic issue.
*/
public EncryptedKeyVersion generateEncryptedKey(
String encryptionKeyName) throws IOException,
GeneralSecurityException;
/**
* Decrypts an encrypted byte[] key material using the given key version
* name and initialization vector.
*
* @param encryptedKeyVersion
* contains keyVersionName and IV to decrypt the encrypted key
* material
* @return a KeyVersion with the decrypted key material, the version name is
* 'EK' (For Encryption Key)
* @throws IOException
* thrown if the key material could not be decrypted
* @throws GeneralSecurityException
* thrown if the key material could not be decrypted because of a
* cryptographic issue.
*/
public KeyVersion decryptEncryptedKey(
EncryptedKeyVersion encryptedKeyVersion) throws IOException,
GeneralSecurityException;
/**
* Re-encrypts an encrypted key version, using its initialization vector
* and key material, but with the latest key version name of its key name
* in the key provider.
* <p>
* If the latest key version name in the provider is the
* same as the one encrypted the passed-in encrypted key version, the same
* encrypted key version is returned.
* <p>
* NOTE: The generated key is not stored by the <code>KeyProvider</code>
*
* @param ekv The EncryptedKeyVersion containing keyVersionName and IV.
* @return The re-encrypted EncryptedKeyVersion.
* @throws IOException If the key material could not be re-encrypted.
* @throws GeneralSecurityException If the key material could not be
* re-encrypted because of a cryptographic issue.
*/
EncryptedKeyVersion reencryptEncryptedKey(EncryptedKeyVersion ekv)
throws IOException, GeneralSecurityException;
/**
* Batched version of {@link #reencryptEncryptedKey(EncryptedKeyVersion)}.
* <p>
* For each encrypted key version, re-encrypts an encrypted key version,
* using its initialization vector and key material, but with the latest
* key version name of its key name. If the latest key version name in the
* provider is the same as the one encrypted the passed-in encrypted key
* version, the same encrypted key version is returned.
* <p>
* NOTE: The generated key is not stored by the <code>KeyProvider</code>
*
* @param ekvs List containing the EncryptedKeyVersion's
* @throws IOException If any EncryptedKeyVersion could not be re-encrypted
* @throws GeneralSecurityException If any EncryptedKeyVersion could not be
* re-encrypted because of a cryptographic issue.
*/
void reencryptEncryptedKeys(List<EncryptedKeyVersion> ekvs)
throws IOException, GeneralSecurityException;
}
private static
|
CryptoExtension
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/logging/log4j2/SpringProfileArbiterTests.java
|
{
"start": 6322,
"end": 6773
}
|
interface ____ {
}
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@WithResource(name = "production-profile.xml", content = """
<?xml version="1.0" encoding="UTF-8"?>
<Configuration>
<Loggers>
<SpringProfile name="production">
<Logger name="org.springframework.boot.logging.log4j2" level="TRACE" />
</SpringProfile>
</Loggers>
</Configuration>
""")
private @
|
WithProfileExpressionXmlResource
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/IdentifierNameTest.java
|
{
"start": 11848,
"end": 12247
}
|
interface ____ {
// BUG: Diagnostic contains: getRpcPolicy
int getRPCPolicy();
// BUG: Diagnostic contains: getRpc
int getRPC();
}
""")
.doTest();
}
@Test
public void initialismsInVariableNames_partOfCamelCase() {
helper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/view/FooterBlock.java
|
{
"start": 961,
"end": 1101
}
|
class ____ extends HtmlBlock {
@Override protected void render(Block html) {
html.
div("#footer.ui-widget").__();
}
}
|
FooterBlock
|
java
|
hibernate__hibernate-orm
|
local-build-plugins/src/main/java/org/hibernate/orm/db/DatabaseService.java
|
{
"start": 242,
"end": 388
}
|
class ____ implements BuildService<BuildServiceParameters.None> {
public static final String REGISTRATION_NAME = "databaseService";
}
|
DatabaseService
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_usingComparator_Test.java
|
{
"start": 1172,
"end": 1761
}
|
class ____ extends ObjectArrayAssertBaseTest {
private Comparator<Object[]> comparator = alwaysEqual();
private ObjectArrays arraysBefore;
@BeforeEach
void before() {
arraysBefore = getArrays(assertions);
}
@Override
protected ObjectArrayAssert<Object> invoke_api_method() {
return assertions.usingComparator(comparator);
}
@Override
protected void verify_internal_effects() {
assertThat(getObjects(assertions).getComparator()).isSameAs(comparator);
assertThat(getArrays(assertions)).isSameAs(arraysBefore);
}
}
|
ObjectArrayAssert_usingComparator_Test
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/circular/CircularProducerNormalScopeConstructorInjectionTest.java
|
{
"start": 990,
"end": 1300
}
|
class ____ {
private final String value;
// for client proxy
MyValue() {
this.value = null;
}
MyValue(String value) {
this.value = value;
}
String get() {
return value;
}
}
@Dependent
static
|
MyValue
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/StGeohexTests.java
|
{
"start": 1257,
"end": 3431
}
|
class ____ extends SpatialGridFunctionTestCase {
public StGeohexTests(@Name("TestCase") Supplier<TestCaseSupplier.TestCase> testCaseSupplier) {
this.testCase = testCaseSupplier.get();
}
/**
* Other geo grid functions use the same type-specific license requirement as the spatial aggregations, but geohex is licensed
* more strictly, at platinum for all field types.
*/
public static License.OperationMode licenseRequirement(List<DataType> fieldTypes) {
return License.OperationMode.PLATINUM;
}
@ParametersFactory
public static Iterable<Object[]> parameters() {
final List<TestCaseSupplier> suppliers = new ArrayList<>();
addTestCaseSuppliers(suppliers, new DataType[] { GEO_POINT }, GEOHEX, StGeohexTests::valueOf, StGeohexTests::boundedValueOf);
return parameterSuppliersFromTypedDataWithDefaultChecks(true, suppliers);
}
private static long valueOf(BytesRef wkb, int precision) {
return StGeohex.unboundedGrid.calculateGridId(UNSPECIFIED.wkbAsPoint(wkb), precision);
}
private static Long boundedValueOf(BytesRef wkb, int precision, GeoBoundingBox bbox) {
StGeohex.GeoHexBoundedGrid bounds = new StGeohex.GeoHexBoundedGrid.Factory(precision, bbox).get(null);
long gridId = bounds.calculateGridId(UNSPECIFIED.wkbAsPoint(wkb));
return gridId < 0 ? null : gridId;
}
@Override
protected Expression build(Source source, List<Expression> args) {
Expression bounds = args.size() > 2 ? args.get(2) : null;
return new StGeohex(source, args.get(0), args.get(1), bounds);
}
public void testInvalidPrecision() {
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> process(-1, StGeohexTests::valueOf));
assertThat(ex.getMessage(), containsString("Invalid geohex_grid precision of -1. Must be between 0 and 15."));
ex = expectThrows(IllegalArgumentException.class, () -> process(H3.MAX_H3_RES + 1, StGeohexTests::valueOf));
assertThat(ex.getMessage(), containsString("Invalid geohex_grid precision of 16. Must be between 0 and 15."));
}
}
|
StGeohexTests
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/TaskInformation.java
|
{
"start": 1255,
"end": 4421
}
|
class ____ implements Serializable {
private static final long serialVersionUID = -9006218793155953789L;
/** Job vertex id of the associated job vertex. */
private final JobVertexID jobVertexId;
/** Name of the task. */
private final String taskName;
/** The number of subtasks for this operator. */
private final int numberOfSubtasks;
/** The maximum parallelism == number of key groups. */
private final int maxNumberOfSubtasks;
/** Class name of the invokable to run. */
private final String invokableClassName;
/** Configuration for the task. */
private final Configuration taskConfiguration;
public TaskInformation(
JobVertexID jobVertexId,
String taskName,
int numberOfSubtasks,
int maxNumberOfSubtasks,
String invokableClassName,
Configuration taskConfiguration) {
this.jobVertexId = Preconditions.checkNotNull(jobVertexId);
this.taskName = Preconditions.checkNotNull(taskName);
this.numberOfSubtasks = numberOfSubtasks;
this.maxNumberOfSubtasks = maxNumberOfSubtasks;
this.invokableClassName = Preconditions.checkNotNull(invokableClassName);
this.taskConfiguration = Preconditions.checkNotNull(taskConfiguration);
}
public JobVertexID getJobVertexId() {
return jobVertexId;
}
public String getTaskName() {
return taskName;
}
public int getNumberOfSubtasks() {
return numberOfSubtasks;
}
public int getMaxNumberOfSubtasks() {
return maxNumberOfSubtasks;
}
public String getInvokableClassName() {
return invokableClassName;
}
public Configuration getTaskConfiguration() {
return taskConfiguration;
}
public TaskInformation deepCopy() {
return new TaskInformation(
getJobVertexId(),
getTaskName(),
getNumberOfSubtasks(),
getMaxNumberOfSubtasks(),
getInvokableClassName(),
// Return the new Configuration to avoid shared conf being changed.
new Configuration(getTaskConfiguration()));
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TaskInformation that = (TaskInformation) o;
return numberOfSubtasks == that.numberOfSubtasks
&& maxNumberOfSubtasks == that.maxNumberOfSubtasks
&& Objects.equals(jobVertexId, that.jobVertexId)
&& Objects.equals(taskName, that.taskName)
&& Objects.equals(invokableClassName, that.invokableClassName)
&& Objects.equals(taskConfiguration, that.taskConfiguration);
}
@Override
public int hashCode() {
return Objects.hash(
jobVertexId,
taskName,
numberOfSubtasks,
maxNumberOfSubtasks,
invokableClassName,
taskConfiguration);
}
}
|
TaskInformation
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/maven-resolver/src/main/java/io/quarkus/bootstrap/resolver/maven/ApplicationDependencyTreeResolver.java
|
{
"start": 33611,
"end": 36896
}
|
class ____ {
static ExtensionDependency get(DependencyNode node) {
return (ExtensionDependency) node.getData().get(QUARKUS_EXTENSION_DEPENDENCY);
}
final ExtensionInfo info;
final DependencyNode runtimeNode;
final Collection<Exclusion> exclusions;
boolean conditionalDepsQueued;
private List<ExtensionDependency> extDeps;
private boolean presentInTargetGraph;
private ExtensionDependency(ExtensionInfo info, DependencyNode node, Collection<Exclusion> exclusions) {
this.runtimeNode = node;
this.info = info;
this.exclusions = exclusions;
@SuppressWarnings("unchecked")
final Map<Object, Object> data = (Map<Object, Object>) node.getData();
if (data.isEmpty()) {
node.setData(QUARKUS_EXTENSION_DEPENDENCY, this);
} else if (data.put(QUARKUS_EXTENSION_DEPENDENCY, this) != null) {
throw new IllegalStateException(
"Dependency node " + node + " has already been associated with an extension dependency");
}
}
private void addExtensionDependency(ExtensionDependency dep) {
if (extDeps == null) {
extDeps = new ArrayList<>();
}
extDeps.add(dep);
}
private void replaceRuntimeExtensionNodes(DependencyNode deploymentNode) {
var deploymentVisitor = new OrderedDependencyVisitor(deploymentNode);
// skip the root node
deploymentVisitor.next();
int nodesToReplace = extDeps == null ? 1 : extDeps.size() + 1;
while (deploymentVisitor.hasNext() && nodesToReplace > 0) {
deploymentVisitor.next();
if (replaceRuntimeNode(deploymentVisitor)) {
--nodesToReplace;
} else if (extDeps != null) {
for (int i = 0; i < extDeps.size(); ++i) {
if (extDeps.get(i).replaceRuntimeNode(deploymentVisitor)) {
--nodesToReplace;
break;
}
}
}
}
}
private boolean replaceRuntimeNode(OrderedDependencyVisitor depVisitor) {
if (!presentInTargetGraph && isSameKey(runtimeNode.getArtifact(), depVisitor.getCurrent().getArtifact())) {
// we are not comparing the version in the above condition because the runtime version
// may appear to be different from the deployment one and that's ok
// e.g. the version of the runtime artifact could be managed by a BOM
// but overridden by the user in the project config. The way the deployment deps
// are resolved here, the deployment version of the runtime artifact will be the one from the BOM.
var inserted = new DefaultDependencyNode(runtimeNode);
inserted.setChildren(runtimeNode.getChildren());
depVisitor.replaceCurrent(inserted);
presentInTargetGraph = true;
return true;
}
return false;
}
}
private
|
ExtensionDependency
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/spi/ThreadPoolFactory.java
|
{
"start": 1137,
"end": 1334
}
|
interface ____ customizing the creation of these objects to adapt camel for application servers and other
* environments where thread pools should not be created with the jdk methods
*/
public
|
allows
|
java
|
apache__flink
|
flink-libraries/flink-cep/src/test/java/org/apache/flink/cep/Event.java
|
{
"start": 1200,
"end": 2364
}
|
class ____ {
private String name;
private double price;
private int id;
public Event(int id, String name, double price) {
this.id = id;
this.name = name;
this.price = price;
}
public double getPrice() {
return price;
}
public int getId() {
return id;
}
public String getName() {
return name;
}
@Override
public String toString() {
return "Event(" + id + ", " + name + ", " + price + ")";
}
@Override
public boolean equals(Object obj) {
if (obj instanceof Event) {
Event other = (Event) obj;
return name.equals(other.name) && price == other.price && id == other.id;
} else {
return false;
}
}
@Override
public int hashCode() {
return Objects.hash(name, price, id);
}
public static TypeSerializer<Event> createTypeSerializer() {
TypeInformation<Event> typeInformation =
(TypeInformation<Event>) TypeExtractor.createTypeInfo(Event.class);
return typeInformation.createSerializer(new SerializerConfigImpl());
}
}
|
Event
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/test/generic/GenericTypeTest.java
|
{
"start": 946,
"end": 1107
}
|
class ____ implements Serializable{
Gen<Bean> g;
public Gen<Bean> getG() {
return g;
}
public void setG(Gen<Bean> g) {
this.g = g;
}
}
static
|
Str
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/metrics/filter/DefaultReporterFilters.java
|
{
"start": 4260,
"end": 8355
}
|
class ____ extends AbstractReporterFilter<EventBuilder> {
EventReporterFilter(List<FilterSpec> includes, List<FilterSpec> excludes) {
super(includes, excludes);
}
@Override
public boolean filter(EventBuilder reported, String name, String logicalScope) {
for (FilterSpec exclude : excludes) {
if (exclude.namePattern.matcher(name).matches()
&& exclude.scopePattern.matcher(logicalScope).matches()) {
return false;
}
}
for (FilterSpec include : includes) {
if (include.namePattern.matcher(name).matches()
&& include.scopePattern.matcher(logicalScope).matches()) {
return true;
}
}
return false;
}
}
public static ReporterFilter<Metric> metricsFromConfiguration(Configuration configuration) {
return fromConfiguration(
configuration,
MetricOptions.REPORTER_INCLUDES,
MetricOptions.REPORTER_EXCLUDES,
MetricReporterFilter::new);
}
public static ReporterFilter<SpanBuilder> tracesFromConfiguration(Configuration configuration) {
return fromConfiguration(
configuration,
TraceOptions.REPORTER_INCLUDES,
TraceOptions.REPORTER_EXCLUDES,
TraceReporterFilter::new);
}
public static ReporterFilter<EventBuilder> eventsFromConfiguration(
Configuration configuration) {
return fromConfiguration(
configuration,
EventOptions.REPORTER_INCLUDES,
EventOptions.REPORTER_EXCLUDES,
EventReporterFilter::new);
}
private static <REPORTED> ReporterFilter<REPORTED> fromConfiguration(
Configuration configuration,
ConfigOption<List<String>> optionIncludes,
ConfigOption<List<String>> optionExcludes,
BiFunction<List<FilterSpec>, List<FilterSpec>, ReporterFilter<REPORTED>> factory) {
final List<String> includes = configuration.get(optionIncludes);
final List<String> excludes = configuration.get(optionExcludes);
final List<FilterSpec> includeFilters =
includes.stream().map(DefaultReporterFilters::parse).collect(Collectors.toList());
final List<FilterSpec> excludeFilters =
excludes.stream().map(DefaultReporterFilters::parse).collect(Collectors.toList());
return factory.apply(includeFilters, excludeFilters);
}
private static FilterSpec parse(String filter) {
final String[] split = filter.split(":");
final Pattern scope = convertToPattern(split[0]);
final Pattern name = split.length > 1 ? convertToPattern(split[1]) : convertToPattern("*");
final EnumSet<MetricType> type =
split.length > 2 ? parseMetricTypes(split[2]) : ALL_METRIC_TYPES;
return new FilterSpec(scope, name, type);
}
@VisibleForTesting
static Pattern convertToPattern(String scopeOrNameComponent) {
final String[] split = scopeOrNameComponent.split(LIST_DELIMITER);
final String rawPattern =
Arrays.stream(split)
.map(s -> s.replaceAll("\\.", "\\."))
.map(s -> s.replaceAll("\\*", ".*"))
.collect(Collectors.joining("|", "(", ")"));
return Pattern.compile(rawPattern);
}
@VisibleForTesting
static EnumSet<MetricType> parseMetricTypes(String typeComponent) {
final String[] split = typeComponent.split(LIST_DELIMITER);
if (split.length == 1 && split[0].equals("*")) {
return ALL_METRIC_TYPES;
}
return EnumSet.copyOf(
Arrays.stream(split)
.map(s -> ConfigurationUtils.convertToEnum(s, MetricType.class))
.collect(Collectors.toSet()));
}
private static
|
EventReporterFilter
|
java
|
apache__camel
|
core/camel-management/src/test/java/org/apache/camel/management/ManagedInflightStatisticsTest.java
|
{
"start": 1612,
"end": 5376
}
|
class ____ extends ManagementTestSupport {
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
context.getInflightRepository().setInflightBrowseEnabled(true);
return context;
}
@Test
public void testOldestInflight() throws Exception {
// get the stats for the route
MBeanServer mbeanServer = getMBeanServer();
Set<ObjectName> set = mbeanServer.queryNames(new ObjectName("*:type=routes,*"), null);
assertEquals(1, set.size());
ObjectName on = set.iterator().next();
Long inflight = (Long) mbeanServer.getAttribute(on, "ExchangesInflight");
assertEquals(0, inflight.longValue());
Long ts = (Long) mbeanServer.getAttribute(on, "OldestInflightDuration");
assertNull(ts);
String id = (String) mbeanServer.getAttribute(on, "OldestInflightExchangeId");
assertNull(id);
MockEndpoint result = getMockEndpoint("mock:result");
result.expectedMessageCount(2);
CountDownLatch latch1 = new CountDownLatch(1);
CountDownLatch latch2 = new CountDownLatch(1);
// start some exchanges.
template.asyncSendBody("direct:start", latch1);
Thread.sleep(250);
template.asyncSendBody("direct:start", latch2);
await().atMost(2, TimeUnit.SECONDS).until(() -> {
Long num = (Long) mbeanServer.getAttribute(on, "ExchangesInflight");
return num != null && num == 2;
});
inflight = (Long) mbeanServer.getAttribute(on, "ExchangesInflight");
assertEquals(2, inflight.longValue());
ts = (Long) mbeanServer.getAttribute(on, "OldestInflightDuration");
assertNotNull(ts);
id = (String) mbeanServer.getAttribute(on, "OldestInflightExchangeId");
assertNotNull(id);
log.info("Oldest Exchange id: {}, duration: {}", id, ts);
// complete first exchange
latch1.countDown();
// Lets wait for the first exchange to complete.
Thread.sleep(200);
Long ts2 = (Long) mbeanServer.getAttribute(on, "OldestInflightDuration");
assertNotNull(ts2);
String id2 = (String) mbeanServer.getAttribute(on, "OldestInflightExchangeId");
assertNotNull(id2);
log.info("Oldest Exchange id: {}, duration: {}", id2, ts2);
// Lets verify the oldest changed.
assertNotEquals(id, id2);
// The duration values could be different
assertNotEquals(ts, ts2);
latch2.countDown();
// Lets wait for all the exchanges to complete.
await().atMost(2, TimeUnit.SECONDS).until(() -> {
Long num = (Long) mbeanServer.getAttribute(on, "ExchangesInflight");
return num != null && num == 0;
});
assertMockEndpointsSatisfied();
inflight = (Long) mbeanServer.getAttribute(on, "ExchangesInflight");
assertEquals(0, inflight.longValue());
ts = (Long) mbeanServer.getAttribute(on, "OldestInflightDuration");
assertNull(ts);
id = (String) mbeanServer.getAttribute(on, "OldestInflightExchangeId");
assertNull(id);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.process(exchange -> {
CountDownLatch latch = (CountDownLatch) exchange.getIn().getBody();
latch.await(10, TimeUnit.SECONDS);
})
.to("mock:result").id("mock");
}
};
}
}
|
ManagedInflightStatisticsTest
|
java
|
quarkusio__quarkus
|
integration-tests/hibernate-search-standalone-elasticsearch/src/test/java/io/quarkus/it/hibernate/search/standalone/elasticsearch/PropertyAccessTest.java
|
{
"start": 253,
"end": 1029
}
|
class ____ {
@Test
public void testPrivateFieldAccess() {
when().get("/test/property-access/private-field").then()
.statusCode(200)
.body(is("OK"));
}
@Test
public void testPublicFieldAccess() {
when().get("/test/property-access/public-field").then()
.statusCode(200)
.body(is("OK"));
}
@Test
public void testMethodAccess() {
when().get("/test/property-access/method").then()
.statusCode(200)
.body(is("OK"));
}
@Test
public void testRecordFieldAccess() {
when().get("/test/property-access/record-field").then()
.statusCode(200)
.body(is("OK"));
}
}
|
PropertyAccessTest
|
java
|
apache__camel
|
components/camel-splunk/src/test/java/org/apache/camel/component/splunk/SplunkMockTestSupport.java
|
{
"start": 1172,
"end": 1744
}
|
class ____ extends CamelTestSupport {

    // Mocked Splunk SDK service and raw TCP socket, injected into the
    // component below so tests run without a live Splunk instance.
    @Mock
    Service service;
    @Mock
    Socket socket;

    /**
     * Builds the Camel context and rewires the "splunk" component to use a
     * configuration factory backed by the mocks above.
     */
    @Override
    protected CamelContext createCamelContext() throws Exception {
        CamelContext context = super.createCamelContext();
        // set splunk to use mocked connection factory
        SplunkComponent splunk = context.getComponent("splunk", SplunkComponent.class);
        DefaultSplunkConfigurationFactory cf = Helper.mockComponent(service, socket);
        splunk.setSplunkConfigurationFactory(cf);
        return context;
    }
}
|
SplunkMockTestSupport
|
java
|
google__guava
|
android/guava/src/com/google/common/primitives/ImmutableLongArray.java
|
{
"start": 8840,
"end": 18036
}
|
/**
 * A mutable builder that accumulates {@code long} values into a growable
 * backing array; {@link #build()} snapshots the current contents without
 * copying the array.
 */
class ____ {
    private long[] array;
    private int count = 0; // <= array.length

    Builder(int initialCapacity) {
        array = new long[initialCapacity];
    }

    /**
     * Appends {@code value} to the end of the values the built {@link ImmutableLongArray} will
     * contain.
     */
    @CanIgnoreReturnValue
    public Builder add(long value) {
        ensureRoomFor(1);
        array[count] = value;
        count += 1;
        return this;
    }

    /**
     * Appends {@code values}, in order, to the end of the values the built {@link
     * ImmutableLongArray} will contain.
     */
    @CanIgnoreReturnValue
    public Builder addAll(long[] values) {
        ensureRoomFor(values.length);
        System.arraycopy(values, 0, array, count, values.length);
        count += values.length;
        return this;
    }

    /**
     * Appends {@code values}, in order, to the end of the values the built {@link
     * ImmutableLongArray} will contain.
     */
    @CanIgnoreReturnValue
    public Builder addAll(Iterable<Long> values) {
        // A Collection exposes its size, so route there to presize once
        // instead of growing element by element.
        if (values instanceof Collection) {
            return addAll((Collection<Long>) values);
        }
        for (Long value : values) {
            add(value);
        }
        return this;
    }

    /**
     * Appends {@code values}, in order, to the end of the values the built {@link
     * ImmutableLongArray} will contain.
     */
    @CanIgnoreReturnValue
    public Builder addAll(Collection<Long> values) {
        ensureRoomFor(values.size());
        for (Long value : values) {
            array[count++] = value;
        }
        return this;
    }

    /**
     * Appends all values from {@code stream}, in order, to the end of the values the built {@link
     * ImmutableLongArray} will contain.
     *
     * @since 33.4.0 (but since 22.0 in the JRE flavor)
     */
    @IgnoreJRERequirement // Users will use this only if they're already using streams.
    @CanIgnoreReturnValue
    public Builder addAll(LongStream stream) {
        Spliterator.OfLong spliterator = stream.spliterator();
        long size = spliterator.getExactSizeIfKnown();
        if (size > 0) { // known *and* nonempty
            ensureRoomFor(Ints.saturatedCast(size));
        }
        spliterator.forEachRemaining((LongConsumer) this::add);
        return this;
    }

    /**
     * Appends {@code values}, in order, to the end of the values the built {@link
     * ImmutableLongArray} will contain.
     */
    @CanIgnoreReturnValue
    public Builder addAll(ImmutableLongArray values) {
        ensureRoomFor(values.length());
        // Copy only the live window [start, start + length) of the source.
        System.arraycopy(values.array, values.start, array, count, values.length());
        count += values.length();
        return this;
    }

    // Grows the backing array (if needed) so that numberToAdd more values fit.
    private void ensureRoomFor(int numberToAdd) {
        int newCount = count + numberToAdd; // TODO(kevinb): check overflow now?
        if (newCount > array.length) {
            array = Arrays.copyOf(array, expandedCapacity(array.length, newCount));
        }
    }

    // Unfortunately this is pasted from ImmutableCollection.Builder.
    private static int expandedCapacity(int oldCapacity, int minCapacity) {
        if (minCapacity < 0) {
            throw new AssertionError("cannot store more than MAX_VALUE elements");
        }
        // careful of overflow!
        int newCapacity = oldCapacity + (oldCapacity >> 1) + 1;
        if (newCapacity < minCapacity) {
            newCapacity = Integer.highestOneBit(minCapacity - 1) << 1;
        }
        if (newCapacity < 0) {
            newCapacity = Integer.MAX_VALUE; // guaranteed to be >= newCapacity
        }
        return newCapacity;
    }

    /**
     * Returns a new immutable array. The builder can continue to be used after this call, to append
     * more values and build again.
     *
     * <p><b>Performance note:</b> the returned array is backed by the same array as the builder, so
     * no data is copied as part of this step, but this may occupy more memory than strictly
     * necessary. To copy the data to a right-sized backing array, use {@code .build().trimmed()}.
     */
    public ImmutableLongArray build() {
        return count == 0 ? EMPTY : new ImmutableLongArray(array, 0, count);
    }
}
// Instance stuff here
// The array is never mutated after storing in this field and the construction strategies ensure
// it doesn't escape this class
@SuppressWarnings("Immutable")
private final long[] array;
/*
* TODO(kevinb): evaluate the trade-offs of going bimorphic to save these two fields from most
* instances. Note that the instances that would get smaller are the right set to care about
* optimizing, because the rest have the option of calling `trimmed`.
*/
private final transient int start; // it happens that we only serialize instances where this is 0
private final int end; // exclusive
private ImmutableLongArray(long[] array) {
this(array, 0, array.length);
}
private ImmutableLongArray(long[] array, int start, int end) {
this.array = array;
this.start = start;
this.end = end;
}
/** Returns the number of values in this array. */
public int length() {
return end - start;
}
/** Returns {@code true} if there are no values in this array ({@link #length} is zero). */
public boolean isEmpty() {
return end == start;
}
/**
* Returns the {@code long} value present at the given index.
*
* @throws IndexOutOfBoundsException if {@code index} is negative, or greater than or equal to
* {@link #length}
*/
public long get(int index) {
Preconditions.checkElementIndex(index, length());
return array[start + index];
}
/**
* Returns the smallest index for which {@link #get} returns {@code target}, or {@code -1} if no
* such index exists. Equivalent to {@code asList().indexOf(target)}.
*/
public int indexOf(long target) {
for (int i = start; i < end; i++) {
if (array[i] == target) {
return i - start;
}
}
return -1;
}
/**
* Returns the largest index for which {@link #get} returns {@code target}, or {@code -1} if no
* such index exists. Equivalent to {@code asList().lastIndexOf(target)}.
*/
public int lastIndexOf(long target) {
for (int i = end - 1; i >= start; i--) {
if (array[i] == target) {
return i - start;
}
}
return -1;
}
/**
* Returns {@code true} if {@code target} is present at any index in this array. Equivalent to
* {@code asList().contains(target)}.
*/
public boolean contains(long target) {
return indexOf(target) >= 0;
}
/**
* Invokes {@code consumer} for each value contained in this array, in order.
*
* @since 33.4.0 (but since 22.0 in the JRE flavor)
*/
@IgnoreJRERequirement // We rely on users not to call this without library desugaring.
public void forEach(LongConsumer consumer) {
checkNotNull(consumer);
for (int i = start; i < end; i++) {
consumer.accept(array[i]);
}
}
/**
* Returns a stream over the values in this array, in order.
*
* @since 33.4.0 (but since 22.0 in the JRE flavor)
*/
// If users use this when they shouldn't, we hope that NewApi will catch subsequent stream calls
@IgnoreJRERequirement
public LongStream stream() {
return Arrays.stream(array, start, end);
}
/** Returns a new, mutable copy of this array's values, as a primitive {@code long[]}. */
public long[] toArray() {
return Arrays.copyOfRange(array, start, end);
}
/**
* Returns a new immutable array containing the values in the specified range.
*
* <p><b>Performance note:</b> The returned array has the same full memory footprint as this one
* does (no actual copying is performed). To reduce memory usage, use {@code subArray(start,
* end).trimmed()}.
*/
public ImmutableLongArray subArray(int startIndex, int endIndex) {
Preconditions.checkPositionIndexes(startIndex, endIndex, length());
return startIndex == endIndex
? EMPTY
: new ImmutableLongArray(array, start + startIndex, start + endIndex);
}
@IgnoreJRERequirement // used only from APIs that use streams
/*
* We declare this as package-private, rather than private, to avoid generating a synthetic
* accessor method (under -target 8) that would lack the Android flavor's @IgnoreJRERequirement.
*/
Spliterator.OfLong spliterator() {
return Spliterators.spliterator(array, start, end, Spliterator.IMMUTABLE | Spliterator.ORDERED);
}
/**
* Returns an immutable <i>view</i> of this array's values as a {@code List}; note that {@code
* long} values are boxed into {@link Long} instances on demand, which can be very expensive. The
* returned list should be used once and discarded. For any usages beyond that, pass the returned
* list to {@link com.google.common.collect.ImmutableList#copyOf(Collection) ImmutableList.copyOf}
* and use that list instead.
*/
public List<Long> asList() {
/*
* Typically we cache this kind of thing, but much repeated use of this view is a performance
* anti-pattern anyway. If we cache, then everyone pays a price in memory footprint even if
* they never use this method.
*/
return new AsList(this);
}
private static final
|
Builder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/ResolvedIndices.java
|
{
"start": 2551,
"end": 2728
}
|
class ____ ResolvedIndices that allows for the building of a list of indices
* without the need to construct new objects and merging them together
*/
public static
|
for
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1375/Target.java
|
{
"start": 232,
"end": 366
}
|
class ____ {
Nested nested;
public void setNested(Nested nested) {
this.nested = nested;
}
public static
|
Target
|
java
|
spring-projects__spring-boot
|
module/spring-boot-security-oauth2-resource-server/src/main/java/org/springframework/boot/security/oauth2/server/resource/autoconfigure/servlet/OAuth2ResourceServerJwtConfiguration.java
|
{
"start": 3727,
"end": 3868
}
|
class ____ {
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(JwtDecoder.class)
static
|
OAuth2ResourceServerJwtConfiguration
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
|
{
"start": 3817,
"end": 7957
}
|
class ____ implements JobPropertyParser {
private Field[] mrFields = MRJobConfig.class.getFields();
private DecimalFormat format = new DecimalFormat();
private JobConf configuration = new JobConf(false);
private static final Pattern MAX_HEAP_PATTERN =
Pattern.compile("-Xmx[0-9]+[kKmMgGtT]?+");
private static final Pattern MIN_HEAP_PATTERN =
Pattern.compile("-Xms[0-9]+[kKmMgGtT]?+");
// turn off the warning w.r.t deprecated mapreduce keys
static {
Logger.getLogger(Configuration.class).setLevel(Level.OFF);
}
/**
 * Accepts a key if there is a corresponding key in the current mapreduce
 * configuration.
 */
private boolean accept(String key) {
    return getLatestKeyName(key) != null;
}
/**
 * Finds a corresponding key for the specified key in the current mapreduce
 * setup, or returns null when no MRJobConfig field maps to it.
 *
 * <p>Note that this API uses a cached copy of the Configuration object. This
 * is purely for performance reasons.
 */
private String getLatestKeyName(String key) {
    // set the specified key
    configuration.set(key, key);
    try {
        // check if keys in MRConfig maps to the specified key.
        for (Field f : mrFields) {
            String mrKey = f.get(f.getName()).toString();
            // Deprecated keys are forwarded by Configuration, so a hit here
            // means mrKey is the current name for the key we set above.
            if (configuration.get(mrKey) != null) {
                return mrKey;
            }
        }
        // unset the key
        return null;
    } catch (IllegalAccessException iae) {
        throw new RuntimeException(iae);
    } finally {
        // clean up!
        configuration.clear();
    }
}
/**
 * Parses a job property: returns a typed value for keys recognized by the
 * current mapreduce configuration, or null for unknown keys.
 */
@Override
public DataType<?> parseJobProperty(String key, String value) {
    if (accept(key)) {
        return fromString(key, value);
    }
    return null;
}
/**
 * Extracts the -Xmx heap option from the specified string.
 *
 * @param javaOptions space-separated JVM options to scan
 * @param heapOpts receives options matching the -Xmx pattern
 * @param others receives all remaining options
 */
public static void extractMaxHeapOpts(final String javaOptions,
                                      List<String> heapOpts,
                                      List<String> others) {
    for (String opt : javaOptions.split(" ")) {
        Matcher matcher = MAX_HEAP_PATTERN.matcher(opt);
        if (matcher.find()) {
            heapOpts.add(opt);
        } else {
            others.add(opt);
        }
    }
}
/**
 * Extracts the -Xms heap option from the specified string.
 *
 * @param javaOptions space-separated JVM options to scan
 * @param heapOpts receives options matching the -Xms pattern
 * @param others receives all remaining options
 */
public static void extractMinHeapOpts(String javaOptions,
                                      List<String> heapOpts, List<String> others) {
    for (String opt : javaOptions.split(" ")) {
        Matcher matcher = MIN_HEAP_PATTERN.matcher(opt);
        if (matcher.find()) {
            heapOpts.add(opt);
        } else {
            others.add(opt);
        }
    }
}
// Maps the value of the specified key.
private DataType<?> fromString(String key, String value) {
DefaultDataType defaultValue = new DefaultDataType(value);
if (value != null) {
// check known configs
// job-name
String latestKey = getLatestKeyName(key);
if (MRJobConfig.JOB_NAME.equals(latestKey)) {
return new JobName(value);
}
// user-name
if (MRJobConfig.USER_NAME.equals(latestKey)) {
return new UserName(value);
}
// queue-name
if (MRJobConfig.QUEUE_NAME.equals(latestKey)) {
return new QueueName(value);
}
if (MRJobConfig.MAP_JAVA_OPTS.equals(latestKey)
|| MRJobConfig.REDUCE_JAVA_OPTS.equals(latestKey)) {
List<String> heapOptions = new ArrayList<String>();
extractMaxHeapOpts(value, heapOptions, new ArrayList<String>());
extractMinHeapOpts(value, heapOptions, new ArrayList<String>());
return new DefaultDataType(StringUtils.join(heapOptions, ' '));
}
//TODO compression?
//TODO Other job configs like FileOutputFormat/FileInputFormat etc
// check if the config parameter represents a number
try {
format.parse(value);
return defaultValue;
} catch (ParseException pe) {}
// check if the config parameters represents a boolean
// avoiding exceptions
if ("true".equals(value) || "false".equals(value)) {
return defaultValue;
}
// check if the config parameter represents a
|
MapReduceJobPropertiesParser
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/pool/WrapperAdapter.java
|
{
"start": 682,
"end": 1153
}
|
/**
 * Base implementation of {@link java.sql.Wrapper}: an instance "wraps" exactly
 * the interfaces it itself implements.
 */
class ____ implements Wrapper {
    // NOTE(review): the constructor name must match the actual class name;
    // the masked "____" here does not — confirm against the original file.
    public WrapperAdapter() {
    }

    /** Returns true when {@code iface} is non-null and this instance implements it. */
    @Override
    public boolean isWrapperFor(Class<?> iface) {
        return iface != null && iface.isInstance(this);
    }

    /**
     * Returns this instance cast to {@code iface} when compatible, otherwise null.
     *
     * <p>NOTE(review): the JDBC {@code Wrapper.unwrap} contract normally throws
     * SQLException when the receiver does not wrap the requested type; returning
     * null looks deliberate here — confirm callers expect a null result.
     */
    @SuppressWarnings("unchecked")
    @Override
    public <T> T unwrap(Class<T> iface) {
        if (iface == null) {
            return null;
        }
        if (iface.isInstance(this)) {
            return (T) this;
        }
        return null;
    }
}
|
WrapperAdapter
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/DefaultMessageHeaderTest.java
|
{
"start": 1138,
"end": 9669
}
|
/**
 * Tests for {@code DefaultMessage} header handling: lookups and removals are
 * case-insensitive, and {@code removeHeaders} supports wildcard patterns with
 * optional exclusions.
 */
class ____ {
    private CamelContext camelContext;

    @BeforeEach
    protected void setUp() {
        camelContext = new DefaultCamelContext();
        camelContext.start();
    }

    // A header set as "foo" is readable under any casing.
    @Test
    public void testLookupCaseAgnostic() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("foo", "cheese");
        assertEquals("cheese", msg.getHeader("foo"));
        assertEquals("cheese", msg.getHeader("Foo"));
        assertEquals("cheese", msg.getHeader("FOO"));
    }

    @Test
    public void testLookupCaseAgnosticAddHeader() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("foo", "cheese");
        assertEquals("cheese", msg.getHeader("foo"));
        assertEquals("cheese", msg.getHeader("Foo"));
        assertEquals("cheese", msg.getHeader("FOO"));
        assertNull(msg.getHeader("unknown"));
        msg.setHeader("bar", "beer");
        assertEquals("beer", msg.getHeader("bar"));
        assertEquals("beer", msg.getHeader("Bar"));
        assertEquals("beer", msg.getHeader("BAR"));
        assertNull(msg.getHeader("unknown"));
    }

    // Same as above but querying the upper-case form first.
    @Test
    public void testLookupCaseAgnosticAddHeader2() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("foo", "cheese");
        assertEquals("cheese", msg.getHeader("FOO"));
        assertEquals("cheese", msg.getHeader("foo"));
        assertEquals("cheese", msg.getHeader("Foo"));
        assertNull(msg.getHeader("unknown"));
        msg.setHeader("bar", "beer");
        assertEquals("beer", msg.getHeader("BAR"));
        assertEquals("beer", msg.getHeader("bar"));
        assertEquals("beer", msg.getHeader("Bar"));
        assertNull(msg.getHeader("unknown"));
    }

    @Test
    public void testLookupCaseAgnosticAddHeaderRemoveHeader() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("foo", "cheese");
        assertEquals("cheese", msg.getHeader("foo"));
        assertEquals("cheese", msg.getHeader("Foo"));
        assertEquals("cheese", msg.getHeader("FOO"));
        assertNull(msg.getHeader("unknown"));
        msg.setHeader("bar", "beer");
        assertEquals("beer", msg.getHeader("bar"));
        assertEquals("beer", msg.getHeader("Bar"));
        assertEquals("beer", msg.getHeader("BAR"));
        assertNull(msg.getHeader("unknown"));
        msg.removeHeader("bar");
        assertNull(msg.getHeader("bar"));
        assertNull(msg.getHeader("unknown"));
    }

    // Setting under a different casing overwrites the existing entry.
    @Test
    public void testSetWithDifferentCase() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("foo", "cheese");
        msg.setHeader("Foo", "bar");
        assertEquals("bar", msg.getHeader("FOO"));
        assertEquals("bar", msg.getHeader("foo"));
        assertEquals("bar", msg.getHeader("Foo"));
    }

    @Test
    public void testRemoveWithDifferentCase() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("foo", "cheese");
        msg.setHeader("Foo", "bar");
        assertEquals("bar", msg.getHeader("FOO"));
        assertEquals("bar", msg.getHeader("foo"));
        assertEquals("bar", msg.getHeader("Foo"));
        msg.removeHeader("FOO");
        assertNull(msg.getHeader("foo"));
        assertNull(msg.getHeader("Foo"));
        assertNull(msg.getHeader("FOO"));
        assertTrue(msg.getHeaders().isEmpty());
    }

    @Test
    public void testRemoveHeaderWithNullValue() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("tick", null);
        msg.removeHeader("tick");
        assertTrue(msg.getHeaders().isEmpty());
    }

    // "t*" matches all three headers set below.
    @Test
    public void testRemoveHeadersWithWildcard() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("tick", "bla");
        msg.setHeader("tack", "blaa");
        msg.setHeader("tock", "blaaa");
        assertEquals("bla", msg.getHeader("tick"));
        assertEquals("blaa", msg.getHeader("tack"));
        assertEquals("blaaa", msg.getHeader("tock"));
        msg.removeHeaders("t*");
        assertTrue(msg.getHeaders().isEmpty());
    }

    @Test
    public void testRemoveHeadersAllWithWildcard() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("tick", "bla");
        msg.setHeader("tack", "blaa");
        msg.setHeader("tock", "blaaa");
        assertEquals("bla", msg.getHeader("tick"));
        assertEquals("blaa", msg.getHeader("tack"));
        assertEquals("blaaa", msg.getHeader("tock"));
        msg.removeHeaders("*");
        assertTrue(msg.getHeaders().isEmpty());
    }

    // Headers named in the exclude varargs survive the wildcard removal.
    @Test
    public void testRemoveHeadersWithExclude() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("tick", "bla");
        msg.setHeader("tiack", "blaa");
        msg.setHeader("tiock", "blaaa");
        msg.setHeader("tiuck", "blaaaa");
        msg.removeHeaders("ti*", "tiuck", "tiack");
        assertEquals(2, msg.getHeaders().size());
        assertEquals("blaa", msg.getHeader("tiack"));
        assertEquals("blaaaa", msg.getHeader("tiuck"));
    }

    @Test
    public void testRemoveHeadersAllWithExclude() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("tick", "bla");
        msg.setHeader("tack", "blaa");
        msg.setHeader("tock", "blaaa");
        assertEquals("bla", msg.getHeader("tick"));
        assertEquals("blaa", msg.getHeader("tack"));
        assertEquals("blaaa", msg.getHeader("tock"));
        msg.removeHeaders("*", "tick", "tock", "toe");
        // new message headers
        assertEquals("bla", msg.getHeader("tick"));
        assertNull(msg.getHeader("tack"));
        assertEquals("blaaa", msg.getHeader("tock"));
    }

    // Exclude patterns may themselves contain wildcards.
    @Test
    public void testRemoveHeadersWithWildcardInExclude() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("tick", "bla");
        msg.setHeader("tack", "blaa");
        msg.setHeader("taick", "blaa");
        msg.setHeader("tock", "blaaa");
        msg.removeHeaders("*", "ta*");
        assertEquals(2, msg.getHeaders().size());
        assertEquals("blaa", msg.getHeader("tack"));
        assertEquals("blaa", msg.getHeader("taick"));
    }

    // A null pattern removes nothing.
    @Test
    public void testRemoveHeadersWithNulls() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("tick", "bla");
        msg.setHeader("tack", "blaa");
        msg.setHeader("tock", "blaaa");
        msg.setHeader("taack", "blaaaa");
        assertEquals("bla", msg.getHeader("tick"));
        assertEquals("blaa", msg.getHeader("tack"));
        assertEquals("blaaa", msg.getHeader("tock"));
        assertEquals("blaaaa", msg.getHeader("taack"));
        msg.removeHeaders(null, null, null, null);
        assertFalse(msg.getHeaders().isEmpty());
    }

    // Excludes that match nothing do not block the wildcard removal.
    @Test
    public void testRemoveHeadersWithNonExcludeHeaders() {
        Message msg = new DefaultMessage(camelContext);
        assertNull(msg.getHeader("foo"));
        msg.setHeader("tick", "bla");
        msg.setHeader("tack", "blaa");
        msg.setHeader("tock", "blaaa");
        msg.removeHeaders("*", "camels", "are", "fun");
        assertTrue(msg.getHeaders().isEmpty());
    }

    @Test
    public void testWithDefaults() {
        DefaultMessage msg = new DefaultMessage(camelContext);
        // must have exchange so to leverage the type converters
        msg.setExchange(new DefaultExchange(new DefaultCamelContext()));
        assertNull(msg.getHeader("foo"));
        msg.setHeader("foo", "cheese");
        assertEquals("cheese", msg.getHeader("foo"));
        assertEquals("cheese", msg.getHeader("foo", "foo"));
        assertEquals("cheese", msg.getHeader("foo", "foo", String.class));
        assertNull(msg.getHeader("beer"));
        assertEquals("foo", msg.getHeader("beer", "foo"));
        assertEquals(Integer.valueOf(123), msg.getHeader("beer", "123", Integer.class));
    }
}
|
DefaultMessageHeaderTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java
|
{
"start": 1085,
"end": 3860
}
|
class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CeilDoubleEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator val;
private final DriverContext driverContext;
private Warnings warnings;
public CeilDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val,
DriverContext driverContext) {
this.source = source;
this.val = val;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (DoubleBlock valBlock = (DoubleBlock) val.eval(page)) {
DoubleVector valVector = valBlock.asVector();
if (valVector == null) {
return eval(page.getPositionCount(), valBlock);
}
return eval(page.getPositionCount(), valVector).asBlock();
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += val.baseRamBytesUsed();
return baseRamBytesUsed;
}
public DoubleBlock eval(int positionCount, DoubleBlock valBlock) {
try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (valBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
double val = valBlock.getDouble(valBlock.getFirstValueIndex(p));
result.appendDouble(Ceil.process(val));
}
return result.build();
}
}
public DoubleVector eval(int positionCount, DoubleVector valVector) {
try(DoubleVector.FixedBuilder result = driverContext.blockFactory().newDoubleVectorFixedBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
double val = valVector.getDouble(p);
result.appendDouble(p, Ceil.process(val));
}
return result.build();
}
}
@Override
public String toString() {
return "CeilDoubleEvaluator[" + "val=" + val + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(val);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static
|
CeilDoubleEvaluator
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/runner/src/main/java/io/quarkus/bootstrap/runner/RunnerClassLoader.java
|
{
"start": 12252,
"end": 13461
}
|
class ____ in the default package
return null;
}
return resourceName.substring(0, index);
}
/**
* This method is needed to make packages work correctly on JDK9+, as it will be called
* to load the package-info class.
*
* @param moduleName
* @param name
* @return
*/
//@Override
protected Class<?> findClass(String moduleName, String name) {
try {
return loadClass(name, false);
} catch (ClassNotFoundException e) {
return null;
}
}
public void close() {
for (Map.Entry<String, ClassLoadingResource[]> entry : resourceDirectoryMap.entrySet()) {
for (ClassLoadingResource i : entry.getValue()) {
i.close();
}
}
}
public void resetInternalCaches() {
synchronized (this.currentlyBufferedResources) {
for (Map.Entry<String, ClassLoadingResource[]> entry : resourceDirectoryMap.entrySet()) {
for (ClassLoadingResource i : entry.getValue()) {
i.resetInternalCaches();
}
}
this.postBootPhase = true;
}
}
|
is
|
java
|
playframework__playframework
|
documentation/manual/working/javaGuide/main/http/code/javaguide/http/JavaActionsComposition.java
|
{
"start": 2375,
"end": 2516
}
|
class ____ {
    /** Stub finder for the documentation sample: ignores {@code id} and returns a fresh User. */
    public static User findById(Integer id) {
        return new User();
    }
}
// #pass-arg-action
// ###replace: public
|
User
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/main/java/org/springframework/web/servlet/config/annotation/DelegatingWebMvcConfiguration.java
|
{
"start": 1728,
"end": 1889
}
|
class ____ imported by {@link EnableWebMvc @EnableWebMvc}.
*
* @author Rossen Stoyanchev
* @since 3.1
*/
@Configuration(proxyBeanMethods = false)
public
|
actually
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/executor/loader/WriteReplaceInterface.java
|
{
"start": 776,
"end": 868
}
|
interface ____ {
    /**
     * Java serialization hook: implementors return the object that should be
     * serialized in place of {@code this}.
     */
    Object writeReplace() throws ObjectStreamException;
}
|
WriteReplaceInterface
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/ttl/TtlAwareSerializerUpgradeTest.java
|
{
"start": 3383,
"end": 3961
}
|
/**
 * Pre-upgrade setup that writes test data with a TTL-aware serializer wrapping
 * a TtlSerializer of (long timestamp, String value).
 */
class ____
        implements TypeSerializerUpgradeTestBase.PreUpgradeSetup<TtlValue<String>> {

    @Override
    public TypeSerializer<TtlValue<String>> createPriorSerializer() {
        return new TtlAwareSerializer<>(
                new TtlStateFactory.TtlSerializer<>(
                        LongSerializer.INSTANCE, StringSerializer.INSTANCE));
    }

    /** Returns TEST_DATA wrapped in a TtlValue with a fixed timestamp of 13. */
    @Override
    public TtlValue<String> createTestData() {
        return new TtlValue<>(TEST_DATA, 13);
    }
}
public static final
|
TtlAwareSerializerEnablingTtlSetup
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/StandardDomainModel.java
|
{
"start": 657,
"end": 1196
}
|
/**
 * Registry of the standard test domain models; each constant delegates to the
 * singleton descriptor of its model.
 */
enum ____ {
    CONTACTS( ContactsDomainModel.INSTANCE ),
    ANIMAL( AnimalDomainModel.INSTANCE ),
    GAMBIT( GambitDomainModel.INSTANCE ),
    HELPDESK( HelpDeskDomainModel.INSTANCE ),
    RETAIL( RetailDomainModel.INSTANCE ),
    USERGUIDE( UserguideDomainModel.INSTANCE ),
    LIBRARY( LibraryDomainModel.INSTANCE );

    private final DomainModelDescriptor domainModel;

    StandardDomainModel(DomainModelDescriptor domainModel) {
        this.domainModel = domainModel;
    }

    /** Returns the descriptor backing this standard domain model. */
    public DomainModelDescriptor getDescriptor() {
        return domainModel;
    }
}
|
StandardDomainModel
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/TypeParameterUnusedInFormalsTest.java
|
{
"start": 3630,
"end": 4035
}
|
interface ____<T> {}
// BUG: Diagnostic contains:
static <T extends Foo<?>> T doCast(Object o) {
return (T) o;
}
}
""")
.doTest();
}
@Test
public void okGenericFactory() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.util.List;
|
Foo
|
java
|
google__guice
|
core/test/com/google/inject/BinderTestSuite.java
|
{
"start": 22630,
"end": 22942
}
|
class ____ extends Injectable {
    /**
     * Method-injection point: Guice supplies both the bound instance and its
     * provider, which are stored for later assertions by the test suite.
     */
    @Inject
    public void inject(
            AWithImplementedBy aWithImplementedBy,
            Provider<AWithImplementedBy> aWithImplementedByProvider) {
        this.value = aWithImplementedBy;
        this.provider = aWithImplementedByProvider;
    }
}
static
|
InjectsAWithImplementedBy
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/strategy/PipelinedRegionSchedulingStrategy.java
|
{
"start": 1641,
"end": 18233
}
|
class ____ implements SchedulingStrategy {
private final SchedulerOperations schedulerOperations;
private final SchedulingTopology schedulingTopology;
/** External consumer regions of each ConsumedPartitionGroup. */
private final Map<ConsumedPartitionGroup, Set<SchedulingPipelinedRegion>>
partitionGroupConsumerRegions = new IdentityHashMap<>();
private final Map<SchedulingPipelinedRegion, List<ExecutionVertexID>> regionVerticesSorted =
new IdentityHashMap<>();
/** All produced partition groups of one schedulingPipelinedRegion. */
private final Map<SchedulingPipelinedRegion, Set<ConsumedPartitionGroup>>
producedPartitionGroupsOfRegion = new IdentityHashMap<>();
/** The ConsumedPartitionGroups which are produced by multiple regions. */
private final Set<ConsumedPartitionGroup> crossRegionConsumedPartitionGroups =
Collections.newSetFromMap(new IdentityHashMap<>());
private final Set<SchedulingPipelinedRegion> scheduledRegions =
Collections.newSetFromMap(new IdentityHashMap<>());
public PipelinedRegionSchedulingStrategy(
final SchedulerOperations schedulerOperations,
final SchedulingTopology schedulingTopology) {
this.schedulerOperations = checkNotNull(schedulerOperations);
this.schedulingTopology = checkNotNull(schedulingTopology);
init();
}
private void init() {
initCrossRegionConsumedPartitionGroups();
initPartitionGroupConsumerRegions();
initProducedPartitionGroupsOfRegion();
for (SchedulingExecutionVertex vertex : schedulingTopology.getVertices()) {
final SchedulingPipelinedRegion region =
schedulingTopology.getPipelinedRegionOfVertex(vertex.getId());
regionVerticesSorted
.computeIfAbsent(region, r -> new ArrayList<>())
.add(vertex.getId());
}
}
private void initProducedPartitionGroupsOfRegion() {
for (SchedulingPipelinedRegion region : schedulingTopology.getAllPipelinedRegions()) {
Set<ConsumedPartitionGroup> producedPartitionGroupsSetOfRegion = new HashSet<>();
for (SchedulingExecutionVertex executionVertex : region.getVertices()) {
producedPartitionGroupsSetOfRegion.addAll(
IterableUtils.toStream(executionVertex.getProducedResults())
.flatMap(
partition ->
partition.getConsumedPartitionGroups().stream())
.collect(Collectors.toSet()));
}
producedPartitionGroupsOfRegion.put(region, producedPartitionGroupsSetOfRegion);
}
}
private void initCrossRegionConsumedPartitionGroups() {
final Map<ConsumedPartitionGroup, Set<SchedulingPipelinedRegion>>
producerRegionsByConsumedPartitionGroup = new IdentityHashMap<>();
for (SchedulingPipelinedRegion pipelinedRegion :
schedulingTopology.getAllPipelinedRegions()) {
for (ConsumedPartitionGroup consumedPartitionGroup :
pipelinedRegion.getAllNonPipelinedConsumedPartitionGroups()) {
producerRegionsByConsumedPartitionGroup.computeIfAbsent(
consumedPartitionGroup, this::getProducerRegionsForConsumedPartitionGroup);
}
}
for (SchedulingPipelinedRegion pipelinedRegion :
schedulingTopology.getAllPipelinedRegions()) {
for (ConsumedPartitionGroup consumedPartitionGroup :
pipelinedRegion.getAllNonPipelinedConsumedPartitionGroups()) {
final Set<SchedulingPipelinedRegion> producerRegions =
producerRegionsByConsumedPartitionGroup.get(consumedPartitionGroup);
if (producerRegions.size() > 1 && producerRegions.contains(pipelinedRegion)) {
crossRegionConsumedPartitionGroups.add(consumedPartitionGroup);
}
}
}
}
private Set<SchedulingPipelinedRegion> getProducerRegionsForConsumedPartitionGroup(
ConsumedPartitionGroup consumedPartitionGroup) {
final Set<SchedulingPipelinedRegion> producerRegions =
Collections.newSetFromMap(new IdentityHashMap<>());
for (IntermediateResultPartitionID partitionId : consumedPartitionGroup) {
producerRegions.add(getProducerRegion(partitionId));
}
return producerRegions;
}
private SchedulingPipelinedRegion getProducerRegion(IntermediateResultPartitionID partitionId) {
return schedulingTopology.getPipelinedRegionOfVertex(
schedulingTopology.getResultPartition(partitionId).getProducer().getId());
}
private void initPartitionGroupConsumerRegions() {
for (SchedulingPipelinedRegion region : schedulingTopology.getAllPipelinedRegions()) {
for (ConsumedPartitionGroup consumedPartitionGroup :
region.getAllNonPipelinedConsumedPartitionGroups()) {
if (crossRegionConsumedPartitionGroups.contains(consumedPartitionGroup)
|| isExternalConsumedPartitionGroup(consumedPartitionGroup, region)) {
partitionGroupConsumerRegions
.computeIfAbsent(consumedPartitionGroup, group -> new HashSet<>())
.add(region);
}
}
}
}
private Set<SchedulingPipelinedRegion> getBlockingDownstreamRegionsOfVertex(
SchedulingExecutionVertex executionVertex) {
return IterableUtils.toStream(executionVertex.getProducedResults())
.filter(partition -> !partition.getResultType().canBePipelinedConsumed())
.flatMap(partition -> partition.getConsumedPartitionGroups().stream())
.filter(
group ->
crossRegionConsumedPartitionGroups.contains(group)
|| group.areAllPartitionsFinished())
.flatMap(
partitionGroup ->
partitionGroupConsumerRegions
.getOrDefault(partitionGroup, Collections.emptySet())
.stream())
.collect(Collectors.toSet());
}
@Override
public void startScheduling() {
final Set<SchedulingPipelinedRegion> sourceRegions =
IterableUtils.toStream(schedulingTopology.getAllPipelinedRegions())
.filter(this::isSourceRegion)
.collect(Collectors.toSet());
maybeScheduleRegions(sourceRegions);
}
private boolean isSourceRegion(SchedulingPipelinedRegion region) {
for (ConsumedPartitionGroup consumedPartitionGroup :
region.getAllNonPipelinedConsumedPartitionGroups()) {
if (crossRegionConsumedPartitionGroups.contains(consumedPartitionGroup)
|| isExternalConsumedPartitionGroup(consumedPartitionGroup, region)) {
return false;
}
}
return true;
}
@Override
public void restartTasks(final Set<ExecutionVertexID> verticesToRestart) {
final Set<SchedulingPipelinedRegion> regionsToRestart =
verticesToRestart.stream()
.map(schedulingTopology::getPipelinedRegionOfVertex)
.collect(Collectors.toSet());
scheduledRegions.removeAll(regionsToRestart);
maybeScheduleRegions(regionsToRestart);
}
@Override
public void onExecutionStateChange(
final ExecutionVertexID executionVertexId, final ExecutionState executionState) {
if (executionState == ExecutionState.FINISHED) {
maybeScheduleRegions(
getBlockingDownstreamRegionsOfVertex(
schedulingTopology.getVertex(executionVertexId)));
}
}
@Override
public void onPartitionConsumable(final IntermediateResultPartitionID resultPartitionId) {}
private void maybeScheduleRegions(final Set<SchedulingPipelinedRegion> regions) {
final Set<SchedulingPipelinedRegion> regionsToSchedule = new HashSet<>();
Set<SchedulingPipelinedRegion> nextRegions = regions;
while (!nextRegions.isEmpty()) {
nextRegions = addSchedulableAndGetNextRegions(nextRegions, regionsToSchedule);
}
// schedule regions in topological order.
SchedulingStrategyUtils.sortPipelinedRegionsInTopologicalOrder(
schedulingTopology, regionsToSchedule)
.forEach(this::scheduleRegion);
}
private Set<SchedulingPipelinedRegion> addSchedulableAndGetNextRegions(
Set<SchedulingPipelinedRegion> currentRegions,
Set<SchedulingPipelinedRegion> regionsToSchedule) {
Set<SchedulingPipelinedRegion> nextRegions = new HashSet<>();
// cache consumedPartitionGroup's consumable status to avoid compute repeatedly.
final Map<ConsumedPartitionGroup, Boolean> consumableStatusCache = new HashMap<>();
final Set<ConsumedPartitionGroup> visitedConsumedPartitionGroups = new HashSet<>();
for (SchedulingPipelinedRegion currentRegion : currentRegions) {
if (isRegionSchedulable(currentRegion, consumableStatusCache, regionsToSchedule)) {
regionsToSchedule.add(currentRegion);
producedPartitionGroupsOfRegion
.getOrDefault(currentRegion, Collections.emptySet())
.forEach(
(producedPartitionGroup) -> {
if (!producedPartitionGroup
.getResultPartitionType()
.canBePipelinedConsumed()) {
return;
}
// If this group has been visited, there is no need
// to repeat the determination.
if (visitedConsumedPartitionGroups.contains(
producedPartitionGroup)) {
return;
}
visitedConsumedPartitionGroups.add(producedPartitionGroup);
nextRegions.addAll(
partitionGroupConsumerRegions.getOrDefault(
producedPartitionGroup,
Collections.emptySet()));
});
}
}
return nextRegions;
}
private boolean isRegionSchedulable(
final SchedulingPipelinedRegion region,
final Map<ConsumedPartitionGroup, Boolean> consumableStatusCache,
final Set<SchedulingPipelinedRegion> regionToSchedule) {
return !regionToSchedule.contains(region)
&& !scheduledRegions.contains(region)
&& areRegionInputsAllConsumable(region, consumableStatusCache, regionToSchedule);
}
private void scheduleRegion(final SchedulingPipelinedRegion region) {
checkState(
areRegionVerticesAllInCreatedState(region),
"BUG: trying to schedule a region which is not in CREATED state");
scheduledRegions.add(region);
schedulerOperations.allocateSlotsAndDeploy(regionVerticesSorted.get(region));
}
private boolean areRegionInputsAllConsumable(
final SchedulingPipelinedRegion region,
final Map<ConsumedPartitionGroup, Boolean> consumableStatusCache,
final Set<SchedulingPipelinedRegion> regionToSchedule) {
for (ConsumedPartitionGroup consumedPartitionGroup :
region.getAllNonPipelinedConsumedPartitionGroups()) {
if (crossRegionConsumedPartitionGroups.contains(consumedPartitionGroup)) {
if (!isDownstreamOfCrossRegionConsumedPartitionSchedulable(
consumedPartitionGroup, region, regionToSchedule)) {
return false;
}
} else if (isExternalConsumedPartitionGroup(consumedPartitionGroup, region)) {
if (!consumableStatusCache.computeIfAbsent(
consumedPartitionGroup,
(group) ->
isDownstreamConsumedPartitionGroupSchedulable(
group, regionToSchedule))) {
return false;
}
}
}
return true;
}
private boolean isDownstreamConsumedPartitionGroupSchedulable(
final ConsumedPartitionGroup consumedPartitionGroup,
final Set<SchedulingPipelinedRegion> regionToSchedule) {
if (consumedPartitionGroup.getResultPartitionType().canBePipelinedConsumed()) {
for (IntermediateResultPartitionID partitionId : consumedPartitionGroup) {
SchedulingPipelinedRegion producerRegion = getProducerRegion(partitionId);
if (!scheduledRegions.contains(producerRegion)
&& !regionToSchedule.contains(producerRegion)) {
return false;
}
}
} else {
for (IntermediateResultPartitionID partitionId : consumedPartitionGroup) {
if (schedulingTopology.getResultPartition(partitionId).getState()
!= ResultPartitionState.ALL_DATA_PRODUCED) {
return false;
}
}
}
return true;
}
private boolean isDownstreamOfCrossRegionConsumedPartitionSchedulable(
final ConsumedPartitionGroup consumedPartitionGroup,
final SchedulingPipelinedRegion pipelinedRegion,
final Set<SchedulingPipelinedRegion> regionToSchedule) {
if (consumedPartitionGroup.getResultPartitionType().canBePipelinedConsumed()) {
for (IntermediateResultPartitionID partitionId : consumedPartitionGroup) {
if (isExternalConsumedPartition(partitionId, pipelinedRegion)) {
SchedulingPipelinedRegion producerRegion = getProducerRegion(partitionId);
if (!regionToSchedule.contains(producerRegion)
&& !scheduledRegions.contains(producerRegion)) {
return false;
}
}
}
} else {
for (IntermediateResultPartitionID partitionId : consumedPartitionGroup) {
if (isExternalConsumedPartition(partitionId, pipelinedRegion)
&& schedulingTopology.getResultPartition(partitionId).getState()
!= ResultPartitionState.ALL_DATA_PRODUCED) {
return false;
}
}
}
return true;
}
private boolean areRegionVerticesAllInCreatedState(final SchedulingPipelinedRegion region) {
for (SchedulingExecutionVertex vertex : region.getVertices()) {
if (vertex.getState() != ExecutionState.CREATED) {
return false;
}
}
return true;
}
private boolean isExternalConsumedPartitionGroup(
ConsumedPartitionGroup consumedPartitionGroup,
SchedulingPipelinedRegion pipelinedRegion) {
return isExternalConsumedPartition(consumedPartitionGroup.getFirst(), pipelinedRegion);
}
private boolean isExternalConsumedPartition(
IntermediateResultPartitionID partitionId, SchedulingPipelinedRegion pipelinedRegion) {
return !pipelinedRegion.contains(
schedulingTopology.getResultPartition(partitionId).getProducer().getId());
}
@VisibleForTesting
Set<ConsumedPartitionGroup> getCrossRegionConsumedPartitionGroups() {
return Collections.unmodifiableSet(crossRegionConsumedPartitionGroups);
}
/** The factory for creating {@link PipelinedRegionSchedulingStrategy}. */
public static
|
PipelinedRegionSchedulingStrategy
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertEndsWith_Test.java
|
{
"start": 1812,
"end": 6633
}
|
class ____ extends IterablesBaseTest {
@Override
@BeforeEach
public void setUp() {
super.setUp();
actual = newArrayList("Yoda", "Luke", "Leia", "Obi-Wan");
}
@Test
void should_throw_error_if_sequence_is_null() {
assertThatNullPointerException().isThrownBy(() -> iterables.assertEndsWith(someInfo(), actual, null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_pass_if_actual_and_sequence_are_empty() {
actual.clear();
iterables.assertEndsWith(someInfo(), actual, emptyArray());
}
@Test
void should_pass_if_sequence_to_look_for_is_empty_and_actual_is_not() {
iterables.assertEndsWith(someInfo(), actual, emptyArray());
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> iterables.assertEndsWith(someInfo(), null, array("Yoda")))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_sequence_is_bigger_than_actual() {
AssertionInfo info = someInfo();
Object[] sequence = { "Yoda", "Luke", "Leia", "Obi-Wan", "Han", "C-3PO", "R2-D2", "Anakin" };
Throwable error = catchThrowable(() -> iterables.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verifyFailureThrownWhenSequenceNotFound(info, sequence);
}
@Test
void should_fail_if_actual_does_not_end_with_sequence() {
AssertionInfo info = someInfo();
Object[] sequence = { "Han", "C-3PO" };
Throwable error = catchThrowable(() -> iterables.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verifyFailureThrownWhenSequenceNotFound(info, sequence);
}
@Test
void should_fail_if_actual_ends_with_first_elements_of_sequence_only_but_not_whole_sequence() {
AssertionInfo info = someInfo();
Object[] sequence = { "Leia", "Obi-Wan", "Han" };
Throwable error = catchThrowable(() -> iterables.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verifyFailureThrownWhenSequenceNotFound(info, sequence);
}
@Test
void should_fail_if_sequence_is_smaller_than_end_of_actual() {
AssertionInfo info = someInfo();
Object[] sequence = { "Luke", "Leia" };
Throwable error = catchThrowable(() -> iterables.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verifyFailureThrownWhenSequenceNotFound(info, sequence);
}
private void verifyFailureThrownWhenSequenceNotFound(AssertionInfo info, Object[] sequence) {
verify(failures).failure(info, shouldEndWith(actual, sequence));
}
@Test
void should_pass_if_actual_ends_with_sequence() {
iterables.assertEndsWith(someInfo(), actual, array("Luke", "Leia", "Obi-Wan"));
}
@Test
void should_pass_if_actual_and_sequence_are_equal() {
iterables.assertEndsWith(someInfo(), actual, array("Yoda", "Luke", "Leia", "Obi-Wan"));
}
// ------------------------------------------------------------------------------------------------------------------
// tests using a custom comparison strategy
// ------------------------------------------------------------------------------------------------------------------
@Test
void should_pass_if_actual_ends_with_sequence_according_to_custom_comparison_strategy() {
iterablesWithCaseInsensitiveComparisonStrategy.assertEndsWith(someInfo(), actual, array("luke", "LEIA", "Obi-Wan"));
}
@Test
void should_pass_if_actual_and_sequence_are_equal_according_to_custom_comparison_strategy() {
iterablesWithCaseInsensitiveComparisonStrategy.assertEndsWith(someInfo(), actual, array("YOda", "LUke", "Leia", "OBI-Wan"));
}
@Test
void should_fail_if_actual_does_not_end_with_sequence_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Object[] sequence = { "Han", "C-3PO" };
Throwable error = catchThrowable(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldEndWith(actual, sequence, comparisonStrategy));
}
@Test
void should_fail_if_actual_ends_with_first_elements_of_sequence_only_but_not_whole_sequence_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Object[] sequence = { "Leia", "Obi-Wan", "Han" };
Throwable error = catchThrowable(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertEndsWith(info, actual, sequence));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldEndWith(actual, sequence, comparisonStrategy));
}
}
|
Iterables_assertEndsWith_Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/NativeAnalyticsProcess.java
|
{
"start": 1221,
"end": 3658
}
|
class ____ extends AbstractNativeAnalyticsProcess<AnalyticsResult> {
private static final Logger logger = LogManager.getLogger(NativeAnalyticsProcess.class);
private static final String NAME = "analytics";
private final AnalyticsProcessConfig config;
protected NativeAnalyticsProcess(
String jobId,
NativeController nativeController,
ProcessPipes processPipes,
int numberOfFields,
List<Path> filesToDelete,
Consumer<String> onProcessCrash,
AnalyticsProcessConfig config,
NamedXContentRegistry namedXContentRegistry
) {
super(
NAME,
AnalyticsResult.PARSER,
jobId,
nativeController,
processPipes,
numberOfFields,
filesToDelete,
onProcessCrash,
namedXContentRegistry
);
this.config = Objects.requireNonNull(config);
}
@Override
public String getName() {
return NAME;
}
@Override
public AnalyticsProcessConfig getConfig() {
return config;
}
@Override
public void restoreState(Client client, String stateDocIdPrefix) throws IOException {
Objects.requireNonNull(stateDocIdPrefix);
try (OutputStream restoreStream = processRestoreStream()) {
int docNum = 0;
while (true) {
if (isProcessKilled()) {
return;
}
// We fetch the documents one at a time because all together they can amount to too much memory
SearchResponse stateResponse = client.prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern())
.setSize(1)
.setQuery(QueryBuilders.idsQuery().addIds(stateDocIdPrefix + ++docNum))
.get();
try {
if (stateResponse.getHits().getHits().length == 0) {
break;
}
SearchHit stateDoc = stateResponse.getHits().getAt(0);
logger.debug(() -> format("[%s] Restoring state document [%s]", config.jobId(), stateDoc.getId()));
StateToProcessWriterHelper.writeStateToStream(stateDoc.getSourceRef(), restoreStream);
} finally {
stateResponse.decRef();
}
}
}
}
}
|
NativeAnalyticsProcess
|
java
|
quarkusio__quarkus
|
independent-projects/qute/debug/src/test/java/io/quarkus/qute/debug/client/TracingMessageConsumer.java
|
{
"start": 1512,
"end": 1713
}
|
class ____ a copy/paste of
* https://github.com/eclipse-lsp4j/lsp4j/blob/main/org.eclipse.lsp4j.jsonrpc/src/main/java/org/eclipse/lsp4j/jsonrpc/TracingMessageConsumer.java
* adapted for IJ.
*/
public
|
is
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java
|
{
"start": 3712,
"end": 14142
}
|
class ____ extends WatcherMockScriptPlugin {
@Override
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
scripts.put("return true;", vars -> true);
scripts.put("return false;", vars -> false);
scripts.put(
"throw new IllegalStateException('failed');",
vars -> { throw new IllegalStateException("[expected] failed hard"); }
);
return scripts;
}
}
/**
* A hard failure is where an exception is thrown by the script condition.
*/
@SuppressWarnings("unchecked")
public void testActionConditionWithHardFailures() throws Exception {
final String id = "testActionConditionWithHardFailures";
final ExecutableCondition scriptConditionFailsHard = mockScriptCondition("throw new IllegalStateException('failed');");
final List<ExecutableCondition> actionConditionsWithFailure = Arrays.asList(
scriptConditionFailsHard,
conditionPasses,
InternalAlwaysCondition.INSTANCE
);
Collections.shuffle(actionConditionsWithFailure, random());
final int failedIndex = actionConditionsWithFailure.indexOf(scriptConditionFailsHard);
putAndTriggerWatch(id, input, actionConditionsWithFailure.toArray(new Condition[actionConditionsWithFailure.size()]));
flush();
assertWatchWithMinimumActionsCount(id, ExecutionState.EXECUTED, 1);
// only one action should have failed via condition
AtomicReference<SearchHit> searchHitReference = new AtomicReference<>();
assertBusy(() -> {
// Watcher history is now written asynchronously, so we check this in an assertBusy
ensureGreen(HistoryStoreField.DATA_STREAM);
final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id)));
try {
assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L)));
searchHitReference.set(response.getHits().getAt(0).asUnpooled());
} finally {
response.decRef();
}
});
final SearchHit hit = searchHitReference.get();
final List<Object> actions = getActionsFromHit(hit.getSourceAsMap());
for (int i = 0; i < actionConditionsWithFailure.size(); ++i) {
final Map<String, Object> action = (Map<String, Object>) actions.get(i);
final Map<String, Object> condition = (Map<String, Object>) action.get("condition");
final Map<String, Object> logging = (Map<String, Object>) action.get("logging");
assertThat(action.get("id"), is("action" + i));
if (i == failedIndex) {
assertThat(action.get("status"), is("condition_failed"));
assertThat(action.get("reason"), is("condition failed. skipping: [expected] failed hard"));
assertThat(condition, nullValue());
assertThat(logging, nullValue());
} else {
assertThat(condition.get("type"), is(actionConditionsWithFailure.get(i).type()));
assertThat(action.get("status"), is("success"));
assertThat(condition.get("met"), is(true));
assertThat(action.get("reason"), nullValue());
assertThat(logging.get("logged_text"), is(Integer.toString(i)));
}
}
}
@SuppressWarnings("unchecked")
public void testActionConditionWithFailures() throws Exception {
final String id = "testActionConditionWithFailures";
final ExecutableCondition[] actionConditionsWithFailure = new ExecutableCondition[] {
conditionFails,
conditionPasses,
InternalAlwaysCondition.INSTANCE };
Collections.shuffle(Arrays.asList(actionConditionsWithFailure), random());
final int failedIndex = Arrays.asList(actionConditionsWithFailure).indexOf(conditionFails);
putAndTriggerWatch(id, input, actionConditionsWithFailure);
assertWatchWithMinimumActionsCount(id, ExecutionState.EXECUTED, 1);
AtomicReference<SearchHit> searchHitReference = new AtomicReference<>();
// only one action should have failed via condition
assertBusy(() -> {
// Watcher history is now written asynchronously, so we check this in an assertBusy
ensureGreen(HistoryStoreField.DATA_STREAM);
final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id)));
try {
assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L)));
searchHitReference.set(response.getHits().getAt(0).asUnpooled());
} finally {
response.decRef();
}
});
final SearchHit hit = searchHitReference.get();
final List<Object> actions = getActionsFromHit(hit.getSourceAsMap());
for (int i = 0; i < actionConditionsWithFailure.length; ++i) {
final Map<String, Object> action = (Map<String, Object>) actions.get(i);
final Map<String, Object> condition = (Map<String, Object>) action.get("condition");
final Map<String, Object> logging = (Map<String, Object>) action.get("logging");
assertThat(action.get("id"), is("action" + i));
assertThat(condition.get("type"), is(actionConditionsWithFailure[i].type()));
if (i == failedIndex) {
assertThat(action.get("status"), is("condition_failed"));
assertThat(condition.get("met"), is(false));
assertThat(action.get("reason"), is("condition not met. skipping"));
assertThat(logging, nullValue());
} else {
assertThat(action.get("status"), is("success"));
assertThat(condition.get("met"), is(true));
assertThat(action.get("reason"), nullValue());
assertThat(logging.get("logged_text"), is(Integer.toString(i)));
}
}
}
@SuppressWarnings("unchecked")
public void testActionCondition() throws Exception {
final String id = "testActionCondition";
final List<ExecutableCondition> actionConditions = new ArrayList<>();
// actionConditions.add(conditionPasses);
actionConditions.add(InternalAlwaysCondition.INSTANCE);
/*
if (randomBoolean()) {
actionConditions.add(InternalAlwaysCondition.INSTANCE);
}
Collections.shuffle(actionConditions, random());
*/
putAndTriggerWatch(id, input, actionConditions.toArray(new Condition[actionConditions.size()]));
flush();
assertWatchWithMinimumActionsCount(id, ExecutionState.EXECUTED, 1);
AtomicReference<SearchHit> searchHitReference = new AtomicReference<>();
// all actions should be successful
assertBusy(() -> {
// Watcher history is now written asynchronously, so we check this in an assertBusy
ensureGreen(HistoryStoreField.DATA_STREAM);
final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id)));
try {
assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L)));
searchHitReference.set(response.getHits().getAt(0).asUnpooled());
} finally {
response.decRef();
}
});
final SearchHit hit = searchHitReference.get();
final List<Object> actions = getActionsFromHit(hit.getSourceAsMap());
for (int i = 0; i < actionConditions.size(); ++i) {
final Map<String, Object> action = (Map<String, Object>) actions.get(i);
final Map<String, Object> condition = (Map<String, Object>) action.get("condition");
final Map<String, Object> logging = (Map<String, Object>) action.get("logging");
assertThat(action.get("id"), is("action" + i));
assertThat(action.get("status"), is("success"));
assertThat(condition.get("type"), is(actionConditions.get(i).type()));
assertThat(condition.get("met"), is(true));
assertThat(action.get("reason"), nullValue());
assertThat(logging.get("logged_text"), is(Integer.toString(i)));
}
}
/**
* Get the "actions" from the Watch History hit.
*
* @param source The hit's source.
* @return The list of "actions"
*/
@SuppressWarnings("unchecked")
private List<Object> getActionsFromHit(final Map<String, Object> source) {
final Map<String, Object> result = (Map<String, Object>) source.get("result");
return (List<Object>) result.get("actions");
}
/**
* Create a Watch with the specified {@code id} and {@code input}.
* <p>
* The {@code actionConditions} are
*
* @param id The ID of the Watch
* @param input The input to use for the Watch
* @param actionConditions The conditions to add to the Watch
*/
private void putAndTriggerWatch(final String id, final Input input, final Condition... actionConditions) throws Exception {
WatchSourceBuilder source = watchBuilder().trigger(schedule(interval("5s")))
.input(input)
.condition(InternalAlwaysCondition.INSTANCE);
for (int i = 0; i < actionConditions.length; ++i) {
source.addAction("action" + i, actionConditions[i], loggingAction(Integer.toString(i)));
}
PutWatchResponse putWatchResponse = new PutWatchRequestBuilder(client(), id).setSource(source).get();
assertThat(putWatchResponse.isCreated(), is(true));
timeWarp().trigger(id);
}
/**
* Create an inline script using the {@link CustomScriptPlugin}.
*
* @param inlineScript The script to "compile" and run
* @return Never {@code null}
*/
private static ExecutableCondition mockScriptCondition(String inlineScript) {
Script script = new Script(ScriptType.INLINE, MockScriptPlugin.NAME, inlineScript, Collections.emptyMap());
return new ScriptCondition(script);
}
}
|
CustomScriptPlugin
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/RedundantOverrideTest.java
|
{
"start": 8692,
"end": 8894
}
|
class ____ {
protected void swap(@DemoAnnotation int a) {}
}
""")
.addSourceLines(
"B.java",
"""
package foo;
|
A
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/activities/ActivitiesLogger.java
|
{
"start": 12660,
"end": 15455
}
|
class ____ {
/*
* Invoked when node allocation finishes, and there's NO container
* allocated or reserved during the allocation
*/
public static void finishSkippedNodeAllocation(
ActivitiesManager activitiesManager, SchedulerNode node) {
finishAllocatedNodeAllocation(activitiesManager, node, null,
AllocationState.SKIPPED);
}
/*
* Invoked when node allocation finishes, and there's any container
* allocated or reserved during the allocation
*/
public static void finishAllocatedNodeAllocation(
ActivitiesManager activitiesManager, SchedulerNode node,
ContainerId containerId, AllocationState containerState) {
NodeId nodeId = getRecordingNodeId(activitiesManager, node);
if (nodeId == null) {
return;
}
if (activitiesManager.shouldRecordThisNode(nodeId)) {
activitiesManager.updateAllocationFinalState(nodeId,
containerId, containerState);
}
}
/*
* Invoked when node heartbeat finishes
*/
public static void finishNodeUpdateRecording(
ActivitiesManager activitiesManager, NodeId nodeID, String partition) {
if (activitiesManager == null) {
return;
}
activitiesManager.finishNodeUpdateRecording(nodeID, partition);
}
/*
* Invoked when node heartbeat starts
*/
public static void startNodeUpdateRecording(
ActivitiesManager activitiesManager, NodeId nodeID) {
if (activitiesManager == null) {
return;
}
activitiesManager.startNodeUpdateRecording(nodeID);
}
}
// Add queue, application or container activity into specific node allocation.
private static void recordActivity(ActivitiesManager activitiesManager,
NodeId nodeId, String parentName, String childName, Priority priority,
ActivityState state, String diagnostic, ActivityLevel level) {
activitiesManager.addSchedulingActivityForNode(nodeId, parentName,
childName, priority != null ? priority.getPriority() : null, state,
diagnostic, level, null);
}
private static NodeId getRecordingNodeId(ActivitiesManager activitiesManager,
SchedulerNode node) {
return activitiesManager == null ? null :
activitiesManager.getRecordingNodeId(node);
}
private static String getRequestName(Integer priority,
Long allocationRequestId) {
return "request_"
+ (priority == null ? "" : priority)
+ "_" + (allocationRequestId == null ? "" : allocationRequestId);
}
private static Integer getPriority(SchedulerRequestKey schedulerKey) {
Priority priority = schedulerKey == null ?
null : schedulerKey.getPriority();
return priority == null ? null : priority.getPriority();
}
}
|
NODE
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/nosql/NoSqlDatabaseManagerTest.java
|
{
"start": 10520,
"end": 12434
}
|
class ____ not correct.", "com.bar.Foo", source.get("className"));
assertEquals("The method is not correct.", "anotherMethod03", source.get("methodName"));
assertEquals("The file name is not correct.", "Foo.java", source.get("fileName"));
assertEquals("The line number is not correct.", 9, source.get("lineNumber"));
assertTrue("The marker should be a map.", object.get("marker") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> marker = (Map<String, Object>) object.get("marker");
assertEquals("The marker name is not correct.", "LoneMarker", marker.get("name"));
assertNull("The marker parent should be null.", marker.get("parent"));
assertTrue("The thrown should be a map.", object.get("thrown") instanceof Map);
@SuppressWarnings("unchecked")
final Map<String, Object> thrown = (Map<String, Object>) object.get("thrown");
assertEquals("The thrown type is not correct.", "java.lang.RuntimeException", thrown.get("type"));
assertEquals("The thrown message is not correct.", "This is something cool!", thrown.get("message"));
assertTrue("The thrown stack trace should be a list.", thrown.get("stackTrace") instanceof List);
@SuppressWarnings("unchecked")
final List<Map<String, Object>> stackTrace = (List<Map<String, Object>>) thrown.get("stackTrace");
assertEquals(
"The thrown stack trace length is not correct.",
exception.getStackTrace().length,
stackTrace.size());
for (int i = 0; i < exception.getStackTrace().length; i++) {
final StackTraceElement e1 = exception.getStackTrace()[i];
final Map<String, Object> e2 = stackTrace.get(i);
assertEquals("Element
|
is
|
java
|
apache__flink
|
flink-libraries/flink-cep/src/main/java/org/apache/flink/cep/nfa/State.java
|
{
"start": 1341,
"end": 4507
}
|
class ____<T> implements Serializable {
private static final long serialVersionUID = 6658700025989097781L;
private final String name;
private StateType stateType;
private final Collection<StateTransition<T>> stateTransitions;
public State(final String name, final StateType stateType) {
this.name = name;
this.stateType = stateType;
stateTransitions = new ArrayList<>();
}
public StateType getStateType() {
return stateType;
}
public boolean isFinal() {
return stateType == StateType.Final;
}
public boolean isStart() {
return stateType == StateType.Start;
}
public String getName() {
return name;
}
public Collection<StateTransition<T>> getStateTransitions() {
return stateTransitions;
}
public void makeStart() {
this.stateType = StateType.Start;
}
public void addStateTransition(
final StateTransitionAction action,
final State<T> targetState,
final IterativeCondition<T> condition) {
stateTransitions.add(new StateTransition<T>(this, action, targetState, condition));
}
public void addIgnore(final IterativeCondition<T> condition) {
addStateTransition(StateTransitionAction.IGNORE, this, condition);
}
public void addIgnore(final State<T> targetState, final IterativeCondition<T> condition) {
addStateTransition(StateTransitionAction.IGNORE, targetState, condition);
}
public void addTake(final State<T> targetState, final IterativeCondition<T> condition) {
addStateTransition(StateTransitionAction.TAKE, targetState, condition);
}
public void addProceed(final State<T> targetState, final IterativeCondition<T> condition) {
addStateTransition(StateTransitionAction.PROCEED, targetState, condition);
}
public void addTake(final IterativeCondition<T> condition) {
addStateTransition(StateTransitionAction.TAKE, this, condition);
}
@Override
public boolean equals(Object obj) {
if (obj instanceof State) {
@SuppressWarnings("unchecked")
State<T> other = (State<T>) obj;
return name.equals(other.name)
&& stateType == other.stateType
&& stateTransitions.equals(other.stateTransitions);
} else {
return false;
}
}
@Override
public String toString() {
StringBuilder builder = new StringBuilder();
builder.append(stateType).append(" State ").append(name).append(" [\n");
for (StateTransition<T> stateTransition : stateTransitions) {
builder.append("\t").append(stateTransition).append(",\n");
}
builder.append("])");
return builder.toString();
}
@Override
public int hashCode() {
return Objects.hash(name, stateType, stateTransitions);
}
public boolean isStop() {
return stateType == StateType.Stop;
}
public boolean isPending() {
return stateType == StateType.Pending;
}
/** Set of valid state types. */
public
|
State
|
java
|
spring-projects__spring-security
|
access/src/test/java/org/springframework/security/access/annotation/SecuredAnnotationSecurityMetadataSourceTests.java
|
{
"start": 8540,
"end": 8682
}
|
class ____ implements ReturnVoid {
@Override
public void doSomething(List<?> param) {
}
}
public static
|
AnnotatedAnnotationAtClassLevel
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/dispatcher/OperationAlreadyFailedException.java
|
{
"start": 946,
"end": 1114
}
|
/**
 * Dispatcher exception wrapping the original failure cause; the name indicates it is raised
 * when the requested operation has already failed. Only propagates {@code throwable} to the parent.
 */
class ____ extends DispatcherException {
    public OperationAlreadyFailedException(Throwable throwable) {
        super(throwable);
    }
}
|
OperationAlreadyFailedException
|
java
|
alibaba__nacos
|
api/src/test/java/com/alibaba/nacos/api/ai/model/mcp/registry/IconTest.java
|
{
"start": 1037,
"end": 4670
}
|
// Round-trip and enum-contract tests for the MCP registry Icon model, using the Jackson
// ObjectMapper supplied by BasicRequestTest.
class ____ extends BasicRequestTest {
    // Fully-populated PNG icon serializes every field with the expected JSON shape.
    @Test
    void testSerializePngIcon() throws JsonProcessingException {
        Icon icon = new Icon();
        icon.setSrc("https://example.com/icon.png");
        icon.setMimeType(Icon.MimeType.IMAGE_PNG);
        icon.setSizes(Arrays.asList("16x16", "32x32", "64x64"));
        icon.setTheme(Icon.Theme.LIGHT);
        String json = mapper.writeValueAsString(icon);
        assertNotNull(json);
        assertTrue(json.contains("\"src\":\"https://example.com/icon.png\""));
        assertTrue(json.contains("\"mimeType\":\"image/png\""));
        assertTrue(json.contains("\"sizes\":[\"16x16\",\"32x32\",\"64x64\"]"));
        assertTrue(json.contains("\"theme\":\"light\""));
    }
    // SVG mime type and dark theme serialize via their enum string values.
    @Test
    void testSerializeSvgIcon() throws JsonProcessingException {
        Icon icon = new Icon();
        icon.setSrc("https://example.com/icon.svg");
        icon.setMimeType(Icon.MimeType.IMAGE_SVG_XML);
        icon.setTheme(Icon.Theme.DARK);
        String json = mapper.writeValueAsString(icon);
        assertTrue(json.contains("\"mimeType\":\"image/svg+xml\""));
        assertTrue(json.contains("\"theme\":\"dark\""));
    }
    // JSON with all fields deserializes back into an equivalent Icon instance.
    @Test
    void testDeserializeIcon() throws JsonProcessingException {
        String json = "{\"src\":\"https://example.com/icon.png\",\"mimeType\":\"image/png\","
                + "\"sizes\":[\"16x16\",\"32x32\"],\"theme\":\"light\"}";
        Icon icon = mapper.readValue(json, Icon.class);
        assertNotNull(icon);
        assertEquals("https://example.com/icon.png", icon.getSrc());
        assertEquals(Icon.MimeType.IMAGE_PNG, icon.getMimeType());
        assertEquals(2, icon.getSizes().size());
        assertEquals("16x16", icon.getSizes().get(0));
        assertEquals(Icon.Theme.LIGHT, icon.getTheme());
    }
    // Pin the wire value of every MimeType constant.
    @Test
    void testMimeTypeEnumValues() {
        assertEquals("image/png", Icon.MimeType.IMAGE_PNG.getValue());
        assertEquals("image/jpeg", Icon.MimeType.IMAGE_JPEG.getValue());
        assertEquals("image/jpg", Icon.MimeType.IMAGE_JPG.getValue());
        assertEquals("image/svg+xml", Icon.MimeType.IMAGE_SVG_XML.getValue());
        assertEquals("image/webp", Icon.MimeType.IMAGE_WEBP.getValue());
    }
    // Pin the wire value of every Theme constant.
    @Test
    void testThemeEnumValues() {
        assertEquals("light", Icon.Theme.LIGHT.getValue());
        assertEquals("dark", Icon.Theme.DARK.getValue());
    }
    // fromValue maps wire strings back to MimeType constants.
    @Test
    void testMimeTypeFromValue() {
        assertEquals(Icon.MimeType.IMAGE_PNG, Icon.MimeType.fromValue("image/png"));
        assertEquals(Icon.MimeType.IMAGE_SVG_XML,
                Icon.MimeType.fromValue("image/svg+xml"));
        assertEquals(Icon.MimeType.IMAGE_WEBP, Icon.MimeType.fromValue("image/webp"));
    }
    // fromValue maps wire strings back to Theme constants.
    @Test
    void testThemeFromValue() {
        assertEquals(Icon.Theme.LIGHT, Icon.Theme.fromValue("light"));
        assertEquals(Icon.Theme.DARK, Icon.Theme.fromValue("dark"));
    }
    // fromValue accepts upper-case input, i.e. lookup is case-insensitive.
    @Test
    void testMimeTypeFromValueCaseInsensitive() {
        assertEquals(Icon.MimeType.IMAGE_PNG, Icon.MimeType.fromValue("IMAGE/PNG"));
        assertEquals(Icon.MimeType.IMAGE_SVG_XML, Icon.MimeType.fromValue("IMAGE/SVG+XML"));
    }
    // With only src set, serialization either omits mimeType or emits it as null.
    @Test
    void testIconMinimalRequired() throws JsonProcessingException {
        Icon icon = new Icon();
        icon.setSrc("https://example.com/required.png");
        String json = mapper.writeValueAsString(icon);
        assertTrue(json.contains("\"src\":\"https://example.com/required.png\""));
        assertTrue(json.contains("\"mimeType\":null") || !json.contains("mimeType"));
    }
}
|
IconTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
|
{
"start": 2748,
"end": 3204
}
|
// Simple value holder used for Avro serialization round-trip comparison; equality and hash
// are both defined solely by the single field {@code x}.
class ____ {
    public int x = 7;

    @Override
    public int hashCode() {
        return x;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Exact-class comparison, matching the original getClass() check.
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return x == ((InnerRecord) obj).x;
    }
}
public static
|
InnerRecord
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
|
{
"start": 1759,
"end": 6697
}
|
// Verifies that map-output compression produces identical results (files and counters)
// whether the job runs through the native task engine or vanilla Hadoop, for each codec.
class ____ {
    private static final Configuration nativeConf = ScenarioConfiguration.getNativeConfiguration();
    private static final Configuration hadoopConf = ScenarioConfiguration.getNormalConfiguration();
    static {
        nativeConf.addResource(TestConstants.COMPRESS_TEST_CONF_PATH);
        hadoopConf.addResource(TestConstants.COMPRESS_TEST_CONF_PATH);
    }

    /**
     * Runs the same compress job once through the native engine and once through vanilla
     * Hadoop with the given codec, then verifies outputs and counters match.
     *
     * @param codec fully-qualified codec class name set as the map-output compression codec
     * @param subDir output subdirectory name (one per codec)
     * @param nativeJobName job name for the native run
     * @param hadoopJobName job name for the vanilla Hadoop run
     */
    private void runCompressComparison(String codec, String subDir,
            String nativeJobName, String hadoopJobName) throws Exception {
        nativeConf.set(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC, codec);
        final String nativeOutputPath =
            TestConstants.NATIVETASK_COMPRESS_TEST_NATIVE_OUTPUTDIR + "/" + subDir;
        final Job nativeJob = CompressMapper.getCompressJob(nativeJobName, nativeConf,
            TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR, nativeOutputPath);
        assertThat(nativeJob.waitForCompletion(true)).isTrue();

        hadoopConf.set(MRJobConfig.MAP_OUTPUT_COMPRESS_CODEC, codec);
        final String hadoopOutputPath =
            TestConstants.NATIVETASK_COMPRESS_TEST_NORMAL_OUTPUTDIR + "/" + subDir;
        final Job hadoopJob = CompressMapper.getCompressJob(hadoopJobName, hadoopConf,
            TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR, hadoopOutputPath);
        assertThat(hadoopJob.waitForCompletion(true)).isTrue();

        final boolean compareRet = ResultVerifier.verify(nativeOutputPath, hadoopOutputPath);
        assertThat(compareRet)
            .withFailMessage(
                "file compare result: if they are the same ,then return true")
            .isTrue();
        ResultVerifier.verifyCounters(hadoopJob, nativeJob);
    }

    @Test
    public void testSnappyCompress() throws Exception {
        runCompressComparison("org.apache.hadoop.io.compress.SnappyCodec",
            "snappy", "nativesnappy", "hadoopsnappy");
    }

    @Test
    public void testGzipCompress() throws Exception {
        runCompressComparison("org.apache.hadoop.io.compress.GzipCodec",
            "gzip", "nativegzip", "hadoopgzip");
    }

    @Test
    public void testLz4Compress() throws Exception {
        runCompressComparison("org.apache.hadoop.io.compress.Lz4Codec",
            "lz4", "nativelz4", "hadooplz4");
    }

    // Skips the whole suite unless both the Hadoop native code and the nativetask
    // library are loaded, then (re)creates the sequence-file test input.
    @BeforeEach
    public void startUp() throws Exception {
        assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
        assumeTrue(NativeRuntime.isNativeLibraryLoaded());
        final ScenarioConfiguration conf = new ScenarioConfiguration();
        final FileSystem fs = FileSystem.get(conf);
        final Path path = new Path(TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR);
        fs.delete(path, true);
        if (!fs.exists(path)) {
            new TestInputFile(hadoopConf.getInt(
                TestConstants.NATIVETASK_COMPRESS_FILESIZE, 100000),
                Text.class.getName(), Text.class.getName(), conf)
                .createSequenceTestFile(TestConstants.NATIVETASK_COMPRESS_TEST_INPUTDIR);
        }
        fs.close();
    }

    @AfterAll
    public static void cleanUp() throws IOException {
        final FileSystem fs = FileSystem.get(new ScenarioConfiguration());
        fs.delete(new Path(TestConstants.NATIVETASK_COMPRESS_TEST_DIR), true);
        fs.close();
    }
}
|
CompressTest
|
java
|
apache__maven
|
impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/TestUtils.java
|
{
"start": 1500,
"end": 9126
}
|
// Shared helpers for mvnup goal tests: mock UpgradeContext/UpgradeOptions factories plus
// small POM-XML builders.
class ____ {
    private TestUtils() {
        // Utility class
    }

    /**
     * Creates a mock UpgradeContext with default settings.
     *
     * @return a mock UpgradeContext
     */
    public static UpgradeContext createMockContext() {
        return createMockContext(Paths.get("/project"));
    }

    /**
     * Creates a mock UpgradeContext with the specified working directory.
     *
     * @param workingDirectory the working directory to use
     * @return a mock UpgradeContext
     */
    public static UpgradeContext createMockContext(Path workingDirectory) {
        return createMockContext(workingDirectory, createDefaultOptions());
    }

    /**
     * Creates a mock UpgradeContext with the specified options.
     *
     * @param options the upgrade options to use
     * @return a mock UpgradeContext
     */
    public static UpgradeContext createMockContext(UpgradeOptions options) {
        return createMockContext(Paths.get("/project"), options);
    }

    /**
     * Creates a mock UpgradeContext with the specified working directory and options.
     *
     * @param workingDirectory the working directory to use
     * @param options the upgrade options to use
     * @return a mock UpgradeContext
     */
    public static UpgradeContext createMockContext(Path workingDirectory, UpgradeOptions options) {
        InvokerRequest request = mock(InvokerRequest.class);
        // Mock all required properties for LookupContext constructor
        when(request.cwd()).thenReturn(workingDirectory);
        when(request.installationDirectory()).thenReturn(Paths.get("/maven"));
        when(request.userHomeDirectory()).thenReturn(Paths.get("/home/user"));
        when(request.topDirectory()).thenReturn(workingDirectory);
        when(request.rootDirectory()).thenReturn(Optional.empty());
        when(request.userProperties()).thenReturn(Map.of());
        when(request.systemProperties()).thenReturn(Map.of());
        when(request.options()).thenReturn(Optional.ofNullable(options));
        // Mock parserRequest and logger
        ParserRequest parserRequest = mock(ParserRequest.class);
        Logger logger = mock(Logger.class);
        // Capture error messages for debugging
        doAnswer(invocation -> {
                    System.err.println("[ERROR] " + invocation.getArgument(0));
                    return null;
                })
                .when(logger)
                .error(anyString());
        when(request.parserRequest()).thenReturn(parserRequest);
        when(parserRequest.logger()).thenReturn(logger);
        return new UpgradeContext(request, options);
    }

    /**
     * Creates default upgrade options with all optional values empty.
     *
     * @return default upgrade options
     */
    public static UpgradeOptions createDefaultOptions() {
        // Delegate to createOptions so the stubbed accessors are defined in exactly one place.
        return createOptions(null, null, null, null, null);
    }

    /**
     * Creates upgrade options with specific values.
     *
     * @param all the --all option value (null for absent)
     * @param infer the --infer option value (null for absent)
     * @param model the --model option value (null for absent)
     * @param plugins the --plugins option value (null for absent)
     * @param modelVersion the --model-version option value (null for absent)
     * @return configured upgrade options
     */
    public static UpgradeOptions createOptions(
            Boolean all, Boolean infer, Boolean model, Boolean plugins, String modelVersion) {
        UpgradeOptions options = mock(UpgradeOptions.class);
        when(options.all()).thenReturn(Optional.ofNullable(all));
        when(options.infer()).thenReturn(Optional.ofNullable(infer));
        when(options.model()).thenReturn(Optional.ofNullable(model));
        when(options.plugins()).thenReturn(Optional.ofNullable(plugins));
        when(options.modelVersion()).thenReturn(Optional.ofNullable(modelVersion));
        return options;
    }

    /**
     * Creates upgrade options with only the --all flag set.
     *
     * @param all the --all option value
     * @return configured upgrade options
     */
    public static UpgradeOptions createOptionsWithAll(boolean all) {
        return createOptions(all, null, null, null, null);
    }

    /**
     * Creates upgrade options with only the --model-version option set.
     *
     * @param modelVersion the --model-version option value
     * @return configured upgrade options
     */
    public static UpgradeOptions createOptionsWithModelVersion(String modelVersion) {
        return createOptions(null, null, null, null, modelVersion);
    }

    /**
     * Creates upgrade options with only the --plugins option set.
     *
     * @param plugins the --plugins option value
     * @return configured upgrade options
     */
    public static UpgradeOptions createOptionsWithPlugins(boolean plugins) {
        return createOptions(null, null, null, plugins, null);
    }

    /**
     * Creates upgrade options with only the --fix-model option set.
     * NOTE(review): this populates the {@code model} accessor — presumably --fix-model maps
     * onto {@code model()}; confirm against the CLI option definitions.
     *
     * @param fixModel the --fix-model option value
     * @return configured upgrade options
     */
    public static UpgradeOptions createOptionsWithFixModel(boolean fixModel) {
        return createOptions(null, null, fixModel, null, null);
    }

    /**
     * Creates upgrade options with only the --infer option set.
     *
     * @param infer the --infer option value
     * @return configured upgrade options
     */
    public static UpgradeOptions createOptionsWithInfer(boolean infer) {
        return createOptions(null, infer, null, null, null);
    }

    /**
     * Creates a simple POM XML string for testing.
     *
     * @param groupId the group ID
     * @param artifactId the artifact ID
     * @param version the version
     * @return POM XML string
     */
    public static String createSimplePom(String groupId, String artifactId, String version) {
        return String.format("""
            <?xml version="1.0" encoding="UTF-8"?>
            <project xmlns="http://maven.apache.org/POM/4.0.0">
                <modelVersion>4.0.0</modelVersion>
                <groupId>%s</groupId>
                <artifactId>%s</artifactId>
                <version>%s</version>
            </project>
            """, groupId, artifactId, version);
    }

    /**
     * Creates a POM XML string with parent for testing.
     *
     * @param parentGroupId the parent group ID
     * @param parentArtifactId the parent artifact ID
     * @param parentVersion the parent version
     * @param artifactId the artifact ID
     * @return POM XML string with parent
     */
    public static String createPomWithParent(
            String parentGroupId, String parentArtifactId, String parentVersion, String artifactId) {
        return String.format("""
            <?xml version="1.0" encoding="UTF-8"?>
            <project xmlns="http://maven.apache.org/POM/4.0.0">
                <modelVersion>4.0.0</modelVersion>
                <parent>
                    <groupId>%s</groupId>
                    <artifactId>%s</artifactId>
                    <version>%s</version>
                </parent>
                <artifactId>%s</artifactId>
            </project>
            """, parentGroupId, parentArtifactId, parentVersion, artifactId);
    }
}
|
TestUtils
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/crossproject/CrossProjectModeDecider.java
|
{
"start": 1863,
"end": 2822
}
|
// Decides whether a request should be resolved across projects, based on a cluster-wide
// setting combined with per-request opt-in.
class ____ {
    private static final String CROSS_PROJECT_ENABLED_SETTING_KEY = "serverless.cross_project.enabled";

    private final boolean crossProjectEnabled;

    public CrossProjectModeDecider(Settings settings) {
        this.crossProjectEnabled = settings.getAsBoolean(CROSS_PROJECT_ENABLED_SETTING_KEY, false);
    }

    /** Whether cross-project mode is enabled cluster-wide (defaults to false). */
    public boolean crossProjectEnabled() {
        return crossProjectEnabled;
    }

    public boolean resolvesCrossProject(IndicesRequest.CrossProjectCandidate request) {
        // Both the cluster-level flag and the request-level opt-in must allow it.
        // TODO: The following check can be an method on the request itself
        final boolean candidate = crossProjectEnabled && request.allowsCrossProject();
        if (candidate == false) {
            return false;
        }
        // IndicesRequests additionally consult their own IndicesOptions.
        if (request instanceof IndicesRequest indicesRequest) {
            return indicesRequest.indicesOptions().resolveCrossProjectIndexExpression();
        }
        return true;
    }
}
|
CrossProjectModeDecider
|
java
|
elastic__elasticsearch
|
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/CartesianBoundsAggregator.java
|
{
"start": 1055,
"end": 2486
}
|
// Metric aggregator that accumulates the bounding box of cartesian points per bucket.
class ____ extends CartesianBoundsAggregatorBase {
    private final CartesianPointValuesSource valuesSource;

    public CartesianBoundsAggregator(
        String name,
        AggregationContext context,
        Aggregator parent,
        ValuesSourceConfig valuesSourceConfig,
        Map<String, Object> metadata
    ) throws IOException {
        super(name, context, parent, metadata);
        // Callers are expected to route empty value sources elsewhere (e.g. an unmapped aggregator).
        assert valuesSourceConfig.hasValues();
        this.valuesSource = (CartesianPointValuesSource) valuesSourceConfig.getValuesSource();
    }

    @Override
    public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx, LeafBucketCollector sub) {
        final CartesianPointValuesSource.MultiCartesianPointValues values = valuesSource.pointValues(aggCtx.getLeafReaderContext());
        return new LeafBucketCollectorBase(sub, values) {
            @Override
            public void collect(int doc, long bucket) throws IOException {
                // Only docs that actually have point values contribute to the bounds.
                if (values.advanceExact(doc)) {
                    // Grow per-bucket storage lazily before recording into this bucket.
                    maybeResize(bucket);
                    final int valuesCount = values.docValueCount();
                    for (int i = 0; i < valuesCount; ++i) {
                        CartesianPoint value = values.nextValue();
                        // Single point: y serves as both first args and x as both last args —
                        // presumably (top, bottom, left, right); confirm against addBounds signature.
                        addBounds(bucket, value.getY(), value.getY(), value.getX(), value.getX());
                    }
                }
            }
        };
    }
}
|
CartesianBoundsAggregator
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java
|
{
"start": 9076,
"end": 84589
}
|
class ____ implements Closeable {
private static final Logger logger = LogManager.getLogger(ApiKeyService.class);
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ApiKeyService.class);
// Algorithm used for the persisted API-key credential hash (defaults to SSHA256).
public static final Setting<String> STORED_HASH_ALGO_SETTING = XPackSettings.defaultStoredSecureTokenHashAlgorithmSetting(
    "xpack.security.authc.api_key.hashing.algorithm",
    (s) -> Hasher.SSHA256.name()
);
// Settings controlling removal of inactive/invalidated API keys.
public static final Setting<TimeValue> DELETE_TIMEOUT = Setting.timeSetting(
    "xpack.security.authc.api_key.delete.timeout",
    TimeValue.MINUS_ONE,
    Property.NodeScope
);
public static final Setting<TimeValue> DELETE_INTERVAL = Setting.timeSetting(
    "xpack.security.authc.api_key.delete.interval",
    TimeValue.timeValueHours(24L),
    Property.NodeScope,
    Property.Dynamic
);
public static final Setting<TimeValue> DELETE_RETENTION_PERIOD = Setting.positiveTimeSetting(
    "xpack.security.authc.api_key.delete.retention_period",
    TimeValue.timeValueDays(7),
    Property.NodeScope,
    Property.Dynamic
);
// In-memory authentication-cache tuning; a TTL of 0 disables caching (see constructor).
public static final Setting<String> CACHE_HASH_ALGO_SETTING = Setting.simpleString(
    "xpack.security.authc.api_key.cache.hash_algo",
    Hasher.SSHA256.name(),
    Setting.Property.NodeScope
);
public static final Setting<TimeValue> CACHE_TTL_SETTING = Setting.timeSetting(
    "xpack.security.authc.api_key.cache.ttl",
    TimeValue.timeValueHours(24L),
    Property.NodeScope
);
public static final Setting<Integer> CACHE_MAX_KEYS_SETTING = Setting.intSetting(
    "xpack.security.authc.api_key.cache.max_keys",
    25000,
    Property.NodeScope
);
// Document cache TTL is bounded to [0, 15] minutes.
public static final Setting<TimeValue> DOC_CACHE_TTL_SETTING = Setting.timeSetting(
    "xpack.security.authc.api_key.doc_cache.ttl",
    TimeValue.timeValueMinutes(5),
    TimeValue.timeValueMinutes(0),
    TimeValue.timeValueMinutes(15),
    Property.NodeScope
);
// Cache for compiled certificate-identity patterns.
public static final Setting<TimeValue> CERTIFICATE_IDENTITY_PATTERN_CACHE_TTL_SETTING = Setting.timeSetting(
    "xpack.security.authc.api_key.certificate_identity_pattern_cache.ttl",
    TimeValue.timeValueHours(48L),
    Property.NodeScope
);
public static final Setting<Integer> CERTIFICATE_IDENTITY_PATTERN_CACHE_MAX_KEYS_SETTING = Setting.intSetting(
    "xpack.security.authc.api_key.certificate_identity_pattern_cache.max_keys",
    100,
    Property.NodeScope
);
private static final RoleDescriptor.Parser ROLE_DESCRIPTOR_PARSER = RoleDescriptor.parserBuilder().allowRestriction(true).build();
private final Clock clock;
private final Client client;
private final SecurityIndexManager securityIndex;
private final ClusterService clusterService;
private final Hasher hasher;
private final boolean enabled;
private final Settings settings;
private final InactiveApiKeysRemover inactiveApiKeysRemover;
// The three caches below are null when their respective TTL settings are zero.
private final Cache<String, Pattern> certificateIdentityPatternCache;
private final Cache<String, ListenableFuture<CachedApiKeyHashResult>> apiKeyAuthCache;
private final Hasher cacheHasher;
private final ThreadPool threadPool;
private final ApiKeyDocCache apiKeyDocCache;
private final FeatureService featureService;
private static final int API_KEY_SECRET_NUM_BYTES = 16;
// The API key secret is a Base64 encoded string of 128 random bits.
// See getBase64SecureRandomString()
private static final int API_KEY_SECRET_LENGTH = 22;
private static final long EVICTION_MONITOR_INTERVAL_SECONDS = 300L; // 5 minutes
private static final long EVICTION_MONITOR_INTERVAL_NANOS = EVICTION_MONITOR_INTERVAL_SECONDS * 1_000_000_000L;
private static final long EVICTION_WARNING_THRESHOLD = 15L * EVICTION_MONITOR_INTERVAL_SECONDS; // 15 eviction per sec = 4500 in 5 min
private final AtomicLong lastEvictionCheckedAt = new AtomicLong(0);
private final LongAdder evictionCounter = new LongAdder();
private final List<AutoCloseable> cacheMetrics;
@SuppressWarnings("this-escape")
public ApiKeyService(
    Settings settings,
    Clock clock,
    Client client,
    SecurityIndexManager securityIndex,
    ClusterService clusterService,
    CacheInvalidatorRegistry cacheInvalidatorRegistry,
    ThreadPool threadPool,
    MeterRegistry meterRegistry,
    FeatureService featureService
) {
    this.clock = clock;
    this.client = client;
    this.securityIndex = securityIndex;
    this.clusterService = clusterService;
    this.enabled = XPackSettings.API_KEY_SERVICE_ENABLED_SETTING.get(settings);
    this.hasher = Hasher.resolve(STORED_HASH_ALGO_SETTING.get(settings));
    this.settings = settings;
    this.inactiveApiKeysRemover = new InactiveApiKeysRemover(settings, client, clusterService);
    this.threadPool = threadPool;
    this.cacheHasher = Hasher.resolve(CACHE_HASH_ALGO_SETTING.get(settings));
    this.featureService = featureService;
    final TimeValue ttl = CACHE_TTL_SETTING.get(settings);
    final int maximumWeight = CACHE_MAX_KEYS_SETTING.get(settings);
    // An auth-cache TTL of zero disables ALL caches (auth, doc and pattern caches stay null)
    // and — note — also skips registering the cache invalidators below.
    if (ttl.getNanos() > 0) {
        this.apiKeyAuthCache = CacheBuilder.<String, ListenableFuture<CachedApiKeyHashResult>>builder()
            .setExpireAfterAccess(ttl)
            .setMaximumWeight(maximumWeight)
            .removalListener(getAuthCacheRemovalListener(maximumWeight))
            .build();
        // Doc cache and pattern cache can each be individually disabled via their own zero TTLs.
        final TimeValue docTtl = DOC_CACHE_TTL_SETTING.get(settings);
        this.apiKeyDocCache = docTtl.getNanos() == 0 ? null : new ApiKeyDocCache(docTtl, maximumWeight);
        final TimeValue patternTtl = CERTIFICATE_IDENTITY_PATTERN_CACHE_TTL_SETTING.get(settings);
        final int maximumPatternWeight = CERTIFICATE_IDENTITY_PATTERN_CACHE_MAX_KEYS_SETTING.get(settings);
        this.certificateIdentityPatternCache = patternTtl.getNanos() == 0
            ? null
            : CacheBuilder.<String, Pattern>builder().setExpireAfterAccess(patternTtl).setMaximumWeight(maximumPatternWeight).build();
        // "api_key" invalidation clears both the doc cache and the auth cache.
        cacheInvalidatorRegistry.registerCacheInvalidator("api_key", new CacheInvalidatorRegistry.CacheInvalidator() {
            @Override
            public void invalidate(Collection<String> keys) {
                if (apiKeyDocCache != null) {
                    apiKeyDocCache.invalidate(keys);
                }
                if (apiKeyAuthCache != null) {
                    keys.forEach(apiKeyAuthCache::invalidate);
                }
            }

            @Override
            public void invalidateAll() {
                if (apiKeyDocCache != null) {
                    apiKeyDocCache.invalidateAll();
                }
                if (apiKeyAuthCache != null) {
                    apiKeyAuthCache.invalidateAll();
                }
            }
        });
        // "api_key_doc" invalidation clears only the doc cache.
        cacheInvalidatorRegistry.registerCacheInvalidator("api_key_doc", new CacheInvalidatorRegistry.CacheInvalidator() {
            @Override
            public void invalidate(Collection<String> keys) {
                if (apiKeyDocCache != null) {
                    apiKeyDocCache.invalidate(keys);
                }
            }

            @Override
            public void invalidateAll() {
                if (apiKeyDocCache != null) {
                    apiKeyDocCache.invalidateAll();
                }
            }
        });
    } else {
        this.apiKeyAuthCache = null;
        this.apiKeyDocCache = null;
        this.certificateIdentityPatternCache = null;
    }
    // Cache metrics are only registered when the API key service itself is enabled.
    if (enabled) {
        final List<AutoCloseable> cacheMetrics = new ArrayList<>();
        if (this.apiKeyAuthCache != null) {
            cacheMetrics.addAll(
                SecurityCacheMetrics.registerAsyncCacheMetrics(
                    meterRegistry,
                    this.apiKeyAuthCache,
                    SecurityCacheMetrics.CacheType.API_KEY_AUTH_CACHE
                )
            );
        }
        if (this.apiKeyDocCache != null) {
            cacheMetrics.addAll(
                SecurityCacheMetrics.registerAsyncCacheMetrics(
                    meterRegistry,
                    this.apiKeyDocCache.docCache,
                    SecurityCacheMetrics.CacheType.API_KEY_DOCS_CACHE
                )
            );
            cacheMetrics.addAll(
                SecurityCacheMetrics.registerAsyncCacheMetrics(
                    meterRegistry,
                    this.apiKeyDocCache.roleDescriptorsBytesCache,
                    SecurityCacheMetrics.CacheType.API_KEY_ROLE_DESCRIPTORS_CACHE
                )
            );
        }
        this.cacheMetrics = List.copyOf(cacheMetrics);
    } else {
        this.cacheMetrics = List.of();
    }
}
/**
 * Asynchronously creates a new API key based off of the request and authentication
 * @param authentication the authentication that this api key should be based off of
 * @param request the request to create the api key included any permission restrictions
 * @param userRoleDescriptors the user's actual roles that we always enforce
 * @param listener the listener that will be used to notify of completion
 */
public void createApiKey(
    Authentication authentication,
    AbstractCreateApiKeyRequest request,
    Set<RoleDescriptor> userRoleDescriptors,
    ActionListener<CreateApiKeyResponse> listener
) {
    // Invariant: cross-cluster keys cannot be derived from an API-key authentication ...
    assert request.getType() != ApiKey.Type.CROSS_CLUSTER || false == authentication.isApiKey()
        : "cannot create derived cross-cluster API keys (name=["
            + request.getName()
            + "], type=["
            + request.getType()
            + "], auth=["
            + authentication
            + "])";
    // ... and carry no owner (limited-by) role descriptors.
    assert request.getType() != ApiKey.Type.CROSS_CLUSTER || userRoleDescriptors.isEmpty()
        : "owner user role descriptor must be empty for cross-cluster API keys (name=["
            + request.getName()
            + "], type=["
            + request.getType()
            + "], roles=["
            + userRoleDescriptors
            + "])";
    ensureEnabled();
    if (authentication == null) {
        listener.onFailure(new IllegalArgumentException("authentication must be provided"));
    } else if (authentication.isCloudApiKey()) {
        listener.onFailure(new IllegalArgumentException("creating elasticsearch api keys using cloud api keys is not supported"));
    } else {
        // Mixed-cluster guards: every node must understand the requested features.
        final TransportVersion transportVersion = getMinTransportVersion();
        if (validateRoleDescriptorsForMixedCluster(listener, request.getRoleDescriptors(), transportVersion) == false) {
            return; // listener already notified of the failure
        }
        if (transportVersion.before(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY)
            && request.getType() == ApiKey.Type.CROSS_CLUSTER) {
            listener.onFailure(
                new IllegalArgumentException(
                    "all nodes must have version ["
                        + TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY.toReleaseVersion()
                        + "] or higher to support creating cross cluster API keys"
                )
            );
            return;
        }
        final IllegalArgumentException workflowsValidationException = validateWorkflowsRestrictionConstraints(
            transportVersion,
            request.getRoleDescriptors(),
            userRoleDescriptors
        );
        if (workflowsValidationException != null) {
            listener.onFailure(workflowsValidationException);
            return;
        }
        // Drop descriptor fields (descriptions, remote privileges) older nodes cannot handle.
        Set<RoleDescriptor> filteredRoleDescriptors = filterRoleDescriptorsForMixedCluster(
            userRoleDescriptors,
            transportVersion,
            request.getId()
        );
        createApiKeyAndIndexIt(authentication, request, filteredRoleDescriptors, listener);
    }
}
private Set<RoleDescriptor> filterRoleDescriptorsForMixedCluster(
    final Set<RoleDescriptor> userRoleDescriptors,
    final TransportVersion transportVersion,
    final String... apiKeyIds
) {
    // Strip descriptions first, then drop remote privileges that older nodes cannot parse.
    return maybeRemoveRemotePrivileges(removeUserRoleDescriptorDescriptions(userRoleDescriptors), transportVersion, apiKeyIds);
}
/**
 * Rejects role descriptors that use features some node in a mixed (rolling-upgrade) cluster
 * cannot understand. On failure, notifies {@code listener} and returns {@code false};
 * returns {@code true} when the descriptors are safe for the whole cluster.
 */
private boolean validateRoleDescriptorsForMixedCluster(
    final ActionListener<?> listener,
    final List<RoleDescriptor> roleDescriptors,
    final TransportVersion transportVersion
) {
    // API keys with roles which define remote indices privileges is not allowed in a mixed cluster.
    if (transportVersion.before(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY) && hasRemoteIndices(roleDescriptors)) {
        return failForUnsupportedFeature(
            listener,
            TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY,
            "remote indices privileges for API keys"
        );
    }
    // API keys with roles which define remote cluster privileges is not allowed in a mixed cluster.
    if (transportVersion.before(ROLE_REMOTE_CLUSTER_PRIVS) && hasRemoteCluster(roleDescriptors)) {
        return failForUnsupportedFeature(listener, ROLE_REMOTE_CLUSTER_PRIVS, "remote cluster privileges for API keys");
    }
    if (transportVersion.before(TransportVersions.V_8_16_0) && hasGlobalManageRolesPrivilege(roleDescriptors)) {
        return failForUnsupportedFeature(listener, TransportVersions.V_8_16_0, "the manage roles privilege for API keys");
    }
    return true;
}

/**
 * Notifies {@code listener} with the standard minimum-version failure for {@code featureDescription}
 * and returns {@code false} so callers can {@code return failForUnsupportedFeature(...)} directly.
 */
private static boolean failForUnsupportedFeature(
    final ActionListener<?> listener,
    final TransportVersion requiredVersion,
    final String featureDescription
) {
    listener.onFailure(
        new IllegalArgumentException(
            "all nodes must have version [" + requiredVersion.toReleaseVersion() + "] or higher to support " + featureDescription
        )
    );
    return false;
}
/**
 * This method removes description from the given user's (limited-by) role descriptors.
 * The description field is not supported for API key role descriptors hence storing limited-by roles with descriptions
 * would be inconsistent and require handling backwards compatibility.
 * Hence why we have to remove them before create/update of API key roles.
 */
static Set<RoleDescriptor> removeUserRoleDescriptorDescriptions(Set<RoleDescriptor> userRoleDescriptors) {
    return userRoleDescriptors.stream().map(roleDescriptor -> {
        if (roleDescriptor.hasDescription()) {
            // Rebuild the descriptor with every field copied verbatim except the
            // description (the final constructor argument), which is nulled out.
            return new RoleDescriptor(
                roleDescriptor.getName(),
                roleDescriptor.getClusterPrivileges(),
                roleDescriptor.getIndicesPrivileges(),
                roleDescriptor.getApplicationPrivileges(),
                roleDescriptor.getConditionalClusterPrivileges(),
                roleDescriptor.getRunAs(),
                roleDescriptor.getMetadata(),
                roleDescriptor.getTransientMetadata(),
                roleDescriptor.getRemoteIndicesPrivileges(),
                roleDescriptor.getRemoteClusterPermissions(),
                roleDescriptor.getRestriction(),
                null // description removed
            );
        }
        // Descriptors without a description are passed through unchanged.
        return roleDescriptor;
    }).collect(Collectors.toSet());
}
/** Lowest transport version across all nodes — the compatibility floor for mixed-cluster checks. */
private TransportVersion getMinTransportVersion() {
    return clusterService.state().getMinTransportVersion();
}
/** True if any descriptor declares remote indices privileges; null-safe (null → false). */
private static boolean hasRemoteIndices(Collection<RoleDescriptor> roleDescriptors) {
    return roleDescriptors != null && roleDescriptors.stream().anyMatch(RoleDescriptor::hasRemoteIndicesPrivileges);
}
/** True if any descriptor declares remote cluster permissions; null-safe (null → false). */
private static boolean hasRemoteCluster(Collection<RoleDescriptor> roleDescriptors) {
    return roleDescriptors != null && roleDescriptors.stream().anyMatch(RoleDescriptor::hasRemoteClusterPermissions);
}
/** True if any descriptor carries a global ManageRolesPrivilege; null-safe (null → false). */
private static boolean hasGlobalManageRolesPrivilege(Collection<RoleDescriptor> roleDescriptors) {
    if (roleDescriptors == null) {
        return false;
    }
    for (RoleDescriptor roleDescriptor : roleDescriptors) {
        for (var privilege : roleDescriptor.getConditionalClusterPrivileges()) {
            if (privilege instanceof ConfigurableClusterPrivileges.ManageRolesPrivilege) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Validates workflow-restriction constraints for create/update of an API key.
 * Returns {@code null} when valid, otherwise the {@link IllegalArgumentException} the caller
 * should report (the caller decides how to deliver it).
 */
private static IllegalArgumentException validateWorkflowsRestrictionConstraints(
    TransportVersion transportVersion,
    List<RoleDescriptor> requestRoleDescriptors,
    Set<RoleDescriptor> userRoleDescriptors
) {
    // Owner (limited-by) roles must never be restricted.
    if (getNumberOfRoleDescriptorsWithRestriction(userRoleDescriptors) > 0L) {
        return new IllegalArgumentException("owner user role descriptors must not include restriction");
    }
    final long numberOfRoleDescriptorsWithRestriction = getNumberOfRoleDescriptorsWithRestriction(requestRoleDescriptors);
    if (numberOfRoleDescriptorsWithRestriction > 0L) {
        // creating/updating API keys with restrictions is not allowed in a mixed cluster.
        if (transportVersion.before(WORKFLOWS_RESTRICTION_VERSION)) {
            return new IllegalArgumentException(
                "all nodes must have version ["
                    + WORKFLOWS_RESTRICTION_VERSION.toReleaseVersion()
                    + "] or higher to support restrictions for API keys"
            );
        }
        // It's only allowed to create/update API keys with a single role descriptor that is restricted.
        if (numberOfRoleDescriptorsWithRestriction != 1L) {
            return new IllegalArgumentException("more than one role descriptor with restriction is not supported");
        }
        // Combining roles with and without restriction is not allowed either.
        if (numberOfRoleDescriptorsWithRestriction != requestRoleDescriptors.size()) {
            return new IllegalArgumentException("combining role descriptors with and without restriction is not supported");
        }
    }
    return null;
}
/** Counts how many of the given role descriptors carry a restriction; {@code 0} for null or empty input. */
private static long getNumberOfRoleDescriptorsWithRestriction(Collection<RoleDescriptor> roleDescriptors) {
    if (roleDescriptors == null || roleDescriptors.isEmpty()) {
        return 0L;
    }
    long restricted = 0L;
    for (RoleDescriptor roleDescriptor : roleDescriptors) {
        if (roleDescriptor.hasRestriction()) {
            restricted++;
        }
    }
    return restricted;
}
/**
 * Generates the API key secret, hashes it, builds the API key document and indexes it into the
 * security index, notifying {@code listener} with a {@link CreateApiKeyResponse} on success.
 * The hash chars are zeroed out once the document has been submitted.
 */
private void createApiKeyAndIndexIt(
    Authentication authentication,
    AbstractCreateApiKeyRequest request,
    Set<RoleDescriptor> userRoleDescriptors,
    ActionListener<CreateApiKeyResponse> listener
) {
    final Instant created = clock.instant();
    final Instant expiration = getApiKeyExpiration(created, request.getExpiration());
    final SecureString apiKey = getBase64SecureRandomString(API_KEY_SECRET_NUM_BYTES);
    // Cross-cluster keys must have exactly the expected secret length
    assert ApiKey.Type.CROSS_CLUSTER != request.getType() || API_KEY_SECRET_LENGTH == apiKey.length()
        : "Invalid API key (name=[" + request.getName() + "], type=[" + request.getType() + "], length=[" + apiKey.length() + "])";
    computeHashForApiKey(apiKey, listener.delegateFailure((l, apiKeyHashChars) -> {
        final String certificateIdentity;
        try {
            certificateIdentity = getCertificateIdentityFromCreateRequest(request);
        } catch (ElasticsearchException e) {
            listener.onFailure(e);
            return;
        }
        try (
            XContentBuilder builder = newDocument(
                apiKeyHashChars,
                request.getName(),
                authentication,
                userRoleDescriptors,
                created,
                expiration,
                request.getRoleDescriptors(),
                request.getType(),
                ApiKey.CURRENT_API_KEY_VERSION,
                request.getMetadata(),
                certificateIdentity
            )
        ) {
            // A single-item bulk request is used (rather than a plain index request) so the
            // single-item bulk response unwrapping below can surface per-item failures.
            final BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
            bulkRequestBuilder.add(
                client.prepareIndex(SECURITY_MAIN_ALIAS)
                    .setSource(builder)
                    .setId(request.getId())
                    .setOpType(DocWriteRequest.OpType.CREATE)
                    .request()
            );
            bulkRequestBuilder.setRefreshPolicy(request.getRefreshPolicy());
            final BulkRequest bulkRequest = bulkRequestBuilder.request();
            securityIndex.forCurrentProject()
                .prepareIndexIfNeededThenExecute(
                    listener::onFailure,
                    () -> executeAsyncWithOrigin(
                        client,
                        SECURITY_ORIGIN,
                        TransportBulkAction.TYPE,
                        bulkRequest,
                        TransportBulkAction.<IndexResponse>unwrappingSingleItemBulkResponse(ActionListener.wrap(indexResponse -> {
                            assert request.getId().equals(indexResponse.getId())
                                : "Mismatched API key (request=["
                                    + request.getId()
                                    + "](name=["
                                    + request.getName()
                                    + "]) index=["
                                    + indexResponse.getId()
                                    + "])";
                            assert indexResponse.getResult() == DocWriteResponse.Result.CREATED
                                : "Index response was [" + indexResponse.getResult() + "]";
                            if (apiKeyAuthCache != null) {
                                // Pre-populate the auth cache so the first authentication with this
                                // key does not need to verify the hash again.
                                final ListenableFuture<CachedApiKeyHashResult> listenableFuture = new ListenableFuture<>();
                                listenableFuture.onResponse(new CachedApiKeyHashResult(true, apiKey));
                                apiKeyAuthCache.put(request.getId(), listenableFuture);
                            }
                            listener.onResponse(new CreateApiKeyResponse(request.getName(), request.getId(), apiKey, expiration));
                        }, listener::onFailure))
                    )
                );
        } catch (IOException e) {
            listener.onFailure(e);
        } finally {
            // Zero the hash so sensitive material does not linger in memory
            Arrays.fill(apiKeyHashChars, (char) 0);
        }
    }));
}
/**
 * Extracts the certificate identity string from a create request, or {@code null} when the
 * request is not a cross-cluster request or carries no certificate identity.
 */
private String getCertificateIdentityFromCreateRequest(final AbstractCreateApiKeyRequest request) {
    if (request instanceof CreateCrossClusterApiKeyRequest crossClusterRequest) {
        final CertificateIdentity certificateIdentity = crossClusterRequest.getCertificateIdentity();
        return certificateIdentity == null ? null : certificateIdentity.value();
    }
    return null;
}
/**
 * Throws an {@link ElasticsearchException} if not all nodes in the cluster support the
 * {@code certificate_identity} field yet (i.e. the cluster is in a mixed-version state).
 */
public void ensureCertificateIdentityFeatureIsEnabled() {
    final ClusterState clusterState = clusterService.state();
    final boolean supported = featureService.clusterHasFeature(clusterState, CERTIFICATE_IDENTITY_FIELD_FEATURE);
    if (supported == false) {
        throw new ElasticsearchException(
            "API key operation failed. The cluster is in a mixed-version state and does not yet "
                + "support the [certificate_identity] field. Please retry after the upgrade is complete."
        );
    }
}
/**
 * Bulk-updates the API keys identified in {@code request} on behalf of the authenticated owner.
 * Rejects API-key-based authentications (only the owner user may update a key), enforces
 * mixed-cluster constraints on the requested role descriptors, then resolves the target
 * documents and delegates to the per-document update logic.
 *
 * @param authentication      the owner's authentication; must not itself be an API key
 * @param request             the bulk update request
 * @param userRoleDescriptors the owner's (limited-by) role descriptors; empty for cross-cluster keys
 * @param listener            notified with the per-key updated/noop/error outcome
 */
public void updateApiKeys(
    final Authentication authentication,
    final BaseBulkUpdateApiKeyRequest request,
    final Set<RoleDescriptor> userRoleDescriptors,
    final ActionListener<BulkUpdateApiKeyResponse> listener
) {
    assert request.getType() != ApiKey.Type.CROSS_CLUSTER || userRoleDescriptors.isEmpty()
        : "owner user role descriptor must be empty for cross-cluster API keys (ids=["
            + (request.getIds().size() <= 10
                ? request.getIds()
                : (request.getIds().size() + " including " + request.getIds().subList(0, 10)))
            + "], type=["
            + request.getType()
            + "], roles=["
            + userRoleDescriptors
            + "])";
    ensureEnabled();
    if (authentication == null) {
        listener.onFailure(new IllegalArgumentException("authentication must be provided"));
        return;
    } else if (authentication.isApiKey()) {
        listener.onFailure(
            new IllegalArgumentException("authentication via API key not supported: only the owner user can update an API key")
        );
        return;
    }
    // All version-dependent validation/filtering below is based on the lowest transport version in the cluster
    final TransportVersion transportVersion = getMinTransportVersion();
    if (validateRoleDescriptorsForMixedCluster(listener, request.getRoleDescriptors(), transportVersion) == false) {
        return;
    }
    final Exception workflowsValidationException = validateWorkflowsRestrictionConstraints(
        transportVersion,
        request.getRoleDescriptors(),
        userRoleDescriptors
    );
    if (workflowsValidationException != null) {
        listener.onFailure(workflowsValidationException);
        return;
    }
    final String[] apiKeyIds = request.getIds().toArray(String[]::new);
    if (logger.isDebugEnabled()) {
        logger.debug("Updating [{}] API keys", buildDelimitedStringWithLimit(10, apiKeyIds));
    }
    // Strip remote privileges that older nodes cannot parse before storing the limited-by roles
    Set<RoleDescriptor> filteredRoleDescriptors = filterRoleDescriptorsForMixedCluster(
        userRoleDescriptors,
        transportVersion,
        apiKeyIds
    );
    findVersionedApiKeyDocsForSubject(
        authentication,
        apiKeyIds,
        ActionListener.wrap(
            versionedDocs -> updateApiKeys(authentication, request, filteredRoleDescriptors, versionedDocs, listener),
            ex -> listener.onFailure(traceLog("bulk update", ex))
        )
    );
}
/**
 * Builds and executes a bulk index request for the resolved API key documents.
 * Keys whose update would not change the stored document are reported as noops without
 * indexing; per-key validation failures are collected as errors on the response rather
 * than failing the whole bulk. Ids that were requested but not found are reported as errors.
 */
private void updateApiKeys(
    final Authentication authentication,
    final BaseBulkUpdateApiKeyRequest request,
    final Set<RoleDescriptor> userRoleDescriptors,
    final Collection<VersionedApiKeyDoc> targetVersionedDocs,
    final ActionListener<BulkUpdateApiKeyResponse> listener
) {
    logger.trace("Found [{}] API keys of [{}] requested for update", targetVersionedDocs.size(), request.getIds().size());
    assert targetVersionedDocs.size() <= request.getIds().size()
        : "more docs were found for update than were requested. found ["
            + targetVersionedDocs.size()
            + "] requested ["
            + request.getIds().size()
            + "]";
    final BulkUpdateApiKeyResponse.Builder responseBuilder = BulkUpdateApiKeyResponse.builder();
    final BulkRequestBuilder bulkRequestBuilder = client.prepareBulk();
    for (VersionedApiKeyDoc versionedDoc : targetVersionedDocs) {
        final String apiKeyId = versionedDoc.id();
        try {
            validateForUpdate(apiKeyId, request.getType(), authentication, versionedDoc.doc());
            // null index request signals that the update would be a noop for this key
            final IndexRequest indexRequest = maybeBuildIndexRequest(versionedDoc, authentication, request, userRoleDescriptors);
            final boolean isNoop = indexRequest == null;
            if (isNoop) {
                logger.debug("Detected noop update request for API key [{}]. Skipping index request", apiKeyId);
                responseBuilder.noop(apiKeyId);
            } else {
                bulkRequestBuilder.add(indexRequest);
            }
        } catch (Exception ex) {
            // A failure for one key must not abort the update of the others
            responseBuilder.error(apiKeyId, traceLog("prepare index request for update", ex));
        }
    }
    addErrorsForNotFoundApiKeys(responseBuilder, targetVersionedDocs, request.getIds());
    if (bulkRequestBuilder.numberOfActions() == 0) {
        // Everything was a noop or an error; no need to touch the index
        logger.trace("No bulk request execution necessary for API key update");
        listener.onResponse(responseBuilder.build());
        return;
    }
    logger.trace("Executing bulk request to update [{}] API keys", bulkRequestBuilder.numberOfActions());
    bulkRequestBuilder.setRefreshPolicy(defaultCreateDocRefreshPolicy(settings));
    securityIndex.forCurrentProject()
        .prepareIndexIfNeededThenExecute(
            ex -> listener.onFailure(traceLog("prepare security index before update", ex)),
            () -> executeAsyncWithOrigin(
                client.threadPool().getThreadContext(),
                SECURITY_ORIGIN,
                bulkRequestBuilder.request(),
                ActionListener.<BulkResponse>wrap(
                    bulkResponse -> buildResponseAndClearCache(bulkResponse, responseBuilder, listener),
                    ex -> listener.onFailure(traceLog("execute bulk request for update", ex))
                ),
                client::bulk
            )
        );
}
// package-private for testing
/**
 * Checks that the given API key document is eligible for update: it must not be invalidated,
 * not expired, must have a name (legacy 7.x keys without one are rejected), and must be of the
 * type the request expects. Throws {@link IllegalArgumentException} on the first violation.
 */
void validateForUpdate(
    final String apiKeyId,
    final ApiKey.Type expectedType,
    final Authentication authentication,
    final ApiKeyDoc apiKeyDoc
) {
    assert authentication.getEffectiveSubject().getUser().principal().equals(apiKeyDoc.creator.get("principal"))
        : "Authenticated user should be owner (authentication=["
            + authentication
            + "], owner=["
            + apiKeyDoc.creator
            + "], id=["
            + apiKeyId
            + "])";
    if (apiKeyDoc.invalidated) {
        throw new IllegalArgumentException("cannot update invalidated API key [" + apiKeyId + "]");
    }
    // An expiration time of -1 means the key never expires
    if (apiKeyDoc.expirationTime != -1 && clock.instant().isAfter(Instant.ofEpochMilli(apiKeyDoc.expirationTime))) {
        throw new IllegalArgumentException("cannot update expired API key [" + apiKeyId + "]");
    }
    if (Strings.isNullOrEmpty(apiKeyDoc.name)) {
        throw new IllegalArgumentException("cannot update legacy API key [" + apiKeyId + "] without name");
    }
    if (apiKeyDoc.type != expectedType) {
        throw new IllegalArgumentException(
            "cannot update API key of type [" + apiKeyDoc.type.value() + "] while expected type is [" + expectedType.value() + "]"
        );
    }
}
/**
 * This method removes remote indices and cluster privileges from the given role descriptors
 * when we are in a mixed cluster in which some of the nodes do not support remote indices/clusters.
 * Storing these roles would cause parsing issues on old nodes
 * (i.e. nodes running with transport version before
 * {@link org.elasticsearch.transport.RemoteClusterPortSettings#TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY}).
 * A header warning and an info log entry are emitted for every role that had privileges removed.
 */
static Set<RoleDescriptor> maybeRemoveRemotePrivileges(
    final Set<RoleDescriptor> userRoleDescriptors,
    final TransportVersion transportVersion,
    final String... apiKeyIds
) {
    if (transportVersion.before(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY)
        || transportVersion.before(ROLE_REMOTE_CLUSTER_PRIVS)) {
        // Track which roles were modified so we can warn about them below
        final Set<RoleDescriptor> affectedRoles = new HashSet<>();
        final Set<RoleDescriptor> result = userRoleDescriptors.stream().map(roleDescriptor -> {
            if (roleDescriptor.hasRemoteIndicesPrivileges() || roleDescriptor.hasRemoteClusterPermissions()) {
                affectedRoles.add(roleDescriptor);
                // Rebuild the descriptor, nulling out only the sections unsupported at this version
                return new RoleDescriptor(
                    roleDescriptor.getName(),
                    roleDescriptor.getClusterPrivileges(),
                    roleDescriptor.getIndicesPrivileges(),
                    roleDescriptor.getApplicationPrivileges(),
                    roleDescriptor.getConditionalClusterPrivileges(),
                    roleDescriptor.getRunAs(),
                    roleDescriptor.getMetadata(),
                    roleDescriptor.getTransientMetadata(),
                    roleDescriptor.hasRemoteIndicesPrivileges()
                        && transportVersion.before(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY)
                            ? null
                            : roleDescriptor.getRemoteIndicesPrivileges(),
                    roleDescriptor.hasRemoteClusterPermissions() && transportVersion.before(ROLE_REMOTE_CLUSTER_PRIVS)
                        ? null
                        : roleDescriptor.getRemoteClusterPermissions(),
                    roleDescriptor.getRestriction(),
                    roleDescriptor.getDescription()
                );
            }
            return roleDescriptor;
        }).collect(Collectors.toSet());
        if (false == affectedRoles.isEmpty()) {
            List<String> affectedRolesNames = affectedRoles.stream().map(RoleDescriptor::getName).sorted().collect(Collectors.toList());
            if (affectedRoles.stream().anyMatch(RoleDescriptor::hasRemoteIndicesPrivileges)
                && transportVersion.before(TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY)) {
                logger.info(
                    "removed remote indices privileges from role(s) {} for API key(s) [{}]",
                    affectedRolesNames,
                    buildDelimitedStringWithLimit(10, apiKeyIds)
                );
                HeaderWarning.addWarning(
                    "Removed API key's remote indices privileges from role(s) "
                        + affectedRolesNames
                        + ". Remote indices are not supported by all nodes in the cluster. "
                );
            }
            if (affectedRoles.stream().anyMatch(RoleDescriptor::hasRemoteClusterPermissions)
                && transportVersion.before(ROLE_REMOTE_CLUSTER_PRIVS)) {
                logger.info(
                    "removed remote cluster privileges from role(s) {} for API key(s) [{}]",
                    affectedRolesNames,
                    buildDelimitedStringWithLimit(10, apiKeyIds)
                );
                HeaderWarning.addWarning(
                    "Removed API key's remote cluster privileges from role(s) "
                        + affectedRolesNames
                        + ". Remote cluster privileges are not supported by all nodes in the cluster."
                );
            }
        }
        return result;
    }
    // All nodes support remote privileges; nothing to strip
    return userRoleDescriptors;
}
/**
 * Builds a comma delimited string from the given string values (e.g. value1, value2...).
 * The number of values included can be controlled with the {@code limit}. The limit must be a positive number.
 * When values are omitted, a trailing summary of the form {@code ... (N in total, M omitted)} is appended.
 * Note: package-private for testing
 *
 * @param limit  maximum number of values to render; must be &gt; 0
 * @param values the values to join; a null or empty array yields the empty string
 * @throws IllegalArgumentException if {@code limit} is not positive
 */
static String buildDelimitedStringWithLimit(final int limit, final String... values) {
    if (limit <= 0) {
        throw new IllegalArgumentException("limit must be positive number");
    }
    if (values == null || values.length == 0) {
        return "";
    }
    final int total = values.length;
    final int valuesToAppend = Math.min(limit, total);
    final int omitted = total - valuesToAppend;
    // Note: the original passed an element count to the StringBuilder constructor, which takes a
    // *character* capacity; just use the default capacity instead of a misleading one.
    final StringBuilder sb = new StringBuilder();
    for (int i = 0; i < valuesToAppend; i++) {
        if (i > 0) {
            sb.append(", ");
        }
        sb.append(values[i]);
    }
    if (omitted > 0) {
        sb.append("... (").append(total).append(" in total, ").append(omitted).append(" omitted)");
    }
    return sb.toString();
}
/**
 * Builds the JSON source for a new API key document.
 * The plaintext key is never stored; only its hash is written via {@code addApiKeyHash}.
 * package-private for testing
 *
 * @param expiration          may be null, in which case {@code expiration_time} is written as null (never expires)
 * @param metadata            arbitrary caller metadata, stored under {@code metadata_flattened}; may be null
 * @param certificateIdentity optional certificate identity; the field is omitted entirely when null
 */
static XContentBuilder newDocument(
    char[] apiKeyHashChars,
    String name,
    Authentication authentication,
    Set<RoleDescriptor> userRoleDescriptors,
    Instant created,
    Instant expiration,
    List<RoleDescriptor> keyRoleDescriptors,
    ApiKey.Type type,
    ApiKey.Version version,
    @Nullable Map<String, Object> metadata,
    @Nullable String certificateIdentity
) throws IOException {
    final XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject()
        .field("doc_type", "api_key")
        .field("type", type.value())
        .field("creation_time", created.toEpochMilli())
        .field("expiration_time", expiration == null ? null : expiration.toEpochMilli())
        .field("api_key_invalidated", false);
    addApiKeyHash(builder, apiKeyHashChars);
    // Assigned roles and the owner's limited-by roles are stored as separate sections
    addRoleDescriptors(builder, keyRoleDescriptors);
    addLimitedByRoleDescriptors(builder, userRoleDescriptors);
    builder.field("name", name).field("version", version.version()).field("metadata_flattened", metadata);
    if (certificateIdentity != null) {
        builder.field("certificate_identity", certificateIdentity);
    }
    addCreator(builder, authentication);
    return builder.endObject();
}
// package private for testing
/**
 * Builds the updated API key document for the given update request, carrying over any field the
 * request does not change from {@code currentApiKeyDoc}.
 *
 * @return `null` if the update is a noop, i.e., if no changes to `currentApiKeyDoc` are required
 */
@Nullable
static XContentBuilder maybeBuildUpdatedDocument(
    final String apiKeyId,
    final ApiKeyDoc currentApiKeyDoc,
    final ApiKey.Version targetDocVersion,
    final Authentication authentication,
    final BaseUpdateApiKeyRequest request,
    final Set<RoleDescriptor> userRoleDescriptors,
    final Clock clock
) throws IOException {
    assert currentApiKeyDoc.type == request.getType()
        : "API Key doc does not match request type (key-id=["
            + apiKeyId
            + "], doc=["
            + currentApiKeyDoc.type
            + "], request=["
            + request.getType()
            + "])";
    if (isNoop(apiKeyId, currentApiKeyDoc, targetDocVersion, authentication, request, userRoleDescriptors)) {
        return null;
    }
    final XContentBuilder builder = XContentFactory.jsonBuilder();
    builder.startObject()
        .field("doc_type", "api_key")
        .field("type", currentApiKeyDoc.type.value())
        .field("creation_time", currentApiKeyDoc.creationTime)
        .field("api_key_invalidated", false);
    if (request.getExpiration() != null) {
        // Expiration is recomputed relative to now rather than the original creation time
        builder.field("expiration_time", getApiKeyExpiration(clock.instant(), request.getExpiration()).toEpochMilli());
    } else {
        builder.field("expiration_time", currentApiKeyDoc.expirationTime == -1 ? null : currentApiKeyDoc.expirationTime);
    }
    // The key's secret hash never changes on update
    addApiKeyHash(builder, currentApiKeyDoc.hash.toCharArray());
    final List<RoleDescriptor> keyRoles = request.getRoleDescriptors();
    if (keyRoles != null) {
        logger.trace(() -> format("Building API key doc with updated role descriptors [%s]", keyRoles));
        addRoleDescriptors(builder, keyRoles);
    } else {
        // No role change requested: copy the stored bytes verbatim
        assert currentApiKeyDoc.roleDescriptorsBytes != null : "Role descriptors for [" + apiKeyId + "] are null";
        builder.rawField("role_descriptors", currentApiKeyDoc.roleDescriptorsBytes.streamInput(), XContentType.JSON);
    }
    addLimitedByRoleDescriptors(builder, userRoleDescriptors);
    builder.field("name", currentApiKeyDoc.name).field("version", targetDocVersion.version());
    assert currentApiKeyDoc.metadataFlattened == null
        || MetadataUtils.containsReservedMetadata(
            XContentHelper.convertToMap(currentApiKeyDoc.metadataFlattened, false, XContentType.JSON).v2()
        ) == false : "API key doc [" + apiKeyId + "] to be updated contains reserved metadata";
    final Map<String, Object> metadata = request.getMetadata();
    if (metadata != null) {
        logger.trace(() -> format("Building API key doc with updated metadata [%s]", metadata));
        builder.field("metadata_flattened", metadata);
    } else {
        builder.rawField(
            "metadata_flattened",
            currentApiKeyDoc.metadataFlattened == null
                ? ApiKeyDoc.NULL_BYTES.streamInput()
                : currentApiKeyDoc.metadataFlattened.streamInput(),
            XContentType.JSON
        );
    }
    CertificateIdentity certIdentityRequest = request.getCertificateIdentity();
    if (certIdentityRequest == null) {
        // certificate_identity was omitted from request; preserve existing value
        if (currentApiKeyDoc.certificateIdentity != null) {
            logger.trace(() -> format("Preserving existing certificate identity for API key [%s]", apiKeyId));
            builder.field("certificate_identity", currentApiKeyDoc.certificateIdentity);
        }
    } else {
        String newValue = certIdentityRequest.value();
        if (newValue == null) {
            // Explicit null provided for certificate_identity in request; clear the certificate_identity
            logger.trace(() -> format("Clearing certificate identity for API key [%s]", apiKeyId));
            // Don't add certificate_identity field to document (effectively removes it)
        } else {
            // A new value was provided for certificate_identity; update to the new value.
            logger.trace(() -> format("Updating certificate identity for API key [%s]", apiKeyId));
            builder.field("certificate_identity", newValue);
        }
    }
    addCreator(builder, authentication);
    return builder.endObject();
}
/**
 * Determines whether applying {@code request} to {@code apiKeyDoc} would leave the stored
 * document unchanged. Compares, in order: certificate identity, doc version, expiration,
 * creator/realm information, metadata, assigned role descriptors (including their remote
 * cluster permissions), and the owner's limited-by role descriptors. Returns {@code false}
 * as soon as any difference is found.
 */
private static boolean isNoop(
    final String apiKeyId,
    final ApiKeyDoc apiKeyDoc,
    final ApiKey.Version targetDocVersion,
    final Authentication authentication,
    final BaseUpdateApiKeyRequest request,
    final Set<RoleDescriptor> userRoleDescriptors
) throws IOException {
    final CertificateIdentity newCertificateIdentity = request.getCertificateIdentity();
    if (newCertificateIdentity != null) {
        String newCertificateIdentityStringValue = newCertificateIdentity.value();
        if (Objects.equals(newCertificateIdentityStringValue, apiKeyDoc.certificateIdentity) == false) {
            return false;
        }
    }
    if (apiKeyDoc.version != targetDocVersion.version()) {
        return false;
    }
    if (request.getExpiration() != null) {
        // Since expiration is relative current time, it's not likely that it matches the stored value to the ms, so assume update
        return false;
    }
    // Any change to the owner's identity or realm requires re-writing the creator section
    final Map<String, Object> currentCreator = apiKeyDoc.creator;
    final var user = authentication.getEffectiveSubject().getUser();
    final var sourceRealm = authentication.getEffectiveSubject().getRealm();
    if (false == (Objects.equals(user.principal(), currentCreator.get("principal"))
        && Objects.equals(user.fullName(), currentCreator.get("full_name"))
        && Objects.equals(user.email(), currentCreator.get("email"))
        && Objects.equals(user.metadata(), currentCreator.get("metadata"))
        && Objects.equals(sourceRealm.getName(), currentCreator.get("realm"))
        && Objects.equals(sourceRealm.getType(), currentCreator.get("realm_type")))) {
        return false;
    }
    if (sourceRealm.getDomain() != null) {
        if (currentCreator.get("realm_domain") == null) {
            return false;
        }
        // Stored realm domain is a map; parse it back to compare structurally
        @SuppressWarnings("unchecked")
        var m = (Map<String, Object>) currentCreator.get("realm_domain");
        final RealmDomain currentRealmDomain;
        try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, m)) {
            currentRealmDomain = RealmDomain.fromXContent(parser);
        }
        if (sourceRealm.getDomain().equals(currentRealmDomain) == false) {
            return false;
        }
    } else {
        if (currentCreator.get("realm_domain") != null) {
            return false;
        }
    }
    final Map<String, Object> newMetadata = request.getMetadata();
    if (newMetadata != null) {
        if (apiKeyDoc.metadataFlattened == null) {
            return false;
        }
        final Map<String, Object> currentMetadata = XContentHelper.convertToMap(apiKeyDoc.metadataFlattened, false, XContentType.JSON)
            .v2();
        if (newMetadata.equals(currentMetadata) == false) {
            return false;
        }
    }
    final List<RoleDescriptor> newRoleDescriptors = request.getRoleDescriptors();
    if (newRoleDescriptors != null) {
        final List<RoleDescriptor> currentRoleDescriptors = parseRoleDescriptorsBytes(apiKeyId, apiKeyDoc.roleDescriptorsBytes, false);
        if (false == (newRoleDescriptors.size() == currentRoleDescriptors.size()
            && Set.copyOf(newRoleDescriptors).containsAll(currentRoleDescriptors))) {
            return false;
        }
        if (newRoleDescriptors.size() == currentRoleDescriptors.size()) {
            for (int i = 0; i < currentRoleDescriptors.size(); i++) {
                // if remote cluster permissions are not equal, then it is not a noop
                if (currentRoleDescriptors.get(i)
                    .getRemoteClusterPermissions()
                    .equals(newRoleDescriptors.get(i).getRemoteClusterPermissions()) == false) {
                    return false;
                }
            }
        }
    }
    assert userRoleDescriptors != null : "API Key [" + apiKeyId + "] has null role descriptors";
    final List<RoleDescriptor> currentLimitedByRoleDescriptors = parseRoleDescriptorsBytes(
        apiKeyId,
        apiKeyDoc.limitedByRoleDescriptorsBytes,
        // We want the 7.x `LEGACY_SUPERUSER_ROLE_DESCRIPTOR` role descriptor to be returned here to auto-update
        // `LEGACY_SUPERUSER_ROLE_DESCRIPTOR` to `ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR`, when we update a 7.x API key.
        false
    );
    return (userRoleDescriptors.size() == currentLimitedByRoleDescriptors.size()
        && userRoleDescriptors.containsAll(currentLimitedByRoleDescriptors));
}
/**
 * Attempts to authenticate the given API key credentials, responding with
 * {@code AuthenticationResult.notHandled()} when the API key service is disabled.
 * The credentials are closed once a response or failure has been delivered.
 */
void tryAuthenticate(ThreadContext ctx, ApiKeyCredentials credentials, ActionListener<AuthenticationResult<User>> listener) {
    if (false == isEnabled()) {
        listener.onResponse(AuthenticationResult.notHandled());
        // Fix: without this return the method would continue and notify the listener a second time
        return;
    }
    assert credentials != null : "api key credentials must not be null";
    loadApiKeyAndValidateCredentials(ctx, credentials, ActionListener.wrap(response -> {
        credentials.close();
        listener.onResponse(response);
    }, e -> {
        credentials.close();
        listener.onFailure(e);
    }));
}
/**
 * Loads the API key document for the given credentials — from the doc cache when possible,
 * otherwise from the security index — and validates the credentials against it.
 * A busy crypto/search threadpool is reported as a terminate result rather than a failure.
 */
void loadApiKeyAndValidateCredentials(
    ThreadContext ctx,
    ApiKeyCredentials credentials,
    ActionListener<AuthenticationResult<User>> listener
) {
    final String docId = credentials.getId();
    Consumer<ApiKeyDoc> validator = apiKeyDoc -> validateApiKeyCredentials(
        docId,
        apiKeyDoc,
        credentials,
        clock,
        listener.delegateResponse((l, e) -> {
            if (ExceptionsHelper.unwrapCause(e) instanceof EsRejectedExecutionException) {
                l.onResponse(AuthenticationResult.terminate("server is too busy to respond", e));
            } else {
                l.onFailure(e);
            }
        })
    );
    final long invalidationCount;
    if (apiKeyDocCache != null) {
        ApiKeyDoc existing = apiKeyDocCache.get(docId);
        if (existing != null) {
            validator.accept(existing);
            return;
        }
        // API key doc not found in cache, take a record of the current invalidation count to prepare for caching
        invalidationCount = apiKeyDocCache.getInvalidationCount();
    } else {
        // -1 sentinel: doc caching is disabled, so skip the putIfNoInvalidationSince below
        invalidationCount = -1;
    }
    final GetRequest getRequest = client.prepareGet(SECURITY_MAIN_ALIAS, docId).setFetchSource(true).request();
    executeAsyncWithOrigin(ctx, SECURITY_ORIGIN, getRequest, ActionListener.<GetResponse>wrap(response -> {
        if (response.isExists()) {
            final ApiKeyDoc apiKeyDoc;
            try (
                XContentParser parser = XContentHelper.createParser(
                    XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE),
                    response.getSourceAsBytesRef(),
                    XContentType.JSON
                )
            ) {
                apiKeyDoc = ApiKeyDoc.fromXContent(parser);
            }
            if (invalidationCount != -1) {
                // Only cache if no invalidation happened while we were fetching the doc
                apiKeyDocCache.putIfNoInvalidationSince(docId, apiKeyDoc, invalidationCount);
            }
            validator.accept(apiKeyDoc);
        } else {
            if (apiKeyAuthCache != null) {
                // The key no longer exists; drop any stale auth cache entry for it
                apiKeyAuthCache.invalidate(docId);
            }
            listener.onResponse(AuthenticationResult.unsuccessful("unable to find apikey with id " + credentials.getId(), null));
        }
    }, e -> {
        if (ExceptionsHelper.unwrapCause(e) instanceof EsRejectedExecutionException) {
            listener.onResponse(AuthenticationResult.terminate("server is too busy to respond", e));
        } else {
            listener.onResponse(
                AuthenticationResult.unsuccessful("apikey authentication for id " + credentials.getId() + " encountered a failure", e)
            );
        }
    }), client::get);
}
/**
 * Parses role descriptors from their map representation (role name → descriptor map) as stored
 * in an API key document. For limited-by roles, the legacy 7.x superuser role descriptor is
 * replaced with the current built-in superuser role descriptor.
 *
 * @return the parsed role descriptors, or {@code null} when {@code roleDescriptorsMap} is null
 * @throws UncheckedIOException if a descriptor map cannot be serialized/parsed
 */
public List<RoleDescriptor> parseRoleDescriptors(
    final String apiKeyId,
    final Map<String, Object> roleDescriptorsMap,
    RoleReference.ApiKeyRoleType roleType
) {
    if (roleDescriptorsMap == null) {
        return null;
    }
    final List<RoleDescriptor> roleDescriptors = roleDescriptorsMap.entrySet().stream().map(entry -> {
        final String name = entry.getKey();
        @SuppressWarnings("unchecked")
        final Map<String, Object> rdMap = (Map<String, Object>) entry.getValue();
        // Round-trip the map through JSON so the standard role descriptor parser can be reused
        try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
            builder.map(rdMap);
            try (
                XContentParser parser = XContentHelper.createParserNotCompressed(
                    XContentParserConfiguration.EMPTY.withDeprecationHandler(
                        new ApiKeyLoggingDeprecationHandler(deprecationLogger, apiKeyId)
                    ),
                    BytesReference.bytes(builder),
                    XContentType.JSON
                )
            ) {
                return ROLE_DESCRIPTOR_PARSER.parse(name, parser);
            }
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }).toList();
    return roleType == RoleReference.ApiKeyRoleType.LIMITED_BY
        ? maybeReplaceSuperuserRoleDescriptor(apiKeyId, roleDescriptors)
        : roleDescriptors;
}
/**
 * Parses role descriptors from their stored byte representation. Only limited-by role
 * descriptors are subject to the legacy 7.x superuser replacement.
 */
public List<RoleDescriptor> parseRoleDescriptorsBytes(
    final String apiKeyId,
    BytesReference bytesReference,
    RoleReference.ApiKeyRoleType roleType
) {
    final boolean replaceLegacySuperuser = RoleReference.ApiKeyRoleType.LIMITED_BY == roleType;
    return parseRoleDescriptorsBytes(apiKeyId, bytesReference, replaceLegacySuperuser);
}
/**
 * Parses the stored role-descriptors JSON object ({@code {"roleName": {...}, ...}}) into a list
 * of role descriptors, optionally replacing the legacy 7.x superuser descriptor.
 *
 * @return an empty list when {@code bytesReference} is null
 * @throws UncheckedIOException if the bytes cannot be parsed
 */
private static List<RoleDescriptor> parseRoleDescriptorsBytes(
    final String apiKeyId,
    BytesReference bytesReference,
    final boolean replaceLegacySuperuserRoleDescriptor
) {
    if (bytesReference == null) {
        return Collections.emptyList();
    }
    List<RoleDescriptor> roleDescriptors = new ArrayList<>();
    try (
        XContentParser parser = XContentHelper.createParser(
            XContentParserConfiguration.EMPTY.withDeprecationHandler(new ApiKeyLoggingDeprecationHandler(deprecationLogger, apiKeyId)),
            bytesReference,
            XContentType.JSON
        )
    ) {
        parser.nextToken(); // skip outer start object
        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
            parser.nextToken(); // role name
            String roleName = parser.currentName();
            roleDescriptors.add(ROLE_DESCRIPTOR_PARSER.parse(roleName, parser));
        }
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
    return replaceLegacySuperuserRoleDescriptor ? maybeReplaceSuperuserRoleDescriptor(apiKeyId, roleDescriptors) : roleDescriptors;
}
// package private for tests
// The superuser role descriptor exactly as it was defined in 7.x (before #82049), used to detect
// and replace stale copies embedded in old API key documents — see maybeReplaceSuperuserRoleDescriptor.
static final RoleDescriptor LEGACY_SUPERUSER_ROLE_DESCRIPTOR = new RoleDescriptor(
    "superuser",
    new String[] { "all" },
    new RoleDescriptor.IndicesPrivileges[] {
        RoleDescriptor.IndicesPrivileges.builder().indices("*").privileges("all").allowRestrictedIndices(true).build() },
    new RoleDescriptor.ApplicationResourcePrivileges[] {
        RoleDescriptor.ApplicationResourcePrivileges.builder().application("*").privileges("*").resources("*").build() },
    null,
    new String[] { "*" },
    MetadataUtils.DEFAULT_RESERVED_METADATA,
    Collections.emptyMap()
);
// This method should only be called to replace the superuser role descriptor for the limited-by roles of an API Key.
// We do not replace assigned roles because they are created explicitly by users.
// Before #82049, it is possible to specify a role descriptor for API keys that is identical to the builtin superuser role
// (including the _reserved metadata field).
private static List<RoleDescriptor> maybeReplaceSuperuserRoleDescriptor(String apiKeyId, List<RoleDescriptor> roleDescriptors) {
    // Walk all roles because superuser can be just one of several that a user has. Unlike building
    // the Role object, capturing role descriptors does not preempt for superuser.
    final List<RoleDescriptor> result = new ArrayList<>(roleDescriptors.size());
    for (RoleDescriptor roleDescriptor : roleDescriptors) {
        // Comparing the whole descriptor (rather than just name + _reserved metadata) is safer,
        // and role resolution is cached so the cost is negligible.
        if (roleDescriptor.equals(LEGACY_SUPERUSER_ROLE_DESCRIPTOR)) {
            logger.debug("replacing superuser role for API key [{}]", apiKeyId);
            result.add(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR);
        } else {
            result.add(roleDescriptor);
        }
    }
    return List.copyOf(result);
}
/**
 * Validates the ApiKey using the source map. Verification results are cached (when the auth
 * cache is enabled) so repeated authentications with the same key avoid re-hashing; concurrent
 * first-time verifications for the same key id share a single {@link ListenableFuture} entry.
 * @param docId the identifier of the document that was retrieved from the security index
 * @param apiKeyDoc the partially deserialized API key document
 * @param credentials the credentials provided by the user
 * @param listener the listener to notify after verification
 */
void validateApiKeyCredentials(
    String docId,
    ApiKeyDoc apiKeyDoc,
    ApiKeyCredentials credentials,
    Clock clock,
    ActionListener<AuthenticationResult<User>> listener
) {
    if ("api_key".equals(apiKeyDoc.docType) == false) {
        listener.onResponse(
            AuthenticationResult.unsuccessful("document [" + docId + "] is [" + apiKeyDoc.docType + "] not an api key", null)
        );
    } else if (apiKeyDoc.invalidated == null) {
        listener.onResponse(AuthenticationResult.unsuccessful("api key document is missing invalidated field", null));
    } else if (apiKeyDoc.invalidated) {
        if (apiKeyAuthCache != null) {
            apiKeyAuthCache.invalidate(docId);
        }
        listener.onResponse(AuthenticationResult.unsuccessful("api key [" + credentials.getId() + "] has been invalidated", null));
    } else {
        if (apiKeyDoc.hash == null) {
            throw new IllegalStateException("api key hash is missing");
        }
        if (apiKeyAuthCache != null) {
            // Track whether we created the cache entry (first verifier) or found an existing one
            final AtomicBoolean valueAlreadyInCache = new AtomicBoolean(true);
            final ListenableFuture<CachedApiKeyHashResult> listenableCacheEntry;
            try {
                listenableCacheEntry = apiKeyAuthCache.computeIfAbsent(credentials.getId(), k -> {
                    valueAlreadyInCache.set(false);
                    return new ListenableFuture<>();
                });
            } catch (ExecutionException e) {
                listener.onFailure(e);
                return;
            }
            if (valueAlreadyInCache.get()) {
                // Another caller is (or was) verifying this key; wait for its result
                listenableCacheEntry.addListener(ActionListener.wrap(result -> {
                    if (result.success) {
                        if (result.verify(credentials.getKey())) {
                            // move on
                            completeApiKeyAuthentication(apiKeyDoc, credentials, clock, listener);
                        } else {
                            listener.onResponse(
                                AuthenticationResult.unsuccessful("invalid credentials for API key [" + credentials.getId() + "]", null)
                            );
                        }
                    } else if (result.verify(credentials.getKey())) { // same key, pass the same result
                        listener.onResponse(
                            AuthenticationResult.unsuccessful("invalid credentials for API key [" + credentials.getId() + "]", null)
                        );
                    } else {
                        // Cached failure was for a different secret; drop the entry and retry fresh
                        apiKeyAuthCache.invalidate(credentials.getId(), listenableCacheEntry);
                        validateApiKeyCredentials(docId, apiKeyDoc, credentials, clock, listener);
                    }
                }, listener::onFailure), threadPool.generic(), threadPool.getThreadContext());
            } else {
                // We own the cache entry: perform the hash verification and publish the result
                verifyKeyAgainstHash(apiKeyDoc.hash, credentials, ActionListener.wrap(verified -> {
                    listenableCacheEntry.onResponse(new CachedApiKeyHashResult(verified, credentials.getKey()));
                    if (verified) {
                        // move on
                        completeApiKeyAuthentication(apiKeyDoc, credentials, clock, listener);
                    } else {
                        listener.onResponse(
                            AuthenticationResult.unsuccessful("invalid credentials for API key [" + credentials.getId() + "]", null)
                        );
                    }
                }, exception -> {
                    // Crypto threadpool queue is full, invalidate this cache entry and make sure nothing is going to wait on it
                    logger.warn(
                        Strings.format(
                            "rejecting possibly valid API key authentication because the [%s] threadpool is full",
                            SECURITY_CRYPTO_THREAD_POOL_NAME
                        )
                    );
                    apiKeyAuthCache.invalidate(credentials.getId(), listenableCacheEntry);
                    listenableCacheEntry.onFailure(exception);
                    listener.onFailure(exception);
                }));
            }
        } else {
            // Caching disabled: verify the hash directly
            verifyKeyAgainstHash(apiKeyDoc.hash, credentials, ActionListener.wrap(verified -> {
                if (verified) {
                    // move on
                    completeApiKeyAuthentication(apiKeyDoc, credentials, clock, listener);
                } else {
                    listener.onResponse(
                        AuthenticationResult.unsuccessful("invalid credentials for API key [" + credentials.getId() + "]", null)
                    );
                }
            }, listener::onFailure));
        }
    }
}
// pkg private for testing
CachedApiKeyHashResult getFromCache(String id) {
    // Guard both the disabled-cache case and a plain cache miss: Cache#get
    // returns null for an absent key, so the original unconditional
    // .result() call would throw an NPE when the id is not cached.
    if (apiKeyAuthCache == null) {
        return null;
    }
    final ListenableFuture<CachedApiKeyHashResult> future = apiKeyAuthCache.get(id);
    return future == null ? null : future.result();
}
// pkg private for testing
// Exposes the credential-hash verification cache; may be null when caching is
// not configured (callers elsewhere in this class null-check it).
Cache<String, ListenableFuture<CachedApiKeyHashResult>> getApiKeyAuthCache() {
    return apiKeyAuthCache;
}
// pkg private for testing
// Returns the API key document cache, or null when document caching is absent.
Cache<String, CachedApiKeyDoc> getDocCache() {
    if (apiKeyDocCache == null) {
        return null;
    }
    return apiKeyDocCache.docCache;
}
// pkg private for testing
// Returns the role-descriptor bytes cache, or null when document caching is absent.
Cache<String, BytesReference> getRoleDescriptorsBytesCache() {
    if (apiKeyDocCache == null) {
        return null;
    }
    return apiKeyDocCache.roleDescriptorsBytesCache;
}
// package-private for testing
/**
 * Final stage of API key authentication, invoked after the key's secret has been
 * verified against the stored hash. Terminates authentication when the key's type
 * does not match the expected type, or when a required certificate identity is
 * missing or mismatched; rejects expired keys; otherwise builds the effective
 * {@link User} plus the authentication-result metadata (role descriptor bytes,
 * key id/name/type, creator realm info) and reports success.
 */
void completeApiKeyAuthentication(
    ApiKeyDoc apiKeyDoc,
    ApiKeyCredentials credentials,
    Clock clock,
    ActionListener<AuthenticationResult<User>> listener
) {
    // A key of one type must not authenticate where the other type is expected.
    if (apiKeyDoc.type != credentials.expectedType) {
        listener.onResponse(
            AuthenticationResult.terminate(
                Strings.format(
                    "authentication expected API key type of [%s], but API key [%s] has type [%s]",
                    credentials.expectedType.value(),
                    credentials.getId(),
                    apiKeyDoc.type.value()
                )
            )
        );
        return;
    }
    // Keys stored with a certificate identity additionally require a client
    // certificate whose DN matches the stored pattern.
    if (apiKeyDoc.certificateIdentity != null) {
        if (credentials.getCertificateIdentity() == null) {
            listener.onResponse(
                AuthenticationResult.terminate(
                    Strings.format(
                        "API key (type:[%s], id:[%s]) requires certificate identity matching [%s], but no certificate was provided",
                        apiKeyDoc.type.value(),
                        credentials.getId(),
                        apiKeyDoc.certificateIdentity
                    )
                )
            );
            return;
        }
        if (validateCertificateIdentity(credentials.getCertificateIdentity(), apiKeyDoc.certificateIdentity) == false) {
            listener.onResponse(
                AuthenticationResult.terminate(
                    Strings.format(
                        "DN from provided certificate [%s] does not match API Key certificate identity pattern [%s]",
                        credentials.getCertificateIdentity(),
                        apiKeyDoc.certificateIdentity
                    )
                )
            );
            return;
        }
    }
    // An expirationTime of -1 is treated as "never expires".
    if (apiKeyDoc.expirationTime == -1 || Instant.ofEpochMilli(apiKeyDoc.expirationTime).isAfter(clock.instant())) {
        final String principal = Objects.requireNonNull((String) apiKeyDoc.creator.get("principal"));
        final String fullName = (String) apiKeyDoc.creator.get("full_name");
        final String email = (String) apiKeyDoc.creator.get("email");
        @SuppressWarnings("unchecked")
        Map<String, Object> metadata = (Map<String, Object>) apiKeyDoc.creator.get("metadata");
        // The effective user is the key's creator, with no direct roles: the
        // key's role descriptors (attached below as metadata) carry the privileges.
        final User apiKeyUser = new User(principal, Strings.EMPTY_ARRAY, fullName, email, metadata, true);
        final Map<String, Object> authResultMetadata = new HashMap<>();
        authResultMetadata.put(AuthenticationField.API_KEY_CREATOR_REALM_NAME, apiKeyDoc.creator.get("realm"));
        authResultMetadata.put(AuthenticationField.API_KEY_CREATOR_REALM_TYPE, apiKeyDoc.creator.get("realm_type"));
        authResultMetadata.put(AuthenticationField.API_KEY_ROLE_DESCRIPTORS_KEY, apiKeyDoc.roleDescriptorsBytes);
        authResultMetadata.put(AuthenticationField.API_KEY_LIMITED_ROLE_DESCRIPTORS_KEY, apiKeyDoc.limitedByRoleDescriptorsBytes);
        authResultMetadata.put(AuthenticationField.API_KEY_ID_KEY, credentials.getId());
        authResultMetadata.put(AuthenticationField.API_KEY_NAME_KEY, apiKeyDoc.name);
        authResultMetadata.put(AuthenticationField.API_KEY_TYPE_KEY, apiKeyDoc.type.value());
        if (apiKeyDoc.metadataFlattened != null) {
            authResultMetadata.put(AuthenticationField.API_KEY_METADATA_KEY, apiKeyDoc.metadataFlattened);
        }
        listener.onResponse(AuthenticationResult.success(apiKeyUser, authResultMetadata));
    } else {
        listener.onResponse(AuthenticationResult.unsuccessful("api key is expired", null));
    }
}
/**
 * Returns true when the certificate's DN fully matches the key's stored
 * certificate-identity regex pattern.
 */
private boolean validateCertificateIdentity(String certificateIdentity, String certificateIdentityPattern) {
    logger.trace("Validating certificate identity [{}] against [{}]", certificateIdentity, certificateIdentityPattern);
    final Pattern compiledPattern = getCertificateIdentityPattern(certificateIdentityPattern);
    final Matcher matcher = compiledPattern.matcher(certificateIdentity);
    return matcher.matches();
}
// Visible for testing
/**
 * Compiles the regex used to match a certificate DN against a stored
 * certificate identity, reusing the pattern cache when one is configured. A
 * failed cache computation is logged and falls through to direct compilation
 * rather than failing authentication.
 */
Pattern getCertificateIdentityPattern(String certificateIdentityPattern) {
    if (certificateIdentityPatternCache != null) {
        try {
            return certificateIdentityPatternCache.computeIfAbsent(certificateIdentityPattern, Pattern::compile);
        } catch (ExecutionException e) {
            // Deliberately non-fatal: fall back to uncached Pattern.compile below.
            logger.error(
                Strings.format(
                    "Failed to validate certificate identity against pattern [%s] using cache. Falling back to regular matching",
                    certificateIdentityPattern
                ),
                e
            );
        }
    }
    return Pattern.compile(certificateIdentityPattern);
}
/**
 * Parses a raw API key string into REST-type credentials, or returns null when
 * the API key service is disabled.
 */
ApiKeyCredentials parseCredentialsFromApiKeyString(SecureString apiKeyString) {
    if (isEnabled() == false) {
        return null;
    }
    return parseApiKey(apiKeyString, null, ApiKey.Type.REST);
}
// Extracts the base64 payload from an "ApiKey <encoded>" header value and
// parses it into credentials of the expected type.
static ApiKeyCredentials getCredentialsFromHeader(final String header, @Nullable String certificateIdentity, ApiKey.Type expectedType) {
    return parseApiKey(Authenticator.extractCredentialFromHeaderValue(header, "ApiKey"), certificateIdentity, expectedType);
}
/** Prepends the {@code "ApiKey "} authentication-scheme prefix to an encoded key. */
public static String withApiKeyPrefix(final String encodedApiKey) {
    return new StringBuilder("ApiKey ").append(encodedApiKey).toString();
}
/**
 * Decodes a base64 "id:secret" API key string into {@link ApiKeyCredentials}.
 * Returns null for a null input. Rejects payloads without a colon-separated,
 * non-empty id, and rejects cross-cluster keys whose secret is not exactly
 * {@code API_KEY_SECRET_LENGTH} chars. The decoded char form is zeroed out
 * before returning.
 */
private static ApiKeyCredentials parseApiKey(
    SecureString apiKeyString,
    @Nullable String certificateIdentity,
    ApiKey.Type expectedType
) {
    if (apiKeyString != null) {
        // NOTE(review): the decoded byte[] is never zeroed here — only the char
        // form below is cleared.
        final byte[] decodedApiKeyCredBytes = Base64.getDecoder().decode(CharArrays.toUtf8Bytes(apiKeyString.getChars()));
        char[] apiKeyCredChars = null;
        try {
            apiKeyCredChars = CharArrays.utf8BytesToChars(decodedApiKeyCredBytes);
            // Locate the first ':' separating the key id from the secret.
            int colonIndex = -1;
            for (int i = 0; i < apiKeyCredChars.length; i++) {
                if (apiKeyCredChars[i] == ':') {
                    colonIndex = i;
                    break;
                }
            }
            // colonIndex < 1 covers both "no colon at all" and "empty id".
            if (colonIndex < 1) {
                throw new IllegalArgumentException("invalid ApiKey value");
            }
            final int secretStartPos = colonIndex + 1;
            // Cross-cluster keys have a fixed-length secret; enforce it up front.
            if (ApiKey.Type.CROSS_CLUSTER == expectedType && API_KEY_SECRET_LENGTH != apiKeyCredChars.length - secretStartPos) {
                throw new IllegalArgumentException("invalid cross-cluster API key value");
            }
            return new ApiKeyCredentials(
                new String(Arrays.copyOfRange(apiKeyCredChars, 0, colonIndex)),
                new SecureString(Arrays.copyOfRange(apiKeyCredChars, secretStartPos, apiKeyCredChars.length)),
                expectedType,
                certificateIdentity
            );
        } finally {
            // Scrub the decoded credentials from memory, success or failure.
            if (apiKeyCredChars != null) {
                Arrays.fill(apiKeyCredChars, (char) 0);
            }
        }
    }
    return null;
}
/**
 * Hashes the given API key secret on the dedicated security-crypto thread pool
 * and delivers the resulting hash chars to the listener.
 */
void computeHashForApiKey(SecureString apiKey, ActionListener<char[]> listener) {
    threadPool.executor(SECURITY_CRYPTO_THREAD_POOL_NAME).execute(ActionRunnable.supply(listener, () -> hasher.hash(apiKey)));
}
// Protected instance method so this can be mocked
/**
 * Verifies the supplied credentials' secret against the stored hash on the
 * security-crypto thread pool, reporting the boolean outcome to the listener.
 */
protected void verifyKeyAgainstHash(String apiKeyHash, ApiKeyCredentials credentials, ActionListener<Boolean> listener) {
    threadPool.executor(SECURITY_CRYPTO_THREAD_POOL_NAME).execute(ActionRunnable.supply(listener, () -> {
        // Single char[] copy of the hash, zeroed in the finally block. The
        // original code materialized a second copy for resolveFromHash that
        // was never cleared, leaving sensitive hash chars in memory.
        final char[] apiKeyHashChars = apiKeyHash.toCharArray();
        try {
            Hasher hasher = Hasher.resolveFromHash(apiKeyHashChars);
            return hasher.verify(credentials.getKey(), apiKeyHashChars);
        } finally {
            Arrays.fill(apiKeyHashChars, (char) 0);
        }
    }));
}
/**
 * Computes the absolute expiration instant for a key created at {@code now},
 * or null when no expiration was requested.
 */
private static Instant getApiKeyExpiration(Instant now, @Nullable TimeValue expiration) {
    return expiration == null ? null : now.plusSeconds(expiration.getSeconds());
}
// Whether the API key service is enabled.
private boolean isEnabled() {
    return enabled;
}
/**
 * Throws {@link FeatureNotEnabledException} when the API key service is
 * disabled; the exception-throwing counterpart of {@code isEnabled()}.
 */
public void ensureEnabled() {
    if (false == enabled) {
        throw new FeatureNotEnabledException(Feature.API_KEY_SERVICE, "api keys are not enabled");
    }
}
/**
 * Reports usage counts for cross-cluster API keys as a map with keys
 * {@code total}, {@code ccs}, {@code ccr} and {@code ccs_ccr}. Responds with an
 * empty map when the service is disabled, zero counts when the security index
 * does not exist, and a failure when the index is unavailable for search.
 */
public void crossClusterApiKeyUsageStats(ActionListener<Map<String, Object>> listener) {
    if (false == isEnabled()) {
        listener.onResponse(Map.of());
        return;
    }
    final IndexState projectSecurityIndex = securityIndex.forCurrentProject();
    if (projectSecurityIndex.indexExists() == false) {
        logger.debug("security index does not exist");
        listener.onResponse(Map.of("total", 0, "ccs", 0, "ccr", 0, "ccs_ccr", 0));
    } else if (projectSecurityIndex.isAvailable(SEARCH_SHARDS) == false) {
        listener.onFailure(projectSecurityIndex.getUnavailableReason(SEARCH_SHARDS));
    } else {
        final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
            .filter(QueryBuilders.termQuery("doc_type", "api_key"))
            .filter(QueryBuilders.termQuery("type", ApiKey.Type.CROSS_CLUSTER.value()));
        findApiKeys(boolQuery, true, true, this::convertSearchHitToApiKeyInfo, ActionListener.wrap(apiKeyInfos -> {
            int ccsKeys = 0, ccrKeys = 0, ccsCcrKeys = 0;
            for (ApiKey apiKeyInfo : apiKeyInfos) {
                assert apiKeyInfo.getType() == ApiKey.Type.CROSS_CLUSTER
                    : "Incorrect API Key type for [" + apiKeyInfo + "] should be [" + ApiKey.Type.CROSS_CLUSTER + "]";
                assert apiKeyInfo.getRoleDescriptors().size() == 1
                    : "API Key ["
                        + apiKeyInfo
                        + "] has ["
                        + apiKeyInfo.getRoleDescriptors().size()
                        + "] role descriptors, but should be 1";
                final List<String> clusterPrivileges = Arrays.asList(
                    apiKeyInfo.getRoleDescriptors().iterator().next().getClusterPrivileges()
                );
                if (clusterPrivileges.contains("cross_cluster_search")
                    && clusterPrivileges.contains("cross_cluster_replication") == false) {
                    ccsKeys += 1;
                } else if (clusterPrivileges.contains("cross_cluster_replication")
                    && clusterPrivileges.contains("cross_cluster_search") == false) {
                    ccrKeys += 1;
                } else if (clusterPrivileges.contains("cross_cluster_search")
                    && clusterPrivileges.contains("cross_cluster_replication")) {
                    ccsCcrKeys += 1;
                } else {
                    final String message = "invalid cluster privileges "
                        + clusterPrivileges
                        + " for cross-cluster API key ["
                        + apiKeyInfo.getId()
                        + "]";
                    assert false : message;
                    listener.onFailure(new IllegalStateException(message));
                    // BUGFIX: bail out after failing the listener. Without this
                    // return the loop kept running and onResponse below fired
                    // as well, notifying the listener twice.
                    return;
                }
            }
            listener.onResponse(Map.of("total", apiKeyInfos.size(), "ccs", ccsKeys, "ccr", ccrKeys, "ccs_ccr", ccsCcrKeys));
        }, listener::onFailure));
    }
}
@Override
public void close() {
    // Best-effort shutdown: close each cache metric individually; close() is
    // not expected to throw, so an exception is logged and swallowed so the
    // remaining metrics still get closed.
    cacheMetrics.forEach(metric -> {
        try {
            metric.close();
        } catch (Exception e) {
            logger.warn("metrics close() method should not throw Exception", e);
        }
    });
}
// public
|
ApiKeyService
|
java
|
apache__camel
|
components/camel-kafka/src/main/java/org/apache/camel/component/kafka/consumer/support/resume/KafkaResumeAdapter.java
|
{
"start": 1418,
"end": 3684
}
|
/**
 * Resume adapter for Kafka consumers: replays cached topic/partition offsets by
 * seeking the consumer, and populates the cache from deserialized resume
 * records keyed as {@code "topic/partition"} with a {@link Long} offset value.
 */
class KafkaResumeAdapter implements ResumeAdapter, Deserializable, Cacheable {
    private static final Logger LOG = LoggerFactory.getLogger(KafkaResumeAdapter.class);

    private Consumer<?, ?> consumer;
    private ResumeCache<TopicPartition> resumeCache;

    // Seeks the consumer to the cached offset for one partition; always returns
    // true so the cache's forEach keeps iterating.
    private boolean resume(TopicPartition topicPartition, Object value) {
        consumer.seek(topicPartition, (Long) value);
        return true;
    }

    @Override
    public void resume() {
        resumeCache.forEach(this::resume);
    }

    @Override
    public boolean deserialize(ByteBuffer keyBuffer, ByteBuffer valueBuffer) {
        Object keyObj = deserializeKey(keyBuffer);
        Object valueObj = deserializeValue(valueBuffer);

        if (keyObj instanceof String key) {
            final String[] keyParts = key.split("/");
            // BUGFIX: the original condition was inverted (length != 2 took the
            // parsing branch), which discarded every well-formed
            // "topic/partition" key and threw ArrayIndexOutOfBoundsException
            // for keys without a separator. Also, String#split never returns
            // null, so the null check was dead.
            if (keyParts.length == 2) {
                String topic = keyParts[0];
                int partition = Integer.parseInt(keyParts[1]);

                if (valueObj instanceof Long offset) {
                    resumeCache.add(new TopicPartition(topic, partition), offset);
                } else {
                    LOG.warn("The type for the key '{}' is invalid: {}", key, valueObj);
                }
            } else {
                LOG.warn("Unable to deserialize key '{}' because it has in invalid format and it will be discarded",
                        key);
            }
        } else {
            LOG.warn("Unable to deserialize key '{}' because its type is invalid", keyObj);
        }

        // NOTE(review): returns false even after a successful cache add —
        // confirm the Deserializable contract intends this.
        return false;
    }

    @Override
    public boolean add(OffsetKey<?> key, Offset<?> offset) {
        Object keyObj = key.getValue();
        Long valueObject = offset.getValue(Long.class);

        if (keyObj instanceof TopicPartition topicPartition) {
            resumeCache.add(topicPartition, valueObject);
        }

        return true;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void setCache(ResumeCache<?> cache) {
        this.resumeCache = (ResumeCache<TopicPartition>) cache;
    }

    @Override
    public ResumeCache<?> getCache() {
        return resumeCache;
    }

    public void setConsumer(Consumer<?, ?> consumer) {
        this.consumer = consumer;
    }
}
|
KafkaResumeAdapter
|
java
|
apache__flink
|
flink-clients/src/main/java/org/apache/flink/client/program/ClusterClientProvider.java
|
{
"start": 972,
"end": 1216
}
|
/**
 * Factory for {@link ClusterClient} instances; each call produces a fresh
 * client that the caller owns and must close.
 *
 * @param <T> the cluster id type of the produced clients
 */
interface ____<T> {
    /**
     * Creates and returns a new {@link ClusterClient}. The returned client needs to be closed via
     * {@link ClusterClient#close()} after use.
     */
    ClusterClient<T> getClusterClient();
}
|
ClusterClientProvider
|
java
|
quarkusio__quarkus
|
extensions/scheduler/runtime/src/main/java/io/quarkus/scheduler/runtime/SimpleScheduler.java
|
{
"start": 22536,
"end": 25953
}
|
/**
 * Programmatically-built job definition: captures either a sync or an async
 * task plus trigger configuration, and registers the resulting job with the
 * scheduler when {@link #schedule()} is called.
 */
class ____ extends AbstractJobDefinition<SimpleJobDefinition> {

    private final SchedulerConfig schedulerConfig;

    SimpleJobDefinition(String id, SchedulerConfig schedulerConfig) {
        super(id);
        this.schedulerConfig = schedulerConfig;
    }

    @Override
    public Trigger schedule() {
        checkScheduled();
        // Exactly one task flavor must have been configured before scheduling.
        if (task == null && asyncTask == null) {
            throw new IllegalStateException("Either sync or async task must be set");
        }
        scheduled = true;
        ScheduledInvoker invoker;
        if (task != null) {
            // Use the default invoker to make sure the CDI request context is activated
            invoker = new DefaultInvoker() {
                @Override
                public CompletionStage<Void> invokeBean(ScheduledExecution execution) {
                    try {
                        task.accept(execution);
                        return CompletableFuture.completedStage(null);
                    } catch (Exception e) {
                        // Surface synchronous failures as a failed stage.
                        return CompletableFuture.failedStage(e);
                    }
                }

                @Override
                public boolean isRunningOnVirtualThread() {
                    return runOnVirtualThread;
                }
            };
        } else {
            invoker = new DefaultInvoker() {
                @Override
                public CompletionStage<Void> invokeBean(ScheduledExecution execution) {
                    try {
                        return asyncTask.apply(execution).subscribeAsCompletionStage();
                    } catch (Exception e) {
                        return CompletableFuture.failedStage(e);
                    }
                }

                @Override
                public boolean isBlocking() {
                    // The async variant yields a reactive result (see
                    // subscribeAsCompletionStage above), so it is non-blocking.
                    return false;
                }
            };
        }
        // NOTE: this local deliberately shadows the 'scheduled' flag set above.
        Scheduled scheduled = new SyntheticScheduled(identity, cron, every, 0, TimeUnit.MINUTES, delayed,
                overdueGracePeriod, concurrentExecution, skipPredicate, timeZone, implementation, executionMaxDelay);
        Optional<SimpleTrigger> trigger = createTrigger(identity, null, scheduled, defaultOverdueGracePeriod);
        if (trigger.isPresent()) {
            SimpleTrigger simpleTrigger = trigger.get();
            JobInstrumenter instrumenter = null;
            if (schedulerConfig.tracingEnabled() && jobInstrumenter.isResolvable()) {
                instrumenter = jobInstrumenter.get();
            }
            // Wrap the raw invoker with skip/concurrency/instrumentation logic.
            invoker = initInvoker(invoker, events, concurrentExecution, skipPredicate, instrumenter, vertx,
                    false, SchedulerUtils.parseExecutionMaxDelayAsMillis(scheduled), blockingExecutor);
            ScheduledTask scheduledTask = new ScheduledTask(trigger.get(), invoker, true);
            ScheduledTask existing = scheduledTasks.putIfAbsent(simpleTrigger.id, scheduledTask);
            if (existing != null) {
                throw new IllegalStateException("A job with this identity is already scheduled: " + identity);
            }
            return simpleTrigger;
        }
        return null;
    }
}
}
|
SimpleJobDefinition
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/monitor/collector/PushPendingTaskCountMetricsCollector.java
|
{
"start": 1192,
"end": 1939
}
|
class ____ {
private static final long DELAY_SECONDS = 2;
private static ScheduledExecutorService executorService = ExecutorFactory.newSingleScheduledExecutorService(r -> {
Thread thread = new Thread(r, "nacos.naming.monitor.PushPendingTaskCountMetricsCollector");
thread.setDaemon(true);
return thread;
});
@Autowired
public PushPendingTaskCountMetricsCollector(NamingSubscriberServiceV2Impl namingSubscriberServiceV2) {
executorService.scheduleWithFixedDelay(() -> {
MetricsMonitor.getPushPendingTaskCount().set(namingSubscriberServiceV2.getPushPendingTaskCount());
}, DELAY_SECONDS, DELAY_SECONDS, TimeUnit.SECONDS);
}
}
|
PushPendingTaskCountMetricsCollector
|
java
|
spring-projects__spring-boot
|
module/spring-boot-security/src/main/java/org/springframework/boot/security/autoconfigure/web/servlet/ServletWebSecurityAutoConfiguration.java
|
{
"start": 2654,
"end": 3559
}
|
class ____ {

    /**
     * Supplies the {@link PathPatternRequestMatcher.Builder} used by security
     * configuration, rooted at the dispatcher servlet's base path unless the
     * servlet is mapped to the root path.
     */
    @Bean
    @ConditionalOnMissingBean
    PathPatternRequestMatcher.Builder pathPatternRequestMatcherBuilder(
            DispatcherServletPath dispatcherServletPath) {
        PathPatternRequestMatcher.Builder defaultBuilder = PathPatternRequestMatcher.withDefaults();
        String servletPath = dispatcherServletPath.getPath();
        if (servletPath.equals("/")) {
            return defaultBuilder;
        }
        return defaultBuilder.basePath(servletPath);
    }

}
/**
* The default configuration for web security. It relies on Spring Security's
* content-negotiation strategy to determine what sort of authentication to use. If
* the user specifies their own {@link SecurityFilterChain} bean, this will back-off
* completely and the users should specify all the bits that they want to configure as
* part of the custom security configuration.
*/
@Configuration(proxyBeanMethods = false)
@ConditionalOnDefaultWebSecurity
static
|
PathPatternRequestMatcherBuilderConfiguration
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/support/AbstractBeanFactory.java
|
{
"start": 58723,
"end": 60418
}
|
class ____
* @param beanName the name of the bean (for error handling purposes)
* @param typesToMatch the types to match in case of internal type matching purposes
* (also signals that the returned {@code Class} will never be exposed to application code)
* @return the resolved bean class (or {@code null} if none)
* @throws CannotLoadBeanClassException if we failed to load the class
*/
protected @Nullable Class<?> resolveBeanClass(RootBeanDefinition mbd, String beanName, Class<?>... typesToMatch)
        throws CannotLoadBeanClassException {
    try {
        // Fast path: the class has already been resolved and cached on the
        // merged bean definition.
        if (mbd.hasBeanClass()) {
            return mbd.getBeanClass();
        }
        Class<?> beanClass = doResolveBeanClass(mbd, typesToMatch);
        // Validate method overrides only once the class got stored on the
        // definition (see the "Validation of method overrides" failure below).
        if (mbd.hasBeanClass()) {
            mbd.prepareMethodOverrides();
        }
        return beanClass;
    }
    catch (ClassNotFoundException ex) {
        throw new CannotLoadBeanClassException(mbd.getResourceDescription(), beanName, mbd.getBeanClassName(), ex);
    }
    catch (LinkageError err) {
        // Class loading can also fail with linkage problems, not just CNFE.
        throw new CannotLoadBeanClassException(mbd.getResourceDescription(), beanName, mbd.getBeanClassName(), err);
    }
    catch (BeanDefinitionValidationException ex) {
        throw new BeanDefinitionStoreException(mbd.getResourceDescription(),
                beanName, "Validation of method overrides failed", ex);
    }
}
private @Nullable Class<?> doResolveBeanClass(RootBeanDefinition mbd, Class<?>... typesToMatch)
throws ClassNotFoundException {
ClassLoader beanClassLoader = getBeanClassLoader();
ClassLoader dynamicLoader = beanClassLoader;
boolean freshResolve = false;
if (!ObjectUtils.isEmpty(typesToMatch)) {
// When just doing type checks (i.e. not creating an actual instance yet),
// use the specified temporary
|
for
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/Spr11202Tests.java
|
{
"start": 1339,
"end": 1840
}
|
class ____ {
@Test
void withImporter() {
    // Bootstraps via Wrapper — presumably the @Import(Selector.class)
    // configuration declared below; confirm against the class definitions.
    ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(Wrapper.class);
    assertThat(context.getBean("value")).isEqualTo("foo");
    context.close();
}

@Test
void withoutImporter() {
    // Registers Config directly, bypassing the importer path.
    ConfigurableApplicationContext context = new AnnotationConfigApplicationContext(Config.class);
    assertThat(context.getBean("value")).isEqualTo("foo");
    context.close();
}
@Configuration
@Import(Selector.class)
protected static
|
Spr11202Tests
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/AbstractUrlAssert.java
|
{
"start": 886,
"end": 18533
}
|
class ____<SELF extends AbstractUrlAssert<SELF>> extends AbstractAssertWithComparator<SELF, URL> {
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
protected Urls urls = Urls.instance();
/**
 * Creates the assertion for the given {@code URL}; {@code selfType} drives the
 * fluent self-typed returns used by subclasses.
 */
protected AbstractUrlAssert(final URL actual, final Class<?> selfType) {
    super(actual, selfType);
}
/**
* Verifies that the actual {@code URL} has the expected protocol.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("ftp://helloworld.org")).hasProtocol("ftp");
*
* // This assertion fails:
* assertThat(new URL("http://helloworld.org")).hasProtocol("ftp");</code></pre>
*
* @param expected the expected protocol of the actual {@code URL}.
* @return {@code this} assertion object.
* @throws AssertionError if the actual protocol is not equal to the expected protocol.
*/
public SELF hasProtocol(String expected) {
    // Comparison and failure reporting are delegated to the Urls internals.
    urls.assertHasProtocol(info, actual, expected);
    return myself;
}
/**
* Verifies that the actual {@code URL} has the expected path (which must not be null).
* <p>
* Examples:
* <pre><code class='java'> // These assertions succeed:
* assertThat(new URL("http://helloworld.org/pages")).hasPath("/pages");
* assertThat(new URL("http://www.helloworld.org")).hasPath("");
* // or preferably:
* assertThat(new URL("http://www.helloworld.org")).hasNoPath();
*
* // this assertion fails:
* assertThat(new URL("http://helloworld.org/pickme")).hasPath("/pages/");
*
* // this assertion throws an IllegalArgumentException:
* assertThat(new URL("http://helloworld.org/pages")).hasPath(null);</code></pre>
*
* @param expected the expected path of the actual {@code URL}.
* @return {@code this} assertion object.
* @throws AssertionError if the actual URL path is not equal to the expected path.
* @throws IllegalArgumentException if given path is null.
*/
public SELF hasPath(String expected) {
    // Delegates to Urls; a null expected path is rejected there (see @throws).
    urls.assertHasPath(info, actual, expected);
    return myself;
}
/**
* Verifies that the actual {@code URL} has no path.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("http://www.helloworld.org")).hasNoPath();
*
* // this assertion fails:
* assertThat(new URL("http://helloworld.org/france")).hasNoPath();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if {@code actual} has a path.
*/
public SELF hasNoPath() {
    // "No path" is asserted by expecting the empty string as the path value.
    urls.assertHasPath(info, actual, "");
    return myself;
}
/**
* Verifies that the actual {@code URL} has the expected port.
* <p>
* Examples:
* <pre><code class='java'> // These assertions succeed:
* assertThat(new URL("http://helloworld.org:8080")).hasPort(8080);
*
* // These assertions fail:
* assertThat(new URL("http://helloworld.org:8080")).hasPort(9876);
* assertThat(new URL("http://helloworld.org")).hasPort(8080);</code></pre>
*
* @param expected the expected port of the actual {@code URL}.
* @return {@code this} assertion object.
* @throws AssertionError if the actual port is not equal to the expected port.
*/
public SELF hasPort(int expected) {
    // Delegates the port comparison and failure reporting to Urls.
    urls.assertHasPort(info, actual, expected);
    return myself;
}
/**
* Verifies that the actual {@code URL} has no port.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("http://helloworld.org")).hasNoPort();
*
* // This assertion fails:
* assertThat(new URL("http://helloworld.org:8080")).hasNoPort();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if {@code actual} has a port.
*/
public SELF hasNoPort() {
    // -1 matches java.net.URL#getPort()'s "no port set" value.
    urls.assertHasPort(info, actual, -1);
    return myself;
}
/**
* Verifies that the actual {@code URL} has the expected host.
* <p>
* Examples:
* <pre><code class='java'> // These assertions succeed:
* assertThat(new URL("http://helloworld.org/pages")).hasHost("helloworld.org");
* assertThat(new URL("http://helloworld.org:8080")).hasHost("helloworld.org");
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org")).hasHost("helloworld.org");
* assertThat(new URL("http://www.helloworld.org:8080")).hasHost("helloworld.org");</code></pre>
*
* @param expected the expected host of the actual {@code URL}.
* @return {@code this} assertion object.
* @throws AssertionError if the actual host is not equal to the expected host.
*/
public SELF hasHost(String expected) {
    // Delegates the host comparison and failure reporting to Urls.
    urls.assertHasHost(info, actual, expected);
    return myself;
}
/**
* Verifies that the actual {@code URL} has no host.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("file:///home/user/Documents/hello-world.txt")).hasNoHost();
*
* // This assertion fails:
* assertThat(new URL("http://helloworld.org:8080/index.html")).hasNoHost();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if actual has a host.
* @since 3.22.0
*/
public SELF hasNoHost() {
    // Uses a dedicated "no host" assertion rather than a sentinel value.
    urls.assertHasNoHost(info, actual);
    return myself;
}
/**
* Verifies that the actual {@code URL} has the expected authority.
* <p>
* Examples:
* <pre><code class='java'> // These assertions succeed:
* assertThat(new URL("http://helloworld.org")).hasAuthority("helloworld.org");
* assertThat(new URL("http://helloworld.org:8080")).hasAuthority("helloworld.org:8080");
* assertThat(new URL("http://www.helloworld.org:8080/news")).hasAuthority("www.helloworld.org:8080");
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org:8080")).hasAuthority("www.helloworld.org");
* assertThat(new URL("http://www.helloworld.org")).hasAuthority("www.helloworld.org:8080");</code></pre>
*
* @param expected the expected authority of the actual {@code URL}.
* @return {@code this} assertion object.
* @throws AssertionError if the actual authority is not equal to the expected authority.
*/
public SELF hasAuthority(String expected) {
    // Delegates the authority (host[:port]) comparison to Urls.
    urls.assertHasAuthority(info, actual, expected);
    return myself;
}
/**
* Verifies that the actual {@code URL} has the expected query.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("http://www.helloworld.org/index.html?type=test")).hasQuery("type=test");
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org/index.html?type=test")).hasQuery("type=hello");
* assertThat(new URL("http://www.helloworld.org/index.html")).hasQuery("type=hello");</code></pre>
*
* @param expected the expected query of the actual {@code URL}.
* @return {@code this} assertion object.
* @throws AssertionError if the actual query is not equal to the expected query.
*/
public SELF hasQuery(String expected) {
    // Delegates the query-string comparison and failure reporting to Urls.
    urls.assertHasQuery(info, actual, expected);
    return myself;
}
/**
* Verifies that the actual {@code URL} has no query.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("http://www.helloworld.org/index.html")).hasNoQuery();
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org/index.html?type=test")).hasNoQuery();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if {@code actual} has a query.
*/
public SELF hasNoQuery() {
    // null is the expected query value for a URL without a query component.
    urls.assertHasQuery(info, actual, null);
    return myself;
}
/**
* Verifies that the actual {@code URL} has the expected anchor.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("http://www.helloworld.org/news.html#sport")).hasAnchor("sport");
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org/news.html#sport")).hasAnchor("war");
* assertThat(new URL("http://www.helloworld.org/news.html")).hasAnchor("sport");</code></pre>
*
* @param expected the expected anchor of the actual {@code URL}.
* @return {@code this} assertion object.
* @throws AssertionError if the actual anchor is not equal to the expected anchor.
*/
public SELF hasAnchor(String expected) {
    // Delegates the anchor (fragment) comparison to Urls.
    urls.assertHasAnchor(info, actual, expected);
    return myself;
}
/**
* Verifies that the actual {@code URL} has no anchor.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("http://www.helloworld.org/news.html")).hasNoAnchor();
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org/news.html#sport")).hasNoAnchor();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if {@code actual} has an anchor.
*/
public SELF hasNoAnchor() {
    // null is the expected anchor value for a URL without a fragment.
    urls.assertHasAnchor(info, actual, null);
    return myself;
}
/**
* Verifies that the actual {@code URL} has the expected userinfo.
* <p>
* Examples:
* <pre><code class='java'> // These assertions succeed:
* assertThat(new URL("http://test:pass@www.helloworld.org/index.html")).hasUserInfo("test:pass");
* assertThat(new URL("http://test@www.helloworld.org/index.html")).hasUserInfo("test");
* assertThat(new URL("http://:pass@www.helloworld.org/index.html")).hasUserInfo(":pass");
*
* // These assertions fail:
* assertThat(new URL("http://test:pass@www.helloworld.org/index.html")).hasUserInfo("test:fail");
* assertThat(new URL("http://www.helloworld.org/index.html")).hasUserInfo("test:pass");</code></pre>
*
* @param expected the expected userinfo of the actual {@code URL}.
* @return {@code this} assertion object.
* @throws AssertionError if the actual userinfo is not equal to the expected userinfo.
*/
public SELF hasUserInfo(String expected) {
    // Delegates the userinfo ("user:pass") comparison to Urls.
    urls.assertHasUserInfo(info, actual, expected);
    return myself;
}
/**
* Verifies that the actual {@code URL} has no userinfo.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("http://www.helloworld.org/index.html")).hasNoUserInfo();
*
* // This assertion fails:
* assertThat(new URL("http://test:pass@www.helloworld.org/index.html")).hasNoUserInfo();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if {@code actual} has some userinfo.
*/
public SELF hasNoUserInfo() {
    // null is the expected userinfo value for a URL with no credentials part.
    urls.assertHasUserInfo(info, actual, null);
    return myself;
}
/**
* Verifies that the actual {@code URL} has a parameter with the expected name.
* <p>
* The value of the parameter is not checked.
* <p>
* Examples:
* <pre><code class='java'> // These assertions succeed:
* assertThat(new URL("http://www.helloworld.org/index.html?happy")).hasParameter("happy");
* assertThat(new URL("http://www.helloworld.org/index.html?happy=very")).hasParameter("happy");
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org/index.html")).hasParameter("happy");
* assertThat(new URL("http://www.helloworld.org/index.html?sad=much")).hasParameter("happy");</code></pre>
*
* @param name the name of the parameter expected to be present.
* @return {@code this} assertion object.
* @throws AssertionError if the actual does not have the expected parameter.
* @throws IllegalArgumentException if the query string contains an invalid escape sequence.
*/
public SELF hasParameter(String name) {
    // Name-only overload: presence is checked, the value is ignored.
    urls.assertHasParameter(info, actual, name);
    return myself;
}
/**
* Verifies that the actual {@code URL} has a parameter with the expected name and value.
* <p>
* Use {@code null} to indicate an absent value (e.g. {@code foo&bar}) as opposed to an empty value (e.g.
* {@code foo=&bar=}).
* <p>
* Examples:
* <pre><code class='java'> // These assertions succeed:
* assertThat(new URL("http://www.helloworld.org/index.html?happy")).hasParameter("happy", null);
* assertThat(new URL("http://www.helloworld.org/index.html?happy=very")).hasParameter("happy", "very");
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org/index.html?sad")).hasParameter("sad", "much");
* assertThat(new URL("http://www.helloworld.org/index.html?sad=much")).hasParameter("sad", null);</code></pre>
*
* @param name the name of the parameter expected to be present.
* @param value the value of the parameter expected to be present.
* @return {@code this} assertion object.
* @throws AssertionError if the actual does not have the expected parameter.
* @throws IllegalArgumentException if the query string contains an invalid escape sequence.
*/
public SELF hasParameter(String name, String value) {
    // Name+value overload: a null value means "parameter present without a value".
    urls.assertHasParameter(info, actual, name, value);
    return myself;
}
/**
* Verifies that the actual {@code URL} does not have any parameters.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("http://www.helloworld.org/index.html")).hasNoParameters();
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org/index.html?sad")).hasNoParameters();
* assertThat(new URL("http://www.helloworld.org/index.html?sad=much")).hasNoParameters();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if {@code actual} has a parameter.
* @throws IllegalArgumentException if the query string contains an invalid escape sequence.
*
* @since 2.5.0 / 3.5.0
*/
public SELF hasNoParameters() {
urls.assertHasNoParameters(info, actual);
return myself;
}
/**
* Verifies that the actual {@code URL} does not have a parameter with the specified name.
* <p>
* The value of the parameter is not checked.
* <p>
* Examples:
* <pre><code class='java'> // This assertion succeeds:
* assertThat(new URL("http://www.helloworld.org/index.html")).hasNoParameter("happy");
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org/index.html?sad")).hasNoParameter("sad");
* assertThat(new URL("http://www.helloworld.org/index.html?sad=much")).hasNoParameter("sad");</code></pre>
*
* @param name the name of the parameter expected to be absent.
* @return {@code this} assertion object.
* @throws AssertionError if {@code actual} has the expected parameter.
* @throws IllegalArgumentException if the query string contains an invalid escape sequence.
*
* @since 2.5.0 / 3.5.0
*/
public SELF hasNoParameter(String name) {
urls.assertHasNoParameter(info, actual, name);
return myself;
}
/**
* Verifies that the actual {@code URL} does not have a parameter with the expected name and value.
* <p>
* Use {@code null} to indicate an absent value (e.g. {@code foo&bar}) as opposed to an empty value (e.g.
* {@code foo=&bar=}).
* <p>
* Examples:
* <pre><code class='java'> // These assertions succeed:
* assertThat(new URL("http://www.helloworld.org/index.html")).hasNoParameter("happy", "very");
* assertThat(new URL("http://www.helloworld.org/index.html?happy")).hasNoParameter("happy", "very");
* assertThat(new URL("http://www.helloworld.org/index.html?happy=very")).hasNoParameter("happy", null);
*
* // These assertions fail:
* assertThat(new URL("http://www.helloworld.org/index.html?sad")).hasNoParameter("sad", null);
* assertThat(new URL("http://www.helloworld.org/index.html?sad=much")).hasNoParameter("sad", "much");</code></pre>
*
* @param name the name of the parameter expected to be absent.
* @param value the value of the parameter expected to be absent.
* @return {@code this} assertion object.
* @throws AssertionError if {@code actual} has the expected parameter.
* @throws IllegalArgumentException if the query string contains an invalid escape sequence.
*
* @since 2.5.0 / 3.5.0
*/
public SELF hasNoParameter(String name, String value) {
urls.assertHasNoParameter(info, actual, name, value);
return myself;
}
/**
* Verifies that the actual {@code URL} is equivalent to the given one after <b>their parameters are sorted</b>.
* <p>
* Example:
* <pre><code class='java'> URL url = new URL("http://example.com?a=b&c=d");
*
* // this assertion succeeds ...
* assertThat(url).isEqualToWithSortedQueryParameters(new URL("http://example.com?c=d&a=b"))
* .isEqualToWithSortedQueryParameters(new URL("http://example.com?a=b&c=d"));
*
* // ... but this one fails as parameters do not match.
* assertThat(url).isEqualToWithSortedQueryParameters(new URL("http://example.com?a=b&c=e"));
*
* //... and this one fails as domains are different.
* assertThat(url).isEqualToWithSortedQueryParameters(new URL("http://example2.com?amp;a=b&c=d")); </code></pre>
*
* @param expected the expected {@code URL} to compare actual to.
* @return {@code this} assertion object.
* @throws NullPointerException if the given URL is {@code null}.
* @throws AssertionError if the actual {@code URL} is {@code null}.
* @throws AssertionError if the actual {@code URL} is not equivalent to the given one after their parameters are sorted.
*
* @since 3.16.0
*/
public SELF isEqualToWithSortedQueryParameters(URL expected) {
urls.assertIsEqualToWithSortedQueryParameters(info, actual, expected);
return myself;
}
}
|
AbstractUrlAssert
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/dataset/DataSetTestSedaTest.java
|
{
"start": 981,
"end": 1472
}
|
class ____ extends ContextTestSupport {
@Test
public void testSeda() throws Exception {
template.sendBody("seda:testme", "Hello World");
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("dataset-test:seda:testme?timeout=0");
}
});
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
}
|
DataSetTestSedaTest
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/overloading/ParameterTrie.java
|
{
"start": 2779,
"end": 3537
}
|
class ____.
*
* <p>The violation detection works like this: first a method signature (i.e. all the method
* parameters) is added to the trie. As long as it is possible the algorithm tries to find a
* parameter that can be followed using existing edges in the trie. When no such parameter is
* found, the trie extension procedure begins where remaining parameters are added to the trie in
* order in which they appear in the initial list. This whole procedure (a path that was followed
* during the process) determines a "correct", "consistent" ordering of the parameters. If the
* original input list of parameters has a different order that the one determined by the
* algorithm a violation is reported.
*/
private static
|
members
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/OrphanedFormatStringTest.java
|
{
"start": 4422,
"end": 4825
}
|
class ____ {
void test() {
// BUG: Diagnostic contains:
assertWithMessage("%s").that("").isNull();
}
}
""")
.doTest();
}
@Test
public void flogger() {
testHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.flogger.FluentLogger;
|
Test
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/JUnit4SetUpNotRunTest.java
|
{
"start": 6590,
"end": 6649
}
|
class ____ also extends TestCase. */
@RunWith(JUnit4.class)
|
but
|
java
|
quarkusio__quarkus
|
extensions/panache/rest-data-panache/deployment/src/main/java/io/quarkus/rest/data/panache/deployment/properties/MethodProperties.java
|
{
"start": 143,
"end": 959
}
|
class ____ {
private final boolean exposed;
private final String path;
private final String[] rolesAllowed;
private final Collection<AnnotationInstance> methodAnnotations;
public MethodProperties(boolean exposed, String path, String[] rolesAllowed,
Collection<AnnotationInstance> methodAnnotations) {
this.exposed = exposed;
this.path = path;
this.rolesAllowed = rolesAllowed;
this.methodAnnotations = methodAnnotations;
}
public boolean isExposed() {
return exposed;
}
public String getPath() {
return path;
}
public String[] getRolesAllowed() {
return rolesAllowed;
}
public Collection<AnnotationInstance> getMethodAnnotations() {
return methodAnnotations;
}
}
|
MethodProperties
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/CustomGlobalVariableTest.java
|
{
"start": 1480,
"end": 3205
}
|
class ____ extends ContextTestSupport {
private MockEndpoint end;
private final String variableName = "foo";
private final String expectedVariableValue = "bar";
@Override
protected CamelContext createCamelContext() throws Exception {
CamelContext context = super.createCamelContext();
context.getRegistry().bind(GLOBAL_VARIABLE_REPOSITORY_ID, new MyGlobalRepo());
return context;
}
@Test
public void testSetExchangeVariableMidRoute() throws Exception {
assertNull(context.getVariable(variableName));
end.expectedMessageCount(1);
template.sendBody("direct:start", "<blah/>");
// make sure we got the message
assertMockEndpointsSatisfied();
// lets get the variable value
List<Exchange> exchanges = end.getExchanges();
Exchange exchange = exchanges.get(0);
String actualVariableValue = exchange.getVariable(variableName, String.class);
// should be stored on global so null
assertNull(actualVariableValue);
// should be stored as global variable
assertEquals("!" + expectedVariableValue + "!", context.getVariable(variableName));
}
@Override
@BeforeEach
public void setUp() throws Exception {
super.setUp();
end = getMockEndpoint("mock:end");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// stored as global variable
from("direct:start").setVariable("global:" + variableName).constant(expectedVariableValue).to("mock:end");
}
};
}
private static
|
CustomGlobalVariableTest
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/reflect/TypeUtils.java
|
{
"start": 14649,
"end": 15016
}
|
class ____ a parameterized type
if (midType instanceof Class<?>) {
return determineTypeArguments((Class<?>) midType, superParameterizedType);
}
final ParameterizedType midParameterizedType = (ParameterizedType) midType;
final Class<?> midClass = getRawType(midParameterizedType);
// get the type variables of the mid
|
or
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
|
{
"start": 211018,
"end": 213531
}
|
class ____ implements Runnable {
private Iterator<BlockInfo> toDeleteIterator = null;
private boolean isSleep;
private NameNodeMetrics metrics;
private void remove(long time) {
if (checkToDeleteIterator()) {
namesystem.writeLock(RwLockMode.BM);
try {
while (toDeleteIterator.hasNext()) {
removeBlock(toDeleteIterator.next());
metrics.decrPendingDeleteBlocksCount();
if (Time.monotonicNow() - time > deleteBlockLockTimeMs) {
isSleep = true;
break;
}
}
} finally {
namesystem.writeUnlock(RwLockMode.BM, "markedDeleteBlockScrubberThread");
}
}
}
private boolean checkToDeleteIterator() {
return toDeleteIterator != null && toDeleteIterator.hasNext();
}
@Override
public void run() {
LOG.info("Start MarkedDeleteBlockScrubber thread");
while (namesystem.isRunning() &&
!Thread.currentThread().isInterrupted()) {
if (!markedDeleteQueue.isEmpty() || checkToDeleteIterator()) {
try {
metrics = NameNode.getNameNodeMetrics();
metrics.setDeleteBlocksQueued(markedDeleteQueue.size());
isSleep = false;
long startTime = Time.monotonicNow();
remove(startTime);
while (!isSleep && !markedDeleteQueue.isEmpty() &&
!Thread.currentThread().isInterrupted()) {
List<BlockInfo> markedDeleteList = markedDeleteQueue.poll();
if (markedDeleteList != null) {
toDeleteIterator = markedDeleteList.listIterator();
}
remove(startTime);
}
} catch (Exception e){
LOG.warn("MarkedDeleteBlockScrubber encountered an exception" +
" during the block deletion process, " +
" the deletion of the block will retry in {} millisecond.",
deleteBlockUnlockIntervalTimeMs, e);
}
}
if (isSleep) {
LOG.debug("Clear markedDeleteQueue over {} millisecond to release the write lock",
deleteBlockLockTimeMs);
}
try {
Thread.sleep(deleteBlockUnlockIntervalTimeMs);
} catch (InterruptedException e) {
LOG.info("Stopping MarkedDeleteBlockScrubber.");
break;
}
}
}
}
/**
* Periodically calls computeBlockRecoveryWork().
*/
private
|
MarkedDeleteBlockScrubber
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/junit/jupiter/AbstractSoftAssertionsExtensionIntegrationTests.java
|
{
"start": 1620,
"end": 3821
}
|
class ____ {
@Test
final void test_instance_per_method() {
assertExecutionResults(getTestInstancePerMethodTestCase(), false);
}
@Test
void test_instance_per_class() {
assertExecutionResults(getTestInstancePerClassTestCase(), false);
}
@Test
void test_instance_per_method_with_nested_tests() {
assertExecutionResults(getTestInstancePerMethodNestedTestCase(), true);
}
@Test
void test_instance_per_class_with_nested_tests() {
assertExecutionResults(getTestInstancePerClassNestedTestCase(), true);
}
protected abstract Class<?> getTestInstancePerMethodTestCase();
protected abstract Class<?> getTestInstancePerClassTestCase();
protected abstract Class<?> getTestInstancePerMethodNestedTestCase();
protected abstract Class<?> getTestInstancePerClassNestedTestCase();
private void assertExecutionResults(Class<?> testClass, boolean nested) {
EngineTestKit.engine("junit-jupiter")
.selectors(selectClass(testClass))
.configurationParameter("junit.jupiter.conditions.deactivate", "*")
.execute().testEvents()
.assertStatistics(stats -> stats.started(nested ? 8 : 4).succeeded(nested ? 4 : 2).failed(nested ? 4 : 2))
.failed()
// @format:off
.assertThatEvents().haveExactly(nested ? 2 : 1,
event(test("multipleFailures"),
finishedWithFailure(instanceOf(AssertJMultipleFailuresError.class),
message(msg -> msg.contains("Multiple Failures (2 failures)")))))
.haveExactly(nested ? 2 : 1,
event(test("parameterizedTest"),
finishedWithFailure(instanceOf(AssertJMultipleFailuresError.class),
message(msg -> msg.contains("Multiple Failures (1 failure)")))));
// @format:on
}
}
|
AbstractSoftAssertionsExtensionIntegrationTests
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-resource-server/src/main/java/org/springframework/security/oauth2/server/resource/authentication/JwtAuthenticationToken.java
|
{
"start": 1312,
"end": 3002
}
|
class ____ extends AbstractOAuth2TokenAuthenticationToken<Jwt> {
private static final long serialVersionUID = 620L;
private final String name;
/**
* Constructs a {@code JwtAuthenticationToken} using the provided parameters.
* @param jwt the JWT
*/
public JwtAuthenticationToken(Jwt jwt) {
super(jwt);
this.name = jwt.getSubject();
}
/**
* Constructs a {@code JwtAuthenticationToken} using the provided parameters.
* @param jwt the JWT
* @param authorities the authorities assigned to the JWT
*/
public JwtAuthenticationToken(Jwt jwt, Collection<? extends GrantedAuthority> authorities) {
super(jwt, authorities);
this.setAuthenticated(true);
this.name = jwt.getSubject();
}
/**
* Constructs a {@code JwtAuthenticationToken} using the provided parameters.
* @param jwt the JWT
* @param authorities the authorities assigned to the JWT
* @param name the principal name
*/
public JwtAuthenticationToken(Jwt jwt, Collection<? extends GrantedAuthority> authorities, String name) {
super(jwt, authorities);
this.setAuthenticated(true);
this.name = name;
}
protected JwtAuthenticationToken(Builder<?> builder) {
super(builder);
this.name = builder.name;
}
@Override
public Map<String, Object> getTokenAttributes() {
return this.getToken().getClaims();
}
/**
* The principal name which is, by default, the {@link Jwt}'s subject
*/
@Override
public String getName() {
return this.name;
}
@Override
public Builder<?> toBuilder() {
return new Builder<>(this);
}
/**
* A builder for {@link JwtAuthenticationToken} instances
*
* @since 7.0
* @see Authentication.Builder
*/
public static
|
JwtAuthenticationToken
|
java
|
netty__netty
|
handler/src/main/java/io/netty/handler/traffic/ChannelTrafficShapingHandler.java
|
{
"start": 3379,
"end": 6901
}
|
class ____ extends AbstractTrafficShapingHandler {
private final ArrayDeque<ToSend> messagesQueue = new ArrayDeque<ToSend>();
private long queueSize;
/**
* Create a new instance.
*
* @param writeLimit
* 0 or a limit in bytes/s
* @param readLimit
* 0 or a limit in bytes/s
* @param checkInterval
* The delay between two computations of performances for
* channels or 0 if no stats are to be computed.
* @param maxTime
* The maximum delay to wait in case of traffic excess.
*/
public ChannelTrafficShapingHandler(long writeLimit, long readLimit,
long checkInterval, long maxTime) {
super(writeLimit, readLimit, checkInterval, maxTime);
}
/**
* Create a new instance using default
* max time as delay allowed value of 15000 ms.
*
* @param writeLimit
* 0 or a limit in bytes/s
* @param readLimit
* 0 or a limit in bytes/s
* @param checkInterval
* The delay between two computations of performances for
* channels or 0 if no stats are to be computed.
*/
public ChannelTrafficShapingHandler(long writeLimit,
long readLimit, long checkInterval) {
super(writeLimit, readLimit, checkInterval);
}
/**
* Create a new instance using default Check Interval value of 1000 ms and
* max time as delay allowed value of 15000 ms.
*
* @param writeLimit
* 0 or a limit in bytes/s
* @param readLimit
* 0 or a limit in bytes/s
*/
public ChannelTrafficShapingHandler(long writeLimit,
long readLimit) {
super(writeLimit, readLimit);
}
/**
* Create a new instance using
* default max time as delay allowed value of 15000 ms and no limit.
*
* @param checkInterval
* The delay between two computations of performances for
* channels or 0 if no stats are to be computed.
*/
public ChannelTrafficShapingHandler(long checkInterval) {
super(checkInterval);
}
@Override
public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
TrafficCounter trafficCounter = new TrafficCounter(this, ctx.executor(), "ChannelTC" +
ctx.channel().hashCode(), checkInterval);
setTrafficCounter(trafficCounter);
trafficCounter.start();
super.handlerAdded(ctx);
}
@Override
public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
trafficCounter.stop();
// write order control
synchronized (this) {
if (ctx.channel().isActive()) {
for (ToSend toSend : messagesQueue) {
long size = calculateSize(toSend.toSend);
trafficCounter.bytesRealWriteFlowControl(size);
queueSize -= size;
ctx.write(toSend.toSend, toSend.promise);
}
} else {
for (ToSend toSend : messagesQueue) {
if (toSend.toSend instanceof ByteBuf) {
((ByteBuf) toSend.toSend).release();
}
}
}
messagesQueue.clear();
}
releaseWriteSuspended(ctx);
releaseReadSuspended(ctx);
super.handlerRemoved(ctx);
}
private static final
|
ChannelTrafficShapingHandler
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/test/java/org/springframework/boot/context/properties/bind/handler/IgnoreTopLevelConverterNotFoundBindHandlerTests.java
|
{
"start": 3220,
"end": 3562
}
|
class ____ {
private int foo;
private @Nullable Map<String, String> map;
int getFoo() {
return this.foo;
}
void setFoo(int foo) {
throw new IllegalStateException();
}
@Nullable Map<String, String> getMap() {
return this.map;
}
void setMap(@Nullable Map<String, String> map) {
this.map = map;
}
}
}
|
Example
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
|
{
"start": 7844,
"end": 7946
}
|
interface ____ "default" is specified
* @throws UnknownHostException
* If the given
|
name
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-jetty/src/main/java/smoketest/jetty/SampleJettyApplication.java
|
{
"start": 803,
"end": 944
}
|
class ____ {
public static void main(String[] args) {
SpringApplication.run(SampleJettyApplication.class, args);
}
}
|
SampleJettyApplication
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.