language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/SortedNumericDocValuesSyntheticFieldLoader.java
|
{
"start": 926,
"end": 4280
}
|
class ____ implements SourceLoader.SyntheticFieldLoader {
private final String name;
private final String simpleName;
/**
* Optionally loads malformed values from stored fields.
*/
private final IgnoreMalformedStoredValues ignoreMalformedValues;
private Values values = NO_VALUES;
/**
* Build a loader from doc values and, optionally, a stored field.
* @param name the name of the field to load from doc values
* @param simpleName the name to give the field in the rendered {@code _source}
* @param loadIgnoreMalformedValues should we load values skipped by {@code ignore_malformed}
*/
protected SortedNumericDocValuesSyntheticFieldLoader(String name, String simpleName, boolean loadIgnoreMalformedValues) {
this.name = name;
this.simpleName = simpleName;
this.ignoreMalformedValues = loadIgnoreMalformedValues
? IgnoreMalformedStoredValues.stored(name)
: IgnoreMalformedStoredValues.empty();
}
protected abstract void writeValue(XContentBuilder b, long value) throws IOException;
@Override
public Stream<Map.Entry<String, StoredFieldLoader>> storedFieldLoaders() {
return ignoreMalformedValues.storedFieldLoaders();
}
@Override
public DocValuesLoader docValuesLoader(LeafReader reader, int[] docIdsInLeaf) throws IOException {
SortedNumericDocValues dv = docValuesOrNull(reader, name);
if (dv == null) {
values = NO_VALUES;
return null;
}
if (docIdsInLeaf != null && docIdsInLeaf.length > 1) {
/*
* The singleton optimization is mostly about looking up all
* values for the field at once. If there's just a single
* document then it's just extra overhead.
*/
NumericDocValues single = DocValues.unwrapSingleton(dv);
if (single != null) {
SingletonDocValuesLoader loader = buildSingletonDocValuesLoader(single, docIdsInLeaf);
values = loader == null ? NO_VALUES : loader;
return loader;
}
}
ImmediateDocValuesLoader loader = new ImmediateDocValuesLoader(dv);
values = loader;
return loader;
}
@Override
public boolean hasValue() {
return values.count() > 0 || ignoreMalformedValues.count() > 0;
}
@Override
public void write(XContentBuilder b) throws IOException {
switch (values.count() + ignoreMalformedValues.count()) {
case 0:
return;
case 1:
b.field(simpleName);
if (values.count() > 0) {
assert values.count() == 1;
assert ignoreMalformedValues.count() == 0;
values.write(b);
} else {
assert ignoreMalformedValues.count() == 1;
ignoreMalformedValues.write(b);
}
return;
default:
b.startArray(simpleName);
values.write(b);
ignoreMalformedValues.write(b);
b.endArray();
}
}
@Override
public void reset() {
ignoreMalformedValues.reset();
}
private
|
SortedNumericDocValuesSyntheticFieldLoader
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/script/TimeSeriesCounterTests.java
|
{
"start": 876,
"end": 19838
}
|
class ____ extends ESTestCase {
protected long now;
protected long customCounterResolution;
protected long customCounterDuration;
protected TimeProvider timeProvider = new TimeProvider();
protected TimeSeriesCounter tsc = new TimeSeriesCounter(timeProvider);
protected final Matcher<Long> fiveDelta = lessThan(tsc.fiveMinutes.resolution);
protected final Matcher<Long> fifteenDelta = lessThan(tsc.fifteenMinutes.resolution);
protected final Matcher<Long> twentyFourDelta = lessThan(tsc.twentyFourHours.resolution);
protected List<Long> events;
protected Counter counter;
@Override
@Before
public void setUp() throws Exception {
super.setUp();
now = 1635182590;
customCounterResolution = 45;
customCounterDuration = 900;
reset();
}
protected void reset() {
timeProvider = new TimeProvider();
events = new ArrayList<>();
tsc = new TimeSeriesCounter(timeProvider);
counter = new Counter(customCounterResolution, customCounterDuration);
}
public void testCounterNegativeResolution() {
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> new Counter(-20, 200));
assertEquals("resolution [-20] must be greater than zero", iae.getMessage());
}
public void testCounterNegativeDuration() {
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> new Counter(20, -200));
assertEquals("duration [-200] must be greater than zero", iae.getMessage());
}
public void testCounterIndivisibleResolution() {
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> new Counter(3, 101));
assertEquals("duration [101] must divisible by resolution [3]", iae.getMessage());
}
public void testNegativeIncrement() {
inc(-100);
assertEquals(1, timeSeries(0).fiveMinutes);
}
public void testNegativeSum() {
long t = 60;
// t += 24 * HOUR;
inc(t);
t += 2 * tsc.twentyFourHours.resolution;
inc(t);
TimeSeries ts = timeSeries(t);
assertEquals(2, ts.twentyFourHours);
}
public void testNegativeStart() {
long t = -1 * 48 * HOUR;
inc(t);
t += 2 * tsc.twentyFourHours.resolution;
inc(t);
TimeSeries ts = timeSeries(t);
assertEquals(2, ts.twentyFourHours);
}
public void testOnePerSecond() {
long time = now;
long t;
long nextAssertCheck = randomLongBetween(1, HOUR);
long twentyFive = 25 * HOUR;
for (int i = 0; i < twentyFive; i++) {
t = time + i;
inc(t);
if (i == nextAssertCheck) {
TimeSeries ts = timeSeries(t);
assertThat(five(t) - ts.fiveMinutes, fiveDelta);
assertThat(fifteen(t) - ts.fifteenMinutes, fifteenDelta);
assertThat(twentyFour(t) - ts.twentyFourHours, twentyFourDelta);
assertEquals(i + 1, tsc.count());
nextAssertCheck = Math.min(twentyFive, nextAssertCheck + randomLongBetween(HOUR, 3 * HOUR));
}
}
}
public void testCounterIncrementSameBucket() {
long resolution = 45;
long duration = 900;
counter.inc(now);
long count = randomLongBetween(resolution / 2, resolution * 2);
// this is the beginning of the current epoch
long start = (now / resolution) * resolution;
for (int i = 1; i < count; i++) {
counter.inc(start + randomLongBetween(0, resolution - 1));
}
assertEquals(count, counter.sum(start));
assertEquals(count, counter.sum(now));
long t = 0;
// Since we only incremented the first bucket, we should have access to that throughout duration
for (; t <= duration; t += resolution) {
assertEquals(count, counter.sum(start + t));
}
// Now we've gone past the end of the duration
assertEquals(0, counter.sum(start + t));
assertEquals(0, counter.sum(start + duration + resolution));
// The last second for which this counter is valid
assertEquals(count, counter.sum(start + duration + resolution - 1));
}
public void testFiveMinuteSameBucket() {
inc(now);
long resolution = tsc.fiveMinutes.resolution;
long duration = tsc.fiveMinutes.duration;
long count = randomLongBetween(1, resolution);
long start = (now / resolution) * resolution;
for (int i = 1; i < count; i++) {
inc(start + i);
}
assertEquals(count, tsc.count());
assertEquals(count, timeSeries(now).fiveMinutes);
long t = 0;
for (; t <= duration; t += resolution) {
assertEquals(count, timeSeries(start + t).fiveMinutes);
}
TimeSeries series = timeSeries(start + t);
assertEquals(0, series.fiveMinutes);
assertEquals(count, series.fifteenMinutes);
assertEquals(count, series.twentyFourHours);
series = timeSeries(start + duration + resolution);
assertEquals(0, series.fiveMinutes);
assertEquals(count, series.fifteenMinutes);
assertEquals(count, series.twentyFourHours);
assertEquals(count, timeSeries(start + duration + resolution - 1).fiveMinutes);
}
public void testFifteenMinuteSameBucket() {
inc(now);
long resolution = tsc.fifteenMinutes.resolution;
long duration = tsc.fifteenMinutes.duration;
long start = (now / resolution) * resolution;
long count = randomLongBetween(1, resolution);
for (int i = 1; i < count; i++) {
inc(start + i);
}
assertEquals(count, tsc.count());
assertEquals(count, timeSeries(now).fifteenMinutes);
long t = 0;
for (; t <= duration; t += resolution) {
assertEquals(count, timeSeries(start + t).fifteenMinutes);
}
TimeSeries series = timeSeries(start + t);
assertEquals(0, series.fiveMinutes);
assertEquals(0, series.fifteenMinutes);
assertEquals(count, series.twentyFourHours);
series = timeSeries(start + duration + resolution);
assertEquals(0, series.fiveMinutes);
assertEquals(0, series.fifteenMinutes);
assertEquals(count, series.twentyFourHours);
assertEquals(count, timeSeries(start + duration + resolution - 1).fifteenMinutes);
}
public void testTwentyFourHourSameBucket() {
inc(now);
long resolution = tsc.twentyFourHours.resolution;
long duration = tsc.twentyFourHours.duration;
long start = (now / resolution) * resolution;
long count = randomLongBetween(1, resolution);
for (int i = 1; i < count; i++) {
inc(start + i);
}
assertEquals(count, tsc.count());
assertEquals(count, timeSeries(now).twentyFourHours);
long t = 0;
for (; t <= duration; t += resolution) {
assertEquals(count, timeSeries(start + t).twentyFourHours);
}
TimeSeries series = timeSeries(start + t);
assertEquals(0, series.fiveMinutes);
assertEquals(0, series.fifteenMinutes);
assertEquals(0, series.twentyFourHours);
series = timeSeries(start + duration + resolution);
assertEquals(0, series.fiveMinutes);
assertEquals(0, series.fifteenMinutes);
assertEquals(0, series.twentyFourHours);
assertEquals(count, timeSeries(start + duration + resolution - 1).twentyFourHours);
}
public void testCounterIncrementBucket() {
long count = customCounterDuration / customCounterResolution;
for (int i = 0; i < count; i++) {
counter.inc(now + i * customCounterResolution);
}
assertEquals(count, counter.sum(now + customCounterDuration));
assertEquals(count - 1, counter.sum(now + customCounterDuration + customCounterResolution));
assertEquals(count - 2, counter.sum(now + customCounterDuration + (2 * customCounterResolution)));
counter.inc(now + customCounterDuration);
assertEquals(count, counter.sum(now + customCounterDuration + customCounterResolution));
}
public void testFiveMinuteIncrementBucket() {
int count = tsc.fiveMinutes.buckets.length;
long resolution = tsc.fiveMinutes.resolution;
long duration = tsc.fiveMinutes.duration;
for (int i = 0; i < count; i++) {
inc(now + i * resolution);
}
long t = now + duration;
TimeSeries ts = timeSeries(t);
assertEquals(count, ts.fiveMinutes);
assertEquals(count, ts.fifteenMinutes);
assertEquals(count, ts.twentyFourHours);
assertEquals(count, tsc.count());
t = now + duration + resolution;
ts = timeSeries(t);
assertEquals(count - 1, ts.fiveMinutes);
assertEquals(count, ts.fifteenMinutes);
assertEquals(count, ts.twentyFourHours);
long numRes = 2;
t = now + duration + (numRes * resolution);
ts = timeSeries(t);
assertEquals(count - numRes, ts.fiveMinutes);
assertEquals(count, ts.fifteenMinutes);
assertEquals(count, ts.twentyFourHours);
inc(now + duration);
ts = timeSeries(now + duration + resolution);
assertEquals(count, ts.fiveMinutes);
assertEquals(count + 1, ts.fifteenMinutes);
assertEquals(count + 1, ts.twentyFourHours);
assertEquals(count + 1, tsc.count());
}
public void testFifteenMinuteIncrementBucket() {
int count = tsc.fifteenMinutes.buckets.length;
long resolution = tsc.fifteenMinutes.resolution;
long duration = tsc.fifteenMinutes.duration;
for (int i = 0; i < count; i++) {
long t = now + i * resolution;
inc(t);
}
long t = now + duration;
TimeSeries ts = timeSeries(t);
assertEquals(five(t), ts.fiveMinutes);
assertEquals(count, ts.fifteenMinutes);
assertEquals(count, ts.twentyFourHours);
t = now + duration + resolution;
ts = timeSeries(t);
assertEquals(five(t), ts.fiveMinutes);
assertEquals(count - 1, ts.fifteenMinutes);
assertEquals(count, ts.twentyFourHours);
long numRes = 2;
t = now + duration + (numRes * resolution);
ts = timeSeries(t);
assertEquals(five(t), ts.fiveMinutes);
assertEquals(count - numRes, ts.fifteenMinutes);
assertEquals(count, ts.twentyFourHours);
inc(now + duration);
t = now + duration + resolution;
ts = timeSeries(t);
assertEquals(five(t), ts.fiveMinutes);
assertEquals(count, ts.fifteenMinutes);
assertEquals(count + 1, ts.twentyFourHours);
assertEquals(count + 1, tsc.count());
}
public void testTwentyFourHourIncrementBucket() {
int count = tsc.twentyFourHours.buckets.length;
long resolution = tsc.twentyFourHours.resolution;
long duration = tsc.twentyFourHours.duration;
for (int i = 0; i < count; i++) {
long t = now + i * resolution;
inc(t);
}
long t = now + duration;
TimeSeries ts = timeSeries(t);
assertEquals(five(t), ts.fiveMinutes);
assertEquals(fifteen(t), ts.fifteenMinutes);
assertEquals(count, ts.twentyFourHours);
t = now + duration + resolution;
ts = timeSeries(t);
assertEquals(five(t), ts.fiveMinutes);
assertEquals(0, ts.fifteenMinutes);
assertEquals(count - 1, ts.twentyFourHours);
long numRes = 2;
t = now + duration + (numRes * resolution);
ts = timeSeries(t);
assertEquals(0, ts.fiveMinutes);
assertEquals(0, ts.fifteenMinutes);
assertEquals(count - numRes, ts.twentyFourHours);
inc(now + duration);
t = now + duration + resolution;
ts = timeSeries(t);
assertEquals(0, ts.fiveMinutes);
assertEquals(1, ts.fifteenMinutes);
assertEquals(count, ts.twentyFourHours);
assertEquals(count + 1, tsc.count());
}
public void testCounterSkipBuckets() {
int count = (int) (customCounterDuration / customCounterResolution);
for (int skip = 1; skip <= count; skip++) {
reset();
int increments = 0;
for (int i = 0; (i * skip * customCounterResolution) < customCounterDuration; i++) {
counter.inc(now + (i * skip * customCounterResolution));
increments++;
}
assertEquals(increments, counter.sum(now + customCounterDuration));
}
}
public void testFiveMinuteSkipBucket() {
int count = tsc.fiveMinutes.buckets.length;
long resolution = tsc.fiveMinutes.resolution;
long duration = tsc.fiveMinutes.duration;
for (int skip = 1; skip <= count; skip++) {
tsc = new TimeSeriesCounter(timeProvider);
long increments = 0;
for (int i = 0; (i * skip * resolution) < duration; i++) {
inc(now + (i * skip * resolution));
increments++;
}
TimeSeries series = timeSeries(now + duration);
assertEquals(increments, series.fiveMinutes);
assertEquals(increments, series.fifteenMinutes);
assertEquals(increments, series.twentyFourHours);
assertEquals(increments, tsc.count());
}
}
public void testFifteenMinuteSkipBuckets() {
int count = tsc.fifteenMinutes.buckets.length;
long resolution = tsc.fifteenMinutes.resolution;
long duration = tsc.fifteenMinutes.duration;
for (int skip = 1; skip <= count; skip++) {
reset();
for (int i = 0; (i * skip * resolution) < duration; i++) {
inc(now + (i * skip * resolution));
}
TimeSeries ts = timeSeries(now + duration);
assertEquals(five(now + duration), ts.fiveMinutes);
assertEquals(events.size(), ts.fifteenMinutes);
assertEquals(events.size(), ts.twentyFourHours);
assertEquals(events.size(), tsc.count());
}
}
public void testTwentyFourHourSkipBuckets() {
int count = tsc.twentyFourHours.buckets.length;
long resolution = tsc.twentyFourHours.resolution;
long duration = tsc.twentyFourHours.duration;
for (int skip = 1; skip <= count; skip++) {
reset();
for (int i = 0; (i * skip * resolution) < duration; i++) {
inc(now + (i * skip * resolution));
}
TimeSeries ts = timeSeries(now + duration);
assertEquals(five(now + duration), ts.fiveMinutes);
assertEquals(events.size(), ts.twentyFourHours);
assertEquals(events.size(), tsc.count());
}
}
public void testCounterReset() {
long time = now;
for (int i = 0; i < 20; i++) {
long count = 0;
long withinBucket = randomIntBetween(1, (int) (customCounterResolution / 2));
time += customCounterResolution + (i * customCounterDuration);
long last = time;
for (int j = 0; j < withinBucket; j++) {
long bucketTime = (time / customCounterResolution) * customCounterResolution;
last = bucketTime + randomLongBetween(0, customCounterResolution - 1);
counter.inc(last);
count++;
}
assertEquals(count, counter.sum(last));
}
}
public void testFiveMinuteReset() {
long time = now;
long resolution = tsc.fiveMinutes.resolution;
long duration = tsc.fiveMinutes.duration;
for (int i = 0; i < 20; i++) {
long withinBucket = randomLongBetween(1, resolution);
time += resolution + (i * duration);
for (int j = 0; j < withinBucket; j++) {
inc(time + j);
}
TimeSeries ts = timeSeries(time);
assertThat(five(time) - ts.fiveMinutes, fiveDelta);
assertThat(fifteen(time) - ts.fifteenMinutes, fifteenDelta);
assertThat(twentyFour(time) - ts.twentyFourHours, twentyFourDelta);
assertEquals(events.size(), tsc.count());
}
}
public void testFifteenMinuteReset() {
long time = now;
long resolution = tsc.fifteenMinutes.resolution;
long duration = tsc.fifteenMinutes.duration;
for (int i = 0; i < 20; i++) {
long withinBucket = randomLongBetween(1, resolution);
time += resolution + (i * duration);
for (int j = 0; j < withinBucket; j++) {
inc(time + j);
}
TimeSeries ts = timeSeries(time);
assertThat(five(time) - ts.fiveMinutes, fiveDelta);
assertThat(fifteen(time) - ts.fifteenMinutes, fifteenDelta);
assertThat(twentyFour(time) - ts.twentyFourHours, twentyFourDelta);
assertEquals(events.size(), tsc.count());
}
}
public void testTwentyFourHourReset() {
long time = now;
long resolution = tsc.twentyFourHours.resolution;
long duration = tsc.twentyFourHours.duration;
for (int i = 0; i < 20; i++) {
long withinBucket = randomLongBetween(1, resolution);
time += resolution + (i * duration);
for (int j = 0; j < withinBucket; j++) {
inc(time + j);
}
TimeSeries ts = timeSeries(time);
assertThat(twentyFour(time) - ts.twentyFourHours, twentyFourDelta);
assertEquals(events.size(), tsc.count());
}
}
// Count the last five minutes of events before t
public long five(long t) {
return countLast(t, tsc.fiveMinutes, events);
}
// Count the last fifteen minutes of events before t
public long fifteen(long t) {
return countLast(t, tsc.fifteenMinutes, events);
}
// Count the last twenty-four hours of events before t
public long twentyFour(long t) {
return countLast(t, tsc.twentyFourHours, events);
}
// Count the last set of events that would be recorded by counter
public long countLast(long t, Counter counter, List<Long> events) {
long count = 0;
long after = ((t - counter.duration) / counter.resolution) * counter.resolution;
for (long event : events) {
if (event > after) {
count++;
}
}
return count;
}
protected void inc(long t) {
timeProvider.inc(t);
}
protected TimeSeries timeSeries(long t) {
return timeProvider.timeSeries(t);
}
|
TimeSeriesCounterTests
|
java
|
apache__flink
|
flink-metrics/flink-metrics-slf4j/src/main/java/org/apache/flink/traces/slf4j/Slf4jTraceReporter.java
|
{
"start": 1169,
"end": 1533
}
|
class ____ implements TraceReporter {
private static final Logger LOG = LoggerFactory.getLogger(Slf4jTraceReporter.class);
@Override
public void open(MetricConfig metricConfig) {}
@Override
public void close() {}
@Override
public void notifyOfAddedSpan(Span span) {
LOG.info("Reported span: {}", span);
}
}
|
Slf4jTraceReporter
|
java
|
apache__camel
|
components/camel-jte/src/test/java/org/apache/camel/component/jte/JteRefTest.java
|
{
"start": 1118,
"end": 2339
}
|
class ____ extends CamelTestSupport {
private static final String TEMP = """
@import org.apache.camel.component.jte.Model
@param Model model
Hello ${model.header("name")}. You ordered item ${model.exchangeProperty("item")} on ${model.strBody()}.
""";
@Test
public void testRef() {
Exchange exchange = template.request("direct:a", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setBody("Tuesday");
exchange.getIn().setHeader("name", "Christian");
exchange.setProperty("item", "8");
}
});
assertEquals("Hello Christian. You ordered item 8 on Tuesday.", exchange.getMessage().getBody());
assertEquals("Christian", exchange.getMessage().getHeader("name"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
context.getRegistry().bind("mytemp", TEMP);
from("direct:a").to(
"jte:ref:mytemp?allowContextMapAll=true");
}
};
}
}
|
JteRefTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/collection/spi/PersistentList.java
|
{
"start": 12042,
"end": 12354
}
|
class ____ extends AbstractValueDelayedOperation {
private final int index;
AbstractListValueDelayedOperation(Integer index, E addedValue, E orphan) {
super( addedValue, orphan );
this.index = index;
}
protected final int getIndex() {
return index;
}
}
final
|
AbstractListValueDelayedOperation
|
java
|
quarkusio__quarkus
|
integration-tests/gradle/src/main/resources/multi-module-included-build/app/src/main/java/org/acme/ExampleResource.java
|
{
"start": 227,
"end": 415
}
|
class ____ {
@Inject
LibService libService;
@GET
@Produces(MediaType.TEXT_PLAIN)
public String hello() {
return "foo " + libService.bar();
}
}
|
ExampleResource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/InExpressionCountLimitExceededTest.java
|
{
"start": 2940,
"end": 3327
}
|
class ____ {
@Id
Integer id;
String text;
public MyEntity() {
}
public MyEntity(Integer id, String text) {
this.id = id;
this.text = text;
}
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
}
}
|
MyEntity
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/common/io/FileSystemUtilsTests.java
|
{
"start": 1236,
"end": 5598
}
|
class ____ extends ESTestCase {
private Path src;
private Path dst;
private Path txtFile;
private byte[] expectedBytes;
@Before
public void copySourceFilesToTarget() throws IOException, URISyntaxException {
src = createTempDir();
dst = createTempDir();
Files.createDirectories(src);
Files.createDirectories(dst);
txtFile = src.resolve("text-file.txt");
try (ByteChannel byteChannel = Files.newByteChannel(txtFile, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)) {
expectedBytes = new byte[3];
expectedBytes[0] = randomByte();
expectedBytes[1] = randomByte();
expectedBytes[2] = randomByte();
byteChannel.write(ByteBuffer.wrap(expectedBytes));
}
}
public void testAppend() {
assertEquals(
FileSystemUtils.append(PathUtils.get("/foo/bar"), PathUtils.get("/hello/world/this_is/awesome"), 0),
PathUtils.get("/foo/bar/hello/world/this_is/awesome")
);
assertEquals(
FileSystemUtils.append(PathUtils.get("/foo/bar"), PathUtils.get("/hello/world/this_is/awesome"), 2),
PathUtils.get("/foo/bar/this_is/awesome")
);
assertEquals(
FileSystemUtils.append(PathUtils.get("/foo/bar"), PathUtils.get("/hello/world/this_is/awesome"), 1),
PathUtils.get("/foo/bar/world/this_is/awesome")
);
}
public void testIsHidden() {
for (String p : Arrays.asList(
"/",
"foo",
"/foo",
"foo.bar",
"/foo.bar",
"foo/bar",
"foo/./bar",
"foo/../bar",
"/foo/./bar",
"/foo/../bar"
)) {
Path path = PathUtils.get(p);
assertFalse(FileSystemUtils.isHidden(path));
}
for (String p : Arrays.asList(
".hidden",
".hidden.ext",
"/.hidden",
"/.hidden.ext",
"foo/.hidden",
"foo/.hidden.ext",
"/foo/.hidden",
"/foo/.hidden.ext",
".",
"..",
"foo/.",
"foo/.."
)) {
Path path = PathUtils.get(p);
assertTrue(FileSystemUtils.isHidden(path));
}
}
public void testOpenFileURLStream() throws IOException {
URL urlWithWrongProtocol = new URL("http://www.google.com");
try (InputStream is = FileSystemUtils.openFileURLStream(urlWithWrongProtocol)) {
fail("Should throw IllegalArgumentException due to invalid protocol");
} catch (IllegalArgumentException e) {
assertEquals("Invalid protocol [http], must be [file] or [jar]", e.getMessage());
}
URL urlWithHost = new URL("file", "localhost", txtFile.toString());
try (InputStream is = FileSystemUtils.openFileURLStream(urlWithHost)) {
fail("Should throw IllegalArgumentException due to host");
} catch (IllegalArgumentException e) {
assertEquals("URL cannot have host. Found: [localhost]", e.getMessage());
}
URL urlWithPort = new URL("file", "", 80, txtFile.toString());
try (InputStream is = FileSystemUtils.openFileURLStream(urlWithPort)) {
fail("Should throw IllegalArgumentException due to port");
} catch (IllegalArgumentException e) {
assertEquals("URL cannot have port. Found: [80]", e.getMessage());
}
URL validUrl = txtFile.toUri().toURL();
try (InputStream is = FileSystemUtils.openFileURLStream(validUrl)) {
byte[] actualBytes = new byte[3];
is.read(actualBytes);
assertArrayEquals(expectedBytes, actualBytes);
}
}
public void testIsDesktopServicesStoreFile() throws IOException {
final Path path = createTempDir();
final Path desktopServicesStore = path.resolve(".DS_Store");
Files.createFile(desktopServicesStore);
assertThat(FileSystemUtils.isDesktopServicesStore(desktopServicesStore), equalTo(Constants.MAC_OS_X));
Files.delete(desktopServicesStore);
Files.createDirectory(desktopServicesStore);
assertFalse(FileSystemUtils.isDesktopServicesStore(desktopServicesStore));
}
}
|
FileSystemUtilsTests
|
java
|
apache__kafka
|
group-coordinator/src/test/java/org/apache/kafka/coordinator/group/GroupCoordinatorConfigTest.java
|
{
"start": 15917,
"end": 23578
}
|
class ____.lang.Object is not an instance of org.apache.kafka.coordinator.group.api.assignor.ConsumerGroupPartitionAssignor",
assertThrows(KafkaException.class, () -> createConfig(configs)).getMessage());
configs.clear();
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, Object.class.getName());
assertEquals("java.lang.Object is not an instance of org.apache.kafka.coordinator.group.api.assignor.ConsumerGroupPartitionAssignor",
assertThrows(KafkaException.class, () -> createConfig(configs)).getMessage());
configs.clear();
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, "foo");
assertEquals("Class foo cannot be found",
assertThrows(KafkaException.class, () -> createConfig(configs)).getMessage());
configs.clear();
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MIGRATION_POLICY_CONFIG, "foobar");
assertEquals("Invalid value foobar for configuration group.consumer.migration.policy: String must be one of (case insensitive): DISABLED, DOWNGRADE, UPGRADE, BIDIRECTIONAL",
assertThrows(ConfigException.class, () -> createConfig(configs)).getMessage());
configs.clear();
configs.put(GroupCoordinatorConfig.OFFSETS_TOPIC_COMPRESSION_CODEC_CONFIG, -100);
assertEquals("Unknown compression type id: -100",
assertThrows(IllegalArgumentException.class, () -> createConfig(configs)).getMessage());
configs.clear();
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MIN_HEARTBEAT_INTERVAL_MS_CONFIG, 45000);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MAX_HEARTBEAT_INTERVAL_MS_CONFIG, 60000);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_HEARTBEAT_INTERVAL_MS_CONFIG, 50000);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_SESSION_TIMEOUT_MS_CONFIG, 50000);
assertEquals("group.consumer.heartbeat.interval.ms must be less than group.consumer.session.timeout.ms",
assertThrows(IllegalArgumentException.class, () -> createConfig(configs)).getMessage());
configs.clear();
configs.put(GroupCoordinatorConfig.OFFSET_COMMIT_TIMEOUT_MS_CONFIG, 5000);
configs.put(GroupCoordinatorConfig.SHARE_GROUP_INITIALIZE_RETRY_INTERVAL_MS_CONFIG, 1000);
assertEquals(5000, createConfig(configs).shareGroupInitializeRetryIntervalMs());
configs.clear();
configs.put(GroupCoordinatorConfig.STREAMS_GROUP_MIN_HEARTBEAT_INTERVAL_MS_CONFIG, 45000);
configs.put(GroupCoordinatorConfig.STREAMS_GROUP_MAX_HEARTBEAT_INTERVAL_MS_CONFIG, 60000);
configs.put(GroupCoordinatorConfig.STREAMS_GROUP_HEARTBEAT_INTERVAL_MS_CONFIG, 50000);
configs.put(GroupCoordinatorConfig.STREAMS_GROUP_SESSION_TIMEOUT_MS_CONFIG, 50000);
assertEquals("group.streams.heartbeat.interval.ms must be less than group.streams.session.timeout.ms",
assertThrows(IllegalArgumentException.class, () -> createConfig(configs)).getMessage());
configs.clear();
configs.put(GroupCoordinatorConfig.STREAMS_GROUP_INITIAL_REBALANCE_DELAY_MS_CONFIG, -1);
assertEquals("Invalid value -1 for configuration group.streams.initial.rebalance.delay.ms: Value must be at least 0",
assertThrows(ConfigException.class, () -> createConfig(configs)).getMessage());
}
@Test
public void testAppendLingerMs() {
GroupCoordinatorConfig config = createConfig(Map.of(GroupCoordinatorConfig.GROUP_COORDINATOR_APPEND_LINGER_MS_CONFIG, -1));
assertEquals(OptionalInt.empty(), config.appendLingerMs());
config = createConfig(Map.of(GroupCoordinatorConfig.GROUP_COORDINATOR_APPEND_LINGER_MS_CONFIG, 0));
assertEquals(OptionalInt.of(0), config.appendLingerMs());
config = createConfig(Map.of(GroupCoordinatorConfig.GROUP_COORDINATOR_APPEND_LINGER_MS_CONFIG, 5));
assertEquals(OptionalInt.of(5), config.appendLingerMs());
}
public static GroupCoordinatorConfig createGroupCoordinatorConfig(
int offsetMetadataMaxSize,
long offsetsRetentionCheckIntervalMs,
int offsetsRetentionMinutes
) {
Map<String, Object> configs = new HashMap<>();
configs.put(GroupCoordinatorConfig.GROUP_COORDINATOR_NUM_THREADS_CONFIG, 1);
configs.put(GroupCoordinatorConfig.GROUP_COORDINATOR_APPEND_LINGER_MS_CONFIG, 10);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_SESSION_TIMEOUT_MS_CONFIG, 45);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MIN_SESSION_TIMEOUT_MS_CONFIG, 45);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_HEARTBEAT_INTERVAL_MS_CONFIG, 5);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MIN_HEARTBEAT_INTERVAL_MS_CONFIG, 5);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MAX_SIZE_CONFIG, Integer.MAX_VALUE);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, List.of(RangeAssignor.class));
configs.put(GroupCoordinatorConfig.OFFSETS_TOPIC_SEGMENT_BYTES_CONFIG, 1000);
configs.put(GroupCoordinatorConfig.OFFSET_METADATA_MAX_SIZE_CONFIG, offsetMetadataMaxSize);
configs.put(GroupCoordinatorConfig.GROUP_MAX_SIZE_CONFIG, Integer.MAX_VALUE);
configs.put(GroupCoordinatorConfig.GROUP_INITIAL_REBALANCE_DELAY_MS_CONFIG, 3000);
configs.put(GroupCoordinatorConfig.GROUP_MIN_SESSION_TIMEOUT_MS_CONFIG, 120);
configs.put(GroupCoordinatorConfig.GROUP_MAX_SESSION_TIMEOUT_MS_CONFIG, 10 * 5 * 1000);
configs.put(GroupCoordinatorConfig.OFFSETS_RETENTION_CHECK_INTERVAL_MS_CONFIG, offsetsRetentionCheckIntervalMs);
configs.put(GroupCoordinatorConfig.OFFSETS_RETENTION_MINUTES_CONFIG, offsetsRetentionMinutes);
configs.put(GroupCoordinatorConfig.OFFSET_COMMIT_TIMEOUT_MS_CONFIG, 5000);
configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MIGRATION_POLICY_CONFIG, ConsumerGroupMigrationPolicy.DISABLED.name());
configs.put(GroupCoordinatorConfig.OFFSETS_TOPIC_COMPRESSION_CODEC_CONFIG, (int) CompressionType.NONE.id);
configs.put(GroupCoordinatorConfig.SHARE_GROUP_SESSION_TIMEOUT_MS_CONFIG, 45);
configs.put(GroupCoordinatorConfig.SHARE_GROUP_MIN_SESSION_TIMEOUT_MS_CONFIG, 45);
configs.put(GroupCoordinatorConfig.SHARE_GROUP_HEARTBEAT_INTERVAL_MS_CONFIG, 5);
configs.put(GroupCoordinatorConfig.SHARE_GROUP_MIN_HEARTBEAT_INTERVAL_MS_CONFIG, 5);
configs.put(GroupCoordinatorConfig.SHARE_GROUP_MAX_SIZE_CONFIG, 1000);
return createConfig(configs);
}
@Test
public void testStreamsGroupInitialRebalanceDelayDefaultValue() {
Map<String, Object> configs = new HashMap<>();
GroupCoordinatorConfig config = createConfig(configs);
assertEquals(3000, config.streamsGroupInitialRebalanceDelayMs());
assertEquals(GroupCoordinatorConfig.STREAMS_GROUP_INITIAL_REBALANCE_DELAY_MS_DEFAULT,
config.streamsGroupInitialRebalanceDelayMs());
}
@Test
public void testStreamsGroupInitialRebalanceDelayCustomValue() {
Map<String, Object> configs = new HashMap<>();
configs.put(GroupCoordinatorConfig.STREAMS_GROUP_INITIAL_REBALANCE_DELAY_MS_CONFIG, 7000);
GroupCoordinatorConfig config = createConfig(configs);
assertEquals(7000, config.streamsGroupInitialRebalanceDelayMs());
}
public static GroupCoordinatorConfig createConfig(Map<String, Object> configs) {
return new GroupCoordinatorConfig(new AbstractConfig(
GroupCoordinatorConfig.CONFIG_DEF,
configs,
false
));
}
}
|
java
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/condition/ConditionEvaluationReport.java
|
{
"start": 10144,
"end": 10354
}
|
class ____ implements Condition {
@Override
public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
throw new UnsupportedOperationException();
}
}
}
|
AncestorsMatchedCondition
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/propagation/TextMapPropagatorCustomizer.java
|
{
"start": 366,
"end": 462
}
|
interface ____ {
TextMapPropagator customize(Context context);
|
TextMapPropagatorCustomizer
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/core/dynamic/segment/AnnotationCommandSegmentFactoryUnitTests.java
|
{
"start": 662,
"end": 3588
}
|
class ____ {
private AnnotationCommandSegmentFactory factory = new AnnotationCommandSegmentFactory();
@Test
void notAnnotatedDotAsIs() {
CommandMethod commandMethod = DeclaredCommandMethod
.create(ReflectionUtils.findMethod(CommandMethods.class, "notAnnotated"));
CommandSegments commandSegments = factory.createCommandSegments(commandMethod);
assertThat(commandSegments).isEmpty();
assertThat(commandSegments.getCommandType().toString()).isEqualTo("not.Annotated");
}
@Test
void uppercaseDot() {
CommandMethod commandMethod = DeclaredCommandMethod
.create(ReflectionUtils.findMethod(CommandMethods.class, "upperCase"));
CommandSegments commandSegments = factory.createCommandSegments(commandMethod);
assertThat(commandSegments).isEmpty();
assertThat(commandSegments.getCommandType().toString()).isEqualTo("UPPER.CASE");
}
@Test
void methodNameAsIs() {
CommandMethod commandMethod = DeclaredCommandMethod
.create(ReflectionUtils.findMethod(CommandMethods.class, "methodName"));
CommandSegments commandSegments = factory.createCommandSegments(commandMethod);
assertThat(commandSegments).isEmpty();
assertThat(commandSegments.getCommandType().toString()).isEqualTo("methodName");
}
@Test
void splitAsIs() {
CommandMethod commandMethod = DeclaredCommandMethod
.create(ReflectionUtils.findMethod(CommandMethods.class, "clientSetname"));
CommandSegments commandSegments = factory.createCommandSegments(commandMethod);
assertThat(commandSegments).hasSize(1).extracting(CommandSegment::asString).contains("Setname");
assertThat(commandSegments.getCommandType().toString()).isEqualTo("client");
}
@Test
void commandAnnotation() {
CommandMethod commandMethod = DeclaredCommandMethod
.create(ReflectionUtils.findMethod(CommandMethods.class, "atCommand"));
CommandSegments commandSegments = factory.createCommandSegments(commandMethod);
assertThat(commandSegments).hasSize(1).extracting(CommandSegment::asString).contains("WORLD");
assertThat(commandSegments.getCommandType().toString()).isEqualTo("HELLO");
}
@Test
void splitDefault() {
CommandMethod commandMethod = DeclaredCommandMethod
.create(ReflectionUtils.findMethod(Defaulted.class, "clientSetname"));
CommandSegments commandSegments = factory.createCommandSegments(commandMethod);
assertThat(commandSegments).hasSize(1).extracting(CommandSegment::asString).contains("SETNAME");
assertThat(commandSegments.getCommandType().toString()).isEqualTo("CLIENT");
}
@CommandNaming(strategy = Strategy.DOT, letterCase = LetterCase.AS_IS)
private static
|
AnnotationCommandSegmentFactoryUnitTests
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/SessionWindowedDeserializer.java
|
{
"start": 1254,
"end": 1394
}
|
class ____<T> implements Deserializer<Windowed<T>> {
/**
* Default deserializer for the inner deserializer
|
SessionWindowedDeserializer
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/example/type/AnnotationTypeFilterTestsTypes.java
|
{
"start": 1493,
"end": 1592
}
|
class ____ {
}
@SuppressWarnings("unused")
public static
|
SomeClassMarkedWithNonInheritedAnnotation
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/graph/Vertex.java
|
{
"start": 972,
"end": 6258
}
|
class ____ implements ToXContentFragment {
private final String field;
private final String term;
private double weight;
private final int depth;
private final long bg;
private long fg;
private static final ParseField FIELD = new ParseField("field");
private static final ParseField TERM = new ParseField("term");
private static final ParseField WEIGHT = new ParseField("weight");
private static final ParseField DEPTH = new ParseField("depth");
private static final ParseField FG = new ParseField("fg");
private static final ParseField BG = new ParseField("bg");
public Vertex(String field, String term, double weight, int depth, long bg, long fg) {
super();
this.field = field;
this.term = term;
this.weight = weight;
this.depth = depth;
this.bg = bg;
this.fg = fg;
}
static Vertex readFrom(StreamInput in) throws IOException {
return new Vertex(in.readString(), in.readString(), in.readDouble(), in.readVInt(), in.readVLong(), in.readVLong());
}
void writeTo(StreamOutput out) throws IOException {
out.writeString(field);
out.writeString(term);
out.writeDouble(weight);
out.writeVInt(depth);
out.writeVLong(bg);
out.writeVLong(fg);
}
@Override
public int hashCode() {
return Objects.hash(field, term, weight, depth, bg, fg);
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
Vertex other = (Vertex) obj;
return depth == other.depth
&& weight == other.weight
&& bg == other.bg
&& fg == other.fg
&& Objects.equals(field, other.field)
&& Objects.equals(term, other.term);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
boolean returnDetailedInfo = params.paramAsBoolean(GraphExploreResponse.RETURN_DETAILED_INFO_PARAM, false);
builder.field(FIELD.getPreferredName(), field);
builder.field(TERM.getPreferredName(), term);
builder.field(WEIGHT.getPreferredName(), weight);
builder.field(DEPTH.getPreferredName(), depth);
if (returnDetailedInfo) {
builder.field(FG.getPreferredName(), fg);
builder.field(BG.getPreferredName(), bg);
}
return builder;
}
/**
* @return a {@link VertexId} object that uniquely identifies this Vertex
*/
public VertexId getId() {
return createId(field, term);
}
/**
* A convenience method for creating a {@link VertexId}
* @param field the field
* @param term the term
* @return a {@link VertexId} that can be used for looking up vertices
*/
public static VertexId createId(String field, String term) {
return new VertexId(field, term);
}
@Override
public String toString() {
return getId().toString();
}
public String getField() {
return field;
}
public String getTerm() {
return term;
}
/**
* The weight of a vertex is an accumulation of all of the {@link Connection}s
* that are linked to this {@link Vertex} as part of a graph exploration.
* It is used internally to identify the most interesting vertices to be returned.
* @return a measure of the {@link Vertex}'s relative importance.
*/
public double getWeight() {
return weight;
}
public void setWeight(final double weight) {
this.weight = weight;
}
// tag::noformat
/**
* If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default)
* this statistic is available.
* @return the number of documents in the index that contain this term (see bg_count in
* <a
* href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-significantterms-aggregation.html"
* >the significant_terms aggregation</a>)
*/
// end::noformat
public long getBg() {
return bg;
}
// tag::noformat
/**
* If the {@link GraphExploreRequest#useSignificance(boolean)} is true (the default)
* this statistic is available.
* Together with {@link #getBg()} these numbers are used to derive the significance of a term.
* @return the number of documents in the sample of best matching documents that contain this term (see fg_count in
* <a
* href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-bucket-significantterms-aggregation.html"
* >the significant_terms aggregation</a>)
*/
// end::noformat
public long getFg() {
return fg;
}
public void setFg(final long fg) {
this.fg = fg;
}
/**
* @return the sequence number in the series of hops where this Vertex term was first encountered
*/
public int getHopDepth() {
return depth;
}
/**
* An identifier (implements hashcode and equals) that represents a
* unique key for a {@link Vertex}
*/
public static
|
Vertex
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/TaskConfig.java
|
{
"start": 1247,
"end": 1429
}
|
class ____ extends AbstractConfig {
public static final String TASK_CLASS_CONFIG = "task.class";
private static final String TASK_CLASS_DOC =
"Name of the
|
TaskConfig
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlListQueriesAction.java
|
{
"start": 864,
"end": 1970
}
|
class ____ extends BaseRestHandler {
private static final Logger LOGGER = LogManager.getLogger(RestEsqlListQueriesAction.class);
@Override
public String getName() {
return "esql_list_queries";
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/_query/queries/{id}"), new Route(GET, "/_query/queries"));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
return restChannelConsumer(request, client);
}
private static RestChannelConsumer restChannelConsumer(RestRequest request, NodeClient client) {
LOGGER.debug("Beginning execution of ESQL list queries.");
String id = request.param("id");
var action = id != null ? EsqlGetQueryAction.INSTANCE : EsqlListQueriesAction.INSTANCE;
var actionRequest = id != null ? new EsqlGetQueryRequest(AsyncExecutionId.decode(id)) : new EsqlListQueriesRequest();
return channel -> client.execute(action, actionRequest, new RestToXContentListener<>(channel));
}
}
|
RestEsqlListQueriesAction
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/LifecycleContextBean.java
|
{
"start": 971,
"end": 1876
}
|
class ____ extends LifecycleBean implements ApplicationContextAware {
protected ApplicationContext owningContext;
@Override
public void setBeanFactory(BeanFactory beanFactory) {
super.setBeanFactory(beanFactory);
if (this.owningContext != null) {
throw new RuntimeException("Factory called setBeanFactory after setApplicationContext");
}
}
@Override
public void afterPropertiesSet() {
super.afterPropertiesSet();
if (this.owningContext == null) {
throw new RuntimeException("Factory didn't call setApplicationContext before afterPropertiesSet on lifecycle bean");
}
}
@Override
public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
if (this.owningFactory == null) {
throw new RuntimeException("Factory called setApplicationContext before setBeanFactory");
}
this.owningContext = applicationContext;
}
}
|
LifecycleContextBean
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/mixed/ObservableSwitchMapMaybe.java
|
{
"start": 1494,
"end": 2253
}
|
class ____<T, R> extends Observable<R> {
final Observable<T> source;
final Function<? super T, ? extends MaybeSource<? extends R>> mapper;
final boolean delayErrors;
public ObservableSwitchMapMaybe(Observable<T> source,
Function<? super T, ? extends MaybeSource<? extends R>> mapper,
boolean delayErrors) {
this.source = source;
this.mapper = mapper;
this.delayErrors = delayErrors;
}
@Override
protected void subscribeActual(Observer<? super R> observer) {
if (!ScalarXMapZHelper.tryAsMaybe(source, mapper, observer)) {
source.subscribe(new SwitchMapMaybeMainObserver<>(observer, mapper, delayErrors));
}
}
static final
|
ObservableSwitchMapMaybe
|
java
|
netty__netty
|
codec-http2/src/main/java/io/netty/handler/codec/http2/Http2Stream.java
|
{
"start": 781,
"end": 872
}
|
interface ____ {
/**
* The allowed states of an HTTP2 stream.
*/
|
Http2Stream
|
java
|
google__gson
|
gson/src/main/java/com/google/gson/annotations/Until.java
|
{
"start": 2533,
"end": 2771
}
|
interface ____ {
/**
* The value indicating a version number until this member or type should be included. The number
* is exclusive; annotated elements will be included if {@code gsonVersion < value}.
*/
double value();
}
|
Until
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/GenericTypeSerializationTest.java
|
{
"start": 7511,
"end": 7828
}
|
class ____ {
final String value;
StringStub(String value) {
this.value = value;
}
@JsonCreator(mode = JsonCreator.Mode.DELEGATING)
public static StringStub valueOf(String value) {
return new StringStub(value);
}
}
public static
|
StringStub
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/RestrictedApiCheckerTest.java
|
{
"start": 15864,
"end": 16399
}
|
class ____ {
void ctorRestricted() {
// BUG: Diagnostic contains: RestrictedApi
new User("kak", "Hunter2");
}
@Allowlist
void ctorAllowed(User user) {
new User("kak", "Hunter2");
}
void accessorAllowed(User user) {
user.password();
}
}
""")
.doTest();
}
// NOTE: @RestrictedApi cannot be applied to an entire record declaration
}
|
Testcase
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inlineme/InlinerTest.java
|
{
"start": 26795,
"end": 27345
}
|
class ____ {
public void doTest() {
Client client = Client.create();
}
}
""")
.doTest();
}
@Test
public void inlineUnvalidatedInlineMessage() {
CompilationTestHelper.newInstance(Inliner.class, getClass())
.addSourceLines(
"Client.java",
"package foo;",
"import com.google.errorprone.annotations.InlineMe;",
"import com.google.errorprone.annotations.InlineMeValidationDisabled;",
"public final
|
Caller
|
java
|
quarkusio__quarkus
|
devtools/cli-common/src/main/java/io/quarkus/cli/common/DebugOptions.java
|
{
"start": 222,
"end": 2378
}
|
enum ____ {
connect,
listen
}
@CommandLine.Option(order = 7, names = {
"--no-debug" }, description = "Toggle debug mode. Enabled by default.", negatable = true)
public boolean debug = true;
@CommandLine.Option(order = 8, names = {
"--debug-host" }, description = "Debug host, e.g. localhost or 0.0.0.0", defaultValue = LOCALHOST)
public String host = LOCALHOST;
@CommandLine.Option(order = 9, names = {
"--debug-mode" }, description = "Valid values: ${COMPLETION-CANDIDATES}.%nEither connect to or listen on <host>:<port>.", defaultValue = "listen")
public DebugMode mode = DebugMode.listen;
@CommandLine.Option(order = 10, names = {
"--debug-port" }, description = "Debug port (must be a number > 0).", defaultValue = "" + DEFAULT_PORT)
public int port = DEFAULT_PORT;
@CommandLine.Option(order = 11, names = {
"--suspend" }, description = "In listen mode, suspend until a debugger is attached. Disabled by default.", negatable = true)
public boolean suspend = false;
public String getJvmDebugParameter() {
return "-agentlib:jdwp=transport=dt_socket"
+ ",address=" + host + ":" + port
+ ",server=" + (mode == DebugMode.listen ? "y" : "n")
+ ",suspend=" + (suspend ? "y" : "n");
}
public void addDebugArguments(Collection<String> args, Collection<String> jvmArgs) {
if (debug) {
if (suspend) {
args.add("-Dsuspend");
}
if (!LOCALHOST.equals(host)) {
args.add("-DdebugHost=" + host);
}
if (mode == DebugMode.connect) {
args.add("-Ddebug=client");
}
if (port != DEFAULT_PORT) {
args.add("-DdebugPort=" + port);
}
} else {
args.add("-Ddebug=false");
}
}
@Override
public String toString() {
return "DebugOptions [debug=" + debug + ", mode=" + mode + ", host=" + host + ", port=" + port + ", suspend="
+ suspend + "]";
}
}
|
DebugMode
|
java
|
apache__camel
|
core/camel-management/src/test/java/org/apache/camel/management/DefaultManagementAgentMockTest.java
|
{
"start": 2111,
"end": 4227
}
|
class ____ {
@Test
public void testObjectNameModification() throws JMException, IOException {
MBeanServer mbeanServer = mock(MBeanServer.class);
ObjectInstance instance = mock(ObjectInstance.class);
try (ManagementAgent agent = new DefaultManagementAgent()) {
agent.setMBeanServer(mbeanServer);
Object object = "object";
ObjectName sourceObjectName = new ObjectName("domain", "key", "value");
ObjectName registeredObjectName = new ObjectName("domain", "key", "otherValue");
// Register MBean and return different ObjectName
when(mbeanServer.isRegistered(sourceObjectName)).thenReturn(false);
when(mbeanServer.registerMBean(object, sourceObjectName)).thenReturn(instance);
when(instance.getObjectName()).thenReturn(registeredObjectName);
when(mbeanServer.isRegistered(registeredObjectName)).thenReturn(true);
agent.register(object, sourceObjectName);
assertTrue(agent.isRegistered(sourceObjectName));
reset(mbeanServer, instance);
// ... and unregister it again
when(mbeanServer.isRegistered(registeredObjectName)).thenReturn(true);
mbeanServer.unregisterMBean(registeredObjectName);
when(mbeanServer.isRegistered(sourceObjectName)).thenReturn(false);
agent.unregister(sourceObjectName);
assertFalse(agent.isRegistered(sourceObjectName));
}
}
@ParameterizedTest
@ValueSource(strings = { "true", "false" })
public void testShouldUseHostIPAddress(String flag) throws IOException {
System.setProperty(JmxSystemPropertyKeys.USE_HOST_IP_ADDRESS, flag);
CamelContext ctx = new DefaultCamelContext();
try (ManagementAgent agent = new DefaultManagementAgent(ctx)) {
agent.start();
assertEquals(Boolean.parseBoolean(flag), agent.getUseHostIPAddress());
} finally {
System.clearProperty(JmxSystemPropertyKeys.USE_HOST_IP_ADDRESS);
}
}
}
|
DefaultManagementAgentMockTest
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/domain/blog/immutable/ImmutableAuthor.java
|
{
"start": 754,
"end": 1852
}
|
class ____ {
private final int id;
private final String username;
private final String password;
private final String email;
private final String bio;
private final Section favouriteSection;
public ImmutableAuthor(int id, String username, String password, String email, String bio, Section section) {
this.id = id;
this.username = username;
this.password = password;
this.email = email;
this.bio = bio;
this.favouriteSection = section;
}
public int getId() {
return id;
}
public String getUsername() {
return username;
}
public String getPassword() {
return password;
}
public String getEmail() {
return email;
}
public String getBio() {
return bio;
}
public Section getFavouriteSection() {
return favouriteSection;
}
@Override
public String toString() {
return "ImmutableAuthor{" + "id=" + id + ", username='" + username + '\'' + ", password='" + password + '\''
+ ", email='" + email + '\'' + ", bio='" + bio + '\'' + ", favouriteSection=" + favouriteSection + '}';
}
}
|
ImmutableAuthor
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/enricher/EnricherAggregateOnExceptionTest.java
|
{
"start": 3667,
"end": 4168
}
|
class ____ implements AggregationStrategy {
@Override
public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
if (newExchange.getException() != null) {
oldExchange.getIn().setBody(newExchange.getException().getMessage());
return oldExchange;
}
// replace body
oldExchange.getIn().setBody(newExchange.getIn().getBody());
return oldExchange;
}
}
}
|
MyAggregationStrategy
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/dynamic/DynMethods.java
|
{
"start": 15844,
"end": 16461
}
|
class ____ incompatible
* @throws NoSuchMethodException if no implementation was found
*/
public BoundMethod buildChecked(Object receiver) throws NoSuchMethodException {
return buildChecked().bind(receiver);
}
/**
* Returns the first valid implementation as a BoundMethod or throws a
* RuntimeError if there is none.
* @param receiver an Object to receive the method invocation
* @return a {@link BoundMethod} with a valid implementation and receiver
* @throws IllegalStateException if the method is static
* @throws IllegalArgumentException if the receiver's
|
is
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/ChannelInjectionTest.java
|
{
"start": 1645,
"end": 2176
}
|
class ____ {
@GrpcClient("hello-service")
Channel channel;
public String invoke(String s) {
return GreeterGrpc.newBlockingStub(channel)
.sayHello(HelloRequest.newBuilder().setName(s).build())
.getMessage();
}
public String invokeWeird(String s) {
return GreeterGrpc.newBlockingStub(channel)
.wEIRD(HelloRequest.newBuilder().setName(s).build())
.getMessage();
}
}
}
|
MyConsumer
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-plugin-test/src/main/java/org/apache/camel/dsl/jbang/core/commands/test/TestPluginExporter.java
|
{
"start": 2144,
"end": 9424
}
|
class ____ implements PluginExporter {
public static final Pattern NAME_SEPARATOR_PATTERN = Pattern.compile("[-._]([a-z])");
@Override
public boolean isEnabled() {
// assume tests are located in a subdirectory, only perform when this directory is present
return Files.exists(Path.of(".").resolve("test"));
}
@Override
public Properties getBuildProperties() {
Properties props = new Properties();
if (isEnabled()) {
props.setProperty("citrus.version", CitrusVersion.version());
}
return props;
}
@Override
public Set<String> getDependencies(RuntimeType runtime) {
Set<String> deps = new HashSet<>();
if (!isEnabled()) {
return deps;
}
// add default Citrus dependencies as defined in Citrus JBang
deps.add(asDependency("citrus-base"));
deps.add(asDependency("citrus-junit5"));
deps.add(asDependency("citrus-http"));
deps.add(asDependency("citrus-yaml"));
deps.add(asDependency("citrus-xml"));
deps.add(asDependency("citrus-groovy"));
deps.add(asDependency("citrus-validation-xml"));
deps.add(asDependency("citrus-validation-json"));
deps.add(asDependency("citrus-validation-yaml"));
Path testDir = Path.of(".").resolve("test");
if (Files.exists(testDir.resolve("jbang.properties"))) {
try (FileInputStream fis = new FileInputStream(testDir.resolve("jbang.properties").toFile())) {
Properties props = new Properties();
props.load(fis);
// read runtime dependencies from jbang-.properties
String[] dependencies = props.getOrDefault("run.deps", "").toString().split(",");
for (String dependency : dependencies) {
if (dependency.startsWith("org.citrusframework:")) {
// construct proper Citrus Maven GAV from just the artifact id
deps.add(asDependency(extractArtifactId(dependency)));
} else if (dependency.startsWith("org.apache.camel")) {
// remove version from GAV, because we generally use the Camel bom
String[] parts = dependency.split(":");
deps.add("mvn@test:%s:%s".formatted(parts[0], parts[1]));
} else {
// add as test scoped dependency
deps.add("mvn@test:" + dependency);
}
}
} catch (IOException e) {
// ignore IO error while reading jbang.properties
}
}
return deps;
}
@Override
public void addSourceFiles(Path buildDir, String packageName, Printer printer) throws Exception {
if (!isEnabled()) {
return;
}
Path srcTestSrcDir = buildDir.resolve("src/test/java");
Path srcTestResourcesDir = buildDir.resolve("src/test/resources");
Files.createDirectories(srcTestSrcDir);
Files.createDirectories(srcTestResourcesDir);
Path testDir = Path.of(".").resolve("test");
Path testProfile = testDir.resolve("application.test.properties");
if (Files.exists(testProfile)) {
ExportHelper.safeCopy(testProfile, srcTestResourcesDir.resolve("application.test.properties"), true);
}
try (Stream<Path> paths = Files.list(testDir)) {
// Add all supported test sources
Set<Path> testSources = paths
.filter(path -> !path.getFileName().toString().startsWith("."))
.filter(path -> {
String ext = FileUtils.getFileExtension(path.getFileName().toString());
return CitrusSettings.getTestFileNamePattern(ext)
.stream()
.map(Pattern::compile)
.anyMatch(pattern -> pattern.matcher(path.getFileName().toString()).matches());
}).collect(Collectors.toSet());
for (Path testSource : testSources) {
String ext = FileUtils.getFileExtension(testSource.getFileName().toString());
if (ext.equals("java")) {
Path javaSource;
if (packageName != null) {
javaSource = srcTestSrcDir.resolve(packageName.replaceAll("\\.", "/") + "/" + testSource.getFileName());
} else {
javaSource = srcTestSrcDir.resolve(testSource.getFileName());
}
ExportHelper.safeCopy(new ByteArrayInputStream(
readTestSource(testSource).getBytes(StandardCharsets.UTF_8)), javaSource);
} else {
Path resource = srcTestResourcesDir.resolve(testSource.getFileName());
ExportHelper.safeCopy(new ByteArrayInputStream(
readTestSource(testSource).getBytes(StandardCharsets.UTF_8)), resource);
String javaClassName = getJavaClassName(FileUtils.getBaseName(testSource.getFileName().toString()));
Path javaSource;
if (packageName != null) {
javaSource = srcTestSrcDir.resolve(packageName.replaceAll("\\.", "/") + "/" + javaClassName + ".java");
} else {
javaSource = srcTestSrcDir.resolve(javaClassName + ".java");
}
try (InputStream is = TestPlugin.class.getClassLoader().getResourceAsStream("templates/junit-test.tmpl")) {
String context = IOHelper.loadText(is);
context = context.replaceAll("\\{\\{ \\.PackageDeclaration }}", getPackageDeclaration(packageName));
context = context.replaceAll("\\{\\{ \\.Type }}", ext);
context = context.replaceAll("\\{\\{ \\.Name }}", javaClassName);
context = context.replaceAll("\\{\\{ \\.MethodName }}", StringHelper.decapitalize(javaClassName));
context = context.replaceAll("\\{\\{ \\.ResourcePath }}", testSource.getFileName().toString());
ExportHelper.safeCopy(new ByteArrayInputStream(context.getBytes(StandardCharsets.UTF_8)), javaSource);
}
}
}
}
}
/**
* Read and process given test source. Apply common postprocessing steps on test source code. For instance makes
* sure that relative file paths get replaced with proper classpath resource paths.
*/
private String readTestSource(Path source) throws IOException {
String context = Files.readString(source, StandardCharsets.UTF_8);
context = context.replaceAll("\\.\\./", "camel/");
return context;
}
private String getPackageDeclaration(String packageName) {
if (packageName == null || packageName.isEmpty()) {
return "";
} else {
return "package %s;%n%n".formatted(packageName);
}
}
/**
* Get proper Java
|
TestPluginExporter
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableWindowTimed.java
|
{
"start": 16091,
"end": 16548
}
|
class ____ implements Runnable {
final WindowExactBoundedObserver<?> parent;
final long index;
WindowBoundaryRunnable(WindowExactBoundedObserver<?> parent, long index) {
this.parent = parent;
this.index = index;
}
@Override
public void run() {
parent.boundary(this);
}
}
}
static final
|
WindowBoundaryRunnable
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/chararrays/CharArrays_assertEmpty_Test.java
|
{
"start": 1471,
"end": 2210
}
|
class ____ extends CharArraysBaseTest {
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertEmpty(someInfo(), null))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_is_not_empty() {
AssertionInfo info = someInfo();
char[] actual = { 'a', 'b' };
Throwable error = catchThrowable(() -> arrays.assertEmpty(info, actual));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeEmpty(actual));
}
@Test
void should_pass_if_actual_is_empty() {
arrays.assertEmpty(someInfo(), emptyArray());
}
}
|
CharArrays_assertEmpty_Test
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NamedLikeContextualKeywordTest.java
|
{
"start": 4587,
"end": 4945
}
|
class ____ implements RegrettablyNamedInterface {
static Throwable foo;
public Test() {}
public void yield() {
foo = new NullPointerException("uh oh");
}
}
""")
.addSourceLines(
"RegrettablyNamedInterface.java",
"""
|
Test
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/TestEmptyClass.java
|
{
"start": 2194,
"end": 3168
}
|
class ____ through mix-ins
ObjectMapper m2 = jsonMapperBuilder()
.addMixIn(Empty.class, EmptyWithAnno.class)
.build();
assertEquals("{}", m2.writeValueAsString(new Empty()));
}
/**
* Alternative it is possible to use a feature to allow
* serializing empty classes, too
*/
@Test
public void testEmptyWithFeature() throws Exception
{
// should be disabled by default as of 3.x
assertFalse(MAPPER.isEnabled(SerializationFeature.FAIL_ON_EMPTY_BEANS));
assertEquals("{}",
MAPPER.writer()
.writeValueAsString(new Empty()));
}
@Test
public void testCustomNoEmpty() throws Exception
{
// first non-empty:
assertEquals("{\"value\":123}", MAPPER.writeValueAsString(new NonZeroWrapper(123)));
// then empty:
assertEquals("{}", MAPPER.writeValueAsString(new NonZeroWrapper(0)));
}
}
|
annotation
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/caching/PostalArea.java
|
{
"start": 260,
"end": 1028
}
|
enum ____ {
_78729( "78729", "North Austin", "Austin", State.TX );
private final String zipCode;
private final String name;
private final String cityName;
private final State state;
PostalArea(
String zipCode,
String name,
String cityName,
State state) {
this.zipCode = zipCode;
this.name = name;
this.cityName = cityName;
this.state = state;
}
public static PostalArea fromZipCode(String zipCode) {
if ( _78729.zipCode.equals( zipCode ) ) {
return _78729;
}
throw new IllegalArgumentException( "Unknown zip code" );
}
public String getZipCode() {
return zipCode;
}
public String getName() {
return name;
}
public String getCityName() {
return cityName;
}
public State getState() {
return state;
}
}
|
PostalArea
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/job/FailureLabelFilterParameter.java
|
{
"start": 1564,
"end": 3163
}
|
class ____ {
private final String key;
private final String value;
public FailureLabel(String key, String value) {
this.key = key;
this.value = value;
}
public String getKey() {
return key;
}
public String getValue() {
return value;
}
}
public FailureLabelFilterParameter() {
super(KEY, MessageParameterRequisiteness.OPTIONAL);
}
@Override
public List<FailureLabel> convertFromString(String values) throws ConversionException {
String[] splitValues = values.split(",");
Set<FailureLabel> result = new HashSet<>();
for (String value : splitValues) {
result.add(convertStringToValue(value));
}
return new ArrayList<>(result);
}
@Override
public FailureLabel convertStringToValue(String value) throws ConversionException {
String[] tokens = value.split(":");
if (tokens.length != 2) {
throw new ConversionException(
String.format("%s may be a `key:value` entry only (%s)", KEY, value));
}
return new FailureLabel(tokens[0], tokens[1]);
}
@Override
public String convertValueToString(FailureLabel value) {
return value.toString();
}
@Override
public String getDescription() {
return "Collection of string values working as a filter in the form of `key:value` pairs "
+ "allowing only exceptions with ALL of the specified failure labels to be returned.";
}
}
|
FailureLabel
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRetryPolicy.java
|
{
"start": 1127,
"end": 2587
}
|
class ____ {
/**
* The maximum number of retry attempts.
*/
private final int maxRetryCount;
/**
* Retry Policy Abbreviation for logging purpose.
*/
private final String retryPolicyAbbreviation;
protected AbfsRetryPolicy(final int maxRetryCount, final String retryPolicyAbbreviation) {
this.maxRetryCount = maxRetryCount;
this.retryPolicyAbbreviation = retryPolicyAbbreviation;
}
/**
* Returns if a request should be retried based on the retry count, current response,
* and the current strategy. The valid http status code lies in the range of 1xx-5xx.
* But an invalid status code might be set due to network or timeout kind of issues.
* Such invalid status code also qualify for retry.
*
* @param retryCount The current retry attempt count.
* @param statusCode The status code of the response, or -1 for socket error.
* @return true if the request should be retried; false otherwise.
*/
public boolean shouldRetry(final int retryCount, final int statusCode) {
return retryCount < maxRetryCount
&& (statusCode < HTTP_CONTINUE
|| statusCode == HttpURLConnection.HTTP_CLIENT_TIMEOUT
|| (statusCode >= HttpURLConnection.HTTP_INTERNAL_ERROR
&& statusCode != HttpURLConnection.HTTP_NOT_IMPLEMENTED
&& statusCode != HttpURLConnection.HTTP_VERSION));
}
/**
* Returns backoff interval to be used for a particular retry count
* Child
|
AbfsRetryPolicy
|
java
|
apache__flink
|
flink-clients/src/main/java/org/apache/flink/client/program/PackagedProgramRetriever.java
|
{
"start": 1028,
"end": 1338
}
|
interface ____ {
/**
* Retrieve the {@link PackagedProgram}.
*
* @return the retrieved {@link PackagedProgram}.
* @throws FlinkException if the {@link PackagedProgram} could not be retrieved
*/
PackagedProgram getPackagedProgram() throws FlinkException;
}
|
PackagedProgramRetriever
|
java
|
spring-projects__spring-framework
|
spring-orm/src/test/java/org/springframework/orm/jpa/support/PersistenceInjectionTests.java
|
{
"start": 33723,
"end": 33872
}
|
class ____ {
@PersistenceContext(unitName = "Person")
private EntityManager em;
}
public static
|
DefaultPrivatePersistenceContextFieldNamedPerson
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/sagemaker/schema/SageMakerStreamSchema.java
|
{
"start": 1952,
"end": 7304
}
|
class ____ extends SageMakerSchema {
private final SageMakerStreamSchemaPayload payload;
public SageMakerStreamSchema(SageMakerStreamSchemaPayload payload) {
super(payload);
this.payload = payload;
}
public InvokeEndpointWithResponseStreamRequest streamRequest(SageMakerModel model, SageMakerInferenceRequest request) {
return streamRequest(model, () -> payload.requestBytes(model, request));
}
private InvokeEndpointWithResponseStreamRequest streamRequest(SageMakerModel model, CheckedSupplier<SdkBytes, Exception> body) {
try {
return createStreamRequest(model).accept(payload.accept(model))
.contentType(payload.contentType(model))
.body(body.get())
.build();
} catch (ElasticsearchStatusException e) {
throw e;
} catch (Exception e) {
throw new ElasticsearchStatusException(
"Failed to create SageMaker request for [%s]",
RestStatus.INTERNAL_SERVER_ERROR,
e,
model.getInferenceEntityId()
);
}
}
public InferenceServiceResults streamResponse(SageMakerModel model, SageMakerClient.SageMakerStream response) {
return new StreamingChatCompletionResults(streamResponse(model, response, payload::streamResponseBody, this::error));
}
private <T> Flow.Publisher<T> streamResponse(
SageMakerModel model,
SageMakerClient.SageMakerStream response,
CheckedBiFunction<SageMakerModel, SdkBytes, T, Exception> parseFunction,
BiFunction<SageMakerModel, Exception, Exception> errorFunction
) {
return downstream -> {
response.responseStream().subscribe(new Flow.Subscriber<>() {
private volatile Flow.Subscription upstream;
@Override
public void onSubscribe(Flow.Subscription subscription) {
this.upstream = subscription;
downstream.onSubscribe(subscription);
}
@Override
public void onNext(ResponseStream item) {
if (item.sdkEventType() == ResponseStream.EventType.PAYLOAD_PART) {
item.accept(InvokeEndpointWithResponseStreamResponseHandler.Visitor.builder().onPayloadPart(payloadPart -> {
try {
downstream.onNext(parseFunction.apply(model, payloadPart.bytes()));
} catch (Exception e) {
downstream.onError(errorFunction.apply(model, e));
}
}).build());
} else {
assert upstream != null : "upstream is unset";
upstream.request(1);
}
}
@Override
public void onError(Throwable throwable) {
if (throwable instanceof Exception e) {
downstream.onError(errorFunction.apply(model, e));
} else {
ExceptionsHelper.maybeError(throwable).ifPresent(ExceptionsHelper::maybeDieOnAnotherThread);
var e = new RuntimeException("Fatal while streaming SageMaker response for [" + model.getInferenceEntityId() + "]");
downstream.onError(errorFunction.apply(model, e));
}
}
@Override
public void onComplete() {
downstream.onComplete();
}
});
};
}
public InvokeEndpointWithResponseStreamRequest chatCompletionStreamRequest(SageMakerModel model, UnifiedCompletionRequest request) {
return streamRequest(model, () -> payload.chatCompletionRequestBytes(model, request));
}
public InferenceServiceResults chatCompletionStreamResponse(SageMakerModel model, SageMakerClient.SageMakerStream response) {
return new StreamingUnifiedChatCompletionResults(
streamResponse(model, response, payload::chatCompletionResponseBody, this::chatCompletionError)
);
}
public UnifiedChatCompletionException chatCompletionError(SageMakerModel model, Exception e) {
if (e instanceof UnifiedChatCompletionException ucce) {
return ucce;
}
var error = errorMessageAndStatus(model, e);
return new UnifiedChatCompletionException(error.v2(), error.v1(), "error", error.v2().name().toLowerCase(Locale.ROOT));
}
private InvokeEndpointWithResponseStreamRequest.Builder createStreamRequest(SageMakerModel model) {
var request = InvokeEndpointWithResponseStreamRequest.builder();
request.endpointName(model.endpointName());
model.customAttributes().ifPresent(request::customAttributes);
model.inferenceComponentName().ifPresent(request::inferenceComponentName);
model.inferenceIdForDataCapture().ifPresent(request::inferenceId);
model.sessionId().ifPresent(request::sessionId);
model.targetContainerHostname().ifPresent(request::targetContainerHostname);
model.targetVariant().ifPresent(request::targetVariant);
return request;
}
}
|
SageMakerStreamSchema
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Sqs2EndpointBuilderFactory.java
|
{
"start": 124793,
"end": 128124
}
|
class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final Sqs2HeaderNameBuilder INSTANCE = new Sqs2HeaderNameBuilder();
/**
* A map of the attributes requested in ReceiveMessage to their
* respective values.
*
* The option is a: {@code Map<MessageSystemAttributeName, String>}
* type.
*
* Group: consumer
*
* @return the name of the header {@code AwsSqsAttributes}.
*/
public String awsSqsAttributes() {
return "CamelAwsSqsAttributes";
}
/**
* The Amazon SQS message attributes.
*
* The option is a: {@code Map<String, MessageAttributeValue>} type.
*
* Group: consumer
*
* @return the name of the header {@code AwsSqsMessageAttributes}.
*/
public String awsSqsMessageAttributes() {
return "CamelAwsSqsMessageAttributes";
}
/**
* The MD5 checksum of the Amazon SQS message.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code AwsSqsMD5OfBody}.
*/
public String awsSqsMD5OfBody() {
return "CamelAwsSqsMD5OfBody";
}
/**
* The Amazon SQS message ID.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code AwsSqsMessageId}.
*/
public String awsSqsMessageId() {
return "CamelAwsSqsMessageId";
}
/**
* The Amazon SQS message receipt handle.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code AwsSqsReceiptHandle}.
*/
public String awsSqsReceiptHandle() {
return "CamelAwsSqsReceiptHandle";
}
/**
* The delay seconds that the Amazon SQS message can be see by others.
*
* The option is a: {@code Integer} type.
*
* Group: producer
*
* @return the name of the header {@code AwsSqsDelaySeconds}.
*/
public String awsSqsDelaySeconds() {
return "CamelAwsSqsDelaySeconds";
}
/**
* A string to use for filtering the list results.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code AwsSqsPrefix}.
*/
public String awsSqsPrefix() {
return "CamelAwsSqsPrefix";
}
/**
* The operation we want to perform.
*
* The option is a: {@code String} type.
*
* Group: common
*
* @return the name of the header {@code AwsSqsOperation}.
*/
public String awsSqsOperation() {
return "CamelAwsSqsOperation";
}
}
static Sqs2EndpointBuilder endpointBuilder(String componentName, String path) {
|
Sqs2HeaderNameBuilder
|
java
|
netty__netty
|
transport/src/main/java/io/netty/channel/group/VoidChannelGroupFuture.java
|
{
"start": 1037,
"end": 4137
}
|
class ____ implements ChannelGroupFuture {
private static final Iterator<ChannelFuture> EMPTY = Collections.<ChannelFuture>emptyList().iterator();
private final ChannelGroup group;
VoidChannelGroupFuture(ChannelGroup group) {
this.group = group;
}
@Override
public ChannelGroup group() {
return group;
}
@Override
public ChannelFuture find(Channel channel) {
return null;
}
@Override
public boolean isSuccess() {
return false;
}
@Override
public ChannelGroupException cause() {
return null;
}
@Override
public boolean isPartialSuccess() {
return false;
}
@Override
public boolean isPartialFailure() {
return false;
}
@Override
public ChannelGroupFuture addListener(GenericFutureListener<? extends Future<? super Void>> listener) {
throw reject();
}
@Override
public ChannelGroupFuture addListeners(GenericFutureListener<? extends Future<? super Void>>... listeners) {
throw reject();
}
@Override
public ChannelGroupFuture removeListener(GenericFutureListener<? extends Future<? super Void>> listener) {
throw reject();
}
@Override
public ChannelGroupFuture removeListeners(GenericFutureListener<? extends Future<? super Void>>... listeners) {
throw reject();
}
@Override
public ChannelGroupFuture await() {
throw reject();
}
@Override
public ChannelGroupFuture awaitUninterruptibly() {
throw reject();
}
@Override
public ChannelGroupFuture syncUninterruptibly() {
throw reject();
}
@Override
public ChannelGroupFuture sync() {
throw reject();
}
@Override
public Iterator<ChannelFuture> iterator() {
return EMPTY;
}
@Override
public boolean isCancellable() {
return false;
}
@Override
public boolean await(long timeout, TimeUnit unit) {
throw reject();
}
@Override
public boolean await(long timeoutMillis) {
throw reject();
}
@Override
public boolean awaitUninterruptibly(long timeout, TimeUnit unit) {
throw reject();
}
@Override
public boolean awaitUninterruptibly(long timeoutMillis) {
throw reject();
}
@Override
public Void getNow() {
return null;
}
/**
* {@inheritDoc}
*
* @param mayInterruptIfRunning this value has no effect in this implementation.
*/
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return false;
}
@Override
public boolean isCancelled() {
return false;
}
@Override
public boolean isDone() {
return false;
}
@Override
public Void get() {
throw reject();
}
@Override
public Void get(long timeout, TimeUnit unit) {
throw reject();
}
private static RuntimeException reject() {
return new IllegalStateException("void future");
}
}
|
VoidChannelGroupFuture
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/internal/util/SubSequence.java
|
{
"start": 181,
"end": 1840
}
|
class ____ implements CharSequence {
private final CharSequence sequence;
private final int start;
private final int length;
public SubSequence(CharSequence sequence, int start, int end) {
this.sequence = sequence;
this.start = start;
this.length = end - start;
}
@Override
public int length() {
return length;
}
@Override
public char charAt(int index) {
if ( index < 0 || index >= length ) {
throw new StringIndexOutOfBoundsException( index );
}
return sequence.charAt( index + start );
}
@Override
public CharSequence subSequence(int start, int end) {
if ( start < 0 || start >= length ) {
throw new StringIndexOutOfBoundsException( start );
}
if ( end > length ) {
throw new StringIndexOutOfBoundsException( end );
}
return sequence.subSequence( this.start + start, this.start + end );
}
public int lastIndexOf(char c, int fromIndex, int endIndex) {
int idx = CharSequenceHelper.lastIndexOf( sequence, c, start + fromIndex, this.start + endIndex );
if ( idx == -1 ) {
return -1;
}
return idx - this.start;
}
public int indexOf(char c, int fromIndex, int endIndex) {
int idx = CharSequenceHelper.indexOf( sequence, c, this.start + fromIndex, this.start + endIndex );
if ( idx == -1 ) {
return -1;
}
return idx - this.start;
}
public int indexOf(String s, int fromIndex, int endIndex) {
int idx = CharSequenceHelper.indexOf( sequence, s, this.start + fromIndex, this.start + endIndex );
if ( idx == -1 ) {
return -1;
}
return idx - this.start;
}
@Override
public String toString() {
return sequence.subSequence( start, start + length ).toString();
}
}
|
SubSequence
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/class_/ClassAssert_hasPackage_with_Package_Test.java
|
{
"start": 1222,
"end": 2554
}
|
class ____ {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
Class<?> actual = null;
Package expected = Object.class.getPackage();
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).hasPackage(expected));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
@Test
void should_pass_if_expected_package_is_null() {
// GIVEN
Class<?> actual = Integer.class;
Package expected = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).hasPackage(expected));
// THEN
then(thrown).isInstanceOf(NullPointerException.class).hasMessage(shouldNotBeNull("expected").create());
}
@Test
void should_fail_if_actual_has_not_expected_package() {
// GIVEN
Class<?> actual = Object.class;
Package expected = Collection.class.getPackage();
// WHEN
var assertionError = expectAssertionError(() -> assertThat(actual).hasPackage(expected));
// THEN
then(assertionError).hasMessage(shouldHavePackage(actual, expected).create());
}
@Test
void should_pass_if_actual_has_expected_package() {
// GIVEN
Class<?> actual = Object.class;
Package expected = Object.class.getPackage();
// WHEN/THEN
assertThat(actual).hasPackage(expected);
}
}
|
ClassAssert_hasPackage_with_Package_Test
|
java
|
apache__flink
|
flink-core-api/src/main/java/org/apache/flink/util/function/TriConsumer.java
|
{
"start": 1137,
"end": 1377
}
|
interface ____<S, T, U> {
/**
* Performs this operation on the given arguments.
*
* @param s first argument
* @param t second argument
* @param u third argument
*/
void accept(S s, T t, U u);
}
|
TriConsumer
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/state/TaskStateManagerImplTest.java
|
{
"start": 2413,
"end": 15424
}
|
class ____ {
/** Test reporting and retrieving prioritized local and remote state. */
@Test
void testStateReportingAndRetrieving() {
JobID jobID = new JobID();
ExecutionAttemptID executionAttemptID = createExecutionAttemptId();
TestCheckpointResponder testCheckpointResponder = new TestCheckpointResponder();
TestTaskLocalStateStore testTaskLocalStateStore = new TestTaskLocalStateStore();
InMemoryStateChangelogStorage changelogStorage = new InMemoryStateChangelogStorage();
TaskStateManager taskStateManager =
taskStateManager(
jobID,
executionAttemptID,
testCheckpointResponder,
null,
testTaskLocalStateStore,
changelogStorage);
// ---------------------------------------- test reporting
// -----------------------------------------
CheckpointMetaData checkpointMetaData = new CheckpointMetaData(74L, 11L);
CheckpointMetrics checkpointMetrics = new CheckpointMetrics();
TaskStateSnapshot jmTaskStateSnapshot = new TaskStateSnapshot();
OperatorID operatorID_1 = new OperatorID(1L, 1L);
OperatorID operatorID_2 = new OperatorID(2L, 2L);
OperatorID operatorID_3 = new OperatorID(3L, 3L);
assertThat(taskStateManager.prioritizedOperatorState(operatorID_1).isRestored()).isFalse();
assertThat(taskStateManager.prioritizedOperatorState(operatorID_2).isRestored()).isFalse();
assertThat(taskStateManager.prioritizedOperatorState(operatorID_3).isRestored()).isFalse();
KeyGroupRange keyGroupRange = new KeyGroupRange(0, 1);
// Remote state of operator 1 has only managed keyed state.
OperatorSubtaskState jmOperatorSubtaskState_1 =
OperatorSubtaskState.builder()
.setManagedKeyedState(
StateHandleDummyUtil.createNewKeyedStateHandle(keyGroupRange))
.build();
// Remote state of operator 1 has only raw keyed state.
OperatorSubtaskState jmOperatorSubtaskState_2 =
OperatorSubtaskState.builder()
.setRawKeyedState(
StateHandleDummyUtil.createNewKeyedStateHandle(keyGroupRange))
.build();
jmTaskStateSnapshot.putSubtaskStateByOperatorID(operatorID_1, jmOperatorSubtaskState_1);
jmTaskStateSnapshot.putSubtaskStateByOperatorID(operatorID_2, jmOperatorSubtaskState_2);
TaskStateSnapshot tmTaskStateSnapshot = new TaskStateSnapshot();
// Only operator 1 has a local alternative for the managed keyed state.
OperatorSubtaskState tmOperatorSubtaskState_1 =
OperatorSubtaskState.builder()
.setManagedKeyedState(
StateHandleDummyUtil.createNewKeyedStateHandle(keyGroupRange))
.build();
tmTaskStateSnapshot.putSubtaskStateByOperatorID(operatorID_1, tmOperatorSubtaskState_1);
taskStateManager.reportTaskStateSnapshots(
checkpointMetaData, checkpointMetrics, jmTaskStateSnapshot, tmTaskStateSnapshot);
TestCheckpointResponder.AcknowledgeReport acknowledgeReport =
testCheckpointResponder.getAcknowledgeReports().get(0);
// checks that the checkpoint responder and the local state store received state as
// expected.
assertThat(acknowledgeReport.getCheckpointId())
.isEqualTo(checkpointMetaData.getCheckpointId());
assertThat(acknowledgeReport.getCheckpointMetrics()).isEqualTo(checkpointMetrics);
assertThat(acknowledgeReport.getExecutionAttemptID()).isEqualTo(executionAttemptID);
assertThat(acknowledgeReport.getJobID()).isEqualTo(jobID);
assertThat(acknowledgeReport.getSubtaskState()).isEqualTo(jmTaskStateSnapshot);
assertThat(testTaskLocalStateStore.retrieveLocalState(checkpointMetaData.getCheckpointId()))
.isEqualTo(tmTaskStateSnapshot);
// -------------------------------------- test prio retrieving
// ---------------------------------------
JobManagerTaskRestore taskRestore =
new JobManagerTaskRestore(
checkpointMetaData.getCheckpointId(), acknowledgeReport.getSubtaskState());
taskStateManager =
taskStateManager(
jobID,
executionAttemptID,
testCheckpointResponder,
taskRestore,
testTaskLocalStateStore,
changelogStorage);
// this has remote AND local managed keyed state.
PrioritizedOperatorSubtaskState prioritized_1 =
taskStateManager.prioritizedOperatorState(operatorID_1);
// this has only remote raw keyed state.
PrioritizedOperatorSubtaskState prioritized_2 =
taskStateManager.prioritizedOperatorState(operatorID_2);
// not restored.
PrioritizedOperatorSubtaskState prioritized_3 =
taskStateManager.prioritizedOperatorState(operatorID_3);
assertThat(prioritized_1.isRestored()).isTrue();
assertThat(prioritized_2.isRestored()).isTrue();
assertThat(prioritized_3.isRestored()).isTrue();
assertThat(taskStateManager.prioritizedOperatorState(new OperatorID()).isRestored())
.isTrue();
// checks for operator 1.
Iterator<StateObjectCollection<KeyedStateHandle>> prioritizedManagedKeyedState_1 =
prioritized_1.getPrioritizedManagedKeyedState().iterator();
assertThat(prioritizedManagedKeyedState_1).hasNext();
StateObjectCollection<KeyedStateHandle> current = prioritizedManagedKeyedState_1.next();
KeyedStateHandle keyedStateHandleExp =
tmOperatorSubtaskState_1.getManagedKeyedState().iterator().next();
KeyedStateHandle keyedStateHandleAct = current.iterator().next();
assertThat(keyedStateHandleExp).isSameAs(keyedStateHandleAct);
assertThat(prioritizedManagedKeyedState_1).hasNext();
current = prioritizedManagedKeyedState_1.next();
keyedStateHandleExp = jmOperatorSubtaskState_1.getManagedKeyedState().iterator().next();
keyedStateHandleAct = current.iterator().next();
assertThat(keyedStateHandleExp).isSameAs(keyedStateHandleAct);
assertThat(prioritizedManagedKeyedState_1).isExhausted();
// checks for operator 2.
Iterator<StateObjectCollection<KeyedStateHandle>> prioritizedRawKeyedState_2 =
prioritized_2.getPrioritizedRawKeyedState().iterator();
assertThat(prioritizedRawKeyedState_2).hasNext();
current = prioritizedRawKeyedState_2.next();
keyedStateHandleExp = jmOperatorSubtaskState_2.getRawKeyedState().iterator().next();
keyedStateHandleAct = current.iterator().next();
assertThat(keyedStateHandleExp).isSameAs(keyedStateHandleAct);
assertThat(prioritizedRawKeyedState_2).isExhausted();
}
/**
* This tests if the {@link TaskStateManager} properly returns the subtask local state dir from
* the corresponding {@link TaskLocalStateStoreImpl}.
*/
@Test
void testForwardingSubtaskLocalStateBaseDirFromLocalStateStore(@TempDir Path tmpFolder)
throws Exception {
JobID jobID = new JobID(42L, 43L);
AllocationID allocationID = new AllocationID(4711L, 23L);
JobVertexID jobVertexID = new JobVertexID(12L, 34L);
ExecutionAttemptID executionAttemptID = createExecutionAttemptId(jobVertexID);
TestCheckpointResponder checkpointResponderMock = new TestCheckpointResponder();
Executor directExecutor = Executors.directExecutor();
File[] allocBaseDirs =
new File[] {
TempDirUtils.newFolder(tmpFolder),
TempDirUtils.newFolder(tmpFolder),
TempDirUtils.newFolder(tmpFolder)
};
LocalSnapshotDirectoryProviderImpl directoryProvider =
new LocalSnapshotDirectoryProviderImpl(allocBaseDirs, jobID, jobVertexID, 0);
LocalRecoveryConfig localRecoveryConfig =
LocalRecoveryConfig.backupAndRecoveryEnabled(directoryProvider);
TaskLocalStateStore taskLocalStateStore =
new TaskLocalStateStoreImpl(
jobID, allocationID, jobVertexID, 13, localRecoveryConfig, directExecutor);
InMemoryStateChangelogStorage changelogStorage = new InMemoryStateChangelogStorage();
TaskStateManager taskStateManager =
taskStateManager(
jobID,
executionAttemptID,
checkpointResponderMock,
null,
taskLocalStateStore,
changelogStorage);
LocalRecoveryConfig localRecoveryConfFromTaskLocalStateStore =
taskLocalStateStore.getLocalRecoveryConfig();
LocalRecoveryConfig localRecoveryConfFromTaskStateManager =
taskStateManager.createLocalRecoveryConfig();
for (int i = 0; i < 10; ++i) {
assertThat(
localRecoveryConfFromTaskLocalStateStore
.getLocalStateDirectoryProvider()
.get()
.allocationBaseDirectory(i))
.isEqualTo(allocBaseDirs[i % allocBaseDirs.length]);
assertThat(
localRecoveryConfFromTaskStateManager
.getLocalStateDirectoryProvider()
.get()
.allocationBaseDirectory(i))
.isEqualTo(allocBaseDirs[i % allocBaseDirs.length]);
}
assertThat(localRecoveryConfFromTaskStateManager.isLocalRecoveryEnabled())
.isEqualTo(localRecoveryConfFromTaskLocalStateStore.isLocalRecoveryEnabled());
}
@Test
void testStateRetrievingWithFinishedOperator() {
TaskStateSnapshot taskStateSnapshot = TaskStateSnapshot.FINISHED_ON_RESTORE;
JobManagerTaskRestore jobManagerTaskRestore =
new JobManagerTaskRestore(2, taskStateSnapshot);
TaskStateManagerImpl stateManager =
new TaskStateManagerImpl(
new JobID(),
createExecutionAttemptId(),
new TestTaskLocalStateStore(),
null,
null,
new TaskExecutorStateChangelogStoragesManager(),
jobManagerTaskRestore,
new TestCheckpointResponder());
assertThat(stateManager.isTaskDeployedAsFinished()).isTrue();
}
void testAcquringRestoreCheckpointId() {
TaskStateManagerImpl emptyStateManager =
new TaskStateManagerImpl(
new JobID(),
createExecutionAttemptId(),
new TestTaskLocalStateStore(),
null,
null,
new TaskExecutorStateChangelogStoragesManager(),
null,
new TestCheckpointResponder());
assertThat(emptyStateManager.getRestoreCheckpointId()).isNotPresent();
TaskStateManagerImpl nonEmptyStateManager =
new TaskStateManagerImpl(
new JobID(),
createExecutionAttemptId(),
new TestTaskLocalStateStore(),
null,
null,
new TaskExecutorStateChangelogStoragesManager(),
new JobManagerTaskRestore(2, new TaskStateSnapshot()),
new TestCheckpointResponder());
assertThat(nonEmptyStateManager.getRestoreCheckpointId()).hasValue(2L);
}
private static TaskStateManager taskStateManager(
JobID jobID,
ExecutionAttemptID executionAttemptID,
CheckpointResponder checkpointResponderMock,
JobManagerTaskRestore jobManagerTaskRestore,
TaskLocalStateStore localStateStore,
StateChangelogStorage<?> stateChangelogStorage) {
return new TaskStateManagerImpl(
jobID,
executionAttemptID,
localStateStore,
null,
stateChangelogStorage,
new TaskExecutorStateChangelogStoragesManager(),
jobManagerTaskRestore,
checkpointResponderMock);
}
}
|
TaskStateManagerImplTest
|
java
|
elastic__elasticsearch
|
modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/annotation/AliasAnnotation.java
|
{
"start": 776,
"end": 1041
}
|
class ____ {
*
* }
* public Inner inner() {
* return new Inner();
* }
* }
* </pre>
*
* Normally scripts would need to reference {@code Outer.Inner}.
*
* With an alias annotation {@code @alias[class="Inner"]} on the class
* <pre>
*
|
Inner
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/AutoValueImmutableFieldsTest.java
|
{
"start": 3521,
"end": 4011
}
|
class ____ {
// BUG: Diagnostic contains: ImmutableMultimap
public abstract Multimap<String, String> countries();
}
""")
.doTest();
}
@Test
public void matchesListMultimap() {
compilationHelper
.addSourceLines(
"in/Test.java",
"""
import com.google.auto.value.AutoValue;
import com.google.common.collect.ListMultimap;
@AutoValue
abstract
|
Test
|
java
|
lettuce-io__lettuce-core
|
src/test/java/io/lettuce/apigenerator/CompilationUnitFactory.java
|
{
"start": 2255,
"end": 9250
}
|
class ____ {
private final File templateFile;
private final File sources;
private final File target;
private final String targetPackage;
private final String targetName;
private final Function<String, String> typeDocFunction;
private final Map<Predicate<MethodDeclaration>, Function<MethodDeclaration, Type>> methodReturnTypeMutation;
private final Predicate<MethodDeclaration> methodFilter;
private final Supplier<List<String>> importSupplier;
private final Consumer<ClassOrInterfaceDeclaration> typeMutator;
private final Consumer<MethodDeclaration> onMethod;
private final BiFunction<MethodDeclaration, Comment, Comment> methodCommentMutator;
private CompilationUnit template;
private final CompilationUnit result = new CompilationUnit();
private ClassOrInterfaceDeclaration resultType;
public CompilationUnitFactory(File templateFile, File sources, String targetPackage, String targetName,
Function<String, String> typeDocFunction, Function<MethodDeclaration, Type> methodReturnTypeFunction,
Predicate<MethodDeclaration> methodFilter, Supplier<List<String>> importSupplier,
Consumer<ClassOrInterfaceDeclaration> typeMutator, Function<Comment, Comment> methodCommentMutator) {
this(templateFile, sources, targetPackage, targetName, typeDocFunction, methodReturnTypeFunction, methodDeclaration -> {
}, methodFilter, importSupplier, typeMutator,
(m, c) -> methodCommentMutator != null ? methodCommentMutator.apply(c) : c);
}
public CompilationUnitFactory(File templateFile, File sources, String targetPackage, String targetName,
Function<String, String> typeDocFunction, Function<MethodDeclaration, Type> methodReturnTypeFunction,
Consumer<MethodDeclaration> onMethod, Predicate<MethodDeclaration> methodFilter,
Supplier<List<String>> importSupplier, Consumer<ClassOrInterfaceDeclaration> typeMutator,
BiFunction<MethodDeclaration, Comment, Comment> methodCommentMutator) {
this.templateFile = templateFile;
this.sources = sources;
this.targetPackage = targetPackage;
this.targetName = targetName;
this.typeDocFunction = typeDocFunction;
this.onMethod = onMethod;
this.methodFilter = methodFilter;
this.importSupplier = importSupplier;
this.typeMutator = typeMutator;
this.methodCommentMutator = methodCommentMutator;
this.methodReturnTypeMutation = new LinkedHashMap<>();
this.methodReturnTypeMutation.put(it -> true, methodReturnTypeFunction);
this.target = new File(sources, targetPackage.replace('.', '/') + "/" + targetName + ".java");
}
public void createInterface() throws Exception {
result.setPackageDeclaration(new PackageDeclaration(new Name(targetPackage)));
template = JavaParser.parse(templateFile);
ClassOrInterfaceDeclaration templateTypeDeclaration = (ClassOrInterfaceDeclaration) template.getTypes().get(0);
resultType = new ClassOrInterfaceDeclaration(EnumSet.of(Modifier.PUBLIC), true, targetName);
if (templateTypeDeclaration.getExtendedTypes() != null) {
resultType.setExtendedTypes(templateTypeDeclaration.getExtendedTypes());
}
if (!templateTypeDeclaration.getTypeParameters().isEmpty()) {
resultType.setTypeParameters(new NodeList<>());
for (TypeParameter typeParameter : templateTypeDeclaration.getTypeParameters()) {
resultType.getTypeParameters()
.add(new TypeParameter(typeParameter.getName().getIdentifier(), typeParameter.getTypeBound()));
}
}
resultType
.setComment(new JavadocComment(typeDocFunction.apply(templateTypeDeclaration.getComment().get().getContent())));
result.setComment(template.getComment().orElse(null));
result.setImports(new NodeList<>());
result.addType(resultType);
resultType.setParentNode(result);
if (template.getImports() != null) {
result.getImports().addAll(template.getImports());
}
List<String> importLines = importSupplier.get();
importLines.forEach(importLine -> result.getImports().add(new ImportDeclaration(importLine, false, false)));
new MethodVisitor().visit(template, null);
if (typeMutator != null) {
typeMutator.accept(resultType);
}
removeUnusedImports();
writeResult();
}
public void keepMethodSignaturesFor(Set<String> methodSignaturesToKeep) {
this.methodReturnTypeMutation.put(methodDeclaration -> contains(methodSignaturesToKeep, methodDeclaration),
MethodDeclaration::getType);
}
private void writeResult() throws IOException {
FileOutputStream fos = new FileOutputStream(target);
fos.write(result.toString().getBytes());
fos.close();
}
public static Type createParametrizedType(String baseType, String... typeArguments) {
NodeList<Type> args = new NodeList<>();
Arrays.stream(typeArguments).map(it -> {
if (it.contains("[]")) {
return it;
}
return StringUtils.capitalize(it);
}).map(it -> new ClassOrInterfaceType(null, it)).forEach(args::add);
return new ClassOrInterfaceType(null, new SimpleName(baseType), args);
}
public static boolean contains(Collection<String> haystack, MethodDeclaration needle) {
ClassOrInterfaceDeclaration declaringClass = (ClassOrInterfaceDeclaration) needle.getParentNode().get();
return haystack.contains(needle.getNameAsString())
|| haystack.contains(declaringClass.getNameAsString() + "." + needle.getNameAsString());
}
public void removeUnusedImports() {
ClassOrInterfaceDeclaration declaringClass = (ClassOrInterfaceDeclaration) result.getChildNodes().get(1);
List<ImportDeclaration> optimizedImports = result.getImports().stream()
.filter(i -> i.isAsterisk() || i.isStatic() || declaringClass.findFirst(Type.class, t -> {
String fullType = t.toString();
String importIdentifier = i.getName().getIdentifier();
return fullType.contains(importIdentifier);
}).isPresent()).sorted((o1, o2) -> {
if (o1.getNameAsString().startsWith("java"))
return -1;
if (o2.getNameAsString().startsWith("java"))
return 1;
return o1.getNameAsString().compareTo(o2.getNameAsString());
}).collect(Collectors.toList());
result.setImports(NodeList.nodeList(optimizedImports));
}
/**
* Simple visitor implementation for visiting MethodDeclaration nodes.
*/
private
|
CompilationUnitFactory
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerNoACLs.java
|
{
"start": 6124,
"end": 10786
}
|
interface ____ get the json output of a *STATUS command
* on the given file.
*
* @param filename The file to query.
* @param command Either GETFILESTATUS, LISTSTATUS, or ACLSTATUS
* @param expectOK Is this operation expected to succeed?
* @throws Exception
*/
/**
 * Talks to the http interface to get the json output of a *STATUS command
 * on the given file and asserts the expected outcome.
 *
 * @param filename The file to query.
 * @param command Either GETFILESTATUS, LISTSTATUS, or ACLSTATUS.
 * @param expectOK Is this operation expected to succeed?
 * @throws Exception on any connection or assertion failure.
 */
private void getStatus(String filename, String command, boolean expectOK)
        throws Exception {
    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
    // The webhdfs URL template already supplies the leading slash.
    if (filename.charAt(0) == '/') {
        filename = filename.substring(1);
    }
    String pathOps = MessageFormat.format(
            "/webhdfs/v1/{0}?user.name={1}&op={2}",
            filename, user, command);
    URL url = new URL(TestJettyHelper.getJettyURL(), pathOps);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.connect();
    int resp = conn.getResponseCode();
    if (expectOK) {
        assertEquals(HttpURLConnection.HTTP_OK, resp);
        // Fix: close the reader (previously leaked) via try-with-resources.
        try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            String res = reader.readLine();
            // ACLs are disabled, so no aclBit must ever be reported.
            assertTrue(!res.contains("aclBit"));
            assertTrue(res.contains("owner")); // basic sanity check
        }
    } else {
        assertEquals(HttpURLConnection.HTTP_INTERNAL_ERROR, resp);
        try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(conn.getErrorStream()))) {
            String res = reader.readLine();
            assertTrue(res.contains("AclException"));
            assertTrue(res.contains("Support for ACLs has been disabled"));
        }
    }
}
/**
 * General-purpose http PUT command to the httpfs server.
 *
 * @param filename The file to operate upon.
 * @param command The command to perform (SETACL, etc).
 * @param params Parameters, like "aclspec=..."; may be {@code null}.
 * @param expectOK Is this operation expected to succeed?
 * @throws Exception on any connection or assertion failure.
 */
private void putCmd(String filename, String command,
        String params, boolean expectOK) throws Exception {
    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
    // The webhdfs URL template already supplies the leading slash.
    if (filename.charAt(0) == '/') {
        filename = filename.substring(1);
    }
    String pathOps = MessageFormat.format(
            "/webhdfs/v1/{0}?user.name={1}{2}{3}&op={4}",
            filename, user, (params == null) ? "" : "&",
            (params == null) ? "" : params, command);
    URL url = new URL(TestJettyHelper.getJettyURL(), pathOps);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    conn.connect();
    int resp = conn.getResponseCode();
    if (expectOK) {
        assertEquals(HttpURLConnection.HTTP_OK, resp);
    } else {
        assertEquals(HttpURLConnection.HTTP_INTERNAL_ERROR, resp);
        // Fix: close the reader (previously leaked) via try-with-resources.
        try (BufferedReader reader =
                new BufferedReader(new InputStreamReader(conn.getErrorStream()))) {
            String err = reader.readLine();
            assertTrue(err.contains("AclException"));
            assertTrue(err.contains("Support for ACLs has been disabled"));
        }
    }
}
/**
 * Test without ACLs.
 * Ensure that
 * <ol>
 * <li>GETFILESTATUS and LISTSTATUS work happily</li>
 * <li>ACLSTATUS throws an exception</li>
 * <li>The ACL SET, REMOVE, etc calls all fail</li>
 * </ol>
 *
 * @throws Exception
 */
@Test
@TestDir
@TestJetty
public void testWithNoAcls() throws Exception {
// ACL specs exercised below; they are syntactically valid but must all be
// rejected because the server runs with ACL support disabled.
final String aclUser1 = "user:foo:rw-";
final String rmAclUser1 = "user:foo:";
final String aclUser2 = "user:bar:r--";
final String aclGroup1 = "group::r--";
final String aclSpec = "aclspec=user::rwx," + aclUser1 + ","
+ aclGroup1 + ",other::---";
final String modAclSpec = "aclspec=" + aclUser2;
final String remAclSpec = "aclspec=" + rmAclUser1;
final String defUser1 = "default:user:glarch:r-x";
final String defSpec1 = "aclspec=" + defUser1;
final String dir = "/noACLs";
final String path = dir + "/foo";
startMiniDFS();
createHttpFSServer();
// Create a one-byte file so the status calls have something to report on.
FileSystem fs = FileSystem.get(nnConf);
fs.mkdirs(new Path(dir));
OutputStream os = fs.create(new Path(path));
os.write(1);
os.close();
/* The normal status calls work as expected; GETACLSTATUS fails */
getStatus(path, "GETFILESTATUS", true);
getStatus(dir, "LISTSTATUS", true);
getStatus(path, "GETACLSTATUS", false);
/* All the ACL-based PUT commands fail with ACL exceptions */
putCmd(path, "SETACL", aclSpec, false);
putCmd(path, "MODIFYACLENTRIES", modAclSpec, false);
putCmd(path, "REMOVEACLENTRIES", remAclSpec, false);
putCmd(path, "REMOVEACL", null, false);
putCmd(dir, "SETACL", defSpec1, false);
putCmd(dir, "REMOVEDEFAULTACL", null, false);
miniDfs.shutdown();
}
}
|
to
|
java
|
spring-projects__spring-security
|
access/src/test/java/org/springframework/security/acls/afterinvocation/AclEntryAfterInvocationProviderTests.java
|
{
"start": 2119,
"end": 6118
}
|
/**
 * Tests for {@code AclEntryAfterInvocationProvider}: constructor argument
 * validation and the access decisions of {@code decide()} for granted, denied,
 * unsupported and {@code null} return objects.
 */
class ____ {
@Test
public void rejectsMissingPermissions() {
// Neither a null nor an empty permission list is acceptable at construction.
assertThatIllegalArgumentException()
.isThrownBy(() -> new AclEntryAfterInvocationProvider(mock(AclService.class), null));
assertThatIllegalArgumentException().isThrownBy(
() -> new AclEntryAfterInvocationProvider(mock(AclService.class), Collections.<Permission>emptyList()));
}
@Test
public void accessIsAllowedIfPermissionIsGranted() {
// The ACL grants access, so decide() must return the original object untouched.
AclService service = mock(AclService.class);
Acl acl = mock(Acl.class);
given(acl.isGranted(any(List.class), any(List.class), anyBoolean())).willReturn(true);
given(service.readAclById(any(), any())).willReturn(acl);
AclEntryAfterInvocationProvider provider = new AclEntryAfterInvocationProvider(service,
Arrays.asList(mock(Permission.class)));
provider.setMessageSource(new SpringSecurityMessageSource());
provider.setObjectIdentityRetrievalStrategy(mock(ObjectIdentityRetrievalStrategy.class));
provider.setProcessDomainObjectClass(Object.class);
provider.setSidRetrievalStrategy(mock(SidRetrievalStrategy.class));
Object returned = new Object();
assertThat(returned).isSameAs(provider.decide(mock(Authentication.class), new Object(),
SecurityConfig.createList("AFTER_ACL_READ"), returned));
}
@Test
public void accessIsGrantedIfNoAttributesDefined() {
// With no config attributes there is nothing to enforce: pass-through.
AclEntryAfterInvocationProvider provider = new AclEntryAfterInvocationProvider(mock(AclService.class),
Arrays.asList(mock(Permission.class)));
Object returned = new Object();
assertThat(returned).isSameAs(provider.decide(mock(Authentication.class), new Object(),
Collections.<ConfigAttribute>emptyList(), returned));
}
@Test
public void accessIsGrantedIfObjectTypeNotSupported() {
// Provider only processes String; any other return type is passed through.
AclEntryAfterInvocationProvider provider = new AclEntryAfterInvocationProvider(mock(AclService.class),
Arrays.asList(mock(Permission.class)));
provider.setProcessDomainObjectClass(String.class);
// Not a String
Object returned = new Object();
assertThat(returned).isSameAs(provider.decide(mock(Authentication.class), new Object(),
SecurityConfig.createList("AFTER_ACL_READ"), returned));
}
@Test
public void accessIsDeniedIfPermissionIsNotGranted() {
AclService service = mock(AclService.class);
Acl acl = mock(Acl.class);
// First call: permission explicitly denied.
given(acl.isGranted(any(List.class), any(List.class), anyBoolean())).willReturn(false);
// Try a second time with no permissions found
given(acl.isGranted(any(), any(List.class), anyBoolean())).willThrow(new NotFoundException(""));
given(service.readAclById(any(), any())).willReturn(acl);
AclEntryAfterInvocationProvider provider = new AclEntryAfterInvocationProvider(service,
Arrays.asList(mock(Permission.class)));
provider.setProcessConfigAttribute("MY_ATTRIBUTE");
provider.setMessageSource(new SpringSecurityMessageSource());
provider.setObjectIdentityRetrievalStrategy(mock(ObjectIdentityRetrievalStrategy.class));
provider.setProcessDomainObjectClass(Object.class);
provider.setSidRetrievalStrategy(mock(SidRetrievalStrategy.class));
// Both scenarios (denied, and NotFoundException) must end in AccessDenied.
assertThatExceptionOfType(AccessDeniedException.class)
.isThrownBy(() -> provider.decide(mock(Authentication.class), new Object(),
SecurityConfig.createList("UNSUPPORTED", "MY_ATTRIBUTE"), new Object()));
// Second scenario with no acls found
assertThatExceptionOfType(AccessDeniedException.class)
.isThrownBy(() -> provider.decide(mock(Authentication.class), new Object(),
SecurityConfig.createList("UNSUPPORTED", "MY_ATTRIBUTE"), new Object()));
}
@Test
public void nullReturnObjectIsIgnored() {
// A null return object short-circuits: no ACL lookup must happen.
AclService service = mock(AclService.class);
AclEntryAfterInvocationProvider provider = new AclEntryAfterInvocationProvider(service,
Arrays.asList(mock(Permission.class)));
assertThat(provider.decide(mock(Authentication.class), new Object(),
SecurityConfig.createList("AFTER_ACL_COLLECTION_READ"), null))
.isNull();
verify(service, never()).readAclById(any(ObjectIdentity.class), any(List.class));
}
}
|
AclEntryAfterInvocationProviderTests
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/DecoratingProxy.java
|
{
"start": 1144,
"end": 1494
}
|
class ____ anyway.
*
* <p>Defined in the core module in order to allow
* {@link org.springframework.core.annotation.AnnotationAwareOrderComparator}
* (and potentially other candidates without spring-aop dependencies) to use it
* for introspection purposes, in particular annotation lookups.
*
* @author Juergen Hoeller
* @since 4.3
*/
public
|
there
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/stream/JSONReaderScannerTest_matchField.java
|
{
"start": 245,
"end": 756
}
|
class ____ extends TestCase {
// Parses a JSON object via the streaming JSONReaderScanner and checks that
// both a nested object ("value") and a one-element array ("items") are bound.
public void test_true() throws Exception {
DefaultJSONParser parser = new DefaultJSONParser(new JSONReaderScanner("{\"items\":[{}],\"value\":{}}"));
VO vo = parser.parseObject(VO.class);
Assert.assertNotNull(vo.getValue());
Assert.assertNotNull(vo.getItems());
Assert.assertEquals(1, vo.getItems().size());
Assert.assertNotNull(vo.getItems().get(0));
// Release the underlying reader buffer.
parser.close();
}
public static
|
JSONReaderScannerTest_matchField
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/condition/HeadersRequestConditionTests.java
|
{
"start": 1011,
"end": 6181
}
|
/**
 * Tests for {@code HeadersRequestCondition}: expression equality, presence and
 * value matching (including negation), condition comparison, and combining.
 */
class ____ {
@Test
void headerEquals() {
// Header names compare case-insensitively; expressions with values too.
assertThat(new HeadersRequestCondition("foo")).isEqualTo(new HeadersRequestCondition("foo"));
assertThat(new HeadersRequestCondition("FOO")).isEqualTo(new HeadersRequestCondition("foo"));
assertThat(new HeadersRequestCondition("bar")).isNotEqualTo(new HeadersRequestCondition("foo"));
assertThat(new HeadersRequestCondition("foo=bar")).isEqualTo(new HeadersRequestCondition("foo=bar"));
assertThat(new HeadersRequestCondition("FOO=bar")).isEqualTo(new HeadersRequestCondition("foo=bar"));
}
@Test
void headerPresent() {
// Presence-only condition matches even an empty header value.
HeadersRequestCondition condition = new HeadersRequestCondition("accept");
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("Accept", "");
assertThat(condition.getMatchingCondition(request)).isNotNull();
}
@Test
void headerPresentNoMatch() {
HeadersRequestCondition condition = new HeadersRequestCondition("foo");
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("bar", "");
assertThat(condition.getMatchingCondition(request)).isNull();
}
@Test
void headerNotPresent() {
// "!accept" matches when the header is absent.
HeadersRequestCondition condition = new HeadersRequestCondition("!accept");
MockHttpServletRequest request = new MockHttpServletRequest();
assertThat(condition.getMatchingCondition(request)).isNotNull();
}
@Test
void headerValueMatch() {
HeadersRequestCondition condition = new HeadersRequestCondition("foo=bar");
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("foo", "bar");
assertThat(condition.getMatchingCondition(request)).isNotNull();
}
@Test
void headerValueNoMatch() {
HeadersRequestCondition condition = new HeadersRequestCondition("foo=bar");
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("foo", "bazz");
assertThat(condition.getMatchingCondition(request)).isNull();
}
@Test
void headerCaseSensitiveValueMatch() {
// Unlike names, header VALUES compare case-sensitively: "Bar" != "bar".
HeadersRequestCondition condition = new HeadersRequestCondition("foo=Bar");
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("foo", "bar");
assertThat(condition.getMatchingCondition(request)).isNull();
}
@Test
void headerValueMatchNegated() {
HeadersRequestCondition condition = new HeadersRequestCondition("foo!=bar");
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("foo", "baz");
assertThat(condition.getMatchingCondition(request)).isNotNull();
}
@Test
void headerValueNoMatchNegated() {
HeadersRequestCondition condition = new HeadersRequestCondition("foo!=bar");
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("foo", "bar");
assertThat(condition.getMatchingCondition(request)).isNull();
}
@Test
void compareTo() {
// More expressions => more specific => compares lower (wins).
MockHttpServletRequest request = new MockHttpServletRequest();
HeadersRequestCondition condition1 = new HeadersRequestCondition("foo", "bar", "baz");
HeadersRequestCondition condition2 = new HeadersRequestCondition("foo=a", "bar");
int result = condition1.compareTo(condition2, request);
assertThat(result).as("Invalid comparison result: " + result).isLessThan(0);
result = condition2.compareTo(condition1, request);
assertThat(result).as("Invalid comparison result: " + result).isGreaterThan(0);
}
@Test // SPR-16674
public void compareToWithMoreSpecificMatchByValue() {
// With equal expression counts, a value match beats a presence-only match.
MockHttpServletRequest request = new MockHttpServletRequest();
HeadersRequestCondition condition1 = new HeadersRequestCondition("foo=a");
HeadersRequestCondition condition2 = new HeadersRequestCondition("foo");
int result = condition1.compareTo(condition2, request);
assertThat(result).as("Invalid comparison result: " + result).isLessThan(0);
result = condition2.compareTo(condition1, request);
assertThat(result).as("Invalid comparison result: " + result).isGreaterThan(0);
}
@Test
void compareToWithNegatedMatch() {
MockHttpServletRequest request = new MockHttpServletRequest();
HeadersRequestCondition condition1 = new HeadersRequestCondition("foo!=a");
HeadersRequestCondition condition2 = new HeadersRequestCondition("foo");
assertThat(condition1.compareTo(condition2, request)).as("Negated match should not count as more specific").isEqualTo(0);
}
@Test
void combine() {
// Combining keeps both expressions rather than merging them.
HeadersRequestCondition condition1 = new HeadersRequestCondition("foo=bar");
HeadersRequestCondition condition2 = new HeadersRequestCondition("foo=baz");
HeadersRequestCondition result = condition1.combine(condition2);
Collection<HeaderExpression> conditions = result.getContent();
assertThat(conditions).hasSize(2);
}
@Test
void getMatchingCondition() {
MockHttpServletRequest request = new MockHttpServletRequest();
request.addHeader("foo", "bar");
HeadersRequestCondition condition = new HeadersRequestCondition("foo");
HeadersRequestCondition result = condition.getMatchingCondition(request);
assertThat(result).isEqualTo(condition);
condition = new HeadersRequestCondition("bar");
result = condition.getMatchingCondition(request);
assertThat(result).isNull();
}
}
|
HeadersRequestConditionTests
|
java
|
spring-projects__spring-framework
|
spring-tx/src/main/java/org/springframework/transaction/config/TxAdviceBeanDefinitionParser.java
|
{
"start": 1902,
"end": 6792
}
|
/**
 * Parses the {@code <tx:advice/>} element into a {@code TransactionInterceptor}
 * bean definition. Transaction attributes come either from a nested
 * {@code <attributes>} element (name-matched rules) or, when that element is
 * absent, from {@code @Transactional} annotations.
 */
class ____ extends AbstractSingleBeanDefinitionParser {
    private static final String METHOD_ELEMENT = "method";
    private static final String METHOD_NAME_ATTRIBUTE = "name";
    private static final String ATTRIBUTES_ELEMENT = "attributes";
    private static final String TIMEOUT_ATTRIBUTE = "timeout";
    private static final String READ_ONLY_ATTRIBUTE = "read-only";
    private static final String PROPAGATION_ATTRIBUTE = "propagation";
    private static final String ISOLATION_ATTRIBUTE = "isolation";
    private static final String ROLLBACK_FOR_ATTRIBUTE = "rollback-for";
    private static final String NO_ROLLBACK_FOR_ATTRIBUTE = "no-rollback-for";

    @Override
    protected Class<?> getBeanClass(Element element) {
        return TransactionInterceptor.class;
    }

    @Override
    protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {
        builder.addPropertyReference("transactionManager", TxNamespaceHandler.getTransactionManagerName(element));
        List<Element> txAttributes = DomUtils.getChildElementsByTagName(element, ATTRIBUTES_ELEMENT);
        if (txAttributes.size() > 1) {
            parserContext.getReaderContext().error(
                    "Element <attributes> is allowed at most once inside element <advice>", element);
        }
        else if (txAttributes.size() == 1) {
            // Using attributes source.
            Element attributeSourceElement = txAttributes.get(0);
            RootBeanDefinition attributeSourceDefinition = parseAttributeSource(attributeSourceElement, parserContext);
            builder.addPropertyValue("transactionAttributeSource", attributeSourceDefinition);
        }
        else {
            // Assume annotations source.
            builder.addPropertyValue("transactionAttributeSource",
                    new RootBeanDefinition("org.springframework.transaction.annotation.AnnotationTransactionAttributeSource"));
        }
    }

    /**
     * Builds a {@code NameMatchTransactionAttributeSource} definition from the
     * nested {@code <method>} elements: one {@code RuleBasedTransactionAttribute}
     * keyed by method-name pattern per entry.
     */
    private RootBeanDefinition parseAttributeSource(Element attrEle, ParserContext parserContext) {
        List<Element> methods = DomUtils.getChildElementsByTagName(attrEle, METHOD_ELEMENT);
        ManagedMap<TypedStringValue, RuleBasedTransactionAttribute> transactionAttributeMap =
                new ManagedMap<>(methods.size());
        transactionAttributeMap.setSource(parserContext.extractSource(attrEle));
        for (Element methodEle : methods) {
            String name = methodEle.getAttribute(METHOD_NAME_ATTRIBUTE);
            TypedStringValue nameHolder = new TypedStringValue(name);
            nameHolder.setSource(parserContext.extractSource(methodEle));
            RuleBasedTransactionAttribute attribute = new RuleBasedTransactionAttribute();
            String propagation = methodEle.getAttribute(PROPAGATION_ATTRIBUTE);
            String isolation = methodEle.getAttribute(ISOLATION_ATTRIBUTE);
            String timeout = methodEle.getAttribute(TIMEOUT_ATTRIBUTE);
            String readOnly = methodEle.getAttribute(READ_ONLY_ATTRIBUTE);
            if (StringUtils.hasText(propagation)) {
                attribute.setPropagationBehaviorName(RuleBasedTransactionAttribute.PREFIX_PROPAGATION + propagation);
            }
            if (StringUtils.hasText(isolation)) {
                attribute.setIsolationLevelName(RuleBasedTransactionAttribute.PREFIX_ISOLATION + isolation);
            }
            if (StringUtils.hasText(timeout)) {
                attribute.setTimeoutString(timeout);
            }
            if (StringUtils.hasText(readOnly)) {
                // Consistency fix: reuse the value already read above instead of
                // fetching the attribute from the element a second time.
                attribute.setReadOnly(Boolean.parseBoolean(readOnly));
            }
            List<RollbackRuleAttribute> rollbackRules = new ArrayList<>(1);
            if (methodEle.hasAttribute(ROLLBACK_FOR_ATTRIBUTE)) {
                String rollbackForValue = methodEle.getAttribute(ROLLBACK_FOR_ATTRIBUTE);
                addRollbackRuleAttributesTo(rollbackRules, rollbackForValue);
            }
            if (methodEle.hasAttribute(NO_ROLLBACK_FOR_ATTRIBUTE)) {
                String noRollbackForValue = methodEle.getAttribute(NO_ROLLBACK_FOR_ATTRIBUTE);
                addNoRollbackRuleAttributesTo(rollbackRules, noRollbackForValue);
            }
            attribute.setRollbackRules(rollbackRules);
            transactionAttributeMap.put(nameHolder, attribute);
        }
        RootBeanDefinition attributeSourceDefinition = new RootBeanDefinition(NameMatchTransactionAttributeSource.class);
        attributeSourceDefinition.setSource(parserContext.extractSource(attrEle));
        attributeSourceDefinition.getPropertyValues().add("nameMap", transactionAttributeMap);
        return attributeSourceDefinition;
    }

    /** Adds one rollback rule per comma-separated exception type name. */
    private void addRollbackRuleAttributesTo(List<RollbackRuleAttribute> rollbackRules, String rollbackForValue) {
        String[] exceptionTypeNames = StringUtils.commaDelimitedListToStringArray(rollbackForValue);
        for (String typeName : exceptionTypeNames) {
            rollbackRules.add(new RollbackRuleAttribute(typeName.strip()));
        }
    }

    /** Adds one no-rollback rule per comma-separated exception type name. */
    private void addNoRollbackRuleAttributesTo(List<RollbackRuleAttribute> rollbackRules, String noRollbackForValue) {
        String[] exceptionTypeNames = StringUtils.commaDelimitedListToStringArray(noRollbackForValue);
        for (String typeName : exceptionTypeNames) {
            rollbackRules.add(new NoRollbackRuleAttribute(typeName.strip()));
        }
    }
}
|
TxAdviceBeanDefinitionParser
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/lucene/search/uhighlight/QueryMaxAnalyzedOffset.java
|
{
"start": 529,
"end": 1245
}
|
/**
 * Holds the effective "maximum analyzed offset" for a highlight request.
 *
 * <p>A {@code null} request yields no limit object at all; a negative request is
 * interpreted as "fall back to the index-level maximum"; any non-negative value
 * is used verbatim.
 */
class ____ {

    /** The resolved, always-valid limit (never negative after creation). */
    private final int queryMaxAnalyzedOffset;

    private ____(final int queryMaxAnalyzedOffset) {
        this.queryMaxAnalyzedOffset = queryMaxAnalyzedOffset;
    }

    /**
     * Resolves the request-level limit against the index-level default.
     *
     * @param queryMaxAnalyzedOffset requested limit; {@code null} means "no limit object",
     *                               negative means "use the index maximum"
     * @param indexMaxAnalyzedOffset the index-level maximum used as the fallback
     * @return the resolved limit, or {@code null} when no limit was requested
     */
    public static ____ create(final Integer queryMaxAnalyzedOffset, final int indexMaxAnalyzedOffset) {
        if (queryMaxAnalyzedOffset == null) {
            return null;
        }
        final int effective = (queryMaxAnalyzedOffset >= 0) ? queryMaxAnalyzedOffset : indexMaxAnalyzedOffset;
        return new ____(effective);
    }

    /** Returns the resolved limit; only callable on a non-null instance. */
    public int getNotNull() {
        return queryMaxAnalyzedOffset;
    }
}
|
QueryMaxAnalyzedOffset
|
java
|
apache__camel
|
components/camel-quickfix/src/test/java/org/apache/camel/component/quickfixj/QuickfixjComponentTest.java
|
{
"start": 2758,
"end": 21407
}
|
class ____ {
// Per-test QFJ settings files and the directories that contain them.
private File settingsFile;
private File settingsFile2;
private File tempdir;
private File tempdir2;
// Original TCCL, saved so tearDown() can restore it after setUp() installs a
// URLClassLoader that can resolve the generated settings files by name.
private ClassLoader contextClassLoader;
private SessionID sessionID;
private SessionSettings settings;
private QuickfixjComponent component;
private CamelContext camelContext;
// Engine-level QFJ plugins, populated only by setUpComponent(true).
private MessageFactory engineMessageFactory;
private MessageStoreFactory engineMessageStoreFactory;
private LogFactory engineLogFactory;
// Copies the three identifying components of a SessionID into the settings,
// which is how QFJ expects a session to be declared in configuration.
private void setSessionID(SessionSettings sessionSettings, SessionID sessionID) {
sessionSettings.setString(sessionID, SessionSettings.BEGINSTRING, sessionID.getBeginString());
sessionSettings.setString(sessionID, SessionSettings.SENDERCOMPID, sessionID.getSenderCompID());
sessionSettings.setString(sessionID, SessionSettings.TARGETCOMPID, sessionID.getTargetCompID());
}
/**
 * Builds a quickfix endpoint URI for the given settings file, optionally
 * scoped to a single session via the {@code sessionID} query parameter.
 */
private String getEndpointUri(final String configFilename, SessionID sid) {
    StringBuilder endpointUri = new StringBuilder("quickfix:").append(configFilename);
    if (sid != null) {
        endpointUri.append("?sessionID=").append(sid);
    }
    return endpointUri.toString();
}
@BeforeEach
public void setUp() throws Exception {
// Create two settings files; their parent dirs go onto a context class loader
// so the QFJ engine can resolve the files by simple name.
settingsFile = File.createTempFile("quickfixj_test_", ".cfg");
settingsFile2 = File.createTempFile("quickfixj_test2_", ".cfg");
tempdir = settingsFile.getParentFile();
// NOTE(review): tempdir2 is derived from settingsFile, not settingsFile2 —
// harmless while both temp files share the system temp dir, but confirm intent.
tempdir2 = settingsFile.getParentFile();
URL[] urls = new URL[] { tempdir.toURI().toURL(), tempdir2.toURI().toURL() };
sessionID = new SessionID(FixVersions.BEGINSTRING_FIX44, "FOO", "BAR");
settings = new SessionSettings();
// In-VM pipe transport keeps these tests off real network sockets.
settings.setString(Acceptor.SETTING_SOCKET_ACCEPT_PROTOCOL, ProtocolFactory.getTypeString(ProtocolFactory.VM_PIPE));
settings.setString(Initiator.SETTING_SOCKET_CONNECT_PROTOCOL, ProtocolFactory.getTypeString(ProtocolFactory.VM_PIPE));
settings.setBool(Session.SETTING_USE_DATA_DICTIONARY, false);
setSessionID(settings, sessionID);
// Save the original TCCL for restoration in tearDown().
contextClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader testClassLoader = new URLClassLoader(urls, contextClassLoader);
Thread.currentThread().setContextClassLoader(testClassLoader);
}
// Convenience overload: set up the component without injecting QFJ plugins.
private void setUpComponent() throws IOException, NoSuchMethodException {
setUpComponent(false);
}
// Creates a fresh CamelContext with a QuickfixjComponent registered under the
// "quickfix" scheme; optionally wires engine-level QFJ plugin instances so the
// tests can assert they are propagated.
private void setUpComponent(boolean injectQfjPlugins) throws NoSuchMethodException {
camelContext = new DefaultCamelContext();
component = new QuickfixjComponent();
component.setCamelContext(camelContext);
camelContext.addComponent("quickfix", component);
if (injectQfjPlugins) {
engineMessageFactory = new DefaultMessageFactory();
engineMessageStoreFactory = new MemoryStoreFactory();
engineLogFactory = new ScreenLogFactory();
component.setMessageFactory(engineMessageFactory);
component.setMessageStoreFactory(engineMessageStoreFactory);
component.setLogFactory(engineLogFactory);
}
// No engines may exist before any endpoint has been created.
assertThat(component.getEngines().size(), is(0));
// Register a String -> SessionID converter used by endpoint URI parsing.
Method converterMethod = QuickfixjConverters.class.getMethod("toSessionID", new Class<?>[] { String.class });
camelContext.getTypeConverterRegistry().addTypeConverter(SessionID.class, String.class,
new StaticMethodTypeConverter(converterMethod, false));
}
@AfterEach
public void tearDown() {
// Restore the TCCL swapped in by setUp(), then stop component and context
// (null-guarded: some tests fail before creating them).
Thread.currentThread().setContextClassLoader(contextClassLoader);
if (component != null) {
component.stop();
}
if (camelContext != null) {
camelContext.stop();
}
}
// Endpoints created BEFORE the component starts are backed by "provisional"
// engines (initialized but not started); starting the context promotes them
// all to started engines.
@Test
public void createEndpointBeforeComponentStart() throws Exception {
setUpComponent();
settings.setString(sessionID, SessionFactory.SETTING_CONNECTION_TYPE, SessionFactory.INITIATOR_CONNECTION_TYPE);
settings.setLong(sessionID, Initiator.SETTING_SOCKET_CONNECT_PORT, 1234);
writeSettings(settings, true);
// Should use cached QFJ engine
Endpoint e1 = component.createEndpoint(getEndpointUri(settingsFile.getName(), null));
assertThat(component.getProvisionalEngines().size(), is(1));
assertThat(component.getProvisionalEngines().get(settingsFile.getName()), is(notNullValue()));
assertThat(component.getProvisionalEngines().get(settingsFile.getName()).isInitialized(), is(true));
assertThat(component.getProvisionalEngines().get(settingsFile.getName()).isStarted(), is(false));
assertThat(component.getEngines().size(), is(0));
assertThat(((QuickfixjEndpoint) e1).getSID(), is(nullValue()));
writeSettings(settings, false);
// Should use cached QFJ engine
Endpoint e2 = component.createEndpoint(getEndpointUri(settingsFile2.getName(), null));
assertThat(component.getProvisionalEngines().size(), is(2));
assertThat(component.getProvisionalEngines().get(settingsFile.getName()), is(notNullValue()));
assertThat(component.getProvisionalEngines().get(settingsFile.getName()).isInitialized(), is(true));
assertThat(component.getProvisionalEngines().get(settingsFile.getName()).isStarted(), is(false));
assertThat(component.getEngines().size(), is(0));
assertThat(((QuickfixjEndpoint) e2).getSID(), is(nullValue()));
// will start the component
camelContext.start();
assertThat(component.getProvisionalEngines().size(), is(0));
assertThat(component.getEngines().size(), is(2));
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(true));
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(true));
// Move these too an endpoint testcase if one exists
assertThat(e1.isSingleton(), is(true));
assertThat(((MultipleConsumersSupport) e1).isMultipleConsumersSupported(), is(true));
assertThat(e2.isSingleton(), is(true));
assertThat(((MultipleConsumersSupport) e2).isMultipleConsumersSupported(), is(true));
}
// Endpoints created AFTER the component starts go straight to started engines;
// no provisional engines are involved, and the engine for one settings file is
// shared between endpoints with and without an explicit session ID.
@Test
public void createEndpointAfterComponentStart() throws Exception {
setUpComponent();
settings.setString(sessionID, SessionFactory.SETTING_CONNECTION_TYPE, SessionFactory.INITIATOR_CONNECTION_TYPE);
settings.setLong(sessionID, Initiator.SETTING_SOCKET_CONNECT_PORT, 1234);
writeSettings();
// will start the component
camelContext.start();
Endpoint e1 = component.createEndpoint(getEndpointUri(settingsFile.getName(), null));
assertThat(component.getEngines().size(), is(1));
assertThat(component.getEngines().get(settingsFile.getName()), is(notNullValue()));
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(true));
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(true));
assertThat(component.getProvisionalEngines().size(), is(0));
assertThat(((QuickfixjEndpoint) e1).getSID(), is(nullValue()));
Endpoint e2 = component.createEndpoint(getEndpointUri(settingsFile.getName(), sessionID));
assertThat(component.getEngines().size(), is(1));
assertThat(component.getEngines().get(settingsFile.getName()), is(notNullValue()));
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(true));
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(true));
assertThat(component.getProvisionalEngines().size(), is(0));
assertThat(((QuickfixjEndpoint) e2).getSID(), is(sessionID));
}
// With lazyCreateEngines enabled on the component, an engine exists after
// endpoint creation but stays uninitialized/unstarted until the endpoint is
// first used (ensureInitialized()).
@Test
public void createEnginesLazily() throws Exception {
setUpComponent();
component.setLazyCreateEngines(true);
settings.setString(sessionID, SessionFactory.SETTING_CONNECTION_TYPE, SessionFactory.INITIATOR_CONNECTION_TYPE);
settings.setLong(sessionID, Initiator.SETTING_SOCKET_CONNECT_PORT, 1234);
writeSettings();
// start the component
camelContext.start();
QuickfixjEndpoint e1 = (QuickfixjEndpoint) component.createEndpoint(getEndpointUri(settingsFile.getName(), null));
assertThat(component.getEngines().size(), is(1));
assertThat(component.getProvisionalEngines().size(), is(0));
assertThat(component.getEngines().get(settingsFile.getName()), is(notNullValue()));
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(false));
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(false));
e1.ensureInitialized();
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(true));
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(true));
}
// An endpoint-level lazyCreateEngine=true overrides a non-lazy component; an
// endpoint without the option inherits the component's (eager) setting.
@Test
public void createEndpointsInNonLazyComponent() throws Exception {
setUpComponent();
// configuration will be done per endpoint
component.setLazyCreateEngines(false);
settings.setString(sessionID, SessionFactory.SETTING_CONNECTION_TYPE, SessionFactory.INITIATOR_CONNECTION_TYPE);
settings.setLong(sessionID, Initiator.SETTING_SOCKET_CONNECT_PORT, 1234);
writeSettings();
// will start the component
camelContext.start();
QuickfixjEndpoint e1 = (QuickfixjEndpoint) component
.createEndpoint(getEndpointUri(settingsFile.getName(), null) + "?lazyCreateEngine=true");
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(false));
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(false));
assertThat(component.getEngines().get(settingsFile.getName()).isLazy(), is(true));
e1.ensureInitialized();
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(true));
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(true));
writeSettings(settings, false);
// will use connector's lazyCreateEngines setting
component.createEndpoint(getEndpointUri(settingsFile2.getName(), sessionID));
assertThat(component.getEngines().get(settingsFile2.getName()).isInitialized(), is(true));
assertThat(component.getEngines().get(settingsFile2.getName()).isStarted(), is(true));
assertThat(component.getEngines().get(settingsFile2.getName()).isLazy(), is(false));
}
// Mirror of the previous test: a lazy component is inherited by default, and an
// endpoint-level lazyCreateEngine=false overrides it back to eager.
@Test
public void createEndpointsInLazyComponent() throws Exception {
setUpComponent();
component.setLazyCreateEngines(true);
settings.setString(sessionID, SessionFactory.SETTING_CONNECTION_TYPE, SessionFactory.INITIATOR_CONNECTION_TYPE);
settings.setLong(sessionID, Initiator.SETTING_SOCKET_CONNECT_PORT, 1234);
writeSettings();
// will start the component
camelContext.start();
// will use connector's lazyCreateEngines setting
QuickfixjEndpoint e1 = (QuickfixjEndpoint) component.createEndpoint(getEndpointUri(settingsFile.getName(), null));
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(false));
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(false));
assertThat(component.getEngines().get(settingsFile.getName()).isLazy(), is(true));
e1.ensureInitialized();
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(true));
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(true));
writeSettings(settings, false);
// will override connector's lazyCreateEngines setting
component.createEndpoint(getEndpointUri(settingsFile2.getName(), sessionID) + "&lazyCreateEngine=false");
assertThat(component.getEngines().get(settingsFile2.getName()).isInitialized(), is(true));
assertThat(component.getEngines().get(settingsFile2.getName()).isStarted(), is(true));
assertThat(component.getEngines().get(settingsFile2.getName()).isLazy(), is(false));
}
// Stopping the component stops its engines but leaves them initialized, i.e.
// ready to be restarted. Uses a SessionCreated event latch to know the engine
// actually came up before stopping.
@Test
public void componentStop() throws Exception {
setUpComponent();
settings.setString(sessionID, SessionFactory.SETTING_CONNECTION_TYPE, SessionFactory.INITIATOR_CONNECTION_TYPE);
settings.setLong(sessionID, Initiator.SETTING_SOCKET_CONNECT_PORT, 1234);
writeSettings();
Endpoint endpoint = component.createEndpoint(getEndpointUri(settingsFile.getName(), null));
final CountDownLatch latch = new CountDownLatch(1);
Consumer consumer = endpoint.createConsumer(new Processor() {
@Override
public void process(Exchange exchange) {
QuickfixjEventCategory eventCategory
= (QuickfixjEventCategory) exchange.getIn().getHeader(QuickfixjEndpoint.EVENT_CATEGORY_KEY);
if (eventCategory == QuickfixjEventCategory.SessionCreated) {
latch.countDown();
}
}
});
ServiceHelper.startService(consumer);
// Endpoint automatically starts the consumer
assertThat(((StatefulService) consumer).isStarted(), is(true));
// will start the component
camelContext.start();
assertTrue(latch.await(5000, TimeUnit.MILLISECONDS), "Session not created");
component.stop();
assertThat(component.getEngines().get(settingsFile.getName()).isStarted(), is(false));
// it should still be initialized (ready to start again)
assertThat(component.getEngines().get(settingsFile.getName()).isInitialized(), is(true));
}
@Test
public void messagePublication() throws Exception {
setUpComponent();
// Create settings file with both acceptor and initiator
SessionSettings settings = new SessionSettings();
settings.setString(Acceptor.SETTING_SOCKET_ACCEPT_PROTOCOL, ProtocolFactory.getTypeString(ProtocolFactory.VM_PIPE));
settings.setString(Initiator.SETTING_SOCKET_CONNECT_PROTOCOL, ProtocolFactory.getTypeString(ProtocolFactory.VM_PIPE));
settings.setBool(Session.SETTING_USE_DATA_DICTIONARY, false);
SessionID acceptorSessionID = new SessionID(FixVersions.BEGINSTRING_FIX44, "ACCEPTOR", "INITIATOR");
settings.setString(acceptorSessionID, SessionFactory.SETTING_CONNECTION_TYPE, SessionFactory.ACCEPTOR_CONNECTION_TYPE);
settings.setLong(acceptorSessionID, Acceptor.SETTING_SOCKET_ACCEPT_PORT, 1234);
setSessionID(settings, acceptorSessionID);
SessionID initiatorSessionID = new SessionID(FixVersions.BEGINSTRING_FIX44, "INITIATOR", "ACCEPTOR");
settings.setString(initiatorSessionID, SessionFactory.SETTING_CONNECTION_TYPE,
SessionFactory.INITIATOR_CONNECTION_TYPE);
settings.setLong(initiatorSessionID, Initiator.SETTING_SOCKET_CONNECT_PORT, 1234);
settings.setLong(initiatorSessionID, Initiator.SETTING_RECONNECT_INTERVAL, 1);
setSessionID(settings, initiatorSessionID);
writeSettings(settings, true);
Endpoint endpoint = component.createEndpoint(getEndpointUri(settingsFile.getName(), null));
// Start the component and wait for the FIX sessions to be logged on
final CountDownLatch logonLatch = new CountDownLatch(2);
final CountDownLatch messageLatch = new CountDownLatch(2);
Consumer consumer = endpoint.createConsumer(new Processor() {
@Override
public void process(Exchange exchange) {
QuickfixjEventCategory eventCategory
= (QuickfixjEventCategory) exchange.getIn().getHeader(QuickfixjEndpoint.EVENT_CATEGORY_KEY);
if (eventCategory == QuickfixjEventCategory.SessionLogon) {
logonLatch.countDown();
} else if (eventCategory == QuickfixjEventCategory.AppMessageReceived) {
messageLatch.countDown();
}
}
});
ServiceHelper.startService(consumer);
// will start the component
camelContext.start();
assertTrue(logonLatch.await(5000, TimeUnit.MILLISECONDS), "Session not created");
Endpoint producerEndpoint = component.createEndpoint(getEndpointUri(settingsFile.getName(), acceptorSessionID));
Producer producer = producerEndpoint.createProducer();
// FIX message to send
Email email = new Email(new EmailThreadID("ID"), new EmailType(EmailType.NEW), new Subject("Test"));
Exchange exchange = producer.getEndpoint().createExchange(ExchangePattern.InOnly);
exchange.getIn().setBody(email);
producer.process(exchange);
// Produce with no session ID specified, session ID must be in message
Producer producer2 = endpoint.createProducer();
email.getHeader().setString(SenderCompID.FIELD, acceptorSessionID.getSenderCompID());
email.getHeader().setString(TargetCompID.FIELD, acceptorSessionID.getTargetCompID());
producer2.process(exchange);
assertTrue(messageLatch.await(5000, TimeUnit.MILLISECONDS), "Messages not received");
}
@Test
public void userSpecifiedQuickfixjPlugins() throws Exception {
setUpComponent(true);
settings.setString(sessionID, SessionFactory.SETTING_CONNECTION_TYPE, SessionFactory.INITIATOR_CONNECTION_TYPE);
settings.setLong(sessionID, Initiator.SETTING_SOCKET_CONNECT_PORT, 1234);
writeSettings();
component.createEndpoint(getEndpointUri(settingsFile.getName(), null));
// will start the component
camelContext.start();
assertThat(component.getEngines().size(), is(1));
QuickfixjEngine engine = component.getEngines().values().iterator().next();
assertThat(engine.getMessageFactory(), is(engineMessageFactory));
assertThat(engine.getMessageStoreFactory(), is(engineMessageStoreFactory));
assertThat(engine.getLogFactory(), is(engineLogFactory));
}
private void writeSettings() throws IOException {
writeSettings(settings, true);
}
private void writeSettings(SessionSettings settings, boolean firstSettingsFile) throws IOException {
FileOutputStream settingsOut = new FileOutputStream(firstSettingsFile ? settingsFile : settingsFile2);
try {
settings.toStream(settingsOut);
} finally {
IOHelper.close(settingsOut);
}
}
}
|
QuickfixjComponentTest
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/configuration/internal/metadata/reader/AuditedPropertiesReader.java
|
{
"start": 22203,
"end": 32414
}
|
class ____ audited but some properties should be excluded
final NotAudited unVer = memberDetails.getDirectAnnotationUsage( NotAudited.class );
if ( ( unVer != null
&& !overriddenAuditedProperties.contains( memberDetails ) )
|| overriddenNotAuditedProperties.contains( memberDetails ) ) {
return false;
}
else {
// if the optimistic locking field has to be unversioned and the current property
// is the optimistic locking field, don't audit it
if ( metadataBuildingContext.getConfiguration().isDoNotAuditOptimisticLockingField() ) {
final Version jpaVer = memberDetails.getDirectAnnotationUsage( Version.class );
if ( jpaVer != null ) {
return false;
}
}
}
final String propertyName = propertyNamePrefix + memberDetails.resolveAttributeName();
final String modifiedFlagsSuffix = metadataBuildingContext.getConfiguration().getModifiedFlagsSuffix();
if ( !this.checkAudited( memberDetails, propertyData,propertyName, allClassAudited, modifiedFlagsSuffix ) ) {
return false;
}
validateLobMappingSupport( memberDetails );
propertyData.setName( propertyName );
propertyData.setBeanName( memberDetails.resolveAttributeName() );
propertyData.setAccessType( accessType );
addPropertyJoinTables( memberDetails, propertyData );
addPropertyCollectionAuditTable( memberDetails, propertyData );
addPropertyAuditingOverrides( memberDetails, propertyData );
if ( !processPropertyAuditingOverrides( memberDetails, propertyData ) ) {
// not audited due to AuditOverride annotation
return false;
}
addPropertyMapKey( memberDetails, propertyData );
setPropertyAuditMappedBy( memberDetails, propertyData );
setPropertyRelationMappedBy( memberDetails, propertyData );
return true;
}
private void addFromComponentProperty(
Property property,
String accessType,
Component propertyValue,
Audited allClassAudited) {
final ComponentAuditingData componentData = new ComponentAuditingData();
final boolean isAudited = fillPropertyData( property, componentData, accessType, allClassAudited );
final PersistentPropertiesSource componentPropertiesSource;
if ( propertyValue.isDynamic() ) {
final ClassDetails mapClassDetails = metadataBuildingContext.getClassDetailsRegistry()
.getClassDetails( Map.class.getName() );
componentPropertiesSource = PersistentPropertiesSource.forComponent( propertyValue, mapClassDetails, true );
}
else {
componentPropertiesSource = PersistentPropertiesSource.forComponent( metadataBuildingContext, propertyValue );
}
final ComponentAuditedPropertiesReader audPropReader = new ComponentAuditedPropertiesReader(
metadataBuildingContext,
componentPropertiesSource,
componentData,
propertyNamePrefix + MappingTools.createComponentPrefix( property.getName() )
);
audPropReader.read( allClassAudited );
if ( isAudited ) {
// Now we know that the property is audited
auditedPropertiesHolder.addPropertyAuditingData( property.getName(), componentData );
}
}
private void addFromNotComponentProperty(Property property, String accessType, Audited allClassAudited) {
final PropertyAuditingData propertyData = new PropertyAuditingData();
final boolean isAudited = fillPropertyData( property, propertyData, accessType, allClassAudited );
if ( isAudited ) {
// Now we know that the property is audited
auditedPropertiesHolder.addPropertyAuditingData( property.getName(), propertyData );
}
}
/**
* Checks if a property is audited and if yes, fills all of its data.
*
* @param property Property to check.
* @param propertyData Property data, on which to set this property's modification store.
* @param accessType Access type for the property.
*
* @return False if this property is not audited.
*/
private boolean fillPropertyData(
Property property,
PropertyAuditingData propertyData,
String accessType,
Audited allClassAudited) {
final String propertyName = propertyNamePrefix + property.getName();
final String modifiedFlagsSuffix = metadataBuildingContext.getConfiguration().getModifiedFlagsSuffix();
if ( !this.checkAudited( property, propertyData,propertyName, allClassAudited, modifiedFlagsSuffix ) ) {
return false;
}
propertyData.setName( propertyName );
propertyData.setBeanName( property.getName() );
propertyData.setAccessType( accessType );
propertyData.setJoinTable( DEFAULT_AUDIT_JOIN_TABLE );
if ( !processPropertyAuditingOverrides( property, propertyData ) ) {
// not audited due to AuditOverride annotation
return false;
}
return true;
}
private void validateLobMappingSupport(MemberDetails memberDetails) {
// HHH-9834 - Sanity check
try {
if ( memberDetails.hasDirectAnnotationUsage( ElementCollection.class ) ) {
if ( memberDetails.hasDirectAnnotationUsage( Lob.class ) ) {
if ( !memberDetails.getType().isImplementor( Map.class ) ) {
throw new EnversMappingException(
"@ElementCollection combined with @Lob is only supported for Map collection types."
);
}
}
}
}
catch ( EnversMappingException e ) {
throw new HibernateException(
String.format(
Locale.ENGLISH,
"Invalid mapping in [%s] for property [%s]",
memberDetails.getDeclaringType().getName(),
memberDetails.resolveAttributeName()
),
e
);
}
}
protected boolean checkAudited(
MemberDetails memberDetails,
PropertyAuditingData propertyData, String propertyName,
Audited allClassAudited, String modifiedFlagSuffix) {
// Checking if this property is explicitly audited or if all properties are.
Audited aud = ( memberDetails.hasDirectAnnotationUsage( Audited.class ) )
? memberDetails.getDirectAnnotationUsage( Audited.class )
: allClassAudited;
if ( aud == null
&& overriddenAuditedProperties.contains( memberDetails )
&& !overriddenNotAuditedProperties.contains( memberDetails ) ) {
// Assigning @Audited defaults. If anyone needs to customize those values in the future,
// appropriate fields shall be added to @AuditOverride annotation.
aud = DEFAULT_AUDITED;
}
if ( aud != null ) {
propertyData.setRelationTargetAuditMode( aud.targetAuditMode() );
propertyData.setRelationTargetNotFoundAction( getRelationNotFoundAction( memberDetails, allClassAudited ) );
propertyData.setUsingModifiedFlag( checkUsingModifiedFlag( aud ) );
propertyData.setModifiedFlagName( ModifiedColumnNameResolver.getName( propertyName, modifiedFlagSuffix ) );
if ( !StringTools.isEmpty( aud.modifiedColumnName() ) ) {
propertyData.setExplicitModifiedFlagName( aud.modifiedColumnName() );
}
return true;
}
else {
return false;
}
}
protected boolean checkAudited(
Property property,
PropertyAuditingData propertyData, String propertyName,
Audited allClassAudited, String modifiedFlagSuffix) {
// Checking if this property is explicitly audited or if all properties are.
if ( allClassAudited != null ) {
propertyData.setRelationTargetAuditMode( allClassAudited.targetAuditMode() );
propertyData.setRelationTargetNotFoundAction(
allClassAudited == null ?
RelationTargetNotFoundAction.DEFAULT :
allClassAudited.targetNotFoundAction()
);
propertyData.setUsingModifiedFlag( checkUsingModifiedFlag( allClassAudited ) );
propertyData.setModifiedFlagName( ModifiedColumnNameResolver.getName( propertyName, modifiedFlagSuffix ) );
if ( !StringTools.isEmpty( allClassAudited.modifiedColumnName() ) ) {
propertyData.setExplicitModifiedFlagName( allClassAudited.modifiedColumnName() );
}
return true;
}
else {
return false;
}
}
protected boolean checkUsingModifiedFlag(Audited aud) {
// HHH-10468
if ( metadataBuildingContext.getConfiguration().hasSettingForUseModifiedFlag() ) {
// HHH-10468
// Modify behavior so that if the global setting has been set by user properties, then
// the audit behavior should be a disjunction between the global setting and the field
// annotation. This allows the annotation to take precedence when the global value is
// false and for the global setting to take precedence when true.
return metadataBuildingContext.getConfiguration().isModifiedFlagsEnabled() || aud.withModifiedFlag();
}
// no global setting enabled, use the annotation's value only.
return aud.withModifiedFlag();
}
private void setPropertyRelationMappedBy(MemberDetails memberDetails, PropertyAuditingData propertyData) {
final OneToMany oneToMany = memberDetails.getDirectAnnotationUsage( OneToMany.class );
if ( oneToMany != null && StringHelper.isNotEmpty( oneToMany.mappedBy() ) ) {
propertyData.setRelationMappedBy( oneToMany.mappedBy() );
}
}
private void setPropertyAuditMappedBy(MemberDetails memberDetails, PropertyAuditingData propertyData) {
final AuditMappedBy auditMappedBy = memberDetails.getDirectAnnotationUsage( AuditMappedBy.class );
if ( auditMappedBy != null ) {
propertyData.setAuditMappedBy( auditMappedBy.mappedBy() );
if ( StringHelper.isNotEmpty( auditMappedBy.positionMappedBy() ) ) {
propertyData.setPositionMappedBy( auditMappedBy.positionMappedBy() );
}
}
}
private void addPropertyMapKey(MemberDetails memberDetails, PropertyAuditingData propertyData) {
final MapKey mapKey = memberDetails.getDirectAnnotationUsage( MapKey.class );
if ( mapKey != null ) {
propertyData.setMapKey( mapKey.name() );
}
else {
final MapKeyEnumerated mapKeyEnumerated = memberDetails.getDirectAnnotationUsage( MapKeyEnumerated.class );
if ( mapKeyEnumerated != null ) {
propertyData.setMapKeyEnumType( mapKeyEnumerated.value() );
}
}
}
private void addPropertyJoinTables(MemberDetails memberDetails, PropertyAuditingData propertyData) {
// The AuditJoinTable annotation source will follow the following priority rules
// 1. Use the override if one is specified
// 2. Use the site annotation if one is specified
// 3. Use the default if neither are specified
//
// The prime directive for (1) is so that when users in a subclass use @AuditOverride(s)
// the join-table specified there should have a higher priority in the event the
// super-
|
is
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunction.java
|
{
"start": 1050,
"end": 6619
}
|
class ____ implements AggregatorFunction {
private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of(
new IntermediateStateDesc("max", ElementType.BYTES_REF),
new IntermediateStateDesc("seen", ElementType.BOOLEAN) );
private final DriverContext driverContext;
private final MaxBytesRefAggregator.SingleState state;
private final List<Integer> channels;
public MaxBytesRefAggregatorFunction(DriverContext driverContext, List<Integer> channels,
MaxBytesRefAggregator.SingleState state) {
this.driverContext = driverContext;
this.channels = channels;
this.state = state;
}
public static MaxBytesRefAggregatorFunction create(DriverContext driverContext,
List<Integer> channels) {
return new MaxBytesRefAggregatorFunction(driverContext, channels, MaxBytesRefAggregator.initSingle(driverContext));
}
public static List<IntermediateStateDesc> intermediateStateDesc() {
return INTERMEDIATE_STATE_DESC;
}
@Override
public int intermediateBlockCount() {
return INTERMEDIATE_STATE_DESC.size();
}
@Override
public void addRawInput(Page page, BooleanVector mask) {
if (mask.allFalse()) {
// Entire page masked away
} else if (mask.allTrue()) {
addRawInputNotMasked(page);
} else {
addRawInputMasked(page, mask);
}
}
private void addRawInputMasked(Page page, BooleanVector mask) {
BytesRefBlock valueBlock = page.getBlock(channels.get(0));
BytesRefVector valueVector = valueBlock.asVector();
if (valueVector == null) {
addRawBlock(valueBlock, mask);
return;
}
addRawVector(valueVector, mask);
}
private void addRawInputNotMasked(Page page) {
BytesRefBlock valueBlock = page.getBlock(channels.get(0));
BytesRefVector valueVector = valueBlock.asVector();
if (valueVector == null) {
addRawBlock(valueBlock);
return;
}
addRawVector(valueVector);
}
private void addRawVector(BytesRefVector valueVector) {
BytesRef valueScratch = new BytesRef();
for (int valuesPosition = 0; valuesPosition < valueVector.getPositionCount(); valuesPosition++) {
BytesRef valueValue = valueVector.getBytesRef(valuesPosition, valueScratch);
MaxBytesRefAggregator.combine(state, valueValue);
}
}
private void addRawVector(BytesRefVector valueVector, BooleanVector mask) {
BytesRef valueScratch = new BytesRef();
for (int valuesPosition = 0; valuesPosition < valueVector.getPositionCount(); valuesPosition++) {
if (mask.getBoolean(valuesPosition) == false) {
continue;
}
BytesRef valueValue = valueVector.getBytesRef(valuesPosition, valueScratch);
MaxBytesRefAggregator.combine(state, valueValue);
}
}
private void addRawBlock(BytesRefBlock valueBlock) {
BytesRef valueScratch = new BytesRef();
for (int p = 0; p < valueBlock.getPositionCount(); p++) {
int valueValueCount = valueBlock.getValueCount(p);
if (valueValueCount == 0) {
continue;
}
int valueStart = valueBlock.getFirstValueIndex(p);
int valueEnd = valueStart + valueValueCount;
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
BytesRef valueValue = valueBlock.getBytesRef(valueOffset, valueScratch);
MaxBytesRefAggregator.combine(state, valueValue);
}
}
}
private void addRawBlock(BytesRefBlock valueBlock, BooleanVector mask) {
BytesRef valueScratch = new BytesRef();
for (int p = 0; p < valueBlock.getPositionCount(); p++) {
if (mask.getBoolean(p) == false) {
continue;
}
int valueValueCount = valueBlock.getValueCount(p);
if (valueValueCount == 0) {
continue;
}
int valueStart = valueBlock.getFirstValueIndex(p);
int valueEnd = valueStart + valueValueCount;
for (int valueOffset = valueStart; valueOffset < valueEnd; valueOffset++) {
BytesRef valueValue = valueBlock.getBytesRef(valueOffset, valueScratch);
MaxBytesRefAggregator.combine(state, valueValue);
}
}
}
@Override
public void addIntermediateInput(Page page) {
assert channels.size() == intermediateBlockCount();
assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size();
Block maxUncast = page.getBlock(channels.get(0));
if (maxUncast.areAllValuesNull()) {
return;
}
BytesRefVector max = ((BytesRefBlock) maxUncast).asVector();
assert max.getPositionCount() == 1;
Block seenUncast = page.getBlock(channels.get(1));
if (seenUncast.areAllValuesNull()) {
return;
}
BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
assert seen.getPositionCount() == 1;
BytesRef maxScratch = new BytesRef();
MaxBytesRefAggregator.combineIntermediate(state, max.getBytesRef(0, maxScratch), seen.getBoolean(0));
}
@Override
public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) {
state.toIntermediate(blocks, offset, driverContext);
}
@Override
public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) {
blocks[offset] = MaxBytesRefAggregator.evaluateFinal(state, driverContext);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append(getClass().getSimpleName()).append("[");
sb.append("channels=").append(channels);
sb.append("]");
return sb.toString();
}
@Override
public void close() {
state.close();
}
}
|
MaxBytesRefAggregatorFunction
|
java
|
spring-projects__spring-security
|
core/src/test/java/org/springframework/security/core/annotation/UniqueSecurityAnnotationScannerTests.java
|
{
"start": 22430,
"end": 22622
}
|
class ____ implements AnnotationOnInterfaceMethod {
@Override
@PreAuthorize("twentyeight")
public String method() {
return "ok";
}
}
private
|
ClassMethodOverridingAnnotationOnMethod
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_3361/Issue3361Mapper.java
|
{
"start": 1959,
"end": 2243
}
|
class ____ {
private final Long anotherAttribute;
public OtherSource(Long anotherAttribute) {
this.anotherAttribute = anotherAttribute;
}
public Long getAnotherAttribute() {
return anotherAttribute;
}
}
}
|
OtherSource
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/enumcollection/EnumIsMemberTest.java
|
{
"start": 1542,
"end": 1915
}
|
enum ____
query.where( builder.isMember( User.Role.Admin, roles ) );
TypedQuery<User> typedQuery = entityManager.createQuery( query );
List<User> users = typedQuery.getResultList();
assertEquals( 1, users.size() );
} );
scope.inTransaction( entityManager -> {
// delete
entityManager.remove( entityManager.find(User.class, 1L ) );
} );
}
}
|
parameter
|
java
|
quarkusio__quarkus
|
core/processor/src/main/java/io/quarkus/annotation/processor/documentation/config/model/AbstractConfigItem.java
|
{
"start": 2100,
"end": 2152
}
|
interface ____ {
String property();
}
}
|
Path
|
java
|
quarkusio__quarkus
|
extensions/smallrye-fault-tolerance/deployment/src/main/java/io/quarkus/smallrye/faulttolerance/deployment/FaultToleranceMethodSearch.java
|
{
"start": 2549,
"end": 3995
}
|
class ____ actually declares the guarded method (can be a supertype of bean class)
* @param name name of the fallback method
* @param parameterTypes parameter types of the guarded method
* @param returnType return type of the guarded method
* @return the fallback method or {@code null} if none exists
*/
MethodInfo findFallbackMethod(ClassInfo beanClass, ClassInfo declaringClass,
String name, Type[] parameterTypes, Type returnType) {
Set<MethodInfo> result = findMethod(beanClass, declaringClass, name, parameterTypes, returnType, false);
return result.isEmpty() ? null : result.iterator().next();
}
/**
* Finds a set of fallback methods with exception parameter for given guarded method. If the guarded method
* is present on given {@code beanClass} and is actually declared by given {@code declaringClass} and has given
* {@code parameterTypes} and {@code returnType}, then fallback methods of given {@code name}, with parameter types
* and return type matching the parameter types and return type of the guarded method, and with one additional
* parameter assignable to {@code Throwable} at the end of parameter list, is searched for on the {@code beanClass}
* and its superclasses and superinterfaces, according to the specification rules. Returns an empty set if no
* matching fallback method exists.
*
* @param beanClass the
|
that
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/aggregate/window/buffers/WindowBuffer.java
|
{
"start": 3382,
"end": 4927
}
|
interface ____ extends Serializable {
/**
* Creates a {@link WindowBuffer} that buffers elements in memory before flushing.
*
* @param operatorOwner the owner of the operator
* @param memoryManager the manager that governs memory by Flink framework
* @param memorySize the managed memory size can be used by this operator
* @param runtimeContext the current {@link RuntimeContext}
* @param timerService the service to register event-time and processing-time timers
* @param stateBackend the state backend to accessing states
* @param windowState the window state to flush buffered data into.
* @param isEventTime indicates whether the operator works in event-time or processing-time
* mode, used for register corresponding timers.
* @param shiftTimeZone the shift timezone of the window
* @throws IOException thrown if the buffer can't be opened
*/
WindowBuffer create(
Object operatorOwner,
MemoryManager memoryManager,
long memorySize,
RuntimeContext runtimeContext,
WindowTimerService<Long> timerService,
KeyedStateBackend<RowData> stateBackend,
WindowState<Long> windowState,
boolean isEventTime,
ZoneId shiftTimeZone)
throws Exception;
}
/** A factory that creates a {@link WindowBuffer}. */
@FunctionalInterface
|
Factory
|
java
|
spring-projects__spring-framework
|
spring-web/src/test/java/org/springframework/web/cors/reactive/CorsWebFilterTests.java
|
{
"start": 1906,
"end": 6803
}
|
class ____ {
private CorsWebFilter filter;
private final CorsConfiguration config = new CorsConfiguration();
@BeforeEach
void setup() {
config.setAllowedOrigins(Arrays.asList("https://domain1.com", "https://domain2.com"));
config.setAllowedMethods(Arrays.asList("GET", "POST"));
config.setAllowedHeaders(Arrays.asList("header1", "header2"));
config.setExposedHeaders(Arrays.asList("header3", "header4"));
config.setMaxAge(123L);
config.setAllowCredentials(false);
filter = new CorsWebFilter(r -> config);
}
@Test
void nonCorsRequest() {
WebFilterChain filterChain = filterExchange -> {
try {
HttpHeaders headers = filterExchange.getResponse().getHeaders();
assertThat(headers.getFirst(ACCESS_CONTROL_ALLOW_ORIGIN)).isNull();
assertThat(headers.getFirst(ACCESS_CONTROL_EXPOSE_HEADERS)).isNull();
}
catch (AssertionError ex) {
return Mono.error(ex);
}
return Mono.empty();
};
MockServerWebExchange exchange = MockServerWebExchange.from(
MockServerHttpRequest
.get("https://domain1.com/test.html")
.header(HOST, "domain1.com"));
this.filter.filter(exchange, filterChain).block();
}
@Test
void sameOriginRequest() {
WebFilterChain filterChain = filterExchange -> {
try {
HttpHeaders headers = filterExchange.getResponse().getHeaders();
assertThat(headers.getFirst(ACCESS_CONTROL_ALLOW_ORIGIN)).isNull();
assertThat(headers.getFirst(ACCESS_CONTROL_EXPOSE_HEADERS)).isNull();
}
catch (AssertionError ex) {
return Mono.error(ex);
}
return Mono.empty();
};
MockServerWebExchange exchange = MockServerWebExchange.from(
MockServerHttpRequest
.get("https://domain1.com/test.html")
.header(ORIGIN, "https://domain1.com"));
this.filter.filter(exchange, filterChain).block();
}
@Test
void validActualRequest() {
WebFilterChain filterChain = filterExchange -> {
try {
HttpHeaders headers = filterExchange.getResponse().getHeaders();
assertThat(headers.getFirst(ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo("https://domain2.com");
assertThat(headers.getFirst(ACCESS_CONTROL_EXPOSE_HEADERS)).isEqualTo("header3, header4");
}
catch (AssertionError ex) {
return Mono.error(ex);
}
return Mono.empty();
};
MockServerWebExchange exchange = MockServerWebExchange.from(
MockServerHttpRequest
.get("https://domain1.com/test.html")
.header(HOST, "domain1.com")
.header(ORIGIN, "https://domain2.com")
.header("header2", "foo"));
this.filter.filter(exchange, filterChain).block();
}
@Test
void invalidActualRequest() {
MockServerWebExchange exchange = MockServerWebExchange.from(
MockServerHttpRequest
.delete("https://domain1.com/test.html")
.header(HOST, "domain1.com")
.header(ORIGIN, "https://domain2.com")
.header("header2", "foo"));
WebFilterChain filterChain = filterExchange -> Mono.error(
new AssertionError("Invalid requests must not be forwarded to the filter chain"));
filter.filter(exchange, filterChain).block();
assertThat(exchange.getResponse().getHeaders().getFirst(ACCESS_CONTROL_ALLOW_ORIGIN)).isNull();
}
@Test
void validPreFlightRequest() {
MockServerWebExchange exchange = MockServerWebExchange.from(
MockServerHttpRequest
.options("https://domain1.com/test.html")
.header(HOST, "domain1.com")
.header(ORIGIN, "https://domain2.com")
.header(ACCESS_CONTROL_REQUEST_METHOD, HttpMethod.GET.name())
.header(ACCESS_CONTROL_REQUEST_HEADERS, "header1, header2")
);
WebFilterChain filterChain = filterExchange -> Mono.error(
new AssertionError("Preflight requests must not be forwarded to the filter chain"));
filter.filter(exchange, filterChain).block();
HttpHeaders headers = exchange.getResponse().getHeaders();
assertThat(headers.getFirst(ACCESS_CONTROL_ALLOW_ORIGIN)).isEqualTo("https://domain2.com");
assertThat(headers.getFirst(ACCESS_CONTROL_ALLOW_HEADERS)).isEqualTo("header1, header2");
assertThat(headers.getFirst(ACCESS_CONTROL_EXPOSE_HEADERS)).isEqualTo("header3, header4");
assertThat(Long.parseLong(headers.getFirst(ACCESS_CONTROL_MAX_AGE))).isEqualTo(123L);
}
@Test
void invalidPreFlightRequest() {
MockServerWebExchange exchange = MockServerWebExchange.from(
MockServerHttpRequest
.options("https://domain1.com/test.html")
.header(HOST, "domain1.com")
.header(ORIGIN, "https://domain2.com")
.header(ACCESS_CONTROL_REQUEST_METHOD, HttpMethod.DELETE.name())
.header(ACCESS_CONTROL_REQUEST_HEADERS, "header1, header2"));
WebFilterChain filterChain = filterExchange -> Mono.error(
new AssertionError("Preflight requests must not be forwarded to the filter chain"));
filter.filter(exchange, filterChain).block();
assertThat(exchange.getResponse().getHeaders().getFirst(ACCESS_CONTROL_ALLOW_ORIGIN)).isNull();
}
}
|
CorsWebFilterTests
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/TestJsonSerialize.java
|
{
"start": 1082,
"end": 1387
}
|
class ____
{
@JsonSerialize(as=ValueInterface.class)
public ValueClass getValue() {
return new ValueClass();
}
}
// This should indicate that static type be used for all fields
@JsonSerialize(typing=JsonSerialize.Typing.STATIC)
static
|
WrapperClassForAs
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClass.java
|
{
"start": 4589,
"end": 4982
}
|
class ____ this one
* @since 3.1.1
*/
ConfigurationClass(Class<?> clazz, ConfigurationClass importedBy) {
this.metadata = AnnotationMetadata.introspect(clazz);
this.resource = new DescriptiveResource(clazz.getName());
this.importedBy.add(importedBy);
}
/**
* Create a new {@link ConfigurationClass} with the given name.
* @param metadata the metadata for the underlying
|
importing
|
java
|
quarkusio__quarkus
|
integration-tests/opentelemetry-quickstart/src/test/java/io/quarkus/it/opentelemetry/OpenTelemetryIT.java
|
{
"start": 359,
"end": 521
}
|
class ____ extends OpenTelemetryTest {
@Override
protected void buildGlobalTelemetryInstance() {
// When running native tests the test
|
OpenTelemetryIT
|
java
|
apache__flink
|
flink-test-utils-parent/flink-test-utils/src/main/java/org/apache/flink/test/testdata/ConnectedComponentsData.java
|
{
"start": 1120,
"end": 4532
}
|
class ____ {
public static final String getEnumeratingVertices(int num) {
if (num < 1 || num > 1000000) {
throw new IllegalArgumentException();
}
StringBuilder bld = new StringBuilder(3 * num);
for (int i = 1; i <= num; i++) {
bld.append(i);
bld.append('\n');
}
return bld.toString();
}
/**
* Creates random edges such that even numbered vertices are connected with even numbered
* vertices and odd numbered vertices only with other odd numbered ones.
*/
public static final String getRandomOddEvenEdges(int numEdges, int numVertices, long seed) {
if (numVertices < 2
|| numVertices > 1000000
|| numEdges < numVertices
|| numEdges > 1000000) {
throw new IllegalArgumentException();
}
StringBuilder bld = new StringBuilder(5 * numEdges);
// first create the linear edge sequence even -> even and odd -> odd to make sure they are
// all in the same component
for (int i = 3; i <= numVertices; i++) {
bld.append(i - 2).append(' ').append(i).append('\n');
}
numEdges -= numVertices - 2;
Random r = new Random(seed);
for (int i = 1; i <= numEdges; i++) {
int evenOdd = r.nextBoolean() ? 1 : 0;
int source = r.nextInt(numVertices) + 1;
if (source % 2 != evenOdd) {
source--;
if (source < 1) {
source = 2;
}
}
int target = r.nextInt(numVertices) + 1;
if (target % 2 != evenOdd) {
target--;
if (target < 1) {
target = 2;
}
}
bld.append(source).append(' ').append(target).append('\n');
}
return bld.toString();
}
public static void checkOddEvenResult(BufferedReader result) throws IOException {
Pattern split = Pattern.compile(" ");
String line;
while ((line = result.readLine()) != null) {
String[] res = split.split(line);
Assert.assertEquals("Malformed result: Wrong number of tokens in line.", 2, res.length);
try {
int vertex = Integer.parseInt(res[0]);
int component = Integer.parseInt(res[1]);
int should = vertex % 2;
if (should == 0) {
should = 2;
}
Assert.assertEquals("Vertex is in wrong component.", should, component);
} catch (NumberFormatException e) {
Assert.fail("Malformed result.");
}
}
}
public static void checkOddEvenResult(List<Tuple2<Long, Long>> lines) throws IOException {
for (Tuple2<Long, Long> line : lines) {
try {
long vertex = line.f0;
long component = line.f1;
long should = vertex % 2;
if (should == 0) {
should = 2;
}
Assert.assertEquals("Vertex is in wrong component.", should, component);
} catch (NumberFormatException e) {
Assert.fail("Malformed result.");
}
}
}
private ConnectedComponentsData() {}
}
|
ConnectedComponentsData
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/ConsoleAppenderJira1002ShortThrowableLayoutMain.java
|
{
"start": 889,
"end": 1076
}
|
class ____ {
public static void main() {
ConsoleAppenderNoAnsiStyleLayoutMain.test("target/test-classes/log4j2-1002.xml");
}
}
|
ConsoleAppenderJira1002ShortThrowableLayoutMain
|
java
|
apache__flink
|
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/stream/state/MultiJoinStateViews.java
|
{
"start": 2699,
"end": 5400
}
|
class ____ {
/** Creates a {@link MultiJoinStateView} depends on {@link JoinInputSideSpec}. */
public static MultiJoinStateView create(
RuntimeContext ctx,
String stateName,
JoinInputSideSpec inputSideSpec,
@Nullable
RowType
joinKeyType, /* joinKeyType is null for inputId = 0, see {@link InputSideHasUniqueKey}*/
RowType recordType,
long retentionTime) {
StateTtlConfig ttlConfig = createTtlConfig(retentionTime);
if (inputSideSpec.hasUniqueKey()) {
if (inputSideSpec.joinKeyContainsUniqueKey() && joinKeyType != null) {
return new JoinKeyContainsUniqueKey(
ctx, stateName, joinKeyType, recordType, ttlConfig);
} else {
return new InputSideHasUniqueKey(
ctx,
stateName,
joinKeyType,
recordType,
inputSideSpec.getUniqueKeyType(),
inputSideSpec.getUniqueKeySelector(),
ttlConfig);
}
} else {
return new InputSideHasNoUniqueKey(ctx, stateName, joinKeyType, recordType, ttlConfig);
}
}
/**
* Creates a {@link MapStateDescriptor} with the given parameters and applies TTL configuration.
*
* @param <K> Key type
* @param <V> Value type
* @param stateName Unique name for the state
* @param keyTypeInfo Type information for the key
* @param valueTypeInfo Type information for the value
* @param ttlConfig State TTL configuration
* @return Configured MapStateDescriptor
*/
private static <K, V> MapStateDescriptor<K, V> createStateDescriptor(
String stateName,
TypeInformation<K> keyTypeInfo,
TypeInformation<V> valueTypeInfo,
StateTtlConfig ttlConfig) {
MapStateDescriptor<K, V> descriptor =
new MapStateDescriptor<>(stateName, keyTypeInfo, valueTypeInfo);
if (ttlConfig.isEnabled()) {
descriptor.enableTimeToLive(ttlConfig);
}
return descriptor;
}
// ------------------------------------------------------------------------------------
// Multi Join State View Implementations
// ------------------------------------------------------------------------------------
/**
* State view for input sides where the unique key is fully contained within the join key.
*
* <p>Stores data as {@code MapState<JoinKey, Record>}.
*/
private static final
|
MultiJoinStateViews
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/mutable/MutableShortTest.java
|
{
"start": 1189,
"end": 9011
}
|
class ____ extends AbstractLangTest {
@Test
void testAddAndGetValueObject() {
final MutableShort mutableShort = new MutableShort((short) 0);
final short result = mutableShort.addAndGet(Short.valueOf((short) 1));
assertEquals((short) 1, result);
assertEquals((short) 1, mutableShort.shortValue());
}
@Test
void testAddAndGetValuePrimitive() {
final MutableShort mutableShort = new MutableShort((short) 0);
final short result = mutableShort.addAndGet((short) 1);
assertEquals((short) 1, result);
assertEquals((short) 1, mutableShort.shortValue());
}
@Test
void testAddValueObject() {
final MutableShort mutNum = new MutableShort((short) 1);
mutNum.add(Short.valueOf((short) 1));
assertEquals((short) 2, mutNum.shortValue());
}
@Test
void testAddValuePrimitive() {
final MutableShort mutNum = new MutableShort((short) 1);
mutNum.add((short) 1);
assertEquals((short) 2, mutNum.shortValue());
}
@Test
void testCompareTo() {
final MutableShort mutNum = new MutableShort((short) 0);
assertEquals((short) 0, mutNum.compareTo(new MutableShort((short) 0)));
assertEquals((short) +1, mutNum.compareTo(new MutableShort((short) -1)));
assertEquals((short) -1, mutNum.compareTo(new MutableShort((short) 1)));
assertNullPointerException(() -> mutNum.compareTo(null));
}
@Test
void testConstructors() {
assertEquals((short) 0, new MutableShort().shortValue());
assertEquals((short) 1, new MutableShort((short) 1).shortValue());
assertEquals((short) 2, new MutableShort(Short.valueOf((short) 2)).shortValue());
assertEquals((short) 3, new MutableShort(new MutableShort((short) 3)).shortValue());
assertEquals((short) 2, new MutableShort("2").shortValue());
assertNullPointerException(() -> new MutableShort((Number) null));
}
@Test
void testDecrement() {
final MutableShort mutNum = new MutableShort((short) 1);
mutNum.decrement();
assertEquals(0, mutNum.intValue());
assertEquals(0L, mutNum.longValue());
}
@Test
void testDecrementAndGet() {
final MutableShort mutNum = new MutableShort((short) 1);
final short result = mutNum.decrementAndGet();
assertEquals(0, result);
assertEquals(0, mutNum.intValue());
assertEquals(0L, mutNum.longValue());
}
@Test
void testEquals() {
final MutableShort mutNumA = new MutableShort((short) 0);
final MutableShort mutNumB = new MutableShort((short) 0);
final MutableShort mutNumC = new MutableShort((short) 1);
assertEquals(mutNumA, mutNumA);
assertEquals(mutNumA, mutNumB);
assertEquals(mutNumB, mutNumA);
assertEquals(mutNumB, mutNumB);
assertNotEquals(mutNumA, mutNumC);
assertNotEquals(mutNumB, mutNumC);
assertEquals(mutNumC, mutNumC);
assertNotEquals(null, mutNumA);
assertNotEquals(mutNumA, Short.valueOf((short) 0));
assertNotEquals("0", mutNumA);
}
@Test
void testGetAndAddValueObject() {
final MutableShort mutableShort = new MutableShort((short) 0);
final short result = mutableShort.getAndAdd(Short.valueOf((short) 1));
assertEquals((short) 0, result);
assertEquals((short) 1, mutableShort.shortValue());
}
@Test
void testGetAndAddValuePrimitive() {
final MutableShort mutableShort = new MutableShort((short) 0);
final short result = mutableShort.getAndAdd((short) 1);
assertEquals((short) 0, result);
assertEquals((short) 1, mutableShort.shortValue());
}
@Test
void testGetAndDecrement() {
final MutableShort mutNum = new MutableShort((short) 1);
final short result = mutNum.getAndDecrement();
assertEquals(1, result);
assertEquals(0, mutNum.intValue());
assertEquals(0L, mutNum.longValue());
}
@Test
void testGetAndIncrement() {
final MutableShort mutNum = new MutableShort((short) 1);
final short result = mutNum.getAndIncrement();
assertEquals(1, result);
assertEquals(2, mutNum.intValue());
assertEquals(2L, mutNum.longValue());
}
@Test
void testGetSet() {
final MutableShort mutNum = new MutableShort((short) 0);
assertEquals((short) 0, new MutableShort().shortValue());
assertEquals(Short.valueOf((short) 0), new MutableShort().get());
assertEquals(Short.valueOf((short) 0), new MutableShort().getValue());
mutNum.setValue((short) 1);
assertEquals((short) 1, mutNum.shortValue());
assertEquals(Short.valueOf((short) 1), mutNum.get());
assertEquals(Short.valueOf((short) 1), mutNum.getValue());
mutNum.setValue(Short.valueOf((short) 2));
assertEquals((short) 2, mutNum.shortValue());
assertEquals(Short.valueOf((short) 2), mutNum.get());
assertEquals(Short.valueOf((short) 2), mutNum.getValue());
mutNum.setValue(new MutableShort((short) 3));
assertEquals((short) 3, mutNum.shortValue());
assertEquals(Short.valueOf((short) 3), mutNum.get());
assertEquals(Short.valueOf((short) 3), mutNum.getValue());
assertNullPointerException(() -> mutNum.setValue(null));
}
@Test
void testHashCode() {
final MutableShort mutNumA = new MutableShort((short) 0);
final MutableShort mutNumB = new MutableShort((short) 0);
final MutableShort mutNumC = new MutableShort((short) 1);
assertEquals(mutNumA.hashCode(), mutNumA.hashCode());
assertEquals(mutNumA.hashCode(), mutNumB.hashCode());
assertNotEquals(mutNumA.hashCode(), mutNumC.hashCode());
assertEquals(mutNumA.hashCode(), Short.valueOf((short) 0).hashCode());
}
@Test
void testIncrement() {
final MutableShort mutNum = new MutableShort((short) 1);
mutNum.increment();
assertEquals(2, mutNum.intValue());
assertEquals(2L, mutNum.longValue());
}
@Test
void testIncrementAndGet() {
final MutableShort mutNum = new MutableShort((short) 1);
final short result = mutNum.incrementAndGet();
assertEquals(2, result);
assertEquals(2, mutNum.intValue());
assertEquals(2L, mutNum.longValue());
}
@Test
void testPrimitiveValues() {
final MutableShort mutNum = new MutableShort((short) 1);
assertEquals(1.0F, mutNum.floatValue());
assertEquals(1.0, mutNum.doubleValue());
assertEquals((byte) 1, mutNum.byteValue());
assertEquals((short) 1, mutNum.shortValue());
assertEquals(1, mutNum.intValue());
assertEquals(1L, mutNum.longValue());
}
@Test
void testSubtractValueObject() {
final MutableShort mutNum = new MutableShort((short) 1);
mutNum.subtract(Short.valueOf((short) 1));
assertEquals((short) 0, mutNum.shortValue());
}
@Test
void testSubtractValuePrimitive() {
final MutableShort mutNum = new MutableShort((short) 1);
mutNum.subtract((short) 1);
assertEquals((short) 0, mutNum.shortValue());
}
@Test
void testToShort() {
assertEquals(Short.valueOf((short) 0), new MutableShort((short) 0).toShort());
assertEquals(Short.valueOf((short) 123), new MutableShort((short) 123).toShort());
}
@Test
void testToString() {
assertEquals("0", new MutableShort((short) 0).toString());
assertEquals("10", new MutableShort((short) 10).toString());
assertEquals("-123", new MutableShort((short) -123).toString());
}
}
|
MutableShortTest
|
java
|
apache__camel
|
components/camel-barcode/src/test/java/org/apache/camel/dataformat/barcode/BarcodeUnmarshalTest.java
|
{
"start": 1517,
"end": 3444
}
|
class ____ extends BarcodeTestBase {
@TempDir
Path testDirectory;
@Test
void testOrientation() {
Exchange exchange = template.request("direct:code1", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setBody(MSG);
}
});
assertEquals(180, exchange.getMessage().getHeader("ORIENTATION"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
DataFormat code1 = new BarcodeDataFormat(200, 200, BarcodeImageType.PNG, BarcodeFormat.CODE_39);
from("direct:code1")
.marshal(code1)
.process(new Processor() {
@Override
public void process(Exchange exchange) throws Exception {
InputStream bis = exchange.getIn().getBody(InputStream.class);
BinaryBitmap bitmap = new BinaryBitmap(
new HybridBinarizer(new BufferedImageLuminanceSource(ImageIO.read(bis))));
BitMatrix blackMatrix = bitmap.getBlackMatrix();
blackMatrix.rotate180();
File file = testDirectory.resolve("TestImage.png").toFile();
FileOutputStream outputStream = new FileOutputStream(file);
MatrixToImageWriter.writeToStream(blackMatrix, "png", outputStream);
exchange.getIn().setBody(file);
}
}).unmarshal(code1)
.to("log:OUT")
.to("mock:out");
}
};
}
}
|
BarcodeUnmarshalTest
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/TestDeferredSecurityContext.java
|
{
"start": 856,
"end": 1328
}
|
class ____ implements DeferredSecurityContext {
private SecurityContext securityContext;
private boolean isGenerated;
public TestDeferredSecurityContext(SecurityContext securityContext, boolean isGenerated) {
this.securityContext = securityContext;
this.isGenerated = isGenerated;
}
@Override
public SecurityContext get() {
return this.securityContext;
}
@Override
public boolean isGenerated() {
return this.isGenerated;
}
}
|
TestDeferredSecurityContext
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/authorization/OAuth2AuthorizationCodeGrantTests.java
|
{
"start": 65645,
"end": 66306
}
|
class ____
extends AuthorizationServerConfiguration {
// @formatter:off
@Bean
SecurityFilterChain authorizationServerSecurityFilterChain(HttpSecurity http) throws Exception {
http
.oauth2AuthorizationServer(Customizer.withDefaults())
.authorizeHttpRequests((authorize) ->
authorize.anyRequest().authenticated()
)
.securityContext((securityContext) ->
securityContext.securityContextRepository(securityContextRepository));
return http.build();
}
// @formatter:on
}
@EnableWebSecurity
@Import(OAuth2AuthorizationServerConfiguration.class)
static
|
AuthorizationServerConfigurationWithSecurityContextRepository
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesAuxServices.java
|
{
"start": 3421,
"end": 4391
}
|
class ____ extends JerseyTestBase {
private static final String AUX_SERVICES_PATH = "auxiliaryservices";
private static Context nmContext;
private static Configuration conf = new Configuration();
private DateFormat dateFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static final File testRootDir = new File("target",
TestNMWebServicesContainers.class.getSimpleName());
private static final File testLogDir = new File("target",
TestNMWebServicesContainers.class.getSimpleName() + "LogDir");
@Override
protected Application configure() {
ResourceConfig config = new ResourceConfig();
config.register(new JerseyBinder());
config.register(NMWebServices.class);
config.register(GenericExceptionHandler.class);
config.register(new JettisonFeature()).register(JAXBContextResolver.class);
forceSet(TestProperties.CONTAINER_PORT, "9999");
return config;
}
private static
|
TestNMWebServicesAuxServices
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/config/RuntimeBeanNameReference.java
|
{
"start": 790,
"end": 1107
}
|
class ____ for a property value object when it's a
* reference to another bean name in the factory, to be resolved at runtime.
*
* @author Juergen Hoeller
* @since 2.0
* @see RuntimeBeanReference
* @see BeanDefinition#getPropertyValues()
* @see org.springframework.beans.factory.BeanFactory#getBean
*/
public
|
used
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/streaming/api/transformations/GetTransitivePredecessorsTest.java
|
{
"start": 4557,
"end": 5361
}
|
class ____<T> extends Transformation<T> {
private int numGetTransitivePredecessor = 0;
public TestTransformation(String name, TypeInformation<T> outputType, int parallelism) {
super(name, outputType, parallelism);
}
@Override
protected List<Transformation<?>> getTransitivePredecessorsInternal() {
++numGetTransitivePredecessor;
return Collections.singletonList(this);
}
@Override
public List<Transformation<?>> getInputs() {
return Collections.emptyList();
}
public int getNumGetTransitivePredecessor() {
return numGetTransitivePredecessor;
}
}
/** A test implementation of {@link OneInputTransformation}. */
private static
|
TestTransformation
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/ser/filter/JsonFilterTest.java
|
{
"start": 1559,
"end": 1802
}
|
class ____ {
public String a;
public String b;
public String c;
public BeanB(String a, String b, String c) {
this.a = a;
this.b = b;
this.c = c;
}
}
static
|
BeanB
|
java
|
elastic__elasticsearch
|
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/MultiplexerTokenFilterFactory.java
|
{
"start": 6093,
"end": 7435
}
|
class ____ extends TokenFilter {
private final TokenStream source;
private final int filterCount;
private int selector;
/**
* Creates a MultiplexTokenFilter on the given input with a set of filters
*/
MultiplexTokenFilter(TokenStream input, List<Function<TokenStream, TokenStream>> filters) {
super(input);
TokenStream sourceFilter = new MultiplexerFilter(input);
for (int i = 0; i < filters.size(); i++) {
final int slot = i;
sourceFilter = new ConditionalTokenFilter(sourceFilter, filters.get(i)) {
@Override
protected boolean shouldFilter() {
return slot == selector;
}
};
}
this.source = sourceFilter;
this.filterCount = filters.size();
this.selector = filterCount - 1;
}
@Override
public boolean incrementToken() throws IOException {
return source.incrementToken();
}
@Override
public void end() throws IOException {
source.end();
}
@Override
public void reset() throws IOException {
source.reset();
}
private final
|
MultiplexTokenFilter
|
java
|
spring-projects__spring-security
|
web/src/main/java/org/springframework/security/web/server/csrf/XorServerCsrfTokenRequestAttributeHandler.java
|
{
"start": 1522,
"end": 4913
}
|
class ____ extends ServerCsrfTokenRequestAttributeHandler {
private static final Log logger = LogFactory.getLog(XorServerCsrfTokenRequestAttributeHandler.class);
private SecureRandom secureRandom = new SecureRandom();
/**
* Specifies the {@code SecureRandom} used to generate random bytes that are used to
* mask the value of the {@link CsrfToken} on each request.
* @param secureRandom the {@code SecureRandom} to use to generate random bytes
*/
public void setSecureRandom(SecureRandom secureRandom) {
Assert.notNull(secureRandom, "secureRandom cannot be null");
this.secureRandom = secureRandom;
}
@Override
public void handle(ServerWebExchange exchange, Mono<CsrfToken> csrfToken) {
Assert.notNull(exchange, "exchange cannot be null");
Assert.notNull(csrfToken, "csrfToken cannot be null");
Mono<CsrfToken> updatedCsrfToken = csrfToken
.map((token) -> new DefaultCsrfToken(token.getHeaderName(), token.getParameterName(),
createXoredCsrfToken(this.secureRandom, token.getToken())))
.cast(CsrfToken.class)
.cache();
super.handle(exchange, updatedCsrfToken);
}
@Override
public Mono<String> resolveCsrfTokenValue(ServerWebExchange exchange, CsrfToken csrfToken) {
return super.resolveCsrfTokenValue(exchange, csrfToken)
.flatMap((actualToken) -> Mono.justOrEmpty(getTokenValue(actualToken, csrfToken.getToken())));
}
private static @Nullable String getTokenValue(String actualToken, String token) {
byte[] actualBytes;
try {
actualBytes = Base64.getUrlDecoder().decode(actualToken);
}
catch (Exception ex) {
logger.trace(LogMessage.format("Not returning the CSRF token since it's not Base64-encoded"), ex);
return null;
}
byte[] tokenBytes = Utf8.encode(token);
int tokenSize = tokenBytes.length;
if (actualBytes.length != tokenSize * 2) {
logger.trace(LogMessage.format(
"Not returning the CSRF token since its Base64-decoded length (%d) is not equal to (%d)",
actualBytes.length, tokenSize * 2));
return null;
}
// extract token and random bytes
byte[] xoredCsrf = new byte[tokenSize];
byte[] randomBytes = new byte[tokenSize];
System.arraycopy(actualBytes, 0, randomBytes, 0, tokenSize);
System.arraycopy(actualBytes, tokenSize, xoredCsrf, 0, tokenSize);
byte[] csrfBytes = xorCsrf(randomBytes, xoredCsrf);
return (csrfBytes != null) ? Utf8.decode(csrfBytes) : null;
}
private static String createXoredCsrfToken(SecureRandom secureRandom, String token) {
byte[] tokenBytes = Utf8.encode(token);
byte[] randomBytes = new byte[tokenBytes.length];
secureRandom.nextBytes(randomBytes);
byte[] xoredBytes = xorCsrf(randomBytes, tokenBytes);
byte[] combinedBytes = new byte[tokenBytes.length + randomBytes.length];
System.arraycopy(randomBytes, 0, combinedBytes, 0, randomBytes.length);
System.arraycopy(xoredBytes, 0, combinedBytes, randomBytes.length, xoredBytes.length);
return Base64.getUrlEncoder().encodeToString(combinedBytes);
}
private static byte[] xorCsrf(byte[] randomBytes, byte[] csrfBytes) {
Assert.isTrue(randomBytes.length == csrfBytes.length, "arrays must be equal length");
int len = csrfBytes.length;
byte[] xoredCsrf = new byte[len];
System.arraycopy(csrfBytes, 0, xoredCsrf, 0, len);
for (int i = 0; i < len; i++) {
xoredCsrf[i] ^= randomBytes[i];
}
return xoredCsrf;
}
}
|
XorServerCsrfTokenRequestAttributeHandler
|
java
|
apache__camel
|
components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/processor/SpringXPathHeaderNameResultTypeAndNamespaceTest.java
|
{
"start": 1064,
"end": 1410
}
|
class ____ extends XPathHeaderNameResultTypeAndNamespaceTest {
@Override
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this,
"org/apache/camel/spring/processor/xPathHeaderNameResultTypeAndNamespaceContext.xml");
}
}
|
SpringXPathHeaderNameResultTypeAndNamespaceTest
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/client/protocol/decoder/ListMultiDecoder2.java
|
{
"start": 877,
"end": 1746
}
|
class ____<T> implements MultiDecoder<Object> {
private final MultiDecoder<?>[] decoders;
public ListMultiDecoder2(MultiDecoder<?>... decoders) {
this.decoders = decoders;
}
@Override
public Decoder<Object> getDecoder(Codec codec, int paramNum, State state, long size, List<Object> parts) {
int index = state.getLevel();
return decoders[index].getDecoder(codec, paramNum, state, size, parts);
}
@Override
public Decoder<Object> getDecoder(Codec codec, int paramNum, State state, long size) {
int index = state.getLevel();
return decoders[index].getDecoder(codec, paramNum, state, size);
}
@Override
public Object decode(List<Object> parts, State state) {
int index = state.getLevel();
return decoders[index].decode(parts, state);
}
}
|
ListMultiDecoder2
|
java
|
netty__netty
|
codec-http2/src/main/java/io/netty/handler/codec/http2/InboundHttpToHttp2Adapter.java
|
{
"start": 995,
"end": 3500
}
|
class ____ extends ChannelInboundHandlerAdapter {
private final Http2Connection connection;
private final Http2FrameListener listener;
public InboundHttpToHttp2Adapter(Http2Connection connection, Http2FrameListener listener) {
this.connection = connection;
this.listener = listener;
}
private static int getStreamId(Http2Connection connection, HttpHeaders httpHeaders) {
return httpHeaders.getInt(HttpConversionUtil.ExtensionHeaderNames.STREAM_ID.text(),
connection.remote().incrementAndGetNextStreamId());
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
if (msg instanceof FullHttpMessage) {
handle(ctx, connection, listener, (FullHttpMessage) msg);
} else {
super.channelRead(ctx, msg);
}
}
// note that this may behave strangely when used for the initial upgrade
// message when using h2c, since that message is ineligible for flow
// control, but there is not yet an API for signaling that.
static void handle(ChannelHandlerContext ctx, Http2Connection connection,
Http2FrameListener listener, FullHttpMessage message) throws Http2Exception {
try {
int streamId = getStreamId(connection, message.headers());
Http2Stream stream = connection.stream(streamId);
if (stream == null) {
stream = connection.remote().createStream(streamId, false);
}
message.headers().set(HttpConversionUtil.ExtensionHeaderNames.SCHEME.text(), HttpScheme.HTTP.name());
Http2Headers messageHeaders = HttpConversionUtil.toHttp2Headers(message, true);
boolean hasContent = message.content().isReadable();
boolean hasTrailers = !message.trailingHeaders().isEmpty();
listener.onHeadersRead(
ctx, streamId, messageHeaders, 0, !(hasContent || hasTrailers));
if (hasContent) {
listener.onDataRead(ctx, streamId, message.content(), 0, !hasTrailers);
}
if (hasTrailers) {
Http2Headers headers = HttpConversionUtil.toHttp2Headers(message.trailingHeaders(), true);
listener.onHeadersRead(ctx, streamId, headers, 0, true);
}
stream.closeRemoteSide();
} finally {
message.release();
}
}
}
|
InboundHttpToHttp2Adapter
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/aot/AotProcessingException.java
|
{
"start": 843,
"end": 1155
}
|
class ____ extends AotException {
/**
* Create a new instance with the detail message and a root cause, if any.
* @param msg the detail message
* @param cause the root cause, if any
*/
public AotProcessingException(String msg, @Nullable Throwable cause) {
super(msg, cause);
}
}
|
AotProcessingException
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/entitymode/dom4j/DeprecationLoggingTest.java
|
{
"start": 869,
"end": 1628
}
|
class ____ {
@RegisterExtension
public LoggerInspectionExtension logInspection = LoggerInspectionExtension.builder()
.setLogger( DeprecationLogger.DEPRECATION_LOGGER ).build();
@Test
public void basicTest() {
logInspection.registerListener( LogListenerImpl.INSTANCE );
MetadataSources metadataSources = new MetadataSources( ServiceRegistryUtil.serviceRegistry() )
.addResource( "org/hibernate/orm/test/entitymode/dom4j/Car.hbm.xml" );
try {
metadataSources.buildMetadata();
}
finally {
ServiceRegistry metaServiceRegistry = metadataSources.getServiceRegistry();
if ( metaServiceRegistry instanceof BootstrapServiceRegistry ) {
BootstrapServiceRegistryBuilder.destroy( metaServiceRegistry );
}
}
}
}
|
DeprecationLoggingTest
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/support/monitor/dao/MonitorDaoJdbcImpl.java
|
{
"start": 22744,
"end": 26164
}
|
class ____ {
private final Class<?> clazz;
private final List<FieldInfo> fields = new ArrayList<FieldInfo>();
private final List<FieldInfo> groupByFields = new ArrayList<FieldInfo>();
private final List<FieldInfo> hashFields = new ArrayList<FieldInfo>();
private final String tableName;
private String insertSql;
public BeanInfo(Class<?> clazz) {
this.clazz = clazz;
{
MTable annotation = clazz.getAnnotation(MTable.class);
if (annotation == null) {
throw new IllegalArgumentException(clazz.getName() + " not contains @MTable");
}
tableName = annotation.name();
}
for (Field field : clazz.getDeclaredFields()) {
MField annotation = field.getAnnotation(MField.class);
if (annotation == null) {
continue;
}
String columnName = annotation.name();
if (StringUtils.isEmpty(columnName)) {
columnName = field.getName();
}
Field hashFor = null;
String hashForType = null;
if (!StringUtils.isEmpty(annotation.hashFor())) {
try {
hashFor = clazz.getDeclaredField(annotation.hashFor());
hashForType = annotation.hashForType();
} catch (Exception e) {
throw new IllegalStateException("hashFor error", e);
}
}
FieldInfo fieldInfo = new FieldInfo(field, columnName, hashFor, hashForType);
fields.add(fieldInfo);
if (annotation.groupBy()) {
groupByFields.add(fieldInfo);
}
if (hashFor != null) {
hashFields.add(fieldInfo);
}
}
}
public String getTableName() {
return tableName;
}
public Class<?> getClazz() {
return clazz;
}
public String getInsertSql() {
return insertSql;
}
public void setInsertSql(String insertSql) {
this.insertSql = insertSql;
}
public List<FieldInfo> getFields() {
return fields;
}
public List<FieldInfo> getGroupByFields() {
return groupByFields;
}
public List<FieldInfo> getHashFields() {
return hashFields;
}
}
public boolean cacheContains(String type, Long hash) {
Map<Long, String> cache = cacheMap.get(type);
if (cache == null) {
return false;
}
return cache.containsKey(hash);
}
public String cacheGet(String type, Long hash) {
Map<Long, String> cache = cacheMap.get(type);
if (cache == null) {
return null;
}
return cache.get(hash);
}
public void cachePut(String type, Long hash, String value) {
ConcurrentMap<Long, String> cache = cacheMap.get(type);
if (cache == null) {
cacheMap.putIfAbsent(type, new ConcurrentHashMap<Long, String>(16, 0.75f, 1));
cache = cacheMap.get(type);
}
cache.putIfAbsent(hash, value);
}
public static
|
BeanInfo
|
java
|
alibaba__nacos
|
client/src/test/java/com/alibaba/nacos/client/utils/StringUtilsTest.java
|
{
"start": 1392,
"end": 3155
}
|
class ____ {
@Test
void testisNotBlank() {
assertTrue(isNotBlank("foo"));
assertFalse(isNotBlank(" "));
assertFalse(isNotBlank(null));
}
@Test
void testIsNotEmpty() {
assertFalse(isNotEmpty(""));
assertTrue(isNotEmpty("foo"));
}
@Test
void testDefaultIfEmpty() {
assertEquals("foo", defaultIfEmpty("", "foo"));
assertEquals("bar", defaultIfEmpty("bar", "foo"));
}
@Test
void testEquals() {
assertTrue(StringUtils.equals("foo", "foo"));
assertFalse(StringUtils.equals("bar", "foo"));
assertFalse(StringUtils.equals(" ", "foo"));
assertFalse(StringUtils.equals("foo", null));
}
@Test
void testSubstringBetween() {
assertNull(substringBetween(null, null, null));
assertNull(substringBetween("", "foo", ""));
assertNull(substringBetween("foo", "bar", "baz"));
assertEquals("", substringBetween("foo", "foo", ""));
}
@Test
void testJoin() {
assertNull(join(null, ""));
Collection collection = new ArrayList();
collection.add("foo");
collection.add("bar");
assertEquals("foo,bar", join(collection, ","));
}
@Test
void testUuidPattern() {
// match 8-4-4-4-12 uuid pattern
assertTrue(StringUtils.isUuidString("123e4567-e89b-12d3-a456-426655440000"));
// not match 8-4-4-4-12 uuid pattern
assertFalse(StringUtils.isUuidString("123e54567-e89b5-12d35-a4565-426655440000"));
// not match hexadecimal and '-' char
assertFalse(StringUtils.isUuidString("@23e4567+e89b-12d3-a456-426655440000"));
}
}
|
StringUtilsTest
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/net/QuicClientAddressValidation.java
|
{
"start": 577,
"end": 879
}
|
enum ____ {
/**
* The server won't perform any validation (no Retry packet is emitted).
*/
NONE,
/**
* The server performs basic token validation without any crypto.
*/
BASIC,
/**
* The server performs validation using cryptography.
*/
CRYPTO
}
|
QuicClientAddressValidation
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/predicate/conditional/Conditionals.java
|
{
"start": 609,
"end": 2566
}
|
class ____ {
private Conditionals() {}
static Object coalesce(Collection<Object> values) {
if (values == null || values.isEmpty()) {
return null;
}
for (Object object : values) {
if (object != null) {
return object;
}
}
return null;
}
static Object coalesceInput(List<Processor> processors, Object input) {
for (Processor proc : processors) {
Object result = proc.process(input);
if (result != null) {
return result;
}
}
return null;
}
static Object greatest(Collection<Object> values) {
return extremum(values, Comparisons::gt);
}
static Object greatestInput(Collection<Processor> processors, Object input) {
List<Object> values = new ArrayList<>(processors.size());
for (Processor processor : processors) {
values.add(processor.process(input));
}
return greatest(values);
}
static Object least(Collection<Object> values) {
return extremum(values, Comparisons::lt);
}
static Object leastInput(List<Processor> processors, Object input) {
List<Object> values = new ArrayList<>(processors.size());
for (Processor processor : processors) {
values.add(processor.process(input));
}
return least(values);
}
private static Object extremum(Collection<Object> values, BiFunction<Object, Object, Boolean> comparison) {
if (values == null || values.isEmpty()) {
return null;
}
Object result = null;
boolean isFirst = true;
for (Object value : values) {
if (isFirst || (result == null) || (comparison.apply(value, result) == Boolean.TRUE)) {
result = value;
}
isFirst = false;
}
return result;
}
}
|
Conditionals
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/DefaultExchangeTest.java
|
{
"start": 17117,
"end": 17347
}
|
class ____ implements SafeCopyProperty {
private SafeProperty() {
}
@Override
public SafeProperty safeCopy() {
return new SafeProperty();
}
}
private static
|
SafeProperty
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java
|
{
"start": 2187,
"end": 3721
}
|
class ____ extends SingleShardRequest<TermVectorsRequest> implements RealtimeRequest {
private static final ParseField INDEX = new ParseField("_index");
private static final ParseField ID = new ParseField("_id");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField VERSION = new ParseField("version");
private static final ParseField VERSION_TYPE = new ParseField("version_type");
private static final ParseField FIELDS = new ParseField("fields");
private static final ParseField OFFSETS = new ParseField("offsets");
private static final ParseField POSITIONS = new ParseField("positions");
private static final ParseField PAYLOADS = new ParseField("payloads");
private static final ParseField DFS = new ParseField("dfs");
private static final ParseField FILTER = new ParseField("filter");
private static final ParseField DOC = new ParseField("doc");
private String id;
private BytesReference doc;
private XContentType xContentType;
private String routing;
private VersionType versionType = VersionType.INTERNAL;
private long version = Versions.MATCH_ANY;
private String preference;
private static final AtomicInteger randomInt = new AtomicInteger(0);
// TODO: change to String[]
private Set<String> selectedFields;
private boolean realtime = true;
private Map<String, String> perFieldAnalyzer;
private FilterSettings filterSettings;
public static final
|
TermVectorsRequest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/llama/request/embeddings/LlamaEmbeddingsRequest.java
|
{
"start": 1128,
"end": 1297
}
|
class ____ responsible for creating a request to the Llama embeddings model.
* It constructs an HTTP POST request with the necessary headers and body content.
*/
public
|
is
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/http/Saml2LoginBeanDefinitionParser.java
|
{
"start": 14112,
"end": 15568
}
|
class ____ implements ApplicationContextAware {
private ApplicationContext context;
@SuppressWarnings({ "unchecked", "unused" })
Map<String, String> getAuthenticationUrlToProviderName() {
Iterable<RelyingPartyRegistration> relyingPartyRegistrations = null;
RelyingPartyRegistrationRepository relyingPartyRegistrationRepository = this.context
.getBean(RelyingPartyRegistrationRepository.class);
ResolvableType type = ResolvableType.forInstance(relyingPartyRegistrationRepository).as(Iterable.class);
if (type != ResolvableType.NONE
&& RelyingPartyRegistration.class.isAssignableFrom(type.resolveGenerics()[0])) {
relyingPartyRegistrations = (Iterable<RelyingPartyRegistration>) relyingPartyRegistrationRepository;
}
if (relyingPartyRegistrations == null) {
return Collections.emptyMap();
}
String authenticationRequestProcessingUrl = DEFAULT_AUTHENTICATION_REQUEST_PROCESSING_URL;
Map<String, String> saml2AuthenticationUrlToProviderName = new HashMap<>();
relyingPartyRegistrations.forEach((registration) -> saml2AuthenticationUrlToProviderName.put(
authenticationRequestProcessingUrl.replace("{registrationId}", registration.getRegistrationId()),
registration.getRegistrationId()));
return saml2AuthenticationUrlToProviderName;
}
@Override
public void setApplicationContext(ApplicationContext context) throws BeansException {
this.context = context;
}
}
}
|
Saml2LoginBeanConfig
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/slive/ListOp.java
|
{
"start": 1617,
"end": 3192
}
|
class ____ extends Operation {
private static final Logger LOG = LoggerFactory.getLogger(ListOp.class);
ListOp(ConfigExtractor cfg, Random rnd) {
super(ListOp.class.getSimpleName(), cfg, rnd);
}
/**
* Gets the directory to list
*
* @return Path
*/
protected Path getDirectory() {
Path dir = getFinder().getDirectory();
return dir;
}
@Override // Operation
List<OperationOutput> run(FileSystem fs) {
List<OperationOutput> out = super.run(fs);
try {
Path dir = getDirectory();
long dirEntries = 0;
long timeTaken = 0;
{
long startTime = Timer.now();
FileStatus[] files = fs.listStatus(dir);
timeTaken = Timer.elapsed(startTime);
dirEntries = files.length;
}
// log stats
out.add(new OperationOutput(OutputType.LONG, getType(),
ReportWriter.OK_TIME_TAKEN, timeTaken));
out.add(new OperationOutput(OutputType.LONG, getType(),
ReportWriter.SUCCESSES, 1L));
out.add(new OperationOutput(OutputType.LONG, getType(),
ReportWriter.DIR_ENTRIES, dirEntries));
LOG.info("Directory " + dir + " has " + dirEntries + " entries");
} catch (FileNotFoundException e) {
out.add(new OperationOutput(OutputType.LONG, getType(),
ReportWriter.NOT_FOUND, 1L));
LOG.warn("Error with listing", e);
} catch (IOException e) {
out.add(new OperationOutput(OutputType.LONG, getType(),
ReportWriter.FAILURES, 1L));
LOG.warn("Error with listing", e);
}
return out;
}
}
|
ListOp
|
java
|
quarkusio__quarkus
|
extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/MailerImplTest.java
|
{
"start": 942,
"end": 13278
}
|
class ____ {
private static final String FROM = "test@test.org";
private static final String TO = "foo@quarkus.io";
private static final String TEXT_CONTENT_TYPE = "text/plain";
private static Wiser wiser;
private static Vertx vertx;
private MutinyMailerImpl mailer;
@BeforeAll
static void startWiser() {
wiser = Wiser.port(SocketUtil.findAvailablePort());
wiser.start();
vertx = Vertx.vertx();
}
@AfterAll
static void stopWiser() {
wiser.stop();
vertx.close().await().indefinitely();
}
@BeforeEach
void init() {
mailer = new MutinyMailerImpl(vertx,
MailClient.createShared(vertx,
new MailConfig().setPort(wiser.getServer().getPort())),
null, FROM, null, false, List.of(), false, false, null);
wiser.getMessages().clear();
}
@Test
void testTextMail() throws MessagingException, IOException {
String uuid = UUID.randomUUID().toString();
mailer.send(Mail.withText(TO, "Test", uuid)).await().indefinitely();
assertThat(wiser.getMessages()).hasSize(1);
WiserMessage actual = wiser.getMessages().get(0);
assertThat(getContent(actual)).contains(uuid);
MimeMessage msg = actual.getMimeMessage();
String content = (String) actual.getMimeMessage().getContent();
assertThat(content).isEqualTo(uuid + "\r\n");
assertThat(msg.getSubject()).isEqualTo("Test");
assertThat(msg.getFrom()[0].toString()).isEqualTo(FROM);
assertThat(msg.getAllRecipients()).hasSize(1).contains(new InternetAddress(TO));
}
@Test
void testHTMLMail() throws MessagingException {
String content = UUID.randomUUID().toString();
mailer.send(Mail.withHtml(TO, "Test", "<h1>" + content + "</h1>")).await().indefinitely();
assertThat(wiser.getMessages()).hasSize(1);
WiserMessage actual = wiser.getMessages().get(0);
assertThat(getContent(actual)).contains("<h1>" + content + "</h1>");
List<String> types = Collections.singletonList(actual.getMimeMessage().getContentType());
assertThat(types).containsExactly("text/html");
MimeMessage msg = actual.getMimeMessage();
assertThat(msg.getSubject()).isEqualTo("Test");
assertThat(msg.getContentType()).startsWith("text/html");
assertThat(msg.getFrom()[0].toString()).isEqualTo(FROM);
assertThat(msg.getAllRecipients()).hasSize(1).contains(new InternetAddress(TO));
}
@Test
void testWithSeveralMails() {
Mail mail1 = Mail.withText(TO, "Mail 1", "Mail 1").addCc("cc@quarkus.io").addBcc("bcc@quarkus.io");
Mail mail2 = Mail.withHtml(TO, "Mail 2", "<strong>Mail 2</strong>").addCc("cc2@quarkus.io").addBcc("bcc2@quarkus.io");
mailer.send(mail1, mail2).await().indefinitely();
assertThat(wiser.getMessages()).hasSize(6);
}
@Test
void testHeaders() throws MessagingException {
mailer.send(Mail.withText(TO, "Test", "testHeaders")
.addHeader("X-header", "value")
.addHeader("X-header-2", "value1", "value2"))
.await().indefinitely();
assertThat(wiser.getMessages()).hasSize(1);
WiserMessage actual = wiser.getMessages().get(0);
MimeMessage msg = actual.getMimeMessage();
assertThat(msg.getSubject()).isEqualTo("Test");
assertThat(msg.getFrom()[0].toString()).isEqualTo(FROM);
assertThat(msg.getHeader("X-header")).hasSize(1).contains("value");
assertThat(msg.getHeader("X-header-2")).hasSize(2).contains("value1", "value2");
}
@Test
void testAttachment() throws MessagingException, IOException {
String payload = UUID.randomUUID().toString();
mailer.send(Mail.withText(TO, "Test", "testAttachment")
.addAttachment("my-file.txt", payload.getBytes(StandardCharsets.UTF_8), TEXT_CONTENT_TYPE))
.await().indefinitely();
assertThat(wiser.getMessages()).hasSize(1);
WiserMessage actual = wiser.getMessages().get(0);
assertThat(getContent(actual)).contains("testAttachment");
MimeMessage msg = actual.getMimeMessage();
assertThat(msg.getSubject()).isEqualTo("Test");
assertThat(msg.getFrom()[0].toString()).isEqualTo(FROM);
String value = getAttachment("my-file.txt", (MimeMultipart) actual.getMimeMessage().getContent());
assertThat(value).isEqualTo(payload);
}
@Test
void testAttachmentAsStream() throws MessagingException, IOException {
String payload = UUID.randomUUID().toString();
byte[] bytes = payload.getBytes(StandardCharsets.UTF_8);
Iterable<Byte> iterable = () -> new Iterator<Byte>() {
private int index = 0;
@Override
public boolean hasNext() {
return bytes.length > index;
}
@Override
public Byte next() {
return bytes[index++];
}
};
mailer.send(Mail.withText(TO, "Test", "testAttachmentAsStream")
.addAttachment("my-file.txt", Multi.createFrom().iterable(iterable), TEXT_CONTENT_TYPE))
.await().indefinitely();
assertThat(wiser.getMessages()).hasSize(1);
WiserMessage actual = wiser.getMessages().get(0);
assertThat(getContent(actual)).contains("testAttachment");
MimeMessage msg = actual.getMimeMessage();
assertThat(msg.getSubject()).isEqualTo("Test");
assertThat(msg.getFrom()[0].toString()).isEqualTo(FROM);
String value = getAttachment("my-file.txt", (MimeMultipart) actual.getMimeMessage().getContent());
assertThat(value).isEqualTo(payload);
}
@Test
void testInlineAttachment() throws MessagingException, IOException {
String cid = UUID.randomUUID() + "@acme";
mailer.send(Mail.withHtml(TO, "Test", "testInlineAttachment")
.addInlineAttachment("inline.txt", "my inlined text".getBytes(StandardCharsets.UTF_8), TEXT_CONTENT_TYPE, cid))
.await().indefinitely();
assertThat(wiser.getMessages()).hasSize(1);
WiserMessage actual = wiser.getMessages().get(0);
assertThat(getContent(actual)).contains("testInlineAttachment");
MimeMessage msg = actual.getMimeMessage();
assertThat(msg.getSubject()).isEqualTo("Test");
assertThat(msg.getFrom()[0].toString()).isEqualTo(FROM);
String value = getInlineAttachment("<" + cid + ">", (MimeMultipart) actual.getMimeMessage().getContent());
assertThat(value).isEqualTo("my inlined text");
}
@Test
void testAttachments() throws MessagingException, IOException {
mailer.send(Mail.withText(TO, "Test", "Simple Test")
.addAttachment("some-data.txt", "Hello".getBytes(StandardCharsets.UTF_8), TEXT_CONTENT_TYPE)
.addAttachment("some-data-2.txt", "Hello 2".getBytes(StandardCharsets.UTF_8), TEXT_CONTENT_TYPE))
.await().indefinitely();
assertThat(wiser.getMessages()).hasSize(1);
WiserMessage actual = wiser.getMessages().get(0);
assertThat(getContent(actual)).contains("Simple Test");
MimeMessage msg = actual.getMimeMessage();
assertThat(msg.getSubject()).isEqualTo("Test");
assertThat(msg.getFrom()[0].toString()).isEqualTo(FROM);
String value = getAttachment("some-data.txt", (MimeMultipart) actual.getMimeMessage().getContent());
assertThat(value).isEqualTo("Hello");
value = getAttachment("some-data-2.txt", (MimeMultipart) actual.getMimeMessage().getContent());
assertThat(value).isEqualTo("Hello 2");
}
@Test
void testReplyToHeaderIsSet() throws MessagingException {
mailer.send(Mail.withText(TO, "Test", "testHeaders")
.setReplyTo("reply-to@quarkus.io"))
.await().indefinitely();
assertThat(wiser.getMessages()).hasSize(1);
WiserMessage actual = wiser.getMessages().get(0);
MimeMessage msg = actual.getMimeMessage();
assertThat(msg.getHeader("Reply-To")).containsExactly("reply-to@quarkus.io");
assertThat(msg.getReplyTo()).containsExactly(InternetAddress.parse("reply-to@quarkus.io"));
}
@Test
void testMultipleReplyToHeaderIsSet() throws MessagingException {
mailer.send(Mail.withText(TO, "Test", "testHeaders")
.setReplyTo("reply-to@quarkus.io", "another@quarkus.io"))
.await().indefinitely();
assertThat(wiser.getMessages()).hasSize(1);
WiserMessage actual = wiser.getMessages().get(0);
MimeMessage msg = actual.getMimeMessage();
assertThat(msg.getHeader("Reply-To")).containsExactly("reply-to@quarkus.io,another@quarkus.io");
assertThat(msg.getReplyTo()).hasSize(2).contains(InternetAddress.parse("reply-to@quarkus.io"))
.contains(InternetAddress.parse("another@quarkus.io"));
}
private String getContent(WiserMessage msg) {
try {
Object content = msg.getMimeMessage().getContent();
if (content instanceof String) {
return content.toString();
}
return getTextFromMimeMultipart((MimeMultipart) content);
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
private String getAttachment(String name, MimeMultipart multipart) throws IOException, MessagingException {
for (int i = 0; i < multipart.getCount(); i++) {
BodyPart bodyPart = multipart.getBodyPart(i);
if (bodyPart.getFileName() != null && bodyPart.getFileName().equalsIgnoreCase(name)) {
assertThat(bodyPart.getContentType()).startsWith(TEXT_CONTENT_TYPE);
return read(bodyPart);
}
}
return null;
}
private String getInlineAttachment(String cid, MimeMultipart multipart) throws IOException, MessagingException {
for (int i = 0; i < multipart.getCount(); i++) {
BodyPart bodyPart = multipart.getBodyPart(i);
if (bodyPart.getContent() instanceof MimeMultipart) {
for (int j = 0; j < ((MimeMultipart) bodyPart.getContent()).getCount(); j++) {
BodyPart nested = ((MimeMultipart) bodyPart.getContent()).getBodyPart(j);
if (nested.getHeader("Content-ID") != null && nested.getHeader("Content-ID")[0].equalsIgnoreCase(cid)) {
assertThat(nested.getDisposition()).isEqualTo("inline");
assertThat(nested.getContentType()).startsWith(TEXT_CONTENT_TYPE);
return read(nested);
}
}
} else if (bodyPart.getContent() instanceof String) {
if (bodyPart.getHeader("Content-ID") != null && bodyPart.getHeader("Content-ID")[0].equalsIgnoreCase(cid)) {
return (String) bodyPart.getContent();
}
}
}
return null;
}
private String read(BodyPart part) throws IOException, MessagingException {
try (InputStream is = part.getInputStream()) {
Scanner s = new Scanner(is, StandardCharsets.UTF_8).useDelimiter("\\A");
return s.hasNext() ? s.next() : "";
}
}
private String getTextFromMimeMultipart(
MimeMultipart mimeMultipart) throws MessagingException, IOException {
StringBuilder result = new StringBuilder();
int count = mimeMultipart.getCount();
for (int i = 0; i < count; i++) {
BodyPart bodyPart = mimeMultipart.getBodyPart(i);
if (bodyPart.isMimeType(TEXT_CONTENT_TYPE)) {
result.append("\n").append(bodyPart.getContent());
break; // without break same text appears twice in my tests
} else if (bodyPart.isMimeType("text/html")) {
result.append("\n").append(bodyPart.getContent());
} else if (bodyPart.getContent() instanceof MimeMultipart) {
result.append(getTextFromMimeMultipart((MimeMultipart) bodyPart.getContent()));
}
}
return result.toString();
}
}
|
MailerImplTest
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/reflection/Reflector.java
|
{
"start": 12845,
"end": 14011
}
|
class ____ have
// overridden a method
if (!uniqueMethods.containsKey(signature)) {
uniqueMethods.put(signature, currentMethod);
}
}
}
}
private String getSignature(Method method) {
StringBuilder sb = new StringBuilder();
Class<?> returnType = method.getReturnType();
sb.append(returnType.getName()).append('#');
sb.append(method.getName());
Class<?>[] parameters = method.getParameterTypes();
for (int i = 0; i < parameters.length; i++) {
sb.append(i == 0 ? ':' : ',').append(parameters[i].getName());
}
return sb.toString();
}
/**
* Checks whether can control member accessible.
*
* @return If can control member accessible, it return {@literal true}
*
* @since 3.5.0
*/
public static boolean canControlMemberAccessible() {
try {
SecurityManager securityManager = System.getSecurityManager();
if (null != securityManager) {
securityManager.checkPermission(new ReflectPermission("suppressAccessChecks"));
}
} catch (SecurityException e) {
return false;
}
return true;
}
/**
* Gets the name of the
|
must
|
java
|
elastic__elasticsearch
|
x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java
|
{
"start": 19401,
"end": 21963
}
|
class ____ implements BiConsumer<MultiSearchRequest, BiConsumer<MultiSearchResponse, Exception>> {
private final List<MultiSearchRequest> capturedRequests = new ArrayList<>();
private final List<BiConsumer<MultiSearchResponse, Exception>> capturedConsumers = new ArrayList<>();
@Override
public void accept(MultiSearchRequest multiSearchRequest, BiConsumer<MultiSearchResponse, Exception> consumer) {
capturedRequests.add(multiSearchRequest);
capturedConsumers.add(consumer);
}
}
public void testAllSearchesExecuted() throws Exception {
final ThreadPool threadPool = new TestThreadPool("test");
final Coordinator coordinator = new Coordinator((request, responseConsumer) -> threadPool.generic().execute(() -> {
final MultiSearchResponse.Item[] items = new MultiSearchResponse.Item[request.requests().size()];
for (int i = 0; i < items.length; i++) {
items[i] = new MultiSearchResponse.Item(emptySearchResponse(), null);
}
var res = new MultiSearchResponse(items, 0L);
try {
responseConsumer.accept(res, null);
} finally {
res.decRef();
}
}), 5, 2, 20);
try {
final Semaphore schedulePermits = new Semaphore(between(100, 10000));
final CountDownLatch completionCountdown = new CountDownLatch(schedulePermits.availablePermits());
for (int i = 0; i < 5; i++) {
threadPool.generic().execute(() -> {
while (schedulePermits.tryAcquire()) {
final AtomicBoolean completed = new AtomicBoolean();
coordinator.schedule(new SearchRequest("index"), ActionListener.running(() -> {
assertTrue(completed.compareAndSet(false, true)); // no double-completion
completionCountdown.countDown();
}));
}
});
}
assertTrue(completionCountdown.await(20L, TimeUnit.SECONDS));
assertThat(coordinator.queue, empty());
assertBusy(() -> assertThat(coordinator.getRemoteRequestsCurrent(), equalTo(0)));
// ^ assertBusy here because the final check of the queue briefly counts as another remote request, after everything is complete
} finally {
ThreadPool.terminate(threadPool, 10L, TimeUnit.SECONDS);
}
}
}
|
MockLookupFunction
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.