language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__flink | flink-python/src/main/java/org/apache/flink/table/runtime/arrow/writers/DoubleWriter.java | {
"start": 2711,
"end": 3140
} | class ____ extends DoubleWriter<ArrayData> {
private DoubleWriterForArray(Float8Vector doubleVector) {
super(doubleVector);
}
@Override
boolean isNullAt(ArrayData in, int ordinal) {
return in.isNullAt(ordinal);
}
@Override
double readDouble(ArrayData in, int ordinal) {
return in.getDouble(ordinal);
}
}
}
| DoubleWriterForArray |
java | spring-projects__spring-framework | spring-context-support/src/main/java/org/springframework/cache/jcache/config/AbstractJCacheConfiguration.java | {
"start": 1333,
"end": 1594
} | class ____ common
* structure for enabling JSR-107 annotation-driven cache management capability.
*
* @author Stephane Nicoll
* @author Juergen Hoeller
* @since 4.1
* @see JCacheConfigurer
*/
@Configuration(proxyBeanMethods = false)
public abstract | providing |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/PhaseProgress.java | {
"start": 853,
"end": 2990
} | class ____ implements ToXContentObject, Writeable {
public static final ParseField PHASE = new ParseField("phase");
public static final ParseField PROGRESS_PERCENT = new ParseField("progress_percent");
public static final ConstructingObjectParser<PhaseProgress, Void> PARSER = new ConstructingObjectParser<>(
"phase_progress",
true,
a -> new PhaseProgress((String) a[0], (int) a[1])
);
static {
PARSER.declareString(ConstructingObjectParser.constructorArg(), PHASE);
PARSER.declareInt(ConstructingObjectParser.constructorArg(), PROGRESS_PERCENT);
}
private final String phase;
private final int progressPercent;
public PhaseProgress(String phase, int progressPercent) {
this.phase = Objects.requireNonNull(phase);
this.progressPercent = progressPercent;
}
public PhaseProgress(StreamInput in) throws IOException {
phase = in.readString();
progressPercent = in.readVInt();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(phase);
out.writeVInt(progressPercent);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(PHASE.getPreferredName(), phase);
builder.field(PROGRESS_PERCENT.getPreferredName(), progressPercent);
builder.endObject();
return builder;
}
public String getPhase() {
return phase;
}
public int getProgressPercent() {
return progressPercent;
}
@Override
public int hashCode() {
return Objects.hash(phase, progressPercent);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PhaseProgress that = (PhaseProgress) o;
return Objects.equals(phase, that.phase) && progressPercent == that.progressPercent;
}
@Override
public String toString() {
return Strings.toString(this);
}
}
| PhaseProgress |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/filter/wall/WallCommentTest.java | {
"start": 892,
"end": 1820
} | class ____ extends TestCase {
public void testORACLE() throws Exception {
String sql = "SELECT F1, F2 FROM ABC --test";
OracleWallProvider provider = new OracleWallProvider();
assertFalse(provider.checkValid(sql));
assertEquals(1, provider.getCommentDenyStat().getDenyCount());
}
public void testmysql() throws Exception {
String sql = "SELECT F1, F2 FROM ABC -- test";
MySqlWallProvider provider = new MySqlWallProvider();
assertFalse(provider.checkValid(sql));
assertEquals(1, provider.getCommentDenyStat().getDenyCount());
}
public void testsqlserver() throws Exception {
String sql = "SELECT F1, F2 FROM ABC --test";
SQLServerWallProvider provider = new SQLServerWallProvider();
assertFalse(provider.checkValid(sql));
assertEquals(1, provider.getCommentDenyStat().getDenyCount());
}
}
| WallCommentTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java | {
"start": 2554,
"end": 2616
} | class ____ test.
*/
@SuppressWarnings("deprecation")
public | under |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/appender/rolling/PatternProcessorTest.java | {
"start": 1529,
"end": 18688
} | class ____ {
private static Instant parseLocalDateTime(final String text) {
return LocalDateTime.parse(text).atZone(ZoneId.systemDefault()).toInstant();
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testDontInterpretBackslashAsEscape() {
final PatternProcessor pp = new PatternProcessor("c:\\test\\new/app-%d{HH-mm-ss}.log");
final Calendar cal = Calendar.getInstance();
cal.set(Calendar.HOUR_OF_DAY, 16);
cal.set(Calendar.MINUTE, 2);
cal.set(Calendar.SECOND, 15);
final StringBuilder buf = new StringBuilder();
pp.formatFileName(buf, cal.getTime(), 23);
assertEquals("c:\\test\\new/app-16-02-15.log", buf.toString());
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeHourlyReturnsFirstMinuteOfNextHour() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd-HH}.log.gz");
final Instant initial = parseLocalDateTime("2014-03-04T10:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2014-03-04T11:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeHourlyReturnsFirstMinuteOfNextHour2() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd-HH}.log.gz");
final Instant initial = parseLocalDateTime("2014-03-04T23:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2014-03-05T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeHourlyReturnsFirstMinuteOfNextHourDstStart() {
// America/Chicago 2014 - DST start - Mar 9 02:00
// during winter GMT-6
// during summer GMT-5
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd-HH}{America/Chicago}.log.gz");
final Instant initial =
OffsetDateTime.parse("2014-03-09T01:31:59-06:00").toInstant();
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected =
OffsetDateTime.parse("2014-03-09T02:00:00-06:00").toInstant();
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeHourlyReturnsFirstMinuteOfHourAfterNextHourDstEnd() {
// America/Chicago 2014 - DST end - Nov 2 02:00
// during summer GMT-5
// during winter GMT-6
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd-HH}{America/Chicago}.log.gz");
final Instant initial =
OffsetDateTime.parse("2014-11-02T01:31:59-05:00").toInstant();
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
// expect 1h 29min since initial
final Instant expected =
OffsetDateTime.parse("2014-11-02T03:00:00-05:00").toInstant();
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeHourlyReturnsFirstMinuteOfNextYear() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd-HH}.log.gz");
final Instant initial = parseLocalDateTime("2015-12-31T23:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2016-01-01T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeMillisecondlyReturnsNextMillisec() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd-HH-mm-ss.SSS}.log.gz");
final Instant initial = parseLocalDateTime("2014-03-04T10:31:53.123");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2014-03-04T10:31:53.124");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeMinutelyReturnsFirstSecondOfNextMinute() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd-HH-mm}.log.gz");
final Instant initial = parseLocalDateTime("2014-03-04T10:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2014-03-04T10:32:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeMonthlyReturnsFirstDayOfNextMonth() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM}.log.gz");
final Instant initial = parseLocalDateTime("2014-10-15T10:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2014-11-01T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeMonthlyReturnsFirstDayOfNextMonth2() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM}.log.gz");
final Instant initial = parseLocalDateTime("2014-01-31T10:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
// Expect 1st of next month: 2014 Feb 1st
final Instant expected = parseLocalDateTime("2014-02-01T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeMonthlyReturnsFirstDayOfNextMonth3() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM}.log.gz");
final Instant initial = parseLocalDateTime("2014-12-31T10:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
// Expect 1st of next month: 2015 Jan 1st
final Instant expected = parseLocalDateTime("2015-01-01T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeMonthlyReturnsFirstDayOfNextYear() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM}.log.gz");
final Instant initial = parseLocalDateTime("2015-12-28T00:00:00");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
// We expect 1st day of next month
final Instant expected = parseLocalDateTime("2016-01-01T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeSecondlyReturnsFirstMillisecOfNextSecond() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd-HH-mm-ss}.log.gz");
final Instant initial = parseLocalDateTime("2014-03-04T10:31:53.123");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2014-03-04T10:31:54");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(Resources.LOCALE)
void testGetNextTimeWeeklyReturnsFirstDayOfNextWeek_FRANCE() {
final Locale old = Locale.getDefault();
Locale.setDefault(Locale.FRANCE); // force 1st day of the week to be Monday
try {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-W}.log.gz");
final Instant initial = parseLocalDateTime("2014-03-04T10:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2014-03-10T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
} finally {
Locale.setDefault(old);
}
}
@Test
@ResourceLock(Resources.LOCALE)
void testGetNextTimeWeeklyReturnsFirstDayOfNextWeek_US() {
final Locale old = Locale.getDefault();
Locale.setDefault(Locale.US); // force 1st day of the week to be Sunday
try {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-W}.log.gz");
final Instant initial = parseLocalDateTime("2014-03-04T10:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2014-03-09T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
} finally {
Locale.setDefault(old);
}
}
/**
* Tests https://issues.apache.org/jira/browse/LOG4J2-1232
*/
@Test
@ResourceLock(Resources.LOCALE)
void testGetNextTimeWeeklyReturnsFirstWeekInYear_US() {
final Locale old = Locale.getDefault();
Locale.setDefault(Locale.US); // force 1st day of the week to be Sunday
try {
final PatternProcessor pp = new PatternProcessor("logs/market_data_msg.log-%d{yyyy-MM-'W'W}");
final Instant initial = parseLocalDateTime("2015-12-28T00:00:00");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2016-01-03T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
} finally {
Locale.setDefault(old);
}
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeDailyReturnsFirstHourOfNextDay() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd}.log.gz");
final Instant initial = parseLocalDateTime("2014-03-04T02:31:59");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = parseLocalDateTime("2014-03-05T00:00:00");
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeDailyReturnsFirstHourOfNextDayHonoringTimeZoneOption1() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd}{GMT-6}.log.gz");
final Instant initial =
OffsetDateTime.parse("2014-03-04T02:31:59-06:00").toInstant();
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected =
OffsetDateTime.parse("2014-03-05T00:00:00-06:00").toInstant();
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
@ResourceLock(value = Resources.TIME_ZONE)
void testGetNextTimeDailyReturnsFirstHourOfNextDayHonoringTimeZoneOption2() {
final TimeZone old = TimeZone.getDefault();
TimeZone.setDefault(TimeZone.getTimeZone("GMT+10")); // default is ignored if pattern contains timezone
try {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd}{GMT-6}.log.gz");
final Instant initial =
OffsetDateTime.parse("2014-03-04T02:31:59-06:00").toInstant();
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected =
OffsetDateTime.parse("2014-03-05T00:00:00-06:00").toInstant();
assertEquals(expected, Instant.ofEpochMilli(actual));
} finally {
TimeZone.setDefault(old);
}
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
@ResourceLock(value = Resources.TIME_ZONE)
void testGetNextTimeDailyReturnsFirstHourOfNextDayHonoringTimeZoneOption3() {
final TimeZone old = TimeZone.getDefault();
TimeZone.setDefault(TimeZone.getTimeZone("GMT-10")); // default is ignored if pattern contains timezone
try {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd}{GMT-6}.log.gz");
final Instant initial =
OffsetDateTime.parse("2014-03-04T02:31:59-06:00").toInstant();
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected =
OffsetDateTime.parse("2014-03-05T00:00:00-06:00").toInstant();
assertEquals(expected, Instant.ofEpochMilli(actual));
} finally {
TimeZone.setDefault(old);
}
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeDailyReturnsFirstHourOfNextDayDstJan() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd}{America/Chicago}.log.gz");
final Instant initial =
OffsetDateTime.parse("2014-01-04T00:31:59-06:00").toInstant();
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected =
OffsetDateTime.parse("2014-01-05T00:00:00-06:00").toInstant();
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeDailyReturnsFirstHourOfNextDayDstJun() {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd}{America/Chicago}.log.gz");
final Instant initial =
OffsetDateTime.parse("2014-06-04T00:31:59-05:00").toInstant();
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected =
OffsetDateTime.parse("2014-06-05T00:00:00-05:00").toInstant();
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeDailyReturnsFirstHourOfNextDayDstStart() {
// America/Chicago 2014 - DST start - Mar 9 02:00
// during winter GMT-6
// during summer GMT-5
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd}{America/Chicago}.log.gz");
final Instant initial =
OffsetDateTime.parse("2014-03-09T00:31:59-06:00").toInstant();
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected =
OffsetDateTime.parse("2014-03-10T00:00:00-05:00").toInstant();
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
void testGetNextTimeDailyReturnsFirstHourOfNextDayDstEnd() {
// America/Chicago 2014 - DST end - Nov 2 02:00
// during summer GMT-5
// during winter GMT-6
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd}{America/Chicago}.log.gz");
final Instant initial =
OffsetDateTime.parse("2014-11-02T00:31:59-05:00").toInstant();
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected =
OffsetDateTime.parse("2014-11-03T00:00:00-06:00").toInstant();
assertEquals(expected, Instant.ofEpochMilli(actual));
}
@Test
@ResourceLock(value = Resources.LOCALE, mode = ResourceAccessMode.READ)
@ResourceLock(value = Resources.TIME_ZONE)
void testGetNextTimeDailyReturnsFirstHourOfNextDayInGmtIfZoneIsInvalid() {
final TimeZone old = TimeZone.getDefault();
TimeZone.setDefault(TimeZone.getTimeZone("GMT-10")); // default is ignored even if timezone option invalid
try {
final PatternProcessor pp = new PatternProcessor("logs/app-%d{yyyy-MM-dd}{NOTVALID}.log.gz");
final Instant initial = Instant.parse("2014-03-04T02:31:59Z");
final long actual = pp.getNextTime(initial.toEpochMilli(), 1, false);
final Instant expected = Instant.parse("2014-03-05T00:00:00Z");
assertEquals(expected, Instant.ofEpochMilli(actual));
} finally {
TimeZone.setDefault(old);
}
}
@ParameterizedTest
@ValueSource(strings = {"%d{UNIX}", "%d{UNIX_MILLIS}"})
void does_not_throw_with_unix_pattern(final String pattern) {
assertDoesNotThrow(() -> new PatternProcessor(pattern));
}
}
| PatternProcessorTest |
java | alibaba__nacos | plugin-default-impl/nacos-default-auth-plugin/src/main/java/com/alibaba/nacos/plugin/auth/impl/utils/PasswordGeneratorUtil.java | {
"start": 894,
"end": 2384
} | class ____ {
private static final String LOWER_CASE = "abcdefghijklmnopqrstuvwxyz";
private static final String UPPER_CASE = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";
private static final String DIGITS = "0123456789";
private static final String SPECIAL_CHARS = "!@#$%&";
private static final int PASSWORD_LENGTH = 8;
/**
* generateRandomPassword.
* @return
*/
public static String generateRandomPassword() {
SecureRandom random = new SecureRandom();
List<Character> pwdChars = new ArrayList<>();
pwdChars.add(LOWER_CASE.charAt(random.nextInt(LOWER_CASE.length())));
pwdChars.add(UPPER_CASE.charAt(random.nextInt(UPPER_CASE.length())));
pwdChars.add(DIGITS.charAt(random.nextInt(DIGITS.length())));
pwdChars.add(SPECIAL_CHARS.charAt(random.nextInt(SPECIAL_CHARS.length())));
// Fill the rest of the password with random characters from all categories
String allCharacters = LOWER_CASE + UPPER_CASE + DIGITS + SPECIAL_CHARS;
while (pwdChars.size() < PASSWORD_LENGTH) {
pwdChars.add(allCharacters.charAt(random.nextInt(allCharacters.length())));
}
// Shuffle to avoid predictable order
Collections.shuffle(pwdChars, random);
// Build the final password string
return pwdChars.stream().map(String::valueOf).collect(Collectors.joining());
}
}
| PasswordGeneratorUtil |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestContainerAllocation.java | {
"start": 4641,
"end": 13767
} | class ____ {
private static final Logger LOG = LoggerFactory
.getLogger(TestContainerAllocation.class);
public static final String DEFAULT_PATH = CapacitySchedulerConfiguration.ROOT + ".default";
public static final String A_PATH = CapacitySchedulerConfiguration.ROOT + ".a";
public static final String B_PATH = CapacitySchedulerConfiguration.ROOT + ".b";
public static final String C_PATH = CapacitySchedulerConfiguration.ROOT + ".c";
public static final String C1_PATH = C_PATH + ".c1";
public static final String C2_PATH = C_PATH + ".c2";
private static final QueuePath ROOT = new QueuePath(CapacitySchedulerConfiguration.ROOT);
private static final QueuePath DEFAULT = new QueuePath(DEFAULT_PATH);
private static final QueuePath A = new QueuePath(A_PATH);
private static final QueuePath B = new QueuePath(B_PATH);
private static final QueuePath C = new QueuePath(C_PATH);
private static final QueuePath C1 = new QueuePath(C1_PATH);
private static final QueuePath C2 = new QueuePath(C2_PATH);
private final int GB = 1024;
private YarnConfiguration conf;
RMNodeLabelsManager mgr;
@BeforeEach
public void setUp() throws Exception {
conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class,
ResourceScheduler.class);
mgr = new NullRMNodeLabelsManager();
mgr.init(conf);
}
@Test
@Timeout(value = 60)
public void testExcessReservationThanNodeManagerCapacity() throws Exception {
@SuppressWarnings("resource")
MockRM rm = new MockRM(conf);
rm.start();
// Register node1
MockNM nm1 = rm.registerNode("127.0.0.1:1234", 2 * GB, 4);
MockNM nm2 = rm.registerNode("127.0.0.1:2234", 3 * GB, 4);
nm1.nodeHeartbeat(true);
nm2.nodeHeartbeat(true);
// wait..
int waitCount = 20;
int size = rm.getRMContext().getRMNodes().size();
while ((size = rm.getRMContext().getRMNodes().size()) != 2
&& waitCount-- > 0) {
LOG.info("Waiting for node managers to register : " + size);
Thread.sleep(100);
}
assertEquals(2, rm.getRMContext().getRMNodes().size());
// Submit an application
RMApp app1 = MockRMAppSubmitter.submitWithMemory(128, rm);
// kick the scheduling
nm1.nodeHeartbeat(true);
RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
MockAM am1 = rm.sendAMLaunched(attempt1.getAppAttemptId());
am1.registerAppAttempt();
LOG.info("sending container requests ");
am1.addRequests(new String[] {"*"}, 2 * GB, 1, 1);
AllocateResponse alloc1Response = am1.schedule(); // send the request
// kick the scheduler
nm1.nodeHeartbeat(true);
int waitCounter = 20;
LOG.info("heartbeating nm1");
while (alloc1Response.getAllocatedContainers().size() < 1
&& waitCounter-- > 0) {
LOG.info("Waiting for containers to be created for app 1...");
Thread.sleep(500);
alloc1Response = am1.schedule();
}
LOG.info("received container : "
+ alloc1Response.getAllocatedContainers().size());
// No container should be allocated.
// Internally it should not been reserved.
assertTrue(alloc1Response.getAllocatedContainers().size() == 0);
LOG.info("heartbeating nm2");
waitCounter = 20;
nm2.nodeHeartbeat(true);
while (alloc1Response.getAllocatedContainers().size() < 1
&& waitCounter-- > 0) {
LOG.info("Waiting for containers to be created for app 1...");
Thread.sleep(500);
alloc1Response = am1.schedule();
}
LOG.info("received container : "
+ alloc1Response.getAllocatedContainers().size());
assertTrue(alloc1Response.getAllocatedContainers().size() == 1);
rm.stop();
}
// This is to test container tokens are generated when the containers are
// acquired by the AM, not when the containers are allocated
@Test
public void testContainerTokenGeneratedOnPullRequest() throws Exception {
MockRM rm1 = new MockRM(conf);
rm1.start();
MockNM nm1 = rm1.registerNode("127.0.0.1:1234", 8000);
RMApp app1 = MockRMAppSubmitter.submitWithMemory(200, rm1);
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
// request a container.
am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>());
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED);
RMContainer container =
rm1.getResourceScheduler().getRMContainer(containerId2);
// no container token is generated.
assertEquals(containerId2, container.getContainerId());
assertNull(container.getContainer().getContainerToken());
// acquire the container.
List<Container> containers =
am1.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers();
assertEquals(containerId2, containers.get(0).getId());
// container token is generated.
assertNotNull(containers.get(0).getContainerToken());
rm1.stop();
}
@Test
public void testNormalContainerAllocationWhenDNSUnavailable() throws Exception{
MockRM rm1 = new MockRM(conf);
rm1.start();
MockNM nm1 = rm1.registerNode("unknownhost:1234", 8000);
RMApp app1 = MockRMAppSubmitter.submitWithMemory(200, rm1);
MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
// request a container.
am1.allocate("127.0.0.1", 1024, 1, new ArrayList<ContainerId>());
ContainerId containerId2 =
ContainerId.newContainerId(am1.getApplicationAttemptId(), 2);
rm1.waitForState(nm1, containerId2, RMContainerState.ALLOCATED);
// acquire the container.
SecurityUtilTestHelper.setTokenServiceUseIp(true);
List<Container> containers;
try {
containers =
am1.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers();
// not able to fetch the container;
assertEquals(0, containers.size());
} finally {
SecurityUtilTestHelper.setTokenServiceUseIp(false);
}
containers =
am1.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers();
// should be able to fetch the container;
assertEquals(1, containers.size());
rm1.stop();
}
// This is to test whether LogAggregationContext is passed into
// container tokens correctly
@Test
public void testLogAggregationContextPassedIntoContainerToken()
throws Exception {
MockRM rm1 = new MockRM(conf);
rm1.start();
MockNM nm1 = rm1.registerNode("127.0.0.1:1234", 8000);
MockNM nm2 = rm1.registerNode("127.0.0.1:2345", 8000);
// LogAggregationContext is set as null
assertNull(getLogAggregationContextFromContainerToken(rm1, nm1, null));
// create a not-null LogAggregationContext
LogAggregationContext logAggregationContext =
LogAggregationContext.newInstance(
"includePattern", "excludePattern",
"rolledLogsIncludePattern",
"rolledLogsExcludePattern",
"policyClass",
"policyParameters");
LogAggregationContext returned =
getLogAggregationContextFromContainerToken(rm1, nm2,
logAggregationContext);
assertEquals("includePattern", returned.getIncludePattern());
assertEquals("excludePattern", returned.getExcludePattern());
assertEquals("rolledLogsIncludePattern",
returned.getRolledLogsIncludePattern());
assertEquals("rolledLogsExcludePattern",
returned.getRolledLogsExcludePattern());
assertEquals("policyClass",
returned.getLogAggregationPolicyClassName());
assertEquals("policyParameters",
returned.getLogAggregationPolicyParameters());
rm1.stop();
}
private LogAggregationContext getLogAggregationContextFromContainerToken(
MockRM rm1, MockNM nm1, LogAggregationContext logAggregationContext)
throws Exception {
RMApp app2 = MockRMAppSubmitter.submit(rm1,
MockRMAppSubmissionData.Builder.createWithMemory(200, rm1)
.withLogAggregationContext(logAggregationContext)
.build());
MockAM am2 = MockRM.launchAndRegisterAM(app2, rm1, nm1);
nm1.nodeHeartbeat(true);
// request a container.
am2.allocate("127.0.0.1", 512, 1, new ArrayList<ContainerId>());
ContainerId containerId =
ContainerId.newContainerId(am2.getApplicationAttemptId(), 2);
rm1.waitForState(nm1, containerId, RMContainerState.ALLOCATED);
// acquire the container.
List<Container> containers =
am2.allocate(new ArrayList<ResourceRequest>(),
new ArrayList<ContainerId>()).getAllocatedContainers();
assertEquals(containerId, containers.get(0).getId());
// container token is generated.
assertNotNull(containers.get(0).getContainerToken());
ContainerTokenIdentifier token =
BuilderUtils.newContainerTokenIdentifier(containers.get(0)
.getContainerToken());
return token.getLogAggregationContext();
}
private volatile int numRetries = 0;
private | TestContainerAllocation |
java | apache__flink | flink-datastream/src/test/java/org/apache/flink/datastream/impl/common/KeyCheckedOutputCollectorTest.java | {
"start": 1118,
"end": 3229
} | class ____ {
@Test
void testCollect() {
TestingTimestampCollector.Builder<Integer> builder = TestingTimestampCollector.builder();
CompletableFuture<Integer> consumeRecord = new CompletableFuture<>();
builder.setCollectConsumer(consumeRecord::complete);
KeyCheckedOutputCollector<Integer, Integer> collector =
new KeyCheckedOutputCollector<>(builder.build(), (ignore) -> 1, () -> 1);
final int record = 1;
collector.collect(record);
assertThat(consumeRecord).isCompletedWithValue(record);
}
@Test
void testCollectAndOverwriteTimestamp() {
TestingTimestampCollector.Builder<Integer> builder = TestingTimestampCollector.builder();
CompletableFuture<Integer> consumeRecord = new CompletableFuture<>();
CompletableFuture<Long> consumeTimeStamp = new CompletableFuture<>();
builder.setCollectAndOverwriteTimestampConsumer(
(data, timeStamp) -> {
consumeRecord.complete(data);
consumeTimeStamp.complete(timeStamp);
});
final int record = 1;
final long timeStamp = 10L;
KeyCheckedOutputCollector<Integer, Integer> collector =
new KeyCheckedOutputCollector<>(builder.build(), (ignore) -> 1, () -> 1);
collector.collectAndOverwriteTimestamp(record, timeStamp);
assertThat(consumeRecord).isCompletedWithValue(record);
assertThat(consumeTimeStamp).isCompletedWithValue(timeStamp);
}
@Test
void testNotEqualToCurrentKey() {
TestingTimestampCollector.Builder<Integer> builder = TestingTimestampCollector.builder();
KeyCheckedOutputCollector<Integer, Integer> collector =
new KeyCheckedOutputCollector<>(builder.build(), (ignore) -> 1, () -> 2);
assertThatThrownBy(() -> collector.collect(1)).isInstanceOf(IllegalStateException.class);
assertThatThrownBy(() -> collector.collectAndOverwriteTimestamp(1, 10L))
.isInstanceOf(IllegalStateException.class);
}
}
| KeyCheckedOutputCollectorTest |
java | quarkusio__quarkus | extensions/micrometer/deployment/src/test/java/io/quarkus/micrometer/deployment/binder/StorkMetricsLoadBalancerFailTest.java | {
"start": 1296,
"end": 6627
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withConfigurationResource("test-logging.properties")
.overrideConfigKey("pingpong/mp-rest/url", "stork://pingpong-service")
.overrideConfigKey("quarkus.stork.pingpong-service.service-discovery.type", "static")
.overrideConfigKey("quarkus.stork.pingpong-service.service-discovery.address-list", "${test.url}")
.overrideConfigKey("quarkus.stork.pingpong-service.load-balancer.type", "mock")
.overrideConfigKey("greeting/mp-rest/url", "stork://greeting-service/greeting")
.overrideConfigKey("quarkus.stork.greeting-service.service-discovery.type", "static")
.overrideConfigKey("quarkus.stork.greeting-service.service-discovery.address-list", "${test.url}")
.overrideConfigKey("quarkus.stork.greeting-service.load-balancer.type", "mock")
.overrideConfigKey("quarkus.redis.devservices.enabled", "false")
.withApplicationRoot((jar) -> jar
.addClasses(PingPongResource.class, PingPongResource.PingPongRestClient.class,
MockServiceSelectorProvider.class, MockServiceSelectorConfiguration.class,
MockServiceSelectorProviderLoader.class, GreetingResource.class,
GreetingResource.GreetingRestClient.class, Util.class));
final static SimpleMeterRegistry registry = new SimpleMeterRegistry();
@BeforeAll
static void setRegistry() {
Metrics.addRegistry(registry);
}
@AfterAll()
static void removeRegistry() {
Metrics.removeRegistry(registry);
}
@Inject
MockServiceSelectorProvider provider;
@Test
public void shouldGetStorkMetricsWhenServiceSelectorFails() {
Mockito.when(provider.getLoadBalancer().selectServiceInstance(Mockito.anyCollection()))
.thenThrow(new RuntimeException("Load Balancer induced failure"));
RestAssured.when().get("/ping/one").then().statusCode(500);
RestAssured.when().get("/greeting/hola").then().statusCode(500);
//Stork metrics
assertStorkMetrics("pingpong-service");
assertStorkMetrics("greeting-service");
// Stork metrics exposed to Micrometer
assertStorkMetricsInMicrometerRegistry("pingpong-service");
assertStorkMetricsInMicrometerRegistry("greeting-service");
}
private static void assertStorkMetrics(String serviceName) {
StorkObservation metrics = StorkObservationCollectorBean.STORK_METRICS
.get(serviceName + StorkObservationCollectorBean.METRICS_SUFFIX);
Assertions.assertThat(metrics.getDiscoveredInstancesCount()).isEqualTo(1);
Assertions.assertThat(metrics.getServiceName()).isEqualTo(serviceName);
Assertions.assertThat(metrics.isDone()).isTrue();
Assertions.assertThat(metrics.isServiceDiscoverySuccessful()).isTrue();
Assertions.assertThat(metrics.failure().getMessage())
.isEqualTo("Load Balancer induced failure");
Assertions.assertThat(metrics.getOverallDuration()).isNotNull();
Assertions.assertThat(metrics.getServiceDiscoveryType()).isEqualTo("static");
Assertions.assertThat(metrics.getServiceSelectionType()).isEqualTo("mock");
Assertions.assertThat(metrics.getServiceDiscoveryDuration()).isNotNull();
Assertions.assertThat(metrics.getServiceSelectionDuration()).isNotNull();
}
private void assertStorkMetricsInMicrometerRegistry(String serviceName) {
Counter instanceCounter = registry.find("stork.service-discovery.instances.count").tag("service-name", serviceName)
.counter();
Timer serviceDiscoveryDuration = registry.find("stork.service-discovery.duration").tag("service-name", serviceName)
.timer();
Timer serviceSelectionDuration = registry.find("stork.service-selection.duration").tag("service-name", serviceName)
.timer();
Counter serviceDiscoveryFailures = registry.find("stork.service-discovery.failures").tag("service-name", serviceName)
.counter();
Counter loadBalancerFailures = registry.find("stork.service-selection.failures").tag("service-name", serviceName)
.counter();
Util.assertTags(Tag.of("service-name", serviceName), instanceCounter, serviceDiscoveryDuration,
serviceSelectionDuration);
Assertions.assertThat(instanceCounter).isNotNull();
Assertions.assertThat(serviceDiscoveryDuration).isNotNull();
Assertions.assertThat(serviceSelectionDuration).isNotNull();
Assertions.assertThat(serviceDiscoveryFailures).isNotNull();
Assertions.assertThat(loadBalancerFailures).isNotNull();
Assertions.assertThat(instanceCounter.count()).isEqualTo(1);
Assertions.assertThat(loadBalancerFailures.count()).isEqualTo(1);
Assertions.assertThat(serviceDiscoveryFailures.count()).isEqualTo(0);
Assertions.assertThat(serviceDiscoveryDuration.totalTime(TimeUnit.NANOSECONDS)).isGreaterThan(0);
Assertions.assertThat(serviceSelectionDuration.totalTime(TimeUnit.NANOSECONDS)).isGreaterThan(0);
}
}
| StorkMetricsLoadBalancerFailTest |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/XdsDependencyManager.java | {
"start": 31838,
"end": 33118
} | class ____ extends NameResolver.Listener2 {
@Override
public void onResult(final NameResolver.ResolutionResult resolutionResult) {
syncContext.execute(() -> onResult2(resolutionResult));
}
@Override
public Status onResult2(final NameResolver.ResolutionResult resolutionResult) {
if (cancelled) {
return Status.OK;
}
data = resolutionResult.getAddressesOrError();
maybePublishConfig();
return resolutionResult.getAddressesOrError().getStatus();
}
@Override
public void onError(final Status error) {
syncContext.execute(new Runnable() {
@Override
public void run() {
if (cancelled) {
return;
}
// DnsNameResolver cannot distinguish between address-not-found and transient errors.
// Assume it is a transient error.
// TODO: Once the resolution note API is available, don't throw away the error if
// hasDataValue(); pass it as the note instead
if (!hasDataValue()) {
data = StatusOr.fromStatus(error);
maybePublishConfig();
}
}
});
}
}
}
private static final | NameResolverListener |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/GenericManyToOneParameterTest.java | {
"start": 3670,
"end": 3781
} | interface ____<S extends Site> extends Bar {
S getSite();
}
@MappedSuperclass
public static abstract | SitedBar |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/io/disk/iomanager/SynchronousFileIOChannel.java | {
"start": 962,
"end": 1548
} | class ____ extends AbstractFileIOChannel {
protected SynchronousFileIOChannel(FileIOChannel.ID channelID, boolean writeEnabled)
throws IOException {
super(channelID, writeEnabled);
}
// --------------------------------------------------------------------------------------------
@Override
public boolean isClosed() {
return !this.fileChannel.isOpen();
}
@Override
public void close() throws IOException {
if (this.fileChannel.isOpen()) {
this.fileChannel.close();
}
}
}
| SynchronousFileIOChannel |
java | google__guava | android/guava/src/com/google/common/util/concurrent/ClosingFuture.java | {
"start": 78592,
"end": 84922
} | interface ____<
V1 extends @Nullable Object,
V2 extends @Nullable Object,
V3 extends @Nullable Object,
V4 extends @Nullable Object,
U extends @Nullable Object> {
/**
* Applies this function to four inputs, or throws an exception if unable to do so.
*
* <p>Any objects that are passed to {@link DeferredCloser#eventuallyClose(Object, Executor)
* closer.eventuallyClose()} will be closed when the {@link ClosingFuture} pipeline is done
* (but not before this method completes), even if this method throws or the pipeline is
* cancelled.
*/
ClosingFuture<U> apply(
DeferredCloser closer,
@ParametricNullness V1 value1,
@ParametricNullness V2 value2,
@ParametricNullness V3 value3,
@ParametricNullness V4 value4)
throws Exception;
}
private final ClosingFuture<V1> future1;
private final ClosingFuture<V2> future2;
private final ClosingFuture<V3> future3;
private final ClosingFuture<V4> future4;
private Combiner4(
ClosingFuture<V1> future1,
ClosingFuture<V2> future2,
ClosingFuture<V3> future3,
ClosingFuture<V4> future4) {
super(true, ImmutableList.of(future1, future2, future3, future4));
this.future1 = future1;
this.future2 = future2;
this.future3 = future3;
this.future4 = future4;
}
/**
* Returns a new {@code ClosingFuture} pipeline step derived from the inputs by applying a
* combining function to their values. The function can use a {@link DeferredCloser} to capture
* objects to be closed when the pipeline is done.
*
* <p>If this combiner was returned by {@link #whenAllSucceed(ClosingFuture, ClosingFuture,
* ClosingFuture, ClosingFuture)} and any of the inputs fail, so will the returned step.
*
* <p>If the function throws a {@code CancellationException}, the pipeline will be cancelled.
*
* <p>If the function throws an {@code ExecutionException}, the cause of the thrown {@code
* ExecutionException} will be extracted and used as the failure of the derived step.
*/
public <U extends @Nullable Object> ClosingFuture<U> call(
ClosingFunction4<V1, V2, V3, V4, U> function, Executor executor) {
return call(
new CombiningCallable<U>() {
@Override
@ParametricNullness
public U call(DeferredCloser closer, Peeker peeker) throws Exception {
return function.apply(
closer,
peeker.getDone(future1),
peeker.getDone(future2),
peeker.getDone(future3),
peeker.getDone(future4));
}
@Override
public String toString() {
return function.toString();
}
},
executor);
}
/**
* Returns a new {@code ClosingFuture} pipeline step derived from the inputs by applying a
* {@code ClosingFuture}-returning function to their values. The function can use a {@link
* DeferredCloser} to capture objects to be closed when the pipeline is done (other than those
* captured by the returned {@link ClosingFuture}).
*
* <p>If this combiner was returned by {@link #whenAllSucceed(ClosingFuture, ClosingFuture,
* ClosingFuture, ClosingFuture)} and any of the inputs fail, so will the returned step.
*
* <p>If the function throws a {@code CancellationException}, the pipeline will be cancelled.
*
* <p>If the function throws an {@code ExecutionException}, the cause of the thrown {@code
* ExecutionException} will be extracted and used as the failure of the derived step.
*
* <p>If the function throws any other exception, it will be used as the failure of the derived
* step.
*
* <p>If an exception is thrown after the function creates a {@code ClosingFuture}, then none of
* the closeable objects in that {@code ClosingFuture} will be closed.
*
* <p>Usage guidelines for this method:
*
* <ul>
* <li>Use this method only when calling an API that returns a {@link ListenableFuture} or a
* {@code ClosingFuture}. If possible, prefer calling {@link #call(CombiningCallable,
* Executor)} instead, with a function that returns the next value directly.
* <li>Call {@link DeferredCloser#eventuallyClose(Object, Executor) closer.eventuallyClose()}
* for every closeable object this step creates in order to capture it for later closing.
* <li>Return a {@code ClosingFuture}. To turn a {@link ListenableFuture} into a {@code
* ClosingFuture} call {@link #from(ListenableFuture)}.
* </ul>
*
* <p>The same warnings about doing heavyweight operations within {@link
* ClosingFuture#transformAsync(AsyncClosingFunction, Executor)} apply here.
*/
public <U extends @Nullable Object> ClosingFuture<U> callAsync(
AsyncClosingFunction4<V1, V2, V3, V4, U> function, Executor executor) {
return callAsync(
new AsyncCombiningCallable<U>() {
@Override
public ClosingFuture<U> call(DeferredCloser closer, Peeker peeker) throws Exception {
return function.apply(
closer,
peeker.getDone(future1),
peeker.getDone(future2),
peeker.getDone(future3),
peeker.getDone(future4));
}
@Override
public String toString() {
return function.toString();
}
},
executor);
}
}
/**
* A generic {@link Combiner} that lets you use a lambda or method reference to combine five
* {@link ClosingFuture}s. Use {@link #whenAllSucceed(ClosingFuture, ClosingFuture, ClosingFuture,
* ClosingFuture, ClosingFuture)} to start this combination.
*
* @param <V1> the type returned by the first future
* @param <V2> the type returned by the second future
* @param <V3> the type returned by the third future
* @param <V4> the type returned by the fourth future
* @param <V5> the type returned by the fifth future
*/
public static final | AsyncClosingFunction4 |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/test/FileMatchers.java | {
"start": 1528,
"end": 2892
} | class ____ extends CustomMatcher<Path> {
private final LinkOption[] options;
FileTypeMatcher(String typeName, LinkOption... options) {
super("Path is " + typeName);
this.options = options;
}
@Override
public boolean matches(Object item) {
if (item instanceof Path path) {
return matchPath(path);
} else {
return false;
}
}
protected abstract boolean matchPath(Path path);
@Override
public void describeMismatch(Object item, Description description) {
super.describeMismatch(item, description);
if (item instanceof Path path) {
if (Files.exists(path, options) == false) {
description.appendText(" (file not found)");
} else if (Files.isDirectory(path, options)) {
description.appendText(" (directory)");
} else if (Files.isSymbolicLink(path)) {
description.appendText(" (symlink)");
} else if (Files.isRegularFile(path, options)) {
description.appendText(" (regular file)");
} else {
description.appendText(" (unknown file type)");
}
}
}
}
}
| FileTypeMatcher |
java | quarkusio__quarkus | integration-tests/hibernate-search-standalone-elasticsearch/src/main/java/io/quarkus/it/hibernate/search/standalone/elasticsearch/analysis/Analysis2TestingEntity.java | {
"start": 354,
"end": 649
} | class ____ extends AnalysisTestingEntityBase {
public Analysis2TestingEntity(long id, String text) {
super(id, text);
}
@FullTextField(analyzer = "index-level-analyzer-2")
@Override
public String getText() {
return super.getText();
}
}
| Analysis2TestingEntity |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/ast/ElementFactory.java | {
"start": 1414,
"end": 1609
} | class ____
* @since 4.0.0
*/
@NonNull
ClassElement newClassElement(@NonNull C type, ElementAnnotationMetadataFactory annotationMetadataFactory);
/**
* Builds a new | element |
java | apache__camel | components/camel-quickfix/src/main/java/org/apache/camel/component/quickfixj/QuickfixjProducer.java | {
"start": 1178,
"end": 3554
} | class ____ extends DefaultProducer {
public static final String CORRELATION_TIMEOUT_KEY = "CorrelationTimeout";
public static final String CORRELATION_CRITERIA_KEY = "CorrelationCriteria";
private static final Logger LOG = LoggerFactory.getLogger(QuickfixjProducer.class);
public QuickfixjProducer(Endpoint endpoint) {
super(endpoint);
}
@Override
public QuickfixjEndpoint getEndpoint() {
return (QuickfixjEndpoint) super.getEndpoint();
}
@Override
protected void doStart() throws Exception {
getEndpoint().addProducer();
super.doStart();
}
@Override
protected void doStop() throws Exception {
getEndpoint().removeProducer();
super.doStop();
}
@Override
public void process(Exchange exchange) throws Exception {
try {
getEndpoint().ensureInitialized();
sendMessage(exchange, exchange.getIn());
} catch (Exception e) {
exchange.setException(e);
}
}
void sendMessage(Exchange exchange, org.apache.camel.Message camelMessage) throws Exception {
Message message = camelMessage.getBody(Message.class);
LOG.debug("Sending FIX message: {}", message);
SessionID messageSessionID = getEndpoint().getSID();
if (messageSessionID == null) {
messageSessionID = MessageUtils.getSessionID(message);
}
Session session = getSession(messageSessionID);
if (session == null) {
throw new IllegalStateException("Unknown session: " + messageSessionID);
}
Callable<Message> callable = null;
if (exchange.getPattern().isOutCapable()) {
MessageCorrelator messageCorrelator = getEndpoint().getEngine().getMessageCorrelator();
callable = messageCorrelator.getReply(exchange);
}
if (!session.send(message)) {
throw new CannotSendException("Cannot send FIX message: " + message.toString());
}
if (callable != null) {
Message reply = callable.call();
exchange.getMessage().getHeaders().putAll(camelMessage.getHeaders());
exchange.getMessage().setBody(reply);
}
}
Session getSession(SessionID messageSessionID) {
return Session.lookupSession(messageSessionID);
}
}
| QuickfixjProducer |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/ForStIncrementalCheckpointUtils.java | {
"start": 3072,
"end": 13313
} | class ____ implements Comparable<Score> {
public static final Score MIN = new Score(Integer.MIN_VALUE, -1.0);
private final int intersectGroupRange;
private final double overlapFraction;
public Score(int intersectGroupRange, double overlapFraction) {
this.intersectGroupRange = intersectGroupRange;
this.overlapFraction = overlapFraction;
}
public int getIntersectGroupRange() {
return intersectGroupRange;
}
public double getOverlapFraction() {
return overlapFraction;
}
@Override
public int compareTo(@Nullable Score other) {
return Comparator.nullsFirst(
Comparator.comparing(Score::getIntersectGroupRange)
.thenComparing(Score::getIntersectGroupRange)
.thenComparing(Score::getOverlapFraction))
.compare(this, other);
}
}
/**
* The method to clip the db instance according to the target key group range using the {@link
* RocksDB#delete(ColumnFamilyHandle, byte[])}.
*
* @param db the RocksDB instance to be clipped.
* @param columnFamilyHandles the column families in the db instance.
* @param targetKeyGroupRange the target key group range.
* @param currentKeyGroupRange the key group range of the db instance.
* @param keyGroupPrefixBytes Number of bytes required to prefix the key groups.
* @param useDeleteFilesInRange whether to call db.deleteFilesInRanges for the deleted ranges.
*/
public static void clipDBWithKeyGroupRange(
@Nonnull RocksDB db,
@Nonnull List<ColumnFamilyHandle> columnFamilyHandles,
@Nonnull KeyGroupRange targetKeyGroupRange,
@Nonnull KeyGroupRange currentKeyGroupRange,
@Nonnegative int keyGroupPrefixBytes,
boolean useDeleteFilesInRange)
throws RocksDBException {
List<byte[]> deleteFilesRanges = new ArrayList<>(4);
if (currentKeyGroupRange.getStartKeyGroup() < targetKeyGroupRange.getStartKeyGroup()) {
prepareRangeDeletes(
keyGroupPrefixBytes,
currentKeyGroupRange.getStartKeyGroup(),
targetKeyGroupRange.getStartKeyGroup(),
deleteFilesRanges);
}
if (currentKeyGroupRange.getEndKeyGroup() > targetKeyGroupRange.getEndKeyGroup()) {
prepareRangeDeletes(
keyGroupPrefixBytes,
targetKeyGroupRange.getEndKeyGroup() + 1,
currentKeyGroupRange.getEndKeyGroup() + 1,
deleteFilesRanges);
}
logger.info(
"Performing range delete for backend with target key-groups range {} with boundaries set {} - deleteFilesInRanges = {}.",
targetKeyGroupRange.prettyPrintInterval(),
deleteFilesRanges.stream().map(Arrays::toString).collect(Collectors.toList()),
useDeleteFilesInRange);
deleteRangeData(db, columnFamilyHandles, deleteFilesRanges, useDeleteFilesInRange);
}
private static void prepareRangeDeletes(
int keyGroupPrefixBytes,
int beginKeyGroup,
int endKeyGroup,
List<byte[]> deleteFilesRangesOut) {
byte[] beginKeyGroupBytes = new byte[keyGroupPrefixBytes];
byte[] endKeyGroupBytes = new byte[keyGroupPrefixBytes];
CompositeKeySerializationUtils.serializeKeyGroup(beginKeyGroup, beginKeyGroupBytes);
CompositeKeySerializationUtils.serializeKeyGroup(endKeyGroup, endKeyGroupBytes);
deleteFilesRangesOut.add(beginKeyGroupBytes);
deleteFilesRangesOut.add(endKeyGroupBytes);
}
/**
* Delete the record that falls into the given deleteRanges of the db.
*
* @param db the target need to be clipped.
* @param columnFamilyHandles the column family need to be clipped.
* @param deleteRanges - pairs of deleted ranges (from1, to1, from2, to2, ...). For each pair
* [from, to), the startKey ('from') is inclusive, the endKey ('to') is exclusive.
* @param useDeleteFilesInRange whether to use deleteFilesInRange to clean up redundant files.
*/
private static void deleteRangeData(
RocksDB db,
List<ColumnFamilyHandle> columnFamilyHandles,
List<byte[]> deleteRanges,
boolean useDeleteFilesInRange)
throws RocksDBException {
if (deleteRanges.isEmpty()) {
// nothing to do.
return;
}
Preconditions.checkArgument(deleteRanges.size() % 2 == 0);
for (ColumnFamilyHandle columnFamilyHandle : columnFamilyHandles) {
// First delete the files in ranges
if (useDeleteFilesInRange) {
db.deleteFilesInRanges(columnFamilyHandle, deleteRanges, false);
}
// Then put range limiting tombstones in place.
for (int i = 0; i < deleteRanges.size() / 2; i++) {
// Using RocksDB's deleteRange will take advantage of delete
// tombstones, which mark the range as deleted.
//
// https://github.com/ververica/frocksdb/blob/FRocksDB-6.20.3/include/rocksdb/db.h#L363-L377
db.deleteRange(
columnFamilyHandle, deleteRanges.get(i * 2), deleteRanges.get(i * 2 + 1));
}
}
}
/**
* Checks data in the SST files of the given DB for keys that exceed either the lower and upper
* bound of the proclaimed key-groups range of the DB.
*
* @param db the DB to check.
* @param keyGroupPrefixBytes the number of bytes used to serialize the key-group prefix of keys
* in the DB.
* @param dbExpectedKeyGroupRange the expected key-groups range of the DB.
* @return the check result with detailed info about lower and upper bound violations.
*/
public static RangeCheckResult checkSstDataAgainstKeyGroupRange(
RocksDB db, int keyGroupPrefixBytes, KeyGroupRange dbExpectedKeyGroupRange) {
final byte[] beginKeyGroupBytes = new byte[keyGroupPrefixBytes];
final byte[] endKeyGroupBytes = new byte[keyGroupPrefixBytes];
CompositeKeySerializationUtils.serializeKeyGroup(
dbExpectedKeyGroupRange.getStartKeyGroup(), beginKeyGroupBytes);
CompositeKeySerializationUtils.serializeKeyGroup(
dbExpectedKeyGroupRange.getEndKeyGroup() + 1, endKeyGroupBytes);
KeyRange dbKeyRange = getDBKeyRange(db);
return RangeCheckResult.of(
beginKeyGroupBytes,
endKeyGroupBytes,
dbKeyRange.minKey,
dbKeyRange.maxKey,
keyGroupPrefixBytes);
}
/** Returns a pair of minimum and maximum key in the sst files of the given database. */
private static KeyRange getDBKeyRange(RocksDB db) {
final Comparator<byte[]> comparator = UnsignedBytes.lexicographicalComparator();
final List<LiveFileMetaData> liveFilesMetaData = db.getLiveFilesMetaData();
if (liveFilesMetaData.isEmpty()) {
return KeyRange.EMPTY;
}
Iterator<LiveFileMetaData> liveFileMetaDataIterator = liveFilesMetaData.iterator();
LiveFileMetaData fileMetaData = liveFileMetaDataIterator.next();
byte[] smallestKey = fileMetaData.smallestKey();
byte[] largestKey = fileMetaData.largestKey();
while (liveFileMetaDataIterator.hasNext()) {
fileMetaData = liveFileMetaDataIterator.next();
byte[] sstSmallestKey = fileMetaData.smallestKey();
byte[] sstLargestKey = fileMetaData.largestKey();
if (comparator.compare(sstSmallestKey, smallestKey) < 0) {
smallestKey = sstSmallestKey;
}
if (comparator.compare(sstLargestKey, largestKey) > 0) {
largestKey = sstLargestKey;
}
}
return KeyRange.of(smallestKey, largestKey);
}
/** check whether the bytes is before prefixBytes in the character order. */
public static boolean beforeThePrefixBytes(@Nonnull byte[] bytes, @Nonnull byte[] prefixBytes) {
final int prefixLength = prefixBytes.length;
for (int i = 0; i < prefixLength; ++i) {
int r = (char) prefixBytes[i] - (char) bytes[i];
if (r != 0) {
return r > 0;
}
}
return false;
}
/**
* Choose the best state handle according to the {@link #stateHandleEvaluator(KeyedStateHandle,
* KeyGroupRange, double)} to init the initial db from the given lists and returns its index.
*
* @param restoreStateHandles The candidate state handles.
* @param targetKeyGroupRange The target key group range.
* @param overlapFractionThreshold configured threshold for overlap.
* @return the index of the best candidate handle in the list or -1 if the list was empty.
* @param <T> the generic parameter type of the state handles.
*/
public static <T extends KeyedStateHandle> int findTheBestStateHandleForInitial(
@Nonnull List<T> restoreStateHandles,
@Nonnull KeyGroupRange targetKeyGroupRange,
double overlapFractionThreshold) {
if (restoreStateHandles.isEmpty()) {
return -1;
}
// Shortcut for a common case (scale out)
if (restoreStateHandles.size() == 1) {
return 0;
}
int currentPos = 0;
int bestHandlePos = 0;
Score bestScore = Score.MIN;
for (T rawStateHandle : restoreStateHandles) {
Score handleScore =
stateHandleEvaluator(
rawStateHandle, targetKeyGroupRange, overlapFractionThreshold);
if (handleScore.compareTo(bestScore) > 0) {
bestHandlePos = currentPos;
bestScore = handleScore;
}
++currentPos;
}
return bestHandlePos;
}
/** Helper | Score |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java | {
"start": 4893,
"end": 7491
} | class ____ implements ToXContentObject, Writeable {
private final String uid;
private final String type;
private final String mode;
private final LicenseStatus status;
private final long expiryDate;
public LicenseInfo(String uid, String type, String mode, LicenseStatus status, long expiryDate) {
this.uid = uid;
this.type = type;
this.mode = mode;
this.status = status;
this.expiryDate = expiryDate;
}
public LicenseInfo(StreamInput in) throws IOException {
this(in.readString(), in.readString(), in.readString(), LicenseStatus.readFrom(in), in.readLong());
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(uid);
out.writeString(type);
out.writeString(mode);
status.writeTo(out);
out.writeLong(expiryDate);
}
public String getUid() {
return uid;
}
public String getType() {
return type;
}
public String getMode() {
return mode;
}
public long getExpiryDate() {
return expiryDate;
}
public LicenseStatus getStatus() {
return status;
}
@Override
public boolean equals(Object other) {
if (other == null || other.getClass() != getClass()) return false;
if (this == other) return true;
LicenseInfo rhs = (LicenseInfo) other;
return Objects.equals(uid, rhs.uid)
&& Objects.equals(type, rhs.type)
&& Objects.equals(mode, rhs.mode)
&& Objects.equals(status, rhs.status)
&& expiryDate == rhs.expiryDate;
}
@Override
public int hashCode() {
return Objects.hash(uid, type, mode, status, expiryDate);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("uid", uid);
builder.field("type", type);
builder.field("mode", mode);
builder.field("status", status.label());
if (expiryDate != BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) {
builder.timestampFieldsFromUnixEpochMillis("expiry_date_in_millis", "expiry_date", expiryDate);
}
return builder.endObject();
}
}
public static | LicenseInfo |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/utils/ContentUtils.java | {
"start": 860,
"end": 2775
} | class ____ {
/**
* verify the pub config content.
*
* @param content content
*/
public static void verifyIncrementPubContent(String content) {
if (content == null || content.length() == 0) {
throw new IllegalArgumentException("The content for publishing or deleting cannot be null!");
}
for (int i = 0; i < content.length(); i++) {
char c = content.charAt(i);
if (c == '\r' || c == '\n') {
throw new IllegalArgumentException("The content for publishing or deleting cannot contain enter and next line symbol!");
}
if (c == Constants.WORD_SEPARATOR.charAt(0)) {
throw new IllegalArgumentException("The content for publishing or deleting cannot contain (char)2!");
}
}
}
public static String getContentIdentity(String content) {
int index = content.indexOf(WORD_SEPARATOR);
if (index == -1) {
throw new IllegalArgumentException("The content does not contain separator!");
}
return content.substring(0, index);
}
public static String getContent(String content) {
int index = content.indexOf(WORD_SEPARATOR);
if (index == -1) {
throw new IllegalArgumentException("The content does not contain separator!");
}
return content.substring(index + 1);
}
/**
* Truncate the content.
*
* @param content content
* @return content after truncate.
*/
public static String truncateContent(String content) {
if (content == null) {
return "";
} else if (content.length() <= LIMIT_CONTENT_SIZE) {
return content;
} else {
return content.substring(0, 100) + "...";
}
}
private static final int LIMIT_CONTENT_SIZE = 100;
}
| ContentUtils |
java | eclipse-vertx__vert.x | vertx-core/src/test/java/io/vertx/tests/json/JsonCodecTest.java | {
"start": 1897,
"end": 17868
} | class ____ {
private static final TypeReference<Integer> INTEGER_TYPE_REF = new TypeReference<Integer>() {};
private static final TypeReference<Long> LONG_TYPE_REF = new TypeReference<Long>() {};
private static final TypeReference<String> STRING_TYPE_REF = new TypeReference<String>() {};
private static final TypeReference<Float> FLOAT_TYPE_REF = new TypeReference<Float>() {};
private static final TypeReference<Double> DOUBLE_TYPE_REF = new TypeReference<Double>() {};
private static final TypeReference<Map<String, Object>> MAP_TYPE_REF = new TypeReference<Map<String, Object>>() {};
private static final TypeReference<List<Object>> LIST_TYPE_REF = new TypeReference<List<Object>>() {};
private static final TypeReference<Boolean> BOOLEAN_TYPE_REF = new TypeReference<Boolean>() {};
@Parameterized.Parameters
public static Collection<Object[]> mappers() {
return Arrays.asList(new Object[][] {
{ new DatabindCodec() }, { new JacksonCodec() }
});
}
private final JacksonCodec codec;
public JsonCodecTest(JacksonCodec codec) {
this.codec = codec;
}
@Test
public void testEncodeJsonObject() {
JsonObject jsonObject = new JsonObject();
jsonObject.put("mystr", "foo");
jsonObject.put("myint", 123);
jsonObject.put("mylong", 1234l);
jsonObject.put("myfloat", 1.23f);
jsonObject.put("mydouble", 2.34d);
jsonObject.put("myboolean", true);
jsonObject.put("mybyte", 255);
byte[] bytes = TestUtils.randomByteArray(10);
jsonObject.put("mybinary", bytes);
jsonObject.put("mybuffer", Buffer.buffer(bytes));
Instant now = Instant.now();
jsonObject.put("myinstant", now);
jsonObject.putNull("mynull");
jsonObject.put("myobj", new JsonObject().put("foo", "bar"));
jsonObject.put("myarr", new JsonArray().add("foo").add(123));
String strBytes = TestUtils.toBase64String(bytes);
String expected = "{\"mystr\":\"foo\",\"myint\":123,\"mylong\":1234,\"myfloat\":1.23,\"mydouble\":2.34,\"" +
"myboolean\":true,\"mybyte\":255,\"mybinary\":\"" + strBytes + "\",\"mybuffer\":\"" + strBytes + "\",\"myinstant\":\"" + ISO_INSTANT.format(now) + "\",\"mynull\":null,\"myobj\":{\"foo\":\"bar\"},\"myarr\":[\"foo\",123]}";
String json = codec.toString(jsonObject);
assertEquals(expected, json);
}
@Test
public void testEncodeJsonArray() {
JsonArray jsonArray = new JsonArray();
jsonArray.add("foo");
jsonArray.add(123);
jsonArray.add(1234L);
jsonArray.add(1.23f);
jsonArray.add(2.34d);
jsonArray.add(true);
jsonArray.add((byte)124);
byte[] bytes = TestUtils.randomByteArray(10);
jsonArray.add(bytes);
jsonArray.add(Buffer.buffer(bytes));
jsonArray.addNull();
jsonArray.add(new JsonObject().put("foo", "bar"));
jsonArray.add(new JsonArray().add("foo").add(123));
String strBytes = TestUtils.toBase64String(bytes);
String expected = "[\"foo\",123,1234,1.23,2.34,true,124,\"" + strBytes + "\",\"" + strBytes + "\",null,{\"foo\":\"bar\"},[\"foo\",123]]";
String json = codec.toString(jsonArray);
assertEquals(expected, json);
}
@Test
public void testEncodeJsonObjectToBuffer() {
JsonObject jsonObject = new JsonObject();
jsonObject.put("mystr", "foo");
jsonObject.put("myint", 123);
jsonObject.put("mylong", 1234l);
jsonObject.put("myfloat", 1.23f);
jsonObject.put("mydouble", 2.34d);
jsonObject.put("myboolean", true);
byte[] bytes = TestUtils.randomByteArray(10);
jsonObject.put("mybinary", bytes);
jsonObject.put("mybuffer", Buffer.buffer(bytes));
Instant now = Instant.now();
jsonObject.put("myinstant", now);
jsonObject.putNull("mynull");
jsonObject.put("myobj", new JsonObject().put("foo", "bar"));
jsonObject.put("myarr", new JsonArray().add("foo").add(123));
String strBytes = TestUtils.toBase64String(bytes);
Buffer expected = Buffer.buffer("{\"mystr\":\"foo\",\"myint\":123,\"mylong\":1234,\"myfloat\":1.23,\"mydouble\":2.34,\"" +
"myboolean\":true,\"mybinary\":\"" + strBytes + "\",\"mybuffer\":\"" + strBytes + "\",\"myinstant\":\"" + ISO_INSTANT.format(now) + "\",\"mynull\":null,\"myobj\":{\"foo\":\"bar\"},\"myarr\":[\"foo\",123]}", "UTF-8");
Buffer json = codec.toBuffer(jsonObject);
assertArrayEquals(expected.getBytes(), json.getBytes());
}
@Test
public void testEncodeJsonArrayToBuffer() {
JsonArray jsonArray = new JsonArray();
jsonArray.add("foo");
jsonArray.add(123);
jsonArray.add(1234l);
jsonArray.add(1.23f);
jsonArray.add(2.34d);
jsonArray.add(true);
byte[] bytes = TestUtils.randomByteArray(10);
jsonArray.add(bytes);
jsonArray.add(Buffer.buffer(bytes));
jsonArray.addNull();
jsonArray.add(new JsonObject().put("foo", "bar"));
jsonArray.add(new JsonArray().add("foo").add(123));
String strBytes = TestUtils.toBase64String(bytes);
Buffer expected = Buffer.buffer("[\"foo\",123,1234,1.23,2.34,true,\"" + strBytes + "\",\"" + strBytes + "\",null,{\"foo\":\"bar\"},[\"foo\",123]]", "UTF-8");
Buffer json = codec.toBuffer(jsonArray);
assertArrayEquals(expected.getBytes(), json.getBytes());
}
@Test
public void testEncodeJsonObjectPrettily() {
JsonObject jsonObject = new JsonObject();
jsonObject.put("mystr", "foo");
jsonObject.put("myint", 123);
jsonObject.put("mylong", 1234l);
jsonObject.put("myfloat", 1.23f);
jsonObject.put("mydouble", 2.34d);
jsonObject.put("myboolean", true);
byte[] bytes = TestUtils.randomByteArray(10);
jsonObject.put("mybinary", bytes);
jsonObject.put("mybuffer", Buffer.buffer(bytes));
Instant now = Instant.now();
jsonObject.put("myinstant", now);
jsonObject.put("myobj", new JsonObject().put("foo", "bar"));
jsonObject.put("myarr", new JsonArray().add("foo").add(123));
String strBytes = TestUtils.toBase64String(bytes);
String strInstant = ISO_INSTANT.format(now);
String expected = "{" + Utils.LINE_SEPARATOR +
" \"mystr\" : \"foo\"," + Utils.LINE_SEPARATOR +
" \"myint\" : 123," + Utils.LINE_SEPARATOR +
" \"mylong\" : 1234," + Utils.LINE_SEPARATOR +
" \"myfloat\" : 1.23," + Utils.LINE_SEPARATOR +
" \"mydouble\" : 2.34," + Utils.LINE_SEPARATOR +
" \"myboolean\" : true," + Utils.LINE_SEPARATOR +
" \"mybinary\" : \"" + strBytes + "\"," + Utils.LINE_SEPARATOR +
" \"mybuffer\" : \"" + strBytes + "\"," + Utils.LINE_SEPARATOR +
" \"myinstant\" : \"" + strInstant + "\"," + Utils.LINE_SEPARATOR +
" \"myobj\" : {" + Utils.LINE_SEPARATOR +
" \"foo\" : \"bar\"" + Utils.LINE_SEPARATOR +
" }," + Utils.LINE_SEPARATOR +
" \"myarr\" : [ \"foo\", 123 ]" + Utils.LINE_SEPARATOR +
"}";
String json = codec.toString(jsonObject, true);
assertEquals(expected, json);
}
@Test
public void testEncodeJsonArrayPrettily() {
JsonArray jsonArray = new JsonArray();
jsonArray.add("foo");
jsonArray.add(123);
jsonArray.add(1234l);
jsonArray.add(1.23f);
jsonArray.add(2.34d);
jsonArray.add(true);
byte[] bytes = TestUtils.randomByteArray(10);
jsonArray.add(bytes);
jsonArray.add(Buffer.buffer(bytes));
jsonArray.addNull();
jsonArray.add(new JsonObject().put("foo", "bar"));
jsonArray.add(new JsonArray().add("foo").add(123));
String strBytes = TestUtils.toBase64String(bytes);
String expected = "[ \"foo\", 123, 1234, 1.23, 2.34, true, \"" + strBytes + "\", \"" + strBytes + "\", null, {" + Utils.LINE_SEPARATOR +
" \"foo\" : \"bar\"" + Utils.LINE_SEPARATOR +
"}, [ \"foo\", 123 ] ]";
String json = codec.toString(jsonArray, true);
assertEquals(expected, json);
}
@Test
public void testDecodeJsonObject() {
byte[] bytes = TestUtils.randomByteArray(10);
String strBytes = TestUtils.toBase64String(bytes);
Instant now = Instant.now();
String strInstant = ISO_INSTANT.format(now);
String json = "{\"mystr\":\"foo\",\"myint\":123,\"mylong\":1234,\"myfloat\":1.23,\"mydouble\":2.34,\"" +
"myboolean\":true,\"mybyte\":124,\"mybinary\":\"" + strBytes + "\",\"mybuffer\":\"" + strBytes + "\",\"myinstant\":\"" + strInstant + "\",\"mynull\":null,\"myobj\":{\"foo\":\"bar\"},\"myarr\":[\"foo\",123]}";
JsonObject obj = new JsonObject(codec.fromString(json, Map.class));
assertEquals(json, codec.toString(obj));
assertEquals("foo", obj.getString("mystr"));
assertEquals(Integer.valueOf(123), obj.getInteger("myint"));
assertEquals(Long.valueOf(1234), obj.getLong("mylong"));
assertEquals(Float.valueOf(1.23f), obj.getFloat("myfloat"));
assertEquals(Double.valueOf(2.34d), obj.getDouble("mydouble"));
assertTrue(obj.getBoolean("myboolean"));
assertEquals(124, obj.getValue("mybyte"));
assertArrayEquals(bytes, obj.getBinary("mybinary"));
assertEquals(Buffer.buffer(bytes), obj.getBuffer("mybuffer"));
assertEquals(TestUtils.toBase64String(bytes), obj.getValue("mybinary"));
assertEquals(TestUtils.toBase64String(bytes), obj.getValue("mybuffer"));
assertEquals(now, obj.getInstant("myinstant"));
assertEquals(now.toString(), obj.getValue("myinstant"));
assertTrue(obj.containsKey("mynull"));
JsonObject nestedObj = obj.getJsonObject("myobj");
assertEquals("bar", nestedObj.getString("foo"));
JsonArray nestedArr = obj.getJsonArray("myarr");
assertEquals("foo", nestedArr.getString(0));
assertEquals(Integer.valueOf(123), Integer.valueOf(nestedArr.getInteger(1)));
}
@Test
public void testDecodeJsonArray() {
byte[] bytes = TestUtils.randomByteArray(10);
String strBytes = TestUtils.toBase64String(bytes);
Instant now = Instant.now();
String strInstant = ISO_INSTANT.format(now);
String json = "[\"foo\",123,1234,1.23,2.34,true,124,\"" + strBytes + "\",\"" + strBytes + "\",\"" + strInstant + "\",null,{\"foo\":\"bar\"},[\"foo\",123]]";
JsonArray arr = new JsonArray(codec.fromString(json, List.class));
assertEquals("foo", arr.getString(0));
assertEquals(Integer.valueOf(123), arr.getInteger(1));
assertEquals(Long.valueOf(1234l), arr.getLong(2));
assertEquals(Float.valueOf(1.23f), arr.getFloat(3));
assertEquals(Double.valueOf(2.34d), arr.getDouble(4));
assertEquals(true, arr.getBoolean(5));
assertEquals(124, arr.getValue(6));
assertArrayEquals(bytes, arr.getBinary(7));
assertEquals(TestUtils.toBase64String(bytes), arr.getValue(7));
assertEquals(Buffer.buffer(bytes), arr.getBuffer(8));
assertEquals(TestUtils.toBase64String(bytes), arr.getValue(8));
assertEquals(now, arr.getInstant(9));
assertEquals(now.toString(), arr.getValue(9));
assertTrue(arr.hasNull(10));
JsonObject obj = arr.getJsonObject(11);
assertEquals("bar", obj.getString("foo"));
JsonArray arr2 = arr.getJsonArray(12);
assertEquals("foo", arr2.getString(0));
assertEquals(Integer.valueOf(123), arr2.getInteger(1));
}
// Strict JSON doesn't allow comments but we do so users can add comments to config files etc
@Test
public void testDecodeJsonObjectWithComments() {
String jsonWithComments =
"// single line comment\n" +
"/*\n" +
" This is a multi \n" +
" line comment\n" +
"*/\n" +
"{\n" +
"// another single line comment this time inside the JSON object itself\n" +
" \"foo\": \"bar\" // and a single line comment at end of line \n" +
"/*\n" +
" This is a another multi \n" +
" line comment this time inside the JSON object itself\n" +
"*/\n" +
"}";
JsonObject json = new JsonObject(codec.fromString(jsonWithComments, Map.class));
assertEquals("{\"foo\":\"bar\"}", codec.toString(json));
}
// Strict JSON doesn't allow comments but we do so users can add comments to config files etc
@Test
public void testDecodeJsonArrayWithComments() {
String jsonWithComments =
"// single line comment\n" +
"/*\n" +
" This is a multi \n" +
" line comment\n" +
"*/\n" +
"[\n" +
"// another single line comment this time inside the JSON array itself\n" +
" \"foo\", \"bar\" // and a single line comment at end of line \n" +
"/*\n" +
" This is a another multi \n" +
" line comment this time inside the JSON array itself\n" +
"*/\n" +
"]";
JsonArray json = new JsonArray(codec.fromString(jsonWithComments, List.class));
assertEquals("[\"foo\",\"bar\"]", codec.toString(json));
}
@Test
public void testDecodeJsonObjectWithInvalidJson() {
for (String test : new String[] { "3", "\"3", "qiwjdoiqwjdiqwjd", "{\"foo\":1},{\"bar\":2}", "{\"foo\":1} 1234" }) {
try {
codec.fromString(test, Map.class);
fail();
} catch (DecodeException ignore) {
}
try {
codec.fromBuffer(Buffer.buffer(test), Map.class);
fail();
} catch (DecodeException ignore) {
}
}
}
@Test
public void testDecodeJsonArrayWithInvalidJson() {
for (String test : new String[] { "3", "\"3", "qiwjdoiqwjdiqwjd", "[1],[2]", "[] 1234" }) {
try {
codec.fromString(test, List.class);
fail();
} catch (DecodeException ignore) {
}
try {
codec.fromBuffer(Buffer.buffer(test), List.class);
fail();
} catch (DecodeException ignore) {
}
}
}
@Test
public void encodeCustomTypeInstant() {
Instant now = Instant.now();
String json = codec.toString(now);
assertNotNull(json);
// the RFC is one way only
Instant decoded = Instant.from(ISO_INSTANT.parse(json.substring(1, json.length() - 1)));
assertEquals(now, decoded);
}
@Test
public void decodeCustomTypeInstant() {
Instant now = Instant.now();
String json = '"' + ISO_INSTANT.format(now) + '"';
Instant decoded = codec.fromString(json, Instant.class);
assertEquals(now, decoded);
}
@Test
public void encodeCustomTypeBinary() {
byte[] data = new byte[] { 'h', 'e', 'l', 'l', 'o'};
String json = codec.toString(data);
assertNotNull(json);
assertEquals("\"aGVsbG8\"", json);
json = codec.toString(Buffer.buffer(data));
assertNotNull(json);
assertEquals("\"aGVsbG8\"", json);
}
@Test
public void decodeCustomTypeBinary() {
// base64 encoded hello
byte[] data = codec.fromString("\"aGVsbG8\"", byte[].class);
assertEquals("hello", new String(data));
Buffer buff = codec.fromString("\"aGVsbG8\"", Buffer.class);
assertEquals("hello", buff.toString());
}
@Test
public void encodeNull() {
String json = codec.toString(null);
assertNotNull(json);
assertEquals("null", json);
}
@Test
public void encodeToBuffer() {
Buffer json = codec.toBuffer("Hello World!");
assertNotNull(json);
// json strings are always UTF8
assertEquals("\"Hello World!\"", json.toString());
}
@Test
public void encodeNullToBuffer() {
Buffer json = codec.toBuffer(null);
assertNotNull(json);
assertEquals("null", json.toString());
}
@Test
public void testDecodeValue() {
Assume.assumeTrue(codec instanceof DatabindCodec);
assertDecodeValue(Buffer.buffer("42"), 42, INTEGER_TYPE_REF);
assertDecodeValue(Buffer.buffer("42"), 42L, LONG_TYPE_REF);
assertDecodeValue(Buffer.buffer("\"foobar\""), "foobar", STRING_TYPE_REF);
assertDecodeValue(Buffer.buffer("3.4"), 3.4f, FLOAT_TYPE_REF);
assertDecodeValue(Buffer.buffer("3.4"), 3.4d, DOUBLE_TYPE_REF);
assertDecodeValue(Buffer.buffer("{\"foo\":4}"), Collections.singletonMap("foo", 4), MAP_TYPE_REF);
assertDecodeValue(Buffer.buffer("[0,1,2]"), Arrays.asList(0, 1, 2), LIST_TYPE_REF);
assertDecodeValue(Buffer.buffer("true"), true, BOOLEAN_TYPE_REF);
assertDecodeValue(Buffer.buffer("false"), false, BOOLEAN_TYPE_REF);
}
@Test
public void testEnumValue() {
// just a random | JsonCodecTest |
java | spring-projects__spring-framework | spring-messaging/src/test/java/org/springframework/messaging/rsocket/service/RSocketServiceIntegrationTests.java | {
"start": 3684,
"end": 3875
} | interface ____ {
@RSocketExchange("async")
Mono<String> echoAsync(String payload);
@RSocketExchange("stream")
Flux<String> echoStream(String payload);
}
@Controller
static | Service |
java | apache__camel | components/camel-geocoder/src/test/java/org/apache/camel/component/geocoder/GeoCoderApiKeyTestBase.java | {
"start": 911,
"end": 1066
} | class ____ extends CamelTestSupport {
protected String getApiKey() {
return System.getenv("CAMEL_GEOCODER_APIKEY");
}
}
| GeoCoderApiKeyTestBase |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/web/builders/HttpSecurity.java | {
"start": 19200,
"end": 22223
} | class ____ {
*
* @Bean
* public SecurityFilterChain securityFilterChain(HttpSecurity http) {
* http
* .authorizeHttpRequests((authorizeHttpRequests) ->
* authorizeHttpRequests
* .requestMatchers("/**").hasRole("USER")
* )
* .jee((jee) ->
* jee
* .mappableRoles("USER", "ADMIN")
* );
* return http.build();
* }
* }
* </pre>
*
* Developers wishing to use pre authentication with the container will need to ensure
* their web.xml configures the security constraints. For example, the web.xml (there
* is no equivalent Java based configuration supported by the Servlet specification)
* might look like:
*
* <pre>
* <login-config>
* <auth-method>FORM</auth-method>
* <form-login-config>
* <form-login-page>/login</form-login-page>
* <form-error-page>/login?error</form-error-page>
* </form-login-config>
* </login-config>
*
* <security-role>
* <role-name>ROLE_USER</role-name>
* </security-role>
* <security-constraint>
* <web-resource-collection>
* <web-resource-name>Public</web-resource-name>
* <description>Matches unconstrained pages</description>
* <url-pattern>/login</url-pattern>
* <url-pattern>/logout</url-pattern>
* <url-pattern>/resources/*</url-pattern>
* </web-resource-collection>
* </security-constraint>
* <security-constraint>
* <web-resource-collection>
* <web-resource-name>Secured Areas</web-resource-name>
* <url-pattern>/*</url-pattern>
* </web-resource-collection>
* <auth-constraint>
* <role-name>ROLE_USER</role-name>
* </auth-constraint>
* </security-constraint>
* </pre>
*
* Last you will need to configure your container to contain the user with the correct
* roles. This configuration is specific to the Servlet Container, so consult your
* Servlet Container's documentation.
* @param jeeCustomizer the {@link Customizer} to provide more options for the
* {@link JeeConfigurer}
* @return the {@link HttpSecurity} for further customizations @
*/
public HttpSecurity jee(Customizer<JeeConfigurer<HttpSecurity>> jeeCustomizer) {
jeeCustomizer.customize(getOrApply(new JeeConfigurer<>()));
return HttpSecurity.this;
}
/**
* Configures X509 based pre authentication.
*
* <h2>Example Configuration</h2>
*
* The following configuration will attempt to extract the username from the X509
* certificate. Remember that the Servlet Container will need to be configured to
* request client certificates in order for this to work.
*
* <pre>
* @Configuration
* @EnableWebSecurity
* public | JeeSecurityConfig |
java | elastic__elasticsearch | x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Exists.java | {
"start": 692,
"end": 1365
} | class ____ extends SubQueryExpression {
public Exists(Source source, LogicalPlan query) {
this(source, query, null);
}
public Exists(Source source, LogicalPlan query, NameId id) {
super(source, query, id);
}
@Override
protected NodeInfo<Exists> info() {
return NodeInfo.create(this, Exists::new, query(), id());
}
@Override
protected SubQueryExpression clone(LogicalPlan newQuery) {
return new Exists(source(), newQuery);
}
@Override
public DataType dataType() {
return BOOLEAN;
}
@Override
public Nullability nullable() {
return Nullability.FALSE;
}
}
| Exists |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ChainingConstructorIgnoresParameterTest.java | {
"start": 3540,
"end": 3902
} | enum ____ {
TEST_TARGET
}
}\
""")
.doTest();
}
@Test
public void negativeCase() {
compilationHelper
.addSourceLines(
"ChainingConstructorIgnoresParameterNegativeCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import java.io.File;
/**
* @author cpovirk@google.com (Chris Povirk)
*/
public | Location |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/chararrays/CharArrays_assertNullOrEmpty_Test.java | {
"start": 1350,
"end": 1981
} | class ____ extends CharArraysBaseTest {
@Test
void should_fail_if_array_is_not_null_and_is_not_empty() {
AssertionInfo info = someInfo();
char[] actual = { 'a' };
Throwable error = catchThrowable(() -> arrays.assertNullOrEmpty(info, actual));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeNullOrEmpty(actual));
}
@Test
void should_pass_if_array_is_null() {
arrays.assertNullOrEmpty(someInfo(), null);
}
@Test
void should_pass_if_array_is_empty() {
arrays.assertNullOrEmpty(someInfo(), emptyArray());
}
}
| CharArrays_assertNullOrEmpty_Test |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/support/ApplicationContextLifecycleTests.java | {
"start": 821,
"end": 4964
} | class ____ {
@Test
void beansStart() {
AbstractApplicationContext context = new ClassPathXmlApplicationContext("lifecycleTests.xml", getClass());
context.start();
LifecycleTestBean bean1 = (LifecycleTestBean) context.getBean("bean1");
LifecycleTestBean bean2 = (LifecycleTestBean) context.getBean("bean2");
LifecycleTestBean bean3 = (LifecycleTestBean) context.getBean("bean3");
LifecycleTestBean bean4 = (LifecycleTestBean) context.getBean("bean4");
String error = "bean was not started";
assertThat(bean1.isRunning()).as(error).isTrue();
assertThat(bean2.isRunning()).as(error).isTrue();
assertThat(bean3.isRunning()).as(error).isTrue();
assertThat(bean4.isRunning()).as(error).isTrue();
context.close();
}
@Test
void beansStop() {
AbstractApplicationContext context = new ClassPathXmlApplicationContext("lifecycleTests.xml", getClass());
context.start();
LifecycleTestBean bean1 = (LifecycleTestBean) context.getBean("bean1");
LifecycleTestBean bean2 = (LifecycleTestBean) context.getBean("bean2");
LifecycleTestBean bean3 = (LifecycleTestBean) context.getBean("bean3");
LifecycleTestBean bean4 = (LifecycleTestBean) context.getBean("bean4");
String startError = "bean was not started";
assertThat(bean1.isRunning()).as(startError).isTrue();
assertThat(bean2.isRunning()).as(startError).isTrue();
assertThat(bean3.isRunning()).as(startError).isTrue();
assertThat(bean4.isRunning()).as(startError).isTrue();
context.stop();
String stopError = "bean was not stopped";
assertThat(bean1.isRunning()).as(stopError).isFalse();
assertThat(bean2.isRunning()).as(stopError).isFalse();
assertThat(bean3.isRunning()).as(stopError).isFalse();
assertThat(bean4.isRunning()).as(stopError).isFalse();
context.close();
}
@Test
void startOrder() {
AbstractApplicationContext context = new ClassPathXmlApplicationContext("lifecycleTests.xml", getClass());
context.start();
LifecycleTestBean bean1 = (LifecycleTestBean) context.getBean("bean1");
LifecycleTestBean bean2 = (LifecycleTestBean) context.getBean("bean2");
LifecycleTestBean bean3 = (LifecycleTestBean) context.getBean("bean3");
LifecycleTestBean bean4 = (LifecycleTestBean) context.getBean("bean4");
String notStartedError = "bean was not started";
assertThat(bean1.getStartOrder()).as(notStartedError).isGreaterThan(0);
assertThat(bean2.getStartOrder()).as(notStartedError).isGreaterThan(0);
assertThat(bean3.getStartOrder()).as(notStartedError).isGreaterThan(0);
assertThat(bean4.getStartOrder()).as(notStartedError).isGreaterThan(0);
String orderError = "dependent bean must start after the bean it depends on";
assertThat(bean2.getStartOrder()).as(orderError).isGreaterThan(bean1.getStartOrder());
assertThat(bean3.getStartOrder()).as(orderError).isGreaterThan(bean2.getStartOrder());
assertThat(bean4.getStartOrder()).as(orderError).isGreaterThan(bean2.getStartOrder());
context.close();
}
@Test
void stopOrder() {
AbstractApplicationContext context = new ClassPathXmlApplicationContext("lifecycleTests.xml", getClass());
context.start();
context.stop();
LifecycleTestBean bean1 = (LifecycleTestBean) context.getBean("bean1");
LifecycleTestBean bean2 = (LifecycleTestBean) context.getBean("bean2");
LifecycleTestBean bean3 = (LifecycleTestBean) context.getBean("bean3");
LifecycleTestBean bean4 = (LifecycleTestBean) context.getBean("bean4");
String notStoppedError = "bean was not stopped";
assertThat(bean1.getStopOrder()).as(notStoppedError).isGreaterThan(0);
assertThat(bean2.getStopOrder()).as(notStoppedError).isGreaterThan(0);
assertThat(bean3.getStopOrder()).as(notStoppedError).isGreaterThan(0);
assertThat(bean4.getStopOrder()).as(notStoppedError).isGreaterThan(0);
String orderError = "dependent bean must stop before the bean it depends on";
assertThat(bean2.getStopOrder()).as(orderError).isLessThan(bean1.getStopOrder());
assertThat(bean3.getStopOrder()).as(orderError).isLessThan(bean2.getStopOrder());
assertThat(bean4.getStopOrder()).as(orderError).isLessThan(bean2.getStopOrder());
context.close();
}
}
| ApplicationContextLifecycleTests |
java | mapstruct__mapstruct | integrationtest/src/test/resources/expressionTextBlocksTest/src/main/java/org/mapstruct/itest/textBlocks/WheelPosition.java | {
"start": 230,
"end": 441
} | class ____ {
private final String position;
public WheelPosition(String position) {
this.position = position;
}
public String getPosition() {
return position;
}
}
| WheelPosition |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/privilege/RestPutPrivilegesAction.java | {
"start": 1628,
"end": 3759
} | class ____ extends SecurityBaseRestHandler {
public RestPutPrivilegesAction(Settings settings, XPackLicenseState licenseState) {
super(settings, licenseState);
}
@Override
public List<Route> routes() {
return List.of(new Route(PUT, "/_security/privilege/"), new Route(POST, "/_security/privilege/"));
}
@Override
public String getName() {
return "security_put_privileges_action";
}
@Override
public RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException {
PutPrivilegesRequestBuilder requestBuilder = new PutPrivilegesRequestBuilder(client).source(
request.requiredContent(),
request.getXContentType()
).setRefreshPolicy(request.param("refresh"));
return execute(requestBuilder);
}
static RestChannelConsumer execute(PutPrivilegesRequestBuilder requestBuilder) {
return channel -> requestBuilder.execute(new RestBuilderListener<PutPrivilegesResponse>(channel) {
@Override
public RestResponse buildResponse(PutPrivilegesResponse response, XContentBuilder builder) throws Exception {
final List<ApplicationPrivilegeDescriptor> privileges = requestBuilder.request().getPrivileges();
Map<String, Map<String, Map<String, Boolean>>> result = new HashMap<>();
privileges.stream()
.map(ApplicationPrivilegeDescriptor::getApplication)
.distinct()
.forEach(a -> result.put(a, new HashMap<>()));
privileges.forEach(privilege -> {
String name = privilege.getName();
boolean created = response.created().getOrDefault(privilege.getApplication(), Collections.emptyList()).contains(name);
result.get(privilege.getApplication()).put(name, Collections.singletonMap("created", created));
});
builder.map(result);
return new RestResponse(RestStatus.OK, builder);
}
});
}
}
| RestPutPrivilegesAction |
java | apache__spark | common/unsafe/src/main/java/org/apache/spark/sql/catalyst/util/CollationAwareUTF8String.java | {
"start": 1530,
"end": 1590
} | class ____ collation-aware UTF8String operations.
*/
public | for |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/converter/DefaultContentTypeResolver.java | {
"start": 1168,
"end": 2485
} | class ____ implements ContentTypeResolver {
private @Nullable MimeType defaultMimeType;
/**
* Set the default MIME type to use when there is no
* {@link MessageHeaders#CONTENT_TYPE} header present.
* <p>This property does not have a default value.
*/
public void setDefaultMimeType(@Nullable MimeType defaultMimeType) {
this.defaultMimeType = defaultMimeType;
}
/**
* Return the default MIME type to use if no
* {@link MessageHeaders#CONTENT_TYPE} header is present.
*/
public @Nullable MimeType getDefaultMimeType() {
return this.defaultMimeType;
}
@Override
public @Nullable MimeType resolve(@Nullable MessageHeaders headers) {
if (headers == null || headers.get(MessageHeaders.CONTENT_TYPE) == null) {
return this.defaultMimeType;
}
Object value = headers.get(MessageHeaders.CONTENT_TYPE);
if (value == null) {
return null;
}
else if (value instanceof MimeType mimeType) {
return mimeType;
}
else if (value instanceof String text) {
return MimeType.valueOf(text);
}
else {
throw new IllegalArgumentException(
"Unknown type for contentType header value: " + value.getClass());
}
}
@Override
public String toString() {
return "DefaultContentTypeResolver[" + "defaultMimeType=" + this.defaultMimeType + "]";
}
}
| DefaultContentTypeResolver |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/JSR166TestCase.java | {
"start": 26568,
"end": 27107
} | class ____ implements Runnable {
protected abstract void realRun() throws Throwable;
final Class<?> exceptionClass;
<T extends Throwable> RunnableShouldThrow(Class<T> exceptionClass) {
this.exceptionClass = exceptionClass;
}
@Override
public final void run() {
try {
realRun();
threadShouldThrow(exceptionClass.getSimpleName());
} catch (Throwable t) {
if (!exceptionClass.isInstance(t)) threadUnexpectedException(t);
}
}
}
public abstract | RunnableShouldThrow |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java | {
"start": 1743,
"end": 8764
} | class ____ extends ConnectorActionRequest implements ToXContentObject {
private final String connectorId;
private final ConnectorSyncInfo syncInfo;
@Nullable
private final Object syncCursor;
private Request(String connectorId, ConnectorSyncInfo syncInfo, Object syncCursor) {
this.connectorId = connectorId;
this.syncInfo = syncInfo;
this.syncCursor = syncCursor;
}
public Request(StreamInput in) throws IOException {
super(in);
this.connectorId = in.readString();
this.syncInfo = in.readOptionalWriteable(ConnectorSyncInfo::new);
this.syncCursor = in.readGenericValue();
}
public String getConnectorId() {
return connectorId;
}
public ConnectorSyncInfo getSyncInfo() {
return syncInfo;
}
public Object getSyncCursor() {
return syncCursor;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (Strings.isNullOrEmpty(connectorId)) {
validationException = addValidationError("[connector_id] cannot be [null] or [\"\"].", validationException);
}
return validationException;
}
private static final ConstructingObjectParser<UpdateConnectorLastSyncStatsAction.Request, String> PARSER =
new ConstructingObjectParser<>("connector_update_last_sync_stats_request", false, ((args, connectorId) -> {
int i = 0;
return new Builder().setConnectorId(connectorId)
.setSyncInfo(
new ConnectorSyncInfo.Builder().setLastAccessControlSyncError((String) args[i++])
.setLastAccessControlSyncScheduledAt((Instant) args[i++])
.setLastAccessControlSyncStatus((ConnectorSyncStatus) args[i++])
.setLastDeletedDocumentCount((Long) args[i++])
.setLastIncrementalSyncScheduledAt((Instant) args[i++])
.setLastIndexedDocumentCount((Long) args[i++])
.setLastSyncError((String) args[i++])
.setLastSyncScheduledAt((Instant) args[i++])
.setLastSyncStatus((ConnectorSyncStatus) args[i++])
.setLastSynced((Instant) args[i++])
.build()
)
.setSyncCursor(args[i])
.build();
}));
static {
PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_ERROR);
PARSER.declareField(
optionalConstructorArg(),
(p, c) -> ConnectorUtils.parseNullableInstant(
p,
ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD.getPreferredName()
),
ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_SCHEDULED_AT_FIELD,
ObjectParser.ValueType.STRING_OR_NULL
);
PARSER.declareField(
optionalConstructorArg(),
(p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()),
ConnectorSyncInfo.LAST_ACCESS_CONTROL_SYNC_STATUS_FIELD,
ObjectParser.ValueType.STRING_OR_NULL
);
PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_DELETED_DOCUMENT_COUNT_FIELD);
PARSER.declareField(
optionalConstructorArg(),
(p, c) -> ConnectorUtils.parseNullableInstant(
p,
ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD.getPreferredName()
),
ConnectorSyncInfo.LAST_INCREMENTAL_SYNC_SCHEDULED_AT_FIELD,
ObjectParser.ValueType.STRING_OR_NULL
);
PARSER.declareLong(optionalConstructorArg(), ConnectorSyncInfo.LAST_INDEXED_DOCUMENT_COUNT_FIELD);
PARSER.declareStringOrNull(optionalConstructorArg(), ConnectorSyncInfo.LAST_SYNC_ERROR_FIELD);
PARSER.declareField(
optionalConstructorArg(),
(p, c) -> ConnectorUtils.parseNullableInstant(p, ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD.getPreferredName()),
ConnectorSyncInfo.LAST_SYNC_SCHEDULED_AT_FIELD,
ObjectParser.ValueType.STRING_OR_NULL
);
PARSER.declareField(
optionalConstructorArg(),
(p, c) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? null : ConnectorSyncStatus.connectorSyncStatus(p.text()),
ConnectorSyncInfo.LAST_SYNC_STATUS_FIELD,
ObjectParser.ValueType.STRING_OR_NULL
);
PARSER.declareField(
optionalConstructorArg(),
(p, c) -> ConnectorUtils.parseNullableInstant(p, ConnectorSyncInfo.LAST_SYNCED_FIELD.getPreferredName()),
ConnectorSyncInfo.LAST_SYNCED_FIELD,
ObjectParser.ValueType.STRING_OR_NULL
);
PARSER.declareObjectOrNull(optionalConstructorArg(), (p, c) -> p.map(), null, Connector.SYNC_CURSOR_FIELD);
}
public static UpdateConnectorLastSyncStatsAction.Request fromXContent(XContentParser parser, String connectorId)
throws IOException {
return PARSER.parse(parser, connectorId);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
syncInfo.toXContent(builder, params);
if (syncCursor != null) {
builder.field(Connector.SYNC_CURSOR_FIELD.getPreferredName(), syncCursor);
}
}
builder.endObject();
return builder;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(connectorId);
out.writeOptionalWriteable(syncInfo);
out.writeGenericValue(syncCursor);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Request request = (Request) o;
return Objects.equals(connectorId, request.connectorId)
&& Objects.equals(syncInfo, request.syncInfo)
&& Objects.equals(syncCursor, request.syncCursor);
}
@Override
public int hashCode() {
return Objects.hash(connectorId, syncInfo, syncCursor);
}
public static | Request |
java | elastic__elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java | {
"start": 1287,
"end": 5734
} | class ____<T> implements BatchedIterator<T> {
private static final Logger LOGGER = LogManager.getLogger(BatchedDocumentsIterator.class);
private static final TimeValue CONTEXT_ALIVE_DURATION = TimeValue.timeValueMinutes(5);
private static final int BATCH_SIZE = 10000;
private final OriginSettingClient client;
private final String index;
private volatile long count;
private volatile long totalHits;
private volatile String scrollId;
private volatile boolean isScrollInitialised;
protected BatchedDocumentsIterator(OriginSettingClient client, String index) {
this.client = Objects.requireNonNull(client);
this.index = Objects.requireNonNull(index);
this.totalHits = 0;
this.count = 0;
this.isScrollInitialised = false;
}
/**
* Returns {@code true} if the iteration has more elements.
* (In other words, returns {@code true} if {@link #next} would
* return an element rather than throwing an exception.)
*
* @return {@code true} if the iteration has more elements
*/
@Override
public boolean hasNext() {
return isScrollInitialised == false || count != totalHits;
}
/**
* The first time next() is called, the search will be performed and the first
* batch will be returned. Any subsequent call will return the following batches.
* <p>
* Note that in some implementations it is possible that when there are no
* results at all, the first time this method is called an empty {@code Deque} is returned.
*
* @return a {@code Deque} with the next batch of documents
* @throws NoSuchElementException if the iteration has no more elements
*/
@Override
public Deque<T> next() {
if (hasNext() == false) {
throw new NoSuchElementException();
}
SearchResponse searchResponse;
if (scrollId == null) {
searchResponse = initScroll();
} else {
SearchScrollRequest searchScrollRequest = new SearchScrollRequest(scrollId).scroll(CONTEXT_ALIVE_DURATION);
searchResponse = client.searchScroll(searchScrollRequest).actionGet();
}
try {
scrollId = searchResponse.getScrollId();
return mapHits(searchResponse);
} finally {
searchResponse.decRef();
}
}
private SearchResponse initScroll() {
LOGGER.trace("ES API CALL: search index {}", index);
isScrollInitialised = true;
SearchRequest searchRequest = new SearchRequest(index);
searchRequest.indicesOptions(MlIndicesUtils.addIgnoreUnavailable(SearchRequest.DEFAULT_INDICES_OPTIONS));
searchRequest.scroll(CONTEXT_ALIVE_DURATION);
searchRequest.source(
new SearchSourceBuilder().size(BATCH_SIZE)
.query(getQuery())
.fetchSource(shouldFetchSource())
.trackTotalHits(true)
.sort(SortBuilders.fieldSort(ElasticsearchMappings.ES_DOC))
);
SearchResponse searchResponse = client.search(searchRequest).actionGet();
totalHits = searchResponse.getHits().getTotalHits().value();
scrollId = searchResponse.getScrollId();
return searchResponse;
}
private Deque<T> mapHits(SearchResponse searchResponse) {
Deque<T> results = new ArrayDeque<>();
SearchHit[] hits = searchResponse.getHits().getHits();
for (SearchHit hit : hits) {
T mapped = map(hit);
if (mapped != null) {
results.add(mapped);
}
}
count += hits.length;
if (hasNext() == false && scrollId != null) {
client.prepareClearScroll().setScrollIds(Collections.singletonList(scrollId)).get();
}
return results;
}
/**
* Should fetch source? Defaults to {@code true}
* @return whether the source should be fetched
*/
protected boolean shouldFetchSource() {
return true;
}
/**
* Get the query to use for the search
* @return the search query
*/
protected abstract QueryBuilder getQuery();
/**
* Maps the search hit to the document type
* @param hit
* the search hit
* @return The mapped document or {@code null} if the mapping failed
*/
protected abstract T map(SearchHit hit);
}
| BatchedDocumentsIterator |
java | spring-projects__spring-boot | module/spring-boot-r2dbc/src/test/java/org/springframework/boot/r2dbc/autoconfigure/R2dbcTransactionManagerAutoConfigurationTests.java | {
"start": 3759,
"end": 3894
} | class ____ {
@Bean
TransactionalService transactionalService() {
return new TransactionalServiceImpl();
}
}
| BaseConfiguration |
java | apache__camel | components/camel-nats/src/test/java/org/apache/camel/component/nats/integration/NatsAuthConsumerLoadIT.java | {
"start": 1161,
"end": 2031
} | class ____ extends NatsAuthITSupport {
@EndpointInject("mock:result")
protected MockEndpoint mockResultEndpoint;
@Test
public void testLoadConsumer() throws InterruptedException, IOException {
mockResultEndpoint.setExpectedMessageCount(100);
Options options = new Options.Builder().server("nats://" + service.getServiceAddress()).build();
Connection connection = Nats.connect(options);
for (int i = 0; i < 100; i++) {
connection.publish("test", ("test" + i).getBytes());
}
mockResultEndpoint.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("nats:test").to(mockResultEndpoint);
}
};
}
}
| NatsAuthConsumerLoadIT |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AExceptionTranslation.java | {
"start": 3252,
"end": 17456
} | class ____ extends AbstractHadoopTestBase {
public static final String WFOPENSSL_0035_STREAM_IS_CLOSED =
"Unable to execute HTTP request: "
+ ErrorTranslation.OPENSSL_STREAM_CLOSED
+ " Stream is closed";
/**
* Retry policy to use in tests.
*/
private S3ARetryPolicy retryPolicy;
@BeforeEach
public void setup() {
retryPolicy = new S3ARetryPolicy(new Configuration(false));
}
@Test
public void test301ContainsRegion() throws Exception {
String region = "us-west-1";
AwsErrorDetails redirectError = AwsErrorDetails.builder()
.sdkHttpResponse(
SdkHttpResponse.builder().putHeader(BUCKET_REGION_HEADER, region).build())
.build();
S3Exception s3Exception = createS3Exception("wrong region",
SC_301_MOVED_PERMANENTLY,
redirectError);
AWSRedirectException ex = verifyTranslated(
AWSRedirectException.class, s3Exception);
assertStatusCode(SC_301_MOVED_PERMANENTLY, ex);
assertNotNull(ex.getMessage());
assertContained(ex.getMessage(), region);
assertContained(ex.getMessage(), AWS_REGION);
assertExceptionContains(AWS_REGION, ex, "region");
assertExceptionContains(region, ex, "region name");
}
protected void assertContained(String text, String contained) {
assertTrue(text != null && text.contains(contained),
"string \""+ contained + "\" not found in \"" + text + "\"");
}
protected <E extends Throwable> E verifyTranslated(
int status,
Class<E> expected) throws Exception {
return verifyTranslated(expected, createS3Exception(status));
}
@Test
public void test400isBad() throws Exception {
verifyTranslated(SC_400_BAD_REQUEST, AWSBadRequestException.class);
}
@Test
public void test401isNotPermittedFound() throws Exception {
verifyTranslated(SC_401_UNAUTHORIZED, AccessDeniedException.class);
}
@Test
public void test403isNotPermittedFound() throws Exception {
verifyTranslated(SC_403_FORBIDDEN, AccessDeniedException.class);
}
/**
* 404 defaults to FileNotFound.
*/
@Test
public void test404isNotFound() throws Exception {
verifyTranslated(SC_404_NOT_FOUND, FileNotFoundException.class);
}
/**
* 404 + NoSuchBucket == Unknown bucket.
*/
@Test
public void testUnknownBucketException() throws Exception {
S3Exception ex404 = createS3Exception(b -> b
.statusCode(SC_404_NOT_FOUND)
.awsErrorDetails(AwsErrorDetails.builder()
.errorCode(ErrorTranslation.AwsErrorCodes.E_NO_SUCH_BUCKET)
.build()));
verifyTranslated(
UnknownStoreException.class,
ex404);
}
@Test
public void test410isNotFound() throws Exception {
verifyTranslated(SC_410_GONE, FileNotFoundException.class);
}
@Test
public void test416isEOF() throws Exception {
// 416 maps the the subclass of EOFException
final IOException ex = verifyTranslated(SC_416_RANGE_NOT_SATISFIABLE,
RangeNotSatisfiableEOFException.class);
assertThat(ex)
.isInstanceOf(EOFException.class);
}
@Test
public void testGenericS3Exception() throws Exception {
// S3 exception of no known type
AWSS3IOException ex = verifyTranslated(
AWSS3IOException.class,
createS3Exception(451));
assertStatusCode(451, ex);
}
@Test
public void testGenericServiceS3Exception() throws Exception {
// service exception of no known type
AwsServiceException ase = AwsServiceException.builder()
.message("unwind")
.statusCode(SC_500_INTERNAL_SERVER_ERROR)
.build();
AWSServiceIOException ex = verifyTranslated(
AWSStatus500Exception.class,
ase);
assertStatusCode(SC_500_INTERNAL_SERVER_ERROR, ex);
}
protected void assertStatusCode(int expected, AWSServiceIOException ex) {
assertNotNull(ex, "Null exception");
if (expected != ex.statusCode()) {
throw new AssertionError("Expected status code " + expected
+ "but got " + ex.statusCode(),
ex);
}
}
@Test
public void testGenericClientException() throws Exception {
// Generic Amazon exception
verifyTranslated(AWSClientIOException.class,
SdkException.builder().message("").build());
}
private static S3Exception createS3Exception(
Consumer<S3Exception.Builder> consumer) {
S3Exception.Builder builder = S3Exception.builder()
.awsErrorDetails(AwsErrorDetails.builder()
.build());
consumer.accept(builder);
return (S3Exception) builder.build();
}
private static S3Exception createS3Exception(int code) {
return createS3Exception(b -> b.message("").statusCode(code));
}
private static S3Exception createS3Exception(String message, int code,
AwsErrorDetails additionalDetails) {
S3Exception source = (S3Exception) S3Exception.builder()
.message(message)
.statusCode(code)
.awsErrorDetails(additionalDetails)
.build();
return source;
}
private static <E extends Throwable> E verifyTranslated(Class<E> clazz,
SdkException exception) throws Exception {
// Verifying that the translated exception have the correct error message.
IOException ioe = translateException("test", "/", exception);
assertExceptionContains(exception.getMessage(), ioe,
"Translated Exception should contain the error message of the "
+ "actual exception");
return verifyExceptionClass(clazz, ioe);
}
private void assertContainsInterrupted(boolean expected, Throwable thrown)
throws Throwable {
boolean wasInterrupted = containsInterruptedException(thrown) != null;
if (wasInterrupted != expected) {
throw thrown;
}
}
@Test
public void testInterruptExceptionDetecting() throws Throwable {
InterruptedException interrupted = new InterruptedException("irq");
assertContainsInterrupted(true, interrupted);
IOException ioe = new IOException("ioe");
assertContainsInterrupted(false, ioe);
assertContainsInterrupted(true, ioe.initCause(interrupted));
assertContainsInterrupted(true,
new InterruptedIOException("ioirq"));
}
@Test
public void testExtractInterrupted() throws Throwable {
assertThrows(InterruptedIOException.class, () -> {
throw extractException("", "",
new ExecutionException(
SdkException.builder()
.cause(new InterruptedException(""))
.build()));
});
}
@Test
public void testExtractInterruptedIO() throws Throwable {
assertThrows(InterruptedIOException.class, () -> {
throw extractException("", "",
new ExecutionException(
SdkException.builder()
.cause(new InterruptedIOException(""))
.build()));
});
}
@Test
public void testTranslateCredentialException() throws Throwable {
verifyExceptionClass(AccessDeniedException.class,
maybeTranslateCredentialException("/",
new CredentialInitializationException("Credential initialization failed")));
}
@Test
public void testTranslateNestedCredentialException() throws Throwable {
final AccessDeniedException ex =
verifyExceptionClass(AccessDeniedException.class,
maybeTranslateCredentialException("/",
sdkClientException("",
new CredentialInitializationException("Credential initialization failed"))));
// unwrap and verify that the initial client exception has been stripped
final Throwable cause = ex.getCause();
assertThat(cause)
.isInstanceOf(CredentialInitializationException.class);
CredentialInitializationException cie = (CredentialInitializationException) cause;
assertThat(cie.retryable())
.describedAs("Retryable flag")
.isFalse();
}
@Test
public void testTranslateNonCredentialException() throws Throwable {
assertThat(
maybeTranslateCredentialException("/",
sdkClientException("not a credential exception", null)))
.isNull();
assertThat(
maybeTranslateCredentialException("/",
sdkClientException("", sdkClientException("not a credential exception", null))))
.isNull();
}
@Test
public void testTranslateAuditException() throws Throwable {
verifyExceptionClass(AccessDeniedException.class,
maybeTranslateAuditException("/",
new AuditFailureException("failed")));
}
@Test
public void testTranslateNestedAuditException() throws Throwable {
verifyExceptionClass(AccessDeniedException.class,
maybeTranslateAuditException("/",
sdkClientException("", new AuditFailureException("failed"))));
}
@Test
public void testTranslateNestedAuditRejectedException() throws Throwable {
final UnsupportedRequestException ex =
verifyExceptionClass(UnsupportedRequestException.class,
maybeTranslateAuditException("/",
sdkClientException("", new AuditOperationRejectedException("rejected"))));
assertThat(ex.getCause())
.isInstanceOf(AuditOperationRejectedException.class);
}
@Test
public void testTranslateNonAuditException() throws Throwable {
assertThat(
maybeTranslateAuditException("/",
sdkClientException("not an audit exception", null)))
.isNull();
assertThat(
maybeTranslateAuditException("/",
sdkClientException("", sdkClientException("not an audit exception", null))))
.isNull();
}
/**
* 504 gateway timeout is translated to a {@link AWSApiCallTimeoutException}.
*/
@Test
public void test504ToTimeout() throws Throwable {
AWSApiCallTimeoutException ex =
verifyExceptionClass(AWSApiCallTimeoutException.class,
translateException("test", "/", createS3Exception(504)));
verifyCause(S3Exception.class, ex);
}
/**
* SDK ApiCallTimeoutException is translated to a
* {@link AWSApiCallTimeoutException}.
*/
@Test
public void testApiCallTimeoutExceptionToTimeout() throws Throwable {
AWSApiCallTimeoutException ex =
verifyExceptionClass(AWSApiCallTimeoutException.class,
translateException("test", "/",
ApiCallTimeoutException.builder()
.message("timeout")
.build()));
verifyCause(ApiCallTimeoutException.class, ex);
}
/**
* SDK ApiCallAttemptTimeoutException is translated to a
* {@link AWSApiCallTimeoutException}.
*/
@Test
public void testApiCallAttemptTimeoutExceptionToTimeout() throws Throwable {
AWSApiCallTimeoutException ex =
verifyExceptionClass(AWSApiCallTimeoutException.class,
translateException("test", "/",
ApiCallAttemptTimeoutException.builder()
.message("timeout")
.build()));
verifyCause(ApiCallAttemptTimeoutException.class, ex);
// and confirm these timeouts are retried.
assertRetried(ex);
}
@Test
public void testChannelExtraction() throws Throwable {
verifyExceptionClass(HttpChannelEOFException.class,
maybeExtractChannelException("", "/",
new NoHttpResponseException("no response")));
}
@Test
public void testShadedChannelExtraction() throws Throwable {
verifyExceptionClass(HttpChannelEOFException.class,
maybeExtractChannelException("", "/",
shadedNoHttpResponse()));
}
@Test
public void testOpenSSLErrorChannelExtraction() throws Throwable {
verifyExceptionClass(HttpChannelEOFException.class,
maybeExtractChannelException("", "/",
sdkClientException(WFOPENSSL_0035_STREAM_IS_CLOSED, null)));
}
/**
* Test handling of the unshaded HTTP client exception.
*/
@Test
public void testRawNoHttpResponseExceptionRetry() throws Throwable {
assertRetried(
verifyExceptionClass(HttpChannelEOFException.class,
translateException("test", "/",
sdkClientException(new NoHttpResponseException("no response")))));
}
/**
* Test handling of the shaded HTTP client exception.
*/
@Test
public void testShadedNoHttpResponseExceptionRetry() throws Throwable {
assertRetried(
verifyExceptionClass(HttpChannelEOFException.class,
translateException("test", "/",
sdkClientException(shadedNoHttpResponse()))));
}
@Test
public void testOpenSSLErrorRetry() throws Throwable {
assertRetried(
verifyExceptionClass(HttpChannelEOFException.class,
translateException("test", "/",
sdkClientException(WFOPENSSL_0035_STREAM_IS_CLOSED, null))));
}
@Test
public void testS3ExpressPreconditionFailure() throws Throwable {
AwsServiceException ase = AwsServiceException.builder()
.message("unwind")
.statusCode(SC_200_OK)
.awsErrorDetails(AwsErrorDetails.builder()
.errorCode(PRECONDITION_FAILED)
.build())
.build();
verifyExceptionClass(RemoteFileChangedException.class,
translateException("commit", "/path", ase));
}
/**
* Create a shaded NoHttpResponseException.
* @return an exception.
*/
private static Exception shadedNoHttpResponse() {
return new software.amazon.awssdk.thirdparty.org.apache.http.NoHttpResponseException("shaded");
}
/**
* Assert that an exception is retried.
* @param ex exception
* @throws Exception failure during retry policy evaluation.
*/
private void assertRetried(final Exception ex) throws Exception {
assertRetryOutcome(ex, RetryPolicy.RetryAction.RetryDecision.RETRY);
}
/**
* Assert that the retry policy is as expected for a given exception.
* @param ex exception
* @param decision expected decision
* @throws Exception failure during retry policy evaluation.
*/
private void assertRetryOutcome(
final Exception ex,
final RetryPolicy.RetryAction.RetryDecision decision) throws Exception {
assertThat(retryPolicy.shouldRetry(ex, 0, 0, true).action)
.describedAs("retry policy for exception %s", ex)
.isEqualTo(decision);
}
}
| TestS3AExceptionTranslation |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/JUnit4TestNotRunTest.java | {
"start": 18479,
"end": 19191
} | class ____ {
public void testThisIsATest() {}
}\
""")
.doTest();
}
@Test
public void negativeCase2() {
compilationHelper
.addSourceLines(
"JUnit4TestNotRunNegativeCase2.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.RunWith;
/**
* Not a JUnit 4 test (run with a JUnit3 test runner).
*
* @author eaftan@google.com (Eddie Aftandilian)
*/
@RunWith(JUnit38ClassRunner.class)
public | JUnit4TestNotRunNegativeCase1 |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/builditem/ConfigMappingBuildItem.java | {
"start": 1477,
"end": 1576
} | class ____ safe to use during static initialization.
*
* @return true if the configuration | is |
java | apache__hadoop | hadoop-tools/hadoop-gcp/src/test/java/org/apache/hadoop/fs/gs/contract/ITestGoogleContractConcat.java | {
"start": 1072,
"end": 1264
} | class ____ extends AbstractContractConcatTest {
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new GoogleContract(conf);
}
}
| ITestGoogleContractConcat |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/BeanInPipelineTest.java | {
"start": 1134,
"end": 1971
} | class ____ extends ContextTestSupport {
@Test
public void testBeanInPipeline() {
Object response = template.requestBody("direct:start", "Start:");
assertEquals("Start:onetwothree", response);
}
@Override
protected Registry createCamelRegistry() throws Exception {
Registry answer = super.createCamelRegistry();
answer.bind("one", new MyBean("one"));
answer.bind("two", new MyBean("two"));
answer.bind("three", new MyBean("three"));
return answer;
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start").pipeline("bean:one", "bean:two", "log:x", "log:y", "bean:three");
}
};
}
public static | BeanInPipelineTest |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/SystemExitOutsideMainTest.java | {
"start": 3125,
"end": 3526
} | class ____ {
private static void main(String[] args) {
// BUG: Diagnostic contains: SystemExitOutsideMain
System.exit(0);
}
}
""")
.doTest();
}
@Test
public void systemExitMainLookalikeDifferentArrayParameter() {
helper
.addSourceLines(
"Test.java",
"""
| Test |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/common/InterfaceAddressURLTest.java | {
"start": 1396,
"end": 5883
} | class ____ {
private static final String rawURL =
"dubbo://10.20.130.230:20880/context/path?version=1.0.0&group=g1&application=provider&timeout=1000&category=provider&side=provider&sayHello.weight=222";
private static final URL overrideURL = URL.valueOf(
"override://10.20.130.230:20880/context/path?version=1.0.0&application=morgan&timeout=2000&category=configurators&sayHello.overrideKey=override");
private static final URL consumerURL = URL.valueOf(
"consumer://10.20.130.230/context/path?version=2.0.0,1.0.0&group=g2&application=morgan&timeout=3000&side=consumer&sayHello.timeout=5000");
@Test
void testMergeOverriden() {
URL url = URL.valueOf(rawURL);
ServiceAddressURL interfaceAddressURL =
new DubboServiceAddressURL(url.getUrlAddress(), url.getUrlParam(), null, null);
assertEquals("1000", interfaceAddressURL.getParameter(TIMEOUT_KEY));
ServiceAddressURL withConsumer = DubboServiceAddressURL.valueOf(rawURL, consumerURL);
assertEquals("3000", withConsumer.getParameter(TIMEOUT_KEY));
ServiceAddressURL withOverriden =
DubboServiceAddressURL.valueOf(rawURL, consumerURL, (ServiceConfigURL) overrideURL);
assertEquals("2000", withOverriden.getParameter(TIMEOUT_KEY));
}
@Test
void testGetParameter() {
URL url = URL.valueOf(rawURL);
ServiceAddressURL interfaceAddressURL =
new DubboServiceAddressURL(url.getUrlAddress(), url.getUrlParam(), consumerURL, null);
assertEquals("3000", interfaceAddressURL.getParameter(TIMEOUT_KEY));
assertEquals("morgan", interfaceAddressURL.getApplication());
assertEquals("provider", interfaceAddressURL.getRemoteApplication());
assertEquals("dubbo", interfaceAddressURL.getProtocol());
assertEquals("context/path", interfaceAddressURL.getPath());
assertEquals("consumer", interfaceAddressURL.getSide());
assertEquals("1.0.0", interfaceAddressURL.getVersion());
assertEquals("g1", interfaceAddressURL.getGroup());
}
@Test
void testGetMethodParameter() {
URL url = URL.valueOf(rawURL);
ServiceAddressURL interfaceAddressURL = new DubboServiceAddressURL(
url.getUrlAddress(), url.getUrlParam(), consumerURL, (ServiceConfigURL) overrideURL);
assertEquals("5000", interfaceAddressURL.getMethodParameter("sayHello", TIMEOUT_KEY));
assertEquals("2000", interfaceAddressURL.getMethodParameter("non-exist-methods", TIMEOUT_KEY));
assertEquals("222", interfaceAddressURL.getMethodParameter("sayHello", "weight"));
assertEquals("222", interfaceAddressURL.getMethodParameter("sayHello", "weight"));
assertEquals("override", interfaceAddressURL.getMethodParameter("sayHello", "overrideKey"));
}
@Test
void testURLEquals() {
URL url1 = URL.valueOf(rawURL);
URL url2 = URL.valueOf(rawURL);
assertNotSame(url1, url2);
assertEquals(url1, url2);
// with consumer
ServiceAddressURL withConsumer =
new DubboServiceAddressURL(url1.getUrlAddress(), url1.getUrlParam(), consumerURL, null);
ServiceAddressURL withConsumer2 =
new DubboServiceAddressURL(url1.getUrlAddress(), url1.getUrlParam(), consumerURL, null);
assertEquals(withConsumer, withConsumer2);
ServiceAddressURL withOverride = new DubboServiceAddressURL(
url1.getUrlAddress(), url1.getUrlParam(), consumerURL, (ServiceConfigURL) overrideURL);
url2 = url2.addParameter("timeout", "4444");
ServiceAddressURL withOverride2 = new DubboServiceAddressURL(
url2.getUrlAddress(), url2.getUrlParam(), consumerURL, (ServiceConfigURL) overrideURL);
assertNotEquals(url1, url2);
assertEquals(withOverride, withOverride2);
}
@Test
void testToString() {
URL url1 = URL.valueOf(rawURL);
assertNotNull(url1.toString());
ServiceAddressURL withConsumer =
new DubboServiceAddressURL(url1.getUrlAddress(), url1.getUrlParam(), consumerURL, null);
assertNotNull(withConsumer.toString());
ServiceAddressURL withOverride2 = new DubboServiceAddressURL(
url1.getUrlAddress(), url1.getUrlParam(), consumerURL, (ServiceConfigURL) overrideURL);
assertNotNull(withOverride2.toString());
}
}
| InterfaceAddressURLTest |
java | spring-projects__spring-boot | module/spring-boot-data-ldap/src/test/java/org/springframework/boot/data/ldap/autoconfigure/domain/person/Person.java | {
"start": 1025,
"end": 1158
} | class ____ {
@Id
private Name dn;
@Attribute(name = "cn")
@DnAttribute(value = "cn", index = 1)
private String fullName;
}
| Person |
java | spring-projects__spring-security | access/src/test/java/org/springframework/security/access/expression/method/PrePostAnnotationSecurityMetadataSourceTests.java | {
"start": 10353,
"end": 10513
} | class ____ implements ReturnAnotherList {
@Override
public List<?> doSomething(List<?> param) {
return param;
}
}
public static | ReturnAnotherListImpl1 |
java | grpc__grpc-java | okhttp/src/main/java/io/grpc/okhttp/AsyncSink.java | {
"start": 7637,
"end": 8338
} | class ____ extends ForwardingFrameWriter {
public LimitControlFramesWriter(FrameWriter delegate) {
super(delegate);
}
@Override
public void ackSettings(Settings peerSettings) throws IOException {
controlFramesInWrite++;
super.ackSettings(peerSettings);
}
@Override
public void rstStream(int streamId, ErrorCode errorCode) throws IOException {
controlFramesInWrite++;
super.rstStream(streamId, errorCode);
}
@Override
public void ping(boolean ack, int payload1, int payload2) throws IOException {
if (ack) {
controlFramesInWrite++;
}
super.ping(ack, payload1, payload2);
}
}
}
| LimitControlFramesWriter |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/lookup/EnvironmentLookupTest.java | {
"start": 1008,
"end": 1283
} | class ____ {
@Test
void testLookup() {
final StrLookup lookup = new EnvironmentLookup();
String value = lookup.lookup("PATH");
assertNotNull(value);
value = lookup.lookup("BadKey");
assertNull(value);
}
}
| EnvironmentLookupTest |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/AtmosphereWebsocketEndpointBuilderFactory.java | {
"start": 52472,
"end": 61552
} | interface ____
extends
AtmosphereWebsocketEndpointConsumerBuilder,
AtmosphereWebsocketEndpointProducerBuilder {
default AdvancedAtmosphereWebsocketEndpointBuilder advanced() {
return (AdvancedAtmosphereWebsocketEndpointBuilder) this;
}
/**
* If this option is false the Servlet will disable the HTTP streaming
* and set the content-length header on the response.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param chunked the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder chunked(boolean chunked) {
doSetProperty("chunked", chunked);
return this;
}
/**
* If this option is false the Servlet will disable the HTTP streaming
* and set the content-length header on the response.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: true
* Group: common
*
* @param chunked the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder chunked(String chunked) {
doSetProperty("chunked", chunked);
return this;
}
/**
* Determines whether or not the raw input stream is cached or not. The
* Camel consumer (camel-servlet, camel-jetty etc.) will by default
* cache the input stream to support reading it multiple times to ensure
* it Camel can retrieve all data from the stream. However you can set
* this option to true when you for example need to access the raw
* stream, such as streaming it directly to a file or other persistent
* store. DefaultHttpBinding will copy the request input stream into a
* stream cache and put it into message body if this option is false to
* support reading the stream multiple times. If you use Servlet to
* bridge/proxy an endpoint then consider enabling this option to
* improve performance, in case you do not need to read the message
* payload multiple times. The producer (camel-http) will by default
* cache the response body stream. If setting this option to true, then
* the producers will not cache the response body stream but use the
* response stream as-is (the stream can only be read once) as the
* message body.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disableStreamCache the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder disableStreamCache(boolean disableStreamCache) {
doSetProperty("disableStreamCache", disableStreamCache);
return this;
}
/**
* Determines whether or not the raw input stream is cached or not. The
* Camel consumer (camel-servlet, camel-jetty etc.) will by default
* cache the input stream to support reading it multiple times to ensure
* it Camel can retrieve all data from the stream. However you can set
* this option to true when you for example need to access the raw
* stream, such as streaming it directly to a file or other persistent
* store. DefaultHttpBinding will copy the request input stream into a
* stream cache and put it into message body if this option is false to
* support reading the stream multiple times. If you use Servlet to
* bridge/proxy an endpoint then consider enabling this option to
* improve performance, in case you do not need to read the message
* payload multiple times. The producer (camel-http) will by default
* cache the response body stream. If setting this option to true, then
* the producers will not cache the response body stream but use the
* response stream as-is (the stream can only be read once) as the
* message body.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param disableStreamCache the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder disableStreamCache(String disableStreamCache) {
doSetProperty("disableStreamCache", disableStreamCache);
return this;
}
/**
* Whether to send to all (broadcast) or send to a single receiver.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param sendToAll the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder sendToAll(boolean sendToAll) {
doSetProperty("sendToAll", sendToAll);
return this;
}
/**
* Whether to send to all (broadcast) or send to a single receiver.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param sendToAll the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder sendToAll(String sendToAll) {
doSetProperty("sendToAll", sendToAll);
return this;
}
/**
* If enabled and an Exchange failed processing on the consumer side,
* and if the caused Exception was send back serialized in the response
* as a application/x-java-serialized-object content type. On the
* producer side the exception will be deserialized and thrown as is,
* instead of the HttpOperationFailedException. The caused exception is
* required to be serialized. This is by default turned off. If you
* enable this then be aware that Java will deserialize the incoming
* data from the request to Java and that can be a potential security
* risk.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param transferException the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder transferException(boolean transferException) {
doSetProperty("transferException", transferException);
return this;
}
/**
* If enabled and an Exchange failed processing on the consumer side,
* and if the caused Exception was send back serialized in the response
* as a application/x-java-serialized-object content type. On the
* producer side the exception will be deserialized and thrown as is,
* instead of the HttpOperationFailedException. The caused exception is
* required to be serialized. This is by default turned off. If you
* enable this then be aware that Java will deserialize the incoming
* data from the request to Java and that can be a potential security
* risk.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param transferException the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder transferException(String transferException) {
doSetProperty("transferException", transferException);
return this;
}
/**
* To enable streaming to send data as multiple text fragments.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param useStreaming the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder useStreaming(boolean useStreaming) {
doSetProperty("useStreaming", useStreaming);
return this;
}
/**
* To enable streaming to send data as multiple text fragments.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: common
*
* @param useStreaming the value to set
* @return the dsl builder
*/
default AtmosphereWebsocketEndpointBuilder useStreaming(String useStreaming) {
doSetProperty("useStreaming", useStreaming);
return this;
}
}
/**
* Advanced builder for endpoint for the Atmosphere Websocket component.
*/
public | AtmosphereWebsocketEndpointBuilder |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/NodeUnpublishVolumeRequest.java | {
"start": 1024,
"end": 1557
} | class ____ {
public static NodeUnpublishVolumeRequest newInstance(String volumeId,
String targetPath) {
NodeUnpublishVolumeRequest request =
Records.newRecord(NodeUnpublishVolumeRequest.class);
request.setVolumeId(volumeId);
request.setTargetPath(targetPath);
return request;
}
public abstract void setVolumeId(String volumeId);
public abstract void setTargetPath(String targetPath);
public abstract String getVolumeId();
public abstract String getTargetPath();
}
| NodeUnpublishVolumeRequest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/NMProtoUtils.java | {
"start": 1763,
"end": 5841
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(NMProtoUtils.class);
private NMProtoUtils() { }
/**
* Convert the Protobuf representation into a {@link DeletionTask}.
*
* @param proto the Protobuf representation for the DeletionTask
* @param deletionService the {@link DeletionService}
* @return the converted {@link DeletionTask}
*/
public static DeletionTask convertProtoToDeletionTask(
DeletionServiceDeleteTaskProto proto, DeletionService deletionService) {
int taskId = proto.getId();
if (proto.hasTaskType() && proto.getTaskType() != null) {
if (proto.getTaskType().equals(DeletionTaskType.FILE.name())) {
LOG.debug("Converting recovered FileDeletionTask");
return convertProtoToFileDeletionTask(proto, deletionService, taskId);
} else if (proto.getTaskType().equals(
DeletionTaskType.DOCKER_CONTAINER.name())) {
LOG.debug("Converting recovered DockerContainerDeletionTask");
return convertProtoToDockerContainerDeletionTask(proto, deletionService,
taskId);
}
}
LOG.debug("Unable to get task type, trying FileDeletionTask");
return convertProtoToFileDeletionTask(proto, deletionService, taskId);
}
/**
* Convert the Protobuf representation into the {@link FileDeletionTask}.
*
* @param proto the Protobuf representation of the {@link FileDeletionTask}.
* @param deletionService the {@link DeletionService}.
* @param taskId the ID of the {@link DeletionTask}.
* @return the populated {@link FileDeletionTask}.
*/
public static FileDeletionTask convertProtoToFileDeletionTask(
DeletionServiceDeleteTaskProto proto, DeletionService deletionService,
int taskId) {
String user = proto.hasUser() ? proto.getUser() : null;
Path subdir = null;
if (proto.hasSubdir()) {
subdir = new Path(proto.getSubdir());
}
List<Path> basePaths = null;
List<String> basedirs = proto.getBasedirsList();
if (basedirs != null && basedirs.size() > 0) {
basePaths = new ArrayList<>(basedirs.size());
for (String basedir : basedirs) {
basePaths.add(new Path(basedir));
}
}
return new FileDeletionTask(taskId, deletionService, user, subdir,
basePaths);
}
/**
* Convert the Protobuf format into the {@link DockerContainerDeletionTask}.
*
* @param proto Protobuf format of the {@link DockerContainerDeletionTask}.
* @param deletionService the {@link DeletionService}.
* @param taskId the ID of the {@link DeletionTask}.
* @return the populated {@link DockerContainerDeletionTask}.
*/
public static DockerContainerDeletionTask
convertProtoToDockerContainerDeletionTask(
DeletionServiceDeleteTaskProto proto, DeletionService deletionService,
int taskId) {
String user = proto.hasUser() ? proto.getUser() : null;
String containerId =
proto.hasDockerContainerId() ? proto.getDockerContainerId() : null;
return new DockerContainerDeletionTask(taskId, deletionService, user,
containerId);
}
/**
* Convert the Protobuf representation to the {@link DeletionTaskRecoveryInfo}
* representation.
*
* @param proto the Protobuf representation of the {@link DeletionTask}
* @param deletionService the {@link DeletionService}
* @return the populated {@link DeletionTaskRecoveryInfo}
*/
public static DeletionTaskRecoveryInfo convertProtoToDeletionTaskRecoveryInfo(
DeletionServiceDeleteTaskProto proto, DeletionService deletionService) {
DeletionTask deletionTask =
NMProtoUtils.convertProtoToDeletionTask(proto, deletionService);
List<Integer> successorTaskIds = new ArrayList<>();
if (proto.getSuccessorIdsList() != null &&
!proto.getSuccessorIdsList().isEmpty()) {
successorTaskIds = proto.getSuccessorIdsList();
}
long deletionTimestamp = proto.getDeletionTime();
return new DeletionTaskRecoveryInfo(deletionTask, successorTaskIds,
deletionTimestamp);
}
}
| NMProtoUtils |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/ContextConfiguration.java | {
"start": 1184,
"end": 2421
} | class ____ define metadata that is used to determine how to load and configure
* an {@link org.springframework.context.ApplicationContext ApplicationContext}
* for integration tests.
*
* <h3>Supported Resource Types</h3>
*
* <p>{@linkplain #loader Context loaders} may choose to support <em>either</em>
* path-based resource locations (typically XML configuration files) <em>or</em>
* class-based resources. Alternatively, context loaders may choose to support
* path-based <em>and</em> class-based resources simultaneously. Consequently
* {@code @ContextConfiguration} can be used to declare either path-based resource
* locations (via the {@link #locations} or {@link #value} attribute) <em>or</em>
* component classes (via the {@link #classes} attribute). Note, however, that most
* implementations of {@link SmartContextLoader} only support a single resource type.
* Path-based resource locations may be either XML configuration files or Groovy
* scripts (if Groovy is on the classpath). Of course, third-party frameworks may
* choose to support additional types of path-based resources.
*
* <h3>Component Classes</h3>
*
* <p>The term <em>component class</em> can refer to any of the following.
*
* <ul>
* <li>A | to |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/TransientOverrideAsPersistentMappedSuperclassTests.java | {
"start": 9078,
"end": 9415
} | class ____ {
private String title;
@Transient
public String getTitle() {
return title;
}
protected void setTitle(String title) {
this.title = title;
}
}
@Entity(name = "Employee")
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@DiscriminatorColumn(name = "department")
public static abstract | AbstractEmployee |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/mapreduce/RCollector.java | {
"start": 851,
"end": 1045
} | interface ____<K, V> {
/**
* Store key/value
*
* @param key available to reduce
* @param value available to reduce
*/
void emit(K key, V value);
}
| RCollector |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/errors/ShareSessionLimitReachedException.java | {
"start": 967,
"end": 1186
} | class ____ extends RetriableException {
private static final long serialVersionUID = 1L;
public ShareSessionLimitReachedException(String message) {
super(message);
}
}
| ShareSessionLimitReachedException |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationDecision.java | {
"start": 897,
"end": 1043
} | enum ____ represents the various decision types that can be taken by the
* allocators and deciders for allocating a shard to a node.
*/
public | which |
java | apache__camel | components/camel-infinispan/camel-infinispan-common/src/main/java/org/apache/camel/component/infinispan/cluster/InfinispanClusterView.java | {
"start": 2836,
"end": 3549
} | class ____ implements CamelClusterMember {
private final String id;
public ClusterMember(String id) {
this.id = id;
}
@Override
public String getId() {
return id;
}
@Override
public boolean isLeader() {
return InfinispanClusterView.this.isLeader(id);
}
@Override
public boolean isLocal() {
if (id == null) {
return false;
}
return Objects.equals(id, getLocalMember().getId());
}
@Override
public String toString() {
return "ClusterMember{" + "id='" + id + '\'' + '}';
}
}
}
| ClusterMember |
java | apache__spark | sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/Dictionary.java | {
"start": 954,
"end": 1137
} | interface ____ {
int decodeToInt(int id);
long decodeToLong(int id);
float decodeToFloat(int id);
double decodeToDouble(int id);
byte[] decodeToBinary(int id);
}
| Dictionary |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/AbstractMockChecker.java | {
"start": 2119,
"end": 2950
} | class ____<T extends Annotation> extends BugChecker
implements MethodInvocationTreeMatcher, VariableTreeMatcher {
public AbstractMockChecker(
TypeExtractor<VariableTree> varExtractor,
TypeExtractor<MethodInvocationTree> methodExtractor,
Class<T> annotationClass,
Function<T, String> getValueFunction) {
this.varExtractor = varExtractor;
this.methodExtractor = methodExtractor;
this.annotationClass = annotationClass;
this.getValueFunction = getValueFunction;
this.annotationName = annotationClass.getSimpleName();
}
/**
* A policy for determining what classes should not be mocked.
*
* <p>This interface's intended use is to forbid mocking of classes you don't control, for example
* those in the JDK itself or in a library you use.
*/
public | AbstractMockChecker |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/datastream/CustomSinkOperatorUidHashes.java | {
"start": 1238,
"end": 2690
} | class ____ {
/** Default instance providing no custom sink operator hashes. */
public static final CustomSinkOperatorUidHashes DEFAULT =
CustomSinkOperatorUidHashes.builder().build();
@Nullable private final String writerUidHash;
@Nullable private final String committerUidHash;
@Nullable private final String globalCommitterUidHash;
private CustomSinkOperatorUidHashes(
@Nullable String writerUidHash,
@Nullable String committerUidHash,
@Nullable String globalCommitterUidHash) {
this.writerUidHash = writerUidHash;
this.committerUidHash = committerUidHash;
this.globalCommitterUidHash = globalCommitterUidHash;
}
/**
* Creates a builder to construct {@link CustomSinkOperatorUidHashes}.
*
* @return {@link SinkOperatorUidHashesBuilder}
*/
public static SinkOperatorUidHashesBuilder builder() {
return new SinkOperatorUidHashesBuilder();
}
@Internal
@Nullable
public String getWriterUidHash() {
return writerUidHash;
}
@Internal
@Nullable
public String getCommitterUidHash() {
return committerUidHash;
}
@Internal
@Nullable
public String getGlobalCommitterUidHash() {
return globalCommitterUidHash;
}
/** Builder to construct {@link CustomSinkOperatorUidHashes}. */
@PublicEvolving
public static | CustomSinkOperatorUidHashes |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/operators/sort/ChannelWithBlockCount.java | {
"start": 929,
"end": 1335
} | class ____ {
private final FileIOChannel.ID channel;
private final int blockCount;
public ChannelWithBlockCount(FileIOChannel.ID channel, int blockCount) {
this.channel = channel;
this.blockCount = blockCount;
}
public FileIOChannel.ID getChannel() {
return channel;
}
public int getBlockCount() {
return blockCount;
}
}
| ChannelWithBlockCount |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/completable/CompletableDetachTest.java | {
"start": 1150,
"end": 3962
} | class ____ extends RxJavaTest {
@Test
public void doubleSubscribe() {
TestHelper.checkDoubleOnSubscribeCompletable(new Function<Completable, CompletableSource>() {
@Override
public CompletableSource apply(Completable m) throws Exception {
return m.onTerminateDetach();
}
});
}
@Test
public void dispose() {
TestHelper.checkDisposed(PublishProcessor.create().ignoreElements().onTerminateDetach());
}
@Test
public void onError() {
Completable.error(new TestException())
.onTerminateDetach()
.test()
.assertFailure(TestException.class);
}
@Test
public void onComplete() {
Completable.complete()
.onTerminateDetach()
.test()
.assertResult();
}
@Test
public void cancelDetaches() throws Exception {
Disposable d = Disposable.empty();
final WeakReference<Disposable> wr = new WeakReference<>(d);
TestObserver<Void> to = new Completable() {
@Override
protected void subscribeActual(CompletableObserver observer) {
observer.onSubscribe(wr.get());
};
}
.onTerminateDetach()
.test();
d = null;
to.dispose();
System.gc();
Thread.sleep(200);
to.assertEmpty();
assertNull(wr.get());
}
@Test
public void completeDetaches() throws Exception {
Disposable d = Disposable.empty();
final WeakReference<Disposable> wr = new WeakReference<>(d);
TestObserver<Void> to = new Completable() {
@Override
protected void subscribeActual(CompletableObserver observer) {
observer.onSubscribe(wr.get());
observer.onComplete();
observer.onComplete();
};
}
.onTerminateDetach()
.test();
d = null;
System.gc();
Thread.sleep(200);
to.assertResult();
assertNull(wr.get());
}
@Test
public void errorDetaches() throws Exception {
Disposable d = Disposable.empty();
final WeakReference<Disposable> wr = new WeakReference<>(d);
TestObserver<Void> to = new Completable() {
@Override
protected void subscribeActual(CompletableObserver observer) {
observer.onSubscribe(wr.get());
observer.onError(new TestException());
observer.onError(new IOException());
};
}
.onTerminateDetach()
.test();
d = null;
System.gc();
Thread.sleep(200);
to.assertFailure(TestException.class);
assertNull(wr.get());
}
}
| CompletableDetachTest |
java | alibaba__nacos | ai/src/test/java/com/alibaba/nacos/ai/remote/handler/a2a/ReleaseAgentCardRequestHandlerTest.java | {
"start": 2006,
"end": 7907
} | class ____ {
@Mock
private A2aServerOperationService a2aServerOperationService;
@Mock
private RequestMeta meta;
private ReleaseAgentCardRequestHandler requestHandler;
@BeforeEach
void setUp() {
requestHandler = new ReleaseAgentCardRequestHandler(a2aServerOperationService);
}
@AfterEach
void tearDown() {
}
@Test
void handleWithNullAgentCard() throws NacosException {
ReleaseAgentCardRequest request = new ReleaseAgentCardRequest();
ReleaseAgentCardResponse response = requestHandler.handle(request, meta);
assertEquals(ResponseCode.FAIL.getCode(), response.getResultCode());
assertEquals(NacosException.INVALID_PARAM, response.getErrorCode());
assertEquals("parameters `agentCard` can't be null", response.getMessage());
}
@Test
void handleWithValidNewAgentCard() throws NacosException {
final ReleaseAgentCardRequest request = new ReleaseAgentCardRequest();
AgentCard agentCard = new AgentCard();
agentCard.setName("test");
agentCard.setVersion("1.0.0");
agentCard.setProtocolVersion("0.3.0");
agentCard.setPreferredTransport("JSONRPC");
agentCard.setUrl("https://example.com");
request.setAgentCard(agentCard);
request.setNamespaceId("public");
when(meta.getConnectionId()).thenReturn("TEST_CONNECTION_ID");
when(a2aServerOperationService.getAgentCard("public", "test", "1.0.0", "")).thenThrow(
new NacosApiException(NacosException.NOT_FOUND, ErrorCode.AGENT_NOT_FOUND, ""));
ReleaseAgentCardResponse response = requestHandler.handle(request, meta);
assertEquals(ResponseCode.SUCCESS.getCode(), response.getResultCode());
assertNull(response.getMessage());
verify(a2aServerOperationService).registerAgent(any(AgentCard.class), anyString(), anyString());
}
@Test
void handleWithValidNewVersionAgentCard() throws NacosException {
final ReleaseAgentCardRequest request = new ReleaseAgentCardRequest();
AgentCard agentCard = new AgentCard();
agentCard.setName("test");
agentCard.setVersion("1.0.0");
agentCard.setProtocolVersion("0.3.0");
agentCard.setPreferredTransport("JSONRPC");
agentCard.setUrl("https://example.com");
request.setAgentCard(agentCard);
request.setNamespaceId("public");
request.setSetAsLatest(true);
when(meta.getConnectionId()).thenReturn("TEST_CONNECTION_ID");
AgentCardDetailInfo existAgentCard = new AgentCardDetailInfo();
existAgentCard.setName("test");
existAgentCard.setVersion("0.9.0");
when(a2aServerOperationService.getAgentCard("public", "test", "1.0.0", "")).thenThrow(
new NacosApiException(NacosException.NOT_FOUND, ErrorCode.AGENT_VERSION_NOT_FOUND, ""));
ReleaseAgentCardResponse response = requestHandler.handle(request, meta);
assertEquals(ResponseCode.SUCCESS.getCode(), response.getResultCode());
assertNull(response.getMessage());
verify(a2aServerOperationService).updateAgentCard(any(AgentCard.class), anyString(), anyString(), eq(true));
}
@Test
void handleWithExistingAgentCard() throws NacosException {
final ReleaseAgentCardRequest request = new ReleaseAgentCardRequest();
AgentCard agentCard = new AgentCard();
agentCard.setName("test");
agentCard.setVersion("1.0.0");
agentCard.setProtocolVersion("0.3.0");
agentCard.setPreferredTransport("JSONRPC");
agentCard.setUrl("https://example.com");
request.setAgentCard(agentCard);
request.setNamespaceId("public");
when(meta.getConnectionId()).thenReturn("TEST_CONNECTION_ID");
AgentCardDetailInfo existAgentCard = new AgentCardDetailInfo();
existAgentCard.setName("test");
existAgentCard.setVersion("1.0.0");
when(a2aServerOperationService.getAgentCard("public", "test", "1.0.0", "")).thenReturn(existAgentCard);
ReleaseAgentCardResponse response = requestHandler.handle(request, meta);
assertEquals(ResponseCode.SUCCESS.getCode(), response.getResultCode());
verify(a2aServerOperationService, never()).registerAgent(any(AgentCard.class), anyString(), anyString());
verify(a2aServerOperationService, never()).updateAgentCard(any(AgentCard.class), anyString(), anyString(),
anyBoolean());
}
@Test
void handleWithOtherException() throws NacosException {
final ReleaseAgentCardRequest request = new ReleaseAgentCardRequest();
AgentCard agentCard = new AgentCard();
agentCard.setName("test");
agentCard.setVersion("1.0.0");
agentCard.setProtocolVersion("0.3.0");
agentCard.setPreferredTransport("JSONRPC");
agentCard.setUrl("https://example.com");
request.setAgentCard(agentCard);
request.setNamespaceId("public");
when(meta.getConnectionId()).thenReturn("TEST_CONNECTION_ID");
when(a2aServerOperationService.getAgentCard("public", "test", "1.0.0", "")).thenThrow(
new NacosApiException(NacosException.SERVER_ERROR, ErrorCode.SERVER_ERROR, "test error"));
ReleaseAgentCardResponse response = requestHandler.handle(request, meta);
assertEquals(ResponseCode.FAIL.getCode(), response.getResultCode());
assertEquals(NacosException.SERVER_ERROR, response.getErrorCode());
assertEquals("test error", response.getMessage());
verify(a2aServerOperationService, never()).registerAgent(any(AgentCard.class), anyString(), anyString());
verify(a2aServerOperationService, never()).updateAgentCard(any(AgentCard.class), anyString(), anyString(),
anyBoolean());
}
} | ReleaseAgentCardRequestHandlerTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonPriorityQueue.java | {
"start": 3403,
"end": 14679
} | class ____ of " + className + " differs from used by this SortedSet!");
}
Class<?> clazz = Class.forName(className);
comparator = (Comparator<V>) clazz.newInstance();
} else {
throw new IllegalStateException("Comparator is not set!");
}
} catch (IllegalStateException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
// TODO cache result
private static String calcClassSign(String name) {
try {
Class<?> clazz = Class.forName(name);
ByteArrayOutputStream result = new ByteArrayOutputStream();
ObjectOutputStream outputStream = new ObjectOutputStream(result);
outputStream.writeObject(clazz);
outputStream.close();
MessageDigest crypt = MessageDigest.getInstance("SHA-1");
crypt.reset();
crypt.update(result.toByteArray());
return new BigInteger(1, crypt.digest()).toString(16);
} catch (Exception e) {
throw new IllegalStateException("Can't calculate sign of " + name, e);
}
}
@Override
public boolean offer(V e) {
return add(e);
}
@Override
public boolean contains(Object o) {
checkComparator();
return binarySearch((V) o).getIndex() >= 0;
}
@Override
public boolean add(V value) {
lock.lock();
try {
checkComparator();
BinarySearchResult<V> res = binarySearch(value);
int index = 0;
if (res.getIndex() < 0) {
index = -(res.getIndex() + 1);
} else {
index = res.getIndex() + 1;
}
get(commandExecutor.evalWriteNoRetryAsync(getRawName(), codec, RedisCommands.EVAL_VOID,
"local len = redis.call('llen', KEYS[1]);"
+ "if tonumber(ARGV[1]) < len then "
+ "local pivot = redis.call('lindex', KEYS[1], ARGV[1]);"
+ "redis.call('linsert', KEYS[1], 'before', pivot, ARGV[2]);"
+ "return;"
+ "end;"
+ "redis.call('rpush', KEYS[1], ARGV[2]);",
Arrays.asList(getRawName()),
index, encode(value)));
return true;
} finally {
lock.unlock();
}
}
private void checkComparator() {
String comparatorSign = comparatorHolder.get();
if (comparatorSign != null) {
String[] vals = comparatorSign.split(":");
String className = vals[0];
if (!comparator.getClass().getName().equals(className)) {
loadComparator();
}
}
}
@Override
public boolean remove(Object value) {
lock.lock();
try {
checkComparator();
BinarySearchResult<V> res = binarySearch((V) value);
if (res.getIndex() < 0) {
return false;
}
remove((int) res.getIndex());
return true;
} finally {
lock.unlock();
}
}
@Override
public boolean containsAll(Collection<?> c) {
checkComparator();
for (Object object : c) {
if (binarySearch((V) object).getIndex() < 0) {
return false;
}
}
return true;
}
@Override
public boolean addAll(Collection<? extends V> c) {
boolean changed = false;
for (V v : c) {
if (add(v)) {
changed = true;
}
}
return changed;
}
@Override
public boolean retainAll(Collection<?> c) {
boolean changed = false;
for (Iterator<?> iterator = iterator(); iterator.hasNext();) {
Object object = (Object) iterator.next();
if (!c.contains(object)) {
iterator.remove();
changed = true;
}
}
return changed;
}
@Override
public boolean removeAll(Collection<?> c) {
boolean changed = false;
for (Object obj : c) {
if (remove(obj)) {
changed = true;
}
}
return changed;
}
@Override
public void clear() {
delete();
}
@Override
public Comparator<? super V> comparator() {
return comparator;
}
@Override
public RFuture<V> pollAsync() {
return wrapLockedAsync(RedisCommands.LPOP, getRawName());
}
protected final <T> RFuture<V> wrapLockedAsync(RedisCommand<T> command, Object... params) {
return wrapLockedAsync(() -> {
return commandExecutor.writeAsync(getRawName(), codec, command, params);
});
}
protected final <T, R> RFuture<R> wrapLockedAsync(Supplier<RFuture<R>> callable) {
long randomId = getServiceManager().getRandom().nextLong();
CompletionStage<R> f = lock.lockAsync(randomId).thenCompose(r -> {
RFuture<R> callback = callable.get();
return callback.handle((value, ex) -> {
CompletableFuture<R> result = new CompletableFuture<>();
lock.unlockAsync(randomId)
.whenComplete((r2, ex2) -> {
if (ex2 != null) {
if (ex != null) {
ex2.addSuppressed(ex);
}
result.completeExceptionally(ex2);
return;
}
if (ex != null) {
result.completeExceptionally(ex);
return;
}
result.complete(value);
});
return result;
}).thenCompose(ff -> ff);
});
return new CompletableFutureWrapper<>(f);
}
public V getFirst() {
V value = getValue(0);
if (value == null) {
throw new NoSuchElementException();
}
return value;
}
@Override
public V poll() {
return get(pollAsync());
}
@Override
public V element() {
return getFirst();
}
@Override
public RFuture<V> peekAsync() {
return getAsync(0);
}
@Override
public V peek() {
return getValue(0);
}
private String getComparatorKeyName() {
return suffixName(getRawName(), "redisson_sortedset_comparator");
}
@Override
public boolean trySetComparator(Comparator<? super V> comparator) {
if (comparator.getClass().isSynthetic()) {
throw new IllegalArgumentException("Synthetic classes aren't allowed");
}
String className = comparator.getClass().getName();
String comparatorSign = className + ":" + calcClassSign(className);
Boolean res = get(commandExecutor.writeAsync(getRawName(), StringCodec.INSTANCE, RedisCommands.SETNX, getComparatorKeyName(), comparatorSign));
if (res) {
this.comparator = comparator;
}
return res;
}
@Override
public V remove() {
return removeFirst();
}
public V removeFirst() {
V value = poll();
if (value == null) {
throw new NoSuchElementException();
}
return value;
}
// TODO optimize: get three values each time instead of single
public BinarySearchResult<V> binarySearch(V value) {
int size = size();
int upperIndex = size - 1;
int lowerIndex = 0;
while (lowerIndex <= upperIndex) {
int index = lowerIndex + (upperIndex - lowerIndex) / 2;
V res = getValue(index);
if (res == null) {
return new BinarySearchResult<V>();
}
int cmp = comparator.compare(value, res);
if (cmp == 0) {
BinarySearchResult<V> indexRes = new BinarySearchResult<V>();
indexRes.setIndex(index);
return indexRes;
} else if (cmp < 0) {
upperIndex = index - 1;
} else {
lowerIndex = index + 1;
}
}
BinarySearchResult<V> indexRes = new BinarySearchResult<V>();
indexRes.setIndex(-(lowerIndex + 1));
return indexRes;
}
@Override
@SuppressWarnings("AvoidInlineConditionals")
public String toString() {
Iterator<V> it = iterator();
if (! it.hasNext())
return "[]";
StringBuilder sb = new StringBuilder();
sb.append('[');
for (;;) {
V e = it.next();
sb.append(e == this ? "(this Collection)" : e);
if (! it.hasNext())
return sb.append(']').toString();
sb.append(',').append(' ');
}
}
@Override
public V pollLastAndOfferFirstTo(String queueName) {
return get(pollLastAndOfferFirstToAsync(queueName));
}
@Override
public RFuture<V> pollLastAndOfferFirstToAsync(String queueName) {
return wrapLockedAsync(RedisCommands.RPOPLPUSH, getRawName(), queueName);
}
@Override
public RFuture<Boolean> deleteAsync() {
return deleteAsync(getRawName(), getComparatorKeyName());
}
@Override
public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit, String param, String... keys) {
return super.expireAsync(timeToLive, timeUnit, param, getRawName(), getComparatorKeyName());
}
@Override
protected RFuture<Boolean> expireAtAsync(long timestamp, String param, String... keys) {
return super.expireAtAsync(timestamp, param, getRawName(), getComparatorKeyName());
}
@Override
public RFuture<Boolean> clearExpireAsync() {
return clearExpireAsync(getRawName(), getComparatorKeyName());
}
@Override
public List<V> poll(int limit) {
return get(pollAsync(limit));
}
@Override
public RFuture<Boolean> offerAsync(V e) {
throw new UnsupportedOperationException();
}
@Override
public RFuture<Boolean> addAsync(V e) {
throw new UnsupportedOperationException();
}
@Override
public RFuture<List<V>> pollAsync(int limit) {
return wrapLockedAsync(() -> {
return commandExecutor.evalWriteNoRetryAsync(getRawName(), codec, RedisCommands.EVAL_LIST,
"local result = {};"
+ "for i = 1, ARGV[1], 1 do " +
"local value = redis.call('lpop', KEYS[1]);" +
"if value ~= false then " +
"table.insert(result, value);" +
"else " +
"return result;" +
"end;" +
"end; " +
"return result;",
Collections.singletonList(getRawName()), limit);
});
}
}
| signature |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java | {
"start": 3901,
"end": 4494
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory lng;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lng) {
this.source = source;
this.lng = lng;
}
@Override
public ToStringFromLongEvaluator get(DriverContext context) {
return new ToStringFromLongEvaluator(source, lng.get(context), context);
}
@Override
public String toString() {
return "ToStringFromLongEvaluator[" + "lng=" + lng + "]";
}
}
}
| Factory |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/function/FailableTest.java | {
"start": 99246,
"end": 99758
} | interface ____ properly defined to throw any exception using the top level generic types
* Object and Throwable.
*/
@Test
void testThrows_FailableLongToIntFunction_Throwable() {
assertThrows(IOException.class, () -> new FailableLongToIntFunction<Throwable>() {
@Override
public int applyAsInt(final long value) throws Throwable {
throw new IOException("test");
}
}.applyAsInt(0));
}
/**
* Tests that our failable | is |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/validation/beanvalidation/MethodValidationProxyTests.java | {
"start": 9355,
"end": 9411
} | interface ____ {
}
@Configuration
public static | MyValid |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/QueryOperationConverter.java | {
"start": 39439,
"end": 40516
} | class ____ extends ExpressionDefaultVisitor<AggCall> {
@Override
public AggCall visit(CallExpression unresolvedCall) {
if (unresolvedCall.getFunctionDefinition() == AS) {
String aggregateName =
extractValue(unresolvedCall.getChildren().get(1), String.class)
.orElseThrow(() -> new TableException("Unexpected name."));
Expression aggregate = unresolvedCall.getChildren().get(0);
if (isFunctionOfKind(aggregate, AGGREGATE)) {
return aggregate.accept(
new AggCallVisitor(
relBuilder, expressionConverter, aggregateName, false));
}
}
throw new TableException("Expected named aggregate. Got: " + unresolvedCall);
}
@Override
protected AggCall defaultMethod(Expression expression) {
throw new TableException("Unexpected expression: " + expression);
}
private | AggregateVisitor |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/customwordembedding/ScriptDetector.java | {
"start": 1033,
"end": 1312
} | class ____ {
private ScriptDetector() {}
// Unicode scripts we care about. To get compact and fast code, we detect only
// a few Unicode scripts that offer a strong indication about the language of
// the text (e.g., Hiragana -> Japanese).
public | ScriptDetector |
java | apache__camel | core/camel-core-model/src/main/java/org/apache/camel/model/dataformat/AvroDataFormat.java | {
"start": 18367,
"end": 21846
} | class ____ implements DataFormatBuilder<AvroDataFormat> {
private Class<?> unmarshalType;
private Class<?> jsonView;
private Class<?> collectionType;
private Object schema;
private String instanceClassName;
private AvroLibrary library = AvroLibrary.ApacheAvro;
private String objectMapper;
private String useDefaultObjectMapper;
private String unmarshalTypeName;
private String jsonViewTypeName;
private String include;
private String allowJmsType;
private String collectionTypeName;
private String useList;
private String moduleClassNames;
private String moduleRefs;
private String enableFeatures;
private String disableFeatures;
private String allowUnmarshallType;
private String timezone;
private String autoDiscoverObjectMapper;
private String contentTypeHeader;
private String schemaResolver;
private String autoDiscoverSchemaResolver;
/**
* Class name to use for marshal and unmarshalling
*/
public Builder instanceClassName(String instanceClassName) {
this.instanceClassName = instanceClassName;
return this;
}
public Builder schema(Object schema) {
this.schema = schema;
return this;
}
/**
* Which Avro library to use.
*/
public Builder library(AvroLibrary library) {
this.library = library;
return this;
}
public Builder contentTypeHeader(String contentTypeHeader) {
this.contentTypeHeader = contentTypeHeader;
return this;
}
public Builder contentTypeHeader(boolean contentTypeHeader) {
this.contentTypeHeader = Boolean.toString(contentTypeHeader);
return this;
}
/**
* Lookup and use the existing ObjectMapper with the given id when using Jackson.
*/
public Builder objectMapper(String objectMapper) {
this.objectMapper = objectMapper;
return this;
}
/**
* Whether to lookup and use default Jackson ObjectMapper from the registry.
*/
public Builder useDefaultObjectMapper(String useDefaultObjectMapper) {
this.useDefaultObjectMapper = useDefaultObjectMapper;
return this;
}
/**
* Whether to lookup and use default Jackson ObjectMapper from the registry.
*/
public Builder useDefaultObjectMapper(boolean useDefaultObjectMapper) {
this.useDefaultObjectMapper = Boolean.toString(useDefaultObjectMapper);
return this;
}
/**
* Class name of the java type to use when unmarshalling
*/
public Builder unmarshalTypeName(String unmarshalTypeName) {
this.unmarshalTypeName = unmarshalTypeName;
return this;
}
/**
* Class of the java type to use when unmarshalling
*/
public Builder unmarshalType(Class<?> unmarshalType) {
this.unmarshalType = unmarshalType;
return this;
}
/**
* When marshalling a POJO to JSON you might want to exclude certain fields from the JSON output. With Jackson
* you can use JSON views to accomplish this. This option is to refer to the | Builder |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/oracle/insert/OracleInsertTest22.java | {
"start": 1062,
"end": 5400
} | class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = "INSERT INTO MKTG_H_EXEC_RESULT_FACT\n" +
"(THE_DATE, AREA_ID, SCENE_ID, MKTG_CNT, MKTG_SUC_CNT\n" +
", TASK_CNT, TASK_F_CNT, TASK_F_SUC_CNT, CON_CNT, CON_SUC_CNT)\n" +
"SELECT TRUNC(SYSDATE), T1.AREA_ID\n" +
", RTRIM(TO_CHAR(T2.PID))\n" +
", SUM(T1.MKTG_CNT), SUM(T1.MKTG_SUC_CNT)\n" +
", SUM(T1.TASK_CNT), SUM(T1.TASK_F_CNT)\n" +
", SUM(T1.TASK_F_SUC_CNT), SUM(T1.CON_CNT)\n" +
", SUM(T1.CON_SUC_CNT)\n" +
"FROM MKTG_H_EXEC_RESULT_FACT T1, (\n" +
"SELECT DISTINCT MKTG_PLAN_LVL1_ID AS PID, MKTG_PLAN_LVL4_ID AS SCENE_ID\n" +
"FROM DMN_MKTG_PLAN_TYPE\n" +
"UNION ALL\n" +
"SELECT DISTINCT MKTG_PLAN_LVL2_ID AS PID, MKTG_PLAN_LVL4_ID AS SCENE_ID\n" +
"FROM DMN_MKTG_PLAN_TYPE_TWO\n" +
"WHERE MKTG_PLAN_LVL2_ID <> MKTG_PLAN_LVL4_ID\n" +
"UNION ALL\n" +
"SELECT DISTINCT MKTG_PLAN_LVL3_ID AS PID, MKTG_PLAN_LVL4_ID AS SCENE_ID\n" +
"FROM DMN_MKTG_PLAN_TYPE\n" +
"WHERE MKTG_PLAN_LVL3_ID <> MKTG_PLAN_LVL4_ID\n" +
") T2\n" +
"WHERE T1.THE_DATE = TRUNC(SYSDATE)\n" +
"AND T1.SCENE_ID = T2.SCENE_ID\n" +
"GROUP BY T1.AREA_ID, RTRIM(TO_CHAR(T2.PID))";
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("INSERT INTO MKTG_H_EXEC_RESULT_FACT\n" +
"\t(THE_DATE, AREA_ID, SCENE_ID, MKTG_CNT, MKTG_SUC_CNT\n" +
"\t, TASK_CNT, TASK_F_CNT, TASK_F_SUC_CNT, CON_CNT, CON_SUC_CNT)\n" +
"SELECT TRUNC(SYSDATE), T1.AREA_ID\n" +
"\t, RTRIM(TO_CHAR(T2.PID))\n" +
"\t, SUM(T1.MKTG_CNT), SUM(T1.MKTG_SUC_CNT)\n" +
"\t, SUM(T1.TASK_CNT), SUM(T1.TASK_F_CNT)\n" +
"\t, SUM(T1.TASK_F_SUC_CNT), SUM(T1.CON_CNT)\n" +
"\t, SUM(T1.CON_SUC_CNT)\n" +
"FROM MKTG_H_EXEC_RESULT_FACT T1, (\n" +
"\tSELECT DISTINCT MKTG_PLAN_LVL1_ID AS PID, MKTG_PLAN_LVL4_ID AS SCENE_ID\n" +
"\tFROM DMN_MKTG_PLAN_TYPE\n" +
"\tUNION ALL\n" +
"\tSELECT DISTINCT MKTG_PLAN_LVL2_ID AS PID, MKTG_PLAN_LVL4_ID AS SCENE_ID\n" +
"\tFROM DMN_MKTG_PLAN_TYPE_TWO\n" +
"\tWHERE MKTG_PLAN_LVL2_ID <> MKTG_PLAN_LVL4_ID\n" +
"\tUNION ALL\n" +
"\tSELECT DISTINCT MKTG_PLAN_LVL3_ID AS PID, MKTG_PLAN_LVL4_ID AS SCENE_ID\n" +
"\tFROM DMN_MKTG_PLAN_TYPE\n" +
"\tWHERE MKTG_PLAN_LVL3_ID <> MKTG_PLAN_LVL4_ID\n" +
") T2\n" +
"WHERE T1.THE_DATE = TRUNC(SYSDATE)\n" +
"\tAND T1.SCENE_ID = T2.SCENE_ID\n" +
"GROUP BY T1.AREA_ID, RTRIM(TO_CHAR(T2.PID))",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
// System.out.println("coditions : " + visitor.getConditions());
// System.out.println("relationships : " + visitor.getRelationships());
assertEquals(3, visitor.getTables().size());
assertEquals(15, visitor.getColumns().size());
assertTrue(visitor.containsTable("MKTG_H_EXEC_RESULT_FACT"));
assertTrue(visitor.containsTable("DMN_MKTG_PLAN_TYPE"));
assertTrue(visitor.containsTable("DMN_MKTG_PLAN_TYPE_TWO"));
assertTrue(visitor.getColumns().contains(new TableStat.Column("MKTG_H_EXEC_RESULT_FACT", "THE_DATE")));
}
}
| OracleInsertTest22 |
java | grpc__grpc-java | binder/src/main/java/io/grpc/binder/internal/BinderServer.java | {
"start": 7358,
"end": 8382
} | class ____ implements LeakSafeOneWayBinder.TransactionHandler {
static final GoAwayHandler INSTANCE = new GoAwayHandler();
@Override
public boolean handleTransaction(int code, Parcel parcel) {
if (code == BinderTransport.SETUP_TRANSPORT) {
int version = parcel.readInt();
if (version >= BinderTransport.EARLIEST_SUPPORTED_WIRE_FORMAT_VERSION) {
IBinder callbackBinder = parcel.readStrongBinder();
try (ParcelHolder goAwayReply = ParcelHolder.obtain()) {
// Send empty flags to avoid a memory leak linked to empty parcels (b/207778694).
goAwayReply.get().writeInt(0);
callbackBinder.transact(SHUTDOWN_TRANSPORT, goAwayReply.get(), null, FLAG_ONEWAY);
} catch (RemoteException re) {
logger.log(Level.WARNING, "Couldn't reply to post-shutdown() SETUP_TRANSPORT.", re);
}
}
}
return false;
}
}
/** Fluent builder of {@link BinderServer} instances. */
public static | GoAwayHandler |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/xsd/XsdDescriptor.java | {
"start": 258,
"end": 887
} | class ____ {
private final String localResourceName;
private final String namespaceUri;
private final String version;
private final Schema schema;
XsdDescriptor(String localResourceName, Schema schema, String version, String namespaceUri) {
this.localResourceName = localResourceName;
this.schema = schema;
this.version = version;
this.namespaceUri = namespaceUri;
}
public String getLocalResourceName() {
return localResourceName;
}
public String getNamespaceUri() {
return namespaceUri;
}
public String getVersion() {
return version;
}
public Schema getSchema() {
return schema;
}
}
| XsdDescriptor |
java | google__error-prone | core/src/test/java/com/google/errorprone/refaster/UnificationTest.java | {
"start": 13832,
"end": 15316
} | class ____ {",
" public void example(Comparator<String> cmp) {",
" List<String> foo = new ArrayList<String>();",
" foo.add(\"bar\");",
" List<String> sorted = new ArrayList<String>(foo);",
" Collections.sort(sorted, cmp);",
" }",
"}");
expectMatches(
blockTemplate,
Match.create(
ImmutableMap.of(
"collection", "foo",
"comparator", "cmp",
"E", "java.lang.String",
"list", "sorted")));
}
@Test
public void ifBlockTemplate() {
/*
* Template:
*
* if (cond) {
* x = y;
* } else {
* x = z;
* }
*/
BlockTemplate blockTemplate =
BlockTemplate.create(
ImmutableList.of(UTypeVar.create("T")),
ImmutableMap.of(
"cond", UPrimitiveType.BOOLEAN,
"x", UTypeVar.create("T"),
"y", UTypeVar.create("T"),
"z", UTypeVar.create("T")),
UIf.create(
UFreeIdent.create("cond"),
UBlock.create(
UExpressionStatement.create(
UAssign.create(UFreeIdent.create("x"), UFreeIdent.create("y")))),
UBlock.create(
UExpressionStatement.create(
UAssign.create(UFreeIdent.create("x"), UFreeIdent.create("z"))))));
compile(
" | BlockTemplateExample |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/HdfsServerConstants.java | {
"start": 3240,
"end": 3356
} | enum ____ {
NAME_NODE,
DATA_NODE,
JOURNAL_NODE
}
/** Startup options for rolling upgrade. */
| NodeType |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/GroupProtocol.java | {
"start": 867,
"end": 1390
} | enum ____ {
/** Classic group protocol. */
CLASSIC("CLASSIC"),
/** Streams group protocol */
STREAMS("STREAMS");
/**
* String representation of the group protocol.
*/
public final String name;
GroupProtocol(final String name) {
this.name = name;
}
/**
* Case-insensitive group protocol lookup by string name.
*/
public static GroupProtocol of(final String name) {
return GroupProtocol.valueOf(name.toUpperCase(Locale.ROOT));
}
}
| GroupProtocol |
java | spring-projects__spring-boot | module/spring-boot-couchbase/src/main/java/org/springframework/boot/couchbase/autoconfigure/CouchbaseProperties.java | {
"start": 4391,
"end": 5350
} | class ____ {
/**
* Minimum number of sockets per node.
*/
private int minEndpoints = 1;
/**
* Maximum number of sockets per node.
*/
private int maxEndpoints = 12;
/**
* Length of time an HTTP connection may remain idle before it is closed and
* removed from the pool.
*/
private Duration idleHttpConnectionTimeout = Duration.ofSeconds(1);
public int getMinEndpoints() {
return this.minEndpoints;
}
public void setMinEndpoints(int minEndpoints) {
this.minEndpoints = minEndpoints;
}
public int getMaxEndpoints() {
return this.maxEndpoints;
}
public void setMaxEndpoints(int maxEndpoints) {
this.maxEndpoints = maxEndpoints;
}
public Duration getIdleHttpConnectionTimeout() {
return this.idleHttpConnectionTimeout;
}
public void setIdleHttpConnectionTimeout(Duration idleHttpConnectionTimeout) {
this.idleHttpConnectionTimeout = idleHttpConnectionTimeout;
}
}
public static | Io |
java | spring-projects__spring-security | web/src/test/java/org/springframework/security/web/context/SaveContextOnUpdateOrErrorResponseWrapperTests.java | {
"start": 1304,
"end": 6377
} | class ____ {
@Mock
private SecurityContext securityContext;
private MockHttpServletResponse response;
private SaveContextOnUpdateOrErrorResponseWrapperStub wrappedResponse;
@BeforeEach
public void setUp() {
this.response = new MockHttpServletResponse();
this.wrappedResponse = new SaveContextOnUpdateOrErrorResponseWrapperStub(this.response, true);
SecurityContextHolder.setContext(this.securityContext);
}
@AfterEach
public void clearContext() {
SecurityContextHolder.clearContext();
}
@Test
public void sendErrorSavesSecurityContext() throws Exception {
int error = HttpServletResponse.SC_FORBIDDEN;
this.wrappedResponse.sendError(error);
assertThat(this.wrappedResponse.securityContext).isEqualTo(this.securityContext);
assertThat(this.response.getStatus()).isEqualTo(error);
}
@Test
public void sendErrorSkipsSaveSecurityContextDisables() throws Exception {
final int error = HttpServletResponse.SC_FORBIDDEN;
this.wrappedResponse.disableSaveOnResponseCommitted();
this.wrappedResponse.sendError(error);
assertThat(this.wrappedResponse.securityContext).isNull();
assertThat(this.response.getStatus()).isEqualTo(error);
}
@Test
public void sendErrorWithMessageSavesSecurityContext() throws Exception {
int error = HttpServletResponse.SC_FORBIDDEN;
String message = "Forbidden";
this.wrappedResponse.sendError(error, message);
assertThat(this.wrappedResponse.securityContext).isEqualTo(this.securityContext);
assertThat(this.response.getStatus()).isEqualTo(error);
assertThat(this.response.getErrorMessage()).isEqualTo(message);
}
@Test
public void sendErrorWithMessageSkipsSaveSecurityContextDisables() throws Exception {
final int error = HttpServletResponse.SC_FORBIDDEN;
final String message = "Forbidden";
this.wrappedResponse.disableSaveOnResponseCommitted();
this.wrappedResponse.sendError(error, message);
assertThat(this.wrappedResponse.securityContext).isNull();
assertThat(this.response.getStatus()).isEqualTo(error);
assertThat(this.response.getErrorMessage()).isEqualTo(message);
}
@Test
public void sendRedirectSavesSecurityContext() throws Exception {
String url = "/location";
this.wrappedResponse.sendRedirect(url);
assertThat(this.wrappedResponse.securityContext).isEqualTo(this.securityContext);
assertThat(this.response.getRedirectedUrl()).isEqualTo(url);
}
@Test
public void sendRedirectSkipsSaveSecurityContextDisables() throws Exception {
final String url = "/location";
this.wrappedResponse.disableSaveOnResponseCommitted();
this.wrappedResponse.sendRedirect(url);
assertThat(this.wrappedResponse.securityContext).isNull();
assertThat(this.response.getRedirectedUrl()).isEqualTo(url);
}
@Test
public void outputFlushSavesSecurityContext() throws Exception {
this.wrappedResponse.getOutputStream().flush();
assertThat(this.wrappedResponse.securityContext).isEqualTo(this.securityContext);
}
@Test
public void outputFlushSkipsSaveSecurityContextDisables() throws Exception {
this.wrappedResponse.disableSaveOnResponseCommitted();
this.wrappedResponse.getOutputStream().flush();
assertThat(this.wrappedResponse.securityContext).isNull();
}
@Test
public void outputCloseSavesSecurityContext() throws Exception {
this.wrappedResponse.getOutputStream().close();
assertThat(this.wrappedResponse.securityContext).isEqualTo(this.securityContext);
}
@Test
public void outputCloseSkipsSaveSecurityContextDisables() throws Exception {
this.wrappedResponse.disableSaveOnResponseCommitted();
this.wrappedResponse.getOutputStream().close();
assertThat(this.wrappedResponse.securityContext).isNull();
}
@Test
public void writerFlushSavesSecurityContext() throws Exception {
this.wrappedResponse.getWriter().flush();
assertThat(this.wrappedResponse.securityContext).isEqualTo(this.securityContext);
}
@Test
public void writerFlushSkipsSaveSecurityContextDisables() throws Exception {
this.wrappedResponse.disableSaveOnResponseCommitted();
this.wrappedResponse.getWriter().flush();
assertThat(this.wrappedResponse.securityContext).isNull();
}
@Test
public void writerCloseSavesSecurityContext() throws Exception {
this.wrappedResponse.getWriter().close();
assertThat(this.wrappedResponse.securityContext).isEqualTo(this.securityContext);
}
@Test
public void writerCloseSkipsSaveSecurityContextDisables() throws Exception {
this.wrappedResponse.disableSaveOnResponseCommitted();
this.wrappedResponse.getWriter().close();
assertThat(this.wrappedResponse.securityContext).isNull();
}
@Test
public void flushBufferSavesSecurityContext() throws Exception {
this.wrappedResponse.flushBuffer();
assertThat(this.wrappedResponse.securityContext).isEqualTo(this.securityContext);
}
@Test
public void flushBufferSkipsSaveSecurityContextDisables() throws Exception {
this.wrappedResponse.disableSaveOnResponseCommitted();
this.wrappedResponse.flushBuffer();
assertThat(this.wrappedResponse.securityContext).isNull();
}
private static | SaveContextOnUpdateOrErrorResponseWrapperTests |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/failure/FailureEnricher.java | {
"start": 2567,
"end": 2628
} | interface ____ {
/** Type of failure. */
| Context |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ser/jdk/NumberSerTest.java | {
"start": 3332,
"end": 3719
} | class ____ extends ValueSerializer<BigDecimal> {
private final DecimalFormat df = createDecimalFormatForDefaultLocale("0.0");
@Override
public void serialize(BigDecimal value, JsonGenerator gen, SerializationContext serializers) {
gen.writeNumber(df.format(value));
}
}
@SuppressWarnings("serial")
static | BigDecimalAsNumberSerializer |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/enumeratedvalue/CharEnumerateValueTests.java | {
"start": 1488,
"end": 4587
} | class ____ {
@Test
@DomainModel(annotatedClasses = Person.class)
@SessionFactory
void testBasicUsage(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.persist( new Person( 1, "John", Gender.MALE ) );
} );
scope.inTransaction( (session) -> {
session.doWork( (connection) -> {
try (Statement statement = connection.createStatement()) {
try (ResultSet resultSet = statement.executeQuery( "select gender from persons" )) {
assertThat( resultSet.next() ).isTrue();
final String storedGender = resultSet.getString( 1 );
assertThat( storedGender ).isEqualTo( "M" );
}
}
} );
} );
}
@DomainModel(annotatedClasses = Person.class)
@SessionFactory
@Test
void testNulls(SessionFactoryScope scope) {
scope.inTransaction( (session) -> {
session.persist( new Person( 1, "John", null ) );
} );
scope.inTransaction( (session) -> {
session.doWork( (connection) -> {
try (Statement statement = connection.createStatement()) {
try (ResultSet resultSet = statement.executeQuery( "select gender from persons" )) {
assertThat( resultSet.next() ).isTrue();
final String storedGender = resultSet.getString( 1 );
assertThat( resultSet.wasNull() ).isTrue();
assertThat( storedGender ).isNull();
}
}
} );
} );
}
@DomainModel(annotatedClasses = Person.class)
@SessionFactory
@RequiresDialectFeature( feature = DialectFeatureChecks.SupportsColumnCheck.class )
@Test
void verifyCheckConstraints(SessionFactoryScope scope) {
scope.inTransaction( (session) -> session.doWork( (connection) -> {
try (PreparedStatement statement = connection.prepareStatement( "insert into persons (id, gender) values (?, ?)" ) ) {
statement.setInt( 1, 100 );
statement.setString( 2, "X" );
statement.executeUpdate();
fail( "Expecting a failure" );
}
catch (SQLException expected) {
}
} ) );
}
@DomainModel(annotatedClasses = Person.class)
@SessionFactory
@SkipForDialect( dialectClass = SybaseDialect.class, matchSubTypes = true,
reason = "Sybase (at least jTDS driver) truncates the value so the constraint is not violated" )
@SkipForDialect( dialectClass = InformixDialect.class,
reason = "Informix truncates the value so the constraint is not violated" )
@RequiresDialectFeature( feature = DialectFeatureChecks.SupportsColumnCheck.class )
@Test
void verifyCheckConstraints2(SessionFactoryScope scope) {
scope.inTransaction( (session) -> session.doWork( (connection) -> {
try (PreparedStatement statement = connection.prepareStatement( "insert into persons (id, gender) values (?, ?)" ) ) {
statement.setInt( 1, 200 );
// this would work without check constraints or with check constraints based solely on EnumType#STRING
statement.setString( 2, "MALE" );
statement.executeUpdate();
fail( "Expecting a failure" );
}
catch (SQLException expected) {
}
} ) );
}
@AfterEach
void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
public | CharEnumerateValueTests |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/UpdateApiKeyRequestTranslator.java | {
"start": 847,
"end": 969
} | interface ____ {
UpdateApiKeyRequest translate(RestRequest request) throws IOException;
| UpdateApiKeyRequestTranslator |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/manager/ApplicationLifecycle.java | {
"start": 410,
"end": 671
} | class ____ implements Lifecycle {
@Override
public void addListener(@NonNull LifecycleListener listener) {
listener.onStart();
}
@Override
public void removeListener(@NonNull LifecycleListener listener) {
// Do nothing.
}
}
| ApplicationLifecycle |
java | apache__rocketmq | store/src/main/java/org/apache/rocketmq/store/DefaultMessageStore.java | {
"start": 97301,
"end": 99597
} | class ____ {
DispatchRequest[][] buffer;
long ptr = 0;
AtomicLong maxPtr = new AtomicLong();
public DispatchRequestOrderlyQueue(int bufferNum) {
this.buffer = new DispatchRequest[bufferNum][];
}
public void put(long index, DispatchRequest[] dispatchRequests) {
while (ptr + this.buffer.length <= index) {
synchronized (this) {
try {
this.wait();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
int mod = (int) (index % this.buffer.length);
this.buffer[mod] = dispatchRequests;
maxPtr.incrementAndGet();
}
public DispatchRequest[] get(List<DispatchRequest[]> dispatchRequestsList) {
synchronized (this) {
for (int i = 0; i < this.buffer.length; i++) {
int mod = (int) (ptr % this.buffer.length);
DispatchRequest[] ret = this.buffer[mod];
if (ret == null) {
this.notifyAll();
return null;
}
dispatchRequestsList.add(ret);
this.buffer[mod] = null;
ptr++;
}
}
return null;
}
public synchronized boolean isEmpty() {
return maxPtr.get() == ptr;
}
}
@Override
public void notifyMessageArriveIfNecessary(DispatchRequest dispatchRequest) {
if (DefaultMessageStore.this.brokerConfig.isLongPollingEnable()
&& DefaultMessageStore.this.messageArrivingListener != null) {
DefaultMessageStore.this.messageArrivingListener.arriving(dispatchRequest.getTopic(),
dispatchRequest.getQueueId(), dispatchRequest.getConsumeQueueOffset() + 1,
dispatchRequest.getTagsCode(), dispatchRequest.getStoreTimestamp(),
dispatchRequest.getBitMap(), dispatchRequest.getPropertiesMap());
DefaultMessageStore.this.reputMessageService.notifyMessageArrive4MultiQueue(dispatchRequest);
}
}
| DispatchRequestOrderlyQueue |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/domain/animal/Human.java | {
"start": 877,
"end": 3879
} | class ____ extends Mammal {
private Name name;
private String nickName;
private double heightInches;
private BigInteger bigIntegerValue;
private BigDecimal bigDecimalValue;
private int intValue;
private float floatValue;
private Collection<Human> friends;
private Collection<DomesticAnimal> pets;
private Map <String,Human> family;
private Set<String> nickNames;
private Map<String,Address> addresses;
@Embedded
public Name getName() {
return name;
}
public void setName(Name name) {
this.name = name;
}
public String getNickName() {
return nickName;
}
public void setNickName(String nickName) {
this.nickName = nickName;
}
@Column( name = "height_centimeters", nullable = false )
@ColumnTransformer( read = "height_centimeters / 2.54E0", write = "? * 2.54E0" )
public double getHeightInches() {
return heightInches;
}
public void setHeightInches(double height) {
this.heightInches = height;
}
public BigDecimal getBigDecimalValue() {
return bigDecimalValue;
}
public void setBigDecimalValue(BigDecimal bigDecimalValue) {
this.bigDecimalValue = bigDecimalValue;
}
public BigInteger getBigIntegerValue() {
return bigIntegerValue;
}
public void setBigIntegerValue(BigInteger bigIntegerValue) {
this.bigIntegerValue = bigIntegerValue;
}
public float getFloatValue() {
return floatValue;
}
public void setFloatValue(float floatValue) {
this.floatValue = floatValue;
}
public int getIntValue() {
return intValue;
}
public void setIntValue(int intValue) {
this.intValue = intValue;
}
@ElementCollection
@CollectionTable( name = "human_nick_names", joinColumns = @JoinColumn( name = "human_fk" ) )
@Column( name = "nick_name" )
@SortNatural
public Set<String> getNickNames() {
return nickNames;
}
public void setNickNames(Set<String> nickNames) {
this.nickNames = nickNames;
}
@ManyToMany
@JoinTable(
name = "friends",
joinColumns = @JoinColumn( name = "friend_fk1" ),
inverseJoinColumns = @JoinColumn( name = "friend_fk2" )
)
public Collection<Human> getFriends() {
return friends;
}
public void setFriends(Collection<Human> friends) {
this.friends = friends;
}
@OneToMany( mappedBy = "owner" )
public Collection<DomesticAnimal> getPets() {
return pets;
}
public void setPets(Collection<DomesticAnimal> pets) {
this.pets = pets;
}
@ManyToMany
@JoinTable(
name = "family",
joinColumns = @JoinColumn( name = "family_fk1" ),
inverseJoinColumns = @JoinColumn( name = "family_fk2" )
)
@MapKeyColumn( name = "relationship" )
public Map<String,Human> getFamily() {
return family;
}
public void setFamily(Map family) {
this.family = family;
}
@ElementCollection
@CollectionTable( name = "human_addresses", joinColumns = @JoinColumn( name = "human_fk" ) )
@MapKeyColumn( name = "`type`" )
public Map<String,Address> getAddresses() {
return addresses;
}
public void setAddresses(Map<String,Address> addresses) {
this.addresses = addresses;
}
}
| Human |
java | spring-projects__spring-security | web/src/main/java/org/springframework/security/web/authentication/session/NullAuthenticatedSessionStrategy.java | {
"start": 948,
"end": 1182
} | class ____ implements SessionAuthenticationStrategy {
@Override
public void onAuthentication(@Nullable Authentication authentication, HttpServletRequest request,
HttpServletResponse response) {
}
}
| NullAuthenticatedSessionStrategy |
java | elastic__elasticsearch | build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/test/rest/transform/match/ReplaceValueInMatchTests.java | {
"start": 1048,
"end": 2115
} | class ____ extends TransformTests {
private static final YAMLFactory YAML_FACTORY = new YAMLFactory();
private static final ObjectMapper MAPPER = new ObjectMapper(YAML_FACTORY);
@Test
public void testReplaceMatch() throws Exception {
String test_original = "/rest/transform/match/match_original.yml";
List<ObjectNode> tests = getTests(test_original);
String test_transformed = "/rest/transform/match/match_transformed.yml";
List<ObjectNode> expectedTransformation = getTests(test_transformed);
SerializableJsonNode<JsonNode> replacementNode = SerializableJsonNode.of("_replaced_type", JsonNode.class);
List<ObjectNode> transformedTests = transformTests(
tests,
List.of(
new ReplaceValueInMatch("_type", replacementNode, null),
new ReplaceValueInMatch("_replace_in_last_test_only", replacementNode, "Last test")
)
);
AssertObjectNodes.areEqual(transformedTests, expectedTransformation);
}
}
| ReplaceValueInMatchTests |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/main/java/org/hibernate/processor/annotation/TypedMetaAttribute.java | {
"start": 405,
"end": 2629
} | class ____ extends NameMetaAttribute {
private final String prefix;
private final String resultType;
private final String referenceType;
private final @Nullable String query;
public TypedMetaAttribute(
Metamodel annotationMetaEntity,
String name,
String prefix,
String resultType,
String referenceType,
@Nullable String query) {
super( annotationMetaEntity, name, prefix );
this.prefix = prefix;
this.resultType = resultType;
this.referenceType = referenceType;
this.query = query;
}
@Override
public boolean hasTypedAttribute() {
return true;
}
private boolean isQuery() {
return "QUERY_".equals(prefix); //UGLY!
}
@Override
public String getAttributeNameDeclarationString() {
StringBuilder declaration = new StringBuilder();
declaration
.append("\n/**\n * @see ")
.append("#");
appendFieldName( declaration, isQuery() );
return declaration
.append( "\n **/\n" )
.append(super.getAttributeNameDeclarationString())
.toString();
}
@Override
public String getAttributeDeclarationString() {
final boolean isQuery = isQuery();
final Metamodel entity = getHostingEntity();
final StringBuilder declaration = new StringBuilder();
declaration
.append("\n/**")
.append("\n * The ")
.append(isQuery ? "query" : "entity graph")
.append(" named {@value ")
.append(prefix)
.append(fieldName())
.append("}\n");
if ( query != null ) {
declaration.append(" * <pre>");
query.lines()
.forEach( line -> declaration.append("\n * ").append( line ) );
declaration.append("\n * </pre>\n");
}
declaration
.append(" *\n * @see ")
.append(entity.getQualifiedName())
.append("\n **/\n")
.append("public static volatile ")
.append(entity.importType(referenceType))
.append('<')
.append(entity.importType(resultType))
.append('>')
.append(' ');
appendFieldName( declaration, isQuery );
declaration.append(';');
return declaration.toString();
}
private void appendFieldName(StringBuilder declaration, boolean isQuery) {
declaration
.append('_')
.append(nameToMethodName(getPropertyName()));
if ( isQuery ) {
declaration.append('_');
}
}
}
| TypedMetaAttribute |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/converter/TypeResolverHelperTest.java | {
"start": 1231,
"end": 2734
} | class ____ {
private static final Map<TypeConvertible<?, ?>, TypeConverter> registeredConverters = new HashMap<>();
@BeforeAll
static void setUp() {
registeredConverters.put(new TypeConvertible<>(Source.class, Child.class), new SourceChildConverter());
}
@Test
public void testTryAssignableToChild() {
TypeConvertible<Source, Child> requestedConverter = new TypeConvertible<>(Source.class, Child.class);
TypeConverter foundConverter = TypeResolverHelper.tryAssignableFrom(requestedConverter, registeredConverters);
assertNotNull(foundConverter);
Child result = foundConverter.tryConvertTo(Child.class, new Source("source"));
assertEquals("sourceP", result.parentField);
assertEquals("sourceC", result.childField);
}
// This general behaviour works in Camel 3 but stopped working in Camel 4 due to what looks like an accidental regression.
// See https://issues.apache.org/jira/browse/CAMEL-19828
@Test
public void testTryAssignableToParent() {
TypeConvertible<Source, Parent> requestedConverter = new TypeConvertible<>(Source.class, Parent.class);
TypeConverter foundConverter = TypeResolverHelper.tryAssignableFrom(requestedConverter, registeredConverters);
assertNotNull(foundConverter);
Parent result = foundConverter.tryConvertTo(Parent.class, new Source("source"));
assertEquals("sourceP", result.parentField);
}
private static | TypeResolverHelperTest |
java | spring-projects__spring-boot | core/spring-boot/src/main/java/org/springframework/boot/env/PropertySourceLoader.java | {
"start": 1074,
"end": 1884
} | interface ____ {
/**
* Returns the file extensions that the loader supports (excluding the '.').
* @return the file extensions
*/
String[] getFileExtensions();
/**
* Load the resource into one or more property sources. Implementations may either
* return a list containing a single source, or in the case of a multi-document format
* such as yaml a source for each document in the resource.
* @param name the root name of the property source. If multiple documents are loaded
* an additional suffix should be added to the name for each source loaded.
* @param resource the resource to load
* @return a list property sources
* @throws IOException if the source cannot be loaded
*/
List<PropertySource<?>> load(String name, Resource resource) throws IOException;
}
| PropertySourceLoader |
java | apache__flink | flink-core/src/test/java/org/apache/flink/api/java/typeutils/PojoParametrizedTypeExtractionTest.java | {
"start": 3091,
"end": 3326
} | class ____
implements MapFunction<Integer, ParameterizedParentImpl> {
@Override
public ParameterizedParentImpl map(Integer value) throws Exception {
return null;
}
}
}
| ConcreteMapFunction |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java | {
"start": 210456,
"end": 211010
} | class ____ extends ParserRuleContext {
@SuppressWarnings("this-escape")
public RegexBooleanExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_regexBooleanExpression; }
@SuppressWarnings("this-escape")
public RegexBooleanExpressionContext() { }
public void copyFrom(RegexBooleanExpressionContext ctx) {
super.copyFrom(ctx);
}
}
@SuppressWarnings("CheckReturnValue")
public static | RegexBooleanExpressionContext |
java | google__guice | extensions/servlet/test/com/google/inject/servlet/ContinuingRequestIntegrationTest.java | {
"start": 6149,
"end": 6199
} | class ____ {}
@Singleton
public static | SomeObject |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java | {
"start": 2307,
"end": 28296
} | class ____ extends AggregatorTestCase {
private static final String AGG_NAME = "scriptedMetric";
// Inline scripts resolved by MockScriptEngine against the SCRIPTS registry below
// (the actual bodies are installed in initMockScripts()).
// Basic init/map/combine/reduce pipeline used by most tests:
private static final Script INIT_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScript", Collections.emptyMap());
private static final Script MAP_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScript", Collections.emptyMap());
private static final Script COMBINE_SCRIPT = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"combineScript",
Collections.emptyMap()
);
private static final Script REDUCE_SCRIPT = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"reduceScript",
Collections.emptyMap()
);
// Reduce variant that reports how many shard-level states it received.
private static final Script REDUCE_SCRIPT_COUNT_STATES = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"reduceScriptCountStates",
Collections.emptyMap()
);
// Pipeline variant that accumulates document _score values instead of counts.
private static final Script INIT_SCRIPT_SCORE = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"initScriptScore",
Collections.emptyMap()
);
private static final Script MAP_SCRIPT_SCORE = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"mapScriptScore",
Collections.emptyMap()
);
private static final Script COMBINE_SCRIPT_SCORE = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"combineScriptScore",
Collections.emptyMap()
);
// Combine that passes the raw state map through unchanged.
private static final Script COMBINE_SCRIPT_NOOP = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"combineScriptNoop",
Collections.emptyMap()
);
// Parameterized pipeline; the constants (24, 12, 4, 2) drive the exact values
// asserted by the *Params tests below.
private static final Script INIT_SCRIPT_PARAMS = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"initScriptParams",
Collections.singletonMap("initialValue", 24)
);
private static final Script MAP_SCRIPT_PARAMS = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"mapScriptParams",
Collections.singletonMap("itemValue", 12)
);
private static final Script COMBINE_SCRIPT_PARAMS = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"combineScriptParams",
Collections.singletonMap("multiplier", 4)
);
private static final Script REDUCE_SCRIPT_PARAMS = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"reduceScriptParams",
Collections.singletonMap("additional", 2)
);
// Deliberately matches a script-level param key of INIT_SCRIPT_PARAMS so the
// agg-vs-script parameter collision can be provoked.
private static final String CONFLICTING_PARAM_NAME = "initialValue";
// Scripts whose state map ends up containing a reference to itself; used to
// verify self-referencing agg state is rejected at each phase.
private static final Script INIT_SCRIPT_SELF_REF = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"initScriptSelfRef",
Collections.emptyMap()
);
private static final Script MAP_SCRIPT_SELF_REF = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"mapScriptSelfRef",
Collections.emptyMap()
);
private static final Script COMBINE_SCRIPT_SELF_REF = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"combineScriptSelfRef",
Collections.emptyMap()
);
// Init that stashes a plain String[] in the agg state (see testInitScriptMakesArray).
private static final Script INIT_SCRIPT_MAKING_ARRAY = new Script(
ScriptType.INLINE,
MockScriptEngine.NAME,
"initScriptMakingArray",
Collections.emptyMap()
);
// Script name -> implementation registry consumed by the mock engine.
private static final Map<String, Function<Map<String, Object>, Object>> SCRIPTS = new HashMap<>();
/**
 * Installs the mock script implementations into {@link #SCRIPTS} once per test class.
 * Each entry receives the script {@code params} map (which also carries the agg
 * {@code state} / shard {@code states}) and returns the new state or a result value.
 */
@BeforeClass
@SuppressWarnings("unchecked")
public static void initMockScripts() {
    // Basic pipeline: init installs an empty "collector" list, map appends 1 per
    // document, combine sums the shard-local list, reduce sums the shard results.
    SCRIPTS.put("initScript", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        state.put("collector", new ArrayList<Integer>());
        return state;
    });
    SCRIPTS.put("mapScript", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        ((List<Integer>) state.get("collector")).add(1); // just add 1 for each doc the script is run on
        return state;
    });
    SCRIPTS.put("combineScript", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        return ((List<Integer>) state.get("collector")).stream().mapToInt(Integer::intValue).sum();
    });
    // Combine that forwards the raw state map to reduce unchanged.
    SCRIPTS.put("combineScriptNoop", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        return state;
    });
    // Reduce sums only numeric shard results; non-numeric states (e.g. the raw maps
    // produced by combineScriptNoop) are skipped.
    SCRIPTS.put("reduceScript", params -> {
        List<?> states = (List<?>) params.get("states");
        return states.stream().filter(a -> a instanceof Number).map(a -> (Number) a).mapToInt(Number::intValue).sum();
    });
    SCRIPTS.put("reduceScriptCountStates", params -> {
        List<?> states = (List<?>) params.get("states");
        return states.size();
    });
    // _score-based variant of the basic pipeline.
    SCRIPTS.put("initScriptScore", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        state.put("collector", new ArrayList<Double>());
        return state;
    });
    SCRIPTS.put("mapScriptScore", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        ((List<Double>) state.get("collector")).add(((Number) params.get("_score")).doubleValue());
        return state;
    });
    SCRIPTS.put("combineScriptScore", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        return ((List<Double>) state.get("collector")).stream().mapToDouble(Double::doubleValue).sum();
    });
    // Parameterized pipeline: seeds the collector with "initialValue", adds
    // "itemValue" per document, and scales the combined sum by "multiplier".
    SCRIPTS.put("initScriptParams", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        Integer initialValue = (Integer) params.get("initialValue");
        ArrayList<Integer> collector = new ArrayList<>();
        collector.add(initialValue);
        state.put("collector", collector);
        return state;
    });
    SCRIPTS.put("mapScriptParams", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        Integer itemValue = (Integer) params.get("itemValue");
        ((List<Integer>) state.get("collector")).add(itemValue);
        return state;
    });
    SCRIPTS.put("combineScriptParams", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        int multiplier = ((Integer) params.get("multiplier"));
        return ((List<Integer>) state.get("collector")).stream().mapToInt(Integer::intValue).map(i -> i * multiplier).sum();
    });
    // Reduce mixing script params ("additional") with agg-level params ("aggs_param").
    // The trailing subtraction removes each shard's initialValue (24) * multiplier (4)
    // contribution. Uses List<?> instead of the raw List type.
    SCRIPTS.put(
        "reduceScriptParams",
        params -> ((List<?>) params.get("states")).stream().mapToInt(i -> (int) i).sum() + (int) params.get("aggs_param") + (int) params
            .get("additional") - ((List<?>) params.get("states")).size() * 24 * 4
    );
    // Self-referencing states: each phase links the state map to itself so the
    // self-reference check trips in init/map/combine respectively.
    SCRIPTS.put("initScriptSelfRef", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        state.put("collector", new ArrayList<Integer>());
        state.put("selfRef", state);
        return state;
    });
    SCRIPTS.put("mapScriptSelfRef", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        state.put("selfRef", state);
        return state;
    });
    SCRIPTS.put("combineScriptSelfRef", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        state.put("selfRef", state);
        return state;
    });
    // Stores a plain String[] in the state alongside the usual collector.
    SCRIPTS.put("initScriptMakingArray", params -> {
        Map<String, Object> state = (Map<String, Object>) params.get("state");
        state.put("array", new String[] { "foo", "bar" });
        state.put("collector", new ArrayList<Integer>());
        return state;
    });
}
// Request-scoped circuit breaker service mock, re-created by mockBreaker() before each test.
private CircuitBreakerService circuitBreakerService;
@Before
public void mockBreaker() {
// NoopCircuitBreaker never trips, but this subclass additionally tracks the net
// bytes "allocated" against it (negative additions decrement the total), so
// afterClose() can assert everything was released.
circuitBreakerService = mock(CircuitBreakerService.class);
when(circuitBreakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(new NoopCircuitBreaker(CircuitBreaker.REQUEST) {
// running net total of bytes accounted against this breaker
private long total = 0;
@Override
public void addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException {
logger.debug("Used {} grabbing {} for {}", total, bytes, label);
total += bytes;
}
@Override
public void addWithoutBreaking(long bytes) {
logger.debug("Used {} grabbing {}", total, bytes);
total += bytes;
}
@Override
public long getUsed() {
return total;
}
});
}
@Override
protected void afterClose() {
    // Every byte charged to the mocked request breaker during the test must have
    // been released again by the time everything is closed.
    long bytesStillHeld = circuitBreakerService.getBreaker(CircuitBreaker.REQUEST).getUsed();
    assertThat(bytesStillHeld, equalTo(0L));
}
@Override
protected ScriptService getMockScriptService() {
    // Wire every named mock script into one engine, registered under its type.
    MockScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME, SCRIPTS, Collections.emptyMap());
    Map<String, ScriptEngine> enginesByType = Collections.singletonMap(engine.getType(), engine);
    return new ScriptService(
        Settings.EMPTY,
        enginesByType,
        ScriptModule.CORE_CONTEXTS,
        () -> 1L,
        TestProjectResolvers.singleProject(randomProjectIdOrDefault())
    );
}
@SuppressWarnings("unchecked")
public void testNoDocs() throws IOException {
    try (Directory directory = newDirectory()) {
        try (RandomIndexWriter unused = new RandomIndexWriter(random(), directory)) {
            // deliberately left empty: the index must contain no documents
        }
        try (DirectoryReader reader = DirectoryReader.open(directory)) {
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT_NOOP).reduceScript(REDUCE_SCRIPT);
            ScriptedMetric result = searchAndReduce(reader, new AggTestConfig(builder));
            assertEquals(AGG_NAME, result.getName());
            assertNotNull(result.aggregation());
            // nothing was mapped, so the reduced sum is 0
            assertEquals(0, result.aggregation());
        }
    }
}
/**
 * A scripted metric without a combine script must be rejected with a clear error.
 */
public void testScriptedMetricWithoutCombine() throws IOException {
    try (Directory directory = newDirectory()) {
        int numDocs = randomInt(100);
        try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
            for (int i = 0; i < numDocs; i++) {
                indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
            }
        }
        try (DirectoryReader indexReader = DirectoryReader.open(directory)) {
            ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).reduceScript(REDUCE_SCRIPT);
            IllegalArgumentException exception = expectThrows(
                IllegalArgumentException.class,
                () -> searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder))
            );
            // expected value goes first so assertEquals failure messages read correctly
            assertEquals("[combine_script] must not be null: [scriptedMetric]", exception.getMessage());
        }
    }
}
/**
 * A scripted metric without a reduce script must be rejected with a clear error.
 */
public void testScriptedMetricWithoutReduce() throws IOException {
    try (Directory directory = newDirectory()) {
        int numDocs = randomInt(100);
        try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
            for (int i = 0; i < numDocs; i++) {
                indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
            }
        }
        try (DirectoryReader indexReader = DirectoryReader.open(directory)) {
            ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
            IllegalArgumentException exception = expectThrows(
                IllegalArgumentException.class,
                () -> searchAndReduce(indexReader, new AggTestConfig(aggregationBuilder))
            );
            // expected value goes first so assertEquals failure messages read correctly
            assertEquals("[reduce_script] must not be null: [scriptedMetric]", exception.getMessage());
        }
    }
}
/**
 * The combine script must sum the per-document entries produced by the map script,
 * so the final aggregation equals the number of indexed documents.
 */
public void testScriptedMetricWithCombine() throws IOException {
    try (Directory dir = newDirectory()) {
        // boxed on purpose: assertEquals below compares it as an Object
        Integer docCount = randomInt(100);
        try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
            for (int doc = 0; doc < docCount; doc++) {
                writer.addDocument(singleton(new SortedNumericDocValuesField("number", doc)));
            }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT).reduceScript(REDUCE_SCRIPT);
            ScriptedMetric result = searchAndReduce(reader, new AggTestConfig(builder));
            assertEquals(AGG_NAME, result.getName());
            assertNotNull(result.aggregation());
            assertEquals(docCount, result.aggregation());
        }
    }
}
public void testNoParallelization() throws IOException {
    try (Directory dir = newDirectory()) {
        int docCount = randomInt(100);
        try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
            for (int doc = 0; doc < docCount; doc++) {
                writer.addDocument(singleton(new SortedNumericDocValuesField("number", doc)));
            }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.initScript(INIT_SCRIPT)
                .mapScript(MAP_SCRIPT)
                .combineScript(COMBINE_SCRIPT)
                .reduceScript(REDUCE_SCRIPT_COUNT_STATES);
            // Leaves are intentionally NOT split into separate aggregators, so the
            // reduce script must observe exactly one shard-level state.
            ScriptedMetric result = searchAndReduce(
                reader,
                new AggTestConfig(builder).withSplitLeavesIntoSeperateAggregators(false)
            );
            assertEquals(AGG_NAME, result.getName());
            assertNotNull(result.aggregation());
            assertEquals(1, result.aggregation());
        }
    }
}
/**
 * Uses the {@code _score} of each matching document; every document here scores
 * 1.0, so the summed scores equal the document count.
 */
public void testScriptedMetricWithCombineAccessesScores() throws IOException {
    try (Directory dir = newDirectory()) {
        // boxed on purpose: assertEquals below compares it as an Object
        Integer docCount = randomInt(100);
        try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
            for (int doc = 0; doc < docCount; doc++) {
                writer.addDocument(singleton(new SortedNumericDocValuesField("number", doc)));
            }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.initScript(INIT_SCRIPT_SCORE)
                .mapScript(MAP_SCRIPT_SCORE)
                .combineScript(COMBINE_SCRIPT_SCORE)
                .reduceScript(REDUCE_SCRIPT);
            ScriptedMetric result = searchAndReduce(reader, new AggTestConfig(builder));
            assertEquals(AGG_NAME, result.getName());
            assertNotNull(result.aggregation());
            assertEquals(docCount, result.aggregation());
        }
    }
}
public void testScriptParamsPassedThrough() throws IOException {
    try (Directory dir = newDirectory()) {
        try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
            for (int doc = 0; doc < 100; doc++) {
                writer.addDocument(singleton(new SortedNumericDocValuesField("number", doc)));
            }
            // collapse to a single segment so one aggregator handles all documents
            writer.forceMerge(1);
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.initScript(INIT_SCRIPT_PARAMS)
                .mapScript(MAP_SCRIPT_PARAMS)
                .combineScript(COMBINE_SCRIPT_PARAMS)
                .reduceScript(REDUCE_SCRIPT);
            ScriptedMetric result = searchAndReduce(reader, new AggTestConfig(builder));
            // driven by the script params: (24 + 100 * 12) * 4 = 4896
            assertEquals(4896, result.aggregation());
        }
    }
}
public void testAggParamsPassedToReduceScript() throws IOException {
    try (Directory dir = newDirectory()) {
        try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
            for (int doc = 0; doc < 100; doc++) {
                writer.addDocument(singleton(new SortedNumericDocValuesField("number", doc)));
            }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.params(Collections.singletonMap("aggs_param", 1))
                .initScript(INIT_SCRIPT_PARAMS)
                .mapScript(MAP_SCRIPT_PARAMS)
                .combineScript(COMBINE_SCRIPT_PARAMS)
                .reduceScript(REDUCE_SCRIPT_PARAMS);
            ScriptedMetric result = searchAndReduce(reader, new AggTestConfig(builder).withMaxBuckets(0));
            // driven by the script params plus the agg-level "aggs_param"
            assertEquals(4803, result.aggregation());
        }
    }
}
public void testConflictingAggAndScriptParams() throws IOException {
    try (Directory dir = newDirectory()) {
        try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
            for (int doc = 0; doc < 100; doc++) {
                writer.addDocument(singleton(new SortedNumericDocValuesField("number", doc)));
            }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            // CONFLICTING_PARAM_NAME is also a script-level param of INIT_SCRIPT_PARAMS,
            // so supplying it as an agg-level param must be rejected.
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.params(Collections.singletonMap(CONFLICTING_PARAM_NAME, "blah"))
                .initScript(INIT_SCRIPT_PARAMS)
                .mapScript(MAP_SCRIPT_PARAMS)
                .combineScript(COMBINE_SCRIPT_PARAMS)
                .reduceScript(REDUCE_SCRIPT);
            IllegalArgumentException ex = expectThrows(
                IllegalArgumentException.class,
                () -> searchAndReduce(reader, new AggTestConfig(builder))
            );
            assertEquals(
                "Parameter name \"" + CONFLICTING_PARAM_NAME + "\" used in both aggregation and script parameters",
                ex.getMessage()
            );
        }
    }
}
public void testSelfReferencingAggStateAfterInit() throws IOException {
    try (Directory dir = newDirectory()) {
        try (RandomIndexWriter ignored = new RandomIndexWriter(random(), dir)) {
            // no documents needed: the init script alone triggers the failure
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.initScript(INIT_SCRIPT_SELF_REF)
                .mapScript(MAP_SCRIPT)
                .combineScript(COMBINE_SCRIPT_PARAMS)
                .reduceScript(REDUCE_SCRIPT);
            IllegalArgumentException ex = expectThrows(
                IllegalArgumentException.class,
                () -> searchAndReduce(reader, new AggTestConfig(builder))
            );
            assertEquals("Iterable object is self-referencing itself (Scripted metric aggs init script)", ex.getMessage());
        }
    }
}
public void testSelfReferencingAggStateAfterMap() throws IOException {
    try (Directory dir = newDirectory()) {
        // at least one document so the map script actually runs
        Integer docCount = randomIntBetween(1, 100);
        try (RandomIndexWriter writer = new RandomIndexWriter(random(), dir)) {
            for (int doc = 0; doc < docCount; doc++) {
                writer.addDocument(singleton(new SortedNumericDocValuesField("number", doc)));
            }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.initScript(INIT_SCRIPT)
                .mapScript(MAP_SCRIPT_SELF_REF)
                .combineScript(COMBINE_SCRIPT_PARAMS)
                .reduceScript(REDUCE_SCRIPT);
            IllegalArgumentException ex = expectThrows(
                IllegalArgumentException.class,
                () -> searchAndReduce(reader, new AggTestConfig(builder))
            );
            assertEquals("Iterable object is self-referencing itself (Scripted metric aggs map script)", ex.getMessage());
        }
    }
}
public void testSelfReferencingAggStateAfterCombine() throws IOException {
    try (Directory dir = newDirectory()) {
        try (RandomIndexWriter ignored = new RandomIndexWriter(random(), dir)) {
            // no documents needed: the combine script alone triggers the failure
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
            builder.initScript(INIT_SCRIPT)
                .mapScript(MAP_SCRIPT)
                .combineScript(COMBINE_SCRIPT_SELF_REF)
                .reduceScript(REDUCE_SCRIPT);
            IllegalArgumentException ex = expectThrows(
                IllegalArgumentException.class,
                () -> searchAndReduce(reader, new AggTestConfig(builder))
            );
            assertEquals("Iterable object is self-referencing itself (Scripted metric aggs combine script)", ex.getMessage());
        }
    }
}
public void testInitScriptMakesArray() throws IOException {
    // The init script stores a String[] in the agg state; the pipeline must still
    // run to completion and count the single indexed document.
    ScriptedMetricAggregationBuilder builder = new ScriptedMetricAggregationBuilder(AGG_NAME);
    builder.initScript(INIT_SCRIPT_MAKING_ARRAY)
        .mapScript(MAP_SCRIPT)
        .combineScript(COMBINE_SCRIPT)
        .reduceScript(REDUCE_SCRIPT);
    testCase(
        iw -> iw.addDocument(new Document()),
        (InternalScriptedMetric r) -> assertEquals(1, r.aggregation()),
        new AggTestConfig(builder)
    );
}
public void testAsSubAgg() throws IOException {
    // Scripted metric nested under a terms aggregation: each bucket's metric must
    // count only the documents that fell into that bucket.
    AggregationBuilder builder = new TermsAggregationBuilder("t").field("t")
        .executionHint("map")
        .subAggregation(
            new ScriptedMetricAggregationBuilder("scripted").initScript(INIT_SCRIPT)
                .mapScript(MAP_SCRIPT)
                .combineScript(COMBINE_SCRIPT)
                .reduceScript(REDUCE_SCRIPT)
        );
    CheckedConsumer<RandomIndexWriter, IOException> indexer = iw -> {
        // 99 documents alternating between the "even" and "odd" terms
        for (int doc = 0; doc < 99; doc++) {
            iw.addDocument(singleton(new SortedSetDocValuesField("t", doc % 2 == 0 ? new BytesRef("even") : new BytesRef("odd"))));
        }
    };
    Consumer<StringTerms> checks = terms -> {
        StringTerms.Bucket evenBucket = terms.getBucketByKey("even");
        assertThat(evenBucket.getDocCount(), equalTo(50L));
        ScriptedMetric evenMetric = evenBucket.getAggregations().get("scripted");
        assertThat(evenMetric.aggregation(), equalTo(50));
        StringTerms.Bucket oddBucket = terms.getBucketByKey("odd");
        assertThat(oddBucket.getDocCount(), equalTo(49L));
        ScriptedMetric oddMetric = oddBucket.getAggregations().get("scripted");
        assertThat(oddMetric.aggregation(), equalTo(49));
    };
    testCase(indexer, checks, new AggTestConfig(builder, keywordField("t"), longField("number")));
}
}
| ScriptedMetricAggregatorTests |
java | apache__flink | flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/DeserializationSchemaAdapter.java | {
"start": 4116,
"end": 5714
} | class ____ implements BulkFormat.Reader<RowData> {
private final LineBytesInputFormat inputFormat;
private long numRead = 0;
private Reader(Configuration config, FileSourceSplit split) throws IOException {
this.inputFormat = new LineBytesInputFormat(split.path(), config);
this.inputFormat.open(
new FileInputSplit(0, split.path(), split.offset(), split.length(), null));
}
@SuppressWarnings({"unchecked", "rawtypes"})
@Nullable
@Override
public RecordIterator<RowData> readBatch() throws IOException {
RowData[] records = new RowData[DEFAULT_SIZE];
int num = 0;
final long skipCount = numRead;
for (int i = 0; i < BATCH_SIZE; i++) {
RowData record = inputFormat.nextRecord(null);
if (record == null) {
break;
}
records[num++] = record;
}
if (num == 0) {
return null;
}
numRead += num;
ArrayResultIterator<RowData> iterator = new ArrayResultIterator<>();
iterator.set(records, num, NO_OFFSET, skipCount);
return iterator;
}
private void seek(long toSkip) throws IOException {
while (toSkip > 0) {
inputFormat.nextRecord(null);
toSkip--;
}
}
@Override
public void close() throws IOException {
inputFormat.close();
}
}
private | Reader |
java | apache__camel | components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsProducer.java | {
"start": 2608,
"end": 28273
} | class ____ extends DefaultAsyncProducer {
private static final Logger LOG = LoggerFactory.getLogger(JmsProducer.class);
private static final String GENERATED_CORRELATION_ID_PREFIX = "Camel-";
private final JmsEndpoint endpoint;
private final AtomicBoolean started = new AtomicBoolean();
private JmsOperations inOnlyTemplate;
private JmsOperations inOutTemplate;
private UuidGenerator uuidGenerator;
private ReplyManager replyManager;
public JmsProducer(JmsEndpoint endpoint) {
super(endpoint);
this.endpoint = endpoint;
}
@Override
public JmsEndpoint getEndpoint() {
return (JmsEndpoint) super.getEndpoint();
}
protected void initReplyManager() {
if (!started.get()) {
lock.lock();
try {
if (started.get()) {
return;
}
// must use the classloader from the application context when creating reply manager,
// as it should inherit the classloader from app context and not the current which may be
// a different classloader
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
ClassLoader ac = endpoint.getCamelContext().getApplicationContextClassLoader();
try {
if (ac != null) {
Thread.currentThread().setContextClassLoader(ac);
}
// validate that replyToType and replyTo is configured accordingly
if (endpoint.getReplyToType() != null) {
// setting temporary with a fixed replyTo is not supported
if (endpoint.getReplyTo() != null && endpoint.getReplyToType().equals(ReplyToType.Temporary.name())) {
throw new IllegalArgumentException(
"ReplyToType " + ReplyToType.Temporary
+ " is not supported when replyTo " + endpoint.getReplyTo()
+ " is also configured.");
}
}
if (endpoint.getReplyTo() != null) {
replyManager = createReplyManager(endpoint.getReplyTo());
if (LOG.isDebugEnabled()) {
LOG.debug("Using JmsReplyManager: {} to process replies from: {}", replyManager,
endpoint.getReplyTo());
}
} else {
replyManager = createReplyManager();
LOG.debug("Using JmsReplyManager: {} to process replies from temporary queue", replyManager);
}
} catch (Exception e) {
throw new FailedToCreateProducerException(endpoint, e);
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
started.set(true);
} finally {
lock.unlock();
}
}
}
protected void unInitReplyManager() {
try {
if (replyManager != null) {
if (LOG.isDebugEnabled()) {
LOG.debug("Stopping JmsReplyManager: {} from processing replies from: {}", replyManager,
endpoint.getReplyTo() != null ? endpoint.getReplyTo() : "temporary queue");
}
ServiceHelper.stopService(replyManager);
}
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
} finally {
started.set(false);
}
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
// deny processing if we are not started
if (!isRunAllowed()) {
if (exchange.getException() == null) {
exchange.setException(new RejectedExecutionException());
}
// we cannot process so invoke callback
callback.done(true);
return true;
}
try {
if (!endpoint.isDisableReplyTo() && exchange.getPattern().isOutCapable()) {
// in out requires a bit more work than in only
return processInOut(exchange, callback);
} else {
// in only
return processInOnly(exchange, callback);
}
} catch (Exception e) {
// must catch exception to ensure callback is invoked as expected
// to let Camel error handling deal with this
exchange.setException(e);
callback.done(true);
return true;
}
}
protected boolean processInOut(final Exchange exchange, final AsyncCallback callback) {
final org.apache.camel.Message in = exchange.getIn();
String destinationName = in.getHeader(JmsConstants.JMS_DESTINATION_NAME, String.class);
// remove the header so it wont be propagated
in.removeHeader(JmsConstants.JMS_DESTINATION_NAME);
if (destinationName == null) {
destinationName = endpoint.getDestinationName();
}
Destination destination = in.getHeader(JmsConstants.JMS_DESTINATION, Destination.class);
// remove the header so it wont be propagated
in.removeHeader(JmsConstants.JMS_DESTINATION);
if (destination != null) {
// prefer to use destination over destination name
destinationName = null;
}
initReplyManager();
// the request timeout can be overruled by a header otherwise the endpoint configured value is used
final long timeout
= exchange.getIn().getHeader(JmsConstants.JMS_REQUEST_TIMEOUT, endpoint.getRequestTimeout(), long.class);
final JmsConfiguration configuration = endpoint.getConfiguration();
// when using message id as correlation id, we need at first to use a provisional correlation id
// which we then update to the real JMSMessageID when the message has been sent
// this is done with the help of the MessageSentCallback
final boolean msgIdAsCorrId = configuration.isUseMessageIDAsCorrelationID();
final String provisionalCorrelationId = msgIdAsCorrId ? getUuidGenerator().generateUuid() : null;
MessageSentCallback messageSentCallback = null;
if (msgIdAsCorrId) {
messageSentCallback
= new UseMessageIdAsCorrelationIdMessageSentCallback(replyManager, provisionalCorrelationId, timeout);
}
final String correlationProperty = configuration.getCorrelationProperty();
final String correlationPropertyToUse = ofNullable(correlationProperty).orElse(JmsConstants.JMS_HEADER_CORRELATION_ID);
final String originalCorrelationId = in.getHeader(correlationPropertyToUse, String.class);
boolean generateFreshCorrId = ObjectHelper.isEmpty(originalCorrelationId) && !msgIdAsCorrId
|| originalCorrelationId != null && originalCorrelationId.startsWith(GENERATED_CORRELATION_ID_PREFIX);
if (generateFreshCorrId) {
// we append the 'Camel-' prefix to know it was generated by us
in.setHeader(correlationPropertyToUse, GENERATED_CORRELATION_ID_PREFIX + getUuidGenerator().generateUuid());
}
MessageCreator messageCreator = new MessageCreator() {
public Message createMessage(Session session) throws JMSException {
Message answer = endpoint.getBinding().makeJmsMessage(exchange, in, session, null);
Destination replyTo;
String replyToOverride = configuration.getReplyToOverride();
if (replyToOverride != null) {
replyTo = resolveOrCreateDestination(replyToOverride, session);
} else {
// get the reply to destination to be used from the reply manager
replyTo = replyManager.getReplyTo();
}
if (replyTo == null) {
throw new RuntimeExchangeException("Failed to resolve replyTo destination", exchange);
}
JmsMessageHelper.setJMSReplyTo(answer, replyTo);
replyManager.setReplyToSelectorHeader(in, answer);
String correlationId = determineCorrelationId(answer, provisionalCorrelationId);
replyManager.registerReply(replyManager, exchange, callback, originalCorrelationId, correlationId, timeout);
if (correlationProperty != null) {
replyManager.setCorrelationProperty(correlationProperty);
}
if (LOG.isDebugEnabled()) {
LOG.debug("Using {}: {}, JMSReplyTo destination: {}, with request timeout: {} ms.",
correlationPropertyToUse, correlationId, replyTo, timeout);
}
LOG.trace("Created jakarta.jms.Message: {}", answer);
return answer;
}
};
doSend(exchange, true, destinationName, destination, messageCreator, messageSentCallback);
// continue routing asynchronously (reply will be processed async when its received)
return false;
}
/**
* Strategy to determine which correlation id to use among <tt>JMSMessageID</tt> and <tt>JMSCorrelationID</tt>.
*
* @param message the JMS message
* @param provisionalCorrelationId an optional provisional correlation id, which is preferred to be used
* @return the correlation id to use
* @throws JMSException can be thrown
*/
protected String determineCorrelationId(Message message, String provisionalCorrelationId) throws JMSException {
if (provisionalCorrelationId != null) {
return provisionalCorrelationId;
}
final JmsConfiguration configuration = endpoint.getConfiguration();
final String correlationProperty = configuration.getCorrelationProperty();
final String messageId = message.getJMSMessageID();
final String correlationId = JmsMessageHelper.getJMSCorrelationID(message);
final String correlationPropertyValue;
if (correlationProperty == null) {
correlationPropertyValue = null;
} else {
correlationPropertyValue = message.getStringProperty(correlationProperty);
}
if (!ObjectHelper.isEmpty(correlationPropertyValue)) {
return correlationPropertyValue;
} else if (configuration.isUseMessageIDAsCorrelationID()) {
return messageId;
} else if (ObjectHelper.isEmpty(correlationId)) {
// correlation id is empty so fallback to message id
return messageId;
} else {
return correlationId;
}
}
protected boolean processInOnly(final Exchange exchange, final AsyncCallback callback) {
final org.apache.camel.Message in = exchange.getIn();
String destinationName = in.getHeader(JmsConstants.JMS_DESTINATION_NAME, String.class);
if (destinationName != null) {
// remove the header so it wont be propagated
in.removeHeader(JmsConstants.JMS_DESTINATION_NAME);
}
if (destinationName == null) {
destinationName = endpoint.getDestinationName();
}
Destination destination = in.getHeader(JmsConstants.JMS_DESTINATION, Destination.class);
if (destination != null) {
// remove the header so it wont be propagated
in.removeHeader(JmsConstants.JMS_DESTINATION);
}
if (destination != null) {
// prefer to use destination over destination name
destinationName = null;
}
final String to = destinationName != null ? destinationName : getDestinationName(destination);
MessageSentCallback messageSentCallback = getEndpoint().getConfiguration().isIncludeSentJMSMessageID()
? new InOnlyMessageSentCallback(exchange) : null;
MessageCreator messageCreator = new MessageCreator() {
public Message createMessage(Session session) throws JMSException {
Message answer = endpoint.getBinding().makeJmsMessage(exchange, in, session, null);
// when in InOnly mode the JMSReplyTo is a bit complicated
// we only want to set the JMSReplyTo on the answer if
// there is a JMSReplyTo from the header/endpoint and
// we have been told to preserveMessageQos
Object jmsReplyTo = JmsMessageHelper.getJMSReplyTo(answer);
if (endpoint.isDisableReplyTo()) {
// honor disable reply to configuration
LOG.trace("ReplyTo is disabled on endpoint: {}", endpoint);
JmsMessageHelper.setJMSReplyTo(answer, null);
jmsReplyTo = null;
} else {
// if the binding did not create the reply to then we have to try to create it here
if (jmsReplyTo == null) {
// prefer reply to from header over endpoint configured
jmsReplyTo = exchange.getIn().getHeader(JmsConstants.JMS_HEADER_REPLY_TO, String.class);
if (jmsReplyTo == null) {
jmsReplyTo = endpoint.getReplyTo();
}
}
}
// we must honor these special flags to preserve QoS
// as we are not OUT capable and thus do not expect a reply, and therefore
// the consumer of this message should not return a reply so we remove it
// unless we use preserveMessageQos=true to tell that we still want to use JMSReplyTo
if (jmsReplyTo != null && !(endpoint.isPreserveMessageQos() || endpoint.isExplicitQosEnabled())) {
// log at debug what we are doing, as higher level may cause noise in production logs
// this behavior is also documented at the camel website
LOG.debug(
"Disabling JMSReplyTo: {} for destination: {}. Use preserveMessageQos=true to force Camel to keep the JMSReplyTo on endpoint: {}",
jmsReplyTo, to, endpoint);
jmsReplyTo = null;
}
// the reply to is a String, so we need to look up its Destination instance
// and if needed create the destination using the session if needed to
if (jmsReplyTo instanceof String replyTo) {
// we need to null it as we use the String to resolve it as a Destination instance
jmsReplyTo = resolveOrCreateDestination(replyTo, session);
}
// set the JMSReplyTo on the answer if we are to use it
Destination replyTo = null;
String replyToOverride = endpoint.getConfiguration().getReplyToOverride();
if (replyToOverride != null) {
replyTo = resolveOrCreateDestination(replyToOverride, session);
} else if (jmsReplyTo instanceof Destination destinationD) {
replyTo = destinationD;
}
if (replyTo != null) {
LOG.debug("Using JMSReplyTo destination: {}", replyTo);
JmsMessageHelper.setJMSReplyTo(answer, replyTo);
} else {
// do not use JMSReplyTo
LOG.trace("Not using JMSReplyTo");
JmsMessageHelper.setJMSReplyTo(answer, null);
}
LOG.trace("Created jakarta.jms.Message: {}", answer);
return answer;
}
};
doSend(exchange, false, destinationName, destination, messageCreator, messageSentCallback);
// after sending then set the OUT message id to the JMSMessageID so its identical
setMessageId(exchange);
// we are synchronous so return true
callback.done(true);
return true;
}
/**
* Sends the message using the JmsTemplate.
*
* @param exchange the exchange
* @param inOut use inOut or inOnly template
* @param destinationName the destination name
* @param destination the destination (if no name provided)
* @param messageCreator the creator to create the {@link Message} to send
* @param callback optional callback to invoke when message has been sent
*/
protected void doSend(
Exchange exchange,
boolean inOut, String destinationName, Destination destination,
MessageCreator messageCreator, MessageSentCallback callback) {
// record where we sent the message
String to = destinationName != null ? destinationName : getDestinationName(destination);
if (to != null) {
exchange.getMessage().setHeader(JmsConstants.JMS_DESTINATION_NAME_PRODUCED, to);
}
CamelJmsTemplate template = (CamelJmsTemplate) (inOut ? getInOutTemplate() : getInOnlyTemplate());
if (LOG.isTraceEnabled()) {
LOG.trace("Using {} jms template", inOut ? "inOut" : "inOnly");
}
// destination should be preferred
if (destination != null) {
template.send(destination, messageCreator, callback);
} else if (destinationName != null) {
template.send(destinationName, messageCreator, callback);
} else {
throw new IllegalArgumentException(
"Neither destination nor destinationName is specified on this endpoint: " + endpoint);
}
}
protected Destination resolveOrCreateDestination(String destinationName, Session session)
throws JMSException {
Destination dest = null;
boolean isPubSub = isTopicPrefix(destinationName)
|| !isQueuePrefix(destinationName) && endpoint.isPubSubDomain();
// try using destination resolver to lookup the destination
if (endpoint.getDestinationResolver() != null) {
dest = endpoint.getDestinationResolver().resolveDestinationName(session, destinationName,
isPubSub);
LOG.debug("Resolved JMSReplyTo destination {} using DestinationResolver {} as PubSubDomain {} -> {}",
destinationName, endpoint.getDestinationResolver(), isPubSub, dest);
}
if (dest == null) {
// must normalize the destination name
String before = destinationName;
destinationName = normalizeDestinationName(destinationName);
LOG.trace("Normalized JMSReplyTo destination name {} -> {}", before, destinationName);
// okay then fallback and create the queue/topic
if (isPubSub) {
LOG.debug("Creating JMSReplyTo topic: {}", destinationName);
dest = session.createTopic(destinationName);
} else {
LOG.debug("Creating JMSReplyTo queue: {}", destinationName);
dest = session.createQueue(destinationName);
}
}
return dest;
}
protected void setMessageId(Exchange exchange) {
if (exchange.hasOut()) {
JmsMessage out = exchange.getOut(JmsMessage.class);
try {
if (out != null && out.getJmsMessage() != null) {
out.setMessageId(out.getJmsMessage().getJMSMessageID());
}
} catch (JMSException e) {
LOG.warn("Unable to retrieve JMSMessageID from outgoing JMS Message and set it into Camel's MessageId", e);
}
}
}
    /**
     * Gets the JMS template used for InOnly (fire-and-forget) sends, lazily created from the endpoint on first use.
     */
    public JmsOperations getInOnlyTemplate() {
        if (inOnlyTemplate == null) {
            inOnlyTemplate = endpoint.createInOnlyTemplate();
        }
        return inOnlyTemplate;
    }
    /**
     * Sets the JMS template to use for InOnly (fire-and-forget) sends, overriding the lazily created default.
     */
    public void setInOnlyTemplate(JmsOperations inOnlyTemplate) {
        this.inOnlyTemplate = inOnlyTemplate;
    }
    /**
     * Gets the JMS template used for InOut (request/reply) sends, lazily created from the endpoint on first use.
     */
    public JmsOperations getInOutTemplate() {
        if (inOutTemplate == null) {
            inOutTemplate = endpoint.createInOutTemplate();
        }
        return inOutTemplate;
    }
    /**
     * Sets the JMS template to use for InOut (request/reply) sends, overriding the lazily created default.
     */
    public void setInOutTemplate(JmsOperations inOutTemplate) {
        this.inOutTemplate = inOutTemplate;
    }
    /**
     * Gets the generator used to create correlation ids (defaults to the CamelContext's generator in doStart).
     */
    public UuidGenerator getUuidGenerator() {
        return uuidGenerator;
    }
    /**
     * Sets the generator used to create correlation ids.
     */
    public void setUuidGenerator(UuidGenerator uuidGenerator) {
        this.uuidGenerator = uuidGenerator;
    }
/**
* Pre tests the connection before starting the listening.
* <p/>
* In case of connection failure the exception is thrown which prevents Camel from starting.
*
* @throws FailedToCreateProducerException is thrown if testing the connection failed
*/
protected void testConnectionOnStartup() throws FailedToCreateProducerException {
try {
CamelJmsTemplate template = (CamelJmsTemplate) getInOnlyTemplate();
if (LOG.isDebugEnabled()) {
LOG.debug("Testing JMS Connection on startup for destination: {}", template.getDefaultDestinationName());
}
ConnectionFactory connectionfactory = template.getConnectionFactory();
if (connectionfactory != null) {
Connection conn = connectionfactory.createConnection();
JmsUtils.closeConnection(conn);
} else {
LOG.error("connection factory is null");
throw new IllegalStateException("connection factory is null");
}
LOG.debug("Successfully tested JMS Connection on startup for destination: {}",
template.getDefaultDestinationName());
} catch (Exception e) {
throw new FailedToCreateProducerException(getEndpoint(), e);
}
}
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        if (uuidGenerator == null) {
            // use the generator configured on the camel context
            uuidGenerator = getEndpoint().getCamelContext().getUuidGenerator();
        }
        // optionally verify the broker is reachable before declaring the producer started
        if (endpoint.isTestConnectionOnStartup()) {
            testConnectionOnStartup();
        }
    }
    @Override
    protected void doStop() throws Exception {
        super.doStop();
        // must stop/un-init reply manager if it was in use
        unInitReplyManager();
    }
protected ReplyManager createReplyManager() throws Exception {
// use a temporary queue
ReplyManager replyManager
= new TemporaryQueueReplyManager(getEndpoint().getCamelContext(), getEndpoint().getTemporaryQueueResolver());
replyManager.setEndpoint(getEndpoint());
String name = "JmsReplyManagerTimeoutChecker[" + getEndpoint().getEndpointConfiguredDestinationName() + "]";
ScheduledExecutorService replyManagerScheduledExecutorService
= getEndpoint().getCamelContext().getExecutorServiceManager().newSingleThreadScheduledExecutor(this, name);
replyManager.setScheduledExecutorService(replyManagerScheduledExecutorService);
name = "JmsReplyManagerOnTimeout[" + getEndpoint().getEndpointConfiguredDestinationName() + "]";
// allow the timeout thread to timeout so during normal operation we do not have a idle thread
ExecutorService replyManagerExecutorService = createReplyManagerExecutorService(replyManager, name);
replyManager.setOnTimeoutExecutorService(replyManagerExecutorService);
ServiceHelper.startService(replyManager);
return replyManager;
}
    /**
     * Creates the thread pool used to handle reply timeouts, sized by replyToOnTimeoutMaxConcurrentConsumers.
     */
    private ExecutorService createReplyManagerExecutorService(ReplyManager replyManager, String name) {
        int max = doGetMax();
        // core size 0 so the pool shrinks to no threads when idle
        return getEndpoint().getCamelContext().getExecutorServiceManager().newThreadPool(replyManager, name, 0, max);
    }
private int doGetMax() {
int max = getEndpoint().getReplyToOnTimeoutMaxConcurrentConsumers();
if (max <= 0) {
throw new IllegalArgumentException("The option replyToOnTimeoutMaxConcurrentConsumers must be >= 1");
}
return max;
}
protected ReplyManager createReplyManager(String replyTo) throws Exception {
// use a regular queue
ReplyManager replyManager = new QueueReplyManager(getEndpoint().getCamelContext());
replyManager.setEndpoint(getEndpoint());
String name = "JmsReplyManagerTimeoutChecker[" + replyTo + "]";
ScheduledExecutorService replyManagerScheduledExecutorService
= getEndpoint().getCamelContext().getExecutorServiceManager().newSingleThreadScheduledExecutor(this, name);
replyManager.setScheduledExecutorService(replyManagerScheduledExecutorService);
name = "JmsReplyManagerOnTimeout[" + replyTo + "]";
// allow the timeout thread to timeout so during normal operation we do not have a idle thread
ExecutorService replyManagerExecutorService = createReplyManagerExecutorService(replyManager, name);
replyManager.setOnTimeoutExecutorService(replyManagerExecutorService);
ServiceHelper.startService(replyManager);
return replyManager;
}
}
| JmsProducer |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/telemetry/endpoints/ontextmessage/SingleDtoReceived_SingleDtoResponse_Endpoint.java | {
"start": 240,
"end": 411
} | class ____ {
@OnTextMessage
public Dto onMessage(Dto dto) {
return new Dto("echo 0: " + dto.property());
}
}
| SingleDtoReceived_SingleDtoResponse_Endpoint |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/IsInstanceOfClassTest.java | {
"start": 3633,
"end": 4037
} | class ____ {
boolean m(Object o, Class<?> clazz) {
// BUG: Diagnostic contains: clazz.isInstance(o)
return clazz.isInstance(o.getClass());
}
}
""")
.doTest();
}
@Test
public void positive_getClass_clazz() {
compilationHelper
.addSourceLines(
"pkg/A.java",
"""
| A |
java | apache__logging-log4j2 | log4j-1.2-api/src/test/java/org/apache/log4j/builders/BuilderManagerTest.java | {
"start": 1179,
"end": 2405
} | class ____ {
/**
* This test ensures that instantiation failures due to missing parameters
* always return an empty wrapper instead of null, hence disabling the
* <i>"instantiate by classname"</i> fallback mechanism for supported components.
*/
@Test
void testReturnInvalidValueOnError() {
final PropertiesConfiguration config = new PropertiesConfiguration(null, null);
final BuilderManager manager = new BuilderManager();
final Properties props = new Properties();
props.setProperty("FILE", FileAppender.class.getName());
props.setProperty("FILE.filter.1", StringMatchFilter.class.getName());
// Parse an invalid StringMatchFilter
final Filter filter = manager.parse(
StringMatchFilter.class.getName(), "FILE.filter", props, config, BuilderManager.INVALID_FILTER);
assertEquals(BuilderManager.INVALID_FILTER, filter);
// Parse an invalid FileAppender
final Appender appender = manager.parseAppender(
"FILE", FileAppender.class.getName(), "FILE", "FILE.layout", "FILE.filter.", props, config);
assertEquals(BuilderManager.INVALID_APPENDER, appender);
}
}
| BuilderManagerTest |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.