language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/dependent/listeners/PreDestroyAnotherBeanE.java | {
"start": 284,
"end": 643
} | class ____ implements BeanPreDestroyEventListener<AnotherBeanE> {
@Override
public AnotherBeanE onPreDestroy(BeanPreDestroyEvent<AnotherBeanE> event) {
TestData.DESTRUCTION_ORDER.add(AnotherBeanE.class.getSimpleName());
AnotherBeanE bean = event.getBean();
bean.destroyed = true;
return bean;
}
}
| PreDestroyAnotherBeanE |
java | spring-projects__spring-boot | module/spring-boot-data-couchbase/src/test/java/org/springframework/boot/data/couchbase/autoconfigure/DataCouchbaseAutoConfigurationTests.java | {
"start": 2322,
"end": 4395
} | class ____ {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
.withConfiguration(AutoConfigurations.of(ValidationAutoConfiguration.class, CouchbaseAutoConfiguration.class,
DataCouchbaseAutoConfiguration.class));
@Test
void disabledIfCouchbaseIsNotConfigured() {
this.contextRunner.run((context) -> assertThat(context).doesNotHaveBean(CouchbaseTemplate.class));
}
@Test
void validatorIsPresent() {
this.contextRunner.run((context) -> assertThat(context).hasSingleBean(ValidatingCouchbaseEventListener.class));
}
@Test
void entityScanShouldSetInitialEntitySet() {
this.contextRunner.withUserConfiguration(EntityScanConfig.class).run((context) -> {
CouchbaseMappingContext mappingContext = context.getBean(CouchbaseMappingContext.class);
ManagedTypes managedTypes = (ManagedTypes) ReflectionTestUtils.getField(mappingContext, "managedTypes");
assertThat(managedTypes).isNotNull();
assertThat(managedTypes.toList()).containsOnly(City.class);
});
}
@Test
void typeKeyDefault() {
this.contextRunner.withUserConfiguration(CouchbaseMockConfiguration.class)
.run((context) -> assertThat(context.getBean(MappingCouchbaseConverter.class).getTypeKey())
.isEqualTo(DefaultCouchbaseTypeMapper.DEFAULT_TYPE_KEY));
}
@Test
void typeKeyCanBeCustomized() {
this.contextRunner.withUserConfiguration(CouchbaseMockConfiguration.class)
.withPropertyValues("spring.data.couchbase.type-key=_custom")
.run((context) -> assertThat(context.getBean(MappingCouchbaseConverter.class).getTypeKey())
.isEqualTo("_custom"));
}
@Test
void customConversions() {
this.contextRunner.withUserConfiguration(CustomConversionsConfig.class).run((context) -> {
CouchbaseTemplate template = context.getBean(CouchbaseTemplate.class);
assertThat(
template.getConverter().getConversionService().canConvert(CouchbaseProperties.class, Boolean.class))
.isTrue();
});
}
@Configuration(proxyBeanMethods = false)
@Import(CouchbaseMockConfiguration.class)
static | DataCouchbaseAutoConfigurationTests |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/benckmark/pool/Oracle_Case4.java | {
"start": 1164,
"end": 7560
} | class ____ extends TestCase {
private String jdbcUrl;
private String user;
private String password;
private String driverClass;
private int maxIdle = 40;
private int maxActive = 50;
private int maxWait = 5000;
private String validationQuery = "SELECT 1 FROM DUAL";
private int threadCount = 1;
private int loopCount = 5;
final int LOOP_COUNT = 1000 * 1;
private boolean testOnBorrow;
private boolean preparedStatementCache = true;
private int preparedStatementCacheSize = 50;
private String properties = "defaultRowPrefetch=50";
private String SQL;
protected void setUp() throws Exception {
// jdbcUrl = "jdbc:oracle:thin:@a.b.c.d:1521:ocnauto";
// user = "alibaba";
// password = "ccbuauto";
// SQL = "SELECT * FROM WP_ORDERS WHERE ID = ?";
jdbcUrl = "jdbc:oracle:thin:@a.b.c.d:1521:ointest3";
user = "alibaba";
password = "deYcR7facWSJtCuDpm2r";
SQL = "SELECT * FROM AV_INFO WHERE ID = ?";
driverClass = "oracle.jdbc.driver.OracleDriver";
}
public void test_druid() throws Exception {
DruidDataSource dataSource = new DruidDataSource();
dataSource.setMaxActive(maxActive);
dataSource.setMaxIdle(maxIdle);
dataSource.setMaxWait(maxWait);
dataSource.setPoolPreparedStatements(preparedStatementCache);
dataSource.setMaxOpenPreparedStatements(preparedStatementCacheSize);
dataSource.setDriverClassName(driverClass);
dataSource.setUrl(jdbcUrl);
dataSource.setUsername(user);
dataSource.setPassword(password);
dataSource.setValidationQuery(validationQuery);
dataSource.setTestOnBorrow(testOnBorrow);
dataSource.setConnectionProperties(properties);
dataSource.setUseOracleImplicitCache(true);
dataSource.init();
// printAV_INFO(dataSource);
// printTables(dataSource);
// printWP_ORDERS(dataSource);
for (int i = 0; i < loopCount; ++i) {
p0(dataSource, "druid", threadCount);
}
System.out.println();
}
public void test_dbcp() throws Exception {
final BasicDataSource dataSource = new BasicDataSource();
dataSource.setMaxActive(maxActive);
dataSource.setMaxIdle(maxIdle);
dataSource.setMaxWait(maxWait);
dataSource.setPoolPreparedStatements(preparedStatementCache);
dataSource.setMaxOpenPreparedStatements(preparedStatementCacheSize);
dataSource.setDriverClassName(driverClass);
dataSource.setUrl(jdbcUrl);
dataSource.setUsername(user);
dataSource.setPassword(password);
dataSource.setValidationQuery(validationQuery);
dataSource.setTestOnBorrow(testOnBorrow);
dataSource.setConnectionProperties(properties);
// printAV_INFO(dataSource);
for (int i = 0; i < loopCount; ++i) {
p0(dataSource, "dbcp", threadCount);
}
System.out.println();
}
private void printWP_ORDERS(DruidDataSource dataSource) throws SQLException {
Connection conn = dataSource.getConnection();
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery("SELECT * FROM WP_ORDERS");
JdbcUtils.printResultSet(rs);
rs.close();
stmt.close();
conn.close();
}
private void printAV_INFO(DataSource dataSource) throws SQLException {
String sql = "SELECT DISTINCT ID FROM AV_INFO WHERE ROWNUM <= 10";
Connection conn = dataSource.getConnection();
Statement stmt = conn.createStatement();
ResultSet rs = stmt.executeQuery(sql);
JdbcUtils.printResultSet(rs);
rs.close();
stmt.close();
conn.close();
}
protected void printTables(DruidDataSource dataSource) throws SQLException {
Connection conn = dataSource.getConnection();
ResultSet rs = conn.getMetaData().getTables(null, "ALIBABA", null, new String[]{"TABLE"});
JdbcUtils.printResultSet(rs);
rs.close();
conn.close();
}
private void p0(final DataSource dataSource, String name, int threadCount) throws Exception {
final CountDownLatch startLatch = new CountDownLatch(1);
final CountDownLatch endLatch = new CountDownLatch(threadCount);
for (int i = 0; i < threadCount; ++i) {
Thread thread = new Thread() {
public void run() {
try {
startLatch.await();
for (int i = 0; i < LOOP_COUNT; ++i) {
Connection conn = dataSource.getConnection();
int mod = i % 500;
String sql = SQL; // + " AND ROWNUM <= " + (mod + 1);
PreparedStatement stmt = conn.prepareStatement(sql);
stmt.setInt(1, 61);
ResultSet rs = stmt.executeQuery();
int rowCount = 0;
while (rs.next()) {
rowCount++;
}
// Assert.isTrue(!rs.isClosed());
rs.close();
// Assert.isTrue(!stmt.isClosed());
stmt.close();
Assert.isTrue(stmt.isClosed());
conn.close();
Assert.isTrue(conn.isClosed());
}
} catch (Exception ex) {
ex.printStackTrace();
}
endLatch.countDown();
}
};
thread.start();
}
long startMillis = System.currentTimeMillis();
long startYGC = TestUtil.getYoungGC();
long startFullGC = TestUtil.getFullGC();
startLatch.countDown();
endLatch.await();
long millis = System.currentTimeMillis() - startMillis;
long ygc = TestUtil.getYoungGC() - startYGC;
long fullGC = TestUtil.getFullGC() - startFullGC;
System.out.println("thread " + threadCount + " " + name + " millis : "
+ NumberFormat.getInstance().format(millis) + ", YGC " + ygc + " FGC " + fullGC);
}
}
| Oracle_Case4 |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/DefaultHttp2WindowUpdateFrame.java | {
"start": 795,
"end": 1610
} | class ____ extends AbstractHttp2StreamFrame implements Http2WindowUpdateFrame {
private final int windowUpdateIncrement;
public DefaultHttp2WindowUpdateFrame(int windowUpdateIncrement) {
this.windowUpdateIncrement = windowUpdateIncrement;
}
@Override
public DefaultHttp2WindowUpdateFrame stream(Http2FrameStream stream) {
super.stream(stream);
return this;
}
@Override
public String name() {
return "WINDOW_UPDATE";
}
@Override
public int windowSizeIncrement() {
return windowUpdateIncrement;
}
@Override
public String toString() {
return StringUtil.simpleClassName(this) +
"(stream=" + stream() + ", windowUpdateIncrement=" + windowUpdateIncrement + ')';
}
}
| DefaultHttp2WindowUpdateFrame |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/bigdecimals/BigDecimals_assertNotEqual_Test.java | {
"start": 1527,
"end": 3666
} | class ____ extends BigDecimalsBaseTest {
private static final BigDecimal ONE_WITH_3_DECIMALS = new BigDecimal("1.000");
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> numbers.assertNotEqual(someInfo(), null, ONE))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_big_decimals_are_not_equal() {
numbers.assertNotEqual(someInfo(), ONE, ONE_WITH_3_DECIMALS);
}
@Test
void should_fail_if_big_decimals_are_equal() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> numbers.assertNotEqual(info, ONE, ONE));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldNotBeEqual(ONE, ONE));
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> numbersWithComparatorComparisonStrategy.assertNotEqual(someInfo(),
null,
ONE))
.withMessage(actualIsNull());
}
@Test
void should_pass_if_big_decimals_are_not_equal_according_to_custom_comparison_strategy() {
numbersWithComparatorComparisonStrategy.assertNotEqual(someInfo(), TEN, ONE);
}
@Test
void should_fail_if_big_decimals_are_equal_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Throwable error = catchThrowable(() -> numbersWithComparatorComparisonStrategy.assertNotEqual(info, ONE_WITH_3_DECIMALS,
ONE));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldNotBeEqual(ONE_WITH_3_DECIMALS, ONE, comparatorComparisonStrategy));
}
}
| BigDecimals_assertNotEqual_Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/watcher/FileWatcher.java | {
"start": 4352,
"end": 14463
} | class ____ extends Observer {
private long length;
private long lastModified;
private Observer[] children;
private byte[] digest;
FileObserver(Path path) {
super(path);
}
public void checkAndNotify() throws IOException {
boolean prevExists = exists;
boolean prevIsDirectory = isDirectory;
long prevLength = length;
long prevLastModified = lastModified;
byte[] prevDigest = digest;
exists = fileExists(path);
// TODO we might use the new NIO2 API to get real notification?
if (exists) {
BasicFileAttributes attributes = readAttributes(path);
isDirectory = attributes.isDirectory();
if (isDirectory) {
length = 0;
lastModified = 0;
} else {
length = attributes.size();
lastModified = attributes.lastModifiedTime().toMillis();
}
} else {
isDirectory = false;
length = 0;
lastModified = 0;
}
// Perform notifications and update children for the current file
if (prevExists) {
if (exists) {
if (isDirectory) {
if (prevIsDirectory) {
// Remained a directory
updateChildren();
} else {
// File replaced by directory
onFileDeleted();
onDirectoryCreated(false);
}
} else {
if (prevIsDirectory) {
// Directory replaced by file
onDirectoryDeleted();
onFileCreated(false);
} else {
// Remained file
if (prevLastModified != lastModified || prevLength != length) {
if (checkFileContents) {
digest = calculateDigest();
if (digest == null || Arrays.equals(prevDigest, digest) == false) {
onFileChanged();
}
} else {
onFileChanged();
}
}
}
}
} else {
// Deleted
if (prevIsDirectory) {
onDirectoryDeleted();
} else {
onFileDeleted();
}
}
} else {
// Created
if (exists) {
if (isDirectory) {
onDirectoryCreated(false);
} else {
onFileCreated(false);
}
}
}
}
private byte[] calculateDigest() {
try (var in = newInputStream(path)) {
return MessageDigests.digest(in, MessageDigests.md5());
} catch (IOException e) {
logger.warn(
"failed to read file [{}] while checking for file changes [{}], will assuming file has been modified",
path,
e.toString()
);
return null;
}
}
private void init(boolean initial) throws IOException {
exists = fileExists(path);
if (exists) {
BasicFileAttributes attributes = readAttributes(path);
isDirectory = attributes.isDirectory();
if (isDirectory) {
onDirectoryCreated(initial);
} else {
length = attributes.size();
lastModified = attributes.lastModifiedTime().toMillis();
if (checkFileContents) {
digest = calculateDigest();
}
onFileCreated(initial);
}
}
}
private Observer createChild(Path file, boolean initial) throws IOException {
try {
FileObserver child = new FileObserver(file);
child.init(initial);
return child;
} catch (SecurityException e) {
// don't have permissions, use a placeholder
logger.debug(() -> Strings.format("Don't have permissions to watch path [%s]", file), e);
return new DeniedObserver(file);
}
}
private Path[] listFiles() throws IOException {
try (var dirs = FileWatcher.this.listFiles(path)) {
return StreamSupport.stream(dirs.spliterator(), false).sorted().toArray(Path[]::new);
}
}
private Observer[] listChildren(boolean initial) throws IOException {
Path[] files = listFiles();
if (CollectionUtils.isEmpty(files) == false) {
Observer[] childObservers = new Observer[files.length];
for (int i = 0; i < files.length; i++) {
childObservers[i] = createChild(files[i], initial);
}
return childObservers;
} else {
return EMPTY_DIRECTORY;
}
}
private void updateChildren() throws IOException {
Path[] files = listFiles();
if (CollectionUtils.isEmpty(files) == false) {
Observer[] newChildren = new Observer[files.length];
int child = 0;
int file = 0;
while (file < files.length || child < children.length) {
int compare;
if (file >= files.length) {
compare = -1;
} else if (child >= children.length) {
compare = 1;
} else {
compare = children[child].path.compareTo(files[file]);
}
if (compare == 0) {
// Same file copy it and update
children[child].checkAndNotify();
newChildren[file] = children[child];
file++;
child++;
} else {
if (compare > 0) {
// This child doesn't appear in the old list - init it
newChildren[file] = createChild(files[file], false);
file++;
} else {
// The child from the old list is missing in the new list
// Delete it
deleteChild(child);
child++;
}
}
}
children = newChildren;
} else {
// No files - delete all children
for (int child = 0; child < children.length; child++) {
deleteChild(child);
}
children = EMPTY_DIRECTORY;
}
}
private void deleteChild(int child) {
if (children[child].exists) {
if (children[child].isDirectory) {
children[child].onDirectoryDeleted();
} else {
children[child].onFileDeleted();
}
}
}
private void onFileCreated(boolean initial) {
for (FileChangesListener listener : listeners()) {
try {
if (initial) {
listener.onFileInit(path);
} else {
listener.onFileCreated(path);
}
} catch (Exception e) {
logger.warn("cannot notify file changes listener", e);
}
}
}
void onFileDeleted() {
for (FileChangesListener listener : listeners()) {
try {
listener.onFileDeleted(path);
} catch (Exception e) {
logger.warn("cannot notify file changes listener", e);
}
}
}
private void onFileChanged() {
for (FileChangesListener listener : listeners()) {
try {
listener.onFileChanged(path);
} catch (Exception e) {
logger.warn("cannot notify file changes listener", e);
}
}
}
private void onDirectoryCreated(boolean initial) throws IOException {
for (FileChangesListener listener : listeners()) {
try {
if (initial) {
listener.onDirectoryInit(path);
} else {
listener.onDirectoryCreated(path);
}
} catch (Exception e) {
logger.warn("cannot notify file changes listener", e);
}
}
children = listChildren(initial);
}
void onDirectoryDeleted() {
// First delete all children
for (int child = 0; child < children.length; child++) {
deleteChild(child);
}
for (FileChangesListener listener : listeners()) {
try {
listener.onDirectoryDeleted(path);
} catch (Exception e) {
logger.warn("cannot notify file changes listener", e);
}
}
}
}
}
| FileObserver |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java | {
"start": 7391,
"end": 7930
} | enum ____ {
NONE((byte) 0), // No options
OVERWRITE((byte) 1), // Overwrite the rename destination
TO_TRASH ((byte) 2); // Rename to trash
private final byte code;
private Rename(byte code) {
this.code = code;
}
public static Rename valueOf(byte code) {
return code < 0 || code >= values().length ? null : values()[code];
}
public byte value() {
return code;
}
}
/**
* This is used in FileSystem and FileContext to specify checksum options.
*/
public static | Rename |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/split/JobSplit.java | {
"start": 2031,
"end": 2538
} | class ____ {
static final int META_SPLIT_VERSION = 1;
static final byte[] META_SPLIT_FILE_HEADER = "META-SPL".getBytes(StandardCharsets.UTF_8);
public static final TaskSplitMetaInfo EMPTY_TASK_SPLIT =
new TaskSplitMetaInfo();
/**
* This represents the meta information about the task split.
* The main fields are
* - start offset in actual split
* - data length that will be processed in this split
* - hosts on which this split is local
*/
public static | JobSplit |
java | quarkusio__quarkus | independent-projects/qute/core/src/main/java/io/quarkus/qute/LoopSectionHelper.java | {
"start": 15955,
"end": 16161
} | enum ____ implements ErrorCode {
ITERABLE_NOT_FOUND,
NOT_AN_ITERABLE,
;
@Override
public String getName() {
return "LOOP_" + name();
}
}
}
| Code |
java | spring-projects__spring-security | saml2/saml2-service-provider/src/main/java/org/springframework/security/saml2/provider/service/authentication/logout/Saml2LogoutResponse.java | {
"start": 1396,
"end": 1526
} | class ____ represents a signed and serialized SAML 2.0 Logout Response
*
* @author Josh Cummings
* @since 5.6
*/
public final | that |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/processingstep/LazyClassKeyProcessingStep.java | {
"start": 1884,
"end": 3078
} | class ____ extends TypeCheckingProcessingStep<XElement> {
private static final String PROGUARD_KEEP_RULE = "-keep,allowobfuscation,allowshrinking class ";
// Note: We aggregate @LazyClassKey usages across processing rounds, so we use ClassName instead
// of XElement as the map key to avoid storing XElement instances across processing rounds.
private final SetMultimap<XClassName, XClassName> lazyMapKeysByModule =
LinkedHashMultimap.create();
private final LazyMapKeyProxyGenerator lazyMapKeyProxyGenerator;
@Inject
LazyClassKeyProcessingStep(LazyMapKeyProxyGenerator lazyMapKeyProxyGenerator) {
this.lazyMapKeyProxyGenerator = lazyMapKeyProxyGenerator;
}
@Override
public ImmutableSet<XClassName> annotationClassNames() {
return ImmutableSet.of(XTypeNames.LAZY_CLASS_KEY);
}
@Override
protected void process(XElement element, ImmutableSet<XClassName> annotations) {
XClassName lazyClassKey =
element
.getAnnotation(XTypeNames.LAZY_CLASS_KEY)
.getAsType("value")
.getTypeElement()
.asClassName();
// No need to fail, since we want to support customized usage of | LazyClassKeyProcessingStep |
java | elastic__elasticsearch | modules/dot-prefix-validation/src/main/java/org/elasticsearch/validation/IndexTemplateDotValidator.java | {
"start": 812,
"end": 1395
} | class ____ extends DotPrefixValidator<TransportPutComposableIndexTemplateAction.Request> {
public IndexTemplateDotValidator(ThreadContext threadContext, ClusterService clusterService) {
super(threadContext, clusterService);
}
@Override
protected Set<String> getIndicesFromRequest(TransportPutComposableIndexTemplateAction.Request request) {
return new HashSet<>(Arrays.asList(request.indices()));
}
@Override
public String actionName() {
return TransportPutComposableIndexTemplateAction.TYPE.name();
}
}
| IndexTemplateDotValidator |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/collectionbasictype/ElementCollectionWithConverterTest.java | {
"start": 1651,
"end": 1809
} | class ____ the key
assertMapAttributesInMetaModelFor(
Item.class,
"attributes",
Integer.class,
String.class,
"Generic types or attribute | for |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/IntervalJoinITCase.java | {
"start": 21921,
"end": 22186
} | class ____
extends AscendingRecordTimestampsWatermarkStrategy<Tuple2<String, Integer>> {
public AscendingTuple2TimestampExtractor() {
super((e) -> Long.valueOf(e.f1));
}
}
private static | AscendingTuple2TimestampExtractor |
java | apache__kafka | streams/src/test/java/org/apache/kafka/streams/processor/internals/ProcessorTopologyTest.java | {
"start": 62345,
"end": 62896
} | class ____ implements Processor<String, String, String, String> {
private ProcessorContext<String, String> context;
@Override
public void init(final ProcessorContext<String, String> context) {
this.context = context;
}
@Override
public void process(final Record<String, String> record) {
context.forward(record);
}
}
/**
* A processor that simply forwards all messages to all children with advanced timestamps.
*/
protected static | ForwardingProcessor |
java | google__guice | core/test/com/google/inject/Asserts.java | {
"start": 1901,
"end": 7530
} | class ____ {
private Asserts() {}
/**
* Returns the String that would appear in an error message for this chain of classes as modules.
*/
public static String asModuleChain(Class<?>... classes) {
return Joiner.on(" -> ")
.appendTo(
new StringBuilder(" (via modules: "),
Iterables.transform(ImmutableList.copyOf(classes), Class::getName))
.append(")")
.toString();
}
/**
* Returns the source file appears in error messages based on {@link
* #getIncludeStackTraceOption()} value.
*/
public static String getDeclaringSourcePart(Class<?> clazz) {
if (getIncludeStackTraceOption() == IncludeStackTraceOption.OFF) {
return ".configure(Unknown Source";
}
return ".configure(" + clazz.getSimpleName() + ".java:";
}
/**
* Returns true if {@link #getIncludeStackTraceOption()} returns {@link
* IncludeStackTraceOption#OFF}.
*/
public static boolean isIncludeStackTraceOff() {
return getIncludeStackTraceOption() == IncludeStackTraceOption.OFF;
}
/**
* Fails unless {@code expected.equals(actual)}, {@code actual.equals(expected)} and their hash
* codes are equal. This is useful for testing the equals method itself.
*/
public static void assertEqualsBothWays(Object expected, Object actual) {
assertNotNull(expected);
assertNotNull(actual);
assertEquals("expected.equals(actual)", actual, expected);
assertEquals("actual.equals(expected)", expected, actual);
assertEquals("hashCode", expected.hashCode(), actual.hashCode());
}
/** Fails unless {@code text} includes all {@code substrings}, in order, no duplicates */
public static void assertContains(String text, String... substrings) {
assertContains(text, false, substrings);
}
/**
* Fails unless {@code text} includes all {@code substrings}, in order, and optionally {@code
* allowDuplicates}.
*/
public static void assertContains(String text, boolean allowDuplicates, String... substrings) {
/*if[NO_AOP]
// when we strip out bytecode manipulation, we lose the ability to generate some source lines.
if (text.contains("(Unknown Source)")) {
return;
}
end[NO_AOP]*/
int startingFrom = 0;
for (String substring : substrings) {
int index = text.indexOf(substring, startingFrom);
assertTrue(
String.format("Expected \"%s\" to contain substring \"%s\"", text, substring),
index >= startingFrom);
startingFrom = index + substring.length();
}
if (!allowDuplicates) {
String lastSubstring = substrings[substrings.length - 1];
assertTrue(
String.format(
"Expected \"%s\" to contain substring \"%s\" only once),", text, lastSubstring),
text.indexOf(lastSubstring, startingFrom) == -1);
}
}
/** Fails unless {@code object} doesn't equal itself when reserialized. */
public static void assertEqualWhenReserialized(Object object) throws IOException {
Object reserialized = reserialize(object);
assertEquals(object, reserialized);
assertEquals(object.hashCode(), reserialized.hashCode());
}
/** Fails unless {@code object} has the same toString value when reserialized. */
public static void assertSimilarWhenReserialized(Object object) throws IOException {
Object reserialized = reserialize(object);
assertThat(reserialized.toString()).isEqualTo(object.toString());
}
public static <E> E reserialize(E original) throws IOException {
try {
ByteArrayOutputStream out = new ByteArrayOutputStream();
new ObjectOutputStream(out).writeObject(original);
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
@SuppressWarnings("unchecked") // the reserialized type is assignable
E reserialized = (E) new ObjectInputStream(in).readObject();
return reserialized;
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
public static void assertNotSerializable(Object object) throws IOException {
try {
reserialize(object);
Assert.fail();
} catch (NotSerializableException expected) {
}
}
public static void awaitFullGc() {
// GcFinalization *should* do it, but doesn't work well in practice...
// so we put a second latch and wait for a ReferenceQueue to tell us.
ReferenceQueue<Object> queue = new ReferenceQueue<>();
WeakReference<Object> ref = new WeakReference<>(new Object(), queue);
GcFinalization.awaitFullGc();
try {
assertSame("queue didn't return ref in time", ref, queue.remove(5000));
} catch (IllegalArgumentException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
public static void awaitClear(WeakReference<?> ref) {
// GcFinalization *should* do it, but doesn't work well in practice...
// so we put a second latch and wait for a ReferenceQueue to tell us.
Object data = ref.get();
ReferenceQueue<Object> queue = null;
WeakReference<Object> extraRef = null;
if (data != null) {
queue = new ReferenceQueue<>();
extraRef = new WeakReference<>(data, queue);
data = null;
}
GcFinalization.awaitClear(ref);
if (queue != null) {
try {
assertSame("queue didn't return ref in time", extraRef, queue.remove(5000));
} catch (IllegalArgumentException e) {
throw new RuntimeException(e);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
/** Returns the URLs in the system | Asserts |
java | spring-projects__spring-framework | spring-context/src/main/java/org/springframework/scheduling/support/ScheduledTaskObservationConvention.java | {
"start": 990,
"end": 1243
} | interface ____ extends ObservationConvention<ScheduledTaskObservationContext> {
@Override
default boolean supportsContext(Observation.Context context) {
return context instanceof ScheduledTaskObservationContext;
}
}
| ScheduledTaskObservationConvention |
java | google__guava | android/guava/src/com/google/common/util/concurrent/AbstractExecutionThreadService.java | {
"start": 1309,
"end": 1544
} | class ____ a single thread to execute the service; consider {@link AbstractService}
* if you would like to manage any threading manually.
*
* @author Jesse Wilson
* @since 1.0
*/
@GwtIncompatible
@J2ktIncompatible
public abstract | uses |
java | elastic__elasticsearch | x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/CanMatchIT.java | {
"start": 1835,
"end": 18898
} | class ____ extends AbstractEsqlIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return CollectionUtils.appendToCopy(super.nodePlugins(), MockTransportService.TestPlugin.class);
}
/**
* Make sure that we don't send data-node requests to the target shards which won't match the query
*/
public void testCanMatch() {
assertAcked(
client().admin()
.indices()
.prepareCreate("events_2022")
.setMapping("@timestamp", "type=date,format=yyyy-MM-dd", "uid", "type=keyword")
);
client().prepareBulk("events_2022")
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(new IndexRequest().source("@timestamp", "2022-02-15", "uid", "u1"))
.add(new IndexRequest().source("@timestamp", "2022-05-02", "uid", "u1"))
.add(new IndexRequest().source("@timestamp", "2022-12-15", "uid", "u1"))
.get();
assertAcked(client().admin().indices().prepareCreate("events_2023").setMapping("@timestamp", "type=date", "uid", "type=keyword"));
client().prepareBulk("events_2023")
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(new IndexRequest().source("@timestamp", "2023-01-15", "uid", "u2"))
.add(new IndexRequest().source("@timestamp", "2023-02-01", "uid", "u2"))
.add(new IndexRequest().source("@timestamp", "2023-02-11", "uid", "u1"))
.add(new IndexRequest().source("@timestamp", "2023-03-25", "uid", "u1"))
.get();
try {
Set<String> queriedIndices = ConcurrentCollections.newConcurrentSet();
for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
as(transportService, MockTransportService.class).addRequestHandlingBehavior(
ComputeService.DATA_ACTION_NAME,
(handler, request, channel, task) -> {
DataNodeRequest dataNodeRequest = (DataNodeRequest) request;
for (DataNodeRequest.Shard shard : dataNodeRequest.shards()) {
queriedIndices.add(shard.shardId().getIndexName());
}
handler.messageReceived(request, channel, task);
}
);
}
try (
EsqlQueryResponse resp = run(
syncEsqlQueryRequest("from events_*").pragmas(randomPragmas())
.filter(new RangeQueryBuilder("@timestamp").gte("2023-01-01"))
)
) {
assertThat(getValuesList(resp), hasSize(4));
assertThat(queriedIndices, equalTo(Set.of("events_2023")));
queriedIndices.clear();
}
try (EsqlQueryResponse resp = run("from events_* | WHERE @timestamp >= date_parse(\"yyyy-MM-dd\", \"2023-01-01\")")) {
assertThat(getValuesList(resp), hasSize(4));
assertThat(queriedIndices, equalTo(Set.of("events_2023")));
queriedIndices.clear();
}
try (
EsqlQueryResponse resp = run(
syncEsqlQueryRequest("from events_*").pragmas(randomPragmas())
.filter(new RangeQueryBuilder("@timestamp").lt("2023-01-01"))
)
) {
assertThat(getValuesList(resp), hasSize(3));
assertThat(queriedIndices, equalTo(Set.of("events_2022")));
queriedIndices.clear();
}
try (EsqlQueryResponse resp = run("from events_* | WHERE @timestamp < date_parse(\"yyyy-MM-dd\", \"2023-01-01\")")) {
assertThat(getValuesList(resp), hasSize(3));
assertThat(queriedIndices, equalTo(Set.of("events_2022")));
queriedIndices.clear();
}
try (
EsqlQueryResponse resp = run(
syncEsqlQueryRequest("from events_*").pragmas(randomPragmas())
.filter(new RangeQueryBuilder("@timestamp").gt("2022-01-01").lt("2023-12-31"))
)
) {
assertThat(getValuesList(resp), hasSize(7));
assertThat(queriedIndices, equalTo(Set.of("events_2022", "events_2023")));
queriedIndices.clear();
}
try (
EsqlQueryResponse resp = run(
"from events_* "
+ "| WHERE @timestamp > date_parse(\"yyyy-MM-dd\", \"2022-01-01\") "
+ "AND @timestamp < date_parse(\"yyyy-MM-dd\", \"2023-12-31\")"
)
) {
assertThat(getValuesList(resp), hasSize(7));
assertThat(queriedIndices, equalTo(Set.of("events_2022", "events_2023")));
queriedIndices.clear();
}
try (
EsqlQueryResponse resp = run(
syncEsqlQueryRequest("from events_*").pragmas(randomPragmas())
.filter(new RangeQueryBuilder("@timestamp").gt("2021-01-01").lt("2021-12-31"))
)
) {
assertThat(getValuesList(resp), hasSize(0));
assertThat(queriedIndices, empty());
queriedIndices.clear();
}
try (
EsqlQueryResponse resp = run(
"from events_* "
+ "| WHERE @timestamp > date_parse(\"yyyy-MM-dd\", \"2023-01-01\") "
+ "AND @timestamp < date_parse(\"yyyy-MM-dd\", \"2023-01-01\")"
)
) {
assertThat(getValuesList(resp), hasSize(0));
assertThat(queriedIndices, empty());
queriedIndices.clear();
}
} finally {
for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
as(transportService, MockTransportService.class).clearAllRules();
}
}
}
public void testAliasFilters() {
    // One backing index with two filtered aliases: "engineers" matches dept=engineering
    // (4 docs) and "sales" matches dept=sales (2 docs). The test checks that alias
    // filters combine correctly with request-level pushed-down filters.
    assertAcked(
        client().admin()
            .indices()
            .prepareCreate("employees")
            .setMapping("emp_no", "type=long", "dept", "type=keyword", "hired", "type=date,format=yyyy-MM-dd", "salary", "type=double")
    );
    // emp_no, dept, hired, salary — same six documents as before, indexed via one bulk request.
    Object[][] employees = {
        { 101, "engineering", "2012-02-05", 20 },
        { 102, "sales", "2012-03-15", 25 },
        { 103, "engineering", "2012-03-27", 22 },
        { 104, "engineering", "2012-04-20", 39.6 },
        { 105, "engineering", "2012-06-30", 25 },
        { 106, "sales", "2012-08-09", 30.1 } };
    BulkRequestBuilder bulk = client().prepareBulk("employees").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
    for (Object[] emp : employees) {
        bulk.add(new IndexRequest().source("emp_no", emp[0], "dept", emp[1], "hired", emp[2], "salary", emp[3]));
    }
    bulk.get();
    assertAcked(
        client().admin()
            .indices()
            .prepareAliases(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT)
            .addAlias("employees", "engineers", new MatchQueryBuilder("dept", "engineering"))
            .addAlias("employees", "sales", new MatchQueryBuilder("dept", "sales"))
    );
    // Queries against the concrete index: all six docs visible.
    assertSingleStatsRow("from employees | stats count(emp_no)", List.of(6L));
    assertSingleStatsRow("from employees | stats avg(salary)", List.of(26.95d));
    assertSingleStatsRowHiredBefore("from employees | stats count(emp_no)", "2012-04-30", List.of(4L));
    assertSingleStatsRowHiredBefore("from employees | stats avg(salary)", "2012-04-30", List.of(26.65d));
    // Pattern e* matches both the employees index and the engineers alias, which
    // resolve to the same backing index, so counts are not doubled.
    assertSingleStatsRow("from e* | stats count(emp_no)", List.of(6L));
    // NOTE(review): this case queries "employees" directly even though the surrounding
    // cases use e*; the expected result is the same either way.
    assertSingleStatsRow("from employees | stats avg(salary)", List.of(26.95d));
    assertSingleStatsRowHiredBefore("from e* | stats count(emp_no)", "2012-04-30", List.of(4L));
    assertSingleStatsRowHiredBefore("from e* | stats avg(salary)", "2012-04-30", List.of(26.65d));
    // Engineers alias only: the alias filter restricts results to dept=engineering.
    assertSingleStatsRow("from engineer* | stats count(emp_no)", List.of(4L));
    assertSingleStatsRow("from engineer* | stats avg(salary)", List.of(26.65d));
    assertSingleStatsRowHiredBefore("from engineer* | stats count(emp_no)", "2012-04-30", List.of(3L));
    assertSingleStatsRowHiredBefore("from engineer* | stats avg(salary)", "2012-04-30", List.of(27.2d));
    // Sales alias only.
    assertSingleStatsRow("from sales | stats count(emp_no)", List.of(2L));
    assertSingleStatsRow("from sales | stats avg(salary)", List.of(27.55d));
    assertSingleStatsRowHiredBefore("from sales | stats count(emp_no)", "2012-04-30", List.of(1L));
    assertSingleStatsRowHiredBefore("from sales | stats avg(salary)", "2012-04-30", List.of(25.0d));
}

// Runs the given ES|QL query and asserts that its single result row equals expectedRow.
private void assertSingleStatsRow(String query, List<?> expectedRow) {
    try (var resp = run(query)) {
        assertThat(getValuesList(resp).get(0), equalTo(expectedRow));
    }
}

// Same as assertSingleStatsRow, but attaches a request-level filter "hired < beforeDate"
// (with random pragmas) so the filter is pushed down together with any alias filter.
private void assertSingleStatsRowHiredBefore(String query, String beforeDate, List<?> expectedRow) {
    try (
        var resp = run(
            syncEsqlQueryRequest(query).pragmas(randomPragmas()).filter(new RangeQueryBuilder("hired").lt(beforeDate))
        )
    ) {
        assertThat(getValuesList(resp).get(0), equalTo(expectedRow));
    }
}
public void testFailOnUnavailableShards() throws Exception {
// Two zero-replica indices pinned to disjoint nodes: "events" is allocated away
// from logsOnlyNode, "logs" only onto logsOnlyNode. Stopping logsOnlyNode then
// leaves "logs" with no active shard copy while "events" remains available,
// letting us exercise both fail-fast and allow_partial_results behavior.
internalCluster().ensureAtLeastNumDataNodes(2);
String logsOnlyNode = internalCluster().startDataOnlyNode();
assertAcked(
client().admin()
.indices()
.prepareCreate("events")
.setSettings(
Settings.builder()
// no replicas: losing the assigned node means losing the shard entirely
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
.put("index.routing.allocation.exclude._name", logsOnlyNode)
)
.setMapping("timestamp", "type=long", "message", "type=keyword")
);
// three docs in "events"
client().prepareBulk("events")
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(new IndexRequest().source("timestamp", 1, "message", "a"))
.add(new IndexRequest().source("timestamp", 2, "message", "b"))
.add(new IndexRequest().source("timestamp", 3, "message", "c"))
.get();
assertAcked(
client().admin()
.indices()
.prepareCreate("logs")
.setSettings(
Settings.builder()
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
// "logs" lives only on the node we are going to stop
.put("index.routing.allocation.include._name", logsOnlyNode)
)
.setMapping("timestamp", "type=long", "message", "type=keyword")
);
// two docs in "logs"
client().prepareBulk("logs")
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.add(new IndexRequest().source("timestamp", 10, "message", "aa"))
.add(new IndexRequest().source("timestamp", 11, "message", "bb"))
.get();
// when all shards available
try (EsqlQueryResponse resp = run("from events,logs | KEEP timestamp,message")) {
assertThat(getValuesList(resp), hasSize(5));
}
internalCluster().stopNode(logsOnlyNode);
ensureClusterSizeConsistency();
// when one shard is unavailable
// Default behavior: the query fails whether "logs" is named explicitly or
// matched by a wildcard.
expectThrows(
Exception.class,
containsString("index [logs] has no active shard copy"),
() -> run("from events,logs | KEEP timestamp,message")
);
expectThrows(
Exception.class,
containsString("index [logs] has no active shard copy"),
() -> run("from * | KEEP timestamp,message")
);
// With allow_partial_results: the query succeeds, reports the shard failure on
// the local cluster, and returns only the 3 rows from the still-available "events".
try (EsqlQueryResponse resp = run(syncEsqlQueryRequest("from events,logs | KEEP timestamp,message").allowPartialResults(true))) {
assertTrue(resp.isPartial());
EsqlExecutionInfo.Cluster local = resp.getExecutionInfo().getCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
assertThat(local.getFailures(), hasSize(1));
assertThat(local.getFailures().get(0).reason(), containsString("index [logs] has no active shard copy"));
assertThat(getValuesList(resp), hasSize(3));
}
}
public void testSkipOnIndexName() {
    // Creates several events-N indices with a random number of docs each, then checks
    // that a query filtering on the _index metadata column only sends data-node
    // requests for the single matching index — the other indices are skipped.
    internalCluster().ensureAtLeastNumDataNodes(2);
    int indexCount = between(2, 10);
    Map<String, Integer> docCounts = new HashMap<>();
    for (int i = 0; i < indexCount; i++) {
        String indexName = "events-" + i;
        assertAcked(client().admin().indices().prepareCreate(indexName).setMapping("timestamp", "type=long", "message", "type=keyword"));
        BulkRequestBuilder bulk = client().prepareBulk(indexName).setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        int docCount = between(1, 5);
        for (int d = 0; d < docCount; d++) {
            // timestamps start at 2 and increase by one per doc, paired with messages v-0, v-1, ...
            bulk.add(new IndexRequest().source("timestamp", (long) (d + 2), "message", "v-" + d));
        }
        bulk.get();
        docCounts.put(indexName, docCount);
    }
    // Intercept every data-node request and record which indices its shards belong to.
    Set<String> touchedIndices = ConcurrentCollections.newConcurrentSet();
    for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
        as(transportService, MockTransportService.class).addRequestHandlingBehavior(
            ComputeService.DATA_ACTION_NAME,
            (handler, request, channel, task) -> {
                for (DataNodeRequest.Shard shard : ((DataNodeRequest) request).shards()) {
                    touchedIndices.add(shard.shardId().getIndexName());
                }
                handler.messageReceived(request, channel, task);
            }
        );
    }
    try {
        for (int i = 0; i < indexCount; i++) {
            touchedIndices.clear();
            String indexName = "events-" + i;
            try (EsqlQueryResponse resp = run("from events* METADATA _index | WHERE _index == \"" + indexName + "\" | KEEP timestamp")) {
                assertThat(getValuesList(resp), hasSize(docCounts.get(indexName)));
            }
            // Only the index named in the WHERE clause should have been queried.
            assertThat(touchedIndices, equalTo(Set.of(indexName)));
        }
    } finally {
        // Always restore default transport behavior so later tests are unaffected.
        for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
            as(transportService, MockTransportService.class).clearAllRules();
        }
    }
}
}
| CanMatchIT |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/stream/state/JoinRecordStateViews.java | {
"start": 6165,
"end": 8824
} | class ____ implements JoinRecordStateView {
private final MapState<RowData, Integer> recordState;
private InputSideHasNoUniqueKey(
RuntimeContext ctx,
String stateName,
InternalTypeInfo<RowData> recordType,
StateTtlConfig ttlConfig) {
MapStateDescriptor<RowData, Integer> recordStateDesc =
new MapStateDescriptor<>(stateName, recordType, Types.INT);
if (ttlConfig.isEnabled()) {
recordStateDesc.enableTimeToLive(ttlConfig);
}
this.recordState = ctx.getMapState(recordStateDesc);
}
@Override
public void addRecord(RowData record) throws Exception {
Integer cnt = recordState.get(record);
if (cnt != null) {
cnt += 1;
} else {
cnt = 1;
}
recordState.put(record, cnt);
}
@Override
public void retractRecord(RowData record) throws Exception {
Integer cnt = recordState.get(record);
if (cnt != null) {
if (cnt > 1) {
recordState.put(record, cnt - 1);
} else {
recordState.remove(record);
}
}
// ignore cnt == null, which means state may be expired
}
@Override
public Iterable<RowData> getRecords() throws Exception {
return new IterableIterator<RowData>() {
private final Iterator<Map.Entry<RowData, Integer>> backingIterable =
recordState.entries().iterator();
private RowData record;
private int remainingTimes = 0;
@Override
public boolean hasNext() {
return backingIterable.hasNext() || remainingTimes > 0;
}
@Override
public RowData next() {
if (remainingTimes > 0) {
checkNotNull(record);
remainingTimes--;
return record;
} else {
Map.Entry<RowData, Integer> entry = backingIterable.next();
record = entry.getKey();
remainingTimes = entry.getValue() - 1;
return record;
}
}
@Override
public Iterator<RowData> iterator() {
return this;
}
};
}
}
}
| InputSideHasNoUniqueKey |
java | alibaba__nacos | config/src/main/java/com/alibaba/nacos/config/server/configuration/ConfigChangeConfigs.java | {
"start": 1344,
"end": 3433
} | class ____ extends Subscriber<ServerConfigChangeEvent> {
private static final Logger LOGGER = LoggerFactory.getLogger(ConfigChangeConfigs.class);
private static final String PREFIX = ConfigChangeConstants.NACOS_CORE_CONFIG_PLUGIN_PREFIX;
private Map<String, Properties> configPluginProperties = new HashMap<>();
public ConfigChangeConfigs() {
NotifyCenter.registerSubscriber(this);
refreshPluginProperties();
}
private void refreshPluginProperties() {
try {
Map<String, Properties> newProperties = new HashMap<>(3);
Properties properties = PropertiesUtil.getPropertiesWithPrefix(EnvUtil.getEnvironment(), PREFIX);
if (properties != null) {
for (String each : properties.stringPropertyNames()) {
int typeIndex = each.indexOf('.');
String type = each.substring(0, typeIndex);
String subKey = each.substring(typeIndex + 1);
newProperties.computeIfAbsent(type, key -> new Properties())
.setProperty(subKey, properties.getProperty(each));
}
}
configPluginProperties = newProperties;
} catch (Exception e) {
LOGGER.warn("[ConfigChangeConfigs]Refresh config plugin properties failed ", e);
}
}
public Properties getPluginProperties(String configPluginType) {
if (!configPluginProperties.containsKey(configPluginType)) {
LOGGER.warn(
"[ConfigChangeConfigs]Can't find config plugin properties for type {}, will use empty properties",
configPluginType);
return new Properties();
}
return configPluginProperties.get(configPluginType);
}
@Override
public void onEvent(ServerConfigChangeEvent event) {
refreshPluginProperties();
}
@Override
public Class<? extends Event> subscribeType() {
return ServerConfigChangeEvent.class;
}
}
| ConfigChangeConfigs |
java | elastic__elasticsearch | x-pack/extras/plugins/microsoft-graph-authz/src/test/java/org/elasticsearch/xpack/security/authz/microsoft/MicrosoftGraphAuthzRealmTests.java | {
"start": 2911,
"end": 21822
} | class ____ extends ESTestCase {
private final Settings globalSettings = Settings.builder().put("path.home", createTempDir()).build();
private final Environment env = TestEnvironment.newEnvironment(globalSettings);
private final ThreadContext threadContext = new ThreadContext(globalSettings);
private final ThreadPool threadPool = new TestThreadPool(getClass().getName());
private final String realmName = randomAlphaOfLengthBetween(4, 10);
private final String roleName = randomAlphaOfLengthBetween(4, 10);
private final String username = randomAlphaOfLengthBetween(4, 10);
private final String name = randomAlphaOfLengthBetween(4, 10);
private final String email = Strings.format("%s@example.com", randomAlphaOfLengthBetween(4, 10));
private final String groupId = randomAlphaOfLengthBetween(4, 10);
private final RealmConfig.RealmIdentifier realmId = new RealmConfig.RealmIdentifier(
MicrosoftGraphAuthzRealmSettings.REALM_TYPE,
realmName
);
private final String clientId = randomAlphaOfLengthBetween(4, 10);
private final String clientSecret = randomAlphaOfLengthBetween(4, 10);
private final String tenantId = randomAlphaOfLengthBetween(4, 10);
private static final AuthenticationToken fakeToken = new AuthenticationToken() {
@Override
public String principal() {
fail("Should never be called");
return null;
}
@Override
public Object credentials() {
fail("Should never be called");
return null;
}
@Override
public void clearCredentials() {
fail("Should never be called");
}
};
@Before
public void setUp() throws Exception {
super.setUp();
final var logger = LogManager.getLogger(MicrosoftGraphAuthzRealm.class);
Loggers.setLevel(logger, Level.TRACE);
}
@After
public void tearDown() throws Exception {
super.tearDown();
terminate(threadPool);
}
public void testLookupUser() {
try (var mockLog = MockLog.capture(MicrosoftGraphAuthzRealm.class)) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"Fetch user properties",
MicrosoftGraphAuthzRealm.class.getName(),
Level.TRACE,
Strings.format("Fetched user with name [%s] and email [%s] from Microsoft Graph", name, email)
)
);
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"Fetch group membership",
MicrosoftGraphAuthzRealm.class.getName(),
Level.TRACE,
Strings.format("Fetched [1] groups from Microsoft Graph: [%s]", groupId)
)
);
final var roleMapper = mockRoleMapper(Set.of(groupId), Set.of(roleName));
final var realmSettings = realmSettings().build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
when(client.getRequestAdapter()).thenReturn(mock(RequestAdapter.class));
final var userRequestBuilder = mockGetUser(client);
when(userRequestBuilder.get(any())).thenReturn(user(name, email));
final var graphGroupRequestBuilder = mockGetGroupMembership(userRequestBuilder);
when(graphGroupRequestBuilder.get(any())).thenReturn(groupMembership(groupId));
final var licenseState = mockLicense(true);
final var realm = new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool);
final var future = new PlainActionFuture<User>();
realm.lookupUser(username, future);
final var user = future.actionGet();
assertThat(user.principal(), equalTo(username));
assertThat(user.fullName(), equalTo(name));
assertThat(user.email(), equalTo(email));
assertThat(user.roles(), arrayContaining(roleName));
mockLog.assertAllExpectationsMatched();
}
}
public void testHandleGetUserPropertiesError() {
final var roleMapper = mockRoleMapper(Set.of(groupId), Set.of(roleName));
final var realmSettings = realmSettings().build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
final var requestAdapter = mock(RequestAdapter.class);
when(client.getRequestAdapter()).thenReturn(requestAdapter);
final var userItemRequestBuilder = mockGetUser(client);
when(userItemRequestBuilder.get(any())).thenThrow(graphError("bad stuff happened"));
final var licenseState = mockLicense(true);
final var realm = new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool);
final var future = new PlainActionFuture<User>();
try (var mockLog = MockLog.capture(MicrosoftGraphAuthzRealm.class)) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"Log exception",
MicrosoftGraphAuthzRealm.class.getName(),
Level.ERROR,
Strings.format("Failed to authorize [%s] with MS Graph realm", username)
)
);
realm.lookupUser(username, future);
final var thrown = assertThrows(ODataError.class, future::actionGet);
assertThat(thrown.getMessage(), equalTo("bad stuff happened"));
mockLog.assertAllExpectationsMatched();
}
}
public void testHandleGetGroupMembershipError() {
final var roleMapper = mockRoleMapper(Set.of(groupId), Set.of(roleName));
final var realmSettings = realmSettings().build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
when(client.getRequestAdapter()).thenReturn(mock(RequestAdapter.class));
final var userRequestBuilder = mockGetUser(client);
when(userRequestBuilder.get(any())).thenReturn(user(name, email));
final var graphGroupRequestBuilder = mockGetGroupMembership(userRequestBuilder);
when(graphGroupRequestBuilder.get(any())).thenThrow(graphError("bad stuff happened"));
final var licenseState = mockLicense(true);
final var realm = new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool);
final var future = new PlainActionFuture<User>();
try (var mockLog = MockLog.capture(MicrosoftGraphAuthzRealm.class)) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"Log exception",
MicrosoftGraphAuthzRealm.class.getName(),
Level.ERROR,
Strings.format("Failed to authorize [%s] with MS Graph realm", username)
)
);
realm.lookupUser(username, future);
final var thrown = assertThrows(ODataError.class, future::actionGet);
assertThat(thrown.getMessage(), equalTo("bad stuff happened"));
mockLog.assertAllExpectationsMatched();
}
}
public void testGroupMembershipPagination() {
final var groupId2 = randomAlphaOfLengthBetween(4, 10);
final var groupId3 = randomAlphaOfLengthBetween(4, 10);
final var roleMapper = mockRoleMapper(Set.of(groupId, groupId2, groupId3), Set.of(roleName));
final var realmSettings = realmSettings().build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
final var requestAdapter = mock(RequestAdapter.class);
when(client.getRequestAdapter()).thenReturn(requestAdapter);
final var userItemRequestBuilder = mockGetUser(client);
when(userItemRequestBuilder.get(any())).thenReturn(user(name, email));
final var groupMembership1 = groupMembership(groupId);
groupMembership1.setOdataNextLink("http://localhost:12345/page2");
final var groupMembership2 = groupMembership(groupId2);
groupMembership2.setOdataNextLink("http://localhost:12345/page3");
final var groupMembership3 = groupMembership(groupId3);
final var graphGroupRequestBuilder = mockGetGroupMembership(userItemRequestBuilder);
when(graphGroupRequestBuilder.get(any())).thenReturn(groupMembership1);
when(requestAdapter.send(any(), any(), any())).thenReturn(groupMembership2, groupMembership3);
final var licenseState = mockLicense(true);
final var realm = new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool);
final var future = new PlainActionFuture<User>();
realm.lookupUser(username, future);
final var user = future.actionGet();
assertThat(user.principal(), equalTo(username));
assertThat(user.fullName(), equalTo(name));
assertThat(user.email(), equalTo(email));
assertThat(user.roles(), arrayContaining(roleName));
}
public void testLicenseCheck() {
final var roleMapper = mock(UserRoleMapper.class);
final var realmSettings = realmSettings().build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
final var licenseState = mockLicense(false);
final var realm = new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool);
final var future = new PlainActionFuture<User>();
realm.lookupUser(username, future);
final var thrown = assertThrows(ElasticsearchSecurityException.class, future::actionGet);
assertThat(thrown.getMessage(), equalTo("current license is non-compliant for [microsoft_graph]"));
}
public void testClientIdSettingRequired() {
final var roleMapper = mock(UserRoleMapper.class);
final var realmSettings = realmSettings().put(
getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.CLIENT_ID),
randomBoolean() ? "" : null
).build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
final var licenseState = mockLicense(true);
final var thrown = assertThrows(
SettingsException.class,
() -> new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool)
);
assertThat(
thrown.getMessage(),
equalTo(
Strings.format(
"The configuration setting [%s] is required",
getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.CLIENT_ID)
)
)
);
}
public void testClientSecretSettingRequired() {
final var roleMapper = mock(UserRoleMapper.class);
final var secureSettings = new MockSecureSettings();
if (randomBoolean()) {
secureSettings.setString(getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.CLIENT_SECRET), "");
}
final var realmSettings = Settings.builder()
.put(globalSettings)
.put(getFullSettingKey(realmId, RealmSettings.ORDER_SETTING), 0)
.put(getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.CLIENT_ID), clientId)
.put(getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.TENANT_ID), tenantId)
.setSecureSettings(secureSettings)
.build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
final var licenseState = mockLicense(true);
final var thrown = assertThrows(
SettingsException.class,
() -> new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool)
);
assertThat(
thrown.getMessage(),
equalTo(
Strings.format(
"The configuration setting [%s] is required",
getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.CLIENT_SECRET)
)
)
);
}
public void testTenantIdSettingRequired() {
final var roleMapper = mock(UserRoleMapper.class);
final var realmSettings = realmSettings().put(
getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.TENANT_ID),
randomBoolean() ? "" : null
).build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
final var licenseState = mockLicense(true);
final var thrown = assertThrows(
SettingsException.class,
() -> new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool)
);
assertThat(
thrown.getMessage(),
equalTo(
Strings.format(
"The configuration setting [%s] is required",
getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.TENANT_ID)
)
)
);
}
public void testSupportsAlwaysReturnsFalse() {
final var roleMapper = mock(UserRoleMapper.class);
final var realmSettings = realmSettings().build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
final var licenseState = mockLicense(true);
final var realm = new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool);
assertThat(realm.supports(fakeToken), equalTo(false));
}
public void testTokenAlwaysReturnsNull() {
final var roleMapper = mock(UserRoleMapper.class);
final var realmSettings = realmSettings().build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
final var licenseState = mockLicense(true);
final var realm = new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool);
assertThat(realm.token(threadContext), equalTo(null));
}
public void testAuthenticateAlwaysReturnsNotHandled() {
final var roleMapper = mock(UserRoleMapper.class);
final var realmSettings = realmSettings().build();
final var config = new RealmConfig(realmId, realmSettings, env, threadContext);
final var client = mock(GraphServiceClient.class);
final var licenseState = mockLicense(true);
final var realm = new MicrosoftGraphAuthzRealm(roleMapper, config, client, licenseState, threadPool);
final var future = new PlainActionFuture<AuthenticationResult<User>>();
realm.authenticate(fakeToken, future);
final var result = future.actionGet();
assertThat(result, equalTo(AuthenticationResult.notHandled()));
}
private UserRoleMapper mockRoleMapper(Set<String> expectedGroups, Set<String> rolesToReturn) {
final var roleMapper = mock(UserRoleMapper.class);
doAnswer(invocation -> {
var userData = (UserRoleMapper.UserData) invocation.getArguments()[0];
assertEquals(userData.getGroups(), expectedGroups);
@SuppressWarnings("unchecked")
var listener = (ActionListener<Set<String>>) invocation.getArguments()[1];
listener.onResponse(rolesToReturn);
return null;
}).when(roleMapper).resolveRoles(any(), any());
return roleMapper;
}
private Settings.Builder realmSettings() {
final var secureSettings = new MockSecureSettings();
secureSettings.setString(getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.CLIENT_SECRET), clientSecret);
return Settings.builder()
.put(globalSettings)
.put(getFullSettingKey(realmId, RealmSettings.ORDER_SETTING), 0)
.put(getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.CLIENT_ID), clientId)
.put(getFullSettingKey(realmName, MicrosoftGraphAuthzRealmSettings.TENANT_ID), tenantId)
.setSecureSettings(secureSettings);
}
private XPackLicenseState mockLicense(boolean msGraphAllowed) {
final var licenseState = MockLicenseState.createMock();
when(licenseState.isAllowed(eq(MICROSOFT_GRAPH_FEATURE))).thenReturn(msGraphAllowed);
return licenseState;
}
private UserItemRequestBuilder mockGetUser(GraphServiceClient client) {
final var userRequestBuilder = mock(UsersRequestBuilder.class);
final var userItemRequestBuilder = mock(UserItemRequestBuilder.class);
when(client.users()).thenReturn(userRequestBuilder);
when(userRequestBuilder.byUserId(eq(username))).thenReturn(userItemRequestBuilder);
return userItemRequestBuilder;
}
private GraphGroupRequestBuilder mockGetGroupMembership(UserItemRequestBuilder userItemRequestBuilder) {
final var memberOfRequestBuilder = mock(TransitiveMemberOfRequestBuilder.class);
final var graphGroupRequestBuilder = mock(GraphGroupRequestBuilder.class);
when(userItemRequestBuilder.transitiveMemberOf()).thenReturn(memberOfRequestBuilder);
when(memberOfRequestBuilder.graphGroup()).thenReturn(graphGroupRequestBuilder);
return graphGroupRequestBuilder;
}
private com.microsoft.graph.models.User user(String name, String email) {
final var msUser = new com.microsoft.graph.models.User();
msUser.setDisplayName(name);
msUser.setMail(email);
return msUser;
}
private GroupCollectionResponse groupMembership(String... groupIds) {
final var groupMembership = new GroupCollectionResponse();
groupMembership.setValue(Arrays.stream(groupIds).map(id -> {
var group = new Group();
group.setId(id);
return group;
}).toList());
return groupMembership;
}
private ODataError graphError(String message) {
final var error = new MainError();
error.setCode("badRequest");
error.setMessage(message);
final var graphError = new ODataError();
graphError.setError(error);
return graphError;
}
}
| MicrosoftGraphAuthzRealmTests |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/tool/schema/internal/script/SingleLineSqlScriptExtractor.java | {
"start": 793,
"end": 1911
} | class ____ implements SqlScriptCommandExtractor {
public static final String SHORT_NAME = "single-line";
public static final SqlScriptCommandExtractor INSTANCE = new SingleLineSqlScriptExtractor();
@Override
public List<String> extractCommands(Reader reader, Dialect dialect) {
final List<String> statementList = new LinkedList<>();
final var bufferedReader = new BufferedReader( reader );
try {
for ( String line = bufferedReader.readLine(); line != null; line = bufferedReader.readLine() ) {
final String trimmedLine = line.trim();
if ( !trimmedLine.isEmpty() && !isComment( trimmedLine ) ) {
final String command =
trimmedLine.endsWith( ";" )
? trimmedLine.substring( 0, trimmedLine.length() - 1 )
: trimmedLine;
statementList.add( command );
}
}
return statementList;
}
catch (IOException e) {
throw new SqlScriptException( "Error during sql-script parsing.", e );
}
}
private boolean isComment(final String line) {
return line.startsWith( "--" ) || line.startsWith( "//" ) || line.startsWith( "/*" );
}
}
| SingleLineSqlScriptExtractor |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/MultipleTopLevelClasses.java | {
"start": 1775,
"end": 2726
} | class ____ extends BugChecker
implements CompilationUnitTreeMatcher {
@Override
public Description matchCompilationUnit(CompilationUnitTree tree, VisitorState state) {
List<String> names = new ArrayList<>();
for (Tree member : tree.getTypeDecls()) {
if (member instanceof ClassTree classMember) {
if (isSuppressed(classMember, state)) {
// If any top-level classes have @SuppressWarnings("TopLevel"), ignore
// this compilation unit. We can't rely on the normal suppression
// mechanism because the only enclosing element is the package declaration,
// and @SuppressWarnings can't be applied to packages.
return NO_MATCH;
}
names.add(classMember.getSimpleName().toString());
}
}
if (names.size() <= 1) {
return NO_MATCH;
}
String message =
String.format(
"Expected at most one top-level | MultipleTopLevelClasses |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RCollectionReactive.java | {
"start": 801,
"end": 899
} | interface ____ collection object
*
* @author Nikita Koksharov
*
* @param <V> value
*/
public | for |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/CommandListenerIntegrationTests.java | {
"start": 1652,
"end": 3626
} | class ____ extends TestSupport {
private final RedisClient client;
@Inject
public CommandListenerIntegrationTests(RedisClient client) {
this.client = client;
}
@Test
void shouldWorkRedisCommandListener() {
List<CommandStartedEvent> startedEvents = Collections.synchronizedList(new ArrayList<>());
List<CommandSucceededEvent> succeededEvents = Collections.synchronizedList(new ArrayList<>());
List<CommandFailedEvent> failedEvents = Collections.synchronizedList(new ArrayList<>());
CommandListener listener = new CommandListener() {
@Override
public void commandStarted(CommandStartedEvent event) {
event.getContext().put(key, value);
startedEvents.add(event);
assertThat(event.getStartedAt()).isNotNull();
}
@Override
public void commandSucceeded(CommandSucceededEvent event) {
succeededEvents.add(event);
assertThat(event.getContext()).containsEntry(key, value);
assertThat(event.getDuration()).isPositive();
}
@Override
public void commandFailed(CommandFailedEvent event) {
failedEvents.add(event);
assertThat(event.getContext()).containsEntry(key, value);
assertThat(event.getCause()).isInstanceOf(RedisCommandExecutionException.class);
}
};
client.addListener(listener);
RedisCommands<String, String> sync = client.connect().sync();
sync.set(key, value);
sync.get(key);
try {
sync.llen(key);
} catch (RedisCommandExecutionException ignored) {
}
assertThat(startedEvents).hasSize(3);
assertThat(succeededEvents).hasSize(2);
assertThat(failedEvents).hasSize(1);
client.removeListener(listener);
}
}
| CommandListenerIntegrationTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/BadAnnotationImplementationTest.java | {
"start": 2365,
"end": 2541
} | interface ____ {}")
.addSourceLines(
"AnonymousClass.java",
"""
import java.lang.annotation.Annotation;
public | MyAnnotation |
java | processing__processing4 | core/src/processing/core/PConstants.java | {
"start": 1349,
"end": 14658
} | interface ____ {
int X = 0;
int Y = 1;
int Z = 2;
// renderers known to processing.core
/*
// List of renderers used inside PdePreprocessor
static final StringList rendererList = new StringList(new String[] {
"JAVA2D", "JAVA2D_2X",
"P2D", "P2D_2X", "P3D", "P3D_2X", "OPENGL",
"E2D", "FX2D", "FX2D_2X", // experimental
"LWJGL.P2D", "LWJGL.P3D", // hmm
"PDF" // no DXF because that's only for beginRaw()
});
*/
String JAVA2D = "processing.awt.PGraphicsJava2D";
String P2D = "processing.opengl.PGraphics2D";
String P3D = "processing.opengl.PGraphics3D";
// When will it be time to remove this?
@Deprecated
String OPENGL = P3D;
// Experimental, higher-performance Java 2D renderer (but no pixel ops)
// static final String E2D = PGraphicsDanger2D.class.getName();
// Experimental JavaFX renderer; even better 2D performance
String FX2D = "processing.javafx.PGraphicsFX2D";
String PDF = "processing.pdf.PGraphicsPDF";
String SVG = "processing.svg.PGraphicsSVG";
String DXF = "processing.dxf.RawDXF";
// platform IDs for PApplet.platform
int OTHER = 0;
int WINDOWS = 1;
int MACOS = 2;
int LINUX = 3;
/** @deprecated Marketers gonna market, use {@link #MACOS} */
@Deprecated
int MACOSX = 2;
String[] platformNames = {
"other", "windows", "macos", "linux"
};
float EPSILON = 0.0001f;
// max/min values for numbers
/**
* Same as Float.MAX_VALUE, but included for parity with MIN_VALUE,
* and to avoid teaching static methods on the first day.
*/
float MAX_FLOAT = Float.MAX_VALUE;
/**
* Note that Float.MIN_VALUE is the smallest <EM>positive</EM> value
* for a floating point number, not actually the minimum (negative) value
* for a float. This constant equals 0xFF7FFFFF, the smallest (farthest
* negative) value a float can have before it hits NaN.
*/
float MIN_FLOAT = -Float.MAX_VALUE;
/** Largest possible (positive) integer value */
int MAX_INT = Integer.MAX_VALUE;
/** Smallest possible (negative) integer value */
int MIN_INT = Integer.MIN_VALUE;
// shapes
int VERTEX = 0;
int BEZIER_VERTEX = 1;
int QUADRATIC_VERTEX = 2;
int CURVE_VERTEX = 3;
int BREAK = 4;
@Deprecated
int QUAD_BEZIER_VERTEX = 2; // should not have been exposed
// useful goodness
/**
*
* <b>PI</b> is a mathematical constant with the value 3.1415927. It is the
* ratio of the circumference of a circle to its diameter. It is useful in
* combination with the trigonometric functions <b>sin()</b> and <b>cos()</b>.
*
* @webref constants
* @webBrief PI is a mathematical constant with the value
* 3.14159265358979323846
* @see PConstants#TWO_PI
* @see PConstants#TAU
* @see PConstants#HALF_PI
* @see PConstants#QUARTER_PI
*
*/
float PI = (float) Math.PI;
/**
*
* <b>HALF_PI</b> is a mathematical constant with the value 1.5707964. It is
* half the ratio of the circumference of a circle to its diameter. It is useful
* in combination with the trigonometric functions <b>sin()</b> and
* <b>cos()</b>.
*
* @webref constants
* @webBrief HALF_PI is a mathematical constant with the value
* 1.57079632679489661923
* @see PConstants#PI
* @see PConstants#TWO_PI
* @see PConstants#TAU
* @see PConstants#QUARTER_PI
*/
float HALF_PI = (float) (Math.PI / 2.0);
float THIRD_PI = (float) (Math.PI / 3.0);
/**
*
* <b>QUARTER_PI</b> is a mathematical constant with the value 0.7853982. It is
* one quarter the ratio of the circumference of a circle to its diameter.
* It is useful in combination with the trigonometric functions
* <b>sin()</b> and <b>cos()</b>.
*
* @webref constants
* @webBrief QUARTER_PI is a mathematical constant with the value 0.7853982
* @see PConstants#PI
* @see PConstants#TWO_PI
* @see PConstants#TAU
* @see PConstants#HALF_PI
*/
float QUARTER_PI = (float) (Math.PI / 4.0);
/**
*
* <b>TWO_PI</b> is a mathematical constant with the value 6.2831855.
* It is twice the ratio of the circumference of a circle to its diameter.
* It is useful in combination with the trigonometric functions
* <b>sin()</b> and <b>cos()</b>.
*
* @webref constants
* @webBrief TWO_PI is a mathematical constant with the value 6.28318530717958647693
* @see PConstants#PI
* @see PConstants#TAU
* @see PConstants#HALF_PI
* @see PConstants#QUARTER_PI
*/
float TWO_PI = (float) (2.0 * Math.PI);
/**
*
* <b>TAU</b> is a mathematical constant with the value 6.2831855. It is the
* circle constant relating the circumference of a circle to its linear
* dimension, the ratio of the circumference of a circle to its radius. It is
* useful in combination with trigonometric functions such as <b>sin()</b> and
* <b>cos()</b>.
*
* @webref constants
* @webBrief An alias for <b>TWO_PI</b>
* @see PConstants#PI
* @see PConstants#TWO_PI
* @see PConstants#HALF_PI
* @see PConstants#QUARTER_PI
*/
float TAU = (float) (2.0 * Math.PI);
float DEG_TO_RAD = PI/180.0f;
float RAD_TO_DEG = 180.0f/PI;
// angle modes
//static final int RADIANS = 0;
//static final int DEGREES = 1;
// used by split, all the standard whitespace chars
// (also includes unicode nbsp, that little !#$!#%)
String WHITESPACE = " \t\n\r\f\u00A0";
// for colors and/or images
int RGB = 1; // image & color
int ARGB = 2; // image
int HSB = 3; // color
int ALPHA = 4; // image
// static final int CMYK = 5; // image & color (someday)
// image file types
int TIFF = 0;
int TARGA = 1;
int JPEG = 2;
int GIF = 3;
// filter/convert types
int BLUR = 11;
int GRAY = 12;
int INVERT = 13;
int OPAQUE = 14;
int POSTERIZE = 15;
int THRESHOLD = 16;
int ERODE = 17;
int DILATE = 18;
// blend mode keyword definitions
// @see processing.core.PImage#blendColor(int,int,int)
int REPLACE = 0;
int BLEND = 1 << 0;
int ADD = 1 << 1;
int SUBTRACT = 1 << 2;
int LIGHTEST = 1 << 3;
int DARKEST = 1 << 4;
int DIFFERENCE = 1 << 5;
int EXCLUSION = 1 << 6;
int MULTIPLY = 1 << 7;
int SCREEN = 1 << 8;
int OVERLAY = 1 << 9;
int HARD_LIGHT = 1 << 10;
int SOFT_LIGHT = 1 << 11;
int DODGE = 1 << 12;
int BURN = 1 << 13;
// for messages
int CHATTER = 0;
int COMPLAINT = 1;
int PROBLEM = 2;
// types of transformation matrices
int PROJECTION = 0;
int MODELVIEW = 1;
// types of projection matrices
int CUSTOM = 0; // user-specified fanciness
int ORTHOGRAPHIC = 2; // 2D isometric projection
int PERSPECTIVE = 3; // perspective matrix
// shapes
// the low four bits set the variety,
// higher bits set the specific shape type
int GROUP = 0; // createShape()
int POINT = 2; // primitive
int POINTS = 3; // vertices
int LINE = 4; // primitive
int LINES = 5; // beginShape(), createShape()
int LINE_STRIP = 50; // beginShape()
int LINE_LOOP = 51;
int TRIANGLE = 8; // primitive
int TRIANGLES = 9; // vertices
int TRIANGLE_STRIP = 10; // vertices
int TRIANGLE_FAN = 11; // vertices
int QUAD = 16; // primitive
int QUADS = 17; // vertices
int QUAD_STRIP = 18; // vertices
int POLYGON = 20; // in the end, probably cannot
int PATH = 21; // separate these two
int RECT = 30; // primitive
int ELLIPSE = 31; // primitive
int ARC = 32; // primitive
int SPHERE = 40; // primitive
int BOX = 41; // primitive
// static public final int POINT_SPRITES = 52;
// static public final int NON_STROKED_SHAPE = 60;
// static public final int STROKED_SHAPE = 61;
// shape closing modes
int OPEN = 1;
int CLOSE = 2;
// shape drawing modes
/** Draw mode convention to use (x, y) to (width, height) */
int CORNER = 0;
/** Draw mode convention to use (x1, y1) to (x2, y2) coordinates */
int CORNERS = 1;
/** Draw mode from the center, and using the radius */
int RADIUS = 2;
/**
* Draw from the center, using second pair of values as the diameter.
* Formerly called CENTER_DIAMETER in alpha releases.
*/
int CENTER = 3;
/**
* Synonym for the CENTER constant. Draw from the center,
* using second pair of values as the diameter.
*/
int DIAMETER = 3;
// arc drawing modes
//static final int OPEN = 1; // shared
int CHORD = 2;
int PIE = 3;
// vertically alignment modes for text
/** Default vertical alignment for text placement */
int BASELINE = 0;
/** Align text to the top */
int TOP = 101;
/** Align text from the bottom, using the baseline. */
int BOTTOM = 102;
// uv texture orientation modes
/** texture coordinates in 0..1 range */
int NORMAL = 1;
/** texture coordinates based on image width/height */
int IMAGE = 2;
// texture wrapping modes
/** textures are clamped to their edges */
int CLAMP = 0;
/** textures wrap around when uv values go outside 0..1 range */
int REPEAT = 1;
// text placement modes
/**
* textMode(MODEL) is the default, meaning that characters
* will be affected by transformations like any other shapes.
* <p/>
* Changed value in 0093 to not interfere with LEFT, CENTER, and RIGHT.
*/
int MODEL = 4;
/**
* textMode(SHAPE) draws text using the the glyph outlines of
* individual characters rather than as textures. If the outlines are
* not available, then textMode(SHAPE) will be ignored and textMode(MODEL)
* will be used instead. For this reason, be sure to call textMode()
* <EM>after</EM> calling textFont().
* <p/>
* Currently, textMode(SHAPE) is only supported by OPENGL mode.
* It also requires Java 1.2 or higher (OPENGL requires 1.4 anyway)
*/
int SHAPE = 5;
// text alignment modes
// are inherited from LEFT, CENTER, RIGHT
// stroke modes
int SQUARE = 1 << 0; // called 'butt' in the svg spec
int ROUND = 1 << 1;
int PROJECT = 1 << 2; // called 'square' in the svg spec
int MITER = 1 << 3;
int BEVEL = 1 << 5;
// lighting
int AMBIENT = 0;
int DIRECTIONAL = 1;
//static final int POINT = 2; // shared with shape feature
int SPOT = 3;
// key constants
// only including the most-used of these guys
// if people need more esoteric keys, they can learn about
// the esoteric java KeyEvent api and of virtual keys
// both key and keyCode will equal these values
// for 0125, these were changed to 'char' values, because they
// can be upgraded to ints automatically by Java, but having them
// as ints prevented split(blah, TAB) from working
char BACKSPACE = 8;
char TAB = 9;
char ENTER = 10;
char RETURN = 13;
char ESC = 27;
char DELETE = 127;
// i.e. if ((key == CODED) && (keyCode == UP))
int CODED = 0xffff;
// key will be CODED and keyCode will be this value
int UP = KeyEvent.VK_UP;
int DOWN = KeyEvent.VK_DOWN;
int LEFT = KeyEvent.VK_LEFT;
int RIGHT = KeyEvent.VK_RIGHT;
// key will be CODED and keyCode will be this value
int ALT = KeyEvent.VK_ALT;
int CONTROL = KeyEvent.VK_CONTROL;
int SHIFT = KeyEvent.VK_SHIFT;
// orientations (only used on Android, ignored on desktop)
/** Screen orientation constant for portrait (the hamburger way). */
int PORTRAIT = 1;
/** Screen orientation constant for landscape (the hot dog way). */
int LANDSCAPE = 2;
/** Use with fullScreen() to indicate all available displays. */
int SPAN = 0;
// cursor types
int ARROW = Cursor.DEFAULT_CURSOR;
int CROSS = Cursor.CROSSHAIR_CURSOR;
int HAND = Cursor.HAND_CURSOR;
int MOVE = Cursor.MOVE_CURSOR;
int TEXT = Cursor.TEXT_CURSOR;
int WAIT = Cursor.WAIT_CURSOR;
// image interpolation modes
int NEAREST_NEIGHBOR = 0;
int BILINEAR = 1;
int BICUBIC = 2;
// hints - hint values are positive for the alternate version,
// negative of the same value returns to the normal/default state
@Deprecated
int ENABLE_NATIVE_FONTS = 1;
@Deprecated
int DISABLE_NATIVE_FONTS = -1;
int DISABLE_DEPTH_TEST = 2;
int ENABLE_DEPTH_TEST = -2;
int ENABLE_DEPTH_SORT = 3;
int DISABLE_DEPTH_SORT = -3;
int DISABLE_OPENGL_ERRORS = 4;
int ENABLE_OPENGL_ERRORS = -4;
int DISABLE_DEPTH_MASK = 5;
int ENABLE_DEPTH_MASK = -5;
int DISABLE_OPTIMIZED_STROKE = 6;
int ENABLE_OPTIMIZED_STROKE = -6;
int ENABLE_STROKE_PERSPECTIVE = 7;
int DISABLE_STROKE_PERSPECTIVE = -7;
int DISABLE_TEXTURE_MIPMAPS = 8;
int ENABLE_TEXTURE_MIPMAPS = -8;
int ENABLE_STROKE_PURE = 9;
int DISABLE_STROKE_PURE = -9;
int ENABLE_BUFFER_READING = 10;
int DISABLE_BUFFER_READING = -10;
int DISABLE_KEY_REPEAT = 11;
int ENABLE_KEY_REPEAT = -11;
int DISABLE_ASYNC_SAVEFRAME = 12;
int ENABLE_ASYNC_SAVEFRAME = -12;
int HINT_COUNT = 13;
}
| PConstants |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/bulk/stats/BulkStats.java | {
"start": 5200,
"end": 5727
} | class ____ {
static final String BULK = "bulk";
static final String TOTAL_OPERATIONS = "total_operations";
static final String TOTAL_TIME = "total_time";
static final String AVG_TIME = "avg_time";
static final String TOTAL_TIME_IN_MILLIS = "total_time_in_millis";
static final String TOTAL_SIZE_IN_BYTES = "total_size_in_bytes";
static final String AVG_TIME_IN_MILLIS = "avg_time_in_millis";
static final String AVG_SIZE_IN_BYTES = "avg_size_in_bytes";
}
}
| Fields |
java | apache__camel | core/camel-base/src/main/java/org/apache/camel/impl/event/CamelContextStartedEvent.java | {
"start": 951,
"end": 1348
} | class ____ extends AbstractContextEvent implements CamelEvent.CamelContextStartedEvent {
private static final @Serial long serialVersionUID = 6761726800283073490L;
public CamelContextStartedEvent(CamelContext source) {
super(source);
}
@Override
public String toString() {
return "Started CamelContext: " + getContext().getName();
}
}
| CamelContextStartedEvent |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/io/network/partition/consumer/InputBuffersMetricsTest.java | {
"start": 2061,
"end": 14617
} | class ____ {
private CloseableRegistry closeableRegistry;
@BeforeEach
void setup() {
closeableRegistry = new CloseableRegistry();
}
@AfterEach
void tearDown() throws IOException {
closeableRegistry.close();
}
@Test
void testCalculateTotalBuffersSize() throws Exception {
int numberOfRemoteChannels = 2;
int numberOfLocalChannels = 0;
int numberOfBufferPerChannel = 2;
int numberOfBuffersPerGate = 8;
NettyShuffleEnvironment network =
new NettyShuffleEnvironmentBuilder()
.setNetworkBuffersPerChannel(numberOfBufferPerChannel)
.setFloatingNetworkBuffersPerGate(numberOfBuffersPerGate)
.build();
closeableRegistry.registerCloseable(network::close);
SingleInputGate inputGate1 =
buildInputGate(network, numberOfRemoteChannels, numberOfLocalChannels).f0;
closeableRegistry.registerCloseable(inputGate1::close);
inputGate1.setup();
SingleInputGate[] inputGates = new SingleInputGate[] {inputGate1};
FloatingBuffersUsageGauge floatingBuffersUsageGauge =
new FloatingBuffersUsageGauge(inputGates);
ExclusiveBuffersUsageGauge exclusiveBuffersUsageGauge =
new ExclusiveBuffersUsageGauge(inputGates);
CreditBasedInputBuffersUsageGauge inputBufferPoolUsageGauge =
new CreditBasedInputBuffersUsageGauge(
floatingBuffersUsageGauge, exclusiveBuffersUsageGauge, inputGates);
closeableRegistry.registerCloseable(network::close);
closeableRegistry.registerCloseable(inputGate1::close);
assertThat(floatingBuffersUsageGauge.calculateTotalBuffers(inputGate1))
.isEqualTo(numberOfBuffersPerGate);
assertThat(exclusiveBuffersUsageGauge.calculateTotalBuffers(inputGate1))
.isEqualTo(numberOfRemoteChannels * numberOfBufferPerChannel);
assertThat(inputBufferPoolUsageGauge.calculateTotalBuffers(inputGate1))
.isEqualTo(
numberOfRemoteChannels * numberOfBufferPerChannel + numberOfBuffersPerGate);
}
@Test
void testExclusiveBuffersUsage() throws Exception {
int numberOfRemoteChannelsGate1 = 2;
int numberOfLocalChannelsGate1 = 0;
int numberOfRemoteChannelsGate2 = 1;
int numberOfLocalChannelsGate2 = 1;
int totalNumberOfRemoteChannels = numberOfRemoteChannelsGate1 + numberOfRemoteChannelsGate2;
int buffersPerChannel = 2;
int extraNetworkBuffersPerGate = 8;
NettyShuffleEnvironment network =
new NettyShuffleEnvironmentBuilder()
.setNetworkBuffersPerChannel(buffersPerChannel)
.setFloatingNetworkBuffersPerGate(extraNetworkBuffersPerGate)
.build();
closeableRegistry.registerCloseable(network::close);
Tuple2<SingleInputGate, List<RemoteInputChannel>> tuple1 =
buildInputGate(network, numberOfRemoteChannelsGate1, numberOfLocalChannelsGate1);
Tuple2<SingleInputGate, List<RemoteInputChannel>> tuple2 =
buildInputGate(network, numberOfRemoteChannelsGate2, numberOfLocalChannelsGate2);
SingleInputGate inputGate1 = tuple1.f0;
SingleInputGate inputGate2 = tuple2.f0;
closeableRegistry.registerCloseable(inputGate1::close);
closeableRegistry.registerCloseable(inputGate2::close);
inputGate1.setup();
inputGate2.setup();
List<RemoteInputChannel> remoteInputChannels = tuple1.f1;
SingleInputGate[] inputGates = new SingleInputGate[] {tuple1.f0, tuple2.f0};
FloatingBuffersUsageGauge floatingBuffersUsageGauge =
new FloatingBuffersUsageGauge(inputGates);
ExclusiveBuffersUsageGauge exclusiveBuffersUsageGauge =
new ExclusiveBuffersUsageGauge(inputGates);
CreditBasedInputBuffersUsageGauge inputBuffersUsageGauge =
new CreditBasedInputBuffersUsageGauge(
floatingBuffersUsageGauge, exclusiveBuffersUsageGauge, inputGates);
assertThat(exclusiveBuffersUsageGauge.getValue()).isEqualTo(0.0f, offset(0.0f));
assertThat(inputBuffersUsageGauge.getValue()).isEqualTo(0.0f, offset(0.0f));
int totalBuffers =
extraNetworkBuffersPerGate * inputGates.length
+ buffersPerChannel * totalNumberOfRemoteChannels;
int channelIndex = 1;
for (RemoteInputChannel channel : remoteInputChannels) {
drainAndValidate(
buffersPerChannel,
buffersPerChannel * channelIndex++,
channel,
totalBuffers,
buffersPerChannel * totalNumberOfRemoteChannels,
exclusiveBuffersUsageGauge,
inputBuffersUsageGauge,
inputGate1);
}
}
@Test
void testFloatingBuffersUsage() throws Exception {
int numberOfRemoteChannelsGate1 = 2;
int numberOfLocalChannelsGate1 = 0;
int numberOfRemoteChannelsGate2 = 1;
int numberOfLocalChannelsGate2 = 1;
int totalNumberOfRemoteChannels = numberOfRemoteChannelsGate1 + numberOfRemoteChannelsGate2;
int buffersPerChannel = 2;
int extraNetworkBuffersPerGate = 8;
NettyShuffleEnvironment network =
new NettyShuffleEnvironmentBuilder()
.setNetworkBuffersPerChannel(buffersPerChannel)
.setFloatingNetworkBuffersPerGate(extraNetworkBuffersPerGate)
.build();
closeableRegistry.registerCloseable(network::close);
Tuple2<SingleInputGate, List<RemoteInputChannel>> tuple1 =
buildInputGate(network, numberOfRemoteChannelsGate1, numberOfLocalChannelsGate1);
SingleInputGate inputGate2 =
buildInputGate(network, numberOfRemoteChannelsGate2, numberOfLocalChannelsGate2).f0;
SingleInputGate inputGate1 = tuple1.f0;
closeableRegistry.registerCloseable(inputGate1::close);
closeableRegistry.registerCloseable(inputGate2::close);
inputGate1.setup();
inputGate2.setup();
RemoteInputChannel remoteInputChannel1 = tuple1.f1.get(0);
SingleInputGate[] inputGates = new SingleInputGate[] {tuple1.f0, inputGate2};
FloatingBuffersUsageGauge floatingBuffersUsageGauge =
new FloatingBuffersUsageGauge(inputGates);
ExclusiveBuffersUsageGauge exclusiveBuffersUsageGauge =
new ExclusiveBuffersUsageGauge(inputGates);
CreditBasedInputBuffersUsageGauge inputBuffersUsageGauge =
new CreditBasedInputBuffersUsageGauge(
floatingBuffersUsageGauge, exclusiveBuffersUsageGauge, inputGates);
assertThat(floatingBuffersUsageGauge.getValue()).isEqualTo(0.0f, offset(0.0f));
assertThat(inputBuffersUsageGauge.getValue()).isEqualTo(0.0f, offset(0.0f));
// drain gate1's exclusive buffers
drainBuffer(buffersPerChannel, remoteInputChannel1);
int totalBuffers =
extraNetworkBuffersPerGate * inputGates.length
+ buffersPerChannel * totalNumberOfRemoteChannels;
remoteInputChannel1.requestSubpartitions();
int backlog = 3;
int totalRequestedBuffers = buffersPerChannel + backlog;
remoteInputChannel1.onSenderBacklog(backlog);
assertThat(remoteInputChannel1.unsynchronizedGetFloatingBuffersAvailable())
.isEqualTo(totalRequestedBuffers);
drainBuffer(totalRequestedBuffers, remoteInputChannel1);
assertThat(remoteInputChannel1.unsynchronizedGetFloatingBuffersAvailable()).isZero();
assertThat((double) inputBuffersUsageGauge.getValue())
.isEqualTo(
(double) (buffersPerChannel + totalRequestedBuffers) / totalBuffers,
offset(0.0001));
}
private void drainAndValidate(
int numBuffersToRequest,
int totalRequestedBuffers,
RemoteInputChannel channel,
int totalBuffers,
int totalExclusiveBuffers,
ExclusiveBuffersUsageGauge exclusiveBuffersUsageGauge,
CreditBasedInputBuffersUsageGauge inputBuffersUsageGauge,
SingleInputGate inputGate)
throws IOException {
drainBuffer(numBuffersToRequest, channel);
assertThat(exclusiveBuffersUsageGauge.calculateUsedBuffers(inputGate))
.isEqualTo(totalRequestedBuffers);
assertThat((double) exclusiveBuffersUsageGauge.getValue())
.isEqualTo((double) totalRequestedBuffers / totalExclusiveBuffers, offset(0.0001));
assertThat((double) inputBuffersUsageGauge.getValue())
.isEqualTo((double) totalRequestedBuffers / totalBuffers, offset(0.0001));
}
private void drainBuffer(int boundary, RemoteInputChannel channel) throws IOException {
for (int i = 0; i < boundary; i++) {
Buffer buffer = channel.requestBuffer();
if (buffer != null) {
closeableRegistry.registerCloseable(buffer::recycleBuffer);
} else {
break;
}
}
}
private Tuple2<SingleInputGate, List<RemoteInputChannel>> buildInputGate(
NettyShuffleEnvironment network, int numberOfRemoteChannels, int numberOfLocalChannels)
throws Exception {
SingleInputGate inputGate =
new SingleInputGateBuilder()
.setNumberOfChannels(numberOfRemoteChannels + numberOfLocalChannels)
.setResultPartitionType(ResultPartitionType.PIPELINED_BOUNDED)
.setupBufferPoolFactory(network)
.build();
InputChannel[] inputChannels =
new InputChannel[numberOfRemoteChannels + numberOfLocalChannels];
Tuple2<SingleInputGate, List<RemoteInputChannel>> res =
Tuple2.of(inputGate, new ArrayList<>());
int channelIdx = 0;
for (int i = 0; i < numberOfRemoteChannels; i++) {
ResultPartition partition =
PartitionTestUtils.createPartition(
network, ResultPartitionType.PIPELINED_BOUNDED, 1);
closeableRegistry.registerCloseable(partition::close);
partition.setup();
RemoteInputChannel remoteChannel =
buildRemoteChannel(channelIdx, inputGate, network, partition);
inputChannels[i] = remoteChannel;
res.f1.add(remoteChannel);
channelIdx++;
}
for (int i = 0; i < numberOfLocalChannels; i++) {
ResultPartition partition =
PartitionTestUtils.createPartition(
network, ResultPartitionType.PIPELINED_BOUNDED, 1);
closeableRegistry.registerCloseable(partition::close);
partition.setup();
inputChannels[numberOfRemoteChannels + i] =
buildLocalChannel(channelIdx, inputGate, network, partition);
}
inputGate.setInputChannels(inputChannels);
return res;
}
private RemoteInputChannel buildRemoteChannel(
int channelIndex,
SingleInputGate inputGate,
NettyShuffleEnvironment network,
ResultPartition partition) {
return new InputChannelBuilder()
.setPartitionId(partition.getPartitionId())
.setChannelIndex(channelIndex)
.setupFromNettyShuffleEnvironment(network)
.setConnectionManager(new TestingConnectionManager())
.buildRemoteChannel(inputGate);
}
private LocalInputChannel buildLocalChannel(
int channelIndex,
SingleInputGate inputGate,
NettyShuffleEnvironment network,
ResultPartition partition) {
return new InputChannelBuilder()
.setPartitionId(partition.getPartitionId())
.setChannelIndex(channelIndex)
.setupFromNettyShuffleEnvironment(network)
.setConnectionManager(new TestingConnectionManager())
.buildLocalChannel(inputGate);
}
}
| InputBuffersMetricsTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/type/UUIDTypeConverterTest.java | {
"start": 4196,
"end": 5507
} | class ____ implements AttributeConverter<UUID, byte[]> {
@Override
public byte[] convertToDatabaseColumn(UUID attribute) {
return toBytes( attribute );
}
@Override
public UUID convertToEntityAttribute(byte[] dbData) {
return toUuid( dbData );
}
private UUID toUuid(byte[] bytes) {
if ( bytes == null || bytes.length < 16 ) {
return null;
}
long mostSignificantBits = getMostSignificantBits( bytes );
long leastSignificantBits = getLeastSignificantBits( bytes );
return new UUID( mostSignificantBits, leastSignificantBits );
}
private long getMostSignificantBits(byte[] bytes) {
byte[] b = new byte[8];
for ( int i = 0; i < 8; i++ ) {
b[i] = bytes[i];
}
return toLong( b );
}
private long getLeastSignificantBits(byte[] bytes) {
byte[] b = new byte[8];
int j = 0;
for ( int i = 8; i < 16; i++ ) {
b[j++] = bytes[i];
}
return toLong( b );
}
private long toLong(byte[] bytes) {
return ByteBuffer.wrap( bytes ).getLong();
}
private byte[] toBytes(UUID uuid) {
if ( uuid == null ) {
return new byte[] {};
}
ByteBuffer bb = ByteBuffer.wrap( new byte[16] );
bb.putLong( uuid.getMostSignificantBits() );
bb.putLong( uuid.getLeastSignificantBits() );
return bb.array();
}
}
}
| UuidBase64TypeConverter |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/annotation/MergedAnnotationsComposedOnSingleAnnotatedElementTests.java | {
"start": 8641,
"end": 8748
} | interface ____ {
}
@Cacheable(cacheName = "fooCache", key = "fooKey")
private static | ComposedCacheInterface |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/accesslog/AccessLogFileTestCase.java | {
"start": 2024,
"end": 6404
} | class ____ {
@RegisterExtension
public static QuarkusUnitTest unitTest = new QuarkusUnitTest()
.setArchiveProducer(new Supplier<>() {
@Override
public JavaArchive get() {
Path logDirectory;
try {
logDirectory = Files.createTempDirectory("quarkus-tests");
//backslash is an escape char, we need this to be properly formatted for windows
Properties p = new Properties();
p.setProperty("quarkus.http.access-log.enabled", "true");
p.setProperty("quarkus.http.access-log.log-to-file", "true");
p.setProperty("quarkus.http.access-log.base-file-name", "server");
p.setProperty("quarkus.http.access-log.log-directory", logDirectory.toAbsolutePath().toString());
p.setProperty("quarkus.http.access-log.pattern", "long");
p.setProperty("quarkus.http.access-log.exclude-pattern", "/health|/liveliness");
ByteArrayOutputStream out = new ByteArrayOutputStream();
p.store(out, null);
return ShrinkWrap.create(JavaArchive.class)
.add(new ByteArrayAsset(out.toByteArray()),
"application.properties");
} catch (IOException e) {
throw new RuntimeException(e);
}
}
});
@ConfigProperty(name = "quarkus.http.access-log.log-directory")
Path logDirectory;
@BeforeEach
public void before() throws IOException {
Files.createDirectories(logDirectory);
}
@AfterEach
public void after() throws IOException {
IoUtils.recursiveDelete(logDirectory);
}
@Test
public void testSingleLogMessageToFile() throws IOException, InterruptedException {
// issue the request with a specific HTTP protocol version, so that we can then verify
// the protocol value logged in the access log file
final RestAssuredConfig http10Config = RestAssured.config().httpClient(
new HttpClientConfig().setParam(CoreProtocolPNames.PROTOCOL_VERSION, new ProtocolVersion("HTTP", 1, 0)));
final RequestSpecification requestSpec = new RequestSpecBuilder().setConfig(http10Config).build();
final String paramValue = UUID.randomUUID().toString();
RestAssured.given(requestSpec).get("/health"); //should be ignored
RestAssured.given(requestSpec).get("/liveliness"); //should be ignored
RestAssured.given(requestSpec).get("/does-not-exist?foo=" + paramValue);
Awaitility.given().pollInterval(100, TimeUnit.MILLISECONDS)
.atMost(10, TimeUnit.SECONDS)
.untilAsserted(new ThrowingRunnable() {
@Override
public void run() throws Throwable {
try (Stream<Path> files = Files.list(logDirectory)) {
Assertions.assertEquals(1, (int) files.count());
}
Path path = logDirectory.resolve("server.log");
Assertions.assertTrue(Files.exists(path));
String data = Files.readString(path);
Assertions.assertFalse(data.contains("/health"));
Assertions.assertFalse(data.contains("/liveliness"));
Assertions.assertTrue(data.contains("/does-not-exist"));
Assertions.assertTrue(data.contains("?foo=" + paramValue),
"access log is missing query params");
Assertions.assertFalse(data.contains("?foo=" + paramValue + "?foo=" + paramValue),
"access log contains duplicated query params");
Assertions.assertTrue(data.contains("HTTP/1.0"),
"HTTP/1.0 protocol value is missing in the access log");
Assertions.assertTrue(data.contains("Accept: */*"),
"Accept header is missing in the access log");
}
});
}
}
| AccessLogFileTestCase |
java | apache__flink | flink-tests/src/test/java/org/apache/flink/test/checkpointing/IgnoreInFlightDataITCase.java | {
"start": 9797,
"end": 13158
} | class ____ implements SourceFunction<Integer>, CheckpointedFunction {
private static final long serialVersionUID = 1L;
private final SharedReference<AtomicInteger> lastCheckpointValue;
private ListState<Integer> valueState;
private volatile boolean isRunning = true;
public NumberSource(SharedReference<AtomicInteger> lastCheckpointValue) {
this.lastCheckpointValue = lastCheckpointValue;
}
@Override
public void run(SourceContext<Integer> ctx) throws Exception {
Iterator<Integer> stateIt = valueState.get().iterator();
boolean isRecovered = stateIt.hasNext();
if (isRecovered) {
synchronized (ctx.getCheckpointLock()) {
Integer lastValue = stateIt.next();
// Checking that ListState is recovered correctly.
assertEquals(lastCheckpointValue.get().intValue(), lastValue.intValue());
// if it is started after recovery, just send one more value and finish.
ctx.collect(lastValue + 1);
}
} else {
int next = 0;
synchronized (ctx.getCheckpointLock()) {
// Emit batch of data in order to having the downstream data for each subtask of
// the Map before the first checkpoint.
do {
next++;
valueState.update(singletonList(next));
ctx.collect(next);
} while (next < PARALLELISM); // One value for each map subtask is enough.
}
while (isRunning) {
// Wait for the checkpoint.
LockSupport.parkNanos(100000);
}
}
}
@Override
public void cancel() {
isRunning = false;
}
@Override
public void snapshotState(FunctionSnapshotContext context) throws Exception {
Iterator<Integer> integerIterator = valueState.get().iterator();
if (!integerIterator.hasNext()
|| integerIterator.next() < PARALLELISM
|| (context.getCheckpointId() > 1
&& lastCheckpointValue.get().get() < PARALLELISM)) {
// Try to restart task.
throw new RuntimeException(
"Not enough data to guarantee the in-flight data were generated before the first checkpoint");
}
if (context.getCheckpointId() > 2) {
// It is possible if checkpoint was triggered too fast after restart.
return; // Just ignore it.
}
if (context.getCheckpointId() == 2) {
throw new ExpectedTestException("The planned fail on the second checkpoint");
}
lastCheckpointValue.get().set(valueState.get().iterator().next());
}
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
this.valueState =
context.getOperatorStateStore()
.getListState(new ListStateDescriptor<>("state", Types.INT));
}
}
private static | NumberSource |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/UTF8ByteArrayUtils.java | {
"start": 1024,
"end": 3421
} | class ____ {
/**
* Find the first occurrence of the given byte b in a UTF-8 encoded string
* @param utf a byte array containing a UTF-8 encoded string
* @param start starting offset
* @param end ending position
* @param b the byte to find
* @return position that first byte occurs, otherwise -1
*/
public static int findByte(byte [] utf, int start, int end, byte b) {
for(int i=start; i<end; i++) {
if (utf[i]==b) {
return i;
}
}
return -1;
}
/**
* Find the first occurrence of the given bytes b in a UTF-8 encoded string
* @param utf a byte array containing a UTF-8 encoded string
* @param start starting offset
* @param end ending position
* @param b the bytes to find
* @return position that first byte occurs, otherwise -1
*/
public static int findBytes(byte [] utf, int start, int end, byte[] b) {
int matchEnd = end - b.length;
for(int i=start; i<=matchEnd; i++) {
boolean matched = true;
for(int j=0; j<b.length; j++) {
if (utf[i+j] != b[j]) {
matched = false;
break;
}
}
if (matched) {
return i;
}
}
return -1;
}
/**
* Find the nth occurrence of the given byte b in a UTF-8 encoded string
* @param utf a byte array containing a UTF-8 encoded string
* @param start starting offset
* @param length the length of byte array
* @param b the byte to find
* @param n the desired occurrence of the given byte
* @return position that nth occurrence of the given byte if exists; otherwise -1
*/
public static int findNthByte(byte [] utf, int start, int length, byte b, int n) {
int pos = -1;
int nextStart = start;
for (int i = 0; i < n; i++) {
pos = findByte(utf, nextStart, length, b);
if (pos < 0) {
return pos;
}
nextStart = pos + 1;
}
return pos;
}
/**
* Find the nth occurrence of the given byte b in a UTF-8 encoded string
* @param utf a byte array containing a UTF-8 encoded string
* @param b the byte to find
* @param n the desired occurrence of the given byte
* @return position that nth occurrence of the given byte if exists; otherwise -1
*/
public static int findNthByte(byte [] utf, byte b, int n) {
return findNthByte(utf, 0, utf.length, b, n);
}
}
| UTF8ByteArrayUtils |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/internals/StreamsMembershipManager.java | {
"start": 3144,
"end": 3603
} | class ____ implements RequestManager {
/**
* A data structure to represent the current task assignment, and target task assignment of a member in a
* streams group.
* <p/>
* Besides the assigned tasks, it contains a local epoch that is bumped whenever the assignment changes, to ensure
* that two assignments with the same tasks but different local epochs are not considered equal.
*/
private static | StreamsMembershipManager |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/TransportEqlStatsAction.java | {
"start": 986,
"end": 2841
} | class ____ extends TransportNodesAction<
EqlStatsRequest,
EqlStatsResponse,
EqlStatsRequest.NodeStatsRequest,
EqlStatsResponse.NodeStatsResponse,
Void> {
// the plan executor holds the metrics
private final PlanExecutor planExecutor;
@Inject
public TransportEqlStatsAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
PlanExecutor planExecutor
) {
super(
EqlStatsAction.NAME,
clusterService,
transportService,
actionFilters,
EqlStatsRequest.NodeStatsRequest::new,
threadPool.executor(ThreadPool.Names.MANAGEMENT)
);
this.planExecutor = planExecutor;
}
@Override
protected EqlStatsResponse newResponse(
EqlStatsRequest request,
List<EqlStatsResponse.NodeStatsResponse> nodes,
List<FailedNodeException> failures
) {
return new EqlStatsResponse(clusterService.getClusterName(), nodes, failures);
}
@Override
protected EqlStatsRequest.NodeStatsRequest newNodeRequest(EqlStatsRequest request) {
return new EqlStatsRequest.NodeStatsRequest(request);
}
@Override
protected EqlStatsResponse.NodeStatsResponse newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException {
return new EqlStatsResponse.NodeStatsResponse(in);
}
@Override
protected EqlStatsResponse.NodeStatsResponse nodeOperation(EqlStatsRequest.NodeStatsRequest request, Task task) {
EqlStatsResponse.NodeStatsResponse statsResponse = new EqlStatsResponse.NodeStatsResponse(clusterService.localNode());
statsResponse.setStats(planExecutor.metrics().stats());
return statsResponse;
}
}
| TransportEqlStatsAction |
java | junit-team__junit5 | junit-jupiter-params/src/main/java/org/junit/jupiter/params/provider/EnumSource.java | {
"start": 2768,
"end": 2921
} | enum ____ to provide.
*
* <p>If no names or regular expressions are specified, and neither {@link #from}
* nor {@link #to} are specified, all | constants |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/resultmapping/DiscriminatedSubType2.java | {
"start": 375,
"end": 848
} | class ____ extends DiscriminatedRoot {
private String subType2Name;
public DiscriminatedSubType2() {
super();
}
public DiscriminatedSubType2(Integer id, String rootName, String subType2Name) {
super( id, rootName );
this.subType2Name = subType2Name;
}
@Column( name = "subtype2_name" )
public String getSubType2Name() {
return subType2Name;
}
public void setSubType2Name(String subType2Name) {
this.subType2Name = subType2Name;
}
}
| DiscriminatedSubType2 |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/cors/reactive/CorsUtils.java | {
"start": 1060,
"end": 1250
} | class ____ CORS reactive request handling based on the
* <a href="https://www.w3.org/TR/cors/">CORS W3C recommendation</a>.
*
* @author Sebastien Deleuze
* @since 5.0
*/
public abstract | for |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByCheckerTest.java | {
"start": 42035,
"end": 42367
} | class ____ {
public static final Object mu = new Object();
}
}
""")
.addSourceLines(
"threadsafety/Test.java",
"""
package threadsafety;
import com.google.errorprone.annotations.concurrent.GuardedBy;
public | Inner |
java | ReactiveX__RxJava | src/jmh/java/io/reactivex/rxjava3/core/ObservableFlatMapPerf.java | {
"start": 1036,
"end": 1922
} | class ____ {
@Param({ "1", "10", "100", "1000", "10000", "100000", "1000000" })
public int count;
Observable<Integer> source;
@Setup
public void setup() {
int d = 1000000 / count;
Integer[] mainArray = new Integer[count];
Integer[] innerArray = new Integer[d];
Arrays.fill(mainArray, 777);
Arrays.fill(innerArray, 777);
Observable<Integer> outer = Observable.fromArray(mainArray);
final Observable<Integer> inner = Observable.fromArray(innerArray);
source = outer.flatMap(new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t) {
return inner;
}
});
}
@Benchmark
public void flatMapXRange(Blackhole bh) {
source.subscribe(new PerfObserver(bh));
}
}
| ObservableFlatMapPerf |
java | mybatis__mybatis-3 | src/test/java/org/apache/ibatis/submitted/selectkey/AnnotatedMapper.java | {
"start": 966,
"end": 3565
} | interface ____ {
@Insert("insert into table2 (name) values(#{name})")
@SelectKey(statement = "call identity()", keyProperty = "nameId", before = false, resultType = int.class)
int insertTable2(Name name);
@Insert("insert into table2 (name) values(#{name})")
@Options(useGeneratedKeys = true, keyProperty = "nameId,generatedName", keyColumn = "ID,NAME_FRED")
int insertTable2WithGeneratedKey(Name name);
int insertTable2WithGeneratedKeyXml(Name name);
@Insert("insert into table2 (name) values(#{name})")
@SelectKey(statement = "select id, name_fred from table2 where id = identity()", keyProperty = "nameId,generatedName", keyColumn = "ID,NAME_FRED", before = false, resultType = Map.class)
int insertTable2WithSelectKeyWithKeyMap(Name name);
int insertTable2WithSelectKeyWithKeyMapXml(Name name);
@Insert("insert into table2 (name) values(#{name})")
@SelectKey(statement = "select id as nameId, name_fred as generatedName from table2 where id = identity()", keyProperty = "nameId,generatedName", before = false, resultType = Name.class)
int insertTable2WithSelectKeyWithKeyObject(Name name);
int insertTable2WithSelectKeyWithKeyObjectXml(Name name);
@Insert("insert into table3 (id, name) values(#{nameId}, #{name})")
@SelectKey(statement = "call next value for TestSequence", keyProperty = "nameId", before = true, resultType = int.class)
int insertTable3(Name name);
@InsertProvider(type = SqlProvider.class, method = "insertTable3_2")
@SelectKey(statement = "call next value for TestSequence", keyProperty = "nameId", before = true, resultType = int.class)
int insertTable3_2(Name name);
@Update("update table2 set name = #{name} where id = #{nameId}")
@Options(useGeneratedKeys = true, keyProperty = "generatedName")
int updateTable2WithGeneratedKey(Name name);
int updateTable2WithGeneratedKeyXml(Name name);
@Update("update table2 set name = #{name} where id = #{nameId}")
@SelectKey(statement = "select name_fred from table2 where id = #{nameId}", keyProperty = "generatedName", keyColumn = "NAME_FRED", before = false, resultType = String.class)
int updateTable2WithSelectKeyWithKeyMap(Name name);
int updateTable2WithSelectKeyWithKeyMapXml(Name name);
@Update("update table2 set name = #{name} where id = #{nameId}")
@SelectKey(statement = "select name_fred as generatedName from table2 where id = #{nameId}", keyProperty = "generatedName", before = false, resultType = Name.class)
int updateTable2WithSelectKeyWithKeyObject(Name name);
int updateTable2WithSelectKeyWithKeyObjectXml(Name name);
}
| AnnotatedMapper |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/component/EmbeddableAndGenericExtendingSerializableMappedSuperclassTest.java | {
"start": 1791,
"end": 2054
} | class ____ {
@Column
private String value;
public AnotherEmbeddable() {
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
@MappedSuperclass
public static abstract | AnotherEmbeddable |
java | apache__commons-lang | src/main/java/org/apache/commons/lang3/SystemUtils.java | {
"start": 49745,
"end": 50190
} | class ____ loaded.
* </p>
*
* @since 2.0
*/
public static final boolean IS_OS_IRIX = getOsNameMatches("Irix");
/**
* The constant {@code true} if this is Linux.
* <p>
* The result depends on the value of the {@link #OS_NAME} constant.
* </p>
* <p>
* The field will return {@code false} if {@link #OS_NAME} is {@code null}.
* </p>
* <p>
* This value is initialized when the | is |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestNestedMountPoint.java | {
"start": 1459,
"end": 1605
} | class ____ {
private InodeTree inodeTree;
private Configuration conf;
private String mtName;
private URI fsUri;
static | TestNestedMountPoint |
java | netty__netty | transport/src/main/java/io/netty/channel/socket/DatagramChannel.java | {
"start": 4520,
"end": 6459
} | interface ____ the specified source and notifies
* the {@link ChannelFuture} once the operation completes.
*
* The given {@link ChannelFuture} will be notified and also returned.
*/
ChannelFuture leaveGroup(
InetAddress multicastAddress, NetworkInterface networkInterface, InetAddress source,
ChannelPromise future);
/**
* Block the given sourceToBlock address for the given multicastAddress on the given networkInterface and notifies
* the {@link ChannelFuture} once the operation completes.
*
* The given {@link ChannelFuture} will be notified and also returned.
*/
ChannelFuture block(
InetAddress multicastAddress, NetworkInterface networkInterface,
InetAddress sourceToBlock);
/**
* Block the given sourceToBlock address for the given multicastAddress on the given networkInterface and notifies
* the {@link ChannelFuture} once the operation completes.
*
* The given {@link ChannelFuture} will be notified and also returned.
*/
ChannelFuture block(
InetAddress multicastAddress, NetworkInterface networkInterface,
InetAddress sourceToBlock, ChannelPromise future);
/**
* Block the given sourceToBlock address for the given multicastAddress and notifies the {@link ChannelFuture} once
* the operation completes.
*
* The given {@link ChannelFuture} will be notified and also returned.
*/
ChannelFuture block(InetAddress multicastAddress, InetAddress sourceToBlock);
/**
* Block the given sourceToBlock address for the given multicastAddress and notifies the {@link ChannelFuture} once
* the operation completes.
*
* The given {@link ChannelFuture} will be notified and also returned.
*/
ChannelFuture block(
InetAddress multicastAddress, InetAddress sourceToBlock, ChannelPromise future);
}
| using |
java | elastic__elasticsearch | x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java | {
"start": 14626,
"end": 30686
} | class ____ extends OptimizerExpressionRule<BinaryLogic> {
public PropagateEquals() {
super(TransformDirection.DOWN);
}
@Override
public Expression rule(BinaryLogic e) {
if (e instanceof And) {
return propagate((And) e);
} else if (e instanceof Or) {
return propagate((Or) e);
}
return e;
}
// combine conjunction
private static Expression propagate(And and) {
List<Range> ranges = new ArrayList<>();
// Only equalities, not-equalities and inequalities with a foldable .right are extracted separately;
// the others go into the general 'exps'.
List<BinaryComparison> equals = new ArrayList<>();
List<NotEquals> notEquals = new ArrayList<>();
List<BinaryComparison> inequalities = new ArrayList<>();
List<Expression> exps = new ArrayList<>();
boolean changed = false;
for (Expression ex : Predicates.splitAnd(and)) {
if (ex instanceof Range) {
ranges.add((Range) ex);
} else if (ex instanceof Equals || ex instanceof NullEquals) {
BinaryComparison otherEq = (BinaryComparison) ex;
// equals on different values evaluate to FALSE
// ignore date/time fields as equality comparison might actually be a range check
if (otherEq.right().foldable() && DataTypes.isDateTime(otherEq.left().dataType()) == false) {
for (BinaryComparison eq : equals) {
if (otherEq.left().semanticEquals(eq.left())) {
Integer comp = BinaryComparison.compare(eq.right().fold(), otherEq.right().fold());
if (comp != null) {
// var cannot be equal to two different values at the same time
if (comp != 0) {
return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN);
}
}
}
}
equals.add(otherEq);
} else {
exps.add(otherEq);
}
} else if (ex instanceof GreaterThan
|| ex instanceof GreaterThanOrEqual
|| ex instanceof LessThan
|| ex instanceof LessThanOrEqual) {
BinaryComparison bc = (BinaryComparison) ex;
if (bc.right().foldable()) {
inequalities.add(bc);
} else {
exps.add(ex);
}
} else if (ex instanceof NotEquals otherNotEq) {
if (otherNotEq.right().foldable()) {
notEquals.add(otherNotEq);
} else {
exps.add(ex);
}
} else {
exps.add(ex);
}
}
// check
for (BinaryComparison eq : equals) {
Object eqValue = eq.right().fold();
for (Iterator<Range> iterator = ranges.iterator(); iterator.hasNext();) {
Range range = iterator.next();
if (range.value().semanticEquals(eq.left())) {
// if equals is outside the interval, evaluate the whole expression to FALSE
if (range.lower().foldable()) {
Integer compare = BinaryComparison.compare(range.lower().fold(), eqValue);
if (compare != null && (
// eq outside the lower boundary
compare > 0 ||
// eq matches the boundary but should not be included
(compare == 0 && range.includeLower() == false))) {
return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN);
}
}
if (range.upper().foldable()) {
Integer compare = BinaryComparison.compare(range.upper().fold(), eqValue);
if (compare != null && (
// eq outside the upper boundary
compare < 0 ||
// eq matches the boundary but should not be included
(compare == 0 && range.includeUpper() == false))) {
return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN);
}
}
// it's in the range and thus, remove it
iterator.remove();
changed = true;
}
}
// evaluate all NotEquals against the Equal
for (Iterator<NotEquals> iter = notEquals.iterator(); iter.hasNext();) {
NotEquals neq = iter.next();
if (eq.left().semanticEquals(neq.left())) {
Integer comp = BinaryComparison.compare(eqValue, neq.right().fold());
if (comp != null) {
if (comp == 0) { // clashing and conflicting: a = 1 AND a != 1
return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN);
} else { // clashing and redundant: a = 1 AND a != 2
iter.remove();
changed = true;
}
}
}
}
// evaluate all inequalities against the Equal
for (Iterator<BinaryComparison> iter = inequalities.iterator(); iter.hasNext();) {
BinaryComparison bc = iter.next();
if (eq.left().semanticEquals(bc.left())) {
Integer compare = BinaryComparison.compare(eqValue, bc.right().fold());
if (compare != null) {
if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { // a = 2 AND a </<= ?
if ((compare == 0 && bc instanceof LessThan) || // a = 2 AND a < 2
0 < compare) { // a = 2 AND a </<= 1
return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN);
}
} else if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { // a = 2 AND a >/>= ?
if ((compare == 0 && bc instanceof GreaterThan) || // a = 2 AND a > 2
compare < 0) { // a = 2 AND a >/>= 3
return new Literal(and.source(), Boolean.FALSE, DataTypes.BOOLEAN);
}
}
iter.remove();
changed = true;
}
}
}
}
return changed ? Predicates.combineAnd(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : and;
}
// combine disjunction:
// a = 2 OR a > 3 -> nop; a = 2 OR a > 1 -> a > 1
// a = 2 OR a < 3 -> a < 3; a = 2 OR a < 1 -> nop
// a = 2 OR 3 < a < 5 -> nop; a = 2 OR 1 < a < 3 -> 1 < a < 3; a = 2 OR 0 < a < 1 -> nop
// a = 2 OR a != 2 -> TRUE; a = 2 OR a = 5 -> nop; a = 2 OR a != 5 -> a != 5
private static Expression propagate(Or or) {
List<Expression> exps = new ArrayList<>();
List<Equals> equals = new ArrayList<>(); // foldable right term Equals
List<NotEquals> notEquals = new ArrayList<>(); // foldable right term NotEquals
List<Range> ranges = new ArrayList<>();
List<BinaryComparison> inequalities = new ArrayList<>(); // foldable right term (=limit) BinaryComparision
// split expressions by type
for (Expression ex : Predicates.splitOr(or)) {
if (ex instanceof Equals eq) {
if (eq.right().foldable()) {
equals.add(eq);
} else {
exps.add(ex);
}
} else if (ex instanceof NotEquals neq) {
if (neq.right().foldable()) {
notEquals.add(neq);
} else {
exps.add(ex);
}
} else if (ex instanceof Range) {
ranges.add((Range) ex);
} else if (ex instanceof BinaryComparison bc) {
if (bc.right().foldable()) {
inequalities.add(bc);
} else {
exps.add(ex);
}
} else {
exps.add(ex);
}
}
boolean updated = false; // has the expression been modified?
// evaluate the impact of each Equal over the different types of Expressions
for (Iterator<Equals> iterEq = equals.iterator(); iterEq.hasNext();) {
Equals eq = iterEq.next();
Object eqValue = eq.right().fold();
boolean removeEquals = false;
// Equals OR NotEquals
for (NotEquals neq : notEquals) {
if (eq.left().semanticEquals(neq.left())) { // a = 2 OR a != ? -> ...
Integer comp = BinaryComparison.compare(eqValue, neq.right().fold());
if (comp != null) {
if (comp == 0) { // a = 2 OR a != 2 -> TRUE
return TRUE;
} else { // a = 2 OR a != 5 -> a != 5
removeEquals = true;
break;
}
}
}
}
if (removeEquals) {
iterEq.remove();
updated = true;
continue;
}
// Equals OR Range
for (int i = 0; i < ranges.size(); i++) { // might modify list, so use index loop
Range range = ranges.get(i);
if (eq.left().semanticEquals(range.value())) {
Integer lowerComp = range.lower().foldable() ? BinaryComparison.compare(eqValue, range.lower().fold()) : null;
Integer upperComp = range.upper().foldable() ? BinaryComparison.compare(eqValue, range.upper().fold()) : null;
if (lowerComp != null && lowerComp == 0) {
if (range.includeLower() == false) { // a = 2 OR 2 < a < ? -> 2 <= a < ?
ranges.set(
i,
new Range(
range.source(),
range.value(),
range.lower(),
true,
range.upper(),
range.includeUpper(),
range.zoneId()
)
);
} // else : a = 2 OR 2 <= a < ? -> 2 <= a < ?
removeEquals = true; // update range with lower equality instead or simply superfluous
break;
} else if (upperComp != null && upperComp == 0) {
if (range.includeUpper() == false) { // a = 2 OR ? < a < 2 -> ? < a <= 2
ranges.set(
i,
new Range(
range.source(),
range.value(),
range.lower(),
range.includeLower(),
range.upper(),
true,
range.zoneId()
)
);
} // else : a = 2 OR ? < a <= 2 -> ? < a <= 2
removeEquals = true; // update range with upper equality instead
break;
} else if (lowerComp != null && upperComp != null) {
if (0 < lowerComp && upperComp < 0) { // a = 2 OR 1 < a < 3
removeEquals = true; // equality is superfluous
break;
}
}
}
}
if (removeEquals) {
iterEq.remove();
updated = true;
continue;
}
// Equals OR Inequality
for (int i = 0; i < inequalities.size(); i++) {
BinaryComparison bc = inequalities.get(i);
if (eq.left().semanticEquals(bc.left())) {
Integer comp = BinaryComparison.compare(eqValue, bc.right().fold());
if (comp != null) {
if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) {
if (comp < 0) { // a = 1 OR a > 2 -> nop
continue;
} else if (comp == 0 && bc instanceof GreaterThan) { // a = 2 OR a > 2 -> a >= 2
inequalities.set(i, new GreaterThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId()));
} // else (0 < comp || bc instanceof GreaterThanOrEqual) :
// a = 3 OR a > 2 -> a > 2; a = 2 OR a => 2 -> a => 2
removeEquals = true; // update range with equality instead or simply superfluous
break;
} else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) {
if (comp > 0) { // a = 2 OR a < 1 -> nop
continue;
}
if (comp == 0 && bc instanceof LessThan) { // a = 2 OR a < 2 -> a <= 2
inequalities.set(i, new LessThanOrEqual(bc.source(), bc.left(), bc.right(), bc.zoneId()));
} // else (comp < 0 || bc instanceof LessThanOrEqual) : a = 2 OR a < 3 -> a < 3; a = 2 OR a <= 2 -> a <= 2
removeEquals = true; // update range with equality instead or simply superfluous
break;
}
}
}
}
if (removeEquals) {
iterEq.remove();
updated = true;
}
}
return updated ? Predicates.combineOr(CollectionUtils.combine(exps, equals, notEquals, inequalities, ranges)) : or;
}
}
public static final | PropagateEquals |
java | apache__logging-log4j2 | log4j-core/src/main/java/org/apache/logging/log4j/core/pattern/AbstractPatternConverter.java | {
"start": 2293,
"end": 2688
} | class ____ should be applied to the LoggingEvent passed as parameter, which can
* be null.
* <p>
* This information is currently used only by HtmlLayout.
* </p>
*
* @param e
* null values are accepted
* @return the name of the conversion pattern
*/
@Override
public String getStyleClass(final Object e) {
return style;
}
}
| that |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/diagnostics/analyzer/NoUniqueBeanDefinitionFailureAnalyzerTests.java | {
"start": 8840,
"end": 9106
} | class ____ {
@Bean
String consumer(ObjectProvider<TestBean> testBeanProvider) {
testBeanProvider.getIfAvailable();
return "foo";
}
}
@Configuration(proxyBeanMethods = false)
@ImportResource("classpath:consumer.xml")
static | ObjectProviderMethodConsumer |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/selector/LabelSelector.java | {
"start": 1573,
"end": 4234
} | class ____<T extends Instance> extends AbstractCmdbSelector<T> {
private static final String TYPE = "label";
/**
* {@link Entity} labels key.
*/
private Set<String> labels;
public Set<String> getLabels() {
return labels;
}
public void setLabels(Set<String> labels) {
this.labels = labels;
}
@Override
protected List<T> doSelect(CmdbContext<T> context) {
if (CollectionUtils.isEmpty(labels)) {
return context.getProviders()
.stream()
.map(CmdbContext.CmdbInstance::getInstance)
.collect(Collectors.toList());
}
CmdbContext.CmdbInstance<T> consumer = context.getConsumer();
Map<String, String> consumerLabels = Optional.ofNullable(consumer.getEntity())
.map(Entity::getLabels)
.orElse(Collections.emptyMap());
// filter the instance if consumer and providers' label values equals.
List<T> result = context.getProviders()
.stream()
.filter(ci -> {
Entity providerEntity = ci.getEntity();
if (Objects.isNull(providerEntity)) {
return false;
}
Map<String, String> providerLabels = Optional.ofNullable(ci.getEntity().getLabels())
.orElse(Collections.emptyMap());
return labels.stream()
.allMatch(label -> {
String consumerLabelValue = consumerLabels.get(label);
if (StringUtils.isBlank(consumerLabelValue)) {
return false;
}
return Objects.equals(consumerLabelValue, providerLabels.get(label));
});
})
.map(CmdbContext.CmdbInstance::getInstance)
.collect(Collectors.toList());
// if none match, then return all providers.
if (CollectionUtils.isEmpty(result)) {
return context.getProviders()
.stream()
.map(CmdbContext.CmdbInstance::getInstance)
.collect(Collectors.toList());
}
return result;
}
@Override
protected void doParse(String expression) throws NacosException {
this.labels = ExpressionInterpreter.parseExpression(expression);
}
@Override
public String getType() {
return TYPE;
}
}
| LabelSelector |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/config/ConfigException.java | {
"start": 959,
"end": 1428
} | class ____ extends KafkaException {
private static final long serialVersionUID = 1L;
public ConfigException(String message) {
super(message);
}
public ConfigException(String name, Object value) {
this(name, value, null);
}
public ConfigException(String name, Object value, String message) {
super("Invalid value " + value + " for configuration " + name + (message == null ? "" : ": " + message));
}
}
| ConfigException |
java | reactor__reactor-core | reactor-core/src/main/java11/reactor/core/publisher/CallSiteSupplierFactory.java | {
"start": 991,
"end": 1048
} | class ____ the call-site extracting on Java 9+.
*/
final | for |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/RuntimeUtils.java | {
"start": 2666,
"end": 10252
} | class ____ {
static final Logger QUERY_LOG = LogManager.getLogger(QueryClient.class);
private RuntimeUtils() {}
public static ActionListener<SearchResponse> searchLogListener(
ActionListener<SearchResponse> listener,
Logger log,
boolean allowPartialResults
) {
return listener.delegateFailureAndWrap((delegate, response) -> {
ShardSearchFailure[] failures = response.getShardFailures();
if (CollectionUtils.isEmpty(failures) == false && allowPartialResults == false) {
delegate.onFailure(new EqlIllegalArgumentException(failures[0].reason(), failures[0].getCause()));
return;
}
if (log.isTraceEnabled()) {
logSearchResponse(response, log);
}
delegate.onResponse(response);
});
}
public static ActionListener<MultiSearchResponse> multiSearchLogListener(
ActionListener<MultiSearchResponse> listener,
boolean allowPartialSearchResults,
Logger log
) {
return listener.delegateFailureAndWrap((delegate, items) -> {
for (MultiSearchResponse.Item item : items) {
Exception failure = item.getFailure();
SearchResponse response = item.getResponse();
if (failure == null) {
if (allowPartialSearchResults == false) {
ShardSearchFailure[] failures = response.getShardFailures();
if (CollectionUtils.isEmpty(failures) == false) {
failure = new EqlIllegalArgumentException(failures[0].reason(), failures[0].getCause());
}
}
}
if (failure != null) {
delegate.onFailure(failure);
return;
}
if (log.isTraceEnabled()) {
logSearchResponse(response, log);
}
}
delegate.onResponse(items);
});
}
private static void logSearchResponse(SearchResponse response, Logger logger) {
List<InternalAggregation> aggs = Collections.emptyList();
if (response.getAggregations() != null) {
aggs = response.getAggregations().asList();
}
StringBuilder aggsNames = new StringBuilder();
for (int i = 0; i < aggs.size(); i++) {
aggsNames.append(aggs.get(i).getName() + (i + 1 == aggs.size() ? "" : ", "));
}
SearchHit[] hits = response.getHits().getHits();
int count = hits != null ? hits.length : 0;
logger.trace(
"Got search response [hits {}, {} aggregations: [{}], {} failed shards, {} skipped shards, "
+ "{} successful shards, {} total shards, took {}, timed out [{}]]",
count,
aggs.size(),
aggsNames,
response.getFailedShards(),
response.getSkippedShards(),
response.getSuccessfulShards(),
response.getTotalShards(),
response.getTook(),
response.isTimedOut()
);
}
public static List<HitExtractor> createExtractor(List<FieldExtraction> fields, EqlConfiguration cfg) {
List<HitExtractor> extractors = new ArrayList<>(fields.size());
for (FieldExtraction fe : fields) {
extractors.add(createExtractor(fe, cfg));
}
return extractors;
}
public static BucketExtractor createBucketExtractor(FieldExtraction ref) {
if (ref instanceof CompositeAggRef aggRef) {
return new CompositeKeyExtractor(aggRef.key(), false);
} else if (ref instanceof ComputedRef computedRef) {
Pipe proc = computedRef.processor();
String hitName = Expressions.name(proc.expression());
return new ComputingExtractor(proc.asProcessor(), hitName);
}
throw new EqlIllegalArgumentException("Unexpected value reference {}", ref.getClass());
}
public static HitExtractor createExtractor(FieldExtraction ref, EqlConfiguration cfg) {
if (ref instanceof SearchHitFieldRef f) {
return new FieldHitExtractor(f.name(), f.getDataType(), cfg.zoneId(), f.hitName(), FULL);
}
if (ref instanceof ComputedRef computedRef) {
Pipe proc = computedRef.processor();
// collect hitNames
Set<String> hitNames = new LinkedHashSet<>();
proc = proc.transformDown(ReferenceInput.class, l -> {
HitExtractor he = createExtractor(l.context(), cfg);
hitNames.add(he.hitName());
if (hitNames.size() > 1) {
throw new EqlClientException("Multi-level nested fields [{}] not supported yet", hitNames);
}
return new HitExtractorInput(l.source(), l.expression(), he);
});
String hitName = null;
if (hitNames.size() == 1) {
hitName = hitNames.iterator().next();
}
return new ComputingExtractor(proc.asProcessor(), hitName);
}
throw new EqlIllegalArgumentException("Unexpected value reference {}", ref.getClass());
}
public static SearchRequest prepareRequest(
SearchSourceBuilder source,
boolean includeFrozen,
boolean allowPartialSearchResults,
String... indices
) {
SearchRequest searchRequest = new SearchRequest();
searchRequest.indices(indices);
searchRequest.source(source);
searchRequest.allowPartialSearchResults(allowPartialSearchResults);
searchRequest.indicesOptions(
includeFrozen ? IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS
);
return searchRequest;
}
/**
* optimized method that adds filter to existing bool queries without additional wrapping
* additionally checks whether the given query exists for safe decoration
*/
public static SearchSourceBuilder combineFilters(SearchSourceBuilder source, QueryBuilder filter) {
var query = Queries.combine(FILTER, Arrays.asList(source.query(), filter));
query = query == null ? boolQuery() : query;
return source.query(query);
}
public static SearchSourceBuilder replaceFilter(
SearchSourceBuilder source,
List<QueryBuilder> oldFilters,
List<QueryBuilder> newFilters
) {
var query = source.query();
query = removeFilters(query, oldFilters);
query = Queries.combine(
FILTER,
org.elasticsearch.xpack.ql.util.CollectionUtils.combine(Collections.singletonList(query), newFilters)
);
query = query == null ? boolQuery() : query;
return source.query(query);
}
public static SearchSourceBuilder wrapAsFilter(SearchSourceBuilder source) {
QueryBuilder query = source.query();
BoolQueryBuilder bool = boolQuery();
if (query != null) {
bool.filter(query);
}
source.query(bool);
return source;
}
public static QueryBuilder removeFilters(QueryBuilder query, List<QueryBuilder> filters) {
if (query instanceof BoolQueryBuilder boolQueryBuilder) {
if (org.elasticsearch.xpack.ql.util.CollectionUtils.isEmpty(filters) == false) {
boolQueryBuilder.filter().removeAll(filters);
}
}
return query;
}
}
| RuntimeUtils |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ReferenceEqualityTest.java | {
"start": 2933,
"end": 3270
} | class ____ extends Sup {
boolean f(Object a, Test b) {
return a == b;
}
}
""")
.doTest();
}
@Test
public void positive_extendsAbstract_equals() {
compilationHelper
.addSourceLines(
"Sup.java",
"""
abstract | Test |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/service/TestServiceLifecycle.java | {
"start": 1326,
"end": 9595
} | class ____ extends ServiceAssert {
private static final Logger LOG =
LoggerFactory.getLogger(TestServiceLifecycle.class);
/**
* Walk the {@link BreakableService} through it's lifecycle,
* more to verify that service's counters work than anything else
* @throws Throwable if necessary
*/
@Test
public void testWalkthrough() throws Throwable {
BreakableService svc = new BreakableService();
assertServiceStateCreated(svc);
assertStateCount(svc, Service.STATE.NOTINITED, 1);
assertStateCount(svc, Service.STATE.INITED, 0);
assertStateCount(svc, Service.STATE.STARTED, 0);
assertStateCount(svc, Service.STATE.STOPPED, 0);
svc.init(new Configuration());
assertServiceStateInited(svc);
assertStateCount(svc, Service.STATE.INITED, 1);
svc.start();
assertServiceStateStarted(svc);
assertStateCount(svc, Service.STATE.STARTED, 1);
svc.stop();
assertServiceStateStopped(svc);
assertStateCount(svc, Service.STATE.STOPPED, 1);
}
/**
* call init twice
* @throws Throwable if necessary
*/
@Test
public void testInitTwice() throws Throwable {
BreakableService svc = new BreakableService();
Configuration conf = new Configuration();
conf.set("test.init","t");
svc.init(conf);
svc.init(new Configuration());
assertStateCount(svc, Service.STATE.INITED, 1);
assertServiceConfigurationContains(svc, "test.init");
}
/**
* Call start twice
* @throws Throwable if necessary
*/
@Test
public void testStartTwice() throws Throwable {
BreakableService svc = new BreakableService();
svc.init(new Configuration());
svc.start();
svc.start();
assertStateCount(svc, Service.STATE.STARTED, 1);
}
/**
* Verify that when a service is stopped more than once, no exception
* is thrown.
* @throws Throwable if necessary
*/
@Test
public void testStopTwice() throws Throwable {
BreakableService svc = new BreakableService();
svc.init(new Configuration());
svc.start();
svc.stop();
assertStateCount(svc, Service.STATE.STOPPED, 1);
svc.stop();
assertStateCount(svc, Service.STATE.STOPPED, 1);
}
/**
* Show that if the service failed during an init
* operation, it stays in the created state, even after stopping it
* @throws Throwable if necessary
*/
@Test
public void testStopFailedInit() throws Throwable {
BreakableService svc = new BreakableService(true, false, false);
assertServiceStateCreated(svc);
try {
svc.init(new Configuration());
fail("Expected a failure, got " + svc);
} catch (BreakableService.BrokenLifecycleEvent e) {
//expected
}
//the service state wasn't passed
assertServiceStateStopped(svc);
assertStateCount(svc, Service.STATE.INITED, 1);
assertStateCount(svc, Service.STATE.STOPPED, 1);
//now try to stop
svc.stop();
assertStateCount(svc, Service.STATE.STOPPED, 1);
}
/**
* Show that if the service failed during an init
* operation, it stays in the created state, even after stopping it
* @throws Throwable if necessary
*/
@Test
public void testStopFailedStart() throws Throwable {
BreakableService svc = new BreakableService(false, true, false);
svc.init(new Configuration());
assertServiceStateInited(svc);
try {
svc.start();
fail("Expected a failure, got " + svc);
} catch (BreakableService.BrokenLifecycleEvent e) {
//expected
}
//the service state wasn't passed
assertServiceStateStopped(svc);
}
/**
* verify that when a service fails during its stop operation,
* its state does not change.
* @throws Throwable if necessary
*/
@Test
public void testFailingStop() throws Throwable {
BreakableService svc = new BreakableService(false, false, true);
svc.init(new Configuration());
svc.start();
try {
svc.stop();
fail("Expected a failure, got " + svc);
} catch (BreakableService.BrokenLifecycleEvent e) {
//expected
}
assertStateCount(svc, Service.STATE.STOPPED, 1);
}
/**
* verify that when a service that is not started is stopped, the
* service enters the stopped state
* @throws Throwable on a failure
*/
@Test
public void testStopUnstarted() throws Throwable {
BreakableService svc = new BreakableService();
svc.stop();
assertServiceStateStopped(svc);
assertStateCount(svc, Service.STATE.INITED, 0);
assertStateCount(svc, Service.STATE.STOPPED, 1);
}
/**
* Show that if the service failed during an init
* operation, stop was called.
*/
@Test
public void testStopFailingInitAndStop() throws Throwable {
BreakableService svc = new BreakableService(true, false, true);
svc.registerServiceListener(new LoggingStateChangeListener());
try {
svc.init(new Configuration());
fail("Expected a failure, got " + svc);
} catch (BreakableService.BrokenLifecycleEvent e) {
assertEquals(Service.STATE.INITED, e.state);
}
//the service state is stopped
assertServiceStateStopped(svc);
assertEquals(Service.STATE.INITED, svc.getFailureState());
Throwable failureCause = svc.getFailureCause();
assertNotNull(failureCause, "Null failure cause in " + svc);
BreakableService.BrokenLifecycleEvent cause =
(BreakableService.BrokenLifecycleEvent) failureCause;
assertNotNull(cause.state, "null state in " + cause + " raised by " + svc);
assertEquals(Service.STATE.INITED, cause.state);
}
@Test
public void testInitNullConf() throws Throwable {
BreakableService svc = new BreakableService(false, false, false);
try {
svc.init(null);
LOG.warn("Null Configurations are permitted ");
} catch (ServiceStateException e) {
//expected
}
}
@Test
public void testServiceNotifications() throws Throwable {
BreakableService svc = new BreakableService(false, false, false);
BreakableStateChangeListener listener = new BreakableStateChangeListener();
svc.registerServiceListener(listener);
svc.init(new Configuration());
assertEventCount(listener, 1);
svc.start();
assertEventCount(listener, 2);
svc.stop();
assertEventCount(listener, 3);
svc.stop();
assertEventCount(listener, 3);
}
/**
* Test that when a service listener is unregistered, it stops being invoked
* @throws Throwable on a failure
*/
@Test
public void testServiceNotificationsStopOnceUnregistered() throws Throwable {
BreakableService svc = new BreakableService(false, false, false);
BreakableStateChangeListener listener = new BreakableStateChangeListener();
svc.registerServiceListener(listener);
svc.init(new Configuration());
assertEventCount(listener, 1);
svc.unregisterServiceListener(listener);
svc.start();
assertEventCount(listener, 1);
svc.stop();
assertEventCount(listener, 1);
svc.stop();
}
/**
* This test uses a service listener that unregisters itself during the callbacks.
* This a test that verifies the concurrency logic on the listener management
* code, that it doesn't throw any immutable state change exceptions
* if you change list membership during the notifications.
* The standard <code>AbstractService</code> implementation copies the list
* to an array in a <code>synchronized</code> block then iterates through
* the copy precisely to prevent this problem.
* @throws Throwable on a failure
*/
@Test
public void testServiceNotificationsUnregisterDuringCallback() throws Throwable {
BreakableService svc = new BreakableService(false, false, false);
BreakableStateChangeListener listener =
new SelfUnregisteringBreakableStateChangeListener();
BreakableStateChangeListener l2 =
new BreakableStateChangeListener();
svc.registerServiceListener(listener);
svc.registerServiceListener(l2);
svc.init(new Configuration());
assertEventCount(listener, 1);
assertEventCount(l2, 1);
svc.unregisterServiceListener(listener);
svc.start();
assertEventCount(listener, 1);
assertEventCount(l2, 2);
svc.stop();
assertEventCount(listener, 1);
svc.stop();
}
private static | TestServiceLifecycle |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/nullness/ExtendsObjectTest.java | {
"start": 1352,
"end": 1579
} | class ____<T extends @NonNull Object> {}
""")
.doTest();
}
@Test
public void extendsParameterWithObjectErasure_noFinding() {
helper
.addInputLines(
"Test.java", //
" | Foo |
java | spring-projects__spring-framework | spring-jdbc/src/main/java/org/springframework/jdbc/support/SQLErrorCodesFactory.java | {
"start": 5059,
"end": 5169
} | class ____.
* <p><b>Not to be overridden by application developers, who should obtain
* instances of this | path |
java | apache__thrift | lib/java/src/main/java/org/apache/thrift/partial/ThriftMetadata.java | {
"start": 9349,
"end": 11088
} | class ____ extends ThriftContainer {
public final ThriftObject keyData;
public final ThriftObject valueData;
ThriftMap(
ThriftObject parent, TFieldIdEnum fieldId, FieldMetaData data, List<ThriftField> fields) {
super(parent, fieldId, data);
this.keyData =
ThriftObject.Factory.createNew(
this,
FieldTypeEnum.MAP_KEY,
new FieldMetaData(
getSubElementName(fieldId, "key"),
TFieldRequirementType.REQUIRED,
((MapMetaData) data.valueMetaData).keyMetaData),
Collections.emptyList());
this.valueData =
ThriftObject.Factory.createNew(
this,
FieldTypeEnum.MAP_VALUE,
new FieldMetaData(
getSubElementName(fieldId, "value"),
TFieldRequirementType.REQUIRED,
((MapMetaData) data.valueMetaData).valueMetaData),
fields);
}
@Override
public boolean hasUnion() {
return (this.keyData instanceof ThriftUnion) || (this.valueData instanceof ThriftUnion);
}
@Override
protected void toPrettyString(StringBuilder sb, int level) {
this.append(sb, "%smap<\n", this.getIndent(level));
this.append(sb, "%skey = {\n", this.getIndent(level + 1));
this.keyData.toPrettyString(sb, level + 2);
this.append(sb, "%s},\n", this.getIndent(level + 1));
this.append(sb, "%svalue = {\n", this.getIndent(level + 1));
this.valueData.toPrettyString(sb, level + 2);
this.append(sb, "%s}\n", this.getIndent(level + 1));
this.append(sb, "%s> %s;\n", this.getIndent(level), this.getName());
}
}
/**
* Base | ThriftMap |
java | apache__camel | components/camel-micrometer/src/main/java/org/apache/camel/component/micrometer/routepolicy/MicrometerRoutePolicy.java | {
"start": 2403,
"end": 3228
} | class ____ extends RoutePolicySupport implements NonManagedService {
private static final Logger LOG = LoggerFactory.getLogger(MicrometerRoutePolicy.class);
private final MicrometerRoutePolicyFactory factory;
private MeterRegistry meterRegistry;
private boolean prettyPrint;
private boolean skipCamelInfo;
private TimeUnit durationUnit = TimeUnit.MILLISECONDS;
private MicrometerRoutePolicyNamingStrategy namingStrategy = MicrometerRoutePolicyNamingStrategy.DEFAULT;
private MicrometerRoutePolicyConfiguration configuration = MicrometerRoutePolicyConfiguration.DEFAULT;
private final Map<Route, MetricsStatistics> statisticsMap = new HashMap<>();
private RouteMetric contextStatistic;
boolean registerKamelets;
boolean registerTemplates = true;
static | MicrometerRoutePolicy |
java | google__auto | common/src/test/java/com/google/auto/common/AnnotationMirrorsTest.java | {
"start": 3262,
"end": 3348
} | class ____ {}
@OuterWithValueArray({@DefaultingOuter})
static | TestValueArrayWithEmpty |
java | quarkusio__quarkus | extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/MtlsWithPemTrustStoreWithHttpServerWithTlsRegistryTest.java | {
"start": 866,
"end": 2681
} | class ____ {
private static final String configuration = """
quarkus.tls.key-store.jks.path=target/certs/grpc-keystore.jks
quarkus.tls.key-store.jks.password=password
quarkus.tls.trust-store.jks.path=target/certs/grpc-server-truststore.jks
quarkus.tls.trust-store.jks.password=password
quarkus.tls.my-client.trust-store.pem.certs=target/certs/grpc-client-ca.crt
quarkus.tls.my-client.key-store.pem.0.cert=target/certs/grpc-client.crt
quarkus.tls.my-client.key-store.pem.0.key=target/certs/grpc-client.key
quarkus.grpc.clients.hello.plain-text=false
quarkus.grpc.clients.hello.tls-configuration-name=my-client
quarkus.grpc.clients.hello.use-quarkus-grpc-client=true
quarkus.grpc.server.use-separate-server=false
quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests
quarkus.http.ssl.client-auth=REQUIRED
quarkus.http.insecure-requests=disabled
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addPackage(HelloWorldTlsEndpoint.class.getPackage())
.addPackage(GreeterGrpc.class.getPackage())
.add(new StringAsset(configuration), "application.properties"));
@GrpcClient("hello")
GreeterGrpc.GreeterBlockingStub blockingHelloService;
@Test
void testClientTlsConfiguration() {
HelloReply reply = blockingHelloService.sayHello(HelloRequest.newBuilder().setName("neo").build());
assertThat(reply.getMessage()).isEqualTo("Hello neo");
}
}
| MtlsWithPemTrustStoreWithHttpServerWithTlsRegistryTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/multipart/MultipartFilenameTest.java | {
"start": 17922,
"end": 18076
} | class ____ {
@FormParam("myFile")
@PartType(APPLICATION_OCTET_STREAM)
public List<File> files;
}
public static | ClientListForm |
java | google__guava | android/guava/src/com/google/common/base/Suppliers.java | {
"start": 16569,
"end": 16925
} | enum ____ implements SupplierFunction<@Nullable Object> {
INSTANCE;
// Note: This makes T a "pass-through type"
@Override
public @Nullable Object apply(Supplier<@Nullable Object> input) {
return input.get();
}
@Override
public String toString() {
return "Suppliers.supplierFunction()";
}
}
}
| SupplierFunctionImpl |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTerms.java | {
"start": 1325,
"end": 4763
} | class ____ extends InternalRareTerms.Bucket<Bucket> {
long term;
public Bucket(long term, long docCount, InternalAggregations aggregations, DocValueFormat format) {
super(docCount, aggregations, format);
this.term = term;
}
/**
* Read from a stream.
*/
public Bucket(StreamInput in, DocValueFormat format) throws IOException {
super(in, format);
term = in.readLong();
}
@Override
protected void writeTermTo(StreamOutput out) throws IOException {
out.writeLong(term);
}
@Override
public String getKeyAsString() {
return format.format(term).toString();
}
@Override
public Object getKey() {
return term;
}
@Override
public int compareKey(Bucket other) {
return Long.compare(term, other.term);
}
@Override
protected final XContentBuilder keyToXContent(XContentBuilder builder) throws IOException {
builder.field(CommonFields.KEY.getPreferredName(), term);
if (format != DocValueFormat.RAW) {
builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), format.format(term).toString());
}
return builder;
}
@Override
public boolean equals(Object obj) {
return super.equals(obj) && Objects.equals(term, ((Bucket) obj).term);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), term);
}
}
LongRareTerms(
String name,
BucketOrder order,
Map<String, Object> metadata,
DocValueFormat format,
List<LongRareTerms.Bucket> buckets,
long maxDocCount,
SetBackedScalingCuckooFilter filter
) {
super(name, order, metadata, format, buckets, maxDocCount, filter);
}
/**
* Read from a stream.
*/
public LongRareTerms(StreamInput in) throws IOException {
super(in, LongRareTerms.Bucket::new);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public LongRareTerms create(List<LongRareTerms.Bucket> buckets) {
return new LongRareTerms(name, order, metadata, format, buckets, maxDocCount, filter);
}
@Override
public LongRareTerms.Bucket createBucket(InternalAggregations aggregations, LongRareTerms.Bucket prototype) {
return new LongRareTerms.Bucket(prototype.term, prototype.getDocCount(), aggregations, prototype.format);
}
@Override
protected LongRareTerms createWithFilter(String name, List<LongRareTerms.Bucket> buckets, SetBackedScalingCuckooFilter filter) {
return new LongRareTerms(name, order, getMetadata(), format, buckets, maxDocCount, filter);
}
@Override
public boolean containsTerm(SetBackedScalingCuckooFilter filter, LongRareTerms.Bucket bucket) {
return filter.mightContain((long) bucket.getKey());
}
@Override
public void addToFilter(SetBackedScalingCuckooFilter filter, LongRareTerms.Bucket bucket) {
filter.add((long) bucket.getKey());
}
@Override
Bucket createBucket(long docCount, InternalAggregations aggs, LongRareTerms.Bucket prototype) {
return new Bucket(prototype.term, docCount, aggs, format);
}
}
| Bucket |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/SlidingWindowedCogroupedKStreamImpl.java | {
"start": 1578,
"end": 4795
} | class ____<K, V> extends AbstractStream<K, V> implements TimeWindowedCogroupedKStream<K, V> {
private final SlidingWindows windows;
private final CogroupedStreamAggregateBuilder<K, V> aggregateBuilder;
private final Map<KGroupedStreamImpl<K, ?>, Aggregator<? super K, ? super Object, V>> groupPatterns;
SlidingWindowedCogroupedKStreamImpl(final SlidingWindows windows,
final InternalStreamsBuilder builder,
final Set<String> subTopologySourceNodes,
final String name,
final CogroupedStreamAggregateBuilder<K, V> aggregateBuilder,
final GraphNode graphNode,
final Map<KGroupedStreamImpl<K, ?>, Aggregator<? super K, ? super Object, V>> groupPatterns) {
super(name, null, null, subTopologySourceNodes, graphNode, builder);
//keySerde and valueSerde are null because there are many different groupStreams that they could be from
this.windows = windows;
this.aggregateBuilder = aggregateBuilder;
this.groupPatterns = groupPatterns;
}
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer) {
return aggregate(initializer, Materialized.with(null, null));
}
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer,
final Materialized<K, V, WindowStore<Bytes, byte[]>> materialized) {
return aggregate(initializer, NamedInternal.empty(), materialized);
}
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer,
final Named named) {
return aggregate(initializer, named, Materialized.with(null, null));
}
@Override
public KTable<Windowed<K>, V> aggregate(final Initializer<V> initializer,
final Named named,
final Materialized<K, V, WindowStore<Bytes, byte[]>> materialized) {
Objects.requireNonNull(initializer, "initializer can't be null");
Objects.requireNonNull(named, "named can't be null");
Objects.requireNonNull(materialized, "materialized can't be null");
final MaterializedInternal<K, V, WindowStore<Bytes, byte[]>> materializedInternal = new MaterializedInternal<>(
materialized,
builder,
CogroupedKStreamImpl.AGGREGATE_NAME);
return aggregateBuilder.build(
groupPatterns,
initializer,
new NamedInternal(named),
new SlidingWindowStoreMaterializer<>(materializedInternal, windows, EmitStrategy.onWindowUpdate()),
materializedInternal.keySerde() != null ?
new FullTimeWindowedSerde<>(materializedInternal.keySerde(), windows.timeDifferenceMs())
: null,
materializedInternal.valueSerde(),
materializedInternal.queryableStoreName(),
windows);
}
}
| SlidingWindowedCogroupedKStreamImpl |
java | apache__camel | test-infra/camel-test-infra-aws-common/src/main/java/org/apache/camel/test/infra/aws/common/services/AWSInfraService.java | {
"start": 974,
"end": 1202
} | interface ____ extends InfrastructureService {
Properties getConnectionProperties();
String amazonAWSHost();
String region();
String protocol();
String accessKey();
String secretKey();
}
| AWSInfraService |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/pubsub/PubSubCommandHandler.java | {
"start": 9034,
"end": 9691
} | class ____<K, V> extends ReplayOutput<K, V> {
Integer multiCount;
String firstElement;
@Override
public void set(ByteBuffer bytes) {
if (firstElement == null && bytes != null && bytes.remaining() > 0) {
bytes.mark();
firstElement = StringCodec.ASCII.decodeKey(bytes);
bytes.reset();
}
super.set(bytes);
}
@Override
public void multi(int count) {
if (multiCount == null) {
multiCount = count;
}
super.multi(count);
}
}
}
| ResponseHeaderReplayOutput |
java | elastic__elasticsearch | libs/exponential-histogram/src/test/java/org/elasticsearch/exponentialhistogram/CompressedExponentialHistogramTests.java | {
"start": 1242,
"end": 3897
} | class ____ extends ExponentialHistogramTestCase {
public void testEncodeDecodeRandomHistogram() throws IOException {
ReleasableExponentialHistogram input = randomHistogramWithDoubleZeroThreshold();
CompressedExponentialHistogram decoded = toCompressedHistogram(input);
assertThat(decoded, equalTo(input));
}
private static CompressedExponentialHistogram toCompressedHistogram(ReleasableExponentialHistogram input) throws IOException {
ByteArrayOutputStream encodedStream = new ByteArrayOutputStream();
CompressedExponentialHistogram.writeHistogramBytes(
encodedStream,
input.scale(),
input.negativeBuckets().iterator(),
input.positiveBuckets().iterator()
);
CompressedExponentialHistogram decoded = new CompressedExponentialHistogram();
byte[] encodedBytes = encodedStream.toByteArray();
decoded.reset(
input.zeroBucket().zeroThreshold(),
input.valueCount(),
input.sum(),
input.min(),
input.max(),
newBytesRef(encodedBytes)
);
return decoded;
}
private ReleasableExponentialHistogram randomHistogramWithDoubleZeroThreshold() {
ExponentialHistogram random = randomHistogram();
// Compressed histogram are lossy for index-based zero thresholds, so ensure we use a double-based one
ReleasableExponentialHistogram input = ExponentialHistogram.builder(random, breaker())
.zeroBucket(ZeroBucket.create(random.zeroBucket().zeroThreshold(), random.zeroBucket().count()))
.build();
autoReleaseOnTestEnd(input);
return input;
}
public void testIteratorCopy() throws IOException {
ReleasableExponentialHistogram input = randomHistogramWithDoubleZeroThreshold();
CompressedExponentialHistogram decoded = toCompressedHistogram(input);
assertThat(decoded, equalTo(input));
CopyableBucketIterator it = decoded.positiveBuckets().iterator();
int skipBuckets = randomIntBetween(0, decoded.positiveBuckets().bucketCount());
for (int i = 0; i < skipBuckets; i++) {
it.advance();
}
BucketIterator copy = it.copy();
while (it.hasNext()) {
assertThat(copy.hasNext(), equalTo(true));
assertThat(copy.peekIndex(), equalTo(it.peekIndex()));
assertThat(copy.peekCount(), equalTo(it.peekCount()));
it.advance();
copy.advance();
}
assertThat(copy.hasNext(), equalTo(false));
}
}
| CompressedExponentialHistogramTests |
java | quarkusio__quarkus | extensions/tls-registry/deployment/src/test/java/io/quarkus/tls/JavaxNetSslTrustStoreProviderTest.java | {
"start": 935,
"end": 7045
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = createConfig();
static QuarkusUnitTest createConfig() {
final Path tsPath = defaultTrustStorePath();
String tsType = System.getProperty("javax.net.ssl.trustStoreType", KeyStore.getDefaultType())
.toLowerCase(Locale.US);
if (tsType.equals("pkcs12")) {
tsType = "p12";
}
final String password = System.getProperty("javax.net.ssl.trustStorePassword", "changeit");
return new QuarkusUnitTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class))
.overrideConfigKey("quarkus.tls.javaNetSslLike.trust-store." + tsType + ".path", tsPath.toString())
.overrideConfigKey("quarkus.tls.javaNetSslLike.trust-store." + tsType + ".password", password);
}
static Path defaultTrustStorePath() {
final String rawTsPath = System.getProperty("javax.net.ssl.trustStore");
if (rawTsPath != null && !rawTsPath.isEmpty()) {
return Path.of(rawTsPath);
}
final String javaHome = System.getProperty("java.home");
if (javaHome == null || javaHome.isEmpty()) {
throw new IllegalStateException(
"Could not locate the default Java truststore because the 'java.home' property is not set");
}
final Path javaHomePath = Path.of(javaHome);
if (!Files.isDirectory(javaHomePath)) {
throw new IllegalStateException("Could not locate the default Java truststore because the 'java.home' path '"
+ javaHome + "' is not a directory");
}
final Path jssecacerts = javaHomePath.resolve("lib/security/jssecacerts");
if (Files.isRegularFile(jssecacerts)) {
return jssecacerts;
}
final Path cacerts = javaHomePath.resolve("lib/security/cacerts");
if (Files.isRegularFile(cacerts)) {
return cacerts;
}
throw new IllegalStateException(
"Could not locate the default Java truststore. Tried javax.net.ssl.trustStore system property, " + jssecacerts
+ " and " + cacerts);
}
@Inject
TlsConfigurationRegistry certificates;
@Inject
Vertx vertx;
@Test
void test() throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException {
TlsConfiguration def = certificates.get("javax.net.ssl").orElseThrow();
assertThat(def.getTrustStoreOptions()).isNotNull();
final KeyStore actualTs = def.getTrustStore();
assertThat(actualTs).isNotNull();
/*
* Get the default trust managers, one of which should be SunJSSE based,
* which in turn should use the same default trust store lookup algo
* like we do in io.quarkus.tls.runtime.JavaNetSslTlsBucketConfig.defaultTrustStorePath()
*/
final TrustManagerFactory trustManagerFactory = TrustManagerFactory
.getInstance(TrustManagerFactory.getDefaultAlgorithm());
trustManagerFactory.init((KeyStore) null);
final List<X509TrustManager> defaultTrustManagers = Stream.of(trustManagerFactory.getTrustManagers())
.filter(m -> m instanceof X509TrustManager)
.map(m -> (X509TrustManager) m)
.collect(Collectors.toList());
assertThat(defaultTrustManagers).hasSizeGreaterThan(0);
final List<String> actualAliases = Collections.list(actualTs.aliases());
assertThat(actualAliases).hasSizeGreaterThan(0);
for (String alias : actualAliases) {
/*
* Get the certs from the trust store loaded by us from $JAVA_HOME/lib/security/cacerts or similar
* and validate those against the default trust managers.
* In that way we make sure indirectly that we have loaded some valid trust material.
*/
final X509Certificate cert = (X509Certificate) actualTs.getCertificate(alias);
CertificateException lastException = null;
boolean passed = false;
for (X509TrustManager tm : defaultTrustManagers) {
try {
tm.checkServerTrusted(new X509Certificate[] { cert }, "RSA");
passed = true;
break;
} catch (CertificateException e) {
lastException = e;
}
}
if (!passed && lastException != null) {
throw lastException;
}
}
}
@Test
void certs() throws Exception {
/*
* The javaNetSslLike named TLS bucket mimics what JavaNetSslTrustStoreProvider does programmatically.
* By asserting that the set of certs they contain are equal, we make sure that JavaNetSslTrustStoreProvider
* behaves correctly.
*/
final TrustManager[] javaNetSslTrustManagers = trustManagers("javax.net.ssl");
final TrustManager[] javaNetSslLikeTrustManagers = trustManagers("javaNetSslLike");
assertThat(javaNetSslTrustManagers.length).isEqualTo(javaNetSslLikeTrustManagers.length);
for (int i = 0; i < javaNetSslTrustManagers.length; i++) {
X509TrustManager javaNetSslTm = (X509TrustManager) javaNetSslTrustManagers[i];
X509TrustManager javaNetSslLikeTm = (X509TrustManager) javaNetSslLikeTrustManagers[i];
assertThat(javaNetSslTm.getAcceptedIssuers().length).isGreaterThan(0);
assertThat(javaNetSslTm.getAcceptedIssuers()).containsExactlyInAnyOrder(javaNetSslLikeTm.getAcceptedIssuers());
}
}
TrustManager[] trustManagers(String key) throws Exception {
final TlsConfiguration javaNetSsl = certificates.get(key).orElseThrow();
final TrustManagerFactory javaNetSslTrustManagerFactory = javaNetSsl.getSSLOptions().getTrustOptions()
.getTrustManagerFactory(vertx);
return javaNetSslTrustManagerFactory.getTrustManagers();
}
}
| JavaxNetSslTrustStoreProviderTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/PropertySourceAnnotationTests.java | {
"start": 18816,
"end": 19119
} | class ____ {
}
@Configuration
@PropertySources({
@PropertySource(name = "psName", value = "classpath:org/springframework/context/annotation/p1.properties"),
@PropertySource(name = "psName", value = "classpath:org/springframework/context/annotation/p2.properties"),
})
static | ResourcePatternConfig |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/ValidatorKey.java | {
"start": 1049,
"end": 1375
} | class ____ extends ValueHolder<String> {
private final DataType type;
public ValidatorKey(DataType type) {
super(type.toString());
this.type = type;
}
public DataType getType() {
return type;
}
@Override
public String toString() {
return get();
}
}
| ValidatorKey |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java | {
"start": 48362,
"end": 49025
} | class ____ {
private final Object actual = ZonedDateTime.now();
@Test
void createAssert() {
// WHEN
TemporalAssert result = TEMPORAL.createAssert(actual);
// THEN
result.isCloseTo(ZonedDateTime.now(), within(10, SECONDS));
}
@Test
void createAssert_with_ValueProvider() {
// GIVEN
ValueProvider<?> valueProvider = mockThatDelegatesTo(type -> actual);
// WHEN
TemporalAssert result = TEMPORAL.createAssert(valueProvider);
// THEN
result.isCloseTo(ZonedDateTime.now(), within(10, SECONDS));
verify(valueProvider).apply(Temporal.class);
}
}
@Nested
| Temporal_Factory |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/streaming/api/operators/source/ProgressiveTimestampsAndWatermarks.java | {
"start": 7650,
"end": 9008
} | class ____<T> extends SourceOutputWithWatermarks<T>
implements ReaderOutput<T> {
private final SplitLocalOutputs<T> splitLocalOutputs;
StreamingReaderOutput(
PushingAsyncDataInput.DataOutput<T> output,
WatermarkOutput watermarkOutput,
TimestampAssigner<T> timestampAssigner,
WatermarkGenerator<T> watermarkGenerator,
SplitLocalOutputs<T> splitLocalOutputs) {
super(output, watermarkOutput, watermarkOutput, timestampAssigner, watermarkGenerator);
this.splitLocalOutputs = splitLocalOutputs;
}
@Override
public SourceOutput<T> createOutputForSplit(String splitId) {
return splitLocalOutputs.createOutputForSplit(splitId);
}
@Override
public void releaseOutputForSplit(String splitId) {
splitLocalOutputs.releaseOutputForSplit(splitId);
}
}
// ------------------------------------------------------------------------
/**
* A holder and factory for split-local {@link SourceOutput}s. The split-local outputs maintain
* local watermark generators with their own state, to facilitate per-split watermarking logic.
*
* @param <T> The type of the emitted records.
*/
private static final | StreamingReaderOutput |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/web/servlet/client/RestTestClient.java | {
"start": 24901,
"end": 25867
} | interface ____<B, S extends BodySpec<B, S>> {
/**
* Assert the extracted body is equal to the given value.
*/
<T extends S> T isEqualTo(@Nullable B expected);
/**
* Assert the extracted body with a {@link Consumer}.
*/
<T extends S> T value(Consumer<@Nullable B> consumer);
/**
* Transform the extracted the body with a function, for example, extracting a
* property, and assert the mapped value with a {@link Consumer}.
*/
<T extends S, R> T value(Function<@Nullable B, @Nullable R> bodyMapper, Consumer<? super @Nullable R> consumer);
/**
* Assert the exchange result with the given {@link Consumer}.
*/
<T extends S> T consumeWith(Consumer<EntityExchangeResult<B>> consumer);
/**
* Exit the chained API and return an {@code EntityExchangeResult} with the
* decoded response content.
*/
EntityExchangeResult<B> returnResult();
}
/**
* Spec for expectations on the response body content.
*/
| BodySpec |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/admin/internals/AllBrokersStrategy.java | {
"start": 2135,
"end": 4448
} | class ____ implements AdminApiLookupStrategy<AllBrokersStrategy.BrokerKey> {
public static final BrokerKey ANY_BROKER = new BrokerKey(OptionalInt.empty());
public static final Set<BrokerKey> LOOKUP_KEYS = Collections.singleton(ANY_BROKER);
private static final ApiRequestScope SINGLE_REQUEST_SCOPE = new ApiRequestScope() {
};
private final Logger log;
public AllBrokersStrategy(
LogContext logContext
) {
this.log = logContext.logger(AllBrokersStrategy.class);
}
@Override
public ApiRequestScope lookupScope(BrokerKey key) {
return SINGLE_REQUEST_SCOPE;
}
@Override
public MetadataRequest.Builder buildRequest(Set<BrokerKey> keys) {
validateLookupKeys(keys);
// Send empty `Metadata` request. We are only interested in the brokers from the response
return new MetadataRequest.Builder(new MetadataRequestData());
}
@Override
public LookupResult<BrokerKey> handleResponse(Set<BrokerKey> keys, AbstractResponse abstractResponse) {
validateLookupKeys(keys);
MetadataResponse response = (MetadataResponse) abstractResponse;
MetadataResponseData.MetadataResponseBrokerCollection brokers = response.data().brokers();
if (brokers.isEmpty()) {
log.debug("Metadata response contained no brokers. Will backoff and retry");
return LookupResult.empty();
} else {
log.debug("Discovered all brokers {} to send requests to", brokers);
}
Map<BrokerKey, Integer> brokerKeys = brokers.stream().collect(Collectors.toMap(
broker -> new BrokerKey(OptionalInt.of(broker.nodeId())),
MetadataResponseData.MetadataResponseBroker::nodeId
));
return new LookupResult<>(
Collections.singletonList(ANY_BROKER),
Collections.emptyMap(),
brokerKeys
);
}
private void validateLookupKeys(Set<BrokerKey> keys) {
if (keys.size() != 1) {
throw new IllegalArgumentException("Unexpected key set: " + keys);
}
BrokerKey key = keys.iterator().next();
if (key != ANY_BROKER) {
throw new IllegalArgumentException("Unexpected key set: " + keys);
}
}
public static | AllBrokersStrategy |
java | assertj__assertj-core | assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/class_/ClassAssert_isNotSealed_Test.java | {
"start": 1192,
"end": 2446
} | class ____ {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
Class<?> actual = null;
// WHEN
var error = expectAssertionError(() -> assertThat(actual).isNotSealed());
// THEN
then(error).hasMessage(shouldNotBeNull().create());
}
@Test
void should_fail_if_actual_is_sealed() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(SealedClass.class).isNotSealed());
// THEN
then(assertionError).hasMessage(shouldNotBeSealed(SealedClass.class).create());
}
@ParameterizedTest
@MethodSource("nonSealed")
void should_pass_if_actual_is_not_sealed(Class<?> actual) {
// WHEN/THEN
assertThat(actual).isNotSealed();
}
private static Stream<Class<?>> nonSealed() {
return Stream.of(NonSealedClass.class,
Object.class,
List.class,
Object[].class,
Boolean.TYPE,
Byte.TYPE,
Character.TYPE,
Double.TYPE,
Float.TYPE,
Integer.TYPE,
Long.TYPE,
Short.TYPE,
Void.TYPE);
}
private static sealed | ClassAssert_isNotSealed_Test |
java | square__okhttp | samples/slack/src/main/java/okhttp3/slack/RtmSession.java | {
"start": 815,
"end": 2413
} | class ____ extends WebSocketListener implements Closeable {
private final SlackApi slackApi;
/** Guarded by this. */
private WebSocket webSocket;
public RtmSession(SlackApi slackApi) {
this.slackApi = slackApi;
}
public void open(String accessToken) throws IOException {
if (webSocket != null) throw new IllegalStateException();
RtmStartResponse rtmStartResponse = slackApi.rtmStart(accessToken);
webSocket = slackApi.rtm(rtmStartResponse.url, this);
}
// TODO(jwilson): can I read the response body? Do I have to?
// the body from slack is a 0-byte-buffer
@Override public synchronized void onOpen(WebSocket webSocket, Response response) {
System.out.println("onOpen: " + response);
}
// TOOD(jwilson): decode incoming messages and dispatch them somewhere.
@Override public void onMessage(WebSocket webSocket, String text) {
System.out.println("onMessage: " + text);
}
@Override public void onClosing(WebSocket webSocket, int code, String reason) {
webSocket.close(1000, null);
System.out.println("onClose (" + code + "): " + reason);
}
@Override public void onFailure(WebSocket webSocket, Throwable t, Response response) {
// TODO(jwilson): can I read the response body? Do I have to?
System.out.println("onFailure " + response);
}
@Override public void close() throws IOException {
if (webSocket == null) return;
WebSocket webSocket;
synchronized (this) {
webSocket = this.webSocket;
}
if (webSocket != null) {
webSocket.close(1000, "bye");
}
}
}
| RtmSession |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/issue_1000/Issue1089.java | {
"start": 162,
"end": 425
} | class ____ extends TestCase {
public void test_for_issue() throws Exception {
String json = "{\"ab\":123,\"a_b\":456}";
TestBean tb = JSON.parseObject(json, TestBean.class);
assertEquals(123, tb.getAb());
}
public static | Issue1089 |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestQueryUserActionTests.java | {
"start": 1974,
"end": 8338
} | class ____ extends ESTestCase {
private final XPackLicenseState mockLicenseState = mock(XPackLicenseState.class);
@Override
protected NamedXContentRegistry xContentRegistry() {
final SearchModule searchModule = new SearchModule(Settings.EMPTY, List.of());
return new NamedXContentRegistry(searchModule.getNamedXContents());
}
public void testQueryParsing() throws Exception {
final String query1 = """
{
"query": {
"bool": {
"must": [
{
"terms": {
"username": [ "bart", "homer" ]
}
}
],
"should": [ { "prefix": { "username": "ba" } } ]
}
}
}""";
final FakeRestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent(
new BytesArray(query1),
XContentType.JSON
).build();
final SetOnce<RestResponse> responseSetOnce = new SetOnce<>();
final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
@Override
public void sendResponse(RestResponse restResponse) {
responseSetOnce.set(restResponse);
}
};
try (var threadPool = createThreadPool()) {
final var client = new NodeClient(Settings.EMPTY, threadPool, TestProjectResolvers.alwaysThrow()) {
@SuppressWarnings("unchecked")
@Override
public <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
ActionType<Response> action,
Request request,
ActionListener<Response> listener
) {
QueryUserRequest queryUserRequest = (QueryUserRequest) request;
final QueryBuilder queryBuilder = queryUserRequest.getQueryBuilder();
assertNotNull(queryBuilder);
assertThat(queryBuilder.getClass(), is(BoolQueryBuilder.class));
final BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder;
assertTrue(boolQueryBuilder.filter().isEmpty());
assertTrue(boolQueryBuilder.mustNot().isEmpty());
assertThat(boolQueryBuilder.must(), hasSize(1));
final QueryBuilder mustQueryBuilder = boolQueryBuilder.must().get(0);
assertThat(mustQueryBuilder.getClass(), is(TermsQueryBuilder.class));
assertThat(((TermsQueryBuilder) mustQueryBuilder).fieldName(), equalTo("username"));
assertThat(boolQueryBuilder.should(), hasSize(1));
final QueryBuilder shouldQueryBuilder = boolQueryBuilder.should().get(0);
assertThat(shouldQueryBuilder.getClass(), is(PrefixQueryBuilder.class));
assertThat(((PrefixQueryBuilder) shouldQueryBuilder).fieldName(), equalTo("username"));
listener.onResponse((Response) new QueryUserResponse(0, List.of()));
}
};
final RestQueryUserAction restQueryUserAction = new RestQueryUserAction(Settings.EMPTY, mockLicenseState);
restQueryUserAction.handleRequest(restRequest, restChannel, client);
}
assertNotNull(responseSetOnce.get());
}
public void testParsingSearchParameters() throws Exception {
final String requestBody = """
{
"query": {
"match_all": {}
},
"from": 42,
"size": 20,
"sort": [ "username", "full_name"],
"search_after": [ "bart" ]
}""";
final FakeRestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent(
new BytesArray(requestBody),
XContentType.JSON
).build();
final SetOnce<RestResponse> responseSetOnce = new SetOnce<>();
final RestChannel restChannel = new AbstractRestChannel(restRequest, randomBoolean()) {
@Override
public void sendResponse(RestResponse restResponse) {
responseSetOnce.set(restResponse);
}
};
try (var threadPool = createThreadPool()) {
final var client = new NodeClient(Settings.EMPTY, threadPool, TestProjectResolvers.alwaysThrow()) {
@SuppressWarnings("unchecked")
@Override
public <Request extends ActionRequest, Response extends ActionResponse> void doExecute(
ActionType<Response> action,
Request request,
ActionListener<Response> listener
) {
QueryUserRequest queryUserRequest = (QueryUserRequest) request;
final QueryBuilder queryBuilder = queryUserRequest.getQueryBuilder();
assertNotNull(queryBuilder);
assertThat(queryBuilder.getClass(), is(MatchAllQueryBuilder.class));
assertThat(queryUserRequest.getFrom(), equalTo(42));
assertThat(queryUserRequest.getSize(), equalTo(20));
final List<FieldSortBuilder> fieldSortBuilders = queryUserRequest.getFieldSortBuilders();
assertThat(fieldSortBuilders, hasSize(2));
assertThat(fieldSortBuilders.get(0), equalTo(new FieldSortBuilder("username")));
assertThat(fieldSortBuilders.get(1), equalTo(new FieldSortBuilder("full_name")));
final SearchAfterBuilder searchAfterBuilder = queryUserRequest.getSearchAfterBuilder();
assertThat(searchAfterBuilder, equalTo(new SearchAfterBuilder().setSortValues(new String[] { "bart" })));
listener.onResponse((Response) new QueryUserResponse(0, List.of()));
}
};
final RestQueryUserAction queryUserAction = new RestQueryUserAction(Settings.EMPTY, mockLicenseState);
queryUserAction.handleRequest(restRequest, restChannel, client);
}
assertNotNull(responseSetOnce.get());
}
}
| RestQueryUserActionTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java | {
"start": 1898,
"end": 4595
} | class ____ implements ToXContent, Iterable<ContextMapping<?>> {
private final List<ContextMapping<?>> contextMappings;
private final Map<String, ContextMapping<?>> contextNameMap;
public ContextMappings(List<ContextMapping<?>> contextMappings) {
if (contextMappings.size() > 255) {
// we can support more, but max of 255 (1 byte) unique context types per suggest field
// seems reasonable?
throw new UnsupportedOperationException("Maximum of 10 context types are supported was: " + contextMappings.size());
}
this.contextMappings = contextMappings;
contextNameMap = Maps.newMapWithExpectedSize(contextMappings.size());
for (ContextMapping<?> mapping : contextMappings) {
contextNameMap.put(mapping.name(), mapping);
}
}
/**
* @return number of context mappings
* held by this instance
*/
public int size() {
return contextMappings.size();
}
/**
* Returns a context mapping by its name
*/
public ContextMapping<?> get(String name) {
ContextMapping<?> contextMapping = contextNameMap.get(name);
if (contextMapping == null) {
List<String> keys = new ArrayList<>(contextNameMap.keySet());
Collections.sort(keys);
throw new IllegalArgumentException("Unknown context name [" + name + "], must be one of " + keys.toString());
}
return contextMapping;
}
/**
* Adds a context-enabled field for all the defined mappings to <code>document</code>
* see {@link org.elasticsearch.search.suggest.completion.context.ContextMappings.TypedContextField}
*/
public void addField(LuceneDocument document, String name, String input, int weight, Map<String, Set<String>> contexts) {
document.add(new TypedContextField(name, input, weight, contexts, document));
}
@Override
public Iterator<ContextMapping<?>> iterator() {
return contextMappings.iterator();
}
/**
* Field prepends context values with a suggestion
* Context values are associated with a type, denoted by
* a type id, which is prepended to the context value.
*
* Every defined context mapping yields a unique type id (index of the
* corresponding context mapping in the context mappings list)
* for all its context values
*
* The type, context and suggestion values are encoded as follows:
* <p>
* TYPE_ID | CONTEXT_VALUE | CONTEXT_SEP | SUGGESTION_VALUE
* </p>
*
* Field can also use values of other indexed fields as contexts
* at index time
*/
private | ContextMappings |
java | reactor__reactor-core | reactor-core/src/blockHoundTest/java/reactor/core/scheduler/BoundedElasticSchedulerBlockhoundTest.java | {
"start": 1542,
"end": 4336
} | class ____ {
private static final Logger LOGGER = Loggers.getLogger(BoundedElasticSchedulerBlockhoundTest.class);
private Disposable.Composite toDispose;
@BeforeAll
static void setup() {
BlockHound.install();
}
@BeforeEach
void setupComposite() {
toDispose = Disposables.composite();
}
@AfterEach
void dispose() {
toDispose.dispose();
}
<T extends Disposable> T autoDispose(T disposable) {
toDispose.add(disposable);
return disposable;
}
@Test
void smokeTestBlockhound() throws InterruptedException, TimeoutException {
try {
FutureTask<?> task = new FutureTask<>(() -> {
Thread.sleep(0);
return "";
});
Schedulers.parallel().schedule(task);
task.get(10, TimeUnit.SECONDS);
Assertions.fail("should fail");
} catch (ExecutionException e) {
assertThat(e).hasCauseInstanceOf(BlockingOperationError.class);
}
}
//see https://github.com/reactor/reactor-core/issues/2143
@Test
void shouldNotReportBlockingCallWithZoneIdUsage() throws Throwable {
FutureTask<Disposable> testTask = new FutureTask<>(() -> new BoundedElasticScheduler(1, 1, Thread::new, 1));
Schedulers.single().schedule(testTask);
//automatically re-throw in case of blocking call, in which case the scheduler hasn't been created so there is no leak.
testTask.get().dispose();
}
@RepeatedTest(3) //we got false positives from time to time. with repeat(3), only 1 block out of 500 was a false positive (with a total of 75 false positives out of 1500 runs)
void shouldNotReportEnsureQueueCapacity() {
BoundedElasticScheduler scheduler = autoDispose(new BoundedElasticScheduler(1, 100, Thread::new, 1));
scheduler.init();
ExecutorServiceWorker worker = (ExecutorServiceWorker) autoDispose(scheduler.createWorker());
BoundedElasticScheduler.BoundedScheduledExecutorService executor =
(BoundedElasticScheduler.BoundedScheduledExecutorService) worker.exec;
AtomicReference<Throwable> error = new AtomicReference<>();
Runnable runnable = () -> {
try {
executor.ensureQueueCapacity(1);
}
catch (final Throwable t) {
error.updateAndGet(current -> {
if (current == null) return t;
List<Throwable> multiple = new ArrayList<>(Exceptions.unwrapMultiple(current));
multiple.add(t);
return Exceptions.multiple(multiple);
});
}
};
Scheduler sch = autoDispose(Schedulers.newParallel("test", 10));
RaceTestUtils.race(sch,
runnable,
runnable,
runnable,
runnable,
runnable,
runnable,
runnable,
runnable,
runnable,
runnable);
//assertions don't really show the stack trace unless we modify a global config
//so here we simply throw the composite if there is one
if (error.get() != null) throw Exceptions.propagate(error.get());
}
}
| BoundedElasticSchedulerBlockhoundTest |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/dev/CompilerFlags.java | {
"start": 387,
"end": 3263
} | class ____ {
private final Set<String> defaultFlags;
private final List<String> userFlags;
private final String releaseJavaVersion; //can be null
private final String sourceJavaVersion; //can be null
private final String targetJavaVersion; //can be null
private final List<String> annotationProcessors; //can be null
public CompilerFlags(
Set<String> defaultFlags,
Collection<String> userFlags,
String releaseJavaVersion,
String sourceJavaVersion,
String targetJavaVersion,
List<String> annotationProcessors) {
this.defaultFlags = defaultFlags == null ? new LinkedHashSet<>() : new LinkedHashSet<>(defaultFlags);
this.userFlags = userFlags == null ? new ArrayList<>() : new ArrayList<>(userFlags);
this.releaseJavaVersion = releaseJavaVersion;
this.sourceJavaVersion = sourceJavaVersion;
this.targetJavaVersion = targetJavaVersion;
this.annotationProcessors = annotationProcessors;
}
public List<String> toList() {
List<String> flagList = new ArrayList<>();
// The set of effective default flags is the set of default flags except the ones also
// set by the user. This ensures that we do not needlessly pass the default flags twice.
Set<String> effectiveDefaultFlags = new LinkedHashSet<>(this.defaultFlags);
effectiveDefaultFlags.removeAll(userFlags);
flagList.addAll(effectiveDefaultFlags);
// Prefer --release over -source and -target flags to make sure to not run into:
// "error: option --source cannot be used together with --release"
// This is *not* checking defaultFlags; it is not expected that defaultFlags ever contain --release etc.!
if (releaseJavaVersion != null) {
flagList.add("--release");
flagList.add(releaseJavaVersion);
} else {
if (sourceJavaVersion != null) {
flagList.add("-source");
flagList.add(sourceJavaVersion);
}
if (targetJavaVersion != null) {
flagList.add("-target");
flagList.add(targetJavaVersion);
}
}
if (annotationProcessors != null && !annotationProcessors.isEmpty()) {
flagList.add("-processor");
flagList.add(String.join(",", annotationProcessors));
}
flagList.addAll(userFlags);
return flagList;
}
@Override
public int hashCode() {
return toList().hashCode();
}
@Override
public boolean equals(Object obj) {
return obj instanceof CompilerFlags && toList().equals(((CompilerFlags) obj).toList());
}
@Override
public String toString() {
return "CompilerFlags@{" + String.join(", ", toList()) + "}";
}
}
| CompilerFlags |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/AsyncWaitOperatorTest.java | {
"start": 10998,
"end": 11861
} | class ____
implements AsyncFunction<Integer, Integer> {
static final AtomicBoolean TIMED_OUT = new AtomicBoolean(false);
static final CountDownLatch COMPLETION_TRIGGER = new CountDownLatch(1);
@Override
public void asyncInvoke(Integer input, ResultFuture<Integer> resultFuture) {
ForkJoinPool.commonPool()
.submit(
() -> {
COMPLETION_TRIGGER.await();
resultFuture.complete(Collections.singletonList(input));
return null;
});
}
@Override
public void timeout(Integer input, ResultFuture<Integer> resultFuture) {
TIMED_OUT.set(true);
}
}
private static | TimeoutAfterCompletionTestFunction |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/BasicRestClientTest.java | {
"start": 555,
"end": 2789
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(HelloClient.class, HelloResource.class, TestBean.class, HelloClient2.class,
HelloNonSimpleClient.class, TestJacksonBasicMessageBodyReader.class,
TestJacksonBasicMessageBodyWriter.class))
.withConfigurationResource("basic-test-application.properties");
@Inject
TestBean testBean;
@Test
void shouldHello() {
assertThat(testBean.helloViaBuiltClient("w0rld")).isEqualTo("hello, w0rld");
}
@Test
void shouldHelloThroughInjectedClient() {
assertThat(testBean.helloViaInjectedClient("wor1d")).isEqualTo("hello, wor1d");
}
@Test
void shouldIgnoreNonBodyParams() {
assertThat(testBean.helloViaInjectedClientIgnoreParams("wor1d")).isEqualTo("hello, wor1d");
}
@Test
void shouldHaveApplicationScopeByDefault() {
BeanManager beanManager = Arc.container().beanManager();
Set<Bean<?>> beans = beanManager.getBeans(HelloClient2.class, RestClient.LITERAL);
Bean<?> resolvedBean = beanManager.resolve(beans);
assertThat(resolvedBean.getScope()).isEqualTo(ApplicationScoped.class);
}
@Test
void shouldInvokeClientResponseOnSameContext() {
assertThat(testBean.bug18977()).isEqualTo("Hello");
}
@Test
void shouldHelloBytes() {
assertThat(testBean.helloNonSimpleSyncBytes()).isEqualTo(new byte[] { 1, 2, 3 });
}
@Test
void shouldHelloInts() {
assertThat(testBean.helloNonSimpleSyncInts()).isEqualTo(new Integer[] { 1, 2, 3 });
}
@Test
void shouldMapQueryParamsWithSpecialCharacters() {
Map<String, String> map = testBean.helloQueryParamsToMap();
assertThat(map).size().isEqualTo(6);
assertThat(map.get("p1")).isEqualTo("1");
assertThat(map.get("p2")).isEqualTo("2");
assertThat(map.get("p3")).isEqualTo("3");
assertThat(map.get("p4")).isEqualTo("4");
assertThat(map.get("p5")).isEqualTo("5");
assertThat(map.get("p6")).isEqualTo("6");
}
}
| BasicRestClientTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/file/FileAssert_hasSameTextualContentAs_Test.java | {
"start": 1219,
"end": 2442
} | class ____ extends FileAssertBaseTest {
private static File expected;
@BeforeAll
static void beforeOnce() {
expected = new File("xyz");
}
@Override
protected FileAssert invoke_api_method() {
return assertions.hasSameTextualContentAs(expected);
}
@Override
protected void verify_internal_effects() {
verify(files).assertSameContentAs(getInfo(assertions), getActual(assertions), defaultCharset, expected, defaultCharset);
}
@Test
void should_use_charset_specified_by_usingCharset_to_read_actual_file_content() throws Exception {
// GIVEN
File actual = createTempFileWithContent("Gerçek", TURKISH_CHARSET);
File expected = createTempFileWithContent("Gerçek", defaultCharset);
// WHEN/THEN
then(actual).usingCharset(TURKISH_CHARSET).hasSameTextualContentAs(expected);
}
@Test
void should_allow_charset_to_be_specified_for_reading_expected_file_content() throws Exception {
// GIVEN
File actual = createTempFileWithContent("Gerçek", defaultCharset);
File expected = createTempFileWithContent("Gerçek", TURKISH_CHARSET);
// WHEN/THEN
then(actual).hasSameTextualContentAs(expected, TURKISH_CHARSET);
}
}
| FileAssert_hasSameTextualContentAs_Test |
java | junit-team__junit5 | junit-jupiter-api/src/main/java/org/junit/jupiter/api/MethodOrdererContext.java | {
"start": 776,
"end": 1815
} | class ____ this context.
*
* @return the test class; never {@code null}
*/
Class<?> getTestClass();
/**
* Get the list of {@linkplain MethodDescriptor method descriptors} to
* order.
*
* @return the list of method descriptors; never {@code null}
*/
List<? extends MethodDescriptor> getMethodDescriptors();
/**
* Get the configuration parameter stored under the specified {@code key}.
*
* <p>If no such key is present in the {@code ConfigurationParameters} for
* the JUnit Platform, an attempt will be made to look up the value as a
* JVM system property. If no such system property exists, an attempt will
* be made to look up the value in the JUnit Platform properties file.
*
* @param key the key to look up; never {@code null} or blank
* @return an {@code Optional} containing the value; never {@code null}
* but potentially empty
*
* @see System#getProperty(String)
* @see org.junit.platform.engine.ConfigurationParameters
*/
Optional<String> getConfigurationParameter(String key);
}
| for |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/GenericToManyAssociationTest.java | {
"start": 4852,
"end": 5099
} | class ____ extends AbstractParent<Child, String> {
@Id
private Long id;
public Parent() {
}
public Parent(Long id) {
this.id = id;
}
public Long getId() {
return this.id;
}
}
@MappedSuperclass
public abstract static | Parent |
java | apache__kafka | jmh-benchmarks/src/main/java/org/apache/kafka/jmh/log/StressTestLog.java | {
"start": 5693,
"end": 6911
} | class ____ extends WorkerThread {
private final UnifiedLog log;
public WriterThread(UnifiedLog log) {
this.log = log;
}
@Override
protected void work() throws Exception {
byte[] value = Long.toString(currentOffset).getBytes(StandardCharsets.UTF_8);
MemoryRecords records = TestUtils.singletonRecords(value,
null,
Compression.NONE,
RecordBatch.NO_TIMESTAMP,
RecordBatch.CURRENT_MAGIC_VALUE);
LogAppendInfo logAppendInfo = log.appendAsLeader(records,
0,
AppendOrigin.CLIENT,
RequestLocal.noCaching(),
VerificationGuard.SENTINEL,
TV_UNKNOWN);
if ((logAppendInfo.firstOffset() != -1 && logAppendInfo.firstOffset() != currentOffset)
|| logAppendInfo.lastOffset() != currentOffset) {
throw new RuntimeException("Offsets do not match");
}
currentOffset++;
if (currentOffset % 1000 == 0) {
Thread.sleep(50);
}
}
}
static | WriterThread |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/connector/source/lib/NumberSequenceSource.java | {
"start": 2994,
"end": 6492
} | class ____
implements Source<
Long,
NumberSequenceSource.NumberSequenceSplit,
Collection<NumberSequenceSource.NumberSequenceSplit>>,
ResultTypeQueryable<Long> {
private static final long serialVersionUID = 1L;
/** The starting number in the sequence, inclusive. */
private final long from;
/** The end number in the sequence, inclusive. */
private final long to;
/**
* Creates a new {@code NumberSequenceSource} that produces parallel sequences covering the
* range {@code from} to {@code to} (both boundaries are inclusive).
*/
public NumberSequenceSource(long from, long to) {
checkArgument(from <= to, "'from' must be <= 'to'");
this.from = from;
this.to = to;
}
public long getFrom() {
return from;
}
public long getTo() {
return to;
}
// ------------------------------------------------------------------------
// source methods
// ------------------------------------------------------------------------
@Override
public TypeInformation<Long> getProducedType() {
return Types.LONG;
}
@Override
public Boundedness getBoundedness() {
return Boundedness.BOUNDED;
}
@Override
public SourceReader<Long, NumberSequenceSplit> createReader(SourceReaderContext readerContext) {
return new IteratorSourceReader<>(readerContext);
}
@Override
public SplitEnumerator<NumberSequenceSplit, Collection<NumberSequenceSplit>> createEnumerator(
final SplitEnumeratorContext<NumberSequenceSplit> enumContext) {
final List<NumberSequenceSplit> splits =
splitNumberRange(from, to, enumContext.currentParallelism());
return new IteratorSourceEnumerator<>(enumContext, splits);
}
@Override
public SplitEnumerator<NumberSequenceSplit, Collection<NumberSequenceSplit>> restoreEnumerator(
final SplitEnumeratorContext<NumberSequenceSplit> enumContext,
Collection<NumberSequenceSplit> checkpoint) {
return new IteratorSourceEnumerator<>(enumContext, checkpoint);
}
@Override
public SimpleVersionedSerializer<NumberSequenceSplit> getSplitSerializer() {
return new SplitSerializer();
}
@Override
public SimpleVersionedSerializer<Collection<NumberSequenceSplit>>
getEnumeratorCheckpointSerializer() {
return new CheckpointSerializer();
}
protected List<NumberSequenceSplit> splitNumberRange(long from, long to, int numSplits) {
final NumberSequenceIterator[] subSequences =
new NumberSequenceIterator(from, to).split(numSplits);
final ArrayList<NumberSequenceSplit> splits = new ArrayList<>(subSequences.length);
int splitId = 1;
for (NumberSequenceIterator seq : subSequences) {
if (seq.hasNext()) {
splits.add(
new NumberSequenceSplit(
String.valueOf(splitId++), seq.getCurrent(), seq.getTo()));
}
}
return splits;
}
// ------------------------------------------------------------------------
// splits & checkpoint
// ------------------------------------------------------------------------
/** A split of the source, representing a number sub-sequence. */
public static | NumberSequenceSource |
java | google__dagger | javatests/dagger/functional/membersinject/MembersInjectTest.java | {
"start": 3717,
"end": 3773
} | class ____ {
@Inject String value;
}
@Component
| C |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/PackageScanClassResolver.java | {
"start": 2877,
"end": 2970
} | interface ____,
* subclasses will be collected.
*
* @param parent the | class |
java | google__dagger | javatests/dagger/functional/builder/PrivateConstructorsTest.java | {
"start": 1159,
"end": 1454
} | interface ____ {
// M should not be required, even though the constructor is inaccessible
C build();
}
}
@Test
public void componentTest() {
C component = DaggerPrivateConstructorsTest_C.builder().build();
assertThat(component.string()).isEqualTo("str");
}
}
| Builder |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstractionResolver.java | {
"start": 1774,
"end": 19013
} | class ____ {
private final IndexNameExpressionResolver indexNameExpressionResolver;
public IndexAbstractionResolver(IndexNameExpressionResolver indexNameExpressionResolver) {
this.indexNameExpressionResolver = indexNameExpressionResolver;
}
public ResolvedIndexExpressions resolveIndexAbstractions(
final List<String> indices,
final IndicesOptions indicesOptions,
final ProjectMetadata projectMetadata,
final Function<IndexComponentSelector, Set<String>> allAuthorizedAndAvailableBySelector,
final BiPredicate<String, IndexComponentSelector> isAuthorized,
final boolean includeDataStreams
) {
final ResolvedIndexExpressions.Builder resolvedExpressionsBuilder = ResolvedIndexExpressions.builder();
boolean wildcardSeen = false;
for (String originalIndexExpression : indices) {
wildcardSeen = resolveIndexAbstraction(
resolvedExpressionsBuilder,
originalIndexExpression,
originalIndexExpression, // in the case of local resolution, the local expression is always the same as the original
indicesOptions,
projectMetadata,
allAuthorizedAndAvailableBySelector,
isAuthorized,
includeDataStreams,
Set.of(),
wildcardSeen
);
}
return resolvedExpressionsBuilder.build();
}
public ResolvedIndexExpressions resolveIndexAbstractions(
final List<String> indices,
final IndicesOptions indicesOptions,
final ProjectMetadata projectMetadata,
final Function<IndexComponentSelector, Set<String>> allAuthorizedAndAvailableBySelector,
final BiPredicate<String, IndexComponentSelector> isAuthorized,
final TargetProjects targetProjects,
final boolean includeDataStreams,
@Nullable final String projectRouting
) {
if (targetProjects == TargetProjects.LOCAL_ONLY_FOR_CPS_DISABLED) {
final String message = "cannot resolve indices cross project if target set is local only";
assert false : message;
throw new IllegalArgumentException(message);
}
final String originProjectAlias = targetProjects.originProjectAlias();
final Set<String> linkedProjectAliases = targetProjects.allProjectAliases();
final ResolvedIndexExpressions.Builder resolvedExpressionsBuilder = ResolvedIndexExpressions.builder();
boolean wildcardSeen = false;
for (String originalIndexExpression : indices) {
final CrossProjectIndexExpressionsRewriter.IndexRewriteResult indexRewriteResult = CrossProjectIndexExpressionsRewriter
.rewriteIndexExpression(originalIndexExpression, originProjectAlias, linkedProjectAliases, projectRouting);
final String localIndexExpression = indexRewriteResult.localExpression();
if (localIndexExpression == null) {
// TODO we may still need to update the `wildcardSeen` value to correctly handle exclusions
// (there can be an exclusion without any local index expressions)
// nothing to resolve locally so skip resolve abstraction call
resolvedExpressionsBuilder.addRemoteExpressions(originalIndexExpression, indexRewriteResult.remoteExpressions());
continue;
}
wildcardSeen = resolveIndexAbstraction(
resolvedExpressionsBuilder,
originalIndexExpression,
localIndexExpression,
indicesOptions,
projectMetadata,
allAuthorizedAndAvailableBySelector,
isAuthorized,
includeDataStreams,
indexRewriteResult.remoteExpressions(),
wildcardSeen
);
}
return resolvedExpressionsBuilder.build();
}
    /**
     * Resolves a single local index expression and records the outcome in {@code resolvedExpressionsBuilder}.
     * <p>
     * Handles, in order: a leading {@code -} exclusion (only honoured once a wildcard has been seen earlier in
     * the overall expression list), an optional {@code ::selector} suffix (rejected when the request's
     * {@code IndicesOptions} does not allow selectors), date math resolution, and finally either wildcard
     * expansion against the user's authorized names or a direct authorization/visibility check for a concrete
     * name.
     *
     * @param resolvedExpressionsBuilder collects resolved names, exclusions, and per-expression resolution results
     * @param originalIndexExpression the expression as originally supplied by the caller; used as the key when
     *        recording results in the builder
     * @param localIndexExpression the local (non-remote) expression being resolved
     * @param indicesOptions options controlling wildcard expansion, selector support, and missing-index behaviour
     * @param projectMetadata metadata used to look up abstractions and determine their visibility
     * @param allAuthorizedAndAvailableBySelector supplies all authorized, available names for a given selector
     * @param isAuthorized tests whether the user is authorized for a concrete name/selector pair
     * @param includeDataStreams whether data streams may be included in the resolution
     * @param remoteExpressions remote expressions associated with the original expression, recorded alongside the
     *        local results
     * @param wildcardSeen whether a wildcard was already encountered in an earlier expression of the same request
     * @return the (possibly updated) wildcard-seen flag, to be threaded into the next expression's resolution
     * @throws UnsupportedSelectorException if a selector is present but selectors are not allowed
     * @throws IndexNotFoundException if a wildcard expands to nothing and {@code allowNoIndices} is false
     */
    private boolean resolveIndexAbstraction(
        final ResolvedIndexExpressions.Builder resolvedExpressionsBuilder,
        final String originalIndexExpression,
        final String localIndexExpression,
        final IndicesOptions indicesOptions,
        final ProjectMetadata projectMetadata,
        final Function<IndexComponentSelector, Set<String>> allAuthorizedAndAvailableBySelector,
        final BiPredicate<String, IndexComponentSelector> isAuthorized,
        final boolean includeDataStreams,
        final Set<String> remoteExpressions,
        boolean wildcardSeen
    ) {
        String indexAbstraction;
        boolean minus = false;
        // A leading '-' is an exclusion only if a wildcard appeared earlier in the expression list; otherwise
        // the '-' is treated as part of the name itself.
        if (localIndexExpression.charAt(0) == '-' && wildcardSeen) {
            indexAbstraction = localIndexExpression.substring(1);
            minus = true;
        } else {
            indexAbstraction = localIndexExpression;
        }
        // Always check to see if there's a selector on the index expression
        final Tuple<String, String> expressionAndSelector = IndexNameExpressionResolver.splitSelectorExpression(indexAbstraction);
        final String selectorString = expressionAndSelector.v2();
        if (indicesOptions.allowSelectors() == false && selectorString != null) {
            throw new UnsupportedSelectorException(indexAbstraction);
        }
        indexAbstraction = expressionAndSelector.v1();
        IndexComponentSelector selector = IndexComponentSelector.getByKeyOrThrow(selectorString);
        // we always need to check for date math expressions
        indexAbstraction = IndexNameExpressionResolver.resolveDateMathExpression(indexAbstraction);
        if (indicesOptions.expandWildcardExpressions() && Regex.isSimpleMatchPattern(indexAbstraction)) {
            wildcardSeen = true;
            final HashSet<String> resolvedIndices = new HashSet<>();
            // Expand the wildcard only over names the user is authorized for, keeping those that are visible
            // under the request's wildcard options.
            for (String authorizedIndex : allAuthorizedAndAvailableBySelector.apply(selector)) {
                if (Regex.simpleMatch(indexAbstraction, authorizedIndex)
                    && isIndexVisible(
                        indexAbstraction,
                        selectorString,
                        authorizedIndex,
                        indicesOptions,
                        projectMetadata,
                        indexNameExpressionResolver,
                        includeDataStreams
                    )) {
                    resolveSelectorsAndCollect(authorizedIndex, selectorString, indicesOptions, resolvedIndices, projectMetadata);
                }
            }
            if (resolvedIndices.isEmpty()) {
                // es core honours allow_no_indices for each wildcard expression, we do the same here by throwing index not found.
                if (indicesOptions.allowNoIndices() == false) {
                    throw new IndexNotFoundException(indexAbstraction);
                }
                // An empty expansion is still recorded as SUCCESS so downstream consumers see the expression.
                resolvedExpressionsBuilder.addExpressions(originalIndexExpression, new HashSet<>(), SUCCESS, remoteExpressions);
            } else {
                if (minus) {
                    resolvedExpressionsBuilder.excludeFromLocalExpressions(resolvedIndices);
                } else {
                    resolvedExpressionsBuilder.addExpressions(originalIndexExpression, resolvedIndices, SUCCESS, remoteExpressions);
                }
            }
        } else {
            // Concrete (non-wildcard) expression, or wildcard expansion is disabled for this request.
            final HashSet<String> resolvedIndices = new HashSet<>();
            resolveSelectorsAndCollect(indexAbstraction, selectorString, indicesOptions, resolvedIndices, projectMetadata);
            if (minus) {
                resolvedExpressionsBuilder.excludeFromLocalExpressions(resolvedIndices);
            } else {
                final boolean authorized = isAuthorized.test(indexAbstraction, selector);
                if (authorized) {
                    // Authorized: record whether the concrete resource actually exists and is visible.
                    final boolean visible = indexExists(projectMetadata, indexAbstraction)
                        && isIndexVisible(
                            indexAbstraction,
                            selectorString,
                            indexAbstraction,
                            indicesOptions,
                            projectMetadata,
                            indexNameExpressionResolver,
                            includeDataStreams
                        );
                    final LocalIndexResolutionResult result = visible ? SUCCESS : CONCRETE_RESOURCE_NOT_VISIBLE;
                    resolvedExpressionsBuilder.addExpressions(originalIndexExpression, resolvedIndices, result, remoteExpressions);
                } else if (indicesOptions.ignoreUnavailable()) {
                    // ignoreUnavailable implies that the request should not fail if an index is not authorized
                    // so we map this expression to an empty list,
                    resolvedExpressionsBuilder.addExpressions(
                        originalIndexExpression,
                        new HashSet<>(),
                        CONCRETE_RESOURCE_UNAUTHORIZED,
                        remoteExpressions
                    );
                } else {
                    // store the calculated expansion as unauthorized, it will be rejected later
                    resolvedExpressionsBuilder.addExpressions(
                        originalIndexExpression,
                        resolvedIndices,
                        CONCRETE_RESOURCE_UNAUTHORIZED,
                        remoteExpressions
                    );
                }
            }
        }
        return wildcardSeen;
    }
private static void resolveSelectorsAndCollect(
String indexAbstraction,
String selectorString,
IndicesOptions indicesOptions,
Set<String> collect,
ProjectMetadata projectMetadata
) {
if (indicesOptions.allowSelectors()) {
IndexAbstraction abstraction = projectMetadata.getIndicesLookup().get(indexAbstraction);
// We can't determine which selectors are valid for a nonexistent abstraction, so simply propagate them as if they supported
// all of them so we don't drop anything.
boolean acceptsAllSelectors = abstraction == null || abstraction.isDataStreamRelated();
// Supply default if needed
if (selectorString == null) {
selectorString = IndexComponentSelector.DATA.getKey();
}
// A selector is always passed along as-is, it's validity for this kind of abstraction is tested later
collect.add(IndexNameExpressionResolver.combineSelectorExpression(indexAbstraction, selectorString));
} else {
assert selectorString == null
: "A selector string [" + selectorString + "] is present but selectors are disabled in this context";
collect.add(indexAbstraction);
}
}
    /**
     * Determines whether {@code index} (the name of an index, alias, or data stream) should be considered
     * visible when resolving {@code expression} (wildcard or concrete) under the given {@code IndicesOptions}.
     * <p>
     * Visibility accounts for: hidden status (overridable by {@code expandWildcardsHidden} or a dot-prefixed
     * wildcard implicitly targeting dot-prefixed names), alias handling ({@code ignoreAliases}, plus unwrapping
     * net-new system aliases to their write index), system-index access levels, the {@code ::failures} selector
     * (only applicable to data-stream-related abstractions), and open/closed state for concrete indices.
     *
     * @param expression the expression being resolved; used only for the implicit-hidden dot-prefix check
     * @param selectorString the raw selector suffix from the expression, or null if none was given
     * @param index the concrete abstraction name being tested for visibility
     * @param indicesOptions options controlling hidden/open/closed expansion and alias handling
     * @param projectMetadata source of the indices lookup and index state
     * @param resolver supplies system-index access level and predicates
     * @param includeDataStreams whether data streams may be visible at all
     * @return true if the abstraction should be included in resolution
     * @throws IllegalStateException if {@code index} is not present in the indices lookup
     */
    public static boolean isIndexVisible(
        String expression,
        @Nullable String selectorString,
        String index,
        IndicesOptions indicesOptions,
        ProjectMetadata projectMetadata,
        IndexNameExpressionResolver resolver,
        boolean includeDataStreams
    ) {
        IndexAbstraction indexAbstraction = projectMetadata.getIndicesLookup().get(index);
        if (indexAbstraction == null) {
            throw new IllegalStateException("could not resolve index abstraction [" + index + "]");
        }
        final boolean isHidden = indexAbstraction.isHidden();
        // Hidden abstractions are visible only when hidden expansion is requested, or when a dot-prefixed
        // wildcard implicitly targets dot-prefixed names.
        boolean isVisible = isHidden == false || indicesOptions.expandWildcardsHidden() || isVisibleDueToImplicitHidden(expression, index);
        if (indexAbstraction.getType() == IndexAbstraction.Type.ALIAS) {
            // it's an alias, ignore expandWildcardsOpen and expandWildcardsClosed.
            // it's complicated to support those options with aliases pointing to multiple indices...
            isVisible = isVisible && indicesOptions.ignoreAliases() == false;
            if (isVisible && indexAbstraction.isSystem()) {
                // check if it is net new
                if (resolver.getNetNewSystemIndexPredicate().test(indexAbstraction.getName())) {
                    // don't give this code any particular credit for being *correct*. it's just trying to resolve a combination of
                    // issues in a way that happens to *work*. there's probably a better way of writing things such that this won't
                    // be necessary, but for the moment, it happens to be expedient to write things this way.
                    // unwrap the alias and re-run the function on the write index of the alias -- that is, the alias is visible if
                    // the concrete index that it refers to is visible
                    Index writeIndex = indexAbstraction.getWriteIndex();
                    if (writeIndex == null) {
                        return false;
                    } else {
                        return isIndexVisible(
                            expression,
                            selectorString,
                            writeIndex.getName(),
                            indicesOptions,
                            projectMetadata,
                            resolver,
                            includeDataStreams
                        );
                    }
                }
            }
            if (isVisible && selectorString != null) {
                // Check if a selector was present, and if it is, check if this alias is applicable to it
                IndexComponentSelector selector = IndexComponentSelector.getByKey(selectorString);
                if (IndexComponentSelector.FAILURES.equals(selector)) {
                    isVisible = indexAbstraction.isDataStreamRelated();
                }
            }
            return isVisible;
        }
        if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM) {
            if (includeDataStreams == false) {
                return false;
            }
            if (indexAbstraction.isSystem()) {
                // System data streams are gated on the resolver's system-index access level.
                return isSystemIndexVisible(resolver, indexAbstraction);
            } else {
                return isVisible;
            }
        }
        // From here on the abstraction is a concrete index.
        assert indexAbstraction.getIndices().size() == 1 : "concrete index must point to a single index";
        if (isVisible == false) {
            return false;
        }
        if (indexAbstraction.isSystem()) {
            // check if it is net new
            if (resolver.getNetNewSystemIndexPredicate().test(indexAbstraction.getName())) {
                return isSystemIndexVisible(resolver, indexAbstraction);
            }
            // does the system index back a system data stream?
            if (indexAbstraction.getParentDataStream() != null) {
                if (indexAbstraction.getParentDataStream().isSystem() == false) {
                    assert false : "system index is part of a data stream that is not a system data stream";
                    throw new IllegalStateException("system index is part of a data stream that is not a system data stream");
                }
                return isSystemIndexVisible(resolver, indexAbstraction);
            }
        }
        if (selectorString != null && Regex.isMatchAllPattern(selectorString) == false) {
            // Check if a selector was present, and if it is, check if this index is applicable to it
            IndexComponentSelector selector = IndexComponentSelector.getByKey(selectorString);
            if (IndexComponentSelector.FAILURES.equals(selector)) {
                // A plain concrete index has no failure store component.
                return false;
            }
        }
        // Finally, gate on the index's open/closed state versus the requested wildcard expansion options.
        IndexMetadata indexMetadata = projectMetadata.index(indexAbstraction.getIndices().get(0));
        if (indexMetadata.getState() == IndexMetadata.State.CLOSE && indicesOptions.expandWildcardsClosed()) {
            return true;
        }
        if (indexMetadata.getState() == IndexMetadata.State.OPEN && indicesOptions.expandWildcardsOpen()) {
            return true;
        }
        return false;
    }
private static boolean isSystemIndexVisible(IndexNameExpressionResolver resolver, IndexAbstraction indexAbstraction) {
final SystemIndexAccessLevel level = resolver.getSystemIndexAccessLevel();
switch (level) {
case ALL:
return true;
case NONE:
return false;
case RESTRICTED:
return resolver.getSystemIndexAccessPredicate().test(indexAbstraction.getName());
case BACKWARDS_COMPATIBLE_ONLY:
return resolver.getNetNewSystemIndexPredicate().test(indexAbstraction.getName());
default:
assert false : "unexpected system index access level [" + level + "]";
throw new IllegalStateException("unexpected system index access level [" + level + "]");
}
}
private static boolean isVisibleDueToImplicitHidden(String expression, String index) {
return index.startsWith(".") && expression.startsWith(".") && Regex.isSimpleMatchPattern(expression);
}
private static boolean indexExists(ProjectMetadata projectMetadata, String indexAbstraction) {
return projectMetadata.getIndicesLookup().get(indexAbstraction) != null;
}
}
| IndexAbstractionResolver |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/api/AbstractPathAssert.java | {
"start": 48980,
"end": 52235
} | class ____ for
* more details. If this is not what you want, use {@link #startsWith(Path)}
* instead.</em>
* </p>
*
* <p>
* Checks that the given {@link Path} starts with another path, without performing canonicalization on its arguments.
* This means that the only criterion to determine whether a path starts with another is the tested path's, and the
* argument's, name elements.
* </p>
*
* <p>
* This may lead to some surprising results: for instance, path {@code /../home/foo} does <em>not</em> start with
* {@code /home} since the first name element of the former ({@code ..}) is different from the first name element of
* the latter ({@code home}).
* </p>
*
* Examples:
* <pre><code class="java"> // fs is a Unix filesystem
* final Path tested = fs.getPath("/home/joe/myfile");
*
* // the following assertion succeeds:
* assertThat(tested).startsWithRaw(fs.getPath("/home/joe"));
*
* // the following assertion fails:
* assertThat(tested).startsWithRaw(fs.getPath("/home/harry"));
* // .... and this one too as given path is not canonicalized
* assertThat(tested).startsWithRaw(fs.getPath("/home/joe/.."));</code></pre>
*
* @param other the other path
* @return self
*
* @throws NullPointerException if the given path is null.
*
* @see Path#startsWith(Path)
*/
public SELF startsWithRaw(final Path other) {
paths.assertStartsWithRaw(info, actual, other);
return myself;
}
/**
* Assert that the tested {@link Path} ends with the given path.
*
* <p>
* This assertion will attempt to canonicalize the tested path and normalize the path given as an argument before
* performing the actual test.
* </p>
*
* <p>
* Note that the criterion to determine success is determined by the path's name elements; therefore,
* {@code /home/foobar/baz} does <em>not</em> end with {@code bar/baz}.
* </p>
*
* Examples:
* <pre><code class="java"> // fs is a Unix filesystem.
* // the current directory is supposed to be /home.
* final Path tested = fs.getPath("/home/joe/myfile");
* // as tested will be canonicalized, it could have been written: /home/jane/../joe/myfile
*
* // the following assertion succeeds:
* assertThat(tested).endsWith(fs.getPath("joe/myfile"));
*
* // the following assertions fail:
* assertThat(tested).endsWith(fs.getPath("joe/otherfile"));
* // this path will be normalized to joe/otherfile
* assertThat(tested).endsWith(fs.getPath("joe/myfile/../otherfile"));</code></pre>
*
* @param other the other path
* @return self
*
* @throws NullPointerException if the given path is null.
* @throws PathsException failed to canonicalize the tested path (see class
* description)
*
* @see Path#endsWith(Path)
* @see Path#toRealPath(LinkOption...)
*/
public SELF endsWith(final Path other) {
paths.assertEndsWith(info, actual, other);
return myself;
}
/**
* Assert that the tested {@link Path} ends with the given path.
*
* <p>
* <em>This assertion will not perform any canonicalization (on the
* tested path) or normalization (on the path given as an argument); see the
* | description |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_537.java | {
"start": 1402,
"end": 1466
} | class ____ {
public int value;
}
private static | VO |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlockForTest.java | {
"start": 984,
"end": 1542
} | class ____ extends AggregatedLogsBlock {
final private Map<String, String> params = new HashMap<String, String>();
private HttpServletRequest request;
public AggregatedLogsBlockForTest(Configuration conf) {
super(conf);
}
@Override
public void render(Block html) {
super.render(html);
}
public Map<String, String> moreParams() {
return params;
}
public HttpServletRequest request() {
return request;
}
public void setRequest(HttpServletRequest request) {
this.request = request;
}
}
| AggregatedLogsBlockForTest |
java | apache__flink | flink-filesystems/flink-s3-fs-hadoop/src/test/java/org/apache/flink/fs/s3hadoop/HadoopS3FileSystemITCase.java | {
"start": 1644,
"end": 2539
} | class ____ extends AbstractHadoopFileSystemITTest {
@BeforeAll
static void setup() throws IOException {
// check whether credentials exist
S3TestCredentials.assumeCredentialsAvailable();
// initialize configuration with valid credentials
final Configuration conf = new Configuration();
conf.setString("s3.access.key", S3TestCredentials.getS3AccessKey());
conf.setString("s3.secret.key", S3TestCredentials.getS3SecretKey());
FileSystem.initialize(conf);
basePath = new Path(S3TestCredentials.getTestBucketUri() + "tests-" + UUID.randomUUID());
fs = basePath.getFileSystem();
consistencyToleranceNS = 30_000_000_000L; // 30 seconds
// check for uniqueness of the test directory
// directory must not yet exist
assertThat(fs.exists(basePath)).isFalse();
}
}
| HadoopS3FileSystemITCase |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/StreamTaskSystemExitTest.java | {
"start": 10154,
"end": 11289
} | class ____
extends StreamTask<String, AbstractStreamOperator<String>> {
private final ExitPoint exitPoint;
public SystemExitStreamTask(Environment env) throws Exception {
this(env, ExitPoint.NONE);
}
public SystemExitStreamTask(Environment env, ExitPoint exitPoint) throws Exception {
super(env, null);
this.exitPoint = exitPoint;
}
@Override
protected void init() {
if (exitPoint == ExitPoint.INIT) {
systemExit();
}
}
@Override
protected void processInput(MailboxDefaultAction.Controller controller) throws Exception {
if (exitPoint == ExitPoint.PROCESS_INPUT) {
systemExit();
}
}
@Override
protected void cleanUpInternal() {}
@Override
protected void cancelTask() {
if (exitPoint == ExitPoint.CANCEL) {
systemExit();
}
}
/** Inside invoke() call, specify where system exit is called. */
protected | SystemExitStreamTask |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.