language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
qos-ch__slf4j
|
jcl-over-slf4j/src/main/java/org/apache/commons/logging/LogFactory.java
|
{
"start": 9292,
"end": 9571
}
|
class ____ utilized, all of the
* properties defined in this file will be set as configuration attributes on
* the corresponding <code>LogFactory</code> instance.
*
*
* @exception LogConfigurationException
* if the implementation
|
is
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/bug/Issue1075.java
|
{
"start": 155,
"end": 342
}
|
class ____ extends TestCase {
public void test_for_issue() throws Exception {
String json = "{ \"question\": \"1+1=?\\u1505a\"}";
JSON.parseObject(json);
}
}
|
Issue1075
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/BindsInstanceValidationTest.java
|
{
"start": 4831,
"end": 5761
}
|
interface ____ {",
" @BindsInstance void manyParams(int i, long l);",
"}");
CompilerTests.daggerCompiler(notAbstract)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining(
"@BindsInstance methods should have exactly one parameter for the bound type")
.onSource(notAbstract)
.onLine(6);
});
}
@Test
public void bindsInstanceFrameworkType() {
Source bindsFrameworkType =
CompilerTests.javaSource(
"test.BindsInstanceFrameworkType",
"package test;",
"",
"import dagger.BindsInstance;",
"import dagger.producers.Producer;",
"import javax.inject.Provider;",
"",
"
|
BindsInstanceManyParameters
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/util/XmlExpectationsHelper.java
|
{
"start": 1229,
"end": 1323
}
|
class ____ assertions on XML content.
*
* @author Rossen Stoyanchev
* @since 3.2
*/
public
|
for
|
java
|
apache__flink
|
flink-table/flink-table-common/src/main/java/org/apache/flink/table/annotation/FunctionHint.java
|
{
"start": 3495,
"end": 3670
}
|
class ____ extends ScalarFunction { ... }
*
* // accepts (INT, STRING) or (BOOLEAN) and always returns BOOLEAN
* @FunctionHint(
* output = @DataTypeHint("BOOLEAN")
* )
*
|
X
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/iterables/Iterables_assertContainsSubsequence_Test.java
|
{
"start": 1915,
"end": 10768
}
|
class ____ extends IterablesBaseTest {
@Override
@BeforeEach
public void setUp() {
super.setUp();
actual = list("Yoda", "Luke", "Leia", "Obi-Wan");
}
@Test
void should_pass_if_actual_contains_subsequence_without_elements_between() {
iterables.assertContainsSubsequence(someInfo(), actual, array("Luke", "Leia"));
}
@Test
void should_pass_if_actual_contains_subsequence_with_elements_between() {
iterables.assertContainsSubsequence(someInfo(), actual, array("Yoda", "Leia"));
}
@Test
void should_pass_if_actual_with_duplicate_elements_contains_subsequence() {
actual = list("Yoda", "Luke", "Yoda", "Obi-Wan");
iterables.assertContainsSubsequence(someInfo(), actual, array("Yoda", "Obi-Wan"));
iterables.assertContainsSubsequence(someInfo(), actual, array("Luke", "Obi-Wan"));
iterables.assertContainsSubsequence(someInfo(), actual, array("Yoda", "Yoda"));
}
@Test
void should_pass_if_actual_and_subsequence_are_equal() {
iterables.assertContainsSubsequence(someInfo(), actual, array("Yoda", "Luke", "Leia", "Obi-Wan"));
}
@Test
void should_pass_if_actual_contains_full_subsequence_even_if_partial_subsequence_is_found_before() {
// GIVEN
actual = list("Yoda", "Luke", "Leia", "Yoda", "Luke", "Obi-Wan");
// WHEN/THEN
// note that actual starts with {"Yoda", "Luke"} a partial sequence of {"Yoda", "Luke", "Obi-Wan"}
iterables.assertContainsSubsequence(INFO, actual, array("Yoda", "Luke", "Obi-Wan"));
}
@Test
void should_pass_if_actual_and_given_values_are_empty() {
actual.clear();
iterables.assertContainsSubsequence(someInfo(), actual, array());
}
@Test
void should_throw_error_if_subsequence_is_null() {
// GIVEN
Object[] subsequence = null;
// WHEN
NullPointerException npe = catchNullPointerException(() -> iterables.assertContainsSubsequence(INFO, actual, subsequence));
// THEN
then(npe).hasMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not() {
// GIVEN
Object[] subsequence = {};
// WHEN
expectAssertionError(() -> iterables.assertContainsSubsequence(INFO, actual, subsequence));
// THEN
verify(failures).failure(INFO, actualIsNotEmpty(actual));
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
actual = null;
// WHEN
var assertionError = expectAssertionError(() -> iterables.assertContainsSubsequence(INFO, actual, array("Yoda")));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
@Test
void should_fail_if_subsequence_is_bigger_than_actual() {
// GIVEN
Object[] subsequence = { "Luke", "Leia", "Obi-Wan", "Han", "C-3PO", "R2-D2", "Anakin" };
// WHEN
expectAssertionError(() -> iterables.assertContainsSubsequence(info, actual, subsequence));
// THEN
verify(failures).failure(INFO, actualDoesNotHaveEnoughElementsToContainSubsequence(actual, subsequence));
}
@Test
void should_fail_if_actual_does_not_contain_whole_subsequence() {
// GIVEN
Object[] subsequence = { "Han", "C-3PO" };
// WHEN
expectAssertionError(() -> iterables.assertContainsSubsequence(info, actual, subsequence));
// THEN
verifyFailureThrownWhenSubsequenceNotFound(info, subsequence, 0);
}
@Test
void should_fail_if_actual_contains_first_elements_of_subsequence_but_not_whole_subsequence() {
// GIVEN
Object[] subsequence = { "Luke", "Leia", "Han" };
// WHEN
expectAssertionError(() -> iterables.assertContainsSubsequence(info, actual, subsequence));
// THEN
verifyFailureThrownWhenSubsequenceNotFound(info, subsequence, 2);
}
@Test
void should_fail_if_actual_does_not_have_enough_elements_left_to_contain_subsequence_elements_still_to_be_matched() {
// GIVEN
actual = list("Leia", "Luke", "Yoda", "Obi-Wan", "Anakin");
Object[] subsequence = { "Leia", "Obi-Wan", "Han" };
// WHEN
expectAssertionError(() -> iterables.assertContainsSubsequence(INFO, actual, subsequence));
// THEN
verifyFailureThrownWhenSubsequenceNotFound(info, subsequence, 2);
}
private void verifyFailureThrownWhenSubsequenceNotFound(AssertionInfo info, Object[] subsequence, int subsequenceIndex) {
verify(failures).failure(info, shouldContainSubsequence(actual, subsequence, subsequenceIndex,
StandardComparisonStrategy.instance()));
}
// ------------------------------------------------------------------------------------------------------------------
// tests using a custom comparison strategy
// ------------------------------------------------------------------------------------------------------------------
@Test
void should_pass_if_actual_contains_subsequence_according_to_custom_comparison_strategy() {
iterablesWithCaseInsensitiveComparisonStrategy.assertContainsSubsequence(someInfo(), actual, array("yODa", "leia"));
}
@Test
void should_pass_if_actual_and_subsequence_are_equal_according_to_custom_comparison_strategy() {
iterablesWithCaseInsensitiveComparisonStrategy.assertContainsSubsequence(someInfo(), actual,
array("YODA", "luke", "lEIA", "Obi-wan"));
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
// GIVEN
actual = null;
// WHEN
var assertionError = expectAssertionError(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertContainsSubsequence(INFO,
actual,
array("Yoda")));
// THEN
then(assertionError).hasMessage(actualIsNull());
}
@Test
void should_throw_error_if_subsequence_is_null_whatever_custom_comparison_strategy_is() {
// GIVEN
Object[] subsequence = null;
// WHEN
NullPointerException npe = catchNullPointerException(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertContainsSubsequence(INFO,
actual,
subsequence));
// THEN
then(npe).hasMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not_whatever_custom_comparison_strategy_is() {
// GIVEN
Object[] subsequence = {};
// WHEN
expectAssertionError(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertContainsSubsequence(INFO, actual,
subsequence));
// THEN
verify(failures).failure(INFO, actualIsNotEmpty(actual));
}
@Test
void should_fail_if_subsequence_is_bigger_than_actual_according_to_custom_comparison_strategy() {
// GIVEN
Object[] subsequence = { "LUKE", "LeiA", "Obi-Wan", "Han", "C-3PO", "R2-D2", "Anakin" };
// WHEN
expectAssertionError(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertContainsSubsequence(INFO, actual,
subsequence));
// THEN
verify(failures).failure(INFO, actualDoesNotHaveEnoughElementsToContainSubsequence(actual, subsequence));
}
@Test
void should_fail_if_actual_does_not_contain_whole_subsequence_according_to_custom_comparison_strategy() {
// GIVEN
Object[] subsequence = { "Han", "C-3PO" };
// WHEN
expectAssertionError(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertContainsSubsequence(info, actual,
subsequence));
// THEN
verify(failures).failure(info, shouldContainSubsequence(actual, subsequence, 0, comparisonStrategy));
}
@Test
void should_fail_if_actual_contains_first_elements_of_subsequence_but_not_whole_subsequence_according_to_custom_comparison_strategy() {
// GIVEN
Object[] subsequence = { "Luke", "Leia", "Han" };
// WHEN
expectAssertionError(() -> iterablesWithCaseInsensitiveComparisonStrategy.assertContainsSubsequence(info, actual,
subsequence));
// THEN
verify(failures).failure(info, shouldContainSubsequence(actual, subsequence, 2, comparisonStrategy));
}
}
|
Iterables_assertContainsSubsequence_Test
|
java
|
spring-projects__spring-framework
|
framework-docs/src/main/java/org/springframework/docs/core/expressions/expressionsbeandef/ShapeGuess.java
|
{
"start": 782,
"end": 1075
}
|
class ____ {
private double initialShapeSeed;
@Value("#{ numberGuess.randomNumber }")
public void setInitialShapeSeed(double initialShapeSeed) {
this.initialShapeSeed = initialShapeSeed;
}
public double getInitialShapeSeed() {
return initialShapeSeed;
}
}
// end::snippet[]
|
ShapeGuess
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/internal/NativeLibraryLoader.java
|
{
"start": 1624,
"end": 19502
}
|
class ____ {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(NativeLibraryLoader.class);
private static final String NATIVE_RESOURCE_HOME = "META-INF/native/";
private static final File WORKDIR;
private static final boolean DELETE_NATIVE_LIB_AFTER_LOADING;
private static final boolean TRY_TO_PATCH_SHADED_ID;
private static final boolean DETECT_NATIVE_LIBRARY_DUPLICATES;
// Just use a-Z and numbers as valid ID bytes.
private static final byte[] UNIQUE_ID_BYTES =
"0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ".getBytes(CharsetUtil.US_ASCII);
static {
String workdir = SystemPropertyUtil.get("io.netty.native.workdir");
if (workdir != null) {
File f = new File(workdir);
if (!f.exists() && !f.mkdirs()) {
throw new ExceptionInInitializerError(
new IOException("Custom native workdir mkdirs failed: " + workdir));
}
try {
f = f.getAbsoluteFile();
} catch (Exception ignored) {
// Good to have an absolute path, but it's OK.
}
WORKDIR = f;
logger.debug("-Dio.netty.native.workdir: " + WORKDIR);
} else {
WORKDIR = PlatformDependent.tmpdir();
logger.debug("-Dio.netty.native.workdir: " + WORKDIR + " (io.netty.tmpdir)");
}
DELETE_NATIVE_LIB_AFTER_LOADING = SystemPropertyUtil.getBoolean(
"io.netty.native.deleteLibAfterLoading", true);
logger.debug("-Dio.netty.native.deleteLibAfterLoading: {}", DELETE_NATIVE_LIB_AFTER_LOADING);
TRY_TO_PATCH_SHADED_ID = SystemPropertyUtil.getBoolean(
"io.netty.native.tryPatchShadedId", true);
logger.debug("-Dio.netty.native.tryPatchShadedId: {}", TRY_TO_PATCH_SHADED_ID);
DETECT_NATIVE_LIBRARY_DUPLICATES = SystemPropertyUtil.getBoolean(
"io.netty.native.detectNativeLibraryDuplicates", true);
logger.debug("-Dio.netty.native.detectNativeLibraryDuplicates: {}", DETECT_NATIVE_LIBRARY_DUPLICATES);
}
/**
* Loads the first available library in the collection with the specified
* {@link ClassLoader}.
*
* @throws IllegalArgumentException
* if none of the given libraries load successfully.
*/
public static void loadFirstAvailable(ClassLoader loader, String... names) {
List<Throwable> suppressed = new ArrayList<Throwable>();
for (String name : names) {
try {
load(name, loader);
logger.debug("Loaded library with name '{}'", name);
return;
} catch (Throwable t) {
suppressed.add(t);
}
}
IllegalArgumentException iae =
new IllegalArgumentException("Failed to load any of the given libraries: " + Arrays.toString(names));
ThrowableUtil.addSuppressedAndClear(iae, suppressed);
throw iae;
}
/**
* Calculates the mangled shading prefix added to this class's full name.
*
* <p>This method mangles the package name as follows, so we can unmangle it back later:
* <ul>
* <li>{@code _} to {@code _1}</li>
* <li>{@code .} to {@code _}</li>
* </ul>
*
* <p>Note that we don't mangle non-ASCII characters here because it's extremely unlikely to have
* a non-ASCII character in a package name. For more information, see:
* <ul>
* <li><a href="https://docs.oracle.com/javase/8/docs/technotes/guides/jni/spec/design.html">JNI
* specification</a></li>
* <li>{@code parsePackagePrefix()} in {@code netty_jni_util.c}.</li>
* </ul>
*
* @throws UnsatisfiedLinkError if the shader used something other than a prefix
*/
private static String calculateMangledPackagePrefix() {
String maybeShaded = NativeLibraryLoader.class.getName();
// Use ! instead of . to avoid shading utilities from modifying the string
String expected = "io!netty!util!internal!NativeLibraryLoader".replace('!', '.');
if (!maybeShaded.endsWith(expected)) {
throw new UnsatisfiedLinkError(String.format(
"Could not find prefix added to %s to get %s. When shading, only adding a "
+ "package prefix is supported", expected, maybeShaded));
}
return maybeShaded.substring(0, maybeShaded.length() - expected.length())
.replace("_", "_1")
.replace('.', '_');
}
/**
* Load the given library with the specified {@link ClassLoader}
*/
public static void load(String originalName, ClassLoader loader) {
String mangledPackagePrefix = calculateMangledPackagePrefix();
String name = mangledPackagePrefix + originalName;
List<Throwable> suppressed = new ArrayList<>();
try {
// first try to load from java.library.path
loadLibrary(loader, name, false);
return;
} catch (Throwable ex) {
suppressed.add(ex);
}
String libname = System.mapLibraryName(name);
String path = NATIVE_RESOURCE_HOME + libname;
File tmpFile = null;
URL url = getResource(path, loader);
try {
if (url == null) {
if (PlatformDependent.isOsx()) {
String fileName = path.endsWith(".jnilib") ? NATIVE_RESOURCE_HOME + "lib" + name + ".dynlib" :
NATIVE_RESOURCE_HOME + "lib" + name + ".jnilib";
url = getResource(fileName, loader);
if (url == null) {
FileNotFoundException fnf = new FileNotFoundException(fileName);
ThrowableUtil.addSuppressedAndClear(fnf, suppressed);
throw fnf;
}
} else {
FileNotFoundException fnf = new FileNotFoundException(path);
ThrowableUtil.addSuppressedAndClear(fnf, suppressed);
throw fnf;
}
}
int index = libname.lastIndexOf('.');
String prefix = libname.substring(0, index);
String suffix = libname.substring(index);
tmpFile = PlatformDependent.createTempFile(prefix, suffix, WORKDIR);
try (InputStream in = url.openStream();
OutputStream out = new FileOutputStream(tmpFile)) {
byte[] buffer = new byte[8192];
int length;
while ((length = in.read(buffer)) > 0) {
out.write(buffer, 0, length);
}
out.flush();
if (shouldShadedLibraryIdBePatched(mangledPackagePrefix)) {
// Let's try to patch the id and re-sign it. This is a best-effort and might fail if a
// SecurityManager is setup or the right executables are not installed :/
tryPatchShadedLibraryIdAndSign(tmpFile, originalName);
}
}
// Close the output stream before loading the unpacked library,
// because otherwise Windows will refuse to load it when it's in use by other process.
loadLibrary(loader, tmpFile.getPath(), true);
} catch (UnsatisfiedLinkError e) {
try {
if (tmpFile != null && tmpFile.isFile() && tmpFile.canRead() &&
!NoexecVolumeDetector.canExecuteExecutable(tmpFile)) {
// Pass "io.netty.native.workdir" as an argument to allow shading tools to see
// the string. Since this is printed out to users to tell them what to do next,
// we want the value to be correct even when shading.
String message = String.format(
"%s exists but cannot be executed even when execute permissions set; " +
"check volume for \"noexec\" flag; use -D%s=[path] " +
"to set native working directory separately.",
tmpFile.getPath(), "io.netty.native.workdir");
logger.info(message);
suppressed.add(ThrowableUtil.unknownStackTrace(
new UnsatisfiedLinkError(message), NativeLibraryLoader.class, "load"));
}
} catch (Throwable t) {
suppressed.add(t);
logger.debug("Error checking if {} is on a file store mounted with noexec", tmpFile, t);
}
// Re-throw to fail the load
ThrowableUtil.addSuppressedAndClear(e, suppressed);
throw e;
} catch (Exception e) {
UnsatisfiedLinkError ule = new UnsatisfiedLinkError("could not load a native library: " + name);
ule.initCause(e);
ThrowableUtil.addSuppressedAndClear(ule, suppressed);
throw ule;
} finally {
// After we load the library it is safe to delete the file.
// We delete the file immediately to free up resources as soon as possible,
// and if this fails fallback to deleting on JVM exit.
if (tmpFile != null && (!DELETE_NATIVE_LIB_AFTER_LOADING || !tmpFile.delete())) {
tmpFile.deleteOnExit();
}
}
}
private static URL getResource(String path, ClassLoader loader) {
final Enumeration<URL> urls;
try {
if (loader == null) {
urls = ClassLoader.getSystemResources(path);
} else {
urls = loader.getResources(path);
}
} catch (IOException iox) {
throw new RuntimeException("An error occurred while getting the resources for " + path, iox);
}
List<URL> urlsList = Collections.list(urls);
int size = urlsList.size();
switch (size) {
case 0:
return null;
case 1:
return urlsList.get(0);
default:
if (DETECT_NATIVE_LIBRARY_DUPLICATES) {
try {
MessageDigest md = MessageDigest.getInstance("SHA-256");
// We found more than 1 resource with the same name. Let's check if the content of the file is
// the same as in this case it will not have any bad effect.
URL url = urlsList.get(0);
byte[] digest = digest(md, url);
boolean allSame = true;
if (digest != null) {
for (int i = 1; i < size; i++) {
byte[] digest2 = digest(md, urlsList.get(i));
if (digest2 == null || !Arrays.equals(digest, digest2)) {
allSame = false;
break;
}
}
} else {
allSame = false;
}
if (allSame) {
return url;
}
} catch (NoSuchAlgorithmException e) {
logger.debug("Don't support SHA-256, can't check if resources have same content.", e);
}
throw new IllegalStateException(
"Multiple resources found for '" + path + "' with different content: " + urlsList);
} else {
logger.warn("Multiple resources found for '" + path + "' with different content: " +
urlsList + ". Please fix your dependency graph.");
return urlsList.get(0);
}
}
}
private static byte[] digest(MessageDigest digest, URL url) {
try (InputStream in = url.openStream()) {
byte[] bytes = new byte[8192];
int i;
while ((i = in.read(bytes)) != -1) {
digest.update(bytes, 0, i);
}
return digest.digest();
} catch (IOException e) {
logger.debug("Can't read resource.", e);
return null;
}
}
static void tryPatchShadedLibraryIdAndSign(File libraryFile, String originalName) {
if (!new File("/Library/Developer/CommandLineTools").exists()) {
logger.debug("Can't patch shaded library id as CommandLineTools are not installed." +
" Consider installing CommandLineTools with 'xcode-select --install'");
return;
}
String newId = new String(generateUniqueId(originalName.length()), CharsetUtil.UTF_8);
if (!tryExec("install_name_tool -id " + newId + " " + libraryFile.getAbsolutePath())) {
return;
}
tryExec("codesign -s - " + libraryFile.getAbsolutePath());
}
private static boolean tryExec(String cmd) {
try {
int exitValue = Runtime.getRuntime().exec(cmd).waitFor();
if (exitValue != 0) {
logger.debug("Execution of '{}' failed: {}", cmd, exitValue);
return false;
}
logger.debug("Execution of '{}' succeed: {}", cmd, exitValue);
return true;
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
} catch (IOException e) {
logger.info("Execution of '{}' failed.", cmd, e);
} catch (SecurityException e) {
logger.error("Execution of '{}' failed.", cmd, e);
}
return false;
}
private static boolean shouldShadedLibraryIdBePatched(String packagePrefix) {
return TRY_TO_PATCH_SHADED_ID && PlatformDependent.isOsx() && !packagePrefix.isEmpty();
}
private static byte[] generateUniqueId(int length) {
byte[] idBytes = new byte[length];
for (int i = 0; i < idBytes.length; i++) {
// We should only use bytes as replacement that are in our UNIQUE_ID_BYTES array.
idBytes[i] = UNIQUE_ID_BYTES[ThreadLocalRandom.current()
.nextInt(UNIQUE_ID_BYTES.length)];
}
return idBytes;
}
/**
* Loading the native library into the specified {@link ClassLoader}.
* @param loader - The {@link ClassLoader} where the native library will be loaded into
* @param name - The native library path or name
* @param absolute - Whether the native library will be loaded by path or by name
*/
private static void loadLibrary(final ClassLoader loader, final String name, final boolean absolute) {
Throwable suppressed = null;
try {
try {
// Make sure the helper belongs to the target ClassLoader.
final Class<?> newHelper = tryToLoadClass(loader, NativeLibraryUtil.class);
loadLibraryByHelper(newHelper, name, absolute);
logger.debug("Successfully loaded the library {}", name);
return;
} catch (UnsatisfiedLinkError e) { // Should by pass the UnsatisfiedLinkError here!
suppressed = e;
} catch (Exception e) {
suppressed = e;
}
NativeLibraryUtil.loadLibrary(name, absolute); // Fallback to local helper class.
logger.debug("Successfully loaded the library {}", name);
} catch (NoSuchMethodError nsme) {
if (suppressed != null) {
ThrowableUtil.addSuppressed(nsme, suppressed);
}
throw new LinkageError(
"Possible multiple incompatible native libraries on the classpath for '" + name + "'?", nsme);
} catch (UnsatisfiedLinkError ule) {
if (suppressed != null) {
ThrowableUtil.addSuppressed(ule, suppressed);
}
throw ule;
}
}
private static void loadLibraryByHelper(final Class<?> helper, final String name, final boolean absolute)
throws UnsatisfiedLinkError {
Object ret = AccessController.doPrivileged(new PrivilegedAction<Object>() {
@Override
public Object run() {
try {
// Invoke the helper to load the native library, if it succeeds, then the native
// library belong to the specified ClassLoader.
Method method = helper.getMethod("loadLibrary", String.class, boolean.class);
method.setAccessible(true);
return method.invoke(null, name, absolute);
} catch (Exception e) {
return e;
}
}
});
if (ret instanceof Throwable) {
Throwable t = (Throwable) ret;
assert !(t instanceof UnsatisfiedLinkError) : t + " should be a wrapper throwable";
Throwable cause = t.getCause();
if (cause instanceof UnsatisfiedLinkError) {
throw (UnsatisfiedLinkError) cause;
}
UnsatisfiedLinkError ule = new UnsatisfiedLinkError(t.getMessage());
ule.initCause(t);
throw ule;
}
}
/**
* Try to load the helper {@link Class} into specified {@link ClassLoader}.
* @param loader - The {@link ClassLoader} where to load the helper {@link Class}
* @param helper - The helper {@link Class}
* @return A new helper Class defined in the specified ClassLoader.
* @throws ClassNotFoundException Helper
|
NativeLibraryLoader
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/script/ScriptManager.java
|
{
"start": 1929,
"end": 1996
}
|
class ____ implements FileWatcher {
private abstract
|
ScriptManager
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/apidiff/Java8ApiCheckerTest.java
|
{
"start": 1646,
"end": 2074
}
|
class ____ {
void f(ByteBuffer b, int i) {
// BUG: Diagnostic contains: ByteBuffer#position(int) is not available
b.position(i);
}
}
""")
.doTest();
}
@Test
public void bufferNegative() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import java.nio.ByteBuffer;
|
Test
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/multipart/AbstractHttpData.java
|
{
"start": 1084,
"end": 4713
}
|
class ____ extends AbstractReferenceCounted implements HttpData {
private final String name;
protected long definedSize;
protected long size;
private Charset charset = HttpConstants.DEFAULT_CHARSET;
private boolean completed;
private long maxSize = DefaultHttpDataFactory.MAXSIZE;
protected AbstractHttpData(String name, Charset charset, long size) {
ObjectUtil.checkNotNull(name, "name");
this.name = checkNonEmpty(cleanName(name), "name");
if (charset != null) {
setCharset(charset);
}
definedSize = size;
}
//Replaces \r and \t with a space
//Removes leading and trailing whitespace and newlines
private static String cleanName(String name) {
int len = name.length();
StringBuilder sb = null;
int start = 0;
int end = len;
// Trim leading whitespace
while (start < end && Character.isWhitespace(name.charAt(start))) {
start++;
}
// Trim trailing whitespace
while (end > start && Character.isWhitespace(name.charAt(end - 1))) {
end--;
}
for (int i = start; i < end; i++) {
char c = name.charAt(i);
if (c == '\n') {
// Skip newline entirely
if (sb == null) {
sb = new StringBuilder(len);
sb.append(name, start, i);
}
continue;
}
if (c == '\r' || c == '\t') {
if (sb == null) {
sb = new StringBuilder(len);
sb.append(name, start, i);
}
sb.append(' ');
} else if (sb != null) {
sb.append(c);
}
}
// If no replacements were needed, return the trimmed slice
return sb == null ? name.substring(start, end) : sb.toString();
}
@Override
public long getMaxSize() {
return maxSize;
}
@Override
public void setMaxSize(long maxSize) {
this.maxSize = maxSize;
}
@Override
public void checkSize(long newSize) throws IOException {
if (maxSize >= 0 && newSize > maxSize) {
throw new IOException("Size exceed allowed maximum capacity");
}
}
@Override
public String getName() {
return name;
}
@Override
public boolean isCompleted() {
return completed;
}
protected void setCompleted() {
setCompleted(true);
}
protected void setCompleted(boolean completed) {
this.completed = completed;
}
@Override
public Charset getCharset() {
return charset;
}
@Override
public void setCharset(Charset charset) {
this.charset = ObjectUtil.checkNotNull(charset, "charset");
}
@Override
public long length() {
return size;
}
@Override
public long definedLength() {
return definedSize;
}
@Override
public ByteBuf content() {
try {
return getByteBuf();
} catch (IOException e) {
throw new ChannelException(e);
}
}
@Override
protected void deallocate() {
delete();
}
@Override
public HttpData retain() {
super.retain();
return this;
}
@Override
public HttpData retain(int increment) {
super.retain(increment);
return this;
}
@Override
public abstract HttpData touch();
@Override
public abstract HttpData touch(Object hint);
}
|
AbstractHttpData
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/support/DefaultEndpointTest.java
|
{
"start": 2737,
"end": 3094
}
|
class ____ extends DefaultEndpoint {
@Override
public Producer createProducer() {
return null;
}
@Override
public Consumer createConsumer(Processor processor) {
return null;
}
@Override
public boolean isSingleton() {
return false;
}
}
}
|
MyEndpoint
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/logging/logback/ColorConverter.java
|
{
"start": 1425,
"end": 2838
}
|
class ____ extends CompositeConverter<ILoggingEvent> {
private static final Map<String, AnsiElement> ELEMENTS;
static {
Map<String, AnsiElement> ansiElements = new HashMap<>();
Arrays.stream(AnsiColor.values())
.filter((color) -> color != AnsiColor.DEFAULT)
.forEach((color) -> ansiElements.put(color.name().toLowerCase(Locale.ROOT), color));
ansiElements.put("faint", AnsiStyle.FAINT);
ELEMENTS = Collections.unmodifiableMap(ansiElements);
}
private static final Map<Integer, AnsiElement> LEVELS;
static {
Map<Integer, AnsiElement> ansiLevels = new HashMap<>();
ansiLevels.put(Level.ERROR_INTEGER, AnsiColor.RED);
ansiLevels.put(Level.WARN_INTEGER, AnsiColor.YELLOW);
LEVELS = Collections.unmodifiableMap(ansiLevels);
}
@Override
protected String transform(ILoggingEvent event, String in) {
AnsiElement color = ELEMENTS.get(getFirstOption());
if (color == null) {
// Assume highlighting
color = LEVELS.get(event.getLevel().toInteger());
color = (color != null) ? color : AnsiColor.GREEN;
}
return toAnsiString(in, color);
}
protected String toAnsiString(String in, AnsiElement element) {
return AnsiOutput.toString(element, in);
}
static String getName(AnsiElement element) {
return ELEMENTS.entrySet()
.stream()
.filter((entry) -> entry.getValue().equals(element))
.map(Map.Entry::getKey)
.findFirst()
.orElseThrow();
}
}
|
ColorConverter
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/api/AtomicLongAssert.java
|
{
"start": 1185,
"end": 14943
}
|
class ____ extends AbstractAssertWithComparator<AtomicLongAssert, AtomicLong> {
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
Comparables comparables = new Comparables();
// TODO reduce the visibility of the fields annotated with @VisibleForTesting
Longs longs = Longs.instance();
public AtomicLongAssert(AtomicLong actual) {
super(actual, AtomicLongAssert.class);
}
/**
* Verifies that the actual atomic has a value in [start, end] range (start included, end included).
* <p>
* Example:
* <pre><code class='java'> AtomicLong actual = new AtomicLong(5);
*
* // assertions succeed
* assertThat(actual).hasValueBetween(4, 6)
* .hasValueBetween(4, 5)
* .hasValueBetween(5, 6);
*
* // assertions fail
* assertThat(actual).hasValueBetween(6, 8)
* .hasValueBetween(0, 4);</code></pre>
*
* @param startInclusive the start value (inclusive).
* @param endInclusive the end value (inclusive).
* @return this assertion object.
* @throws AssertionError if the actual atomic is {@code null}.
* @throws AssertionError if the actual atomic value is not in [start, end] range.
*
* @since 2.7.0 / 3.7.0
*/
public AtomicLongAssert hasValueBetween(long startInclusive, long endInclusive) {
isNotNull();
longs.assertIsBetween(info, actual.get(), startInclusive, endInclusive);
return myself;
}
/**
* Verifies that the actual atomic has a value strictly less than the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass:
* assertThat(new AtomicLong(1)).hasValueLessThan(2);
* assertThat(new AtomicLong(-2)).hasValueLessThan(-1);
*
* // assertions will fail:
* assertThat(new AtomicLong(1)).hasValueLessThan(0)
* .hasValueLessThan(1);</code></pre>
*
* @param other the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if the actual atomic is {@code null}.
* @throws AssertionError if the actual value is equal to or greater than the given one.
*
* @since 2.7.0 / 3.7.0
*/
public AtomicLongAssert hasValueLessThan(long other) {
isNotNull();
longs.assertLessThan(info, actual.get(), other);
return myself;
}
  /**
   * Verifies that the actual atomic has a value less than or equal to the given one.
   * <p>
   * Example:
   * <pre><code class='java'> // assertions will pass:
   * assertThat(new AtomicLong(1)).hasValueLessThanOrEqualTo(1)
   *                              .hasValueLessThanOrEqualTo(2);
   * assertThat(new AtomicLong(-2)).hasValueLessThanOrEqualTo(-1);
   *
   * // assertion will fail:
   * assertThat(new AtomicLong(1)).hasValueLessThanOrEqualTo(0);</code></pre>
   *
   * @param other the given value to compare the actual value to.
   * @return {@code this} assertion object.
   * @throws AssertionError if the actual atomic is {@code null}.
   * @throws AssertionError if the actual atomic value is greater than the given one.
   *
   * @since 2.7.0 / 3.7.0
   */
  public AtomicLongAssert hasValueLessThanOrEqualTo(long other) {
    isNotNull();
    longs.assertLessThanOrEqualTo(info, actual.get(), other);
    return myself;
  }
/**
* Verifies that the actual atomic has a value strictly greater than the given one.
* <p>
* Example:
* <pre><code class='java'> // assertions will pass:
* assertThat(new AtomicLong(1)).hasValueGreaterThan(0);
* assertThat(new AtomicLong(-1)).hasValueGreaterThan(-2);
*
* // assertions will fail:
* assertThat(new AtomicLong(1)).hasValueGreaterThan(2)
* .hasValueGreaterThan(1);</code></pre>
*
* @param other the given value to compare the actual value to.
* @return {@code this} assertion object.
* @throws AssertionError if actual is {@code null}.
* @throws AssertionError if the actual atomic value is equal to or less than the given one.
*
* @since 2.7.0 / 3.7.0
*/
public AtomicLongAssert hasValueGreaterThan(long other) {
isNotNull();
longs.assertGreaterThan(info, actual.get(), other);
return myself;
}
  /**
   * Verifies that the actual atomic has a value greater than or equal to the given one.
   * <p>
   * Example:
   * <pre><code class='java'> // assertions will pass:
   * assertThat(new AtomicLong(1)).hasValueGreaterThanOrEqualTo(0)
   *                              .hasValueGreaterThanOrEqualTo(1);
   * assertThat(new AtomicLong(-1)).hasValueGreaterThanOrEqualTo(-2);
   *
   * // assertion will fail:
   * assertThat(new AtomicLong(1)).hasValueGreaterThanOrEqualTo(2);</code></pre>
   *
   * @param other the given value to compare the actual value to.
   * @return {@code this} assertion object.
   * @throws AssertionError if the actual atomic is {@code null}.
   * @throws AssertionError if the actual atomic value is less than the given one.
   *
   * @since 2.7.0 / 3.7.0
   */
  public AtomicLongAssert hasValueGreaterThanOrEqualTo(long other) {
    isNotNull();
    longs.assertGreaterThanOrEqualTo(info, actual.get(), other);
    return myself;
  }
/**
* Verifies that the actual atomic has a positive value.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new AtomicLong(42)).hasPositiveValue();
*
* // assertions will fail
* assertThat(new AtomicLong(0)).hasPositiveValue();
* assertThat(new AtomicLong(-1)).hasPositiveValue();</code></pre>
*
* @return this assertion object.
* @throws AssertionError if the actual atomic is {@code null}.
* @throws AssertionError if the actual atomic value is not positive.
*
* @since 2.7.0 / 3.7.0
*/
public AtomicLongAssert hasPositiveValue() {
isNotNull();
longs.assertIsPositive(info, actual.get());
return myself;
}
/**
* Verifies that the actual atomic has a non positive value (negative or equal zero).
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(new AtomicLong(-42)).hasNonPositiveValue();
* assertThat(new AtomicLong(0)).hasNonPositiveValue();
*
* // assertion will fail
* assertThat(new AtomicLong(42)).hasNonPositiveValue();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if the actual atomic is {@code null}.
* @throws AssertionError if the actual atomic value is not non positive.
*
* @since 2.7.0 / 3.7.0
*/
public AtomicLongAssert hasNonPositiveValue() {
isNotNull();
longs.assertIsNotPositive(info, actual.get());
return myself;
}
/**
* Verifies that the actual atomic has a negative value.
* <p>
* Example:
* <pre><code class='java'> // assertion will pass
* assertThat(new AtomicLong(-42)).hasNegativeValue();
*
* // assertions will fail
* assertThat(new AtomicLong(0)).hasNegativeValue();
* assertThat(new AtomicLong(42)).hasNegativeValue();</code></pre>
*
* @return this assertion object.
* @throws AssertionError if the actual atomic is {@code null}.
* @throws AssertionError if the actual atomic value is not negative.
*
* @since 2.7.0 / 3.7.0
*/
public AtomicLongAssert hasNegativeValue() {
isNotNull();
longs.assertIsNegative(info, actual.get());
return myself;
}
/**
* Verifies that the actual atomic has a non negative value (positive or equal zero).
* <p>
* Example:
* <pre><code class='java'> // assertions will pass
* assertThat(new AtomicLong(42)).hasNonNegativeValue();
* assertThat(new AtomicLong(0)).hasNonNegativeValue();
*
* // assertion will fail
* assertThat(new AtomicLong(-42)).hasNonNegativeValue();</code></pre>
*
* @return {@code this} assertion object.
* @throws AssertionError if the actual atomic is {@code null}.
* @throws AssertionError if the actual atomic value is not non negative.
*
* @since 2.7.0 / 3.7.0
*/
public AtomicLongAssert hasNonNegativeValue() {
isNotNull();
longs.assertIsNotNegative(info, actual.get());
return myself;
}
  /**
   * Verifies that the actual atomic has a value close to the given one within the given percentage.<br>
   * If difference is equal to the percentage value, assertion is considered valid.
   * <p>
   * Example with Long:
   * <pre><code class='java'> // assertions will pass:
   * assertThat(new AtomicLong(11)).hasValueCloseTo(10, withinPercentage(20));
   *
   * // if difference is exactly equals to the computed offset (1), it's ok
   * assertThat(new AtomicLong(11)).hasValueCloseTo(10, withinPercentage(10));
   *
   * // assertion will fail
   * assertThat(new AtomicLong(11)).hasValueCloseTo(10, withinPercentage(5));</code></pre>
   *
   * @param expected the given number to compare the actual value to.
   * @param percentage the given positive percentage.
   * @return {@code this} assertion object.
   * @throws NullPointerException if the given {@link Percentage} is {@code null}.
   * @throws AssertionError if the actual atomic is {@code null}.
   * @throws AssertionError if the actual atomic value is not close enough to the given one.
   *
   * @since 2.7.0 / 3.7.0
   */
  public AtomicLongAssert hasValueCloseTo(long expected, Percentage percentage) {
    isNotNull();
    longs.assertIsCloseToPercentage(info, actual.get(), expected, percentage);
    return myself;
  }
/**
* Verifies that the actual atomic has a value close to the given one within the given offset.
* <p>
* When <i>abs(actual - expected) == offset value</i>, the assertion:
* <ul>
* <li><b>succeeds</b> when using {@link Assertions#within(Long)} or {@link Offset#offset(Number)}</li>
* <li><b>fails</b> when using {@link Assertions#byLessThan(Long)} or {@link Offset#strictOffset(Number)}</li>
* </ul>
* <p>
* <b>Breaking change</b> since 2.9.0/3.9.0: using {@link Assertions#byLessThan(Long)} implies a <b>strict</b> comparison,
* use {@link Assertions#within(Long)} to get the old behavior.
* <p>
* Example with Long:
* <pre><code class='java'> // assertions will pass:
* assertThat(new AtomicLong(5)).hasValueCloseTo(7L, within(3L))
* .hasValueCloseTo(7L, byLessThan(3L));
*
* // if the difference is exactly equals to the offset, it's ok ...
* assertThat(new AtomicLong(5)).hasValueCloseTo(7L, within(2L));
* // ... but not with byLessThan which implies a strict comparison
* assertThat(new AtomicLong(5)).hasValueCloseTo(7L, byLessThan(2L)); // FAIL
*
* // assertion will fail
* assertThat(new AtomicLong(5)).hasValueCloseTo(7L, within(1L));
* assertThat(new AtomicLong(5)).hasValueCloseTo(7L, byLessThan(1L));</code></pre>
*
* @param expected the given number to compare the actual value to.
* @param offset the given allowed {@link Offset}.
* @return {@code this} assertion object.
* @throws NullPointerException if the given {@link Offset} is {@code null}.
* @throws AssertionError if the actual atomic value is not close enough to the given one.
*
* @since 2.7.0 / 3.7.0
*/
public AtomicLongAssert hasValueCloseTo(long expected, Offset<Long> offset) {
isNotNull();
longs.assertIsCloseTo(info, actual.get(), expected, offset);
return myself;
}
  /**
   * Verifies that the actual atomic has the given value.
   * <p>
   * Example:
   * <pre><code class='java'> // assertion will pass
   * assertThat(new AtomicLong(42)).hasValue(42);
   *
   * // assertion will fail
   * assertThat(new AtomicLong(42)).hasValue(0);</code></pre>
   *
   * @param expectedValue the expected value.
   * @return {@code this} assertion object.
   * @throws AssertionError if the actual atomic is {@code null}.
   * @throws AssertionError if the actual atomic does not have the given value.
   *
   * @since 2.7.0 / 3.7.0
   */
  public AtomicLongAssert hasValue(long expectedValue) {
    isNotNull();
    long actualValue = actual.get();
    // equality goes through the active comparison strategy so usingComparator is honoured
    if (!objects.getComparisonStrategy().areEqual(actualValue, expectedValue)) {
      throwAssertionError(shouldHaveValue(actual, expectedValue));
    }
    return myself;
  }
  /**
   * Verifies that the actual atomic does not have the given value.
   * <p>
   * Example:
   * <pre><code class='java'> // assertion will pass
   * assertThat(new AtomicLong(42)).doesNotHaveValue(0);
   *
   * // assertion will fail
   * assertThat(new AtomicLong(42)).doesNotHaveValue(42);</code></pre>
   *
   * @param expectedValue the value the actual atomic is expected not to have.
   * @return {@code this} assertion object.
   * @throws AssertionError if the actual atomic is {@code null}.
   * @throws AssertionError if the actual atomic has the given value.
   *
   * @since 2.7.0 / 3.7.0
   */
  public AtomicLongAssert doesNotHaveValue(long expectedValue) {
    isNotNull();
    long actualValue = actual.get();
    // equality goes through the active comparison strategy so usingComparator is honoured
    if (objects.getComparisonStrategy().areEqual(actualValue, expectedValue)) {
      throwAssertionError(shouldNotContainValue(actual, expectedValue));
    }
    return myself;
  }
  @Override
  @CheckReturnValue
  public AtomicLongAssert usingComparator(Comparator<? super AtomicLong> customComparator) {
    // Delegate to the two-argument overload with no comparator description.
    return usingComparator(customComparator, null);
  }
@Override
@CheckReturnValue
public AtomicLongAssert usingComparator(Comparator<? super AtomicLong> customComparator, String customComparatorDescription) {
longs = new Longs(new ComparatorBasedComparisonStrategy(customComparator, customComparatorDescription));
return super.usingComparator(customComparator, customComparatorDescription);
}
  @Override
  @CheckReturnValue
  public AtomicLongAssert usingDefaultComparator() {
    // Restore the shared default Longs instance replaced by usingComparator.
    longs = Longs.instance();
    return super.usingDefaultComparator();
  }
}
|
AtomicLongAssert
|
java
|
apache__kafka
|
share-coordinator/src/main/java/org/apache/kafka/coordinator/share/metrics/ShareCoordinatorMetrics.java
|
{
"start": 1647,
"end": 5879
}
|
class ____ extends CoordinatorMetrics implements AutoCloseable {
// write (write-rate and write-total) Meter share-coordinator-metrics The number of share-group state write calls per second.
// write-latency (write-latency-avg and write-latency-max) Meter share-coordinator-metrics The time taken for a share-group state write call, including the time to write to the share-group state topic.
public static final String METRICS_GROUP = "share-coordinator-metrics";
private final Metrics metrics;
private final Map<TopicPartition, ShareCoordinatorMetricsShard> shards = new ConcurrentHashMap<>();
public static final String SHARE_COORDINATOR_WRITE_SENSOR_NAME = "ShareCoordinatorWrite";
public static final String SHARE_COORDINATOR_WRITE_LATENCY_SENSOR_NAME = "ShareCoordinatorWriteLatency";
public static final String SHARE_COORDINATOR_STATE_TOPIC_PRUNE_SENSOR_NAME = "ShareCoordinatorStateTopicPrune";
private final Map<TopicPartition, ShareGroupPruneMetrics> pruneMetrics = new ConcurrentHashMap<>();
/**
* Global sensors. These are shared across all metrics shards.
*/
public final Map<String, Sensor> globalSensors;
public ShareCoordinatorMetrics() {
this(new Metrics());
}
public ShareCoordinatorMetrics(Metrics metrics) {
this.metrics = Objects.requireNonNull(metrics);
Sensor shareCoordinatorWriteSensor = metrics.sensor(SHARE_COORDINATOR_WRITE_SENSOR_NAME);
shareCoordinatorWriteSensor.add(new Meter(
metrics.metricName("write-rate",
METRICS_GROUP,
"The number of share-group state write calls per second."),
metrics.metricName("write-total",
METRICS_GROUP,
"Total number of share-group state write calls.")));
Sensor shareCoordinatorWriteLatencySensor = metrics.sensor(SHARE_COORDINATOR_WRITE_LATENCY_SENSOR_NAME);
shareCoordinatorWriteLatencySensor.add(
metrics.metricName("write-latency-avg",
METRICS_GROUP,
"The average time taken for a share-group state write call, including the time to write to the share-group state topic."),
new Avg());
shareCoordinatorWriteLatencySensor.add(
metrics.metricName("write-latency-max",
METRICS_GROUP,
"The maximum time taken for a share-group state write call, including the time to write to the share-group state topic."),
new Max());
this.globalSensors = Collections.unmodifiableMap(Utils.mkMap(
Utils.mkEntry(SHARE_COORDINATOR_WRITE_SENSOR_NAME, shareCoordinatorWriteSensor),
Utils.mkEntry(SHARE_COORDINATOR_WRITE_LATENCY_SENSOR_NAME, shareCoordinatorWriteLatencySensor)
));
}
@Override
public void close() throws Exception {
List.of(
SHARE_COORDINATOR_WRITE_SENSOR_NAME,
SHARE_COORDINATOR_WRITE_LATENCY_SENSOR_NAME
).forEach(metrics::removeSensor);
pruneMetrics.values().forEach(v -> metrics.removeSensor(v.pruneSensor.name()));
}
@Override
public ShareCoordinatorMetricsShard newMetricsShard(SnapshotRegistry snapshotRegistry, TopicPartition tp) {
return new ShareCoordinatorMetricsShard(snapshotRegistry, globalSensors, tp);
}
@Override
public void activateMetricsShard(CoordinatorMetricsShard shard) {
if (!(shard instanceof ShareCoordinatorMetricsShard)) {
throw new IllegalArgumentException("ShareCoordinatorMetrics can only activate ShareCoordinatorMetricShard");
}
shards.put(shard.topicPartition(), (ShareCoordinatorMetricsShard) shard);
}
@Override
public void deactivateMetricsShard(CoordinatorMetricsShard shard) {
if (!(shard instanceof ShareCoordinatorMetricsShard)) {
throw new IllegalArgumentException("ShareCoordinatorMetrics can only deactivate ShareCoordinatorMetricShard");
}
shards.remove(shard.topicPartition());
}
@Override
public MetricsRegistry registry() {
// we are not using MetricsRegistry in share coordinator
// but this method is part for implemented
|
ShareCoordinatorMetrics
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/annotations/JdbcTypeRegistration.java
|
{
"start": 947,
"end": 1628
}
|
class ____ only applied once Hibernate begins to process
* that class; it will also affect all future processing. However, it will not change
* previous resolutions to use this newly registered one. Due to this nondeterminism,
* it is recommended to only apply registrations to packages or to use a
* {@link org.hibernate.boot.model.TypeContributor}.
*
* @see org.hibernate.boot.model.TypeContributor
*
* @implNote {@link JdbcType} registrations are maintained by the
* {@link org.hibernate.type.descriptor.jdbc.spi.JdbcTypeRegistry}.
*
* @since 6.0
*/
@Target({PACKAGE, TYPE})
@Inherited
@Retention(RUNTIME)
@Repeatable( JdbcTypeRegistrations.class )
public @
|
are
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/expressions/ContextConsumerTest.java
|
{
"start": 208,
"end": 397
}
|
class ____ {
@Test
void testContextConsumer(ContextConsumer consumer) {
Assertions.assertTrue(consumer.randomField > 0 && consumer.randomField < 20);
}
}
|
ContextConsumerTest
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/ImmutableMultimap.java
|
{
"start": 6492,
"end": 14151
}
|
class ____<K, V> {
@Nullable Map<K, ImmutableCollection.Builder<V>> builderMap;
@Nullable Comparator<? super K> keyComparator;
@Nullable Comparator<? super V> valueComparator;
int expectedValuesPerKey = ImmutableCollection.Builder.DEFAULT_INITIAL_CAPACITY;
/**
* Creates a new builder. The returned builder is equivalent to the builder generated by {@link
* ImmutableMultimap#builder}.
*/
public Builder() {}
/** Creates a new builder with a hint for the number of distinct keys. */
Builder(int expectedKeys) {
if (expectedKeys > 0) {
builderMap = Platform.preservesInsertionOrderOnPutsMapWithExpectedSize(expectedKeys);
}
// otherwise, leave it null to be constructed lazily
}
Map<K, ImmutableCollection.Builder<V>> ensureBuilderMapNonNull() {
Map<K, ImmutableCollection.Builder<V>> result = builderMap;
if (result == null) {
result = Platform.preservesInsertionOrderOnPutsMap();
builderMap = result;
}
return result;
}
ImmutableCollection.Builder<V> newValueCollectionBuilderWithExpectedSize(int expectedSize) {
return ImmutableList.builderWithExpectedSize(expectedSize);
}
/**
* Provides a hint for how many values will be associated with each key newly added to the
* builder after this call. This does not change semantics, but may improve performance if
* {@code expectedValuesPerKey} is a good estimate.
*
* <p>This may be called more than once; each newly added key will use the most recent call to
* {@link #expectedValuesPerKey} as its hint.
*
* @throws IllegalArgumentException if {@code expectedValuesPerKey} is negative
* @since 33.3.0
*/
@CanIgnoreReturnValue
public Builder<K, V> expectedValuesPerKey(int expectedValuesPerKey) {
checkNonnegative(expectedValuesPerKey, "expectedValuesPerKey");
// Always presize to at least 1, since we only bother creating a value collection if there's
// at least one element.
this.expectedValuesPerKey = max(expectedValuesPerKey, 1);
return this;
}
/**
* By default, if we are handed a value collection bigger than expectedValuesPerKey, presize to
* accept that many elements.
*
* <p>This gets overridden in ImmutableSetMultimap.Builder to only trust the size of {@code
* values} if it is a Set and therefore probably already deduplicated.
*/
int expectedValueCollectionSize(int defaultExpectedValues, Iterable<?> values) {
if (values instanceof Collection<?>) {
Collection<?> collection = (Collection<?>) values;
return max(defaultExpectedValues, collection.size());
} else {
return defaultExpectedValues;
}
}
/** Adds a key-value mapping to the built multimap. */
@CanIgnoreReturnValue
public Builder<K, V> put(K key, V value) {
checkEntryNotNull(key, value);
ImmutableCollection.Builder<V> valuesBuilder = ensureBuilderMapNonNull().get(key);
if (valuesBuilder == null) {
valuesBuilder = newValueCollectionBuilderWithExpectedSize(expectedValuesPerKey);
ensureBuilderMapNonNull().put(key, valuesBuilder);
}
valuesBuilder.add(value);
return this;
}
/**
* Adds an entry to the built multimap.
*
* @since 11.0
*/
@CanIgnoreReturnValue
public Builder<K, V> put(Entry<? extends K, ? extends V> entry) {
return put(entry.getKey(), entry.getValue());
}
/**
* Adds entries to the built multimap.
*
* @since 19.0
*/
@CanIgnoreReturnValue
public Builder<K, V> putAll(Iterable<? extends Entry<? extends K, ? extends V>> entries) {
for (Entry<? extends K, ? extends V> entry : entries) {
put(entry);
}
return this;
}
/**
* Stores a collection of values with the same key in the built multimap.
*
* @throws NullPointerException if {@code key}, {@code values}, or any element in {@code values}
* is null. The builder is left in an invalid state.
*/
@CanIgnoreReturnValue
public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
if (key == null) {
throw new NullPointerException("null key in entry: null=" + Iterables.toString(values));
}
Iterator<? extends V> valuesItr = values.iterator();
if (!valuesItr.hasNext()) {
return this;
}
ImmutableCollection.Builder<V> valuesBuilder = ensureBuilderMapNonNull().get(key);
if (valuesBuilder == null) {
valuesBuilder =
newValueCollectionBuilderWithExpectedSize(
expectedValueCollectionSize(expectedValuesPerKey, values));
ensureBuilderMapNonNull().put(key, valuesBuilder);
}
while (valuesItr.hasNext()) {
V value = valuesItr.next();
checkEntryNotNull(key, value);
valuesBuilder.add(value);
}
return this;
}
/**
* Stores an array of values with the same key in the built multimap.
*
* @throws NullPointerException if the key or any value is null. The builder is left in an
* invalid state.
*/
@CanIgnoreReturnValue
public Builder<K, V> putAll(K key, V... values) {
return putAll(key, asList(values));
}
/**
* Stores another multimap's entries in the built multimap. The generated multimap's key and
* value orderings correspond to the iteration ordering of the {@code multimap.asMap()} view,
* with new keys and values following any existing keys and values.
*
* @throws NullPointerException if any key or value in {@code multimap} is null. The builder is
* left in an invalid state.
*/
@CanIgnoreReturnValue
public Builder<K, V> putAll(Multimap<? extends K, ? extends V> multimap) {
for (Entry<? extends K, ? extends Collection<? extends V>> entry :
multimap.asMap().entrySet()) {
putAll(entry.getKey(), entry.getValue());
}
return this;
}
/**
* Specifies the ordering of the generated multimap's keys.
*
* @since 8.0
*/
@CanIgnoreReturnValue
public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
this.keyComparator = checkNotNull(keyComparator);
return this;
}
/**
* Specifies the ordering of the generated multimap's values for each key.
*
* @since 8.0
*/
@CanIgnoreReturnValue
public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
this.valueComparator = checkNotNull(valueComparator);
return this;
}
@CanIgnoreReturnValue
Builder<K, V> combine(Builder<K, V> other) {
if (other.builderMap != null) {
for (Map.Entry<K, ImmutableCollection.Builder<V>> entry : other.builderMap.entrySet()) {
putAll(entry.getKey(), entry.getValue().build());
}
}
return this;
}
/** Returns a newly-created immutable multimap. */
public ImmutableMultimap<K, V> build() {
if (builderMap == null) {
return ImmutableListMultimap.of();
}
Collection<Map.Entry<K, ImmutableCollection.Builder<V>>> mapEntries = builderMap.entrySet();
if (keyComparator != null) {
mapEntries = Ordering.from(keyComparator).<K>onKeys().immutableSortedCopy(mapEntries);
}
return ImmutableListMultimap.fromMapBuilderEntries(mapEntries, valueComparator);
}
}
/**
* Returns an immutable multimap containing the same mappings as {@code multimap}, in the
* "key-grouped" iteration order described in the
|
Builder
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/internal/creation/bytebuddy/ByteBuddyCrossClassLoaderSerializationSupport.java
|
{
"start": 12715,
"end": 12878
}
|
class ____ the <code>ObjectStreamClass</code>
* doesn't carry useful information for this purpose.</p>
*
* @param desc Description of the
|
as
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/join/ResetableIterator.java
|
{
"start": 1028,
"end": 1237
}
|
interface ____ a stateful Iterator that can replay elements
* added to it directly.
* Note that this does not extend {@link java.util.Iterator}.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public
|
to
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/QueryProducer.java
|
{
"start": 9575,
"end": 11058
}
|
class ____ the typed query reference
*
* @see jakarta.persistence.EntityManager#createQuery(TypedQueryReference)
*/
<R> Query<R> createQuery(TypedQueryReference<R> typedQueryReference);
/**
* Create a {@link Query} for the given JPA {@link CriteriaQuery}.
*/
<R> Query<R> createQuery(CriteriaQuery<R> criteriaQuery);
/**
* Create a {@link MutationQuery} for the given JPA {@link CriteriaUpdate}
*
* @deprecated use {@link #createMutationQuery(CriteriaUpdate)}
*/
@Deprecated(since = "6.0") @SuppressWarnings("rawtypes")
Query createQuery(CriteriaUpdate updateQuery);
/**
* Create a {@link MutationQuery} for the given JPA {@link CriteriaDelete}
*
* @deprecated use {@link #createMutationQuery(CriteriaDelete)}
*/
@Deprecated(since = "6.0") @SuppressWarnings("rawtypes")
Query createQuery(CriteriaDelete deleteQuery);
/**
* Create a {@link NativeQuery} instance for the given native SQL query.
*
* @param sqlString a native SQL query string
*
* @return The {@link NativeQuery} instance for manipulation and execution
*
* @see jakarta.persistence.EntityManager#createNativeQuery(String)
*
* @deprecated use {@link #createNativeQuery(String, Class)}
*/
@Deprecated(since = "6.0") @SuppressWarnings("rawtypes")
NativeQuery createNativeQuery(String sqlString);
/**
* Create a {@link NativeQuery} instance for the given native SQL query
* using an implicit mapping to the specified Java type.
* <ul>
* <li>If the given
|
of
|
java
|
apache__maven
|
impl/maven-cli/src/main/java/org/apache/maven/cling/MavenUpCling.java
|
{
"start": 1374,
"end": 3075
}
|
class ____ extends ClingSupport {
/**
* "Normal" Java entry point. Note: Maven uses ClassWorld Launcher and this entry point is NOT used under normal
* circumstances.
*/
public static void main(String[] args) throws IOException {
int exitCode = new MavenUpCling().run(args, null, null, null, false);
System.exit(exitCode);
}
/**
* ClassWorld Launcher "enhanced" entry point: returning exitCode and accepts Class World.
*/
public static int main(String[] args, ClassWorld world) throws IOException {
return new MavenUpCling(world).run(args, null, null, null, false);
}
/**
* ClassWorld Launcher "embedded" entry point: returning exitCode and accepts Class World and streams.
*/
public static int main(
String[] args,
ClassWorld world,
@Nullable InputStream stdIn,
@Nullable OutputStream stdOut,
@Nullable OutputStream stdErr)
throws IOException {
return new MavenUpCling(world).run(args, stdIn, stdOut, stdErr, true);
}
public MavenUpCling() {
super();
}
public MavenUpCling(ClassWorld classWorld) {
super(classWorld);
}
@Override
protected Invoker createInvoker() {
return new UpgradeInvoker(
ProtoLookup.builder().addMapping(ClassWorld.class, classWorld).build(), null);
}
@Override
protected Parser createParser() {
return new UpgradeParser();
}
@Override
protected ParserRequest.Builder createParserRequestBuilder(String[] args) {
return ParserRequest.mvnup(args, createMessageBuilderFactory());
}
}
|
MavenUpCling
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ArrayJavaType.java
|
{
"start": 1134,
"end": 10516
}
|
class ____<T> extends AbstractArrayJavaType<T[], T> {
public ArrayJavaType(BasicType<T> baseDescriptor) {
this( baseDescriptor.getJavaTypeDescriptor() );
}
public ArrayJavaType(JavaType<T> baseDescriptor) {
super( arrayClass( baseDescriptor.getJavaTypeClass() ),
baseDescriptor,
new ArrayMutabilityPlan<>( baseDescriptor ) );
}
@Override
public BasicType<?> resolveType(
TypeConfiguration typeConfiguration,
Dialect dialect,
BasicType<T> elementType,
ColumnTypeInformation columnTypeInformation,
JdbcTypeIndicators stdIndicators) {
if ( stdIndicators.isLob() ) {
final Class<?> javaTypeClass = getJavaTypeClass();
if ( javaTypeClass == Byte[].class ) {
return typeConfiguration.getBasicTypeRegistry().resolve(
ByteArrayJavaType.INSTANCE,
ByteArrayJavaType.INSTANCE.getRecommendedJdbcType( stdIndicators )
);
}
if ( javaTypeClass == Character[].class ) {
return typeConfiguration.getBasicTypeRegistry().resolve(
CharacterArrayJavaType.INSTANCE,
CharacterArrayJavaType.INSTANCE.getRecommendedJdbcType( stdIndicators )
);
}
}
final var elementTypeJavaType = elementType.getJavaTypeDescriptor();
final var elementJavaTypeClass = elementTypeJavaType.getJavaTypeClass();
if ( elementType instanceof BasicPluralType<?, ?>
|| elementJavaTypeClass != null
&& elementJavaTypeClass.isArray()
&& elementJavaTypeClass != byte[].class ) {
// No support for nested arrays, except for byte[][]
return null;
}
final ArrayJavaType<T> arrayJavaType;
if ( getElementJavaType() == elementTypeJavaType ) {
arrayJavaType = this;
}
else {
arrayJavaType = new ArrayJavaType<>( elementTypeJavaType );
// Register the array type as that will be resolved in the next step
typeConfiguration.getJavaTypeRegistry().addDescriptor( arrayJavaType );
}
final var valueConverter = elementType.getValueConverter();
return valueConverter == null
? resolveType( typeConfiguration, arrayJavaType, elementType, columnTypeInformation, stdIndicators )
: createTypeUsingConverter( typeConfiguration, elementType, columnTypeInformation, stdIndicators, valueConverter );
}
@Override
public String extractLoggableRepresentation(T[] value) {
if ( value == null ) {
return "null";
}
int iMax = value.length - 1;
if ( iMax == -1 ) {
return "[]";
}
final var sb = new StringBuilder();
sb.append( '[' );
for ( int i = 0; ; i++ ) {
sb.append( getElementJavaType().extractLoggableRepresentation( value[i] ) );
if ( i == iMax ) {
return sb.append( ']' ).toString();
}
sb.append( ", " );
}
}
@Override
public boolean areEqual(T[] one, T[] another) {
if ( one == null && another == null ) {
return true;
}
if ( one == null || another == null ) {
return false;
}
if ( one.length != another.length ) {
return false;
}
int l = one.length;
for ( int i = 0; i < l; i++ ) {
if ( !getElementJavaType().areEqual( one[i], another[i] )) {
return false;
}
}
return true;
}
@Override
public int extractHashCode(T[] value) {
if ( value == null ) {
return 0;
}
int result = 1;
for ( T element : value ) {
result = 31 * result + ( element == null ? 0 : getElementJavaType().extractHashCode( element ) );
}
return result;
}
@Override
public String toString(T[] value) {
if ( value == null ) {
return null;
}
final StringBuilder sb = new StringBuilder();
sb.append( '{' );
String glue = "";
for ( T v : value ) {
sb.append( glue );
if ( v == null ) {
sb.append( "null" );
glue = ",";
continue;
}
sb.append( '"' );
final String valstr = getElementJavaType().toString( v );
// using replaceAll is a shorter, but much slower way to do this
for (int i = 0, len = valstr.length(); i < len; i ++ ) {
char c = valstr.charAt( i );
// Surrogate pairs. This is how they're done.
if (c == '\\' || c == '"') {
sb.append( '\\' );
}
sb.append( c );
}
sb.append( '"' );
glue = ",";
}
sb.append( '}' );
return sb.toString();
}
@Override
public T[] fromString(CharSequence charSequence) {
if ( charSequence == null ) {
return null;
}
final var lst = new java.util.ArrayList<String>();
StringBuilder sb = null;
char lastChar = charSequence.charAt( charSequence.length() - 1 );
char firstChar = charSequence.charAt( 0 );
if ( firstChar != '{' || lastChar != '}' ) {
throw new IllegalArgumentException( "Cannot parse given string into array of strings. First and last character must be { and }" );
}
int len = charSequence.length();
boolean inquote = false;
for ( int i = 1; i < len; i ++ ) {
char c = charSequence.charAt( i );
if ( c == '"' ) {
if (inquote) {
lst.add( sb.toString() );
}
else {
sb = new StringBuilder();
}
inquote = !inquote;
continue;
}
else if ( !inquote ) {
if ( Character.isWhitespace( c ) ) {
continue;
}
else if ( c == ',' ) {
// treat no-value between commas to mean null
if ( sb == null ) {
lst.add( null );
}
else {
sb = null;
}
continue;
}
else {
// i + 4, because there has to be a comma or closing brace after null
if ( i + 4 < len
&& charSequence.charAt( i ) == 'n'
&& charSequence.charAt( i + 1 ) == 'u'
&& charSequence.charAt( i + 2 ) == 'l'
&& charSequence.charAt( i + 3 ) == 'l') {
lst.add( null );
i += 4;
continue;
}
if (i + 1 == len) {
break;
}
throw new IllegalArgumentException( "Cannot parse given string into array of strings."
+ " Outside of quote, but neither whitespace, comma, array end, nor null found." );
}
}
else if ( c == '\\' && i + 2 < len && (charSequence.charAt( i + 1 ) == '\\' || charSequence.charAt( i + 1 ) == '"')) {
c = charSequence.charAt( ++i );
}
// If there is ever a null-pointer here, the if-else logic before is incomplete
sb.append( c );
}
//noinspection unchecked
final var result = (T[]) newInstance( getElementJavaType().getJavaTypeClass(), lst.size() );
for ( int i = 0; i < result.length; i ++ ) {
if ( lst.get( i ) != null ) {
result[i] = getElementJavaType().fromString( lst.get( i ) );
}
}
return result;
}
@Override
public <X> X unwrap(T[] value, Class<X> type, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( type.isInstance( value ) ) {
//noinspection unchecked
return (X) value;
}
else if ( type == byte[].class ) {
return (X) toBytes( value );
}
else if ( type == BinaryStream.class ) {
//noinspection unchecked
return (X) new ArrayBackedBinaryStream( toBytes( value ) );
}
else if ( type.isArray() ) {
final var preferredJavaTypeClass = type.getComponentType();
final Object[] unwrapped = (Object[]) newInstance( preferredJavaTypeClass, value.length );
for ( int i = 0; i < value.length; i++ ) {
unwrapped[i] = getElementJavaType().unwrap( value[i], preferredJavaTypeClass, options );
}
//noinspection unchecked
return (X) unwrapped;
}
throw unknownUnwrap( type );
}
@Override
public <X> T[] wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( value instanceof java.sql.Array array ) {
try {
//noinspection unchecked
value = (X) array.getArray();
}
catch ( SQLException ex ) {
// This basically shouldn't happen unless you've lost connection to the database.
throw new HibernateException( ex );
}
}
final var elementJavaType = getElementJavaType();
if ( value instanceof Object[] raw ) {
final var componentClass = elementJavaType.getJavaTypeClass();
//noinspection unchecked
final var wrapped = (T[]) newInstance( componentClass, raw.length );
if ( componentClass.isAssignableFrom( value.getClass().getComponentType() ) ) {
for (int i = 0; i < raw.length; i++) {
//noinspection unchecked
wrapped[i] = (T) raw[i];
}
}
else {
for ( int i = 0; i < raw.length; i++ ) {
wrapped[i] = elementJavaType.wrap( raw[i], options );
}
}
return wrapped;
}
else if ( value instanceof byte[] bytes ) {
return fromBytes( bytes );
}
else if ( value instanceof BinaryStream binaryStream ) {
// When the value is a BinaryStream, this is a deserialization request
return fromBytes( binaryStream.getBytes() );
}
else if ( elementJavaType.isInstance( value ) ) {
// Support binding a single element as parameter value
//noinspection unchecked
final var wrapped = (T[]) newInstance( elementJavaType.getJavaTypeClass(), 1 );
//noinspection unchecked
wrapped[0] = (T) value;
return wrapped;
}
else if ( value instanceof Collection<?> collection ) {
//noinspection unchecked
final var wrapped = (T[]) newInstance( elementJavaType.getJavaTypeClass(), collection.size() );
int i = 0;
for ( Object e : collection ) {
wrapped[i++] = elementJavaType.wrap( e, options );
}
return wrapped;
}
throw unknownWrap( value.getClass() );
}
private static <T> byte[] toBytes(T[] array) {
if ( array.getClass().getComponentType().isEnum() ) {
final byte[] bytes = new byte[array.length];
for (int i = 0; i < array.length; i++ ) {
final T value = array[i];
// encode null
|
ArrayJavaType
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/http/InterceptUrlConfigTests.java
|
{
"start": 2670,
"end": 14308
}
|
class ____ {
private static final String CONFIG_LOCATION_PREFIX = "classpath:org/springframework/security/config/http/InterceptUrlConfigTests";
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired
MockMvc mvc;
/**
* sec-2256
*/
@Test
public void requestWhenMethodIsSpecifiedThenItIsNotGivenPriority() throws Exception {
this.spring.configLocations(this.xml("Sec2256")).autowire();
// @formatter:off
this.mvc.perform(post("/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path").with(userCredentials()))
.andExpect(status().isOk());
// @formatter:on
}
/**
* sec-2256
*/
@Test
public void requestWhenMethodIsSpecifiedAndAuthorizationManagerThenItIsNotGivenPriority() throws Exception {
this.spring.configLocations(this.xml("Sec2256AuthorizationManager")).autowire();
// @formatter:off
this.mvc.perform(post("/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path").with(userCredentials()))
.andExpect(status().isOk());
// @formatter:on
assertThat(this.spring.getContext().getBean(AuthorizationManager.class)).isNotNull();
}
/**
* sec-2355
*/
@Test
public void requestWhenUsingPatchThenAuthorizesRequestsAccordingly() throws Exception {
this.spring.configLocations(this.xml("PatchMethod")).autowire();
// @formatter:off
this.mvc.perform(get("/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(patch("/path").with(userCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(patch("/path").with(adminCredentials()))
.andExpect(status().isOk());
// @formatter:on
}
/**
* sec-2355
*/
@Test
public void requestWhenUsingPatchAndAuthorizationManagerThenAuthorizesRequestsAccordingly() throws Exception {
this.spring.configLocations(this.xml("PatchMethodAuthorizationManager")).autowire();
// @formatter:off
this.mvc.perform(get("/path").with(userCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(patch("/path").with(userCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(patch("/path").with(adminCredentials()))
.andExpect(status().isOk());
// @formatter:on
assertThat(this.spring.getContext().getBean(AuthorizationManager.class)).isNotNull();
}
@Test
public void requestWhenUsingHasAnyRoleThenAuthorizesRequestsAccordingly() throws Exception {
this.spring.configLocations(this.xml("HasAnyRole")).autowire();
// @formatter:off
this.mvc.perform(get("/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path").with(adminCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
}
@Test
public void requestWhenUsingHasAnyRoleAndAuthorizationManagerThenAuthorizesRequestsAccordingly() throws Exception {
this.spring.configLocations(this.xml("HasAnyRoleAuthorizationManager")).autowire();
// @formatter:off
this.mvc.perform(get("/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path").with(adminCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
assertThat(this.spring.getContext().getBean(AuthorizationManager.class)).isNotNull();
}
/**
* sec-2059
*/
@Test
public void requestWhenUsingPathVariablesThenAuthorizesRequestsAccordingly() throws Exception {
this.spring.configLocations(this.xml("PathVariables")).autowire();
// @formatter:off
this.mvc.perform(get("/path/user/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path/otheruser/path").with(userCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(get("/path").with(userCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
}
/**
* sec-2059
*/
@Test
public void requestWhenUsingPathVariablesAndAuthorizationManagerThenAuthorizesRequestsAccordingly()
throws Exception {
this.spring.configLocations(this.xml("PathVariablesAuthorizationManager")).autowire();
// @formatter:off
this.mvc.perform(get("/path/user/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path/otheruser/path").with(userCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(get("/path").with(userCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
assertThat(this.spring.getContext().getBean(AuthorizationManager.class)).isNotNull();
}
/**
* gh-3786
*/
@Test
public void requestWhenUsingCamelCasePathVariablesThenAuthorizesRequestsAccordingly() throws Exception {
this.spring.configLocations(this.xml("CamelCasePathVariables")).autowire();
// @formatter:off
this.mvc.perform(get("/path/user/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path/otheruser/path").with(userCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(get("/PATH/user/path").with(userCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
}
/**
* gh-3786
*/
@Test
public void requestWhenUsingCamelCasePathVariablesAndAuthorizationManagerThenAuthorizesRequestsAccordingly()
throws Exception {
this.spring.configLocations(this.xml("CamelCasePathVariablesAuthorizationManager")).autowire();
// @formatter:off
this.mvc.perform(get("/path/user/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path/otheruser/path").with(userCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(get("/PATH/user/path").with(userCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
assertThat(this.spring.getContext().getBean(AuthorizationManager.class)).isNotNull();
}
/**
* sec-2059
*/
@Test
public void requestWhenUsingPathVariablesAndTypeConversionThenAuthorizesRequestsAccordingly() throws Exception {
this.spring.configLocations(this.xml("TypeConversionPathVariables")).autowire();
// @formatter:off
this.mvc.perform(get("/path/1/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path/2/path").with(userCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
}
/**
* sec-2059
*/
@Test
public void requestWhenUsingPathVariablesAndTypeConversionAndAuthorizationManagerThenAuthorizesRequestsAccordingly()
throws Exception {
this.spring.configLocations(this.xml("TypeConversionPathVariablesAuthorizationManager")).autowire();
// @formatter:off
this.mvc.perform(get("/path/1/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path/2/path").with(userCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
assertThat(this.spring.getContext().getBean(AuthorizationManager.class)).isNotNull();
}
@Test
public void requestWhenUsingMvcMatchersAndPathVariablesThenAuthorizesRequestsAccordingly() throws Exception {
this.spring.configLocations(this.xml("MvcMatchersPathVariables")).autowire();
// @formatter:off
this.mvc.perform(get("/path/user/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path/otheruser/path").with(userCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(get("/PATH/user/path").with(userCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
}
@Test
public void requestWhenUsingMvcMatchersAndPathVariablesAndAuthorizationManagerThenAuthorizesRequestsAccordingly()
throws Exception {
this.spring.configLocations(this.xml("MvcMatchersPathVariablesAuthorizationManager")).autowire();
// @formatter:off
this.mvc.perform(get("/path/user/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path/otheruser/path").with(userCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(get("/PATH/user/path").with(userCredentials()))
.andExpect(status().isForbidden());
// @formatter:on
assertThat(this.spring.getContext().getBean(AuthorizationManager.class)).isNotNull();
}
@Test
public void configureWhenUsingRegexMatcherAndServletPathThenThrowsException() {
assertThatExceptionOfType(BeanDefinitionParsingException.class)
.isThrownBy(() -> this.spring.configLocations(this.xml("RegexMatcherServletPath")).autowire());
}
@Test
public void configureWhenUsingRegexMatcherAndServletPathAndAuthorizationManagerThenThrowsException() {
assertThatExceptionOfType(BeanDefinitionParsingException.class).isThrownBy(
() -> this.spring.configLocations(this.xml("RegexMatcherServletPathAuthorizationManager")).autowire());
}
@Test
public void configureWhenUsingCiRegexMatcherAndServletPathThenThrowsException() {
assertThatExceptionOfType(BeanDefinitionParsingException.class)
.isThrownBy(() -> this.spring.configLocations(this.xml("CiRegexMatcherServletPath")).autowire());
}
@Test
public void configureWhenUsingCiRegexMatcherAndServletPathAndAuthorizationManagerThenThrowsException() {
assertThatExceptionOfType(BeanDefinitionParsingException.class)
.isThrownBy(() -> this.spring.configLocations(this.xml("CiRegexMatcherServletPathAuthorizationManager"))
.autowire());
}
@Test
public void configureWhenUsingDefaultMatcherAndServletPathThenNoException() {
assertThatNoException()
.isThrownBy(() -> this.spring.configLocations(this.xml("DefaultMatcherServletPath")).autowire());
}
@Test
public void configureWhenUsingDefaultMatcherAndServletPathAndAuthorizationManagerThenNoException() {
assertThatNoException()
.isThrownBy(() -> this.spring.configLocations(this.xml("DefaultMatcherServletPathAuthorizationManager"))
.autowire());
}
@Test
public void requestWhenUsingFilterAllDispatcherTypesAndAuthorizationManagerThenAuthorizesRequestsAccordingly()
throws Exception {
this.spring.configLocations(this.xml("AuthorizationManagerFilterAllDispatcherTypes")).autowire();
// @formatter:off
this.mvc.perform(get("/path").with(userCredentials()))
.andExpect(status().isOk());
this.mvc.perform(get("/path").with(adminCredentials()))
.andExpect(status().isForbidden());
this.mvc.perform(get("/error").with((request) -> {
request.setAttribute(WebUtils.ERROR_REQUEST_URI_ATTRIBUTE, "/error");
request.setDispatcherType(DispatcherType.ERROR);
return request;
})).andExpect(status().isOk());
this.mvc.perform(get("/path").with((request) -> {
request.setAttribute(WebUtils.ERROR_REQUEST_URI_ATTRIBUTE, "/path");
request.setDispatcherType(DispatcherType.ERROR);
return request;
})).andExpect(status().isUnauthorized());
// @formatter:on
assertThat(this.spring.getContext().getBean(AuthorizationManager.class)).isNotNull();
}
private static RequestPostProcessor adminCredentials() {
return httpBasic("admin", "password");
}
private static RequestPostProcessor userCredentials() {
return httpBasic("user", "password");
}
private MockServletContext mockServletContext(String servletPath) {
MockServletContext servletContext = spy(new MockServletContext());
final ServletRegistration registration = mock(ServletRegistration.class);
given(registration.getMappings()).willReturn(Collections.singleton(servletPath));
Answer<Map<String, ? extends ServletRegistration>> answer = (invocation) -> Collections.singletonMap("spring",
registration);
given(servletContext.getServletRegistrations()).willAnswer(answer);
return servletContext;
}
private String xml(String configName) {
return CONFIG_LOCATION_PREFIX + "-" + configName + ".xml";
}
@RestController
static
|
InterceptUrlConfigTests
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java
|
{
"start": 1089,
"end": 3877
}
|
class ____ extends RpcMessage {
public static final int RPC_VERSION = 2;
private static final Logger LOG = LoggerFactory.getLogger(RpcCall.class);
public static RpcCall read(XDR xdr) {
return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()),
xdr.readInt(), xdr.readInt(), xdr.readInt(), xdr.readInt(),
Credentials.readFlavorAndCredentials(xdr),
Verifier.readFlavorAndVerifier(xdr));
}
public static RpcCall getInstance(int xid, int program, int version,
int procedure, Credentials cred, Verifier verifier) {
return new RpcCall(xid, RpcMessage.Type.RPC_CALL, 2, program, version,
procedure, cred, verifier);
}
private final int rpcVersion;
private final int program;
private final int version;
private final int procedure;
private final Credentials credentials;
private final Verifier verifier;
protected RpcCall(int xid, RpcMessage.Type messageType, int rpcVersion,
int program, int version, int procedure, Credentials credential,
Verifier verifier) {
super(xid, messageType);
this.rpcVersion = rpcVersion;
this.program = program;
this.version = version;
this.procedure = procedure;
this.credentials = credential;
this.verifier = verifier;
if (LOG.isTraceEnabled()) {
LOG.trace(this.toString());
}
validate();
}
private void validateRpcVersion() {
if (rpcVersion != RPC_VERSION) {
throw new IllegalArgumentException("RPC version is expected to be "
+ RPC_VERSION + " but got " + rpcVersion);
}
}
public void validate() {
validateMessageType(RpcMessage.Type.RPC_CALL);
validateRpcVersion();
// Validate other members
// Throw exception if validation fails
}
public int getRpcVersion() {
return rpcVersion;
}
public int getProgram() {
return program;
}
public int getVersion() {
return version;
}
public int getProcedure() {
return procedure;
}
public Credentials getCredential() {
return credentials;
}
public Verifier getVerifier() {
return verifier;
}
@Override
public XDR write(XDR xdr) {
xdr.writeInt(xid);
xdr.writeInt(RpcMessage.Type.RPC_CALL.getValue());
xdr.writeInt(2);
xdr.writeInt(program);
xdr.writeInt(version);
xdr.writeInt(procedure);
Credentials.writeFlavorAndCredentials(credentials, xdr);
Verifier.writeFlavorAndVerifier(verifier, xdr);
return xdr;
}
@Override
public String toString() {
return String.format("Xid:%d, messageType:%s, rpcVersion:%d, program:%d,"
+ " version:%d, procedure:%d, credential:%s, verifier:%s", xid,
messageType, rpcVersion, program, version, procedure,
credentials.toString(), verifier.toString());
}
}
|
RpcCall
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/junit/jupiter/BDDSoftAssertionsExtensionIntegrationTest.java
|
{
"start": 1993,
"end": 2801
}
|
class ____ extends AbstractSoftAssertionsExtensionIntegrationTests {
@Override
protected Class<?> getTestInstancePerMethodTestCase() {
return TestInstancePerMethodExample.class;
}
@Override
protected Class<?> getTestInstancePerClassTestCase() {
return TestInstancePerClassExample.class;
}
@Override
protected Class<?> getTestInstancePerMethodNestedTestCase() {
return TestInstancePerMethodNestedExample.class;
}
@Override
protected Class<?> getTestInstancePerClassNestedTestCase() {
return TestInstancePerClassNestedExample.class;
}
// -------------------------------------------------------------------------
@ExtendWith(SoftAssertionsExtension.class)
@TestMethodOrder(OrderAnnotation.class)
private static abstract
|
BDDSoftAssertionsExtensionIntegrationTest
|
java
|
apache__camel
|
components/camel-grpc/src/test/java/org/apache/camel/component/grpc/GrpcConsumerPropagationTest.java
|
{
"start": 7157,
"end": 7457
}
|
class ____ {
public PongResponse buildAsyncPongResponse(PingRequest pingRequests) {
return PongResponse.newBuilder().setPongName(pingRequests.getPingName() + GRPC_TEST_PONG_VALUE)
.setPongId(pingRequests.getPingId()).build();
}
}
}
|
GrpcMessageBuilder
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/event/EagerTestExecutionEventPublishingTests.java
|
{
"start": 2968,
"end": 3204
}
|
class
____.class, //
BeforeTestMethodEvent.class, //
BeforeTestExecutionEvent.class, //
AfterTestExecutionEvent.class, //
AfterTestMethodEvent.class, //
AfterTestClassEvent.class, //
// 2nd test
|
PrepareTestInstanceEvent
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
|
{
"start": 2002,
"end": 2848
}
|
class ____ extends Configured
implements GroupMappingServiceProvider {
@VisibleForTesting
protected static final Logger LOG =
LoggerFactory.getLogger(ShellBasedUnixGroupsMapping.class);
private long timeout = CommonConfigurationKeys.
HADOOP_SECURITY_GROUP_SHELL_COMMAND_TIMEOUT_DEFAULT;
private static final Set<String> EMPTY_GROUPS_SET = Collections.emptySet();
@Override
public void setConf(Configuration conf) {
super.setConf(conf);
if (conf != null) {
timeout = conf.getTimeDuration(
CommonConfigurationKeys.
HADOOP_SECURITY_GROUP_SHELL_COMMAND_TIMEOUT_KEY,
CommonConfigurationKeys.
HADOOP_SECURITY_GROUP_SHELL_COMMAND_TIMEOUT_DEFAULT,
TimeUnit.MILLISECONDS);
}
}
@SuppressWarnings("serial")
private static
|
ShellBasedUnixGroupsMapping
|
java
|
apache__kafka
|
connect/runtime/src/main/java/org/apache/kafka/connect/storage/KafkaConfigBackingStore.java
|
{
"start": 7697,
"end": 10834
}
|
class ____'t currently make
* use of Kafka's multi-record transactions and instead uses task commit messages to ensure that readers do not
* end up using inconsistent configs. For example, consider if a connector wrote configs for its tasks,
* then was reconfigured and only managed to write updated configs for half its tasks. If task configs
* were applied immediately you could be using half the old configs and half the new configs. In that condition, some
* partitions may be double-assigned because the old config and new config may use completely different assignments.
* Therefore, when reading the log, we must buffer config updates for a connector's tasks and only apply atomically them
* once a commit message has been read.
* </p>
* <p>
* However, there are also further challenges. This simple buffering approach would work fine as long as the entire log was
* always available, but we would like to be able to enable compaction so our configuration topic does not grow
* indefinitely. Compaction may break a normal log because old entries will suddenly go missing. A new worker reading
* from the beginning of the log in order to build up the full current configuration will see task commits, but some
* records required for those commits will have been removed because the same keys have subsequently been rewritten.
* For example, if you have a sequence of record keys [connector-foo-config, task-foo-1-config, task-foo-2-config,
* commit-foo (2 tasks), task-foo-1-config, commit-foo (1 task)], we can end up with a compacted log containing
* [connector-foo-config, task-foo-2-config, commit-foo (2 tasks), task-foo-1-config, commit-foo (1 task)]. When read
* back, the first commit will see an invalid state because the first task-foo-1-config has been cleaned up.
* </p>
* <p>
* Compaction can further complicate things if writing new task configs fails mid-write. Consider a similar scenario
* as the previous one, but in this case both the first and second update will write 2 task configs. However, the
* second write fails half of the way through:
* [connector-foo-config, task-foo-1-config, task-foo-2-config, commit-foo (2 tasks), task-foo-1-config]. Now compaction
* occurs and we're left with
* [connector-foo-config, task-foo-2-config, commit-foo (2 tasks), task-foo-1-config]. At the first commit, we don't
* have a complete set of configs. And because of the failure, there is no second commit. We are left in an inconsistent
* state with no obvious way to resolve the issue -- we can try to keep on reading, but the failed node may never
* recover and write the updated config. Meanwhile, other workers may have seen the entire log; they will see the second
* task-foo-1-config waiting to be applied, but will otherwise think everything is ok -- they have a valid set of task
* configs for connector "foo".
* </p>
* <p>
* Because we can encounter these inconsistencies and addressing them requires support from the rest of the system
* (resolving the task configuration inconsistencies requires support from the connector instance to regenerate updated
* configs), this
|
doesn
|
java
|
alibaba__nacos
|
common/src/test/java/com/alibaba/nacos/common/notify/NotifyCenterTest.java
|
{
"start": 11159,
"end": 11313
}
|
class ____ extends SlowEvent {
private static final long serialVersionUID = 6713279688910446154L;
}
private static
|
TestSlowEvent
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/SubclassGenerator.java
|
{
"start": 28528,
"end": 36897
}
|
class ____ not constructed yet
db0.ifNot(cc.this_().field(constructedField), db1 -> {
if (method.isAbstract()) {
db1.throw_(IllegalStateException.class, "Cannot invoke abstract method");
} else {
db1.return_(db1.invokeVirtual(forwardDesc, cc.this_(), params));
}
});
DecoratorMethod decoratorMethod = decoration.firstDecoratorMethod();
DecoratorInfo firstDecorator = decoratorMethod.decorator;
FieldVar decoratorInstance = cc.this_().field(FieldDesc.of(cc.type(),
firstDecorator.getIdentifier(), Object.class));
// We need to use the decorator method in order to support not visible or generic decorators
MethodDesc decoratorMethodDesc = methodDescOf(decoratorMethod.method);
db0.return_(db0.cast(db0.invokeInterface(decoratorMethodDesc, decoratorInstance, params),
methodDesc.returnType()));
});
});
}
}
bc.return_();
});
});
}
static Function<Set<AnnotationInstanceEquivalenceProxy>, String> createBindingsFun(IntegerHolder bindingIdx,
BlockCreator bc, Expr bindingsMap, Map<AnnotationInstanceEquivalenceProxy, Expr> bindingsLiterals,
BeanInfo bean, AnnotationLiteralProcessor annotationLiterals) {
Function<AnnotationInstanceEquivalenceProxy, Expr> bindingsLiteralFun = binding -> {
// Create annotation literal if needed
ClassInfo bindingClass = bean.getDeployment().getInterceptorBinding(binding.get().name());
return bc.localVar("literal", annotationLiterals.create(bc, bindingClass, binding.get()));
};
return bindings -> {
String key = "b" + bindingIdx.i++;
Expr value;
if (bindings.size() == 1) {
value = bc.invokeStatic(MethodDescs.COLLECTIONS_SINGLETON,
bindingsLiterals.computeIfAbsent(bindings.iterator().next(), bindingsLiteralFun));
} else {
LocalVar bindingsArray = bc.localVar("bindings", bc.newEmptyArray(Object.class, bindings.size()));
int bindingsIndex = 0;
for (AnnotationInstanceEquivalenceProxy binding : bindings) {
bc.set(bindingsArray.elem(bindingsIndex), bindingsLiterals.computeIfAbsent(binding, bindingsLiteralFun));
bindingsIndex++;
}
value = bc.invokeStatic(MethodDescs.SETS_OF, bindingsArray);
}
bc.withMap(bindingsMap).put(Const.of(key), value);
return key;
};
}
static Function<List<InterceptorInfo>, String> createInterceptorChainKeysFun(IntegerHolder chainIdx,
BlockCreator bc, Expr interceptorChainMap, Map<String, LocalVar> interceptorInstanceToLocalVar,
Map<String, ? extends Var> interceptorBeanToVar) {
return interceptors -> {
String key = "i" + chainIdx.i++;
if (interceptors.size() == 1) {
// List<InvocationContextImpl.InterceptorInvocation> chain = Collections.singletonList(...);
InterceptorInfo interceptor = interceptors.get(0);
LocalVar interceptorInstance = interceptorInstanceToLocalVar.get(interceptor.getIdentifier());
Expr interceptionInvocation = bc.invokeStatic(MethodDescs.INTERCEPTOR_INVOCATION_AROUND_INVOKE,
interceptorBeanToVar.get(interceptor.getIdentifier()), interceptorInstance);
bc.withMap(interceptorChainMap).put(Const.of(key), bc.listOf(interceptionInvocation));
} else {
// List<InvocationContextImpl.InterceptorInvocation> chain = new ArrayList<>();
LocalVar chain = bc.localVar("chain", bc.new_(ConstructorDesc.of(ArrayList.class)));
for (InterceptorInfo interceptor : interceptors) {
// chain.add(InvocationContextImpl.InterceptorInvocation.aroundInvoke(p3,interceptorInstanceMap.get(InjectableInterceptor.getIdentifier())))
LocalVar interceptorInstance = interceptorInstanceToLocalVar.get(interceptor.getIdentifier());
Expr interceptionInvocation = bc.invokeStatic(MethodDescs.INTERCEPTOR_INVOCATION_AROUND_INVOKE,
interceptorBeanToVar.get(interceptor.getIdentifier()), interceptorInstance);
bc.withList(chain).add(interceptionInvocation);
}
bc.withMap(interceptorChainMap).put(Const.of(key), chain);
}
return key;
};
}
private void processDecorator(Gizmo gizmo, ClassCreator subclass,
DecoratorInfo decorator, BeanInfo bean, Type providerType,
BlockCreator subclassCtor,
ParamVar decoratorParam, Map<String, LocalVar> decoratorToLocalVar,
ParamVar ccParam, Map<MethodDesc, MethodDesc> forwardingMethods) {
// First generate the delegate subclass
// An instance of this subclass is injected in the delegate injection point of a decorator instance
ClassInfo decoratorClass = decorator.getTarget().get().asClass();
String baseName;
if (decoratorClass.enclosingClass() != null) {
baseName = decoratorClass.enclosingClass().withoutPackagePrefix() + UNDERSCORE
+ decoratorClass.name().withoutPackagePrefix();
} else {
baseName = decoratorClass.name().withoutPackagePrefix();
}
// Name: AlphaDecorator_FooBeanId_Delegate_Subclass
String generatedName = generatedName(providerType.name(),
baseName + UNDERSCORE + bean.getIdentifier() + UNDERSCORE + "Delegate");
Set<MethodInfo> decoratedMethods = bean.getDecoratedMethods(decorator);
Set<MethodDesc> decoratedMethodDescriptors = new HashSet<>(decoratedMethods.size());
for (MethodInfo m : decoratedMethods) {
decoratedMethodDescriptors.add(methodDescOf(m));
}
Map<MethodDesc, DecoratorMethod> nextDecorators = bean.getNextDecorators(decorator);
List<DecoratorInfo> decoratorParameters = new ArrayList<>();
for (DecoratorMethod decoratorMethod : nextDecorators.values()) {
decoratorParameters.add(decoratorMethod.decorator);
}
Collections.sort(decoratorParameters);
List<ClassDesc> delegateSubclassCtorParams = new ArrayList<>();
ClassDesc delegateSubclass = gizmo.class_(generatedName, cc -> {
ClassInfo delegateTypeClass = decorator.getDelegateTypeClass();
boolean delegateTypeIsInterface = delegateTypeClass.isInterface();
// The subclass implements/extends the delegate type
if (delegateTypeIsInterface) {
cc.implements_(classDescOf(delegateTypeClass));
} else {
cc.extends_(classDescOf(delegateTypeClass));
}
// Holds a reference to the subclass of the decorated bean
FieldDesc subclassField = cc.field("subclass", fc -> {
fc.private_();
fc.final_();
fc.setType(subclass.type());
});
List<ClassDesc> nextDecoratorTypes = new ArrayList<>();
Map<DecoratorInfo, FieldDesc> nextDecoratorToField = new HashMap<>();
for (DecoratorInfo nextDecorator : decoratorParameters) {
FieldDesc desc = cc.field(nextDecorator.getIdentifier(), fc -> {
fc.private_();
fc.final_();
// this can be always of type `Object`, because decorated types are always interfaces
// and their methods are always invoked via `invokeinterface` (see elsewhere in this class)
// and the JVM verifier doesn't care about the receiver type of
|
if
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/hive/HiveCreateTableTest_31_mappedBy_for_dla.java
|
{
"start": 917,
"end": 2817
}
|
class ____ extends OracleTest {
public void test_0() throws Exception {
String sql = //
"create table aaaa (\n" +
" id int not null MAPPED BY (name='pk',format='yyy',charset='utf8',type='string')\n" +
") MAPPED by (name='AAAA')";
List<SQLStatement> statementList = SQLUtils.toStatementList(sql, JdbcConstants.HIVE);
SQLStatement stmt = statementList.get(0);
System.out.println(stmt.toString());
assertEquals(1, statementList.size());
SchemaStatVisitor visitor = SQLUtils.createSchemaStatVisitor(JdbcConstants.HIVE);
stmt.accept(visitor);
assertEquals("CREATE TABLE aaaa (\n" +
"\tid int NOT NULL MAPPED BY (name = 'pk', format = 'yyy', charset = 'utf8', type = 'string')\n" +
")" +
" MAPPED BY (name = 'AAAA')", stmt.toString());
assertEquals("CREATE TABLE aaaa (\n" +
"\tid int NOT NULL MAPPED BY (name = 'pk', format = 'yyy', charset = 'utf8', type = 'string')\n" +
")" +
" MAPPED BY (name = 'AAAA')", stmt.clone().toString());
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(1, visitor.getTables().size());
assertEquals(1, visitor.getColumns().size());
assertEquals(0, visitor.getConditions().size());
assertEquals(0, visitor.getRelationships().size());
assertEquals(0, visitor.getOrderByColumns().size());
assertTrue(visitor.containsTable("aaaa"));
}
}
|
HiveCreateTableTest_31_mappedBy_for_dla
|
java
|
elastic__elasticsearch
|
x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/LocalStateAutoscaling.java
|
{
"start": 823,
"end": 1227
}
|
class ____ extends LocalStateCompositeXPackPlugin {
private final AutoscalingTestPlugin testPlugin;
public LocalStateAutoscaling(final Settings settings) {
super(settings, null);
testPlugin = new AutoscalingTestPlugin();
plugins.add(testPlugin);
}
public AutoscalingTestPlugin testPlugin() {
return testPlugin;
}
public static
|
LocalStateAutoscaling
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/exchange/support/DefaultFuture.java
|
{
"start": 2260,
"end": 11846
}
|
class ____ extends CompletableFuture<Object> {
private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(DefaultFuture.class);
/**
* in-flight channels
*/
private static final Map<Long, Channel> CHANNELS = new ConcurrentHashMap<>();
/**
* in-flight requests
*/
private static final Map<Long, DefaultFuture> FUTURES = new ConcurrentHashMap<>();
private static final GlobalResourceInitializer<Timer> TIME_OUT_TIMER = new GlobalResourceInitializer<>(
() -> new HashedWheelTimer(new NamedThreadFactory("dubbo-future-timeout", true), 30, TimeUnit.MILLISECONDS),
DefaultFuture::destroy);
// invoke id.
private final Long id;
private final Channel channel;
private final Request request;
private final int timeout;
private final long start = System.currentTimeMillis();
private volatile long sent;
private Timeout timeoutCheckTask;
private ExecutorService executor;
public ExecutorService getExecutor() {
return executor;
}
public void setExecutor(ExecutorService executor) {
this.executor = executor;
}
private DefaultFuture(Channel channel, Request request, int timeout) {
this.channel = channel;
this.request = request;
this.id = request.getId();
this.timeout = timeout > 0 ? timeout : channel.getUrl().getPositiveParameter(TIMEOUT_KEY, DEFAULT_TIMEOUT);
// put into waiting map.
FUTURES.put(id, this);
CHANNELS.put(id, channel);
}
/**
* check time out of the future
*/
private static void timeoutCheck(DefaultFuture future) {
TimeoutCheckTask task = new TimeoutCheckTask(future.getId());
future.timeoutCheckTask = TIME_OUT_TIMER.get().newTimeout(task, future.getTimeout(), TimeUnit.MILLISECONDS);
}
public static void destroy() {
TIME_OUT_TIMER.remove(Timer::stop);
FUTURES.clear();
CHANNELS.clear();
}
/**
* init a DefaultFuture
* 1.init a DefaultFuture
* 2.timeout check
*
* @param channel channel
* @param request the request
* @param timeout timeout
* @return a new DefaultFuture
*/
public static DefaultFuture newFuture(Channel channel, Request request, int timeout, ExecutorService executor) {
final DefaultFuture future = new DefaultFuture(channel, request, timeout);
future.setExecutor(executor);
// timeout check
timeoutCheck(future);
return future;
}
public static DefaultFuture getFuture(long id) {
return FUTURES.get(id);
}
public static boolean hasFuture(Channel channel) {
return CHANNELS.containsValue(channel);
}
public static void sent(Channel channel, Request request) {
DefaultFuture future = FUTURES.get(request.getId());
if (future != null) {
future.doSent();
}
}
/**
* close a channel when a channel is inactive
* directly return the unfinished requests.
*
* @param channel channel to close
*/
public static void closeChannel(Channel channel, long timeout) {
long deadline = timeout > 0 ? System.currentTimeMillis() + timeout : 0;
for (Map.Entry<Long, Channel> entry : CHANNELS.entrySet()) {
if (channel.equals(entry.getValue())) {
DefaultFuture future = getFuture(entry.getKey());
if (future != null && !future.isDone()) {
long restTime = deadline - System.currentTimeMillis();
if (restTime > 0) {
try {
future.get(restTime, TimeUnit.MILLISECONDS);
} catch (java.util.concurrent.TimeoutException ignore) {
logger.warn(
PROTOCOL_TIMEOUT_SERVER,
"",
"",
"Trying to close channel " + channel + ", but response is not received in "
+ timeout + "ms, and the request id is " + future.id);
} catch (Throwable ignore) {
}
}
if (!future.isDone()) {
respInactive(channel, future);
}
}
}
}
}
private static void respInactive(Channel channel, DefaultFuture future) {
Response disconnectResponse = new Response(future.getId());
disconnectResponse.setStatus(Response.CHANNEL_INACTIVE);
disconnectResponse.setErrorMessage("Channel " + channel
+ " is inactive. Directly return the unFinished request : "
+ (logger.isDebugEnabled()
? future.getRequest()
: future.getRequest().copyWithoutData()));
DefaultFuture.received(channel, disconnectResponse);
}
public static void received(Channel channel, Response response) {
received(channel, response, false);
}
public static void received(Channel channel, Response response, boolean timeout) {
try {
DefaultFuture future = FUTURES.remove(response.getId());
if (future != null) {
Timeout t = future.timeoutCheckTask;
if (!timeout) {
// decrease Time
t.cancel();
}
future.doReceived(response);
shutdownExecutorIfNeeded(future);
} else {
logger.warn(
PROTOCOL_TIMEOUT_SERVER,
"",
"",
"The timeout response finally returned at "
+ (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date()))
+ ", response status is " + response.getStatus()
+ (channel == null
? ""
: ", channel: " + channel.getLocalAddress() + " -> "
+ channel.getRemoteAddress())
+ ", please check provider side for detailed result.");
}
} finally {
CHANNELS.remove(response.getId());
}
}
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
Response errorResult = new Response(id);
errorResult.setStatus(Response.CLIENT_ERROR);
errorResult.setErrorMessage("request future has been canceled.");
this.doReceived(errorResult);
DefaultFuture future = FUTURES.remove(id);
shutdownExecutorIfNeeded(future);
CHANNELS.remove(id);
timeoutCheckTask.cancel();
return true;
}
private static void shutdownExecutorIfNeeded(DefaultFuture future) {
ExecutorService executor = future.getExecutor();
if (executor instanceof ThreadlessExecutor && !executor.isShutdown()) {
executor.shutdownNow();
}
}
public void cancel() {
this.cancel(true);
}
private void doReceived(Response res) {
if (res == null) {
throw new IllegalStateException("response cannot be null");
}
if (res.getStatus() == Response.OK) {
this.complete(res.getResult());
} else if (res.getStatus() == Response.CLIENT_TIMEOUT || res.getStatus() == Response.SERVER_TIMEOUT) {
this.completeExceptionally(
new TimeoutException(res.getStatus() == Response.SERVER_TIMEOUT, channel, res.getErrorMessage()));
} else if (res.getStatus() == Response.SERIALIZATION_ERROR) {
this.completeExceptionally(new SerializationException(res.getErrorMessage()));
} else {
this.completeExceptionally(new RemotingException(channel, res.getErrorMessage()));
}
}
private long getId() {
return id;
}
private Channel getChannel() {
return channel;
}
private boolean isSent() {
return sent > 0;
}
public Request getRequest() {
return request;
}
private int getTimeout() {
return timeout;
}
private void doSent() {
sent = System.currentTimeMillis();
}
private String getTimeoutMessage(boolean scan) {
long nowTimestamp = System.currentTimeMillis();
return (sent > 0 && sent - start < timeout
? "Waiting server-side response timeout"
: "Sending request timeout in client-side")
+ (scan ? " by scan timer" : "") + ". start time: "
+ (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date(start))) + ", end time: "
+ (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS").format(new Date(nowTimestamp))) + ","
+ (sent > 0
? " client elapsed: " + (sent - start) + " ms, server elapsed: " + (nowTimestamp - sent)
: " elapsed: " + (nowTimestamp - start))
+ " ms, timeout: "
+ timeout + " ms, request: " + (logger.isDebugEnabled() ? request : request.copyWithoutData())
+ ", channel: " + channel.getLocalAddress()
+ " -> " + channel.getRemoteAddress();
}
private static
|
DefaultFuture
|
java
|
apache__spark
|
sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/ViewInfo.java
|
{
"start": 1155,
"end": 4934
}
|
class ____ {
private final Identifier ident;
private final String sql;
private final String currentCatalog;
private final String[] currentNamespace;
private final StructType schema;
private final String[] queryColumnNames;
private final String[] columnAliases;
private final String[] columnComments;
private final Map<String, String> properties;
public ViewInfo(
Identifier ident,
String sql,
String currentCatalog,
String[] currentNamespace,
StructType schema,
String[] queryColumnNames,
String[] columnAliases,
String[] columnComments,
Map<String, String> properties) {
this.ident = ident;
this.sql = sql;
this.currentCatalog = currentCatalog;
this.currentNamespace = currentNamespace;
this.schema = schema;
this.queryColumnNames = queryColumnNames;
this.columnAliases = columnAliases;
this.columnComments = columnComments;
this.properties = properties;
}
/**
* @return The view identifier
*/
@Nonnull
public Identifier ident() {
return ident;
}
/**
* @return The SQL text that defines the view
*/
@Nonnull
public String sql() {
return sql;
}
/**
* @return The current catalog
*/
@Nonnull
public String currentCatalog() {
return currentCatalog;
}
/**
* @return The current namespace
*/
@Nonnull
public String[] currentNamespace() {
return currentNamespace;
}
/**
* @return The view query output schema
*/
@Nonnull
public StructType schema() {
return schema;
}
/**
* @return The query column names
*/
@Nonnull
public String[] queryColumnNames() {
return queryColumnNames;
}
/**
* @return The column aliases
*/
@Nonnull
public String[] columnAliases() {
return columnAliases;
}
/**
* @return The column comments
*/
@Nonnull
public String[] columnComments() {
return columnComments;
}
/**
* @return The view properties
*/
@Nonnull
public Map<String, String> properties() {
return properties;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ViewInfo viewInfo = (ViewInfo) o;
return ident.equals(viewInfo.ident) && sql.equals(viewInfo.sql) &&
currentCatalog.equals(viewInfo.currentCatalog) &&
Arrays.equals(currentNamespace, viewInfo.currentNamespace) &&
schema.equals(viewInfo.schema) &&
Arrays.equals(queryColumnNames, viewInfo.queryColumnNames) &&
Arrays.equals(columnAliases, viewInfo.columnAliases) &&
Arrays.equals(columnComments, viewInfo.columnComments) &&
properties.equals(viewInfo.properties);
}
@Override
public int hashCode() {
int result = Objects.hash(ident, sql, currentCatalog, schema, properties);
result = 31 * result + Arrays.hashCode(currentNamespace);
result = 31 * result + Arrays.hashCode(queryColumnNames);
result = 31 * result + Arrays.hashCode(columnAliases);
result = 31 * result + Arrays.hashCode(columnComments);
return result;
}
@Override
public String toString() {
return new StringJoiner(", ", ViewInfo.class.getSimpleName() + "[", "]")
.add("ident=" + ident)
.add("sql='" + sql + "'")
.add("currentCatalog='" + currentCatalog + "'")
.add("currentNamespace=" + Arrays.toString(currentNamespace))
.add("schema=" + schema)
.add("queryColumnNames=" + Arrays.toString(queryColumnNames))
.add("columnAliases=" + Arrays.toString(columnAliases))
.add("columnComments=" + Arrays.toString(columnComments))
.add("properties=" + properties)
.toString();
}
}
|
ViewInfo
|
java
|
quarkusio__quarkus
|
extensions/mongodb-client/deployment/src/main/java/io/quarkus/mongodb/deployment/DevServicesMongoProcessor.java
|
{
"start": 2276,
"end": 14324
}
|
class ____ {
private static final Logger log = Logger.getLogger(DevServicesMongoProcessor.class);
static volatile List<RunningDevService> devServices;
static volatile Map<String, CapturedProperties> capturedProperties;
static volatile boolean first = true;
private static final String MONGO_SCHEME = "mongodb://";
private static final int MONGO_EXPOSED_PORT = 27017;
/**
* Label to add to shared Dev Service for Mongo running in containers.
* This allows other applications to discover the running service and use it instead of starting a new instance.
*/
private static final String DEV_SERVICE_LABEL = "quarkus-dev-service-mongodb";
private static final ContainerLocator MONGO_CONTAINER_LOCATOR = locateContainerWithLabels(MONGO_EXPOSED_PORT,
DEV_SERVICE_LABEL);
@BuildStep
public List<DevServicesResultBuildItem> startMongo(List<MongoConnectionNameBuildItem> mongoConnections,
DockerStatusBuildItem dockerStatusBuildItem,
DevServicesComposeProjectBuildItem composeProjectBuildItem,
MongoClientBuildTimeConfig mongoClientBuildTimeConfig,
List<DevServicesSharedNetworkBuildItem> devServicesSharedNetworkBuildItem,
Optional<ConsoleInstalledBuildItem> consoleInstalledBuildItem,
CuratedApplicationShutdownBuildItem closeBuildItem,
LaunchModeBuildItem launchMode,
LoggingSetupBuildItem loggingSetupBuildItem,
DevServicesConfig devServicesConfig) {
List<String> connectionNames = new ArrayList<>(mongoConnections.size());
for (MongoConnectionNameBuildItem mongoConnection : mongoConnections) {
connectionNames.add(mongoConnection.getName());
}
Map<String, CapturedProperties> currentCapturedProperties = captureProperties(connectionNames,
mongoClientBuildTimeConfig);
//figure out if we need to shut down and restart existing databases
//if not and the DB's have already started we just return
if (devServices != null) {
boolean restartRequired = !currentCapturedProperties.equals(capturedProperties);
if (!restartRequired) {
return devServices.stream().map(RunningDevService::toBuildItem).collect(Collectors.toList());
}
for (Closeable i : devServices) {
try {
i.close();
} catch (Throwable e) {
log.error("Failed to stop database", e);
}
}
devServices = null;
capturedProperties = null;
}
List<RunningDevService> newDevServices = new ArrayList<>(mongoConnections.size());
for (String connectionName : connectionNames) {
RunningDevService devService;
StartupLogCompressor compressor = new StartupLogCompressor(
(launchMode.isTest() ? "(test) " : "") + "Mongo Dev Services Starting:", consoleInstalledBuildItem,
loggingSetupBuildItem);
try {
boolean useSharedNetwork = DevServicesSharedNetworkBuildItem.isSharedNetworkRequired(devServicesConfig,
devServicesSharedNetworkBuildItem);
devService = startMongo(dockerStatusBuildItem, composeProjectBuildItem, connectionName,
currentCapturedProperties.get(connectionName),
useSharedNetwork, devServicesConfig.timeout(), launchMode.getLaunchMode(),
mongoClientBuildTimeConfig.devservices().serviceName());
if (devService == null) {
compressor.closeAndDumpCaptured();
} else {
compressor.close();
newDevServices.add(devService);
}
} catch (Throwable t) {
compressor.closeAndDumpCaptured();
throw new RuntimeException(t);
}
}
if (first) {
first = false;
Runnable closeTask = new Runnable() {
@Override
public void run() {
if (devServices != null) {
for (Closeable i : devServices) {
try {
i.close();
} catch (Throwable t) {
log.error("Failed to stop database", t);
}
}
}
first = true;
devServices = null;
capturedProperties = null;
}
};
closeBuildItem.addCloseTask(closeTask, true);
}
devServices = newDevServices;
capturedProperties = currentCapturedProperties;
return devServices.stream().map(RunningDevService::toBuildItem).collect(Collectors.toList());
}
private RunningDevService startMongo(DockerStatusBuildItem dockerStatusBuildItem,
DevServicesComposeProjectBuildItem composeProjectBuildItem,
String connectionName, CapturedProperties capturedProperties,
boolean useSharedNetwork, Optional<Duration> timeout,
LaunchMode launchMode, String serviceName) {
if (!capturedProperties.devServicesEnabled) {
// explicitly disabled
log.debug(
"Not starting devservices for " + (isDefaultClient(connectionName) ? "default datasource" : connectionName)
+ " as it has been disabled in the config");
return null;
}
String configPrefix = getConfigPrefix(connectionName);
boolean needToStart = !ConfigUtils.isPropertyNonEmpty(configPrefix + "connection-string")
&& !ConfigUtils.isPropertyNonEmpty(configPrefix + "hosts");
if (!needToStart) {
// a connection string has been provided
log.debug(
"Not starting devservices for " + (isDefaultClient(connectionName) ? "default datasource" : connectionName)
+ " as a connection string and/or server addresses have been provided");
return null;
}
if (!dockerStatusBuildItem.isContainerRuntimeAvailable()) {
log.warn("Please configure datasource URL for "
+ (isDefaultClient(connectionName) ? "default datasource" : connectionName)
+ " or get a working docker instance");
return null;
}
Supplier<RunningDevService> defaultMongoServerSupplier = () -> {
QuarkusMongoDBContainer mongoDBContainer;
if (capturedProperties.imageName != null) {
mongoDBContainer = new QuarkusMongoDBContainer(
DockerImageName.parse(capturedProperties.imageName).asCompatibleSubstituteFor("mongo"),
capturedProperties.fixedExposedPort,
composeProjectBuildItem.getDefaultNetworkId(),
useSharedNetwork, launchMode, serviceName);
} else {
mongoDBContainer = new QuarkusMongoDBContainer(capturedProperties.fixedExposedPort,
composeProjectBuildItem.getDefaultNetworkId(), useSharedNetwork, launchMode, serviceName);
}
timeout.ifPresent(mongoDBContainer::withStartupTimeout);
mongoDBContainer.withEnv(capturedProperties.containerEnv);
mongoDBContainer.start();
final String effectiveUrl = getEffectiveUrl(configPrefix, mongoDBContainer.getEffectiveHost(),
mongoDBContainer.getEffectivePort(), capturedProperties);
return new RunningDevService(Feature.MONGODB_CLIENT.getName(), mongoDBContainer.getContainerId(),
mongoDBContainer::close, getConfigPrefix(connectionName) + "connection-string", effectiveUrl);
};
return MONGO_CONTAINER_LOCATOR
.locateContainer(capturedProperties.serviceName(), capturedProperties.shared(), launchMode)
.or(() -> ComposeLocator.locateContainer(composeProjectBuildItem,
List.of(capturedProperties.imageName, "mongo"), MONGO_EXPOSED_PORT, launchMode, useSharedNetwork))
.map(containerAddress -> {
final String effectiveUrl = getEffectiveUrl(configPrefix, containerAddress.getHost(),
containerAddress.getPort(), capturedProperties);
return new RunningDevService(Feature.MONGODB_CLIENT.getName(), containerAddress.getId(),
null, getConfigPrefix(connectionName) + "connection-string", effectiveUrl);
})
.orElseGet(defaultMongoServerSupplier);
}
private String getEffectiveUrl(String configPrefix, String host, int port, CapturedProperties capturedProperties) {
final String databaseName = ConfigProvider.getConfig()
.getOptionalValue(configPrefix + "database", String.class)
.orElse("test");
String effectiveUrl = String.format("%s%s:%d/%s", MONGO_SCHEME, host, port, databaseName);
if ((capturedProperties.connectionProperties != null) && !capturedProperties.connectionProperties.isEmpty()) {
effectiveUrl = effectiveUrl + "?"
+ URLEncodedUtils.format(
capturedProperties.connectionProperties.entrySet().stream()
.map(e -> new BasicNameValuePair(e.getKey(), e.getValue()))
.collect(Collectors.toList()),
StandardCharsets.UTF_8);
}
return effectiveUrl;
}
private String getConfigPrefix(String connectionName) {
String configPrefix = "quarkus." + MongoConfig.CONFIG_NAME + ".";
if (!isDefaultClient(connectionName)) {
configPrefix = configPrefix + connectionName + ".";
}
return configPrefix;
}
private Map<String, CapturedProperties> captureProperties(List<String> connectionNames,
MongoClientBuildTimeConfig mongoClientBuildTimeConfig) {
Map<String, CapturedProperties> result = new HashMap<>();
for (String connectionName : connectionNames) {
result.put(connectionName, captureProperties(connectionName, mongoClientBuildTimeConfig));
}
return result;
}
private CapturedProperties captureProperties(String connectionName, MongoClientBuildTimeConfig mongoClientBuildTimeConfig) {
String configPrefix = getConfigPrefix(connectionName);
String databaseName = ConfigProvider.getConfig().getOptionalValue(configPrefix + "database", String.class).orElse(null);
String connectionString = ConfigProvider.getConfig().getOptionalValue(configPrefix + "connection-string", String.class)
.orElse(null);
//TODO: update for multiple connections
DevServicesBuildTimeConfig devServicesConfig = mongoClientBuildTimeConfig.devservices();
boolean devServicesEnabled = devServicesConfig.enabled().orElse(true);
return new CapturedProperties(databaseName, connectionString, devServicesEnabled,
devServicesConfig.imageName().orElseGet(() -> ConfigureUtil.getDefaultImageNameFor("mongo")),
devServicesConfig.port().orElse(null), devServicesConfig.properties(), devServicesConfig.containerEnv(),
devServicesConfig.shared(), devServicesConfig.serviceName());
}
private record CapturedProperties(String database, String connectionString, boolean devServicesEnabled,
String imageName, Integer fixedExposedPort,
Map<String, String> connectionProperties, Map<String, String> containerEnv,
boolean shared, String serviceName) {
}
private static final
|
DevServicesMongoProcessor
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/TimeoutExtensionTests.java
|
{
"start": 3484,
"end": 15275
}
|
class ____ extends AbstractJupiterTestEngineTests {
@Test
@DisplayName("is applied on annotated @Test methods")
void appliesTimeoutOnAnnotatedTestMethods() {
EngineExecutionResults results = executeTests(request() //
.selectors(selectMethod(TimeoutAnnotatedTestMethodTestCase.class, "testMethod")) //
.configurationParameter(DEFAULT_TEST_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.build());
Execution execution = findExecution(results.testEvents(), "testMethod()");
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessage("testMethod() timed out after 10 milliseconds");
}
@Test
@DisplayName("is not applied on annotated @Test methods using timeout mode: disabled")
void doesNotApplyTimeoutOnAnnotatedTestMethodsUsingDisabledTimeoutMode() {
EngineExecutionResults results = executeTests(request() //
.selectors(selectMethod(TimeoutAnnotatedTestMethodTestCase.class, "testMethod")) //
.configurationParameter(DEFAULT_TEST_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.configurationParameter(TIMEOUT_MODE_PROPERTY_NAME, "disabled").build());
Execution execution = findExecution(results.testEvents(), "testMethod()");
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable()) //
.isEmpty();
}
@Test
@DisplayName("is not applied on annotated @Test methods using timeout mode: disabled")
void applyTimeoutOnAnnotatedTestMethodsUsingDisabledOnDebugTimeoutMode() {
EngineExecutionResults results = executeTests(request() //
.selectors(selectMethod(TimeoutAnnotatedTestMethodTestCase.class, "testMethod")) //
.configurationParameter(DEFAULT_TEST_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.configurationParameter(TIMEOUT_MODE_PROPERTY_NAME, "disabled_on_debug").build());
Execution execution = findExecution(results.testEvents(), "testMethod()");
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
// The check to see if debugging is pushing the timer just above 1 second
.isLessThan(Duration.ofSeconds(2));
// Should we test if we're debugging? This test will fail if we are debugging.
if (RuntimeUtils.isDebugMode()) {
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable()) //
.isEmpty();
}
else {
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessage("testMethod() timed out after 10 milliseconds");
}
}
@Test
@DisplayName("is applied on annotated @TestTemplate methods")
void appliesTimeoutOnAnnotatedTestTemplateMethods() {
EngineExecutionResults results = executeTests(request() //
.selectors(selectMethod(TimeoutAnnotatedTestMethodTestCase.class, "testTemplateMethod")) //
.configurationParameter(DEFAULT_TEST_TEMPLATE_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.build());
Stream.of("repetition 1", "repetition 2").forEach(displayName -> {
Execution execution = findExecution(results.testEvents(), displayName);
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessage("testTemplateMethod() timed out after 10 milliseconds");
});
}
@Test
@DisplayName("is applied on annotated @TestFactory methods")
void appliesTimeoutOnAnnotatedTestFactoryMethods() {
EngineExecutionResults results = executeTests(request() //
.selectors(selectMethod(TimeoutAnnotatedTestMethodTestCase.class, "testFactoryMethod")) //
.configurationParameter(DEFAULT_TEST_FACTORY_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.build());
Execution execution = findExecution(results.containerEvents(), "testFactoryMethod()");
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessage("testFactoryMethod() timed out after 10 milliseconds");
}
@ParameterizedTest(name = "{0}")
@ValueSource(classes = { TimeoutAnnotatedClassTestCase.class, InheritedTimeoutAnnotatedClassTestCase.class })
@DisplayName("is applied on testable methods in annotated classes")
void appliesTimeoutOnTestableMethodsInAnnotatedClasses(Class<?> testClass) {
EngineExecutionResults results = executeTests(request() //
.selectors(selectClass(testClass)) //
.configurationParameter(DEFAULT_TEST_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.configurationParameter(DEFAULT_TEST_TEMPLATE_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.configurationParameter(DEFAULT_TEST_FACTORY_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.build());
assertAll(Stream.of("testMethod()", "repetition 1", "repetition 2", "testFactoryMethod()") //
.map(displayName -> () -> {
Execution execution = findExecution(results.allEvents(), displayName);
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessageEndingWith("timed out after 10000000 nanoseconds");
}));
}
@Test
@DisplayName("fails methods that do not throw InterruptedException")
void failsMethodsWithoutInterruptedException() {
EngineExecutionResults results = executeTestsForClass(MethodWithoutInterruptedExceptionTestCase.class);
Execution execution = findExecution(results.testEvents(), "methodThatDoesNotThrowInterruptedException()");
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(1)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getStatus()).isEqualTo(FAILED);
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessage("methodThatDoesNotThrowInterruptedException() timed out after 1 millisecond");
}
@Test
@DisplayName("is applied on annotated @BeforeAll methods")
void appliesTimeoutOnAnnotatedBeforeAllMethods() {
EngineExecutionResults results = executeTests(request() //
.selectors(selectClass(TimeoutAnnotatedBeforeAllMethodTestCase.class)) //
.configurationParameter(DEFAULT_BEFORE_ALL_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.build());
Execution execution = findExecution(results.containerEvents(),
TimeoutAnnotatedBeforeAllMethodTestCase.class.getSimpleName());
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessage("setUp() timed out after 10 milliseconds");
}
@Test
@DisplayName("is applied on annotated @BeforeEach methods")
void appliesTimeoutOnAnnotatedBeforeEachMethods() {
EngineExecutionResults results = executeTests(request() //
.selectors(selectClass(TimeoutAnnotatedBeforeEachMethodTestCase.class)) //
.configurationParameter(DEFAULT_BEFORE_EACH_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.build());
Execution execution = findExecution(results.testEvents(), "testMethod()");
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessage("setUp() timed out after 10 milliseconds");
}
@Test
@DisplayName("is applied on annotated @AfterEach methods")
void appliesTimeoutOnAnnotatedAfterEachMethods() {
EngineExecutionResults results = executeTests(request() //
.selectors(selectClass(TimeoutAnnotatedAfterEachMethodTestCase.class)) //
.configurationParameter(DEFAULT_AFTER_EACH_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.build());
Execution execution = findExecution(results.testEvents(), "testMethod()");
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessage("tearDown() timed out after 10 milliseconds");
}
@Test
@DisplayName("is applied on annotated @AfterAll methods")
void appliesTimeoutOnAnnotatedAfterAllMethods() {
EngineExecutionResults results = executeTests(request() //
.selectors(selectClass(TimeoutAnnotatedAfterAllMethodTestCase.class)) //
.configurationParameter(DEFAULT_AFTER_ALL_METHOD_TIMEOUT_PROPERTY_NAME, "42ns") //
.build());
Execution execution = findExecution(results.containerEvents(),
TimeoutAnnotatedAfterAllMethodTestCase.class.getSimpleName());
assertThat(execution.getDuration()) //
.isGreaterThanOrEqualTo(Duration.ofMillis(10)) //
.isLessThan(Duration.ofSeconds(1));
assertThat(execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.isInstanceOf(TimeoutException.class) //
.hasMessage("tearDown() timed out after 10 milliseconds");
}
@ParameterizedTest(name = "{0}")
@MethodSource
@DisplayName("is applied from configuration parameters by default")
void appliesDefaultTimeoutsFromConfigurationParameters(String propertyName, String slowMethod) {
PlainTestCase.slowMethod = slowMethod;
EngineExecutionResults results = executeTests(request() //
.selectors(selectClass(PlainTestCase.class)) //
.configurationParameter(propertyName, "1ns") //
.build());
var failure = results.allEvents().executions().failed() //
.map(execution -> execution.getTerminationInfo().getExecutionResult().getThrowable().orElseThrow()) //
.findFirst();
assertThat(failure).containsInstanceOf(TimeoutException.class);
assertThat(failure.orElseThrow()).hasMessage(slowMethod + " timed out after 1 nanosecond");
}
static Stream<Arguments> appliesDefaultTimeoutsFromConfigurationParameters() {
return Stream.of( //
Arguments.of(DEFAULT_BEFORE_ALL_METHOD_TIMEOUT_PROPERTY_NAME, "beforeAll()"), //
Arguments.of(DEFAULT_BEFORE_EACH_METHOD_TIMEOUT_PROPERTY_NAME, "beforeEach()"), //
Arguments.of(DEFAULT_TEST_METHOD_TIMEOUT_PROPERTY_NAME, "test()"), //
Arguments.of(DEFAULT_TEST_TEMPLATE_METHOD_TIMEOUT_PROPERTY_NAME, "testTemplate()"), //
Arguments.of(DEFAULT_TEST_FACTORY_METHOD_TIMEOUT_PROPERTY_NAME, "testFactory()"), //
Arguments.of(DEFAULT_AFTER_EACH_METHOD_TIMEOUT_PROPERTY_NAME, "afterEach()"), //
Arguments.of(DEFAULT_AFTER_ALL_METHOD_TIMEOUT_PROPERTY_NAME, "afterAll()") //
);
}
@Test
@DisplayName("does not swallow unrecoverable exceptions")
void doesNotSwallowUnrecoverableExceptions() {
assertThrows(OutOfMemoryError.class, () -> executeTestsForClass(UnrecoverableExceptionTestCase.class));
}
@Test
@DisplayName("does not affect tests that don't exceed the timeout")
void doesNotAffectTestsThatDoNotExceedTimeoutDuration() {
executeTestsForClass(NonTimeoutExceedingTestCase.class).allEvents().assertStatistics(stats -> stats.failed(0));
}
@Test
@DisplayName("includes fully qualified
|
TimeoutExtensionTests
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/streaming/api/graph/DefaultStreamGraphContext.java
|
{
"start": 2612,
"end": 19087
}
|
class ____ implements StreamGraphContext {
private static final Logger LOG = LoggerFactory.getLogger(DefaultStreamGraphContext.class);
private final StreamGraph streamGraph;
private final ImmutableStreamGraph immutableStreamGraph;
// The attributes below are reused from AdaptiveGraphManager as AdaptiveGraphManager also needs
// to use the modified information to create the job vertex.
// A modifiable map which records the ids of stream nodes to their forward groups.
// When stream edge's partitioner is modified to forward, we need get forward groups by source
// and target node id and merge them.
private final Map<Integer, StreamNodeForwardGroup> steamNodeIdToForwardGroupMap;
// A read only map which records the id of stream node which job vertex is created, used to
// ensure that the stream nodes involved in the modification have not yet created job vertices.
private final Map<Integer, Integer> frozenNodeToStartNodeMap;
// A modifiable map which key is the id of stream node which creates the non-chained output, and
// value is the stream edge connected to the stream node and the non-chained output subscribed
// by the edge. It is used to verify whether the edge being modified is subscribed to a reused
// output and ensures that modifications to StreamEdge can be synchronized to NonChainedOutput
// as they reuse some attributes.
private final Map<Integer, Map<StreamEdge, NonChainedOutput>> opIntermediateOutputsCaches;
private final Map<String, IntermediateDataSet> consumerEdgeIdToIntermediateDataSetMap;
private final Set<Integer> finishedStreamNodeIds;
@Nullable private final StreamGraphUpdateListener streamGraphUpdateListener;
@VisibleForTesting
public DefaultStreamGraphContext(
StreamGraph streamGraph,
Map<Integer, StreamNodeForwardGroup> steamNodeIdToForwardGroupMap,
Map<Integer, Integer> frozenNodeToStartNodeMap,
Map<Integer, Map<StreamEdge, NonChainedOutput>> opIntermediateOutputsCaches,
Map<String, IntermediateDataSet> consumerEdgeIdToIntermediateDataSetMap,
Set<Integer> finishedStreamNodeIds,
ClassLoader userClassloader) {
this(
streamGraph,
steamNodeIdToForwardGroupMap,
frozenNodeToStartNodeMap,
opIntermediateOutputsCaches,
consumerEdgeIdToIntermediateDataSetMap,
finishedStreamNodeIds,
userClassloader,
null);
}
public DefaultStreamGraphContext(
StreamGraph streamGraph,
Map<Integer, StreamNodeForwardGroup> steamNodeIdToForwardGroupMap,
Map<Integer, Integer> frozenNodeToStartNodeMap,
Map<Integer, Map<StreamEdge, NonChainedOutput>> opIntermediateOutputsCaches,
Map<String, IntermediateDataSet> consumerEdgeIdToIntermediateDataSetMap,
Set<Integer> finishedStreamNodeIds,
ClassLoader userClassloader,
@Nullable StreamGraphUpdateListener streamGraphUpdateListener) {
this.streamGraph = checkNotNull(streamGraph);
this.steamNodeIdToForwardGroupMap = checkNotNull(steamNodeIdToForwardGroupMap);
this.frozenNodeToStartNodeMap = checkNotNull(frozenNodeToStartNodeMap);
this.opIntermediateOutputsCaches = checkNotNull(opIntermediateOutputsCaches);
this.immutableStreamGraph = new ImmutableStreamGraph(this.streamGraph, userClassloader);
this.consumerEdgeIdToIntermediateDataSetMap =
checkNotNull(consumerEdgeIdToIntermediateDataSetMap);
this.finishedStreamNodeIds = finishedStreamNodeIds;
this.streamGraphUpdateListener = streamGraphUpdateListener;
}
@Override
public ImmutableStreamGraph getStreamGraph() {
return immutableStreamGraph;
}
@Override
public @Nullable StreamOperatorFactory<?> getOperatorFactory(Integer streamNodeId) {
return streamGraph.getStreamNode(streamNodeId).getOperatorFactory();
}
@Override
public boolean modifyStreamEdge(List<StreamEdgeUpdateRequestInfo> requestInfos) {
// We first verify the legality of all requestInfos to ensure that all requests can be
// modified atomically.
for (StreamEdgeUpdateRequestInfo requestInfo : requestInfos) {
if (!validateStreamEdgeUpdateRequest(requestInfo)) {
return false;
}
}
for (StreamEdgeUpdateRequestInfo requestInfo : requestInfos) {
StreamEdge targetEdge =
getStreamEdge(
requestInfo.getSourceId(),
requestInfo.getTargetId(),
requestInfo.getEdgeId());
StreamPartitioner<?> newPartitioner = requestInfo.getOutputPartitioner();
if (newPartitioner != null) {
modifyOutputPartitioner(targetEdge, newPartitioner);
}
if (requestInfo.getTypeNumber() != 0) {
targetEdge.setTypeNumber(requestInfo.getTypeNumber());
}
if (requestInfo.getIntraInputKeyCorrelated() != null) {
modifyIntraInputKeyCorrelation(
targetEdge, requestInfo.getIntraInputKeyCorrelated());
}
}
// Notify the listener that the StreamGraph has been updated.
if (streamGraphUpdateListener != null) {
streamGraphUpdateListener.onStreamGraphUpdated();
}
return true;
}
@Override
public boolean modifyStreamNode(List<StreamNodeUpdateRequestInfo> requestInfos) {
for (StreamNodeUpdateRequestInfo requestInfo : requestInfos) {
StreamNode streamNode = streamGraph.getStreamNode(requestInfo.getNodeId());
if (requestInfo.getTypeSerializersIn() != null) {
if (requestInfo.getTypeSerializersIn().length
!= streamNode.getTypeSerializersIn().length) {
LOG.info(
"Modification for node {} is not allowed as the array size of typeSerializersIn is not matched.",
requestInfo.getNodeId());
return false;
}
streamNode.setSerializersIn(requestInfo.getTypeSerializersIn());
}
}
// Notify the listener that the StreamGraph has been updated.
if (streamGraphUpdateListener != null) {
streamGraphUpdateListener.onStreamGraphUpdated();
}
return true;
}
@Override
public boolean checkUpstreamNodesFinished(ImmutableStreamNode streamNode, Integer typeNumber) {
List<ImmutableStreamEdge> inEdgesWithTypeNumber =
streamNode.getInEdges().stream()
.filter(edge -> typeNumber == null || edge.getTypeNumber() == typeNumber)
.collect(Collectors.toList());
checkState(
!inEdgesWithTypeNumber.isEmpty(),
String.format("The stream edge with typeNumber %s does not exist.", typeNumber));
return inEdgesWithTypeNumber.stream()
.allMatch(edge -> finishedStreamNodeIds.contains(edge.getSourceId()));
}
@Override
public IntermediateDataSetID getConsumedIntermediateDataSetId(String edgeId) {
return consumerEdgeIdToIntermediateDataSetMap.get(edgeId).getId();
}
@Override
public StreamPartitioner<?> getOutputPartitioner(
String edgeId, Integer sourceId, Integer targetId) {
return checkNotNull(getStreamEdge(sourceId, targetId, edgeId)).getPartitioner();
}
private boolean validateStreamEdgeUpdateRequest(StreamEdgeUpdateRequestInfo requestInfo) {
Integer sourceNodeId = requestInfo.getSourceId();
Integer targetNodeId = requestInfo.getTargetId();
StreamEdge targetEdge = getStreamEdge(sourceNodeId, targetNodeId, requestInfo.getEdgeId());
// Modification to output partitioner is not allowed when the subscribing output is reused.
if (requestInfo.getOutputPartitioner() != null) {
Map<StreamEdge, NonChainedOutput> opIntermediateOutputs =
opIntermediateOutputsCaches.get(sourceNodeId);
NonChainedOutput output =
opIntermediateOutputs != null ? opIntermediateOutputs.get(targetEdge) : null;
if (output != null) {
Set<StreamEdge> consumerStreamEdges =
opIntermediateOutputs.entrySet().stream()
.filter(entry -> entry.getValue().equals(output))
.map(Map.Entry::getKey)
.collect(Collectors.toSet());
if (consumerStreamEdges.size() != 1) {
LOG.info(
"Skip modifying edge {} because the subscribing output is reused.",
targetEdge);
return false;
}
}
}
if (frozenNodeToStartNodeMap.containsKey(targetNodeId)) {
LOG.info(
"Skip modifying edge {} because the target node with id {} is in frozen list.",
targetEdge,
targetNodeId);
return false;
}
StreamPartitioner<?> newPartitioner = requestInfo.getOutputPartitioner();
if (newPartitioner != null) {
if (targetEdge.getPartitioner().getClass().equals(ForwardPartitioner.class)) {
LOG.info(
"Modification for edge {} is not allowed as the origin partitioner is ForwardPartitioner.",
targetEdge);
return false;
}
if (newPartitioner.getClass().equals(ForwardPartitioner.class)
&& !canTargetMergeIntoSourceForwardGroup(
steamNodeIdToForwardGroupMap.get(targetEdge.getSourceId()),
steamNodeIdToForwardGroupMap.get(targetEdge.getTargetId()))) {
LOG.info(
"Skip modifying edge {} because forward groups can not be merged.",
targetEdge);
return false;
}
}
return true;
}
private void modifyOutputPartitioner(
StreamEdge targetEdge, StreamPartitioner<?> newPartitioner) {
if (newPartitioner == null) {
return;
}
StreamPartitioner<?> oldPartitioner = targetEdge.getPartitioner();
targetEdge.setPartitioner(newPartitioner);
if (targetEdge.getPartitioner() instanceof ForwardPartitioner) {
tryConvertForwardPartitionerAndMergeForwardGroup(targetEdge);
}
// The partitioner in NonChainedOutput and IntermediateDataSet derived from the consumer
// edge, so we need to ensure that any modifications to the partitioner of consumer edge are
// synchronized with NonChainedOutput and IntermediateDataSet.
Map<StreamEdge, NonChainedOutput> opIntermediateOutputs =
opIntermediateOutputsCaches.get(targetEdge.getSourceId());
NonChainedOutput output =
opIntermediateOutputs != null ? opIntermediateOutputs.get(targetEdge) : null;
if (output != null) {
output.setPartitioner(targetEdge.getPartitioner());
}
Optional.ofNullable(consumerEdgeIdToIntermediateDataSetMap.get(targetEdge.getEdgeId()))
.ifPresent(
dataSet -> {
DistributionPattern distributionPattern =
targetEdge.getPartitioner().isPointwise()
? DistributionPattern.POINTWISE
: DistributionPattern.ALL_TO_ALL;
dataSet.updateOutputPattern(
distributionPattern,
targetEdge.getPartitioner().isBroadcast(),
targetEdge
.getPartitioner()
.getClass()
.equals(ForwardPartitioner.class));
});
LOG.info(
"The original partitioner of the edge {} is: {} , requested change to: {} , and finally modified to: {}.",
targetEdge,
oldPartitioner,
newPartitioner,
targetEdge.getPartitioner());
}
private void modifyIntraInputKeyCorrelation(
StreamEdge targetEdge, boolean existIntraInputKeyCorrelation) {
if (targetEdge.isIntraInputKeyCorrelated() == existIntraInputKeyCorrelation) {
return;
}
targetEdge.setIntraInputKeyCorrelated(existIntraInputKeyCorrelation);
}
private void tryConvertForwardPartitionerAndMergeForwardGroup(StreamEdge targetEdge) {
checkState(targetEdge.getPartitioner() instanceof ForwardPartitioner);
Integer sourceNodeId = targetEdge.getSourceId();
Integer targetNodeId = targetEdge.getTargetId();
if (canConvertToForwardPartitioner(targetEdge)) {
targetEdge.setPartitioner(new ForwardPartitioner<>());
checkState(mergeForwardGroups(sourceNodeId, targetNodeId));
} else if (targetEdge.getPartitioner() instanceof ForwardForUnspecifiedPartitioner) {
targetEdge.setPartitioner(new RescalePartitioner<>());
} else if (targetEdge.getPartitioner() instanceof ForwardForConsecutiveHashPartitioner) {
targetEdge.setPartitioner(
((ForwardForConsecutiveHashPartitioner<?>) targetEdge.getPartitioner())
.getHashPartitioner());
} else {
// For ForwardPartitioner, StreamGraphContext can ensure the success of the merge.
checkState(mergeForwardGroups(sourceNodeId, targetNodeId));
}
}
private boolean canConvertToForwardPartitioner(StreamEdge targetEdge) {
Integer sourceNodeId = targetEdge.getSourceId();
Integer targetNodeId = targetEdge.getTargetId();
if (targetEdge.getPartitioner() instanceof ForwardForUnspecifiedPartitioner) {
return !frozenNodeToStartNodeMap.containsKey(sourceNodeId)
&& StreamingJobGraphGenerator.isChainable(targetEdge, streamGraph, true)
&& canTargetMergeIntoSourceForwardGroup(
steamNodeIdToForwardGroupMap.get(sourceNodeId),
steamNodeIdToForwardGroupMap.get(targetNodeId));
} else if (targetEdge.getPartitioner() instanceof ForwardForConsecutiveHashPartitioner) {
return canTargetMergeIntoSourceForwardGroup(
steamNodeIdToForwardGroupMap.get(sourceNodeId),
steamNodeIdToForwardGroupMap.get(targetNodeId));
} else {
return false;
}
}
private boolean mergeForwardGroups(Integer sourceNodeId, Integer targetNodeId) {
StreamNodeForwardGroup sourceForwardGroup = steamNodeIdToForwardGroupMap.get(sourceNodeId);
StreamNodeForwardGroup forwardGroupToMerge = steamNodeIdToForwardGroupMap.get(targetNodeId);
if (sourceForwardGroup == null || forwardGroupToMerge == null) {
return false;
}
if (!sourceForwardGroup.mergeForwardGroup(forwardGroupToMerge)) {
return false;
}
// Update steamNodeIdToForwardGroupMap.
forwardGroupToMerge
.getVertexIds()
.forEach(nodeId -> steamNodeIdToForwardGroupMap.put(nodeId, sourceForwardGroup));
return true;
}
private StreamEdge getStreamEdge(Integer sourceId, Integer targetId, String edgeId) {
for (StreamEdge edge : streamGraph.getStreamEdges(sourceId, targetId)) {
if (edge.getEdgeId().equals(edgeId)) {
return edge;
}
}
throw new RuntimeException(
String.format(
"Stream edge with id '%s' is not found whose source id is %d, target id is %d.",
edgeId, sourceId, targetId));
}
}
|
DefaultStreamGraphContext
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NegativeCharLiteralTest.java
|
{
"start": 1521,
"end": 1871
}
|
class ____ {
// BUG: Diagnostic contains: 'char x = Character.MAX_VALUE - 1;'
char x = (char) -2;
}
""")
.doTest();
}
@Test
public void positive_literalOneLessThanMultipleOf65536() {
compilationHelper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/mapper/HistogramFieldMapper.java
|
{
"start": 3148,
"end": 3523
}
|
class ____ extends FieldMapper {
public static final String CONTENT_TYPE = "histogram";
// use the same default as numbers
private static final Setting<Boolean> COERCE_SETTING = NumberFieldMapper.COERCE_SETTING;
private static HistogramFieldMapper toType(FieldMapper in) {
return (HistogramFieldMapper) in;
}
public static
|
HistogramFieldMapper
|
java
|
quarkusio__quarkus
|
extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/InheritanceNoFieldsTestCase.java
|
{
"start": 480,
"end": 1137
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(MappedParent.class, ChildEntity.class, InheritanceResource.class)
.addAsResource(new StringAsset("INSERT INTO ChildEntity(id, name) VALUES(1, 'my name');\n"), "import.sql")
.addAsResource("application-test.properties",
"application.properties"));
@Test
public void testInheritanceNoFields() {
RestAssured.when().get("/entity/1").then().body(Matchers.is("my name"));
}
}
|
InheritanceNoFieldsTestCase
|
java
|
elastic__elasticsearch
|
x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/schema/TransformIndexerStatsTests.java
|
{
"start": 571,
"end": 905
}
|
class ____ extends AbstractSchemaValidationTestCase<TransformIndexerStats> {
@Override
protected TransformIndexerStats createTestInstance() {
return randomStats();
}
@Override
protected String getJsonSchemaFileName() {
return "transform_indexer_stats.schema.json";
}
}
|
TransformIndexerStatsTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryCopyTest.java
|
{
"start": 3459,
"end": 4190
}
|
class ____ {
List<TestFieldProtoMessage> f(TestProtoMessage m) {
List<TestFieldProtoMessage> l = m.getMultiFieldList();
return l.stream().map(x -> x).collect(ImmutableList.toImmutableList());
}
}
""")
.doTest();
}
@Test
public void positiveViaVariable_map() {
refactoringHelper
.addInputLines(
"Test.java",
"""
import com.google.common.collect.ImmutableMap;
import com.google.errorprone.bugpatterns.proto.ProtoTest.TestProtoMessage;
import com.google.errorprone.bugpatterns.proto.ProtoTest.TestFieldProtoMessage;
import java.util.Map;
|
Test
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/cache/ForwardingCacheTest.java
|
{
"start": 3193,
"end": 3344
}
|
class ____<K, V> extends ForwardingCache<K, V> {
@Override
protected Cache<K, V> delegate() {
throw new AssertionError();
}
}
}
|
OnlyGet
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/transactions/TransactionalRedisDataSource.java
|
{
"start": 2703,
"end": 3323
}
|
class ____ the values
* @param <K> the type of the redis key
* @param <F> the type of the fields (map's keys)
* @param <V> the type of the value
* @return the object to execute commands manipulating hashes (a.k.a. {@code Map<K, V>}).
*/
<K, F, V> TransactionalHashCommands<K, F, V> hash(Class<K> redisKeyType, Class<F> typeOfField, Class<V> typeOfValue);
/**
* Gets the object to execute commands manipulating hashes (a.k.a. {@code Map<String, V>}).
* <p>
* This is a shortcut on {@code hash(String.class, String.class, V)}
*
* @param typeOfValue the
|
of
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/nullness/MultipleNullnessAnnotationsTest.java
|
{
"start": 1493,
"end": 2542
}
|
class ____ {
// BUG: Diagnostic contains:
@Nullable @NonNull Object x;
// BUG: Diagnostic contains:
@NullableDecl static @NonNull Object y;
// BUG: Diagnostic contains:
List<@Nullable @NonNull String> z;
@NullableDecl
// BUG: Diagnostic contains:
abstract @NonNull Object f();
// BUG: Diagnostic contains:
abstract void f(@NullableDecl Object @NonNull [] x);
}
""")
.doTest();
}
@Test
public void negative() {
testHelper
.addSourceLines(
"Test.java",
"""
import org.checkerframework.checker.nullness.compatqual.NonNullDecl;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.checkerframework.checker.nullness.qual.Nullable;
import java.util.List;
abstract
|
Test
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/hive/LogicalScriptTransform.java
|
{
"start": 1445,
"end": 4016
}
|
class ____ extends SingleRel {
// which fields to be process by the script
private final int[] fieldIndices;
// the script to run
private final String script;
// the input/out schema for the script
private final ScriptTransformIOInfo scriptTransformIOInfo;
private final RelDataType outputRowType;
private LogicalScriptTransform(
RelOptCluster cluster,
RelTraitSet traits,
RelNode input,
int[] fieldIndices,
String script,
ScriptTransformIOInfo scriptTransformIOInfo,
RelDataType outputRowType) {
super(cluster, traits, input);
this.fieldIndices = fieldIndices;
this.script = script;
this.scriptTransformIOInfo = scriptTransformIOInfo;
this.outputRowType = outputRowType;
}
public static LogicalScriptTransform create(
RelNode input,
int[] fieldIndices,
String script,
ScriptTransformIOInfo scriptTransformIOInfo,
RelDataType outputRowType) {
return new LogicalScriptTransform(
input.getCluster(),
input.getTraitSet(),
input,
fieldIndices,
script,
scriptTransformIOInfo,
outputRowType);
}
@Override
public LogicalScriptTransform copy(RelTraitSet traitSet, List<RelNode> inputs) {
return new LogicalScriptTransform(
getCluster(),
traitSet,
inputs.get(0),
fieldIndices,
script,
scriptTransformIOInfo,
outputRowType);
}
@Override
public RelNode accept(RelShuttle shuttle) {
return shuttle.visit(this);
}
public String getScript() {
return script;
}
public int[] getFieldIndices() {
return fieldIndices;
}
public ScriptTransformIOInfo getScriptInputOutSchema() {
return scriptTransformIOInfo;
}
@Override
public RelDataType deriveRowType() {
return outputRowType;
}
@Override
public RelWriter explainTerms(RelWriter pw) {
super.explainTerms(pw);
pw.item("script-inputs", RelExplainUtil.fieldToString(fieldIndices, input.getRowType()))
.item("script-outputs", String.join(", ", getRowType().getFieldNames()))
.item("script", script)
.item("script-io-info", scriptTransformIOInfo);
return pw;
}
}
|
LogicalScriptTransform
|
java
|
micronaut-projects__micronaut-core
|
core-processor/src/main/java/io/micronaut/validation/visitor/async/AsyncTypeElementVisitor.java
|
{
"start": 1329,
"end": 2526
}
|
class ____ implements TypeElementVisitor<Object, Object> {
private static final String ANN = "io.micronaut.scheduling.annotation.Async";
@Override
public Set<String> getSupportedAnnotationNames() {
return Set.of(ANN);
}
@NonNull
@Override
public VisitorKind getVisitorKind() {
return VisitorKind.ISOLATING;
}
@Override
public TypeElementQuery query() {
return TypeElementQuery.onlyMethods();
}
@Override
public void visitMethod(MethodElement element, VisitorContext context) {
if (element.hasDeclaredAnnotation(ANN)) {
ClassElement returnType = element.getReturnType();
boolean isValid = returnType != null &&
(returnType.isAssignable(CompletionStage.class) || returnType.isAssignable(void.class) ||
returnType.isAssignable(Publisher.class) ||
Publishers.getKnownReactiveTypes().stream().anyMatch(returnType::isAssignable));
if (!isValid) {
context.fail("Method must return void, a Reactive Streams type or a subtype of CompletionStage", element);
}
}
}
}
|
AsyncTypeElementVisitor
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/config/GatewayMetricsAutoConfiguration.java
|
{
"start": 6778,
"end": 7309
}
|
class ____ {
@Bean
@ConditionalOnMissingBean
@ConditionalOnBean({ Propagator.class, TracingProperties.class })
@Order(Ordered.HIGHEST_PRECEDENCE + 5)
GatewayPropagatingSenderTracingObservationHandler gatewayPropagatingSenderTracingObservationHandler(
Tracer tracer, Propagator propagator, TracingProperties tracingProperties) {
return new GatewayPropagatingSenderTracingObservationHandler(tracer, propagator,
tracingProperties.getBaggage().getRemoteFields());
}
}
}
}
|
GatewayTracingConfiguration
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/sql/dialect/hive/ast/HiveInputOutputFormat.java
|
{
"start": 197,
"end": 1885
}
|
class ____ extends SQLExprImpl {
private SQLExpr input;
private SQLExpr output;
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
HiveInputOutputFormat that = (HiveInputOutputFormat) o;
if (input != null ? !input.equals(that.input) : that.input != null) {
return false;
}
return output != null ? output.equals(that.output) : that.output == null;
}
@Override
public int hashCode() {
int result = input != null ? input.hashCode() : 0;
result = 31 * result + (output != null ? output.hashCode() : 0);
return result;
}
@Override
protected void accept0(SQLASTVisitor v) {
if (v.visit(this)) {
acceptChild(v, input);
acceptChild(v, output);
}
v.endVisit(this);
}
@Override
public HiveInputOutputFormat clone() {
HiveInputOutputFormat x = new HiveInputOutputFormat();
if (input != null) {
x.setInput(input.clone());
}
if (output != null) {
x.setOutput(output.clone());
}
return x;
}
public SQLExpr getInput() {
return input;
}
public void setInput(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.input = x;
}
public SQLExpr getOutput() {
return output;
}
public void setOutput(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.output = x;
}
}
|
HiveInputOutputFormat
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/FileNameIndexUtils.java
|
{
"start": 1244,
"end": 11617
}
|
class ____ {
// Sanitize job history file for predictable parsing
static final String DELIMITER = "-";
static final String DELIMITER_ESCAPE = "%2D";
private static final Logger LOG =
LoggerFactory.getLogger(FileNameIndexUtils.class);
// Job history file names need to be backwards compatible
// Only append new elements to the end of this list
private static final int JOB_ID_INDEX = 0;
private static final int SUBMIT_TIME_INDEX = 1;
private static final int USER_INDEX = 2;
private static final int JOB_NAME_INDEX = 3;
private static final int FINISH_TIME_INDEX = 4;
private static final int NUM_MAPS_INDEX = 5;
private static final int NUM_REDUCES_INDEX = 6;
private static final int JOB_STATUS_INDEX = 7;
private static final int QUEUE_NAME_INDEX = 8;
private static final int JOB_START_TIME_INDEX = 9;
/**
* Constructs the job history file name from the JobIndexInfo.
*
* @param indexInfo the index info.
* @return the done job history filename.
*/
public static String getDoneFileName(JobIndexInfo indexInfo)
throws IOException {
return getDoneFileName(indexInfo,
JHAdminConfig.DEFAULT_MR_HS_JOBNAME_LIMIT);
}
public static String getDoneFileName(JobIndexInfo indexInfo,
int jobNameLimit) throws IOException {
StringBuilder sb = new StringBuilder();
//JobId
sb.append(encodeJobHistoryFileName(escapeDelimiters(
TypeConverter.fromYarn(indexInfo.getJobId()).toString())));
sb.append(DELIMITER);
//SubmitTime
sb.append(encodeJobHistoryFileName(String.valueOf(
indexInfo.getSubmitTime())));
sb.append(DELIMITER);
//UserName
sb.append(encodeJobHistoryFileName(escapeDelimiters(
getUserName(indexInfo))));
sb.append(DELIMITER);
//JobName
sb.append(trimURLEncodedString(encodeJobHistoryFileName(escapeDelimiters(
getJobName(indexInfo))), jobNameLimit));
sb.append(DELIMITER);
//FinishTime
sb.append(encodeJobHistoryFileName(
String.valueOf(indexInfo.getFinishTime())));
sb.append(DELIMITER);
//NumMaps
sb.append(encodeJobHistoryFileName(
String.valueOf(indexInfo.getNumMaps())));
sb.append(DELIMITER);
//NumReduces
sb.append(encodeJobHistoryFileName(
String.valueOf(indexInfo.getNumReduces())));
sb.append(DELIMITER);
//JobStatus
sb.append(encodeJobHistoryFileName(indexInfo.getJobStatus()));
sb.append(DELIMITER);
//QueueName
sb.append(escapeDelimiters(encodeJobHistoryFileName(
getQueueName(indexInfo))));
sb.append(DELIMITER);
//JobStartTime
sb.append(encodeJobHistoryFileName(
String.valueOf(indexInfo.getJobStartTime())));
sb.append(encodeJobHistoryFileName(
JobHistoryUtils.JOB_HISTORY_FILE_EXTENSION));
return sb.toString();
}
/**
* Parses the provided job history file name to construct a
* JobIndexInfo object which is returned.
*
* @param jhFileName the job history filename.
* @return a JobIndexInfo object built from the filename.
*/
public static JobIndexInfo getIndexInfo(String jhFileName)
throws IOException {
String fileName = jhFileName.substring(0,
jhFileName.indexOf(JobHistoryUtils.JOB_HISTORY_FILE_EXTENSION));
JobIndexInfo indexInfo = new JobIndexInfo();
String[] jobDetails = fileName.split(DELIMITER);
JobID oldJobId =
JobID.forName(decodeJobHistoryFileName(jobDetails[JOB_ID_INDEX]));
JobId jobId = TypeConverter.toYarn(oldJobId);
indexInfo.setJobId(jobId);
// Do not fail if there are some minor parse errors
try {
try {
indexInfo.setSubmitTime(Long.parseLong(
decodeJobHistoryFileName(jobDetails[SUBMIT_TIME_INDEX])));
} catch (NumberFormatException e) {
LOG.warn("Unable to parse submit time from job history file "
+ jhFileName + " : " + e);
}
indexInfo.setUser(
decodeJobHistoryFileName(jobDetails[USER_INDEX]));
indexInfo.setJobName(
decodeJobHistoryFileName(jobDetails[JOB_NAME_INDEX]));
try {
indexInfo.setFinishTime(Long.parseLong(
decodeJobHistoryFileName(jobDetails[FINISH_TIME_INDEX])));
} catch (NumberFormatException e) {
LOG.warn("Unable to parse finish time from job history file "
+ jhFileName + " : " + e);
}
try {
indexInfo.setNumMaps(Integer.parseInt(
decodeJobHistoryFileName(jobDetails[NUM_MAPS_INDEX])));
} catch (NumberFormatException e) {
LOG.warn("Unable to parse num maps from job history file "
+ jhFileName + " : " + e);
}
try {
indexInfo.setNumReduces(Integer.parseInt(
decodeJobHistoryFileName(jobDetails[NUM_REDUCES_INDEX])));
} catch (NumberFormatException e) {
LOG.warn("Unable to parse num reduces from job history file "
+ jhFileName + " : " + e);
}
indexInfo.setJobStatus(
decodeJobHistoryFileName(jobDetails[JOB_STATUS_INDEX]));
indexInfo.setQueueName(
decodeJobHistoryFileName(jobDetails[QUEUE_NAME_INDEX]));
try{
if (jobDetails.length <= JOB_START_TIME_INDEX) {
indexInfo.setJobStartTime(indexInfo.getSubmitTime());
} else {
indexInfo.setJobStartTime(Long.parseLong(
decodeJobHistoryFileName(jobDetails[JOB_START_TIME_INDEX])));
}
} catch (NumberFormatException e){
LOG.warn("Unable to parse start time from job history file "
+ jhFileName + " : " + e);
}
} catch (IndexOutOfBoundsException e) {
LOG.warn("Parsing job history file with partial data encoded into name: "
+ jhFileName);
}
return indexInfo;
}
/**
* Helper function to encode the URL of the filename of the job-history
* log file.
*
* @param logFileName file name of the job-history file
* @return URL encoded filename
* @throws IOException
*/
public static String encodeJobHistoryFileName(String logFileName)
throws IOException {
String replacementDelimiterEscape = null;
// Temporarily protect the escape delimiters from encoding
if (logFileName.contains(DELIMITER_ESCAPE)) {
replacementDelimiterEscape = nonOccursString(logFileName);
logFileName = logFileName.replaceAll(
DELIMITER_ESCAPE, replacementDelimiterEscape);
}
String encodedFileName = null;
try {
encodedFileName = URLEncoder.encode(logFileName, "UTF-8");
} catch (UnsupportedEncodingException uee) {
IOException ioe = new IOException();
ioe.initCause(uee);
ioe.setStackTrace(uee.getStackTrace());
throw ioe;
}
// Restore protected escape delimiters after encoding
if (replacementDelimiterEscape != null) {
encodedFileName = encodedFileName.replaceAll(
replacementDelimiterEscape, DELIMITER_ESCAPE);
}
return encodedFileName;
}
/**
* Helper function to decode the URL of the filename of the job-history
* log file.
*
* @param logFileName file name of the job-history file
* @return URL decoded filename
* @throws IOException
*/
public static String decodeJobHistoryFileName(String logFileName)
throws IOException {
String decodedFileName = null;
try {
decodedFileName = URLDecoder.decode(logFileName, "UTF-8");
} catch (UnsupportedEncodingException uee) {
IOException ioe = new IOException();
ioe.initCause(uee);
ioe.setStackTrace(uee.getStackTrace());
throw ioe;
}
return decodedFileName;
}
static String nonOccursString(String logFileName) {
int adHocIndex = 0;
String unfoundString = "q" + adHocIndex;
while (logFileName.contains(unfoundString)) {
unfoundString = "q" + ++adHocIndex;
}
return unfoundString + "q";
}
private static String getUserName(JobIndexInfo indexInfo) {
return getNonEmptyString(indexInfo.getUser());
}
private static String getJobName(JobIndexInfo indexInfo) {
return getNonEmptyString(indexInfo.getJobName());
}
private static String getQueueName(JobIndexInfo indexInfo) {
return getNonEmptyString(indexInfo.getQueueName());
}
//TODO Maybe handle default values for longs and integers here?
private static String getNonEmptyString(String in) {
if (in == null || in.length() == 0) {
in = "NA";
}
return in;
}
private static String escapeDelimiters(String escapee) {
return escapee.replaceAll(DELIMITER, DELIMITER_ESCAPE);
}
/**
* Trims the url-encoded string if required
*/
private static String trimURLEncodedString(
String encodedString, int limitLength) {
assert(limitLength >= 0) : "limitLength should be positive integer";
if (encodedString.length() <= limitLength) {
return encodedString;
}
int index = 0;
int increase = 0;
byte[] strBytes = encodedString.getBytes(UTF_8);
// calculate effective character length based on UTF-8 specification.
// The size of a character coded in UTF-8 should be 4-byte at most.
// See RFC3629
while (true) {
byte b = strBytes[index];
if (b == '%') {
byte minuend1 = strBytes[index + 1];
byte subtrahend1 = (byte)(Character.isDigit(
minuend1) ? '0' : 'A' - 10);
byte minuend2 = strBytes[index + 2];
byte subtrahend2 = (byte)(Character.isDigit(
minuend2) ? '0' : 'A' - 10);
int initialHex =
((Character.toUpperCase(minuend1) - subtrahend1) << 4) +
(Character.toUpperCase(minuend2) - subtrahend2);
if (0x00 <= initialHex && initialHex <= 0x7F) {
// For 1-byte UTF-8 characters
increase = 3;
} else if (0xC2 <= initialHex && initialHex <= 0xDF) {
// For 2-byte UTF-8 characters
increase = 6;
} else if (0xE0 <= initialHex && initialHex <= 0xEF) {
// For 3-byte UTF-8 characters
increase = 9;
} else {
// For 4-byte UTF-8 characters
increase = 12;
}
} else {
increase = 1;
}
if (index + increase > limitLength) {
break;
} else {
index += increase;
}
}
return encodedString.substring(0, index);
}
}
|
FileNameIndexUtils
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-forst/src/test/java/org/apache/flink/state/forst/ForStResourceContainerTest.java
|
{
"start": 2421,
"end": 16934
}
|
class ____ {
@ClassRule public static final TemporaryFolder TMP_FOLDER = new TemporaryFolder();
@BeforeClass
public static void ensureForStNativeLibraryLoaded() throws IOException {
NativeLibraryLoader.getInstance().loadLibrary(TMP_FOLDER.newFolder().getAbsolutePath());
}
// ------------------------------------------------------------------------
@Test
public void testFreeDBOptionsAfterClose() throws Exception {
ForStResourceContainer container = new ForStResourceContainer();
DBOptions dbOptions = container.getDbOptions();
assertThat(dbOptions.isOwningHandle(), is(true));
container.close();
assertThat(dbOptions.isOwningHandle(), is(false));
}
@Test
public void testFreeMultipleDBOptionsAfterClose() throws Exception {
ForStResourceContainer container = new ForStResourceContainer();
final int optionNumber = 20;
ArrayList<DBOptions> dbOptions = new ArrayList<>(optionNumber);
for (int i = 0; i < optionNumber; i++) {
dbOptions.add(container.getDbOptions());
}
container.close();
for (DBOptions dbOption : dbOptions) {
assertThat(dbOption.isOwningHandle(), is(false));
}
}
/**
* Guard the shared resources will be released after {@link ForStResourceContainer#close()} when
* the {@link ForStResourceContainer} instance is initiated with {@link OpaqueMemoryResource}.
*
* @throws Exception if unexpected error happened.
*/
@Test
public void testSharedResourcesAfterClose() throws Exception {
OpaqueMemoryResource<ForStSharedResources> sharedResources = getSharedResources();
ForStResourceContainer container = new ForStResourceContainer(null, sharedResources);
container.close();
ForStSharedResources forStSharedResources = sharedResources.getResourceHandle();
assertThat(forStSharedResources.getCache().isOwningHandle(), is(false));
assertThat(forStSharedResources.getWriteBufferManager().isOwningHandle(), is(false));
}
/**
* Guard that {@link ForStResourceContainer#getDbOptions()} shares the same {@link
* WriteBufferManager} instance if the {@link ForStResourceContainer} instance is initiated with
* {@link OpaqueMemoryResource}.
*
* @throws Exception if unexpected error happened.
*/
@Test
public void testGetDbOptionsWithSharedResources() throws Exception {
final int optionNumber = 20;
OpaqueMemoryResource<ForStSharedResources> sharedResources = getSharedResources();
ForStResourceContainer container = new ForStResourceContainer(null, sharedResources);
HashSet<WriteBufferManager> writeBufferManagers = new HashSet<>();
for (int i = 0; i < optionNumber; i++) {
DBOptions dbOptions = container.getDbOptions();
WriteBufferManager writeBufferManager = getWriteBufferManager(dbOptions);
writeBufferManagers.add(writeBufferManager);
}
assertThat(writeBufferManagers.size(), is(1));
assertThat(
writeBufferManagers.iterator().next(),
is(sharedResources.getResourceHandle().getWriteBufferManager()));
container.close();
}
/**
* Guard that {@link ForStResourceContainer#getColumnOptions()} shares the same {@link Cache}
* instance if the {@link ForStResourceContainer} instance is initiated with {@link
* OpaqueMemoryResource}.
*
* @throws Exception if unexpected error happened.
*/
@Test
public void testGetColumnFamilyOptionsWithSharedResources() throws Exception {
final int optionNumber = 20;
OpaqueMemoryResource<ForStSharedResources> sharedResources = getSharedResources();
ForStResourceContainer container = new ForStResourceContainer(null, sharedResources);
HashSet<Cache> caches = new HashSet<>();
for (int i = 0; i < optionNumber; i++) {
ColumnFamilyOptions columnOptions = container.getColumnOptions();
Cache cache = getBlockCache(columnOptions);
caches.add(cache);
}
assertThat(caches.size(), is(1));
assertThat(caches.iterator().next(), is(sharedResources.getResourceHandle().getCache()));
container.close();
}
private OpaqueMemoryResource<ForStSharedResources> getSharedResources() {
final long cacheSize = 1024L, writeBufferSize = 512L;
final LRUCache cache = new LRUCache(cacheSize, -1, false, 0.1);
final WriteBufferManager wbm = new WriteBufferManager(writeBufferSize, cache);
ForStSharedResources forStSharedResources =
new ForStSharedResources(cache, wbm, writeBufferSize, false);
return new OpaqueMemoryResource<>(
forStSharedResources, cacheSize, forStSharedResources::close);
}
private Cache getBlockCache(ColumnFamilyOptions columnOptions) {
BlockBasedTableConfig blockBasedTableConfig = null;
try {
blockBasedTableConfig = (BlockBasedTableConfig) columnOptions.tableFormatConfig();
} catch (ClassCastException e) {
fail("Table config got from ColumnFamilyOptions is not BlockBasedTableConfig");
}
Field cacheField = null;
try {
cacheField = BlockBasedTableConfig.class.getDeclaredField("blockCache");
} catch (NoSuchFieldException e) {
fail("blockCache is not defined");
}
cacheField.setAccessible(true);
try {
return (Cache) cacheField.get(blockBasedTableConfig);
} catch (IllegalAccessException e) {
fail("Cannot access blockCache field.");
return null;
}
}
private WriteBufferManager getWriteBufferManager(DBOptions dbOptions) {
Field writeBufferManagerField = null;
try {
writeBufferManagerField = DBOptions.class.getDeclaredField("writeBufferManager_");
} catch (NoSuchFieldException e) {
fail("writeBufferManager_ is not defined.");
}
writeBufferManagerField.setAccessible(true);
try {
return (WriteBufferManager) writeBufferManagerField.get(dbOptions);
} catch (IllegalAccessException e) {
fail("Cannot access writeBufferManager_ field.");
return null;
}
}
@Test
public void testFreeColumnOptionsAfterClose() throws Exception {
ForStResourceContainer container = new ForStResourceContainer();
ColumnFamilyOptions columnFamilyOptions = container.getColumnOptions();
assertThat(columnFamilyOptions.isOwningHandle(), is(true));
container.close();
assertThat(columnFamilyOptions.isOwningHandle(), is(false));
}
@Test
public void testFreeMultipleColumnOptionsAfterClose() throws Exception {
ForStResourceContainer container = new ForStResourceContainer();
final int optionNumber = 20;
ArrayList<ColumnFamilyOptions> columnFamilyOptions = new ArrayList<>(optionNumber);
for (int i = 0; i < optionNumber; i++) {
columnFamilyOptions.add(container.getColumnOptions());
}
container.close();
for (ColumnFamilyOptions columnFamilyOption : columnFamilyOptions) {
assertThat(columnFamilyOption.isOwningHandle(), is(false));
}
}
@Test
public void testFreeSharedResourcesAfterClose() throws Exception {
LRUCache cache = new LRUCache(1024L);
WriteBufferManager wbm = new WriteBufferManager(1024L, cache);
ForStSharedResources sharedResources = new ForStSharedResources(cache, wbm, 1024L, false);
final ThrowingRunnable<Exception> disposer = sharedResources::close;
OpaqueMemoryResource<ForStSharedResources> opaqueResource =
new OpaqueMemoryResource<>(sharedResources, 1024L, disposer);
ForStResourceContainer container = new ForStResourceContainer(null, opaqueResource);
container.close();
assertThat(cache.isOwningHandle(), is(false));
assertThat(wbm.isOwningHandle(), is(false));
}
@Test
public void testFreeWriteReadOptionsAfterClose() throws Exception {
ForStResourceContainer container = new ForStResourceContainer();
WriteOptions writeOptions = container.getWriteOptions();
ReadOptions readOptions = container.getReadOptions();
assertThat(writeOptions.isOwningHandle(), is(true));
assertThat(readOptions.isOwningHandle(), is(true));
container.close();
assertThat(writeOptions.isOwningHandle(), is(false));
assertThat(readOptions.isOwningHandle(), is(false));
}
@Test
public void testGetColumnFamilyOptionsWithPartitionedIndex() throws Exception {
LRUCache cache = new LRUCache(1024L);
WriteBufferManager wbm = new WriteBufferManager(1024L, cache);
ForStSharedResources sharedResources = new ForStSharedResources(cache, wbm, 1024L, true);
final ThrowingRunnable<Exception> disposer = sharedResources::close;
OpaqueMemoryResource<ForStSharedResources> opaqueResource =
new OpaqueMemoryResource<>(sharedResources, 1024L, disposer);
BloomFilter blockBasedFilter = new BloomFilter();
ForStOptionsFactory blockBasedBloomFilterOptionFactory =
new ForStOptionsFactory() {
@Override
public DBOptions createDBOptions(
DBOptions currentOptions, Collection<AutoCloseable> handlesToClose) {
return currentOptions;
}
@Override
public ColumnFamilyOptions createColumnOptions(
ColumnFamilyOptions currentOptions,
Collection<AutoCloseable> handlesToClose) {
TableFormatConfig tableFormatConfig = currentOptions.tableFormatConfig();
BlockBasedTableConfig blockBasedTableConfig =
tableFormatConfig == null
? new BlockBasedTableConfig()
: (BlockBasedTableConfig) tableFormatConfig;
blockBasedTableConfig.setFilter(blockBasedFilter);
handlesToClose.add(blockBasedFilter);
currentOptions.setTableFormatConfig(blockBasedTableConfig);
return currentOptions;
}
};
try (ForStResourceContainer container =
new ForStResourceContainer(blockBasedBloomFilterOptionFactory, opaqueResource)) {
ColumnFamilyOptions columnOptions = container.getColumnOptions();
BlockBasedTableConfig actual =
(BlockBasedTableConfig) columnOptions.tableFormatConfig();
assertThat(actual.indexType(), is(IndexType.kTwoLevelIndexSearch));
assertThat(actual.partitionFilters(), is(true));
assertThat(actual.pinTopLevelIndexAndFilter(), is(true));
assertFalse(actual.filterPolicy() == blockBasedFilter);
}
assertFalse("Block based filter is left unclosed.", blockBasedFilter.isOwningHandle());
}
@Test
public void testDirectoryResources() throws Exception {
Path localJobPath = new Path(TMP_FOLDER.newFolder().getPath());
Path localBasePath = new Path(localJobPath, "base");
localBasePath.getFileSystem().mkdirs(localBasePath);
Path remoteJobPath = new Path(TMP_FOLDER.newFolder().getPath());
Path remoteBasePath = new Path(remoteJobPath, "base");
remoteBasePath.getFileSystem().mkdirs(remoteBasePath);
try (final ForStResourceContainer optionsContainer =
new ForStResourceContainer(
new Configuration(),
null,
null,
ForStPathContainer.of(
localJobPath, localBasePath, remoteJobPath, remoteBasePath),
null,
new FsCheckpointStorageAccess(
new Path(TMP_FOLDER.newFolder().getPath()),
null,
new JobID(),
1024,
4096),
null,
false)) {
optionsContainer.prepareDirectories();
assertTrue(new File(localBasePath.getPath()).exists());
assertTrue(new File(remoteBasePath.getPath()).exists());
assertTrue(optionsContainer.getDbOptions().getEnv() instanceof FlinkEnv);
optionsContainer.clearDirectories();
assertFalse(new File(localBasePath.getPath()).exists());
assertTrue(new File(remoteBasePath.getPath()).exists());
optionsContainer.forceClearRemoteDirectories();
// Do not delete remote directory because it is not created by ForStResourceContainer
assertTrue(new File(remoteBasePath.getPath()).exists());
}
}
@Test
public void testFileSystemInit() throws Exception {
Path localBasePath = new Path(TMP_FOLDER.newFolder().getPath());
Path remoteBasePath = new Path(TMP_FOLDER.newFolder().getPath());
ArrayList<ColumnFamilyHandle> columnFamilyHandles = new ArrayList<>(1);
ArrayList<ColumnFamilyDescriptor> columnFamilyDescriptors = new ArrayList<>(1);
columnFamilyDescriptors.add(new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY));
DBOptions dbOptions2 =
new DBOptions().setCreateIfMissing(true).setAvoidFlushDuringShutdown(true);
ForStFlinkFileSystem fileSystem =
ForStFlinkFileSystem.get(remoteBasePath.toUri(), localBasePath, null);
dbOptions2.setEnv(
new FlinkEnv(
remoteBasePath.toString(), new StringifiedForStFileSystem(fileSystem)));
RocksDB db =
RocksDB.open(
dbOptions2,
remoteBasePath.getPath(),
columnFamilyDescriptors,
columnFamilyHandles);
db.put("key".getBytes(), "value".getBytes());
db.getSnapshot();
db.close();
}
}
|
ForStResourceContainerTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/UndefinedEqualsTest.java
|
{
"start": 11720,
"end": 12260
}
|
class ____ {
void f(Iterable a, Iterable b) {
assertWithMessage("message").that(a).containsExactlyElementsIn(b);
}
}
""")
.doTest();
}
@Test
public void truthFixDontRewriteIsNotEqualTo() {
BugCheckerRefactoringTestHelper.newInstance(UndefinedEquals.class, getClass())
.addInputLines(
"Test.java",
"""
import static com.google.common.truth.Truth.assertThat;
import java.lang.Iterable;
|
Test
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/errors/BinaryEncodeErrorTest.java
|
{
"start": 825,
"end": 1460
}
|
class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> {
root.addClasses(Echo.class, WSClient.class);
});
@Inject
Vertx vertx;
@TestHTTPResource("echo")
URI testUri;
@Test
void testError() {
WSClient client = WSClient.create(vertx).connect(testUri);
client.send(Buffer.buffer("1"));
client.waitForMessages(1);
assertEquals("Problem encoding: 1", client.getLastMessage().toString());
}
@WebSocket(path = "/echo")
public static
|
BinaryEncodeErrorTest
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/Sns2EndpointBuilderFactory.java
|
{
"start": 28990,
"end": 30475
}
|
class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final Sns2HeaderNameBuilder INSTANCE = new Sns2HeaderNameBuilder();
/**
* The Amazon SNS message ID.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsSnsMessageId}.
*/
public String awsSnsMessageId() {
return "CamelAwsSnsMessageId";
}
/**
* The Amazon SNS message subject. If not set, the subject from the
* SnsConfiguration is used.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsSnsSubject}.
*/
public String awsSnsSubject() {
return "CamelAwsSnsSubject";
}
/**
* The message structure to use such as json.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code AwsSnsMessageStructure}.
*/
public String awsSnsMessageStructure() {
return "CamelAwsSnsMessageStructure";
}
}
static Sns2EndpointBuilder endpointBuilder(String componentName, String path) {
|
Sns2HeaderNameBuilder
|
java
|
quarkusio__quarkus
|
test-framework/junit5/src/main/java/io/quarkus/test/junit/launcher/ArtifactLauncherProvider.java
|
{
"start": 566,
"end": 808
}
|
interface ____ {
Properties quarkusArtifactProperties();
Path buildOutputDirectory();
Class<?> testClass();
ArtifactLauncher.InitContext.DevServicesLaunchResult devServicesLaunchResult();
}
}
|
CreateContext
|
java
|
alibaba__druid
|
core/src/main/java/com/alibaba/druid/wall/WallTableStat.java
|
{
"start": 942,
"end": 17263
}
|
class ____ {
private volatile long selectCount;
private volatile long selectIntoCount;
private volatile long insertCount;
private volatile long updateCount;
private volatile long deleteCount;
private volatile long truncateCount;
private volatile long createCount;
private volatile long alterCount;
private volatile long dropCount;
private volatile long replaceCount;
private volatile long deleteDataCount;
private volatile long updateDataCount;
private volatile long insertDataCount;
private volatile long fetchRowCount;
static final AtomicLongFieldUpdater<WallTableStat> selectCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"selectCount");
static final AtomicLongFieldUpdater<WallTableStat> selectIntoCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"selectIntoCount");
static final AtomicLongFieldUpdater<WallTableStat> insertCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"insertCount");
static final AtomicLongFieldUpdater<WallTableStat> updateCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"updateCount");
static final AtomicLongFieldUpdater<WallTableStat> deleteCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"deleteCount");
static final AtomicLongFieldUpdater<WallTableStat> truncateCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"truncateCount");
static final AtomicLongFieldUpdater<WallTableStat> createCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"createCount");
static final AtomicLongFieldUpdater<WallTableStat> alterCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"alterCount");
static final AtomicLongFieldUpdater<WallTableStat> dropCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"dropCount");
static final AtomicLongFieldUpdater<WallTableStat> replaceCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"replaceCount");
static final AtomicLongFieldUpdater<WallTableStat> deleteDataCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"deleteDataCount");
static final AtomicLongFieldUpdater<WallTableStat> insertDataCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"insertDataCount");
static final AtomicLongFieldUpdater<WallTableStat> updateDataCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"updateDataCount");
static final AtomicLongFieldUpdater<WallTableStat> fetchRowCountUpdater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"fetchRowCount");
private volatile long fetchRowCount_0_1;
private volatile long fetchRowCount_1_10;
private volatile long fetchRowCount_10_100;
private volatile int fetchRowCount_100_1000;
private volatile int fetchRowCount_1000_10000;
private volatile int fetchRowCount_10000_more;
static final AtomicLongFieldUpdater<WallTableStat> fetchRowCount_0_1_Updater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"fetchRowCount_0_1");
static final AtomicLongFieldUpdater<WallTableStat> fetchRowCount_1_10_Updater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"fetchRowCount_1_10");
static final AtomicLongFieldUpdater<WallTableStat> fetchRowCount_10_100_Updater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"fetchRowCount_10_100");
static final AtomicIntegerFieldUpdater<WallTableStat> fetchRowCount_100_1000_Updater = AtomicIntegerFieldUpdater.newUpdater(WallTableStat.class,
"fetchRowCount_100_1000");
static final AtomicIntegerFieldUpdater<WallTableStat> fetchRowCount_1000_10000_Updater = AtomicIntegerFieldUpdater.newUpdater(WallTableStat.class,
"fetchRowCount_1000_10000");
static final AtomicIntegerFieldUpdater<WallTableStat> fetchRowCount_10000_more_Updater = AtomicIntegerFieldUpdater.newUpdater(WallTableStat.class,
"fetchRowCount_10000_more");
private volatile long updateDataCount_0_1;
private volatile long updateDataCount_1_10;
private volatile long updateDataCount_10_100;
private volatile int updateDataCount_100_1000;
private volatile int updateDataCount_1000_10000;
private volatile int updateDataCount_10000_more;
static final AtomicLongFieldUpdater<WallTableStat> updateDataCount_0_1_Updater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"updateDataCount_0_1");
static final AtomicLongFieldUpdater<WallTableStat> updateDataCount_1_10_Updater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"updateDataCount_1_10");
static final AtomicLongFieldUpdater<WallTableStat> updateDataCount_10_100_Updater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"updateDataCount_10_100");
static final AtomicIntegerFieldUpdater<WallTableStat> updateDataCount_100_1000_Updater = AtomicIntegerFieldUpdater.newUpdater(WallTableStat.class,
"updateDataCount_100_1000");
static final AtomicIntegerFieldUpdater<WallTableStat> updateDataCount_1000_10000_Updater = AtomicIntegerFieldUpdater.newUpdater(WallTableStat.class,
"updateDataCount_1000_10000");
static final AtomicIntegerFieldUpdater<WallTableStat> updateDataCount_10000_more_Updater = AtomicIntegerFieldUpdater.newUpdater(WallTableStat.class,
"updateDataCount_10000_more");
private volatile long deleteDataCount_0_1;
private volatile long deleteDataCount_1_10;
private volatile long deleteDataCount_10_100;
private volatile int deleteDataCount_100_1000;
private volatile int deleteDataCount_1000_10000;
private volatile int deleteDataCount_10000_more;
static final AtomicLongFieldUpdater<WallTableStat> deleteDataCount_0_1_Updater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"deleteDataCount_0_1");
static final AtomicLongFieldUpdater<WallTableStat> deleteDataCount_1_10_Updater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"deleteDataCount_1_10");
static final AtomicLongFieldUpdater<WallTableStat> deleteDataCount_10_100_Updater = AtomicLongFieldUpdater.newUpdater(WallTableStat.class,
"deleteDataCount_10_100");
static final AtomicIntegerFieldUpdater<WallTableStat> deleteDataCount_100_1000_Updater = AtomicIntegerFieldUpdater.newUpdater(WallTableStat.class,
"deleteDataCount_100_1000");
static final AtomicIntegerFieldUpdater<WallTableStat> deleteDataCount_1000_10000_Updater = AtomicIntegerFieldUpdater.newUpdater(WallTableStat.class,
"deleteDataCount_1000_10000");
static final AtomicIntegerFieldUpdater<WallTableStat> deleteDataCount_10000_more_Updater = AtomicIntegerFieldUpdater.newUpdater(WallTableStat.class,
"deleteDataCount_10000_more");
public WallTableStat() {
}
public long getSelectCount() {
return selectCount;
}
public long getSelectIntoCount() {
return selectIntoCount;
}
public long getInsertCount() {
return insertCount;
}
public long getUpdateCount() {
return updateCount;
}
public long getDeleteCount() {
return deleteCount;
}
public long getTruncateCount() {
return truncateCount;
}
public long getCreateCount() {
return createCount;
}
public long getAlterCount() {
return alterCount;
}
public long getDropCount() {
return dropCount;
}
public long getReplaceCount() {
return replaceCount;
}
public long getDeleteDataCount() {
return this.deleteDataCount;
}
public long[] getDeleteDataCountHistogramValues() {
return new long[]{
//
deleteDataCount_0_1, //
deleteDataCount_1_10, //
deleteDataCount_10_100, //
deleteDataCount_100_1000, //
deleteDataCount_1000_10000, //
deleteDataCount_10000_more //
};
}
public void addDeleteDataCount(long delta) {
deleteDataCountUpdater.addAndGet(this, delta);
if (delta < 1) {
deleteDataCount_0_1_Updater.incrementAndGet(this);
} else if (delta < 10) {
deleteDataCount_1_10_Updater.incrementAndGet(this);
} else if (delta < 100) {
deleteDataCount_10_100_Updater.incrementAndGet(this);
} else if (delta < 1000) {
deleteDataCount_100_1000_Updater.incrementAndGet(this);
} else if (delta < 10000) {
deleteDataCount_1000_10000_Updater.incrementAndGet(this);
} else {
deleteDataCount_10000_more_Updater.incrementAndGet(this);
}
}
public long getUpdateDataCount() {
return this.updateDataCount;
}
public long[] getUpdateDataCountHistogramValues() {
return new long[]{
//
updateDataCount_0_1, //
updateDataCount_1_10, //
updateDataCount_10_100, //
updateDataCount_100_1000, //
updateDataCount_1000_10000, //
updateDataCount_10000_more //
};
}
public long getInsertDataCount() {
return this.insertDataCount;
}
public void addInsertDataCount(long delta) {
insertDataCountUpdater.addAndGet(this, delta);
}
public void addUpdateDataCount(long delta) {
updateDataCountUpdater.addAndGet(this, delta);
if (delta < 1) {
updateDataCount_0_1_Updater.incrementAndGet(this);
} else if (delta < 10) {
updateDataCount_1_10_Updater.incrementAndGet(this);
} else if (delta < 100) {
updateDataCount_10_100_Updater.incrementAndGet(this);
} else if (delta < 1000) {
updateDataCount_100_1000_Updater.incrementAndGet(this);
} else if (delta < 10000) {
updateDataCount_1000_10000_Updater.incrementAndGet(this);
} else {
updateDataCount_10000_more_Updater.incrementAndGet(this);
}
}
public long getFetchRowCount() {
return fetchRowCount;
}
public long[] getFetchRowCountHistogramValues() {
return new long[]{
//
fetchRowCount_0_1, //
fetchRowCount_1_10, //
fetchRowCount_10_100, //
fetchRowCount_100_1000, //
fetchRowCount_1000_10000, //
fetchRowCount_10000_more //
};
}
public void addFetchRowCount(long delta) {
fetchRowCountUpdater.addAndGet(this, delta);
if (delta < 1) {
fetchRowCount_0_1_Updater.incrementAndGet(this);
} else if (delta < 10) {
fetchRowCount_1_10_Updater.incrementAndGet(this);
} else if (delta < 100) {
fetchRowCount_10_100_Updater.incrementAndGet(this);
} else if (delta < 1000) {
fetchRowCount_100_1000_Updater.incrementAndGet(this);
} else if (delta < 10000) {
fetchRowCount_1000_10000_Updater.incrementAndGet(this);
} else {
fetchRowCount_10000_more_Updater.incrementAndGet(this);
}
}
public void addSqlTableStat(WallSqlTableStat stat) {
{
long val = stat.getSelectCount();
if (val > 0) {
selectCountUpdater.addAndGet(this, val);
}
}
{
long val = stat.getSelectIntoCount();
if (val > 0) {
selectIntoCountUpdater.addAndGet(this, val);
}
}
{
long val = stat.getInsertCount();
if (val > 0) {
insertCountUpdater.addAndGet(this, val);
}
}
{
long val = stat.getUpdateCount();
if (val > 0) {
updateCountUpdater.addAndGet(this, val);
}
}
{
long val = stat.getDeleteCount();
if (val > 0) {
deleteCountUpdater.addAndGet(this, val);
}
}
{
long val = stat.getAlterCount();
if (val > 0) {
alterCountUpdater.addAndGet(this, val);
}
}
{
long val = stat.getTruncateCount();
if (val > 0) {
truncateCountUpdater.addAndGet(this, val);
}
}
{
long val = stat.getCreateCount();
if (val > 0) {
createCountUpdater.addAndGet(this, val);
}
}
{
long val = stat.getDropCount();
if (val > 0) {
dropCountUpdater.addAndGet(this, val);
}
}
{
long val = stat.getReplaceCount();
if (val > 0) {
replaceCountUpdater.addAndGet(this, val);
}
}
}
public String toString() {
Map<String, Object> map = toMap();
return JSONUtils.toJSONString(map);
}
public Map<String, Object> toMap() {
Map<String, Object> map = new LinkedHashMap<String, Object>();
return toMap(map);
}
public WallTableStatValue getStatValue(boolean reset) {
WallTableStatValue statValue = new WallTableStatValue();
statValue.setSelectCount(get(this, selectCountUpdater, reset));
statValue.setDeleteCount(get(this, deleteCountUpdater, reset));
statValue.setInsertCount(get(this, insertCountUpdater, reset));
statValue.setUpdateCount(get(this, updateCountUpdater, reset));
statValue.setAlterCount(get(this, alterCountUpdater, reset));
statValue.setDropCount(get(this, dropCountUpdater, reset));
statValue.setCreateCount(get(this, createCountUpdater, reset));
statValue.setTruncateCount(get(this, truncateCountUpdater, reset));
statValue.setReplaceCount(get(this, replaceCountUpdater, reset));
statValue.setDeleteDataCount(get(this, deleteDataCountUpdater, reset));
statValue.setFetchRowCount(get(this, fetchRowCountUpdater, reset));
statValue.setUpdateDataCount(get(this, updateDataCountUpdater, reset));
statValue.fetchRowCount_0_1 = get(this, fetchRowCount_0_1_Updater, reset);
statValue.fetchRowCount_1_10 = get(this, fetchRowCount_1_10_Updater, reset);
statValue.fetchRowCount_10_100 = get(this, fetchRowCount_10_100_Updater, reset);
statValue.fetchRowCount_100_1000 = get(this, fetchRowCount_100_1000_Updater, reset);
statValue.fetchRowCount_1000_10000 = get(this, fetchRowCount_1000_10000_Updater, reset);
statValue.fetchRowCount_10000_more = get(this, fetchRowCount_10000_more_Updater, reset);
statValue.updateDataCount_0_1 = get(this, updateDataCount_0_1_Updater, reset);
statValue.updateDataCount_1_10 = get(this, updateDataCount_1_10_Updater, reset);
statValue.updateDataCount_10_100 = get(this, updateDataCount_10_100_Updater, reset);
statValue.updateDataCount_100_1000 = get(this, updateDataCount_100_1000_Updater, reset);
statValue.updateDataCount_1000_10000 = get(this, updateDataCount_1000_10000_Updater, reset);
statValue.updateDataCount_10000_more = get(this, updateDataCount_10000_more_Updater, reset);
statValue.deleteDataCount_0_1 = get(this, deleteDataCount_0_1_Updater, reset);
statValue.deleteDataCount_1_10 = get(this, deleteDataCount_1_10_Updater, reset);
statValue.deleteDataCount_10_100 = get(this, deleteDataCount_10_100_Updater, reset);
statValue.deleteDataCount_100_1000 = get(this, deleteDataCount_100_1000_Updater, reset);
statValue.deleteDataCount_1000_10000 = get(this, deleteDataCount_1000_10000_Updater, reset);
statValue.deleteDataCount_10000_more = get(this, deleteDataCount_10000_more_Updater, reset);
return statValue;
}
public Map<String, Object> toMap(Map<String, Object> map) {
return getStatValue(false).toMap(map);
}
}
|
WallTableStat
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/objectid/ObjectId825BTest.java
|
{
"start": 3712,
"end": 5853
}
|
class ____ extends AbstractData {
private static final long serialVersionUID = 1L;
}
/*
/*****************************************************
/* Test methods
/*****************************************************
*/
@Test
public void testFull825() throws Exception
{
final ObjectMapper mapper = jsonMapperBuilder()
.activateDefaultTyping(NoCheckSubTypeValidator.instance,
DefaultTyping.OBJECT_AND_NON_CONCRETE)
.build();
String INPUT = a2q(
"{\n"+
" '@class': '_PKG_CTC',\n"+
" 'var': [{\n"+
" 'ch': {\n"+
" '@class': '_PKG_Ch',\n"+
" 'act': [{\n"+
" '@class': '_PKG_CTD',\n"+
" 'oidString': 'oid1',\n"+
" 'dec': [{\n"+
" '@class': '_PKG_Dec',\n"+
" 'oidString': 'oid2',\n"+
" 'outTr': [{\n"+
" '@class': '_PKG_Tr',\n"+
" 'target': {\n"+
" '@class': '_PKG_Ti',\n"+
" 'oidString': 'oid3',\n"+
" 'timer': 'problemoid',\n"+
" 'outTr': [{\n"+
" '@class': '_PKG_Tr',\n"+
" 'target': {\n"+
" '@class': '_PKG_Ti',\n"+
" 'oidString': 'oid4',\n"+
" 'timer': {\n"+
" '@class': '_PKG_V',\n"+
" 'oidString': 'problemoid'\n"+
" }\n"+
" }\n"+
" }]\n"+
" }\n"+
" }]\n"+
" }]\n"+
" }],\n"+
" 'oidString': 'oid5'\n"+
" },\n"+
" '@class': '_PKG_CTV',\n"+
" 'oidString': 'oid6',\n"+
" 'locV': ['problemoid']\n"+
" }],\n"+
" 'oidString': 'oid7'\n"+
"}\n"
);
// also replace package
final String newPkg = getClass().getName() + "\\$";
INPUT = INPUT.replaceAll("_PKG_", newPkg);
CTC result = mapper.readValue(INPUT, CTC.class);
assertNotNull(result);
}
}
|
V
|
java
|
apache__maven
|
impl/maven-core/src/main/java/org/apache/maven/ProjectCycleException.java
|
{
"start": 907,
"end": 1169
}
|
class ____ extends BuildFailureException {
public ProjectCycleException(String message) {
super(message);
}
public ProjectCycleException(String message, CycleDetectedException cause) {
super(message, cause);
}
}
|
ProjectCycleException
|
java
|
spring-projects__spring-framework
|
spring-expression/src/test/java/org/springframework/expression/spel/SpelCompilationCoverageTests.java
|
{
"start": 262500,
"end": 262767
}
|
class ____ {
static Servlet3SecurityContextHolderAwareRequestWrapper getOne() {
HttpServlet3RequestFactory outer = new HttpServlet3RequestFactory();
return outer.new Servlet3SecurityContextHolderAwareRequestWrapper();
}
// private
|
HttpServlet3RequestFactory
|
java
|
netty__netty
|
codec-dns/src/main/java/io/netty/handler/codec/dns/DefaultDnsPtrRecord.java
|
{
"start": 1011,
"end": 2464
}
|
class ____ the record, usually one of the following:
* <ul>
* <li>{@link #CLASS_IN}</li>
* <li>{@link #CLASS_CSNET}</li>
* <li>{@link #CLASS_CHAOS}</li>
* <li>{@link #CLASS_HESIOD}</li>
* <li>{@link #CLASS_NONE}</li>
* <li>{@link #CLASS_ANY}</li>
* </ul>
* @param timeToLive the TTL value of the record
* @param hostname the hostname this PTR record resolves to.
*/
public DefaultDnsPtrRecord(
String name, int dnsClass, long timeToLive, String hostname) {
super(name, DnsRecordType.PTR, dnsClass, timeToLive);
this.hostname = checkNotNull(hostname, "hostname");
}
@Override
public String hostname() {
return hostname;
}
@Override
public String toString() {
final StringBuilder buf = new StringBuilder(64).append(StringUtil.simpleClassName(this)).append('(');
final DnsRecordType type = type();
buf.append(name().isEmpty()? "<root>" : name())
.append(' ')
.append(timeToLive())
.append(' ');
DnsMessageUtil.appendRecordClass(buf, dnsClass())
.append(' ')
.append(type.name());
buf.append(' ')
.append(hostname);
return buf.toString();
}
}
|
of
|
java
|
google__guava
|
guava-testlib/test/com/google/common/testing/NullPointerTesterTest.java
|
{
"start": 7745,
"end": 7991
}
|
interface ____ {
static InterfaceDefaultMethodFailsToCheckNull create() {
return new InterfaceDefaultMethodFailsToCheckNull() {};
}
default void doNotCheckNull(String unused) {}
}
private
|
InterfaceDefaultMethodFailsToCheckNull
|
java
|
spring-projects__spring-framework
|
spring-webflux/src/main/java/org/springframework/web/reactive/function/server/EntityResponse.java
|
{
"start": 3430,
"end": 4561
}
|
class ____ elements contained in the publisher
* @param <T> the type of the elements contained in the publisher
* @param <P> the type of the {@code Publisher}
* @return the created builder
*/
static <T, P extends Publisher<T>> Builder<P> fromPublisher(P publisher, Class<T> elementClass) {
return new DefaultEntityResponseBuilder<>(publisher,
BodyInserters.fromPublisher(publisher, elementClass));
}
/**
* Create a builder with the given publisher.
* @param publisher the publisher that represents the body of the response
* @param typeReference the type of elements contained in the publisher
* @param <T> the type of the elements contained in the publisher
* @param <P> the type of the {@code Publisher}
* @return the created builder
*/
static <T, P extends Publisher<T>> Builder<P> fromPublisher(P publisher,
ParameterizedTypeReference<T> typeReference) {
return new DefaultEntityResponseBuilder<>(publisher,
BodyInserters.fromPublisher(publisher, typeReference));
}
/**
* Defines a builder for {@code EntityResponse}.
*
* @param <T> a self reference to the builder type
*/
|
of
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/ArmoryTest.java
|
{
"start": 1523,
"end": 1787
}
|
class ____ {
private List<Object> items = new ArrayList<Object>();
public List<Object> getItems() {
return items;
}
public void setItems(List<Object> items) {
this.items = items;
}
}
}
|
MessageBody
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetClusterMetricsResponsePBImpl.java
|
{
"start": 1559,
"end": 4154
}
|
class ____ extends GetClusterMetricsResponse {
GetClusterMetricsResponseProto proto = GetClusterMetricsResponseProto.getDefaultInstance();
GetClusterMetricsResponseProto.Builder builder = null;
boolean viaProto = false;
private YarnClusterMetrics yarnClusterMetrics = null;
public GetClusterMetricsResponsePBImpl() {
builder = GetClusterMetricsResponseProto.newBuilder();
}
public GetClusterMetricsResponsePBImpl(GetClusterMetricsResponseProto proto) {
this.proto = proto;
viaProto = true;
}
public GetClusterMetricsResponseProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private void mergeLocalToBuilder() {
if (this.yarnClusterMetrics != null) {
builder.setClusterMetrics(convertToProtoFormat(this.yarnClusterMetrics));
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = GetClusterMetricsResponseProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public YarnClusterMetrics getClusterMetrics() {
GetClusterMetricsResponseProtoOrBuilder p = viaProto ? proto : builder;
if (this.yarnClusterMetrics != null) {
return this.yarnClusterMetrics;
}
if (!p.hasClusterMetrics()) {
return null;
}
this.yarnClusterMetrics = convertFromProtoFormat(p.getClusterMetrics());
return this.yarnClusterMetrics;
}
@Override
public void setClusterMetrics(YarnClusterMetrics clusterMetrics) {
maybeInitBuilder();
if (clusterMetrics == null)
builder.clearClusterMetrics();
this.yarnClusterMetrics = clusterMetrics;
}
private YarnClusterMetricsPBImpl convertFromProtoFormat(YarnClusterMetricsProto p) {
return new YarnClusterMetricsPBImpl(p);
}
private YarnClusterMetricsProto convertToProtoFormat(YarnClusterMetrics t) {
return ((YarnClusterMetricsPBImpl)t).getProto();
}
}
|
GetClusterMetricsResponsePBImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/metamodel/model/domain/internal/AnyDiscriminatorSqmPath.java
|
{
"start": 686,
"end": 1797
}
|
class ____<T> extends AbstractSqmPath<T> implements DiscriminatorSqmPath<T> {
protected AnyDiscriminatorSqmPath(
NavigablePath navigablePath,
SqmPathSource<T> referencedPathSource,
SqmPath<?> lhs,
NodeBuilder nodeBuilder) {
super( navigablePath, referencedPathSource, lhs, nodeBuilder );
}
@Override
public AnyDiscriminatorSqmPath<T> copy(SqmCopyContext context) {
final AnyDiscriminatorSqmPath<T> existing = context.getCopy( this );
if ( existing != null ) {
return existing;
}
//noinspection unchecked
return context.registerCopy(
this,
(AnyDiscriminatorSqmPath<T>) getLhs().copy( context ).type()
);
}
@Override
public <X> X accept(SemanticQueryWalker<X> walker) {
return walker.visitAnyDiscriminatorTypeExpression( this ) ;
}
@Override
public @NonNull SqmPath<?> getLhs() {
return castNonNull( super.getLhs() );
}
@Override
public @NonNull AnyDiscriminatorSqmPathSource<T> getExpressible() {
// return (AnyDiscriminatorSqmPathSource<T>) getNodeType();
return (AnyDiscriminatorSqmPathSource<T>) getReferencedPathSource();
}
}
|
AnyDiscriminatorSqmPath
|
java
|
grpc__grpc-java
|
api/src/main/java/io/grpc/LoadBalancer.java
|
{
"start": 9416,
"end": 11530
}
|
class ____ {
private final List<EquivalentAddressGroup> addresses;
@NameResolver.ResolutionResultAttr
private final Attributes attributes;
@Nullable
private final Object loadBalancingPolicyConfig;
// Make sure to update toBuilder() below!
private ResolvedAddresses(
List<EquivalentAddressGroup> addresses,
@NameResolver.ResolutionResultAttr Attributes attributes,
Object loadBalancingPolicyConfig) {
this.addresses =
Collections.unmodifiableList(new ArrayList<>(checkNotNull(addresses, "addresses")));
this.attributes = checkNotNull(attributes, "attributes");
this.loadBalancingPolicyConfig = loadBalancingPolicyConfig;
}
/**
* Factory for constructing a new Builder.
*
* @since 1.21.0
*/
public static Builder newBuilder() {
return new Builder();
}
/**
* Converts this back to a builder.
*
* @since 1.21.0
*/
public Builder toBuilder() {
return newBuilder()
.setAddresses(addresses)
.setAttributes(attributes)
.setLoadBalancingPolicyConfig(loadBalancingPolicyConfig);
}
/**
* Gets the server addresses.
*
* @since 1.21.0
*/
public List<EquivalentAddressGroup> getAddresses() {
return addresses;
}
/**
* Gets the attributes associated with these addresses. If this was not previously set,
* {@link Attributes#EMPTY} will be returned.
*
* @since 1.21.0
*/
@NameResolver.ResolutionResultAttr
public Attributes getAttributes() {
return attributes;
}
/**
* Gets the domain specific load balancing policy. This is the config produced by
* {@link LoadBalancerProvider#parseLoadBalancingPolicyConfig(Map)}.
*
* @since 1.21.0
*/
@Nullable
public Object getLoadBalancingPolicyConfig() {
return loadBalancingPolicyConfig;
}
/**
* Builder for {@link ResolvedAddresses}.
*/
@ExperimentalApi("https://github.com/grpc/grpc-java/issues/1771")
public static final
|
ResolvedAddresses
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/composite/BatchRequest.java
|
{
"start": 1247,
"end": 2037
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 1L;
private final Method method;
private final Object richInput;
private final String url;
BatchRequest(final Method method, final String url) {
this(method, url, null);
}
BatchRequest(final Method method, final String url, final Object richInput) {
this.method = method;
this.url = url;
this.richInput = richInput;
}
public Method getMethod() {
return method;
}
public Object getRichInput() {
return richInput;
}
public String getUrl() {
return url;
}
@Override
public String toString() {
return "Batch: " + method + " " + url + ", data:" + richInput;
}
}
|
BatchRequest
|
java
|
apache__camel
|
components/camel-netty/src/main/java/org/apache/camel/component/netty/NettyConsumer.java
|
{
"start": 1088,
"end": 4390
}
|
class ____ extends DefaultConsumer {
private static final Logger LOG = LoggerFactory.getLogger(NettyConsumer.class);
private CamelContext context;
private NettyConfiguration configuration;
private NettyServerBootstrapFactory nettyServerBootstrapFactory;
public NettyConsumer(NettyEndpoint nettyEndpoint, Processor processor, NettyConfiguration configuration) {
super(nettyEndpoint, processor);
this.context = this.getEndpoint().getCamelContext();
this.configuration = configuration;
setNettyServerBootstrapFactory(configuration.getNettyServerBootstrapFactory());
setExceptionHandler(new NettyConsumerExceptionHandler(this));
}
@Override
public boolean isHostedService() {
// we are hosted if not in client mode
return !configuration.isClientMode();
}
@Override
public NettyEndpoint getEndpoint() {
return (NettyEndpoint) super.getEndpoint();
}
@Override
protected void doStart() throws Exception {
super.doStart();
LOG.debug("Netty consumer binding to: {}", configuration.getAddress());
if (nettyServerBootstrapFactory == null) {
// setup pipeline factory
ServerInitializerFactory pipelineFactory;
ServerInitializerFactory factory = configuration.getServerInitializerFactory();
if (factory != null) {
pipelineFactory = factory.createPipelineFactory(this);
} else {
pipelineFactory = new DefaultServerInitializerFactory(this);
}
if (isTcp()) {
if (configuration.isClientMode()) {
nettyServerBootstrapFactory = new ClientModeTCPNettyServerBootstrapFactory();
} else {
nettyServerBootstrapFactory = new SingleTCPNettyServerBootstrapFactory();
}
} else {
nettyServerBootstrapFactory = new SingleUDPNettyServerBootstrapFactory();
}
nettyServerBootstrapFactory.init(context, configuration, pipelineFactory);
}
ServiceHelper.startService(nettyServerBootstrapFactory);
LOG.info("Netty consumer bound to: {}", configuration.getAddress());
}
@Override
protected void doStop() throws Exception {
LOG.debug("Netty consumer unbinding from: {}", configuration.getAddress());
ServiceHelper.stopService(nettyServerBootstrapFactory);
LOG.info("Netty consumer unbound from: {}", configuration.getAddress());
super.doStop();
}
public CamelContext getContext() {
return context;
}
public NettyConfiguration getConfiguration() {
return configuration;
}
public void setConfiguration(NettyConfiguration configuration) {
this.configuration = configuration;
}
public NettyServerBootstrapFactory getNettyServerBootstrapFactory() {
return nettyServerBootstrapFactory;
}
public void setNettyServerBootstrapFactory(NettyServerBootstrapFactory nettyServerBootstrapFactory) {
this.nettyServerBootstrapFactory = nettyServerBootstrapFactory;
}
protected boolean isTcp() {
return configuration.getProtocol().equalsIgnoreCase("tcp");
}
}
|
NettyConsumer
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NullableVoidTest.java
|
{
"start": 3273,
"end": 3385
}
|
class ____ {
<@Nullable T> void f(T t) {}
}
""")
.doTest();
}
}
|
Test
|
java
|
apache__camel
|
components/camel-jaxb/src/test/java/org/apache/camel/example/JaxbErrorLogTest.java
|
{
"start": 1196,
"end": 2036
}
|
class ____ extends CamelTestSupport {
@Test
public void testErrorHandling() throws Exception {
// the 2nd message is set to fail, but the 4 others should be routed
getMockEndpoint("mock:end").expectedMessageCount(4);
// FailingBean will cause message at index 2 to throw exception
for (int i = 0; i < 5; i++) {
sendBody("seda:test", new CannotMarshal(i));
}
MockEndpoint.assertIsSatisfied(context);
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("seda:test")
.bean(new FailingBean())
.to("log:end", "mock:end");
}
};
}
public static final
|
JaxbErrorLogTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/TypeUtilsTest2.java
|
{
"start": 3652,
"end": 3697
}
|
class ____<K, V> {
}
public static
|
Pair
|
java
|
quarkusio__quarkus
|
extensions/azure-functions/deployment/src/main/java/io/quarkus/azure/functions/deployment/AzureFunctionBuildItem.java
|
{
"start": 143,
"end": 713
}
|
class ____ extends MultiBuildItem {
private final String functionName;
private final Class declaring;
private final Method method;
public AzureFunctionBuildItem(String functionName, Class declaring, Method method) {
this.functionName = functionName;
this.declaring = declaring;
this.method = method;
}
public Class getDeclaring() {
return declaring;
}
public String getFunctionName() {
return functionName;
}
public Method getMethod() {
return method;
}
}
|
AzureFunctionBuildItem
|
java
|
elastic__elasticsearch
|
x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/TransportWatcherStatsActionTests.java
|
{
"start": 2142,
"end": 6340
}
|
class ____ extends ESTestCase {
private ThreadPool threadPool;
private TransportWatcherStatsAction action;
@Before
public void setupTransportAction() {
ProjectId projectId = randomProjectIdOrDefault();
threadPool = new TestThreadPool("TransportWatcherStatsActionTests");
TransportService transportService = mock(TransportService.class);
when(transportService.getThreadPool()).thenReturn(threadPool);
ClusterService clusterService = mock(ClusterService.class);
DiscoveryNode discoveryNode = DiscoveryNodeUtils.create("nodeId");
when(clusterService.localNode()).thenReturn(discoveryNode);
ClusterName clusterName = new ClusterName("cluster_name");
when(clusterService.getClusterName()).thenReturn(clusterName);
ClusterState clusterState = mock(ClusterState.class);
when(clusterService.state()).thenReturn(clusterState);
Metadata metadata = Metadata.builder().put(ProjectMetadata.builder(projectId).build()).build();
when(clusterState.getMetadata()).thenReturn(metadata);
when(clusterState.metadata()).thenReturn(metadata);
WatcherLifeCycleService watcherLifeCycleService = mock(WatcherLifeCycleService.class);
when(watcherLifeCycleService.getState()).thenReturn(() -> WatcherState.STARTED);
ExecutionService executionService = mock(ExecutionService.class);
when(executionService.executionThreadPoolQueueSize()).thenReturn(100L);
when(executionService.executionThreadPoolMaxSize()).thenReturn(5L);
Counters firstExecutionCounters = new Counters();
firstExecutionCounters.inc("spam.eggs", 1);
Counters secondExecutionCounters = new Counters();
secondExecutionCounters.inc("whatever", 1);
secondExecutionCounters.inc("foo.bar.baz", 123);
when(executionService.executionTimes()).thenReturn(firstExecutionCounters, secondExecutionCounters);
TriggerService triggerService = mock(TriggerService.class);
when(triggerService.count()).thenReturn(10L, 30L);
Counters firstTriggerServiceStats = new Counters();
firstTriggerServiceStats.inc("foo.bar.baz", 1024);
Counters secondTriggerServiceStats = new Counters();
secondTriggerServiceStats.inc("foo.bar.baz", 1024);
when(triggerService.stats()).thenReturn(firstTriggerServiceStats, secondTriggerServiceStats);
action = new TransportWatcherStatsAction(
transportService,
clusterService,
threadPool,
new ActionFilters(Collections.emptySet()),
watcherLifeCycleService,
executionService,
triggerService,
TestProjectResolvers.singleProject(projectId)
);
}
@After
public void cleanup() {
ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
threadPool = null;
}
public void testWatcherStats() throws Exception {
WatcherStatsRequest request = new WatcherStatsRequest();
request.includeStats(true);
WatcherStatsResponse.Node nodeResponse1 = action.nodeOperation(new WatcherStatsRequest.Node(request), null);
WatcherStatsResponse.Node nodeResponse2 = action.nodeOperation(new WatcherStatsRequest.Node(request), null);
WatcherStatsResponse response = action.newResponse(request, Arrays.asList(nodeResponse1, nodeResponse2), Collections.emptyList());
assertThat(response.getWatchesCount(), is(40L));
try (XContentBuilder builder = jsonBuilder()) {
builder.startObject();
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
ObjectPath objectPath = ObjectPath.createFromXContent(JsonXContent.jsonXContent, BytesReference.bytes(builder));
assertThat(objectPath.evaluate("stats.0.stats.foo.bar.baz"), is(1024));
assertThat(objectPath.evaluate("stats.1.stats.foo.bar.baz"), is(1147));
assertThat(objectPath.evaluate("stats.0.stats.spam.eggs"), is(1));
assertThat(objectPath.evaluate("stats.1.stats.whatever"), is(1));
}
}
}
|
TransportWatcherStatsActionTests
|
java
|
apache__maven
|
impl/maven-cli/src/test/java/org/apache/maven/cling/invoker/mvnup/goals/ModelUpgradeStrategyTest.java
|
{
"start": 2471,
"end": 5115
}
|
class ____ {
@ParameterizedTest
@MethodSource("provideApplicabilityScenarios")
@DisplayName("should determine applicability based on options")
void shouldDetermineApplicabilityBasedOnOptions(
Boolean all, String model, boolean expectedApplicable, String description) {
UpgradeContext context = TestUtils.createMockContext(TestUtils.createOptions(all, null, null, null, model));
boolean isApplicable = strategy.isApplicable(context);
assertEquals(expectedApplicable, isApplicable, description);
}
private static Stream<Arguments> provideApplicabilityScenarios() {
return Stream.of(
Arguments.of(null, "4.1.0", true, "Should be applicable when --model=4.1.0 is specified"),
Arguments.of(true, null, true, "Should be applicable when --all is specified"),
Arguments.of(true, "4.0.0", true, "Should be applicable when --all is specified (overrides model)"),
Arguments.of(null, null, false, "Should not be applicable by default"),
Arguments.of(false, null, false, "Should not be applicable when --all=false"),
Arguments.of(null, "4.0.0", false, "Should not be applicable for same version (4.0.0)"),
Arguments.of(false, "4.1.0", true, "Should be applicable for model upgrade even when --all=false"),
Arguments.of(false, "4.2.0", true, "Should be applicable for model upgrade even when --all=false"),
Arguments.of(null, "4.2.0", true, "Should be applicable when --model=4.2.0 is specified"));
}
@Test
@DisplayName("should handle conflicting option combinations")
void shouldHandleConflictingOptionCombinations() {
// Test case where multiple conflicting options are set
UpgradeContext context = TestUtils.createMockContext(TestUtils.createOptions(
true, // --all
false, // --infer (conflicts with --all)
false, // --fix-model (conflicts with --all)
false, // --plugins (conflicts with --all)
"4.0.0" // --model (conflicts with --all)
));
// --all should take precedence and make strategy applicable
assertTrue(
strategy.isApplicable(context),
"Strategy should be applicable when --all is set, regardless of other options");
}
}
@Nested
@DisplayName("Model Version Upgrades")
|
ApplicabilityTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/querycache/QueryCacheTest.java
|
{
"start": 24114,
"end": 24957
}
|
class ____ extends ConnectionProviderDelegate {
private static final ThreadLocal<SQLException> CONNECTION_RETRIEVAL_EXCEPTION_TO_THROW = new ThreadLocal<>();
public ProxyConnectionProvider() {
setConnectionProvider( SharedDriverManagerConnectionProvider.getInstance() );
}
static void runWithConnectionRetrievalFailure(SQLException exceptionToThrow, Runnable runnable) {
CONNECTION_RETRIEVAL_EXCEPTION_TO_THROW.set( exceptionToThrow );
try {
runnable.run();
}
finally {
CONNECTION_RETRIEVAL_EXCEPTION_TO_THROW.remove();
}
}
@Override
public Connection getConnection() throws SQLException {
SQLException exceptionToSend = CONNECTION_RETRIEVAL_EXCEPTION_TO_THROW.get();
if ( exceptionToSend != null ) {
throw exceptionToSend;
}
return super.getConnection();
}
}
}
|
ProxyConnectionProvider
|
java
|
grpc__grpc-java
|
core/src/test/java/io/grpc/internal/ForwardingNameResolverTest.java
|
{
"start": 1259,
"end": 2631
}
|
class ____ {
private final NameResolver delegate = mock(NameResolver.class);
private final NameResolver forwarder = new ForwardingNameResolver(delegate) {
};
@Test
public void allMethodsForwarded() throws Exception {
ForwardingTestUtil.testMethodsForwarded(
NameResolver.class,
delegate,
forwarder,
Collections.<Method>emptyList());
}
@Test
public void getServiceAuthority() {
String auth = "example.com";
when(delegate.getServiceAuthority()).thenReturn(auth);
assertEquals(auth, forwarder.getServiceAuthority());
}
@Test
@SuppressWarnings("deprecation") // this will be removed in 1.21.0
public void start_listener() {
NameResolver.Listener listener = new NameResolver.Listener() {
@Override
public void onAddresses(List<EquivalentAddressGroup> servers, Attributes attributes) { }
@Override
public void onError(Status error) { }
};
forwarder.start(listener);
verify(delegate).start(listener);
}
@Test
public void start_observer() {
NameResolver.Listener2 listener = new NameResolver.Listener2() {
@Override
public void onResult(ResolutionResult result) {
}
@Override
public void onError(Status error) { }
};
forwarder.start(listener);
verify(delegate).start(listener);
}
}
|
ForwardingNameResolverTest
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/DateComparator.java
|
{
"start": 1083,
"end": 3490
}
|
class ____ extends BasicTypeComparator<Date> {
private static final long serialVersionUID = 1L;
public DateComparator(boolean ascending) {
super(ascending);
}
@Override
public int compareSerialized(DataInputView firstSource, DataInputView secondSource)
throws IOException {
return compareSerializedDate(firstSource, secondSource, ascendingComparison);
}
@Override
public boolean supportsNormalizedKey() {
return true;
}
@Override
public int getNormalizeKeyLen() {
return 8;
}
@Override
public boolean isNormalizedKeyPrefixOnly(int keyBytes) {
return keyBytes < 8;
}
@Override
public void putNormalizedKey(Date record, MemorySegment target, int offset, int numBytes) {
putNormalizedKeyDate(record, target, offset, numBytes);
}
@Override
public DateComparator duplicate() {
return new DateComparator(ascendingComparison);
}
// --------------------------------------------------------------------------------------------
// Static Helpers for Date Comparison
// --------------------------------------------------------------------------------------------
public static int compareSerializedDate(
DataInputView firstSource, DataInputView secondSource, boolean ascendingComparison)
throws IOException {
final long l1 = firstSource.readLong();
final long l2 = secondSource.readLong();
final int comp = (l1 < l2 ? -1 : (l1 == l2 ? 0 : 1));
return ascendingComparison ? comp : -comp;
}
public static void putNormalizedKeyDate(
Date record, MemorySegment target, int offset, int numBytes) {
final long value = record.getTime() - Long.MIN_VALUE;
// see IntValue for an explanation of the logic
if (numBytes == 8) {
// default case, full normalized key
target.putLongBigEndian(offset, value);
} else if (numBytes < 8) {
for (int i = 0; numBytes > 0; numBytes--, i++) {
target.put(offset + i, (byte) (value >>> ((7 - i) << 3)));
}
} else {
target.putLongBigEndian(offset, value);
for (int i = 8; i < numBytes; i++) {
target.put(offset + i, (byte) 0);
}
}
}
}
|
DateComparator
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/ContextConfiguration.java
|
{
"start": 11161,
"end": 11334
}
|
class ____ <em>inherit</em> the application context initializers defined
* by test superclasses and enclosing classes. Specifically, the initializers
* for a given test
|
will
|
java
|
square__retrofit
|
retrofit/kotlin-test/src/test/java/retrofit2/KotlinRequestFactoryTest.java
|
{
"start": 237,
"end": 309
}
|
class ____ {
@Test
public void headUnit() {
|
KotlinRequestFactoryTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/ValuesBytesRefAggregatorFunctionSupplier.java
|
{
"start": 653,
"end": 1641
}
|
class ____ implements AggregatorFunctionSupplier {
public ValuesBytesRefAggregatorFunctionSupplier() {
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return ValuesBytesRefAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return ValuesBytesRefGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public ValuesBytesRefAggregatorFunction aggregator(DriverContext driverContext,
List<Integer> channels) {
return ValuesBytesRefAggregatorFunction.create(driverContext, channels);
}
@Override
public ValuesBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
List<Integer> channels) {
return ValuesBytesRefGroupingAggregatorFunction.create(channels, driverContext);
}
@Override
public String describe() {
return "values of bytes";
}
}
|
ValuesBytesRefAggregatorFunctionSupplier
|
java
|
apache__kafka
|
trogdor/src/main/java/org/apache/kafka/trogdor/rest/TaskStopping.java
|
{
"start": 1126,
"end": 1714
}
|
class ____ extends TaskState {
/**
* The time on the agent when the task was received.
*/
private final long startedMs;
@JsonCreator
public TaskStopping(@JsonProperty("spec") TaskSpec spec,
@JsonProperty("startedMs") long startedMs,
@JsonProperty("status") JsonNode status) {
super(spec, status);
this.startedMs = startedMs;
}
@JsonProperty
public long startedMs() {
return startedMs;
}
@Override
public TaskStateType stateType() {
return TaskStateType.STOPPING;
}
}
|
TaskStopping
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/CsiAdaptorProtocol.java
|
{
"start": 1637,
"end": 3567
}
|
interface ____ {
/**
* Get plugin info from the CSI driver. The driver usually returns
* the name of the driver and its version.
* @param request get plugin info request.
* @return response that contains driver name and its version.
* @throws YarnException exceptions from yarn servers.
* @throws IOException io error occur.
*/
GetPluginInfoResponse getPluginInfo(GetPluginInfoRequest request)
throws YarnException, IOException;
/**
* Validate if the volume capacity can be satisfied on the underneath
* storage system. This method responses if the capacity can be satisfied
* or not, with a detailed message.
* @param request validate volume capability request.
* @return validation response.
* @throws YarnException exceptions from yarn servers.
* @throws IOException io error occur.
*/
ValidateVolumeCapabilitiesResponse validateVolumeCapacity(
ValidateVolumeCapabilitiesRequest request) throws YarnException,
IOException;
/**
* Publish the volume on a node manager, the volume will be mounted
* to the local file system and become visible for clients.
* @param request publish volume request.
* @return publish volume response.
* @throws YarnException exceptions from yarn servers.
* @throws IOException io error occur.
*/
NodePublishVolumeResponse nodePublishVolume(
NodePublishVolumeRequest request) throws YarnException, IOException;
/**
* This is a reverse operation of
* {@link #nodePublishVolume(NodePublishVolumeRequest)}, it un-mounts the
* volume from given node.
* @param request un-publish volume request.
* @return un-publish volume response.
* @throws YarnException exceptions from yarn servers.
* @throws IOException io error occur.
*/
NodeUnpublishVolumeResponse nodeUnpublishVolume(
NodeUnpublishVolumeRequest request) throws YarnException, IOException;
}
|
CsiAdaptorProtocol
|
java
|
redisson__redisson
|
redisson-spring-data/redisson-spring-data-16/src/test/java/org/redisson/RedisRunner.java
|
{
"start": 4261,
"end": 4358
}
|
enum ____ {
ALWAYS,
EVERYSEC,
NO
}
public
|
APPEND_FSYNC_MODE_OPTIONS
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/loader/ast/internal/LoaderSelectBuilder.java
|
{
"start": 4036,
"end": 27907
}
|
class ____ {
/**
* Create an SQL AST select-statement for loading by unique key
*
* @param loadable The root Loadable
* @param partsToSelect Parts of the Loadable to select. Null/empty indicates to select the Loadable itself
* @param restrictedPart Part to base the where-clause restriction on
* @param cachedDomainResult DomainResult to be used. Null indicates to generate the DomainResult
* @param loadQueryInfluencers Any influencers (entity graph, fetch profile) to account for
* @param lockOptions Pessimistic lock options to apply
* @param jdbcParameterConsumer Consumer for all JdbcParameter references created
* @param sessionFactory The SessionFactory
*/
public static SelectStatement createSelectByUniqueKey(
Loadable loadable,
List<? extends ModelPart> partsToSelect,
ModelPart restrictedPart,
DomainResult<?> cachedDomainResult,
LoadQueryInfluencers loadQueryInfluencers,
LockOptions lockOptions,
Consumer<JdbcParameter> jdbcParameterConsumer,
SessionFactoryImplementor sessionFactory) {
final var process = new LoaderSelectBuilder(
sessionFactory.getSqlTranslationEngine(),
loadable,
partsToSelect,
singletonList( restrictedPart ),
cachedDomainResult,
1,
loadQueryInfluencers,
lockOptions,
determineGraphTraversalState( loadQueryInfluencers, sessionFactory.getJpaMetamodel() ),
true,
jdbcParameterConsumer
);
return process.generateSelect();
}
/**
* Create a select-statement (SQL AST) for loading by multiple keys using a single SQL ARRAY parameter
*/
public static SelectStatement createSelectBySingleArrayParameter(
Loadable loadable,
ValuedModelPart restrictedPart,
LoadQueryInfluencers influencers,
LockOptions lockOptions,
JdbcParameter jdbcArrayParameter,
SessionFactoryImplementor sessionFactory) {
final var builder = new LoaderSelectBuilder(
sessionFactory.getSqlTranslationEngine(),
loadable,
null,
singletonList( restrictedPart ),
null,
-1,
influencers,
lockOptions,
determineGraphTraversalState( influencers, sessionFactory.getJpaMetamodel() ),
true,
null
);
final var rootQuerySpec = new QuerySpec( true );
final var sqlAstCreationState = builder.createSqlAstCreationState( rootQuerySpec );
final var rootNavigablePath = new NavigablePath( loadable.getRootPathName() );
rootQuerySpec.applyRootPathForLocking( rootNavigablePath );
final var rootTableGroup =
builder.buildRootTableGroup( rootNavigablePath, rootQuerySpec, sqlAstCreationState );
final var domainResult = loadable.createDomainResult(
rootNavigablePath,
rootTableGroup,
null,
sqlAstCreationState
);
final List<DomainResult<?>> domainResults = singletonList( domainResult );
applyArrayParamRestriction(
rootQuerySpec,
rootNavigablePath,
rootTableGroup,
restrictedPart,
jdbcArrayParameter,
sqlAstCreationState
);
if ( loadable instanceof PluralAttributeMapping pluralAttributeMapping ) {
builder.applyFiltering( rootQuerySpec, rootTableGroup, pluralAttributeMapping, sqlAstCreationState );
builder.applyOrdering( rootQuerySpec, rootTableGroup, pluralAttributeMapping, sqlAstCreationState );
}
else {
builder.applyFiltering( rootQuerySpec, rootTableGroup, (Restrictable) loadable, sqlAstCreationState );
}
return new SelectStatement( rootQuerySpec, domainResults );
}
private static void applyArrayParamRestriction(
QuerySpec rootQuerySpec,
NavigablePath rootNavigablePath,
TableGroup rootTableGroup,
ValuedModelPart restrictedPart,
JdbcParameter jdbcArrayParameter,
LoaderSqlAstCreationState sqlAstCreationState) {
assert restrictedPart.getJdbcTypeCount() == 1;
final var sqlExpressionResolver = sqlAstCreationState.getSqlExpressionResolver();
final var restrictedPartMapping = restrictedPart.getSelectable( 0 );
final var restrictionPath =
rootNavigablePath.append( restrictedPart.getNavigableRole().getNavigableName() );
final var tableReference =
rootTableGroup.resolveTableReference( restrictionPath,
restrictedPartMapping.getContainingTableExpression() );
final var columnRef =
(ColumnReference)
sqlExpressionResolver.resolveSqlExpression( tableReference, restrictedPartMapping );
rootQuerySpec.applyPredicate( new InArrayPredicate( columnRef, jdbcArrayParameter ) );
}
/**
* Create an SQL AST select-statement based on matching one-or-more keys
*
* @param loadable The root Loadable
* @param partsToSelect Parts of the Loadable to select. Null/empty indicates to select the Loadable itself
* @param restrictedPart Part to base the where-clause restriction on
* @param cachedDomainResult DomainResult to be used. Null indicates to generate the DomainResult
* @param numberOfKeysToLoad How many keys should be accounted for in the where-clause restriction?
* @param loadQueryInfluencers Any influencers (entity graph, fetch profile) to account for
* @param lockOptions Pessimistic lock options to apply
* @param jdbcParameterConsumer Consumer for all JdbcParameter references created
* @param sessionFactory The SessionFactory
*/
public static SelectStatement createSelect(
Loadable loadable,
List<? extends ModelPart> partsToSelect,
ModelPart restrictedPart,
DomainResult<?> cachedDomainResult,
int numberOfKeysToLoad,
LoadQueryInfluencers loadQueryInfluencers,
LockOptions lockOptions,
Consumer<JdbcParameter> jdbcParameterConsumer,
SessionFactoryImplementor sessionFactory) {
final var process = new LoaderSelectBuilder(
sessionFactory.getSqlTranslationEngine(),
loadable,
partsToSelect,
restrictedPart,
cachedDomainResult,
numberOfKeysToLoad,
loadQueryInfluencers,
lockOptions,
jdbcParameterConsumer
);
return process.generateSelect();
}
/**
 * Create an SQL AST select-statement restricted by several model parts at once.
 *
 * @param restrictedParts Parts to base the where-clause restrictions on; one
 * restriction is generated per part
 * @see #createSelect(Loadable, List, ModelPart, DomainResult, int, LoadQueryInfluencers, LockOptions, Consumer, SessionFactoryImplementor)
 */
public static SelectStatement createSelect(
        Loadable loadable,
        List<? extends ModelPart> partsToSelect,
        List<ModelPart> restrictedParts,
        DomainResult<?> cachedDomainResult,
        int numberOfKeysToLoad,
        LoadQueryInfluencers loadQueryInfluencers,
        LockOptions lockOptions,
        Consumer<JdbcParameter> jdbcParameterConsumer,
        SessionFactoryImplementor sessionFactory) {
    // The builder is single-use, so build and generate in one expression
    return new LoaderSelectBuilder(
            sessionFactory.getSqlTranslationEngine(),
            loadable,
            partsToSelect,
            restrictedParts,
            cachedDomainResult,
            numberOfKeysToLoad,
            loadQueryInfluencers,
            lockOptions,
            jdbcParameterConsumer
    ).generateSelect();
}
// TODO: this method is probably unnecessary if we make
// determineWhetherToForceIdSelection() a bit smarter
/**
 * Variant of {@code createSelect} where the caller decides explicitly whether
 * identifier selection must be forced, instead of deriving it from the
 * restricted parts.
 */
static SelectStatement createSelect(
        Loadable loadable,
        List<ModelPart> partsToSelect,
        boolean forceIdentifierSelection,
        List<ModelPart> restrictedParts,
        DomainResult<?> cachedDomainResult,
        int numberOfKeysToLoad,
        LoadQueryInfluencers loadQueryInfluencers,
        LockOptions lockOptions,
        Consumer<JdbcParameter> jdbcParameterConsumer,
        SessionFactoryImplementor sessionFactory) {
    // The builder is single-use, so build and generate in one expression
    return new LoaderSelectBuilder(
            sessionFactory.getSqlTranslationEngine(),
            loadable,
            partsToSelect,
            restrictedParts,
            cachedDomainResult,
            numberOfKeysToLoad,
            loadQueryInfluencers,
            lockOptions,
            determineGraphTraversalState( loadQueryInfluencers, sessionFactory.getJpaMetamodel() ),
            forceIdentifierSelection,
            jdbcParameterConsumer
    ).generateSelect();
}
/**
 * Create an SQL AST select-statement used for subselect-based CollectionLoader
 *
 * @param attributeMapping The plural-attribute being loaded
 * @param subselect The subselect details to apply
 * @param cachedDomainResult DomainResult to be used. Null indicates to generate the DomainResult?
 * @param loadQueryInfluencers Any influencers (entity graph, fetch profile) to account for
 * @param lockOptions Pessimistic lock options to apply
 * @param jdbcParameterConsumer Consumer for all JdbcParameter references created
 * @param sessionFactory The SessionFactory
 *
 * @see CollectionLoaderSubSelectFetch
 */
public static SelectStatement createSubSelectFetchSelect(
        PluralAttributeMapping attributeMapping,
        SubselectFetch subselect,
        DomainResult<?> cachedDomainResult,
        LoadQueryInfluencers loadQueryInfluencers,
        LockOptions lockOptions,
        Consumer<JdbcParameter> jdbcParameterConsumer,
        SessionFactoryImplementor sessionFactory) {
    // Restriction is always against the collection key; numberOfKeysToLoad is
    // irrelevant here (-1) because the keys come from the subselect itself
    return new LoaderSelectBuilder(
            sessionFactory.getSqlTranslationEngine(),
            attributeMapping,
            null,
            attributeMapping.getKeyDescriptor(),
            cachedDomainResult,
            -1,
            loadQueryInfluencers,
            lockOptions,
            jdbcParameterConsumer
    ).generateSelect( subselect );
}
// Context used to resolve SQL AST references during generation
private final SqlAstCreationContext creationContext;
// The root entity or plural attribute being loaded
private final Loadable loadable;
// Explicit parts to select; null/empty means select the loadable itself
private final List<? extends ModelPart> partsToSelect;
// Parts driving the where-clause key restrictions
private final List<ModelPart> restrictedParts;
// Pre-built DomainResult to reuse instead of generating one (may be null)
private final DomainResult<?> cachedDomainResult;
// Number of key values accounted for in the restriction (batch size)
private final int numberOfKeysToLoad;
// Whether identifier selection must be forced even if not otherwise required
private final boolean forceIdentifierSelection;
// Fetch profiles, entity graphs, enabled filters, etc.
private final LoadQueryInfluencers loadQueryInfluencers;
private final LockOptions lockOptions;
// Receives every JdbcParameter created while applying restrictions
private final Consumer<JdbcParameter> jdbcParameterConsumer;
// Non-null only when an entity graph is in effect for this load
private final EntityGraphTraversalState entityGraphTraversalState;
// Current depth while walking fetches (mutated during generation)
private int fetchDepth;
private RowCardinality rowCardinality = RowCardinality.SINGLE;
/**
 * Primary constructor; all other constructors delegate here.
 */
private LoaderSelectBuilder(
        SqlAstCreationContext creationContext,
        Loadable loadable,
        List<? extends ModelPart> partsToSelect,
        List<ModelPart> restrictedParts,
        DomainResult<?> cachedDomainResult,
        int numberOfKeysToLoad,
        LoadQueryInfluencers loadQueryInfluencers,
        LockOptions lockOptions,
        EntityGraphTraversalState entityGraphTraversalState,
        boolean forceIdentifierSelection,
        Consumer<JdbcParameter> jdbcParameterConsumer) {
    this.creationContext = creationContext;
    this.loadable = loadable;
    this.partsToSelect = partsToSelect;
    this.restrictedParts = restrictedParts;
    this.cachedDomainResult = cachedDomainResult;
    this.numberOfKeysToLoad = numberOfKeysToLoad;
    this.loadQueryInfluencers = loadQueryInfluencers;
    this.lockOptions = lockOptions;
    this.entityGraphTraversalState = entityGraphTraversalState;
    this.forceIdentifierSelection = forceIdentifierSelection;
    this.jdbcParameterConsumer = jdbcParameterConsumer;
    // Loading a bag collection changes the row cardinality; record that up
    // front so fetch processing can account for it
    if ( loadable instanceof PluralAttributeMapping pluralAttributeMapping ) {
        if ( pluralAttributeMapping.getMappedType().getCollectionSemantics()
                .getCollectionClassification() == CollectionClassification.BAG ) {
            rowCardinality = RowCardinality.BAG;
        }
    }
}
/**
 * Convenience constructor which derives the entity-graph traversal state and
 * the force-id-selection flag, and defaults null lock options.
 */
private LoaderSelectBuilder(
        SqlAstCreationContext creationContext,
        Loadable loadable,
        List<? extends ModelPart> partsToSelect,
        List<ModelPart> restrictedParts,
        DomainResult<?> cachedDomainResult,
        int numberOfKeysToLoad,
        LoadQueryInfluencers loadQueryInfluencers,
        LockOptions lockOptions,
        Consumer<JdbcParameter> jdbcParameterConsumer) {
    this(
            creationContext,
            loadable,
            partsToSelect,
            restrictedParts,
            cachedDomainResult,
            numberOfKeysToLoad,
            loadQueryInfluencers,
            // never hold a null LockOptions internally
            lockOptions != null ? lockOptions : new LockOptions(),
            determineGraphTraversalState( loadQueryInfluencers, creationContext.getJpaMetamodel() ),
            determineWhetherToForceIdSelection( numberOfKeysToLoad, restrictedParts ),
            jdbcParameterConsumer
    );
}
/**
 * Convenience constructor for a single restricted part; wraps it in a
 * singleton list and delegates.
 */
private LoaderSelectBuilder(
        SqlAstCreationContext creationContext,
        Loadable loadable,
        List<? extends ModelPart> partsToSelect,
        ModelPart restrictedPart,
        DomainResult<?> cachedDomainResult,
        int numberOfKeysToLoad,
        LoadQueryInfluencers loadQueryInfluencers,
        LockOptions lockOptions,
        Consumer<JdbcParameter> jdbcParameterConsumer) {
    this(
            creationContext,
            loadable,
            partsToSelect,
            singletonList( restrictedPart ),
            cachedDomainResult,
            numberOfKeysToLoad,
            loadQueryInfluencers,
            lockOptions,
            jdbcParameterConsumer
    );
}
/**
 * Decide whether the identifier must be selected even when not otherwise
 * requested: batch loads (more than one key), natural-id restrictions, and
 * restrictions based on foreign keys or non-aggregated composite ids all
 * require it.
 */
private static boolean determineWhetherToForceIdSelection(int numberOfKeysToLoad, List<ModelPart> restrictedParts) {
    // Batch loading needs the id to correlate result rows with keys
    if ( numberOfKeysToLoad > 1 ) {
        return true;
    }
    // A lone natural-id restriction also requires the identifier
    if ( restrictedParts.size() == 1
            && Objects.equals( restrictedParts.get( 0 ).getPartName(), NaturalIdMapping.PART_NAME ) ) {
        return true;
    }
    // Likewise any FK- or non-aggregated-id-based restriction
    return restrictedParts.stream().anyMatch(
            part -> part instanceof ForeignKeyDescriptor
                    || part instanceof NonAggregatedIdentifierMapping
    );
}
/**
 * Build the entity-graph traversal state for this load, or return null when
 * no effective entity graph (semantic + root graph) is in play.
 */
private static EntityGraphTraversalState determineGraphTraversalState(
        LoadQueryInfluencers loadQueryInfluencers,
        JpaMetamodel jpaMetamodel) {
    if ( loadQueryInfluencers == null ) {
        return null;
    }
    final var effectiveEntityGraph = loadQueryInfluencers.getEffectiveEntityGraph();
    if ( effectiveEntityGraph == null ) {
        return null;
    }
    final var semantic = effectiveEntityGraph.getSemantic();
    final var graph = effectiveEntityGraph.getGraph();
    // Both the semantic (fetch/load) and the graph itself must be present
    return semantic != null && graph != null
            ? new StandardEntityGraphTraversalStateImpl( semantic, graph, jpaMetamodel )
            : null;
}
/**
 * Assemble the select-statement: root table group, domain results to select,
 * key-based where-clause restrictions, plus collection filtering/ordering.
 */
private SelectStatement generateSelect() {
    final var rootNavigablePath = new NavigablePath( loadable.getRootPathName() );
    final var rootQuerySpec = new QuerySpec( true );
    // Mark the root path as the target for any lock-related SQL
    rootQuerySpec.applyRootPathForLocking( rootNavigablePath );
    final var sqlAstCreationState = createSqlAstCreationState( rootQuerySpec );
    final var rootTableGroup = buildRootTableGroup( rootNavigablePath, rootQuerySpec, sqlAstCreationState );
    // Decide what to select: explicitly requested parts, a cached
    // DomainResult, or (default) a DomainResult for the loadable itself
    final List<DomainResult<?>> domainResults;
    if ( partsToSelect != null && !partsToSelect.isEmpty() ) {
        domainResults = buildRequestedDomainResults( rootNavigablePath, sqlAstCreationState, rootTableGroup );
    }
    else if ( cachedDomainResult != null ) {
        domainResults = singletonList( cachedDomainResult );
    }
    else {
        final var domainResult = loadable.createDomainResult(
                rootNavigablePath,
                rootTableGroup,
                null,
                sqlAstCreationState
        );
        domainResults = singletonList( domainResult );
    }
    // One key-based restriction per restricted part (id, fk, natural-id, ...)
    for ( var restrictedPart : restrictedParts ) {
        applyRestriction(
                rootQuerySpec,
                rootNavigablePath,
                rootTableGroup,
                restrictedPart,
                restrictedPart.getJdbcTypeCount(),
                jdbcParameterConsumer,
                sqlAstCreationState
        );
    }
    if ( loadable instanceof PluralAttributeMapping pluralAttributeMapping ) {
        // Collection loads additionally honor enabled filters and the
        // collection's order-by fragments
        applyFiltering( rootQuerySpec, rootTableGroup, pluralAttributeMapping, sqlAstCreationState );
        applyOrdering( rootQuerySpec, rootTableGroup, pluralAttributeMapping, sqlAstCreationState );
    }
    else {
        applyFiltering( rootQuerySpec, rootTableGroup, (Restrictable) loadable, sqlAstCreationState );
    }
    return new SelectStatement( rootQuerySpec, domainResults );
}
/**
 * Build one DomainResult per explicitly requested part. Parts which produce
 * their own table group (associations) are left-joined to the root; simple
 * parts read from the root table group directly.
 */
private List<DomainResult<?>> buildRequestedDomainResults(
        NavigablePath rootNavigablePath, LoaderSqlAstCreationState sqlAstCreationState, TableGroup rootTableGroup) {
    final List<DomainResult<?>> domainResults;
    domainResults = new ArrayList<>( partsToSelect.size() );
    for ( var part : partsToSelect ) {
        final var navigablePath = rootNavigablePath.append( part.getPartName() );
        final TableGroup tableGroup;
        if ( part instanceof TableGroupJoinProducer tableGroupJoinProducer ) {
            // Association part: create and register a left join so the part's
            // columns are reachable
            final var tableGroupJoin = tableGroupJoinProducer.createTableGroupJoin(
                    navigablePath,
                    rootTableGroup,
                    null,
                    null,
                    SqlAstJoinType.LEFT,
                    true,
                    false,
                    sqlAstCreationState
            );
            rootTableGroup.addTableGroupJoin( tableGroupJoin );
            tableGroup = tableGroupJoin.getJoinedGroup();
            // Registration must happen before createDomainResult so path
            // lookups resolve the new group
            sqlAstCreationState.getFromClauseAccess().registerTableGroup( navigablePath, tableGroup );
            registerPluralTableGroupParts( sqlAstCreationState.getFromClauseAccess(), tableGroup );
        }
        else {
            tableGroup = rootTableGroup;
        }
        domainResults.add(
                part.createDomainResult(
                        navigablePath,
                        tableGroup,
                        null,
                        sqlAstCreationState
                )
        );
    }
    return domainResults;
}
/**
 * Create the root table group for the loadable, add it to the from-clause and
 * register it (plus any nested plural parts) for path-based lookup.
 */
private TableGroup buildRootTableGroup(
        NavigablePath rootNavigablePath, QuerySpec rootQuerySpec, LoaderSqlAstCreationState sqlAstCreationState) {
    final var rootTableGroup = loadable.createRootTableGroup(
            true,
            rootNavigablePath,
            null,
            null,
            // additional predicates produced while building the group go
            // straight onto the query spec
            () -> rootQuerySpec::applyPredicate,
            sqlAstCreationState
    );
    rootQuerySpec.getFromClause().addRoot( rootTableGroup );
    sqlAstCreationState.getFromClauseAccess().registerTableGroup( rootNavigablePath, rootTableGroup );
    registerPluralTableGroupParts( sqlAstCreationState.getFromClauseAccess(), rootTableGroup );
    return rootTableGroup;
}
/**
 * Create the SQL AST creation state for this build; fetch processing is
 * routed back into {@link #visitFetches}.
 */
private LoaderSqlAstCreationState createSqlAstCreationState(QuerySpec rootQuerySpec) {
    return new LoaderSqlAstCreationState(
            rootQuerySpec,
            new SqlAliasBaseManager(),
            new SimpleFromClauseAccessImpl(),
            lockOptions,
            this::visitFetches,
            forceIdentifierSelection,
            loadQueryInfluencers,
            creationContext
    );
}
/**
 * Apply the key-based where-clause restriction for one restricted part.
 * Single-column keys use {@code col = ?} (one key) or {@code col in (?,...)}
 * (batch); composite keys use a tuple-based in-list
 * {@code (c1,c2) in ((?,?),...)}. Every created parameter is handed to
 * {@code jdbcParameterConsumer} so the caller can bind values later.
 */
private void applyRestriction(
        QuerySpec rootQuerySpec,
        NavigablePath rootNavigablePath,
        TableGroup rootTableGroup,
        ModelPart restrictedPart,
        int numberColumns,
        Consumer<JdbcParameter> jdbcParameterConsumer,
        LoaderSqlAstCreationState sqlAstCreationState) {
    final var sqlExpressionResolver = sqlAstCreationState.getSqlExpressionResolver();
    final var navigablePath =
            rootNavigablePath.append( restrictedPart.getNavigableRole().getNavigableName() );
    if ( numberColumns == 1 ) {
        // Single-column key
        restrictedPart.forEachSelectable(
                (columnIndex, selection) -> {
                    final var tableReference =
                            rootTableGroup.resolveTableReference( navigablePath,
                                    selection.getContainingTableExpression() );
                    final var columnRef =
                            (ColumnReference)
                                    sqlExpressionResolver.resolveSqlExpression( tableReference, selection );
                    if ( numberOfKeysToLoad == 1 ) {
                        // col = ?
                        final var jdbcParameter = new SqlTypedMappingJdbcParameter( selection );
                        jdbcParameterConsumer.accept( jdbcParameter );
                        rootQuerySpec.applyPredicate(
                                new ComparisonPredicate( columnRef, ComparisonOperator.EQUAL, jdbcParameter )
                        );
                    }
                    else {
                        // col in (?, ?, ...) - one parameter per key to load
                        final var predicate = new InListPredicate( columnRef );
                        for ( int i = 0; i < numberOfKeysToLoad; i++ ) {
                            final var jdbcParameter = new SqlTypedMappingJdbcParameter( selection );
                            jdbcParameterConsumer.accept( jdbcParameter );
                            predicate.addExpression( jdbcParameter );
                        }
                        rootQuerySpec.applyPredicate( predicate );
                    }
                }
        );
    }
    else {
        // Composite key: build the column tuple once...
        final List<ColumnReference> columnReferences = new ArrayList<>( numberColumns );
        restrictedPart.forEachSelectable(
                (columnIndex, selection) -> {
                    final var tableReference =
                            rootTableGroup.resolveTableReference( navigablePath,
                                    selection.getContainingTableExpression() );
                    columnReferences.add(
                            (ColumnReference)
                                    sqlExpressionResolver.resolveSqlExpression( tableReference, selection )
                    );
                }
        );
        final SqlTuple tuple = new SqlTuple( columnReferences, restrictedPart );
        final InListPredicate predicate = new InListPredicate( tuple );
        // ...then one parameter tuple per key to load
        for ( int i = 0; i < numberOfKeysToLoad; i++ ) {
            final List<JdbcParameter> tupleParams = new ArrayList<>( numberColumns );
            restrictedPart.forEachSelectable(
                    (columnIndex, selection) -> {
                        final JdbcParameter jdbcParameter = new SqlTypedMappingJdbcParameter( selection );
                        jdbcParameterConsumer.accept( jdbcParameter );
                        tupleParams.add( jdbcParameter );
                    }
            );
            final SqlTuple paramTuple = new SqlTuple( tupleParams, restrictedPart );
            predicate.addExpression( paramTuple );
        }
        rootQuerySpec.applyPredicate( predicate );
    }
}
/**
 * Apply the enabled-filter restrictions (plain and many-to-many) of a plural
 * attribute to the query spec.
 */
private void applyFiltering(
        QuerySpec querySpec,
        TableGroup tableGroup,
        PluralAttributeMapping pluralAttributeMapping,
        SqlAstCreationState astCreationState) {
    // Only apply restrictions for root table groups,
    // because for table group joins the restriction is applied
    // via PluralAttributeMappingImpl.createTableGroupJoin
    assert tableGroup.getNavigablePath().getParent() == null;
    pluralAttributeMapping.applyBaseRestrictions(
            querySpec::applyPredicate,
            tableGroup,
            true,
            loadQueryInfluencers.getEnabledFilters(),
            false,
            null,
            astCreationState
    );
    pluralAttributeMapping.applyBaseManyToManyRestrictions(
            querySpec::applyPredicate,
            tableGroup,
            true,
            loadQueryInfluencers.getEnabledFilters(),
            null,
            astCreationState
    );
}
/**
 * Apply the enabled-filter restrictions of any Restrictable (e.g. an entity
 * mapping) to the given predicate container.
 */
private void applyFiltering(
        PredicateContainer predicateContainer,
        TableGroup tableGroup,
        Restrictable restrictable,
        SqlAstCreationState astCreationState) {
    restrictable.applyBaseRestrictions(
            predicateContainer::applyPredicate,
            tableGroup,
            true,
            loadQueryInfluencers.getEnabledFilters(),
            true,
            null,
            astCreationState
    );
}
/**
 * Apply the plural attribute's order-by fragments (both the plain one and,
 * for many-to-many associations, the element-side one) to the query spec.
 */
private void applyOrdering(
        QuerySpec querySpec,
        TableGroup tableGroup,
        PluralAttributeMapping pluralAttributeMapping,
        SqlAstCreationState astCreationState) {
    final var ordering = pluralAttributeMapping.getOrderByFragment();
    final var manyToManyOrdering = pluralAttributeMapping.getManyToManyOrderByFragment();
    if ( ordering != null ) {
        applyOrdering( querySpec, tableGroup, ordering, astCreationState );
    }
    if ( manyToManyOrdering != null ) {
        applyOrdering( querySpec, tableGroup, manyToManyOrdering, astCreationState );
    }
}
/**
 * Apply a single order-by fragment to the query spec.
 */
private void applyOrdering(
        QuerySpec querySpec,
        TableGroup tableGroup,
        OrderByFragment orderByFragment,
        SqlAstCreationState astCreationState) {
    orderByFragment.apply( querySpec, tableGroup, astCreationState );
}
/**
 * Produce the fetches for a fetch parent: key fetchables first (for non-root
 * parents), then regular fetchables, with bag-typed fetchables processed
 * last because they cannot be joined and trigger subsequent selects.
 */
private ImmutableFetchList visitFetches(FetchParent fetchParent, LoaderSqlAstCreationState creationState) {
    final var fetches = new ImmutableFetchList.Builder( fetchParent.getReferencedMappingContainer() );
    final var processor = createFetchableConsumer( fetchParent, creationState, fetches );
    final var referencedMappingContainer = fetchParent.getReferencedMappingContainer();
    if ( fetchParent.getNavigablePath().getParent() != null ) {
        // Non-root parent: process key fetchables (e.g. fk targets) first
        final int size = referencedMappingContainer.getNumberOfKeyFetchables();
        for ( int i = 0; i < size; i++ ) {
            processor.accept( referencedMappingContainer.getKeyFetchable( i ), true, false );
        }
    }
    final int size = referencedMappingContainer.getNumberOfFetchables();
    List<Fetchable> bagFetchables = null;
    for ( int i = 0; i < size; i++ ) {
        final Fetchable fetchable = referencedMappingContainer.getFetchable( i );
        if ( isBag( fetchable ) ) {
            if ( bagFetchables == null ) {
                bagFetchables = new ArrayList<>();
            }
            // Delay processing of bag fetchables at last since they cannot be joined and will create subsequent selects
            bagFetchables.add( fetchable );
        }
        else {
            processor.accept( fetchable, false, false );
        }
    }
    if ( bagFetchables != null ) {
        for ( Fetchable fetchable : bagFetchables ) {
            processor.accept( fetchable, false, true );
        }
    }
    return fetches.build();
}
/**
 * Whether the fetchable is a plural attribute classified as a bag.
 */
private boolean isBag(Fetchable fetchable) {
    if ( !isPluralAttributeMapping( fetchable ) ) {
        return false;
    }
    final var classification = ( (PluralAttributeMapping) fetchable )
            .getMappedType().getCollectionSemantics().getCollectionClassification();
    return classification == CollectionClassification.BAG;
}
/**
 * Whether the fetchable is an attribute mapping for a plural attribute.
 */
private boolean isPluralAttributeMapping(Fetchable fetchable) {
    final var mapping = fetchable.asAttributeMapping();
    if ( mapping == null ) {
        return false;
    }
    return mapping.isPluralAttributeMapping();
}
@FunctionalInterface
private
|
LoaderSelectBuilder
|
java
|
apache__kafka
|
group-coordinator/src/test/java/org/apache/kafka/coordinator/group/assignor/SimpleAssignorTest.java
|
{
"start": 2513,
"end": 36283
}
|
class ____ {
// Shared fixture data: four topics (random ids, stable names) and three
// member ids reused across the assignment tests below.
private static final Uuid TOPIC_1_UUID = Uuid.randomUuid();
private static final Uuid TOPIC_2_UUID = Uuid.randomUuid();
private static final Uuid TOPIC_3_UUID = Uuid.randomUuid();
private static final Uuid TOPIC_4_UUID = Uuid.randomUuid();
private static final String TOPIC_1_NAME = "topic1";
private static final String TOPIC_2_NAME = "topic2";
private static final String TOPIC_3_NAME = "topic3";
private static final String TOPIC_4_NAME = "topic4";
private static final String MEMBER_A = "A";
private static final String MEMBER_B = "B";
private static final String MEMBER_C = "C";
// Assignor under test; stateless between tests unless noted otherwise
private final SimpleAssignor assignor = new SimpleAssignor();
@Test
public void testName() {
    // The assignor advertises itself under the fixed name "simple".
    final String expectedName = "simple";
    assertEquals(expectedName, assignor.name());
}
@Test
public void testAssignWithEmptyMembers() {
    // With no members at all, both subscription models must yield an
    // empty assignment.
    SubscribedTopicDescriberImpl describer = new SubscribedTopicDescriberImpl(
        CoordinatorMetadataImage.EMPTY
    );

    GroupSpec homogeneousSpec = new GroupSpecImpl(
        Map.of(),
        HOMOGENEOUS,
        Map.of()
    );
    assertEquals(Map.of(), assignor.assign(homogeneousSpec, describer).members());

    GroupSpec heterogeneousSpec = new GroupSpecImpl(
        Map.of(),
        HETEROGENEOUS,
        Map.of()
    );
    assertEquals(Map.of(), assignor.assign(heterogeneousSpec, describer).members());
}
@Test
public void testAssignWithNoSubscribedTopic() {
    // A topic exists, but the only member subscribes to nothing, so the
    // computed assignment must be empty.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .build();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    Map<String, MemberSubscriptionAndAssignmentImpl> members = Map.of(
        MEMBER_A,
        new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            Set.of(),
            Assignment.EMPTY
        )
    );
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of()
    );
    GroupAssignment groupAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    assertEquals(Map.of(), groupAssignment.members());
}
@Test
public void testAssignWithSubscribedToNonExistentTopic() {
    // Subscribing to a topic id that is absent from the metadata image must
    // fail the assignment with a PartitionAssignorException.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .build();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    Map<String, MemberSubscriptionAndAssignmentImpl> members = Map.of(
        MEMBER_A,
        new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            Set.of(TOPIC_2_UUID),
            Assignment.EMPTY
        )
    );
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of()
    );
    assertThrows(PartitionAssignorException.class,
        () -> assignor.assign(groupSpec, subscribedTopicMetadata));
}
@Test
public void testAssignWithTwoMembersAndTwoTopicsHomogeneous() {
    // Two members with an identical two-topic subscription: every partition
    // of both topics must end up assigned to some member.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .build();
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    // LinkedHashSet keeps a deterministic topic iteration order for the assignor
    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);
    topicsSubscription.add(TOPIC_3_UUID);
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));
    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of()
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    assertEveryPartitionGetsAssignment(5, computedAssignment);
}
@Test
public void testAssignWithTwoMembersAndTwoTopicsHomogeneousWithAllowedMap() {
    // Same as the plain homogeneous case, but with an explicit allowed-partition
    // map: topic 3 allows only partitions 0 and 1, so 5 partitions are assignable.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 3)
        .build();
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    // LinkedHashSet keeps a deterministic topic iteration order for the assignor
    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);
    topicsSubscription.add(TOPIC_3_UUID);
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));
    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of(),
        Optional.of(
            Map.of(
                TOPIC_1_UUID, Set.of(0, 1, 2),
                TOPIC_3_UUID, Set.of(0, 1) // but not 2
            )
        )
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    assertEveryPartitionGetsAssignment(5, computedAssignment);
}
@Test
public void testAssignWithTwoMembersAndTwoTopicsHomogeneousWithNonAssignableTopic() {
    // The allowed-partition map omits topic 3 entirely, making it
    // non-assignable: only topic 1's 3 partitions are distributed.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .build();
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    // LinkedHashSet keeps a deterministic topic iteration order for the assignor
    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);
    topicsSubscription.add(TOPIC_3_UUID);
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));
    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        topicsSubscription,
        Assignment.EMPTY
    ));
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        Map.of(),
        Optional.of(
            Map.of(TOPIC_1_UUID, Set.of(0, 1, 2))
        )
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    assertEveryPartitionGetsAssignment(3, computedAssignment);
}
@Test
public void testAssignWithThreeMembersThreeTopicsHeterogeneous() {
    // Heterogeneous subscriptions: A -> {T1,T2}, B -> {T3}, C -> {T2,T3}.
    // All 8 partitions across the three topics must be assigned.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .build();
    Set<Uuid> memberATopicsSubscription = new LinkedHashSet<>();
    memberATopicsSubscription.add(TOPIC_1_UUID);
    memberATopicsSubscription.add(TOPIC_2_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription,
        Assignment.EMPTY
    ));
    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(TOPIC_3_UUID),
        Assignment.EMPTY
    ));
    Set<Uuid> memberCTopicsSubscription = new LinkedHashSet<>();
    memberCTopicsSubscription.add(TOPIC_2_UUID);
    memberCTopicsSubscription.add(TOPIC_3_UUID);
    members.put(MEMBER_C, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberCTopicsSubscription,
        Assignment.EMPTY
    ));
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HETEROGENEOUS,
        Map.of()
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    // T1: 3 partitions + T2: 3 partitions + T3: 2 partitions = 8 partitions
    assertEveryPartitionGetsAssignment(8, computedAssignment);
}
@Test
public void testAssignWithThreeMembersThreeTopicsHeterogeneousWithAllowedMap() {
    // Heterogeneous subscriptions with an allowed-partition map that
    // excludes one partition from each topic, leaving 5 assignable.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
        .build();
    Set<Uuid> memberATopicsSubscription = new LinkedHashSet<>();
    memberATopicsSubscription.add(TOPIC_1_UUID);
    memberATopicsSubscription.add(TOPIC_2_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription,
        Assignment.EMPTY
    ));
    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(TOPIC_3_UUID),
        Assignment.EMPTY
    ));
    Set<Uuid> memberCTopicsSubscription = new LinkedHashSet<>();
    memberCTopicsSubscription.add(TOPIC_2_UUID);
    memberCTopicsSubscription.add(TOPIC_3_UUID);
    members.put(MEMBER_C, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberCTopicsSubscription,
        Assignment.EMPTY
    ));
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HETEROGENEOUS,
        Map.of(),
        Optional.of(
            Map.of(
                TOPIC_1_UUID, Set.of(0, 1), // but not 2
                TOPIC_2_UUID, Set.of(0, 2), // but not 1
                TOPIC_3_UUID, Set.of(1) // but not 0
            )
        )
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    // T1: 2 partitions + T2: 2 partitions + T3: 1 partition = 5 partitions
    assertEveryPartitionGetsAssignment(5, computedAssignment);
}
@Test
public void testAssignWithThreeMembersThreeTopicsHeterogeneousWithNonAssignableTopic() {
    // Topic 3 is omitted from the allowed-partition map, so member B (which
    // only subscribes to T3) must receive an empty assignment and the
    // remaining 6 partitions go to A and C exactly as pinned below.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
        .addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2) // non-assignable
        .build();
    Set<Uuid> memberATopicsSubscription = new LinkedHashSet<>();
    memberATopicsSubscription.add(TOPIC_1_UUID);
    memberATopicsSubscription.add(TOPIC_2_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription,
        Assignment.EMPTY
    ));
    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(TOPIC_3_UUID),
        Assignment.EMPTY
    ));
    Set<Uuid> memberCTopicsSubscription = new LinkedHashSet<>();
    memberCTopicsSubscription.add(TOPIC_2_UUID);
    memberCTopicsSubscription.add(TOPIC_3_UUID);
    members.put(MEMBER_C, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberCTopicsSubscription,
        Assignment.EMPTY
    ));
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HETEROGENEOUS,
        Map.of(),
        Optional.of(
            Map.of(
                TOPIC_1_UUID, Set.of(0, 1, 2),
                TOPIC_2_UUID, Set.of(0, 1, 2)
            )
        )
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    Map<String, Map<Uuid, Set<Integer>>> expectedAssignment = new HashMap<>();
    expectedAssignment.put(MEMBER_A, mkAssignment(
        mkTopicAssignment(TOPIC_1_UUID, 0, 1, 2),
        mkTopicAssignment(TOPIC_2_UUID, 0, 2)
    ));
    expectedAssignment.put(MEMBER_B, Map.of());
    expectedAssignment.put(MEMBER_C, mkAssignment(
        mkTopicAssignment(TOPIC_2_UUID, 1)
    ));
    // T1: 3 partitions + T2: 3 partitions + T3: 2 partitions(non-assignable) = 6 partitions
    assertEveryPartitionGetsAssignment(6, computedAssignment);
    assertAssignment(expectedAssignment, computedAssignment);
}
@Test
public void testAssignWithOneMemberNoAssignedTopicHeterogeneous() {
    // Member B subscribes to nothing, so member A must receive every
    // partition of both topics and B gets an empty assignment.
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
        .addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 2)
        .build();
    Set<Uuid> memberATopicsSubscription = new LinkedHashSet<>();
    memberATopicsSubscription.add(TOPIC_1_UUID);
    memberATopicsSubscription.add(TOPIC_2_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    members.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        memberATopicsSubscription,
        Assignment.EMPTY
    ));
    members.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
        Optional.empty(),
        Optional.empty(),
        Set.of(),
        Assignment.EMPTY
    ));
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HETEROGENEOUS,
        Map.of()
    );
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    GroupAssignment computedAssignment = assignor.assign(
        groupSpec,
        subscribedTopicMetadata
    );
    Map<String, Map<Uuid, Set<Integer>>> expectedAssignment = new HashMap<>();
    expectedAssignment.put(MEMBER_A, mkAssignment(
        mkTopicAssignment(TOPIC_1_UUID, 0, 1, 2),
        mkTopicAssignment(TOPIC_2_UUID, 0, 1)));
    expectedAssignment.put(MEMBER_B, mkAssignment());
    // T1: 3 partitions + T2: 2 partitions = 5 partitions
    assertEveryPartitionGetsAssignment(5, computedAssignment);
    assertAssignment(expectedAssignment, computedAssignment);
}
@Test
public void testIncrementalAssignmentIncreasingMembersHomogeneous() {
    // Grow the group one member at a time (past the partition count), feeding
    // each round's result back as the current assignment; every partition
    // must remain assigned after every round.
    final int numPartitions = 24;
    final int numMembers = 101;
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, numPartitions)
        .build();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    SimpleAssignor assignor = new SimpleAssignor();
    // Increase the number of members one a time, checking that the partitions are assigned as expected
    for (int member = 0; member < numMembers; member++) {
        String newMemberId = "M" + member;
        members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            Assignment.EMPTY
        ));
        GroupSpec groupSpec = new GroupSpecImpl(
            members,
            HOMOGENEOUS,
            new HashMap<>()
        );
        GroupAssignment computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
        assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);
        // Feed the computed assignment back as each member's current state
        computedAssignment.members().forEach((memberId, partitions) -> members.put(memberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            new Assignment(partitions.partitions())
        )));
    }
}
@Test
public void testIncrementalAssignmentDecreasingMembersHomogeneous() {
    // Start with a full group, then shrink it one member at a time, feeding
    // each round's result back as the current assignment; every partition
    // must remain assigned after every round.
    final int numPartitions = 24;
    final int numMembers = 101;
    MetadataImage metadataImage = new MetadataImageBuilder()
        .addTopic(TOPIC_1_UUID, TOPIC_1_NAME, numPartitions)
        .build();
    SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
        new KRaftCoordinatorMetadataImage(metadataImage)
    );
    Set<Uuid> topicsSubscription = new LinkedHashSet<>();
    topicsSubscription.add(TOPIC_1_UUID);
    Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
    SimpleAssignor assignor = new SimpleAssignor();
    for (int member = 0; member < numMembers; member++) {
        String newMemberId = "M" + member;
        members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            Assignment.EMPTY
        ));
    }
    GroupSpec groupSpec = new GroupSpecImpl(
        members,
        HOMOGENEOUS,
        new HashMap<>()
    );
    GroupAssignment computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
    assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);
    // Seed each member's current assignment from the initial computation
    for (int member = 0; member < numMembers; member++) {
        String newMemberId = "M" + member;
        members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            new Assignment(computedAssignment.members().get(newMemberId).partitions()))
        );
    }
    // Decrease the number of members one a time, checking that the partitions are assigned as expected
    for (int member = numMembers - 1; member > 0; member--) {
        String newMemberId = "M" + member;
        members.remove(newMemberId);
        groupSpec = new GroupSpecImpl(
            members,
            HOMOGENEOUS,
            new HashMap<>()
        );
        computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
        assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);
        computedAssignment.members().forEach((memberId, partitions) -> members.put(memberId, new MemberSubscriptionAndAssignmentImpl(
            Optional.empty(),
            Optional.empty(),
            topicsSubscription,
            new Assignment(partitions.partitions())
        )));
    }
}
@Test
public void testAssignWithCurrentAssignmentHeterogeneous() {
// Current assignment setup - 3 members A - {T1, T2}, B - {T3}, C - {T2, T3}.
MetadataImage metadataImage1 = new MetadataImageBuilder()
.addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
.addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
.addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
.build();
Set<Uuid> memberATopicsSubscription1 = new LinkedHashSet<>();
memberATopicsSubscription1.add(TOPIC_1_UUID);
memberATopicsSubscription1.add(TOPIC_2_UUID);
Map<String, MemberSubscriptionAndAssignmentImpl> members1 = new HashMap<>();
members1.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
memberATopicsSubscription1,
Assignment.EMPTY
));
members1.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
Set.of(TOPIC_3_UUID),
Assignment.EMPTY
));
Set<Uuid> memberCTopicsSubscription1 = new LinkedHashSet<>();
memberCTopicsSubscription1.add(TOPIC_2_UUID);
memberCTopicsSubscription1.add(TOPIC_3_UUID);
members1.put(MEMBER_C, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
memberCTopicsSubscription1,
Assignment.EMPTY
));
GroupSpec groupSpec1 = new GroupSpecImpl(
members1,
HETEROGENEOUS,
Map.of()
);
SubscribedTopicDescriberImpl subscribedTopicMetadata1 = new SubscribedTopicDescriberImpl(
new KRaftCoordinatorMetadataImage(metadataImage1)
);
GroupAssignment computedAssignment1 = assignor.assign(
groupSpec1,
subscribedTopicMetadata1
);
assertEveryPartitionGetsAssignment(8, computedAssignment1);
// New assignment setup - 2 members A - {T1, T2, T3}, B - {T3, T4}.
MetadataImage metadataImage2 = new MetadataImageBuilder()
.addTopic(TOPIC_1_UUID, TOPIC_1_NAME, 3)
.addTopic(TOPIC_2_UUID, TOPIC_2_NAME, 3)
.addTopic(TOPIC_3_UUID, TOPIC_3_NAME, 2)
.addTopic(TOPIC_4_UUID, TOPIC_4_NAME, 1)
.build();
Map<String, MemberSubscriptionAndAssignmentImpl> members2 = new HashMap<>();
Set<Uuid> memberATopicsSubscription2 = new LinkedHashSet<>();
memberATopicsSubscription2.add(TOPIC_1_UUID);
memberATopicsSubscription2.add(TOPIC_2_UUID);
memberATopicsSubscription2.add(TOPIC_3_UUID);
Set<Uuid> memberBTopicsSubscription2 = new LinkedHashSet<>();
memberBTopicsSubscription2.add(TOPIC_3_UUID);
memberBTopicsSubscription2.add(TOPIC_4_UUID);
members2.put(MEMBER_A, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
memberATopicsSubscription2,
new Assignment(mkAssignment(
mkTopicAssignment(TOPIC_1_UUID, 0, 1, 2),
mkTopicAssignment(TOPIC_2_UUID, 0, 2)))
));
members2.put(MEMBER_B, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
memberBTopicsSubscription2,
new Assignment(mkAssignment(
mkTopicAssignment(TOPIC_3_UUID, 0, 1)))
));
GroupSpec groupSpec2 = new GroupSpecImpl(
members2,
HETEROGENEOUS,
Map.of()
);
SubscribedTopicDescriberImpl subscribedTopicMetadata2 = new SubscribedTopicDescriberImpl(
new KRaftCoordinatorMetadataImage(metadataImage2)
);
GroupAssignment computedAssignment2 = assignor.assign(
groupSpec2,
subscribedTopicMetadata2
);
assertEveryPartitionGetsAssignment(9, computedAssignment2);
}
@Test
public void testIncrementalAssignmentIncreasingMembersHeterogeneous() {
final int numPartitions = 24;
final int numMembers = 101;
CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
.addTopic(TOPIC_1_UUID, TOPIC_1_NAME, numPartitions / 2)
.addTopic(TOPIC_2_UUID, TOPIC_2_NAME, numPartitions / 3)
.addTopic(TOPIC_3_UUID, TOPIC_3_NAME, numPartitions / 6)
.buildCoordinatorMetadataImage();
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
metadataImage
);
ArrayList<Set<Uuid>> topicsSubscriptions = new ArrayList<>(3);
Set<Uuid> topicsSubscription1 = new LinkedHashSet<>();
topicsSubscription1.add(TOPIC_1_UUID);
topicsSubscription1.add(TOPIC_2_UUID);
topicsSubscription1.add(TOPIC_3_UUID);
topicsSubscriptions.add(topicsSubscription1);
Set<Uuid> topicsSubscription2 = new LinkedHashSet<>();
topicsSubscription2.add(TOPIC_2_UUID);
topicsSubscriptions.add(topicsSubscription2);
Set<Uuid> topicsSubscription3 = new LinkedHashSet<>();
topicsSubscription3.add(TOPIC_3_UUID);
topicsSubscriptions.add(topicsSubscription3);
Set<Uuid> topicsSubscription4 = new LinkedHashSet<>();
topicsSubscription4.add(TOPIC_1_UUID);
topicsSubscription4.add(TOPIC_2_UUID);
topicsSubscriptions.add(topicsSubscription4);
int numTopicsSubscriptions = 4;
Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
SimpleAssignor assignor = new SimpleAssignor();
// Increase the number of members one a time, checking that the partitions are assigned as expected
for (int member = 0; member < numMembers; member++) {
String newMemberId = "M" + member;
members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
topicsSubscriptions.get(member % numTopicsSubscriptions),
Assignment.EMPTY
));
GroupSpec groupSpec = new GroupSpecImpl(
members,
HETEROGENEOUS,
new HashMap<>()
);
GroupAssignment computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);
for (int m = 0; m < member; m++) {
String memberId = "M" + m;
members.put(memberId, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
topicsSubscriptions.get(m % numTopicsSubscriptions),
new Assignment(computedAssignment.members().get(memberId).partitions())
));
}
}
}
@Test
public void testIncrementalAssignmentDecreasingMembersHeterogeneous() {
final int numPartitions = 24;
final int numMembers = 101;
CoordinatorMetadataImage metadataImage = new MetadataImageBuilder()
.addTopic(TOPIC_1_UUID, TOPIC_1_NAME, numPartitions / 2)
.addTopic(TOPIC_2_UUID, TOPIC_2_NAME, numPartitions / 3)
.addTopic(TOPIC_3_UUID, TOPIC_3_NAME, numPartitions / 6)
.buildCoordinatorMetadataImage();
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
metadataImage
);
ArrayList<Set<Uuid>> topicsSubscriptions = new ArrayList<>(3);
Set<Uuid> topicsSubscription1 = new LinkedHashSet<>();
topicsSubscription1.add(TOPIC_1_UUID);
topicsSubscription1.add(TOPIC_2_UUID);
topicsSubscription1.add(TOPIC_3_UUID);
topicsSubscriptions.add(topicsSubscription1);
Set<Uuid> topicsSubscription2 = new LinkedHashSet<>();
topicsSubscription2.add(TOPIC_2_UUID);
topicsSubscriptions.add(topicsSubscription2);
Set<Uuid> topicsSubscription3 = new LinkedHashSet<>();
topicsSubscription3.add(TOPIC_3_UUID);
topicsSubscriptions.add(topicsSubscription3);
Set<Uuid> topicsSubscription4 = new LinkedHashSet<>();
topicsSubscription4.add(TOPIC_1_UUID);
topicsSubscription4.add(TOPIC_2_UUID);
topicsSubscriptions.add(topicsSubscription4);
int numTopicsSubscriptions = 4;
Map<String, MemberSubscriptionAndAssignmentImpl> members = new HashMap<>();
SimpleAssignor assignor = new SimpleAssignor();
for (int member = 0; member < numMembers; member++) {
String newMemberId = "M" + member;
members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
topicsSubscriptions.get(member % numTopicsSubscriptions),
Assignment.EMPTY
));
}
GroupSpec groupSpec = new GroupSpecImpl(
members,
HETEROGENEOUS,
new HashMap<>()
);
GroupAssignment computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);
for (int member = 0; member < numMembers; member++) {
String newMemberId = "M" + member;
members.put(newMemberId, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
topicsSubscriptions.get(member % numTopicsSubscriptions),
new Assignment(computedAssignment.members().get(newMemberId).partitions()))
);
}
// Decrease the number of members one a time, checking that the partitions are assigned as expected
for (int member = numMembers - 1; member > 0; member--) {
String newMemberId = "M" + member;
members.remove(newMemberId);
groupSpec = new GroupSpecImpl(
members,
HETEROGENEOUS,
new HashMap<>()
);
computedAssignment = assignor.assign(groupSpec, subscribedTopicMetadata);
assertEveryPartitionGetsAssignment(numPartitions, computedAssignment);
for (int m = 0; m < member; m++) {
String memberId = "M" + m;
members.put(memberId, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
topicsSubscriptions.get(m % numTopicsSubscriptions),
new Assignment(computedAssignment.members().get(memberId).partitions())
));
}
}
}
private void assertAssignment(
Map<String, Map<Uuid, Set<Integer>>> expectedAssignment,
GroupAssignment computedGroupAssignment
) {
assertEquals(expectedAssignment.size(), computedGroupAssignment.members().size());
for (String memberId : computedGroupAssignment.members().keySet()) {
Map<Uuid, Set<Integer>> computedAssignmentForMember = computedGroupAssignment.members().get(memberId).partitions();
assertEquals(expectedAssignment.get(memberId), computedAssignmentForMember);
}
}
private void assertEveryPartitionGetsAssignment(
int expectedPartitions,
GroupAssignment computedGroupAssignment
) {
Map<String, MemberAssignment> memberAssignments = computedGroupAssignment.members();
Set<TopicIdPartition> topicPartitionAssignments = new HashSet<>();
memberAssignments.values().forEach(memberAssignment -> {
Map<Uuid, Set<Integer>> topicIdPartitions = memberAssignment.partitions();
topicIdPartitions.forEach((topicId, partitions) ->
partitions.forEach(partition -> topicPartitionAssignments.add(new TopicIdPartition(topicId, partition)))
);
});
assertEquals(expectedPartitions, topicPartitionAssignments.size());
}
}
|
SimpleAssignorTest
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/endpoint/annotation/EndpointDiscovererTests.java
|
{
"start": 19885,
"end": 20091
}
|
class ____ extends DiscovererEndpointFilter {
SpecializedEndpointFilter() {
super(SpecializedEndpointDiscoverer.class);
}
}
@SpecializedEndpoint(id = "specialized")
static
|
SpecializedEndpointFilter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/registry/selector/spi/NamedStrategyContributions.java
|
{
"start": 269,
"end": 1088
}
|
interface ____ {
/**
* Registers a named implementor of a particular strategy contract.
*
* @param strategy The strategy contract.
* @param implementation The implementation class.
* @param names The registration names.
*
* @param <T> The strategy type.
*/
<T> void contributeStrategyImplementor(Class<T> strategy, Class<? extends T> implementation, String... names);
/**
* Un-registers a named implementor of a particular strategy contract. Un-registers all named registrations
* for the given strategy contract naming the given class.
*
* @param strategy The strategy contract.
* @param implementation The implementation class.
*
* @param <T> The strategy type.
*/
<T> void removeStrategyImplementor(Class<T> strategy, Class<? extends T> implementation);
}
|
NamedStrategyContributions
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/ParallelSource.java
|
{
"start": 2793,
"end": 10285
}
|
class ____<T> implements InnerConsumer<T> {
final CoreSubscriber<? super T>[] subscribers;
final AtomicLongArray requests;
final long[] emissions;
final int prefetch;
final int limit;
final Supplier<Queue<T>> queueSupplier;
@SuppressWarnings("NotNullFieldNotInitialized") // s initialized in onSubscribe
Subscription s;
@SuppressWarnings("NotNullFieldNotInitialized") // initialized in onSubscribe
Queue<T> queue;
@Nullable Throwable error;
volatile boolean done;
int index;
volatile boolean cancelled;
volatile int wip;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<ParallelSourceMain> WIP =
AtomicIntegerFieldUpdater.newUpdater(ParallelSourceMain.class, "wip");
/**
* Counts how many subscribers were setup to delay triggering the
* drain of upstream until all of them have been setup.
*/
volatile int subscriberCount;
@SuppressWarnings("rawtypes")
static final AtomicIntegerFieldUpdater<ParallelSourceMain> SUBSCRIBER_COUNT =
AtomicIntegerFieldUpdater.newUpdater(ParallelSourceMain.class, "subscriberCount");
int produced;
int sourceMode;
ParallelSourceMain(CoreSubscriber<? super T>[] subscribers, int prefetch,
Supplier<Queue<T>> queueSupplier) {
this.subscribers = subscribers;
this.prefetch = prefetch;
this.queueSupplier = queueSupplier;
this.limit = Operators.unboundedOrLimit(prefetch);
this.requests = new AtomicLongArray(subscribers.length);
this.emissions = new long[subscribers.length];
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return s;
if (key == Attr.PREFETCH) return prefetch;
if (key == Attr.TERMINATED) return done;
if (key == Attr.CANCELLED) return cancelled;
if (key == Attr.ERROR) return error;
if (key == Attr.BUFFERED) return queue != null ? queue.size() : 0;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return null;
}
@Override
public Stream<? extends Scannable> inners() {
return Stream.of(subscribers).map(Scannable::from);
}
@Override
public Context currentContext() {
return subscribers[0].currentContext();
}
@Override
public void onSubscribe(Subscription s) {
if (Operators.validate(this.s, s)) {
this.s = s;
if (s instanceof Fuseable.QueueSubscription) {
@SuppressWarnings("unchecked")
Fuseable.QueueSubscription<T> qs = (Fuseable.QueueSubscription<T>) s;
int m = qs.requestFusion(Fuseable.ANY | Fuseable.THREAD_BARRIER);
if (m == Fuseable.SYNC) {
sourceMode = m;
queue = qs;
done = true;
setupSubscribers();
drain();
return;
} else
if (m == Fuseable.ASYNC) {
sourceMode = m;
queue = qs;
setupSubscribers();
s.request(Operators.unboundedOrPrefetch(prefetch));
return;
}
}
queue = queueSupplier.get();
setupSubscribers();
s.request(Operators.unboundedOrPrefetch(prefetch));
}
}
void setupSubscribers() {
int m = subscribers.length;
for (int i = 0; i < m; i++) {
if (cancelled) {
return;
}
int j = i;
SUBSCRIBER_COUNT.lazySet(this, i + 1);
subscribers[i].onSubscribe(new ParallelSourceInner<>(this, j, m));
}
}
@Override
public void onNext(T t) {
if (done) {
Operators.onNextDropped(t, currentContext());
return;
}
if (sourceMode == Fuseable.NONE) {
if (!queue.offer(t)) {
onError(Operators.onOperatorError(s, Exceptions.failWithOverflow(Exceptions.BACKPRESSURE_ERROR_QUEUE_FULL), t, currentContext()));
return;
}
}
drain();
}
@Override
public void onError(Throwable t) {
if (done) {
Operators.onErrorDropped(t, currentContext());
return;
}
error = t;
done = true;
drain();
}
@Override
public void onComplete() {
if(done){
return;
}
done = true;
drain();
}
void cancel() {
if (!cancelled) {
cancelled = true;
this.s.cancel();
if (WIP.getAndIncrement(this) == 0) {
queue.clear();
}
}
}
void drainAsync() {
int missed = 1;
Queue<T> q = queue;
CoreSubscriber<? super T>[] a = this.subscribers;
AtomicLongArray r = this.requests;
long[] e = this.emissions;
int n = e.length;
int idx = index;
int consumed = produced;
for (;;) {
int notReady = 0;
for (;;) {
if (cancelled) {
q.clear();
return;
}
boolean d = done;
if (d) {
Throwable ex = error;
if (ex != null) {
q.clear();
for (Subscriber<? super T> s : a) {
s.onError(ex);
}
return;
}
}
boolean empty = q.isEmpty();
if (d && empty) {
for (Subscriber<? super T> s : a) {
s.onComplete();
}
return;
}
if (empty) {
break;
}
long ridx = r.get(idx);
long eidx = e[idx];
if (ridx != eidx) {
T v;
try {
v = q.poll();
} catch (Throwable ex) {
ex = Operators.onOperatorError(s, ex, a[idx].currentContext());
for (Subscriber<? super T> s : a) {
s.onError(ex);
}
return;
}
if (v == null) {
break;
}
a[idx].onNext(v);
e[idx] = eidx + 1;
int c = ++consumed;
if (c == limit) {
consumed = 0;
s.request(c);
}
notReady = 0;
} else {
notReady++;
}
idx++;
if (idx == n) {
idx = 0;
}
if (notReady == n) {
break;
}
}
int w = wip;
if (w == missed) {
index = idx;
produced = consumed;
missed = WIP.addAndGet(this, -missed);
if (missed == 0) {
break;
}
} else {
missed = w;
}
}
}
void drainSync() {
int missed = 1;
Queue<T> q = queue;
CoreSubscriber<? super T>[] a = this.subscribers;
AtomicLongArray r = this.requests;
long[] e = this.emissions;
int n = e.length;
int idx = index;
for (;;) {
int notReady = 0;
for (;;) {
if (cancelled) {
q.clear();
return;
}
if (q.isEmpty()) {
for (Subscriber<? super T> s : a) {
s.onComplete();
}
return;
}
long ridx = r.get(idx);
long eidx = e[idx];
if (ridx != eidx) {
T v;
try {
v = q.poll();
} catch (Throwable ex) {
ex = Operators.onOperatorError(s, ex, a[idx].currentContext());
for (Subscriber<? super T> s : a) {
s.onError(ex);
}
return;
}
if (v == null) {
for (Subscriber<? super T> s : a) {
s.onComplete();
}
return;
}
a[idx].onNext(v);
e[idx] = eidx + 1;
notReady = 0;
} else {
notReady++;
}
idx++;
if (idx == n) {
idx = 0;
}
if (notReady == n) {
break;
}
}
int w = wip;
if (w == missed) {
index = idx;
missed = WIP.addAndGet(this, -missed);
if (missed == 0) {
break;
}
} else {
missed = w;
}
}
}
void drain() {
if (WIP.getAndIncrement(this) != 0) {
return;
}
if (sourceMode == Fuseable.SYNC) {
drainSync();
} else {
drainAsync();
}
}
static final
|
ParallelSourceMain
|
java
|
apache__camel
|
components/camel-nats/src/generated/java/org/apache/camel/component/nats/NatsComponentConfigurer.java
|
{
"start": 731,
"end": 3954
}
|
class ____ extends PropertyConfigurerSupport implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
@Override
public boolean configure(CamelContext camelContext, Object obj, String name, Object value, boolean ignoreCase) {
NatsComponent target = (NatsComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": target.setAutowiredEnabled(property(camelContext, boolean.class, value)); return true;
case "bridgeerrorhandler":
case "bridgeErrorHandler": target.setBridgeErrorHandler(property(camelContext, boolean.class, value)); return true;
case "headerfilterstrategy":
case "headerFilterStrategy": target.setHeaderFilterStrategy(property(camelContext, org.apache.camel.spi.HeaderFilterStrategy.class, value)); return true;
case "lazystartproducer":
case "lazyStartProducer": target.setLazyStartProducer(property(camelContext, boolean.class, value)); return true;
case "servers": target.setServers(property(camelContext, java.lang.String.class, value)); return true;
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": target.setUseGlobalSslContextParameters(property(camelContext, boolean.class, value)); return true;
case "verbose": target.setVerbose(property(camelContext, boolean.class, value)); return true;
default: return false;
}
}
@Override
public Class<?> getOptionType(String name, boolean ignoreCase) {
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return boolean.class;
case "bridgeerrorhandler":
case "bridgeErrorHandler": return boolean.class;
case "headerfilterstrategy":
case "headerFilterStrategy": return org.apache.camel.spi.HeaderFilterStrategy.class;
case "lazystartproducer":
case "lazyStartProducer": return boolean.class;
case "servers": return java.lang.String.class;
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": return boolean.class;
case "verbose": return boolean.class;
default: return null;
}
}
@Override
public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
NatsComponent target = (NatsComponent) obj;
switch (ignoreCase ? name.toLowerCase() : name) {
case "autowiredenabled":
case "autowiredEnabled": return target.isAutowiredEnabled();
case "bridgeerrorhandler":
case "bridgeErrorHandler": return target.isBridgeErrorHandler();
case "headerfilterstrategy":
case "headerFilterStrategy": return target.getHeaderFilterStrategy();
case "lazystartproducer":
case "lazyStartProducer": return target.isLazyStartProducer();
case "servers": return target.getServers();
case "useglobalsslcontextparameters":
case "useGlobalSslContextParameters": return target.isUseGlobalSslContextParameters();
case "verbose": return target.isVerbose();
default: return null;
}
}
}
|
NatsComponentConfigurer
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/suite/engine/testsuites/LifecycleMethodsSuites.java
|
{
"start": 5072,
"end": 5215
}
|
class ____ {
@AfterSuite
void nonStaticAfterSuite() {
fail("Should not be called");
}
}
@TestSuite
public static
|
NonStaticAfterSuite
|
java
|
netty__netty
|
codec-http/src/main/java/io/netty/handler/codec/http/multipart/FileUpload.java
|
{
"start": 884,
"end": 2297
}
|
interface ____ extends HttpData {
/**
* Returns the original filename in the client's filesystem,
* as provided by the browser (or other client software).
* @return the original filename
*/
String getFilename();
/**
* Set the original filename
*/
void setFilename(String filename);
/**
* Set the Content Type passed by the browser if defined
* @param contentType Content Type to set - must be not null
*/
void setContentType(String contentType);
/**
* Returns the content type passed by the browser or null if not defined.
* @return the content type passed by the browser or null if not defined.
*/
String getContentType();
/**
* Set the Content-Transfer-Encoding type from String as 7bit, 8bit or binary
*/
void setContentTransferEncoding(String contentTransferEncoding);
/**
* Returns the Content-Transfer-Encoding
* @return the Content-Transfer-Encoding
*/
String getContentTransferEncoding();
@Override
FileUpload copy();
@Override
FileUpload duplicate();
@Override
FileUpload retainedDuplicate();
@Override
FileUpload replace(ByteBuf content);
@Override
FileUpload retain();
@Override
FileUpload retain(int increment);
@Override
FileUpload touch();
@Override
FileUpload touch(Object hint);
}
|
FileUpload
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/seda/SedaInOutMEPTest.java
|
{
"start": 1081,
"end": 2205
}
|
class ____ extends ContextTestSupport {
@Test
public void testInOutMEP() throws Exception {
getMockEndpoint("mock:foo").expectedBodiesReceived("InOut Camel");
getMockEndpoint("mock:result").expectedBodiesReceived("Hello InOut Camel");
// InOut MEP when doing request
Object out = template.requestBody("direct:start", "Camel");
assertEquals("Hello InOut Camel", out);
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
// force MEP back to InOnly as we want the next to define the MEP as InOut
.setExchangePattern(ExchangePattern.InOnly)
.to(ExchangePattern.InOut, "seda:foo")
.setBody(body().prepend("Hello "))
.to("mock:result");
from("seda:foo").setBody(body().prepend("InOut ")).to("mock:foo");
}
};
}
}
|
SedaInOutMEPTest
|
java
|
netty__netty
|
codec-dns/src/main/java/io/netty/handler/codec/dns/TcpDnsResponseDecoder.java
|
{
"start": 858,
"end": 2566
}
|
class ____ extends LengthFieldBasedFrameDecoder {
private final DnsResponseDecoder<SocketAddress> responseDecoder;
/**
* Creates a new decoder with {@linkplain DnsRecordDecoder#DEFAULT the default record decoder}.
*/
public TcpDnsResponseDecoder() {
this(DnsRecordDecoder.DEFAULT, 64 * 1024);
}
/**
* Creates a new decoder with the specified {@code recordDecoder} and {@code maxFrameLength}
*/
public TcpDnsResponseDecoder(DnsRecordDecoder recordDecoder, int maxFrameLength) {
// Length is two octets as defined by RFC-7766
// See https://tools.ietf.org/html/rfc7766#section-8
super(maxFrameLength, 0, 2, 0, 2);
this.responseDecoder = new DnsResponseDecoder<SocketAddress>(recordDecoder) {
@Override
protected DnsResponse newResponse(SocketAddress sender, SocketAddress recipient,
int id, DnsOpCode opCode, DnsResponseCode responseCode) {
return new DefaultDnsResponse(id, opCode, responseCode);
}
};
}
@Override
protected Object decode(ChannelHandlerContext ctx, ByteBuf in) throws Exception {
ByteBuf frame = (ByteBuf) super.decode(ctx, in);
if (frame == null) {
return null;
}
try {
return responseDecoder.decode(ctx.channel().remoteAddress(), ctx.channel().localAddress(), frame.slice());
} finally {
frame.release();
}
}
@Override
protected ByteBuf extractFrame(ChannelHandlerContext ctx, ByteBuf buffer, int index, int length) {
return buffer.copy(index, length);
}
}
|
TcpDnsResponseDecoder
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/Rounding.java
|
{
"start": 32589,
"end": 35995
}
|
class ____ extends TimeUnitPreparedRounding {
@Override
public long round(long utcMillis) {
LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(utcMillis), timeZone);
LocalDateTime localMidnight = truncateLocalDateTime(localDateTime);
return firstTimeOnDay(localMidnight);
}
@Override
public long nextRoundingValue(long utcMillis) {
LocalDateTime localDateTime = LocalDateTime.ofInstant(Instant.ofEpochMilli(utcMillis), timeZone);
LocalDateTime earlierLocalMidnight = truncateLocalDateTime(localDateTime);
LocalDateTime localMidnight = nextRelevantMidnight(earlierLocalMidnight);
return firstTimeOnDay(localMidnight);
}
@Override
protected Prepared maybeUseArray(long minUtcMillis, long maxUtcMillis, int max) {
// We don't have the right information needed to know if this is safe for this time zone so we always use java rounding
return this;
}
private long firstTimeOnDay(LocalDateTime localMidnight) {
assert localMidnight.toLocalTime().equals(LocalTime.of(0, 0, 0)) : "firstTimeOnDay should only be called at midnight";
// Now work out what localMidnight actually means
final List<ZoneOffset> currentOffsets = timeZone.getRules().getValidOffsets(localMidnight);
if (currentOffsets.isEmpty() == false) {
// There is at least one midnight on this day, so choose the first
final ZoneOffset firstOffset = currentOffsets.get(0);
final OffsetDateTime offsetMidnight = localMidnight.atOffset(firstOffset);
return offsetMidnight.toInstant().toEpochMilli();
} else {
// There were no midnights on this day, so we must have entered the day via an offset transition.
// Use the time of the transition as it is the earliest time on the right day.
ZoneOffsetTransition zoneOffsetTransition = timeZone.getRules().getTransition(localMidnight);
return zoneOffsetTransition.getInstant().toEpochMilli();
}
}
private LocalDateTime nextRelevantMidnight(LocalDateTime localMidnight) {
assert localMidnight.toLocalTime().equals(LocalTime.MIDNIGHT) : "nextRelevantMidnight should only be called at midnight";
return switch (unit) {
case DAY_OF_MONTH -> localMidnight.plusDays(multiplier);
case WEEK_OF_WEEKYEAR -> localMidnight.plusDays(7L * multiplier);
case MONTH_OF_YEAR, MONTHS_OF_YEAR -> localMidnight.plusMonths(multiplier);
case QUARTER_OF_YEAR -> localMidnight.plusMonths(3L * multiplier);
case YEAR_OF_CENTURY, YEARS_OF_CENTURY -> localMidnight.plusYears(multiplier);
default -> throw new IllegalArgumentException("Unknown round-to-midnight unit: " + unit);
};
}
@Override
public String toString() {
return TimeUnitRounding.this + "[java.time to midnight]";
}
}
private
|
JavaTimeToMidnightRounding
|
java
|
elastic__elasticsearch
|
x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/recovery/SearchableSnapshotRecoveryState.java
|
{
"start": 585,
"end": 3955
}
|
class ____ extends RecoveryState {
private boolean preWarmComplete;
private boolean remoteTranslogSet;
public SearchableSnapshotRecoveryState(ShardRouting shardRouting, DiscoveryNode targetNode, @Nullable DiscoveryNode sourceNode) {
super(shardRouting, targetNode, sourceNode, new Index());
}
@Override
public synchronized RecoveryState setStage(Stage stage) {
// The transition to the final state was done by #prewarmCompleted, just ignore the transition
if (getStage() == Stage.DONE || stage == Stage.FINALIZE && remoteTranslogSet) {
return this;
}
// Pre-warm is still running, hold the state transition
// until the pre-warm process finishes
if (preWarmComplete == false && stage == Stage.DONE) {
validateCurrentStage(Stage.FINALIZE);
return this;
}
if (stage == Stage.INIT) {
remoteTranslogSet = false;
}
return super.setStage(stage);
}
@Override
public synchronized RecoveryState setRemoteTranslogStage() {
remoteTranslogSet = true;
super.setStage(Stage.TRANSLOG);
return super.setStage(Stage.FINALIZE);
}
@Override
public synchronized RecoveryState reset() {
// In searchable snapshots we won't run into #90441, therefore we don't need to create a new copy
setStage(Stage.INIT);
return this;
}
@Override
public synchronized void validateCurrentStage(Stage expected) {
if (remoteTranslogSet == false) {
super.validateCurrentStage(expected);
} else {
final Stage stage = getStage();
// For small indices it's possible that pre-warming finished shortly
// after transitioning to FINALIZE stage
if (stage != Stage.FINALIZE && stage != Stage.DONE) {
assert false : "expected stage [" + Stage.FINALIZE + " || " + Stage.DONE + "]; but current stage is [" + stage + "]";
throw new IllegalStateException(
"expected stage [" + Stage.FINALIZE + " || " + Stage.DONE + "]; " + "but current stage is [" + stage + "]"
);
}
}
}
// Visible for tests
boolean isRemoteTranslogSet() {
return remoteTranslogSet;
}
public synchronized void setPreWarmComplete() {
// For small shards it's possible that the
// cache is pre-warmed before the stage has transitioned
// to FINALIZE, so the transition to the final state is delayed until
// the recovery process catches up.
if (getStage() == Stage.FINALIZE) {
super.setStage(Stage.DONE);
}
SearchableSnapshotRecoveryState.Index index = (Index) getIndex();
index.stopTimer();
preWarmComplete = true;
}
public synchronized boolean isPreWarmComplete() {
return preWarmComplete;
}
public synchronized void ignoreFile(String name) {
SearchableSnapshotRecoveryState.Index index = (Index) getIndex();
index.addFileToIgnore(name);
}
public synchronized void markIndexFileAsReused(String name) {
SearchableSnapshotRecoveryState.Index index = (Index) getIndex();
index.markFileAsReused(name);
}
private static final
|
SearchableSnapshotRecoveryState
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/authentication/ObservationReactiveAuthenticationManager.java
|
{
"start": 1255,
"end": 2963
}
|
/**
 * A {@link ReactiveAuthenticationManager} decorator that records a Micrometer
 * {@link Observation} around each authentication attempt performed by the
 * wrapped delegate.
 *
 * NOTE(review): the constructor is named ObservationReactiveAuthenticationManager
 * while the class is named ____; as written this cannot compile — presumably the
 * class name should match the constructor. TODO confirm.
 */
class ____ implements ReactiveAuthenticationManager {
// Registry that started observations are published to.
private final ObservationRegistry registry;
// The real authentication manager being timed/observed.
private final ReactiveAuthenticationManager delegate;
// Naming/tagging convention for the observation; replaceable via setObservationConvention.
private ObservationConvention<AuthenticationObservationContext> convention = new AuthenticationObservationConvention();
public ObservationReactiveAuthenticationManager(ObservationRegistry registry,
ReactiveAuthenticationManager delegate) {
this.registry = registry;
this.delegate = delegate;
}
/**
 * Authenticates through the delegate while wrapping the attempt in an
 * {@link Observation}. The observation is stopped on success and on
 * cancellation; on error it records the throwable and then stops.
 */
@Override
public Mono<Authentication> authenticate(Authentication authentication) throws AuthenticationException {
AuthenticationObservationContext context = new AuthenticationObservationContext();
context.setAuthenticationRequest(authentication);
context.setAuthenticationManagerClass(this.delegate.getClass());
// deferContextual so any parent observation is read from the Reactor context
// at subscription time (per subscriber), not at assembly time.
return Mono.deferContextual((contextView) -> {
Observation observation = Observation.createNotStarted(this.convention, () -> context, this.registry)
.parentObservation(contextView.getOrDefault(ObservationThreadLocalAccessor.KEY, null))
.start();
return this.delegate.authenticate(authentication).doOnSuccess((result) -> {
context.setAuthenticationResult(result);
observation.stop();
}).doOnCancel(observation::stop).doOnError((t) -> {
observation.error(t);
observation.stop();
});
});
}
/**
 * Use the provided convention for reporting observation data.
 * @param convention The provided convention; must not be {@code null}
 *
 * @since 6.1
 */
public void setObservationConvention(ObservationConvention<AuthenticationObservationContext> convention) {
Assert.notNull(convention, "The observation convention cannot be null");
this.convention = convention;
}
}
|
ObservationReactiveAuthenticationManager
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/AuthenticationResult.java
|
{
"start": 1761,
"end": 7200
}
|
/**
 * Outcome of a single authenticator's attempt to handle an authentication request.
 */
enum ____ {
/**
 * The authenticator successfully handled the authentication request
 */
SUCCESS,
/**
 * The authenticator either did not handle the authentication request for reasons such as
 * it cannot find the necessary credentials,
 * or the authenticator tried to handle the authentication request but was unsuccessful.
 * Subsequent authenticators (if any) still have a chance to attempt authentication.
 */
CONTINUE,
/**
 * The authenticator failed to authenticate the request and also requires the whole
 * authentication chain to be stopped
 */
TERMINATE,
}
// Outcome category of this result; never null.
private final Status status;
// The successful result value (e.g. the authenticated entity); non-null only for SUCCESS.
private final T value;
// Failure description for unsuccessful results; null on success.
private final String message;
// Optional underlying cause of an unsuccessful attempt.
private final Exception exception;
// Realm-specific metadata; normalized by the constructor to an unmodifiable, never-null map.
private final Map<String, Object> metadata;
/**
 * Sole constructor; instances are obtained via the static factory methods below.
 * A null metadata map is replaced with an immutable empty map.
 */
private AuthenticationResult(
Status status,
@Nullable T value,
@Nullable String message,
@Nullable Exception exception,
@Nullable Map<String, Object> metadata
) {
this.status = status;
this.value = value;
this.message = message;
this.exception = exception;
this.metadata = metadata == null ? Collections.emptyMap() : Collections.unmodifiableMap(metadata);
}
/** @return the outcome category of this result (never {@code null}) */
public Status getStatus() {
    return this.status;
}

/** @return the successful value, or {@code null} if the attempt did not succeed */
public T getValue() {
    return this.value;
}

/** @return the failure message, or {@code null} if none was recorded */
public String getMessage() {
    return this.message;
}

/** @return the failure cause, or {@code null} if none was recorded */
public Exception getException() {
    return this.exception;
}

/** @return realm-specific metadata for this result; never {@code null} */
public Map<String, Object> getMetadata() {
    return this.metadata;
}
/**
 * Creates an {@code AuthenticationResult} that indicates that the supplied {@link User}
 * has been successfully authenticated.
 * <p>
 * The {@link #getStatus() status} is set to {@link Status#SUCCESS}.
 * </p><p>
 * Neither the {@link #getMessage() message} nor {@link #getException() exception} are populated.
 * </p>
 * @param value The user that was authenticated. Cannot be {@code null}.
 */
public static <T> AuthenticationResult<T> success(T value) {
// Delegates to the metadata-aware overload with no metadata.
return success(value, null);
}
/**
 * Creates a successful result that additionally carries realm-specific metadata.
 *
 * @throws NullPointerException if {@code value} is {@code null}
 * @see #success(Object)
 */
public static <T> AuthenticationResult<T> success(T value, @Nullable Map<String, Object> metadata) {
    return new AuthenticationResult<>(Status.SUCCESS, Objects.requireNonNull(value), null, null, metadata);
}
/**
 * Creates an {@code AuthenticationResult} that indicates that the realm did not handle the
 * authentication request in any way, and has no failure messages.
 * <p>
 * The {@link #getStatus() status} is set to {@link Status#CONTINUE}.
 * </p><p>
 * The {@link #getMessage() message}, {@link #getException() exception}, and {@link #getValue() user} are all set to {@code null}.
 * </p>
 */
// NOT_HANDLED is a shared singleton declared elsewhere in this class; the
// unchecked cast is safe for any T because its value is null.
@SuppressWarnings("unchecked")
public static <T> AuthenticationResult<T> notHandled() {
return (AuthenticationResult<T>) NOT_HANDLED;
}
/**
 * Creates an {@code AuthenticationResult} for a realm that attempted to handle the request
 * but failed; subsequent authenticators may still run ({@link Status#CONTINUE}).
 * The {@link #getValue() value} is not populated.
 *
 * @throws NullPointerException if {@code message} is {@code null}
 */
public static <T> AuthenticationResult<T> unsuccessful(String message, @Nullable Exception cause) {
    return new AuthenticationResult<>(Status.CONTINUE, null, Objects.requireNonNull(message), cause, null);
}
/**
 * Creates an {@code AuthenticationResult} that indicates that the realm attempted to handle the authentication request, was
 * unsuccessful and wants to terminate this authentication request.
 * The reason for the failure is given in the supplied message and optional exception.
 * <p>
 * The {@link #getStatus() status} is set to {@link Status#TERMINATE}.
 * </p><p>
 * The {@link #getValue() value} is not populated.
 * </p>
 */
public static <T> AuthenticationResult<T> terminate(String message, @Nullable Exception cause) {
return new AuthenticationResult<>(Status.TERMINATE, null, message, cause, null);
}
/**
 * Creates an {@code AuthenticationResult} that indicates that the realm attempted to handle the authentication request, was
 * unsuccessful and wants to terminate this authentication request.
 * The reason for the failure is given in the supplied message.
 * <p>
 * The {@link #getStatus() status} is set to {@link Status#TERMINATE}.
 * </p><p>
 * The {@link #getValue() value} is not populated.
 * </p>
 */
public static <T> AuthenticationResult<T> terminate(String message) {
// Convenience overload: terminate with no underlying cause.
return terminate(message, null);
}
/**
 * @return {@code true} if and only if the status is {@link Status#SUCCESS}
 */
public boolean isAuthenticated() {
return status == Status.SUCCESS;
}
/** Debug representation; omits metadata by design (matches the original format). */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("AuthenticationResult{");
    sb.append("status=").append(status);
    sb.append(", value=").append(value);
    sb.append(", message=").append(message);
    sb.append(", exception=").append(exception);
    return sb.append('}').toString();
}
}
|
Status
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/operators/hash/NonReusingHashJoinIteratorITCase.java
|
{
"start": 55447,
"end": 56327
}
|
/**
 * Pair comparator matching an {@code IntPair} reference key against the first
 * field of {@code Tuple2<Integer, String>} candidates. A null key field is
 * reported as a {@link NullKeyFieldException}.
 */
class ____
        extends TypePairComparator<IntPair, Tuple2<Integer, String>> {

    /** Key of the current reference IntPair. */
    private int reference;

    @Override
    public void setReference(IntPair reference) {
        this.reference = reference.getKey();
    }

    @Override
    public boolean equalToReference(Tuple2<Integer, String> candidate) {
        try {
            // Unboxes candidate.f0; a null f0 surfaces as NPE below.
            return candidate.f0 == this.reference;
        } catch (NullPointerException npex) {
            throw new NullKeyFieldException();
        }
    }

    @Override
    public int compareToReference(Tuple2<Integer, String> candidate) {
        try {
            // Integer.compare avoids the int-overflow that the plain subtraction
            // (candidate.f0 - reference) produces for operands of large opposite
            // magnitude, which would yield a comparison result with the wrong sign.
            return Integer.compare(candidate.f0, this.reference);
        } catch (NullPointerException npex) {
            throw new NullKeyFieldException();
        }
    }
}
static final
|
IntPairTuplePairComparator
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.