language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/collectionincompatibletype/CompatibleWithMisuseTest.java | {
"start": 1917,
"end": 2841
} | class ____<X> {
static final String CONSTANT = "Y";
// BUG: Diagnostic contains: Valid arguments are: X
void doSomething(@CompatibleWith("Y") Object bad) {}
// BUG: Diagnostic contains: Valid arguments are: X
void doSomethingWithConstant(@CompatibleWith(CONSTANT) Object bad) {}
// BUG: Diagnostic contains: not be empty (valid arguments are X)
void doSomethingEmpty(@CompatibleWith("") Object bad) {}
// BUG: Diagnostic contains: Valid arguments are: Z, X
<Z> void doSomethingElse(@CompatibleWith("Y") Object ok) {}
}
""")
.doTest();
}
@Test
public void overridesAlreadyAnnotated() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.errorprone.annotations.CompatibleWith;
| Test |
java | google__guava | android/guava/src/com/google/common/util/concurrent/AbstractScheduledService.java | {
"start": 18947,
"end": 19087
} | class ____ extends Scheduler {
/** Constructor for use by subclasses. */
public CustomScheduler() {}
/** A callable | CustomScheduler |
java | apache__flink | flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/functions/SqlFunctionUtils.java | {
"start": 2611,
"end": 41350
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(SqlFunctionUtils.class);
private static final ThreadLocalCache<String, Pattern> REGEXP_PATTERN_CACHE =
new ThreadLocalCache<String, Pattern>() {
@Override
public Pattern getNewInstance(String regex) {
return Pattern.compile(regex);
}
};
private static final ThreadLocalCache<String, URL> URL_CACHE =
new ThreadLocalCache<String, URL>() {
public URL getNewInstance(String url) {
try {
return new URL(url);
} catch (MalformedURLException e) {
throw new RuntimeException(e);
}
}
};
private static final Map<String, String> EMPTY_MAP = new HashMap<>(0);
public static double exp(DecimalData d) {
return Math.exp(doubleValue(d));
}
public static double power(double base, DecimalData exponent) {
return Math.pow(base, doubleValue(exponent));
}
public static double power(DecimalData base, DecimalData exponent) {
return Math.pow(doubleValue(base), doubleValue(exponent));
}
public static double power(DecimalData base, double exponent) {
return Math.pow(doubleValue(base), exponent);
}
public static double cosh(DecimalData x) {
return Math.cosh(doubleValue(x));
}
public static double acos(DecimalData a) {
return Math.acos(doubleValue(a));
}
public static double asin(DecimalData a) {
return Math.asin(doubleValue(a));
}
public static double atan(DecimalData a) {
return Math.atan(doubleValue(a));
}
public static double atan2(DecimalData y, DecimalData x) {
return Math.atan2(doubleValue(y), doubleValue(x));
}
public static double sin(DecimalData a) {
return Math.sin(doubleValue(a));
}
public static double sinh(DecimalData a) {
return Math.sinh(doubleValue(a));
}
public static double cos(DecimalData a) {
return Math.cos(doubleValue(a));
}
public static double tan(DecimalData a) {
return Math.tan(doubleValue(a));
}
/** Calculates the hyperbolic tangent of a big decimal number. */
public static double tanh(DecimalData a) {
return Math.tanh(doubleValue(a));
}
/** SQL <code>COT</code> operator applied to double values. */
public static double cot(double b0) {
return 1.0d / Math.tan(b0);
}
public static double cot(DecimalData a) {
return 1.0d / Math.tan(doubleValue(a));
}
public static double degrees(DecimalData angrad) {
return Math.toDegrees(doubleValue(angrad));
}
public static double radians(DecimalData angdeg) {
return Math.toRadians(doubleValue(angdeg));
}
/** SQL <code>ABS</code> operator applied to byte values. */
public static byte abs(byte b0) {
return (byte) Math.abs(b0);
}
/** SQL <code>ABS</code> operator applied to short values. */
public static short abs(short b0) {
return (short) Math.abs(b0);
}
/** SQL <code>ABS</code> operator applied to int values. */
public static int abs(int b0) {
return Math.abs(b0);
}
/** SQL <code>ABS</code> operator applied to long values. */
public static long abs(long b0) {
return Math.abs(b0);
}
/** SQL <code>ABS</code> operator applied to float values. */
public static float abs(float b0) {
return Math.abs(b0);
}
/** SQL <code>ABS</code> operator applied to double values. */
public static double abs(double b0) {
return Math.abs(b0);
}
public static DecimalData abs(DecimalData a) {
return DecimalDataUtils.abs(a);
}
public static double floor(double b0) {
return Math.floor(b0);
}
public static float floor(float b0) {
return (float) Math.floor(b0);
}
/** SQL <code>FLOOR</code> operator applied to int values. */
public static int floor(int b0, int b1) {
int r = b0 % b1;
if (r < 0) {
r += b1;
}
return b0 - r;
}
/** SQL <code>FLOOR</code> operator applied to long values. */
public static long floor(long b0, long b1) {
long r = b0 % b1;
if (r < 0) {
r += b1;
}
return b0 - r;
}
public static DecimalData floor(DecimalData a) {
return DecimalDataUtils.floor(a);
}
public static double ceil(double b0) {
return Math.ceil(b0);
}
public static float ceil(float b0) {
return (float) Math.ceil(b0);
}
/** SQL <code>CEIL</code> operator applied to int values. */
public static int ceil(int b0, int b1) {
int r = b0 % b1;
if (r > 0) {
r -= b1;
}
return b0 - r;
}
/** SQL <code>CEIL</code> operator applied to long values. */
public static long ceil(long b0, long b1) {
    // Compute via the remainder (mirroring ceil(int, int)) instead of
    // floor(b0 + b1 - 1, b1): the old form overflowed computing b0 + b1 - 1
    // for b0 near Long.MAX_VALUE even when the mathematical result fits in a long.
    long r = b0 % b1;
    if (r > 0) {
        r -= b1;
    }
    return b0 - r;
}
public static DecimalData ceil(DecimalData a) {
return DecimalDataUtils.ceil(a);
}
// -------------------------- natural logarithm ------------------------
/** Returns the natural logarithm of "x". */
public static double log(double x) {
return Math.log(x);
}
public static double log(DecimalData x) {
return Math.log(doubleValue(x));
}
/** Returns the logarithm of "x" with base "base". */
public static double log(double base, double x) {
return Math.log(x) / Math.log(base);
}
public static double log(double base, DecimalData x) {
return log(base, doubleValue(x));
}
public static double log(DecimalData base, double x) {
return log(doubleValue(base), x);
}
public static double log(DecimalData base, DecimalData x) {
return log(doubleValue(base), doubleValue(x));
}
/** Returns the logarithm of "a" with base 2. */
public static double log2(double x) {
return Math.log(x) / Math.log(2);
}
public static double log2(DecimalData x) {
return log2(doubleValue(x));
}
public static double log10(double x) {
return Math.log10(x);
}
public static double log10(DecimalData x) {
return log10(doubleValue(x));
}
// -------------------------- string functions ------------------------
/**
 * Returns the string str left-padded with the string pad to a length of len characters. If str
 * is longer than len, the return value is shortened to len characters.
 *
 * @param base the string to pad; null yields null
 * @param len the target length; negative yields null, zero yields ""
 * @param pad the padding string, repeated and truncated as needed; null or empty yields null
 * @return the padded (or truncated) string, or null for invalid arguments
 */
public static String lpad(String base, int len, String pad) {
    // Null base/pad previously caused a NullPointerException; return null instead,
    // consistent with the other invalid-argument cases.
    if (base == null || pad == null) {
        return null;
    }
    if (len < 0 || pad.isEmpty()) {
        return null;
    } else if (len == 0) {
        return "";
    }
    char[] data = new char[len];
    char[] baseChars = base.toCharArray();
    char[] padChars = pad.toCharArray();
    // the length of the padding needed
    int pos = Math.max(len - base.length(), 0);
    // copy the padding (possibly a truncated final repetition)
    for (int i = 0; i < pos; i += pad.length()) {
        for (int j = 0; j < pad.length() && j < pos - i; j++) {
            data[i + j] = padChars[j];
        }
    }
    // copy the base, truncated to the remaining room
    int i = 0;
    while (pos + i < len && i < base.length()) {
        data[pos + i] = baseChars[i];
        i += 1;
    }
    return new String(data);
}
/**
 * Returns the string str right-padded with the string pad to a length of len characters. If str
 * is longer than len, the return value is shortened to len characters.
 *
 * @param base the string to pad; null yields null
 * @param len the target length; negative yields null, zero yields ""
 * @param pad the padding string, repeated and truncated as needed; null or empty yields null
 * @return the padded (or truncated) string, or null for invalid arguments
 */
public static String rpad(String base, int len, String pad) {
    // Null base/pad previously caused a NullPointerException; return null instead,
    // consistent with the other invalid-argument cases.
    if (base == null || pad == null) {
        return null;
    }
    if (len < 0 || pad.isEmpty()) {
        return null;
    } else if (len == 0) {
        return "";
    }
    char[] data = new char[len];
    char[] baseChars = base.toCharArray();
    char[] padChars = pad.toCharArray();
    int pos = 0;
    // copy the base, truncated to len if necessary
    while (pos < base.length() && pos < len) {
        data[pos] = baseChars[pos];
        pos += 1;
    }
    // copy the padding (possibly a truncated final repetition)
    while (pos < len) {
        int i = 0;
        while (i < pad.length() && i < len - pos) {
            data[pos + i] = padChars[i];
            i += 1;
        }
        pos += pad.length();
    }
    return new String(data);
}
/** Returns a string that repeats the base string n times. */
public static String repeat(String str, int repeat) {
return EncodingUtils.repeat(str, repeat);
}
/** Replaces all the old strings with the replacement string. */
public static String replace(String str, String oldStr, String replacement) {
return str.replace(oldStr, replacement);
}
/**
* Split target string with custom separator and pick the index-th(start with 0) result.
*
* @param str target string.
* @param separator custom separator.
* @param index index of the result which you want.
* @return the string at the index of split results.
*/
public static String splitIndex(String str, String separator, int index) {
if (index < 0) {
return null;
}
String[] values = StringUtils.splitByWholeSeparatorPreserveAllTokens(str, separator);
if (index >= values.length) {
return null;
} else {
return values[index];
}
}
/**
* Split target string with custom separator and pick the index-th(start with 0) result.
*
* @param str target string.
* @param character int value of the separator character
* @param index index of the result which you want.
* @return the string at the index of split results.
*/
public static String splitIndex(String str, int character, int index) {
if (character > 255 || character < 1 || index < 0) {
return null;
}
String[] values = StringUtils.splitPreserveAllTokens(str, (char) character);
if (index >= values.length) {
return null;
} else {
return values[index];
}
}
/**
 * Returns a string resulting from replacing all substrings that match the regular expression
 * with replacement.
 *
 * <p>The replacement text is applied literally (via {@link Matcher#quoteReplacement}), so
 * {@code $} and {@code \} in it are not interpreted as group references.
 *
 * @param str the input string; null yields null
 * @param regex the regular expression; null yields null
 * @param replacement the literal replacement text; null yields null
 * @return the replaced string, or null on null input or on any regex failure
 */
public static String regexpReplace(String str, String regex, String replacement) {
if (str == null || regex == null || replacement == null) {
return null;
}
try {
return str.replaceAll(regex, Matcher.quoteReplacement(replacement));
} catch (Exception e) {
// Invalid patterns are logged and swallowed: SQL semantics favor NULL over failure.
LOG.error(
String.format(
"Exception in regexpReplace('%s', '%s', '%s')",
str, regex, replacement),
e);
// return null if exception in regex replace
return null;
}
}
/**
 * Returns a string extracted with a specified regular expression and a regex match group index.
 *
 * <p>Only the first match is considered; {@code extractIndex} selects the capture group,
 * with 0 denoting the whole match.
 *
 * @param str the input string; null yields null
 * @param regex the regular expression; null yields null
 * @param extractIndex the capture-group index (0 = entire match)
 * @return the extracted group, or null when there is no match or on any regex error
 */
public static String regexpExtract(String str, String regex, int extractIndex) {
if (str == null || regex == null) {
return null;
}
try {
// NOTE(review): compiles the pattern on every call; REGEXP_PATTERN_CACHE (used by
// getRegexpMatcher and regExp) looks applicable here — confirm and reuse if so.
Matcher m = Pattern.compile(regex).matcher(str);
if (m.find()) {
MatchResult mr = m.toMatchResult();
// An out-of-range group index throws, which is caught below and mapped to null.
return mr.group(extractIndex);
}
} catch (Exception e) {
LOG.error(
String.format(
"Exception in regexpExtract('%s', '%s', '%d')",
str, regex, extractIndex),
e);
}
return null;
}
public static String regexpExtract(String str, String regex, long extractIndex) {
return regexpExtract(str, regex, (int) extractIndex);
}
/** Returns the first string extracted with a specified regular expression. */
public static String regexpExtract(String str, String regex) {
return regexpExtract(str, regex, 0);
}
/**
 * Returns a Matcher object that represents the result of matching given StringData against a
 * specified regular expression pattern.
 *
 * <p>Compiled patterns are reused through the thread-local {@code REGEXP_PATTERN_CACHE}.
 *
 * @param str the input; null yields null
 * @param regex the pattern; null or a syntactically invalid pattern yields null
 * @return the matcher, or null on null input or an invalid pattern
 */
public static Matcher getRegexpMatcher(@Nullable StringData str, @Nullable StringData regex) {
if (str == null || regex == null) {
return null;
}
try {
return REGEXP_PATTERN_CACHE.get(regex.toString()).matcher(str.toString());
} catch (PatternSyntaxException e) {
// Invalid patterns are mapped to null rather than propagated.
return null;
}
}
/**
 * Parse string as key-value string and return the value matches key name. example:
 * keyvalue('k1=v1;k2=v2', ';', '=', 'k2') = 'v2' keyvalue('k1:v1,k2:v2', ',', ':', 'k3') = NULL
 *
 * @param str target string.
 * @param pairSeparator separator between key-value tuple.
 * @param kvSeparator separator between key and value.
 * @param keyName name of the key whose value you want return.
 * @return target value.
 */
public static BinaryStringData keyValue(
BinaryStringData str,
BinaryStringData pairSeparator,
BinaryStringData kvSeparator,
BinaryStringData keyName) {
if (str == null || str.getSizeInBytes() == 0) {
return null;
}
if (pairSeparator != null
&& pairSeparator.getSizeInBytes() == 1
&& kvSeparator != null
&& kvSeparator.getSizeInBytes() == 1) {
// Fast path: single-byte separators can be matched directly on the binary form.
return BinaryStringDataUtil.keyValue(
str, pairSeparator.byteAt(0), kvSeparator.byteAt(0), keyName);
} else {
// Fallback: multi-byte (or null) separators go through the String-based overload.
return BinaryStringData.fromString(
keyValue(
BinaryStringDataUtil.safeToString(str),
BinaryStringDataUtil.safeToString(pairSeparator),
BinaryStringDataUtil.safeToString(kvSeparator),
BinaryStringDataUtil.safeToString(keyName)));
}
}
/**
 * String-based key-value lookup: splits {@code str} into pairs on {@code pairSeparator},
 * splits each pair on {@code kvSeparator}, and returns the value whose key equals
 * {@code keyName}.
 *
 * @param str the key-value text, e.g. "k1=v1;k2=v2"; empty or null yields null
 * @param pairSeparator separator characters between pairs
 * @param kvSeparator separator characters between key and value
 * @param keyName the key to look up
 * @return the matching value, or null when absent or on any parse failure
 */
private static String keyValue(
        String str, String pairSeparator, String kvSeparator, String keyName) {
    try {
        if (StringUtils.isEmpty(str)) {
            return null;
        }
        String[] values = StringUtils.split(str, pairSeparator);
        for (String value : values) {
            if (!StringUtils.isEmpty(value)) {
                // BUG FIX: split the pair itself, not the separator string.
                // The previous StringUtils.split(kvSeparator) tokenized the separator
                // by whitespace, so no key could ever match.
                String[] kv = StringUtils.split(value, kvSeparator);
                if (kv != null && kv.length == 2 && kv[0].equals(keyName)) {
                    return kv[1];
                }
            }
        }
        return null;
    } catch (Exception e) {
        LOG.error("Exception when parse key-value", e);
        return null;
    }
}
/**
* Calculate the hash value of a given string.
*
* @param algorithm message digest algorithm.
* @param str string to hash.
* @return hash value of string.
*/
public static String hash(String algorithm, String str) {
return hash(algorithm, str, "");
}
/**
* Calculate the hash value of a given string.
*
* @param algorithm message digest algorithm.
* @param str string to hash.
* @param charsetName charset of string.
* @return hash value of string.
*/
public static String hash(String algorithm, String str, String charsetName) {
try {
byte[] digest =
MessageDigest.getInstance(algorithm)
.digest(strToBytesWithCharset(str, charsetName));
return EncodingUtils.hex(digest);
} catch (NoSuchAlgorithmException e) {
throw new IllegalArgumentException("Unsupported algorithm: " + algorithm, e);
}
}
private static byte[] strToBytesWithCharset(String str, String charsetName) {
byte[] byteArray = null;
if (!StringUtils.isEmpty(charsetName)) {
try {
byteArray = str.getBytes(charsetName);
} catch (UnsupportedEncodingException e) {
LOG.warn(
"Unsupported encoding: " + charsetName + ", fallback to system charset", e);
byteArray = null;
}
}
if (byteArray == null) {
byteArray = str.getBytes(StandardCharsets.UTF_8);
}
return byteArray;
}
/**
 * Parse url and return various components of the URL. If accept any null arguments, return
 * null.
 *
 * @param urlStr URL string.
 * @param partToExtract determines which components would return. accept values:
 *     HOST,PATH,QUERY,REF, PROTOCOL,FILE,AUTHORITY,USERINFO
 * @return target value.
 */
public static String parseUrl(String urlStr, String partToExtract) {
    final URL url;
    try {
        url = URL_CACHE.get(urlStr);
    } catch (Exception e) {
        LOG.error("Parse URL error: " + urlStr, e);
        return null;
    }
    // Unknown selectors (including null) yield null, exactly as before.
    if (partToExtract == null) {
        return null;
    }
    switch (partToExtract) {
        case "HOST":
            return url.getHost();
        case "PATH":
            return url.getPath();
        case "QUERY":
            return url.getQuery();
        case "REF":
            return url.getRef();
        case "PROTOCOL":
            return url.getProtocol();
        case "FILE":
            return url.getFile();
        case "AUTHORITY":
            return url.getAuthority();
        case "USERINFO":
            return url.getUserInfo();
        default:
            return null;
    }
}
/**
 * Parse url and return various parameter of the URL. If accept any null arguments, return null.
 *
 * @param urlStr URL string.
 * @param partToExtract must be QUERY, or return null.
 * @param key parameter name.
 * @return target value.
 */
public static String parseUrl(String urlStr, String partToExtract, String key) {
// Only the query component carries parameters.
if (!"QUERY".equals(partToExtract)) {
return null;
}
String query = parseUrl(urlStr, partToExtract);
if (query == null) {
return null;
}
// Match "key=value" at the start of the query or right after '&'; the key is
// quoted so regex metacharacters in it are matched literally.
Pattern p = Pattern.compile("(&|^)" + Pattern.quote(key) + "=([^&]*)");
Matcher m = p.matcher(query);
if (m.find()) {
return m.group(2);
}
return null;
}
/**
 * Integer division with Java semantics (truncation toward zero).
 *
 * @throws ArithmeticException if {@code b} is zero
 */
public static int divideInt(int a, int b) {
return a / b;
}
/**
 * SQL SUBSTRING: returns the substring of {@code str} starting at 1-based position
 * {@code start} with at most {@code len} characters.
 *
 * <p>A negative {@code start} counts back from the end of the string; {@code start == 0}
 * behaves like position 1. Out-of-range positions yield "". A negative {@code len}, or a
 * {@code len}/{@code start} that does not fit in an int, yields null (and logs an error).
 */
public static String subString(String str, long start, long len) {
if (len < 0) {
LOG.error(
"len of 'substring(str, start, len)' must be >= 0 and Int type, but len = {}",
len);
return null;
}
if (len > Integer.MAX_VALUE || start > Integer.MAX_VALUE) {
LOG.error(
"len or start of 'substring(str, start, len)' must be Int type, but len = {}, start = {}",
len,
start);
return null;
}
int length = (int) len;
int pos = (int) start;
if (str.isEmpty()) {
return "";
}
int startPos;
int endPos;
if (pos > 0) {
// 1-based position from the left.
startPos = pos - 1;
if (startPos >= str.length()) {
return "";
}
} else if (pos < 0) {
// Negative position counts back from the end.
startPos = str.length() + pos;
if (startPos < 0) {
return "";
}
} else {
// Position 0 is treated like position 1.
startPos = 0;
}
// Clamp the end to the string length.
if ((str.length() - startPos) < length) {
endPos = str.length();
} else {
endPos = startPos + length;
}
return str.substring(startPos, endPos);
}
public static String subString(String str, long start) {
return subString(str, start, Integer.MAX_VALUE);
}
/**
 * Returns a one-character string holding the character whose code is the low byte of
 * {@code chr}; negative inputs yield "".
 */
public static String chr(long chr) {
    if (chr < 0) {
        return "";
    }
    // Only the low byte is significant. When (chr & 0xFF) == 0 the cast yields '\0'
    // (Character.MIN_VALUE), so one cast covers both original branches.
    return String.valueOf((char) (chr & 0xFF));
}
/**
 * SQL OVERLAY: replaces {@code length} characters of {@code s} starting at 1-based position
 * {@code start} with {@code r}. Out-of-range start positions return {@code s} unchanged.
 */
public static String overlay(String s, String r, long start, long length) {
    if (start <= 0 || start > s.length()) {
        return s;
    }
    int startPos = (int) start;
    int len = (int) length;
    StringBuilder result = new StringBuilder(s.substring(0, startPos - 1));
    result.append(r);
    // Keep the tail only when the replaced span ends inside the string.
    boolean tailRemains = (startPos + len) <= s.length() && len > 0;
    if (tailRemains) {
        result.append(s, startPos - 1 + len, s.length());
    }
    return result.toString();
}
public static String overlay(String s, String r, long start) {
return overlay(s, r, start, r.length());
}
public static int position(BinaryStringData seek, BinaryStringData s) {
return position(seek, s, 1);
}
public static int position(BinaryStringData seek, BinaryStringData s, int from) {
return s.indexOf(seek, from - 1) + 1;
}
/**
 * Returns the 1-based position of the {@code nthAppearance}-th occurrence of
 * {@code subString} in {@code str}, starting the search at 1-based {@code startPosition}.
 *
 * <p>A positive start searches forward; a negative start searches backward from the end
 * (implemented by searching the reversed strings); a start of 0 returns 0. Returns 0 when
 * the occurrence does not exist.
 *
 * @throws IllegalArgumentException if {@code nthAppearance} is not positive
 */
public static int instr(
BinaryStringData str,
BinaryStringData subString,
int startPosition,
int nthAppearance) {
if (nthAppearance <= 0) {
throw new IllegalArgumentException("nthAppearance must be positive!");
}
if (startPosition == 0) {
return 0;
} else if (startPosition > 0) {
// Forward search: advance past each found occurrence.
int startIndex = startPosition;
int index = 0;
for (int i = 0; i < nthAppearance; i++) {
index = str.indexOf(subString, startIndex - 1) + 1;
if (index == 0) {
return 0;
}
startIndex = index + 1;
}
return index;
} else {
// Backward search: look for the reversed needle in the reversed haystack,
// then map the reversed position back to a forward 1-based position.
int pos =
instr(
BinaryStringDataUtil.reverse(str),
BinaryStringDataUtil.reverse(subString),
-startPosition,
nthAppearance);
if (pos == 0) {
return 0;
} else {
return str.numChars() + 2 - pos - subString.numChars();
}
}
}
/** Returns the hex string of a long argument. */
public static String hex(long x) {
return Long.toHexString(x).toUpperCase();
}
/** Returns the hex string of a string argument. */
public static String hex(String x) {
return EncodingUtils.hex(x.getBytes(StandardCharsets.UTF_8)).toUpperCase();
}
/**
* Creates a map by parsing text. Split text into key-value pairs using two delimiters. The
* first delimiter separates pairs, and the second delimiter separates key and value. If only
* one parameter is given, default delimiters are used: ',' as delimiter1 and '=' as delimiter2.
* Both delimiters are treated as regular expressions.
*
* @param text the input text
* @return the map
*/
public static Map<String, String> strToMap(String text) {
return strToMap(text, ",", "=");
}
/**
 * Creates a map by parsing text. Split text into key-value pairs using two delimiters. The
 * first delimiter separates pairs, and the second delimiter separates key and value. Both
 * {@code listDelimiter} and {@code keyValueDelimiter} are treated as regular expressions.
 *
 * @param text the input text; null or empty yields an empty map
 * @param listDelimiter the delimiter to separates pairs
 * @param keyValueDelimiter the delimiter to separates key and value
 * @return the map; pairs without a key-value delimiter are stored with a null value
 */
public static Map<String, String> strToMap(
        String text, String listDelimiter, String keyValueDelimiter) {
    if (text == null || text.isEmpty()) {
        // Return a fresh map instead of the shared mutable EMPTY_MAP constant:
        // any caller mutating the returned map would otherwise corrupt every
        // subsequent empty result process-wide.
        return new HashMap<>(0);
    }
    String[] keyValuePairs = text.split(listDelimiter);
    // Presize generously to avoid rehashing.
    Map<String, String> ret = new HashMap<>(keyValuePairs.length * 2);
    for (String keyValuePair : keyValuePairs) {
        // Limit 2 keeps any extra delimiter occurrences inside the value.
        String[] keyValue = keyValuePair.split(keyValueDelimiter, 2);
        if (keyValue.length < 2) {
            ret.put(keyValuePair, null);
        } else {
            ret.put(keyValue[0], keyValue[1]);
        }
    }
    return ret;
}
// SQL ROUND
/** SQL <code>ROUND</code> operator applied to byte values. */
public static byte sround(byte b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to byte values. */
public static byte sround(byte b0, int b1) {
return sround(BigDecimal.valueOf(b0), b1).byteValue();
}
/** SQL <code>ROUND</code> operator applied to short values. */
public static short sround(short b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to short values. */
public static short sround(short b0, int b1) {
return sround(BigDecimal.valueOf(b0), b1).shortValue();
}
/** SQL <code>ROUND</code> operator applied to int values. */
public static int sround(int b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to int values. */
public static int sround(int b0, int b1) {
return sround(BigDecimal.valueOf(b0), b1).intValue();
}
/** SQL <code>ROUND</code> operator applied to long values. */
public static long sround(long b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to long values. */
public static long sround(long b0, int b1) {
return sround(BigDecimal.valueOf(b0), b1).longValue();
}
/** SQL <code>ROUND</code> operator applied to BigDecimal values. */
public static BigDecimal sround(BigDecimal b0) {
return sround(b0, 0);
}
/**
 * SQL <code>ROUND</code> operator applied to BigDecimal values: rounds {@code b0} to
 * {@code b1} decimal places using HALF_UP (a negative {@code b1} rounds to the left of
 * the decimal point).
 */
public static BigDecimal sround(BigDecimal b0, int b1) {
    BigDecimal shifted = b0.movePointRight(b1);
    BigDecimal rounded = shifted.setScale(0, RoundingMode.HALF_UP);
    return rounded.movePointLeft(b1);
}
/** SQL <code>ROUND</code> operator applied to float values. */
public static float sround(float b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to float values. */
public static float sround(float b0, int b1) {
return sround(BigDecimal.valueOf(b0), b1).floatValue();
}
/** SQL <code>ROUND</code> operator applied to double values. */
public static double sround(double b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to double values. */
public static double sround(double b0, int b1) {
return sround(BigDecimal.valueOf(b0), b1).doubleValue();
}
/** SQL <code>ROUND</code> operator applied to DecimalData values. */
public static DecimalData sround(DecimalData b0) {
return sround(b0, 0);
}
/** SQL <code>ROUND</code> operator applied to DecimalData values. */
public static DecimalData sround(DecimalData b0, int b1) {
return DecimalDataUtils.sround(b0, b1);
}
public static DecimalData sign(DecimalData b0) {
return DecimalDataUtils.sign(b0);
}
public static boolean isDecimal(Object obj) {
if ((obj instanceof Long)
|| (obj instanceof Integer)
|| (obj instanceof Short)
|| (obj instanceof Byte)
|| (obj instanceof Float)
|| (obj instanceof Double)
|| (obj instanceof BigDecimal)
|| (obj instanceof BigInteger)) {
return true;
}
if (obj instanceof String || obj instanceof Character) {
String s = obj.toString();
if (s.isEmpty()) {
return false;
}
return isInteger(s) || isLong(s) || isDouble(s);
} else {
return false;
}
}
/** Returns whether {@code s} parses as a 32-bit signed integer. */
private static boolean isInteger(String s) {
    try {
        Integer.parseInt(s);
        return true;
    } catch (NumberFormatException e) {
        return false;
    }
}
private static boolean isLong(String s) {
boolean flag = true;
try {
Long.parseLong(s);
} catch (NumberFormatException e) {
flag = false;
}
return flag;
}
/** Returns whether {@code s} parses as a double-precision floating-point number. */
private static boolean isDouble(String s) {
    try {
        Double.parseDouble(s);
        return true;
    } catch (NumberFormatException e) {
        return false;
    }
}
public static boolean isDigit(Object obj) {
if ((obj instanceof Long)
|| (obj instanceof Integer)
|| (obj instanceof Short)
|| (obj instanceof Byte)) {
return true;
}
if (obj instanceof String) {
String s = obj.toString();
if (s.isEmpty()) {
return false;
}
return StringUtils.isNumeric(s);
} else {
return false;
}
}
public static boolean isAlpha(Object obj) {
if (obj == null) {
return false;
}
if (!(obj instanceof String)) {
return false;
}
String s = obj.toString();
if ("".equals(s)) {
return false;
}
return StringUtils.isAlpha(s);
}
/**
 * Returns the absolute value of {@code str}'s hash code, or {@link Integer#MIN_VALUE} for
 * null input (preserving the existing null sentinel).
 *
 * <p>Fix: {@code Math.abs(Integer.MIN_VALUE)} is still negative, so a string whose hash is
 * {@code Integer.MIN_VALUE} used to escape the "non-negative" contract; that single case is
 * now mapped to {@link Integer#MAX_VALUE}.
 */
public static Integer hashCode(String str) {
    if (str == null) {
        return Integer.MIN_VALUE;
    }
    int h = str.hashCode();
    return h == Integer.MIN_VALUE ? Integer.MAX_VALUE : Math.abs(h);
}
/**
 * Returns whether {@code s} contains a match of {@code regex} (SQL REGEXP/RLIKE semantics).
 *
 * <p>Compiled patterns are cached per thread. An empty pattern, an invalid pattern, or any
 * other failure is logged and mapped to false. NOTE(review): a null {@code s} appears to
 * surface as an exception inside {@code matcher(...)} and is thus also mapped to false —
 * confirm this is intended.
 */
public static Boolean regExp(String s, String regex) {
if (regex.length() == 0) {
return false;
}
try {
return (REGEXP_PATTERN_CACHE.get(regex)).matcher(s).find(0);
} catch (Exception e) {
LOG.error("Exception when compile and match regex:" + regex + " on: " + s, e);
return false;
}
}
public static Byte bitAnd(Byte a, Byte b) {
if (a == null || b == null) {
return 0;
}
return (byte) (a & b);
}
public static Short bitAnd(Short a, Short b) {
if (a == null || b == null) {
return 0;
}
return (short) (a & b);
}
public static Integer bitAnd(Integer a, Integer b) {
if (a == null || b == null) {
return 0;
}
return a & b;
}
public static Long bitAnd(Long a, Long b) {
if (a == null || b == null) {
return 0L;
}
return a & b;
}
public static Byte bitNot(Byte a) {
if (a == null) {
a = 0;
}
return (byte) (~a);
}
public static Short bitNot(Short a) {
if (a == null) {
a = 0;
}
return (short) (~a);
}
public static Integer bitNot(Integer a) {
if (a == null) {
a = 0;
}
return ~a;
}
public static Long bitNot(Long a) {
if (a == null) {
a = 0L;
}
return ~a;
}
public static Byte bitOr(Byte a, Byte b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return (byte) (a | b);
}
public static Short bitOr(Short a, Short b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return (short) (a | b);
}
/** Bitwise OR of two boxed ints; a null operand is treated as 0. */
public static Integer bitOr(Integer a, Integer b) {
    int left = (a == null) ? 0 : a;
    int right = (b == null) ? 0 : b;
    return left | right;
}
public static Long bitOr(Long a, Long b) {
if (a == null || b == null) {
if (a == null) {
a = 0L;
}
if (b == null) {
b = 0L;
}
}
return a | b;
}
public static Byte bitXor(Byte a, Byte b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return (byte) (a ^ b);
}
public static Short bitXor(Short a, Short b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return (short) (a ^ b);
}
public static Integer bitXor(Integer a, Integer b) {
if (a == null || b == null) {
if (a == null) {
a = 0;
}
if (b == null) {
b = 0;
}
}
return a ^ b;
}
public static Long bitXor(Long a, Long b) {
if (a == null || b == null) {
if (a == null) {
a = 0L;
}
if (b == null) {
b = 0L;
}
}
return a ^ b;
}
public static String toBase64(BinaryStringData bs) {
return toBase64(bs.toBytes());
}
public static String toBase64(byte[] bytes) {
return Base64.getEncoder().encodeToString(bytes);
}
public static BinaryStringData fromBase64(BinaryStringData bs) {
return BinaryStringData.fromBytes(Base64.getDecoder().decode(bs.toBytes()));
}
public static BinaryStringData fromBase64(byte[] bytes) {
return BinaryStringData.fromBytes(Base64.getDecoder().decode(bytes));
}
public static String uuid() {
return UUID.randomUUID().toString();
}
public static String uuid(byte[] b) {
return UUID.nameUUIDFromBytes(b).toString();
}
/** SQL <code>TRUNCATE</code> operator applied to int values. */
public static int struncate(int b0) {
return struncate(b0, 0);
}
public static int struncate(int b0, int b1) {
return (int) struncate((long) b0, b1);
}
/** SQL <code>TRUNCATE</code> operator applied to long values. */
public static long struncate(long b0) {
return struncate(b0, 0);
}
public static long struncate(long b0, int b1) {
return castToIntegral(struncate(castFrom(b0, 38, 18), b1));
}
/** SQL <code>TRUNCATE</code> operator applied to BigDecimal values. */
public static DecimalData struncate(DecimalData b0) {
return struncate(b0, 0);
}
public static DecimalData struncate(DecimalData b0, int b1) {
if (b1 >= b0.scale()) {
return b0;
}
BigDecimal b2 =
b0.toBigDecimal()
.movePointRight(b1)
.setScale(0, RoundingMode.DOWN)
.movePointLeft(b1);
int p = b0.precision();
int s = b0.scale();
if (b1 < 0) {
return DecimalData.fromBigDecimal(b2, Math.min(38, 1 + p - s), 0);
} else {
return DecimalData.fromBigDecimal(b2, 1 + p - s + b1, b1);
}
}
/** SQL <code>TRUNCATE</code> operator applied to double values. */
public static float struncate(float b0) {
return struncate(b0, 0);
}
public static float struncate(float b0, int b1) {
return (float) doubleValue(struncate(castFrom((double) b0, 38, 18), b1));
}
/** SQL <code>TRUNCATE</code> operator applied to double values. */
public static double struncate(double b0) {
return struncate(b0, 0);
}
public static double struncate(double b0, int b1) {
return doubleValue(struncate(castFrom(b0, 38, 18), b1));
}
/**
 * Compares two byte arrays in lexicographical order, treating each byte as unsigned.
 *
 * <p>The result is positive if {@code array1} is greater than {@code array2}, negative if
 * {@code array1} is less than {@code array2}, and 0 if they are equal. A shared prefix is
 * broken by length.
 *
 * <p>Note: Currently, this is used in {@code ScalarOperatorGens} for comparing two fields of
 * binary or varbinary type.
 *
 * @param array1 byte array to compare.
 * @param array2 byte array to compare.
 * @return an int whose sign indicates the ordering
 */
public static int byteArrayCompare(byte[] array1, byte[] array2) {
    final int prefixLen = Math.min(array1.length, array2.length);
    for (int k = 0; k < prefixLen; k++) {
        int left = array1[k] & 0xff;   // unsigned view of the byte
        int right = array2[k] & 0xff;
        if (left != right) {
            return left - right;
        }
    }
    // Equal prefix: the shorter array sorts first.
    return array1.length - array2.length;
}
/**
 * SQL INITCAP(string) function: capitalizes the first ASCII letter of each word and
 * lowercases the remaining letters. Digits continue a word; every other character
 * (including whitespace) acts as a word separator and is copied through unchanged.
 * Only ASCII [A-Za-z0-9] is considered alphanumeric.
 */
public static String initcap(String s) {
    final StringBuilder out = new StringBuilder(s.length());
    boolean atWordStart = true;
    for (int i = 0; i < s.length(); i++) {
        char ch = s.charAt(i);
        final boolean digit = ch >= '0' && ch <= '9';
        final boolean upper = ch >= 'A' && ch <= 'Z';
        final boolean lower = ch >= 'a' && ch <= 'z';
        if (atWordStart) {
            if (lower) {
                ch = (char) (ch - 32); // uppercase the first letter of the word
            }
            if (digit || upper || lower) {
                atWordStart = false; // we are now inside a word
            }
        } else {
            if (upper) {
                ch = (char) (ch + 32); // lowercase letters inside a word
            } else if (!digit && !lower) {
                atWordStart = true; // separator: the next alnum starts a new word
            }
        }
        out.append(ch);
    }
    return out.toString();
}
}
| SqlFunctionUtils |
java | elastic__elasticsearch | x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectRealmTests.java | {
"start": 3265,
"end": 31702
} | class ____ extends OpenIdConnectTestCase {
private Settings globalSettings;
private Environment env;
private ThreadContext threadContext;
@Before
public void setupEnv() {
globalSettings = Settings.builder().put("path.home", createTempDir()).build();
env = TestEnvironment.newEnvironment(globalSettings);
threadContext = new ThreadContext(globalSettings);
}
public void testAuthentication() throws Exception {
final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
final String principal = randomAlphaOfLength(12);
AtomicReference<UserRoleMapper.UserData> userData = new AtomicReference<>();
doAnswer(getAnswer(userData)).when(roleMapper).resolveRoles(any(UserRoleMapper.UserData.class), anyActionListener());
final boolean notPopulateMetadata = randomBoolean();
final String authenticatingRealm = randomBoolean() ? REALM_NAME : null;
AuthenticationResult<User> result = authenticateWithOidc(
principal,
roleMapper,
notPopulateMetadata,
false,
authenticatingRealm,
null
);
assertThat(result, notNullValue());
assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.SUCCESS));
assertThat(result.getValue().principal(), equalTo(principal));
assertThat(result.getValue().email(), equalTo("cbarton@shield.gov"));
assertThat(result.getValue().fullName(), equalTo("Clinton Barton"));
assertThat(result.getValue().roles(), arrayContainingInAnyOrder("kibana_user", "role1"));
if (notPopulateMetadata) {
assertThat(result.getValue().metadata(), anEmptyMap());
} else {
assertThat(result.getValue().metadata().get("oidc(iss)"), equalTo("https://op.company.org"));
assertThat(result.getValue().metadata().get("oidc(name)"), equalTo("Clinton Barton"));
final Object groups = result.getValue().metadata().get("oidc(groups)");
assertThat(groups, notNullValue());
assertThat(groups, instanceOf(Collection.class));
assertThat((Collection<?>) groups, contains("group1", "group2", "groups3"));
}
}
public void testClaimPropertyMapping() throws Exception {
final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
final String principal = randomAlphaOfLength(12);
AtomicReference<UserRoleMapper.UserData> userData = new AtomicReference<>();
doAnswer(getAnswer(userData)).when(roleMapper).resolveRoles(any(UserRoleMapper.UserData.class), anyActionListener());
Map<String, Object> claimsWithObject = Map.of(
"groups",
List.of(Map.of("key1", List.of("value1", "value2")), Map.of("key2", List.of("value1", "value2")))
);
Map<String, Object> claimsWithNumber = Map.of("groups", List.of(2, "value2"));
Exception e = expectThrows(
Exception.class,
() -> authenticateWithOidc(principal, roleMapper, false, false, REALM_NAME, claimsWithObject)
);
Exception e2 = expectThrows(
Exception.class,
() -> authenticateWithOidc(principal, roleMapper, false, false, REALM_NAME, claimsWithNumber)
);
assertThat(e.getCause().getMessage(), containsString("expects claim [groups] with String or a String Array value"));
assertThat(e2.getCause().getMessage(), containsString("expects claim [groups] with String or a String Array value"));
}
public void testClaimMetadataMapping() throws Exception {
final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
final String principal = randomAlphaOfLength(12);
AtomicReference<UserRoleMapper.UserData> userData = new AtomicReference<>();
doAnswer(getAnswer(userData)).when(roleMapper).resolveRoles(any(UserRoleMapper.UserData.class), anyActionListener());
Map<String, Object> claims = Map.of(
"string",
"String",
"number",
232,
"boolean",
true,
"string_array",
List.of("one", "two", "three"),
"number_array",
List.of(1, 2, 3),
"boolean_array",
List.of(true, false, true),
"object",
Map.of("key", List.of("value1", "value2")),
"object_array",
List.of(Map.of("key1", List.of("value1", "value2")), Map.of("key2", List.of("value1", "value2")))
);
AuthenticationResult<User> result = authenticateWithOidc(principal, roleMapper, false, false, REALM_NAME, claims);
assertThat(result, notNullValue());
assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.SUCCESS));
assertThat(result.getValue().principal(), equalTo(principal));
assertThat(result.getValue().email(), equalTo("cbarton@shield.gov"));
assertThat(result.getValue().fullName(), equalTo("Clinton Barton"));
assertThat(result.getValue().roles(), arrayContainingInAnyOrder("kibana_user", "role1"));
assertTrue(result.getValue().metadata().containsKey("oidc(string)"));
assertTrue(result.getValue().metadata().containsKey("oidc(number)"));
assertTrue(result.getValue().metadata().containsKey("oidc(boolean)"));
assertTrue(result.getValue().metadata().containsKey("oidc(string_array)"));
assertTrue(result.getValue().metadata().containsKey("oidc(boolean_array)"));
assertTrue(result.getValue().metadata().containsKey("oidc(number_array)"));
assertFalse(result.getValue().metadata().containsKey("oidc(object_array)"));
assertFalse(result.getValue().metadata().containsKey("oidc(object)"));
}
public void testWithAuthorizingRealm() throws Exception {
final UserRoleMapper roleMapper = mock(UserRoleMapper.class);
final String principal = randomAlphaOfLength(12);
doAnswer(invocation -> {
assert invocation.getArguments().length == 2;
@SuppressWarnings("unchecked")
ActionListener<Set<String>> listener = (ActionListener<Set<String>>) invocation.getArguments()[1];
listener.onFailure(new RuntimeException("Role mapping should not be called"));
return null;
}).when(roleMapper).resolveRoles(any(UserRoleMapper.UserData.class), anyActionListener());
final String authenticatingRealm = randomBoolean() ? REALM_NAME : null;
AuthenticationResult<User> result = authenticateWithOidc(principal, roleMapper, randomBoolean(), true, authenticatingRealm, null);
assertThat(result, notNullValue());
assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.SUCCESS));
assertThat(result.getValue().principal(), equalTo(principal));
assertThat(result.getValue().email(), equalTo("cbarton@shield.gov"));
assertThat(result.getValue().fullName(), equalTo("Clinton Barton"));
assertThat(result.getValue().roles(), arrayContainingInAnyOrder("lookup_user_role"));
assertThat(result.getValue().metadata().entrySet(), Matchers.iterableWithSize(1));
assertThat(result.getValue().metadata().get("is_lookup"), Matchers.equalTo(true));
assertNotNull(result.getMetadata().get(CONTEXT_TOKEN_DATA));
assertThat(result.getMetadata().get(CONTEXT_TOKEN_DATA), instanceOf(Map.class));
@SuppressWarnings("unchecked")
Map<String, Object> tokenMetadata = (Map<String, Object>) result.getMetadata().get(CONTEXT_TOKEN_DATA);
assertThat(tokenMetadata.get("id_token_hint"), equalTo("thisis.aserialized.jwt"));
}
public void testAuthenticationWithWrongRealm() throws Exception {
final String principal = randomAlphaOfLength(12);
AuthenticationResult<User> result = authenticateWithOidc(
principal,
mock(UserRoleMapper.class),
randomBoolean(),
true,
REALM_NAME + randomAlphaOfLength(8),
null
);
assertThat(result, notNullValue());
assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.CONTINUE));
}
public void testClaimPatternParsing() throws Exception {
final Settings.Builder builder = getBasicRealmSettings();
builder.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getPattern()), "^OIDC-(.+)");
final RealmConfig config = buildConfig(builder.build(), threadContext);
final ClaimSetting principalSetting = new ClaimSetting(OpenIdConnectRealmSettings.TYPE, "principal");
final ClaimParser parser = ClaimParser.forSetting(logger, principalSetting, config, true);
final JWTClaimsSet claims = new JWTClaimsSet.Builder().subject("OIDC-cbarton")
.audience("https://rp.elastic.co/cb")
.expirationTime(Date.from(now().plusSeconds(3600)))
.issueTime(Date.from(now().minusSeconds(5)))
.jwtID(randomAlphaOfLength(8))
.issuer("https://op.company.org")
.build();
assertThat(parser.getClaimValue(claims), equalTo("cbarton"));
}
public void testInvalidPrincipalClaimPatternParsing() {
final OpenIdConnectAuthenticator authenticator = mock(OpenIdConnectAuthenticator.class);
final String authenticatingRealm = randomBoolean() ? REALM_NAME : null;
final OpenIdConnectToken token = new OpenIdConnectToken("", new State(), new Nonce(), authenticatingRealm);
final Settings.Builder builder = getBasicRealmSettings();
builder.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getPattern()), "^OIDC-(.+)");
final RealmConfig config = buildConfig(builder.build(), threadContext);
final OpenIdConnectRealm realm = new OpenIdConnectRealm(config, authenticator, null);
final JWTClaimsSet claims = new JWTClaimsSet.Builder().subject("cbarton@avengers.com")
.audience("https://rp.elastic.co/cb")
.expirationTime(Date.from(now().plusSeconds(3600)))
.issueTime(Date.from(now().minusSeconds(5)))
.jwtID(randomAlphaOfLength(8))
.issuer("https://op.company.org")
.build();
doAnswer((i) -> {
@SuppressWarnings("unchecked")
ActionListener<JWTClaimsSet> listener = (ActionListener<JWTClaimsSet>) i.getArguments()[1];
listener.onResponse(claims);
return null;
}).when(authenticator).authenticate(any(OpenIdConnectToken.class), anyActionListener());
final PlainActionFuture<AuthenticationResult<User>> future = new PlainActionFuture<>();
realm.authenticate(token, future);
final AuthenticationResult<User> result = future.actionGet();
assertThat(result.getStatus(), equalTo(AuthenticationResult.Status.CONTINUE));
assertThat(result.getMessage(), containsString("claims.principal"));
assertThat(result.getMessage(), containsString("sub"));
assertThat(result.getMessage(), containsString("^OIDC-(.+)"));
}
public void testBuildRelyingPartyConfigWithoutOpenIdScope() {
final Settings.Builder settingsBuilder = Settings.builder()
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code")
.putList(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES), Arrays.asList("scope1", "scope2"))
.setSecureSettings(getSecureSettings());
final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build(), threadContext), null, null);
final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, null, null);
final String state = response.getState();
final String nonce = response.getNonce();
assertEqualUrlStrings(
response.getAuthenticationRequestUrl(),
"https://op.example.com/login?scope=scope1+scope2+openid&response_type=code"
+ "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb&state="
+ state
+ "&nonce="
+ nonce
+ "&client_id=rp-my"
);
assertThat(response.getRealmName(), equalTo(REALM_NAME));
}
public void testBuildingAuthenticationRequest() {
final Settings.Builder settingsBuilder = Settings.builder()
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code")
.putList(
getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REQUESTED_SCOPES),
Arrays.asList("openid", "scope1", "scope2")
)
.setSecureSettings(getSecureSettings());
final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build(), threadContext), null, null);
final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, null, null);
final String state = response.getState();
final String nonce = response.getNonce();
assertEqualUrlStrings(
response.getAuthenticationRequestUrl(),
"https://op.example.com/login?scope=openid+scope1+scope2&response_type=code"
+ "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb&state="
+ state
+ "&nonce="
+ nonce
+ "&client_id=rp-my"
);
assertThat(response.getRealmName(), equalTo(REALM_NAME));
}
public void testBuilidingAuthenticationRequestWithDefaultScope() {
final Settings.Builder settingsBuilder = Settings.builder()
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code")
.setSecureSettings(getSecureSettings());
;
final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build(), threadContext), null, null);
final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(null, null, null);
final String state = response.getState();
final String nonce = response.getNonce();
assertEqualUrlStrings(
response.getAuthenticationRequestUrl(),
"https://op.example.com/login?scope=openid&response_type=code"
+ "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb&state="
+ state
+ "&nonce="
+ nonce
+ "&client_id=rp-my"
);
assertThat(response.getRealmName(), equalTo(REALM_NAME));
}
public void testBuildLogoutResponse() throws Exception {
final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(getBasicRealmSettings().build(), threadContext), null, null);
// Random strings, as we will not validate the token here
final JWT idToken = generateIdToken(randomAlphaOfLength(8), randomAlphaOfLength(8), randomAlphaOfLength(8));
final OpenIdConnectLogoutResponse logoutResponse = realm.buildLogoutResponse(idToken);
final String endSessionUrl = logoutResponse.getEndSessionUrl();
final Map<String, String> parameters = new HashMap<>();
RestUtils.decodeQueryString(endSessionUrl, endSessionUrl.indexOf("?") + 1, parameters);
assertThat(parameters, aMapWithSize(3));
assertThat(parameters, hasKey("id_token_hint"));
assertThat(parameters, hasKey("post_logout_redirect_uri"));
assertThat(parameters, hasKey("state"));
}
public void testBuildLogoutResponseFromEndsessionEndpointWithExistingParameters() throws Exception {
final Settings.Builder realmSettingsWithFunkyEndpoint = getBasicRealmSettings();
realmSettingsWithFunkyEndpoint.put(
getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ENDSESSION_ENDPOINT),
"https://op.example.org/logout?parameter=123"
);
final OpenIdConnectRealm realm = new OpenIdConnectRealm(
buildConfig(realmSettingsWithFunkyEndpoint.build(), threadContext),
null,
null
);
// Random strings, as we will not validate the token here
final JWT idToken = generateIdToken(randomAlphaOfLength(8), randomAlphaOfLength(8), randomAlphaOfLength(8));
final OpenIdConnectLogoutResponse logoutResponse = realm.buildLogoutResponse(idToken);
final String endSessionUrl = logoutResponse.getEndSessionUrl();
final Map<String, String> parameters = new HashMap<>();
RestUtils.decodeQueryString(endSessionUrl, endSessionUrl.indexOf("?") + 1, parameters);
assertThat(parameters, aMapWithSize(4));
assertThat(parameters, hasKey("parameter"));
assertThat(parameters, hasKey("post_logout_redirect_uri"));
assertThat(parameters, hasKey("state"));
assertThat(parameters, hasKey("id_token_hint"));
}
public void testBuildingAuthenticationRequestWithExistingStateAndNonce() {
final Settings.Builder settingsBuilder = Settings.builder()
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code")
.setSecureSettings(getSecureSettings());
;
final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build(), threadContext), null, null);
final String state = new State().getValue();
final String nonce = new Nonce().getValue();
final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(state, nonce, null);
assertEqualUrlStrings(
response.getAuthenticationRequestUrl(),
"https://op.example.com/login?scope=openid&response_type=code"
+ "&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb&state="
+ state
+ "&nonce="
+ nonce
+ "&client_id=rp-my"
);
assertThat(response.getRealmName(), equalTo(REALM_NAME));
}
public void testBuildingAuthenticationRequestWithLoginHint() {
final Settings.Builder settingsBuilder = Settings.builder()
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_AUTHORIZATION_ENDPOINT), "https://op.example.com/login")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_TOKEN_ENDPOINT), "https://op.example.com/token")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_ISSUER), "https://op.example.com")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.OP_JWKSET_PATH), "https://op.example.com/jwks.json")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.PRINCIPAL_CLAIM.getClaim()), "sub")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_REDIRECT_URI), "https://rp.my.com/cb")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_CLIENT_ID), "rp-my")
.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.RP_RESPONSE_TYPE), "code")
.setSecureSettings(getSecureSettings());
;
final OpenIdConnectRealm realm = new OpenIdConnectRealm(buildConfig(settingsBuilder.build(), threadContext), null, null);
final String state = new State().getValue();
final String nonce = new Nonce().getValue();
final String thehint = randomAlphaOfLength(8);
final OpenIdConnectPrepareAuthenticationResponse response = realm.buildAuthenticationRequestUri(state, nonce, thehint);
assertEqualUrlStrings(
response.getAuthenticationRequestUrl(),
"https://op.example.com/login?login_hint="
+ thehint
+ "&scope=openid&response_type=code&redirect_uri=https%3A%2F%2Frp.my.com%2Fcb&state="
+ state
+ "&nonce="
+ nonce
+ "&client_id=rp-my"
);
assertThat(response.getRealmName(), equalTo(REALM_NAME));
}
private void assertEqualUrlStrings(String actual, String expected) {
final int endOfPath = actual.indexOf('?');
assertThat(endOfPath, greaterThan(-1));
assertThat(actual.substring(0, endOfPath + 1), equalTo(expected.substring(0, endOfPath + 1)));
final HashMap<String, String> actualParams = new HashMap<>();
RestUtils.decodeQueryString(actual, endOfPath + 1, actualParams);
final HashMap<String, String> expectedParams = new HashMap<>();
RestUtils.decodeQueryString(expected, endOfPath + 1, expectedParams);
assertThat(actualParams, equalTo(expectedParams));
}
private AuthenticationResult<User> authenticateWithOidc(
String principal,
UserRoleMapper roleMapper,
boolean notPopulateMetadata,
boolean useAuthorizingRealm,
String authenticatingRealm,
@Nullable Map<String, Object> additionalClaims
) throws Exception {
RealmConfig.RealmIdentifier realmIdentifier = new RealmConfig.RealmIdentifier("mock", "mock_lookup");
final MockLookupRealm lookupRealm = new MockLookupRealm(
new RealmConfig(
realmIdentifier,
Settings.builder().put(globalSettings).put(getFullSettingKey(realmIdentifier, RealmSettings.ORDER_SETTING), 0).build(),
env,
threadContext
)
);
final OpenIdConnectAuthenticator authenticator = mock(OpenIdConnectAuthenticator.class);
final Settings.Builder builder = getBasicRealmSettings();
if (notPopulateMetadata) {
builder.put(getFullSettingKey(REALM_NAME, OpenIdConnectRealmSettings.POPULATE_USER_METADATA), false);
}
if (useAuthorizingRealm) {
builder.putList(
getFullSettingKey(new RealmConfig.RealmIdentifier("oidc", REALM_NAME), DelegatedAuthorizationSettings.AUTHZ_REALMS),
lookupRealm.name()
);
lookupRealm.registerUser(
new User(
principal,
new String[] { "lookup_user_role" },
"Clinton Barton",
"cbarton@shield.gov",
Collections.singletonMap("is_lookup", true),
true
)
);
}
final RealmConfig config = buildConfig(builder.build(), threadContext);
final OpenIdConnectRealm realm = new OpenIdConnectRealm(config, authenticator, roleMapper);
initializeRealms(realm, lookupRealm);
final OpenIdConnectToken token = new OpenIdConnectToken("", new State(), new Nonce(), authenticatingRealm);
final JWTClaimsSet.Builder claimsBuilder = new JWTClaimsSet.Builder().subject(principal)
.audience("https://rp.elastic.co/cb")
.expirationTime(Date.from(now().plusSeconds(3600)))
.issueTime(Date.from(now().minusSeconds(5)))
.jwtID(randomAlphaOfLength(8))
.issuer("https://op.company.org")
.claim("groups", Arrays.asList("group1", "group2", "groups3"))
.claim("mail", "cbarton@shield.gov")
.claim("name", "Clinton Barton")
.claim("id_token_hint", "thisis.aserialized.jwt");
if (additionalClaims != null) {
for (Map.Entry<String, Object> entry : additionalClaims.entrySet()) {
claimsBuilder.claim(entry.getKey(), entry.getValue());
}
}
final JWTClaimsSet claims = claimsBuilder.build();
doAnswer((i) -> {
@SuppressWarnings("unchecked")
ActionListener<JWTClaimsSet> listener = (ActionListener<JWTClaimsSet>) i.getArguments()[1];
listener.onResponse(claims);
return null;
}).when(authenticator).authenticate(any(OpenIdConnectToken.class), anyActionListener());
final PlainActionFuture<AuthenticationResult<User>> future = new PlainActionFuture<>();
realm.authenticate(token, future);
return future.get();
}
private void initializeRealms(Realm... realms) {
MockLicenseState licenseState = mock(MockLicenseState.class);
when(licenseState.isAllowed(Security.DELEGATED_AUTHORIZATION_FEATURE)).thenReturn(true);
final List<Realm> realmList = Arrays.asList(realms);
for (Realm realm : realms) {
realm.initialize(realmList, licenseState);
}
}
private Answer<Class<Void>> getAnswer(AtomicReference<UserRoleMapper.UserData> userData) {
return invocation -> {
assert invocation.getArguments().length == 2;
userData.set((UserRoleMapper.UserData) invocation.getArguments()[0]);
@SuppressWarnings("unchecked")
ActionListener<Set<String>> listener = (ActionListener<Set<String>>) invocation.getArguments()[1];
listener.onResponse(new HashSet<>(Arrays.asList("kibana_user", "role1")));
return null;
};
}
}
| OpenIdConnectRealmTests |
java | apache__dubbo | dubbo-rpc/dubbo-rpc-triple/src/main/java/org/apache/dubbo/rpc/protocol/tri/Http3Exchanger.java | {
"start": 2675,
"end": 7004
} | class ____ {
private static final FluentLogger LOGGER = FluentLogger.of(Http3Exchanger.class);
private static final boolean HAS_NETTY_HTTP3 = ClassUtils.isPresent("io.netty.handler.codec.http3.Http3");
private static final ConcurrentHashMap<String, RemotingServer> SERVERS = new ConcurrentHashMap<>();
private static final Map<String, AbstractConnectionClient> CLIENTS = new ConcurrentHashMap<>(16);
private static final ChannelHandler HANDLER = new ChannelHandlerAdapter();
private static boolean ENABLED = false;
private static boolean NEGOTIATION_ENABLED = true;
private Http3Exchanger() {}
public static void init(Configuration configuration) {
ENABLED = configuration.getBoolean(Constants.H3_SETTINGS_HTTP3_ENABLED, false);
NEGOTIATION_ENABLED = configuration.getBoolean(Constants.H3_SETTINGS_HTTP3_NEGOTIATION, true);
if (ENABLED && !HAS_NETTY_HTTP3) {
throw new IllegalStateException("Class for netty http3 support not found");
}
}
public static boolean isEnabled(URL url) {
return ENABLED || HAS_NETTY_HTTP3 && url.getParameter(Constants.HTTP3_KEY, false);
}
public static RemotingServer bind(URL url) {
if (isEnabled(url)) {
return ConcurrentHashMapUtils.computeIfAbsent(SERVERS, url.getAddress(), addr -> {
try {
URL serverUrl = url.putAttribute(PIPELINE_CONFIGURATOR_KEY, configServerPipeline(url));
return new NettyHttp3Server(serverUrl, HANDLER);
} catch (RemotingException e) {
throw new RuntimeException(e);
}
});
}
return null;
}
private static Consumer<ChannelPipeline> configServerPipeline(URL url) {
NettyHttp3ProtocolSelectorHandler selectorHandler =
new NettyHttp3ProtocolSelectorHandler(url, ScopeModelUtil.getFrameworkModel(url.getScopeModel()));
return pipeline -> {
pipeline.addLast(new Http3ServerConnectionHandler(new ChannelInitializer<QuicStreamChannel>() {
@Override
protected void initChannel(QuicStreamChannel ch) {
ch.pipeline()
.addLast(new HttpWriteQueueHandler())
.addLast(new FlushConsolidationHandler(64, true))
.addLast(NettyHttp3FrameCodec.INSTANCE)
.addLast(selectorHandler);
}
}));
pipeline.addLast(new Http3TripleServerConnectionHandler());
};
}
public static AbstractConnectionClient connect(URL url) {
return CLIENTS.compute(url.getAddress(), (address, client) -> {
if (client == null) {
URL clientUrl = url.putAttribute(PIPELINE_CONFIGURATOR_KEY, configClientPipeline(url));
AbstractConnectionClient connectionClient = NEGOTIATION_ENABLED
? Helper.createAutoSwitchClient(clientUrl, HANDLER)
: Helper.createHttp3Client(clientUrl, HANDLER);
connectionClient.addCloseListener(() -> CLIENTS.remove(address, connectionClient));
client = connectionClient;
} else {
client.retain();
}
return client;
});
}
private static Consumer<ChannelPipeline> configClientPipeline(URL url) {
int heartbeat = UrlUtils.getHeartbeat(url);
int closeTimeout = UrlUtils.getCloseTimeout(url);
return pipeline -> {
pipeline.addLast(Http3ClientFrameCodec.INSTANCE);
pipeline.addLast(new IdleStateHandler(heartbeat, 0, 0, TimeUnit.MILLISECONDS));
pipeline.addLast(new TriplePingPongHandler(closeTimeout));
};
}
public static void close() {
if (SERVERS.isEmpty()) {
return;
}
ArrayList<RemotingServer> toClose = new ArrayList<>(SERVERS.values());
SERVERS.clear();
for (RemotingServer server : toClose) {
try {
server.close();
} catch (Throwable t) {
LOGGER.error(LoggerCodeConstants.PROTOCOL_ERROR_CLOSE_SERVER, "Close http3 server failed", t);
}
}
}
}
| Http3Exchanger |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/component/proxy/ComponentBasicProxyTest.java | {
"start": 911,
"end": 2414
} | class ____ {
@Test
@JiraKey(value = "HHH-12786")
public void testBasicProxyingWithProtectedMethodCalledInConstructor(SessionFactoryScope scope) {
scope.inTransaction( (entityManager) -> {
Adult adult = new Adult();
adult.setName( "Arjun Kumar" );
entityManager.persist( adult );
} );
scope.inTransaction( (entityManager) -> {
List<Adult> adultsCalledArjun = entityManager
.createQuery( "SELECT a from Adult a WHERE a.name = :name", Adult.class )
.setParameter( "name", "Arjun Kumar" ).getResultList();
Adult adult = adultsCalledArjun.iterator().next();
entityManager.remove( adult );
} );
}
@Test
@JiraKey(value = "HHH-12791")
public void testOnlyOneProxyClassGenerated(DomainModelScope domainModelScope, SessionFactoryScope sfScope) {
sfScope.getSessionFactory();
final PersistentClass personDescriptor = domainModelScope.getDomainModel().getEntityBinding( Person.class.getName() );
final CompositeTypeImplementor componentType = (CompositeTypeImplementor) personDescriptor.getIdentifierMapper().getType();
final EmbeddableValuedModelPart embedded = componentType.getMappingModelPart();
final EmbeddableInstantiator instantiator = embedded.getEmbeddableTypeDescriptor()
.getRepresentationStrategy()
.getInstantiator();
final Object instance1 = instantiator.instantiate( null );
final Object instance2 = instantiator.instantiate( null );
assertThat( instance1.getClass() ).isEqualTo( instance2.getClass() );
}
}
| ComponentBasicProxyTest |
java | reactor__reactor-core | reactor-test/src/main/java/reactor/test/scheduler/VirtualTimeScheduler.java | {
"start": 18298,
"end": 20431
} | class ____ extends AtomicReference<@Nullable Disposable>
implements Runnable, Disposable {
final Runnable decoratedRun;
final long periodInNanoseconds;
long count;
long lastNowNanoseconds;
long startInNanoseconds;
PeriodicTask(long firstStartInNanoseconds,
Runnable decoratedRun,
long firstNowNanoseconds,
long periodInNanoseconds) {
this.decoratedRun = decoratedRun;
this.periodInNanoseconds = periodInNanoseconds;
lastNowNanoseconds = firstNowNanoseconds;
startInNanoseconds = firstStartInNanoseconds;
lazySet(EMPTY);
}
@Override
public void run() {
decoratedRun.run();
if (get() != CANCELLED && !shutdown) {
long nextTick;
long nowNanoseconds = nanoTime;
// If the clock moved in a direction quite a bit, rebase the repetition period
if (nowNanoseconds + CLOCK_DRIFT_TOLERANCE_NANOSECONDS < lastNowNanoseconds || nowNanoseconds >= lastNowNanoseconds + periodInNanoseconds + CLOCK_DRIFT_TOLERANCE_NANOSECONDS) {
nextTick = nowNanoseconds + periodInNanoseconds;
/*
* Shift the start point back by the drift as if the whole thing
* started count periods ago.
*/
startInNanoseconds = nextTick - (periodInNanoseconds * (++count));
}
else {
nextTick = startInNanoseconds + (++count * periodInNanoseconds);
}
lastNowNanoseconds = nowNanoseconds;
long delay = nextTick - nowNanoseconds;
replace(this, schedule(this, delay, TimeUnit.NANOSECONDS));
}
}
@Override
public void dispose() {
getAndSet(CANCELLED).dispose();
}
}
}
static final Disposable CANCELLED = Disposables.disposed();
static final Disposable EMPTY = Disposables.never();
static boolean replace(AtomicReference<@Nullable Disposable> ref, @Nullable Disposable c) {
for (; ; ) {
Disposable current = ref.get();
if (current == CANCELLED) {
if (c != null) {
c.dispose();
}
return false;
}
if (ref.compareAndSet(current, c)) {
return true;
}
}
}
static | PeriodicTask |
java | apache__camel | components/camel-xchange/src/main/java/org/apache/camel/component/xchange/XChangeMetaDataProducer.java | {
"start": 1289,
"end": 2998
} | class ____ extends DefaultProducer {
public XChangeMetaDataProducer(XChangeEndpoint endpoint) {
super(endpoint);
}
@Override
public XChangeEndpoint getEndpoint() {
return (XChangeEndpoint) super.getEndpoint();
}
@Override
public void process(Exchange exchange) throws Exception {
XChangeEndpoint endpoint = getEndpoint();
XChangeMethod method = endpoint.getConfiguration().getMethod();
if (XChangeMethod.currencies == method) {
Object body = endpoint.getCurrencies();
exchange.getMessage().setBody(body);
} else if (XChangeMethod.currencyPairs == method) {
Object body = endpoint.getCurrencyPairs();
exchange.getMessage().setBody(body);
} else if (XChangeMethod.currencyMetaData == method) {
Currency curr = exchange.getMessage().getHeader(HEADER_CURRENCY, Currency.class);
curr = curr != null ? curr : exchange.getMessage().getBody(Currency.class);
curr = curr != null ? curr : endpoint.getConfiguration().getCurrency();
Object body = endpoint.getCurrencyMetaData(curr);
exchange.getMessage().setBody(body);
} else if (XChangeMethod.currencyPairMetaData == method) {
CurrencyPair pair = exchange.getIn().getHeader(HEADER_CURRENCY_PAIR, CurrencyPair.class);
pair = pair != null ? pair : exchange.getMessage().getBody(CurrencyPair.class);
pair = pair != null ? pair : endpoint.getConfiguration().getAsCurrencyPair();
Object body = endpoint.getCurrencyPairMetaData(pair);
exchange.getMessage().setBody(body);
}
}
}
| XChangeMetaDataProducer |
java | elastic__elasticsearch | x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/tree/NodeSubclassTests.java | {
"start": 28208,
"end": 31320
} | class ____ are not interested
// (and IDE folders like eclipse)
if (className.startsWith("org.elasticsearch.xpack.ql") == false
&& className.startsWith("org.elasticsearch.xpack.sql") == false
&& className.startsWith("org.elasticsearch.xpack.eql") == false) {
return false;
}
return true;
};
protected Predicate<String> pluggableClassNameFilter() {
return CLASSNAME_FILTER;
}
private <T> Set<Class<? extends T>> innerSubclassesOf(Class<T> clazz) throws IOException {
return subclassesOf(clazz, pluggableClassNameFilter());
}
public static <T> Set<Class<? extends T>> subclassesOf(Class<T> clazz) throws IOException {
return subclassesOf(clazz, CLASSNAME_FILTER);
}
/**
* Find all subclasses of a particular class.
*/
public static <T> Set<Class<? extends T>> subclassesOf(Class<T> clazz, Predicate<String> classNameFilter) throws IOException {
@SuppressWarnings("unchecked") // The map is built this way
Set<Class<? extends T>> lookup = (Set<Class<? extends T>>) subclassCache.get(clazz);
if (lookup != null) {
return lookup;
}
Set<Class<? extends T>> results = new LinkedHashSet<>();
String[] paths = System.getProperty("java.class.path").split(System.getProperty("path.separator"));
for (String path : paths) {
Path root = PathUtils.get(path);
int rootLength = root.toString().length() + 1;
// load classes from jar files
// NIO FileSystem API is not used since it trips the SecurityManager
// https://bugs.openjdk.java.net/browse/JDK-8160798
// so iterate the jar "by hand"
if (path.endsWith(".jar") && path.contains("x-pack-ql")) {
try (JarInputStream jar = jarStream(root)) {
JarEntry je = null;
while ((je = jar.getNextJarEntry()) != null) {
String name = je.getName();
if (name.endsWith(".class")) {
String className = name.substring(0, name.length() - ".class".length()).replace("/", ".");
maybeLoadClass(clazz, className, root + "!/" + name, classNameFilter, results);
}
}
}
}
// for folders, just use the FileSystems API
else {
Files.walkFileTree(root, new SimpleFileVisitor<>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (Files.isRegularFile(file) && file.getFileName().toString().endsWith(".class")) {
String fileName = file.toString();
// Chop off the root and file extension
String className = fileName.substring(rootLength, fileName.length() - ".class".length());
// Go from "path" style to | that |
java | apache__camel | components/camel-cxf/camel-cxf-spring-rest/src/test/java/org/apache/camel/component/cxf/jaxrs/CxfRsSpringConsumerTest.java | {
"start": 1841,
"end": 4707
} | class ____ extends CamelSpringTestSupport {

    // Port reserved once per JVM by the CXF test support utilities; shared by all tests.
    private static int port1 = CXFTestSupport.getPort1();

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        // Processor that always fails — used to verify that the exception is
        // mapped by CXF to an HTTP 500 with the message exposed in a header.
        final Processor testProcessor = new Processor() {
            public void process(Exchange exchange) throws Exception {
                // just throw the CustomException here
                throw new CustomException("Here is the exception");
            }
        };
        // Processor used on the performInvocation=true route: CXF has already
        // invoked the service, so this only post-processes the response body.
        final Processor responseProcessor = new Processor() {
            public void process(Exchange exchange) throws Exception {
                // do something else with the request properties as usual
                // do something else with the response
                exchange.getMessage().getBody(Customer.class).setId(246);
            }
        };
        return new RouteBuilder() {
            public void configure() {
                // Disable Camel's error handling so exceptions propagate to CXF unchanged.
                errorHandler(new NoErrorHandlerBuilder());
                from("cxfrs://bean://rsServer").process(testProcessor);
                from("cxfrs://bean://rsServer2").process(testProcessor);
                from("cxfrs://bean://rsServerInvoke?performInvocation=true").process(responseProcessor);
            }
        };
    }

    @Override
    protected AbstractApplicationContext createApplicationContext() {
        return new ClassPathXmlApplicationContext("org/apache/camel/component/cxf/jaxrs/CxfRsSpringConsumer.xml");
    }

    // Exception thrown in the route must surface as an HTTP 500 (rsServer endpoint).
    @Test
    public void testMappingException() throws Exception {
        String address = "http://localhost:" + port1 + "/CxfRsSpringConsumerTest/customerservice/customers/126";
        doTestMappingException(address);
    }

    // Same expectation against the second endpoint (rsServer2).
    @Test
    public void testMappingException2() throws Exception {
        String address = "http://localhost:" + port1 + "/CxfRsSpringConsumerTest2/customerservice/customers/126";
        doTestMappingException(address);
    }

    // When CXF performs the invocation itself, the processor-modified id must be returned.
    @Test
    public void testInvokeCxfRsConsumer() throws Exception {
        String address = "http://localhost:" + port1 + "/CxfRsSpringConsumerInvokeService/customerservice/customers/123";
        WebClient wc = WebClient.create(address);
        Customer c = wc.accept("application/json").get(Customer.class);
        assertEquals(246L, c.getId());
    }

    // Issues a GET and asserts the 500 status plus the "exception" response header.
    private void doTestMappingException(String address) throws Exception {
        HttpGet get = new HttpGet(address);
        get.addHeader("Accept", "application/json");
        try (CloseableHttpClient httpclient = HttpClientBuilder.create().build();
             CloseableHttpResponse response = httpclient.execute(get)) {
            assertEquals(500, response.getCode(), "Get a wrong status code");
            assertEquals("exception: Here is the exception", response.getHeaders("exception")[0].toString(),
                    "Get a wrong message header");
        }
    }
}
| CxfRsSpringConsumerTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/mapping/Selectable.java | {
"start": 493,
"end": 2568
} | interface ____ permits Column, Formula {
	/**
	 * The "canonical" text of this selectable, independent of any dialect.
	 */
	String getText();

	/**
	 * The text of this selectable rendered for the given {@link Dialect},
	 * applying the dialect's quoting when the selectable is quoted.
	 */
	String getText(Dialect dialect);

	/**
	 * Whether this selectable is a formula ({@code true}) rather than a
	 * physical column ({@code false}).
	 */
	boolean isFormula();

	/**
	 * The custom read expression, if any. Only meaningful for physical
	 * columns, never for formulas.
	 *
	 * @see org.hibernate.annotations.ColumnTransformer
	 */
	String getCustomReadExpression();

	/**
	 * The custom write expression, if any. Only meaningful for physical
	 * columns, never for formulas.
	 *
	 * @see org.hibernate.annotations.ColumnTransformer
	 */
	String getCustomWriteExpression();

	/**
	 * @deprecated read-by-position rendering no longer requires generated aliases
	 */
	@Deprecated(since = "6.0")
	String getAlias(Dialect dialect);

	/**
	 * @deprecated read-by-position rendering no longer requires generated aliases
	 */
	@Deprecated(since = "6.0")
	String getAlias(Dialect dialect, Table table);

	String getTemplate(Dialect dialect, TypeConfiguration typeConfiguration);

	/**
	 * The expression used to write this selectable, falling back to the plain
	 * JDBC parameter marker when no custom write expression is configured.
	 */
	@Incubating
	default String getWriteExpr() {
		final String custom = getCustomWriteExpression();
		if ( custom == null || custom.isEmpty() ) {
			return "?";
		}
		return custom;
	}

	@Deprecated(forRemoval = true, since = "7.2")
	default String getWriteExpr(JdbcMapping jdbcMapping, Dialect dialect) {
		return jdbcMapping.getJdbcType().wrapWriteExpression( getWriteExpr(), null, dialect );
	}

	/**
	 * The write expression wrapped by the JDBC type, passing along the column
	 * size when this selectable is a physical {@link Column}.
	 */
	@Incubating
	default String getWriteExpr(JdbcMapping jdbcMapping, Dialect dialect, MappingContext mappingContext) {
		final Size size;
		if ( this instanceof Column column ) {
			size = column.getColumnSize( dialect, mappingContext );
		}
		else {
			size = null;
		}
		return jdbcMapping.getJdbcType().wrapWriteExpression( getWriteExpr(), size, dialect );
	}
}
| Selectable |
java | junit-team__junit5 | platform-tooling-support-tests/src/test/java/platform/tooling/support/tests/LocalMavenRepo.java | {
"start": 854,
"end": 918
} | class ____ implements AutoCloseable {
public static | LocalMavenRepo |
java | micronaut-projects__micronaut-core | http-netty/src/main/java/io/micronaut/http/netty/channel/NettyThreadFactory.java | {
"start": 5814,
"end": 6065
} | class ____ extends FastThreadLocalThread implements NonBlocking {
    // Netty FastThreadLocalThread additionally tagged with the NonBlocking
    // marker interface — presumably so blocking-call detection treats work on
    // this thread as event-loop work; TODO confirm against NonBlocking's contract.
    public NonBlockingFastThreadLocalThread(ThreadGroup group, Runnable target, String name) {
        super(group, target, name);
    }
}
}
| NonBlockingFastThreadLocalThread |
java | spring-projects__spring-framework | spring-jms/src/main/java/org/springframework/jms/listener/endpoint/DefaultJmsActivationSpecFactory.java | {
"start": 2967,
"end": 3697
} | class ____
* "ActivationSpecImpl" in the same package as the ResourceAdapter class.
*/
@Override
protected Class<?> determineActivationSpecClass(ResourceAdapter adapter) {
String adapterClassName = adapter.getClass().getName();
if (adapterClassName.endsWith(RESOURCE_ADAPTER_SUFFIX)) {
// for example, ActiveMQ
String providerName =
adapterClassName.substring(0, adapterClassName.length() - RESOURCE_ADAPTER_SUFFIX.length());
String specClassName = providerName + ACTIVATION_SPEC_SUFFIX;
try {
return adapter.getClass().getClassLoader().loadClass(specClassName);
}
catch (ClassNotFoundException ex) {
if (logger.isDebugEnabled()) {
logger.debug("No default <Provider>ActivationSpec | named |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java | {
"start": 2072,
"end": 8972
} | class ____ implements ResolvedExpression {

    // Whether the function was registered as temporary (also true for anonymous/inline functions).
    private final boolean isTemporary;

    // Identifier under which the function is registered; null only for anonymous functions.
    private final @Nullable FunctionIdentifier functionIdentifier;

    private final FunctionDefinition functionDefinition;

    // Resolved call arguments; defensively copied in the constructor.
    private final List<ResolvedExpression> args;

    // Output data type of the call.
    private final DataType dataType;

    @Internal
    public CallExpression(
            boolean isTemporary,
            @Nullable FunctionIdentifier functionIdentifier,
            FunctionDefinition functionDefinition,
            List<ResolvedExpression> args,
            DataType dataType) {
        this.isTemporary = isTemporary;
        this.functionIdentifier = functionIdentifier;
        this.functionDefinition =
                Preconditions.checkNotNull(
                        functionDefinition, "Function definition must not be null.");
        this.args =
                new ArrayList<>(Preconditions.checkNotNull(args, "Arguments must not be null."));
        this.dataType = Preconditions.checkNotNull(dataType, "Data type must not be null.");
    }

    /**
     * Creates a {@link CallExpression} to a permanent function (persisted in a {@link Catalog} or
     * provided by a {@link Module}).
     */
    public static CallExpression permanent(
            FunctionIdentifier functionIdentifier,
            FunctionDefinition functionDefinition,
            List<ResolvedExpression> args,
            DataType dataType) {
        return new CallExpression(
                false,
                Preconditions.checkNotNull(
                        functionIdentifier,
                        "Function identifier must not be null for permanent functions."),
                functionDefinition,
                args,
                dataType);
    }

    /**
     * Creates a {@link CallExpression} to a resolved built-in function. It assumes that the {@link
     * BuiltInFunctionDefinition} instance is provided by the framework (usually the core module).
     */
    @Internal
    public static CallExpression permanent(
            BuiltInFunctionDefinition builtInFunctionDefinition,
            List<ResolvedExpression> args,
            DataType dataType) {
        return new CallExpression(
                false,
                FunctionIdentifier.of(builtInFunctionDefinition.getName()),
                builtInFunctionDefinition,
                args,
                dataType);
    }

    /**
     * Creates a {@link CallExpression} to a temporary function (potentially shadowing a {@link
     * Catalog} function or providing a system function).
     */
    public static CallExpression temporary(
            FunctionIdentifier functionIdentifier,
            FunctionDefinition functionDefinition,
            List<ResolvedExpression> args,
            DataType dataType) {
        return new CallExpression(
                true,
                Preconditions.checkNotNull(
                        functionIdentifier,
                        "Function identifier must not be null for temporary functions."),
                functionDefinition,
                args,
                dataType);
    }

    /**
     * Creates a {@link CallExpression} to an anonymous function that has been declared inline
     * without a {@link FunctionIdentifier}.
     */
    public static CallExpression anonymous(
            FunctionDefinition functionDefinition,
            List<ResolvedExpression> args,
            DataType dataType) {
        // Anonymous functions are modeled as temporary with a null identifier.
        return new CallExpression(true, null, functionDefinition, args, dataType);
    }

    /** Whether the called function is temporary (includes anonymous functions). */
    public boolean isTemporary() {
        return isTemporary;
    }

    /** Identifier of the called function, or empty for anonymous functions. */
    public Optional<FunctionIdentifier> getFunctionIdentifier() {
        return Optional.ofNullable(functionIdentifier);
    }

    public FunctionDefinition getFunctionDefinition() {
        return functionDefinition;
    }

    /**
     * Returns a string representation of the call's function for logging or printing to a console.
     */
    public String getFunctionName() {
        if (functionIdentifier == null) {
            return functionDefinition.toString();
        } else {
            return functionIdentifier.asSummaryString();
        }
    }

    // Creates a copy of this call with new arguments and output type; all other
    // properties (temporariness, identifier, definition) are retained.
    public CallExpression replaceArgs(List<ResolvedExpression> args, DataType dataType) {
        return new CallExpression(
                this.isTemporary, this.functionIdentifier, this.functionDefinition, args, dataType);
    }

    @Override
    public DataType getOutputDataType() {
        return dataType;
    }

    @Override
    public List<ResolvedExpression> getResolvedChildren() {
        return args;
    }

    @Override
    public String asSummaryString() {
        // Renders e.g. "upper(col)" from the function name and its arguments.
        final String argList =
                args.stream()
                        .map(Expression::asSummaryString)
                        .collect(Collectors.joining(", ", "(", ")"));
        return getFunctionName() + argList;
    }

    @Override
    public String asSerializableString(SqlFactory sqlFactory) {
        // Built-in functions know their own SQL call syntax; everything else
        // is unparsed as a regular function call.
        if (functionDefinition instanceof BuiltInFunctionDefinition) {
            final BuiltInFunctionDefinition definition =
                    (BuiltInFunctionDefinition) functionDefinition;
            return definition.getCallSyntax().unparse(definition.getSqlName(), args, sqlFactory);
        } else {
            return SqlCallSyntax.FUNCTION.unparse(
                    getSerializableFunctionName(sqlFactory), args, sqlFactory);
        }
    }

    // Serializes the function name: anonymous functions are serialized inline,
    // catalog functions by object identifier, system functions by escaped name.
    private String getSerializableFunctionName(SqlFactory sqlFactory) {
        if (functionIdentifier == null) {
            return sqlFactory.serializeInlineFunction(functionDefinition);
        }
        return functionIdentifier
                .getIdentifier()
                .map(ObjectIdentifier::asSerializableString)
                .orElseGet(
                        () -> EncodingUtils.escapeIdentifier(functionIdentifier.getFunctionName()));
    }

    @Override
    public List<Expression> getChildren() {
        return Collections.unmodifiableList(this.args);
    }

    @Override
    public <R> R accept(ExpressionVisitor<R> visitor) {
        return visitor.visit(this);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        CallExpression that = (CallExpression) o;
        return isTemporary == that.isTemporary
                && Objects.equals(functionIdentifier, that.functionIdentifier)
                && functionDefinition.equals(that.functionDefinition)
                && args.equals(that.args)
                && dataType.equals(that.dataType);
    }

    @Override
    public int hashCode() {
        return Objects.hash(isTemporary, functionIdentifier, functionDefinition, args, dataType);
    }

    @Override
    public String toString() {
        return asSummaryString();
    }
}
| CallExpression |
java | grpc__grpc-java | auth/src/main/java/io/grpc/auth/GoogleAuthLibraryCallCredentials.java | {
"start": 3268,
"end": 9832
} | class ____
if (APP_ENGINE_CREDENTIALS_CLASS == null) {
requiresSpecificExecutor = false;
} else {
requiresSpecificExecutor = APP_ENGINE_CREDENTIALS_CLASS.isInstance(creds);
}
}
@Override
public void applyRequestMetadata(
RequestInfo info, Executor appExecutor, final MetadataApplier applier) {
SecurityLevel security = info.getSecurityLevel();
if (requirePrivacy && security != SecurityLevel.PRIVACY_AND_INTEGRITY) {
applier.fail(Status.UNAUTHENTICATED
.withDescription("Credentials require channel with PRIVACY_AND_INTEGRITY security level. "
+ "Observed security level: " + security));
return;
}
String authority = checkNotNull(info.getAuthority(), "authority");
final URI uri;
try {
uri = serviceUri(authority, info.getMethodDescriptor());
} catch (StatusException e) {
applier.fail(e.getStatus());
return;
}
// Credentials is expected to manage caching internally if the metadata is fetched over
// the network.
creds.getRequestMetadata(uri, appExecutor, new RequestMetadataCallback() {
@Override
public void onSuccess(Map<String, List<String>> metadata) {
// Some implementations may pass null metadata.
// Re-use the headers if getRequestMetadata() returns the same map. It may return a
// different map based on the provided URI, i.e., for JWT. However, today it does not
// cache JWT and so we won't bother tring to save its return value based on the URI.
Metadata headers;
try {
synchronized (GoogleAuthLibraryCallCredentials.this) {
if (lastMetadata == null || lastMetadata != metadata) {
lastHeaders = toHeaders(metadata);
lastMetadata = metadata;
}
headers = lastHeaders;
}
} catch (Throwable t) {
applier.fail(Status.UNAUTHENTICATED
.withDescription("Failed to convert credential metadata")
.withCause(t));
return;
}
applier.apply(headers);
}
@Override
public void onFailure(Throwable e) {
if (e instanceof Retryable && ((Retryable) e).isRetryable()) {
// Let the call be retried with UNAVAILABLE.
applier.fail(Status.UNAVAILABLE
.withDescription("Credentials failed to obtain metadata")
.withCause(e));
} else {
applier.fail(Status.UNAUTHENTICATED
.withDescription("Failed computing credential metadata")
.withCause(e));
}
}
});
}
/**
* Generate a JWT-specific service URI. The URI is simply an identifier with enough information
* for a service to know that the JWT was intended for it. The URI will commonly be verified with
* a simple string equality check.
*/
private static URI serviceUri(String authority, MethodDescriptor<?, ?> method)
throws StatusException {
// Always use HTTPS, by definition.
final String scheme = "https";
final int defaultPort = 443;
String path = "/" + method.getServiceName();
URI uri;
try {
uri = new URI(scheme, authority, path, null, null);
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED.withDescription("Unable to construct service URI for auth")
.withCause(e).asException();
}
// The default port must not be present. Alternative ports should be present.
if (uri.getPort() == defaultPort) {
uri = removePort(uri);
}
return uri;
}
private static URI removePort(URI uri) throws StatusException {
try {
return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), -1 /* port */,
uri.getPath(), uri.getQuery(), uri.getFragment());
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED.withDescription(
"Unable to construct service URI after removing port").withCause(e).asException();
}
}
private static Metadata toHeaders(@Nullable Map<String, List<String>> metadata) {
Metadata headers = new Metadata();
if (metadata != null) {
for (String key : metadata.keySet()) {
if (key.endsWith("-bin")) {
Metadata.Key<byte[]> headerKey = Metadata.Key.of(key, Metadata.BINARY_BYTE_MARSHALLER);
for (String value : metadata.get(key)) {
headers.put(headerKey, BaseEncoding.base64().decode(value));
}
} else {
Metadata.Key<String> headerKey = Metadata.Key.of(key, Metadata.ASCII_STRING_MARSHALLER);
for (String value : metadata.get(key)) {
headers.put(headerKey, value);
}
}
}
}
return headers;
}
@VisibleForTesting
@Nullable
static JwtHelper createJwtHelperOrNull(ClassLoader loader) {
Class<?> rawServiceAccountClass;
try {
// Specify loader so it can be overridden in tests
rawServiceAccountClass
= Class.forName("com.google.auth.oauth2.ServiceAccountCredentials", false, loader);
} catch (ClassNotFoundException ex) {
return null;
}
Exception caughtException;
try {
return new JwtHelper(rawServiceAccountClass, loader);
} catch (ClassNotFoundException ex) {
caughtException = ex;
} catch (NoSuchMethodException ex) {
caughtException = ex;
}
if (caughtException != null) {
// Failure is a bug in this class, but we still choose to gracefully recover
log.log(Level.WARNING, "Failed to create JWT helper. This is unexpected", caughtException);
}
return null;
}
@Nullable
private static Class<? extends Credentials> loadGoogleCredentialsClass() {
Class<?> rawGoogleCredentialsClass;
try {
// Can't use a loader as it disables ProGuard's reference detection and would fail to rename
// this reference. Unfortunately this will initialize the class.
rawGoogleCredentialsClass = Class.forName("com.google.auth.oauth2.GoogleCredentials");
} catch (ClassNotFoundException ex) {
log.log(Level.FINE, "Failed to load GoogleCredentials", ex);
return null;
}
return rawGoogleCredentialsClass.asSubclass(Credentials.class);
}
@Nullable
private static Class<?> loadAppEngineCredentials() {
try {
return Class.forName("com.google.auth.appengine.AppEngineCredentials");
} catch (ClassNotFoundException ex) {
log.log(Level.FINE, "AppEngineCredentials not available in classloader", ex);
return null;
}
}
private static | once |
java | apache__dubbo | dubbo-cluster/src/test/java/org/apache/dubbo/rpc/cluster/support/DemoServiceB.java | {
"start": 856,
"end": 905
} | interface ____ {
    // Single test operation; implementations return an identifying string.
    String methodB();
}
| DemoServiceB |
java | apache__camel | components/camel-crypto-pgp/src/test/java/org/apache/camel/converter/crypto/SpringPGPDataFormatTest.java | {
"start": 1122,
"end": 1582
} | class ____ extends AbstractPGPDataFormatTest {

    /** Classpath location of the Spring XML wiring the PGP routes under test. */
    private static final String CONTEXT_XML = "/org/apache/camel/component/crypto/SpringPGPDataFormatTest.xml";

    @Override
    protected CamelContext createCamelContext() throws Exception {
        final ClassPathXmlApplicationContext applicationContext = new ClassPathXmlApplicationContext(CONTEXT_XML);
        return SpringCamelContext.springCamelContext(applicationContext, true);
    }

    /** Round-trips a payload through the inline encryption route without errors. */
    @Test
    void testEncryption() {
        assertDoesNotThrow(() -> doRoundTripEncryptionTests("direct:inline"));
    }
}
| SpringPGPDataFormatTest |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/destination/DestinationClassNameMapperDecorator.java | {
"start": 263,
"end": 651
} | class ____ implements DestinationClassNameMapperDecorated {

    // Underlying mapper to which all mapping calls are forwarded.
    final DestinationClassNameMapperDecorated delegate;

    protected DestinationClassNameMapperDecorator(DestinationClassNameMapperDecorated delegate) {
        this.delegate = delegate;
    }

    // Pass-through decoration: delegates without altering input or result.
    @Override
    public Target map(Integer source) {
        return delegate.map( source );
    }
}
| DestinationClassNameMapperDecorator |
java | apache__hadoop | hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyOutputFormat.java | {
"start": 1607,
"end": 4655
} | class ____<K, V> extends TextOutputFormat<K, V> {

  /**
   * Setter for the working directory for DistCp (where files will be copied
   * before they are moved to the final commit-directory.)
   * @param job The Job on whose configuration the working-directory is to be set.
   * @param workingDirectory The path to use as the working directory.
   */
  public static void setWorkingDirectory(Job job, Path workingDirectory) {
    job.getConfiguration().set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH,
        workingDirectory.toString());
  }

  /**
   * Setter for the final directory for DistCp (where files copied will be
   * moved, atomically.)
   * @param job The Job on whose configuration the working-directory is to be set.
   * @param commitDirectory The path to use for final commit.
   */
  public static void setCommitDirectory(Job job, Path commitDirectory) {
    job.getConfiguration().set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH,
        commitDirectory.toString());
  }

  /**
   * Getter for the working directory.
   * @param job The Job from whose configuration the working-directory is to
   * be retrieved.
   * @return The working-directory Path, or null if unset.
   */
  public static Path getWorkingDirectory(Job job) {
    return getWorkingDirectory(job.getConfiguration());
  }

  private static Path getWorkingDirectory(Configuration conf) {
    return toPathOrNull(conf.get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));
  }

  /**
   * Getter for the final commit-directory.
   * @param job The Job from whose configuration the commit-directory is to be
   * retrieved.
   * @return The commit-directory Path, or null if unset.
   */
  public static Path getCommitDirectory(Job job) {
    return getCommitDirectory(job.getConfiguration());
  }

  private static Path getCommitDirectory(Configuration conf) {
    return toPathOrNull(conf.get(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH));
  }

  /**
   * Converts a configuration value to a Path, treating null or empty values
   * as absent. Shared by the working- and commit-directory getters, which
   * previously duplicated this logic.
   */
  private static Path toPathOrNull(String value) {
    if (value == null || value.isEmpty()) {
      return null;
    }
    return new Path(value);
  }

  /** {@inheritDoc} */
  @Override
  public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
    return new CopyCommitter(getOutputPath(context), context);
  }

  /** {@inheritDoc} */
  @Override
  public void checkOutputSpecs(JobContext context) throws IOException {
    Configuration conf = context.getConfiguration();

    // Both directories must be configured before the job may run.
    if (getCommitDirectory(conf) == null) {
      throw new IllegalStateException("Commit directory not configured");
    }
    Path workingPath = getWorkingDirectory(conf);
    if (workingPath == null) {
      throw new IllegalStateException("Working directory not configured");
    }

    // get delegation token for outDir's file system
    TokenCache.obtainTokensForNamenodes(context.getCredentials(),
        new Path[] {workingPath}, conf);
  }
}
| CopyOutputFormat |
java | quarkusio__quarkus | independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/project/CodestartResourceLoadersBuilder.java | {
"start": 1060,
"end": 7254
} | class ____ {

    // System property that overrides the base codestarts artifact coordinates.
    private static final String BASE_CODESTARTS_ARTIFACT_PROPERTY = "quarkus-base-codestart-artifact";
    // Bundled properties resource holding the default coordinates.
    private static final String BASE_CODESTARTS_ARTIFACT_PROPERTIES_NAME = "/quarkus-devtools-base-codestarts.properties";
    private static final String BASE_CODESTARTS_ARTIFACT_COORDS = retrieveBaseCodestartsArtifactCoords();

    private ExtensionCatalog catalog = null;
    private MavenArtifactResolver artifactResolver;
    private String baseCodestartsArtifactCoords = BASE_CODESTARTS_ARTIFACT_COORDS;
    private Collection<String> extraCodestartsArtifactCoords = new ArrayList<>();
    private MessageWriter log;

    /**
     * Resolves the base codestarts artifact coordinates: the
     * {@code quarkus-base-codestart-artifact} system property wins, otherwise the
     * value is read from the bundled properties resource.
     */
    private static String retrieveBaseCodestartsArtifactCoords() {
        final String artifact = PropertiesUtil.getProperty(BASE_CODESTARTS_ARTIFACT_PROPERTY);
        if (artifact != null) {
            return artifact;
        }
        try (final InputStream resource = QuarkusProjectHelper.class
                .getResourceAsStream(BASE_CODESTARTS_ARTIFACT_PROPERTIES_NAME)) {
            final Properties properties = new Properties();
            requireNonNull(resource,
                    BASE_CODESTARTS_ARTIFACT_PROPERTIES_NAME + " resource not found.");
            properties.load(resource);
            return requireNonNull(properties.getProperty("artifact"),
                    "base codestarts 'artifact' property not found");
        } catch (IOException e) {
            throw new IllegalStateException("Couldn't load the base codestarts artifact properties", e);
        }
    }

    private CodestartResourceLoadersBuilder(MessageWriter log) {
        this.log = log;
    }

    public static CodestartResourceLoadersBuilder codestartLoadersBuilder(MessageWriter log) {
        return new CodestartResourceLoadersBuilder(log);
    }

    public static List<ResourceLoader> getCodestartResourceLoaders(MessageWriter log) {
        return codestartLoadersBuilder(log).build();
    }

    public static List<ResourceLoader> getCodestartResourceLoaders(MessageWriter log, ExtensionCatalog catalog) {
        return codestartLoadersBuilder(log).catalog(catalog).build();
    }

    public CodestartResourceLoadersBuilder catalog(ExtensionCatalog catalog) {
        this.catalog = catalog;
        return this;
    }

    public CodestartResourceLoadersBuilder artifactResolver(MavenArtifactResolver artifactResolver) {
        this.artifactResolver = artifactResolver;
        return this;
    }

    public CodestartResourceLoadersBuilder baseCodestartsArtifactCoords(String baseCodestartsArtifactCoords) {
        this.baseCodestartsArtifactCoords = baseCodestartsArtifactCoords;
        return this;
    }

    /** Disables loading of the base codestarts artifact entirely. */
    public CodestartResourceLoadersBuilder noBaseCodestarts() {
        this.baseCodestartsArtifactCoords = null;
        return this;
    }

    public CodestartResourceLoadersBuilder addExtraCodestartsArtifactCoords(Collection<String> extraCodestartsArtifactCoords) {
        this.extraCodestartsArtifactCoords.addAll(extraCodestartsArtifactCoords);
        return this;
    }

    public List<ResourceLoader> build() {
        return getCodestartResourceLoaders(baseCodestartsArtifactCoords, extraCodestartsArtifactCoords, catalog,
                artifactResolver == null ? QuarkusProjectHelper.artifactResolver() : artifactResolver, log);
    }

    /**
     * Collects and resolves all codestart artifacts (extension codestarts, base
     * codestarts, catalog codestarts, then explicitly supplied extras — later
     * entries override earlier ones on name conflicts) and returns one resource
     * loader per successfully resolved artifact. Unresolvable artifacts are
     * skipped with a warning (best-effort behavior preserved).
     */
    private static List<ResourceLoader> getCodestartResourceLoaders(String baseCodestartsArtifactCoords,
            Collection<String> extraCodestartsArtifactCoords,
            ExtensionCatalog catalog,
            MavenArtifactResolver mavenArtifactResolver,
            MessageWriter log) {
        final Map<String, Artifact> codestartsArtifacts = new LinkedHashMap<>();

        // The latest inserted in the Map will have priority over the previous (in case of codestarts name conflicts)
        // We have to remove keys to override because 'put' keeps the order in a LinkedHashMap

        if (catalog != null) {
            // Load codestarts from each extensions codestart artifacts
            for (Extension e : catalog.getExtensions()) {
                final String coords = getCodestartArtifact(e);
                if (coords == null || codestartsArtifacts.containsKey(coords)) {
                    continue;
                }
                codestartsArtifacts.put(coords, DependencyUtils.toArtifact(coords));
            }
        }

        // Load base codestart artifacts
        if (baseCodestartsArtifactCoords != null) {
            codestartsArtifacts.put(baseCodestartsArtifactCoords, DependencyUtils.toArtifact(baseCodestartsArtifactCoords));
        }

        if (catalog != null) {
            // Load codestarts from catalog codestart artifacts
            final List<String> catalogCodestartArtifacts = getCodestartArtifacts(catalog);
            for (String coords : catalogCodestartArtifacts) {
                if (codestartsArtifacts.containsKey(coords)) {
                    // Make sure it overrides the previous codestarts
                    codestartsArtifacts.remove(coords);
                }
                codestartsArtifacts.put(coords, DependencyUtils.toArtifact(coords));
            }
        }

        // Load codestarts from the given artifacts
        for (String coords : extraCodestartsArtifactCoords) {
            if (codestartsArtifacts.containsKey(coords)) {
                // Make sure it overrides the previous codestarts
                codestartsArtifacts.remove(coords);
            }
            codestartsArtifacts.put(coords, DependencyUtils.toArtifact(coords));
        }

        final List<ResourceLoader> codestartResourceLoaders = new ArrayList<>(codestartsArtifacts.size());
        for (Artifact a : codestartsArtifacts.values()) {
            try {
                final File artifactFile = mavenArtifactResolver.resolve(a).getArtifact().getFile();
                codestartResourceLoaders.add(resolveFileResourceLoader(artifactFile));
            } catch (Exception e) {
                // Best-effort: skip artifacts that cannot be resolved.
                // (A redundant trailing 'continue;' was removed here.)
                log.warn("Unable to resolve codestart artifact for %s: %s", a, e.getMessage());
            }
        }
        return codestartResourceLoaders;
    }
}
| CodestartResourceLoadersBuilder |
java | apache__flink | flink-docs/src/test/java/org/apache/flink/docs/rest/data/clash/inner/TestNameClashingMessageHeaders1.java | {
"start": 1559,
"end": 1613
} | /** Empty request body; its simple name presumably clashes with a sibling type — see enclosing test. */ class ____ implements RequestBody {}
}
| ClashingRequestBody |
java | hibernate__hibernate-orm | hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/GaussDBStructuredJdbcType.java | {
"start": 1059,
"end": 3275
} | class ____ extends AbstractPostgreSQLStructJdbcType {

	// Stateless default instance (no embeddable mapping / type name / order mapping).
	public static final GaussDBStructuredJdbcType INSTANCE = new GaussDBStructuredJdbcType();

	public GaussDBStructuredJdbcType() {
		this( null, null, null );
	}

	private GaussDBStructuredJdbcType(
			EmbeddableMappingType embeddableMappingType,
			String typeName,
			int[] orderMapping) {
		super( embeddableMappingType, typeName, orderMapping );
	}

	// Resolves a struct-specific instance bound to the given embeddable mapping,
	// looking up the user-defined type's attribute order from the boot model.
	@Override
	public AggregateJdbcType resolveAggregateJdbcType(
			EmbeddableMappingType mappingType,
			String sqlType,
			RuntimeModelCreationContext creationContext) {
		return new GaussDBStructuredJdbcType(
				mappingType,
				sqlType,
				creationContext.getBootModel()
						.getDatabase()
						.getDefaultNamespace()
						.locateUserDefinedType( Identifier.toIdentifier( sqlType ) )
						.getOrderMapping()
		);
	}

	// Renders the write expression as an explicit cast to the struct type name.
	@Override
	public void appendWriteExpression(
			String writeExpression,
			@Nullable Size size,
			SqlAppender appender,
			Dialect dialect) {
		appender.append( "cast(" );
		appender.append( writeExpression );
		appender.append( " as " );
		appender.append( getStructTypeName() );
		appender.append( ')' );
	}

	@Override
	public boolean isWriteExpressionTyped(Dialect dialect) {
		// The cast above makes the expression self-typed.
		return true;
	}

	// Binds struct values by serializing them to their string representation
	// and setting them as plain strings on the JDBC statement.
	@Override
	public <X> ValueBinder<X> getBinder(JavaType<X> javaType) {
		return new BasicBinder<>( javaType, this ) {
			@Override
			protected void doBind(PreparedStatement st, X value, int index, WrapperOptions options)
					throws SQLException {
				final String stringValue = ( (GaussDBStructuredJdbcType) getJdbcType() ).toString(
						value,
						getJavaType(),
						options
				);
				st.setString( index, stringValue );
			}

			@Override
			protected void doBind(CallableStatement st, X value, String name, WrapperOptions options)
					throws SQLException {
				final String stringValue = ( (GaussDBStructuredJdbcType) getJdbcType() ).toString(
						value,
						getJavaType(),
						options
				);
				st.setString( name, stringValue );
			}

			@Override
			public Object getBindValue(X value, WrapperOptions options) throws SQLException {
				return ( (GaussDBStructuredJdbcType) getJdbcType() ).getBindValue( value, options );
			}
		};
	}
}
| GaussDBStructuredJdbcType |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/search/action/RestPutSearchApplicationActionTests.java | {
"start": 870,
"end": 2047
} | class ____ extends AbstractRestEnterpriseSearchActionTests {

    // A well-formed PUT request must still be rejected when the license is non-compliant.
    public void testWithNonCompliantLicense() throws Exception {
        checkLicenseForRequest(
            new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.PUT)
                .withParams(Map.of("name", "my-app"))
                .withContent(new BytesArray("{\"indices\": [\"my-index\"]}"), XContentType.JSON)
                .build(),
            LicenseUtils.Product.SEARCH_APPLICATION
        );
    }

    // Even an invalid request (bad params, empty body) must hit the license check first.
    public void testInvalidRequestWithNonCompliantLicense() throws Exception {
        checkLicenseForRequest(
            new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withMethod(RestRequest.Method.PUT)
                .withParams(Map.of("invalid_param_name", "invalid_value"))
                .withContent(new BytesArray("{}"), XContentType.JSON)
                .build(),
            LicenseUtils.Product.SEARCH_APPLICATION
        );
    }

    // Supplies the handler under test to the shared license-check harness.
    @Override
    protected EnterpriseSearchBaseRestHandler getRestAction(XPackLicenseState licenseState) {
        return new RestPutSearchApplicationAction(licenseState);
    }
}
| RestPutSearchApplicationActionTests |
java | apache__avro | lang/java/mapred/src/test/java/org/apache/avro/mapred/TestAvroTextSort.java | {
"start": 1197,
"end": 2118
} | class ____ {
@TempDir
public File INPUT_DIR;
@TempDir
public File OUTPUT_DIR;
/**
* Run the identity job on a "bytes" Avro file using AvroAsTextInputFormat and
* AvroTextOutputFormat to produce a sorted "bytes" Avro file.
*/
@Test
void sort() throws Exception {
JobConf job = new JobConf();
String inputPath = INPUT_DIR.getPath();
Path outputPath = new Path(OUTPUT_DIR.getPath());
outputPath.getFileSystem(job).delete(outputPath, true);
WordCountUtil.writeLinesBytesFile(inputPath);
job.setInputFormat(AvroAsTextInputFormat.class);
job.setOutputFormat(AvroTextOutputFormat.class);
job.setOutputKeyClass(Text.class);
FileInputFormat.setInputPaths(job, new Path(inputPath));
FileOutputFormat.setOutputPath(job, outputPath);
JobClient.runJob(job);
WordCountUtil.validateSortedFile(outputPath.toString() + "/part-00000.avro");
}
}
| TestAvroTextSort |
java | micronaut-projects__micronaut-core | core/src/main/java/io/micronaut/core/naming/NameUtils.java | {
"start": 5192,
"end": 5256
} | class ____ as string.
*
* @param className The | represented |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/framework/JdkDynamicProxyTests.java | {
"start": 5008,
"end": 5079
} | interface ____ {
Bar getBarThis();
Foo getFooThis();
}
public | Foo |
java | spring-projects__spring-framework | spring-test/src/main/java/org/springframework/test/context/ContextHierarchy.java | {
"start": 3541,
"end": 3664
} | class ____ {}
*
* @ContextHierarchy(@ContextConfiguration("/spring/soap-ws-config.xml"))
* public | AbstractWebTests |
java | alibaba__druid | druid-admin/src/main/java/com/alibaba/druid/admin/model/dto/SqlDetailResult.java | {
"start": 467,
"end": 4326
} | class ____ {
@JSONField(name = "ExecuteAndResultSetHoldTime")
private int ExecuteAndResultSetHoldTime;
@JSONField(name = "LastErrorMessage")
private Object LastErrorMessage;
@JSONField(name = "InputStreamOpenCount")
private int InputStreamOpenCount;
@JSONField(name = "BatchSizeTotal")
private int BatchSizeTotal;
@JSONField(name = "FetchRowCountMax")
private int FetchRowCountMax;
@JSONField(name = "ErrorCount")
private int ErrorCount;
@JSONField(name = "BatchSizeMax")
private int BatchSizeMax;
@JSONField(name = "URL")
private Object URL;
@JSONField(name = "Name")
private Object Name;
@JSONField(name = "LastErrorTime")
private Object LastErrorTime;
@JSONField(name = "ReaderOpenCount")
private int ReaderOpenCount;
@JSONField(name = "parsedRelationships")
private String parsedRelationships;
@JSONField(name = "EffectedRowCountMax")
private int EffectedRowCountMax;
@JSONField(name = "LastErrorClass")
private Object LastErrorClass;
@JSONField(name = "InTransactionCount")
private int InTransactionCount;
@JSONField(name = "LastErrorStackTrace")
private Object LastErrorStackTrace;
@JSONField(name = "ResultSetHoldTime")
private int ResultSetHoldTime;
@JSONField(name = "TotalTime")
private int TotalTime;
@JSONField(name = "ID")
private int ID;
@JSONField(name = "ConcurrentMax")
private int ConcurrentMax;
@JSONField(name = "RunningCount")
private int RunningCount;
@JSONField(name = "FetchRowCount")
private int FetchRowCount;
@JSONField(name = "parsedFields")
private String parsedFields;
@JSONField(name = "MaxTimespanOccurTime")
private String MaxTimespanOccurTime;
@JSONField(name = "LastSlowParameters")
private Object LastSlowParameters;
@JSONField(name = "ReadBytesLength")
private int ReadBytesLength;
@JSONField(name = "formattedSql")
private String formattedSql;
@JSONField(name = "DbType")
private String DbType;
@JSONField(name = "DataSource")
private Object DataSource;
@JSONField(name = "SQL")
private String SQL;
@JSONField(name = "HASH")
private long HASH;
@JSONField(name = "LastError")
private Object LastError;
@JSONField(name = "MaxTimespan")
private int MaxTimespan;
@JSONField(name = "parsedTable")
private String parsedTable;
@JSONField(name = "parsedOrderbycolumns")
private String parsedOrderbycolumns;
@JSONField(name = "BlobOpenCount")
private int BlobOpenCount;
@JSONField(name = "ExecuteCount")
private int ExecuteCount;
@JSONField(name = "EffectedRowCount")
private int EffectedRowCount;
@JSONField(name = "ReadStringLength")
private int ReadStringLength;
@JSONField(name = "File")
private Object File;
@JSONField(name = "ClobOpenCount")
private int ClobOpenCount;
@JSONField(name = "LastTime")
private String LastTime;
@JSONField(name = "parsedConditions")
private String parsedConditions;
@JSONField(name = "EffectedRowCountHistogram")
private List<Integer> EffectedRowCountHistogram;
@JSONField(name = "Histogram")
private List<Integer> Histogram;
@JSONField(name = "ExecuteAndResultHoldTimeHistogram")
private List<Integer> ExecuteAndResultHoldTimeHistogram;
@JSONField(name = "FetchRowCountHistogram")
private List<Integer> FetchRowCountHistogram;
}
}
| ContentBean |
java | quarkusio__quarkus | extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/Echo.java | {
"start": 323,
"end": 554
} | class ____ {
@Inject
EchoService echoService;
@OnTextMessage
Uni<String> echo(String msg) {
assertTrue(Context.isOnEventLoopThread());
return Uni.createFrom().item(echoService.echo(msg));
}
}
| Echo |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/engine/spi/ExceptionConverter.java | {
"start": 313,
"end": 1583
} | interface ____ {
/**
* Converts the exception thrown during the transaction commit phase
*
* @param e The exception being handled
*
* @return The converted exception
*/
RuntimeException convertCommitException(RuntimeException e);
/**
* Converts a Hibernate-specific exception into a JPA-specified exception;
* note that the JPA specification makes use of exceptions outside its
* exception hierarchy, though they are all runtime exceptions.
*
* @param e The Hibernate exception.
* @param lockOptions The lock options in effect at the time of exception (can be null)
*
* @return The JPA-specified exception
*/
RuntimeException convert(HibernateException e, LockOptions lockOptions);
/**
* Converts a Hibernate-specific exception into a JPA-specified exception;
* note that the JPA specification makes use of exceptions outside its
* exception hierarchy, though they are all runtime exceptions.
*
* @param e The Hibernate exception.
*
* @return The JPA-specified exception
*/
RuntimeException convert(HibernateException e);
RuntimeException convert(RuntimeException e);
RuntimeException convert(RuntimeException e, LockOptions lockOptions);
JDBCException convert(SQLException e, String message);
}
| ExceptionConverter |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/util/ByteArrayManager.java | {
"start": 7793,
"end": 9542
} | class ____ {
/**
* The count threshold for each array length so that a manager is created
* only after the allocation count exceeds the threshold.
*/
private final int countThreshold;
/**
* The maximum number of arrays allowed for each array length.
*/
private final int countLimit;
/**
* The time period in milliseconds that the allocation count for each array
* length is reset to zero if there is no increment.
*/
private final long countResetTimePeriodMs;
public Conf(int countThreshold, int countLimit, long
countResetTimePeriodMs) {
this.countThreshold = countThreshold;
this.countLimit = countLimit;
this.countResetTimePeriodMs = countResetTimePeriodMs;
}
}
/**
* Create a byte array for the given length, where the length of
* the returned array is larger than or equal to the given length.
*
* The current thread may be blocked if some resource is unavailable.
*
* The byte array created by this method must be released
* via the {@link ByteArrayManager#release(byte[])} method.
*
* @return a byte array with length larger than or equal to the given length.
*/
public abstract byte[] newByteArray(int size) throws InterruptedException;
/**
* Release the given byte array.
*
* The byte array may or may not be created
* by the {@link ByteArrayManager#newByteArray(int)} method.
*
* @return the number of free array.
*/
public abstract int release(byte[] array);
public static ByteArrayManager newInstance(Conf conf) {
return conf == null? new NewByteArrayWithoutLimit(): new Impl(conf);
}
/**
* A dummy implementation which simply calls new byte[].
*/
static | Conf |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/idclassgeneratedvalue/Simple2.java | {
"start": 598,
"end": 1140
} | class ____ implements Serializable {
@Id
@GenericGenerator(name = "increment", strategy = "increment")
@GeneratedValue(generator = "increment")
private Long id1;
@Id
private Long id2;
private int quantity;
public Simple2() {
}
public Simple2(Long id, int quantity) {
this.id2 = id;
this.quantity = quantity;
}
public Long getId1() {
return id1;
}
public Long getId2() {
return id2;
}
public int getQuantity() {
return quantity;
}
public void setQuantity(int quantity) {
this.quantity = quantity;
}
}
| Simple2 |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/logging/logback/ColorConverterTests.java | {
"start": 1089,
"end": 5880
} | class ____ {
private final ColorConverter converter = new ColorConverter();
private final LoggingEvent event = new LoggingEvent();
private final String in = "in";
@BeforeAll
static void setupAnsi() {
AnsiOutput.setEnabled(AnsiOutput.Enabled.ALWAYS);
}
@AfterAll
static void resetAnsi() {
AnsiOutput.setEnabled(AnsiOutput.Enabled.DETECT);
}
@Test
void black() {
this.converter.setOptionList(Collections.singletonList("black"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[30min\033[0;39m");
}
@Test
void white() {
this.converter.setOptionList(Collections.singletonList("white"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[37min\033[0;39m");
}
@Test
void faint() {
this.converter.setOptionList(Collections.singletonList("faint"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[2min\033[0;39m");
}
@Test
void red() {
this.converter.setOptionList(Collections.singletonList("red"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[31min\033[0;39m");
}
@Test
void green() {
this.converter.setOptionList(Collections.singletonList("green"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[32min\033[0;39m");
}
@Test
void yellow() {
this.converter.setOptionList(Collections.singletonList("yellow"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[33min\033[0;39m");
}
@Test
void blue() {
this.converter.setOptionList(Collections.singletonList("blue"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[34min\033[0;39m");
}
@Test
void magenta() {
this.converter.setOptionList(Collections.singletonList("magenta"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[35min\033[0;39m");
}
@Test
void cyan() {
this.converter.setOptionList(Collections.singletonList("cyan"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[36min\033[0;39m");
}
@Test
void brightBlack() {
this.converter.setOptionList(Collections.singletonList("bright_black"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[90min\033[0;39m");
}
@Test
void brightWhite() {
this.converter.setOptionList(Collections.singletonList("bright_white"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[97min\033[0;39m");
}
@Test
void brightRed() {
this.converter.setOptionList(Collections.singletonList("bright_red"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[91min\033[0;39m");
}
@Test
void brightGreen() {
this.converter.setOptionList(Collections.singletonList("bright_green"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[92min\033[0;39m");
}
@Test
void brightYellow() {
this.converter.setOptionList(Collections.singletonList("bright_yellow"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[93min\033[0;39m");
}
@Test
void brightBlue() {
this.converter.setOptionList(Collections.singletonList("bright_blue"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[94min\033[0;39m");
}
@Test
void brightMagenta() {
this.converter.setOptionList(Collections.singletonList("bright_magenta"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[95min\033[0;39m");
}
@Test
void brightCyan() {
this.converter.setOptionList(Collections.singletonList("bright_cyan"));
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[96min\033[0;39m");
}
@Test
void highlightError() {
this.event.setLevel(Level.ERROR);
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[31min\033[0;39m");
}
@Test
void highlightWarn() {
this.event.setLevel(Level.WARN);
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[33min\033[0;39m");
}
@Test
void highlightDebug() {
this.event.setLevel(Level.DEBUG);
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[32min\033[0;39m");
}
@Test
void highlightTrace() {
this.event.setLevel(Level.TRACE);
String out = this.converter.transform(this.event, this.in);
assertThat(out).isEqualTo("\033[32min\033[0;39m");
}
}
| ColorConverterTests |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java | {
"start": 4466,
"end": 6712
} | class ____ extends TransportMasterNodeAction<Request, PersistentTaskResponse> {
private final PersistentTasksClusterService persistentTasksClusterService;
private final ProjectResolver projectResolver;
@Inject
public TransportAction(
TransportService transportService,
ClusterService clusterService,
ThreadPool threadPool,
ActionFilters actionFilters,
PersistentTasksClusterService persistentTasksClusterService,
ProjectResolver projectResolver
) {
super(
INSTANCE.name(),
transportService,
clusterService,
threadPool,
actionFilters,
Request::new,
PersistentTaskResponse::new,
threadPool.executor(ThreadPool.Names.MANAGEMENT)
);
this.persistentTasksClusterService = persistentTasksClusterService;
this.projectResolver = projectResolver;
}
@Override
protected ClusterBlockException checkBlock(Request request, ClusterState state) {
// Cluster is not affected but we look up repositories in metadata
return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
}
@Override
protected final void masterOperation(
Task ignoredTask,
final Request request,
final ClusterState state,
final ActionListener<PersistentTaskResponse> listener
) {
// Try resolve the project-id which may be null if the request is for a cluster-scope task.
// A non-null project-id does not guarantee the task is project-scope. This will be determined
// later by checking the taskName associated with the task-id.
final ProjectId projectIdHint = resolveProjectIdHint(projectResolver);
persistentTasksClusterService.updatePersistentTaskState(
projectIdHint,
request.taskId,
request.allocationId,
request.state,
listener.map(PersistentTaskResponse::new)
);
}
}
}
| TransportAction |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/identifier/composite/EventId.java | {
"start": 380,
"end": 1346
} | class ____ implements Serializable {
private Integer category;
private Timestamp createdOn;
//Getters and setters are omitted for brevity
//end::identifiers-composite-generated-mapping-example[]
public Integer getCategory() {
return category;
}
public void setCategory(Integer category) {
this.category = category;
}
public Timestamp getCreatedOn() {
return createdOn;
}
public void setCreatedOn(Timestamp createdOn) {
this.createdOn = createdOn;
}
//tag::identifiers-composite-generated-mapping-example[]
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
EventId that = (EventId) o;
return Objects.equals(category, that.category) &&
Objects.equals(createdOn, that.createdOn);
}
@Override
public int hashCode() {
return Objects.hash(category, createdOn);
}
}
//end::identifiers-composite-generated-mapping-example[]
| EventId |
java | elastic__elasticsearch | test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/BlacklistedPathPatternMatcher.java | {
"start": 1195,
"end": 2711
} | class ____ {
private final Pattern pattern;
/**
* Constructs a new <code>BlacklistedPathPatternMatcher</code> instance from the provided suffix pattern.
*
* @param p The suffix pattern. Must be a non-empty string.
*/
BlacklistedPathPatternMatcher(String p) {
// guard against accidentally matching everything as an empty string lead to the pattern ".*" which matches everything
if (p == null || p.trim().isEmpty()) {
throw new IllegalArgumentException("Empty blacklist patterns are not supported");
}
// we don't support [], {}, (), ? and . in their special meaning within a path regex, so escape them here
String sanitizedPattern = p.replaceAll("([\\[\\]\\{\\}\\(\\)\\?\\.])", "\\\\$1");
// very simple transformation from wildcard to a proper regex
String finalPattern = sanitizedPattern.replace("*", "[^/]*") // support wildcard matches (within a single path segment)
.replace("\\,", ","); // restore previously escaped ',' in paths.
// suffix match
pattern = Pattern.compile(".*" + finalPattern);
}
/**
* Checks whether the provided path matches the suffix pattern, i.e. "/foo/bar" will match the pattern "bar".
*
* @param path The path to match. Must not be null.
* @return true iff this path is a suffix match.
*/
public boolean isSuffixMatch(String path) {
return pattern.matcher(path).matches();
}
}
| BlacklistedPathPatternMatcher |
java | apache__dubbo | dubbo-metadata/dubbo-metadata-api/src/main/java/org/apache/dubbo/metadata/MetadataInfo.java | {
"start": 3336,
"end": 9659
} | interface ____}:{version}'
private transient Map<String, Set<ServiceInfo>> subscribedServices;
private final transient Map<String, String> extendParams;
private final transient Map<String, String> instanceParams;
protected transient volatile boolean updated = false;
private transient ConcurrentNavigableMap<String, SortedSet<URL>> subscribedServiceURLs;
private transient ConcurrentNavigableMap<String, SortedSet<URL>> exportedServiceURLs;
private transient ExtensionLoader<MetadataParamsFilter> loader;
public MetadataInfo() {
this(null);
}
public MetadataInfo(String app) {
this(app, null, null);
}
public MetadataInfo(String app, String revision, Map<String, ServiceInfo> services) {
this.app = app;
this.revision = revision;
this.services = services == null ? new ConcurrentHashMap<>() : services;
this.extendParams = new ConcurrentHashMap<>();
this.instanceParams = new ConcurrentHashMap<>();
}
private MetadataInfo(
String app,
String revision,
Map<String, ServiceInfo> services,
AtomicBoolean initiated,
Map<String, String> extendParams,
Map<String, String> instanceParams,
boolean updated,
ConcurrentNavigableMap<String, SortedSet<URL>> subscribedServiceURLs,
ConcurrentNavigableMap<String, SortedSet<URL>> exportedServiceURLs,
ExtensionLoader<MetadataParamsFilter> loader) {
this.app = app;
this.revision = revision;
this.services = new ConcurrentHashMap<>(services);
this.initiated = new AtomicBoolean(initiated.get());
this.extendParams = new ConcurrentHashMap<>(extendParams);
this.instanceParams = new ConcurrentHashMap<>(instanceParams);
this.updated = updated;
this.subscribedServiceURLs =
subscribedServiceURLs == null ? null : new ConcurrentSkipListMap<>(subscribedServiceURLs);
this.exportedServiceURLs =
exportedServiceURLs == null ? null : new ConcurrentSkipListMap<>(exportedServiceURLs);
this.loader = loader;
}
/**
* Initialize is needed when MetadataInfo is created from deserialization on the consumer side before being used for RPC call.
*/
public void init() {
if (!initiated.compareAndSet(false, true)) {
return;
}
if (CollectionUtils.isNotEmptyMap(services)) {
services.forEach((_k, serviceInfo) -> {
serviceInfo.init();
// create duplicate serviceKey(without protocol)->serviceInfo mapping to support metadata search when
// protocol is not specified on consumer side.
if (subscribedServices == null) {
subscribedServices = new HashMap<>();
}
Set<ServiceInfo> serviceInfos =
subscribedServices.computeIfAbsent(serviceInfo.getServiceKey(), _key -> new HashSet<>());
serviceInfos.add(serviceInfo);
});
}
}
public synchronized void addService(URL url) {
// fixme, pass in application mode context during initialization of MetadataInfo.
if (this.loader == null) {
this.loader = url.getOrDefaultApplicationModel().getExtensionLoader(MetadataParamsFilter.class);
}
List<MetadataParamsFilter> filters = loader.getActivateExtension(url, "params-filter");
// generate service level metadata
ServiceInfo serviceInfo = new ServiceInfo(url, filters);
this.services.put(serviceInfo.getMatchKey(), serviceInfo);
// extract common instance level params
extractInstanceParams(url, filters);
if (exportedServiceURLs == null) {
exportedServiceURLs = new ConcurrentSkipListMap<>();
}
addURL(exportedServiceURLs, url);
updated = true;
}
public synchronized void removeService(URL url) {
if (url == null) {
return;
}
this.services.remove(url.getProtocolServiceKey());
if (exportedServiceURLs != null) {
removeURL(exportedServiceURLs, url);
}
updated = true;
}
public String getRevision() {
return revision;
}
/**
* Calculation of this instance's status like revision and modification of the same instance must be synchronized among different threads.
* <p>
* Usage of this method is strictly restricted to certain points such as when during registration. Always try to use {@link this#getRevision()} instead.
*/
public synchronized String calAndGetRevision() {
if (revision != null && !updated) {
return revision;
}
updated = false;
if (CollectionUtils.isEmptyMap(services)) {
this.revision = EMPTY_REVISION;
} else {
String tempRevision = calRevision();
if (!StringUtils.isEquals(this.revision, tempRevision)) {
if (logger.isInfoEnabled()) {
logger.info(String.format(
"[METADATA_REGISTER] metadata revision changed: %s -> %s, app: %s, services: %d",
this.revision, tempRevision, this.app, this.services.size()));
}
this.revision = tempRevision;
this.rawMetadataInfo = JsonUtils.toJson(this);
}
}
return revision;
}
public synchronized String calRevision() {
StringBuilder sb = new StringBuilder();
sb.append(app);
for (Map.Entry<String, ServiceInfo> entry : new TreeMap<>(services).entrySet()) {
sb.append(entry.getValue().toDescString());
}
return RevisionResolver.calRevision(sb.toString());
}
public void setRevision(String revision) {
this.revision = revision;
}
@Transient
public String getContent() {
return this.rawMetadataInfo;
}
public String getApp() {
return app;
}
public void setApp(String app) {
this.app = app;
}
public Map<String, ServiceInfo> getServices() {
return services;
}
/**
* Get service info of an | name |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/vectors/RescoreKnnVectorQuery.java | {
"start": 7622,
"end": 9326
} | class ____ extends RescoreKnnVectorQuery {
final int rescoreK;
private LateRescoreQuery(
String fieldName,
float[] floatTarget,
VectorSimilarityFunction vectorSimilarityFunction,
int k,
int rescoreK,
Query innerQuery
) {
super(fieldName, floatTarget, vectorSimilarityFunction, k, innerQuery);
this.rescoreK = rescoreK;
}
@Override
public Query rewrite(IndexSearcher searcher) throws IOException {
final TopDocs topDocs;
// Retrieve top `rescoreK` documents from the inner query
topDocs = searcher.search(innerQuery, rescoreK);
vectorOperations = topDocs.totalHits.value();
// Retrieve top `k` documents from the top `rescoreK` query
var topDocsQuery = new KnnScoreDocQuery(topDocs.scoreDocs, searcher.getIndexReader());
var rescoreQuery = new DirectRescoreKnnVectorQuery(fieldName, floatTarget, topDocsQuery);
var rescoreTopDocs = searcher.search(rescoreQuery.rewrite(searcher), k);
return new KnnScoreDocQuery(rescoreTopDocs.scoreDocs, searcher.getIndexReader());
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
var that = (RescoreKnnVectorQuery.LateRescoreQuery) o;
return super.equals(o) && that.rescoreK == rescoreK;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), rescoreK);
}
}
private static | LateRescoreQuery |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxPublish.java | {
"start": 4509,
"end": 20684
} | class ____<T>
implements InnerConsumer<T>, Disposable {
final int prefetch;
final FluxPublish<T> parent;
@SuppressWarnings("NotNullFieldNotInitialized") // s initialized in onSubscribe
Subscription s;
@SuppressWarnings("NotNullFieldNotInitialized") // initialized in constructor
volatile PubSubInner<T>[] subscribers;
@SuppressWarnings("rawtypes")
static final AtomicReferenceFieldUpdater<PublishSubscriber, PubSubInner[]> SUBSCRIBERS =
AtomicReferenceFieldUpdater.newUpdater(PublishSubscriber.class,
PubSubInner[].class,
"subscribers");
volatile long state;
@SuppressWarnings("rawtypes")
static final AtomicLongFieldUpdater<PublishSubscriber> STATE =
AtomicLongFieldUpdater.newUpdater(PublishSubscriber.class, "state");
//notes: FluxPublish needs to distinguish INIT from CANCELLED in order to correctly
//drop values in case of an early connect() without any subscribers.
@SuppressWarnings("rawtypes")
static final PubSubInner[] INIT = new PublishInner[0];
@SuppressWarnings("rawtypes")
static final PubSubInner[] CANCELLED = new PublishInner[0];
@SuppressWarnings("rawtypes")
static final PubSubInner[] TERMINATED = new PublishInner[0];
@SuppressWarnings("NotNullFieldNotInitialized") // initialized in onSubscribe
Queue<T> queue;
int sourceMode;
boolean done;
volatile @Nullable Throwable error;
// https://github.com/uber/NullAway/issues/1157
@SuppressWarnings({"rawtypes", "DataFlowIssue"})
static final AtomicReferenceFieldUpdater<PublishSubscriber, @Nullable Throwable> ERROR =
AtomicReferenceFieldUpdater.newUpdater(PublishSubscriber.class,
Throwable.class,
"error");
PublishSubscriber(int prefetch, FluxPublish<T> parent) {
this.prefetch = prefetch;
this.parent = parent;
SUBSCRIBERS.lazySet(this, INIT);
}
boolean isTerminated(){
return subscribers == TERMINATED;
}
@Override
public void onSubscribe(Subscription s) {
if (Operators.validate(this.s, s)) {
this.s = s;
if (s instanceof Fuseable.QueueSubscription) {
@SuppressWarnings("unchecked") Fuseable.QueueSubscription<T> f =
(Fuseable.QueueSubscription<T>) s;
int m = f.requestFusion(Fuseable.ANY | Fuseable.THREAD_BARRIER);
if (m == Fuseable.SYNC) {
sourceMode = m;
queue = f;
long previousState = markSubscriptionSetAndAddWork(this);
if (isCancelled(previousState)) {
s.cancel();
return;
}
if (hasWorkInProgress(previousState)) {
return;
}
drain(previousState | SUBSCRIPTION_SET_FLAG | 1);
return;
}
if (m == Fuseable.ASYNC) {
sourceMode = m;
queue = f;
long previousState = markSubscriptionSet(this);
if (isCancelled(previousState)) {
s.cancel();
}
else {
s.request(Operators.unboundedOrPrefetch(prefetch));
}
return;
}
}
queue = parent.queueSupplier.get();
long previousState = markSubscriptionSet(this);
if (isCancelled(previousState)) {
s.cancel();
}
else {
s.request(Operators.unboundedOrPrefetch(prefetch));
}
}
}
@Override
public void onNext(@Nullable T t) {
if (done) {
if (t != null) {
Operators.onNextDropped(t, currentContext());
}
return;
}
boolean isAsyncMode = sourceMode == Fuseable.ASYNC;
if (!isAsyncMode) {
assert t != null : "onNext called with null in non-async fusion mode";
if (!queue.offer(t)) {
Throwable ex = Operators.onOperatorError(s,
Exceptions.failWithOverflow(Exceptions.BACKPRESSURE_ERROR_QUEUE_FULL),
t,
currentContext());
if (!Exceptions.addThrowable(ERROR, this, ex)) {
Operators.onErrorDroppedMulticast(ex, subscribers);
return;
}
done = true;
}
}
long previousState = addWork(this);
if (isFinalized(previousState)) {
clear();
return;
}
if (isTerminated(previousState) || isCancelled(previousState)) {
return;
}
if (hasWorkInProgress(previousState)) {
return;
}
drain(previousState + 1);
}
@Override
public void onError(Throwable t) {
if (done) {
Operators.onErrorDroppedMulticast(t, subscribers);
return;
}
if (!Exceptions.addThrowable(ERROR, this, t)) {
Operators.onErrorDroppedMulticast(t, subscribers);
return;
}
done = true;
long previousState = markTerminated(this);
if (isTerminated(previousState) || isCancelled(previousState)) {
return;
}
if (hasWorkInProgress(previousState)) {
return;
}
drain((previousState | TERMINATED_FLAG) + 1);
}
@Override
public void onComplete() {
if (done) {
return;
}
done = true;
long previousState = markTerminated(this);
if (isTerminated(previousState) || isCancelled(previousState)) {
return;
}
if (hasWorkInProgress(previousState)) {
return;
}
drain((previousState | TERMINATED_FLAG) + 1);
}
@Override
public void dispose() {
if (SUBSCRIBERS.get(this) == TERMINATED) {
return;
}
if (CONNECTION.compareAndSet(parent, this, null)) {
long previousState = markCancelled(this);
if (isTerminated(previousState) || isCancelled(previousState)) {
return;
}
if (hasWorkInProgress(previousState)) {
return;
}
disconnectAction(previousState);
}
}
void clear() {
if (sourceMode == Fuseable.NONE) {
T t;
while ((t = queue.poll()) != null) {
Operators.onDiscard(t, currentContext());
}
}
else {
queue.clear();
}
}
void disconnectAction(long previousState) {
if (isSubscriptionSet(previousState)) {
this.s.cancel();
clear();
}
@SuppressWarnings("unchecked")
PubSubInner<T>[] inners = SUBSCRIBERS.getAndSet(this, CANCELLED);
if (inners.length > 0) {
CancellationException ex = new CancellationException("Disconnected");
for (PubSubInner<T> inner : inners) {
inner.actual.onError(ex);
}
}
}
boolean add(PublishInner<T> inner) {
for (; ; ) {
FluxPublish.PubSubInner<T>[] a = subscribers;
if (a == TERMINATED) {
return false;
}
int n = a.length;
PubSubInner<?>[] b = new PubSubInner[n + 1];
System.arraycopy(a, 0, b, 0, n);
b[n] = inner;
if (SUBSCRIBERS.compareAndSet(this, a, b)) {
return true;
}
}
}
public void remove(PubSubInner<T> inner) {
for (; ; ) {
PubSubInner<T>[] a = subscribers;
if (a == TERMINATED || a == CANCELLED) {
return;
}
int n = a.length;
int j = -1;
for (int i = 0; i < n; i++) {
if (a[i] == inner) {
j = i;
break;
}
}
if (j < 0) {
//inner was not found
return;
}
PubSubInner<?>[] b;
if (n == 1) {
b = CANCELLED;
}
else {
b = new PubSubInner<?>[n - 1];
System.arraycopy(a, 0, b, 0, j);
System.arraycopy(a, j + 1, b, j, n - j - 1);
}
if (SUBSCRIBERS.compareAndSet(this, a, b)) {
//we don't assume autoCancel semantics, which will rather depend from
//downstream operators like autoConnect vs refCount, so we don't disconnect here
return;
}
}
}
@SuppressWarnings("unchecked")
PubSubInner<T>[] terminate() {
return SUBSCRIBERS.getAndSet(this, TERMINATED);
}
boolean tryConnect() {
long previousState = markConnected(this);
return !isConnected(previousState);
}
void drainFromInner() {
long previousState = addWorkIfSubscribed(this);
if (!isSubscriptionSet(previousState)) {
return;
}
if (hasWorkInProgress(previousState)) {
return;
}
drain(previousState + 1);
}
void drain(long expectedState) {
for (; ; ) {
boolean d = done;
Queue<T> q = queue;
int mode = sourceMode;
boolean empty = q == null || q.isEmpty();
if (checkTerminated(d, empty, null)) {
return;
}
PubSubInner<T>[] a = subscribers;
if (a != CANCELLED && !empty) {
long maxRequested = Long.MAX_VALUE;
int len = a.length;
int cancel = 0;
for (PubSubInner<T> inner : a) {
long r = inner.requested;
if (r >= 0L) {
maxRequested = Math.min(maxRequested, r);
}
else { //Long.MIN
cancel++;
}
}
if (len == cancel) {
T v;
try {
v = q.poll();
}
catch (Throwable ex) {
Exceptions.addThrowable(ERROR,
this,
Operators.onOperatorError(s, ex, currentContext()));
d = true;
v = null;
}
if (checkTerminated(d, v == null, v)) {
return;
}
if (mode != Fuseable.SYNC) {
s.request(1);
}
continue;
}
int e = 0;
while (e < maxRequested && cancel != Integer.MIN_VALUE) {
d = done;
T v;
try {
v = q.poll();
}
catch (Throwable ex) {
Exceptions.addThrowable(ERROR,
this,
Operators.onOperatorError(s, ex, currentContext()));
d = true;
v = null;
}
empty = v == null;
if (checkTerminated(d, empty, v)) {
return;
}
if (empty) {
//async mode only needs to break but SYNC mode needs to perform terminal cleanup here...
if (mode == Fuseable.SYNC) {
done = true;
checkTerminated(true, true, null);
}
break;
}
for (PubSubInner<T> inner : a) {
inner.actual.onNext(v);
if(Operators.producedCancellable(PubSubInner.REQUESTED,
inner,1) ==
Long.MIN_VALUE){
cancel = Integer.MIN_VALUE;
}
}
e++;
}
if (e != 0 && mode != Fuseable.SYNC) {
s.request(e);
}
if (maxRequested != 0L && !empty) {
continue;
}
}
else if (q != null && mode == Fuseable.SYNC) {
done = true;
if (checkTerminated(true, empty, null)) {
break;
}
}
expectedState = markWorkDone(this, expectedState);
if (isCancelled(expectedState)) {
clearAndFinalize(this);
return;
}
if (!hasWorkInProgress(expectedState)) {
return;
}
}
}
boolean checkTerminated(boolean d, boolean empty, @Nullable T t) {
long state = this.state;
if (isCancelled(state)) {
Operators.onDiscard(t, currentContext());
disconnectAction(state);
return true;
}
if (d) {
Throwable e = error;
if (e != null && e != Exceptions.TERMINATED) {
if (parent.resetUponSourceTermination) {
CONNECTION.compareAndSet(parent, this, null);
e = Exceptions.terminate(ERROR, this);
}
queue.clear();
for (PubSubInner<T> inner : terminate()) {
inner.actual.onError(e);
}
return true;
}
else if (empty) {
if (parent.resetUponSourceTermination) {
CONNECTION.compareAndSet(parent, this, null);
}
for (PubSubInner<T> inner : terminate()) {
inner.actual.onComplete();
}
return true;
}
}
return false;
}
@Override
public Stream<? extends Scannable> inners() {
return Stream.of(subscribers);
}
@Override
public Context currentContext() {
return Operators.multiSubscribersContext(subscribers);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return s;
if (key == Attr.PREFETCH) return prefetch;
if (key == Attr.ERROR) return error;
if (key == Attr.BUFFERED) return queue != null ? queue.size() : 0;
if (key == Attr.TERMINATED) return isTerminated();
if (key == Attr.CANCELLED) return s == Operators.cancelledSubscription();
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return null;
}
@Override
public boolean isDisposed() {
long state = this.state;
return isTerminated(state) || isCancelled(state);
}
static void clearAndFinalize(PublishSubscriber<?> instance) {
for (; ; ) {
final long state = instance.state;
if (isFinalized(state)) {
instance.clear();
return;
}
if (isSubscriptionSet(state)) {
instance.clear();
}
if (STATE.compareAndSet(
instance, state,
(state & ~WORK_IN_PROGRESS_MASK) | FINALIZED_FLAG)) {
break;
}
}
}
static long addWork(PublishSubscriber<?> instance) {
for (;;) {
long state = instance.state;
if (STATE.compareAndSet(instance, state, addWork(state))) {
return state;
}
}
}
static long addWorkIfSubscribed(PublishSubscriber<?> instance) {
for (;;) {
long state = instance.state;
if (!isSubscriptionSet(state)) {
return state;
}
if (STATE.compareAndSet(instance, state, addWork(state))) {
return state;
}
}
}
static long addWork(long state) {
if ((state & WORK_IN_PROGRESS_MASK) == WORK_IN_PROGRESS_MASK) {
return (state &~ WORK_IN_PROGRESS_MASK) | 1;
}
else {
return state + 1;
}
}
static long markTerminated(PublishSubscriber<?> instance) {
for (;;) {
long state = instance.state;
if (isCancelled(state) || isTerminated(state)) {
return state;
}
long nextState = addWork(state);
if (STATE.compareAndSet(instance, state, nextState | TERMINATED_FLAG)) {
return state;
}
}
}
static long markConnected(PublishSubscriber<?> instance) {
for (;;) {
long state = instance.state;
if (isConnected(state)) {
return state;
}
if (STATE.compareAndSet(instance, state, state | CONNECTED_FLAG)) {
return state;
}
}
}
static long markSubscriptionSet(PublishSubscriber<?> instance) {
for (;;) {
long state = instance.state;
if (isCancelled(state)) {
return state;
}
if (STATE.compareAndSet(instance, state, state | SUBSCRIPTION_SET_FLAG)) {
return state;
}
}
}
static long markSubscriptionSetAndAddWork(PublishSubscriber<?> instance) {
for (;;) {
long state = instance.state;
if (isCancelled(state)) {
return state;
}
long nextState = addWork(state);
if (STATE.compareAndSet(instance, state, nextState | SUBSCRIPTION_SET_FLAG)) {
return state;
}
}
}
static long markCancelled(PublishSubscriber<?> instance) {
for (;;) {
long state = instance.state;
if (isCancelled(state)) {
return state;
}
long nextState = addWork(state);
if (STATE.compareAndSet(instance, state, nextState | CANCELLED_FLAG)) {
return state;
}
}
}
static long markWorkDone(PublishSubscriber<?> instance, long expectedState) {
for (;;) {
long state = instance.state;
if (expectedState != state) {
return state;
}
long nextState = state & ~WORK_IN_PROGRESS_MASK;
if (STATE.compareAndSet(instance, state, nextState)) {
return nextState;
}
}
}
static boolean isConnected(long state) {
return (state & CONNECTED_FLAG) == CONNECTED_FLAG;
}
static boolean isFinalized(long state) {
return (state & FINALIZED_FLAG) == FINALIZED_FLAG;
}
static boolean isCancelled(long state) {
return (state & CANCELLED_FLAG) == CANCELLED_FLAG;
}
static boolean isTerminated(long state) {
return (state & TERMINATED_FLAG) == TERMINATED_FLAG;
}
static boolean isSubscriptionSet(long state) {
return (state & SUBSCRIPTION_SET_FLAG) == SUBSCRIPTION_SET_FLAG;
}
static boolean hasWorkInProgress(long state) {
return (state & WORK_IN_PROGRESS_MASK) > 0;
}
static final long FINALIZED_FLAG =
0b1000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000L;
static final long CANCELLED_FLAG =
0b0010_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000L;
static final long TERMINATED_FLAG =
0b0100_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000L;
static final long SUBSCRIPTION_SET_FLAG =
0b0000_1000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000L;
static final long CONNECTED_FLAG =
0b0000_0100_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000L;
static final long WORK_IN_PROGRESS_MASK =
0b0000_0000_0000_0000_0000_0000_0000_0000_1111_1111_1111_1111_1111_1111_1111_1111L;
}
static abstract | PublishSubscriber |
java | apache__flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/batch/TableSourceScanBatchRestoreTest.java | {
"start": 1223,
"end": 1932
} | class ____ extends BatchRestoreTestBase {
public TableSourceScanBatchRestoreTest() {
super(BatchExecTableSourceScan.class);
}
@Override
public List<TableTestProgram> programs() {
return Arrays.asList(
TableSourceScanTestPrograms.PROJECT_PUSHDOWN,
TableSourceScanTestPrograms.PROJECT_PUSHDOWN_DISABLED,
TableSourceScanTestPrograms.FILTER_PUSHDOWN,
TableSourceScanTestPrograms.LIMIT_PUSHDOWN,
TableSourceScanTestPrograms.PARTITION_PUSHDOWN,
TableSourceScanTestPrograms.READING_METADATA,
TableSourceScanTestPrograms.REUSE_SOURCE);
}
}
| TableSourceScanBatchRestoreTest |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/lock/LockInterceptorDeadlockTest.java | {
"start": 428,
"end": 969
} | class ____ {
@RegisterExtension
public ArcTestContainer container = new ArcTestContainer(SimpleApplicationScopedBean.class, Lock.class,
LockInterceptor.class);
@Test
public void testApplicationScopedBean() throws Exception {
SimpleApplicationScopedBean bean = Arc.container().instance(SimpleApplicationScopedBean.class).get();
assertTrue(bean.read());
assertTrue(bean.nestedRead());
assertTrue(bean.nestedWrite());
}
@ApplicationScoped
static | LockInterceptorDeadlockTest |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/SourceOperatorStreamTaskTest.java | {
"start": 23651,
"end": 24249
} | class ____ extends MockSource {
private static final long serialVersionUID = 3078454109555893721L;
private final TestingExternallyInducedSourceReader reader;
private TestingExternallyInducedSource(TestingExternallyInducedSourceReader reader) {
super(Boundedness.CONTINUOUS_UNBOUNDED, 1);
this.reader = reader;
}
@Override
public SourceReader<Integer, MockSourceSplit> createReader(
SourceReaderContext readerContext) {
return reader;
}
}
private static | TestingExternallyInducedSource |
java | apache__maven | compat/maven-embedder/src/test/java/org/apache/maven/cli/transfer/SimplexTransferListenerTest.java | {
"start": 1336,
"end": 5073
} | class ____ {
@Test
void cancellation() throws InterruptedException {
TransferListener delegate = new TransferListener() {
@Override
public void transferInitiated(TransferEvent event) throws TransferCancelledException {
throw new TransferCancelledException();
}
@Override
public void transferStarted(TransferEvent event) throws TransferCancelledException {
throw new TransferCancelledException();
}
@Override
public void transferProgressed(TransferEvent event) throws TransferCancelledException {
throw new TransferCancelledException();
}
@Override
public void transferCorrupted(TransferEvent event) throws TransferCancelledException {
throw new TransferCancelledException();
}
@Override
public void transferSucceeded(TransferEvent event) {}
@Override
public void transferFailed(TransferEvent event) {}
};
SimplexTransferListener listener = new SimplexTransferListener(delegate);
TransferResource resource =
new TransferResource(null, null, "http://maven.org/test/test-resource", new File("file"), null);
DefaultRepositorySystemSession session = new DefaultRepositorySystemSession(h -> false); // no close handle
// for technical reasons we cannot throw here, even if delegate does cancel transfer
listener.transferInitiated(event(session, resource, TransferEvent.EventType.INITIATED));
Thread.sleep(500); // to make sure queue is processed, cancellation applied
// subsequent call will cancel
assertThrows(
TransferCancelledException.class,
() -> listener.transferStarted(event(session, resource, TransferEvent.EventType.STARTED)));
}
@Test
void handlesAbsentTransferSource() throws InterruptedException, TransferCancelledException {
TransferResource resource = new TransferResource(null, null, "http://maven.org/test/test-resource", null, null);
RepositorySystemSession session = Mockito.mock(RepositorySystemSession.class);
TransferListener delegate = Mockito.mock(TransferListener.class);
SimplexTransferListener listener = new SimplexTransferListener(delegate);
TransferEvent transferInitiatedEvent = event(session, resource, TransferEvent.EventType.INITIATED);
TransferEvent transferStartedEvent = event(session, resource, TransferEvent.EventType.STARTED);
TransferEvent transferProgressedEvent = event(session, resource, TransferEvent.EventType.PROGRESSED);
TransferEvent transferSucceededEvent = event(session, resource, TransferEvent.EventType.SUCCEEDED);
listener.transferInitiated(transferInitiatedEvent);
listener.transferStarted(transferStartedEvent);
listener.transferProgressed(transferProgressedEvent);
listener.transferSucceeded(transferSucceededEvent);
Thread.sleep(500); // to make sure queue is processed, cancellation applied
Mockito.verify(delegate).transferInitiated(transferInitiatedEvent);
Mockito.verify(delegate).transferStarted(transferStartedEvent);
Mockito.verify(delegate).transferProgressed(transferProgressedEvent);
Mockito.verify(delegate).transferSucceeded(transferSucceededEvent);
}
private static TransferEvent event(
RepositorySystemSession session, TransferResource resource, TransferEvent.EventType type) {
return new TransferEvent.Builder(session, resource).setType(type).build();
}
}
| SimplexTransferListenerTest |
java | apache__kafka | clients/src/test/java/org/apache/kafka/clients/ClusterConnectionStatesTest.java | {
"start": 1731,
"end": 23278
} | class ____ {
private static ArrayList<InetAddress> initialAddresses;
private static ArrayList<InetAddress> newAddresses;
static {
try {
initialAddresses = new ArrayList<>(Arrays.asList(
InetAddress.getByName("10.200.20.100"),
InetAddress.getByName("10.200.20.101"),
InetAddress.getByName("10.200.20.102")
));
newAddresses = new ArrayList<>(Arrays.asList(
InetAddress.getByName("10.200.20.103"),
InetAddress.getByName("10.200.20.104"),
InetAddress.getByName("10.200.20.105")
));
} catch (UnknownHostException e) {
fail("Attempted to create an invalid InetAddress, this should not happen");
}
}
private final MockTime time = new MockTime();
private final long reconnectBackoffMs = 10 * 1000;
private final long reconnectBackoffMax = 60 * 1000;
private final long connectionSetupTimeoutMs = 10 * 1000;
private final long connectionSetupTimeoutMaxMs = 127 * 1000;
private final int reconnectBackoffExpBase = ClusterConnectionStates.RECONNECT_BACKOFF_EXP_BASE;
private final double reconnectBackoffJitter = ClusterConnectionStates.RECONNECT_BACKOFF_JITTER;
private final int connectionSetupTimeoutExpBase = ClusterConnectionStates.CONNECTION_SETUP_TIMEOUT_EXP_BASE;
private final double connectionSetupTimeoutJitter = ClusterConnectionStates.CONNECTION_SETUP_TIMEOUT_JITTER;
private final String nodeId1 = "1001";
private final String nodeId2 = "2002";
private final String nodeId3 = "3003";
private final String hostTwoIps = "multiple.ip.address";
// For testing nodes with a single IP address, use localhost and default DNS resolution
private final DefaultHostResolver singleIPHostResolver = new DefaultHostResolver();
// For testing nodes with multiple IP addresses, mock DNS resolution to get consistent results
private final AddressChangeHostResolver multipleIPHostResolver = new AddressChangeHostResolver(
initialAddresses.toArray(new InetAddress[0]), newAddresses.toArray(new InetAddress[0]));
private ClusterConnectionStates connectionStates;
@BeforeEach
public void setup() {
this.connectionStates = new ClusterConnectionStates(reconnectBackoffMs, reconnectBackoffMax,
connectionSetupTimeoutMs, connectionSetupTimeoutMaxMs, new LogContext(), this.singleIPHostResolver);
}
@Test
public void testClusterConnectionStateChanges() {
assertTrue(connectionStates.canConnect(nodeId1, time.milliseconds()));
assertEquals(0, connectionStates.connectionDelay(nodeId1, time.milliseconds()));
// Start connecting to Node and check state
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
assertEquals(ConnectionState.CONNECTING, connectionStates.connectionState(nodeId1));
assertTrue(connectionStates.isConnecting(nodeId1));
assertFalse(connectionStates.isReady(nodeId1, time.milliseconds()));
assertFalse(connectionStates.isBlackedOut(nodeId1, time.milliseconds()));
assertFalse(connectionStates.hasReadyNodes(time.milliseconds()));
long connectionDelay = connectionStates.connectionDelay(nodeId1, time.milliseconds());
double connectionDelayDelta = connectionSetupTimeoutMs * connectionSetupTimeoutJitter;
assertEquals(connectionSetupTimeoutMs, connectionDelay, connectionDelayDelta);
time.sleep(100);
// Successful connection
connectionStates.ready(nodeId1);
assertEquals(ConnectionState.READY, connectionStates.connectionState(nodeId1));
assertTrue(connectionStates.isReady(nodeId1, time.milliseconds()));
assertTrue(connectionStates.hasReadyNodes(time.milliseconds()));
assertFalse(connectionStates.isConnecting(nodeId1));
assertFalse(connectionStates.isBlackedOut(nodeId1, time.milliseconds()));
assertEquals(Long.MAX_VALUE, connectionStates.connectionDelay(nodeId1, time.milliseconds()));
time.sleep(15000);
// Disconnected from broker
connectionStates.disconnected(nodeId1, time.milliseconds());
assertEquals(ConnectionState.DISCONNECTED, connectionStates.connectionState(nodeId1));
assertTrue(connectionStates.isDisconnected(nodeId1));
assertTrue(connectionStates.isBlackedOut(nodeId1, time.milliseconds()));
assertFalse(connectionStates.isConnecting(nodeId1));
assertFalse(connectionStates.hasReadyNodes(time.milliseconds()));
assertFalse(connectionStates.canConnect(nodeId1, time.milliseconds()));
// After disconnecting we expect a backoff value equal to the reconnect.backoff.ms setting (plus minus 20% jitter)
double backoffTolerance = reconnectBackoffMs * reconnectBackoffJitter;
long currentBackoff = connectionStates.connectionDelay(nodeId1, time.milliseconds());
assertEquals(reconnectBackoffMs, currentBackoff, backoffTolerance);
time.sleep(currentBackoff + 1);
// after waiting for the current backoff value we should be allowed to connect again
assertTrue(connectionStates.canConnect(nodeId1, time.milliseconds()));
}
@Test
public void testMultipleNodeConnectionStates() {
// Check initial state, allowed to connect to all nodes, but no nodes shown as ready
assertTrue(connectionStates.canConnect(nodeId1, time.milliseconds()));
assertTrue(connectionStates.canConnect(nodeId2, time.milliseconds()));
assertFalse(connectionStates.hasReadyNodes(time.milliseconds()));
// Start connecting one node and check that the pool only shows ready nodes after
// successful connect
connectionStates.connecting(nodeId2, time.milliseconds(), "localhost");
assertFalse(connectionStates.hasReadyNodes(time.milliseconds()));
time.sleep(1000);
connectionStates.ready(nodeId2);
assertTrue(connectionStates.hasReadyNodes(time.milliseconds()));
// Connect second node and check that both are shown as ready, pool should immediately
// show ready nodes, since node2 is already connected
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
assertTrue(connectionStates.hasReadyNodes(time.milliseconds()));
time.sleep(1000);
connectionStates.ready(nodeId1);
assertTrue(connectionStates.hasReadyNodes(time.milliseconds()));
time.sleep(12000);
// disconnect nodes and check proper state of pool throughout
connectionStates.disconnected(nodeId2, time.milliseconds());
assertTrue(connectionStates.hasReadyNodes(time.milliseconds()));
assertTrue(connectionStates.isBlackedOut(nodeId2, time.milliseconds()));
assertFalse(connectionStates.isBlackedOut(nodeId1, time.milliseconds()));
time.sleep(connectionStates.connectionDelay(nodeId2, time.milliseconds()));
// by the time node1 disconnects node2 should have been unblocked again
connectionStates.disconnected(nodeId1, time.milliseconds() + 1);
assertTrue(connectionStates.isBlackedOut(nodeId1, time.milliseconds()));
assertFalse(connectionStates.isBlackedOut(nodeId2, time.milliseconds()));
assertFalse(connectionStates.hasReadyNodes(time.milliseconds()));
}
@Test
public void testAuthorizationFailed() {
// Try connecting
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
time.sleep(100);
connectionStates.authenticationFailed(nodeId1, time.milliseconds(), new AuthenticationException("No path to CA for certificate!"));
time.sleep(1000);
assertEquals(ConnectionState.AUTHENTICATION_FAILED, connectionStates.connectionState(nodeId1));
assertNotNull(connectionStates.authenticationException(nodeId1));
assertFalse(connectionStates.hasReadyNodes(time.milliseconds()));
assertFalse(connectionStates.canConnect(nodeId1, time.milliseconds()));
time.sleep(connectionStates.connectionDelay(nodeId1, time.milliseconds()) + 1);
assertTrue(connectionStates.canConnect(nodeId1, time.milliseconds()));
connectionStates.ready(nodeId1);
assertNull(connectionStates.authenticationException(nodeId1));
}
@Test
public void testRemoveNode() {
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
time.sleep(1000);
connectionStates.ready(nodeId1);
time.sleep(10000);
connectionStates.disconnected(nodeId1, time.milliseconds());
// Node is disconnected and blocked, removing it from the list should reset all blocks
connectionStates.remove(nodeId1);
assertTrue(connectionStates.canConnect(nodeId1, time.milliseconds()));
assertFalse(connectionStates.isBlackedOut(nodeId1, time.milliseconds()));
assertEquals(0L, connectionStates.connectionDelay(nodeId1, time.milliseconds()));
}
@Test
public void testMaxReconnectBackoff() {
long effectiveMaxReconnectBackoff = Math.round(reconnectBackoffMax * (1 + reconnectBackoffJitter));
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
time.sleep(1000);
connectionStates.disconnected(nodeId1, time.milliseconds());
// Do 100 reconnect attempts and check that MaxReconnectBackoff (plus jitter) is not exceeded
for (int i = 0; i < 100; i++) {
long reconnectBackoff = connectionStates.connectionDelay(nodeId1, time.milliseconds());
assertTrue(reconnectBackoff <= effectiveMaxReconnectBackoff);
assertFalse(connectionStates.canConnect(nodeId1, time.milliseconds()));
time.sleep(reconnectBackoff + 1);
assertTrue(connectionStates.canConnect(nodeId1, time.milliseconds()));
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
time.sleep(10);
connectionStates.disconnected(nodeId1, time.milliseconds());
}
}
@Test
public void testExponentialReconnectBackoff() {
verifyReconnectExponentialBackoff(false);
verifyReconnectExponentialBackoff(true);
}
@Test
public void testThrottled() {
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
time.sleep(1000);
connectionStates.ready(nodeId1);
time.sleep(10000);
// Initially not throttled.
assertEquals(0, connectionStates.throttleDelayMs(nodeId1, time.milliseconds()));
// Throttle for 100ms from now.
connectionStates.throttle(nodeId1, time.milliseconds() + 100);
assertEquals(100, connectionStates.throttleDelayMs(nodeId1, time.milliseconds()));
// Still throttled after 50ms. The remaining delay is 50ms. The poll delay should be same as throttling delay.
time.sleep(50);
assertEquals(50, connectionStates.throttleDelayMs(nodeId1, time.milliseconds()));
assertEquals(50, connectionStates.pollDelayMs(nodeId1, time.milliseconds()));
// Not throttled anymore when the deadline is reached. The poll delay should be same as connection delay.
time.sleep(50);
assertEquals(0, connectionStates.throttleDelayMs(nodeId1, time.milliseconds()));
assertEquals(connectionStates.connectionDelay(nodeId1, time.milliseconds()),
connectionStates.pollDelayMs(nodeId1, time.milliseconds()));
}
@Test
public void testSingleIP() throws UnknownHostException {
InetAddress[] localhostIps = Stream.of(InetAddress.getByName("127.0.0.1")).toArray(InetAddress[]::new);
HostResolver hostResolver = host -> {
assertEquals("localhost", host);
return localhostIps;
};
connectionStates = new ClusterConnectionStates(reconnectBackoffMs, reconnectBackoffMax,
connectionSetupTimeoutMs, connectionSetupTimeoutMaxMs, new LogContext(), hostResolver);
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
InetAddress currAddress = connectionStates.currentAddress(nodeId1);
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
assertSame(currAddress, connectionStates.currentAddress(nodeId1));
}
@Test
public void testMultipleIPs() throws UnknownHostException {
setupMultipleIPs();
assertTrue(ClientUtils.resolve(hostTwoIps, multipleIPHostResolver).size() > 1);
connectionStates.connecting(nodeId1, time.milliseconds(), hostTwoIps);
InetAddress addr1 = connectionStates.currentAddress(nodeId1);
connectionStates.connecting(nodeId1, time.milliseconds(), hostTwoIps);
InetAddress addr2 = connectionStates.currentAddress(nodeId1);
assertNotSame(addr1, addr2);
connectionStates.connecting(nodeId1, time.milliseconds(), hostTwoIps);
InetAddress addr3 = connectionStates.currentAddress(nodeId1);
assertNotSame(addr1, addr3);
}
@Test
public void testHostResolveChange() throws UnknownHostException {
setupMultipleIPs();
assertTrue(ClientUtils.resolve(hostTwoIps, multipleIPHostResolver).size() > 1);
connectionStates.connecting(nodeId1, time.milliseconds(), hostTwoIps);
InetAddress addr1 = connectionStates.currentAddress(nodeId1);
multipleIPHostResolver.changeAddresses();
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
InetAddress addr2 = connectionStates.currentAddress(nodeId1);
assertNotSame(addr1, addr2);
}
@Test
public void testNodeWithNewHostname() throws UnknownHostException {
setupMultipleIPs();
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
InetAddress addr1 = connectionStates.currentAddress(nodeId1);
this.multipleIPHostResolver.changeAddresses();
connectionStates.connecting(nodeId1, time.milliseconds(), hostTwoIps);
InetAddress addr2 = connectionStates.currentAddress(nodeId1);
assertNotSame(addr1, addr2);
}
@Test
public void testIsPreparingConnection() {
assertFalse(connectionStates.isPreparingConnection(nodeId1));
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
assertTrue(connectionStates.isPreparingConnection(nodeId1));
connectionStates.checkingApiVersions(nodeId1);
assertTrue(connectionStates.isPreparingConnection(nodeId1));
connectionStates.disconnected(nodeId1, time.milliseconds());
assertFalse(connectionStates.isPreparingConnection(nodeId1));
}
@Test
public void testExponentialConnectionSetupTimeout() {
assertTrue(connectionStates.canConnect(nodeId1, time.milliseconds()));
// Check the exponential timeout growth
for (int n = 0; n <= Math.log((double) connectionSetupTimeoutMaxMs / connectionSetupTimeoutMs) / Math.log(connectionSetupTimeoutExpBase); n++) {
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
assertTrue(connectionStates.connectingNodes().contains(nodeId1));
assertEquals(connectionSetupTimeoutMs * Math.pow(connectionSetupTimeoutExpBase, n),
connectionStates.connectionSetupTimeoutMs(nodeId1),
connectionSetupTimeoutMs * Math.pow(connectionSetupTimeoutExpBase, n) * connectionSetupTimeoutJitter);
connectionStates.disconnected(nodeId1, time.milliseconds());
assertFalse(connectionStates.connectingNodes().contains(nodeId1));
}
// Check the timeout value upper bound
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
assertEquals(connectionSetupTimeoutMaxMs,
connectionStates.connectionSetupTimeoutMs(nodeId1),
connectionSetupTimeoutMaxMs * connectionSetupTimeoutJitter);
assertTrue(connectionStates.connectingNodes().contains(nodeId1));
// Should reset the timeout value to the init value
connectionStates.ready(nodeId1);
assertEquals(connectionSetupTimeoutMs,
connectionStates.connectionSetupTimeoutMs(nodeId1),
connectionSetupTimeoutMs * connectionSetupTimeoutJitter);
assertFalse(connectionStates.connectingNodes().contains(nodeId1));
connectionStates.disconnected(nodeId1, time.milliseconds());
// Check if the connection state transition from ready to disconnected
// won't increase the timeout value
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
assertEquals(connectionSetupTimeoutMs,
connectionStates.connectionSetupTimeoutMs(nodeId1),
connectionSetupTimeoutMs * connectionSetupTimeoutJitter);
assertTrue(connectionStates.connectingNodes().contains(nodeId1));
}
@Test
public void testTimedOutConnections() {
// Initiate two connections
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
connectionStates.connecting(nodeId2, time.milliseconds(), "localhost");
// Expect no timed out connections
assertEquals(0, connectionStates.nodesWithConnectionSetupTimeout(time.milliseconds()).size());
// Advance time by half of the connection setup timeout
time.sleep(connectionSetupTimeoutMs / 2);
// Initiate a third connection
connectionStates.connecting(nodeId3, time.milliseconds(), "localhost");
// Advance time beyond the connection setup timeout (+ max jitter) for the first two connections
time.sleep((long) (connectionSetupTimeoutMs / 2 + connectionSetupTimeoutMs * connectionSetupTimeoutJitter));
// Expect two timed out connections.
List<String> timedOutConnections = connectionStates.nodesWithConnectionSetupTimeout(time.milliseconds());
assertEquals(2, timedOutConnections.size());
assertTrue(timedOutConnections.contains(nodeId1));
assertTrue(timedOutConnections.contains(nodeId2));
// Disconnect the first two connections
connectionStates.disconnected(nodeId1, time.milliseconds());
connectionStates.disconnected(nodeId2, time.milliseconds());
// Advance time beyond the connection setup timeout (+ max jitter) for the third connections
time.sleep((long) (connectionSetupTimeoutMs / 2 + connectionSetupTimeoutMs * connectionSetupTimeoutJitter));
// Expect one timed out connection
timedOutConnections = connectionStates.nodesWithConnectionSetupTimeout(time.milliseconds());
assertEquals(1, timedOutConnections.size());
assertTrue(timedOutConnections.contains(nodeId3));
// Disconnect the third connection
connectionStates.disconnected(nodeId3, time.milliseconds());
// Expect no timed out connections
assertEquals(0, connectionStates.nodesWithConnectionSetupTimeout(time.milliseconds()).size());
}
@Test
public void testSkipLastAttemptedIp() throws UnknownHostException {
setupMultipleIPs();
assertTrue(ClientUtils.resolve(hostTwoIps, multipleIPHostResolver).size() > 1);
// Connect to the first IP
connectionStates.connecting(nodeId1, time.milliseconds(), hostTwoIps);
InetAddress addr1 = connectionStates.currentAddress(nodeId1);
// Disconnect, which will trigger re-resolution with the first IP still first
connectionStates.disconnected(nodeId1, time.milliseconds());
// Connect again, the first IP should get skipped
connectionStates.connecting(nodeId1, time.milliseconds(), hostTwoIps);
InetAddress addr2 = connectionStates.currentAddress(nodeId1);
assertNotSame(addr1, addr2);
}
private void setupMultipleIPs() {
this.connectionStates = new ClusterConnectionStates(reconnectBackoffMs, reconnectBackoffMax,
connectionSetupTimeoutMs, connectionSetupTimeoutMaxMs, new LogContext(), this.multipleIPHostResolver);
}
private void verifyReconnectExponentialBackoff(boolean enterCheckingApiVersionState) {
double reconnectBackoffMaxExp = Math.log(reconnectBackoffMax / (double) Math.max(reconnectBackoffMs, 1))
/ Math.log(reconnectBackoffExpBase);
connectionStates.remove(nodeId1);
// Run through 10 disconnects and check that reconnect backoff value is within expected range for every attempt
for (int i = 0; i < 10; i++) {
connectionStates.connecting(nodeId1, time.milliseconds(), "localhost");
if (enterCheckingApiVersionState) {
connectionStates.checkingApiVersions(nodeId1);
}
connectionStates.disconnected(nodeId1, time.milliseconds());
// Calculate expected backoff value without jitter
long expectedBackoff = Math.round(Math.pow(reconnectBackoffExpBase, Math.min(i, reconnectBackoffMaxExp))
* reconnectBackoffMs);
long currentBackoff = connectionStates.connectionDelay(nodeId1, time.milliseconds());
assertEquals(expectedBackoff, currentBackoff, reconnectBackoffJitter * expectedBackoff);
time.sleep(connectionStates.connectionDelay(nodeId1, time.milliseconds()) + 1);
}
}
}
| ClusterConnectionStatesTest |
java | google__guice | extensions/assistedinject/src/com/google/inject/assistedinject/FactoryProvider.java | {
"start": 9203,
"end": 14582
} | class ____ throw exceptions
// to add errors, which isn't really the best way in Guice
throw newConfigurationException(
"Parameter of type '%s' is not injectable or annotated "
+ "with @Assisted for Constructor '%s'",
p, c);
}
}
}
}
private void checkDeclaredExceptionsMatch() {
for (Map.Entry<Method, AssistedConstructor<?>> entry : factoryMethodToConstructor.entrySet()) {
for (Class<?> constructorException : entry.getValue().getDeclaredExceptions()) {
if (!isConstructorExceptionCompatibleWithFactoryExeception(
constructorException, entry.getKey().getExceptionTypes())) {
throw newConfigurationException(
"Constructor %s declares an exception, but no compatible "
+ "exception is thrown by the factory method %s",
entry.getValue(), entry.getKey());
}
}
}
}
private boolean isConstructorExceptionCompatibleWithFactoryExeception(
Class<?> constructorException, Class<?>[] factoryExceptions) {
for (Class<?> factoryException : factoryExceptions) {
if (factoryException.isAssignableFrom(constructorException)) {
return true;
}
}
return false;
}
private boolean paramCanBeInjected(Parameter parameter, Injector injector) {
return parameter.isBound(injector);
}
private static Map<Method, AssistedConstructor<?>> createMethodMapping(
TypeLiteral<?> factoryType, TypeLiteral<?> implementationType) {
List<AssistedConstructor<?>> constructors = Lists.newArrayList();
for (Constructor<?> constructor : implementationType.getRawType().getDeclaredConstructors()) {
if (constructor.isAnnotationPresent(AssistedInject.class)) {
AssistedConstructor<?> assistedConstructor =
AssistedConstructor.create(
constructor, implementationType.getParameterTypes(constructor));
constructors.add(assistedConstructor);
}
}
if (constructors.isEmpty()) {
return ImmutableMap.of();
}
Method[] factoryMethods = factoryType.getRawType().getMethods();
if (constructors.size() != factoryMethods.length) {
throw newConfigurationException(
"Constructor mismatch: %s has %s @AssistedInject "
+ "constructors, factory %s has %s creation methods",
implementationType, constructors.size(), factoryType, factoryMethods.length);
}
Map<ParameterListKey, AssistedConstructor<?>> paramsToConstructor = Maps.newHashMap();
for (AssistedConstructor<?> c : constructors) {
if (paramsToConstructor.containsKey(c.getAssistedParameters())) {
throw new RuntimeException("Duplicate constructor, " + c);
}
paramsToConstructor.put(c.getAssistedParameters(), c);
}
Map<Method, AssistedConstructor<?>> result = Maps.newHashMap();
for (Method method : factoryMethods) {
if (!method.getReturnType().isAssignableFrom(implementationType.getRawType())) {
throw newConfigurationException(
"Return type of method %s is not assignable from %s", method, implementationType);
}
List<Type> parameterTypes = Lists.newArrayList();
for (TypeLiteral<?> parameterType : factoryType.getParameterTypes(method)) {
parameterTypes.add(parameterType.getType());
}
ParameterListKey methodParams = new ParameterListKey(parameterTypes);
if (!paramsToConstructor.containsKey(methodParams)) {
throw newConfigurationException(
"%s has no @AssistInject constructor that takes the "
+ "@Assisted parameters %s in that order. @AssistInject constructors are %s",
implementationType, methodParams, paramsToConstructor.values());
}
method.getParameterAnnotations();
for (Annotation[] parameterAnnotations : method.getParameterAnnotations()) {
for (Annotation parameterAnnotation : parameterAnnotations) {
if (parameterAnnotation.annotationType() == Assisted.class) {
throw newConfigurationException(
"Factory method %s has an @Assisted parameter, which is incompatible with the"
+ " deprecated @AssistedInject annotation. Please replace @AssistedInject with"
+ " @Inject on the %s constructor.",
method, implementationType);
}
}
}
AssistedConstructor<?> matchingConstructor = paramsToConstructor.remove(methodParams);
result.put(method, matchingConstructor);
}
return result;
}
@Override
public Set<Dependency<?>> getDependencies() {
List<Dependency<?>> dependencies = Lists.newArrayList();
for (AssistedConstructor<?> constructor : factoryMethodToConstructor.values()) {
for (Parameter parameter : constructor.getAllParameters()) {
if (!parameter.isProvidedByFactory()) {
dependencies.add(Dependency.get(parameter.getPrimaryBindingKey()));
}
}
}
return ImmutableSet.copyOf(dependencies);
}
@Override
public F get() {
InvocationHandler invocationHandler =
new InvocationHandler() {
@Override
public Object invoke(Object proxy, Method method, Object[] creationArgs)
throws Throwable {
// pass methods from Object. | we |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/commons/util/AnnotationUtilsTests.java | {
"start": 23593,
"end": 23744
} | interface ____ {
}
@Target({ ElementType.TYPE, ElementType.METHOD })
@Retention(RetentionPolicy.RUNTIME)
@Annotation1
@Inherited
@ | ComposedAnnotation |
java | google__guava | android/guava/src/com/google/common/math/BigDecimalMath.java | {
"start": 2496,
"end": 3120
} | class ____ extends ToDoubleRounder<BigDecimal> {
static final BigDecimalToDoubleRounder INSTANCE = new BigDecimalToDoubleRounder();
private BigDecimalToDoubleRounder() {}
@Override
double roundToDoubleArbitrarily(BigDecimal bigDecimal) {
return bigDecimal.doubleValue();
}
@Override
int sign(BigDecimal bigDecimal) {
return bigDecimal.signum();
}
@Override
BigDecimal toX(double d, RoundingMode mode) {
return new BigDecimal(d);
}
@Override
BigDecimal minus(BigDecimal a, BigDecimal b) {
return a.subtract(b);
}
}
}
| BigDecimalToDoubleRounder |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/transaction/interceptor/TransactionInterceptor.java | {
"start": 1546,
"end": 2086
} | class ____
* contains the integration with Spring's underlying transaction API.
* TransactionInterceptor simply calls the relevant superclass methods
* such as {@link #invokeWithinTransaction} in the correct order.
*
* <p>TransactionInterceptors are thread-safe.
*
* @author Rod Johnson
* @author Juergen Hoeller
* @author Sebastien Deleuze
* @see TransactionProxyFactoryBean
* @see org.springframework.aop.framework.ProxyFactoryBean
* @see org.springframework.aop.framework.ProxyFactory
*/
@SuppressWarnings("serial")
public | which |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/OptionalFactories.java | {
"start": 3616,
"end": 8132
} | class ____ {
/**
* The factory classes that implement {@code Provider<Optional<T>>} or {@code
* Producer<Optional<T>>} for present optional bindings for a given kind of dependency request
* within the component.
*
* <p>The key is the {@code Provider<Optional<T>>} type.
*/
private final Map<PresentFactorySpec, XTypeSpec> presentFactoryClasses =
new TreeMap<>(
Comparator.comparing(PresentFactorySpec::valueKind)
.thenComparing(PresentFactorySpec::frameworkType)
.thenComparing(PresentFactorySpec::optionalKind));
/**
* The static methods that return a {@code Provider<Optional<T>>} that always returns an absent
* value.
*/
private final Map<OptionalKind, XFunSpec> absentOptionalProviderMethods = new TreeMap<>();
/**
* The static fields for {@code Provider<Optional<T>>} objects that always return an absent
* value.
*/
private final Map<OptionalKind, XPropertySpec> absentOptionalProviderFields = new TreeMap<>();
@Inject
PerGeneratedFileCache() {}
}
private final PerGeneratedFileCache perGeneratedFileCache;
private final GeneratedImplementation topLevelImplementation;
@Inject
OptionalFactories(
PerGeneratedFileCache perGeneratedFileCache,
@TopLevel GeneratedImplementation topLevelImplementation) {
this.perGeneratedFileCache = perGeneratedFileCache;
this.topLevelImplementation = topLevelImplementation;
}
/**
* Returns an expression that calls a static method that returns a {@code Provider<Optional<T>>}
* for absent optional bindings.
*/
XCodeBlock absentOptionalProvider(OptionalBinding binding) {
verify(
binding.bindingType().equals(BindingType.PROVISION),
"Absent optional bindings should be provisions: %s",
binding);
OptionalKind optionalKind = OptionalType.from(binding.key()).kind();
return XCodeBlock.of(
"%N()",
perGeneratedFileCache.absentOptionalProviderMethods.computeIfAbsent(
optionalKind,
kind -> {
XFunSpec method = absentOptionalProviderMethod(kind);
topLevelImplementation.addMethod(ABSENT_OPTIONAL_METHOD, method);
return method;
}));
}
/**
* Creates a method specification for a {@code Provider<Optional<T>>} that always returns an
* absent value.
*/
private XFunSpec absentOptionalProviderMethod(OptionalKind optionalKind) {
XTypeName typeVariable = XTypeNames.getTypeVariableName("T");
return methodBuilder(
String.format(
"absent%sProvider", UPPER_UNDERSCORE.to(UPPER_CAMEL, optionalKind.name())))
.addModifiers(PRIVATE, STATIC)
.addTypeVariable(typeVariable)
.returns(daggerProviderOf(optionalKind.of(typeVariable)))
.addJavadoc(
"Returns a {@link %T} that returns {@code %L}.",
XTypeNames.DAGGER_PROVIDER, optionalKind.absentValueExpression())
.addCode("%L // safe covariant cast\n", XAnnotationSpecs.suppressWarnings(UNCHECKED))
.addStatement(
"%1T provider = (%1T) %2N",
daggerProviderOf(optionalKind.of(typeVariable)),
perGeneratedFileCache.absentOptionalProviderFields.computeIfAbsent(
optionalKind,
kind -> {
XPropertySpec field = absentOptionalProviderField(kind);
topLevelImplementation.addField(ABSENT_OPTIONAL_FIELD, field);
return field;
}))
.addStatement("return provider")
.build();
}
/**
* Creates a field specification for a {@code Provider<Optional<T>>} that always returns an absent
* value.
*/
private XPropertySpec absentOptionalProviderField(OptionalKind optionalKind) {
return XPropertySpecs.builder(
String.format("ABSENT_%s_PROVIDER", optionalKind.name()),
XTypeNames.DAGGER_PROVIDER,
PRIVATE,
STATIC,
FINAL)
.addAnnotation(suppressWarnings(RAWTYPES))
.initializer(
"%T.create(%L)", XTypeNames.INSTANCE_FACTORY, optionalKind.absentValueExpression())
.addJavadoc(
"A {@link %T} that returns {@code %L}.",
XTypeNames.DAGGER_PROVIDER, optionalKind.absentValueExpression())
.build();
}
/** Information about the type of a factory for present bindings. */
@AutoValue
abstract static | PerGeneratedFileCache |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/typed/FooImpl.java | {
"start": 160,
"end": 200
} | class ____ implements Foo1, Foo2 {
}
| FooImpl |
java | apache__camel | components/camel-ai/camel-neo4j/src/main/java/org/apache/camel/component/neo4j/Neo4Operation.java | {
"start": 853,
"end": 1117
} | enum ____ {
CREATE_NODE,
DELETE_NODE,
RETRIEVE_NODES,
RETRIEVE_NODES_AND_UPDATE_WITH_CYPHER_QUERY,
ADD_OR_DELETE_NODE_WITH_CYPHER_QUERY,
CREATE_VECTOR_INDEX,
DROP_VECTOR_INDEX,
CREATE_VECTOR,
VECTOR_SIMILARITY_SEARCH
}
| Neo4Operation |
java | quarkusio__quarkus | extensions/arc/deployment/src/main/java/io/quarkus/arc/deployment/SyntheticBeanBuildItem.java | {
"start": 3894,
"end": 11526
} | class ____ extends BeanConfiguratorBase<ExtendedBeanConfigurator, Object> {
private Object runtimeProxy;
private Supplier<?> supplier;
private RuntimeValue<?> runtimeValue;
private Function<SyntheticCreationalContext<?>, ?> fun;
private boolean staticInit;
private Supplier<ActiveResult> checkActive;
ExtendedBeanConfigurator(DotName implClazz) {
super(implClazz);
this.staticInit = true;
}
/**
* Finish the configurator.
*
* @return a new build item
*/
public SyntheticBeanBuildItem done() {
if (supplier == null && runtimeValue == null && fun == null && runtimeProxy == null && creatorConsumer == null) {
throw new IllegalStateException(
"Synthetic bean does not provide a creation method, use ExtendedBeanConfigurator#creator(), ExtendedBeanConfigurator#supplier(), ExtendedBeanConfigurator#createWith() or ExtendedBeanConfigurator#runtimeValue()");
}
if (checkActive != null && supplier == null && runtimeValue == null && fun == null && runtimeProxy == null) {
// "check active" procedure is set via recorder proxy,
// creation function must also be set via recorder proxy
throw new IllegalStateException(
"Synthetic bean has ExtendedBeanConfigurator#checkActive(), but does not have ExtendedBeanConfigurator#supplier() / createWith() / runtimeValue() / runtimeProxy()");
}
return new SyntheticBeanBuildItem(this);
}
/**
* The contextual bean instance is supplied by a proxy returned from a recorder method.
* <p>
* Use {@link #createWith(Function)} if you want to leverage build-time parameters or synthetic injection points.
*
* @param supplier A supplier returned from a recorder method
* @return self
* @throws IllegalArgumentException If the supplier argument is not a proxy returned from a recorder method
*/
public ExtendedBeanConfigurator supplier(Supplier<?> supplier) {
checkReturnedProxy(supplier);
checkMultipleCreationMethods();
this.supplier = Objects.requireNonNull(supplier);
return this;
}
/**
* The contextual bean instance is a proxy returned from a recorder method.
* <p>
* Use {@link #createWith(Function)} if you want to leverage build-time parameters or synthetic injection points.
*
* @param runtimeValue A runtime value returned from a recorder method
* @return self
* @throws IllegalArgumentException If the runtimeValue argument is not a proxy returned from a recorder method
*/
public ExtendedBeanConfigurator runtimeValue(RuntimeValue<?> runtimeValue) {
checkReturnedProxy(runtimeValue);
checkMultipleCreationMethods();
this.runtimeValue = Objects.requireNonNull(runtimeValue);
return this;
}
/**
* The contextual bean instance is created by a proxy returned from a recorder method.
* <p>
* This method is useful if you need to use build-time parameters or synthetic injection points during creation of a
* bean instance.
*
* @param function A function returned from a recorder method
* @return self
* @throws IllegalArgumentException If the function argument is not a proxy returned from a recorder method
*/
public <B> ExtendedBeanConfigurator createWith(Function<SyntheticCreationalContext<B>, B> function) {
checkReturnedProxy(function);
checkMultipleCreationMethods();
this.fun = cast(Objects.requireNonNull(function));
return this;
}
/**
* The contextual bean instance is a proxy returned from a recorder method.
* <p>
* Use {@link #createWith(Function)} if you want to leverage build-time parameters or synthetic injection points.
*
* @param proxy A proxy returned from a recorder method
* @return self
* @throws IllegalArgumentException If the proxy argument is not a proxy returned from a recorder method
*/
public ExtendedBeanConfigurator runtimeProxy(Object proxy) {
checkReturnedProxy(proxy);
checkMultipleCreationMethods();
this.runtimeProxy = Objects.requireNonNull(proxy);
return this;
}
/**
* A synthetic bean whose instance is produced through a recorder is initialized during
* {@link ExecutionTime#STATIC_INIT} by default.
* <p>
* It is possible to change this behavior and initialize the bean during the {@link ExecutionTime#RUNTIME_INIT}.
* However, in such case a client that attempts to obtain such bean during {@link ExecutionTime#STATIC_INIT} or before
* runtime-init synthetic beans are initialized will receive an exception.
* <p>
* {@link ExecutionTime#RUNTIME_INIT} build steps that access a runtime-init synthetic bean should consume the
* {@link SyntheticBeansRuntimeInitBuildItem}.
*
* @return self
* @see SyntheticBeansRuntimeInitBuildItem
*/
public ExtendedBeanConfigurator setRuntimeInit() {
this.staticInit = false;
return this;
}
/**
* The {@link #checkActive(Consumer)} procedure is a {@code Supplier<ActiveResult>} proxy
* returned from a recorder method.
*
* @param checkActive a {@code Supplier<ActiveResult>} returned from a recorder method
* @return self
* @throws IllegalArgumentException if the {@code checkActive} argument is not a proxy returned from a recorder method
*/
public ExtendedBeanConfigurator checkActive(Supplier<ActiveResult> checkActive) {
checkReturnedProxy(checkActive);
this.checkActive = Objects.requireNonNull(checkActive);
return this;
}
DotName getImplClazz() {
return implClazz;
}
Set<Type> getTypes() {
return types;
}
Set<AnnotationInstance> getQualifiers() {
return qualifiers;
}
String getIdentifier() {
return identifier;
}
Supplier<?> getSupplier() {
return supplier;
}
RuntimeValue<?> getRuntimeValue() {
return runtimeValue;
}
Function<SyntheticCreationalContext<?>, ?> getFunction() {
return fun;
}
Object getRuntimeProxy() {
return runtimeProxy;
}
Supplier<ActiveResult> getCheckActive() {
return checkActive;
}
private void checkMultipleCreationMethods() {
if (runtimeProxy == null && runtimeValue == null && supplier == null && fun == null) {
return;
}
throw new IllegalStateException("It is not possible to specify multiple creation methods");
}
private void checkReturnedProxy(Object object) {
if (object instanceof ReturnedProxy) {
return;
}
throw new IllegalArgumentException(
"The object is not a proxy returned from a recorder method: " + object.toString());
}
}
}
| ExtendedBeanConfigurator |
java | grpc__grpc-java | api/src/main/java/io/grpc/ClientCall.java | {
"start": 10144,
"end": 13266
} | class ____ be called after this method has been called.
*
* <p>It is recommended that at least one of the arguments to be non-{@code null}, to provide
* useful debug information. Both argument being null may log warnings and result in suboptimal
* performance. Also note that the provided information will not be sent to the server.
*
* @param message if not {@code null}, will appear as the description of the CANCELLED status
* @param cause if not {@code null}, will appear as the cause of the CANCELLED status
*/
public abstract void cancel(@Nullable String message, @Nullable Throwable cause);
/**
* Close the call for request message sending. Incoming response messages are unaffected. This
* should be called when no more messages will be sent from the client.
*
* @throws IllegalStateException if call is already {@code halfClose()}d or {@link #cancel}ed
*/
public abstract void halfClose();
/**
* Send a request message to the server. May be called zero or more times depending on how many
* messages the server is willing to accept for the operation.
*
* @param message message to be sent to the server.
* @throws IllegalStateException if call is {@link #halfClose}d or explicitly {@link #cancel}ed
*/
public abstract void sendMessage(ReqT message);
/**
* If {@code true}, indicates that the call is capable of sending additional messages
* without requiring excessive buffering internally. This event is
* just a suggestion and the application is free to ignore it, however doing so may
* result in excessive buffering within the call.
*
* <p>If {@code false}, {@link Listener#onReady()} will be called after {@code isReady()}
* transitions to {@code true}.
*
* <p>If the type of the call is either {@link MethodDescriptor.MethodType#UNARY} or
* {@link MethodDescriptor.MethodType#SERVER_STREAMING}, this method may persistently return
* false. Calls that send exactly one message should not check this method.
*
* <p>This abstract class's implementation always returns {@code true}. Implementations generally
* override the method.
*/
public boolean isReady() {
return true;
}
/**
* Enables per-message compression, if an encoding type has been negotiated. If no message
* encoding has been negotiated, this is a no-op. By default per-message compression is enabled,
* but may not have any effect if compression is not enabled on the call.
*/
public void setMessageCompression(boolean enabled) {
// noop
}
/**
* Returns additional properties of the call. May only be called after {@link Listener#onHeaders}
* or {@link Listener#onClose}. If called prematurely, the implementation may throw {@code
* IllegalStateException} or return arbitrary {@code Attributes}.
*
* @return non-{@code null} attributes
* @throws IllegalStateException (optional) if called before permitted
*/
@ExperimentalApi("https://github.com/grpc/grpc-java/issues/2607")
@Grpc.TransportAttr
public Attributes getAttributes() {
return Attributes.EMPTY;
}
}
| can |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java | {
"start": 25552,
"end": 27658
} | class ____ implements StubbableTransport.RequestHandlingBehavior<TransportRequest> {
private final String actionName;
private final AtomicBoolean recoveryStarted;
private final Runnable connectionBreaker;
private final AtomicInteger blocksRemaining;
private TransientReceiveRejected(String actionName, AtomicBoolean recoveryStarted, Runnable connectionBreaker) {
this.actionName = actionName;
this.recoveryStarted = recoveryStarted;
this.connectionBreaker = connectionBreaker;
this.blocksRemaining = new AtomicInteger(randomIntBetween(1, 3));
}
@Override
public void messageReceived(
TransportRequestHandler<TransportRequest> handler,
TransportRequest request,
TransportChannel channel,
Task task
) throws Exception {
recoveryStarted.set(true);
if (blocksRemaining.getAndUpdate(i -> i == 0 ? 0 : i - 1) != 0) {
String rejected = "rejected";
String circuit = "circuit";
String network = "network";
String reason = randomFrom(rejected, circuit, network);
if (reason.equals(rejected)) {
logger.info("--> preventing {} response by throwing exception", actionName);
throw new EsRejectedExecutionException();
} else if (reason.equals(circuit)) {
logger.info("--> preventing {} response by throwing exception", actionName);
throw new CircuitBreakingException("Broken", CircuitBreaker.Durability.PERMANENT);
} else if (reason.equals(network)) {
logger.info("--> preventing {} response by breaking connection", actionName);
connectionBreaker.run();
} else {
throw new AssertionError("Unknown failure reason: " + reason);
}
}
handler.messageReceived(request, channel, task);
}
}
}
| TransientReceiveRejected |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/requests/DescribeAclsResponse.java | {
"start": 1936,
"end": 6708
} | class ____ extends AbstractResponse {
private final DescribeAclsResponseData data;
public DescribeAclsResponse(DescribeAclsResponseData data, short version) {
super(ApiKeys.DESCRIBE_ACLS);
this.data = data;
validate(Optional.of(version));
}
// Skips version validation, visible for testing
DescribeAclsResponse(DescribeAclsResponseData data) {
super(ApiKeys.DESCRIBE_ACLS);
this.data = data;
validate(Optional.empty());
}
@Override
public DescribeAclsResponseData data() {
return data;
}
@Override
public int throttleTimeMs() {
return data.throttleTimeMs();
}
@Override
public void maybeSetThrottleTimeMs(int throttleTimeMs) {
data.setThrottleTimeMs(throttleTimeMs);
}
public ApiError error() {
return new ApiError(Errors.forCode(data.errorCode()), data.errorMessage());
}
@Override
public Map<Errors, Integer> errorCounts() {
return errorCounts(Errors.forCode(data.errorCode()));
}
public final List<DescribeAclsResource> acls() {
return data.resources();
}
public static DescribeAclsResponse parse(Readable readable, short version) {
return new DescribeAclsResponse(new DescribeAclsResponseData(readable, version), version);
}
@Override
public boolean shouldClientThrottle(short version) {
return version >= 1;
}
private void validate(Optional<Short> version) {
if (version.isPresent() && version.get() == 0) {
final boolean unsupported = acls().stream()
.anyMatch(acl -> acl.patternType() != PatternType.LITERAL.code());
if (unsupported) {
throw new UnsupportedVersionException("Version 0 only supports literal resource pattern types");
}
}
for (DescribeAclsResource resource : acls()) {
if (resource.patternType() == PatternType.UNKNOWN.code() || resource.resourceType() == ResourceType.UNKNOWN.code())
throw new IllegalArgumentException("Contain UNKNOWN elements");
for (AclDescription acl : resource.acls()) {
if (acl.operation() == AclOperation.UNKNOWN.code() || acl.permissionType() == AclPermissionType.UNKNOWN.code()) {
throw new IllegalArgumentException("Contain UNKNOWN elements");
}
}
}
}
private static Stream<AclBinding> aclBindings(DescribeAclsResource resource) {
return resource.acls().stream().map(acl -> {
ResourcePattern pattern = new ResourcePattern(
ResourceType.fromCode(resource.resourceType()),
resource.resourceName(),
PatternType.fromCode(resource.patternType()));
AccessControlEntry entry = new AccessControlEntry(
acl.principal(),
acl.host(),
AclOperation.fromCode(acl.operation()),
AclPermissionType.fromCode(acl.permissionType()));
return new AclBinding(pattern, entry);
});
}
public static List<AclBinding> aclBindings(List<DescribeAclsResource> resources) {
return resources.stream().flatMap(DescribeAclsResponse::aclBindings).collect(Collectors.toList());
}
public static List<DescribeAclsResource> aclsResources(Iterable<AclBinding> acls) {
Map<ResourcePattern, Set<AccessControlEntry>> patternToEntries = new HashMap<>();
for (AclBinding acl : acls) {
patternToEntries.computeIfAbsent(acl.pattern(), v -> new HashSet<>()).add(acl.entry());
}
List<DescribeAclsResource> resources = new ArrayList<>(patternToEntries.size());
for (Entry<ResourcePattern, Set<AccessControlEntry>> entry : patternToEntries.entrySet()) {
ResourcePattern key = entry.getKey();
List<AclDescription> aclDescriptions = new ArrayList<>(entry.getValue().size());
for (AccessControlEntry ace : entry.getValue()) {
AclDescription ad = new AclDescription()
.setHost(ace.host())
.setOperation(ace.operation().code())
.setPermissionType(ace.permissionType().code())
.setPrincipal(ace.principal());
aclDescriptions.add(ad);
}
DescribeAclsResource dar = new DescribeAclsResource()
.setResourceName(key.name())
.setPatternType(key.patternType().code())
.setResourceType(key.resourceType().code())
.setAcls(aclDescriptions);
resources.add(dar);
}
return resources;
}
}
| DescribeAclsResponse |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/core/publisher/FluxSample.java | {
"start": 1681,
"end": 2486
} | class ____<T, U> extends InternalFluxOperator<T, T> {
final Publisher<U> other;
FluxSample(Flux<? extends T> source, Publisher<U> other) {
super(source);
this.other = Operators.toFluxOrMono(Objects.requireNonNull(other, "other"));
}
@Override
public int getPrefetch() {
return Integer.MAX_VALUE;
}
@Override
public CoreSubscriber<? super T> subscribeOrReturn(CoreSubscriber<? super T> actual) {
CoreSubscriber<T> serial = Operators.serialize(actual);
SampleMainSubscriber<T> main = new SampleMainSubscriber<>(serial);
actual.onSubscribe(main);
other.subscribe(new SampleOther<>(main));
return main;
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return super.scanUnsafe(key);
}
static final | FluxSample |
java | redisson__redisson | redisson-micronaut/redisson-micronaut-20/src/main/java/org/redisson/micronaut/session/RedissonHttpSessionConfiguration.java | {
"start": 970,
"end": 1078
} | class ____ extends HttpSessionConfiguration implements Toggleable {
public | RedissonHttpSessionConfiguration |
java | quarkusio__quarkus | extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/IngressConfig.java | {
"start": 1628,
"end": 2689
} | interface ____ {
/**
* The host under which the rule is going to be used.
*/
String host();
/**
* The path under which the rule is going to be used. Default is "/".
*/
@WithDefault("/")
String path();
/**
* The path type strategy to use by the Ingress rule. Default is "Prefix".
*/
@WithDefault("Prefix")
String pathType();
/**
* The service name to be used by this Ingress rule. Default is the generated service name of the application.
*/
Optional<String> serviceName();
/**
* The service port name to be used by this Ingress rule. Default is the port name of the generated service of
* the application.
*/
Optional<String> servicePortName();
/**
* The service port number to be used by this Ingress rule. This is only used when the servicePortName is not set.
*/
Optional<Integer> servicePortNumber();
}
}
| IngressRuleConfig |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java | {
"start": 3933,
"end": 4532
} | class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory dbl;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory dbl) {
this.source = source;
this.dbl = dbl;
}
@Override
public ToStringFromDoubleEvaluator get(DriverContext context) {
return new ToStringFromDoubleEvaluator(source, dbl.get(context), context);
}
@Override
public String toString() {
return "ToStringFromDoubleEvaluator[" + "dbl=" + dbl + "]";
}
}
}
| Factory |
java | quarkusio__quarkus | extensions/funqy/funqy-knative-events/deployment/src/test/java/io/quarkus/funqy/test/WithConflictingAttributeFilterTest.java | {
"start": 269,
"end": 1360
} | class ____ {
@RegisterExtension
static QuarkusUnitTest test = new QuarkusUnitTest().assertException(t -> {
Throwable i = t;
boolean found = false;
while (i != null) {
if (i instanceof IllegalStateException) {
found = true;
break;
}
i = i.getCause();
}
assertTrue(found, "Build failed with wrong exception, expected IllegalStateException but got " + t);
})
.withApplicationRoot((jar) -> jar
.addClasses(WithConflictingAttributeFilter.class, Identity.class));
@Test
public void testAttributeFilterMatch() {
RestAssured.given().contentType("application/json")
.body("[{\"name\": \"Bill\"}, {\"name\": \"Matej\"}]")
.header("ce-id", "42")
.header("ce-type", "listOfStrings")
.header("ce-source", "test")
.header("ce-specversion", "1.0")
.post("/")
.then().statusCode(404);
}
}
| WithConflictingAttributeFilterTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/converter/Person.java | {
"start": 344,
"end": 802
} | class ____ {
@Id
private Integer id;
@Basic
private String name;
@Embedded
@Convert(
attributeName = "city",
converter = ToDataBaseColumnThrowRuntimeExceptionConverter.class)
private Address address;
Person() {
}
public Person(Integer id, String name, Address address) {
this.id = id;
this.name = name;
this.address = address;
}
public Integer getId() {
return id;
}
public Address getAddress() {
return address;
}
}
| Person |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/LeafQueue.java | {
"start": 1132,
"end": 1745
} | class ____ extends AbstractLeafQueue {
private static final Logger LOG =
LoggerFactory.getLogger(LeafQueue.class);
public LeafQueue(CapacitySchedulerQueueContext queueContext,
String queueName, CSQueue parent, CSQueue old) throws IOException {
this(queueContext, queueName, parent, old, false);
}
public LeafQueue(CapacitySchedulerQueueContext queueContext,
String queueName, CSQueue parent, CSQueue old, boolean isDynamic) throws
IOException {
super(queueContext, queueName, parent, old, isDynamic);
setupQueueConfigs(queueContext.getClusterResource());
}
}
| LeafQueue |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/util/ContextDataProviderTest.java | {
"start": 1614,
"end": 2653
} | class ____ {
private static Logger logger;
private static ListAppender appender;
@BeforeAll
static void beforeClass() {
ThreadContextDataInjector.contextDataProviders.add(new TestContextDataProvider());
System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY, "log4j-contextData.xml");
final LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
logger = loggerContext.getLogger(ContextDataProviderTest.class.getName());
appender = loggerContext.getConfiguration().getAppender("List");
assertNotNull(appender, "No List appender");
}
@Test
void testContextProvider() {
ThreadContext.put("loginId", "jdoe");
logger.debug("This is a test");
final List<String> messages = appender.getMessages();
assertEquals(1, messages.size(), "Incorrect number of messages");
assertTrue(messages.get(0).contains("testKey=testValue"), "Context data missing");
}
private static | ContextDataProviderTest |
java | spring-projects__spring-boot | module/spring-boot-webflux/src/test/java/org/springframework/boot/webflux/autoconfigure/WebFluxAutoConfigurationTests.java | {
"start": 50525,
"end": 50691
} | class ____ implements Printer<Example> {
@Override
public String print(Example example, Locale locale) {
return example.getName();
}
}
static | ExamplePrinter |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/tool/schema/internal/StandardTableCleaner.java | {
"start": 818,
"end": 2659
} | class ____ implements Cleaner {
protected final Dialect dialect;
public StandardTableCleaner(Dialect dialect) {
this.dialect = dialect;
}
@Override
public String getSqlBeforeString() {
return dialect.getDisableConstraintsStatement();
}
@Override
public String getSqlAfterString() {
return dialect.getEnableConstraintsStatement();
}
@Override
public String[] getSqlTruncateStrings(Collection<Table> tables, Metadata metadata, SqlStringGenerationContext context) {
final String[] tableNames =
tables.stream()
.map( table -> context.format( getTableName( table ) ) )
.toArray( String[]::new );
final String[] truncateTableStatements = dialect.getTruncateTableStatements( tableNames );
final String[] initStatements =
tables.stream()
.flatMap( table -> table.getInitCommands( context ).stream() )
.flatMap( command -> stream( command.initCommands() ) )
.toArray( String[]::new );
return join( truncateTableStatements, initStatements );
}
@Override
public String getSqlDisableConstraintString(ForeignKey foreignKey, Metadata metadata, SqlStringGenerationContext context) {
return dialect.getDisableConstraintStatement( context.format( getTableName( foreignKey.getTable() ) ), foreignKey.getName() );
}
@Override
public String getSqlEnableConstraintString(ForeignKey foreignKey, Metadata metadata, SqlStringGenerationContext context) {
return dialect.getEnableConstraintStatement( context.format( getTableName( foreignKey.getTable() ) ), foreignKey.getName() );
}
private static QualifiedNameParser.NameParts getTableName(Table table) {
return new QualifiedNameParser.NameParts(
toIdentifier( table.getCatalog(), table.isCatalogQuoted() ),
toIdentifier( table.getSchema(), table.isSchemaQuoted() ),
table.getNameIdentifier()
);
}
}
| StandardTableCleaner |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/serialization/TypedValueSerializationTest.java | {
"start": 570,
"end": 1459
} | class ____ {
@Test
@JiraKey(value = "HHH-9024")
public void testTypedValueSerialization() throws Exception {
final Type mockType = mock( Type.class );
final String value = "foo";
final TypedValue typedValue = new TypedValue( mockType, value );
try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
try (final ObjectOutputStream oos = new ObjectOutputStream( baos )) {
oos.writeObject( typedValue );
try (final ObjectInputStream ois = new ObjectInputStream(
new ByteArrayInputStream( baos.toByteArray() ) )) {
final TypedValue typedValueClone = (TypedValue) ois.readObject();
assertEquals( typedValue.hashCode(), typedValueClone.hashCode() );
assertEquals( typedValue.toString(), typedValueClone.toString() );
assertEquals( typedValue.getValue(), typedValueClone.getValue() );
}
}
}
}
}
| TypedValueSerializationTest |
java | lettuce-io__lettuce-core | src/main/java/io/lettuce/core/dynamic/support/ReflectionUtils.java | {
"start": 7444,
"end": 7802
} | class ____ introspect
* @param mc the callback to invoke for each method
* @see #doWithMethods(Class, MethodCallback, MethodFilter)
*/
public static void doWithMethods(Class<?> clazz, MethodCallback mc) {
doWithMethods(clazz, mc, null);
}
/**
* Perform the given callback operation on all matching methods of the given | to |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/web/client/HttpStatusCodeExceptionTests.java | {
"start": 1102,
"end": 2160
} | class ____ {
/**
* Corners bug SPR-9273, which reported the fact that following the changes made in
* SPR-7591, {@link HttpStatusCodeException} and subtypes became no longer
* serializable due to the addition of a non-serializable {@code Charset} field.
*/
@Test
void testSerializability() throws IOException, ClassNotFoundException {
HttpStatusCodeException ex1 = new HttpClientErrorException(
HttpStatus.BAD_REQUEST, null, null, StandardCharsets.US_ASCII);
ByteArrayOutputStream out = new ByteArrayOutputStream();
new ObjectOutputStream(out).writeObject(ex1);
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
HttpStatusCodeException ex2 =
(HttpStatusCodeException) new ObjectInputStream(in).readObject();
assertThat(ex2.getResponseBodyAsString()).isEqualTo(ex1.getResponseBodyAsString());
}
@Test
void emptyStatusText() {
HttpStatusCodeException ex = new HttpClientErrorException(HttpStatus.NOT_FOUND, "");
assertThat(ex.getMessage()).isEqualTo("404 Not Found");
}
}
| HttpStatusCodeExceptionTests |
java | mockito__mockito | mockito-extensions/mockito-junit-jupiter/src/test/java/org/mockitousage/GenericTypeMockTest.java | {
"start": 13037,
"end": 13160
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
}
public | Two |
java | quarkusio__quarkus | integration-tests/main/src/test/java/io/quarkus/it/main/ParameterResolverTest.java | {
"start": 4058,
"end": 4739
} | class ____ implements ParameterResolver {
@Override
public boolean supportsParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return List.class.isAssignableFrom(parameterContext.getParameter().getType());
}
@Override
public Object resolveParameter(ParameterContext parameterContext, ExtensionContext extensionContext)
throws ParameterResolutionException {
return new ArrayList<>(List.of(new NonSerializable("foo"), new NonSerializable("bar")));
}
}
public static | ListWithNonSerializableParameterResolver |
java | elastic__elasticsearch | x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/session/IndexResolverFieldNamesTests.java | {
"start": 472,
"end": 16296
} | class ____ extends ESTestCase {
private static final EqlParser parser = new EqlParser();
public void testSimpleQueryEqual() {
assertFieldNames(
"""
process where serial_event_id == 1""",
Set.of("serial_event_id.*", "serial_event_id", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testSimpleQueryHeadSix() {
assertFieldNames("""
process where true | head 6""", Set.of("event.category.*", "event.category", "@timestamp.*", "@timestamp"));
}
public void testProcessWhereFalse() {
assertFieldNames("""
process where false""", Set.of("event.category.*", "event.category", "@timestamp.*", "@timestamp"));
}
public void testProcessNameInexistent() {
assertFieldNames(
"""
process where process_name : "impossible name" or (serial_event_id < 4.5 and serial_event_id >= 3.1)""",
Set.of(
"process_name.*",
"process_name",
"serial_event_id.*",
"serial_event_id",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testSerialEventIdLteAndGt() {
assertFieldNames(
"""
process where serial_event_id<=8 and serial_event_id > 7""",
Set.of("serial_event_id.*", "serial_event_id", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testMinusOneLtExitCode() {
assertFieldNames(
"""
process where -1 < exit_code""",
Set.of("exit_code.*", "exit_code", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testNotExitCodeGtWithHead1() {
assertFieldNames(
"""
process where not (exit_code > -1)
and serial_event_id in (58, 64, 69, 74, 80, 85, 90, 93, 94)
| head 10""",
Set.of(
"exit_code.*",
"exit_code",
"serial_event_id.*",
"serial_event_id",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testProcessWithMultipleConditions1() {
assertFieldNames(
"""
process where (serial_event_id<=8 and serial_event_id > 7) and (opcode==3 and opcode>2)""",
Set.of(
"opcode.*",
"opcode",
"serial_event_id.*",
"serial_event_id",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testWildcardAndMultipleConditions1() {
assertFieldNames(
"""
file where file_path:"x"
and opcode in (0,1,2) and user_name:\"vagrant\"""",
Set.of(
"user_name.*",
"user_name",
"opcode.*",
"opcode",
"file_path.*",
"file_path",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testSequenceOneOneMatch() {
assertFieldNames(
"""
sequence
[process where serial_event_id == 1]
[process where serial_event_id == 2]""",
Set.of("serial_event_id.*", "serial_event_id", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testSequenceOneManyMany_Runs() {
assertFieldNames(
"""
sequence
[process where serial_event_id == 1]
[process where true] with runs=2""",
Set.of("serial_event_id.*", "serial_event_id", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testTwoSequencesWithKeys() {
assertFieldNames(
"""
sequence
[process where true] by unique_pid
[process where opcode == 1] by unique_ppid""",
Set.of(
"opcode.*",
"opcode",
"unique_ppid.*",
"unique_ppid",
"unique_pid.*",
"unique_pid",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testTwoSequencesWithTwoKeys() {
assertFieldNames(
"""
sequence
[process where true] by unique_pid, process_path
[process where opcode == 1] by unique_ppid, parent_process_path""",
Set.of(
"opcode.*",
"opcode",
"unique_ppid.*",
"unique_ppid",
"unique_pid.*",
"unique_pid",
"process_path.*",
"process_path",
"parent_process_path.*",
"parent_process_path",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testFourSequencesByPidWithUntil1() {
assertFieldNames(
"""
sequence
[process where opcode == 1] by unique_pid
[file where opcode == 0] by unique_pid
[file where opcode == 0] by unique_pid
[file where opcode == 0] by unique_pid
until
[file where opcode == 2] by unique_pid""",
Set.of("opcode.*", "opcode", "unique_pid.*", "unique_pid", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testSequencesOnDifferentEventTypesWithBy() {
assertFieldNames(
"""
sequence
[file where opcode==0 and file_name:"svchost.exe"] by unique_pid
[process where opcode == 1] by unique_ppid""",
Set.of(
"opcode.*",
"opcode",
"unique_ppid.*",
"unique_ppid",
"unique_pid.*",
"unique_pid",
"file_name.*",
"file_name",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testMultipleConditions2() {
assertFieldNames(
"""
process where opcode == 1
and process_name in ("net.exe", "net1.exe")
and not (parent_process_name : "net.exe"
and process_name : "net1.exe")
and command_line : "*group *admin*" and command_line != \"*x*\"""",
Set.of(
"opcode.*",
"opcode",
"process_name.*",
"process_name",
"parent_process_name.*",
"parent_process_name",
"command_line.*",
"command_line",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testTwoSequencesWithKeys2() {
assertFieldNames(
"""
sequence
[file where file_name:"lsass.exe"] by file_path,process_path
[process where true] by process_path,parent_process_path""",
Set.of(
"file_name.*",
"file_name",
"file_path.*",
"file_path",
"process_path.*",
"process_path",
"parent_process_path.*",
"parent_process_path",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testEndsWithAndCondition() {
assertFieldNames(
"""
file where opcode==0 and serial_event_id == 88 and startsWith~("explorer.exeaAAAA", "EXPLORER.exe")""",
Set.of(
"opcode.*",
"opcode",
"serial_event_id.*",
"serial_event_id",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testStringContains2() {
assertFieldNames(
"""
file where opcode==0 and stringContains("ABCDEFGHIexplorer.exeJKLMNOP", file_name)""",
Set.of("opcode.*", "opcode", "file_name.*", "file_name", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testConcatCaseInsensitive() {
assertFieldNames(
"process where concat(serial_event_id, \":\", process_name, opcode) : \"x\"",
Set.of(
"opcode.*",
"opcode",
"process_name.*",
"process_name",
"serial_event_id.*",
"serial_event_id",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testCidrMatch4() {
assertFieldNames(
"""
network where cidrMatch(source_address, "0.0.0.0/0")""",
Set.of("source_address.*", "source_address", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testNumberStringConversion5() {
assertFieldNames(
"""
any where number(string(serial_event_id), 16) == 17""",
Set.of("serial_event_id.*", "serial_event_id", "@timestamp.*", "@timestamp")
);
}
public void testSimpleRegex() {
assertFieldNames(
"process where command_line regex \".*\"",
Set.of("command_line.*", "command_line", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testSequenceWithOptionalUserDomain() {
assertFieldNames(
"""
sequence by ?user_domain [process where true] [registry where true]""",
Set.of("user_domain.*", "user_domain", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testTwoSequencesWithTwoKeys_AndOptionals() {
assertFieldNames(
"""
sequence by ?x
[process where true] by unique_pid, process_path, ?z
[process where opcode == 1] by unique_ppid, parent_process_path, ?w""",
Set.of(
"opcode.*",
"opcode",
"x.*",
"x",
"parent_process_path.*",
"parent_process_path",
"process_path.*",
"process_path",
"unique_pid.*",
"unique_pid",
"unique_ppid.*",
"unique_ppid",
"z.*",
"z",
"w.*",
"w",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testOptionalDefaultNullValueFieldEqualNull() {
assertFieldNames(
"""
OPTIONAL where ?optional_field_default_null == null""",
Set.of(
"optional_field_default_null.*",
"optional_field_default_null",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testSequenceOptionalFieldAsQueryKeys() {
assertFieldNames(
"""
sequence by ?x, transID
[ERROR where true] by ?x
[OPTIONAL where true] by ?y""",
Set.of("x.*", "x", "y.*", "y", "transID.*", "transID", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testSequenceAllKeysOptional() {
assertFieldNames(
"""
sequence by ?process.entity_id, ?process.pid
[process where transID == 2]
[file where transID == 0] with runs=2""",
Set.of(
"process.entity_id.*",
"process.entity_id",
"process.pid.*",
"process.pid",
"transID.*",
"transID",
"event.category.*",
"event.category",
"@timestamp.*",
"@timestamp"
)
);
}
public void testMultipleMissing1() {
assertFieldNames("""
sequence with maxspan=1s
[ test4 where tag == "A" ]
[ test4 where tag == "B" ]
![ test4 where tag == "M1"]
[ test4 where tag == "C" ]
![ test4 where tag == "M2"]
[ test4 where tag == "D" ]""", Set.of("tag.*", "tag", "event.category.*", "event.category", "@timestamp.*", "@timestamp"));
}
public void testWithByKey_runs() {
assertFieldNames(
"""
sequence by k1 with maxspan=1s
[ test5 where tag == "normal" ] by k2 with runs=2
![ test5 where tag == "missing" ] by k2
[ test5 where tag == "normal" ] by k2""",
Set.of("tag.*", "tag", "k1.*", "k1", "k2.*", "k2", "event.category.*", "event.category", "@timestamp.*", "@timestamp")
);
}
public void testComplexFiltersWithSample() {
assertFieldNames(
"""
sample by host
[any where uptime > 0 and host == "doom" and (uptime > 15 or bool == true)] by os
[any where port > 100 and ip == "10.0.0.5" or op_sys : "REDHAT"] by op_sys
[any where bool == true] by os""",
Set.of(
"host.*",
"host",
"uptime.*",
"uptime",
"bool.*",
"bool",
"os.*",
"os",
"port.*",
"port",
"ip.*",
"ip",
"op_sys.*",
"op_sys"
)
);
}
public void testOptionalFieldAsKeyAndMultipleConditions() {
assertFieldNames(
"""
sample by ?x, ?y
[failure where (?x == null or ?y == null) and id == 17]
[success where (?y == null and ?x == null) and id == 18]""",
Set.of("x.*", "x", "y.*", "y", "id.*", "id", "event.category.*", "event.category")
);
}
private void assertFieldNames(String query, Set<String> expected) {
Set<String> fieldNames = EqlSession.fieldNames(parser.createStatement(query));
assertThat(fieldNames, equalTo(expected));
}
}
| IndexResolverFieldNamesTests |
java | netty__netty | codec-base/src/main/java/io/netty/handler/codec/bytes/ByteArrayEncoder.java | {
"start": 1841,
"end": 2144
} | class ____ extends MessageToMessageEncoder<byte[]> {
public ByteArrayEncoder() {
super(byte[].class);
}
@Override
protected void encode(ChannelHandlerContext ctx, byte[] msg, List<Object> out) throws Exception {
out.add(Unpooled.wrappedBuffer(msg));
}
}
| ByteArrayEncoder |
java | elastic__elasticsearch | x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskTests.java | {
"start": 589,
"end": 3048
} | class ____ extends ESTestCase {
private static final String TRANSFORM_ID = "transform-id";
private static final TimeValue FREQUENCY = TimeValue.timeValueSeconds(10);
private static final TimeValue DEFAULT_FREQUENCY = TimeValue.timeValueSeconds(60);
private static final long LAST_TRIGGERED_TIME_MILLIS = 100000L;
private static final Listener LISTENER = event -> {};
public void testBasics() {
TransformScheduledTask task = new TransformScheduledTask(TRANSFORM_ID, FREQUENCY, LAST_TRIGGERED_TIME_MILLIS, 0, 123, LISTENER);
assertThat(task.getTransformId(), is(equalTo(TRANSFORM_ID)));
assertThat(task.getFrequency(), is(equalTo(FREQUENCY)));
assertThat(task.getLastTriggeredTimeMillis(), is(equalTo(LAST_TRIGGERED_TIME_MILLIS)));
assertThat(task.getFailureCount(), is(equalTo(0)));
assertThat(task.getNextScheduledTimeMillis(), is(equalTo(123L)));
assertThat(task.getListener(), is(equalTo(LISTENER)));
}
public void testNextScheduledTimeMillis() {
{
TransformScheduledTask task = new TransformScheduledTask(TRANSFORM_ID, FREQUENCY, LAST_TRIGGERED_TIME_MILLIS, 0, 123, LISTENER);
// Verify that the explicitly-provided next scheduled time is returned when failure count is 0
assertThat(task.getNextScheduledTimeMillis(), is(equalTo(123L)));
}
{
TransformScheduledTask task = new TransformScheduledTask(TRANSFORM_ID, FREQUENCY, LAST_TRIGGERED_TIME_MILLIS, 1, 123, LISTENER);
// Verify that the explicitly-provided next scheduled time is returned when failure count is greater than 0
assertThat(task.getNextScheduledTimeMillis(), is(equalTo(123L)));
}
{
TransformScheduledTask task = new TransformScheduledTask(TRANSFORM_ID, FREQUENCY, LAST_TRIGGERED_TIME_MILLIS, 0, LISTENER);
// Verify that the next scheduled time is calculated properly when failure count is 0
assertThat(task.getNextScheduledTimeMillis(), is(equalTo(110000L)));
}
{
TransformScheduledTask task = new TransformScheduledTask(TRANSFORM_ID, FREQUENCY, LAST_TRIGGERED_TIME_MILLIS, 1, LISTENER);
// Verify that the next scheduled time is calculated properly when failure count is greater than 0
assertThat(task.getNextScheduledTimeMillis(), is(equalTo(105000L)));
}
}
}
| TransformScheduledTaskTests |
java | spring-projects__spring-framework | spring-tx/src/main/java/org/springframework/dao/support/PersistenceExceptionTranslationInterceptor.java | {
"start": 1828,
"end": 4105
} | class ____
implements MethodInterceptor, BeanFactoryAware, InitializingBean {
private volatile @Nullable PersistenceExceptionTranslator persistenceExceptionTranslator;
private boolean alwaysTranslate = false;
private @Nullable ListableBeanFactory beanFactory;
/**
* Create a new PersistenceExceptionTranslationInterceptor.
* Needs to be configured with a PersistenceExceptionTranslator afterwards.
* @see #setPersistenceExceptionTranslator
*/
public PersistenceExceptionTranslationInterceptor() {
}
/**
* Create a new PersistenceExceptionTranslationInterceptor
* for the given PersistenceExceptionTranslator.
* @param pet the PersistenceExceptionTranslator to use
*/
public PersistenceExceptionTranslationInterceptor(PersistenceExceptionTranslator pet) {
Assert.notNull(pet, "PersistenceExceptionTranslator must not be null");
this.persistenceExceptionTranslator = pet;
}
/**
* Create a new PersistenceExceptionTranslationInterceptor, autodetecting
* PersistenceExceptionTranslators in the given BeanFactory.
* @param beanFactory the ListableBeanFactory to obtaining all
* PersistenceExceptionTranslators from
*/
public PersistenceExceptionTranslationInterceptor(ListableBeanFactory beanFactory) {
Assert.notNull(beanFactory, "ListableBeanFactory must not be null");
this.beanFactory = beanFactory;
}
/**
* Specify the PersistenceExceptionTranslator to use.
* <p>Default is to autodetect all PersistenceExceptionTranslators
* in the containing BeanFactory, using them in a chain.
* @see #detectPersistenceExceptionTranslators
*/
public void setPersistenceExceptionTranslator(PersistenceExceptionTranslator pet) {
this.persistenceExceptionTranslator = pet;
}
/**
* Specify whether to always translate the exception ("true"), or whether throw the
* raw exception when declared, i.e. when the originating method signature's exception
* declarations allow for the raw exception to be thrown ("false").
* <p>Default is "false". Switch this flag to "true" in order to always translate
* applicable exceptions, independent of the originating method signature.
* <p>Note that the originating method does not have to declare the specific exception.
* Any base | PersistenceExceptionTranslationInterceptor |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/IterableMapping.java | {
"start": 901,
"end": 1133
} | interface ____ {
* @IterableMapping( numberFormat = "##.00" )
* List<String> sourceToTarget(List<Float> source);
* }
* </code></pre>
* <pre><code class='java'>
* // generates
* public | FloatToStringMapper |
java | ReactiveX__RxJava | src/test/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableConcatMapTest.java | {
"start": 1282,
"end": 10762
} | class ____ extends RxJavaTest {
@Test
public void simpleSubscriptionRequest() {
TestSubscriber<Integer> ts = new TestSubscriber<>(0);
SimpleScalarSubscription<Integer> ws = new SimpleScalarSubscription<>(1, ts);
ts.onSubscribe(ws);
ws.request(0);
ts.assertEmpty();
ws.request(1);
ts.assertResult(1);
ws.request(1);
ts.assertResult(1);
}
@Test
public void boundaryFusion() {
Flowable.range(1, 10000)
.observeOn(Schedulers.single())
.map(new Function<Integer, String>() {
@Override
public String apply(Integer t) throws Exception {
String name = Thread.currentThread().getName();
if (name.contains("RxSingleScheduler")) {
return "RxSingleScheduler";
}
return name;
}
})
.concatMap(new Function<String, Publisher<? extends Object>>() {
@Override
public Publisher<? extends Object> apply(String v)
throws Exception {
return Flowable.just(v);
}
})
.observeOn(Schedulers.computation())
.distinct()
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertResult("RxSingleScheduler");
}
@Test
public void innerScalarRequestRace() {
Flowable<Integer> just = Flowable.just(1);
int n = 1000;
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
PublishProcessor<Flowable<Integer>> source = PublishProcessor.create();
TestSubscriber<Integer> ts = source
.concatMap(v -> v, n + 1)
.test(1L);
TestHelper.race(() -> {
for (int j = 0; j < n; j++) {
source.onNext(just);
}
}, () -> {
for (int j = 0; j < n; j++) {
ts.request(1);
}
});
ts.assertValueCount(n);
}
}
@Test
public void innerScalarRequestRaceDelayError() {
Flowable<Integer> just = Flowable.just(1);
int n = 1000;
for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
PublishProcessor<Flowable<Integer>> source = PublishProcessor.create();
TestSubscriber<Integer> ts = source
.concatMapDelayError(v -> v, true, n + 1)
.test(1L);
TestHelper.race(() -> {
for (int j = 0; j < n; j++) {
source.onNext(just);
}
}, () -> {
for (int j = 0; j < n; j++) {
ts.request(1);
}
});
ts.assertValueCount(n);
}
}
@Test
public void boundaryFusionDelayError() {
Flowable.range(1, 10000)
.observeOn(Schedulers.single())
.map(new Function<Integer, String>() {
@Override
public String apply(Integer t) throws Exception {
String name = Thread.currentThread().getName();
if (name.contains("RxSingleScheduler")) {
return "RxSingleScheduler";
}
return name;
}
})
.concatMapDelayError(new Function<String, Publisher<? extends Object>>() {
@Override
public Publisher<? extends Object> apply(String v)
throws Exception {
return Flowable.just(v);
}
})
.observeOn(Schedulers.computation())
.distinct()
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertResult("RxSingleScheduler");
}
@Test
public void pollThrows() {
Flowable.just(1)
.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
throw new TestException();
}
})
.compose(TestHelper.<Integer>flowableStripBoundary())
.concatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v)
throws Exception {
return Flowable.just(v);
}
})
.test()
.assertFailure(TestException.class);
}
@Test
public void pollThrowsDelayError() {
Flowable.just(1)
.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
throw new TestException();
}
})
.compose(TestHelper.<Integer>flowableStripBoundary())
.concatMapDelayError(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v)
throws Exception {
return Flowable.just(v);
}
})
.test()
.assertFailure(TestException.class);
}
@Test
public void noCancelPrevious() {
final AtomicInteger counter = new AtomicInteger();
Flowable.range(1, 5)
.concatMap(new Function<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Integer v) throws Exception {
return Flowable.just(v).doOnCancel(new Action() {
@Override
public void run() throws Exception {
counter.getAndIncrement();
}
});
}
})
.test()
.assertResult(1, 2, 3, 4, 5);
assertEquals(0, counter.get());
}
@Test
public void delayErrorCallableTillTheEnd() {
Flowable.just(1, 2, 3, 101, 102, 23, 890, 120, 32)
.concatMapDelayError(new Function<Integer, Flowable<Integer>>() {
@Override public Flowable<Integer> apply(final Integer integer) throws Exception {
return Flowable.fromCallable(new Callable<Integer>() {
@Override public Integer call() throws Exception {
if (integer >= 100) {
throw new NullPointerException("test null exp");
}
return integer;
}
});
}
})
.test()
.assertFailure(CompositeException.class, 1, 2, 3, 23, 32);
}
@Test
public void delayErrorCallableEager() {
Flowable.just(1, 2, 3, 101, 102, 23, 890, 120, 32)
.concatMapDelayError(new Function<Integer, Flowable<Integer>>() {
@Override public Flowable<Integer> apply(final Integer integer) throws Exception {
return Flowable.fromCallable(new Callable<Integer>() {
@Override public Integer call() throws Exception {
if (integer >= 100) {
throw new NullPointerException("test null exp");
}
return integer;
}
});
}
}, false, 2)
.test()
.assertFailure(NullPointerException.class, 1, 2, 3);
}
@Test
public void undeliverableUponCancel() {
TestHelper.checkUndeliverableUponCancel(new FlowableConverter<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Flowable<Integer> upstream) {
return upstream.concatMap(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v) throws Throwable {
return Flowable.just(v).hide();
}
});
}
});
}
@Test
public void undeliverableUponCancelDelayError() {
TestHelper.checkUndeliverableUponCancel(new FlowableConverter<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Flowable<Integer> upstream) {
return upstream.concatMapDelayError(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v) throws Throwable {
return Flowable.just(v).hide();
}
}, false, 2);
}
});
}
@Test
public void undeliverableUponCancelDelayErrorTillEnd() {
TestHelper.checkUndeliverableUponCancel(new FlowableConverter<Integer, Flowable<Integer>>() {
@Override
public Flowable<Integer> apply(Flowable<Integer> upstream) {
return upstream.concatMapDelayError(new Function<Integer, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(Integer v) throws Throwable {
return Flowable.just(v).hide();
}
}, true, 2);
}
});
}
@Test
public void asyncFusedSource() {
UnicastProcessor<Integer> up = UnicastProcessor.create();
up.onNext(1);
up.onComplete();
up.concatMap(v -> Flowable.just(1).hide())
.test()
.assertResult(1);
}
@Test
public void scalarCallableSource() {
Flowable.fromCallable(() -> 1)
.concatMap(v -> Flowable.just(1))
.test()
.assertResult(1);
}
}
| FlowableConcatMapTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/functions/GroupReduceFunction.java | {
"start": 1849,
"end": 2135
} | interface ____ enable partial computations, i.e., a combiner for this {@link
* GroupReduceFunction}.
*
* @param <T> Type of the elements that this function processes.
* @param <O> The type of the elements returned by the user-defined function.
*/
@Public
@FunctionalInterface
public | to |
java | spring-projects__spring-framework | spring-websocket/src/test/java/org/springframework/web/socket/config/annotation/WebSocketMessageBrokerConfigurationSupportTests.java | {
"start": 16410,
"end": 16960
} | class ____ extends DelegatingWebSocketMessageBrokerConfiguration {
@Bean
public WebSocketMessageBrokerConfigurer getConfigurer() {
return new WebSocketMessageBrokerConfigurer() {
@Override
public void registerStompEndpoints(StompEndpointRegistry registry) {
registry.addEndpoint("/broker");
}
@Override
public void configureMessageBroker(MessageBrokerRegistry registry) {
registry.enableSimpleBroker();
}
@Override
public Integer getPhase() {
return 99;
}
};
}
}
}
| LifecyclePhaseConfig |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformDestIndexSettings.java | {
"start": 1150,
"end": 5444
} | class ____ implements SimpleDiffable<TransformDestIndexSettings>, Writeable, ToXContentObject {
public static final ParseField MAPPINGS = new ParseField("mappings");
public static final ParseField SETTINGS = new ParseField("settings");
public static final ParseField ALIASES = new ParseField("aliases");
private static final ConstructingObjectParser<TransformDestIndexSettings, Void> STRICT_PARSER = createParser(false);
private final Map<String, Object> mappings;
private final Settings settings;
private final Set<Alias> aliases;
private static ConstructingObjectParser<TransformDestIndexSettings, Void> createParser(boolean lenient) {
ConstructingObjectParser<TransformDestIndexSettings, Void> PARSER = new ConstructingObjectParser<>(
"transform_preview_generated_dest_index",
lenient,
args -> {
@SuppressWarnings("unchecked")
Map<String, Object> mappings = (Map<String, Object>) args[0];
Settings settings = (Settings) args[1];
@SuppressWarnings("unchecked")
Set<Alias> aliases = (Set<Alias>) args[2];
return new TransformDestIndexSettings(mappings, settings, aliases);
}
);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), MAPPINGS);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> Settings.fromXContent(p), SETTINGS);
PARSER.declareObject(optionalConstructorArg(), (p, c) -> {
Set<Alias> aliases = new HashSet<>();
while ((p.nextToken()) != XContentParser.Token.END_OBJECT) {
aliases.add(Alias.fromXContent(p));
}
return aliases;
}, ALIASES);
return PARSER;
}
public TransformDestIndexSettings(Map<String, Object> mappings, Settings settings, Set<Alias> aliases) {
this.mappings = mappings == null ? Collections.emptyMap() : Collections.unmodifiableMap(mappings);
this.settings = settings == null ? Settings.EMPTY : settings;
this.aliases = aliases == null ? Collections.emptySet() : Collections.unmodifiableSet(aliases);
}
public TransformDestIndexSettings(StreamInput in) throws IOException {
mappings = in.readGenericMap();
settings = Settings.readSettingsFromStream(in);
aliases = new HashSet<>(in.readCollectionAsList(Alias::new));
}
public Map<String, Object> getMappings() {
return mappings;
}
public Settings getSettings() {
return settings;
}
public Set<Alias> getAliases() {
return aliases;
}
@Override
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
// note: we write out the full object, even if parts are empty to gain visibility of options
builder.startObject();
builder.field(MAPPINGS.getPreferredName(), mappings);
builder.startObject(SETTINGS.getPreferredName());
settings.toXContent(builder, params);
builder.endObject();
builder.startObject(ALIASES.getPreferredName());
for (Alias alias : aliases) {
alias.toXContent(builder, params);
}
builder.endObject();
builder.endObject();
return builder;
}
public static TransformDestIndexSettings fromXContent(final XContentParser parser) {
return STRICT_PARSER.apply(parser, null);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeGenericMap(mappings);
settings.writeTo(out);
out.writeCollection(aliases);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj == null || obj.getClass() != getClass()) {
return false;
}
TransformDestIndexSettings other = (TransformDestIndexSettings) obj;
return Objects.equals(other.mappings, mappings)
&& Objects.equals(other.settings, settings)
&& Objects.equals(other.aliases, aliases);
}
@Override
public int hashCode() {
return Objects.hash(mappings, settings, aliases);
}
}
| TransformDestIndexSettings |
java | spring-projects__spring-framework | spring-websocket/src/main/java/org/springframework/web/socket/adapter/standard/ConvertingEncoderDecoderSupport.java | {
"start": 3596,
"end": 8412
} | class ____<T, M> {
private static final String CONVERSION_SERVICE_BEAN_NAME = "webSocketConversionService";
/**
* Called to initialize the encoder/decoder.
* @see jakarta.websocket.Encoder#init(EndpointConfig)
* @see jakarta.websocket.Decoder#init(EndpointConfig)
*/
public void init(EndpointConfig config) {
if (getApplicationContext() instanceof ConfigurableApplicationContext cac) {
ConfigurableListableBeanFactory beanFactory = cac.getBeanFactory();
beanFactory.autowireBean(this);
}
}
/**
* Called to destroy the encoder/decoder.
* @see jakarta.websocket.Encoder#destroy()
* @see jakarta.websocket.Decoder#destroy()
*/
public void destroy() {
}
/**
* Strategy method used to obtain the {@link ConversionService}. By default, this
* method expects a bean named {@code 'webSocketConversionService'} in the
* {@link #getApplicationContext() active ApplicationContext}.
* @return the {@link ConversionService} (never null)
*/
protected ConversionService getConversionService() {
ApplicationContext applicationContext = getApplicationContext();
Assert.state(applicationContext != null, "Unable to locate the Spring ApplicationContext");
try {
return applicationContext.getBean(CONVERSION_SERVICE_BEAN_NAME, ConversionService.class);
}
catch (BeansException ex) {
throw new IllegalStateException("Unable to find ConversionService: please configure a '" +
CONVERSION_SERVICE_BEAN_NAME + "' or override the getConversionService() method", ex);
}
}
/**
* Returns the active {@link ApplicationContext}. By default, this method obtains
* the context via {@link ContextLoader#getCurrentWebApplicationContext()}, which
* finds the ApplicationContext loaded via {@link ContextLoader} typically in a
* Servlet container environment. When not running in a Servlet container and
* not using {@link ContextLoader}, this method should be overridden.
* @return the {@link ApplicationContext} or {@code null}
*/
protected @Nullable ApplicationContext getApplicationContext() {
return ContextLoader.getCurrentWebApplicationContext();
}
/**
* Returns the type being converted. By default, the type is resolved using
* the generic arguments of the class.
*/
protected TypeDescriptor getType() {
return TypeDescriptor.valueOf(resolveTypeArguments()[0]);
}
/**
* Returns the websocket message type. By default, the type is resolved using
* the generic arguments of the class.
*/
protected TypeDescriptor getMessageType() {
return TypeDescriptor.valueOf(resolveTypeArguments()[1]);
}
private Class<?>[] resolveTypeArguments() {
Class<?>[] resolved = GenericTypeResolver.resolveTypeArguments(getClass(), ConvertingEncoderDecoderSupport.class);
if (resolved == null) {
throw new IllegalStateException("ConvertingEncoderDecoderSupport's generic types T and M " +
"need to be substituted in subclass: " + getClass());
}
return resolved;
}
/**
* Encode an object to a message.
* @see jakarta.websocket.Encoder.Text#encode(Object)
* @see jakarta.websocket.Encoder.Binary#encode(Object)
*/
@SuppressWarnings("unchecked")
public @Nullable M encode(T object) throws EncodeException {
try {
return (M) getConversionService().convert(object, getType(), getMessageType());
}
catch (ConversionException ex) {
throw new EncodeException(object, "Unable to encode websocket message using ConversionService", ex);
}
}
/**
* Determine if a given message can be decoded.
* @see #decode(Object)
* @see jakarta.websocket.Decoder.Text#willDecode(String)
* @see jakarta.websocket.Decoder.Binary#willDecode(ByteBuffer)
*/
public boolean willDecode(M bytes) {
return getConversionService().canConvert(getType(), getMessageType());
}
/**
* Decode the message into an object.
* @see jakarta.websocket.Decoder.Text#decode(String)
* @see jakarta.websocket.Decoder.Binary#decode(ByteBuffer)
*/
@SuppressWarnings("unchecked")
public @Nullable T decode(M message) throws DecodeException {
try {
return (T) getConversionService().convert(message, getMessageType(), getType());
}
catch (ConversionException ex) {
if (message instanceof String string) {
throw new DecodeException(string,
"Unable to decode websocket message using ConversionService", ex);
}
if (message instanceof ByteBuffer byteBuffer) {
throw new DecodeException(byteBuffer,
"Unable to decode websocket message using ConversionService", ex);
}
throw ex;
}
}
/**
* A binary {@link jakarta.websocket.Encoder.Binary jakarta.websocket.Encoder} that delegates
* to Spring's conversion service. See {@link ConvertingEncoderDecoderSupport} for details.
* @param <T> the type that this Encoder can convert to
*/
public abstract static | ConvertingEncoderDecoderSupport |
java | google__guava | android/guava/src/com/google/common/math/IntMath.java | {
"start": 2190,
"end": 26719
} | class ____ {
@VisibleForTesting static final int MAX_SIGNED_POWER_OF_TWO = 1 << (Integer.SIZE - 2);
/**
* Returns the smallest power of two greater than or equal to {@code x}. This is equivalent to
* {@code checkedPow(2, log2(x, CEILING))}.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @throws ArithmeticException of the next-higher power of two is not representable as an {@code
* int}, i.e. when {@code x > 2^30}
* @since 20.0
*/
public static int ceilingPowerOfTwo(int x) {
checkPositive("x", x);
if (x > MAX_SIGNED_POWER_OF_TWO) {
throw new ArithmeticException("ceilingPowerOfTwo(" + x + ") not representable as an int");
}
return 1 << -Integer.numberOfLeadingZeros(x - 1);
}
/**
* Returns the largest power of two less than or equal to {@code x}. This is equivalent to {@code
* checkedPow(2, log2(x, FLOOR))}.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @since 20.0
*/
public static int floorPowerOfTwo(int x) {
checkPositive("x", x);
return Integer.highestOneBit(x);
}
/**
* Returns {@code true} if {@code x} represents a power of two.
*
* <p>This differs from {@code Integer.bitCount(x) == 1}, because {@code
* Integer.bitCount(Integer.MIN_VALUE) == 1}, but {@link Integer#MIN_VALUE} is not a power of two.
*/
// Whenever both tests are cheap and functional, it's faster to use &, | instead of &&, ||
@SuppressWarnings("ShortCircuitBoolean")
public static boolean isPowerOfTwo(int x) {
return x > 0 & (x & (x - 1)) == 0;
}
/**
* Returns 1 if {@code x < y} as unsigned integers, and 0 otherwise. Assumes that x - y fits into
* a signed int. The implementation is branch-free, and benchmarks suggest it is measurably (if
* narrowly) faster than the straightforward ternary expression.
*/
@VisibleForTesting
static int lessThanBranchFree(int x, int y) {
// The double negation is optimized away by normal Java, but is necessary for GWT
// to make sure bit twiddling works as expected.
return ~~(x - y) >>> (Integer.SIZE - 1);
}
/**
* Returns the base-2 logarithm of {@code x}, rounded according to the specified rounding mode.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @throws ArithmeticException if {@code mode} is {@link RoundingMode#UNNECESSARY} and {@code x}
* is not a power of two
*/
@SuppressWarnings("fallthrough")
// TODO(kevinb): remove after this warning is disabled globally
public static int log2(int x, RoundingMode mode) {
checkPositive("x", x);
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(isPowerOfTwo(x));
// fall through
case DOWN:
case FLOOR:
return (Integer.SIZE - 1) - Integer.numberOfLeadingZeros(x);
case UP:
case CEILING:
return Integer.SIZE - Integer.numberOfLeadingZeros(x - 1);
case HALF_DOWN:
case HALF_UP:
case HALF_EVEN:
// Since sqrt(2) is irrational, log2(x) - logFloor cannot be exactly 0.5
int leadingZeros = Integer.numberOfLeadingZeros(x);
int cmp = MAX_POWER_OF_SQRT2_UNSIGNED >>> leadingZeros;
// floor(2^(logFloor + 0.5))
int logFloor = (Integer.SIZE - 1) - leadingZeros;
return logFloor + lessThanBranchFree(cmp, x);
}
throw new AssertionError();
}
/** The biggest half power of two that can fit in an unsigned int. */
@VisibleForTesting static final int MAX_POWER_OF_SQRT2_UNSIGNED = 0xB504F333;
/**
* Returns the base-10 logarithm of {@code x}, rounded according to the specified rounding mode.
*
* @throws IllegalArgumentException if {@code x <= 0}
* @throws ArithmeticException if {@code mode} is {@link RoundingMode#UNNECESSARY} and {@code x}
* is not a power of ten
*/
@GwtIncompatible // need BigIntegerMath to adequately test
@SuppressWarnings("fallthrough")
public static int log10(int x, RoundingMode mode) {
checkPositive("x", x);
int logFloor = log10Floor(x);
int floorPow = powersOf10[logFloor];
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(x == floorPow);
// fall through
case FLOOR:
case DOWN:
return logFloor;
case CEILING:
case UP:
return logFloor + lessThanBranchFree(floorPow, x);
case HALF_DOWN:
case HALF_UP:
case HALF_EVEN:
// sqrt(10) is irrational, so log10(x) - logFloor is never exactly 0.5
return logFloor + lessThanBranchFree(halfPowersOf10[logFloor], x);
}
throw new AssertionError();
}
private static int log10Floor(int x) {
/*
* Based on Hacker's Delight Fig. 11-5, the two-table-lookup, branch-free implementation.
*
* The key idea is that based on the number of leading zeros (equivalently, floor(log2(x))), we
* can narrow the possible floor(log10(x)) values to two. For example, if floor(log2(x)) is 6,
* then 64 <= x < 128, so floor(log10(x)) is either 1 or 2.
*/
int y = maxLog10ForLeadingZeros[Integer.numberOfLeadingZeros(x)];
/*
* y is the higher of the two possible values of floor(log10(x)). If x < 10^y, then we want the
* lower of the two possible values, or y - 1, otherwise, we want y.
*/
return y - lessThanBranchFree(x, powersOf10[y]);
}
// maxLog10ForLeadingZeros[i] == floor(log10(2^(Long.SIZE - i)))
@VisibleForTesting
static final byte[] maxLog10ForLeadingZeros = {
9, 9, 9, 8, 8, 8, 7, 7, 7, 6, 6, 6, 6, 5, 5, 5, 4, 4, 4, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1, 0, 0, 0,
0
};
@VisibleForTesting
static final int[] powersOf10 = {
1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000, 1000000000
};
// halfPowersOf10[i] = largest int less than 10^(i + 0.5)
@VisibleForTesting
static final int[] halfPowersOf10 = {
3, 31, 316, 3162, 31622, 316227, 3162277, 31622776, 316227766, Integer.MAX_VALUE
};
/**
* Returns {@code b} to the {@code k}th power. Even if the result overflows, it will be equal to
* {@code BigInteger.valueOf(b).pow(k).intValue()}. This implementation runs in {@code O(log k)}
* time.
*
* <p>Compare {@link #checkedPow}, which throws an {@link ArithmeticException} upon overflow.
*
* @throws IllegalArgumentException if {@code k < 0}
*/
@GwtIncompatible // failing tests
public static int pow(int b, int k) {
checkNonNegative("exponent", k);
switch (b) {
case 0:
return (k == 0) ? 1 : 0;
case 1:
return 1;
case -1:
return ((k & 1) == 0) ? 1 : -1;
case 2:
return (k < Integer.SIZE) ? (1 << k) : 0;
case -2:
if (k < Integer.SIZE) {
return ((k & 1) == 0) ? (1 << k) : -(1 << k);
} else {
return 0;
}
default:
// continue below to handle the general case
}
for (int accum = 1; ; k >>= 1) {
switch (k) {
case 0:
return accum;
case 1:
return b * accum;
default:
accum *= ((k & 1) == 0) ? 1 : b;
b *= b;
}
}
}
/**
* Returns the square root of {@code x}, rounded with the specified rounding mode.
*
* @throws IllegalArgumentException if {@code x < 0}
* @throws ArithmeticException if {@code mode} is {@link RoundingMode#UNNECESSARY} and {@code
* sqrt(x)} is not an integer
*/
@GwtIncompatible // need BigIntegerMath to adequately test
@SuppressWarnings("fallthrough")
public static int sqrt(int x, RoundingMode mode) {
checkNonNegative("x", x);
int sqrtFloor = sqrtFloor(x);
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(sqrtFloor * sqrtFloor == x); // fall through
case FLOOR:
case DOWN:
return sqrtFloor;
case CEILING:
case UP:
return sqrtFloor + lessThanBranchFree(sqrtFloor * sqrtFloor, x);
case HALF_DOWN:
case HALF_UP:
case HALF_EVEN:
int halfSquare = sqrtFloor * sqrtFloor + sqrtFloor;
/*
* We wish to test whether or not x <= (sqrtFloor + 0.5)^2 = halfSquare + 0.25. Since both x
* and halfSquare are integers, this is equivalent to testing whether or not x <=
* halfSquare. (We have to deal with overflow, though.)
*
* If we treat halfSquare as an unsigned int, we know that
* sqrtFloor^2 <= x < (sqrtFloor + 1)^2
* halfSquare - sqrtFloor <= x < halfSquare + sqrtFloor + 1
* so |x - halfSquare| <= sqrtFloor. Therefore, it's safe to treat x - halfSquare as a
* signed int, so lessThanBranchFree is safe for use.
*/
return sqrtFloor + lessThanBranchFree(halfSquare, x);
}
throw new AssertionError();
}
private static int sqrtFloor(int x) {
// There is no loss of precision in converting an int to a double, according to
// http://java.sun.com/docs/books/jls/third_edition/html/conversions.html#5.1.2
return (int) Math.sqrt(x);
}
/**
* Returns the result of dividing {@code p} by {@code q}, rounding using the specified {@code
* RoundingMode}. If the {@code RoundingMode} is {@link RoundingMode#DOWN}, then this method is
* equivalent to regular Java division, {@code p / q}; and if it is {@link RoundingMode#FLOOR},
* then this method is equivalent to {@link Math#floorDiv(int,int) Math.floorDiv}{@code (p, q)}.
*
* @throws ArithmeticException if {@code q == 0}, or if {@code mode == UNNECESSARY} and {@code a}
* is not an integer multiple of {@code b}
*/
// Whenever both tests are cheap and functional, it's faster to use &, | instead of &&, ||
@SuppressWarnings({"fallthrough", "ShortCircuitBoolean"})
public static int divide(int p, int q, RoundingMode mode) {
checkNotNull(mode);
if (q == 0) {
throw new ArithmeticException("/ by zero"); // for GWT
}
int div = p / q;
int rem = p - q * div; // equal to p % q
if (rem == 0) {
return div;
}
/*
* Normal Java division rounds towards 0, consistently with RoundingMode.DOWN. We just have to
* deal with the cases where rounding towards 0 is wrong, which typically depends on the sign of
* p / q.
*
* signum is 1 if p and q are both nonnegative or both negative, and -1 otherwise.
*/
int signum = 1 | ((p ^ q) >> (Integer.SIZE - 1));
boolean increment;
switch (mode) {
case UNNECESSARY:
checkRoundingUnnecessary(rem == 0);
// fall through
case DOWN:
increment = false;
break;
case UP:
increment = true;
break;
case CEILING:
increment = signum > 0;
break;
case FLOOR:
increment = signum < 0;
break;
case HALF_EVEN:
case HALF_DOWN:
case HALF_UP:
int absRem = abs(rem);
int cmpRemToHalfDivisor = absRem - (abs(q) - absRem);
// subtracting two nonnegative ints can't overflow
// cmpRemToHalfDivisor has the same sign as compare(abs(rem), abs(q) / 2).
if (cmpRemToHalfDivisor == 0) { // exactly on the half mark
increment = (mode == HALF_UP || (mode == HALF_EVEN & (div & 1) != 0));
} else {
increment = cmpRemToHalfDivisor > 0; // closer to the UP value
}
break;
default:
throw new AssertionError();
}
return increment ? div + signum : div;
}
/**
* Returns {@code x mod m}, a non-negative value less than {@code m}. This differs from {@code x %
* m}, which might be negative. This method is equivalent to {@code Math.floorMod(x, m)} except
* that that method also allows negative {@code m}. {@code Math.floorMod} should be preferred when
* {@code m} is known to be positive.
*
* <p>For example:
*
* {@snippet :
* mod(7, 4) == 3
* mod(-7, 4) == 1
* mod(-1, 4) == 3
* mod(-8, 4) == 0
* mod(8, 4) == 0
* }
*
* @throws ArithmeticException if {@code m <= 0}
* @see <a href="http://docs.oracle.com/javase/specs/jls/se7/html/jls-15.html#jls-15.17.3">
* Remainder Operator</a>
*/
public static int mod(int x, int m) {
if (m <= 0) {
throw new ArithmeticException("Modulus " + m + " must be > 0");
}
return Math.floorMod(x, m);
}
/**
* Returns the greatest common divisor of {@code a, b}. Returns {@code 0} if {@code a == 0 && b ==
* 0}.
*
* @throws IllegalArgumentException if {@code a < 0} or {@code b < 0}
*/
public static int gcd(int a, int b) {
/*
* The reason we require both arguments to be >= 0 is because otherwise, what do you return on
* gcd(0, Integer.MIN_VALUE)? BigInteger.gcd would return positive 2^31, but positive 2^31 isn't
* an int.
*/
checkNonNegative("a", a);
checkNonNegative("b", b);
if (a == 0) {
// 0 % b == 0, so b divides a, but the converse doesn't hold.
// BigInteger.gcd is consistent with this decision.
return b;
} else if (b == 0) {
return a; // similar logic
}
/*
* Uses the binary GCD algorithm; see http://en.wikipedia.org/wiki/Binary_GCD_algorithm. This is
* >40% faster than the Euclidean algorithm in benchmarks.
*/
int aTwos = Integer.numberOfTrailingZeros(a);
a >>= aTwos; // divide out all 2s
int bTwos = Integer.numberOfTrailingZeros(b);
b >>= bTwos; // divide out all 2s
while (a != b) { // both a, b are odd
// The key to the binary GCD algorithm is as follows:
// Both a and b are odd. Assume a > b; then gcd(a - b, b) = gcd(a, b).
// But in gcd(a - b, b), a - b is even and b is odd, so we can divide out powers of two.
// We bend over backwards to avoid branching, adapting a technique from
// http://graphics.stanford.edu/~seander/bithacks.html#IntegerMinOrMax
int delta = a - b; // can't overflow, since a and b are nonnegative
int minDeltaOrZero = delta & (delta >> (Integer.SIZE - 1));
// equivalent to Math.min(delta, 0)
a = delta - minDeltaOrZero - minDeltaOrZero; // sets a to Math.abs(a - b)
// a is now nonnegative and even
b += minDeltaOrZero; // sets b to min(old a, b)
a >>= Integer.numberOfTrailingZeros(a); // divide out all 2s, since 2 doesn't divide b
}
return a << min(aTwos, bTwos);
}
/**
* Returns the sum of {@code a} and {@code b}, provided it does not overflow.
*
* <p><b>Note:</b> this method is now unnecessary and should be treated as deprecated; use {@link
* Math#addExact(int, int)} instead.
*
* @throws ArithmeticException if {@code a + b} overflows in signed {@code int} arithmetic
*/
@InlineMe(replacement = "Math.addExact(a, b)")
public static int checkedAdd(int a, int b) {
return Math.addExact(a, b);
}
/**
* Returns the difference of {@code a} and {@code b}, provided it does not overflow.
*
* <p><b>Note:</b> this method is now unnecessary and should be treated as deprecated; use {@link
* Math#subtractExact(int, int)} instead.
*
* @throws ArithmeticException if {@code a - b} overflows in signed {@code int} arithmetic
*/
@InlineMe(replacement = "Math.subtractExact(a, b)")
public static int checkedSubtract(int a, int b) {
return Math.subtractExact(a, b);
}
/**
* Returns the product of {@code a} and {@code b}, provided it does not overflow.
*
* <p><b>Note:</b> this method is now unnecessary and should be treated as deprecated; use {@link
* Math#multiplyExact(int, int)} instead.
*
* @throws ArithmeticException if {@code a * b} overflows in signed {@code int} arithmetic
*/
@InlineMe(replacement = "Math.multiplyExact(a, b)")
public static int checkedMultiply(int a, int b) {
return Math.multiplyExact(a, b);
}
/**
* Returns the {@code b} to the {@code k}th power, provided it does not overflow.
*
* <p>{@link #pow} may be faster, but does not check for overflow.
*
* @throws ArithmeticException if {@code b} to the {@code k}th power overflows in signed {@code
* int} arithmetic
*/
// Whenever both tests are cheap and functional, it's faster to use &, | instead of &&, ||
@SuppressWarnings("ShortCircuitBoolean")
public static int checkedPow(int b, int k) {
checkNonNegative("exponent", k);
switch (b) {
case 0:
return (k == 0) ? 1 : 0;
case 1:
return 1;
case -1:
return ((k & 1) == 0) ? 1 : -1;
case 2:
checkNoOverflow(k < Integer.SIZE - 1, "checkedPow", b, k);
return 1 << k;
case -2:
checkNoOverflow(k < Integer.SIZE, "checkedPow", b, k);
return ((k & 1) == 0) ? 1 << k : -1 << k;
default:
// continue below to handle the general case
}
int accum = 1;
while (true) {
switch (k) {
case 0:
return accum;
case 1:
return Math.multiplyExact(accum, b);
default:
if ((k & 1) != 0) {
accum = Math.multiplyExact(accum, b);
}
k >>= 1;
if (k > 0) {
checkNoOverflow(-FLOOR_SQRT_MAX_INT <= b & b <= FLOOR_SQRT_MAX_INT, "checkedPow", b, k);
b *= b;
}
}
}
}
/**
* Returns the sum of {@code a} and {@code b} unless it would overflow or underflow in which case
* {@code Integer.MAX_VALUE} or {@code Integer.MIN_VALUE} is returned, respectively.
*
* @since 20.0
*/
public static int saturatedAdd(int a, int b) {
return Ints.saturatedCast((long) a + b);
}
/**
* Returns the difference of {@code a} and {@code b} unless it would overflow or underflow in
* which case {@code Integer.MAX_VALUE} or {@code Integer.MIN_VALUE} is returned, respectively.
*
* @since 20.0
*/
public static int saturatedSubtract(int a, int b) {
return Ints.saturatedCast((long) a - b);
}
/**
* Returns the product of {@code a} and {@code b} unless it would overflow or underflow in which
* case {@code Integer.MAX_VALUE} or {@code Integer.MIN_VALUE} is returned, respectively.
*
* @since 20.0
*/
public static int saturatedMultiply(int a, int b) {
return Ints.saturatedCast((long) a * b);
}
/**
* Returns the {@code b} to the {@code k}th power, unless it would overflow or underflow in which
* case {@code Integer.MAX_VALUE} or {@code Integer.MIN_VALUE} is returned, respectively.
*
* @since 20.0
*/
// Whenever both tests are cheap and functional, it's faster to use &, | instead of &&, ||
@SuppressWarnings("ShortCircuitBoolean")
public static int saturatedPow(int b, int k) {
checkNonNegative("exponent", k);
switch (b) {
case 0:
return (k == 0) ? 1 : 0;
case 1:
return 1;
case -1:
return ((k & 1) == 0) ? 1 : -1;
case 2:
if (k >= Integer.SIZE - 1) {
return Integer.MAX_VALUE;
}
return 1 << k;
case -2:
if (k >= Integer.SIZE) {
return Integer.MAX_VALUE + (k & 1);
}
return ((k & 1) == 0) ? 1 << k : -1 << k;
default:
// continue below to handle the general case
}
int accum = 1;
// if b is negative and k is odd then the limit is MIN otherwise the limit is MAX
int limit = Integer.MAX_VALUE + ((b >>> (Integer.SIZE - 1)) & (k & 1));
while (true) {
switch (k) {
case 0:
return accum;
case 1:
return saturatedMultiply(accum, b);
default:
if ((k & 1) != 0) {
accum = saturatedMultiply(accum, b);
}
k >>= 1;
if (k > 0) {
if (-FLOOR_SQRT_MAX_INT > b | b > FLOOR_SQRT_MAX_INT) {
return limit;
}
b *= b;
}
}
}
}
@VisibleForTesting static final int FLOOR_SQRT_MAX_INT = 46340;
/**
* Returns {@code n!}, that is, the product of the first {@code n} positive integers, {@code 1} if
* {@code n == 0}, or {@link Integer#MAX_VALUE} if the result does not fit in a {@code int}.
*
* @throws IllegalArgumentException if {@code n < 0}
*/
public static int factorial(int n) {
checkNonNegative("n", n);
return (n < factorials.length) ? factorials[n] : Integer.MAX_VALUE;
}
private static final int[] factorials = {
1,
1,
1 * 2,
1 * 2 * 3,
1 * 2 * 3 * 4,
1 * 2 * 3 * 4 * 5,
1 * 2 * 3 * 4 * 5 * 6,
1 * 2 * 3 * 4 * 5 * 6 * 7,
1 * 2 * 3 * 4 * 5 * 6 * 7 * 8,
1 * 2 * 3 * 4 * 5 * 6 * 7 * 8 * 9,
1 * 2 * 3 * 4 * 5 * 6 * 7 * 8 * 9 * 10,
1 * 2 * 3 * 4 * 5 * 6 * 7 * 8 * 9 * 10 * 11,
1 * 2 * 3 * 4 * 5 * 6 * 7 * 8 * 9 * 10 * 11 * 12
};
/**
* Returns {@code n} choose {@code k}, also known as the binomial coefficient of {@code n} and
* {@code k}, or {@link Integer#MAX_VALUE} if the result does not fit in an {@code int}.
*
* @throws IllegalArgumentException if {@code n < 0}, {@code k < 0} or {@code k > n}
*/
public static int binomial(int n, int k) {
checkNonNegative("n", n);
checkNonNegative("k", k);
checkArgument(k <= n, "k (%s) > n (%s)", k, n);
if (k > (n >> 1)) {
k = n - k;
}
if (k >= biggestBinomials.length || n > biggestBinomials[k]) {
return Integer.MAX_VALUE;
}
switch (k) {
case 0:
return 1;
case 1:
return n;
default:
long result = 1;
for (int i = 0; i < k; i++) {
result *= n - i;
result /= i + 1;
}
return (int) result;
}
}
// binomial(biggestBinomials[k], k) fits in an int, but not binomial(biggestBinomials[k]+1,k).
@VisibleForTesting
static final int[] biggestBinomials = {
Integer.MAX_VALUE,
Integer.MAX_VALUE,
65536,
2345,
477,
193,
110,
75,
58,
49,
43,
39,
37,
35,
34,
34,
33
};
/**
* Returns the arithmetic mean of {@code x} and {@code y}, rounded towards negative infinity. This
* method is overflow resilient.
*
* @since 14.0
*/
public static int mean(int x, int y) {
// Efficient method for computing the arithmetic mean.
// The alternative (x + y) / 2 fails for large values.
// The alternative (x + y) >>> 1 fails for negative values.
return (x & y) + ((x ^ y) >> 1);
}
/**
* Returns {@code true} if {@code n} is a <a
* href="http://mathworld.wolfram.com/PrimeNumber.html">prime number</a>: an integer <i>greater
* than one</i> that cannot be factored into a product of <i>smaller</i> positive integers.
* Returns {@code false} if {@code n} is zero, one, or a composite number (one which <i>can</i> be
* factored into smaller positive integers).
*
* <p>To test larger numbers, use {@link LongMath#isPrime} or {@link BigInteger#isProbablePrime}.
*
* @throws IllegalArgumentException if {@code n} is negative
* @since 20.0
*/
@GwtIncompatible // TODO
public static boolean isPrime(int n) {
return LongMath.isPrime(n);
}
/**
* Returns the closest representable {@code int} to the absolute value of {@code x}.
*
* <p>This is the same thing as the true absolute value of {@code x} except in the case when
* {@code x} is {@link Integer#MIN_VALUE}, in which case this returns {@link Integer#MAX_VALUE}.
* (Note that {@code Integer.MAX_VALUE} is mathematically equal to {@code -Integer.MIN_VALUE -
* 1}.)
*
* <p>There are three common APIs for determining the absolute value of an integer, all of which
* behave identically except when passed {@code Integer.MIN_VALUE}. Those methods are:
*
* <ul>
* <li>{@link Math#abs(int)}, which returns {@code Integer.MIN_VALUE} when passed {@code
* Integer.MIN_VALUE}
* <li>{@link Math#absExact(int)}, which throws {@link ArithmeticException} when passed {@code
* Integer.MIN_VALUE}
* <li>this method, {@code IntMath.saturatedAbs(int)}, which returns {@code Integer.MAX_VALUE}
* when passed {@code Integer.MIN_VALUE}
* </ul>
*
* <p>Note that if your only goal is to turn a well-distributed {@code int} (such as a random
* number or hash code) into a well-distributed nonnegative number, the most even distribution is
* achieved not by this method or other absolute value methods, but by {@code x &
* Integer.MAX_VALUE}.
*
* @since 33.5.0
*/
public static int saturatedAbs(int x) {
return (x == Integer.MIN_VALUE) ? Integer.MAX_VALUE : Math.abs(x);
}
private IntMath() {}
}
| IntMath |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/header/GetAllSubscriptionGroupResponseHeader.java | {
"start": 1397,
"end": 1798
} | class ____ implements CommandCustomHeader {
@Override
public void checkFields() throws RemotingCommandException {
}
@CFNotNull
private Integer totalGroupNum;
public Integer getTotalGroupNum() {
return totalGroupNum;
}
public void setTotalGroupNum(Integer totalGroupNum) {
this.totalGroupNum = totalGroupNum;
}
}
| GetAllSubscriptionGroupResponseHeader |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/DataType.java | {
"start": 3834,
"end": 4814
} | class ____ representing values. If no conversion class
* was defined manually, the default conversion defined by the logical type is used.
*
* @see LogicalType#getDefaultConversion()
* @return the expected conversion class
*/
public Class<?> getConversionClass() {
return conversionClass;
}
/**
* Returns the children of this data type, if any. Returns an empty list if this data type is
* atomic.
*
* @return the children data types
*/
public abstract List<DataType> getChildren();
public abstract <R> R accept(DataTypeVisitor<R> visitor);
/**
* Creates a copy of this {@link DataType} instance with the internal data type conversion
* classes. This method performs the transformation deeply through its children. For example,
* for a {@link DataType} instance representing a row type with a timestamp field, this method
* returns a new {@link DataType}, with the conversion | for |
java | spring-projects__spring-framework | spring-core/src/test/java/org/springframework/core/io/buffer/DefaultDataBufferTests.java | {
"start": 997,
"end": 1837
} | class ____ {
private final DefaultDataBufferFactory bufferFactory = new DefaultDataBufferFactory();
@Test // gh-30967
void getNativeBuffer() {
DefaultDataBuffer dataBuffer = this.bufferFactory.allocateBuffer(256);
dataBuffer.write("0123456789", StandardCharsets.UTF_8);
byte[] result = new byte[7];
dataBuffer.read(result);
assertThat(result).isEqualTo("0123456".getBytes(StandardCharsets.UTF_8));
ByteBuffer nativeBuffer = dataBuffer.getNativeBuffer();
assertThat(nativeBuffer.position()).isEqualTo(7);
assertThat(dataBuffer.readPosition()).isEqualTo(7);
assertThat(nativeBuffer.limit()).isEqualTo(10);
assertThat(dataBuffer.writePosition()).isEqualTo(10);
assertThat(nativeBuffer.capacity()).isEqualTo(256);
assertThat(dataBuffer.capacity()).isEqualTo(256);
release(dataBuffer);
}
}
| DefaultDataBufferTests |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/scheduler/adaptive/WaitingForResourcesTest.java | {
"start": 11518,
"end": 15218
} | class ____ extends MockStateWithoutExecutionGraphContext
implements WaitingForResources.Context {
private static final Logger LOG = LoggerFactory.getLogger(MockContext.class);
private final StateValidator<Void> creatingExecutionGraphStateValidator =
new StateValidator<>("executing");
private Supplier<Boolean> hasDesiredResourcesSupplier = () -> false;
private Supplier<Boolean> hasSufficientResourcesSupplier = () -> false;
private final Queue<ScheduledTask<Void>> scheduledTasks =
new PriorityQueue<>(
Comparator.comparingLong(o -> o.getDelay(TimeUnit.MILLISECONDS)));
private final ManualClock testingClock = new ManualClock();
public void setHasDesiredResources(Supplier<Boolean> sup) {
hasDesiredResourcesSupplier = sup;
}
public void setHasSufficientResources(Supplier<Boolean> sup) {
hasSufficientResourcesSupplier = sup;
}
void setExpectCreatingExecutionGraph() {
creatingExecutionGraphStateValidator.expectInput(none -> {});
}
void runScheduledTasks(long untilDelay) {
LOG.info("Running scheduled tasks with a delay between 0 and {}ms:", untilDelay);
while (scheduledTasks.peek() != null
&& scheduledTasks.peek().getDelay(TimeUnit.MILLISECONDS) <= untilDelay) {
ScheduledTask<Void> scheduledTask = scheduledTasks.poll();
LOG.info(
"Running task with delay {}",
scheduledTask.getDelay(TimeUnit.MILLISECONDS));
scheduledTask.execute();
if (scheduledTask.isPeriodic()) {
// remove non-periodic tasks
scheduledTasks.add(scheduledTask);
}
}
}
void runScheduledTasks() {
runScheduledTasks(Long.MAX_VALUE);
}
@Override
public void afterEach(ExtensionContext extensionContext) throws Exception {
super.afterEach(extensionContext);
creatingExecutionGraphStateValidator.close();
}
@Override
public boolean hasDesiredResources() {
return hasDesiredResourcesSupplier.get();
}
@Override
public boolean hasSufficientResources() {
return hasSufficientResourcesSupplier.get();
}
@Override
public ScheduledFuture<?> runIfState(State expectedState, Runnable action, Duration delay) {
LOG.info(
"Scheduling work with delay {} for earliest execution at {}",
delay.toMillis(),
testingClock.absoluteTimeMillis() + delay.toMillis());
final ScheduledTask<Void> scheduledTask =
new ScheduledTask<>(
() -> {
if (!hasStateTransition()) {
action.run();
}
return null;
},
testingClock.absoluteTimeMillis() + delay.toMillis());
scheduledTasks.add(scheduledTask);
return scheduledTask;
}
@Override
public void goToCreatingExecutionGraph(@Nullable ExecutionGraph previousExecutionGraph) {
creatingExecutionGraphStateValidator.validateInput(null);
registerStateTransition();
}
}
static <T> Consumer<T> assertNonNull() {
return (item) -> assertThat(item).isNotNull();
}
}
| MockContext |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/DialectChecks.java | {
"start": 9945,
"end": 10171
} | class ____ implements DialectCheck {
@Override
public boolean isMatch(Dialect dialect) {
return dialect.getDmlTargetColumnQualifierSupport() != DmlTargetColumnQualifierSupport.NONE;
}
}
}
| SupportsDmlTargetColumnQualifier |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/operators/base/GroupReduceOperatorBase.java | {
"start": 2562,
"end": 10994
} | class ____<IN, OUT, FT extends GroupReduceFunction<IN, OUT>>
extends SingleInputOperator<IN, OUT, FT> {
/** The ordering for the order inside a reduce group. */
private Ordering groupOrder;
private boolean combinable;
private Partitioner<?> customPartitioner;
public GroupReduceOperatorBase(
UserCodeWrapper<FT> udf,
UnaryOperatorInformation<IN, OUT> operatorInfo,
int[] keyPositions,
String name) {
super(udf, operatorInfo, keyPositions, name);
}
public GroupReduceOperatorBase(
FT udf,
UnaryOperatorInformation<IN, OUT> operatorInfo,
int[] keyPositions,
String name) {
super(new UserCodeObjectWrapper<FT>(udf), operatorInfo, keyPositions, name);
}
public GroupReduceOperatorBase(
Class<? extends FT> udf,
UnaryOperatorInformation<IN, OUT> operatorInfo,
int[] keyPositions,
String name) {
super(new UserCodeClassWrapper<FT>(udf), operatorInfo, keyPositions, name);
}
public GroupReduceOperatorBase(
UserCodeWrapper<FT> udf, UnaryOperatorInformation<IN, OUT> operatorInfo, String name) {
super(udf, operatorInfo, name);
}
public GroupReduceOperatorBase(
FT udf, UnaryOperatorInformation<IN, OUT> operatorInfo, String name) {
super(new UserCodeObjectWrapper<FT>(udf), operatorInfo, name);
}
public GroupReduceOperatorBase(
Class<? extends FT> udf, UnaryOperatorInformation<IN, OUT> operatorInfo, String name) {
super(new UserCodeClassWrapper<FT>(udf), operatorInfo, name);
}
// --------------------------------------------------------------------------------------------
/**
* Sets the order of the elements within a reduce group.
*
* @param order The order for the elements in a reduce group.
*/
public void setGroupOrder(Ordering order) {
this.groupOrder = order;
}
/**
* Gets the order of elements within a reduce group. If no such order has been set, this method
* returns null.
*
* @return The secondary order.
*/
public Ordering getGroupOrder() {
return this.groupOrder;
}
/**
* Marks the group reduce operation as combinable. Combinable operations may pre-reduce the data
* before the actual group reduce operations. Combinable user-defined functions must implement
* the interface {@link GroupCombineFunction}.
*
* @param combinable Flag to mark the group reduce operation as combinable.
*/
public void setCombinable(boolean combinable) {
// sanity check
if (combinable
&& !GroupCombineFunction.class.isAssignableFrom(
this.userFunction.getUserCodeClass())) {
throw new IllegalArgumentException(
"Cannot set a UDF as combinable if it does not implement the interface "
+ GroupCombineFunction.class.getName());
} else {
this.combinable = combinable;
}
}
/**
* Checks whether the operation is combinable.
*
* @return True, if the UDF is combinable, false if not.
* @see #setCombinable(boolean)
*/
public boolean isCombinable() {
return this.combinable;
}
public void setCustomPartitioner(Partitioner<?> customPartitioner) {
if (customPartitioner != null) {
int[] keys = getKeyColumns(0);
if (keys == null || keys.length == 0) {
throw new IllegalArgumentException(
"Cannot use custom partitioner for a non-grouped GroupReduce (AllGroupReduce)");
}
if (keys.length > 1) {
throw new IllegalArgumentException(
"Cannot use the key partitioner for composite keys (more than one key field)");
}
}
this.customPartitioner = customPartitioner;
}
public Partitioner<?> getCustomPartitioner() {
return customPartitioner;
}
private TypeComparator<IN> getTypeComparator(
TypeInformation<IN> typeInfo,
int[] sortColumns,
boolean[] sortOrderings,
ExecutionConfig executionConfig) {
if (typeInfo instanceof CompositeType) {
return ((CompositeType<IN>) typeInfo)
.createComparator(sortColumns, sortOrderings, 0, executionConfig);
} else if (typeInfo instanceof AtomicType) {
return ((AtomicType<IN>) typeInfo).createComparator(sortOrderings[0], executionConfig);
}
throw new InvalidProgramException(
"Input type of GroupReduce must be one of composite types or atomic types.");
}
// --------------------------------------------------------------------------------------------
@Override
protected List<OUT> executeOnCollections(
List<IN> inputData, RuntimeContext ctx, ExecutionConfig executionConfig)
throws Exception {
GroupReduceFunction<IN, OUT> function = this.userFunction.getUserCodeObject();
UnaryOperatorInformation<IN, OUT> operatorInfo = getOperatorInfo();
TypeInformation<IN> inputType = operatorInfo.getInputType();
int[] keyColumns = getKeyColumns(0);
int[] sortColumns = keyColumns;
boolean[] sortOrderings = new boolean[sortColumns.length];
if (groupOrder != null) {
sortColumns = ArrayUtils.addAll(sortColumns, groupOrder.getFieldPositions());
sortOrderings = ArrayUtils.addAll(sortOrderings, groupOrder.getFieldSortDirections());
}
if (sortColumns.length == 0) { // => all reduce. No comparator
checkArgument(sortOrderings.length == 0);
} else {
final TypeComparator<IN> sortComparator =
getTypeComparator(inputType, sortColumns, sortOrderings, executionConfig);
Collections.sort(
inputData,
new Comparator<IN>() {
@Override
public int compare(IN o1, IN o2) {
return sortComparator.compare(o1, o2);
}
});
}
FunctionUtils.setFunctionRuntimeContext(function, ctx);
FunctionUtils.openFunction(function, DefaultOpenContext.INSTANCE);
ArrayList<OUT> result = new ArrayList<OUT>();
if (inputData.size() > 0) {
final TypeSerializer<IN> inputSerializer =
inputType.createSerializer(executionConfig.getSerializerConfig());
if (keyColumns.length == 0) {
TypeSerializer<OUT> outSerializer =
getOperatorInfo()
.getOutputType()
.createSerializer(executionConfig.getSerializerConfig());
List<IN> inputDataCopy = new ArrayList<IN>(inputData.size());
for (IN in : inputData) {
inputDataCopy.add(inputSerializer.copy(in));
}
CopyingListCollector<OUT> collector =
new CopyingListCollector<OUT>(result, outSerializer);
function.reduce(inputDataCopy, collector);
} else {
boolean[] keyOrderings = new boolean[keyColumns.length];
final TypeComparator<IN> comparator =
getTypeComparator(inputType, keyColumns, keyOrderings, executionConfig);
ListKeyGroupedIterator<IN> keyedIterator =
new ListKeyGroupedIterator<IN>(inputData, inputSerializer, comparator);
TypeSerializer<OUT> outSerializer =
getOperatorInfo()
.getOutputType()
.createSerializer(executionConfig.getSerializerConfig());
CopyingListCollector<OUT> collector =
new CopyingListCollector<OUT>(result, outSerializer);
while (keyedIterator.nextKey()) {
function.reduce(keyedIterator.getValues(), collector);
}
}
}
FunctionUtils.closeFunction(function);
return result;
}
}
| GroupReduceOperatorBase |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/SupportsReportStatistics.java | {
"start": 970,
"end": 1339
} | interface ____
* report statistics to Spark.
* <p>
* As of Spark 3.0, statistics are reported to the optimizer after operators are pushed to the
* data source. Implementations may return more accurate statistics based on pushed operators
* which may improve query performance by providing better information to the optimizer.
*
* @since 3.0.0
*/
@Evolving
public | to |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/leaderelection/LeaderElection.java | {
"start": 1057,
"end": 2733
} | interface ____ extends AutoCloseable {
/** Registers the passed {@link LeaderContender} with the leader election process. */
void startLeaderElection(LeaderContender contender) throws Exception;
/**
* Confirms that the {@link LeaderContender} has accepted the leadership identified by the given
* leader session id. It also publishes the leader address under which the leader is reachable.
*
* <p>The data is only confirmed if the leadership is still acquired.
*
* <p>The intention of this method is to establish an order between setting the new leader
* session ID in the {@link LeaderContender} and publishing the new leader session ID and the
* related leader address to the leader retrieval services.
*
* @param leaderSessionID The new leader session ID
* @param leaderAddress The address of the new leader
*/
CompletableFuture<Void> confirmLeadershipAsync(UUID leaderSessionID, String leaderAddress);
/**
* Returns {@code true} if the service's {@link LeaderContender} has the leadership under the
* given leader session ID acquired.
*
* @param leaderSessionId identifying the current leader
* @return true if the associated {@link LeaderContender} is the leader, otherwise false
*/
CompletableFuture<Boolean> hasLeadershipAsync(UUID leaderSessionId);
/**
* Closes the {@code LeaderElection} by deregistering the {@link LeaderContender} from the
* underlying leader election. {@link LeaderContender#revokeLeadership()} will be called if the
* service still holds the leadership.
*/
void close() throws Exception;
}
| LeaderElection |
java | elastic__elasticsearch | x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Completers.java | {
"start": 505,
"end": 815
} | class ____ {
// TODO: need tree structure
static final Completer INSTANCE = new AggregateCompleter(
new ArgumentCompleter(new StringsCompleter("", "EXPLAIN", "SHOW", "SELECT", "SET")),
new ArgumentCompleter(new StringsCompleter("SHOW", "TABLE", "COLUMNS", "FUNCTIONS"))
);
}
| Completers |
java | apache__flink | flink-table/flink-sql-client/src/main/java/org/apache/flink/table/client/util/ClientWrapperClassLoader.java | {
"start": 2462,
"end": 4296
} | class ____ extends SafetyNetWrapperClassLoader {
private static final Logger LOG = LoggerFactory.getLogger(ClientWrapperClassLoader.class);
static {
ClassLoader.registerAsParallelCapable();
}
private final Configuration configuration;
private final List<FlinkUserCodeClassLoader> originClassLoaders;
public ClientWrapperClassLoader(FlinkUserCodeClassLoader inner, Configuration configuration) {
super(inner, inner.getParent());
this.configuration = new Configuration(configuration);
this.originClassLoaders = new ArrayList<>();
}
public void removeURL(URL url) {
Set<URL> registeredUrls = Stream.of(inner.getURLs()).collect(Collectors.toSet());
if (!registeredUrls.contains(url)) {
LOG.warn(
String.format(
"Could not remove the specified jar because the jar path [%s] is not found in classloader.",
url));
return;
}
originClassLoaders.add(inner);
// build a new classloader without removed jars
registeredUrls.remove(url);
inner =
ClientClassloaderUtil.buildUserClassLoader(
new ArrayList<>(registeredUrls),
ClientWrapperClassLoader.class.getClassLoader(),
configuration);
}
@Override
public void close() {
super.close();
// close other classloader in the list
for (FlinkUserCodeClassLoader classLoader : originClassLoaders) {
try {
classLoader.close();
} catch (IOException e) {
LOG.error("Failed to close the origin classloader.", e);
}
}
originClassLoaders.clear();
}
}
| ClientWrapperClassLoader |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FinalizeTest.java | {
"start": 825,
"end": 1110
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(Finalize.class, getClass());
@Test
public void positive() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| FinalizeTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/association/OneToManyLazyAndEagerTest.java | {
"start": 2376,
"end": 3138
} | class ____ {
@Id
private String id;
@OneToMany(targetEntity = OrderItem.class, mappedBy = "order", fetch = FetchType.EAGER)
private final Set<OrderItem> orderItems = new HashSet<>();
@ManyToOne(fetch = FetchType.EAGER)
private User user;
@ManyToOne(fetch = FetchType.EAGER)
private User targetUser;
public Order() {
}
public Order(String id, User user, User targetUser) {
this.id = id;
this.user = user;
this.targetUser = targetUser;
}
public String getId() {
return id;
}
public Set<OrderItem> getOrderItems() {
return orderItems;
}
public User getUser() {
return user;
}
public User getTargetUser() {
return targetUser;
}
}
@Entity(name = "User")
@Table(name = "Users")
public static | Order |
java | elastic__elasticsearch | x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityWithMultipleRemotesRestIT.java | {
"start": 1285,
"end": 10852
} | class ____ extends AbstractRemoteClusterSecurityTestCase {
protected static ElasticsearchCluster otherFulfillingCluster;
protected static RestClient otherFulfillingClusterClient;
@BeforeClass
public static void initOtherFulfillingClusterClient() {
if (otherFulfillingClusterClient != null) {
return;
}
otherFulfillingClusterClient = buildRestClient(otherFulfillingCluster);
}
@AfterClass
public static void closeOtherFulfillingClusterClient() throws IOException {
try {
IOUtils.close(otherFulfillingClusterClient);
} finally {
otherFulfillingClusterClient = null;
}
}
public void testCrossClusterSearch() throws Exception {
configureRemoteCluster();
configureRolesOnClusters();
// Fulfilling cluster
{
// Index some documents, so we can attempt to search them from the querying cluster
final Request bulkRequest = new Request("POST", "/_bulk?refresh=true");
bulkRequest.setJsonEntity(Strings.format("""
{ "index": { "_index": "cluster1_index1" } }
{ "name": "doc1" }
{ "index": { "_index": "cluster1_index2" } }
{ "name": "doc2" }
"""));
assertOK(performRequestAgainstFulfillingCluster(bulkRequest));
}
// Other fulfilling cluster
{
// Index some documents, so we can attempt to search them from the querying cluster
final Request bulkRequest = new Request("POST", "/_bulk?refresh=true");
bulkRequest.setJsonEntity(Strings.format("""
{ "index": { "_index": "cluster2_index1" } }
{ "name": "doc1" }
{ "index": { "_index": "cluster2_index2" } }
{ "name": "doc2" }
"""));
assertOK(performRequestAgainstOtherFulfillingCluster(bulkRequest));
}
// Query cluster
{
// Index some documents, to use them in a multi-cluster search
final var indexDocRequest = new Request("POST", "/local_index/_doc?refresh=true");
indexDocRequest.setJsonEntity("{\"name\": \"doc1\"}");
assertOK(client().performRequest(indexDocRequest));
// Search across local cluster and both remotes
searchAndAssertIndicesFound(
String.format(
Locale.ROOT,
"/local_index,%s:%s/_search?ccs_minimize_roundtrips=%s",
randomFrom("my_remote_*", "*"),
randomFrom("*_index1", "*"),
randomBoolean()
),
"cluster1_index1",
"cluster2_index1",
"local_index"
);
// Search across both remotes using cluster alias wildcard
searchAndAssertIndicesFound(
String.format(
Locale.ROOT,
"/%s:%s/_search?ccs_minimize_roundtrips=%s",
randomFrom("my_remote_*", "*"),
randomFrom("*_index1", "*"),
randomBoolean()
),
"cluster1_index1",
"cluster2_index1"
);
// Search across both remotes using explicit cluster aliases
searchAndAssertIndicesFound(
String.format(
Locale.ROOT,
"/my_remote_cluster:%s,my_remote_cluster_2:%s/_search?ccs_minimize_roundtrips=%s",
randomFrom("cluster1_index1", "*_index1", "*"),
randomFrom("cluster2_index1", "*_index1", "*"),
randomBoolean()
),
"cluster1_index1",
"cluster2_index1"
);
// Search single remote
final boolean searchFirstCluster = randomBoolean();
final String index1 = searchFirstCluster ? "cluster1_index1" : "cluster2_index1";
searchAndAssertIndicesFound(
String.format(
Locale.ROOT,
"/%s:%s/_search?ccs_minimize_roundtrips=%s",
searchFirstCluster ? "my_remote_cluster" : "my_remote_cluster_2",
randomFrom(index1, "*_index1", "*"),
randomBoolean()
),
index1
);
// To simplify the test setup, we only ever (randomly) set skip_unavailable on the other remote, not on both,
// i.e. the first remote cluster always has skip_unavailable = false.
// This impacts below failure scenarios; in some cases, skipping the other remote results in overall request success
final boolean skipUnavailableOnOtherCluster = isSkipUnavailable("my_remote_cluster_2");
// Search when one cluster throws 403
// No permissions for this index name, so searching for it on either remote will result in 403
final String missingIndex = "missingIndex";
final boolean missingIndexOnFirstCluster = randomBoolean();
// Make sure we search for missing index on at least one remote, possibly both
final boolean missingIndexOnSecondCluster = false == missingIndexOnFirstCluster || randomBoolean();
final String searchPath1 = String.format(
Locale.ROOT,
"/my_remote_cluster:%s,my_remote_cluster_2:%s/_search?ccs_minimize_roundtrips=%s",
missingIndexOnFirstCluster ? missingIndex : randomFrom("cluster1_index1", "*_index1", "*"),
missingIndexOnSecondCluster ? missingIndex : randomFrom("cluster2_index1", "*_index1", "*"),
randomBoolean()
);
if (skipUnavailableOnOtherCluster && false == missingIndexOnFirstCluster) {
// 403 from other cluster is skipped, so we get a result
searchAndAssertIndicesFound(searchPath1, "cluster1_index1");
} else {
searchAndExpect403(searchPath1);
}
// Search with cluster alias wildcard matching both remotes, where index is authorized on one but not the other
final String index2 = randomFrom("cluster1_index1", "cluster2_index1");
final String searchPath2 = String.format(
Locale.ROOT,
"/my_remote_cluster*:%s/_search?ccs_minimize_roundtrips=%s",
index2,
randomBoolean()
);
if (skipUnavailableOnOtherCluster && index2.equals("cluster1_index1")) {
// 403 from other cluster is skipped, so we get a result
searchAndAssertIndicesFound(searchPath2, index2);
} else {
searchAndExpect403(searchPath2);
}
// Search when both clusters throw 403; in this case we always fail because first cluster is not skipped
searchAndExpect403(String.format(Locale.ROOT, "/*:%s/_search?ccs_minimize_roundtrips=%s", "missingIndex", randomBoolean()));
}
}
private static boolean isSkipUnavailable(String clusterAlias) throws IOException {
final Request remoteInfoRequest = new Request("GET", "/_remote/info");
final Response remoteInfoResponse = adminClient().performRequest(remoteInfoRequest);
assertOK(remoteInfoResponse);
final Map<String, Object> remoteInfoMap = responseAsMap(remoteInfoResponse);
assertThat(remoteInfoMap, hasKey(clusterAlias));
assertThat(ObjectPath.eval(clusterAlias + ".connected", remoteInfoMap), is(true));
return ObjectPath.eval(clusterAlias + ".skip_unavailable", remoteInfoMap);
}
private static void searchAndExpect403(String searchPath) {
final ResponseException exception = expectThrows(
ResponseException.class,
() -> performRequestWithRemoteSearchUser(new Request("GET", searchPath))
);
assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(403));
}
protected abstract void configureRolesOnClusters() throws IOException;
static void searchAndAssertIndicesFound(String searchPath, String... expectedIndices) throws IOException {
final Response response = performRequestWithRemoteSearchUser(new Request("GET", searchPath));
assertOK(response);
final SearchResponse searchResponse;
try (var parser = responseAsParser(response)) {
searchResponse = SearchResponseUtils.parseSearchResponse(parser);
}
try {
final List<String> actualIndices = Arrays.stream(searchResponse.getHits().getHits())
.map(SearchHit::getIndex)
.collect(Collectors.toList());
assertThat(actualIndices, containsInAnyOrder(expectedIndices));
} finally {
searchResponse.decRef();
}
}
static Response performRequestWithRemoteSearchUser(final Request request) throws IOException {
request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", basicAuthHeaderValue(REMOTE_SEARCH_USER, PASS)));
return client().performRequest(request);
}
static Response performRequestAgainstOtherFulfillingCluster(Request putRoleRequest) throws IOException {
return performRequestWithAdminUser(otherFulfillingClusterClient, putRoleRequest);
}
}
| AbstractRemoteClusterSecurityWithMultipleRemotesRestIT |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/deser/DefaultObjectDeserializerTest2.java | {
"start": 1357,
"end": 1407
} | class ____<K, V> extends HashMap<K, V> {
}
}
| Map2 |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/action/PostConnectorSyncJobActionTests.java | {
"start": 854,
"end": 1839
} | class ____ extends ESTestCase {
public void testValidate_WhenConnectorIdIsPresent_ExpectNoValidationError() {
PostConnectorSyncJobAction.Request request = ConnectorSyncJobTestUtils.getRandomPostConnectorSyncJobActionRequest();
ActionRequestValidationException exception = request.validate();
assertThat(exception, nullValue());
}
public void testValidate_WhenConnectorIdIsNull_ExpectValidationError() {
PostConnectorSyncJobAction.Request requestWithMissingConnectorId = new PostConnectorSyncJobAction.Request(
null,
ConnectorSyncJobType.FULL,
ConnectorSyncJobTriggerMethod.ON_DEMAND
);
ActionRequestValidationException exception = requestWithMissingConnectorId.validate();
assertThat(exception, notNullValue());
assertThat(exception.getMessage(), containsString(PostConnectorSyncJobAction.Request.EMPTY_CONNECTOR_ID_ERROR_MESSAGE));
}
}
| PostConnectorSyncJobActionTests |
java | apache__kafka | connect/runtime/src/test/java/org/apache/kafka/connect/runtime/isolation/PluginsTest.java | {
"start": 31881,
"end": 32087
} | class ____ extends WorkerConfig {
public TestableWorkerConfig(Map<String, String> props) {
super(WorkerConfig.baseConfigDef(), props);
}
}
public static | TestableWorkerConfig |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jdbc/internal/BatchingTest.java | {
"start": 1259,
"end": 11423
} | class ____ implements BatchKey {
private final String SANDBOX_TBL = "SANDBOX_JDBC_TST";
@Test
public void testBatchingUsage(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
final var jdbcCoordinator = session.getJdbcCoordinator();
final var insertSql = "insert into SANDBOX_JDBC_TST( ID, NAME ) values ( ?, ? )";
final var batchBuilder = new BatchBuilderImpl( 2 );
final var batchKey = new BasicBatchKey( "this" );
final var insertBatch = batchBuilder.buildBatch( batchKey, null, SANDBOX_TBL, session, insertSql );
assertThat( insertBatch ).isNotNull();
final var batchObserver = new JournalingBatchObserver();
insertBatch.addObserver( batchObserver );
final var jdbcValueBindings = sandboxInsertValueBindings( session );
// bind values for #1 - should do nothing at the JDBC level
jdbcValueBindings.bindValue( 1, SANDBOX_TBL, "ID", ParameterUsage.SET );
jdbcValueBindings.bindValue( "name", SANDBOX_TBL, "NAME", ParameterUsage.SET );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 0 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isFalse();
// add #1 to the batch - will acquire prepared statement to bind values
insertBatch.addToBatch( jdbcValueBindings, null );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 0 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isTrue();
// bind values for #2 - again, nothing at JDBC level (we have statement from earlier)
jdbcValueBindings.bindValue( 2, SANDBOX_TBL, "ID", ParameterUsage.SET );
jdbcValueBindings.bindValue( "another name", SANDBOX_TBL, "NAME", ParameterUsage.SET );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 0 );
// add #2 to the batch -
// - uses the previous prepared statement to bind values
// - batch size has been exceeded, trigger an implicit execution
insertBatch.addToBatch( jdbcValueBindings, null );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 1 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isTrue();
// execute the batch - effectively only increments the explicit-execution counter
insertBatch.execute();
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 1 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 1 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isFalse();
insertBatch.release();
} );
}
private JdbcValueBindingsImpl sandboxInsertValueBindings(SessionImplementor session) {
return new JdbcValueBindingsImpl(
MutationType.INSERT,
null,
new JdbcValueBindingsImpl.JdbcValueDescriptorAccess() {
@Override
public JdbcValueDescriptor resolveValueDescriptor(
String tableName,
String columnName,
ParameterUsage usage) {
assert tableName.equals( SANDBOX_TBL );
if ( columnName.equals( "ID" ) ) {
return new JdbcValueDescriptor() {
@Override
public String getColumnName() {
return "ID";
}
@Override
public ParameterUsage getUsage() {
return ParameterUsage.SET;
}
@Override
public int getJdbcPosition() {
return 1;
}
@Override
public JdbcMapping getJdbcMapping() {
return session.getTypeConfiguration()
.getBasicTypeRegistry()
.resolve( StandardBasicTypes.INTEGER );
}
};
}
if ( columnName.equals( "NAME" ) ) {
return new JdbcValueDescriptor() {
@Override
public String getColumnName() {
return "NAME";
}
@Override
public ParameterUsage getUsage() {
return ParameterUsage.SET;
}
@Override
public int getJdbcPosition() {
return 2;
}
@Override
public JdbcMapping getJdbcMapping() {
return session.getTypeConfiguration()
.getBasicTypeRegistry()
.resolve( StandardBasicTypes.STRING );
}
};
}
throw new IllegalArgumentException( "Unknown column : " + columnName );
}
},
session
);
}
@Test
public void testSessionBatchingUsage(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
var jdbcCoordinator = session.getJdbcCoordinator();
session.setJdbcBatchSize( 3 );
final String insertSql = "insert into SANDBOX_JDBC_TST( ID, NAME ) values ( ?, ? )";
final BatchBuilderImpl batchBuilder = new BatchBuilderImpl( 2 );
final BatchKey batchKey = new BasicBatchKey( "this" );
final Batch insertBatch = batchBuilder.buildBatch( batchKey, 3, SANDBOX_TBL, session, insertSql );
assertThat( insertBatch ).isNotNull();
final JournalingBatchObserver batchObserver = new JournalingBatchObserver();
insertBatch.addObserver( batchObserver );
final JdbcValueBindingsImpl jdbcValueBindings = sandboxInsertValueBindings( session );
// bind values for #1 - this does nothing at the JDBC level
jdbcValueBindings.bindValue( 1, SANDBOX_TBL, "ID", ParameterUsage.SET );
jdbcValueBindings.bindValue( "name", SANDBOX_TBL, "NAME", ParameterUsage.SET );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 0 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isFalse();
// add the values to the batch - this creates the prepared statement and binds the values
insertBatch.addToBatch( jdbcValueBindings, null );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 0 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isTrue();
// bind values for #2 - this does nothing at the JDBC level : we do still have the statement defining the batch
jdbcValueBindings.bindValue( 2, SANDBOX_TBL, "ID", ParameterUsage.SET );
jdbcValueBindings.bindValue( "another name", SANDBOX_TBL, "NAME", ParameterUsage.SET );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 0 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isTrue();
// add #2 to batch - we have not exceeded batch size, so we should not get an implicit execution
insertBatch.addToBatch( jdbcValueBindings, null );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 0 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isTrue();
// bind values for #3 - this does nothing at the JDBC level : we do still have the statement defining the batch
jdbcValueBindings.bindValue( 3, SANDBOX_TBL, "ID", ParameterUsage.SET );
jdbcValueBindings.bindValue( "yet another name", SANDBOX_TBL, "NAME", ParameterUsage.SET );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 0 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isTrue();
insertBatch.addToBatch( jdbcValueBindings, null );
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 0 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 1 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isTrue();
insertBatch.execute();
assertThat( batchObserver.getExplicitExecutionCount() ).isEqualTo( 1 );
assertThat( batchObserver.getImplicitExecutionCount() ).isEqualTo( 1 );
assertThat( jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() ).isFalse();
insertBatch.release();
} );
}
@BeforeEach
void exportSandboxSchema(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
var jdbcCoordinator = session.getJdbcCoordinator();
var logicalConnection = jdbcCoordinator.getLogicalConnection();
var dialect = session.getDialect();
// set up some tables to use
Statement statement = jdbcCoordinator.getStatementPreparer().createStatement();
String dropSql = dialect.getDropTableString( "SANDBOX_JDBC_TST" );
try {
jdbcCoordinator.getResultSetReturn().execute( statement, dropSql );
}
catch ( Exception e ) {
// ignore if the DB doesn't support "if exists" and the table doesn't exist
}
jdbcCoordinator.getResultSetReturn().execute( statement, "create table SANDBOX_JDBC_TST ( ID integer, NAME varchar(100) )" );
Assertions.assertTrue(
jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() );
Assertions.assertTrue( logicalConnection.isPhysicallyConnected() );
jdbcCoordinator.getLogicalConnection().getResourceRegistry().release( statement );
Assertions.assertFalse(
jdbcCoordinator.getLogicalConnection().getResourceRegistry().hasRegisteredResources() );
Assertions.assertTrue( logicalConnection.isPhysicallyConnected() ); // after_transaction specified
} );
}
@AfterEach
void cleanupTest(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> session.doWork( connection -> {
try ( Statement stmnt = connection.createStatement() ) {
stmnt.execute( session.getDialect().getDropTableString( "SANDBOX_JDBC_TST" ) );
}
} ) );
}
}
| BatchingTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/preconditions/DatePreconditionTests.java | {
"start": 1193,
"end": 1734
} | class ____ {
}
@Test
public void test() {
get("/preconditions")
.then()
.statusCode(200)
.header("Last-Modified", "Mon, 03 Dec 2007 10:15:30 GMT")
.body(Matchers.equalTo("foo"));
RestAssured
.with()
.header("If-Modified-Since", "Mon, 03 Dec 2007 10:15:30 GMT")
.get("/preconditions")
.then()
.statusCode(304);
}
@Path("/preconditions")
public static | Something |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.