language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/jdbc/JdbcProperties.java | {
"start": 467,
"end": 1738
} | class ____ {
private static final Logger log = Logger.getLogger( JdbcProperties.class );
public static final JdbcProperties INSTANCE = new JdbcProperties();
private final String url;
private final String user;
private final String password;
public JdbcProperties() {
Properties connectionProperties = new Properties();
InputStream inputStream = null;
try {
inputStream = Thread.currentThread()
.getContextClassLoader()
.getResourceAsStream( "hibernate.properties" );
try {
connectionProperties.load( inputStream );
url = resolveUrl( connectionProperties.getProperty( "hibernate.connection.url" ) );
resolveFromSettings(connectionProperties);
user = connectionProperties.getProperty( "hibernate.connection.username" );
password = connectionProperties.getProperty( "hibernate.connection.password" );
}
catch ( IOException e ) {
throw new IllegalArgumentException( e );
}
}
finally {
try {
if ( inputStream != null ) {
inputStream.close();
}
}
catch ( IOException ignore ) {
log.error( ignore.getMessage() );
}
}
}
public String getUrl() {
return url;
}
public String getUser() {
return user;
}
public String getPassword() {
return password;
}
}
| JdbcProperties |
java | hibernate__hibernate-orm | hibernate-graalvm/src/test/java/org/hibernate/graalvm/internal/StaticClassListsTest.java | {
"start": 7553,
"end": 7981
} | class ____ {
@Test
void checkNonDefaultConstructorsCanBeLoaded() {
Class[] classes = StaticClassLists.typesNeedingAllConstructorsAccessible();
for ( Class c : classes ) {
Constructor[] declaredConstructors = c.getDeclaredConstructors();
Assert.assertTrue( declaredConstructors.length > 0 );
if ( declaredConstructors.length == 1 ) {
//If there's only one, let's check that this | BasicConstructorsAvailable |
java | spring-projects__spring-framework | spring-core/src/main/java/org/springframework/core/env/CompositePropertySource.java | {
"start": 1848,
"end": 4414
} | class ____ extends EnumerablePropertySource<Object> {
private final Set<PropertySource<?>> propertySources = new LinkedHashSet<>();
/**
* Create a new {@code CompositePropertySource}.
* @param name the name of the property source
*/
public CompositePropertySource(String name) {
super(name);
}
@Override
public @Nullable Object getProperty(String name) {
for (PropertySource<?> propertySource : this.propertySources) {
Object candidate = propertySource.getProperty(name);
if (candidate != null) {
return candidate;
}
}
return null;
}
@Override
public boolean containsProperty(String name) {
for (PropertySource<?> propertySource : this.propertySources) {
if (propertySource.containsProperty(name)) {
return true;
}
}
return false;
}
@Override
public String[] getPropertyNames() {
List<String[]> namesList = new ArrayList<>(this.propertySources.size());
int total = 0;
for (PropertySource<?> propertySource : this.propertySources) {
if (!(propertySource instanceof EnumerablePropertySource<?> enumerablePropertySource)) {
throw new IllegalStateException(
"Failed to enumerate property names due to non-enumerable property source: " + propertySource);
}
String[] names = enumerablePropertySource.getPropertyNames();
namesList.add(names);
total += names.length;
}
Set<String> allNames = CollectionUtils.newLinkedHashSet(total);
namesList.forEach(names -> Collections.addAll(allNames, names));
return StringUtils.toStringArray(allNames);
}
/**
* Add the given {@link PropertySource} to the end of the chain.
* @param propertySource the PropertySource to add
*/
public void addPropertySource(PropertySource<?> propertySource) {
this.propertySources.add(propertySource);
}
/**
* Add the given {@link PropertySource} to the start of the chain.
* @param propertySource the PropertySource to add
* @since 4.1
*/
public void addFirstPropertySource(PropertySource<?> propertySource) {
List<PropertySource<?>> existing = new ArrayList<>(this.propertySources);
this.propertySources.clear();
this.propertySources.add(propertySource);
this.propertySources.addAll(existing);
}
/**
* Return all property sources that this composite source holds.
* @since 4.1.1
*/
public Collection<PropertySource<?>> getPropertySources() {
return this.propertySources;
}
@Override
public String toString() {
return getClass().getSimpleName() + " {name='" + this.name + "', propertySources=" + this.propertySources + "}";
}
}
| CompositePropertySource |
java | apache__camel | components/camel-bindy/src/main/java/org/apache/camel/dataformat/bindy/BindyAbstractFactory.java | {
"start": 5732,
"end": 9149
} | class ____ the Map Model
mapModel.put(obj.getClass().getName(), obj);
}
return mapModel;
}
/**
* Indicates whether this factory can support a row comprised of the identified classes
*
* @param classes the names of the classes in the row
* @return true if the model supports the identified classes
*/
public boolean supportsModel(Set<String> classes) {
return modelClassNames.containsAll(classes);
}
/**
* Generate a unique key
*
* @param key1 The key of the section number
* @param key2 The key of the position of the field
* @return the key generated
*/
protected static Integer generateKey(Integer key1, Integer key2) {
String key2Formatted;
String keyGenerated;
// BigIntegerFormatFactory added for ticket - camel-2773
if (key1 != null && key2 != null) {
key2Formatted = getNumberFormat().format((long) key2);
keyGenerated = String.valueOf(key1) + key2Formatted;
} else {
throw new IllegalArgumentException("@Section and/or @KeyValuePairDataField have not been defined");
}
return Integer.valueOf(keyGenerated);
}
private static NumberFormat getNumberFormat() {
// Get instance of NumberFormat
NumberFormat nf = NumberFormat.getInstance();
// set max number of digits to 3 (thousands)
nf.setMaximumIntegerDigits(3);
nf.setMinimumIntegerDigits(3);
return nf;
}
public static Object getDefaultValueForPrimitive(Class<?> clazz) {
if (clazz == byte.class) {
return Byte.MIN_VALUE;
} else if (clazz == short.class) {
return Short.MIN_VALUE;
} else if (clazz == int.class) {
return Integer.MIN_VALUE;
} else if (clazz == long.class) {
return Long.MIN_VALUE;
} else if (clazz == float.class) {
return Float.MIN_VALUE;
} else if (clazz == double.class) {
return Double.MIN_VALUE;
} else if (clazz == char.class) {
return Character.MIN_VALUE;
} else if (clazz == boolean.class) {
return false;
} else if (clazz == String.class) {
return "";
} else {
return null;
}
}
/**
* Find the carriage return set
*/
public String getCarriageReturn() {
return crlf;
}
/**
* Find the carriage return set
*/
public String getEndOfLine() {
return eol;
}
/**
* Format the object into a string according to the format rule defined
*/
@SuppressWarnings("unchecked")
public String formatString(Format<?> format, Object value) throws Exception {
String strValue = "";
if (value != null) {
try {
strValue = ((Format<Object>) format).format(value);
} catch (Exception e) {
throw new IllegalArgumentException("Formatting error detected for the value: " + value, e);
}
}
return strValue;
}
public String getLocale() {
return locale;
}
public void setLocale(String locale) {
this.locale = locale;
}
public void setFormatFactory(FormatFactory formatFactory) {
this.formatFactory = formatFactory;
}
}
| to |
java | reactor__reactor-core | reactor-core/src/main/java11/reactor/core/publisher/CallSiteSupplierFactory.java | {
"start": 1228,
"end": 3024
} | class ____.
StackWalker.getInstance();
}
@Override
public StackWalker.StackFrame[] apply(Stream<StackWalker.StackFrame> s) {
StackWalker.StackFrame[] result =
new StackWalker.StackFrame[10];
Iterator<StackWalker.StackFrame> iterator = s.iterator();
iterator.next(); // .get
int i = 0;
while (iterator.hasNext()) {
StackWalker.StackFrame frame = iterator.next();
if (i >= result.length) {
return new StackWalker.StackFrame[0];
}
result[i++] = frame;
if (isUserCode(frame.getClassName())) {
break;
}
}
StackWalker.StackFrame[] copy =
new StackWalker.StackFrame[i];
System.arraycopy(result, 0, copy, 0, i);
return copy;
}
/**
* Transform the current stack trace into a {@link String} representation,
* each element being prepended with a tabulation and appended with a
* newline.
*
* @return the string version of the stacktrace.
*/
@Override
public Supplier<String> get() {
StackWalker.StackFrame[] stack =
StackWalker.getInstance()
.walk(this);
if (stack.length == 0) {
return () -> "";
}
if (stack.length == 1) {
return () -> "\t" + stack[0].toString() + "\n";
}
return () -> {
StringBuilder sb = new StringBuilder();
for (int j = stack.length - 2; j > 0; j--) {
StackWalker.StackFrame previous = stack[j];
if (!full) {
if (previous.isNativeMethod()) {
continue;
}
String previousRow =
previous.getClassName() + "." + previous.getMethodName();
if (shouldSanitize(previousRow)) {
continue;
}
}
sb.append("\t")
.append(previous.toString())
.append("\n");
break;
}
sb.append("\t")
.append(stack[stack.length - 1].toString())
.append("\n");
return sb.toString();
};
}
} | loading |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/InconsistentCapitalizationTest.java | {
"start": 10044,
"end": 10482
} | class ____ {
Object aa;
Function<Object, Object> f =
new Function() {
public Object apply(Object aA) {
aa = aA;
return aA;
}
};
}
""")
.addOutputLines(
"out/Test.java",
"""
import java.util.function.Function;
| Test |
java | dropwizard__dropwizard | dropwizard-db/src/main/java/io/dropwizard/db/PooledDataSourceFactory.java | {
"start": 307,
"end": 1154
} | interface ____ {
/**
* Whether ORM tools allowed to add comments to SQL queries.
*
* @return {@code true}, if allowed
*/
boolean isAutoCommentsEnabled();
/**
* Returns the configuration properties for ORM tools.
*
* @return configuration properties as a map
*/
Map<String, String> getProperties();
/**
* Returns the timeout for awaiting a response from the database
* during connection health checks.
*
* @return the timeout as {@code Duration}
*/
Optional<Duration> getValidationQueryTimeout();
/**
* Returns the SQL query, which is being used for the database
* connection health check.
*
* @return the SQL query as a string
*/
Optional<String> getValidationQuery();
/**
* Returns the Java | PooledDataSourceFactory |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/IndexPattern.java | {
"start": 531,
"end": 1405
} | class ____ {
private final Source source;
private final String indexPattern;
public IndexPattern(Source source, String indexPattern) {
this.source = source;
this.indexPattern = indexPattern;
}
public String indexPattern() {
return indexPattern;
}
@Override
public int hashCode() {
return Objects.hash(indexPattern);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
IndexPattern other = (IndexPattern) obj;
return Objects.equals(indexPattern, other.indexPattern);
}
public Source source() {
return source;
}
@Override
public String toString() {
return indexPattern;
}
}
| IndexPattern |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/functions/aggregate/hyperloglog/HyperLogLogPlusPlusTest.java | {
"start": 1582,
"end": 6382
} | class ____ {
@Test
void testInvalidRelativeSD() {
assertThatThrownBy(() -> new HyperLogLogPlusPlus(0.4))
.satisfies(
anyCauseMatches(
IllegalArgumentException.class,
"HLL++ requires at least 4 bits for addressing. Use a lower error, at most 39%."));
}
@Test
void testInputAllNulls() {
HyperLogLogPlusPlus hll = new HyperLogLogPlusPlus(0.01);
HllBuffer buffer = createHllBuffer(hll);
long estimate = hll.query(buffer);
assertThat(estimate).isEqualTo(0);
}
@Test
void testDeterministicCardinalityEstimation() {
int repeats = 10;
testCardinalityEstimates(
new double[] {0.1, 0.05, 0.025, 0.01, 0.001},
new int[] {100, 500, 1000, 5000, 10000, 50000, 100000, 500000, 1000000},
i -> i / repeats,
i -> i / repeats);
}
@Test
void testMerge() {
HyperLogLogPlusPlus hll = new HyperLogLogPlusPlus(0.05);
HllBuffer buffer1a = createHllBuffer(hll);
HllBuffer buffer1b = createHllBuffer(hll);
HllBuffer buffer2 = createHllBuffer(hll);
// Create the
// Add the lower half
int i = 0;
while (i < 500000) {
hll.updateByHashcode(buffer1a, hashInt(i, DEFAULT_SEED));
i += 1;
}
// Add the upper half
i = 500000;
while (i < 1000000) {
hll.updateByHashcode(buffer1b, hashInt(i, DEFAULT_SEED));
i += 1;
}
// Merge the lower and upper halves.
hll.merge(buffer1a, buffer1b);
// Create the other buffer in reverse
i = 999999;
while (i >= 0) {
hll.updateByHashcode(buffer2, hashInt(i, DEFAULT_SEED));
i -= 1;
}
assertThat(buffer2.array).isEqualTo(buffer1a.array);
}
@Test
void testRandomCardinalityEstimation() {
Random srng = new Random(323981238L);
Set<Integer> seen = new HashSet<>();
Function<Integer, Integer> update =
i -> {
int value = srng.nextInt();
seen.add(value);
return value;
};
Function<Integer, Integer> eval =
n -> {
int cardinality = seen.size();
seen.clear();
return cardinality;
};
testCardinalityEstimates(
new double[] {0.05, 0.01}, new int[] {100, 10000, 500000}, update, eval);
}
@Test
void testPositiveAndNegativeZero() {
HyperLogLogPlusPlus hll = new HyperLogLogPlusPlus(0.05);
HllBuffer buffer = createHllBuffer(hll);
hll.updateByHashcode(buffer, hashLong(Double.doubleToLongBits(0.0d), DEFAULT_SEED));
hll.updateByHashcode(buffer, hashLong(Double.doubleToLongBits(-0.0d), DEFAULT_SEED));
long estimate = hll.query(buffer);
double error = Math.abs(estimate - 1.0d);
// not handle in HyperLogLogPlusPlus but in ApproximateCountDistinct
assertThat(error < hll.trueRsd() * 3.0d).isFalse();
}
@Test
void testNaN() {
HyperLogLogPlusPlus hll = new HyperLogLogPlusPlus(0.05);
HllBuffer buffer = createHllBuffer(hll);
hll.updateByHashcode(buffer, hashLong(Double.doubleToLongBits(Double.NaN), DEFAULT_SEED));
long estimate = hll.query(buffer);
double error = Math.abs(estimate - 1.0d);
assertThat(error < hll.trueRsd() * 3.0d).isTrue();
}
private void testCardinalityEstimates(
double[] rsds,
int[] ns,
Function<Integer, Integer> updateFun,
Function<Integer, Integer> evalFun) {
for (double rsd : rsds) {
for (int n : ns) {
HyperLogLogPlusPlus hll = new HyperLogLogPlusPlus(rsd);
HllBuffer buffer = createHllBuffer(hll);
for (int i = 0; i < n; ++i) {
hll.updateByHashcode(buffer, hashInt(updateFun.apply(i), DEFAULT_SEED));
}
long estimate = hll.query(buffer);
int cardinality = evalFun.apply(n);
double error = Math.abs((estimate * 1.0 / cardinality) - 1.0d);
assertThat(error < hll.trueRsd() * 3.0d).isTrue();
}
}
}
public HllBuffer createHllBuffer(HyperLogLogPlusPlus hll) {
HllBuffer buffer = new HllBuffer();
buffer.array = new long[hll.getNumWords()];
int word = 0;
while (word < hll.getNumWords()) {
buffer.array[word] = 0;
word++;
}
return buffer;
}
}
| HyperLogLogPlusPlusTest |
java | netty__netty | buffer/src/test/java/io/netty/buffer/AbstractPooledByteBufTest.java | {
"start": 1068,
"end": 5184
} | class ____ extends AbstractByteBufTest {
protected abstract ByteBuf alloc(int length, int maxCapacity);
@Override
protected ByteBuf newBuffer(int length, int maxCapacity) {
ByteBuf buffer = alloc(length, maxCapacity);
// Testing if the writerIndex and readerIndex are correct when allocate and also after we reset the mark.
assertEquals(0, buffer.writerIndex());
assertEquals(0, buffer.readerIndex());
buffer.resetReaderIndex();
buffer.resetWriterIndex();
assertEquals(0, buffer.writerIndex());
assertEquals(0, buffer.readerIndex());
return buffer;
}
@Test
public void ensureWritableWithEnoughSpaceShouldNotThrow() {
ByteBuf buf = newBuffer(1, 10);
buf.ensureWritable(3);
assertThat(buf.writableBytes()).isGreaterThanOrEqualTo(3);
buf.release();
}
@Test
public void ensureWritableWithNotEnoughSpaceShouldThrow() {
final ByteBuf buf = newBuffer(1, 10);
try {
assertThrows(IndexOutOfBoundsException.class, new Executable() {
@Override
public void execute() {
buf.ensureWritable(11);
}
});
} finally {
buf.release();
}
}
@Override
@Test
public void testMaxFastWritableBytes() {
ByteBuf buffer = newBuffer(150, 500).writerIndex(100);
assertEquals(50, buffer.writableBytes());
assertEquals(150, buffer.capacity());
assertEquals(500, buffer.maxCapacity());
assertEquals(400, buffer.maxWritableBytes());
int chunkSize = pooledByteBuf(buffer).maxLength;
assertTrue(chunkSize >= 150);
int remainingInAlloc = Math.min(chunkSize - 100, 400);
assertEquals(remainingInAlloc, buffer.maxFastWritableBytes());
// write up to max, chunk alloc should not change (same handle)
long handleBefore = pooledByteBuf(buffer).handle;
buffer.writeBytes(new byte[remainingInAlloc]);
assertEquals(handleBefore, pooledByteBuf(buffer).handle);
assertEquals(0, buffer.maxFastWritableBytes());
// writing one more should trigger a reallocation (new handle)
buffer.writeByte(7);
assertNotEquals(handleBefore, pooledByteBuf(buffer).handle);
// should not exceed maxCapacity even if chunk alloc does
buffer.capacity(500);
assertEquals(500 - buffer.writerIndex(), buffer.maxFastWritableBytes());
buffer.release();
}
private static PooledByteBuf<?> pooledByteBuf(ByteBuf buffer) {
// might need to unwrap if swapped (LE) and/or leak-aware-wrapped
while (!(buffer instanceof PooledByteBuf)) {
buffer = buffer.unwrap();
}
return (PooledByteBuf<?>) buffer;
}
@Test
public void testEnsureWritableDoesntGrowTooMuch() {
ByteBuf buffer = newBuffer(150, 500).writerIndex(100);
assertEquals(50, buffer.writableBytes());
int fastWritable = buffer.maxFastWritableBytes();
assertTrue(fastWritable > 50);
long handleBefore = pooledByteBuf(buffer).handle;
// capacity expansion should not cause reallocation
// (should grow precisely the specified amount)
buffer.ensureWritable(fastWritable);
assertEquals(handleBefore, pooledByteBuf(buffer).handle);
assertEquals(100 + fastWritable, buffer.capacity());
assertEquals(buffer.writableBytes(), buffer.maxFastWritableBytes());
buffer.release();
}
@Test
public void testIsContiguous() {
ByteBuf buf = newBuffer(4);
assertTrue(buf.isContiguous());
buf.release();
}
@Test
public void distinctBuffersMustNotOverlap() {
ByteBuf a = newBuffer(16384);
ByteBuf b = newBuffer(65536);
a.setByte(a.capacity() - 1, 1);
b.setByte(0, 2);
try {
assertEquals(1, a.getByte(a.capacity() - 1));
} finally {
a.release();
b.release();
}
}
}
| AbstractPooledByteBufTest |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/BeanWrapperTests.java | {
"start": 13150,
"end": 13456
} | class ____ {
public void setName(String name) {
}
public void setMyString(String string) {
}
public void setMyStrings(String string) {
}
public void setMyStriNg(String string) {
}
public void setMyStringss(String string) {
}
}
@SuppressWarnings("serial")
public static | IntelliBean |
java | netty__netty | handler/src/main/java/io/netty/handler/ssl/SslHandler.java | {
"start": 7232,
"end": 8851
} | class ____ extends ByteToMessageDecoder implements ChannelOutboundHandler {
private static final InternalLogger logger =
InternalLoggerFactory.getInstance(SslHandler.class);
private static final Pattern IGNORABLE_ERROR_MESSAGE = Pattern.compile(
"^.*(?:connection.*(?:reset|closed|abort|broken)|broken.*pipe).*$", Pattern.CASE_INSENSITIVE);
private static final int STATE_SENT_FIRST_MESSAGE = 1;
private static final int STATE_FLUSHED_BEFORE_HANDSHAKE = 1 << 1;
private static final int STATE_READ_DURING_HANDSHAKE = 1 << 2;
private static final int STATE_HANDSHAKE_STARTED = 1 << 3;
/**
* Set by wrap*() methods when something is produced.
* {@link #channelReadComplete(ChannelHandlerContext)} will check this flag, clear it, and call ctx.flush().
*/
private static final int STATE_NEEDS_FLUSH = 1 << 4;
private static final int STATE_OUTBOUND_CLOSED = 1 << 5;
private static final int STATE_CLOSE_NOTIFY = 1 << 6;
private static final int STATE_PROCESS_TASK = 1 << 7;
/**
* This flag is used to determine if we need to call {@link ChannelHandlerContext#read()} to consume more data
* when {@link ChannelConfig#isAutoRead()} is {@code false}.
*/
private static final int STATE_FIRE_CHANNEL_READ = 1 << 8;
private static final int STATE_UNWRAP_REENTRY = 1 << 9;
/**
* <a href="https://tools.ietf.org/html/rfc5246#section-6.2">2^14</a> which is the maximum sized plaintext chunk
* allowed by the TLS RFC.
*/
private static final int MAX_PLAINTEXT_LENGTH = 16 * 1024;
private | SslHandler |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/bug/Bug_for_issue_415.java | {
"start": 1494,
"end": 1767
} | class ____ {
private String addrDetail;
public String getAddrDetail() {
return addrDetail;
}
public void setAddrDetail(String addressDetail) {
this.addrDetail = addressDetail;
}
}
public static | Address |
java | spring-projects__spring-framework | spring-webflux/src/test/java/org/springframework/web/reactive/function/server/ToStringVisitorTests.java | {
"start": 1651,
"end": 3589
} | class ____ {
@Test
void nested() {
HandlerFunction<ServerResponse> handler = new SimpleHandlerFunction();
RouterFunction<ServerResponse> routerFunction = route()
.path("/foo", builder ->
builder.path("/bar", () -> route()
.GET("/baz", handler)
.build())
)
.build();
ToStringVisitor visitor = new ToStringVisitor();
routerFunction.accept(visitor);
String result = visitor.toString();
String expected = """
/foo => {
/bar => {
(GET && /baz) ->\s
}
}""".replace('\t', ' ');
assertThat(result).isEqualTo(expected);
}
@Test
void predicates() {
testPredicate(methods(HttpMethod.GET), "GET");
testPredicate(methods(HttpMethod.GET, HttpMethod.POST), "[GET, POST]");
testPredicate(path("/foo"), "/foo");
testPredicate(contentType(MediaType.APPLICATION_JSON), "Content-Type: application/json");
ToStringVisitor visitor = new ToStringVisitor();
contentType(MediaType.APPLICATION_JSON, MediaType.TEXT_PLAIN).accept(visitor);
assertThat(visitor.toString()).matches("Content-Type: \\[.+, .+\\]").contains("application/json", "text/plain");
testPredicate(accept(MediaType.APPLICATION_JSON), "Accept: application/json");
testPredicate(queryParam("foo", "bar"), "?foo == bar");
testPredicate(method(HttpMethod.GET).and(path("/foo")), "(GET && /foo)");
testPredicate(method(HttpMethod.GET).or(path("/foo")), "(GET || /foo)");
testPredicate(method(HttpMethod.GET).negate(), "!(GET)");
testPredicate(GET("/foo")
.or(contentType(MediaType.TEXT_PLAIN))
.and(accept(MediaType.APPLICATION_JSON).negate()),
"(((GET && /foo) || Content-Type: text/plain) && !(Accept: application/json))");
}
private void testPredicate(RequestPredicate predicate, String expected) {
ToStringVisitor visitor = new ToStringVisitor();
predicate.accept(visitor);
assertThat(visitor).asString().isEqualTo(expected);
}
private static | ToStringVisitorTests |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/spring/SpringBeanPostProcessorEnabledTest.java | {
"start": 1728,
"end": 1924
} | class ____ {
@Produce("mock:result")
private ProducerTemplate bar;
public void somewhere(String input) {
bar.sendBody("Hello " + input);
}
}
}
| MyFoo |
java | apache__logging-log4j2 | log4j-taglib/src/main/java/org/apache/logging/log4j/taglib/TraceTag.java | {
"start": 897,
"end": 971
} | class ____ the {@code <log:trace>} tag.
*
* @since 2.0
*/
public | implements |
java | apache__camel | components/camel-stax/src/test/java/org/apache/camel/language/xtokenizer/XMLTokenExpressionIteratorTest.java | {
"start": 1264,
"end": 27870
} | class ____ {
private static final byte[] TEST_BODY = ("<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle/><aunt>emma</aunt>"
+ "<c:parent some_attr='1' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='a' anotherAttr='a'></c:child>"
+ "<c:child some_attr='b' anotherAttr='b'/>"
+ "</c:parent>"
+ "<c:parent some_attr='2' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='c' anotherAttr='c'></c:child>"
+ "<c:child some_attr='d' anotherAttr='d'/>"
+ "</c:parent>"
+ "</grandparent>"
+ "<grandparent><uncle>ben</uncle><aunt/>"
+ "<c:parent some_attr='3' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='e' anotherAttr='e'></c:child>"
+ "<c:child some_attr='f' anotherAttr='f'/>"
+ "</c:parent>"
+ "</grandparent>"
+ "</g:greatgrandparent>")
.getBytes();
// mixing a default namespace with an explicit namespace for child
private static final byte[] TEST_BODY_NS_MIXED = ("<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<parent some_attr='1' xmlns:c='urn:c' xmlns=\"urn:c\">"
+ "<child some_attr='a' anotherAttr='a'></child>"
+ "<x:child xmlns:x='urn:c' some_attr='b' anotherAttr='b'/>"
+ "</parent>"
+ "<c:parent some_attr='2' xmlns:c='urn:c'>"
+ "<child some_attr='c' anotherAttr='c' xmlns='urn:c'></child>"
+ "<c:child some_attr='d' anotherAttr='d'/>"
+ "</c:parent>"
+ "</grandparent>"
+ "</g:greatgrandparent>")
.getBytes();
// mixing a no namespace with an explicit namespace for child
private static final byte[] TEST_BODY_NO_NS_MIXED = ("<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<parent some_attr='1' xmlns:c='urn:c' xmlns=\"urn:c\">"
+ "<child some_attr='a' anotherAttr='a' xmlns=''></child>"
+ "<x:child xmlns:x='urn:c' some_attr='b' anotherAttr='b'/>"
+ "</parent>"
+ "<c:parent some_attr='2' xmlns:c='urn:c'>"
+ "<child some_attr='c' anotherAttr='c'></child>"
+ "<c:child some_attr='d' anotherAttr='d'/>"
+ "</c:parent>"
+ "</grandparent>"
+ "</g:greatgrandparent>")
.getBytes();
// mixing different namespaces within a tag
private static final byte[] TEST_BODY_MIXED_CHILDREN = ("<?xml version='1.0' encoding='UTF-8'?>"
+ "<greatgrandparent xmlns='urn:g' xmlns:c='urn:c' xmlns:x='urn:x'>"
+ "<grandparent>"
+ "<x:uncle>bob</x:uncle>"
+ "<x:aunt>emma</x:aunt>"
+ "</grandparent>"
+ "<grandparent>"
+ "<c:parent some_attr='1'>"
+ "<c:child some_attr='a' anotherAttr='a'></c:child>"
+ "<c:child some_attr='b' anotherAttr='b' />"
+ "</c:parent>"
+ "<c:parent some_attr='2'>"
+ "<c:child some_attr='c' anotherAttr='c'></c:child>"
+ "<c:child some_attr='d' anotherAttr='d' />"
+ "</c:parent>"
+ "</grandparent>"
+ "<grandparent>"
+ "<x:uncle>ben</x:uncle>"
+ "<x:aunt>jenna</x:aunt>"
+ "<c:parent some_attr='3'>"
+ "<c:child some_attr='e' anotherAttr='e'></c:child>"
+ "<c:child some_attr='f' anotherAttr='f' />"
+ "</c:parent>"
+ "</grandparent>"
+ "</greatgrandparent>")
.getBytes();
private static final String RESULTS_CW1 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle/><aunt>emma</aunt>"
+ "<c:parent some_attr='1' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='a' anotherAttr='a'></c:child>"
+ "</c:parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_CW2 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle/><aunt>emma</aunt>"
+ "<c:parent some_attr='1' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='b' anotherAttr='b'/>"
+ "</c:parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_CW3 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle/><aunt>emma</aunt>"
+ "<c:parent some_attr='2' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='c' anotherAttr='c'></c:child>"
+ "</c:parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_CW4 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle/><aunt>emma</aunt>"
+ "<c:parent some_attr='2' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='d' anotherAttr='d'/>"
+ "</c:parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_CW5 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle>ben</uncle><aunt/>"
+ "<c:parent some_attr='3' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='e' anotherAttr='e'></c:child>"
+ "</c:parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_CW6 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle>ben</uncle><aunt/>"
+ "<c:parent some_attr='3' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='f' anotherAttr='f'/>"
+ "</c:parent></grandparent></g:greatgrandparent>";
private static final String[] RESULTS_CHILD_WRAPPED = {
RESULTS_CW1,
RESULTS_CW2,
RESULTS_CW3,
RESULTS_CW4,
RESULTS_CW5,
RESULTS_CW6
};
private static final String[] RESULTS_CHILD_MIXED = {
"<child some_attr='a' anotherAttr='a' xmlns=\"urn:c\" xmlns:c=\"urn:c\" xmlns:g=\"urn:g\"></child>",
"<x:child xmlns:x='urn:c' some_attr='b' anotherAttr='b' xmlns='urn:c' xmlns:c='urn:c' xmlns:g='urn:g'/>",
"<child some_attr='c' anotherAttr='c' xmlns='urn:c' xmlns:g='urn:g' xmlns:c='urn:c'></child>",
"<c:child some_attr='d' anotherAttr='d' xmlns:g=\"urn:g\" xmlns:c=\"urn:c\"/>"
};
private static final String RESULTS_CMW1
= "<?xml version='1.0' encoding='UTF-8'?><g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<parent some_attr='1' xmlns:c='urn:c' xmlns=\"urn:c\">"
+ "<child some_attr='a' anotherAttr='a'></child></parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_CMW2
= "<?xml version='1.0' encoding='UTF-8'?><g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<parent some_attr='1' xmlns:c='urn:c' xmlns=\"urn:c\">"
+ "<x:child xmlns:x='urn:c' some_attr='b' anotherAttr='b'/></parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_CMW3
= "<?xml version='1.0' encoding='UTF-8'?><g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<c:parent some_attr='2' xmlns:c='urn:c'>"
+ "<child some_attr='c' anotherAttr='c' xmlns='urn:c'></child></c:parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_CMW4
= "<?xml version='1.0' encoding='UTF-8'?><g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<c:parent some_attr='2' xmlns:c='urn:c'>"
+ "<c:child some_attr='d' anotherAttr='d'/></c:parent></grandparent></g:greatgrandparent>";
private static final String[] RESULTS_CHILD_MIXED_WRAPPED = {
RESULTS_CMW1,
RESULTS_CMW2,
RESULTS_CMW3,
RESULTS_CMW4
};
private static final String[] RESULTS_CHILD = {
"<c:child some_attr='a' anotherAttr='a' xmlns:c=\"urn:c\" xmlns:d=\"urn:d\" xmlns:g=\"urn:g\"></c:child>",
"<c:child some_attr='b' anotherAttr='b' xmlns:c=\"urn:c\" xmlns:d=\"urn:d\" xmlns:g=\"urn:g\"/>",
"<c:child some_attr='c' anotherAttr='c' xmlns:c=\"urn:c\" xmlns:d=\"urn:d\" xmlns:g=\"urn:g\"></c:child>",
"<c:child some_attr='d' anotherAttr='d' xmlns:c=\"urn:c\" xmlns:d=\"urn:d\" xmlns:g=\"urn:g\"/>",
"<c:child some_attr='e' anotherAttr='e' xmlns:c=\"urn:c\" xmlns:d=\"urn:d\" xmlns:g=\"urn:g\"></c:child>",
"<c:child some_attr='f' anotherAttr='f' xmlns:c=\"urn:c\" xmlns:d=\"urn:d\" xmlns:g=\"urn:g\"/>"
};
private static final String[] RESULTS_CHILD_NO_NS_MIXED = {
"<child some_attr='a' anotherAttr='a' xmlns='' xmlns:c='urn:c' xmlns:g='urn:g'></child>",
"<child some_attr='c' anotherAttr='c' xmlns:g=\"urn:g\" xmlns:c=\"urn:c\"></child>",
};
private static final String RESULT_CNNMW1
= "<?xml version='1.0' encoding='UTF-8'?><g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<parent some_attr='1' xmlns:c='urn:c' xmlns=\"urn:c\">"
+ "<child some_attr='a' anotherAttr='a' xmlns=''></child></parent></grandparent></g:greatgrandparent>";
private static final String RESULT_CNNMW2
= "<?xml version='1.0' encoding='UTF-8'?><g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<c:parent some_attr='2' xmlns:c='urn:c'>"
+ "<child some_attr='c' anotherAttr='c'></child></c:parent></grandparent></g:greatgrandparent>";
// note that there is no preceding sibling to the extracted child
private static final String[] RESULTS_CHILD_NO_NS_MIXED_WRAPPED = {
RESULT_CNNMW1,
RESULT_CNNMW2,
};
private static final String[] RESULTS_CHILD_NS_MIXED = {
"<x:child xmlns:x='urn:c' some_attr='b' anotherAttr='b' xmlns='urn:c' xmlns:c='urn:c' xmlns:g='urn:g'/>",
"<c:child some_attr='d' anotherAttr='d' xmlns:g=\"urn:g\" xmlns:c=\"urn:c\"/>"
};
private static final String RESULTS_CNMW1
= "<?xml version='1.0' encoding='UTF-8'?><g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<parent some_attr='1' xmlns:c='urn:c' xmlns=\"urn:c\">"
+ "<child some_attr='a' anotherAttr='a' xmlns=''></child>"
+ "<x:child xmlns:x='urn:c' some_attr='b' anotherAttr='b'/></parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_CNMW2
= "<?xml version='1.0' encoding='UTF-8'?><g:greatgrandparent xmlns:g='urn:g'><grandparent>"
+ "<c:parent some_attr='2' xmlns:c='urn:c'>"
+ "<child some_attr='c' anotherAttr='c'></child>"
+ "<c:child some_attr='d' anotherAttr='d'/></c:parent></grandparent></g:greatgrandparent>";
// note that there is a preceding sibling to the extracted child
private static final String[] RESULTS_CHILD_NS_MIXED_WRAPPED = {
RESULTS_CNMW1,
RESULTS_CNMW2
};
private static final String RESULTS_PW1 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle/><aunt>emma</aunt>"
+ "<c:parent some_attr='1' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='a' anotherAttr='a'></c:child>"
+ "<c:child some_attr='b' anotherAttr='b'/>"
+ "</c:parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_PW2 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle/><aunt>emma</aunt>"
+ "<c:parent some_attr='2' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='c' anotherAttr='c'></c:child>"
+ "<c:child some_attr='d' anotherAttr='d'/>"
+ "</c:parent></grandparent></g:greatgrandparent>";
private static final String RESULTS_PW3 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle>ben</uncle><aunt/>"
+ "<c:parent some_attr='3' xmlns:c='urn:c' xmlns:d=\"urn:d\">"
+ "<c:child some_attr='e' anotherAttr='e'></c:child>"
+ "<c:child some_attr='f' anotherAttr='f'/>"
+ "</c:parent></grandparent></g:greatgrandparent>";
private static final String[] RESULTS_PARENT_WRAPPED = {
RESULTS_PW1,
RESULTS_PW2,
RESULTS_PW3,
};
private static final String RESULTS_P1 = "<c:parent some_attr='1' xmlns:c='urn:c' xmlns:d=\"urn:d\" xmlns:g='urn:g'>"
+ "<c:child some_attr='a' anotherAttr='a'></c:child>"
+ "<c:child some_attr='b' anotherAttr='b'/>"
+ "</c:parent>";
private static final String RESULTS_P2 = "<c:parent some_attr='2' xmlns:c='urn:c' xmlns:d=\"urn:d\" xmlns:g='urn:g'>"
+ "<c:child some_attr='c' anotherAttr='c'></c:child>"
+ "<c:child some_attr='d' anotherAttr='d'/>"
+ "</c:parent>";
private static final String RESULTS_P3 = "<c:parent some_attr='3' xmlns:c='urn:c' xmlns:d=\"urn:d\" xmlns:g='urn:g'>"
+ "<c:child some_attr='e' anotherAttr='e'></c:child>"
+ "<c:child some_attr='f' anotherAttr='f'/>"
+ "</c:parent>";
private static final String[] RESULTS_PARENT = {
RESULTS_P1,
RESULTS_P2,
RESULTS_P3,
};
private static final String RESULTS_AW1 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle/><aunt>emma</aunt>"
+ "</grandparent></g:greatgrandparent>";
private static final String RESULTS_AW2 = "<?xml version='1.0' encoding='UTF-8'?>"
+ "<g:greatgrandparent xmlns:g='urn:g'><grandparent><uncle>ben</uncle><aunt/>"
+ "</grandparent></g:greatgrandparent>";
private static final String[] RESULTS_AUNT_WRAPPED = {
RESULTS_AW1,
RESULTS_AW2
};
private static final String[] RESULTS_AUNT = {
"<aunt xmlns:g=\"urn:g\">emma</aunt>",
"<aunt xmlns:g=\"urn:g\"/>"
};
private static final String[] RESULTS_AUNT_UNWRAPPED = {
"emma",
""
};
private static final String[] RESULTS_GRANDPARENT_TEXT = {
"emma",
"ben"
};
private static final String[] RESULTS_AUNT_AND_UNCLE = {
"<x:uncle xmlns=\"urn:g\" xmlns:x=\"urn:x\" xmlns:c=\"urn:c\">bob</x:uncle>",
"<x:aunt xmlns=\"urn:g\" xmlns:x=\"urn:x\" xmlns:c=\"urn:c\">emma</x:aunt>",
"<x:uncle xmlns=\"urn:g\" xmlns:x=\"urn:x\" xmlns:c=\"urn:c\">ben</x:uncle>",
"<x:aunt xmlns=\"urn:g\" xmlns:x=\"urn:x\" xmlns:c=\"urn:c\">jenna</x:aunt>"
};
private static final String[] RESULTS_NULL = {
};
private Map<String, String> nsmap;
@BeforeEach
public void setUp() {
nsmap = new HashMap<>();
nsmap.put("G", "urn:g");
nsmap.put("C", "urn:c");
}
@Test
public void testExtractChild() throws Exception {
invokeAndVerify("//C:child", 'w', new ByteArrayInputStream(TEST_BODY), RESULTS_CHILD_WRAPPED);
}
@Test
public void testExtractChildInjected() throws Exception {
String[] result = RESULTS_CHILD;
invokeAndVerify("//C:child", 'i', new ByteArrayInputStream(TEST_BODY), result);
}
@Test
public void testExtractChildNSMixed() throws Exception {
invokeAndVerify("//*:child", 'w', new ByteArrayInputStream(TEST_BODY_NS_MIXED), RESULTS_CHILD_MIXED_WRAPPED);
}
@Test
public void testExtractChildNSMixedInjected() throws Exception {
String[] result = RESULTS_CHILD_MIXED;
invokeAndVerify("//*:child", 'i', new ByteArrayInputStream(TEST_BODY_NS_MIXED), result);
}
@Test
public void testExtractAnyChild() throws Exception {
invokeAndVerify("//*:child", 'w', new ByteArrayInputStream(TEST_BODY), RESULTS_CHILD_WRAPPED);
}
@Test
public void testExtractCxxxd() throws Exception {
String[] result = RESULTS_CHILD;
invokeAndVerify("//C:c*d", 'i', new ByteArrayInputStream(TEST_BODY), result);
}
@Test
public void testExtractUnqualifiedChild() throws Exception {
invokeAndVerify("//child", 'w', new ByteArrayInputStream(TEST_BODY), RESULTS_NULL);
}
@Test
public void testExtractSomeUnqualifiedChild() throws Exception {
invokeAndVerify("//child", 'w', new ByteArrayInputStream(TEST_BODY_NO_NS_MIXED), RESULTS_CHILD_NO_NS_MIXED_WRAPPED);
}
@Test
public void testExtractSomeUnqualifiedChildInjected() throws Exception {
String[] result = RESULTS_CHILD_NO_NS_MIXED;
invokeAndVerify("//child", 'i', new ByteArrayInputStream(TEST_BODY_NO_NS_MIXED), result);
}
@Test
public void testExtractSomeQualifiedChild() throws Exception {
nsmap.put("", "urn:c");
invokeAndVerify("//child", 'w', new ByteArrayInputStream(TEST_BODY_NO_NS_MIXED), RESULTS_CHILD_NS_MIXED_WRAPPED);
}
@Test
public void testExtractSomeQualifiedChildInjected() throws Exception {
nsmap.put("", "urn:c");
String[] result = RESULTS_CHILD_NS_MIXED;
invokeAndVerify("//child", 'i', new ByteArrayInputStream(TEST_BODY_NO_NS_MIXED), result);
}
@Test
public void testExtractWithNullNamespaceMap() throws Exception {
nsmap = null;
String[] result = RESULTS_CHILD_NO_NS_MIXED;
invokeAndVerify("//child", 'i', new ByteArrayInputStream(TEST_BODY_NO_NS_MIXED), result);
}
@Test
public void testExtractChildWithAncestorGGPdGP() throws Exception {
invokeAndVerify("/G:greatgrandparent/grandparent//C:child",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_CHILD_WRAPPED);
}
@Test
public void testExtractChildWithAncestorGGPdP() throws Exception {
invokeAndVerify("/G:greatgrandparent//C:parent/C:child",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_CHILD_WRAPPED);
}
@Test
public void testExtractChildWithAncestorGPddP() throws Exception {
invokeAndVerify("//grandparent//C:parent/C:child",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_CHILD_WRAPPED);
}
@Test
public void testExtractChildWithAncestorGPdP() throws Exception {
invokeAndVerify("//grandparent/C:parent/C:child",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_CHILD_WRAPPED);
}
@Test
public void testExtractChildWithAncestorP() throws Exception {
invokeAndVerify("//C:parent/C:child",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_CHILD_WRAPPED);
}
@Test
public void testExtractChildWithAncestorGGPdGPdP() throws Exception {
invokeAndVerify("/G:greatgrandparent/grandparent/C:parent/C:child",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_CHILD_WRAPPED);
}
@Test
public void testExtractParent() throws Exception {
invokeAndVerify("//C:parent",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_PARENT_WRAPPED);
}
@Test
public void testExtractParentInjected() throws Exception {
invokeAndVerify("//C:parent",
'i', new ByteArrayInputStream(TEST_BODY), RESULTS_PARENT);
}
@Test
public void testExtractAuntWC1() throws Exception {
invokeAndVerify("//a*t",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_AUNT_WRAPPED);
}
@Test
public void testExtractAuntWC2() throws Exception {
invokeAndVerify("//au?t",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_AUNT_WRAPPED);
}
@Test
public void testExtractAunt() throws Exception {
invokeAndVerify("//aunt",
'w', new ByteArrayInputStream(TEST_BODY), RESULTS_AUNT_WRAPPED);
}
@Test
public void testExtractAuntInjected() throws Exception {
invokeAndVerify("//aunt",
'i', new ByteArrayInputStream(TEST_BODY), RESULTS_AUNT);
}
@Test
public void testExtractAuntUnwrapped() throws Exception {
invokeAndVerify("//aunt",
'u', new ByteArrayInputStream(TEST_BODY), RESULTS_AUNT_UNWRAPPED);
}
@Test
public void testExtractGrandParentText() throws Exception {
invokeAndVerify("//grandparent",
't', new ByteArrayInputStream(TEST_BODY), RESULTS_GRANDPARENT_TEXT);
}
@Test
public void testExtractAuntAndUncleByNamespace() throws Exception {
nsmap.put("X", "urn:x");
invokeAndVerify("//G:grandparent/X:*",
'i', new ByteArrayInputStream(TEST_BODY_MIXED_CHILDREN), RESULTS_AUNT_AND_UNCLE);
}
private void invokeAndVerify(String path, char mode, InputStream in, String[] expected) throws Exception {
XMLTokenExpressionIterator xtei = new XMLTokenExpressionIterator(path, mode);
xtei.setNamespaces(nsmap);
Iterator<?> it = xtei.createIterator(in);
List<String> results = new ArrayList<>();
while (it.hasNext()) {
results.add((String) it.next());
}
((Closeable) it).close();
assertEquals(expected.length, results.size(), "token count");
for (int i = 0; i < expected.length; i++) {
String expectedToken = expected[i];
if (expectedToken.startsWith("<")) {
XmlAssert.assertThat(results.get(i)).and(expectedToken).areIdentical();
} else {
assertEquals(expectedToken, results.get(i), "mismatch [" + i + "]");
}
}
}
}
| XMLTokenExpressionIteratorTest |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/cluster/routing/allocation/ElectReplicaAsPrimaryDuringRelocationTests.java | {
"start": 1389,
"end": 5170
} | class ____ extends ESAllocationTestCase {
public void testElectReplicaAsPrimaryDuringRelocation() {
AllocationService strategy = createAllocationService(
Settings.builder().put("cluster.routing.allocation.node_concurrent_recoveries", 10).build()
);
logger.info("Building initial routing table");
Metadata metadata = Metadata.builder()
.put(IndexMetadata.builder("test").settings(settings(IndexVersion.current())).numberOfShards(2).numberOfReplicas(1))
.build();
RoutingTable initialRoutingTable = RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY)
.addAsNew(metadata.getProject().index("test"))
.build();
ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).routingTable(initialRoutingTable).build();
logger.info("Adding two nodes and performing rerouting");
clusterState = ClusterState.builder(clusterState)
.nodes(DiscoveryNodes.builder().add(newNode("node1")).add(newNode("node2")))
.build();
clusterState = strategy.reroute(clusterState, "reroute", ActionListener.noop());
logger.info("Start the primary shards");
clusterState = startInitializingShardsAndReroute(strategy, clusterState);
logger.info("Start the replica shards");
ClusterState resultingState = startInitializingShardsAndReroute(strategy, clusterState);
assertThat(resultingState, not(equalTo(clusterState)));
clusterState = resultingState;
RoutingNodes routingNodes = clusterState.getRoutingNodes();
assertThat(clusterState.routingTable().index("test").size(), equalTo(2));
assertThat(routingNodes.node("node1").numberOfShardsWithState(STARTED), equalTo(2));
assertThat(routingNodes.node("node2").numberOfShardsWithState(STARTED), equalTo(2));
logger.info("Start another node and perform rerouting");
clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).add(newNode("node3"))).build();
clusterState = strategy.reroute(clusterState, "reroute", ActionListener.noop());
logger.info("find the replica shard that gets relocated");
IndexShardRoutingTable indexShardRoutingTable = null;
if (clusterState.routingTable().index("test").shard(0).replicaShards().get(0).relocating()) {
indexShardRoutingTable = clusterState.routingTable().index("test").shard(0);
} else if (clusterState.routingTable().index("test").shard(1).replicaShards().get(0).relocating()) {
indexShardRoutingTable = clusterState.routingTable().index("test").shard(1);
}
// we might have primary relocating, and the test is only for replicas, so only test in the case of replica allocation
if (indexShardRoutingTable != null) {
logger.info(
"kill the node [{}] of the primary shard for the relocating replica",
indexShardRoutingTable.primaryShard().currentNodeId()
);
clusterState = ClusterState.builder(clusterState)
.nodes(DiscoveryNodes.builder(clusterState.nodes()).remove(indexShardRoutingTable.primaryShard().currentNodeId()))
.build();
clusterState = strategy.disassociateDeadNodes(clusterState, true, "reroute");
logger.info("make sure all the primary shards are active");
assertThat(clusterState.routingTable().index("test").shard(0).primaryShard().active(), equalTo(true));
assertThat(clusterState.routingTable().index("test").shard(1).primaryShard().active(), equalTo(true));
}
}
}
| ElectReplicaAsPrimaryDuringRelocationTests |
java | netty__netty | codec-native-quic/src/test/java/io/netty/handler/codec/quic/QuicStreamIdGeneratorTest.java | {
"start": 779,
"end": 1659
} | class ____ extends AbstractQuicTest {
@Test
public void testServerStreamIds() {
QuicStreamIdGenerator generator = new QuicStreamIdGenerator(true);
assertEquals(1, generator.nextStreamId(true));
assertEquals(5, generator.nextStreamId(true));
assertEquals(3, generator.nextStreamId(false));
assertEquals(9, generator.nextStreamId(true));
assertEquals(7, generator.nextStreamId(false));
}
@Test
public void testClientStreamIds() {
QuicStreamIdGenerator generator = new QuicStreamIdGenerator(false);
assertEquals(0, generator.nextStreamId(true));
assertEquals(4, generator.nextStreamId(true));
assertEquals(2, generator.nextStreamId(false));
assertEquals(8, generator.nextStreamId(true));
assertEquals(6, generator.nextStreamId(false));
}
}
| QuicStreamIdGeneratorTest |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/diagnostics/analyzer/UnboundConfigurationPropertyFailureAnalyzerTests.java | {
"start": 1641,
"end": 3863
} | class ____ {
@BeforeEach
void setup() {
LocaleContextHolder.setLocale(Locale.US);
}
@AfterEach
void cleanup() {
LocaleContextHolder.resetLocaleContext();
}
@Test
void bindExceptionDueToUnboundElements() {
FailureAnalysis analysis = performAnalysis(UnboundElementsFailureConfiguration.class,
"test.foo.listValue[0]=hello", "test.foo.listValue[2]=world");
assertThat(analysis.getDescription()).contains(
failure("test.foo.listvalue[2]", "world", "\"test.foo.listValue[2]\" from property source \"test\"",
"The elements [test.foo.listvalue[2]] were left unbound."));
}
private static String failure(String property, String value, String origin, String reason) {
return String.format("Property: %s%n Value: \"%s\"%n Origin: %s%n Reason: %s", property, value, origin,
reason);
}
private FailureAnalysis performAnalysis(Class<?> configuration, String... environment) {
BeanCreationException failure = createFailure(configuration, environment);
FailureAnalysis analyze = new UnboundConfigurationPropertyFailureAnalyzer().analyze(failure);
assertThat(analyze).isNotNull();
return analyze;
}
private BeanCreationException createFailure(Class<?> configuration, String... environment) {
try {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
addEnvironment(context, environment);
context.register(configuration);
context.refresh();
context.close();
throw new AssertionError("Should not be reached");
}
catch (BeanCreationException ex) {
return ex;
}
}
private void addEnvironment(AnnotationConfigApplicationContext context, String[] environment) {
MutablePropertySources sources = context.getEnvironment().getPropertySources();
Map<String, Object> map = new HashMap<>();
for (String pair : environment) {
int index = pair.indexOf('=');
String key = (index > 0) ? pair.substring(0, index) : pair;
String value = (index > 0) ? pair.substring(index + 1) : "";
map.put(key.trim(), value.trim());
}
sources.addFirst(new MapPropertySource("test", map));
}
@EnableConfigurationProperties(UnboundElementsFailureProperties.class)
static | UnboundConfigurationPropertyFailureAnalyzerTests |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/SingleLetterDefaultPackageTest.java | {
"start": 286,
"end": 851
} | class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(B.class, C.class, L.class)
.addAsResource(new StringAsset("simpleBean.baz=1"), "application.properties"));
@Inject
B b;
@Inject
C c;
@Inject
L l;
@Test
public void testB() {
assertEquals("1", b.ping());
assertEquals(c.ping(), b.ping());
assertEquals(l.ping(), b.ping());
}
}
| SingleLetterDefaultPackageTest |
java | junit-team__junit5 | documentation/src/test/java/example/ParameterizedClassDemo.java | {
"start": 2533,
"end": 2651
} | class ____ {
@Parameter
String fruit;
@Nested
@ParameterizedClass
@ValueSource(ints = { 23, 42 })
| FruitTests |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/CapacityHeadroomProvider.java | {
"start": 1198,
"end": 2896
} | class ____ {
UsersManager.User user;
AbstractLeafQueue queue;
FiCaSchedulerApp application;
AbstractLeafQueue.QueueResourceLimitsInfo queueResourceLimitsInfo;
public CapacityHeadroomProvider(UsersManager.User user, AbstractLeafQueue queue,
FiCaSchedulerApp application,
AbstractLeafQueue.QueueResourceLimitsInfo queueResourceLimitsInfo) {
this.user = user;
this.queue = queue;
this.application = application;
this.queueResourceLimitsInfo = queueResourceLimitsInfo;
}
public Resource getHeadroom() {
Resource queueCurrentLimit;
Resource clusterResource;
synchronized (queueResourceLimitsInfo) {
queueCurrentLimit = queueResourceLimitsInfo.getQueueCurrentLimit();
clusterResource = queueResourceLimitsInfo.getClusterResource();
}
Set<String> requestedPartitions =
application.getAppSchedulingInfo().getRequestedPartitions();
Resource headroom;
if (requestedPartitions.isEmpty() || (requestedPartitions.size() == 1
&& requestedPartitions.contains(RMNodeLabelsManager.NO_LABEL))) {
headroom = queue.getHeadroom(user, queueCurrentLimit, clusterResource,
application);
} else {
headroom = Resource.newInstance(0, 0);
for (String partition : requestedPartitions) {
Resource partitionHeadRoom = queue.getHeadroom(user, queueCurrentLimit,
clusterResource, application, partition);
Resources.addTo(headroom, partitionHeadRoom);
}
}
// Corner case to deal with applications being slightly over-limit
if (headroom.getMemorySize() < 0) {
headroom.setMemorySize(0);
}
return headroom;
}
}
| CapacityHeadroomProvider |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/single/SingleDelayWithObservable.java | {
"start": 1004,
"end": 1468
} | class ____<T, U> extends Single<T> {
final SingleSource<T> source;
final ObservableSource<U> other;
public SingleDelayWithObservable(SingleSource<T> source, ObservableSource<U> other) {
this.source = source;
this.other = other;
}
@Override
protected void subscribeActual(SingleObserver<? super T> observer) {
other.subscribe(new OtherSubscriber<>(observer, source));
}
static final | SingleDelayWithObservable |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2ConnectionHandler.java | {
"start": 41811,
"end": 43510
} | class ____ implements ChannelFutureListener {
private final ChannelHandlerContext ctx;
private final ChannelPromise promise;
private final Future<?> timeoutTask;
private boolean closed;
ClosingChannelFutureListener(ChannelHandlerContext ctx, ChannelPromise promise) {
this.ctx = ctx;
this.promise = promise;
timeoutTask = null;
}
ClosingChannelFutureListener(final ChannelHandlerContext ctx, final ChannelPromise promise,
long timeout, TimeUnit unit) {
this.ctx = ctx;
this.promise = promise;
timeoutTask = ctx.executor().schedule(new Runnable() {
@Override
public void run() {
doClose();
}
}, timeout, unit);
}
@Override
public void operationComplete(ChannelFuture sentGoAwayFuture) {
if (timeoutTask != null) {
timeoutTask.cancel(false);
}
doClose();
}
private void doClose() {
// We need to guard against multiple calls as the timeout may trigger close() first and then it will be
// triggered again because of operationComplete(...) is called.
if (closed) {
// This only happens if we also scheduled a timeout task.
assert timeoutTask != null;
return;
}
closed = true;
if (promise == null) {
ctx.close();
} else {
ctx.close(promise);
}
}
}
}
| ClosingChannelFutureListener |
java | hibernate__hibernate-orm | hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/DialectFeatureChecks.java | {
"start": 6532,
"end": 6836
} | class ____ implements DialectFeatureCheck {
public boolean apply(Dialect dialect) {
return dialect.getNationalizationSupport() != NationalizationSupport.UNSUPPORTED;
}
}
/**
* Does the database specifically support the explicit nationalized data types
*/
public static | SupportsNationalizedData |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/pc/DynamicUpdateTest.java | {
"start": 1224,
"end": 2324
} | class ____ {
@Id
private Long id;
@Column
private String name;
@Column
private String description;
@Column(name = "price_cents")
private Integer priceCents;
@Column
private Integer quantity;
//Getters and setters are omitted for brevity
//end::pc-managed-state-dynamic-update-mapping-example[]
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Integer getPriceCents() {
return priceCents;
}
public void setPriceCents(Integer priceCents) {
this.priceCents = priceCents;
}
public Integer getQuantity() {
return quantity;
}
public void setQuantity(Integer quantity) {
this.quantity = quantity;
}
//tag::pc-managed-state-dynamic-update-mapping-example[]
}
//end::pc-managed-state-dynamic-update-mapping-example[]
}
| Product |
java | redisson__redisson | redisson/src/main/java/org/redisson/client/protocol/decoder/GeoPositionDecoder.java | {
"start": 941,
"end": 1484
} | class ____ implements MultiDecoder<GeoPosition> {
@Override
public Decoder<Object> getDecoder(Codec codec, int paramNum, State state, long size) {
return DoubleCodec.INSTANCE.getValueDecoder();
}
@Override
public GeoPosition decode(List<Object> parts, State state) {
if (parts.isEmpty()) {
return null;
}
Double longitude = (Double) parts.get(0);
Double latitude = (Double) parts.get(1);
return new GeoPosition(longitude, latitude);
}
}
| GeoPositionDecoder |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/sqm/internal/SqmUtil.java | {
"start": 41661,
"end": 43470
} | class ____ implements SqmStatement.ParameterResolutions {
private final Set<SqmParameter<?>> sqmParameters;
private final Map<JpaCriteriaParameter<?>, SqmJpaCriteriaParameterWrapper<?>> jpaCriteriaParamResolutions;
public ParameterResolutionsImpl(
Set<SqmParameter<?>> sqmParameters,
Map<JpaCriteriaParameter<?>, List<SqmJpaCriteriaParameterWrapper<?>>> jpaCriteriaParamResolutions) {
this.sqmParameters = sqmParameters;
if ( jpaCriteriaParamResolutions == null || jpaCriteriaParamResolutions.isEmpty() ) {
this.jpaCriteriaParamResolutions = emptyMap();
}
else {
this.jpaCriteriaParamResolutions =
new IdentityHashMap<>( determineProperSizing( jpaCriteriaParamResolutions ) );
for ( var entry : jpaCriteriaParamResolutions.entrySet() ) {
final var iterator = entry.getValue().iterator();
if ( !iterator.hasNext() ) {
throw new IllegalStateException(
"SqmJpaCriteriaParameterWrapper references for JpaCriteriaParameter ["
+ entry.getKey() + "] already exhausted" );
}
this.jpaCriteriaParamResolutions.put( entry.getKey(), iterator.next() );
}
}
}
@Override
public Set<SqmParameter<?>> getSqmParameters() {
return sqmParameters;
}
@Override
public Map<JpaCriteriaParameter<?>, SqmJpaCriteriaParameterWrapper<?>> getJpaCriteriaParamResolutions() {
return jpaCriteriaParamResolutions;
}
}
/**
* Used to validate that the specified query return type is valid (i.e. the user
* did not pass {@code Integer.class} when the selection is an entity)
*/
public static void validateQueryReturnType(SqmQueryPart<?> queryPart, @Nullable Class<?> expectedResultType) {
if ( expectedResultType != null && !isResultTypeAlwaysAllowed( expectedResultType ) ) {
// the result- | ParameterResolutionsImpl |
java | apache__camel | core/camel-core-reifier/src/main/java/org/apache/camel/reifier/dataformat/FhirDataFormatReifier.java | {
"start": 1133,
"end": 3497
} | class ____<T extends FhirDataformat> extends DataFormatReifier<T> {
public FhirDataFormatReifier(CamelContext camelContext, DataFormatDefinition definition) {
super(camelContext, (T) definition);
}
@Override
protected void prepareDataFormatConfig(Map<String, Object> properties) {
properties.put("fhirVersion", definition.getFhirVersion());
properties.put("fhirContext", asRef(definition.getFhirContext()));
properties.put("serverBaseUrl", definition.getServerBaseUrl());
properties.put("forceResourceId", asRef(definition.getForceResourceId()));
properties.put("preferTypesNames", definition.getPreferTypes());
properties.put("parserOptions", asRef(definition.getParserOptions()));
properties.put("parserErrorHandler", asRef(definition.getParserErrorHandler()));
properties.put("encodeElementsAppliesToChildResourcesOnly", definition.getEncodeElementsAppliesToChildResourcesOnly());
properties.put("omitResourceId", definition.getOmitResourceId());
properties.put("prettyPrint", definition.getPrettyPrint());
properties.put("suppressNarratives", definition.getSuppressNarratives());
properties.put("summaryMode", definition.getSummaryMode());
properties.put("overrideResourceIdWithBundleEntryFullUrl", definition.getOverrideResourceIdWithBundleEntryFullUrl());
properties.put("stripVersionsFromReferences", definition.getStripVersionsFromReferences());
// convert string to list/set for the following options
if (definition.getDontStripVersionsFromReferencesAtPaths() != null) {
List<String> list = Arrays.stream(definition.getDontStripVersionsFromReferencesAtPaths().split(","))
.toList();
properties.put("dontStripVersionsFromReferencesAtPaths", list);
}
if (definition.getDontEncodeElements() != null) {
Set<String> set = Arrays.stream(definition.getDontEncodeElements().split(",")).collect(Collectors.toSet());
properties.put("dontEncodeElements", set);
}
if (definition.getEncodeElements() != null) {
Set<String> set = Arrays.stream(definition.getEncodeElements().split(",")).collect(Collectors.toSet());
properties.put("encodeElements", set);
}
}
}
| FhirDataFormatReifier |
java | google__dagger | javatests/dagger/internal/codegen/InjectConstructorFactoryGeneratorTest.java | {
"start": 46836,
"end": 47370
} | class ____ {",
" @Inject void inject(Producer<String> str) {}",
"}");
daggerCompiler(file)
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining("Producer may only be injected in @Produces methods");
});
}
@Test public void injectConstructor() {
Source file =
CompilerTests.javaSource("test.InjectConstructor",
"package test;",
"",
"import javax.inject.Inject;",
"",
" | A |
java | apache__hadoop | hadoop-cloud-storage-project/hadoop-tos/src/main/java/org/apache/hadoop/fs/tosfs/commit/ops/RawPendingOps.java | {
"start": 1151,
"end": 2130
} | class ____ implements PendingOps {
private static final Logger LOG = LoggerFactory.getLogger(RawPendingOps.class);
private final ObjectStorage storage;
/**
* Constructor for {@link PendingOpsFactory} to reflect a new instance.
*
* @param fs the file system.
* @param storage the object storage.
*/
public RawPendingOps(FileSystem fs, ObjectStorage storage) {
this.storage = storage;
}
public void revert(Pending commit) {
LOG.info("Revert the commit by deleting the object key - {}", commit);
storage.delete(commit.destKey());
}
public void abort(Pending commit) {
LOG.info("Abort the commit by aborting multipart upload - {}", commit);
storage.abortMultipartUpload(commit.destKey(), commit.uploadId());
}
public void commit(Pending commit) {
LOG.info("Commit by completing the multipart uploads - {}", commit);
storage.completeUpload(commit.destKey(), commit.uploadId(), commit.parts());
}
}
| RawPendingOps |
java | spring-projects__spring-boot | module/spring-boot-jdbc/src/main/java/org/springframework/boot/jdbc/metadata/HikariDataSourcePoolMetadata.java | {
"start": 1016,
"end": 2220
} | class ____ extends AbstractDataSourcePoolMetadata<HikariDataSource> {
public HikariDataSourcePoolMetadata(HikariDataSource dataSource) {
super(dataSource);
}
@Override
public @Nullable Integer getActive() {
try {
HikariPool hikariPool = getHikariPool();
return (hikariPool != null) ? hikariPool.getActiveConnections() : null;
}
catch (Exception ex) {
return null;
}
}
@Override
public @Nullable Integer getIdle() {
try {
HikariPool hikariPool = getHikariPool();
return (hikariPool != null) ? hikariPool.getIdleConnections() : null;
}
catch (Exception ex) {
return null;
}
}
private @Nullable HikariPool getHikariPool() {
return (HikariPool) new DirectFieldAccessor(getDataSource()).getPropertyValue("pool");
}
@Override
public @Nullable Integer getMax() {
return getDataSource().getMaximumPoolSize();
}
@Override
public @Nullable Integer getMin() {
return getDataSource().getMinimumIdle();
}
@Override
public @Nullable String getValidationQuery() {
return getDataSource().getConnectionTestQuery();
}
@Override
public @Nullable Boolean getDefaultAutoCommit() {
return getDataSource().isAutoCommit();
}
}
| HikariDataSourcePoolMetadata |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/script/ScriptContext.java | {
"start": 3266,
"end": 7265
} | class ____ is an instance of a script. */
public final Class<?> instanceClazz;
/** The default size of the cache for the context if not overridden */
public final int cacheSizeDefault;
/** The default expiration of a script in the cache for the context, if not overridden */
public final TimeValue cacheExpireDefault;
/** Is compilation rate limiting enabled for this context? */
public final boolean compilationRateLimited;
/** Determines if the script can be stored as part of the cluster state. */
public final boolean allowStoredScript;
/** Construct a context with the related instance and compiled classes with caller provided cache defaults */
public ScriptContext(
String name,
Class<FactoryType> factoryClazz,
int cacheSizeDefault,
TimeValue cacheExpireDefault,
boolean compilationRateLimited,
boolean allowStoredScript
) {
this.name = name;
this.factoryClazz = factoryClazz;
Method newInstanceMethod = findMethod("FactoryType", factoryClazz, "newInstance");
Method newFactoryMethod = findMethod("FactoryType", factoryClazz, "newFactory");
if (newFactoryMethod != null) {
assert newInstanceMethod == null;
statefulFactoryClazz = newFactoryMethod.getReturnType();
newInstanceMethod = findMethod("StatefulFactoryType", statefulFactoryClazz, "newInstance");
if (newInstanceMethod == null) {
throw new IllegalArgumentException(
"Could not find method newInstance StatefulFactoryType class ["
+ statefulFactoryClazz.getName()
+ "] for script context ["
+ name
+ "]"
);
}
} else if (newInstanceMethod != null) {
assert newFactoryMethod == null;
statefulFactoryClazz = null;
} else {
throw new IllegalArgumentException(
"Could not find method newInstance or method newFactory on FactoryType class ["
+ factoryClazz.getName()
+ "] for script context ["
+ name
+ "]"
);
}
instanceClazz = newInstanceMethod.getReturnType();
this.cacheSizeDefault = cacheSizeDefault;
this.cacheExpireDefault = cacheExpireDefault;
this.compilationRateLimited = compilationRateLimited;
this.allowStoredScript = allowStoredScript;
}
/** Construct a context with the related instance and compiled classes with defaults for cacheSizeDefault, cacheExpireDefault and
* compilationRateLimited and allow scripts of this context to be stored scripts */
public ScriptContext(String name, Class<FactoryType> factoryClazz) {
// cache size default, cache expire default, max compilation rate are defaults from ScriptService.
this(name, factoryClazz, 100, TimeValue.timeValueMillis(0), true, true);
}
/** Returns a method with the given name, or throws an exception if multiple are found. */
private Method findMethod(String type, Class<?> clazz, String methodName) {
Method foundMethod = null;
for (Method method : clazz.getMethods()) {
if (method.getName().equals(methodName)) {
if (foundMethod != null) {
throw new IllegalArgumentException(
"Cannot have multiple "
+ methodName
+ " methods on "
+ type
+ " class ["
+ clazz.getName()
+ "] for script context ["
+ name
+ "]"
);
}
foundMethod = method;
}
}
return foundMethod;
}
}
| that |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/bytecode/enhancement/lazy/proxy/NoProxyFactoryTests.java | {
"start": 2564,
"end": 2879
} | class ____ {
@Id
private Integer id;
private String name;
public final Integer getId() {
return id;
}
public final void setId(Integer id) {
this.id = id;
}
public final String getName() {
return name;
}
public final void setName(String name) {
this.name = name;
}
}
}
| SimpleEntity |
java | google__dagger | javatests/dagger/internal/codegen/MissingBindingValidationTest.java | {
"start": 8228,
"end": 8690
} | interface ____ {",
" void inject(Self target);",
"}");
CompilerTests.daggerCompiler(self, component)
.withProcessingOptions(compilerMode.processorOptions())
.compile(
subject -> {
subject.hasErrorCount(1);
subject.hasErrorContaining("Self cannot be provided without an @Inject constructor")
.onSource(component)
.onLineContaining(" | SelfComponent |
java | micronaut-projects__micronaut-core | inject-java/src/test/groovy/io/micronaut/inject/failures/fieldcirculardependency/MyClassC.java | {
"start": 703,
"end": 764
} | class ____ {
@Inject
protected MyClassB propB;
}
| MyClassC |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MyBatisBeanEndpointBuilderFactory.java | {
"start": 9630,
"end": 10307
} | class ____.
*
* Path parameter: methodName (required)
* Name of the method on the bean that has the SQL query to be executed.
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path beanName:methodName
* @return the dsl builder
*/
default MyBatisBeanEndpointBuilder mybatisBean(String componentName, String path) {
return MyBatisBeanEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
/**
* The builder of headers' name for the MyBatis Bean component.
*/
public static | name |
java | quarkusio__quarkus | integration-tests/rest-client/src/test/java/io/quarkus/it/rest/client/wronghost/ExternalWrongHostUsingHostnameVerifierIT.java | {
"start": 131,
"end": 237
} | class ____ extends ExternalWrongHostUsingHostnameVerifierTestCase {
}
| ExternalWrongHostUsingHostnameVerifierIT |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/builder/mappingTarget/simple/SimpleImmutableTarget.java | {
"start": 216,
"end": 627
} | class ____ {
private final String name;
private final int age;
SimpleImmutableTarget(Builder builder) {
this.name = builder.name;
this.age = builder.age;
}
public static Builder builder() {
return new Builder();
}
public int getAge() {
return age;
}
public String getName() {
return name;
}
public static | SimpleImmutableTarget |
java | apache__camel | components/camel-grape/src/main/java/org/apache/camel/component/grape/MavenCoordinates.java | {
"start": 853,
"end": 2098
} | class ____ {
private final String groupId;
private final String artifactId;
private final String version;
private final String classifier;
MavenCoordinates(String groupId, String artifactId, String version, String classifier) {
this.groupId = groupId;
this.artifactId = artifactId;
this.version = version;
this.classifier = classifier;
}
public static MavenCoordinates parseMavenCoordinates(String coordinates) {
String[] coordinatesParts = coordinates.split("/");
String clazzifier = "";
if (coordinatesParts.length < 3 || coordinatesParts.length > 4) {
throw new IllegalArgumentException("Invalid coordinates: " + coordinates);
}
if (coordinatesParts.length == 4) {
clazzifier = coordinatesParts[3];
}
return new MavenCoordinates(coordinatesParts[0], coordinatesParts[1], coordinatesParts[2], clazzifier);
}
public String getGroupId() {
return groupId;
}
public String getArtifactId() {
return artifactId;
}
public String getVersion() {
return version;
}
public String getClassifier() {
return classifier;
}
}
| MavenCoordinates |
java | apache__camel | components/camel-vertx/camel-vertx-http/src/test/java/org/apache/camel/component/vertx/http/VertxHttpFileUploadMultipartEasyTest.java | {
"start": 1316,
"end": 2730
} | class ____ extends VertxHttpTestSupport {
@Test
public void testVertxFileUpload() {
File f = new File("src/test/resources/log4j2.properties");
Exchange out
= template.request(getProducerUri() + "/upload2?multipartUpload=true&multipartUploadName=cheese", exchange -> {
exchange.getMessage().setBody(f);
});
assertNotNull(out);
assertFalse(out.isFailed(), "Should not fail");
assertEquals("log4j2.properties", out.getMessage().getBody(String.class));
}
@Override
protected RoutesBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from(getTestServerUri() + "/upload2")
.process(new Processor() {
@Override
public void process(Exchange exchange) {
// undertow store the multipart-form as map in the camel message
DataHandler dh = (DataHandler) exchange.getMessage().getBody(Map.class).get("cheese");
String out = dh.getDataSource().getName();
exchange.getMessage().setBody(out);
}
});
}
};
}
}
| VertxHttpFileUploadMultipartEasyTest |
java | apache__flink | flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/connector/blackhole/table/BlackHoleTableSinkFactory.java | {
"start": 2419,
"end": 3411
} | class ____ implements DynamicTableSink, SupportsPartitioning {
@Override
public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
ChangelogMode.Builder builder = ChangelogMode.newBuilder();
for (RowKind kind : requestedMode.getContainedKinds()) {
if (kind != RowKind.UPDATE_BEFORE) {
builder.addContainedKind(kind);
}
}
return builder.build();
}
@Override
public SinkRuntimeProvider getSinkRuntimeProvider(DynamicTableSink.Context context) {
return SinkV2Provider.of(new DiscardingSink<>());
}
@Override
public DynamicTableSink copy() {
return new BlackHoleSink();
}
@Override
public String asSummaryString() {
return "BlackHole";
}
@Override
public void applyStaticPartition(Map<String, String> partition) {}
}
}
| BlackHoleSink |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/config/dialect/StorageSpecificMysqlDBTest.java | {
"start": 532,
"end": 1458
} | class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClass(MyEntity.class)
.addAsResource("application-start-offline-mariadb-dialect.properties", "application.properties"))
.setForcedDependencies(List.of(
Dependency.of("io.quarkus", "quarkus-jdbc-mariadb-deployment", Version.getVersion())))
.setLogRecordPredicate(record -> HibernateProcessorUtil.class.getName().equals(record.getLoggerName()))
.overrideConfigKey("quarkus.hibernate-orm.dialect.storage-engine", "")
.overrideConfigKey("quarkus.hibernate-orm.dialect.mysql.storage-engine", "innodb");
@Test
public void applicationStarts() {
assertThat(System.getProperty("hibernate.dialect.storage_engine")).isEqualTo("innodb");
}
}
| StorageSpecificMysqlDBTest |
java | alibaba__nacos | client/src/test/java/com/alibaba/nacos/client/config/common/GroupKeyTest.java | {
"start": 891,
"end": 2684
} | class ____ {
@Test
void testGetKey() {
assertEquals("1+foo", GroupKey.getKey("1", "foo"));
assertEquals("1+foo+bar", GroupKey.getKey("1", "foo", "bar"));
assertEquals("1+f%2Boo+b%25ar", GroupKey.getKey("1", "f+oo", "b%ar"));
}
@Test
void testGetKeyTenant() {
assertEquals("1+foo+bar", GroupKey.getKeyTenant("1", "foo", "bar"));
}
@Test
void testParseKey() {
assertArrayEquals(new String[] {"a", "f+oo", null}, GroupKey.parseKey("a+f%2Boo"));
assertArrayEquals(new String[] {"b", "f%oo", null}, GroupKey.parseKey("b+f%25oo"));
assertArrayEquals(new String[] {"a", "b", "c"}, GroupKey.parseKey("a+b+c"));
}
@Test
void testParseKeyIllegalArgumentException1() {
assertThrows(IllegalArgumentException.class, () -> {
GroupKey.parseKey("");
});
}
@Test
void testParseKeyIllegalArgumentException2() {
assertThrows(IllegalArgumentException.class, () -> {
GroupKey.parseKey("f%oo");
});
}
@Test
void testParseKeyIllegalArgumentException3() {
assertThrows(IllegalArgumentException.class, () -> {
GroupKey.parseKey("f+o+o+bar");
});
}
@Test
void testParseKeyIllegalArgumentException4() {
assertThrows(IllegalArgumentException.class, () -> {
GroupKey.parseKey("f++bar");
});
}
@Test
void testGetKeyDatIdParam() {
assertThrows(IllegalArgumentException.class, () -> {
GroupKey.getKey("", "a");
});
}
@Test
void testGetKeyGroupParam() {
assertThrows(IllegalArgumentException.class, () -> {
GroupKey.getKey("a", "");
});
}
}
| GroupKeyTest |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/io/FileBench.java | {
"start": 9355,
"end": 9629
} | enum ____ {
blk("BLOCK"),
rec("RECORD");
String typ;
CType(String typ) { this.typ = typ; }
public void configure(JobConf job) {
job.set("mapred.map.output.compression.type", typ);
job.set("mapred.output.compression.type", typ);
}
}
| CType |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/extraction/ExtractionUtils.java | {
"start": 21411,
"end": 24285
} | class ____ publicly accessible and has a default constructor.",
rawSerializer.getName());
}
}
/** Resolves a {@link TypeVariable} using the given type hierarchy if possible. */
static Type resolveVariable(List<Type> typeHierarchy, TypeVariable<?> variable) {
// iterate through hierarchy from top to bottom until type variable gets a non-variable
// assigned
for (int i = typeHierarchy.size() - 1; i >= 0; i--) {
final Type currentType = typeHierarchy.get(i);
if (currentType instanceof ParameterizedType) {
final Type resolvedType =
resolveVariableInParameterizedType(
variable, (ParameterizedType) currentType);
if (resolvedType instanceof TypeVariable) {
// follow type variables transitively
variable = (TypeVariable<?>) resolvedType;
} else if (resolvedType != null) {
return resolvedType;
}
}
}
// unresolved variable
return variable;
}
private static @Nullable Type resolveVariableInParameterizedType(
TypeVariable<?> variable, ParameterizedType currentType) {
final Class<?> currentRaw = (Class<?>) currentType.getRawType();
final TypeVariable<?>[] currentVariables = currentRaw.getTypeParameters();
// search for matching type variable
for (int paramPos = 0; paramPos < currentVariables.length; paramPos++) {
if (typeVariableEquals(variable, currentVariables[paramPos])) {
return currentType.getActualTypeArguments()[paramPos];
}
}
return null;
}
private static boolean typeVariableEquals(
TypeVariable<?> variable, TypeVariable<?> currentVariable) {
return currentVariable.getGenericDeclaration().equals(variable.getGenericDeclaration())
&& currentVariable.getName().equals(variable.getName());
}
/**
* Validates if a given type is not already contained in the type hierarchy of a structured
* type.
*
* <p>Otherwise this would lead to infinite data type extraction cycles.
*/
static void validateStructuredSelfReference(Type t, List<Type> typeHierarchy) {
final Class<?> clazz = toClass(t);
if (clazz != null
&& !clazz.isInterface()
&& clazz != Object.class
&& typeHierarchy.contains(t)) {
throw extractionError(
"Cyclic reference detected for class '%s'. Attributes of structured types must not "
+ "(transitively) reference the structured type itself.",
clazz.getName());
}
}
/** Returns the fields of a | is |
java | apache__kafka | streams/src/main/java/org/apache/kafka/streams/kstream/internals/WrappingNullableSerializer.java | {
"start": 982,
"end": 1119
} | interface ____<Outer, InnerK, InnerV> extends Serializer<Outer> {
void setIfUnset(final SerdeGetter getter);
}
| WrappingNullableSerializer |
java | google__dagger | javatests/artifacts/dagger-ksp/transitive-annotation-app/library1/src/main/java/library1/FooBase.java | {
"start": 882,
"end": 2464
} | class ____ {
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
int baseNonDaggerField;
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
@Inject
@MyQualifier
Dep baseDaggerField;
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
FooBase(
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
MyTransitiveType nonDaggerParameter) {}
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
@Inject
FooBase(
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
@MyQualifier
Dep dep) {}
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
void baseNonDaggerMethod(
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
int i) {}
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
@Inject
void baseDaggerMethod(
@MyTransitiveBaseAnnotation
@MyAnnotation(MyTransitiveType.VALUE)
@MyOtherAnnotation(MyTransitiveType.class)
@MyQualifier
Dep dep) {}
}
| FooBase |
java | apache__camel | components/camel-python/src/main/java/org/apache/camel/language/python/PythonLanguage.java | {
"start": 3601,
"end": 3947
} | class ____ {
private final Map<String, PyCode> cache = new HashMap<>();
public void addScript(String script, PyCode compiledScript) {
cache.put(script, compiledScript);
}
public PythonLanguage build() {
return new PythonLanguage(Collections.unmodifiableMap(cache));
}
}
}
| Builder |
java | alibaba__fastjson | src/main/java/com/alibaba/fastjson/JSONPath.java | {
"start": 117638,
"end": 118906
} | class ____ extends PropertyFilter {
private final String value;
private final Operator op;
public StringOpSegement(String propertyName, boolean function, String value, Operator op){
super(propertyName, function);
this.value = value;
this.op = op;
}
public boolean apply(JSONPath path, Object rootObject, Object currentObject, Object item) {
Object propertyValue = get(path, rootObject, item);
if (op == Operator.EQ) {
return value.equals(propertyValue);
} else if (op == Operator.NE) {
return !value.equals(propertyValue);
}
if (propertyValue == null) {
return false;
}
int compareResult = value.compareTo(propertyValue.toString());
if (op == Operator.GE) {
return compareResult <= 0;
} else if (op == Operator.GT) {
return compareResult < 0;
} else if (op == Operator.LE) {
return compareResult >= 0;
} else if (op == Operator.LT) {
return compareResult > 0;
}
return false;
}
}
static | StringOpSegement |
java | apache__flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogDatabaseImpl.java | {
"start": 1139,
"end": 2709
} | class ____ implements CatalogDatabase {
// Property of the database
private final Map<String, String> properties;
// Comment of the database
private final String comment;
public CatalogDatabaseImpl(Map<String, String> properties, @Nullable String comment) {
this.properties = checkNotNull(properties, "properties cannot be null");
this.comment = comment;
}
/** Get a map of properties associated with the database. */
public Map<String, String> getProperties() {
return properties;
}
/**
* Get comment of the database.
*
* @return comment of the database
*/
public String getComment() {
return comment;
}
/**
* Get a deep copy of the CatalogDatabase instance.
*
* @return a copy of CatalogDatabase instance
*/
public CatalogDatabase copy() {
return copy(getProperties());
}
@Override
public CatalogDatabase copy(Map<String, String> properties) {
return new CatalogDatabaseImpl(new HashMap<>(properties), comment);
}
/**
* Get a brief description of the database.
*
* @return an optional short description of the database
*/
public Optional<String> getDescription() {
return Optional.ofNullable(comment);
}
/**
* Get a detailed description of the database.
*
* @return an optional long description of the database
*/
public Optional<String> getDetailedDescription() {
return Optional.ofNullable(comment);
}
}
| CatalogDatabaseImpl |
java | bumptech__glide | instrumentation/src/androidTest/java/com/bumptech/glide/NonBitmapDrawableResourcesTest.java | {
"start": 1280,
"end": 20115
} | class ____ {
@Rule public final TestName testName = new TestName();
@Rule public final TearDownGlide tearDownGlide = new TearDownGlide();
private final Context context = ApplicationProvider.getApplicationContext();
@Test
public void load_withBitmapResourceId_asDrawable_producesNonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable = Glide.with(context).load(android.R.drawable.star_big_off).submit().get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withBitmapResourceId_asDrawable_withTransformation_producesNonNullBitmap()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context)
.load(android.R.drawable.star_big_off)
.apply(centerCropTransform())
.submit()
.get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withBitmapResourceId_asBitmap_producesNonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context).asBitmap().load(android.R.drawable.star_big_off).submit().get();
assertThat(bitmap).isNotNull();
}
@Test
public void load_withBitmapAliasResourceId_asDrawable_producesNonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable = Glide.with(context).load(ResourceIds.drawable.bitmap_alias).submit().get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withBitmapAliasResourceId_asDrawable_withTransformation_producesNonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context)
.load(ResourceIds.drawable.bitmap_alias)
.apply(centerCropTransform())
.submit()
.get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withBitmapAliasResourceId_asBitmap_producesNonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context).asBitmap().load(ResourceIds.drawable.bitmap_alias).submit().get();
assertThat(bitmap).isNotNull();
}
@Test
public void load_withShapeDrawableResourceId_asDrawable_producesNonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context).load(ResourceIds.drawable.shape_drawable).submit().get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withShapeDrawableResourceId_asDrawable_withTransformation_sizeOriginal_fails()
throws ExecutionException, InterruptedException {
assertThrows(
ExecutionException.class,
new ThrowingRunnable() {
@Override
public void run() throws Throwable {
Glide.with(context)
.load(ResourceIds.drawable.shape_drawable)
.apply(centerCropTransform())
.submit()
.get();
}
});
}
@Test
public void load_withShapeDrawableResourceId_asDrawable_withTransformation_validSize_succeeds()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context)
.load(ResourceIds.drawable.shape_drawable)
.apply(bitmapTransform(new RoundedCorners(10)))
.submit(100, 200)
.get();
assertThat(drawable).isNotNull();
assertThat(drawable.getIntrinsicWidth()).isEqualTo(100);
assertThat(drawable.getIntrinsicHeight()).isEqualTo(200);
}
@Test
public void load_withShapeDrawableResourceId_asBitmap_withSizeOriginal_fails()
throws ExecutionException, InterruptedException {
assertThrows(
ExecutionException.class,
new ThrowingRunnable() {
@Override
public void run() throws Throwable {
Glide.with(context).asBitmap().load(ResourceIds.drawable.shape_drawable).submit().get();
}
});
}
@Test
public void load_withShapeDrawableResourceId_asBitmap_withValidSize_returnsNonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context)
.asBitmap()
.load(ResourceIds.drawable.shape_drawable)
.submit(100, 200)
.get();
assertThat(bitmap).isNotNull();
assertThat(bitmap.getWidth()).isEqualTo(100);
assertThat(bitmap.getHeight()).isEqualTo(200);
}
@Test
public void load_withShapeDrawableResourceId_asBitmap_withValidSizeAndTransform_nonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context)
.asBitmap()
.load(ResourceIds.drawable.shape_drawable)
.apply(centerCropTransform())
.submit(100, 200)
.get();
assertThat(bitmap).isNotNull();
assertThat(bitmap.getWidth()).isEqualTo(100);
assertThat(bitmap.getHeight()).isEqualTo(200);
}
@Test
public void load_withStateListDrawableResourceId_asDrawable_producesNonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context).load(ResourceIds.drawable.state_list_drawable).submit().get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withStateListDrawableResourceId_asDrawable_withTransformation_nonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context)
.load(ResourceIds.drawable.state_list_drawable)
.apply(centerCropTransform())
.submit()
.get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withStateListDrawableResourceId_asBitmap_producesNonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context)
.asBitmap()
.load(ResourceIds.drawable.state_list_drawable)
.submit()
.get();
assertThat(bitmap).isNotNull();
}
@Test
public void load_withStateListDrawableResourceId_asBitmap_withTransformation_nonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context)
.asBitmap()
.load(ResourceIds.drawable.state_list_drawable)
.apply(centerCropTransform())
.submit()
.get();
assertThat(bitmap).isNotNull();
}
@Test
public void load_withVectorDrawableResourceId_asDrawable_producesNonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context).load(ResourceIds.drawable.vector_drawable).submit().get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withVectorDrawableResourceId_asDrawable_withTransformation_nonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context)
.load(ResourceIds.drawable.vector_drawable)
.apply(centerCropTransform())
.submit()
.get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withVectorDrawableResourceId_asBitmap_producesNonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context).asBitmap().load(ResourceIds.drawable.vector_drawable).submit().get();
assertThat(bitmap).isNotNull();
}
@Test
public void load_withVectorDrawableResourceId_asBitmap_withTransformation_producesNonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context)
.asBitmap()
.load(ResourceIds.drawable.vector_drawable)
.apply(centerCropTransform())
.submit()
.get();
assertThat(bitmap).isNotNull();
}
@Test
public void load_withNinePatchResourceId_asDrawable_producesNonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context).load(ResourceIds.drawable.googlelogo_color_120x44dp).submit().get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withNinePatchResourceId_asDrawable_withTransformation_producesNonNullDrawable()
throws ExecutionException, InterruptedException {
Drawable drawable =
Glide.with(context)
.load(ResourceIds.drawable.googlelogo_color_120x44dp)
.apply(centerCropTransform())
.submit()
.get();
assertThat(drawable).isNotNull();
}
@Test
public void load_withNinePatchResourceId_asBitmap_producesNonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context)
.asBitmap()
.load(ResourceIds.drawable.googlelogo_color_120x44dp)
.submit()
.get();
assertThat(bitmap).isNotNull();
}
@Test
public void load_withNinePatchResourceId_asBitmap_withTransformation_producesNonNullBitmap()
throws ExecutionException, InterruptedException {
Bitmap bitmap =
Glide.with(context)
.asBitmap()
.load(ResourceIds.drawable.googlelogo_color_120x44dp)
.apply(centerCropTransform())
.submit()
.get();
assertThat(bitmap).isNotNull();
}
@Test
public void load_withApplicationIconResourceIdUri_asDrawable_producesNonNullDrawable()
throws NameNotFoundException, ExecutionException, InterruptedException {
for (String packageName : getInstalledPackages()) {
int iconResourceId = getResourceId(packageName);
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(packageName)
.path(String.valueOf(iconResourceId))
.build();
Drawable drawable = Glide.with(context).load(uri).submit().get();
assertThat(drawable).isNotNull();
}
}
@Test
public void load_withApplicationIconResourceIdUri_asDrawable_withTransformation_nonNullDrawable()
throws NameNotFoundException, ExecutionException, InterruptedException {
for (String packageName : getInstalledPackages()) {
int iconResourceId = getResourceId(packageName);
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(packageName)
.path(String.valueOf(iconResourceId))
.build();
Drawable drawable = Glide.with(context).load(uri).apply(centerCropTransform()).submit().get();
assertThat(drawable).isNotNull();
}
}
@Test
public void load_withApplicationIconResourceIdUri_asBitmap_producesNonNullBitmap()
throws NameNotFoundException, ExecutionException, InterruptedException {
for (String packageName : getInstalledPackages()) {
int iconResourceId = getResourceId(packageName);
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(packageName)
.path(String.valueOf(iconResourceId))
.build();
Bitmap bitmap = Glide.with(context).asBitmap().load(uri).submit().get();
assertThat(bitmap).isNotNull();
}
}
@Test
public void load_withApplicationIconResourceIdUri_asBitmap_withTransformation_nonNullBitmap()
throws ExecutionException, InterruptedException {
for (String packageName : getInstalledPackages()) {
int iconResourceId = getResourceId(packageName);
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(packageName)
.path(String.valueOf(iconResourceId))
.build();
Bitmap bitmap =
Glide.with(context).asBitmap().apply(centerCropTransform()).load(uri).submit().get();
assertThat(bitmap).isNotNull();
}
}
@Test
public void load_withApplicationIconResourceNameUri_asDrawable_producesNonNullDrawable()
throws ExecutionException, InterruptedException, NameNotFoundException {
for (String packageName : getInstalledPackages()) {
int iconResourceId = getResourceId(packageName);
Context toUse = context.createPackageContext(packageName, /* flags= */ 0);
Resources resources = toUse.getResources();
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(packageName)
.appendPath(resources.getResourceTypeName(iconResourceId))
.appendPath(resources.getResourceEntryName(iconResourceId))
.build();
Drawable drawable = Glide.with(context).load(uri).submit().get();
assertThat(drawable).isNotNull();
}
}
@Test
public void load_withApplicationIconResourceNameUri_asDrawable_withTransform_nonNullDrawable()
throws ExecutionException, InterruptedException, NameNotFoundException {
for (String packageName : getInstalledPackages()) {
int iconResourceId = getResourceId(packageName);
Context toUse = context.createPackageContext(packageName, /* flags= */ 0);
Resources resources = toUse.getResources();
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(packageName)
.appendPath(resources.getResourceTypeName(iconResourceId))
.appendPath(resources.getResourceEntryName(iconResourceId))
.build();
Drawable drawable = Glide.with(context).load(uri).apply(centerCropTransform()).submit().get();
assertThat(drawable).isNotNull();
}
}
@Test
public void load_withApplicationIconResourceNameUri_asBitmap_producesNonNullBitmap()
throws ExecutionException, InterruptedException, NameNotFoundException {
for (String packageName : getInstalledPackages()) {
int iconResourceId = getResourceId(packageName);
Context toUse = context.createPackageContext(packageName, /* flags= */ 0);
Resources resources = toUse.getResources();
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(packageName)
.appendPath(resources.getResourceTypeName(iconResourceId))
.appendPath(resources.getResourceEntryName(iconResourceId))
.build();
Bitmap bitmap = Glide.with(context).asBitmap().load(uri).submit().get();
assertThat(bitmap).isNotNull();
}
}
@Test
public void load_withApplicationIconResourceNameUri_asBitmap_withTransform_nonNullBitmap()
throws ExecutionException, InterruptedException, NameNotFoundException {
for (String packageName : getInstalledPackages()) {
int iconResourceId = getResourceId(packageName);
Context toUse = context.createPackageContext(packageName, /* flags= */ 0);
Resources resources = toUse.getResources();
Uri uri =
new Uri.Builder()
.scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
.authority(packageName)
.appendPath(resources.getResourceTypeName(iconResourceId))
.appendPath(resources.getResourceEntryName(iconResourceId))
.build();
Bitmap bitmap =
Glide.with(context).asBitmap().apply(centerCropTransform()).load(uri).submit().get();
assertThat(bitmap).isNotNull();
}
}
private Set<String> getInstalledPackages() {
Intent mainIntent = new Intent(Intent.ACTION_MAIN, null);
mainIntent.addCategory(Intent.CATEGORY_LAUNCHER);
PackageManager packageManager = context.getPackageManager();
List<ResolveInfo> pkgAppsList =
packageManager.queryIntentActivities(mainIntent, /* flags= */ 0);
Set<String> result = new HashSet<>();
for (ResolveInfo info : pkgAppsList) {
String packageName = info.activityInfo.packageName;
int iconResourceId = getResourceId(packageName);
if (iconResourceId != 0
&& doesApplicationPackageNameMatchResourcePackageName(packageName, iconResourceId)) {
result.add(info.activityInfo.packageName);
}
}
return result;
}
private int getResourceId(String packageName) {
PackageInfo packageInfo;
try {
packageInfo = context.getPackageManager().getPackageInfo(packageName, /* flags= */ 0);
} catch (NameNotFoundException e) {
return 0;
}
return packageInfo.applicationInfo.icon;
}
/**
* Returns {@code true} iff the resource package name is exactly the same as the containing
* application package name for a given resource id.
*
* <p>The resource package name is the value returned by {@link
* Resources#getResourcePackageName(int)}. The application package name is package name of the
* enclosing application. If these two things are equal, then we can both construct a Context for
* that package and retrieve a resource id for that package from a "standard" resource Uri
* containing a name instead of an id. If they aren't equal, then we can do only one of the two
* required tasks, so our Uri load will always fail. To handle this properly, we'd need callers to
* include both package names in the Uri. I'm not aware of any standardized Uri format for doing
* so, so these requests will just be treated as unsupported for the time being.
*
* <p>Take Calendar (emulators API 24 and below) as an example:
*
* <ul>
* <li>package name: com.google.android.calendar
* <li>resource package name: com.android.calendar
* </ul>
*
* We can construct one of two possible Uris:
*
* <ul>
* <li>android.resource://com.google.android.calendar/mipmap/ic_icon_calendar.
* <li>android.resource://com.android.calendar/mipmap/ic_icon_calendar.<
* </ul>
*
* From the first Uri, we can obtain the correct Context/Resources for the calendar package, but
* our attempts to resolve the correct resource id will fail because we do not have the resource
* package name. From the second Uri we cannot obtain the Context/Resources for the calendar
* package because the resource package name doesn't match the application package name.
*/
private boolean doesApplicationPackageNameMatchResourcePackageName(
String applicationPackageName, int iconResourceId) {
try {
Context current = context.createPackageContext(applicationPackageName, /* flags= */ 0);
String resourcePackageName = current.getResources().getResourcePackageName(iconResourceId);
return applicationPackageName.equals(resourcePackageName);
} catch (NameNotFoundException e) {
// This should never happen
throw new RuntimeException(e);
}
}
}
| NonBitmapDrawableResourcesTest |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/mapping/fetch/depth/form/AbstractFormFetchDepthTest.java | {
"start": 6275,
"end": 6611
} | class ____ {
@EmbeddedId
private FormVersionId id;
public FormVersion() {
id = new FormVersionId();
}
public FormVersion(Form form, int version) {
this();
this.id.setForm( form );
this.id.setVersionNumber( version );
}
public FormVersionId getId() {
return id;
}
}
@Embeddable
public static | FormVersion |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/exceptions/misusing/UnnecessaryStubbingException.java | {
"start": 797,
"end": 2147
} | class ____ test method mock as 'lenient' with
* our JUnit support ({@link org.mockito.junit.MockitoJUnit}) or Mockito session ({@link MockitoSession})</li>
* </ol>
*
* <p>
* Unnecessary stubbings are stubbed method calls that were never realized during test execution. Example:
* <pre class="code"><code class="java">
* //code under test:
* ...
* String result = translator.translate("one")
* ...
*
* //test:
* ...
* when(translator.translate("one")).thenReturn("jeden"); // <- stubbing realized during code execution
* when(translator.translate("two")).thenReturn("dwa"); // <- stubbing never realized
* ...
* </code>
* </pre>
* Notice that one of the stubbed methods were never realized in the code under test, during test execution.
* The stray stubbing might be an oversight of the developer, the artifact of copy-paste
* or the effect of not understanding the test/code.
* Either way, the developer ends up with unnecessary test code.
* In order to keep the codebase clean and maintainable it is necessary to remove unnecessary code.
* Otherwise tests are harder to read and reason about.
* <p>
* Mockito JUnit Runner triggers <code>UnnecessaryStubbingException</code> only when none of the test methods use the stubbings.
* This means that it is ok to put default stubbing in a 'setup' method or in test | or |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/source/coordinator/SplitAssignmentTrackerTest.java | {
"start": 1450,
"end": 7805
} | class ____ {
@Test
void testRecordIncrementalSplitAssignment() {
SplitAssignmentTracker<MockSourceSplit> tracker = new SplitAssignmentTracker<>();
tracker.recordSplitAssignment(getSplitsAssignment(3, 0));
tracker.recordSplitAssignment(getSplitsAssignment(2, 6));
verifyAssignment(Arrays.asList("0", "6"), tracker.uncheckpointedAssignments().get(0));
verifyAssignment(
Arrays.asList("1", "2", "7", "8"), tracker.uncheckpointedAssignments().get(1));
verifyAssignment(Arrays.asList("3", "4", "5"), tracker.uncheckpointedAssignments().get(2));
}
@Test
void testSnapshotStateAndRestoreState() throws Exception {
SplitAssignmentTracker<MockSourceSplit> tracker = new SplitAssignmentTracker<>();
tracker.recordSplitAssignment(getSplitsAssignment(3, 0));
tracker.recordSplitAssignment(getSplitsAssignment(2, 6));
byte[] snapshotState = tracker.snapshotState(new MockSourceSplitSerializer());
SplitAssignmentTracker<MockSourceSplit> trackerToRestore = new SplitAssignmentTracker<>();
assertThat(trackerToRestore.uncheckpointedAssignments()).isEmpty();
trackerToRestore.restoreState(new MockSourceSplitSerializer(), snapshotState);
verifyAssignment(
Arrays.asList("0", "6"), trackerToRestore.uncheckpointedAssignments().get(0));
verifyAssignment(
Arrays.asList("1", "2", "7", "8"),
trackerToRestore.uncheckpointedAssignments().get(1));
verifyAssignment(
Arrays.asList("3", "4", "5"), trackerToRestore.uncheckpointedAssignments().get(2));
}
@Test
void testOnCheckpoint() throws Exception {
final long checkpointId = 123L;
SplitAssignmentTracker<MockSourceSplit> tracker = new SplitAssignmentTracker<>();
tracker.recordSplitAssignment(getSplitsAssignment(3, 0));
// Serialize
tracker.onCheckpoint(checkpointId);
// Verify the uncheckpointed assignments.
assertThat(tracker.uncheckpointedAssignments()).isEmpty();
// verify assignments put into the checkpoints.
Map<Long, Map<Integer, LinkedHashSet<MockSourceSplit>>> assignmentsByCheckpoints =
tracker.assignmentsByCheckpointId();
assertThat(assignmentsByCheckpoints.size()).isOne();
Map<Integer, LinkedHashSet<MockSourceSplit>> assignmentForCheckpoint =
assignmentsByCheckpoints.get(checkpointId);
assertThat(assignmentForCheckpoint).isNotNull();
verifyAssignment(Arrays.asList("0"), assignmentForCheckpoint.get(0));
verifyAssignment(Arrays.asList("1", "2"), assignmentForCheckpoint.get(1));
verifyAssignment(Arrays.asList("3", "4", "5"), assignmentForCheckpoint.get(2));
}
@Test
void testOnCheckpointComplete() throws Exception {
final long checkpointId1 = 100L;
final long checkpointId2 = 101L;
SplitAssignmentTracker<MockSourceSplit> tracker = new SplitAssignmentTracker<>();
// Assign some splits to subtask 0 and 1.
tracker.recordSplitAssignment(getSplitsAssignment(2, 0));
// Take the first snapshot.
tracker.onCheckpoint(checkpointId1);
verifyAssignment(
Arrays.asList("0"), tracker.assignmentsByCheckpointId(checkpointId1).get(0));
verifyAssignment(
Arrays.asList("1", "2"), tracker.assignmentsByCheckpointId(checkpointId1).get(1));
// Assign additional splits to subtask 0 and 1.
tracker.recordSplitAssignment(getSplitsAssignment(2, 3));
// Take the second snapshot.
tracker.onCheckpoint(checkpointId2);
verifyAssignment(
Arrays.asList("0"), tracker.assignmentsByCheckpointId(checkpointId1).get(0));
verifyAssignment(
Arrays.asList("1", "2"), tracker.assignmentsByCheckpointId(checkpointId1).get(1));
verifyAssignment(
Arrays.asList("3"), tracker.assignmentsByCheckpointId(checkpointId2).get(0));
verifyAssignment(
Arrays.asList("4", "5"), tracker.assignmentsByCheckpointId(checkpointId2).get(1));
// Complete the first checkpoint.
tracker.onCheckpointComplete(checkpointId1);
assertThat(tracker.assignmentsByCheckpointId(checkpointId1)).isNull();
verifyAssignment(
Arrays.asList("3"), tracker.assignmentsByCheckpointId(checkpointId2).get(0));
verifyAssignment(
Arrays.asList("4", "5"), tracker.assignmentsByCheckpointId(checkpointId2).get(1));
}
@Test
void testGetAndRemoveUncheckpointedAssignment() throws Exception {
final long checkpointId1 = 100L;
final long checkpointId2 = 101L;
SplitAssignmentTracker<MockSourceSplit> tracker = new SplitAssignmentTracker<>();
// Assign some splits and take snapshot 1.
tracker.recordSplitAssignment(getSplitsAssignment(2, 0));
tracker.onCheckpoint(checkpointId1);
// Assign some more splits and take snapshot 2.
tracker.recordSplitAssignment(getSplitsAssignment(2, 3));
tracker.onCheckpoint(checkpointId2);
// Now assume subtask 0 has failed.
List<MockSourceSplit> splitsToPutBack =
tracker.getAndRemoveUncheckpointedAssignment(0, checkpointId1 - 1);
verifyAssignment(Arrays.asList("0", "3"), splitsToPutBack);
}
@Test
void testGetAndRemoveSplitsAfterSomeCheckpoint() throws Exception {
final long checkpointId1 = 100L;
final long checkpointId2 = 101L;
SplitAssignmentTracker<MockSourceSplit> tracker = new SplitAssignmentTracker<>();
// Assign some splits and take snapshot 1.
tracker.recordSplitAssignment(getSplitsAssignment(2, 0));
tracker.onCheckpoint(checkpointId1);
// Assign some more splits and take snapshot 2.
tracker.recordSplitAssignment(getSplitsAssignment(2, 3));
tracker.onCheckpoint(checkpointId2);
// Now assume subtask 0 has failed.
List<MockSourceSplit> splitsToPutBack =
tracker.getAndRemoveUncheckpointedAssignment(0, checkpointId1);
verifyAssignment(Collections.singletonList("3"), splitsToPutBack);
}
// ---------------------
}
| SplitAssignmentTrackerTest |
java | spring-projects__spring-framework | spring-orm/src/main/java/org/springframework/orm/jpa/vendor/HibernateJpaDialect.java | {
"start": 2128,
"end": 8786
} | class ____ extends DefaultJpaDialect {
private final HibernateExceptionTranslator exceptionTranslator = new HibernateExceptionTranslator();
boolean prepareConnection = true;
/**
* Set whether to prepare the underlying JDBC Connection of a transactional
* Hibernate Session, that is, whether to apply a transaction-specific
* isolation level and/or the transaction's read-only flag to the underlying
* JDBC Connection.
* <p>Default is "true". If you turn this flag off, JPA transaction management
* will not support per-transaction isolation levels anymore. It will not call
* {@code Connection.setReadOnly(true)} for read-only transactions anymore either.
* If this flag is turned off, no cleanup of a JDBC Connection is required after
* a transaction, since no Connection settings will get modified.
* <p><b>NOTE:</b> The default behavior in terms of read-only handling changed
* in Spring 4.1, propagating the read-only status to the JDBC Connection now,
* analogous to other Spring transaction managers. This may have the effect
* that you're running into read-only enforcement now where previously write
* access has accidentally been tolerated: Please revise your transaction
* declarations accordingly, removing invalid read-only markers if necessary.
* @since 4.1
* @see java.sql.Connection#setTransactionIsolation
* @see java.sql.Connection#setReadOnly
*/
public void setPrepareConnection(boolean prepareConnection) {
this.prepareConnection = prepareConnection;
}
/**
* Set the JDBC exception translator for Hibernate exception translation purposes.
* <p>Applied to any detected {@link java.sql.SQLException} root cause of a Hibernate
* {@link JDBCException}, overriding Hibernate's own {@code SQLException} translation
* (which is based on a Hibernate Dialect for a specific target database).
* <p>As of 6.1, also applied to {@link org.hibernate.TransactionException} translation
* with a {@link SQLException} root cause (where Hibernate does not translate itself
* at all), overriding Spring's default {@link SQLExceptionSubclassTranslator} there.
* @param exceptionTranslator the {@link SQLExceptionTranslator} to delegate to, or
* {@code null} for none. By default, a {@link SQLExceptionSubclassTranslator} will
* be used for {@link org.hibernate.TransactionException} translation as of 6.1;
* this can be reverted to pre-6.1 behavior through setting {@code null} here.
* @since 5.1
* @see java.sql.SQLException
* @see org.hibernate.JDBCException
* @see org.springframework.jdbc.support.SQLExceptionSubclassTranslator
* @see org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator
*/
public void setJdbcExceptionTranslator(@Nullable SQLExceptionTranslator exceptionTranslator) {
this.exceptionTranslator.setJdbcExceptionTranslator(exceptionTranslator);
}
@Override
public Object beginTransaction(EntityManager entityManager, TransactionDefinition definition)
throws PersistenceException, SQLException, TransactionException {
SessionImplementor session = entityManager.unwrap(SessionImplementor.class);
if (definition.getTimeout() != TransactionDefinition.TIMEOUT_DEFAULT) {
session.getTransaction().setTimeout(definition.getTimeout());
}
boolean isolationLevelNeeded = (definition.getIsolationLevel() != TransactionDefinition.ISOLATION_DEFAULT);
Integer previousIsolationLevel = null;
Connection preparedCon = null;
if (isolationLevelNeeded || definition.isReadOnly()) {
if (this.prepareConnection && ConnectionReleaseMode.ON_CLOSE.equals(
session.getJdbcCoordinator().getLogicalConnection().getConnectionHandlingMode().getReleaseMode())) {
preparedCon = session.getJdbcCoordinator().getLogicalConnection().getPhysicalConnection();
previousIsolationLevel = DataSourceUtils.prepareConnectionForTransaction(preparedCon, definition);
}
else if (isolationLevelNeeded) {
throw new InvalidIsolationLevelException(
"HibernateJpaDialect is not allowed to support custom isolation levels: " +
"make sure that its 'prepareConnection' flag is on (the default) and that the " +
"Hibernate connection release mode is set to ON_CLOSE.");
}
}
// Standard JPA transaction begin call for full JPA context setup...
entityManager.getTransaction().begin();
// Adapt flush mode and store previous isolation level, if any.
FlushMode previousFlushMode = prepareFlushMode(session, definition.isReadOnly());
if (definition instanceof ResourceTransactionDefinition rtd && rtd.isLocalResource()) {
// As of 5.1, we explicitly optimize for a transaction-local EntityManager,
// aligned with native HibernateTransactionManager behavior.
previousFlushMode = null;
if (definition.isReadOnly()) {
session.setDefaultReadOnly(true);
}
}
return new SessionTransactionData(
session, previousFlushMode, (preparedCon != null), previousIsolationLevel, definition.isReadOnly());
}
@Override
public Object prepareTransaction(EntityManager entityManager, boolean readOnly, @Nullable String name)
throws PersistenceException {
SessionImplementor session = entityManager.unwrap(SessionImplementor.class);
FlushMode previousFlushMode = prepareFlushMode(session, readOnly);
return new SessionTransactionData(session, previousFlushMode, false, null, readOnly);
}
protected @Nullable FlushMode prepareFlushMode(Session session, boolean readOnly) throws PersistenceException {
FlushMode flushMode = session.getHibernateFlushMode();
if (readOnly) {
// We should suppress flushing for a read-only transaction.
if (!flushMode.equals(FlushMode.MANUAL)) {
session.setHibernateFlushMode(FlushMode.MANUAL);
return flushMode;
}
}
else {
// We need AUTO or COMMIT for a non-read-only transaction.
if (flushMode.lessThan(FlushMode.COMMIT)) {
session.setHibernateFlushMode(FlushMode.AUTO);
return flushMode;
}
}
// No FlushMode change needed...
return null;
}
@Override
public void cleanupTransaction(@Nullable Object transactionData) {
if (transactionData instanceof SessionTransactionData sessionTransactionData) {
sessionTransactionData.resetSessionState();
}
}
@Override
public ConnectionHandle getJdbcConnection(EntityManager entityManager, boolean readOnly)
throws PersistenceException, SQLException {
return new HibernateConnectionHandle(entityManager.unwrap(SessionImplementor.class));
}
@Override
public @Nullable DataAccessException translateExceptionIfPossible(RuntimeException ex) {
return this.exceptionTranslator.translateExceptionIfPossible(ex);
}
private static | HibernateJpaDialect |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TestExceptionCheckerTest.java | {
"start": 3279,
"end": 3823
} | class ____ {
@Test
public void test() throws Exception {
Path p = Paths.get("NOSUCH");
assertThrows(IOException.class, () -> Files.readAllBytes(p));
}
}
""")
.doTest();
}
@Test
public void oneStatement() {
testHelper
.addInputLines(
"in/ExceptionTest.java",
"""
import java.io.IOException;
import java.nio.file.*;
import org.junit.Test;
| ExceptionTest |
java | elastic__elasticsearch | test/logger-usage/src/main/java/org/elasticsearch/test/loggerusage/ESLoggerUsageChecker.java | {
"start": 1923,
"end": 6676
} | class ____ {
public static final Type LOGGER_CLASS = Type.getType(Logger.class);
public static final Type THROWABLE_CLASS = Type.getType(Throwable.class);
public static final Type STRING_CLASS = Type.getType(String.class);
public static final Type STRING_ARRAY_CLASS = Type.getType(String[].class);
public static final Type OBJECT_CLASS = Type.getType(Object.class);
public static final Type OBJECT_ARRAY_CLASS = Type.getType(Object[].class);
public static final Type SUPPLIER_ARRAY_CLASS = Type.getType(Supplier[].class);
public static final Type MARKER_CLASS = Type.getType(Marker.class);
public static final List<String> LOGGER_METHODS = Arrays.asList("trace", "debug", "info", "warn", "error", "fatal");
public static final String IGNORE_CHECKS_ANNOTATION = "org.elasticsearch.common.SuppressLoggerChecks";
// types which are subject to checking when used in logger. <code>TestMessage<code> is also declared here to
// make sure this functionality works
public static final Set<Type> CUSTOM_MESSAGE_TYPE = Set.of(Type.getObjectType("org/elasticsearch/common/logging/ESLogMessage"));
public static final Type PARAMETERIZED_MESSAGE_CLASS = Type.getType(ParameterizedMessage.class);
@SuppressForbidden(reason = "command line tool")
public static void main(String... args) throws Exception {
System.out.println("checking for wrong usages of ESLogger...");
boolean[] wrongUsageFound = new boolean[1];
checkLoggerUsage(wrongLoggerUsage -> {
System.err.println(wrongLoggerUsage.getErrorLines());
wrongUsageFound[0] = true;
}, args);
if (wrongUsageFound[0]) {
throw new Exception("Wrong logger usages found");
} else {
System.out.println("No wrong usages found");
}
}
private static void checkLoggerUsage(Consumer<WrongLoggerUsage> wrongUsageCallback, String... classDirectories) throws IOException {
for (String classDirectory : classDirectories) {
Path root = Paths.get(classDirectory);
if (Files.isDirectory(root) == false) {
throw new IllegalArgumentException(root + " should be an existing directory");
}
Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
if (Files.isRegularFile(file) && file.getFileName().toString().endsWith(".class")) {
try (InputStream in = Files.newInputStream(file)) {
ESLoggerUsageChecker.check(wrongUsageCallback, in);
}
}
return super.visitFile(file, attrs);
}
});
}
}
public static void check(Consumer<WrongLoggerUsage> wrongUsageCallback, InputStream inputStream) throws IOException {
check(wrongUsageCallback, inputStream, s -> true);
}
// used by tests
static void check(Consumer<WrongLoggerUsage> wrongUsageCallback, InputStream inputStream, Predicate<String> methodsToCheck)
throws IOException {
ClassReader cr = new ClassReader(inputStream);
cr.accept(new ClassChecker(wrongUsageCallback, methodsToCheck), 0);
}
public record WrongLoggerUsage(String className, String methodName, String logMethodName, int line, String errorMessage) {
/**
* Returns an error message that has the form of stack traces emitted by {@link Throwable#printStackTrace}
*/
public String getErrorLines() {
String fullClassName = Type.getObjectType(className).getClassName();
String simpleClassName = fullClassName.substring(fullClassName.lastIndexOf('.') + 1, fullClassName.length());
int innerClassIndex = simpleClassName.indexOf('$');
if (innerClassIndex > 0) {
simpleClassName = simpleClassName.substring(0, innerClassIndex);
}
simpleClassName = simpleClassName + ".java";
StringBuilder sb = new StringBuilder();
sb.append("Bad usage of ");
sb.append(LOGGER_CLASS.getClassName()).append("#").append(logMethodName);
sb.append(": ");
sb.append(errorMessage);
sb.append("\n\tat ");
sb.append(fullClassName);
sb.append(".");
sb.append(methodName);
sb.append("(");
sb.append(simpleClassName);
sb.append(":");
sb.append(line);
sb.append(")");
return sb.toString();
}
}
private static | ESLoggerUsageChecker |
java | resilience4j__resilience4j | resilience4j-ratelimiter/src/main/java/io/github/resilience4j/ratelimiter/internal/InMemoryRateLimiterRegistry.java | {
"start": 1406,
"end": 7337
} | class ____ extends
AbstractRegistry<RateLimiter, RateLimiterConfig> implements RateLimiterRegistry {
/**
* The constructor with default default.
*/
public InMemoryRateLimiterRegistry() {
this(RateLimiterConfig.ofDefaults());
}
public InMemoryRateLimiterRegistry(Map<String, RateLimiterConfig> configs) {
this(configs, emptyMap());
}
public InMemoryRateLimiterRegistry(Map<String, RateLimiterConfig> configs, Map<String, String> tags) {
this(configs.getOrDefault(DEFAULT_CONFIG, RateLimiterConfig.ofDefaults()), tags);
this.configurations.putAll(configs);
}
public InMemoryRateLimiterRegistry(Map<String, RateLimiterConfig> configs,
RegistryEventConsumer<RateLimiter> registryEventConsumer) {
this(configs, registryEventConsumer, emptyMap());
}
public InMemoryRateLimiterRegistry(Map<String, RateLimiterConfig> configs,
RegistryEventConsumer<RateLimiter> registryEventConsumer, Map<String, String> tags) {
this(configs.getOrDefault(DEFAULT_CONFIG, RateLimiterConfig.ofDefaults()),
registryEventConsumer, tags);
this.configurations.putAll(configs);
}
public InMemoryRateLimiterRegistry(Map<String, RateLimiterConfig> configs,
List<RegistryEventConsumer<RateLimiter>> registryEventConsumers) {
this(configs, registryEventConsumers, emptyMap());
}
public InMemoryRateLimiterRegistry(Map<String, RateLimiterConfig> configs,
List<RegistryEventConsumer<RateLimiter>> registryEventConsumers, Map<String, String> tags) {
this(configs.getOrDefault(DEFAULT_CONFIG, RateLimiterConfig.ofDefaults()),
registryEventConsumers, tags);
this.configurations.putAll(configs);
}
/**
* The constructor with custom default config.
*
* @param defaultConfig The default config.
*/
public InMemoryRateLimiterRegistry(RateLimiterConfig defaultConfig) {
super(defaultConfig);
}
public InMemoryRateLimiterRegistry(RateLimiterConfig defaultConfig, Map<String, String> tags) {
super(defaultConfig, tags);
}
public InMemoryRateLimiterRegistry(RateLimiterConfig defaultConfig,
RegistryEventConsumer<RateLimiter> registryEventConsumer) {
super(defaultConfig, registryEventConsumer);
}
public InMemoryRateLimiterRegistry(RateLimiterConfig defaultConfig,
RegistryEventConsumer<RateLimiter> registryEventConsumer, Map<String, String> tags) {
super(defaultConfig, registryEventConsumer, tags);
}
public InMemoryRateLimiterRegistry(RateLimiterConfig defaultConfig,
List<RegistryEventConsumer<RateLimiter>> registryEventConsumers) {
super(defaultConfig, registryEventConsumers);
}
public InMemoryRateLimiterRegistry(RateLimiterConfig defaultConfig,
List<RegistryEventConsumer<RateLimiter>> registryEventConsumers, Map<String, String> tags) {
super(defaultConfig, registryEventConsumers, tags);
}
public InMemoryRateLimiterRegistry(Map<String, RateLimiterConfig> configs,
List<RegistryEventConsumer<RateLimiter>> registryEventConsumers,
Map<String, String> tags, RegistryStore<RateLimiter> registryStore) {
super(configs.getOrDefault(DEFAULT_CONFIG, RateLimiterConfig.ofDefaults()),
registryEventConsumers, Optional.ofNullable(tags).orElse(emptyMap()),
Optional.ofNullable(registryStore).orElse(new InMemoryRegistryStore<>()));
this.configurations.putAll(configs);
}
/**
* {@inheritDoc}
*/
@Override
public Set<RateLimiter> getAllRateLimiters() {
return new HashSet<>(entryMap.values());
}
/**
* {@inheritDoc}
*/
@Override
public RateLimiter rateLimiter(final String name) {
return rateLimiter(name, getDefaultConfig());
}
/**
* {@inheritDoc}
*/
@Override
public RateLimiter rateLimiter(String name, Map<String, String> tags) {
return rateLimiter(name, getDefaultConfig(), tags);
}
/**
* {@inheritDoc}
*/
@Override
public RateLimiter rateLimiter(final String name, final RateLimiterConfig config) {
return rateLimiter(name, config, emptyMap());
}
/**
* {@inheritDoc}
*/
@Override
public RateLimiter rateLimiter(String name, RateLimiterConfig config, Map<String, String> tags) {
return computeIfAbsent(name, () -> new AtomicRateLimiter(name,
Objects.requireNonNull(config, CONFIG_MUST_NOT_BE_NULL), getAllTags(tags)));
}
/**
* {@inheritDoc}
*/
@Override
public RateLimiter rateLimiter(final String name,
final Supplier<RateLimiterConfig> rateLimiterConfigSupplier) {
return rateLimiter(name, rateLimiterConfigSupplier, emptyMap());
}
/**
* {@inheritDoc}
*/
@Override
public RateLimiter rateLimiter(String name,
Supplier<RateLimiterConfig> rateLimiterConfigSupplier, Map<String, String> tags) {
return computeIfAbsent(name, () -> new AtomicRateLimiter(name, Objects.requireNonNull(
Objects.requireNonNull(rateLimiterConfigSupplier, SUPPLIER_MUST_NOT_BE_NULL).get(),
CONFIG_MUST_NOT_BE_NULL), getAllTags(tags)));
}
/**
* {@inheritDoc}
*/
@Override
public RateLimiter rateLimiter(String name, String configName) {
return rateLimiter(name, configName, emptyMap());
}
/**
* {@inheritDoc}
*/
@Override
public RateLimiter rateLimiter(String name, String configName, Map<String, String> tags) {
return computeIfAbsent(name, () -> RateLimiter.of(name, getConfiguration(configName)
.orElseThrow(() -> new ConfigurationNotFoundException(configName)), getAllTags(tags)));
}
}
| InMemoryRateLimiterRegistry |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/util/NumericHistogram.java | {
"start": 2185,
"end": 2290
} | class ____ a histogram bin, which is just an (x,y) pair.
*
* @since 3.3.0
*/
public static | defines |
java | apache__kafka | clients/src/main/java/org/apache/kafka/clients/consumer/StickyAssignor.java | {
"start": 7015,
"end": 8288
} | class ____ implements ConsumerRebalanceListener {
* Collection<TopicPartition> lastAssignment = Collections.emptyList();
*
* void onPartitionsRevoked(Collection<TopicPartition> partitions) {
* for (TopicPartition partition: partitions)
* commitOffsets(partition);
* }
*
* void onPartitionsAssigned(Collection<TopicPartition> assignment) {
* for (TopicPartition partition: difference(lastAssignment, assignment))
* cleanupState(partition);
*
* for (TopicPartition partition: difference(assignment, lastAssignment))
* initializeState(partition);
*
* for (TopicPartition partition: assignment)
* initializeOffset(partition);
*
* this.lastAssignment = assignment;
* }
* }
* }
* </pre>
*
* Any consumer that uses sticky assignment can leverage this listener like this:
* <code>consumer.subscribe(topics, new TheNewRebalanceListener());</code>
*
* Note that you can leverage the {@link CooperativeStickyAssignor} so that only partitions which are being
* reassigned to another consumer will be revoked. That is the preferred assignor for newer cluster. See
* {@link ConsumerPartitionAssignor.RebalanceProtocol} for a detailed explanation of cooperative rebalancing.
*/
public | TheNewRebalanceListener |
java | apache__logging-log4j2 | log4j-jpa/src/main/java/org/apache/logging/log4j/core/appender/db/jpa/converter/ContextMapJsonAttributeConverter.java | {
"start": 1587,
"end": 2553
} | class ____ implements AttributeConverter<Map<String, String>, String> {
static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
@Override
public String convertToDatabaseColumn(final Map<String, String> contextMap) {
if (contextMap == null) {
return null;
}
try {
return OBJECT_MAPPER.writeValueAsString(contextMap);
} catch (final IOException e) {
throw new PersistenceException("Failed to convert map to JSON string.", e);
}
}
@Override
public Map<String, String> convertToEntityAttribute(final String s) {
if (Strings.isEmpty(s)) {
return null;
}
try {
return OBJECT_MAPPER.readValue(s, new TypeReference<Map<String, String>>() {});
} catch (final IOException e) {
throw new PersistenceException("Failed to convert JSON string to map.", e);
}
}
}
| ContextMapJsonAttributeConverter |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/support/jndi/ExampleBean.java | {
"start": 850,
"end": 1468
} | class ____ {
private String id;
private String name;
private double price;
@Override
public String toString() {
return "ExampleBean[name: " + name + " price: " + price + " id: " + id + "]";
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public double getPrice() {
return price;
}
public void setPrice(double price) {
this.price = price;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
}
| ExampleBean |
java | playframework__playframework | documentation/manual/working/javaGuide/main/http/code/javaguide/http/JavaBodyParsers.java | {
"start": 5335,
"end": 5563
} | class ____ extends MockJavaAction {
MaxLengthAction(JavaHandlerComponents javaHandlerComponents) {
super(javaHandlerComponents);
}
// #max-length
// Accept only 10KB of data.
public static | MaxLengthAction |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java | {
"start": 14510,
"end": 16732
} | class ____ extends IPCLoggerChannel {
private int rpcCount = 0;
private final Map<Integer, Callable<Void>> injections = Maps.newHashMap();
public InvocationCountingChannel(Configuration conf, NamespaceInfo nsInfo,
String journalId, InetSocketAddress addr) {
super(conf, nsInfo, journalId, addr);
}
int getRpcCount() {
return rpcCount;
}
void failIpcNumber(final int idx) {
Preconditions.checkArgument(idx > 0,
"id must be positive");
inject(idx, new Callable<Void>() {
@Override
public Void call() throws Exception {
throw new IOException("injected failed IPC at " + idx);
}
});
}
private void inject(int beforeRpcNumber, Callable<Void> injectedCode) {
injections.put(beforeRpcNumber, injectedCode);
}
@Override
protected QJournalProtocol createProxy() throws IOException {
final QJournalProtocol realProxy = super.createProxy();
QJournalProtocol mock = mockProxy(
new WrapEveryCall<Object>(realProxy) {
void beforeCall(InvocationOnMock invocation) throws Exception {
rpcCount++;
String param="";
for (Object val : invocation.getArguments()) {
param += val +",";
}
String callStr = "[" + addr + "] " +
invocation.getMethod().getName() + "(" +
param + ")";
Callable<Void> inject = injections.get(rpcCount);
if (inject != null) {
LOG.info("Injecting code before IPC #" + rpcCount + ": " +
callStr);
inject.call();
} else {
LOG.info("IPC call #" + rpcCount + ": " + callStr);
}
}
});
return mock;
}
}
private static QJournalProtocol mockProxy(WrapEveryCall<Object> wrapper)
throws IOException {
QJournalProtocol mock = Mockito.mock(QJournalProtocol.class,
Mockito.withSettings()
.defaultAnswer(wrapper)
.extraInterfaces(Closeable.class));
return mock;
}
private static abstract | InvocationCountingChannel |
java | elastic__elasticsearch | libs/ssl-config/src/main/java/org/elasticsearch/common/ssl/SslVerificationMode.java | {
"start": 700,
"end": 3105
} | enum ____ {
/**
* Verify neither the hostname, nor the provided certificate.
*/
NONE {
@Override
public boolean isHostnameVerificationEnabled() {
return false;
}
@Override
public boolean isCertificateVerificationEnabled() {
return false;
}
},
/**
* Verify the provided certificate against the trust chain, but do not verify the hostname.
*/
CERTIFICATE {
@Override
public boolean isHostnameVerificationEnabled() {
return false;
}
@Override
public boolean isCertificateVerificationEnabled() {
return true;
}
},
/**
* Verify the provided certificate against the trust chain, and also verify that the hostname to which this client is connected
* matches one of the Subject-Alternative-Names in the certificate.
*/
FULL {
@Override
public boolean isHostnameVerificationEnabled() {
return true;
}
@Override
public boolean isCertificateVerificationEnabled() {
return true;
}
};
/**
* @return true if hostname verification is enabled
*/
public abstract boolean isHostnameVerificationEnabled();
/**
* @return true if certificate verification is enabled
*/
public abstract boolean isCertificateVerificationEnabled();
private static final Map<String, SslVerificationMode> LOOKUP = Collections.unmodifiableMap(buildLookup());
private static Map<String, SslVerificationMode> buildLookup() {
Map<String, SslVerificationMode> map = new LinkedHashMap<>(3);
map.put("none", NONE);
map.put("certificate", CERTIFICATE);
map.put("full", FULL);
return map;
}
public static SslVerificationMode parse(String value) {
final SslVerificationMode mode = LOOKUP.get(value.toLowerCase(Locale.ROOT));
if (mode == null) {
final String allowedValues = String.join(",", LOOKUP.keySet());
throw new SslConfigException(
"could not resolve ssl client verification mode, unknown value ["
+ value
+ "], recognised values are ["
+ allowedValues
+ "]"
);
}
return mode;
}
}
| SslVerificationMode |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/internal/GeneratorBinder.java | {
"start": 19862,
"end": 20693
} | class ____ implements {@code Generator}
*/
private static <T extends Generator> T instantiateGeneratorAsBean(
BeanContainer beanContainer,
Class<T> generatorClass) {
return getBean(
beanContainer,
generatorClass,
false,
true,
() -> instantiateGeneratorViaDefaultConstructor( generatorClass )
);
}
/**
* Instantiate a {@link Generator} by calling an appropriate constructor,
* for the case where the generator was specified using a generator annotation.
* We look for three possible signatures:
* <ol>
* <li>{@code (Annotation, Member, GeneratorCreationContext)}</li>
* <li>{@code (Annotation)}</li>
* <li>{@code ()}</li>
* </ol>
* where {@code Annotation} is the generator annotation type.
*
* @param annotation the generator annotation
* @param generatorClass a | which |
java | apache__spark | sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/codegen/UnsafeArrayWriter.java | {
"start": 1287,
"end": 1474
} | class ____ write data into global row buffer using `UnsafeArrayData` format,
* used by {@link org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection}.
*/
public final | to |
java | alibaba__nacos | naming/src/main/java/com/alibaba/nacos/naming/healthcheck/heartbeat/ClientBeatUpdateTask.java | {
"start": 1014,
"end": 1539
} | class ____ extends AbstractExecuteTask {
private final IpPortBasedClient client;
public ClientBeatUpdateTask(IpPortBasedClient client) {
this.client = client;
}
@Override
public void run() {
long currentTime = System.currentTimeMillis();
for (InstancePublishInfo each : client.getAllInstancePublishInfo()) {
((HealthCheckInstancePublishInfo) each).setLastHeartBeatTime(currentTime);
}
client.setLastUpdatedTime();
}
}
| ClientBeatUpdateTask |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/ExecNodeMetadata.java | {
"start": 3436,
"end": 6126
} | class ____ contains
* an option (via key or fallback key) for the given key.
*
* <p>Restore can verify whether the restored ExecNode config map contains only options of the
* given keys.
*
* <p>Common options used for all {@link StreamExecNode}s:
*
* <ul>
* <li>{@link ExecutionConfigOptions#TABLE_EXEC_SIMPLIFY_OPERATOR_NAME_ENABLED}
* <li>{@link ExecutionConfigOptions#TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM}
* </ul>
*/
String[] consumedOptions() default {};
/**
* Set of transformation names that can be part of the resulting {@link Transformation}s.
*
* <p>Restore and completeness tests can verify there exists at least one test that adds each
* operator and that the created {@link Transformation}s contain only operators with {@link
* Transformation#getUid()} containing the given operator names.
*
* <p>The concrete combinations or existence of these operators in the final pipeline depends on
* various parameters (both configuration and ExecNode-specific arguments such as interval size
* etc.).
*/
String[] producedTransformations() default {};
/**
* Used for plan validation and potentially plan migration.
*
* <p>Needs to be updated when the JSON for the {@link ExecNode} changes: e.g. after adding an
* attribute to the JSON spec of the ExecNode.
*
* <p>The annotation does not need to be updated for every Flink version. As the name suggests
* it is about the "minimum" version for a restore. If the minimum version is higher than the
* current Flink version, plan migration is necessary.
*
* <p>Changing this version will always result in a new {@link #version()} for the {@link
* ExecNode}.
*
* <p>Plan migration tests can use this information.
*
* <p>Completeness tests can verify that restore tests exist for all JSON plan variations.
*/
FlinkVersion minPlanVersion();
/**
* Used for operator and potentially savepoint migration.
*
* <p>Needs to be updated whenever the state layout of an ExecNode changes. In some cases, the
* operator can implement and perform state migration. If the minimum version is higher than the
* current Flink version, savepoint migration is necessary.
*
* <p>Changing this version will always result in a new ExecNode {@link #version()}.
*
* <p>Restore tests can verify that operator migration works for all Flink state versions.
*
* <p>Completeness tests can verify that restore tests exist for all state variations.
*/
FlinkVersion minStateVersion();
}
| still |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/ImapsComponentBuilderFactory.java | {
"start": 35121,
"end": 42884
} | class ____
extends AbstractComponentBuilder<MailComponent>
implements ImapsComponentBuilder {
@Override
protected MailComponent buildConcreteComponent() {
return new MailComponent();
}
private org.apache.camel.component.mail.MailConfiguration getOrCreateConfiguration(MailComponent component) {
if (component.getConfiguration() == null) {
component.setConfiguration(new org.apache.camel.component.mail.MailConfiguration());
}
return component.getConfiguration();
}
@Override
protected boolean setPropertyOnComponent(
Component component,
String name,
Object value) {
switch (name) {
case "bridgeErrorHandler": ((MailComponent) component).setBridgeErrorHandler((boolean) value); return true;
case "closeFolder": getOrCreateConfiguration((MailComponent) component).setCloseFolder((boolean) value); return true;
case "copyTo": getOrCreateConfiguration((MailComponent) component).setCopyTo((java.lang.String) value); return true;
case "decodeFilename": getOrCreateConfiguration((MailComponent) component).setDecodeFilename((boolean) value); return true;
case "delete": getOrCreateConfiguration((MailComponent) component).setDelete((boolean) value); return true;
case "disconnect": getOrCreateConfiguration((MailComponent) component).setDisconnect((boolean) value); return true;
case "handleFailedMessage": getOrCreateConfiguration((MailComponent) component).setHandleFailedMessage((boolean) value); return true;
case "mimeDecodeHeaders": getOrCreateConfiguration((MailComponent) component).setMimeDecodeHeaders((boolean) value); return true;
case "moveTo": getOrCreateConfiguration((MailComponent) component).setMoveTo((java.lang.String) value); return true;
case "peek": getOrCreateConfiguration((MailComponent) component).setPeek((boolean) value); return true;
case "skipFailedMessage": getOrCreateConfiguration((MailComponent) component).setSkipFailedMessage((boolean) value); return true;
case "unseen": getOrCreateConfiguration((MailComponent) component).setUnseen((boolean) value); return true;
case "failOnDuplicateFileAttachment": getOrCreateConfiguration((MailComponent) component).setFailOnDuplicateFileAttachment((boolean) value); return true;
case "fetchSize": getOrCreateConfiguration((MailComponent) component).setFetchSize((int) value); return true;
case "folderName": getOrCreateConfiguration((MailComponent) component).setFolderName((java.lang.String) value); return true;
case "generateMissingAttachmentNames": getOrCreateConfiguration((MailComponent) component).setGenerateMissingAttachmentNames((java.lang.String) value); return true;
case "handleDuplicateAttachmentNames": getOrCreateConfiguration((MailComponent) component).setHandleDuplicateAttachmentNames((java.lang.String) value); return true;
case "mapMailMessage": getOrCreateConfiguration((MailComponent) component).setMapMailMessage((boolean) value); return true;
case "bcc": getOrCreateConfiguration((MailComponent) component).setBcc((java.lang.String) value); return true;
case "cc": getOrCreateConfiguration((MailComponent) component).setCc((java.lang.String) value); return true;
case "from": getOrCreateConfiguration((MailComponent) component).setFrom((java.lang.String) value); return true;
case "lazyStartProducer": ((MailComponent) component).setLazyStartProducer((boolean) value); return true;
case "replyTo": getOrCreateConfiguration((MailComponent) component).setReplyTo((java.lang.String) value); return true;
case "subject": getOrCreateConfiguration((MailComponent) component).setSubject((java.lang.String) value); return true;
case "to": getOrCreateConfiguration((MailComponent) component).setTo((java.lang.String) value); return true;
case "javaMailSender": getOrCreateConfiguration((MailComponent) component).setJavaMailSender((org.apache.camel.component.mail.JavaMailSender) value); return true;
case "additionalJavaMailProperties": getOrCreateConfiguration((MailComponent) component).setAdditionalJavaMailProperties((java.util.Properties) value); return true;
case "alternativeBodyHeader": getOrCreateConfiguration((MailComponent) component).setAlternativeBodyHeader((java.lang.String) value); return true;
case "attachmentsContentTransferEncodingResolver": getOrCreateConfiguration((MailComponent) component).setAttachmentsContentTransferEncodingResolver((org.apache.camel.component.mail.AttachmentsContentTransferEncodingResolver) value); return true;
case "authenticator": getOrCreateConfiguration((MailComponent) component).setAuthenticator((org.apache.camel.component.mail.MailAuthenticator) value); return true;
case "autowiredEnabled": ((MailComponent) component).setAutowiredEnabled((boolean) value); return true;
case "configuration": ((MailComponent) component).setConfiguration((org.apache.camel.component.mail.MailConfiguration) value); return true;
case "connectionTimeout": getOrCreateConfiguration((MailComponent) component).setConnectionTimeout((int) value); return true;
case "contentType": getOrCreateConfiguration((MailComponent) component).setContentType((java.lang.String) value); return true;
case "contentTypeResolver": ((MailComponent) component).setContentTypeResolver((org.apache.camel.component.mail.ContentTypeResolver) value); return true;
case "debugMode": getOrCreateConfiguration((MailComponent) component).setDebugMode((boolean) value); return true;
case "ignoreUnsupportedCharset": getOrCreateConfiguration((MailComponent) component).setIgnoreUnsupportedCharset((boolean) value); return true;
case "ignoreUriScheme": getOrCreateConfiguration((MailComponent) component).setIgnoreUriScheme((boolean) value); return true;
case "javaMailProperties": getOrCreateConfiguration((MailComponent) component).setJavaMailProperties((java.util.Properties) value); return true;
case "session": getOrCreateConfiguration((MailComponent) component).setSession((jakarta.mail.Session) value); return true;
case "useInlineAttachments": getOrCreateConfiguration((MailComponent) component).setUseInlineAttachments((boolean) value); return true;
case "headerFilterStrategy": ((MailComponent) component).setHeaderFilterStrategy((org.apache.camel.spi.HeaderFilterStrategy) value); return true;
case "healthCheckConsumerEnabled": ((MailComponent) component).setHealthCheckConsumerEnabled((boolean) value); return true;
case "healthCheckProducerEnabled": ((MailComponent) component).setHealthCheckProducerEnabled((boolean) value); return true;
case "password": getOrCreateConfiguration((MailComponent) component).setPassword((java.lang.String) value); return true;
case "sslContextParameters": getOrCreateConfiguration((MailComponent) component).setSslContextParameters((org.apache.camel.support.jsse.SSLContextParameters) value); return true;
case "useGlobalSslContextParameters": ((MailComponent) component).setUseGlobalSslContextParameters((boolean) value); return true;
case "username": getOrCreateConfiguration((MailComponent) component).setUsername((java.lang.String) value); return true;
default: return false;
}
}
}
} | ImapsComponentBuilderImpl |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/main/java/org/apache/camel/spring/xml/handler/CamelNamespaceHandler.java | {
"start": 14495,
"end": 15802
} | class ____ extends BeanDefinitionParser {
public RouteConfigurationContextDefinitionParser() {
super(CamelRouteConfigurationContextFactoryBean.class, false);
}
@Override
protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) {
doBeforeParse(element);
super.doParse(element, parserContext, builder);
// now lets parse the routes with JAXB
Binder<Node> binder;
try {
binder = getJaxbContext().createBinder();
} catch (JAXBException e) {
throw new BeanDefinitionStoreException("Failed to create the JAXB binder", e);
}
Object value = parseUsingJaxb(element, parserContext, binder);
if (value instanceof CamelRouteConfigurationContextFactoryBean) {
CamelRouteConfigurationContextFactoryBean factoryBean = (CamelRouteConfigurationContextFactoryBean) value;
builder.addPropertyValue("routeConfigurations", factoryBean.getRouteConfigurations());
}
// lets inject the namespaces into any namespace aware POJOs
injectNamespaces(element, binder);
}
}
protected | RouteConfigurationContextDefinitionParser |
java | reactor__reactor-core | reactor-core/src/jcstress/java/reactor/core/publisher/FluxWindowTimeoutStressTest.java | {
"start": 8095,
"end": 11535
} | class ____ {
final VirtualTimeScheduler virtualTimeScheduler = VirtualTimeScheduler.create();
StressSubscriber<Long> subscriber1 = new StressSubscriber<>();
StressSubscriber<Long> subscriber2 = new StressSubscriber<>();
StressSubscriber<Long> subscriber3 = new StressSubscriber<>();
StressSubscriber<Long> subscriber4 = new StressSubscriber<>();
final StressSubscriber<Flux<Long>> mainSubscriber =
new StressSubscriber<Flux<Long>>(3) {
int index = 0;
@Override
public void onNext(Flux<Long> window) {
super.onNext(window);
switch (index++) {
case 0:
window.subscribe(subscriber1);
break;
case 1:
window.subscribe(subscriber2);
break;
case 2:
window.subscribe(subscriber3);
break;
case 3:
window.subscribe(subscriber4);
break;
}
}
};
final FastLogger fastLogger =
new FastLogger("FluxWindowTimoutStressTest1_2");
final FluxWindowTimeout.WindowTimeoutWithBackpressureSubscriber<Long>
windowTimeoutSubscriber =
new FluxWindowTimeout.WindowTimeoutWithBackpressureSubscriber<>(
mainSubscriber,
2,
1,
TimeUnit.SECONDS,
virtualTimeScheduler,
new StateLogger(fastLogger));
final StressSubscription<Long> subscription =
new StressSubscription<>(windowTimeoutSubscriber);
{
windowTimeoutSubscriber.onSubscribe(subscription);
}
@Actor
public void next() {
windowTimeoutSubscriber.onNext(0L);
windowTimeoutSubscriber.onNext(1L);
windowTimeoutSubscriber.onComplete();
}
@Actor
public void advanceTime() {
virtualTimeScheduler.advanceTimeBy(Duration.ofSeconds(1));
}
@Actor
public void requestMain() {
mainSubscriber.request(1);
}
@Arbiter
public void arbiter(LLLLL_Result result) {
result.r1 =
subscriber1.onNextCalls.get() + subscriber2.onNextCalls.get() + subscriber3.onNextCalls.get() + subscriber4.onNextCalls.get();
result.r2 =
subscriber1.onCompleteCalls.get() + subscriber2.onCompleteCalls.get() + subscriber3.onCompleteCalls.get() + subscriber4.onCompleteCalls.get();
result.r3 = mainSubscriber.onNextCalls.get();
result.r4 = mainSubscriber.onCompleteCalls.get();
result.r5 = subscription.requested;
if (mainSubscriber.concurrentOnNext.get()) {
throw new IllegalStateException("mainSubscriber Concurrent OnNext " + result + "\n" + fastLogger, mainSubscriber.stacktraceOnNext);
}
if (mainSubscriber.concurrentOnComplete.get()) {
throw new IllegalStateException("mainSubscriber Concurrent OnComplete " + result + "\n" + fastLogger, mainSubscriber.stacktraceOnComplete);
}
if (mainSubscriber.onCompleteCalls.get() != 1) {
throw new IllegalStateException("unexpected completion " + mainSubscriber.onCompleteCalls.get() + "\n" + fastLogger);
}
}
}
@JCStressTest
@Outcome(id = {
"8, 4, 4, 1, 8",
"8, 5, 5, 1, 8",
"8, 5, 5, 1, 9",
"8, 5, 5, 1, 10",
"8, 6, 6, 1, 8",
"8, 6, 6, 1, 9",
"8, 6, 6, 1, 10",
"8, 7, 7, 1, 8",
"8, 7, 7, 1, 9",
"8, 7, 7, 1, 10",
"8, 8, 8, 1, 8",
"8, 8, 8, 1, 9",
"8, 8, 8, 1, 10",
"8, 9, 9, 1, 8",
"8, 9, 9, 1, 9",
"8, 9, 9, 1, 10",
"8, 9, 9, 0, 8",
"8, 9, 9, 0, 9",
"8, 9, 9, 0, 10",
}, expect = ACCEPTABLE, desc = "")
@State
public static | FluxWindowTimoutStressTest1_2 |
java | google__jimfs | jimfs/src/test/java/com/google/common/jimfs/UserDefinedAttributeProviderTest.java | {
"start": 1243,
"end": 4018
} | class ____
extends AbstractAttributeProviderTest<UserDefinedAttributeProvider> {
@Override
protected UserDefinedAttributeProvider createProvider() {
return new UserDefinedAttributeProvider();
}
@Override
protected Set<? extends AttributeProvider> createInheritedProviders() {
return ImmutableSet.of();
}
@Test
public void testInitialAttributes() {
// no initial attributes
assertThat(ImmutableList.copyOf(file.getAttributeKeys())).isEmpty();
assertThat(provider.attributes(file)).isEmpty();
}
@Test
public void testGettingAndSetting() {
byte[] bytes = {0, 1, 2, 3};
provider.set(file, "user", "one", bytes, false);
provider.set(file, "user", "two", ByteBuffer.wrap(bytes), false);
byte[] one = (byte[]) provider.get(file, "one");
byte[] two = (byte[]) provider.get(file, "two");
assertThat(Arrays.equals(one, bytes)).isTrue();
assertThat(Arrays.equals(two, bytes)).isTrue();
assertSetFails("foo", "hello");
assertThat(provider.attributes(file)).containsExactly("one", "two");
}
@Test
public void testSetOnCreate() {
assertSetFailsOnCreate("anything", new byte[0]);
}
@Test
public void testView() throws IOException {
UserDefinedFileAttributeView view = provider.view(fileLookup(), NO_INHERITED_VIEWS);
assertNotNull(view);
assertThat(view.name()).isEqualTo("user");
assertThat(view.list()).isEmpty();
byte[] b1 = {0, 1, 2};
byte[] b2 = {0, 1, 2, 3, 4};
view.write("b1", ByteBuffer.wrap(b1));
view.write("b2", ByteBuffer.wrap(b2));
assertThat(view.list()).containsAtLeast("b1", "b2");
assertThat(file.getAttributeKeys()).containsExactly("user:b1", "user:b2");
assertThat(view.size("b1")).isEqualTo(3);
assertThat(view.size("b2")).isEqualTo(5);
ByteBuffer buf1 = ByteBuffer.allocate(view.size("b1"));
ByteBuffer buf2 = ByteBuffer.allocate(view.size("b2"));
view.read("b1", buf1);
view.read("b2", buf2);
assertThat(Arrays.equals(b1, buf1.array())).isTrue();
assertThat(Arrays.equals(b2, buf2.array())).isTrue();
view.delete("b2");
assertThat(view.list()).containsExactly("b1");
assertThat(file.getAttributeKeys()).containsExactly("user:b1");
IllegalArgumentException expected =
assertThrows(IllegalArgumentException.class, () -> view.size("b2"));
assertThat(expected).hasMessageThat().contains("not set");
expected =
assertThrows(
IllegalArgumentException.class, () -> view.read("b2", ByteBuffer.allocate(10)));
assertThat(expected).hasMessageThat().contains("not set");
view.write("b1", ByteBuffer.wrap(b2));
assertThat(view.size("b1")).isEqualTo(5);
view.delete("b2"); // succeeds
}
}
| UserDefinedAttributeProviderTest |
java | spring-projects__spring-security | config/src/main/java/org/springframework/security/config/annotation/web/builders/HttpSecurity.java | {
"start": 11163,
"end": 11836
} | class ____ {
*
* @Bean
* public SecurityFilterChain securityFilterChain(HttpSecurity http) {
* http
* .headers((headers) -> headers.disable());
* return http.build();
* }
* }
* </pre>
*
* You can enable only a few of the headers by first invoking
* {@link HeadersConfigurer#defaultsDisabled()} and then invoking the appropriate
* methods on the {@link #headers(Customizer)} result. For example, the following will
* enable {@link HeadersConfigurer#cacheControl(Customizer)} and
* {@link HeadersConfigurer#frameOptions(Customizer)} only.
*
* <pre>
* @Configuration
* @EnableWebSecurity
* public | CsrfSecurityConfig |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/FilterOutputFormat.java | {
"start": 1298,
"end": 1394
} | class ____ wraps OutputFormat.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public | that |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/collection/map/MapMappingTest.java | {
"start": 953,
"end": 6941
} | class ____ {
@ProcessorTest
public void shouldCreateMapMethodImplementation() {
Map<Long, Date> values = new HashMap<>();
values.put( 42L, new GregorianCalendar( 1980, Calendar.JANUARY, 1 ).getTime() );
values.put( 121L, new GregorianCalendar( 2013, Calendar.JULY, 20 ).getTime() );
Map<String, String> target = SourceTargetMapper.INSTANCE.longDateMapToStringStringMap( values );
assertThat( target ).isNotNull();
assertThat( target ).hasSize( 2 );
assertThat( target ).contains(
entry( "42", "01.01.1980" ),
entry( "121", "20.07.2013" )
);
}
@ProcessorTest
public void shouldCreateReverseMapMethodImplementation() {
Map<String, String> values = createStringStringMap();
Map<Long, Date> target = SourceTargetMapper.INSTANCE.stringStringMapToLongDateMap( values );
assertResult( target );
}
@ProcessorTest
@IssueKey("19")
public void shouldCreateMapMethodImplementationWithTargetParameter() {
Map<String, String> values = createStringStringMap();
Map<Long, Date> target = new HashMap<>();
target.put( 66L, new GregorianCalendar( 2013, Calendar.AUGUST, 16 ).getTime() );
SourceTargetMapper.INSTANCE.stringStringMapToLongDateMapUsingTargetParameter( target, values );
assertResult( target );
}
@ProcessorTest
@IssueKey("19")
public void shouldCreateMapMethodImplementationWithReturnedTargetParameter() {
Map<String, String> values = createStringStringMap();
Map<Long, Date> target = new HashMap<>();
target.put( 66L, new GregorianCalendar( 2013, Calendar.AUGUST, 16 ).getTime() );
Map<Long, Date> returnedTarget = SourceTargetMapper.INSTANCE
.stringStringMapToLongDateMapUsingTargetParameterAndReturn( values, target );
assertThat( target ).isSameAs( returnedTarget );
assertResult( target );
}
@ProcessorTest
@IssueKey("1752")
public void shouldCreateMapMethodImplementationWithReturnedTargetParameterAndNullSource() {
Map<Long, Date> target = new HashMap<>();
target.put( 42L, new GregorianCalendar( 1980, Calendar.JANUARY, 1 ).getTime() );
target.put( 121L, new GregorianCalendar( 2013, Calendar.JULY, 20 ).getTime() );
Map<Long, Date> returnedTarget = SourceTargetMapper.INSTANCE
.stringStringMapToLongDateMapUsingTargetParameterAndReturn( null, target );
assertThat( target ).isSameAs( returnedTarget );
assertResult( target );
}
private void assertResult(Map<Long, Date> target) {
assertThat( target ).isNotNull();
assertThat( target ).hasSize( 2 );
assertThat( target ).contains(
entry( 42L, new GregorianCalendar( 1980, Calendar.JANUARY, 1 ).getTime() ),
entry( 121L, new GregorianCalendar( 2013, Calendar.JULY, 20 ).getTime() )
);
}
private Map<String, String> createStringStringMap() {
Map<String, String> values = new HashMap<>();
values.put( "42", "01.01.1980" );
values.put( "121", "20.07.2013" );
return values;
}
@ProcessorTest
public void shouldInvokeMapMethodImplementationForMapTypedProperty() {
Map<Long, Date> values = new HashMap<>();
values.put( 42L, new GregorianCalendar( 1980, Calendar.JANUARY, 1 ).getTime() );
values.put( 121L, new GregorianCalendar( 2013, Calendar.JULY, 20 ).getTime() );
Source source = new Source();
source.setValues( values );
source.setPublicValues( new HashMap<>( values ) );
Target target = SourceTargetMapper.INSTANCE.sourceToTarget( source );
assertThat( target ).isNotNull();
assertThat( target.getValues() ).isNotNull();
assertThat( target.getValues() ).hasSize( 2 );
assertThat( target.getValues() ).contains(
entry( "42", "01.01.1980" ),
entry( "121", "20.07.2013" )
);
assertThat( target.publicValues )
.isNotNull()
.hasSize( 2 )
.contains(
entry( "42", "01.01.1980" ),
entry( "121", "20.07.2013" )
);
}
@ProcessorTest
public void shouldInvokeReverseMapMethodImplementationForMapTypedProperty() {
Map<String, String> values = createStringStringMap();
Target target = new Target();
target.setValues( values );
target.publicValues = new HashMap<>( values );
Source source = SourceTargetMapper.INSTANCE.targetToSource( target );
assertThat( source ).isNotNull();
assertThat( source.getValues() ).isNotNull();
assertThat( source.getValues() ).hasSize( 2 );
assertThat( source.getValues() ).contains(
entry( 42L, new GregorianCalendar( 1980, Calendar.JANUARY, 1 ).getTime() ),
entry( 121L, new GregorianCalendar( 2013, Calendar.JULY, 20 ).getTime() )
);
assertThat( source.getPublicValues() )
.isNotNull()
.hasSize( 2 )
.contains(
entry( 42L, new GregorianCalendar( 1980, Calendar.JANUARY, 1 ).getTime() ),
entry( 121L, new GregorianCalendar( 2013, Calendar.JULY, 20 ).getTime() )
);
}
private Map<Integer, Integer> createIntIntMap() {
Map<Integer, Integer> values = new HashMap<>();
values.put( 42, 47 );
values.put( 121, 123 );
return values;
}
@ProcessorTest
@IssueKey("87")
public void shouldCreateMapMethodImplementationWithoutConversionOrElementMappingMethod() {
Map<Integer, Integer> values = createIntIntMap();
Map<Number, Number> target = SourceTargetMapper.INSTANCE.intIntToNumberNumberMap( values );
assertThat( target ).isNotNull();
assertThat( target ).hasSize( 2 );
assertThat( target ).isEqualTo( values );
}
}
| MapMappingTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/compress/SnappyCompression.java | {
"start": 1360,
"end": 3097
} | class ____ implements Compression {
private SnappyCompression() {}
@Override
public CompressionType type() {
return CompressionType.SNAPPY;
}
@Override
public OutputStream wrapForOutput(ByteBufferOutputStream bufferStream, byte messageVersion) {
try {
return new SnappyOutputStream(bufferStream);
} catch (Throwable e) {
throw new KafkaException(e);
}
}
@Override
public InputStream wrapForInput(ByteBuffer buffer, byte messageVersion, BufferSupplier decompressionBufferSupplier) {
// SnappyInputStream uses default implementation of InputStream for skip. Default implementation of
// SnappyInputStream allocates a new skip buffer every time, hence, we prefer our own implementation.
try {
return new ChunkedBytesStream(new SnappyInputStream(new ByteBufferInputStream(buffer)),
decompressionBufferSupplier,
decompressionOutputSize(),
false);
} catch (Throwable e) {
throw new KafkaException(e);
}
}
@Override
public int decompressionOutputSize() {
// SnappyInputStream already uses an intermediate buffer internally. The size
// of this buffer is based on legacy implementation based on skipArray introduced in
// https://github.com/apache/kafka/pull/6785
return 2 * 1024; // 2KB
}
@Override
public boolean equals(Object o) {
return o instanceof SnappyCompression;
}
@Override
public int hashCode() {
return super.hashCode();
}
public static | SnappyCompression |
java | junit-team__junit5 | junit-platform-console/src/main/java/org/junit/platform/console/options/TestDiscoveryOptionsMixin.java | {
"start": 1498,
"end": 2159
} | class ____ {
private static final String CP_OPTION = "cp";
@ArgGroup(validate = false, order = 2, heading = "%n@|bold SELECTORS|@%n%n")
SelectorOptions selectorOptions;
@ArgGroup(validate = false, order = 3, heading = "%n For more information on selectors including syntax examples, see"
+ "%n @|underline https://docs.junit.org/${junit.docs.version}/user-guide/#running-tests-discovery-selectors|@"
+ "%n%n@|bold FILTERS|@%n%n")
FilterOptions filterOptions;
@ArgGroup(validate = false, order = 4, heading = "%n@|bold RUNTIME CONFIGURATION|@%n%n")
RuntimeConfigurationOptions runtimeConfigurationOptions;
public static | TestDiscoveryOptionsMixin |
java | mapstruct__mapstruct | processor/src/main/java/org/mapstruct/ap/internal/conversion/JavaLocalDateToStringConversion.java | {
"start": 317,
"end": 504
} | class ____ extends AbstractJavaTimeToStringConversion {
@Override
protected String defaultFormatterSuffix() {
return "ISO_LOCAL_DATE";
}
}
| JavaLocalDateToStringConversion |
java | google__guava | android/guava/src/com/google/common/graph/GraphConnections.java | {
"start": 1261,
"end": 2444
} | class ____ connected to.
*/
Iterator<EndpointPair<N>> incidentEdgeIterator(N thisNode);
/**
* Returns the value associated with the edge connecting the origin node to {@code node}, or null
* if there is no such edge.
*/
@Nullable V value(N node);
/** Remove {@code node} from the set of predecessors. */
void removePredecessor(N node);
/**
* Remove {@code node} from the set of successors. Returns the value previously associated with
* the edge connecting the two nodes.
*/
@CanIgnoreReturnValue
@Nullable V removeSuccessor(N node);
/**
* Add {@code node} as a predecessor to the origin node. In the case of an undirected graph, it
* also becomes a successor. Associates {@code value} with the edge connecting the two nodes.
*/
void addPredecessor(N node, V value);
/**
* Add {@code node} as a successor to the origin node. In the case of an undirected graph, it also
* becomes a predecessor. Associates {@code value} with the edge connecting the two nodes. Returns
* the value previously associated with the edge connecting the two nodes.
*/
@CanIgnoreReturnValue
@Nullable V addSuccessor(N node, V value);
}
| are |
java | google__guava | android/guava/src/com/google/common/base/CharMatcher.java | {
"start": 8345,
"end": 8760
} | class ____.
* @since 19.0 (since 1.0 as constant {@code JAVA_LETTER})
*/
@Deprecated
public static CharMatcher javaLetter() {
return JavaLetter.INSTANCE;
}
/**
* Determines whether a character is a BMP letter or digit according to {@linkplain
* Character#isLetterOrDigit(char) Java's definition}.
*
* @deprecated Most letters and digits are supplementary characters; see the | documentation |
java | quarkusio__quarkus | extensions/container-image/container-image-openshift/deployment/src/main/java/io/quarkus/container/image/openshift/deployment/OpenshiftProcessor.java | {
"start": 4043,
"end": 34911
} | class ____ {
public static final String OPENSHIFT = "openshift";
private static final String BUILD_CONFIG_NAME = "openshift.io/build-config.name";
private static final String RUNNING = "Running";
private static final String JAVA_APP_JAR = "JAVA_APP_JAR";
private static final String OPENSHIFT_INTERNAL_REGISTRY = "openshift-image-registry";
private static final int LOG_TAIL_SIZE = 10;
private static final Logger LOG = Logger.getLogger(OpenshiftProcessor.class);
/**
 * Advertises this extension as an available container-image backend under the name {@code openshift}.
 */
@BuildStep
public AvailableContainerImageExtensionBuildItem availability() {
    return new AvailableContainerImageExtensionBuildItem(OPENSHIFT);
}
/**
 * For JVM (non-native) Docker-strategy builds, wires the user's JVM Dockerfile into the
 * BuildConfig and strips env vars the Dockerfile-based image cannot rely on.
 */
@BuildStep(onlyIf = { OpenshiftBuild.class }, onlyIfNot = NativeBuild.class)
public void openshiftPrepareJvmDockerBuild(ContainerImageOpenshiftConfig config,
        OutputTargetBuildItem out,
        BuildProducer<DecoratorBuildItem> decorator) {
    if (config.buildStrategy() == BuildStrategy.DOCKER) {
        decorator.produce(new DecoratorBuildItem(new ApplyDockerfileToBuildConfigDecorator(null,
                findMainSourcesRoot(out.getOutputDirectory()).getValue().resolve(config.jvmDockerfile()))));
        //When using the docker build strategy, we can't possibly know these values, so it's the image responsibility to work without them.
        decorator.produce(new DecoratorBuildItem(new RemoveEnvVarDecorator(null, "JAVA_APP_JAR")));
        decorator.produce(new DecoratorBuildItem(new RemoveEnvVarDecorator(null, "JAVA_APP_LIB")));
    }
}
/**
 * For native builds, wires the native Dockerfile into the BuildConfig (Docker strategy only)
 * and always removes the jar-related env vars, which are meaningless for a native binary.
 */
@BuildStep(onlyIf = { OpenshiftBuild.class, NativeBuild.class })
public void openshiftPrepareNativeDockerBuild(ContainerImageOpenshiftConfig config,
        OutputTargetBuildItem out,
        BuildProducer<DecoratorBuildItem> decorator) {
    if (config.buildStrategy() == BuildStrategy.DOCKER) {
        decorator.produce(new DecoratorBuildItem(new ApplyDockerfileToBuildConfigDecorator(null,
                findMainSourcesRoot(out.getOutputDirectory()).getValue().resolve(config.nativeDockerfile()))));
    }
    //Let's remove this for all kinds of native build
    decorator.produce(new DecoratorBuildItem(new RemoveEnvVarDecorator(null, "JAVA_APP_JAR")));
    decorator.produce(new DecoratorBuildItem(new RemoveEnvVarDecorator(null, "JAVA_APP_LIB")));
}
/**
 * Computes the JVM base image and, for binary-strategy builds with a customized jar
 * location or JVM args, produces the explicit container command and JAVA_APP_JAR env var.
 */
@BuildStep(onlyIf = { IsNormalNotRemoteDev.class, OpenshiftBuild.class }, onlyIfNot = NativeBuild.class)
public void openshiftRequirementsJvm(ContainerImageOpenshiftConfig config,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        OutputTargetBuildItem out,
        PackageConfig packageConfig,
        JarBuildItem jarBuildItem,
        CompiledJavaVersionBuildItem compiledJavaVersion,
        BuildProducer<DecoratorBuildItem> decorator,
        BuildProducer<KubernetesEnvBuildItem> envProducer,
        BuildProducer<BaseImageInfoBuildItem> builderImageProducer,
        BuildProducer<KubernetesCommandBuildItem> commandProducer) {
    String outputJarFileName = jarBuildItem.getPath().getFileName().toString();
    String jarFileName = config.jarFileName().orElse(outputJarFileName);
    // Base image: user's choice, else the default matching the compiled bytecode level.
    String baseJvmImage = config.baseJvmImage()
            .orElse(ContainerImageOpenshiftConfig.getDefaultJvmImage(compiledJavaVersion.getJavaVersion()));
    boolean hasCustomJarPath = config.jarFileName().isPresent() || config.jarDirectory().isPresent();
    boolean hasCustomJvmArguments = config.jvmArguments().isPresent();
    builderImageProducer.produce(new BaseImageInfoBuildItem(baseJvmImage));
    if (config.buildStrategy() == BuildStrategy.BINARY) {
        // Jar directory priorities:
        // 1. explicitly specified by the user.
        // 2. fallback value
        String jarDirectory = config.jarDirectory().orElse(ContainerImageOpenshiftConfig.FALLBACK_JAR_DIRECTORY);
        String pathToJar = concatUnixPaths(jarDirectory, jarFileName);
        //In all other cases its the responsibility of the image to set those up correctly.
        if (hasCustomJarPath || hasCustomJvmArguments) {
            List<String> cmd = new ArrayList<>();
            cmd.add("java");
            cmd.addAll(config.getEffectiveJvmArguments());
            cmd.addAll(Arrays.asList("-jar", pathToJar));
            envProducer.produce(KubernetesEnvBuildItem.createSimpleVar(JAVA_APP_JAR, pathToJar, null));
            commandProducer.produce(KubernetesCommandBuildItem.command(cmd));
        }
    }
}
/**
 * Computes the native base image and binary location and, when the user customized the
 * binary path or arguments, produces the explicit container command.
 *
 * Fixes over the previous revision:
 * - access the static FALLBACK_NATIVE_BINARY_DIRECTORY via the class, not an instance;
 * - guard {@code nativeArguments()} with {@code orElse} — the old unconditional
 *   {@code .get()} threw NoSuchElementException when only a custom path was configured.
 */
@BuildStep(onlyIf = { IsNormalNotRemoteDev.class, OpenshiftBuild.class, NativeBuild.class })
public void openshiftRequirementsNative(ContainerImageOpenshiftConfig config,
        CurateOutcomeBuildItem curateOutcomeBuildItem,
        OutputTargetBuildItem out,
        PackageConfig packageConfig,
        NativeImageBuildItem nativeImage,
        BuildProducer<KubernetesEnvBuildItem> envProducer,
        BuildProducer<BaseImageInfoBuildItem> builderImageProducer,
        BuildProducer<KubernetesCommandBuildItem> commandProducer) {
    boolean usingDefaultBuilder = ImageUtil.getRepository(ContainerImages.QUARKUS_BINARY_S2I)
            .equals(ImageUtil.getRepository(config.baseNativeImage()));
    String outputNativeBinaryFileName = nativeImage.getPath().getFileName().toString();
    String nativeBinaryFileName = null;
    boolean hasCustomNativePath = config.nativeBinaryFileName().isPresent() || config.nativeBinaryDirectory().isPresent();
    boolean hasCustomNativeArguments = config.nativeArguments().isPresent();
    //The default openshift builder for native builds, renames the native binary.
    //To make things easier for the user, we need to handle it.
    if (usingDefaultBuilder && !config.nativeBinaryFileName().isPresent()) {
        nativeBinaryFileName = ContainerImageOpenshiftConfig.DEFAULT_NATIVE_TARGET_FILENAME;
    } else {
        nativeBinaryFileName = config.nativeBinaryFileName().orElse(outputNativeBinaryFileName);
    }
    if (config.buildStrategy() == BuildStrategy.BINARY) {
        builderImageProducer.produce(new BaseImageInfoBuildItem(config.baseNativeImage()));
        // Native binary directory priorities:
        // 1. explicitly specified by the user.
        // 2. fallback value
        String nativeBinaryDirectory = config.nativeBinaryDirectory()
                .orElse(ContainerImageOpenshiftConfig.FALLBACK_NATIVE_BINARY_DIRECTORY);
        String pathToNativeBinary = concatUnixPaths(nativeBinaryDirectory, nativeBinaryFileName);
        if (hasCustomNativePath || hasCustomNativeArguments) {
            commandProducer.produce(KubernetesCommandBuildItem.commandWithArgs(pathToNativeBinary,
                    config.nativeArguments().orElse(Collections.emptyList())));
        }
    }
}
/**
 * When an external registry is configured, wires push credentials into the BuildConfig:
 * an explicit push secret if given, nothing for the in-cluster registry, or a generated
 * docker-config secret from username/password; otherwise warns. Always points the
 * ImageStream at the registry-qualified repository.
 *
 * Cleanup over the previous revision: removed the unused {@code serviceAccountName}
 * local and reused the existing {@code name} local instead of repeated
 * {@code applicationInfo.getName()} calls (identical values, same behavior).
 */
@BuildStep(onlyIf = { OpenshiftBuild.class })
public void configureExternalRegistry(ApplicationInfoBuildItem applicationInfo,
        ContainerImageOpenshiftConfig openshiftConfig,
        ContainerImageInfoBuildItem containerImageInfo,
        BuildProducer<DecoratorBuildItem> decorator) {
    containerImageInfo.registry.ifPresent(registry -> {
        final String name = applicationInfo.getName();
        String repositoryWithRegistry = registry + "/" + containerImageInfo.getRepository();
        if (openshiftConfig.imagePushSecret().isPresent()) {
            //if a push secret has been specified, we need to apply it.
            String imagePushSecret = openshiftConfig.imagePushSecret().get();
            decorator.produce(new DecoratorBuildItem(OPENSHIFT, new ApplyDockerImageOutputToBuildConfigDecorator(
                    name, containerImageInfo.getImage(), imagePushSecret)));
        } else if (registry.contains(OPENSHIFT_INTERNAL_REGISTRY)) {
            //no special handling of secrets is really needed.
        } else if (containerImageInfo.username.isPresent() && containerImageInfo.password.isPresent()) {
            // Synthesize a docker-config-json secret from the provided credentials.
            String imagePushSecret = name + "-push-secret";
            decorator.produce(new DecoratorBuildItem(OPENSHIFT,
                    new AddDockerConfigJsonSecretDecorator(imagePushSecret, containerImageInfo.registry.get(),
                            containerImageInfo.username.get(), containerImageInfo.password.get())));
            decorator.produce(new DecoratorBuildItem(OPENSHIFT, new ApplyDockerImageOutputToBuildConfigDecorator(
                    name, containerImageInfo.getImage(), imagePushSecret)));
        } else {
            LOG.warn("An external image registry has been specified, but no push secret or credentials.");
        }
        decorator.produce(new DecoratorBuildItem(OPENSHIFT,
                new ApplyDockerImageRepositoryToImageStream(name, repositoryWithRegistry)));
    });
}
/**
 * Runs the in-cluster OpenShift build for a jar-packaged application: bails out unless a
 * build/push was requested, locates the generated openshift.yml manifests, then packages
 * the jar (and, for legacy jars, the lib directory) into a tarball and triggers the build.
 */
@BuildStep(onlyIf = { IsNormalNotRemoteDev.class, OpenshiftBuild.class }, onlyIfNot = NativeBuild.class)
public void openshiftBuildFromJar(ContainerImageOpenshiftConfig config,
        ContainerImageConfig containerImageConfig,
        KubernetesClientBuildItem kubernetesClientBuilder,
        ContainerImageInfoBuildItem containerImage,
        ArchiveRootBuildItem archiveRoot, OutputTargetBuildItem out, PackageConfig packageConfig,
        List<GeneratedFileSystemResourceBuildItem> generatedResources,
        Optional<ContainerImageBuildRequestBuildItem> buildRequest,
        Optional<ContainerImagePushRequestBuildItem> pushRequest,
        BuildProducer<ArtifactResultBuildItem> artifactResultProducer,
        BuildProducer<ContainerImageBuilderBuildItem> containerImageBuilder,
        // used to ensure that the jar has been built
        JarBuildItem jar) {
    // Skip entirely when image building is disabled, or when neither enabled nor requested.
    if (containerImageConfig.isBuildExplicitlyDisabled()) {
        return;
    }
    if (!containerImageConfig.isBuildExplicitlyEnabled() && !containerImageConfig.isPushExplicitlyEnabled()
            && !buildRequest.isPresent() && !pushRequest.isPresent()) {
        return;
    }
    Optional<GeneratedFileSystemResourceBuildItem> openshiftYml = generatedResources
            .stream()
            .filter(r -> r.getName().endsWith(File.separator + "openshift.yml"))
            .findFirst();
    if (openshiftYml.isEmpty()) {
        LOG.warn(
                "No OpenShift manifests were generated so no OpenShift build process will be taking place");
        return;
    }
    try (KubernetesClient kubernetesClient = buildClient(kubernetesClientBuilder)) {
        String namespace = Optional.ofNullable(kubernetesClient.getNamespace()).orElse("default");
        LOG.info("Starting (in-cluster) container image build for jar using: " + config.buildStrategy() + " on server: "
                + kubernetesClient.getMasterUrl() + " in namespace:" + namespace + ".");
        //The contextRoot is where inside the tarball we will add the jars. A null value means everything will be added under '/' while "target" means everything will be added under '/target'.
        //For docker kind of builds where we use instructions like: `COPY target/*.jar /deployments` it using '/target' is a requirement.
        //For s2i kind of builds where jars are expected directly in the '/' we have to use null.
        String outputDirName = out.getOutputDirectory().getFileName().toString();
        PackageConfig.JarConfig.JarType jarType = packageConfig.jar().type();
        String contextRoot = getContextRoot(outputDirName, jarType == FAST_JAR || jarType == MUTABLE_JAR,
                config.buildStrategy());
        KubernetesClientBuilder clientBuilder = newClientBuilderWithoutHttp2(kubernetesClient.getConfiguration(),
                kubernetesClientBuilder.getHttpClientFactory());
        // fast/mutable jars ship the whole quarkus-app directory; other layouts ship the jar
        // itself plus (when present) the library directory.
        if (jarType == FAST_JAR || jarType == MUTABLE_JAR) {
            createContainerImage(clientBuilder, openshiftYml.get(), config, contextRoot, jar.getPath().getParent(),
                    jar.getPath().getParent());
        } else if (jar.getLibraryDir() != null) { //When using uber-jar the libraryDir is going to be null, potentially causing NPE.
            createContainerImage(clientBuilder, openshiftYml.get(), config, contextRoot, jar.getPath().getParent(),
                    jar.getPath(), jar.getLibraryDir());
        } else {
            createContainerImage(clientBuilder, openshiftYml.get(), config, contextRoot, jar.getPath().getParent(),
                    jar.getPath());
        }
        artifactResultProducer.produce(new ArtifactResultBuildItem(null, "jar-container", Collections.emptyMap()));
        containerImageBuilder.produce(new ContainerImageBuilderBuildItem(OPENSHIFT));
    }
}
/**
 * Returns the directory prefix used inside the build tarball: {@code null} for non-Docker
 * strategies (content goes at '/'), the output dir name for legacy jars, or
 * output-dir/quarkus-app for fast/mutable jars.
 */
private String getContextRoot(String outputDirName, boolean isFastJar, BuildStrategy buildStrategy) {
    if (buildStrategy == BuildStrategy.DOCKER) {
        return isFastJar
                ? outputDirName + "/" + DEFAULT_FAST_JAR_DIRECTORY_NAME
                : outputDirName;
    }
    return null;
}
/**
 * Runs the in-cluster OpenShift build for a native-image application: bails out unless a
 * build/push was requested, locates the generated openshift.yml manifests, then packages
 * the native binary into a tarball and triggers the build.
 */
@BuildStep(onlyIf = { IsNormalNotRemoteDev.class, OpenshiftBuild.class, NativeBuild.class })
public void openshiftBuildFromNative(ContainerImageOpenshiftConfig config,
        ContainerImageConfig containerImageConfig,
        KubernetesClientBuildItem kubernetesClientBuilder,
        ContainerImageInfoBuildItem containerImage,
        ArchiveRootBuildItem archiveRoot, OutputTargetBuildItem out, PackageConfig packageConfig,
        List<GeneratedFileSystemResourceBuildItem> generatedResources,
        Optional<ContainerImageBuildRequestBuildItem> buildRequest,
        Optional<ContainerImagePushRequestBuildItem> pushRequest,
        BuildProducer<ArtifactResultBuildItem> artifactResultProducer,
        BuildProducer<ContainerImageBuilderBuildItem> containerImageBuilder,
        NativeImageBuildItem nativeImage) {
    // Skip entirely when image building is disabled, or when neither enabled nor requested.
    if (containerImageConfig.isBuildExplicitlyDisabled()) {
        return;
    }
    if (!containerImageConfig.isBuildExplicitlyEnabled() && !containerImageConfig.isPushExplicitlyEnabled()
            && !buildRequest.isPresent() && !pushRequest.isPresent()) {
        return;
    }
    try (KubernetesClient kubernetesClient = buildClient(kubernetesClientBuilder)) {
        String namespace = Optional.ofNullable(kubernetesClient.getNamespace()).orElse("default");
        LOG.info("Starting (in-cluster) container image build for jar using: " + config.buildStrategy() + " on server: "
                + kubernetesClient.getMasterUrl() + " in namespace:" + namespace + ".");
        Optional<GeneratedFileSystemResourceBuildItem> openshiftYml = generatedResources
                .stream()
                .filter(r -> r.getName().endsWith(File.separator + "openshift.yml"))
                .findFirst();
        if (openshiftYml.isEmpty()) {
            LOG.warn(
                    "No OpenShift manifests were generated so no OpenShift build process will be taking place");
            return;
        }
        //The contextRoot is where inside the tarball we will add the jars. A null value means everything will be added under '/' while "target" means everything will be added under '/target'.
        //For docker kind of builds where we use instructions like: `COPY target/*.jar /deployments` it using '/target' is a requirement.
        //For s2i kind of builds where jars are expected directly in the '/' we have to use null.
        String contextRoot = config.buildStrategy() == BuildStrategy.DOCKER ? "target" : null;
        createContainerImage(
                newClientBuilderWithoutHttp2(kubernetesClient.getConfiguration(),
                        kubernetesClientBuilder.getHttpClientFactory()),
                openshiftYml.get(), config, contextRoot, out.getOutputDirectory(), nativeImage.getPath());
        artifactResultProducer.produce(new ArtifactResultBuildItem(null, "native-container", Collections.emptyMap()));
        containerImageBuilder.produce(new ContainerImageBuilderBuildItem(OPENSHIFT));
    }
}
/**
 * Packages the given paths into a tar archive, applies the build-related manifests
 * (BuildConfig / ImageStream / Secret) to the cluster, and runs the binary build.
 *
 * Fix over the previous revision: temp-file cleanup used {@code File.delete()}, whose
 * boolean result was ignored inside a catch that could never fire — failures were silent.
 * {@link java.nio.file.Files#deleteIfExists} throws on failure, so the warning now fires.
 *
 * @param kubernetesClientBuilder builder used to create short-lived clients
 * @param openshiftManifests      the generated openshift.yml content
 * @param openshiftConfig         extension configuration
 * @param base                    context root inside the tarball (may be null, see callers)
 * @param output                  directory whose content is packaged
 * @param additional              extra paths to include in the archive
 */
public static void createContainerImage(KubernetesClientBuilder kubernetesClientBuilder,
        GeneratedFileSystemResourceBuildItem openshiftManifests,
        ContainerImageOpenshiftConfig openshiftConfig,
        String base,
        Path output,
        Path... additional) {
    File tar;
    try {
        File original = Packaging.packageFile(output, base, additional);
        //Let's rename the archive and give it a more descriptive name, as it may appear in the logs.
        tar = Files.createTempFile("quarkus-", "-openshift").toFile();
        Files.move(original.toPath(), tar.toPath(), StandardCopyOption.REPLACE_EXISTING);
    } catch (Exception e) {
        throw new RuntimeException("Error creating the openshift binary build archive.", e);
    }
    try (KubernetesClient client = kubernetesClientBuilder.build()) {
        OpenShiftClient openShiftClient = toOpenshiftClient(client);
        KubernetesList kubernetesList = Serialization
                .unmarshalAsList(new ByteArrayInputStream(openshiftManifests.getData()));
        // Only build-related resources are applied here; deployment manifests are handled elsewhere.
        List<HasMetadata> buildResources = kubernetesList.getItems().stream()
                .filter(i -> i instanceof BuildConfig || i instanceof ImageStream || i instanceof Secret)
                .collect(Collectors.toList());
        applyOpenshiftResources(openShiftClient, buildResources);
        openshiftBuild(buildResources, tar, openshiftConfig, kubernetesClientBuilder);
    } finally {
        try {
            Files.deleteIfExists(tar.toPath());
        } catch (Exception e) {
            LOG.warn("Unable to delete temporary file " + tar.toPath().toAbsolutePath(), e);
        }
    }
}
/**
 * Adapts a generic Kubernetes client to an OpenShift client, routing adaptation failures
 * (e.g. not talking to an OpenShift cluster) through the shared error handler.
 */
private static OpenShiftClient toOpenshiftClient(KubernetesClient client) {
    try {
        return client.adapt(OpenShiftClient.class);
    } catch (KubernetesClientException e) {
        // handle() is expected to rethrow; the return below only satisfies the compiler.
        KubernetesClientErrorHandler.handle(e);
        return null; // will never happen
    }
}
/**
 * Apply the openshift resources and wait until ImageStreamTags are created.
 *
 * @param client the client instance
 * @param buildResources resources to apply
 */
private static void applyOpenshiftResources(OpenShiftClient client, List<HasMetadata> buildResources) {
    // Apply build resource requirements
    try {
        // Deduplicate by apiVersion/kind:name before applying.
        for (HasMetadata i : distinct(buildResources)) {
            deployResource(client, i);
            LOG.info("Applied: " + i.getKind() + " " + i.getMetadata().getName());
        }
        try {
            OpenshiftUtils.waitForImageStreamTags(client, buildResources, 2, TimeUnit.MINUTES);
        } catch (KubernetesClientException e) {
            //User may not have permission to get / list `ImageStreamTag` or this step may fail for any reason.
            //As this is not an integral part of the build we should catch and log.
            LOG.debug("Waiting for ImageStream tag failed. Ignoring.");
        }
    } catch (KubernetesClientException e) {
        // Apply failures are fatal: delegate to the shared handler (which rethrows).
        KubernetesClientErrorHandler.handle(e);
    }
}
/**
 * Starts a binary build for every (distinct) BuildConfig among the given resources and
 * blocks until each build finishes.
 */
private static void openshiftBuild(List<HasMetadata> buildResources, File binaryFile,
        ContainerImageOpenshiftConfig openshiftConfig, KubernetesClientBuilder kubernetesClientBuilder) {
    for (HasMetadata resource : distinct(buildResources)) {
        if (resource instanceof BuildConfig) {
            BuildConfig buildConfig = (BuildConfig) resource;
            Build build = startOpenshiftBuild(buildConfig, binaryFile, openshiftConfig, kubernetesClientBuilder);
            waitForOpenshiftBuild(build, openshiftConfig, kubernetesClientBuilder);
        }
    }
}
/**
 * Performs the binary build of the specified {@link BuildConfig} with the given
 * binary input.
 *
 * @param buildConfig The build config
 * @param binaryFile The binary file
 * @param openshiftConfig The openshift configuration
 * @param kubernetesClientBuilder The kubernetes client builder
 * @return the started {@link Build}
 */
private static Build startOpenshiftBuild(BuildConfig buildConfig, File binaryFile,
        ContainerImageOpenshiftConfig openshiftConfig, KubernetesClientBuilder kubernetesClientBuilder) {
    try (KubernetesClient kubernetesClient = kubernetesClientBuilder.build()) {
        OpenShiftClient client = toOpenshiftClient(kubernetesClient);
        try {
            return client.buildConfigs().withName(buildConfig.getMetadata().getName())
                    .instantiateBinary()
                    .withTimeoutInMillis(openshiftConfig.buildTimeout().toMillis())
                    .fromFile(binaryFile);
        } catch (Exception e) {
            // The instantiate call can fail (e.g. connection reset) even though the server
            // accepted the build; if a build is already running, adopt it instead of failing.
            Optional<Build> running = buildsOf(client, buildConfig).stream().findFirst();
            if (running.isPresent()) {
                LOG.warn("An exception: '" + e.getMessage()
                        + " ' occurred while instantiating the build, however the build has been started.");
                return running.get();
            } else {
                throw openshiftException(e);
            }
        }
    }
}
/**
 * Polls the given build until it leaves the New/Pending/Running phases, streaming its log
 * to the local logger in the meantime. Throws IllegalStateException when the build
 * disappears, loses its status, is cancelled, fails, or errors.
 */
private static void waitForOpenshiftBuild(Build build, ContainerImageOpenshiftConfig openshiftConfig,
        KubernetesClientBuilder kubernetesClientBuilder) {
    while (isNew(build) || isPending(build) || isRunning(build)) {
        final String buildName = build.getMetadata().getName();
        // A fresh client per iteration keeps watches short-lived.
        try (KubernetesClient kubernetesClient = kubernetesClientBuilder.build()) {
            OpenShiftClient client = toOpenshiftClient(kubernetesClient);
            Build updated = client.builds().withName(buildName).get();
            if (updated == null) {
                throw new IllegalStateException("Build:" + build.getMetadata().getName() + " is no longer present!");
            } else if (updated.getStatus() == null) {
                throw new IllegalStateException("Build:" + build.getMetadata().getName() + " has no status!");
            } else if (isNew(updated) || isPending(updated) || isRunning(updated)) {
                build = updated;
                // Stream the build log until it closes; watchLog blocks this loop iteration.
                try (LogWatch w = client.builds().withName(buildName).withPrettyOutput().watchLog();
                        Reader reader = new InputStreamReader(w.getOutput())) {
                    display(reader, openshiftConfig.buildLogLevel());
                } catch (IOException | KubernetesClientException ex) {
                    // This may happen if the LogWatch is closed while we are still reading.
                    // We shouldn't let the build fail, so let's log a warning and display last few lines of the log
                    LOG.warn("Log stream closed, redisplaying last " + LOG_TAIL_SIZE + " entries:");
                    try {
                        display(client.builds().withName(buildName).tailingLines(LOG_TAIL_SIZE).getLogReader(),
                                Logger.Level.WARN);
                    } catch (IOException | KubernetesClientException ignored) {
                        // Let's ignore this.
                    }
                }
            } else if (isComplete(updated)) {
                return;
            } else if (isCancelled(updated)) {
                throw new IllegalStateException("Build:" + buildName + " cancelled!");
            } else if (isFailed(updated)) {
                throw new IllegalStateException(
                        "Build:" + buildName + " failed! " + updated.getStatus().getMessage());
            } else if (isError(updated)) {
                throw new IllegalStateException(
                        "Build:" + buildName + " encountered error! " + updated.getStatus().getMessage());
            }
        }
    }
}
/**
 * Returns a stateful predicate that accepts each resource only the first time its
 * apiVersion/kind:name key is seen. Thread-safe via ConcurrentHashMap.
 */
public static Predicate<HasMetadata> distinctByResourceKey() {
    Map<Object, Boolean> seen = new ConcurrentHashMap<>();
    return resource -> {
        String key = resource.getApiVersion() + "/" + resource.getKind() + ":" + resource.getMetadata().getName();
        return seen.putIfAbsent(key, Boolean.TRUE) == null;
    };
}

/** Returns the given resources with duplicates (by apiVersion/kind:name) removed. */
private static Collection<HasMetadata> distinct(Collection<HasMetadata> resources) {
    Collection<HasMetadata> unique = resources.stream()
            .filter(distinctByResourceKey())
            .collect(Collectors.toList());
    return unique;
}
/** Lists the builds spawned by the given BuildConfig, matched via the build-config.name label. */
private static List<Build> buildsOf(OpenShiftClient client, BuildConfig config) {
    String buildConfigName = config.getMetadata().getName();
    return client.builds().withLabel(BUILD_CONFIG_NAME, buildConfigName).list().getItems();
}
/**
 * Wraps a build failure into a RuntimeException, first giving the shared handler a chance
 * to translate client errors (handle() may itself throw).
 */
private static RuntimeException openshiftException(Throwable t) {
    if (t instanceof KubernetesClientException) {
        KubernetesClientErrorHandler.handle((KubernetesClientException) t);
    }
    return new RuntimeException("Execution of openshift build failed. See build output for more details", t);
}
/** Streams the given reader line by line to the logger at the requested level. */
private static void display(Reader logReader, Logger.Level level) throws IOException {
    BufferedReader buffered = new BufferedReader(logReader);
    String line = buffered.readLine();
    while (line != null) {
        LOG.log(level, line);
        line = buffered.readLine();
    }
}
/**
 * Builds a client builder with HTTP/2 disabled.
 * Note: mutates the passed-in {@code configuration} (side effect visible to the caller).
 */
private static KubernetesClientBuilder newClientBuilderWithoutHttp2(Config configuration,
        HttpClient.Factory httpClientFactory) {
    //Let's disable http2 as it causes issues with duplicate build triggers.
    configuration.setHttp2Disable(true);
    return new KubernetesClientBuilder().withConfig(configuration).withHttpClientFactory(httpClientFactory);
}
/**
 * Creates a client from the build item, overriding its namespace with the deployment
 * target namespace when one is configured.
 */
private static KubernetesClient buildClient(KubernetesClientBuildItem kubernetesClientBuilder) {
    getNamespace().ifPresent(kubernetesClientBuilder.getConfig()::setNamespace);
    return kubernetesClientBuilder.buildClient();
}
/**
 * Applies a single resource to the cluster according to the configured deploy strategy,
 * with two special cases: BuildConfigs are deleted first (except under create-or-update),
 * and an ImageStream already pointing at the same repository is left untouched.
 */
private static void deployResource(OpenShiftClient client, HasMetadata metadata) {
    DeployStrategy deployStrategy = getDeployStrategy();
    var r = client.resource(metadata);
    // Delete the build config if it already existed, unless the deploy strategy is create-or-update.
    if (deployStrategy != DeployStrategy.CreateOrUpdate && r instanceof BuildConfig) {
        deleteBuildConfig(client, metadata, r);
    }
    // If the image stream is already installed, we proceed with the next.
    // NOTE(review): `r` is the resource handle returned by client.resource(metadata); whether
    // it can ever be an instanceof ImageStream/BuildConfig depends on the client version —
    // verify these checks should not be against `metadata` instead.
    if (r instanceof ImageStream) {
        ImageStream is = (ImageStream) r;
        ImageStream existing = client.imageStreams().withName(metadata.getMetadata().getName()).get();
        if (existing != null &&
                existing.getSpec() != null &&
                existing.getSpec().getDockerImageRepository() != null &&
                existing.getSpec().getDockerImageRepository().equals(is.getSpec().getDockerImageRepository())) {
            LOG.info("Found: " + metadata.getKind() + " " + metadata.getMetadata().getName() + " repository: "
                    + existing.getSpec().getDockerImageRepository());
            return;
        }
    }
    // Deploy the current resource.
    switch (deployStrategy) {
        case Create:
            r.create();
            break;
        case Replace:
            r.replace();
            break;
        case ServerSideApply:
            r.patch(PatchContext.of(PatchType.SERVER_SIDE_APPLY));
            break;
        default:
            r.createOrReplace();
            break;
    }
}
/**
 * Cascading-deletes the given BuildConfig and waits (up to 10s) for it to disappear,
 * so a subsequent create does not race the deletion.
 */
private static void deleteBuildConfig(OpenShiftClient client, HasMetadata metadata, NamespaceableResource<HasMetadata> r) {
    r.cascading(true).delete();
    try {
        // Condition "resource is gone"; the client signals deletion via an exception (see below).
        client.resource(metadata).waitUntilCondition(d -> d == null, 10, TimeUnit.SECONDS);
    } catch (IllegalArgumentException e) {
        // We should ignore that, as its expected to be thrown when item is actually
        // deleted.
    }
}
// visible for test
/**
 * Joins path elements with single '/' separators, Unix style: one trailing slash is
 * stripped from each element, empty elements are skipped, and a separator is inserted
 * only when the next element does not already start with '/'.
 */
static String concatUnixPaths(String... elements) {
    StringBuilder joined = new StringBuilder();
    for (String element : elements) {
        String part = element.endsWith("/")
                ? element.substring(0, element.length() - 1)
                : element;
        if (part.isEmpty()) {
            continue;
        }
        if (joined.length() > 0 && !part.startsWith("/")) {
            joined.append('/');
        }
        joined.append(part);
    }
    return joined.toString();
}
// Phase predicates for OpenShift Build objects. The null-safe comparison previously
// appeared seven times verbatim; it now lives in a single private helper.

/** True when the build exists, has a status, and is in the New phase. */
static boolean isNew(Build build) {
    return hasPhase(build, BuildStatus.New);
}

/** True when the build exists, has a status, and is in the Pending phase. */
static boolean isPending(Build build) {
    return hasPhase(build, BuildStatus.Pending);
}

/** True when the build exists, has a status, and is in the Running phase. */
static boolean isRunning(Build build) {
    return hasPhase(build, BuildStatus.Running);
}

/** True when the build exists, has a status, and is in the Complete phase. */
static boolean isComplete(Build build) {
    return hasPhase(build, BuildStatus.Complete);
}

/** True when the build exists, has a status, and is in the Failed phase. */
static boolean isFailed(Build build) {
    return hasPhase(build, BuildStatus.Failed);
}

/** True when the build exists, has a status, and is in the Error phase. */
static boolean isError(Build build) {
    return hasPhase(build, BuildStatus.Error);
}

/** True when the build exists, has a status, and is in the Cancelled phase. */
static boolean isCancelled(Build build) {
    return hasPhase(build, BuildStatus.Cancelled);
}

/** Null-safe, case-insensitive comparison of the build's status phase against the given one. */
private static boolean hasPhase(Build build, BuildStatus status) {
    return build != null && build.getStatus() != null
            && status.name().equalsIgnoreCase(build.getStatus().getPhase());
}
}
| OpenshiftProcessor |
java | junit-team__junit5 | junit-platform-commons/src/main/java/org/junit/platform/commons/support/ReflectionSupport.java | {
"start": 31354,
"end": 31674
} | class ____ be searched; never {@code null}
* @param predicate the predicate against which the list of nested classes is
* checked; never {@code null}
* @return an immutable list of all such classes found; never {@code null}
* but potentially empty
* @throws JUnitException if a cycle is detected within an inner | to |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/SpringAiChatEndpointBuilderFactory.java | {
"start": 35297,
"end": 41340
} | class ____ {
/**
* The internal instance of the builder used to access to all the
* methods representing the name of headers.
*/
private static final SpringAiChatHeaderNameBuilder INSTANCE = new SpringAiChatHeaderNameBuilder();
/**
* The response from the chat model.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatResponse}.
*/
public String springAiChatResponse() {
return "CamelSpringAiChatResponse";
}
/**
* The number of input tokens used.
*
* The option is a: {@code Integer} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiInputTokenCount}.
*/
public String springAiInputTokenCount() {
return "CamelSpringAiInputTokenCount";
}
/**
* The number of output tokens used.
*
* The option is a: {@code Integer} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiOutputTokenCount}.
*/
public String springAiOutputTokenCount() {
return "CamelSpringAiOutputTokenCount";
}
/**
* The total number of tokens used.
*
* The option is a: {@code Integer} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiTotalTokenCount}.
*/
public String springAiTotalTokenCount() {
return "CamelSpringAiTotalTokenCount";
}
/**
* The prompt template with placeholders for variable substitution.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatPromptTemplate}.
*/
public String springAiChatPromptTemplate() {
return "CamelSpringAiChatPromptTemplate";
}
/**
* Augmented data for RAG as List.
*
* The option is a: {@code
* java.util.List<org.springframework.ai.document.Document>} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatAugmentedData}.
*/
public String springAiChatAugmentedData() {
return "CamelSpringAiChatAugmentedData";
}
/**
* System message for the conversation.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatSystemMessage}.
*/
public String springAiChatSystemMessage() {
return "CamelSpringAiChatSystemMessage";
}
/**
* Temperature parameter for response randomness (0.0-2.0).
*
* The option is a: {@code Double} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatTemperature}.
*/
public String springAiChatTemperature() {
return "CamelSpringAiChatTemperature";
}
/**
* Maximum tokens in the response.
*
* The option is a: {@code Integer} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatMaxTokens}.
*/
public String springAiChatMaxTokens() {
return "CamelSpringAiChatMaxTokens";
}
/**
* Top P parameter for nucleus sampling.
*
* The option is a: {@code Double} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatTopP}.
*/
public String springAiChatTopP() {
return "CamelSpringAiChatTopP";
}
/**
* Top K parameter for sampling.
*
* The option is a: {@code Integer} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatTopK}.
*/
public String springAiChatTopK() {
return "CamelSpringAiChatTopK";
}
/**
* User message text for multimodal requests.
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatUserMessage}.
*/
public String springAiChatUserMessage() {
return "CamelSpringAiChatUserMessage";
}
/**
* Media data for multimodal requests (image or audio).
*
* The option is a: {@code byte[]} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatMediaData}.
*/
public String springAiChatMediaData() {
return "CamelSpringAiChatMediaData";
}
/**
* Media type (MIME type) for multimodal requests (e.g., image/png,
* audio/wav).
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatMediaType}.
*/
public String springAiChatMediaType() {
return "CamelSpringAiChatMediaType";
}
/**
* The output format type for structured output conversion (BEAN, MAP,
* LIST).
*
* The option is a: {@code String} type.
*
* Group: producer
*
* @return the name of the header {@code SpringAiChatOutputFormat}.
*/
public String springAiChatOutputFormat() {
return "CamelSpringAiChatOutputFormat";
}
/**
* The Java | SpringAiChatHeaderNameBuilder |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/SubtypeSensitiveMatchingTests.java | {
"start": 2377,
"end": 2508
} | class ____ implements NonSerializableFoo {
@Override
public void foo() {}
}
@SuppressWarnings("serial")
| SubtypeMatchingTestClassA |
java | apache__kafka | shell/src/main/java/org/apache/kafka/shell/command/PwdCommandHandler.java | {
"start": 1217,
"end": 1357
} | class ____ implements Commands.Handler {
public static final Commands.Type TYPE = new PwdCommandType();
public static | PwdCommandHandler |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/targetclass/mixed/PostConstructInterceptorNestingTest.java | {
"start": 1843,
"end": 2228
} | class ____ {
@PostConstruct
void postConstruct(InvocationContext ctx) throws Exception {
try {
MyBean.invocations.add(MyInterceptor.class.getSimpleName());
ctx.proceed();
} catch (IllegalArgumentException e) {
MyBean.invocations.add("expected-exception");
}
}
}
}
| MyInterceptor |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MissingRefasterAnnotationTest.java | {
"start": 2818,
"end": 3378
} | class ____<K, V> {
// @Placeholder is missing
abstract V function(K key);
@BeforeTemplate
void before(Map<K, V> map, K key) {
if (!map.containsKey(key)) {
map.put(key, function(key));
}
}
@AfterTemplate
void after(Map<K, V> map, K key) {
map.computeIfAbsent(key, k -> function(k));
}
}
static final | MethodLacksPlaceholderAnnotation |
java | micronaut-projects__micronaut-core | core-reactive/src/main/java/io/micronaut/core/async/publisher/CompletableFuturePublisher.java | {
"start": 1105,
"end": 1771
} | class ____<T> implements Publishers.MicronautPublisher<T> {
private final Supplier<CompletableFuture<T>> futureSupplier;
/**
* @param futureSupplier The function that supplies the future.
*/
CompletableFuturePublisher(Supplier<CompletableFuture<T>> futureSupplier) {
this.futureSupplier = futureSupplier;
}
@Override
public final void subscribe(Subscriber<? super T> subscriber) {
Objects.requireNonNull(subscriber, "Subscriber cannot be null");
subscriber.onSubscribe(new CompletableFutureSubscription(subscriber));
}
/**
* CompletableFuture subscription.
*/
| CompletableFuturePublisher |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/client/OAuth2LoginConfigurerTests.java | {
"start": 44292,
"end": 45073
} | class ____ extends CommonSecurityFilterChainConfig {
private ClientRegistrationRepository clientRegistrationRepository = new InMemoryClientRegistrationRepository(
GOOGLE_CLIENT_REGISTRATION);
OAuth2AuthorizationRequestResolver resolver = mock(OAuth2AuthorizationRequestResolver.class);
@Bean
SecurityFilterChain filterChain(HttpSecurity http) throws Exception {
// @formatter:off
http
.oauth2Login((login) -> login
.clientRegistrationRepository(this.clientRegistrationRepository)
.authorizationEndpoint((authorize) -> authorize
.authorizationRequestResolver(this.resolver)));
// @formatter:on
return super.configureFilterChain(http);
}
}
@Configuration
@EnableWebSecurity
static | OAuth2LoginConfigCustomAuthorizationRequestResolver |
java | elastic__elasticsearch | x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestGetSearchApplicationAction.java | {
"start": 937,
"end": 1796
} | class ____ extends EnterpriseSearchBaseRestHandler {
public RestGetSearchApplicationAction(XPackLicenseState licenseState) {
super(licenseState, LicenseUtils.Product.SEARCH_APPLICATION);
}
@Override
public String getName() {
return "search_application_action";
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/" + EnterpriseSearch.SEARCH_APPLICATION_API_ENDPOINT + "/{name}"));
}
@Override
protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeClient client) {
GetSearchApplicationAction.Request request = new GetSearchApplicationAction.Request(restRequest.param("name"));
return channel -> client.execute(GetSearchApplicationAction.INSTANCE, request, new RestToXContentListener<>(channel));
}
}
| RestGetSearchApplicationAction |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/logging/slf4j/Slf4jLocationAwareLoggerImpl.java | {
"start": 911,
"end": 2063
} | class ____ implements Log {
private static final Marker MARKER = MarkerFactory.getMarker(LogFactory.MARKER);
private static final String FQCN = Slf4jImpl.class.getName();
private final LocationAwareLogger logger;
Slf4jLocationAwareLoggerImpl(LocationAwareLogger logger) {
this.logger = logger;
}
@Override
public boolean isDebugEnabled() {
return logger.isDebugEnabled();
}
@Override
public boolean isTraceEnabled() {
return logger.isTraceEnabled();
}
@Override
public void error(String s, Throwable e) {
logger.log(MARKER, FQCN, LocationAwareLogger.ERROR_INT, s, null, e);
}
@Override
public void error(String s) {
logger.log(MARKER, FQCN, LocationAwareLogger.ERROR_INT, s, null, null);
}
@Override
public void debug(String s) {
logger.log(MARKER, FQCN, LocationAwareLogger.DEBUG_INT, s, null, null);
}
@Override
public void trace(String s) {
logger.log(MARKER, FQCN, LocationAwareLogger.TRACE_INT, s, null, null);
}
@Override
public void warn(String s) {
logger.log(MARKER, FQCN, LocationAwareLogger.WARN_INT, s, null, null);
}
}
| Slf4jLocationAwareLoggerImpl |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/InstanceOfAssertFactoriesTest.java | {
"start": 44538,
"end": 45188
} | class ____ {
private final Object actual = new Object[][] { { 0, "" }, { 3.0, 'b' } };
@Test
void createAssert() {
// WHEN
Object2DArrayAssert<Object> result = ARRAY_2D.createAssert(actual);
// THEN
result.hasDimensions(2, 2);
}
@Test
void createAssert_with_ValueProvider() {
// GIVEN
ValueProvider<?> valueProvider = mockThatDelegatesTo(type -> actual);
// WHEN
Object2DArrayAssert<Object> result = ARRAY_2D.createAssert(valueProvider);
// THEN
result.hasDimensions(2, 2);
verify(valueProvider).apply(Object[][].class);
}
}
@Nested
| Array_2D_Factory |
java | grpc__grpc-java | grpclb/src/main/java/io/grpc/grpclb/GrpclbLoadBalancerProvider.java | {
"start": 1434,
"end": 4432
} | class ____ extends LoadBalancerProvider {
private static final Mode DEFAULT_MODE = Mode.ROUND_ROBIN;
@Override
public boolean isAvailable() {
return true;
}
@Override
public int getPriority() {
return 5;
}
@Override
public String getPolicyName() {
return "grpclb";
}
@Override
public LoadBalancer newLoadBalancer(LoadBalancer.Helper helper) {
return
new GrpclbLoadBalancer(
helper,
Context.ROOT,
new CachedSubchannelPool(helper),
TimeProvider.SYSTEM_TIME_PROVIDER,
Stopwatch.createUnstarted(),
new ExponentialBackoffPolicy.Provider());
}
@Override
public ConfigOrError parseLoadBalancingPolicyConfig(
Map<String, ?> rawLoadBalancingConfigPolicy) {
try {
return parseLoadBalancingConfigPolicyInternal(rawLoadBalancingConfigPolicy);
} catch (RuntimeException e) {
return ConfigOrError.fromError(
Status.fromThrowable(e).withDescription(
"Failed to parse GRPCLB config: " + rawLoadBalancingConfigPolicy));
}
}
ConfigOrError parseLoadBalancingConfigPolicyInternal(
Map<String, ?> rawLoadBalancingPolicyConfig) {
if (rawLoadBalancingPolicyConfig == null) {
return ConfigOrError.fromConfig(GrpclbConfig.create(DEFAULT_MODE));
}
String serviceName = JsonUtil.getString(rawLoadBalancingPolicyConfig, "serviceName");
List<?> rawChildPolicies = JsonUtil.getList(rawLoadBalancingPolicyConfig, "childPolicy");
Long initialFallbackTimeoutNs =
JsonUtil.getStringAsDuration(rawLoadBalancingPolicyConfig, "initialFallbackTimeout");
long timeoutMs = GrpclbState.FALLBACK_TIMEOUT_MS;
if (initialFallbackTimeoutNs != null) {
timeoutMs = initialFallbackTimeoutNs / 1000000;
}
List<LbConfig> childPolicies = null;
if (rawChildPolicies != null) {
childPolicies =
ServiceConfigUtil
.unwrapLoadBalancingConfigList(JsonUtil.checkObjectList(rawChildPolicies));
}
if (childPolicies == null || childPolicies.isEmpty()) {
return ConfigOrError.fromConfig(
GrpclbConfig.create(DEFAULT_MODE, serviceName, timeoutMs));
}
List<String> policiesTried = new ArrayList<>();
for (LbConfig childPolicy : childPolicies) {
String childPolicyName = childPolicy.getPolicyName();
switch (childPolicyName) {
case "round_robin":
return ConfigOrError.fromConfig(
GrpclbConfig.create(Mode.ROUND_ROBIN, serviceName, timeoutMs));
case "pick_first":
return ConfigOrError.fromConfig(
GrpclbConfig.create(Mode.PICK_FIRST, serviceName, timeoutMs));
default:
policiesTried.add(childPolicyName);
}
}
return ConfigOrError.fromError(
Status
.UNAVAILABLE
.withDescription(
"None of " + policiesTried + " specified child policies are available."));
}
}
| GrpclbLoadBalancerProvider |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/MissingSuperCall.java | {
"start": 2263,
"end": 6080
} | enum ____ {
ANDROID("android.support.annotation.CallSuper"),
ANDROIDX("androidx.annotation.CallSuper"),
ERROR_PRONE("com.google.errorprone.annotations.OverridingMethodsMustInvokeSuper"),
JSR305("javax.annotation.OverridingMethodsMustInvokeSuper"),
FINDBUGS("edu.umd.cs.findbugs.annotations.OverrideMustInvoke");
private final String fullyQualifiedName;
AnnotationType(String fullyQualifiedName) {
this.fullyQualifiedName = fullyQualifiedName;
}
String fullyQualifiedName() {
return fullyQualifiedName;
}
String simpleName() {
int index = fullyQualifiedName().lastIndexOf('.');
if (index >= 0) {
return fullyQualifiedName().substring(index + 1);
} else {
return fullyQualifiedName();
}
}
}
private static final Matcher<AnnotationTree> ANNOTATION_MATCHER =
anyOf(
Arrays.stream(AnnotationType.values())
.map(anno -> isType(anno.fullyQualifiedName()))
.collect(ImmutableList.toImmutableList()));
/**
* Prevents abstract methods from being annotated with {@code @CallSuper} et al. It doesn't make
* sense to require overriders to call a method with no implementation.
*/
@Override
public Description matchAnnotation(AnnotationTree tree, VisitorState state) {
if (!ANNOTATION_MATCHER.matches(tree, state)) {
return Description.NO_MATCH;
}
MethodTree methodTree = ASTHelpers.findEnclosingNode(state.getPath(), MethodTree.class);
if (methodTree == null) {
return Description.NO_MATCH;
}
MethodSymbol methodSym = ASTHelpers.getSymbol(methodTree);
if (!methodSym.getModifiers().contains(Modifier.ABSTRACT)) {
return Description.NO_MATCH;
}
// Match, find the matched annotation to use for the error message.
Symbol annotationSym = ASTHelpers.getSymbol(tree);
if (annotationSym == null) {
return Description.NO_MATCH;
}
return buildDescription(tree)
.setMessage(
String.format(
"@%s cannot be applied to an abstract method", annotationSym.getSimpleName()))
.build();
}
/**
* Matches a method that overrides a method that has been annotated with {@code @CallSuper} et
* al., but does not call the super method.
*/
@Override
public Description matchMethod(MethodTree tree, VisitorState state) {
MethodSymbol methodSym = ASTHelpers.getSymbol(tree);
// Allow abstract methods.
if (methodSym.getModifiers().contains(Modifier.ABSTRACT)) {
return Description.NO_MATCH;
}
String annotatedSuperMethod = null;
String matchedAnnotationSimpleName = null;
for (MethodSymbol method : ASTHelpers.findSuperMethods(methodSym, state.getTypes())) {
for (AnnotationType annotationType : AnnotationType.values()) {
if (ASTHelpers.hasAnnotation(method, annotationType.fullyQualifiedName(), state)) {
annotatedSuperMethod = getMethodName(method);
matchedAnnotationSimpleName = annotationType.simpleName();
break;
}
}
}
if (annotatedSuperMethod == null || matchedAnnotationSimpleName == null) {
return Description.NO_MATCH;
}
TreeScanner<Boolean, Void> findSuper = new FindSuperTreeScanner(tree.getName().toString());
if (findSuper.scan(tree, null)) {
return Description.NO_MATCH;
}
return buildDescription(tree)
.setMessage(
String.format(
"This method overrides %s, which is annotated with @%s, but does not call the "
+ "super method",
annotatedSuperMethod, matchedAnnotationSimpleName))
.build();
}
/** Scans a tree looking for calls to a method that is overridden by the given one. */
private static | AnnotationType |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/SchemaUpdateWithFunctionIndexTest.java | {
"start": 1621,
"end": 2982
} | class ____ {
@Test
public void testUpdateSchema(DomainModelScope modelScope) {
new SchemaUpdate().execute( EnumSet.of( TargetType.DATABASE, TargetType.STDOUT ), modelScope.getDomainModel() );
}
@BeforeEach
public void setUp(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> session.doWork( (connection) -> {
try (var statement = connection.createStatement()) {
statement.execute( "DROP INDEX IF EXISTS uk_MyEntity_name_lowercase;" );
statement.execute( "DROP TABLE IF EXISTS MyEntity;" );
statement.execute( "CREATE TABLE MyEntity(id bigint, name varchar(255));" );
statement.execute( "CREATE UNIQUE INDEX uk_MyEntity_name_lowercase ON MyEntity (lower(name));" );
}
} ) );
}
@AfterEach
public void tearDown(DomainModelScope modelScope, SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> session.doWork( (connection) -> {
try (var statement = connection.createStatement()) {
statement.execute( "DROP INDEX IF EXISTS uk_MyEntity_name_lowercase;" );
statement.execute( "DROP TABLE IF EXISTS MyEntity;" );
}
} ) );
new SchemaExport().drop( EnumSet.of( TargetType.DATABASE, TargetType.STDOUT ), modelScope.getDomainModel() );
}
@Entity
@Table(name = "MyEntity", indexes = @Index(columnList = "otherInfo"))
public static | SchemaUpdateWithFunctionIndexTest |
java | mockito__mockito | mockito-core/src/test/java/org/mockito/internal/stubbing/defaultanswers/ReturnsEmptyValuesTest.java | {
"start": 655,
"end": 9084
} | class ____ extends TestBase {
private final ReturnsEmptyValues values = new ReturnsEmptyValues();
@Test
public void should_return_empty_collections_or_null_for_non_collections() {
assertTrue(((Collection<?>) values.returnValueFor(Collection.class)).isEmpty());
assertTrue(((Set<?>) values.returnValueFor(Set.class)).isEmpty());
assertTrue(((SortedSet<?>) values.returnValueFor(SortedSet.class)).isEmpty());
assertTrue(((HashSet<?>) values.returnValueFor(HashSet.class)).isEmpty());
assertTrue(((TreeSet<?>) values.returnValueFor(TreeSet.class)).isEmpty());
assertTrue(((LinkedHashSet<?>) values.returnValueFor(LinkedHashSet.class)).isEmpty());
assertTrue(((List<?>) values.returnValueFor(List.class)).isEmpty());
assertTrue(((ArrayList<?>) values.returnValueFor(ArrayList.class)).isEmpty());
assertTrue(((LinkedList<?>) values.returnValueFor(LinkedList.class)).isEmpty());
assertTrue(((Map<?, ?>) values.returnValueFor(Map.class)).isEmpty());
assertTrue(((SortedMap<?, ?>) values.returnValueFor(SortedMap.class)).isEmpty());
assertTrue(((HashMap<?, ?>) values.returnValueFor(HashMap.class)).isEmpty());
assertTrue(((TreeMap<?, ?>) values.returnValueFor(TreeMap.class)).isEmpty());
assertTrue(((LinkedHashMap<?, ?>) values.returnValueFor(LinkedHashMap.class)).isEmpty());
assertNull(values.returnValueFor(String.class));
}
@Test
public void should_return_empty_iterable() throws Exception {
assertFalse(((Iterable<?>) values.returnValueFor(Iterable.class)).iterator().hasNext());
}
@Test
public void should_return_primitive() {
assertEquals(false, values.returnValueFor(Boolean.TYPE));
assertEquals((char) 0, values.returnValueFor(Character.TYPE));
assertEquals((byte) 0, values.returnValueFor(Byte.TYPE));
assertEquals((short) 0, values.returnValueFor(Short.TYPE));
assertEquals(0, values.returnValueFor(Integer.TYPE));
assertEquals(0L, values.returnValueFor(Long.TYPE));
assertEquals(0F, values.returnValueFor(Float.TYPE));
assertEquals(0D, values.returnValueFor(Double.TYPE));
}
@Test
public void should_return_non_zero_for_compareTo_method() {
//
// given
Date d = mock(Date.class);
d.compareTo(new Date());
Invocation compareTo = this.getLastInvocation();
// when
Object result = values.answer(compareTo);
// then
assertTrue(result != (Object) 0);
}
@SuppressWarnings("SelfComparison")
@Test
public void should_return_zero_if_mock_is_compared_to_itself() {
// given
Date d = mock(Date.class);
d.compareTo(d);
Invocation compareTo = this.getLastInvocation();
// when
Object result = values.answer(compareTo);
// then
assertEquals(0, result);
}
@Test
public void should_return_empty_Optional() throws Exception {
verify_empty_Optional_is_returned("java.util.stream.Stream", "java.util.Optional");
}
@Test
public void should_return_empty_OptionalDouble() throws Exception {
verify_empty_Optional_is_returned(
"java.util.stream.DoubleStream", "java.util.OptionalDouble");
}
@Test
public void should_return_empty_OptionalInt() throws Exception {
verify_empty_Optional_is_returned("java.util.stream.IntStream", "java.util.OptionalInt");
}
@Test
public void should_return_empty_OptionalLong() throws Exception {
verify_empty_Optional_is_returned("java.util.stream.LongStream", "java.util.OptionalLong");
}
private void verify_empty_Optional_is_returned(String streamFqcn, String optionalFqcn)
throws Exception {
// given
assumeThat("JDK 8+ required for Optional", isJavaVersionAtLeast(8), is(true));
Class<?> streamType = Class.forName(streamFqcn);
Object stream = mock(streamType);
Object optional = streamType.getMethod("findAny").invoke(stream);
assertNotNull(optional);
assertFalse((Boolean) Class.forName(optionalFqcn).getMethod("isPresent").invoke(optional));
Invocation findAny = this.getLastInvocation();
// when
Object result = values.answer(findAny);
// then
assertEquals(optional, result);
}
@Test
public void should_return_empty_Stream() throws Exception {
verify_empty_Stream_is_returned("java.util.stream.Stream");
}
@Test
public void should_return_empty_DoubleStream() throws Exception {
verify_empty_Stream_is_returned("java.util.stream.DoubleStream");
}
@Test
public void should_return_empty_IntStream() throws Exception {
verify_empty_Stream_is_returned("java.util.stream.IntStream");
}
@Test
public void should_return_empty_LongStream() throws Exception {
verify_empty_Stream_is_returned("java.util.stream.LongStream");
}
private void verify_empty_Stream_is_returned(String streamFqcn) throws Exception {
// given
assumeThat("JDK 8+ required for Stream", isJavaVersionAtLeast(8), is(true));
Class<?> streamType = Class.forName(streamFqcn);
// when
Object stream = values.returnValueFor(streamType);
long count = (Long) streamType.getMethod("count").invoke(stream);
// then
assertEquals("count of empty " + streamFqcn, 0L, count);
}
@Test
public void should_return_empty_duration() throws Exception {
// given
assumeThat("JDK 8+ required for Duration", isJavaVersionAtLeast(8), is(true));
final String fqcn = "java.time.Duration";
Class<?> durationClass = Class.forName(fqcn);
// when
final Object duration = values.returnValueFor(durationClass);
final int nano = (Integer) durationClass.getMethod("getNano").invoke(duration);
final long seconds = (Long) durationClass.getMethod("getSeconds").invoke(duration);
// then
assertEquals("nano of empty " + fqcn, 0, nano);
assertEquals("seconds of empty " + fqcn, 0L, seconds);
}
@Test
public void should_return_empty_sequenced_collection() throws Exception {
assumeThat("JDK 21+ required for SequencedCollection", isJavaVersionAtLeast(21), is(true));
Class<?> sequencedCollectionClass = Class.forName("java.util.SequencedCollection");
Object result = values.returnValueFor(sequencedCollectionClass);
assertNotNull("SequencedCollection should return non-null value", result);
assertTrue("Should return empty collection", ((Collection<?>) result).isEmpty());
assertTrue("Should return ArrayList instance", result instanceof ArrayList);
}
@Test
public void should_return_empty_sequenced_set() throws Exception {
assumeThat("JDK 21+ required for SequencedSet", isJavaVersionAtLeast(21), is(true));
Class<?> sequencedSetClass = Class.forName("java.util.SequencedSet");
Object result = values.returnValueFor(sequencedSetClass);
assertNotNull("SequencedSet should return non-null value", result);
assertTrue("Should return empty set", ((Set<?>) result).isEmpty());
assertTrue("Should return LinkedHashSet instance", result instanceof LinkedHashSet);
}
@Test
public void should_return_empty_sequenced_map() throws Exception {
assumeThat("JDK 21+ required for SequencedMap", isJavaVersionAtLeast(21), is(true));
Class<?> sequencedMapClass = Class.forName("java.util.SequencedMap");
Object result = values.returnValueFor(sequencedMapClass);
assertNotNull("SequencedMap should return non-null value", result);
assertTrue("Should return empty map", ((Map<?, ?>) result).isEmpty());
assertTrue("Should return LinkedHashMap instance", result instanceof LinkedHashMap);
}
/**
* Checks if the current Java version is at least the specified version.
*/
private boolean isJavaVersionAtLeast(int majorVersion) {
String javaVersion = System.getProperty("java.version");
String[] versionParts = javaVersion.split("\\.");
int currentMajorVersion = Integer.parseInt(versionParts[0]);
return currentMajorVersion >= majorVersion;
}
| ReturnsEmptyValuesTest |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/query/hql/spi/SemanticPathPart.java | {
"start": 427,
"end": 463
} | class ____
* * field name
* * | name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.