language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__camel
|
core/camel-base-engine/src/main/java/org/apache/camel/impl/engine/DefaultReactiveExecutor.java
|
{
"start": 1473,
"end": 4110
}
|
class ____ extends ServiceSupport implements ReactiveExecutor, StaticService {
private static final Logger LOG = LoggerFactory.getLogger(DefaultReactiveExecutor.class);
// use for statistics so we have insights at runtime
private boolean statisticsEnabled;
private final AtomicInteger createdWorkers = new AtomicInteger();
private final LongAdder runningWorkers = new LongAdder();
private final LongAdder pendingTasks = new LongAdder();
private final NamedThreadLocal<Worker> workers = new NamedThreadLocal<>("CamelReactiveWorker", () -> {
int number = createdWorkers.incrementAndGet();
return new Worker(number, DefaultReactiveExecutor.this);
});
@Override
public void schedule(Runnable runnable) {
workers.get().schedule(runnable, false, false, false);
}
@Override
public void scheduleMain(Runnable runnable) {
workers.get().schedule(runnable, true, true, false);
}
@Override
public void scheduleSync(Runnable runnable) {
workers.get().schedule(runnable, false, true, true);
}
@Override
public void scheduleQueue(Runnable runnable) {
if (LOG.isTraceEnabled()) {
LOG.trace("ScheduleQueue: {}", runnable);
}
workers.get().queue.add(runnable);
}
@Override
public boolean executeFromQueue() {
return workers.get().executeFromQueue();
}
@Override
@ManagedAttribute(description = "Whether statistics is enabled")
public boolean isStatisticsEnabled() {
return statisticsEnabled;
}
@Override
public void setStatisticsEnabled(boolean statisticsEnabled) {
this.statisticsEnabled = statisticsEnabled;
}
@ManagedAttribute(description = "Number of created workers")
public int getCreatedWorkers() {
return createdWorkers.get();
}
@ManagedAttribute(description = "Number of running workers")
public int getRunningWorkers() {
return runningWorkers.intValue();
}
@ManagedAttribute(description = "Number of pending tasks")
public int getPendingTasks() {
return pendingTasks.intValue();
}
@Override
protected void doStop() throws Exception {
if (LOG.isDebugEnabled() && statisticsEnabled) {
LOG.debug("Stopping DefaultReactiveExecutor [createdWorkers: {}, runningWorkers: {}, pendingTasks: {}]",
getCreatedWorkers(), getRunningWorkers(), getPendingTasks());
}
}
@Override
protected void doShutdown() throws Exception {
workers.remove();
}
private static
|
DefaultReactiveExecutor
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/RemoveUnusedImportsTest.java
|
{
"start": 12172,
"end": 12512
}
|
interface ____ {}
""")
.expectUnchanged()
.addInputLines(
"a/Two.java",
"""
package a;
import java.lang.annotation.ElementType;
import java.lang.annotation.Target;
@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
public @
|
One
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxOnAssembly.java
|
{
"start": 14204,
"end": 17663
}
|
class ____<T>
implements InnerOperator<T, T>, QueueSubscription<T> {
final AssemblySnapshot snapshotStack;
final Publisher<?> parent;
final Publisher<?> current;
final CoreSubscriber<? super T> actual;
@Nullable QueueSubscription<T> qs;
@SuppressWarnings("NotNullFieldNotInitialized") // s is set in onSubscribe
Subscription s;
int fusionMode;
OnAssemblySubscriber(CoreSubscriber<? super T> actual,
AssemblySnapshot snapshotStack,
Publisher<?> parent,
Publisher<?> current) {
this.actual = actual;
this.snapshotStack = snapshotStack;
this.parent = parent;
this.current = current;
}
@Override
public final CoreSubscriber<? super T> actual() {
return actual;
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.PARENT) return s;
if (key == Attr.ACTUAL_METADATA) return !snapshotStack.isCheckpoint;
if (key == Attr.RUN_STYLE) return Attr.RunStyle.SYNC;
return InnerOperator.super.scanUnsafe(key);
}
@Override
public String toString() {
return snapshotStack.operatorAssemblyInformation();
}
@Override
public String stepName() {
return toString();
}
@Override
final public void onNext(T t) {
actual.onNext(t);
}
@Override
final public void onError(Throwable t) {
actual.onError(fail(t));
}
@Override
final public void onComplete() {
actual.onComplete();
}
@Override
final public int requestFusion(int requestedMode) {
QueueSubscription<T> qs = this.qs;
if (qs != null) {
int m = qs.requestFusion(requestedMode);
if (m != Fuseable.NONE) {
fusionMode = m;
}
return m;
}
return Fuseable.NONE;
}
final Throwable fail(Throwable t) {
boolean lightCheckpoint = snapshotStack.isLight();
OnAssemblyException onAssemblyException = null;
for (Throwable e : t.getSuppressed()) {
if (e instanceof OnAssemblyException) {
onAssemblyException = (OnAssemblyException) e;
break;
}
}
if (onAssemblyException == null) {
if (lightCheckpoint) {
onAssemblyException = new OnAssemblyException("");
}
else {
StringBuilder sb = new StringBuilder();
fillStacktraceHeader(sb, parent.getClass(), snapshotStack.getDescription());
sb.append(snapshotStack.toAssemblyInformation().replaceFirst("\\n$", ""));
String description = sb.toString();
onAssemblyException = new OnAssemblyException(description);
}
t = Exceptions.addSuppressed(t, onAssemblyException);
final StackTraceElement[] stackTrace = t.getStackTrace();
if (stackTrace.length > 0) {
StackTraceElement[] newStackTrace = new StackTraceElement[stackTrace.length];
int i = 0;
for (StackTraceElement stackTraceElement : stackTrace) {
String className = stackTraceElement.getClassName();
if (className.startsWith("reactor.core.publisher.") && className.contains("OnAssembly")) {
continue;
}
newStackTrace[i] = stackTraceElement;
i++;
}
newStackTrace = Arrays.copyOf(newStackTrace, i);
onAssemblyException.setStackTrace(newStackTrace);
t.setStackTrace(new StackTraceElement[] {
stackTrace[0]
});
}
}
onAssemblyException.add(parent, current, snapshotStack);
return t;
}
@Override
final public boolean isEmpty() {
try {
assert qs != null : "Queue
|
OnAssemblySubscriber
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/error/BasicErrorMessageFactory_create_Test.java
|
{
"start": 1416,
"end": 3169
}
|
class ____ {
private MessageFormatter formatter;
private BasicErrorMessageFactory factory;
@BeforeEach
public void setUp() {
formatter = mock(MessageFormatter.class);
factory = new BasicErrorMessageFactory("Hello %s", "Yoda");
factory.formatter = formatter;
}
@Test
void should_implement_toString() {
// GIVEN
Description description = new TestDescription("Test");
Representation representation = new StandardRepresentation();
String formattedMessage = "[Test] Hello Yoda";
given(formatter.format(description, representation, "Hello %s", "Yoda")).willReturn(formattedMessage);
// WHEN
String message = factory.create(description, representation);
// THEN
then(message).isEqualTo(formattedMessage);
}
@Test
void should_create_error_with_configured_representation() {
// GIVEN
Description description = new TestDescription("Test");
String formattedMessage = "[Test] Hello Yoda";
given(formatter.format(description, CONFIGURATION_PROVIDER.representation(), "Hello %s",
"Yoda")).willReturn(formattedMessage);
// WHEN
String message = factory.create(description);
// THEN
then(message).isEqualTo(formattedMessage);
}
@Test
void should_create_error_with_empty_description_and_configured_representation() {
Description description = emptyDescription();
String formattedMessage = "[] Hello Yoda";
given(formatter.format(description, CONFIGURATION_PROVIDER.representation(), "Hello %s",
"Yoda")).willReturn(formattedMessage);
// WHEN
String message = factory.create(description);
// THEN
then(message).isEqualTo(formattedMessage);
}
}
|
BasicErrorMessageFactory_create_Test
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/CheckReturnValueWellKnownLibrariesTest.java
|
{
"start": 20089,
"end": 20606
}
|
class ____ {
public static void log(Level severity, String message) {}
}
""")
.addSourceLines(
"Caller.java",
"""
package com.google.frobber;
import com.google.auto.value.AutoBuilder;
import java.util.logging.Level;
import com.google.errorprone.annotations.CheckReturnValue;
@CheckReturnValue
@AutoBuilder(callMethod = "log", ofClass = LogUtil.class)
public
|
LogUtil
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/MapMakerInternalMap.java
|
{
"start": 14845,
"end": 15278
}
|
class ____<K, V>
extends AbstractStrongKeyEntry<K, V, StrongKeyStrongValueEntry<K, V>>
implements StrongValueEntry<K, V, StrongKeyStrongValueEntry<K, V>> {
private volatile @Nullable V value = null;
private StrongKeyStrongValueEntry(K key, int hash) {
super(key, hash);
}
@Override
public final @Nullable V getValue() {
return value;
}
private static final
|
StrongKeyStrongValueEntry
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestBuilder.java
|
{
"start": 797,
"end": 4002
}
|
class ____ extends ActionRequestBuilder<ClusterStateRequest, ClusterStateResponse> {
public ClusterStateRequestBuilder(ElasticsearchClient client, TimeValue masterNodeTimeout) {
super(client, ClusterStateAction.INSTANCE, new ClusterStateRequest(masterNodeTimeout));
}
/**
* Include all data
*/
public ClusterStateRequestBuilder all() {
request.all();
return this;
}
/**
* Do not include any data
*/
public ClusterStateRequestBuilder clear() {
request.clear();
return this;
}
public ClusterStateRequestBuilder setBlocks(boolean filter) {
request.blocks(filter);
return this;
}
/**
* Should the cluster state result include the {@link Metadata}. Defaults
* to {@code true}.
*/
public ClusterStateRequestBuilder setMetadata(boolean filter) {
request.metadata(filter);
return this;
}
/**
* Should the cluster state result include the {@link org.elasticsearch.cluster.node.DiscoveryNodes}. Defaults
* to {@code true}.
*/
public ClusterStateRequestBuilder setNodes(boolean filter) {
request.nodes(filter);
return this;
}
/**
* Should the cluster state result include the {@link org.elasticsearch.cluster.ClusterState.Custom}. Defaults
* to {@code true}.
*/
public ClusterStateRequestBuilder setCustoms(boolean filter) {
request.customs(filter);
return this;
}
/**
* Should the cluster state result include the {@link org.elasticsearch.cluster.routing.RoutingTable}. Defaults
* to {@code true}.
*/
public ClusterStateRequestBuilder setRoutingTable(boolean filter) {
request.routingTable(filter);
return this;
}
/**
* When {@link #setMetadata(boolean)} is set, which indices to return the {@link org.elasticsearch.cluster.metadata.IndexMetadata}
* for. Defaults to all indices.
*/
public ClusterStateRequestBuilder setIndices(String... indices) {
request.indices(indices);
return this;
}
public ClusterStateRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
request.indicesOptions(indicesOptions);
return this;
}
/**
* Causes the request to wait for the metadata version to advance to at least the given version.
* @param waitForMetadataVersion The metadata version for which to wait
*/
public ClusterStateRequestBuilder setWaitForMetadataVersion(long waitForMetadataVersion) {
request.waitForMetadataVersion(waitForMetadataVersion);
return this;
}
/**
* If {@link ClusterStateRequest#waitForMetadataVersion()} is set then this determines how long to wait
*/
public ClusterStateRequestBuilder setWaitForTimeOut(TimeValue waitForTimeout) {
request.waitForTimeout(waitForTimeout);
return this;
}
/**
* When set then the response will be in multi-project format
*/
public ClusterStateRequestBuilder setMultiproject(boolean multiproject) {
request.multiproject(multiproject);
return this;
}
}
|
ClusterStateRequestBuilder
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedSignificantTerms.java
|
{
"start": 1925,
"end": 5013
}
|
class ____ extends InternalSignificantTerms.Bucket<Bucket> {
private Bucket(long subsetDf, long supersetDf, InternalAggregations aggregations, DocValueFormat format) {
super(subsetDf, supersetDf, aggregations, format);
}
}
public UnmappedSignificantTerms(String name, int requiredSize, long minDocCount, Map<String, Object> metadata) {
super(name, requiredSize, minDocCount, metadata);
}
/**
* Read from a stream.
*/
public UnmappedSignificantTerms(StreamInput in) throws IOException {
super(in);
}
@Override
protected void writeTermTypeInfoTo(StreamOutput out) throws IOException {
// Nothing to write
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public String getType() {
return SignificantStringTerms.NAME;
}
@Override
public UnmappedSignificantTerms create(List<Bucket> buckets) {
return new UnmappedSignificantTerms(name, requiredSize, minDocCount, metadata);
}
@Override
public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) {
throw new UnsupportedOperationException("not supported for UnmappedSignificantTerms");
}
@Override
protected UnmappedSignificantTerms create(long subsetSize, long supersetSize, List<Bucket> buckets) {
throw new UnsupportedOperationException("not supported for UnmappedSignificantTerms");
}
@Override
Bucket createBucket(long subsetDf, long supersetDf, InternalAggregations aggregations, Bucket prototype) {
throw new UnsupportedOperationException("not supported for UnmappedSignificantTerms");
}
@Override
public InternalAggregation finalizeSampling(SamplingContext samplingContext) {
return new UnmappedSignificantTerms(name, requiredSize, minDocCount, metadata);
}
@Override
protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceContext, int size) {
throw new UnsupportedOperationException();
}
@Override
public boolean canLeadReduction() {
return false;
}
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.startArray(CommonFields.BUCKETS.getPreferredName()).endArray();
return builder;
}
@Override
protected Bucket[] createBucketsArray(int size) {
return new Bucket[size];
}
@Override
public Iterator<SignificantTerms.Bucket> iterator() {
return emptyIterator();
}
@Override
public List<Bucket> getBuckets() {
return emptyList();
}
@Override
public SignificantTerms.Bucket getBucketByKey(String term) {
return null;
}
@Override
protected SignificanceHeuristic getSignificanceHeuristic() {
throw new UnsupportedOperationException();
}
@Override
public long getSubsetSize() {
return 0;
}
@Override
public long getSupersetSize() {
return 0;
}
}
|
Bucket
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java
|
{
"start": 6173,
"end": 6838
}
|
class ____ extends SourceBlockLoader {
public BytesRefsBlockLoader(ValueFetcher fetcher, LeafIteratorLookup lookup) {
super(fetcher, lookup);
}
@Override
public final Builder builder(BlockFactory factory, int expectedCount) {
return factory.bytesRefs(expectedCount);
}
@Override
protected RowStrideReader rowStrideReader(LeafReaderContext context, DocIdSetIterator iter) throws IOException {
return new BytesRefs(fetcher, iter);
}
@Override
protected String name() {
return "Bytes";
}
}
public static
|
BytesRefsBlockLoader
|
java
|
alibaba__druid
|
druid-spring-boot-3-starter/src/test/java/com/alibaba/druid/spring/boot3/testcase/DruidFilterTestCase.java
|
{
"start": 963,
"end": 1370
}
|
class ____ {
@Resource
private DruidDataSource dataSource;
@Test
public void test() {
List<Filter> filters = dataSource.getProxyFilters();
//配置文件中3个,自定义1个,共4个
assertThat(filters.size()).isEqualTo(4);
}
/**
* @author dk
* 用于此测试的一个配置,仅加入了一个自定义的Filter,此Filter打印出数据库连接url
*/
@Configuration
@ComponentScan
public static
|
DruidFilterTestCase
|
java
|
redisson__redisson
|
redisson-spring-data/redisson-spring-data-32/src/main/java/org/redisson/spring/data/connection/RedisClusterNodeDecoder.java
|
{
"start": 1443,
"end": 4985
}
|
class ____ implements Decoder<List<RedisClusterNode>> {
private final ServiceManager serviceManager;
public RedisClusterNodeDecoder(ServiceManager serviceManager) {
this.serviceManager = serviceManager;
}
@Override
public List<RedisClusterNode> decode(ByteBuf buf, State state) throws IOException {
String response = buf.toString(CharsetUtil.UTF_8);
List<RedisClusterNode> nodes = new ArrayList<RedisClusterNode>();
for (String nodeInfo : response.split("\n")) {
String[] params = nodeInfo.split(" ");
String nodeId = params[0];
String flagsStr = params[2];
Set<Flag> flags = EnumSet.noneOf(Flag.class);
for (String flag : flagsStr.split(",")) {
String flagValue = flag.replace("slave", "replica")
.toUpperCase(Locale.ENGLISH).replaceAll("\\?", "");
flags.add(Flag.valueOf(flagValue));
}
RedisURI address = null;
if (!flags.contains(Flag.NOADDR)) {
String addr = params[1].split("@")[0];
String name = addr.substring(0, addr.lastIndexOf(":"));
if (name.isEmpty()) {
// skip nodes with empty address
continue;
}
address = new RedisURI(RedisURI.REDIS_PROTOCOL + addr);
address = serviceManager.toURI("redis", address.getHost(), String.valueOf(address.getPort()));
}
String masterId = params[3];
if ("-".equals(masterId)) {
masterId = null;
}
Set<Integer> slotsCollection = new HashSet<Integer>();
LinkState linkState = null;
if (params.length >= 8 && params[7] != null) {
linkState = LinkState.valueOf(params[7].toUpperCase(Locale.ENGLISH));
}
if (params.length > 8) {
for (int i = 0; i < params.length - 8; i++) {
String slots = params[i + 8];
if (slots.indexOf("-<-") != -1 || slots.indexOf("->-") != -1) {
continue;
}
String[] parts = slots.split("-");
if(parts.length == 1) {
slotsCollection.add(Integer.valueOf(parts[0]));
} else if(parts.length == 2) {
for (int j = Integer.valueOf(parts[0]); j < Integer.valueOf(parts[1]) + 1; j++) {
slotsCollection.add(j);
}
}
}
}
NodeType type = null;
if (flags.contains(Flag.MASTER)) {
type = NodeType.MASTER;
} else if (flags.contains(Flag.REPLICA)) {
type = NodeType.REPLICA;
}
RedisClusterNodeBuilder builder = RedisClusterNode.newRedisClusterNode()
.linkState(linkState)
.replicaOf(masterId)
.serving(new SlotRange(slotsCollection))
.withId(nodeId)
.promotedAs(type)
.withFlags(flags);
if (address != null) {
builder.listeningAt(address.getHost(), address.getPort());
}
nodes.add(builder.build());
}
return nodes;
}
}
|
RedisClusterNodeDecoder
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/ThreadSafeAnalysis.java
|
{
"start": 10826,
"end": 12081
}
|
class ____ should be final or annotated with @GuardedBy. See "
+ "https://errorprone.info/bugpattern/ThreadSafe for details.")
.addFix(SuggestedFixes.addModifiers(tree.get(), state, modifier))
.build());
return Violation.absent();
}
return Violation.of(
String.format(message, threadSafety.getPrettyName(classSym), var.getSimpleName()));
}
/**
* Gets the {@link Tree}'s {@code @ThreadSafe} annotation info, either from an annotation on the
* symbol or from the list of well-known immutable types.
*/
AnnotationInfo getThreadSafeAnnotation(Tree tree, VisitorState state) {
Symbol sym = ASTHelpers.getSymbol(tree);
return getThreadSafeAnnotation(sym, state);
}
AnnotationInfo getThreadSafeAnnotation(Symbol sym, VisitorState state) {
String nameStr = sym.flatName().toString();
AnnotationInfo known = wellKnownThreadSafety.getKnownThreadSafeClasses().get(nameStr);
if (known != null) {
return known;
}
return threadSafety.getInheritedAnnotation(sym, state);
}
public ImmutableSet<String> threadSafeTypeParametersInScope(Symbol sym) {
return ImmutableSet.copyOf(threadSafety.threadSafeTypeParametersInScope(sym));
}
}
|
fields
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NonCanonicalTypeTest.java
|
{
"start": 5041,
"end": 5376
}
|
class ____<E extends Enum<E>> {
E test(Class<E> clazz, String name) {
return E.valueOf(clazz, name);
}
}
""")
.doTest();
}
@Test
public void arrays() {
compilationHelper
.addSourceLines(
"Test.java",
"""
|
Test
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/NegativeCharLiteralTest.java
|
{
"start": 3374,
"end": 3475
}
|
class ____ {
int x = (int) -1;
}
""")
.doTest();
}
}
|
Test
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/FluxTapFuseable.java
|
{
"start": 8990,
"end": 9656
}
|
class ____<T> extends TapFuseableSubscriber<T> implements ConditionalSubscriber<T> {
final ConditionalSubscriber<? super T> actualConditional;
public TapConditionalFuseableSubscriber(ConditionalSubscriber<? super T> actual,
SignalListener<T> signalListener, Context ctx) {
super(actual, signalListener, ctx);
this.actualConditional = actual;
}
@Override
public boolean tryOnNext(T t) {
if (actualConditional.tryOnNext(t)) {
try {
listener.doOnNext(t);
}
catch (Throwable listenerError) {
handleListenerErrorAndTerminate(listenerError);
}
return true;
}
return false;
}
}
}
|
TapConditionalFuseableSubscriber
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/logging/LoggingWithPanacheProcessor.java
|
{
"start": 13171,
"end": 13595
}
|
interface ____ must be public static final per the JVMS
int access = isInterface
? Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL
: Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL;
super.visitField(access, SYNTHETIC_LOGGER_FIELD_NAME, JBOSS_LOGGER_DESCRIPTOR, null, null);
generatedLoggerField = true;
}
}
}
|
fields
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/bugs/injection/InjectionByTypeShouldFirstLookForExactTypeThenAncestorTest.java
|
{
"start": 2237,
"end": 2336
}
|
class ____ {
Bean injectMePlease;
Object keepMeNull = null;
}
}
|
WithNullObjectField
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/query/NativeQuery.java
|
{
"start": 17533,
"end": 26448
}
|
interface ____ extends ResultNode {
String getTableAlias();
String getOwnerAlias();
Fetchable getFetchable();
String getFetchableName();
/**
* Set the lock mode for this return.
*
* @param lockMode The new lock mode.
*
* @return {@code this}, for method chaining
*/
FetchReturn setLockMode(LockMode lockMode);
/**
* Add a simple property-to-one-column mapping.
*
* @param propertyName The name of the property.
* @param columnAlias The name of the column
*
* @return {@code this}, for method chaining
*/
FetchReturn addProperty(String propertyName, String columnAlias);
/**
* Add a property, presumably with more than one column.
*
* @param propertyName The name of the property.
*
* @return The config object for further control.
*/
ReturnProperty addProperty(String propertyName);
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// covariant overrides - SynchronizeableQuery
@Override
NativeQuery<T> addSynchronizedQuerySpace(String querySpace);
@Override
NativeQuery<T> addSynchronizedEntityName(String entityName) throws MappingException;
@Override
NativeQuery<T> addSynchronizedEntityClass(@SuppressWarnings("rawtypes") Class entityClass) throws MappingException;
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// covariant overrides - Query
@Override @Deprecated(since = "7")
NativeQuery<T> setHibernateFlushMode(FlushMode flushMode);
@Override
NativeQuery<T> setQueryFlushMode(QueryFlushMode queryFlushMode);
@Override @Deprecated(since = "7")
NativeQuery<T> setFlushMode(FlushModeType flushMode);
@Override
NativeQuery<T> setCacheMode(CacheMode cacheMode);
@Override
NativeQuery<T> setCacheStoreMode(CacheStoreMode cacheStoreMode);
@Override
NativeQuery<T> setCacheRetrieveMode(CacheRetrieveMode cacheRetrieveMode);
@Override
NativeQuery<T> setCacheable(boolean cacheable);
@Override
NativeQuery<T> setCacheRegion(String cacheRegion);
@Override
NativeQuery<T> setTimeout(int timeout);
@Override
NativeQuery<T> setFetchSize(int fetchSize);
@Override
NativeQuery<T> setReadOnly(boolean readOnly);
@Override
NativeQuery<T> setComment(String comment);
@Override
NativeQuery<T> addQueryHint(String hint);
@Override
NativeQuery<T> setMaxResults(int maxResults);
@Override
NativeQuery<T> setFirstResult(int startPosition);
@Override
NativeQuery<T> setHint(String hintName, Object value);
/**
* @inheritDoc
*
* This operation is supported even for native queries.
* Note that specifying an explicit lock mode might
* result in changes to the native SQL query that is
* actually executed.
*/
@Override @Deprecated(forRemoval = true)
LockOptions getLockOptions();
/**
* @inheritDoc
*
* This operation is supported even for native queries.
* Note that specifying an explicit lock mode might
* result in changes to the native SQL query that is
* actually executed.
*/
@Override @Deprecated(forRemoval = true)
NativeQuery<T> setLockOptions(LockOptions lockOptions);
/**
* Not applicable to native SQL queries, due to an unfortunate
* requirement of the JPA specification.
* <p>
* Use {@link #getHibernateLockMode()} to obtain the lock mode.
*
* @throws IllegalStateException as required by JPA
*/
@Override
LockModeType getLockMode();
/**
* @inheritDoc
*
* This operation is supported even for native queries.
* Note that specifying an explicit lock mode might
* result in changes to the native SQL query that is
* actually executed.
*/
@Override
LockMode getHibernateLockMode();
/**
* Not applicable to native SQL queries, due to an unfortunate
* requirement of the JPA specification.
* <p>
* Use {@link #setHibernateLockMode(LockMode)} or the hint named
* {@value org.hibernate.jpa.HibernateHints#HINT_NATIVE_LOCK_MODE}
* to set the lock mode.
*
* @throws IllegalStateException as required by JPA
*/
@Override
NativeQuery<T> setLockMode(LockModeType lockMode);
/**
* @inheritDoc
*
* This operation is supported even for native queries.
* Note that specifying an explicit lock mode might
* result in changes to the native SQL query that is
* actually executed.
*/
@Override
NativeQuery<T> setHibernateLockMode(LockMode lockMode);
/**
* Apply a timeout to the corresponding database query.
*
* @param timeout The timeout to apply
*
* @return {@code this}, for method chaining
*/
NativeQuery<T> setTimeout(Timeout timeout);
/**
* Apply a scope to any pessimistic locking applied to the query.
*
* @param lockScope The lock scope to apply
*
* @return {@code this}, for method chaining
*/
NativeQuery<T> setLockScope(PessimisticLockScope lockScope);
@Override
<R> NativeQuery<R> setTupleTransformer(TupleTransformer<R> transformer);
@Override
NativeQuery<T> setResultListTransformer(ResultListTransformer<T> transformer);
@Override @Deprecated @SuppressWarnings("deprecation")
<S> NativeQuery<S> setResultTransformer(ResultTransformer<S> transformer);
@Override
NativeQuery<T> setParameter(String name, Object value);
@Override
<P> NativeQuery<T> setParameter(String name, P val, Class<P> type);
@Override
<P> NativeQuery<T> setParameter(String name, P val, Type<P> type);
@Override @Deprecated(since = "7")
NativeQuery<T> setParameter(String name, Instant value, TemporalType temporalType);
@Override @Deprecated(since = "7")
NativeQuery<T> setParameter(String name, Calendar value, TemporalType temporalType);
@Override @Deprecated(since = "7")
NativeQuery<T> setParameter(String name, Date value, TemporalType temporalType);
@Override
NativeQuery<T> setParameter(int position, Object value);
@Override
<P> NativeQuery<T> setParameter(int position, P val, Class<P> type);
@Override
<P> NativeQuery<T> setParameter(int position, P val, Type<P> type);
@Override @Deprecated(since = "7")
NativeQuery<T> setParameter(int position, Instant value, TemporalType temporalType);
@Override @Deprecated(since = "7")
NativeQuery<T> setParameter(int position, Calendar value, TemporalType temporalType);
@Override @Deprecated(since = "7")
NativeQuery<T> setParameter(int position, Date value, TemporalType temporalType);
@Override
<P> NativeQuery<T> setParameter(QueryParameter<P> parameter, P val);
@Override
<P> NativeQuery<T> setParameter(QueryParameter<P> parameter, P val, Class<P> type);
@Override
<P> NativeQuery<T> setParameter(QueryParameter<P> parameter, P val, Type<P> type);
@Override
<P> NativeQuery<T> setParameter(Parameter<P> param, P value);
@Override @Deprecated(since = "7")
NativeQuery<T> setParameter(Parameter<Calendar> param, Calendar value, TemporalType temporalType);
@Override @Deprecated(since = "7")
NativeQuery<T> setParameter(Parameter<Date> param, Date value, TemporalType temporalType);
@Override
NativeQuery<T> setParameterList(String name, @SuppressWarnings("rawtypes") Collection values);
@Override
<P> NativeQuery<T> setParameterList(String name, Collection<? extends P> values, Class<P> type);
@Override
<P> NativeQuery<T> setParameterList(String name, Collection<? extends P> values, Type<P> type);
@Override
NativeQuery<T> setParameterList(String name, Object[] values);
@Override
<P> NativeQuery<T> setParameterList(String name, P[] values, Class<P> type);
@Override
<P> NativeQuery<T> setParameterList(String name, P[] values, Type<P> type);
@Override
NativeQuery<T> setParameterList(int position, @SuppressWarnings("rawtypes") Collection values);
@Override
<P> NativeQuery<T> setParameterList(int position, Collection<? extends P> values, Class<P> type);
@Override
<P> NativeQuery<T> setParameterList(int position, Collection<? extends P> values, Type<P> javaType);
@Override
NativeQuery<T> setParameterList(int position, Object[] values);
@Override
<P> NativeQuery<T> setParameterList(int position, P[] values, Class<P> javaType);
@Override
<P> NativeQuery<T> setParameterList(int position, P[] values, Type<P> javaType);
@Override
<P> NativeQuery<T> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values);
@Override
<P> NativeQuery<T> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Class<P> javaType);
@Override
<P> NativeQuery<T> setParameterList(QueryParameter<P> parameter, Collection<? extends P> values, Type<P> type);
@Override
<P> NativeQuery<T> setParameterList(QueryParameter<P> parameter, P[] values);
@Override
<P> NativeQuery<T> setParameterList(QueryParameter<P> parameter, P[] values, Class<P> javaType);
@Override
<P> NativeQuery<T> setParameterList(QueryParameter<P> parameter, P[] values, Type<P> type);
@Override
NativeQuery<T> setProperties(Object bean);
@Override
NativeQuery<T> setProperties(@SuppressWarnings("rawtypes") Map bean);
}
|
FetchReturn
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/bugs/CompareMatcherTest.java
|
{
"start": 2371,
"end": 3106
}
|
class ____ implements ArgumentMatcher<Integer> {
@Override
public boolean matches(Integer arg) {
return false;
}
@SuppressWarnings("unused")
public boolean matches(Date arg) {
throw new UnsupportedOperationException();
}
@SuppressWarnings("unused")
public boolean matches(Integer arg, Void v) {
throw new UnsupportedOperationException();
}
}
when(mock.forObject(argThat(new TestMatcher()))).thenReturn("x");
assertThat(mock.forObject(123)).isNull();
}
@Test
public void matchesWithSubTypeExtendingGenericClass() {
abstract
|
TestMatcher
|
java
|
hibernate__hibernate-orm
|
hibernate-testing/src/main/java/org/hibernate/testing/orm/junit/BaseSessionFactoryFunctionalTest.java
|
{
"start": 1754,
"end": 11342
}
|
class ____
implements ServiceRegistryProducer, ServiceRegistryScopeAware,
DomainModelProducer, DomainModelScopeAware,
SessionFactoryProducer, SessionFactoryScopeAware {
protected static final Dialect DIALECT = DialectContext.getDialect();
protected static final Class<?>[] NO_CLASSES = new Class[0];
protected static final String[] NO_MAPPINGS = new String[0];
private static final Logger log = Logger.getLogger( BaseSessionFactoryFunctionalTest.class );
private DomainModelScope modelScope;
private SessionFactoryScope sessionFactoryScope;
private final ExecutorService executorService = Executors.newSingleThreadExecutor();
protected SessionFactoryScope sessionFactoryScope() {
return sessionFactoryScope;
}
protected SessionFactoryImplementor sessionFactory() {
return sessionFactoryScope.getSessionFactory();
}
protected MetadataImplementor getMetadata(){
return modelScope.getDomainModel();
}
@Override
public StandardServiceRegistry produceServiceRegistry(StandardServiceRegistryBuilder ssrBuilder) {
ssrBuilder.applySetting( AvailableSettings.HBM2DDL_AUTO, exportSchema() ? "create-drop" : "none" );
applySettings( ssrBuilder );
ServiceRegistryUtil.applySettings( ssrBuilder );
return ssrBuilder.build();
}
protected boolean exportSchema() {
return true;
}
protected void applySettings(StandardServiceRegistryBuilder builder) {
}
@Override
public void injectServiceRegistryScope(ServiceRegistryScope registryScope) {
}
@Override
public MetadataImplementor produceModel(StandardServiceRegistry serviceRegistry) {
MetadataSources metadataSources = new MetadataSources( serviceRegistry );
MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder();
applyMetadataBuilder( metadataBuilder );
applyMetadataSources( metadataSources );
final MetadataImplementor metadata = (MetadataImplementor) metadataBuilder.build();
if ( overrideCacheStrategy() && getCacheConcurrencyStrategy() != null ) {
applyCacheSettings( metadata );
}
return metadata;
}
protected final void applyCacheSettings(Metadata metadata) {
for ( PersistentClass entityBinding : metadata.getEntityBindings() ) {
if ( !entityBinding.isInherited() ) {
if ( !hasLob( entityBinding ) ) {
final RootClass rootClass = (RootClass) entityBinding;
rootClass.setCacheConcurrencyStrategy( getCacheConcurrencyStrategy() );
entityBinding.setCached( true );
}
}
}
for ( Collection collectionBinding : metadata.getCollectionBindings() ) {
if ( !isLob( collectionBinding ) ) {
collectionBinding.setCacheConcurrencyStrategy( getCacheConcurrencyStrategy() );
}
}
}
private static boolean isLob(Collection collectionBinding) {
return collectionBinding.getElement().isSimpleValue()
&& isLob( (SimpleValue) collectionBinding.getElement() );
}
private static boolean hasLob(PersistentClass entityBinding) {
for ( Property prop : entityBinding.getPropertyClosure() ) {
if ( prop.getValue().isSimpleValue() ) {
if ( isLob( (SimpleValue) prop.getValue() ) ) {
return true;
}
}
}
return false;
}
protected boolean overrideCacheStrategy() {
return true;
}
protected String getCacheConcurrencyStrategy() {
return null;
}
protected void applyMetadataBuilder(MetadataBuilder metadataBuilder) {
}
protected void applyMetadataSources(MetadataSources metadataSources) {
for ( Class annotatedClass : getAnnotatedClasses() ) {
metadataSources.addAnnotatedClass( annotatedClass );
}
String[] xmlFiles = getOrmXmlFiles();
if ( xmlFiles != null ) {
for ( String xmlFile : xmlFiles ) {
try ( InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream( xmlFile ) ) {
metadataSources.addInputStream( is );
}
catch (IOException e) {
throw new IllegalArgumentException( e );
}
}
}
}
protected Class[] getAnnotatedClasses() {
return NO_CLASSES;
}
protected String[] getOrmXmlFiles() {
return NO_MAPPINGS;
}
@Override
public void injectTestModelScope(DomainModelScope modelScope) {
this.modelScope = modelScope;
}
@Override
public SessionFactoryImplementor produceSessionFactory(MetadataImplementor model) {
log.trace( "Producing SessionFactory" );
final SessionFactoryBuilder sfBuilder = model.getSessionFactoryBuilder();
configure( sfBuilder );
final SessionFactoryImplementor factory = (SessionFactoryImplementor) sfBuilder.build();
sessionFactoryBuilt( factory );
return factory;
}
protected void configure(SessionFactoryBuilder builder) {
}
protected void sessionFactoryBuilt(SessionFactoryImplementor factory) {
}
@Override
public void injectSessionFactoryScope(SessionFactoryScope scope) {
sessionFactoryScope = scope;
}
// there is a chicken-egg problem here where the
// @AfterAll
// public void dropDatabase() {
// final SchemaManagementToolCoordinator.ActionGrouping actions = SchemaManagementToolCoordinator.ActionGrouping.interpret(
// registry.getService( ConfigurationService.class ).getSettings()
// );
//
// final boolean needsDropped = this.model != null && ( exportSchema() || actions.getDatabaseAction() != Action.NONE );
//
// if ( needsDropped ) {
// // atm we do not expose the (runtime) DatabaseModel from the SessionFactory so we
// // need to recreate it from the boot model.
// //
// // perhaps we should expose it from SF?
// final DatabaseModel databaseModel = Helper.buildDatabaseModel( registry, model );
// new SchemaExport( databaseModel, registry ).drop( EnumSet.of( TargetType.DATABASE ) );
// }
// }
@AfterEach
public final void afterTest() {
if ( isCleanupTestDataRequired() ) {
cleanupTestData();
}
}
protected boolean isCleanupTestDataRequired() {
return false;
}
protected void cleanupTestData() {
inTransaction(
session ->
getMetadata().getEntityBindings().forEach(
entityType -> session.createQuery( "delete from " + entityType.getEntityName() ).executeUpdate()
)
);
}
protected void inTransaction(Consumer<SessionImplementor> action) {
sessionFactoryScope().inTransaction( action );
}
protected <T> T fromTransaction(Function<SessionImplementor, T> action) {
return sessionFactoryScope().fromTransaction( action );
}
protected void inSession(Consumer<SessionImplementor> action){
sessionFactoryScope.inSession( action );
}
protected <T> T fromSession(Function<SessionImplementor, T> action){
return sessionFactoryScope.fromSession( action );
}
protected Dialect getDialect(){
return DialectContext.getDialect();
}
private static boolean isLob(SimpleValue value) {
final String typeName = value.getTypeName();
if ( typeName != null ) {
String significantTypeNamePart = typeName.substring( typeName.lastIndexOf( '.' ) + 1 )
.toLowerCase( Locale.ROOT );
switch ( significantTypeNamePart ) {
case "blob":
case "blobtype":
case "clob":
case "clobtype":
case "nclob":
case "nclobtype":
return true;
}
}
return false;
}
protected Future<?> executeAsync(Runnable callable) {
return executorService.submit(callable);
}
protected void executeSync(Runnable callable) {
try {
executeAsync( callable ).get();
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
catch (ExecutionException e) {
throw new RuntimeException( e.getCause() );
}
}
/**
* Execute function in a Hibernate transaction without return value
*
* @param sessionBuilderSupplier SessionFactory supplier
* @param function function
*/
public static void doInHibernateSessionBuilder(
Supplier<SessionBuilder> sessionBuilderSupplier,
TransactionUtil.HibernateTransactionConsumer function) {
Session session = null;
Transaction txn = null;
try {
session = sessionBuilderSupplier.get().openSession();
function.beforeTransactionCompletion();
txn = session.beginTransaction();
function.accept( session );
if ( !txn.getRollbackOnly() ) {
txn.commit();
}
else {
try {
txn.rollback();
}
catch (Exception e) {
log.error( "Rollback failure", e );
}
}
}
catch ( Throwable t ) {
if ( txn != null && txn.isActive() ) {
try {
txn.rollback();
}
catch (Exception e) {
log.error( "Rollback failure", e );
}
}
throw t;
}
finally {
function.afterTransactionCompletion();
if ( session != null ) {
session.close();
}
}
}
/**
* Execute function in a Hibernate transaction without return value
*
* @param sessionBuilderSupplier SessionFactory supplier
* @param function function
*/
public static <T> T doInHibernateSessionBuilder(
Supplier<SessionBuilder> sessionBuilderSupplier,
TransactionUtil.HibernateTransactionFunction<T> function) {
Session session = null;
Transaction txn = null;
try {
session = sessionBuilderSupplier.get().openSession();
function.beforeTransactionCompletion();
txn = session.beginTransaction();
T result = function.apply( session );
if ( !txn.getRollbackOnly() ) {
txn.commit();
}
else {
try {
txn.rollback();
}
catch (Exception e) {
log.error( "Rollback failure", e );
}
}
return result;
}
catch ( Throwable t ) {
if ( txn != null && txn.isActive() ) {
try {
txn.rollback();
}
catch (Exception e) {
log.error( "Rollback failure", e );
}
}
throw t;
}
finally {
function.afterTransactionCompletion();
if ( session != null ) {
session.close();
}
}
}
}
|
BaseSessionFactoryFunctionalTest
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/StreamsConfig.java
|
{
"start": 43841,
"end": 45787
}
|
class ____ that implements the <code>org.apache.kafka.streams.state.RocksDBConfigSetter</code> interface";
/** {@code security.protocol} */
@SuppressWarnings("WeakerAccess")
public static final String SECURITY_PROTOCOL_CONFIG = CommonClientConfigs.SECURITY_PROTOCOL_CONFIG;
/** {@code send.buffer.bytes} */
@SuppressWarnings("WeakerAccess")
public static final String SEND_BUFFER_CONFIG = CommonClientConfigs.SEND_BUFFER_CONFIG;
/** {@code state.cleanup.delay} */
@SuppressWarnings("WeakerAccess")
public static final String STATE_CLEANUP_DELAY_MS_CONFIG = "state.cleanup.delay.ms";
private static final String STATE_CLEANUP_DELAY_MS_DOC = "The amount of time in milliseconds to wait before deleting state when a partition has migrated. Only state directories that have not been modified for at least <code>state.cleanup.delay.ms</code> will be removed";
/** {@code state.dir} */
@SuppressWarnings("WeakerAccess")
public static final String STATE_DIR_CONFIG = "state.dir";
private static final String STATE_DIR_DOC = "Directory location for state store. This path must be unique for each streams instance sharing the same underlying filesystem. Note that if not configured, then the default location will be different in each environment as it is computed using System.getProperty(\"java.io.tmpdir\")";
/** {@code statestore.cache.max.bytes} */
@SuppressWarnings("WeakerAccess")
public static final String STATESTORE_CACHE_MAX_BYTES_CONFIG = "statestore.cache.max.bytes";
@Deprecated
public static final String STATESTORE_CACHE_MAX_BYTES_DOC = "Maximum number of memory bytes to be used for statestore cache across all threads";
/** {@code task.assignor.class} */
@SuppressWarnings("WeakerAccess")
public static final String TASK_ASSIGNOR_CLASS_CONFIG = "task.assignor.class";
private static final String TASK_ASSIGNOR_CLASS_DOC = "A task assignor
|
name
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/NodeToAttributeValue.java
|
{
"start": 1131,
"end": 1803
}
|
class ____ {
public static NodeToAttributeValue newInstance(String hostname,
String attributeValue) {
NodeToAttributeValue nodeToAttributeValue =
Records.newRecord(NodeToAttributeValue.class);
nodeToAttributeValue.setAttributeValue(attributeValue);
nodeToAttributeValue.setHostname(hostname);
return nodeToAttributeValue;
}
@Public
@Unstable
public abstract String getAttributeValue();
@Public
@Unstable
public abstract void setAttributeValue(String attributeValue);
@Public
@Unstable
public abstract String getHostname();
@Public
@Unstable
public abstract void setHostname(String hostname);
}
|
NodeToAttributeValue
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/common/util/concurrent/ThrottledIterator.java
|
{
"start": 961,
"end": 4353
}
|
class ____<T> implements Releasable {
private static final Logger logger = LogManager.getLogger(ThrottledIterator.class);
/**
* Iterate through the given collection, performing an operation on each item which may fork background tasks, but with a limit on the
* number of such background tasks running concurrently to avoid overwhelming the rest of the system (e.g. starving other work of access
* to an executor).
*
* @param iterator The items to iterate. May be accessed by multiple threads, but accesses are all protected by synchronizing on itself.
* @param itemConsumer The operation to perform on each item. Each operation receives a {@link RefCounted} which can be used to track
* the execution of any background tasks spawned for this item. This operation may run on the thread which
* originally called {@link #run}, if this method has not yet returned. Otherwise it will run on a thread on which a
* background task previously called {@link RefCounted#decRef()} on its ref count. This operation should not throw
* any exceptions.
* @param maxConcurrency The maximum number of ongoing operations at any time.
* @param onCompletion Executed when all items are completed.
*/
public static <T> void run(Iterator<T> iterator, BiConsumer<Releasable, T> itemConsumer, int maxConcurrency, Runnable onCompletion) {
try (var throttledIterator = new ThrottledIterator<>(iterator, itemConsumer, maxConcurrency, onCompletion)) {
throttledIterator.run();
}
}
private final RefCounted refs; // one ref for each running item, plus one for the iterator if incomplete
private final Iterator<T> iterator;
private final BiConsumer<Releasable, T> itemConsumer;
private final Semaphore permits;
private ThrottledIterator(Iterator<T> iterator, BiConsumer<Releasable, T> itemConsumer, int maxConcurrency, Runnable onCompletion) {
this.iterator = Objects.requireNonNull(iterator);
this.itemConsumer = Objects.requireNonNull(itemConsumer);
if (maxConcurrency <= 0) {
throw new IllegalArgumentException("maxConcurrency must be positive");
}
this.permits = new Semaphore(maxConcurrency);
this.refs = AbstractRefCounted.of(onCompletion);
}
private void run() {
while (permits.tryAcquire()) {
final T item;
synchronized (iterator) {
if (iterator.hasNext()) {
item = iterator.next();
} else {
permits.release();
return;
}
}
try (var itemRefs = new ItemRefCounted()) {
itemRefs.mustIncRef();
itemConsumer.accept(Releasables.releaseOnce(itemRefs::decRef), item);
} catch (Exception e) {
logger.error(Strings.format("exception when processing [%s] with [%s]", item, itemConsumer), e);
assert false : e;
}
}
}
@Override
public void close() {
refs.decRef();
}
// A RefCounted for a single item, including protection against calling back into run() if it's created and closed within a single
// invocation of run().
private
|
ThrottledIterator
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/client/tls/TlsWithP12TrustStoreWithHttpServerTest.java
|
{
"start": 866,
"end": 2417
}
|
class ____ {
private static final String configuration = """
quarkus.grpc.clients.hello.plain-text=false
quarkus.grpc.clients.hello.tls.trust-certificate-p12.path=target/certs/grpc-client-truststore.p12
quarkus.grpc.clients.hello.tls.trust-certificate-p12.password=password
quarkus.grpc.clients.hello.tls.enabled=true
quarkus.grpc.clients.hello.use-quarkus-grpc-client=true
quarkus.grpc.server.use-separate-server=false
quarkus.grpc.server.plain-text=false # Force the client to use TLS for the tests
quarkus.http.ssl.certificate.key-store-file=target/certs/grpc-keystore.jks
quarkus.http.ssl.certificate.key-store-password=password
quarkus.http.insecure-requests=disabled
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addPackage(HelloWorldTlsEndpoint.class.getPackage())
.addPackage(GreeterGrpc.class.getPackage())
.add(new StringAsset(configuration), "application.properties"));
@GrpcClient("hello")
GreeterGrpc.GreeterBlockingStub blockingHelloService;
@Test
void testClientTlsConfiguration() {
HelloReply reply = blockingHelloService.sayHello(HelloRequest.newBuilder().setName("neo").build());
assertThat(reply.getMessage()).isEqualTo("Hello neo");
}
}
|
TlsWithP12TrustStoreWithHttpServerTest
|
java
|
apache__kafka
|
trogdor/src/main/java/org/apache/kafka/trogdor/workload/ProduceBenchWorker.java
|
{
"start": 6607,
"end": 7227
}
|
class ____ extends Throttle {
private final KafkaProducer<?, ?> producer;
SendRecordsThrottle(int maxPerPeriod, KafkaProducer<?, ?> producer) {
super(maxPerPeriod, THROTTLE_PERIOD_MS);
this.producer = producer;
}
@Override
protected synchronized void delay(long amount) throws InterruptedException {
long startMs = time().milliseconds();
producer.flush();
long endMs = time().milliseconds();
long delta = endMs - startMs;
super.delay(amount - delta);
}
}
public
|
SendRecordsThrottle
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/ScanStream.java
|
{
"start": 1926,
"end": 11601
}
|
class ____ {
private ScanStream() {
}
/**
* Sequentially iterate over keys in the keyspace. This method uses {@code SCAN} to perform an iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
*/
public static <K, V> Flux<K> scan(RedisKeyReactiveCommands<K, V> commands) {
return scan(commands, Optional.empty());
}
/**
* Sequentially iterate over keys in the keyspace. This method uses {@code SCAN} to perform an iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param scanArgs the scan arguments, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
*/
public static <K, V> Flux<K> scan(RedisKeyReactiveCommands<K, V> commands, ScanArgs scanArgs) {
LettuceAssert.notNull(scanArgs, "ScanArgs must not be null");
return scan(commands, Optional.of(scanArgs));
}
private static <K, V> Flux<K> scan(RedisKeyReactiveCommands<K, V> commands, Optional<ScanArgs> scanArgs) {
LettuceAssert.notNull(commands, "RedisKeyCommands must not be null");
return scanArgs.map(commands::scan).orElseGet(commands::scan)
.expand(c -> !c.isFinished() ? scanArgs.map(it -> commands.scan(c, it)).orElseGet(() -> commands.scan(c))
: Mono.empty())
.flatMapIterable(KeyScanCursor::getKeys);
}
/**
* Sequentially iterate over entries in a hash identified by {@code key}. This method uses {@code HSCAN} to perform an
* iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param key the hash to scan.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
*/
public static <K, V> Flux<KeyValue<K, V>> hscan(RedisHashReactiveCommands<K, V> commands, K key) {
return hscan(commands, key, Optional.empty());
}
/**
* Sequentially iterate over keys in a hash identified by {@code key}. This method uses {@code HSCAN NOVALUES} to perform an
* iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param key the hash to scan.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
* @since 6.4
*/
public static <K, V> Flux<K> hscanNovalues(RedisHashReactiveCommands<K, V> commands, K key) {
return hscanNovalues(commands, key, Optional.empty());
}
/**
* Sequentially iterate over entries in a hash identified by {@code key}. This method uses {@code HSCAN} to perform an
* iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param key the hash to scan.
* @param scanArgs the scan arguments, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
*/
public static <K, V> Flux<KeyValue<K, V>> hscan(RedisHashReactiveCommands<K, V> commands, K key, ScanArgs scanArgs) {
LettuceAssert.notNull(scanArgs, "ScanArgs must not be null");
return hscan(commands, key, Optional.of(scanArgs));
}
/**
* Sequentially iterate over keys in a hash identified by {@code key}. This method uses {@code HSCAN NOVALUES} to perform an
* iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param key the hash to scan.
* @param scanArgs the scan arguments, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
* @since 6.4
*/
public static <K, V> Flux<K> hscanNovalues(RedisHashReactiveCommands<K, V> commands, K key, ScanArgs scanArgs) {
LettuceAssert.notNull(scanArgs, "ScanArgs must not be null");
return hscanNovalues(commands, key, Optional.of(scanArgs));
}
private static <K, V> Flux<KeyValue<K, V>> hscan(RedisHashReactiveCommands<K, V> commands, K key,
Optional<ScanArgs> scanArgs) {
LettuceAssert.notNull(commands, "RedisHashReactiveCommands must not be null");
LettuceAssert.notNull(key, "Key must not be null");
return scanArgs.map(it -> commands.hscan(key, it)).orElseGet(() -> commands.hscan(key))
.expand(c -> !c.isFinished()
? scanArgs.map(it -> commands.hscan(key, c, it)).orElseGet(() -> commands.hscan(key, c))
: Mono.empty())
.flatMapIterable(c -> {
List<KeyValue<K, V>> list = new ArrayList<>(c.getMap().size());
for (Map.Entry<K, V> kvEntry : c.getMap().entrySet()) {
list.add(KeyValue.fromNullable(kvEntry.getKey(), kvEntry.getValue()));
}
return list;
});
}
private static <K, V> Flux<K> hscanNovalues(RedisHashReactiveCommands<K, V> commands, K key, Optional<ScanArgs> scanArgs) {
LettuceAssert.notNull(commands, "RedisHashReactiveCommands must not be null");
LettuceAssert.notNull(key, "Key must not be null");
return scanArgs.map(it -> commands.hscanNovalues(key, it)).orElseGet(() -> commands.hscanNovalues(key))
.expand(c -> !c.isFinished()
? scanArgs.map(it -> commands.hscanNovalues(key, c, it)).orElseGet(() -> commands.hscanNovalues(key, c))
: Mono.empty())
.flatMapIterable(c -> {
List<K> list = new ArrayList<>(c.getKeys().size());
list.addAll(c.getKeys());
return list;
});
}
/**
* Sequentially iterate over elements in a set identified by {@code key}. This method uses {@code SSCAN} to perform an
* iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param key the set to scan.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
*/
public static <K, V> Flux<V> sscan(RedisSetReactiveCommands<K, V> commands, K key) {
return sscan(commands, key, Optional.empty());
}
/**
* Sequentially iterate over elements in a set identified by {@code key}. This method uses {@code SSCAN} to perform an
* iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param key the set to scan.
* @param scanArgs the scan arguments, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
*/
public static <K, V> Flux<V> sscan(RedisSetReactiveCommands<K, V> commands, K key, ScanArgs scanArgs) {
LettuceAssert.notNull(scanArgs, "ScanArgs must not be null");
return sscan(commands, key, Optional.of(scanArgs));
}
private static <K, V> Flux<V> sscan(RedisSetReactiveCommands<K, V> commands, K key, Optional<ScanArgs> scanArgs) {
LettuceAssert.notNull(commands, "RedisSetReactiveCommands must not be null");
LettuceAssert.notNull(key, "Key must not be null");
return scanArgs.map(it -> commands.sscan(key, it)).orElseGet(() -> commands.sscan(key))
.expand(c -> !c.isFinished()
? scanArgs.map(it -> commands.sscan(key, c, it)).orElseGet(() -> commands.sscan(key, c))
: Mono.empty())
.flatMapIterable(ValueScanCursor::getValues);
}
/**
* Sequentially iterate over elements in a set identified by {@code key}. This method uses {@code SSCAN} to perform an
* iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param key the sorted set to scan.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
*/
public static <K, V> Flux<ScoredValue<V>> zscan(RedisSortedSetReactiveCommands<K, V> commands, K key) {
return zscan(commands, key, Optional.empty());
}
/**
* Sequentially iterate over elements in a set identified by {@code key}. This method uses {@code SSCAN} to perform an
* iterative scan.
*
* @param commands the commands interface, must not be {@code null}.
* @param key the sorted set to scan.
* @param scanArgs the scan arguments, must not be {@code null}.
* @param <K> Key type.
* @param <V> Value type.
* @return a new {@link Flux}.
*/
public static <K, V> Flux<ScoredValue<V>> zscan(RedisSortedSetReactiveCommands<K, V> commands, K key, ScanArgs scanArgs) {
LettuceAssert.notNull(scanArgs, "ScanArgs must not be null");
return zscan(commands, key, Optional.of(scanArgs));
}
private static <K, V> Flux<ScoredValue<V>> zscan(RedisSortedSetReactiveCommands<K, V> commands, K key,
Optional<ScanArgs> scanArgs) {
LettuceAssert.notNull(commands, "RedisSortedSetReactiveCommands must not be null");
LettuceAssert.notNull(key, "Key must not be null");
return scanArgs.map(it -> commands.zscan(key, it)).orElseGet(() -> commands.zscan(key))
.expand(c -> !c.isFinished()
? scanArgs.map(it -> commands.zscan(key, c, it)).orElseGet(() -> commands.zscan(key, c))
: Mono.empty())
.flatMapIterable(ScoredValueScanCursor::getValues);
}
}
|
ScanStream
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/ser/jdk/NumberSerializers.java
|
{
"start": 7350,
"end": 8117
}
|
class ____ extends Base<Object> {
final static FloatSerializer instance = new FloatSerializer();
public FloatSerializer() {
super(Float.class, JsonParser.NumberType.FLOAT, "number");
}
@Override
public void serialize(Object value, JsonGenerator gen,
SerializationContext provider) throws JacksonException {
gen.writeNumber((Float) value);
}
}
/**
* This is the special serializer for regular {@link java.lang.Double}s (and
* primitive doubles)
* <p>
* Since this is one of "native" types, no type information is ever included
* on serialization (unlike for most scalar types as of 1.5)
*/
@JacksonStdImpl
public static
|
FloatSerializer
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/path/JSONPath_array_multi.java
|
{
"start": 194,
"end": 1220
}
|
class ____ extends TestCase {
Object[] list = new Object[10];
public JSONPath_array_multi(){
for (int i = 0; i < list.length; ++i) {
list[i] = new Object();
}
}
public void test_list_multi() throws Exception {
List<Object> result = (List<Object>) new JSONPath("$[2,4,5,8,100]").eval(list);
Assert.assertEquals(5, result.size());
Assert.assertSame(list[2], result.get(0));
Assert.assertSame(list[4], result.get(1));
Assert.assertSame(list[5], result.get(2));
Assert.assertSame(list[8], result.get(3));
Assert.assertNull(result.get(4));
}
public void test_list_multi_negative() throws Exception {
List<Object> result = (List<Object>) new JSONPath("$[-1,-2,-100]")
.eval(list);
Assert.assertEquals(3, result.size());
Assert.assertSame(list[9], result.get(0));
Assert.assertSame(list[8], result.get(1));
Assert.assertNull(result.get(2));
}
}
|
JSONPath_array_multi
|
java
|
apache__camel
|
components/camel-zeebe/src/main/java/org/apache/camel/component/zeebe/processor/ZeebeProcessor.java
|
{
"start": 932,
"end": 1031
}
|
interface ____ extends Service {
void process(Exchange exchange) throws Exception;
}
|
ZeebeProcessor
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/blocking/inheritance/BlockingFromTransactionalTest.java
|
{
"start": 2303,
"end": 3318
}
|
class ____ implements BlockingTestService {
@Override
public Uni<InheritenceTest.Msg> overridden1(InheritenceTest.Msg request) {
return isBlocking();
}
@Override
public Uni<InheritenceTest.Msg> overridden2(InheritenceTest.Msg request) {
return isBlocking();
}
@Override
public Uni<InheritenceTest.Msg> notOverridden1(InheritenceTest.Msg request) {
return isBlocking();
}
@Override
@NonBlocking
public Uni<InheritenceTest.Msg> notOverridden2(InheritenceTest.Msg request) {
return isBlocking();
}
Uni<InheritenceTest.Msg> isBlocking() {
boolean isEventLoop = Thread.currentThread().getName().contains("eventloop");
return Uni.createFrom().item(isEventLoop ? NON_BLOCKING : BLOCKING)
.map(text -> InheritenceTest.Msg.newBuilder().setText(text).build());
}
}
@GrpcService
public static
|
ServiceA
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/elementcollection/EmbeddedElementCollectionWithIdenticallyNamedAssociation2Test.java
|
{
"start": 5840,
"end": 6163
}
|
class ____ {
@Embedded
NestedEmbeddableB nested;
public EmbeddableB() {
}
public EmbeddableB(EntityA identicallyNamedAssociation) {
this.nested = new NestedEmbeddableB( identicallyNamedAssociation );
}
public NestedEmbeddableB getNested() {
return nested;
}
}
@Embeddable
public static
|
EmbeddableB
|
java
|
apache__dubbo
|
dubbo-compatible/src/test/java/org/apache/dubbo/config/spring/context/annotation/consumer/ConsumerConfiguration.java
|
{
"start": 3436,
"end": 3866
}
|
class ____ extends Ancestor {
private DemoService demoServiceFromParent;
public DemoService getDemoServiceFromParent() {
return demoServiceFromParent;
}
@Reference(version = "2.5.7", url = remoteURL)
public void setDemoServiceFromParent(DemoService demoServiceFromParent) {
this.demoServiceFromParent = demoServiceFromParent;
}
}
public static
|
Parent
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java
|
{
"start": 6569,
"end": 7482
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory ip;
private final EvalOperator.ExpressionEvaluator.Factory[] cidrs;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory ip,
EvalOperator.ExpressionEvaluator.Factory[] cidrs) {
this.source = source;
this.ip = ip;
this.cidrs = cidrs;
}
@Override
public CIDRMatchEvaluator get(DriverContext context) {
EvalOperator.ExpressionEvaluator[] cidrs = Arrays.stream(this.cidrs).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new);
return new CIDRMatchEvaluator(source, ip.get(context), cidrs, context);
}
@Override
public String toString() {
return "CIDRMatchEvaluator[" + "ip=" + ip + ", cidrs=" + Arrays.toString(cidrs) + "]";
}
}
}
|
Factory
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/indexcoll/Currency.java
|
{
"start": 309,
"end": 637
}
|
class ____ {
private Integer id;
@Id
@GeneratedValue
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@Column
private String currency;
public String getCurrency() {
return currency;
}
public void setCurrency(String currency) {
this.currency = currency;
}
}
|
Currency
|
java
|
apache__logging-log4j2
|
log4j-1.2-api/src/main/java/org/apache/log4j/builders/filter/FilterBuilder.java
|
{
"start": 967,
"end": 1031
}
|
interface ____ extends Parser<Filter> {
// empty
}
|
FilterBuilder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/UnknownFilterException.java
|
{
"start": 297,
"end": 754
}
|
class ____ extends HibernateException {
private final String name;
/**
* Constructs an {@code UnknownFilterException} for the given name.
*
* @param name The filter that was unknown.
*/
public UnknownFilterException(String name) {
super( "No filter named '" + name + "'" );
this.name = name;
}
/**
* The unknown filter name.
*
* @return The unknown filter name.
*/
public String getName() {
return name;
}
}
|
UnknownFilterException
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java
|
{
"start": 2238,
"end": 17096
}
|
class ____ extends ESTestCase {
private static ThreadPool threadPool;
private static final TimeValue timeout = TimeValue.MAX_VALUE;
@BeforeClass
public static void startThreadPool() {
threadPool = new TestThreadPool(TransportServiceHandshakeTests.class.getSimpleName());
}
private final List<TransportService> transportServices = new ArrayList<>();
private TransportService startServices(
String nodeNameAndId,
Settings settings,
TransportVersion transportVersion,
VersionInformation nodeVersion,
TransportInterceptor transportInterceptor
) {
TcpTransport transport = new Netty4Transport(
settings,
transportVersion,
threadPool,
new NetworkService(Collections.emptyList()),
PageCacheRecycler.NON_RECYCLING_INSTANCE,
new NamedWriteableRegistry(Collections.emptyList()),
new NoneCircuitBreakerService(),
new SharedGroupFactory(settings)
);
TransportService transportService = new MockTransportService(
settings,
transport,
threadPool,
transportInterceptor,
(boundAddress) -> DiscoveryNodeUtils.builder(nodeNameAndId)
.name(nodeNameAndId)
.address(boundAddress.publishAddress())
.roles(emptySet())
.version(nodeVersion)
.build(),
null,
Collections.emptySet(),
nodeNameAndId
);
transportService.start();
transportService.acceptIncomingRequests();
transportServices.add(transportService);
return transportService;
}
@After
public void tearDown() throws Exception {
for (TransportService transportService : transportServices) {
transportService.close();
}
super.tearDown();
}
@AfterClass
public static void terminateThreadPool() {
ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
// since static must set to null to be eligible for collection
threadPool = null;
}
public void testConnectToNodeLight() {
Settings settings = Settings.builder().put("cluster.name", "test").build();
TransportService transportServiceA = startServices(
"TS_A",
settings,
TransportVersion.current(),
VersionInformation.CURRENT,
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
TransportService transportServiceB = startServices(
"TS_B",
settings,
TransportVersionUtils.randomCompatibleVersion(random()),
new VersionInformation(
VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT),
IndexVersions.MINIMUM_COMPATIBLE,
IndexVersion.current()
),
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("")
.address(transportServiceB.getLocalNode().getAddress())
.roles(emptySet())
.version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current())
.build();
try (
Transport.Connection connection = AbstractSimpleTransportTestCase.openConnection(
transportServiceA,
discoveryNode,
TestProfiles.LIGHT_PROFILE
)
) {
DiscoveryNode connectedNode = safeAwait(listener -> transportServiceA.handshake(connection, timeout, listener));
assertNotNull(connectedNode);
// the name and version should be updated
assertEquals(connectedNode.getName(), "TS_B");
assertEquals(connectedNode.getVersion(), transportServiceB.getLocalNode().getVersion());
assertFalse(transportServiceA.nodeConnected(discoveryNode));
}
}
public void testMismatchedClusterName() {
TransportService transportServiceA = startServices(
"TS_A",
Settings.builder().put("cluster.name", "a").build(),
TransportVersion.current(),
VersionInformation.CURRENT,
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
TransportService transportServiceB = startServices(
"TS_B",
Settings.builder().put("cluster.name", "b").build(),
TransportVersion.current(),
VersionInformation.CURRENT,
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("")
.address(transportServiceB.getLocalNode().getAddress())
.roles(emptySet())
.version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current())
.build();
try (
Transport.Connection connection = AbstractSimpleTransportTestCase.openConnection(
transportServiceA,
discoveryNode,
TestProfiles.LIGHT_PROFILE
)
) {
assertThat(
safeAwaitFailure(
IllegalStateException.class,
DiscoveryNode.class,
listener -> transportServiceA.handshake(connection, timeout, listener)
).getMessage(),
containsString(
"handshake with [" + discoveryNode + "] failed: remote cluster name [b] does not match local cluster name [a]"
)
);
}
assertFalse(transportServiceA.nodeConnected(discoveryNode));
}
public void testIncompatibleNodeVersions() {
Settings settings = Settings.builder().put("cluster.name", "test").build();
TransportService transportServiceA = startServices(
"TS_A",
settings,
TransportVersion.current(),
VersionInformation.CURRENT,
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
TransportService transportServiceB = startServices(
"TS_B",
settings,
TransportVersion.minimumCompatible(),
new VersionInformation(
VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion()),
IndexVersions.MINIMUM_COMPATIBLE,
IndexVersion.current()
),
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("")
.address(transportServiceB.getLocalNode().getAddress())
.roles(emptySet())
.version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current())
.build();
try (
Transport.Connection connection = AbstractSimpleTransportTestCase.openConnection(
transportServiceA,
discoveryNode,
TestProfiles.LIGHT_PROFILE
)
) {
assertThat(
safeAwaitFailure(
IllegalStateException.class,
DiscoveryNode.class,
listener -> transportServiceA.handshake(connection, timeout, listener)
).getMessage(),
containsString(
"handshake with ["
+ discoveryNode
+ "] failed: remote node version ["
+ transportServiceB.getLocalNode().getVersion()
+ "] is incompatible with local node version ["
+ Version.CURRENT
+ "]"
)
);
}
assertFalse(transportServiceA.nodeConnected(discoveryNode));
}
public void testIncompatibleTransportVersions() {
Settings settings = Settings.builder().put("cluster.name", "test").build();
TransportService transportServiceA = startServices(
"TS_A",
settings,
TransportVersion.current(),
VersionInformation.CURRENT,
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
TransportService transportServiceB = startServices(
"TS_B",
settings,
TransportVersionUtils.getPreviousVersion(TransportVersion.minimumCompatible()),
new VersionInformation(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()),
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("")
.address(transportServiceB.getLocalNode().getAddress())
.roles(emptySet())
.version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current())
.build();
assertThat(
safeAwaitFailure(
Transport.Connection.class,
listener -> transportServiceA.openConnection(discoveryNode, TestProfiles.LIGHT_PROFILE, listener)
),
instanceOf(ConnectTransportException.class)
);
// the error is exposed as a general connection exception, the actual message is in the logs
assertFalse(transportServiceA.nodeConnected(discoveryNode));
}
public void testNodeConnectWithDifferentNodeId() {
Settings settings = Settings.builder().put("cluster.name", "test").build();
TransportService transportServiceA = startServices(
"TS_A",
settings,
TransportVersion.current(),
VersionInformation.CURRENT,
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
TransportService transportServiceB = startServices(
"TS_B",
settings,
TransportVersion.current(),
VersionInformation.CURRENT,
TransportService.NOOP_TRANSPORT_INTERCEPTOR
);
DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder(randomAlphaOfLength(10))
.address(transportServiceB.getLocalNode().getAddress())
.roles(emptySet())
.version(transportServiceB.getLocalNode().getVersionInformation())
.build();
assertThat(
safeAwaitFailure(
ConnectTransportException.class,
Releasable.class,
listener -> transportServiceA.connectToNode(discoveryNode, TestProfiles.LIGHT_PROFILE, listener)
).getMessage(),
allOf(
containsString("Connecting to [" + discoveryNode.getAddress() + "] failed"),
containsString("expected to connect to [" + discoveryNode.descriptionWithoutAttributes() + "]"),
containsString("found [" + transportServiceB.getLocalNode().descriptionWithoutAttributes() + "] instead"),
containsString("Ensure that each node has its own distinct publish address"),
containsString("routed to the correct node"),
containsString("https://www.elastic.co/docs/reference/elasticsearch/configuration-reference/networking-settings")
)
);
assertFalse(transportServiceA.nodeConnected(discoveryNode));
}
public void testRejectsMismatchedBuildHash() {
final var transportInterceptorA = new BuildHashModifyingTransportInterceptor();
final var transportInterceptorB = new BuildHashModifyingTransportInterceptor();
final Settings settings = Settings.builder()
.put("cluster.name", "a")
.put(IGNORE_DESERIALIZATION_ERRORS_SETTING.getKey(), true) // suppress assertions to test production error-handling
.build();
final TransportService transportServiceA = startServices(
"TS_A",
settings,
TransportVersion.current(),
VersionInformation.CURRENT,
transportInterceptorA
);
final TransportService transportServiceB = startServices(
"TS_B",
settings,
TransportVersion.current(),
VersionInformation.CURRENT,
transportInterceptorB
);
final DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("")
.address(transportServiceB.getLocalNode().getAddress())
.roles(emptySet())
.version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current())
.build();
try (
MockLog mockLog = MockLog.capture(TransportService.class);
Transport.Connection connection = AbstractSimpleTransportTestCase.openConnection(
transportServiceA,
discoveryNode,
TestProfiles.LIGHT_PROFILE
)
) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"message",
TransportService.class.getCanonicalName(),
Level.WARN,
"which has an incompatible wire format"
)
);
DiscoveryNode connectedNode = safeAwait(listener -> transportServiceA.handshake(connection, timeout, listener));
assertNotNull(connectedNode);
mockLog.awaitAllExpectationsMatched();
}
assertFalse(transportServiceA.nodeConnected(discoveryNode));
}
public void testAcceptsMismatchedBuildHashFromDifferentVersion() {
final var transportInterceptorA = new BuildHashModifyingTransportInterceptor();
final var transportInterceptorB = new BuildHashModifyingTransportInterceptor();
final TransportService transportServiceA = startServices(
"TS_A",
Settings.builder().put("cluster.name", "a").build(),
TransportVersion.current(),
VersionInformation.CURRENT,
transportInterceptorA
);
final TransportService transportServiceB = startServices(
"TS_B",
Settings.builder().put("cluster.name", "a").build(),
TransportVersion.minimumCompatible(),
new VersionInformation(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()),
transportInterceptorB
);
AbstractSimpleTransportTestCase.connectToNode(transportServiceA, transportServiceB.getLocalNode(), TestProfiles.LIGHT_PROFILE);
assertTrue(transportServiceA.nodeConnected(transportServiceB.getLocalNode()));
}
}
|
TransportServiceHandshakeTests
|
java
|
netty__netty
|
testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketAutoReadTest.java
|
{
"start": 1643,
"end": 5149
}
|
class ____ extends AbstractSocketTest {
@Test
public void testAutoReadOffDuringReadOnlyReadsOneTime(TestInfo testInfo) throws Throwable {
run(testInfo, new Runner<ServerBootstrap, Bootstrap>() {
@Override
public void run(ServerBootstrap serverBootstrap, Bootstrap bootstrap) throws Throwable {
testAutoReadOffDuringReadOnlyReadsOneTime(serverBootstrap, bootstrap);
}
});
}
public void testAutoReadOffDuringReadOnlyReadsOneTime(ServerBootstrap sb, Bootstrap cb) throws Throwable {
testAutoReadOffDuringReadOnlyReadsOneTime(true, sb, cb);
testAutoReadOffDuringReadOnlyReadsOneTime(false, sb, cb);
}
private static void testAutoReadOffDuringReadOnlyReadsOneTime(boolean readOutsideEventLoopThread,
ServerBootstrap sb, Bootstrap cb) throws Throwable {
Channel serverChannel = null;
Channel clientChannel = null;
try {
AutoReadInitializer serverInitializer = new AutoReadInitializer(!readOutsideEventLoopThread);
AutoReadInitializer clientInitializer = new AutoReadInitializer(!readOutsideEventLoopThread);
sb.option(ChannelOption.SO_BACKLOG, 1024)
.option(ChannelOption.AUTO_READ, true)
.childOption(ChannelOption.AUTO_READ, true)
// We want to ensure that we attempt multiple individual read operations per read loop so we can
// test the auto read feature being turned off when data is first read.
.childOption(ChannelOption.RECVBUF_ALLOCATOR, new TestRecvByteBufAllocator())
.childHandler(serverInitializer);
serverChannel = sb.bind().syncUninterruptibly().channel();
cb.option(ChannelOption.AUTO_READ, true)
// We want to ensure that we attempt multiple individual read operations per read loop so we can
// test the auto read feature being turned off when data is first read.
.option(ChannelOption.RECVBUF_ALLOCATOR, new TestRecvByteBufAllocator())
.handler(clientInitializer);
clientChannel = cb.connect(serverChannel.localAddress()).syncUninterruptibly().channel();
// 3 bytes means 3 independent reads for TestRecvByteBufAllocator
clientChannel.writeAndFlush(randomBufferType(clientChannel.alloc(), new byte[3], 0, 3));
serverInitializer.autoReadHandler.assertSingleRead();
// 3 bytes means 3 independent reads for TestRecvByteBufAllocator
serverInitializer.channel.writeAndFlush(
randomBufferType(serverInitializer.channel.alloc(), new byte[3], 0, 3));
clientInitializer.autoReadHandler.assertSingleRead();
if (readOutsideEventLoopThread) {
serverInitializer.channel.read();
}
serverInitializer.autoReadHandler.assertSingleReadSecondTry();
if (readOutsideEventLoopThread) {
clientChannel.read();
}
clientInitializer.autoReadHandler.assertSingleReadSecondTry();
} finally {
if (clientChannel != null) {
clientChannel.close().sync();
}
if (serverChannel != null) {
serverChannel.close().sync();
}
}
}
private static
|
SocketAutoReadTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/properties/PropertiesComponentServiceTest.java
|
{
"start": 984,
"end": 2430
}
|
class ____ extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testFunction() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("mock:foo").transform().constant("{{service:FOO}}").to("mock:bar");
}
});
context.start();
String body = System.getenv("FOO_SERVICE_HOST") + ":" + System.getenv("FOO_SERVICE_PORT");
getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:bar").expectedBodiesReceived(body);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testFunctionGetOrElse() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("mock:foo").transform().constant("{{service:BAR:myotherserver:8888}}").to("mock:bar");
}
});
context.start();
getMockEndpoint("mock:foo").expectedBodiesReceived("Hello World");
getMockEndpoint("mock:bar").expectedBodiesReceived("myotherserver:8888");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
}
|
PropertiesComponentServiceTest
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/util/TestMBeans.java
|
{
"start": 1197,
"end": 3046
}
|
class ____ implements DummyMXBean {
private int counter = 1;
@Test
public void testRegister() throws Exception {
ObjectName objectName = null;
try {
counter = 23;
objectName = MBeans.register("UnitTest",
"RegisterTest", this);
MBeanServer platformMBeanServer =
ManagementFactory.getPlatformMBeanServer();
int jmxCounter = (int) platformMBeanServer
.getAttribute(objectName, "Counter");
assertEquals(counter, jmxCounter);
} finally {
if (objectName != null) {
MBeans.unregister(objectName);
}
}
}
@Test
public void testRegisterWithAdditionalProperties() throws Exception {
ObjectName objectName = null;
try {
counter = 42;
Map<String, String> properties = new HashMap<String, String>();
properties.put("flavour", "server");
objectName = MBeans.register("UnitTest", "RegisterTest",
properties, this);
MBeanServer platformMBeanServer =
ManagementFactory.getPlatformMBeanServer();
int jmxCounter =
(int) platformMBeanServer.getAttribute(objectName, "Counter");
assertEquals(counter, jmxCounter);
} finally {
if (objectName != null) {
MBeans.unregister(objectName);
}
}
}
@Test
public void testGetMbeanNameName() {
HashMap<String, String> properties = new HashMap<>();
ObjectName mBeanName = MBeans.getMBeanName("Service",
"Name", properties);
assertEquals("Service",
MBeans.getMbeanNameService(mBeanName));
properties.put("key", "value");
mBeanName = MBeans.getMBeanName(
"Service",
"Name",
properties);
assertEquals("Service",
MBeans.getMbeanNameService(mBeanName));
}
@Override
public int getCounter() {
return counter;
}
}
|
TestMBeans
|
java
|
spring-projects__spring-framework
|
spring-context/src/test/java/org/springframework/context/annotation/configuration/ConfigurationBeanNameTests.java
|
{
"start": 3128,
"end": 3235
}
|
class ____ {
@Bean public String nestedBean() { return ""; }
}
}
@Configuration("imported")
static
|
B
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/IgnoredSourceFieldMapper.java
|
{
"start": 6355,
"end": 8195
}
|
class ____ extends StringFieldType {
private static final IgnoredValuesFieldMapperType INSTANCE = new IgnoredValuesFieldMapperType();
private IgnoredValuesFieldMapperType() {
super(NAME, IndexType.NONE, true, TextSearchInfo.NONE, Collections.emptyMap());
}
@Override
public String typeName() {
return NAME;
}
@Override
public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
return new StoredValueFetcher(context.lookup(), NAME);
}
}
private IgnoredSourceFieldMapper(IndexSettings indexSettings) {
super(IgnoredValuesFieldMapperType.INSTANCE);
this.indexSettings = indexSettings;
}
@Override
protected String contentType() {
return NAME;
}
@Override
public void postParse(DocumentParserContext context) {
// Ignored values are only expected in synthetic mode.
if (context.mappingLookup().isSourceSynthetic() == false) {
assert context.getIgnoredFieldValues().isEmpty();
return;
}
ignoredSourceFormat(context.indexSettings().getIndexVersionCreated()).writeIgnoredFields(context.getIgnoredFieldValues());
}
// In rare cases decoding values stored in this field can fail leading to entire source
// not being available.
// We would like to have an option to lose some values in synthetic source
// but have search not fail.
public static Set<String> ensureLoaded(Set<String> fieldsToLoadForSyntheticSource, IndexSettings indexSettings) {
if (indexSettings.getSkipIgnoredSourceRead() == false) {
fieldsToLoadForSyntheticSource.add(NAME);
}
return fieldsToLoadForSyntheticSource;
}
public static
|
IgnoredValuesFieldMapperType
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/internal/files/Files_assertIsDirectoryContaining_SyntaxAndPattern_Test.java
|
{
"start": 1887,
"end": 6277
}
|
class ____ extends FilesBaseTest {
private static final String JAVA_SOURCE_PATTERN = "regex:.+\\.java";
private static final String JAVA_SOURCE_PATTERN_DESCRIPTION = "the '%s' pattern".formatted(JAVA_SOURCE_PATTERN);
@Test
void should_pass_if_actual_contains_a_file_matching_the_given_pathMatcherPattern() {
// GIVEN
File actual = newFolder(tempDir.getAbsolutePath() + "/folder");
newFile(actual.getAbsolutePath() + "/Test.java");
// WHEN/THEN
underTest.assertIsDirectoryContaining(INFO, actual, JAVA_SOURCE_PATTERN);
}
@Test
void should_pass_if_all_actual_files_match_the_given_pathMatcherPattern() {
// GIVEN
File actual = newFolder(tempDir.getAbsolutePath() + "/folder");
newFile(actual.getAbsolutePath() + "/Test.java");
newFile(actual.getAbsolutePath() + "/Utils.java");
// WHEN/THEN
underTest.assertIsDirectoryContaining(INFO, actual, JAVA_SOURCE_PATTERN);
}
@Test
void should_pass_if_actual_contains_some_files_matching_the_given_pathMatcherPattern() {
// GIVEN
File actual = newFolder(tempDir.getAbsolutePath() + "/folder");
newFile(actual.getAbsolutePath() + "/Test.java");
newFile(actual.getAbsolutePath() + "/Test.class");
newFile(actual.getAbsolutePath() + "/Utils.class");
newFile(actual.getAbsolutePath() + "/Utils.java");
newFile(actual.getAbsolutePath() + "/application.yml");
// WHEN/THEN
underTest.assertIsDirectoryContaining(INFO, actual, JAVA_SOURCE_PATTERN);
}
@Test
void should_throw_error_if_pathMatcherPattern_is_null() {
// GIVEN
String pathMatcherPattern = null;
// THEN
assertThatNullPointerException().isThrownBy(() -> underTest.assertIsDirectoryContaining(INFO, null, pathMatcherPattern))
.withMessage("The syntax and pattern should not be null");
}
@Test
void should_fail_if_actual_is_null() {
// GIVEN
File actual = null;
// WHEN
var error = expectAssertionError(() -> underTest.assertIsDirectoryContaining(INFO, actual, JAVA_SOURCE_PATTERN));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_exist() {
// GIVEN
File actual = new File("xyz");
// WHEN
expectAssertionError(() -> underTest.assertIsDirectoryContaining(INFO, actual, JAVA_SOURCE_PATTERN));
// THEN
verify(failures).failure(INFO, shouldBeDirectory(actual));
}
@Test
void should_fail_if_actual_exists_but_is_not_a_directory() {
// GIVEN
File actual = newFile(tempDir.getAbsolutePath() + "/Test.java");
// WHEN
expectAssertionError(() -> underTest.assertIsDirectoryContaining(INFO, actual, JAVA_SOURCE_PATTERN));
// THEN
verify(failures).failure(INFO, shouldBeDirectory(actual));
}
// use mock as it's hard to simulate listFiles(FileFilter.class) to return null
@Test
void should_throw_error_on_null_listing() {
// GIVEN
File actual = mock(File.class);
given(actual.exists()).willReturn(true);
given(actual.isDirectory()).willReturn(true);
given(actual.listFiles(any(FileFilter.class))).willReturn(null);
mockPathMatcher(actual);
// WHEN
Throwable error = catchThrowable(() -> underTest.assertIsDirectoryContaining(INFO, actual, JAVA_SOURCE_PATTERN));
// THEN
then(error).isInstanceOf(NullPointerException.class)
.hasMessage("Directory listing should not be null");
}
@Test
void should_fail_if_actual_is_empty() {
// GIVEN
File actual = newFolder(tempDir.getAbsolutePath() + "/folder");
// WHEN
expectAssertionError(() -> underTest.assertIsDirectoryContaining(INFO, actual, JAVA_SOURCE_PATTERN));
// THEN
verify(failures).failure(INFO, directoryShouldContain(actual, emptyList(), JAVA_SOURCE_PATTERN_DESCRIPTION));
}
@Test
void should_fail_if_actual_does_not_contain_any_files_matching_the_given_pathMatcherPattern() {
// GIVEN
File actual = newFolder(tempDir.getAbsolutePath() + "/folder");
File file = newFile(actual.getAbsolutePath() + "/Test.class");
List<File> items = list(file);
// WHEN
expectAssertionError(() -> underTest.assertIsDirectoryContaining(INFO, actual, JAVA_SOURCE_PATTERN));
// THEN
verify(failures).failure(INFO, directoryShouldContain(actual, items, JAVA_SOURCE_PATTERN_DESCRIPTION));
}
}
|
Files_assertIsDirectoryContaining_SyntaxAndPattern_Test
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/ReflectionRegistration.java
|
{
"start": 674,
"end": 1222
}
|
class ____ it's needed.
*
* @param beanClassName
* @param subclassName
*/
default void registerSubclass(DotName beanClassName, String subclassName) {
}
ReflectionRegistration NOOP = new ReflectionRegistration() {
@Override
public void registerMethod(String declaringClass, String name, String... params) {
}
@Override
public void registerMethod(MethodInfo methodInfo) {
}
@Override
public void registerField(FieldInfo fieldInfo) {
}
};
}
|
if
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transform/Transform.java
|
{
"start": 1165,
"end": 3672
}
|
enum ____ {
SUCCESS,
FAILURE
}
protected final String type;
protected final Status status;
@Nullable
protected final Payload payload;
@Nullable
protected final String reason;
@Nullable
protected final Exception exception;
public Result(String type, Payload payload) {
this.type = type;
this.status = Status.SUCCESS;
this.payload = payload;
this.reason = null;
this.exception = null;
}
public Result(String type, String reason) {
this.type = type;
this.status = Status.FAILURE;
this.reason = reason;
this.payload = null;
this.exception = null;
}
public Result(String type, Exception e) {
this.type = type;
this.status = Status.FAILURE;
this.reason = e.getMessage();
this.payload = null;
this.exception = e;
}
public String type() {
return type;
}
public Status status() {
return status;
}
public Payload payload() {
assert status == Status.SUCCESS;
return payload;
}
public String reason() {
assert status == Status.FAILURE;
return reason;
}
@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(TYPE.getPreferredName(), type);
builder.field(STATUS.getPreferredName(), status.name().toLowerCase(Locale.ROOT));
switch (status) {
case SUCCESS:
assert exception == null;
builder.field(PAYLOAD.getPreferredName(), payload, params);
break;
case FAILURE:
assert payload == null;
builder.field(REASON.getPreferredName(), reason);
ElasticsearchException.generateFailureXContent(builder, params, exception, true);
break;
default:
assert false;
}
typeXContent(builder, params);
return builder.endObject();
}
protected abstract XContentBuilder typeXContent(XContentBuilder builder, Params params) throws IOException;
}
|
Status
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/MappingException.java
|
{
"start": 258,
"end": 813
}
|
class ____ extends org.hibernate.MappingException {
private final Origin origin;
public MappingException(String message, Origin origin) {
super( message );
this.origin = origin;
}
public MappingException(String message, Throwable root, Origin origin) {
super( message, root );
this.origin = origin;
}
@Override
public String getMessage() {
final String message = super.getMessage();
return origin != null
? message + " [" + origin.getName() + "]"
: message;
}
public Origin getOrigin() {
return origin;
}
}
|
MappingException
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/TransformBeanExceptionTest.java
|
{
"start": 973,
"end": 2472
}
|
class ____ extends ContextTestSupport {
@Test
public void testTransformBeanException() throws Exception {
getMockEndpoint("mock:dead").expectedBodiesReceived("Hello World", "Bye World", "Hi World", "Hi Camel", "Bye Camel");
template.sendBody("direct:transform", "Hello World");
template.sendBody("direct:bean", "Bye World");
template.sendBody("direct:setBody", "Hi World");
template.sendBody("direct:setHeader", "Hi Camel");
template.sendBody("direct:setProperty", "Bye Camel");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
errorHandler(deadLetterChannel("mock:dead"));
from("direct:transform").transform().method(TransformBeanExceptionTest.class, "throwUp");
from("direct:bean").bean(TransformBeanExceptionTest.class, "throwUp");
from("direct:setBody").setBody().method(TransformBeanExceptionTest.class, "throwUp");
from("direct:setHeader").setHeader("hello").method(TransformBeanExceptionTest.class, "throwUp");
from("direct:setProperty").setProperty("bye").method(TransformBeanExceptionTest.class, "throwUp");
}
};
}
public static String throwUp(String body) {
throw new IllegalArgumentException("Forced");
}
}
|
TransformBeanExceptionTest
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/ZooKeeperCheckpointIDCounterITCase.java
|
{
"start": 1754,
"end": 6403
}
|
class ____ extends CheckpointIDCounterTestBase {
private final ZooKeeperExtension zooKeeperExtension = new ZooKeeperExtension();
@RegisterExtension
final EachCallbackWrapper<ZooKeeperExtension> zooKeeperResource =
new EachCallbackWrapper<>(zooKeeperExtension);
@RegisterExtension
final TestingFatalErrorHandlerExtension testingFatalErrorHandlerResource =
new TestingFatalErrorHandlerExtension();
private CuratorFramework getZooKeeperClient() {
return zooKeeperExtension.getZooKeeperClient(
testingFatalErrorHandlerResource.getTestingFatalErrorHandler());
}
/** Tests that counter node is removed from ZooKeeper after shutdown. */
@Test
void testShutdownRemovesState() throws Exception {
ZooKeeperCheckpointIDCounter counter = createCheckpointIdCounter();
counter.start();
CuratorFramework client = getZooKeeperClient();
assertThat(client.checkExists().forPath(counter.getPath())).isNotNull();
counter.shutdown(JobStatus.FINISHED).join();
assertThat(client.checkExists().forPath(counter.getPath())).isNull();
}
@Test
void testIdempotentShutdown() throws Exception {
ZooKeeperCheckpointIDCounter counter = createCheckpointIdCounter();
counter.start();
CuratorFramework client = getZooKeeperClient();
counter.shutdown(JobStatus.FINISHED).join();
// shutdown shouldn't fail due to missing path
counter.shutdown(JobStatus.FINISHED).join();
assertThat(client.checkExists().forPath(counter.getPath())).isNull();
}
@Test
void testShutdownWithFailureDueToMissingConnection() throws Exception {
ZooKeeperCheckpointIDCounter counter = createCheckpointIdCounter();
counter.start();
zooKeeperExtension.close();
assertThatFuture(counter.shutdown(JobStatus.FINISHED))
.as("The shutdown should fail because of the client connection being dropped.")
.eventuallyFailsWith(ExecutionException.class)
.withCauseInstanceOf(IllegalStateException.class);
}
@Test
void testShutdownWithFailureDueToExistingChildNodes() throws Exception {
final ZooKeeperCheckpointIDCounter counter = createCheckpointIdCounter();
counter.start();
final CuratorFramework client =
ZooKeeperUtils.useNamespaceAndEnsurePath(getZooKeeperClient(), "/");
final String counterNodePath = ZooKeeperUtils.generateZookeeperPath(counter.getPath());
final String childNodePath =
ZooKeeperUtils.generateZookeeperPath(
counterNodePath, "unexpected-child-node-causing-a-failure");
client.create().forPath(childNodePath);
final String namespacedCounterNodePath =
ZooKeeperUtils.generateZookeeperPath(client.getNamespace(), counterNodePath);
final Throwable expectedRootCause =
KeeperException.create(KeeperException.Code.NOTEMPTY, namespacedCounterNodePath);
assertThatFuture(counter.shutdown(JobStatus.FINISHED))
.as(
"The shutdown should fail because of a child node being present and the shutdown not performing an explicit recursive deletion.")
.eventuallyFailsWith(ExecutionException.class)
.havingCause()
.withCause(expectedRootCause);
client.delete().forPath(childNodePath);
counter.shutdown(JobStatus.FINISHED).join();
assertThat(client.checkExists().forPath(counterNodePath))
.as(
"A retry of the shutdown should have worked now after the root cause was resolved.")
.isNull();
}
/** Tests that counter node is NOT removed from ZooKeeper after suspend. */
@Test
void testSuspendKeepsState() throws Exception {
ZooKeeperCheckpointIDCounter counter = createCheckpointIdCounter();
counter.start();
CuratorFramework client = getZooKeeperClient();
assertThat(client.checkExists().forPath(counter.getPath())).isNotNull();
counter.shutdown(JobStatus.SUSPENDED).join();
assertThat(client.checkExists().forPath(counter.getPath())).isNotNull();
}
@Override
protected ZooKeeperCheckpointIDCounter createCheckpointIdCounter() throws Exception {
return new ZooKeeperCheckpointIDCounter(
ZooKeeperUtils.useNamespaceAndEnsurePath(getZooKeeperClient(), "/"),
new DefaultLastStateConnectionStateListener());
}
}
|
ZooKeeperCheckpointIDCounterITCase
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/resolver/package-info.java
|
{
"start": 1501,
"end": 1868
}
|
interface ____ determining the NN
* and local file path for a given file/folder based on the global namespace
* path.
* </ul>
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
package org.apache.hadoop.hdfs.server.federation.resolver;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
|
for
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/event/spi/EnversPreCollectionUpdateEventListenerImpl.java
|
{
"start": 564,
"end": 1266
}
|
class ____
extends BaseEnversCollectionEventListener
implements PreCollectionUpdateEventListener {
public EnversPreCollectionUpdateEventListenerImpl(EnversService enversService) {
super( enversService );
}
@Override
public void onPreUpdateCollection(PreCollectionUpdateEvent event) {
final CollectionEntry collectionEntry = getCollectionEntry( event );
if ( !collectionEntry.getLoadedPersister().isInverse() ) {
onCollectionAction( event, event.getCollection(), collectionEntry.getSnapshot(), collectionEntry );
}
else {
onCollectionActionInversed( event, event.getCollection(), collectionEntry.getSnapshot(), collectionEntry );
}
}
}
|
EnversPreCollectionUpdateEventListenerImpl
|
java
|
elastic__elasticsearch
|
modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java
|
{
"start": 14990,
"end": 16034
}
|
class ____ implements IndexingOperationListener {
private static final Logger log = LogManager.getLogger(CancelTests.class);
@Override
public Engine.Index preIndex(ShardId shardId, Engine.Index index) {
return preCheck(index);
}
@Override
public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
return preCheck(delete);
}
private <T extends Engine.Operation> T preCheck(T operation) {
if ((operation.origin() != Origin.PRIMARY)) {
return operation;
}
try {
log.debug("checking");
if (ALLOWED_OPERATIONS.tryAcquire(30, TimeUnit.SECONDS)) {
log.debug("passed");
return operation;
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
throw new IllegalStateException("Something went wrong");
}
}
}
|
BlockingOperationListener
|
java
|
apache__camel
|
components/camel-box/camel-box-component/src/generated/java/org/apache/camel/component/box/internal/BoxApiName.java
|
{
"start": 254,
"end": 724
}
|
enum ____ implements ApiName {
COLLABORATIONS("collaborations"),
COMMENTS("comments"),
EVENT_LOGS("event-logs"),
FILES("files"),
FOLDERS("folders"),
GROUPS("groups"),
EVENTS("events"),
SEARCH("search"),
TASKS("tasks"),
USERS("users");
private final String name;
private BoxApiName(String name) {
this.name = name;
}
@Override
public String getName() {
return name;
}
}
|
BoxApiName
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/Action.java
|
{
"start": 761,
"end": 822
}
|
class ____ implements ToXContentFragment {
public
|
Result
|
java
|
ReactiveX__RxJava
|
src/test/java/io/reactivex/rxjava3/internal/operators/observable/ObservableOnErrorCompleteTest.java
|
{
"start": 997,
"end": 3895
}
|
class ____ {
@Test
public void normal() {
Observable.range(1, 10)
.onErrorComplete()
.test()
.assertResult(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
}
@Test
public void empty() {
Observable.empty()
.onErrorComplete()
.test()
.assertResult();
}
@Test
public void error() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Observable.error(new TestException())
.onErrorComplete()
.test()
.assertResult();
assertTrue("" + errors, errors.isEmpty());
});
}
@Test
public void errorMatches() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Observable.error(new TestException())
.onErrorComplete(error -> error instanceof TestException)
.test()
.assertResult();
assertTrue("" + errors, errors.isEmpty());
});
}
@Test
public void errorNotMatches() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Observable.error(new IOException())
.onErrorComplete(error -> error instanceof TestException)
.test()
.assertFailure(IOException.class);
assertTrue("" + errors, errors.isEmpty());
});
}
@Test
public void errorPredicateCrash() throws Throwable {
TestHelper.withErrorTracking(errors -> {
TestObserverEx<Object> to = Observable.error(new IOException())
.onErrorComplete(error -> { throw new TestException(); })
.subscribeWith(new TestObserverEx<>())
.assertFailure(CompositeException.class);
TestHelper.assertError(to, 0, IOException.class);
TestHelper.assertError(to, 1, TestException.class);
assertTrue("" + errors, errors.isEmpty());
});
}
@Test
public void itemsThenError() throws Throwable {
TestHelper.withErrorTracking(errors -> {
Observable.range(1, 5)
.map(v -> 4 / (3 - v))
.onErrorComplete()
.test()
.assertResult(2, 4);
assertTrue("" + errors, errors.isEmpty());
});
}
@Test
public void dispose() {
PublishSubject<Integer> ps = PublishSubject.create();
TestObserver<Integer> to = ps
.onErrorComplete()
.test();
assertTrue("No subscribers?!", ps.hasObservers());
to.dispose();
assertFalse("Still subscribers?!", ps.hasObservers());
}
@Test
public void onSubscribe() {
TestHelper.checkDoubleOnSubscribeObservable(f -> f.onErrorComplete());
}
@Test
public void isDisposed() {
TestHelper.checkDisposed(PublishSubject.create().onErrorComplete());
}
}
|
ObservableOnErrorCompleteTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/cli/src/test/java/org/elasticsearch/xpack/security/cli/HttpCertificateCommandTests.java
|
{
"start": 39908,
"end": 40411
}
|
class ____ extends HttpCertificateCommand {
final Map<String, Path> paths;
PathAwareHttpCertificateCommand(Path... configuredPaths) {
paths = Stream.of(configuredPaths).collect(Collectors.toUnmodifiableMap(Path::toString, Function.identity()));
}
@Override
protected Path resolvePath(String name) {
return Optional.ofNullable(this.paths.get(name)).orElseGet(() -> super.resolvePath(name));
}
}
}
|
PathAwareHttpCertificateCommand
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/engine/extension/ExtensionRegistrationViaParametersAndFieldsTests.java
|
{
"start": 21842,
"end": 22452
}
|
class ____ {
@SuppressWarnings("unused")
@MagicField
private static String staticField1;
@MagicField
static String staticField2;
@BeforeAll
static void beforeAll() {
assertThat(staticField1).isEqualTo("beforeAll - staticField1");
assertThat(staticField2).isEqualTo("beforeAll - staticField2");
}
@Test
void test() {
assertThat(staticField1).isEqualTo("beforeAll - staticField1");
assertThat(staticField2).isEqualTo("beforeAll - staticField2");
}
}
/**
* The {@link MagicField.Extension} is registered via an instance field.
*/
@NullUnmarked
static
|
StaticFieldTestCase
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
|
{
"start": 1978,
"end": 3207
}
|
class ____ {
private static HardLinkCommandGetter getHardLinkCommand;
public final LinkStats linkStats; //not static
static final Logger LOG = LoggerFactory.getLogger(HardLink.class);
private static final String FILE_ATTRIBUTE_VIEW = "unix";
private static final String FILE_ATTRIBUTE = "unix:nlink";
//initialize the command "getters" statically, so can use their
//methods without instantiating the HardLink object
static {
if (Shell.WINDOWS) {
// Windows
getHardLinkCommand = new HardLinkCGWin();
} else {
// Unix or Linux
getHardLinkCommand = new HardLinkCGUnix();
//override getLinkCountCommand for the particular Unix variant
//Linux is already set as the default - {"stat","-c%h", null}
if (Shell.MAC || Shell.FREEBSD) {
String[] linkCountCmdTemplate = {"/usr/bin/stat","-f%l", null};
HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);
} else if (Shell.SOLARIS) {
String[] linkCountCmdTemplate = {"ls","-l", null};
HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);
}
}
}
public HardLink() {
linkStats = new LinkStats();
}
/**
* This abstract
|
HardLink
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/client/protocol/decoder/CodecDecoder.java
|
{
"start": 773,
"end": 932
}
|
class ____ implements MultiDecoder<Object> {
@Override
public Object decode(List<Object> parts, State state) {
return parts;
}
}
|
CodecDecoder
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/utils/UtilsTest.java
|
{
"start": 55245,
"end": 55796
}
|
class ____ extends Exception {
final String key;
TestException(String key) {
this.key = key;
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
TestException that = (TestException) o;
return key.equals(that.key);
}
@Override
public int hashCode() {
return key.hashCode();
}
}
}
|
TestException
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/array/StructArrayWithNullElementTestDemoTest.java
|
{
"start": 1801,
"end": 1954
}
|
class ____ {
@Id
int id;
List<Author> authors;
}
@Embeddable
@Struct(name = "Author")
record Author(String firstName, String lastName) {
}
}
|
Book
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java
|
{
"start": 55035,
"end": 57003
}
|
class ____ extends RetryableAction<PersistentTasksCustomMetadata.PersistentTask<?>> {
private static final int MIN_RETRY_SLEEP_MILLIS = 500;
private static final int RETRY_TIMEOUT_SECONDS = 30;
private final JobTask jobTask;
private final JobTaskState jobTaskState;
/**
* @param logger The logger (use AutodetectProcessManager.logger)
* @param threadPool The ThreadPool to schedule retries on
* @param jobTask The JobTask whose state we’re updating
* @param jobTaskState The new state to persist
*/
UpdateStateRetryableAction(
Logger logger,
ThreadPool threadPool,
JobTask jobTask,
JobTaskState jobTaskState,
ActionListener<PersistentTasksCustomMetadata.PersistentTask<?>> delegateListener
) {
super(
logger,
threadPool,
TimeValue.timeValueMillis(UpdateStateRetryableAction.MIN_RETRY_SLEEP_MILLIS),
TimeValue.timeValueSeconds(UpdateStateRetryableAction.RETRY_TIMEOUT_SECONDS),
delegateListener,
// executor for retries
threadPool.generic()
);
this.jobTask = Objects.requireNonNull(jobTask);
this.jobTaskState = Objects.requireNonNull(jobTaskState);
}
@Override
public void tryAction(ActionListener<PersistentTasksCustomMetadata.PersistentTask<?>> listener) {
// this will call back either onResponse(...) or onFailure(...)
jobTask.updatePersistentTaskState(jobTaskState, listener);
}
@Override
public boolean shouldRetry(Exception e) {
// retry everything *except* when the task truly no longer exists
return (ExceptionsHelper.unwrapCause(e) instanceof ResourceNotFoundException) == false;
}
}
}
|
UpdateStateRetryableAction
|
java
|
quarkusio__quarkus
|
integration-tests/spring-data-jpa/src/main/java/io/quarkus/it/spring/data/jpa/Animal.java
|
{
"start": 185,
"end": 304
}
|
class ____ {
@Id
@GeneratedValue
private long id;
public long getId() {
return id;
}
}
|
Animal
|
java
|
apache__flink
|
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/header/operation/GetOperationStatusHeaders.java
|
{
"start": 1176,
"end": 2072
}
|
class ____ extends AbstractOperationHeaders {
private static final GetOperationStatusHeaders INSTANCE = new GetOperationStatusHeaders();
private static final String URL =
"/sessions/:"
+ SessionHandleIdPathParameter.KEY
+ "/operations/:"
+ OperationHandleIdPathParameter.KEY
+ "/status";
@Override
public String getDescription() {
return "Get the status of operation.";
}
@Override
public HttpMethodWrapper getHttpMethod() {
return HttpMethodWrapper.GET;
}
@Override
public String getTargetRestEndpointURL() {
return URL;
}
public static GetOperationStatusHeaders getInstance() {
return INSTANCE;
}
@Override
public String operationId() {
return "getOperationStatus";
}
}
|
GetOperationStatusHeaders
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/LogAggregationReportPBImpl.java
|
{
"start": 1766,
"end": 5636
}
|
class ____ extends LogAggregationReport {
LogAggregationReportProto proto = LogAggregationReportProto
.getDefaultInstance();
LogAggregationReportProto.Builder builder = null;
boolean viaProto = false;
private ApplicationId applicationId;
public LogAggregationReportPBImpl() {
builder = LogAggregationReportProto.newBuilder();
}
public LogAggregationReportPBImpl(LogAggregationReportProto proto) {
this.proto = proto;
viaProto = true;
}
public LogAggregationReportProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return TextFormat.shortDebugString(getProto());
}
private void mergeLocalToBuilder() {
if (this.applicationId != null
&& !((ApplicationIdPBImpl) this.applicationId).getProto().equals(
builder.getApplicationId())) {
builder.setApplicationId(convertToProtoFormat(this.applicationId));
}
}
private void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
proto = builder.build();
viaProto = true;
}
private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = LogAggregationReportProto.newBuilder(proto);
}
viaProto = false;
}
@Override
public ApplicationId getApplicationId() {
if (this.applicationId != null) {
return this.applicationId;
}
LogAggregationReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasApplicationId()) {
return null;
}
this.applicationId = convertFromProtoFormat(p.getApplicationId());
return this.applicationId;
}
@Override
public void setApplicationId(ApplicationId appId) {
maybeInitBuilder();
if (appId == null)
builder.clearApplicationId();
this.applicationId = appId;
}
private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
return ((ApplicationIdPBImpl) t).getProto();
}
private ApplicationIdPBImpl convertFromProtoFormat(
ApplicationIdProto applicationId) {
return new ApplicationIdPBImpl(applicationId);
}
@Override
public LogAggregationStatus getLogAggregationStatus() {
LogAggregationReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasLogAggregationStatus()) {
return null;
}
return convertFromProtoFormat(p.getLogAggregationStatus());
}
@Override
public void
setLogAggregationStatus(LogAggregationStatus logAggregationStatus) {
maybeInitBuilder();
if (logAggregationStatus == null) {
builder.clearLogAggregationStatus();
return;
}
builder.setLogAggregationStatus(convertToProtoFormat(logAggregationStatus));
}
private LogAggregationStatus convertFromProtoFormat(
LogAggregationStatusProto s) {
return ProtoUtils.convertFromProtoFormat(s);
}
private LogAggregationStatusProto
convertToProtoFormat(LogAggregationStatus s) {
return ProtoUtils.convertToProtoFormat(s);
}
@Override
public String getDiagnosticMessage() {
LogAggregationReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasDiagnostics()) {
return null;
}
return p.getDiagnostics();
}
@Override
public void setDiagnosticMessage(String diagnosticMessage) {
maybeInitBuilder();
if (diagnosticMessage == null) {
builder.clearDiagnostics();
return;
}
builder.setDiagnostics(diagnosticMessage);
}
}
|
LogAggregationReportPBImpl
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/criteria/CriteriaDynamicInstantiationInheritanceTest.java
|
{
"start": 2097,
"end": 2995
}
|
interface ____ {
void apply(CriteriaBuilder cb, CriteriaQuery<AddressEntity> cq, Root<AddressEntity> root);
}
private void executeCriteriaQuery(EntityManagerFactoryScope scope, SelectionProducer selectionProducer) {
scope.inTransaction( entityManager -> {
final CriteriaBuilder cb = entityManager.getCriteriaBuilder();
final CriteriaQuery<AddressEntity> cq = cb.createQuery( AddressEntity.class );
final Root<AddressEntity> root = cq.from( AddressEntity.class );
selectionProducer.apply( cb, cq, root );
final List<AddressEntity> list = entityManager.createQuery( cq ).getResultList();
assertThat( list ).hasSize( 1 );
assertThat( list.get( 0 ).getId() ).isNotNull();
assertThat( list.get( 0 ).getStreet() ).isEqualTo( "Via Roma" );
assertThat( list.get( 0 ).getCity() ).isEqualTo( "Pegognaga" );
} );
}
@MappedSuperclass
public static abstract
|
SelectionProducer
|
java
|
apache__maven
|
compat/maven-model-builder/src/main/java/org/apache/maven/model/interpolation/reflection/ClassMap.java
|
{
"start": 1044,
"end": 1300
}
|
class ____.
* Keys {@link Method} objects by a concatenation of the
* method name and the names of classes that make up the parameters.
*
* @deprecated use {@code org.apache.maven.api.services.ModelBuilder} instead
*/
@Deprecated(since = "4.0.0")
|
instance
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/NamedCommitterFactory.java
|
{
"start": 1414,
"end": 2354
}
|
class ____ extends
PathOutputCommitterFactory {
private static final Logger LOG =
LoggerFactory.getLogger(NamedCommitterFactory.class);
@SuppressWarnings("JavaReflectionMemberAccess")
@Override
public PathOutputCommitter createOutputCommitter(Path outputPath,
TaskAttemptContext context) throws IOException {
Class<? extends PathOutputCommitter> clazz = loadCommitterClass(context);
LOG.debug("Using PathOutputCommitter implementation {}", clazz);
try {
Constructor<? extends PathOutputCommitter> ctor
= clazz.getConstructor(Path.class, TaskAttemptContext.class);
return ctor.newInstance(outputPath, context);
} catch (NoSuchMethodException
| InstantiationException
| IllegalAccessException
| InvocationTargetException e) {
throw new IOException("Failed to create " + clazz
+ ":" + e, e);
}
}
/**
* Load the
|
NamedCommitterFactory
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/main/java/org/springframework/cloud/gateway/route/builder/RouteLocatorBuilder.java
|
{
"start": 2908,
"end": 3396
}
|
class ____ {
private final Route.AsyncBuilder routeBuilder = Route.async();
private final Builder builder;
RouteSpec(Builder builder) {
this.builder = builder;
}
public PredicateSpec id(String id) {
this.routeBuilder.id(id);
return predicateBuilder();
}
public PredicateSpec randomId() {
return id(UUID.randomUUID().toString());
}
private PredicateSpec predicateBuilder() {
return new PredicateSpec(this.routeBuilder, this.builder);
}
}
}
|
RouteSpec
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportClearDeploymentCacheAction.java
|
{
"start": 1543,
"end": 4027
}
|
class ____ extends TransportTasksAction<TrainedModelDeploymentTask, Request, Response, Response> {
@Inject
public TransportClearDeploymentCacheAction(
TransportService transportService,
ActionFilters actionFilters,
ClusterService clusterService
) {
super(
ClearDeploymentCacheAction.NAME,
clusterService,
transportService,
actionFilters,
Request::new,
Response::new,
EsExecutors.DIRECT_EXECUTOR_SERVICE
);
}
@Override
protected Response newResponse(
Request request,
List<Response> taskResponse,
List<TaskOperationFailure> taskOperationFailures,
List<FailedNodeException> failedNodeExceptions
) {
if (taskOperationFailures.isEmpty() == false) {
throw ExceptionsHelper.taskOperationFailureToStatusException(taskOperationFailures.get(0));
} else if (failedNodeExceptions.isEmpty() == false) {
throw failedNodeExceptions.get(0);
}
return new Response(true);
}
@Override
protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
final ClusterState clusterState = clusterService.state();
final TrainedModelAssignmentMetadata assignment = TrainedModelAssignmentMetadata.fromState(clusterState);
TrainedModelAssignment trainedModelAssignment = assignment.getDeploymentAssignment(request.getDeploymentId());
if (trainedModelAssignment == null) {
listener.onFailure(ExceptionsHelper.missingModelDeployment(request.getDeploymentId()));
return;
}
String[] nodes = trainedModelAssignment.getNodeRoutingTable()
.entrySet()
.stream()
.filter(entry -> entry.getValue().isRoutable())
.map(Map.Entry::getKey)
.toArray(String[]::new);
if (nodes.length == 0) {
listener.onResponse(new Response(true));
return;
}
request.setNodes(nodes);
super.doExecute(task, request, listener);
}
@Override
protected void taskOperation(
CancellableTask actionTask,
Request request,
TrainedModelDeploymentTask task,
ActionListener<Response> listener
) {
task.clearCache(listener.delegateFailureAndWrap((l, r) -> l.onResponse(new Response(true))));
}
}
|
TransportClearDeploymentCacheAction
|
java
|
quarkusio__quarkus
|
independent-projects/bootstrap/app-model/src/main/java/io/quarkus/maven/dependency/Dependency.java
|
{
"start": 98,
"end": 3351
}
|
interface ____ extends ArtifactCoords {
String SCOPE_COMPILE = "compile";
String SCOPE_IMPORT = "import";
static Dependency of(String groupId, String artifactId) {
return new ArtifactDependency(groupId, artifactId, null, ArtifactCoords.TYPE_JAR, null);
}
static Dependency of(String groupId, String artifactId, String version) {
return new ArtifactDependency(groupId, artifactId, null, ArtifactCoords.TYPE_JAR, version);
}
static Dependency pomImport(String groupId, String artifactId, String version) {
return new ArtifactDependency(groupId, artifactId, null, ArtifactCoords.TYPE_POM, version, SCOPE_IMPORT, false);
}
String getScope();
default Collection<ArtifactKey> getExclusions() {
return List.of();
}
int getFlags();
default boolean isOptional() {
return isFlagSet(DependencyFlags.OPTIONAL);
}
default boolean isDirect() {
return isFlagSet(DependencyFlags.DIRECT);
}
default boolean isRuntimeExtensionArtifact() {
return isFlagSet(DependencyFlags.RUNTIME_EXTENSION_ARTIFACT);
}
default boolean isRuntimeCp() {
return isFlagSet(DependencyFlags.RUNTIME_CP);
}
default boolean isDeploymentCp() {
return isFlagSet(DependencyFlags.DEPLOYMENT_CP);
}
default boolean isWorkspaceModule() {
return isFlagSet(DependencyFlags.WORKSPACE_MODULE);
}
default boolean isReloadable() {
return isFlagSet(DependencyFlags.RELOADABLE) && isWorkspaceModule();
}
default boolean isClassLoaderParentFirst() {
return isFlagSet(DependencyFlags.CLASSLOADER_PARENT_FIRST);
}
/**
* Checks whether a dependency has a given flag set.
* If the value of the {@code flag} argument combines multiple flags,
* the implementation will return {@code true} only if the dependency
* has all the flags set.
*
* @param flag flag (or flags) to check
* @return true if the flag is set, otherwise false
*/
default boolean isFlagSet(int flag) {
return (getFlags() & flag) == flag;
}
/**
* Checks whether a dependency has any of the flags combined in the value of {@code flags} set.
*
* @param flags flags to check
* @return true, if any of the flags is set, otherwise - false
*/
default boolean isAnyFlagSet(int flags) {
return (getFlags() & flags) > 0;
}
/**
* Checks whether any of the flags are set on a dependency
*
* @param flags flags to check
* @return true if any of the flags are set, otherwise false
*/
default boolean hasAnyFlag(int... flags) {
for (var flag : flags) {
if (isFlagSet(flag)) {
return true;
}
}
return false;
}
/**
* Checks whether all the passed in flags are set on a dependency
*
* @param flags flags to check
* @return true if all the passed in flags are set on a dependency, otherwise false
*/
default boolean hasAllFlags(int... flags) {
for (var flag : flags) {
if (!isFlagSet(flag)) {
return false;
}
}
return true;
}
}
|
Dependency
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/tcp/reactor/ReactorNettyTcpClient.java
|
{
"start": 11157,
"end": 11533
}
|
class ____<P> extends ByteToMessageDecoder {
private final ReactorNettyCodec<P> codec;
StompMessageDecoder(ReactorNettyCodec<P> codec) {
this.codec = codec;
}
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) {
Collection<Message<P>> messages = this.codec.decode(in);
out.addAll(messages);
}
}
}
|
StompMessageDecoder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/searchablesnapshots/SearchableSnapshotsConstantsTests.java
|
{
"start": 535,
"end": 1787
}
|
class ____ extends ESTestCase {
public void testIsPartialSearchableSnapshotIndex() {
assertThat(
SearchableSnapshotsSettings.isPartialSearchableSnapshotIndex(
Map.of(
IndexModule.INDEX_STORE_TYPE_SETTING,
SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE,
SearchableSnapshotsSettings.SNAPSHOT_PARTIAL_SETTING,
false
)
),
is(false)
);
assertThat(
SearchableSnapshotsSettings.isPartialSearchableSnapshotIndex(
Map.of(IndexModule.INDEX_STORE_TYPE_SETTING, "abc", SearchableSnapshotsSettings.SNAPSHOT_PARTIAL_SETTING, randomBoolean())
),
is(false)
);
assertThat(
SearchableSnapshotsSettings.isPartialSearchableSnapshotIndex(
Map.of(
IndexModule.INDEX_STORE_TYPE_SETTING,
SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE,
SearchableSnapshotsSettings.SNAPSHOT_PARTIAL_SETTING,
true
)
),
is(true)
);
}
}
|
SearchableSnapshotsConstantsTests
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/ProcessInheritIODisabled.java
|
{
"start": 817,
"end": 1088
}
|
class ____ extends SimpleBuildItem {
/**
* Generates a {@link List<Consumer<BuildChainBuilder>> build chain builder} which creates a build step
* producing the {@link ProcessInheritIODisabled} build item
*/
public static final
|
ProcessInheritIODisabled
|
java
|
apache__logging-log4j2
|
log4j-api/src/main/java/org/apache/logging/log4j/util/internal/SerializationUtil.java
|
{
"start": 1557,
"end": 5895
}
|
class ____ {
private static final String DEFAULT_FILTER_CLASS =
"org.apache.logging.log4j.util.internal.DefaultObjectInputFilter";
private static final Method setObjectInputFilter;
private static final Method getObjectInputFilter;
private static final Method newObjectInputFilter;
static {
Method[] methods = ObjectInputStream.class.getMethods();
Method setMethod = null;
Method getMethod = null;
for (final Method method : methods) {
if (method.getName().equals("setObjectInputFilter")) {
setMethod = method;
} else if (method.getName().equals("getObjectInputFilter")) {
getMethod = method;
}
}
Method newMethod = null;
try {
if (setMethod != null) {
final Class<?> clazz = Class.forName(DEFAULT_FILTER_CLASS);
methods = clazz.getMethods();
for (final Method method : methods) {
if (method.getName().equals("newInstance") && Modifier.isStatic(method.getModifiers())) {
newMethod = method;
break;
}
}
}
} catch (final ClassNotFoundException ex) {
// Ignore the exception
}
newObjectInputFilter = newMethod;
setObjectInputFilter = setMethod;
getObjectInputFilter = getMethod;
}
public static final List<String> REQUIRED_JAVA_CLASSES = Arrays.asList(
"java.math.BigDecimal",
"java.math.BigInteger",
// for Message delegate
"java.rmi.MarshalledObject",
// all primitives
"boolean",
"byte",
"char",
"double",
"float",
"int",
"long",
"short");
public static final List<String> REQUIRED_JAVA_PACKAGES =
Arrays.asList("java.lang.", "java.time.", "java.util.", "org.apache.logging.log4j.");
public static void writeWrappedObject(final Serializable obj, final ObjectOutputStream out) throws IOException {
final ByteArrayOutputStream bout = new ByteArrayOutputStream();
try (final ObjectOutputStream oos = new ObjectOutputStream(bout)) {
oos.writeObject(obj);
oos.flush();
out.writeObject(bout.toByteArray());
}
}
@SuppressFBWarnings(
value = "OBJECT_DESERIALIZATION",
justification =
"Object deserialization uses either Java 9 native filter or our custom filter to limit the kinds of classes deserialized.")
public static Object readWrappedObject(final ObjectInputStream in) throws IOException, ClassNotFoundException {
assertFiltered(in);
final byte[] data = (byte[]) in.readObject();
final ByteArrayInputStream bin = new ByteArrayInputStream(data);
final ObjectInputStream ois;
if (in instanceof FilteredObjectInputStream) {
ois = new FilteredObjectInputStream(bin, ((FilteredObjectInputStream) in).getAllowedClasses());
} else {
try {
final Object obj = getObjectInputFilter.invoke(in);
final Object filter = newObjectInputFilter.invoke(null, obj);
ois = new ObjectInputStream(bin);
setObjectInputFilter.invoke(ois, filter);
} catch (IllegalAccessException | InvocationTargetException ex) {
throw new StreamCorruptedException("Unable to set ObjectInputFilter on stream");
}
}
try {
return ois.readObject();
} catch (final Exception | LinkageError e) {
StatusLogger.getLogger().warn("Ignoring {} during deserialization", e.getMessage());
return null;
} finally {
ois.close();
}
}
public static void assertFiltered(final java.io.ObjectInputStream stream) {
if (!(stream instanceof FilteredObjectInputStream) && setObjectInputFilter == null) {
throw new IllegalArgumentException(
"readObject requires a FilteredObjectInputStream or an ObjectInputStream that accepts an ObjectInputFilter");
}
}
/**
* Gets the
|
SerializationUtil
|
java
|
apache__thrift
|
lib/java/src/test/java/org/apache/thrift/transport/TestTSSLTransportFactoryCustomClient1.java
|
{
"start": 850,
"end": 1316
}
|
class ____ extends TestTSSLTransportFactory {
@Override
public TTransport getClientTransport(TTransport underlyingTransport) throws Exception {
TSSLTransportFactory.TSSLTransportParameters params =
new TSSLTransportFactory.TSSLTransportParameters();
params.setTrustStore(getTrustStoreLocation(), getTrustStorePassword());
return TSSLTransportFactory.getClientSocket(HOST, PORT, 0 /*timeout*/, params);
}
}
|
TestTSSLTransportFactoryCustomClient1
|
java
|
quarkusio__quarkus
|
extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/intrumentation/grpc/GrpcSpanNameExtractor.java
|
{
"start": 148,
"end": 371
}
|
class ____ implements SpanNameExtractor<GrpcRequest> {
@Override
public String extract(final GrpcRequest grpcRequest) {
return grpcRequest.getMethodDescriptor().getFullMethodName();
}
}
|
GrpcSpanNameExtractor
|
java
|
apache__spark
|
sql/catalyst/src/test/java/org/apache/spark/sql/catalyst/JavaTypeInferenceBeans.java
|
{
"start": 2593,
"end": 2773
}
|
class ____<T> {
private Bar<T> bar;
public Bar<T> getBar() {
return bar;
}
public void setBar(Bar<T> bar) {
this.bar = bar;
}
}
static
|
BarWrapper
|
java
|
apache__dubbo
|
dubbo-plugin/dubbo-qos/src/main/java/org/apache/dubbo/qos/command/impl/OfflineInterface.java
|
{
"start": 1197,
"end": 1564
}
|
class ____ extends BaseOffline {
public OfflineInterface(FrameworkModel frameworkModel) {
super(frameworkModel);
}
@Override
protected void doUnexport(ProviderModel.RegisterStatedURL statedURL) {
if (!UrlUtils.isServiceDiscoveryURL(statedURL.getRegistryUrl())) {
super.doUnexport(statedURL);
}
}
}
|
OfflineInterface
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-guava-tests/src/test/java/org/assertj/tests/guava/api/RangeSetAssert_doesNotEnclose_Test.java
|
{
"start": 1507,
"end": 3324
}
|
class ____ {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
RangeSet<Integer> actual = null;
// WHEN
var error = expectAssertionError(() -> assertThat(actual).doesNotEnclose(closed(0, 1)));
// THEN
then(error).hasMessage(actualIsNull());
}
@Test
void should_fail_if_ranges_is_null() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of();
Range<Integer>[] ranges = null;
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).doesNotEnclose(ranges));
// THEN
then(thrown).isInstanceOf(NullPointerException.class)
.hasMessage(shouldNotBeNull("ranges").create());
}
@Test
void should_fail_if_ranges_is_empty() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 1));
Range<Integer>[] ranges = array();
// WHEN
Throwable thrown = catchThrowable(() -> assertThat(actual).doesNotEnclose(ranges));
// THEN
then(thrown).isInstanceOf(IllegalArgumentException.class)
.hasMessage("Expecting ranges not to be empty");
}
@Test
void should_fail_if_actual_encloses_ranges() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 100));
Range<Integer>[] ranges = array(open(10, 50), open(50, 110));
// WHEN
var error = expectAssertionError(() -> assertThat(actual).doesNotEnclose(ranges));
// THEN
then(error).hasMessage(shouldNotEnclose(actual, ranges, singleton(open(10, 50))).create());
}
@Test
void should_pass_if_actual_does_not_enclose_ranges() {
// GIVEN
RangeSet<Integer> actual = ImmutableRangeSet.of(closed(0, 100));
Range<Integer>[] ranges = array(open(-10, 50), open(50, 110));
// WHEN/THEN
assertThat(actual).doesNotEnclose(ranges);
}
}
|
RangeSetAssert_doesNotEnclose_Test
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/annotation/Bean.java
|
{
"start": 9668,
"end": 13894
}
|
interface ____ {
/**
* Alias for {@link #name}.
* <p>Intended to be used when no other attributes are needed, for example:
* {@code @Bean("customBeanName")}.
* @since 4.3.3
* @see #name
*/
@AliasFor("name")
String[] value() default {};
/**
* The name of this bean, or if several names, a primary bean name plus aliases.
* <p>If left unspecified, the name of the bean is the name of the annotated method.
* If specified, the method name is ignored.
* <p>The bean name and aliases may also be configured via the {@link #value}
* attribute if no other attributes are declared.
* @see #value
*/
@AliasFor("value")
String[] name() default {};
/**
* Is this bean a candidate for getting autowired into some other bean at all?
* <p>Default is {@code true}; set this to {@code false} for internal delegates
* that are not meant to get in the way of beans of the same type in other places.
* @since 5.1
* @see #defaultCandidate()
*/
boolean autowireCandidate() default true;
/**
* Is this bean a candidate for getting autowired into some other bean based on
* the plain type, without any further indications such as a qualifier match?
* <p>Default is {@code true}; set this to {@code false} for restricted delegates
* that are supposed to be injectable in certain areas but are not meant to get
* in the way of beans of the same type in other places.
* <p>This is a variation of {@link #autowireCandidate()} which does not disable
* injection in general, just enforces an additional indication such as a qualifier.
* @since 6.2
* @see #autowireCandidate()
*/
boolean defaultCandidate() default true;
/**
* The bootstrap mode for this bean: default is the main pre-instantiation thread
* for non-lazy singleton beans and the caller thread for prototype beans.
* <p>Set {@link Bootstrap#BACKGROUND} to allow for instantiating this bean on a
* background thread. For a non-lazy singleton, a background pre-instantiation
* thread can be used then, while still enforcing the completion at the end of
* {@link org.springframework.context.ConfigurableApplicationContext#refresh()}.
* For a lazy singleton, a background pre-instantiation thread can be used as well
* - with completion allowed at a later point, enforcing it when actually accessed.
* @since 6.2
* @see Lazy
*/
Bootstrap bootstrap() default Bootstrap.DEFAULT;
/**
* The optional name of a method to call on the bean instance during initialization.
* Not commonly used, given that the method may be called programmatically directly
* within the body of a Bean-annotated method.
* <p>The default value is {@code ""}, indicating no init method to be called.
* @see org.springframework.beans.factory.InitializingBean
* @see org.springframework.context.ConfigurableApplicationContext#refresh()
*/
String initMethod() default "";
/**
* The optional name of a method to call on the bean instance upon closing the
* application context, for example a {@code close()} method on a JDBC
* {@code DataSource} implementation, or a Hibernate {@code SessionFactory} object.
* The method must have no arguments but may throw any exception.
* <p>As a convenience to the user, the container will attempt to infer a destroy
* method against an object returned from the {@code @Bean} method. For example, given
* an {@code @Bean} method returning an Apache Commons DBCP {@code BasicDataSource},
* the container will notice the {@code close()} method available on that object and
* automatically register it as the {@code destroyMethod}. This 'destroy method
* inference' is currently limited to detecting only public, no-arg methods named
* 'close' or 'shutdown'. The method may be declared at any level of the inheritance
* hierarchy and will be detected regardless of the return type of the {@code @Bean}
* method (i.e., detection occurs reflectively against the bean instance itself at
* creation time).
* <p>To disable destroy method inference for a particular {@code @Bean}, specify an
* empty string as the value, for example, {@code @Bean(destroyMethod="")}. Note that the
* {@link org.springframework.beans.factory.DisposableBean} callback
|
Bean
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/context/config/ConfigDataEnvironmentPostProcessor.java
|
{
"start": 1672,
"end": 7794
}
|
class ____ implements EnvironmentPostProcessor, Ordered {
/**
* The default order for the processor.
*/
public static final int ORDER = Ordered.HIGHEST_PRECEDENCE + 10;
/**
* Property used to determine what action to take when a
* {@code ConfigDataLocationNotFoundException} is thrown.
* @see ConfigDataNotFoundAction
*/
public static final String ON_LOCATION_NOT_FOUND_PROPERTY = ConfigDataEnvironment.ON_NOT_FOUND_PROPERTY;
private final DeferredLogFactory logFactory;
private final Log logger;
private final ConfigurableBootstrapContext bootstrapContext;
private final @Nullable ConfigDataEnvironmentUpdateListener environmentUpdateListener;
public ConfigDataEnvironmentPostProcessor(DeferredLogFactory logFactory,
ConfigurableBootstrapContext bootstrapContext) {
this(logFactory, bootstrapContext, null);
}
private ConfigDataEnvironmentPostProcessor(DeferredLogFactory logFactory,
ConfigurableBootstrapContext bootstrapContext,
@Nullable ConfigDataEnvironmentUpdateListener environmentUpdateListener) {
this.logFactory = logFactory;
this.logger = logFactory.getLog(getClass());
this.bootstrapContext = bootstrapContext;
this.environmentUpdateListener = environmentUpdateListener;
}
@Override
public int getOrder() {
return ORDER;
}
@Override
public void postProcessEnvironment(ConfigurableEnvironment environment, SpringApplication application) {
postProcessEnvironment(environment, application.getResourceLoader(), application.getAdditionalProfiles());
}
void postProcessEnvironment(ConfigurableEnvironment environment, @Nullable ResourceLoader resourceLoader,
Collection<String> additionalProfiles) {
this.logger.trace("Post-processing environment to add config data");
resourceLoader = (resourceLoader != null) ? resourceLoader : new DefaultResourceLoader();
getConfigDataEnvironment(environment, resourceLoader, additionalProfiles).processAndApply();
}
ConfigDataEnvironment getConfigDataEnvironment(ConfigurableEnvironment environment, ResourceLoader resourceLoader,
Collection<String> additionalProfiles) {
return new ConfigDataEnvironment(this.logFactory, this.bootstrapContext, environment, resourceLoader,
additionalProfiles, this.environmentUpdateListener);
}
/**
* Apply {@link ConfigData} post-processing to an existing {@link Environment}. This
* method can be useful when working with an {@link Environment} that has been created
* directly and not necessarily as part of a {@link SpringApplication}.
* @param environment the environment to apply {@link ConfigData} to
*/
public static void applyTo(ConfigurableEnvironment environment) {
applyTo(environment, null, null, Collections.emptyList());
}
/**
* Apply {@link ConfigData} post-processing to an existing {@link Environment}. This
* method can be useful when working with an {@link Environment} that has been created
* directly and not necessarily as part of a {@link SpringApplication}.
* @param environment the environment to apply {@link ConfigData} to
* @param resourceLoader the resource loader to use
* @param bootstrapContext the bootstrap context to use or {@code null} to use a
* throw-away context
* @param additionalProfiles any additional profiles that should be applied
*/
public static void applyTo(ConfigurableEnvironment environment, ResourceLoader resourceLoader,
@Nullable ConfigurableBootstrapContext bootstrapContext, String... additionalProfiles) {
applyTo(environment, resourceLoader, bootstrapContext, Arrays.asList(additionalProfiles));
}
/**
* Apply {@link ConfigData} post-processing to an existing {@link Environment}. This
* method can be useful when working with an {@link Environment} that has been created
* directly and not necessarily as part of a {@link SpringApplication}.
* @param environment the environment to apply {@link ConfigData} to
* @param resourceLoader the resource loader to use
* @param bootstrapContext the bootstrap context to use or {@code null} to use a
* throw-away context
* @param additionalProfiles any additional profiles that should be applied
*/
public static void applyTo(ConfigurableEnvironment environment, @Nullable ResourceLoader resourceLoader,
@Nullable ConfigurableBootstrapContext bootstrapContext, Collection<String> additionalProfiles) {
DeferredLogFactory logFactory = Supplier::get;
bootstrapContext = (bootstrapContext != null) ? bootstrapContext : new DefaultBootstrapContext();
ConfigDataEnvironmentPostProcessor postProcessor = new ConfigDataEnvironmentPostProcessor(logFactory,
bootstrapContext);
postProcessor.postProcessEnvironment(environment, resourceLoader, additionalProfiles);
}
/**
* Apply {@link ConfigData} post-processing to an existing {@link Environment}. This
* method can be useful when working with an {@link Environment} that has been created
* directly and not necessarily as part of a {@link SpringApplication}.
* @param environment the environment to apply {@link ConfigData} to
* @param resourceLoader the resource loader to use
* @param bootstrapContext the bootstrap context to use or {@code null} to use a
* throw-away context
* @param additionalProfiles any additional profiles that should be applied
* @param environmentUpdateListener optional
* {@link ConfigDataEnvironmentUpdateListener} that can be used to track
* {@link Environment} updates.
*/
public static void applyTo(ConfigurableEnvironment environment, @Nullable ResourceLoader resourceLoader,
@Nullable ConfigurableBootstrapContext bootstrapContext, Collection<String> additionalProfiles,
ConfigDataEnvironmentUpdateListener environmentUpdateListener) {
DeferredLogFactory logFactory = Supplier::get;
bootstrapContext = (bootstrapContext != null) ? bootstrapContext : new DefaultBootstrapContext();
ConfigDataEnvironmentPostProcessor postProcessor = new ConfigDataEnvironmentPostProcessor(logFactory,
bootstrapContext, environmentUpdateListener);
postProcessor.postProcessEnvironment(environment, resourceLoader, additionalProfiles);
}
}
|
ConfigDataEnvironmentPostProcessor
|
java
|
apache__flink
|
flink-test-utils-parent/flink-test-utils/src/main/java/org/apache/flink/test/util/SecureTestEnvironment.java
|
{
"start": 2741,
"end": 10991
}
|
class ____ {
protected static final Logger LOG = LoggerFactory.getLogger(SecureTestEnvironment.class);
public static final String HOST_NAME = "localhost";
private static MiniKdc kdc;
private static String testKeytab = null;
private static String testZkServerPrincipal = null;
private static String testZkClientPrincipal = null;
private static String testKafkaServerPrincipal = null;
private static String hadoopServicePrincipal = null;
private static String testPrincipal = null;
private static void doPrepare(File baseDirForSecureRun, String... additionalPrincipals) {
checkArgument(additionalPrincipals != null, "Valid principals must be provided");
try {
LOG.info("Base Directory for Secure Environment: {}", baseDirForSecureRun);
Properties kdcConf = MiniKdc.createConf();
if (LOG.isDebugEnabled()) {
kdcConf.setProperty(MiniKdc.DEBUG, "true");
}
kdcConf.setProperty(MiniKdc.KDC_BIND_ADDRESS, HOST_NAME);
kdc = new MiniKdc(kdcConf, baseDirForSecureRun);
kdc.start();
LOG.info("Started Mini KDC");
File keytabFile = new File(baseDirForSecureRun, "test-users.keytab");
testKeytab = keytabFile.getAbsolutePath();
testZkServerPrincipal = "zookeeper/" + HOST_NAME;
testZkClientPrincipal = "zk-client/" + HOST_NAME;
testKafkaServerPrincipal = "kafka/" + HOST_NAME;
hadoopServicePrincipal = "hadoop/" + HOST_NAME;
testPrincipal = "client/" + HOST_NAME;
String[] embeddedPrincipals = {
testZkServerPrincipal,
testZkClientPrincipal,
testKafkaServerPrincipal,
hadoopServicePrincipal,
testPrincipal
};
String[] principals = ArrayUtils.addAll(embeddedPrincipals, additionalPrincipals);
kdc.createPrincipal(keytabFile, principals);
testZkServerPrincipal = testZkServerPrincipal + "@" + kdc.getRealm();
testZkClientPrincipal = testZkClientPrincipal + "@" + kdc.getRealm();
testKafkaServerPrincipal = testKafkaServerPrincipal + "@" + kdc.getRealm();
hadoopServicePrincipal = hadoopServicePrincipal + "@" + kdc.getRealm();
testPrincipal = testPrincipal + "@" + kdc.getRealm();
LOG.info("-------------------------------------------------------------------");
LOG.info("Test Principal: {}", testPrincipal);
LOG.info("Test ZK Server Principal: {}", testZkServerPrincipal);
LOG.info("Test ZK Client Principal: {}", testZkClientPrincipal);
LOG.info("Test Kafka Server Principal: {}", testKafkaServerPrincipal);
LOG.info("Test Hadoop Service Principal: {}", hadoopServicePrincipal);
LOG.info("Test Keytab: {}", testKeytab);
LOG.info("-------------------------------------------------------------------");
// Security Context is established to allow non hadoop applications that requires JAAS
// based SASL/Kerberos authentication to work. However, for Hadoop specific applications
// the context can be reinitialized with Hadoop configuration by calling
// ctx.setHadoopConfiguration() for the UGI implementation to work properly.
// See Yarn test case module for reference
Configuration flinkConfig = GlobalConfiguration.loadConfiguration();
flinkConfig.set(SecurityOptions.ZOOKEEPER_SASL_DISABLE, false);
flinkConfig.set(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
flinkConfig.set(SecurityOptions.KERBEROS_LOGIN_USETICKETCACHE, false);
flinkConfig.set(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
flinkConfig.set(
SecurityOptions.KERBEROS_LOGIN_CONTEXTS,
"Client,KafkaClient," + KerberosUtils.getDefaultKerberosInitAppEntryName());
SecurityConfiguration ctx = new SecurityConfiguration(flinkConfig);
TestingSecurityContext.install(ctx, getClientSecurityConfigurationMap());
populateJavaPropertyVariables();
} catch (Exception e) {
throw new RuntimeException("Exception occurred while preparing secure environment.", e);
}
}
public static void prepare(File tempFolder, String... additionalPrincipals) {
doPrepare(tempFolder, additionalPrincipals);
}
public static void cleanup() {
LOG.info("Cleaning up Secure Environment");
if (kdc != null) {
kdc.stop();
LOG.info("Stopped KDC server");
}
resetSystemEnvVariables();
testKeytab = null;
testZkServerPrincipal = null;
testZkClientPrincipal = null;
testKafkaServerPrincipal = null;
hadoopServicePrincipal = null;
testPrincipal = null;
}
private static void populateJavaPropertyVariables() {
if (LOG.isDebugEnabled()) {
System.setProperty("sun.security.krb5.debug", "true");
}
System.setProperty("java.security.krb5.conf", kdc.getKrb5conf().getAbsolutePath());
System.setProperty(
"zookeeper.authProvider.1",
"org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
System.setProperty("zookeeper.kerberos.removeHostFromPrincipal", "true");
System.setProperty("zookeeper.kerberos.removeRealmFromPrincipal", "true");
}
private static void resetSystemEnvVariables() {
System.clearProperty("java.security.krb5.conf");
System.clearProperty("sun.security.krb5.debug");
System.clearProperty("zookeeper.authProvider.1");
System.clearProperty("zookeeper.kerberos.removeHostFromPrincipal");
System.clearProperty("zookeeper.kerberos.removeRealmFromPrincipal");
}
public static org.apache.flink.configuration.Configuration populateFlinkSecureConfigurations(
@Nullable org.apache.flink.configuration.Configuration flinkConf) {
org.apache.flink.configuration.Configuration conf;
if (flinkConf == null) {
conf = new org.apache.flink.configuration.Configuration();
} else {
conf = flinkConf;
}
conf.set(SecurityOptions.KERBEROS_LOGIN_KEYTAB, testKeytab);
conf.set(SecurityOptions.KERBEROS_LOGIN_PRINCIPAL, testPrincipal);
return conf;
}
public static Map<String, TestingSecurityContext.ClientSecurityConfiguration>
getClientSecurityConfigurationMap() {
Map<String, TestingSecurityContext.ClientSecurityConfiguration>
clientSecurityConfigurationMap = new HashMap<>();
if (testZkServerPrincipal != null) {
TestingSecurityContext.ClientSecurityConfiguration zkServer =
new TestingSecurityContext.ClientSecurityConfiguration(
testZkServerPrincipal, testKeytab);
clientSecurityConfigurationMap.put("Server", zkServer);
}
if (testZkClientPrincipal != null) {
TestingSecurityContext.ClientSecurityConfiguration zkClient =
new TestingSecurityContext.ClientSecurityConfiguration(
testZkClientPrincipal, testKeytab);
clientSecurityConfigurationMap.put("Client", zkClient);
}
if (testKafkaServerPrincipal != null) {
TestingSecurityContext.ClientSecurityConfiguration kafkaServer =
new TestingSecurityContext.ClientSecurityConfiguration(
testKafkaServerPrincipal, testKeytab);
clientSecurityConfigurationMap.put("KafkaServer", kafkaServer);
}
return clientSecurityConfigurationMap;
}
public static String getRealm() {
checkNotNull(kdc, "KDC must be initialized");
return kdc.getRealm();
}
public static String getTestKeytab() {
return testKeytab;
}
public static String getHadoopServicePrincipal() {
return hadoopServicePrincipal;
}
}
|
SecureTestEnvironment
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/bootstrap/binding/annotations/embedded/Leg.java
|
{
"start": 379,
"end": 751
}
|
enum ____ {
ANNUALY, SEMIANNUALLY, QUARTERLY, MONTHLY }
;
/**
* Shows how frequent payments according to this leg should be made.
*/
private Frequency paymentFrequency;
public Frequency getPaymentFrequency() {
return paymentFrequency;
}
public void setPaymentFrequency(Frequency paymentFrequency) {
this.paymentFrequency = paymentFrequency;
}
}
|
Frequency
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/param/MySqlParameterizedOutputVisitorTest_6.java
|
{
"start": 787,
"end": 1575
}
|
class ____ extends com.alibaba.druid.bvt.sql.mysql.param.MySQLParameterizedTest {
public void test_0() throws Exception {
String sql = "SET autocommit=1";
String paramSql = "SET autocommit = ?";
assertEquals(ParameterizedOutputVisitorUtils.parameterize(sql, JdbcConstants.MYSQL), paramSql);
paramaterizeAST(sql, paramSql);
}
public void test_1() throws Exception {
String sql = "SET GLOBAL sort_buffer_size=1000000, SESSION sort_buffer_size=1000000;";
String paramSql = "SET @@global.sort_buffer_size = ?, @@session.sort_buffer_size = ?;";
assertEquals(paramSql, ParameterizedOutputVisitorUtils.parameterize(sql, JdbcConstants.MYSQL));
paramaterizeAST(sql, paramSql);
}
}
|
MySqlParameterizedOutputVisitorTest_6
|
java
|
spring-projects__spring-framework
|
spring-tx/src/main/java/org/springframework/transaction/jta/ManagedTransactionAdapter.java
|
{
"start": 1343,
"end": 3087
}
|
class ____ implements Transaction {
private final TransactionManager transactionManager;
/**
* Create a new ManagedTransactionAdapter for the given TransactionManager.
* @param transactionManager the JTA TransactionManager to wrap
*/
public ManagedTransactionAdapter(TransactionManager transactionManager) throws SystemException {
Assert.notNull(transactionManager, "TransactionManager must not be null");
this.transactionManager = transactionManager;
}
/**
* Return the JTA TransactionManager that this adapter delegates to.
*/
public final TransactionManager getTransactionManager() {
return this.transactionManager;
}
@Override
public void commit() throws RollbackException, HeuristicMixedException, HeuristicRollbackException,
SecurityException, SystemException {
this.transactionManager.commit();
}
@Override
public void rollback() throws SystemException {
this.transactionManager.rollback();
}
@Override
public void setRollbackOnly() throws SystemException {
this.transactionManager.setRollbackOnly();
}
@Override
public int getStatus() throws SystemException {
return this.transactionManager.getStatus();
}
@Override
public boolean enlistResource(XAResource xaRes) throws RollbackException, SystemException {
return this.transactionManager.getTransaction().enlistResource(xaRes);
}
@Override
public boolean delistResource(XAResource xaRes, int flag) throws SystemException {
return this.transactionManager.getTransaction().delistResource(xaRes, flag);
}
@Override
public void registerSynchronization(Synchronization sync) throws RollbackException, SystemException {
this.transactionManager.getTransaction().registerSynchronization(sync);
}
}
|
ManagedTransactionAdapter
|
java
|
apache__avro
|
lang/java/trevni/avro/src/main/java/org/apache/trevni/avro/mapreduce/AvroTrevniRecordWriterBase.java
|
{
"start": 1787,
"end": 4454
}
|
class ____<K, V, T> extends RecordWriter<K, V> {
/** trevni file extension */
public final static String EXT = ".trv";
/** prefix of job configs that we care about */
public static final String META_PREFIX = "trevni.meta.";
/**
* Counter that increments as new trevni files are create because the current
* file has exceeded the block size
*/
protected int part = 0;
/** Trevni file writer */
protected AvroColumnWriter<T> writer;
/** This will be a unique directory linked to the task */
final Path dirPath;
/** HDFS object */
final FileSystem fs;
/** Current configured blocksize */
final long blockSize;
/** Provided avro schema from the context */
protected Schema schema;
/** meta data to be stored in the output file. */
protected ColumnFileMetaData meta;
/**
* Constructor.
*
* @param context The TaskAttempContext to supply the writer with information
* form the job configuration
*/
public AvroTrevniRecordWriterBase(TaskAttemptContext context) throws IOException {
schema = initSchema(context);
meta = filterMetadata(context.getConfiguration());
writer = new AvroColumnWriter<>(schema, meta, ReflectData.get());
Path outputPath = FileOutputFormat.getOutputPath(context);
String dir = FileOutputFormat.getUniqueFile(context, "part", "");
dirPath = new Path(outputPath.toString() + "/" + dir);
fs = dirPath.getFileSystem(context.getConfiguration());
fs.mkdirs(dirPath);
blockSize = fs.getDefaultBlockSize(dirPath);
}
/**
* Use the task context to construct a schema for writing
*
* @throws IOException
*/
abstract protected Schema initSchema(TaskAttemptContext context);
/**
* A Trevni flush will close the current file and prep a new writer
*
* @throws IOException
*/
public void flush() throws IOException {
try (OutputStream out = fs.create(new Path(dirPath, "part-" + (part++) + EXT))) {
writer.writeTo(out);
}
writer = new AvroColumnWriter<>(schema, meta, ReflectData.get());
}
/** {@inheritDoc} */
@Override
public void close(TaskAttemptContext arg0) throws IOException, InterruptedException {
flush();
}
static ColumnFileMetaData filterMetadata(final Configuration configuration) {
final ColumnFileMetaData meta = new ColumnFileMetaData();
for (Entry<String, String> confEntry : configuration) {
if (confEntry.getKey().startsWith(META_PREFIX))
meta.put(confEntry.getKey().substring(META_PREFIX.length()),
confEntry.getValue().getBytes(StandardCharsets.UTF_8));
}
return meta;
}
}
|
AvroTrevniRecordWriterBase
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingCombiner.java
|
{
"start": 1020,
"end": 1821
}
|
class ____ extends TestStreaming {
protected String combine = UtilTest.makeJavaCommand(
UniqApp.class, new String[]{""});
public TestStreamingCombiner() throws IOException {
super();
}
protected String[] genArgs() {
args.add("-combiner");
args.add(combine);
return super.genArgs();
}
@Test
public void testCommandLine() throws Exception {
super.testCommandLine();
// validate combiner counters
String counterGrp = "org.apache.hadoop.mapred.Task$Counter";
Counters counters = job.running_.getCounters();
assertTrue(counters.findCounter(
counterGrp, "COMBINE_INPUT_RECORDS").getValue() != 0);
assertTrue(counters.findCounter(
counterGrp, "COMBINE_OUTPUT_RECORDS").getValue() != 0);
}
}
|
TestStreamingCombiner
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/iterative/io/SerializedUpdateBuffer.java
|
{
"start": 10504,
"end": 15006
}
|
class ____ extends AbstractPagedInputView {
private final LinkedBlockingQueue<MemorySegment> emptyBufferTarget;
private final Deque<MemorySegment> fullBufferSource;
private final BlockChannelReader<MemorySegment> spilledBufferSource;
private int spilledBuffersRemaining;
private int requestsRemaining;
private ReadEnd(
MemorySegment firstMemSegment,
LinkedBlockingQueue<MemorySegment> emptyBufferTarget,
Deque<MemorySegment> fullBufferSource,
BlockChannelReader<MemorySegment> spilledBufferSource,
List<MemorySegment> emptyBuffers,
int numBuffersSpilled)
throws IOException {
super(firstMemSegment, firstMemSegment.getInt(0), HEADER_LENGTH);
this.emptyBufferTarget = emptyBufferTarget;
this.fullBufferSource = fullBufferSource;
this.spilledBufferSource = spilledBufferSource;
requestsRemaining = numBuffersSpilled;
this.spilledBuffersRemaining = numBuffersSpilled;
// send the first requests
while (requestsRemaining > 0 && emptyBuffers.size() > 0) {
this.spilledBufferSource.readBlock(emptyBuffers.remove(emptyBuffers.size() - 1));
requestsRemaining--;
}
}
@Override
protected MemorySegment nextSegment(MemorySegment current) throws IOException {
// use the buffer to send the next request
if (requestsRemaining > 0) {
requestsRemaining--;
spilledBufferSource.readBlock(current);
} else {
emptyBufferTarget.add(current);
}
// get the next buffer either from the return queue, or the full buffer source
if (spilledBuffersRemaining > 0) {
spilledBuffersRemaining--;
try {
return spilledBufferSource.getReturnQueue().take();
} catch (InterruptedException e) {
throw new RuntimeException(
"Read End was interrupted while waiting for spilled buffer.", e);
}
} else if (fullBufferSource.size() > 0) {
return fullBufferSource.removeFirst();
} else {
clear();
// delete the channel, if we had one
if (spilledBufferSource != null) {
spilledBufferSource.closeAndDelete();
}
throw new EOFException();
}
}
@Override
protected int getLimitForSegment(MemorySegment segment) {
return segment.getInt(0);
}
private boolean disposeIfDone() {
if (fullBufferSource.isEmpty() && spilledBuffersRemaining == 0) {
if (getCurrentSegment() == null
|| getCurrentPositionInSegment() >= getCurrentSegmentLimit()) {
if (getCurrentSegment() != null) {
emptyBufferTarget.add(getCurrentSegment());
clear();
}
if (spilledBufferSource != null) {
try {
spilledBufferSource.closeAndDelete();
} catch (Throwable t) {
// do nothing
}
}
return true;
}
}
return false;
}
private void forceDispose(List<MemorySegment> freeMemTarget) throws InterruptedException {
// add the current segment
final MemorySegment current = getCurrentSegment();
clear();
if (current != null) {
freeMemTarget.add(current);
}
// add all remaining memory
freeMemTarget.addAll(fullBufferSource);
// add the segments with the requests issued but not returned
for (int i = spilledBuffersRemaining - requestsRemaining; i > 0; --i) {
freeMemTarget.add(emptyBufferTarget.take());
}
if (spilledBufferSource != null) {
try {
spilledBufferSource.closeAndDelete();
} catch (Throwable t) {
// do nothing
}
}
}
}
}
|
ReadEnd
|
java
|
spring-projects__spring-framework
|
spring-jdbc/src/test/java/org/springframework/jdbc/core/simple/JdbcClientIntegrationTests.java
|
{
"start": 4536,
"end": 7545
}
|
class ____ {
private static final String QUERY1 = """
select * from users
where
first_name in ('Bogus', :name) or
last_name in (:name, 'Bogus')
order by last_name
""";
private static final String QUERY2 = """
select * from users
where
first_name in (:names) or
last_name in (:names)
order by last_name
""";
@BeforeEach
void insertTestUsers() {
jdbcClient.sql(INSERT_WITH_JDBC_PARAMS).params("John", "John").update();
jdbcClient.sql(INSERT_WITH_JDBC_PARAMS).params("John", "Smith").update();
jdbcClient.sql(INSERT_WITH_JDBC_PARAMS).params("Smith", "Smith").update();
assertNumUsers(4);
}
@Test
void selectWithReusedNamedParameter() {
List<User> users = jdbcClient.sql(QUERY1)
.param("name", "John")
.query(User.class)
.list();
assertResults(users);
}
@Test
void selectWithReusedNamedParameterFromBeanProperties() {
List<User> users = jdbcClient.sql(QUERY1)
.paramSource(new Name("John"))
.query(User.class)
.list();
assertResults(users);
}
@Test
void selectWithReusedNamedParameterAndMaxRows() {
List<User> users = jdbcClient.sql(QUERY1)
.withFetchSize(1)
.withMaxRows(1)
.withQueryTimeout(1)
.param("name", "John")
.query(User.class)
.list();
assertSingleResult(users);
}
@Test
void selectWithReusedNamedParameterList() {
List<User> users = jdbcClient.sql(QUERY2)
.param("names", List.of("John", "Bogus"))
.query(User.class)
.list();
assertResults(users);
}
@Test
void selectWithReusedNamedParameterListFromBeanProperties() {
List<User> users = jdbcClient.sql(QUERY2)
.paramSource(new Names(List.of("John", "Bogus")))
.query(User.class)
.list();
assertResults(users);
}
@Test
void selectWithReusedNamedParameterListAndMaxRows() {
List<User> users = jdbcClient.sql(QUERY2)
.withFetchSize(1)
.withMaxRows(1)
.withQueryTimeout(1)
.paramSource(new Names(List.of("John", "Bogus")))
.query(User.class)
.list();
assertSingleResult(users);
}
private static void assertResults(List<User> users) {
assertThat(users).containsExactly(new User(1, "John", "John"), new User(2, "John", "Smith"));
}
private static void assertSingleResult(List<User> users) {
assertThat(users).containsExactly(new User(1, "John", "John"));
}
record Name(String name) {}
record Names(List<String> names) {}
}
private void assertNumUsers(long count) {
long numUsers = this.jdbcClient.sql("select count(id) from users").query(Long.class).single();
assertThat(numUsers).isEqualTo(count);
}
private void assertUser(long id, String firstName, String lastName) {
User user = this.jdbcClient.sql("select * from users where id = ?").param(id).query(User.class).single();
assertThat(user).isEqualTo(new User(id, firstName, lastName));
}
record User(long id, String firstName, String lastName) {}
}
|
ReusedNamedParameterTests
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/json/JsonObject.java
|
{
"start": 1405,
"end": 37042
}
|
class ____ implements Iterable<Map.Entry<String, Object>>, ClusterSerializable, Shareable {
private Map<String, Object> map;
/**
* Create an instance from a string of JSON
*
* @param json the string of JSON
*/
public JsonObject(String json) {
if (json == null) {
throw new NullPointerException();
}
fromJson(json);
if (map == null) {
throw new DecodeException("Invalid JSON object: " + json);
}
}
/**
* Create a new, empty instance
*/
public JsonObject() {
map = new LinkedHashMap<>();
}
/**
* Create an instance from a Map. The Map is not copied.
*
* @param map the map to create the instance from.
*/
public JsonObject(Map<String, Object> map) {
if (map == null) {
throw new NullPointerException();
}
this.map = map;
}
/**
* Create an instance from a buffer.
*
* @param buf the buffer to create the instance from.
*/
public JsonObject(Buffer buf) {
if (buf == null) {
throw new NullPointerException();
}
fromBuffer(buf);
if (map == null) {
throw new DecodeException("Invalid JSON object: " + buf);
}
}
/**
* Create a JsonObject containing zero mappings.
*
* @return an empty JsonObject.
*/
public static JsonObject of() {
return new JsonObject();
}
/**
* Create a JsonObject containing a single mapping.
*
* @param k1 the mapping's key
* @param v1 the mapping's value
* @return a JsonObject containing the specified mapping.
*/
public static JsonObject of(String k1, Object v1) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(1));
obj.put(k1, v1);
return obj;
}
/**
* Create a JsonObject containing two mappings.
*
* @param k1 the first mapping's key
* @param v1 the first mapping's value
* @param k2 the second mapping's key
* @param v2 the second mapping's value
* @return a JsonObject containing the specified mappings.
*/
public static JsonObject of(String k1, Object v1, String k2, Object v2) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(2));
obj.put(k1, v1);
obj.put(k2, v2);
return obj;
}
/**
* Create a JsonObject containing three mappings.
*
* @param k1 the first mapping's key
* @param v1 the first mapping's value
* @param k2 the second mapping's key
* @param v2 the second mapping's value
* @param k3 the third mapping's key
* @param v3 the third mapping's value
* @return a JsonObject containing the specified mappings.
*/
public static JsonObject of(String k1, Object v1, String k2, Object v2, String k3, Object v3) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(3));
obj.put(k1, v1);
obj.put(k2, v2);
obj.put(k3, v3);
return obj;
}
/**
* Create a JsonObject containing four mappings.
*
* @param k1 the first mapping's key
* @param v1 the first mapping's value
* @param k2 the second mapping's key
* @param v2 the second mapping's value
* @param k3 the third mapping's key
* @param v3 the third mapping's value
* @param k4 the fourth mapping's key
* @param v4 the fourth mapping's value
* @return a JsonObject containing the specified mappings.
*/
public static JsonObject of(String k1, Object v1, String k2, Object v2, String k3, Object v3,
String k4, Object v4) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(4));
obj.put(k1, v1);
obj.put(k2, v2);
obj.put(k3, v3);
obj.put(k4, v4);
return obj;
}
/**
* Create a JsonObject containing five mappings.
*
* @param k1 the first mapping's key
* @param v1 the first mapping's value
* @param k2 the second mapping's key
* @param v2 the second mapping's value
* @param k3 the third mapping's key
* @param v3 the third mapping's value
* @param k4 the fourth mapping's key
* @param v4 the fourth mapping's value
* @param k5 the fifth mapping's key
* @param v5 the fifth mapping's value
* @return a JsonObject containing the specified mappings.
*/
public static JsonObject of(String k1, Object v1, String k2, Object v2, String k3, Object v3,
String k4, Object v4, String k5, Object v5) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(5));
obj.put(k1, v1);
obj.put(k2, v2);
obj.put(k3, v3);
obj.put(k4, v4);
obj.put(k5, v5);
return obj;
}
/**
* Create a JsonObject containing six mappings.
*
* @param k1 the first mapping's key
* @param v1 the first mapping's value
* @param k2 the second mapping's key
* @param v2 the second mapping's value
* @param k3 the third mapping's key
* @param v3 the third mapping's value
* @param k4 the fourth mapping's key
* @param v4 the fourth mapping's value
* @param k5 the fifth mapping's key
* @param v5 the fifth mapping's value
* @param k6 the sixth mapping's key
* @param v6 the sixth mapping's value
* @return a JsonObject containing the specified mappings.
*/
public static JsonObject of(String k1, Object v1, String k2, Object v2, String k3, Object v3,
String k4, Object v4, String k5, Object v5, String k6, Object v6) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(6));
obj.put(k1, v1);
obj.put(k2, v2);
obj.put(k3, v3);
obj.put(k4, v4);
obj.put(k5, v5);
obj.put(k6, v6);
return obj;
}
/**
* Create a JsonObject containing seven mappings.
*
* @param k1 the first mapping's key
* @param v1 the first mapping's value
* @param k2 the second mapping's key
* @param v2 the second mapping's value
* @param k3 the third mapping's key
* @param v3 the third mapping's value
* @param k4 the fourth mapping's key
* @param v4 the fourth mapping's value
* @param k5 the fifth mapping's key
* @param v5 the fifth mapping's value
* @param k6 the sixth mapping's key
* @param v6 the sixth mapping's value
* @param k7 the seventh mapping's key
* @param v7 the seventh mapping's value
* @return a JsonObject containing the specified mappings.
*/
public static JsonObject of(String k1, Object v1, String k2, Object v2, String k3, Object v3,
String k4, Object v4, String k5, Object v5, String k6, Object v6,
String k7, Object v7) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(7));
obj.put(k1, v1);
obj.put(k2, v2);
obj.put(k3, v3);
obj.put(k4, v4);
obj.put(k5, v5);
obj.put(k6, v6);
obj.put(k7, v7);
return obj;
}
/**
* Create a JsonObject containing eight mappings.
*
* @param k1 the first mapping's key
* @param v1 the first mapping's value
* @param k2 the second mapping's key
* @param v2 the second mapping's value
* @param k3 the third mapping's key
* @param v3 the third mapping's value
* @param k4 the fourth mapping's key
* @param v4 the fourth mapping's value
* @param k5 the fifth mapping's key
* @param v5 the fifth mapping's value
* @param k6 the sixth mapping's key
* @param v6 the sixth mapping's value
* @param k7 the seventh mapping's key
* @param v7 the seventh mapping's value
* @param k8 the eighth mapping's key
* @param v8 the eighth mapping's value
* @return a JsonObject containing the specified mappings.
*/
public static JsonObject of(String k1, Object v1, String k2, Object v2, String k3, Object v3,
String k4, Object v4, String k5, Object v5, String k6, Object v6,
String k7, Object v7, String k8, Object v8) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(8));
obj.put(k1, v1);
obj.put(k2, v2);
obj.put(k3, v3);
obj.put(k4, v4);
obj.put(k5, v5);
obj.put(k6, v6);
obj.put(k7, v7);
obj.put(k8, v8);
return obj;
}
/**
* Create a JsonObject containing nine mappings.
*
* @param k1 the first mapping's key
* @param v1 the first mapping's value
* @param k2 the second mapping's key
* @param v2 the second mapping's value
* @param k3 the third mapping's key
* @param v3 the third mapping's value
* @param k4 the fourth mapping's key
* @param v4 the fourth mapping's value
* @param k5 the fifth mapping's key
* @param v5 the fifth mapping's value
* @param k6 the sixth mapping's key
* @param v6 the sixth mapping's value
* @param k7 the seventh mapping's key
* @param v7 the seventh mapping's value
* @param k8 the eighth mapping's key
* @param v8 the eighth mapping's value
* @param k9 the ninth mapping's key
* @param v9 the ninth mapping's value
* @return a JsonObject containing the specified mappings.
*/
public static JsonObject of(String k1, Object v1, String k2, Object v2, String k3, Object v3,
String k4, Object v4, String k5, Object v5, String k6, Object v6,
String k7, Object v7, String k8, Object v8, String k9, Object v9) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(9));
obj.put(k1, v1);
obj.put(k2, v2);
obj.put(k3, v3);
obj.put(k4, v4);
obj.put(k5, v5);
obj.put(k6, v6);
obj.put(k7, v7);
obj.put(k8, v8);
obj.put(k9, v9);
return obj;
}
/**
* Create a JsonObject containing ten mappings.
*
* @param k1 the first mapping's key
* @param v1 the first mapping's value
* @param k2 the second mapping's key
* @param v2 the second mapping's value
* @param k3 the third mapping's key
* @param v3 the third mapping's value
* @param k4 the fourth mapping's key
* @param v4 the fourth mapping's value
* @param k5 the fifth mapping's key
* @param v5 the fifth mapping's value
* @param k6 the sixth mapping's key
* @param v6 the sixth mapping's value
* @param k7 the seventh mapping's key
* @param v7 the seventh mapping's value
* @param k8 the eighth mapping's key
* @param v8 the eighth mapping's value
* @param k9 the ninth mapping's key
* @param v9 the ninth mapping's value
* @param k10 the tenth mapping's key
* @param v10 the tenth mapping's value
* @return a JsonObject containing the specified mappings.
*/
public static JsonObject of(String k1, Object v1, String k2, Object v2, String k3, Object v3,
String k4, Object v4, String k5, Object v5, String k6, Object v6,
String k7, Object v7, String k8, Object v8, String k9, Object v9,
String k10, Object v10) {
JsonObject obj = new JsonObject(new LinkedHashMap<>(10));
obj.put(k1, v1);
obj.put(k2, v2);
obj.put(k3, v3);
obj.put(k4, v4);
obj.put(k5, v5);
obj.put(k6, v6);
obj.put(k7, v7);
obj.put(k8, v8);
obj.put(k9, v9);
obj.put(k10, v10);
return obj;
}
    /**
     * Create a JsonObject from the fields of a Java object.
     * Faster than calling `new JsonObject(Json.encode(obj))`.
     * <p>
     * Returns {@code null} when {@code obj} is {@code null}.
     *
     * @param obj The object to convert to a JsonObject.
     * @return the JsonObject representation of {@code obj}, or {@code null} when {@code obj} is {@code null}
     * @throws IllegalArgumentException if conversion fails due to an incompatible type.
     */
    @SuppressWarnings("unchecked")
    public static JsonObject mapFrom(Object obj) {
        if (obj == null) {
            return null;
        } else {
            // delegate the field extraction to the shared codec; the codec yields a Map view of obj
            return new JsonObject((Map<String, Object>) Json.CODEC.fromValue(obj, Map.class));
        }
    }
    /**
     * Instantiate a Java object from a JsonObject.
     * Faster than calling `Json.decodeValue(Json.encode(jsonObject), type)`.
     *
     * @param <T> the target type
     * @param type The type to instantiate from the JsonObject.
     * @return an instance of {@code type} populated from this object's entries
     * @throws IllegalArgumentException if the type cannot be instantiated.
     */
    public <T> T mapTo(Class<T> type) {
        return Json.CODEC.fromValue(map, type);
    }
/**
* Get the string value with the specified key, special cases are addressed for extended JSON types {@code Instant},
* {@code byte[]} and {@code Enum} which can be converted to String.
*
* @param key the key to return the value for
* @return the value string representation or null if no value for that key
*/
public String getString(String key) {
Objects.requireNonNull(key);
Object val = map.get(key);
if (val == null) {
return null;
}
if (val instanceof Instant) {
return ISO_INSTANT.format((Instant) val);
} else if (val instanceof byte[]) {
return BASE64_ENCODER.encodeToString((byte[]) val);
} else if (val instanceof Buffer) {
return BASE64_ENCODER.encodeToString(((Buffer) val).getBytes());
} else if (val instanceof Enum) {
return ((Enum) val).name();
} else {
return val.toString();
}
}
/**
* Get the Number value with the specified key
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a Number
*/
public Number getNumber(String key) {
Objects.requireNonNull(key);
return (Number) map.get(key);
}
/**
* Get the Integer value with the specified key
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not an Integer
*/
public Integer getInteger(String key) {
Objects.requireNonNull(key);
Number number = (Number) map.get(key);
if (number == null) {
return null;
} else if (number instanceof Integer) {
return (Integer) number; // Avoids unnecessary unbox/box
} else {
return number.intValue();
}
}
/**
* Get the Long value with the specified key
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a Long
*/
public Long getLong(String key) {
Objects.requireNonNull(key);
Number number = (Number) map.get(key);
if (number == null) {
return null;
} else if (number instanceof Long) {
return (Long) number; // Avoids unnecessary unbox/box
} else {
return number.longValue();
}
}
/**
* Get the Double value with the specified key
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a Double
*/
public Double getDouble(String key) {
Objects.requireNonNull(key);
Number number = (Number) map.get(key);
if (number == null) {
return null;
} else if (number instanceof Double) {
return (Double) number; // Avoids unnecessary unbox/box
} else {
return number.doubleValue();
}
}
/**
* Get the Float value with the specified key
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a Float
*/
public Float getFloat(String key) {
Objects.requireNonNull(key);
Number number = (Number) map.get(key);
if (number == null) {
return null;
} else if (number instanceof Float) {
return (Float) number; // Avoids unnecessary unbox/box
} else {
return number.floatValue();
}
}
/**
* Get the Boolean value with the specified key
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a Boolean
*/
public Boolean getBoolean(String key) {
Objects.requireNonNull(key);
return (Boolean) map.get(key);
}
/**
* Get the JsonObject value with the specified key
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a JsonObject
*/
public JsonObject getJsonObject(String key) {
Objects.requireNonNull(key);
Object val = map.get(key);
if (val instanceof Map) {
val = new JsonObject((Map) val);
}
return (JsonObject) val;
}
/**
* Get the JsonArray value with the specified key
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a JsonArray
*/
public JsonArray getJsonArray(String key) {
Objects.requireNonNull(key);
Object val = map.get(key);
if (val instanceof List) {
val = new JsonArray((List) val);
}
return (JsonArray) val;
}
/**
* Get the binary value with the specified key.
* <p>
* JSON itself has no notion of a binary, this extension complies to the RFC-7493, so this method assumes there is a
* String value with the key and it contains a Base64 encoded binary, which it decodes if found and returns.
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a String
* @throws java.lang.IllegalArgumentException if the String value is not a legal Base64 encoded value
*/
public byte[] getBinary(String key) {
Objects.requireNonNull(key);
Object val = map.get(key);
// no-op
if (val == null) {
return null;
}
// no-op if value is already an byte[]
if (val instanceof byte[]) {
return (byte[]) val;
}
// unwrap if value is already a Buffer
if (val instanceof Buffer) {
return ((Buffer) val).getBytes();
}
// assume that the value is in String format as per RFC
String encoded = (String) val;
// parse to proper type
return BASE64_DECODER.decode(encoded);
}
/**
* Get the {@code Buffer} value with the specified key.
* <p>
* JSON itself has no notion of a binary, this extension complies to the RFC-7493, so this method assumes there is a
* String value with the key and it contains a Base64 encoded binary, which it decodes if found and returns.
*
* @param key the string to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a string
* @throws java.lang.IllegalArgumentException if the value is not a legal Base64 encoded string
*/
public Buffer getBuffer(String key) {
Objects.requireNonNull(key);
Object val = map.get(key);
// no-op
if (val == null) {
return null;
}
// no-op if value is already an Buffer
if (val instanceof Buffer) {
return (Buffer) val;
}
// wrap if value is already an byte[]
if (val instanceof byte[]) {
return Buffer.buffer((byte[]) val);
}
// assume that the value is in String format as per RFC
String encoded = (String) val;
// parse to proper type
return Buffer.buffer(BASE64_DECODER.decode(encoded));
}
/**
* Get the instant value with the specified key.
* <p>
* JSON itself has no notion of a temporal types, this extension allows ISO 8601 string formatted dates with timezone
* always set to zero UTC offset, as denoted by the suffix "Z" to be parsed as a instant value.
* {@code YYYY-MM-DDTHH:mm:ss.sssZ} is the default format used by web browser scripting. This extension complies to
* the RFC-7493 with all the restrictions mentioned before. The method will then decode and return a instant value.
*
* @param key the key to return the value for
* @return the value or null if no value for that key
* @throws java.lang.ClassCastException if the value is not a String
* @throws java.time.format.DateTimeParseException if the String value is not a legal ISO 8601 encoded value
*/
public Instant getInstant(String key) {
Objects.requireNonNull(key);
Object val = map.get(key);
// no-op
if (val == null) {
return null;
}
// no-op if value is already an Instant
if (val instanceof Instant) {
return (Instant) val;
}
// assume that the value is in String format as per RFC
String encoded = (String) val;
// parse to proper type
return Instant.from(ISO_INSTANT.parse(encoded));
}
    /**
     * Get the value with the specified key, as an Object with types respecting the limitations of JSON.
     * <ul>
     * <li>{@code Map} will be wrapped to {@code JsonObject}</li>
     * <li>{@code List} will be wrapped to {@code JsonArray}</li>
     * <li>{@code Instant} will be converted to {@code String}</li>
     * <li>{@code byte[]} will be converted to {@code String}</li>
     * <li>{@code Enum} will be converted to {@code String}</li>
     * </ul>
     *
     * @param key the key to lookup
     * @return the value
     */
    public Object getValue(String key) {
        Objects.requireNonNull(key);
        // wrapJsonValue applies the conversions listed above before handing the value out
        return wrapJsonValue(map.get(key));
    }
/**
* Like {@link #getString(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public String getString(String key, String def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getString(key);
} else {
return def;
}
}
/**
* Like {@link #getNumber(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public Number getNumber(String key, Number def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getNumber(key);
} else {
return def;
}
}
/**
* Like {@link #getInteger(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public Integer getInteger(String key, Integer def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getInteger(key);
} else {
return def;
}
}
/**
* Like {@link #getLong(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public Long getLong(String key, Long def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getLong(key);
} else {
return def;
}
}
/**
* Like {@link #getDouble(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public Double getDouble(String key, Double def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getDouble(key);
} else {
return def;
}
}
/**
* Like {@link #getFloat(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public Float getFloat(String key, Float def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getFloat(key);
} else {
return def;
}
}
/**
* Like {@link #getBoolean(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public Boolean getBoolean(String key, Boolean def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getBoolean(key);
} else {
return def;
}
}
/**
* Like {@link #getJsonObject(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public JsonObject getJsonObject(String key, JsonObject def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getJsonObject(key);
} else {
return def;
}
}
/**
* Like {@link #getJsonArray(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public JsonArray getJsonArray(String key, JsonArray def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getJsonArray(key);
} else {
return def;
}
}
/**
* Like {@link #getBinary(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public byte[] getBinary(String key, byte[] def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getBinary(key);
} else {
return def;
}
}
/**
* Like {@link #getBuffer(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public Buffer getBuffer(String key, Buffer def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getBuffer(key);
} else {
return def;
}
}
/**
* Like {@link #getInstant(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public Instant getInstant(String key, Instant def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getInstant(key);
} else {
return def;
}
}
/**
* Like {@link #getValue(String)} but specifying a default value to return if there is no entry.
*
* @param key the key to lookup
* @param def the default value to use if the entry is not present
* @return the value or {@code def} if no entry present
*/
public Object getValue(String key, Object def) {
Objects.requireNonNull(key);
if (map.containsKey(key)) {
return getValue(key);
} else {
return def;
}
}
/**
* Does the JSON object contain the specified key?
*
* @param key the key
* @return true if it contains the key, false if not.
*/
public boolean containsKey(String key) {
Objects.requireNonNull(key);
return map.containsKey(key);
}
    /**
     * Return the set of field names in the JSON objects.
     *
     * @return the set of field names; this is a live view over the underlying map's key set
     */
    public Set<String> fieldNames() {
        return map.keySet();
    }
/**
* Put a null value into the JSON object with the specified key.
*
* @param key the key
* @return a reference to this, so the API can be used fluently
*/
public JsonObject putNull(String key) {
Objects.requireNonNull(key);
map.put(key, null);
return this;
}
/**
* Put an Object into the JSON object with the specified key.
*
* @param key the key
* @param value the value
* @return a reference to this, so the API can be used fluently
*/
public JsonObject put(String key, Object value) {
Objects.requireNonNull(key);
map.put(key, value);
return this;
}
    /**
     * Remove an entry from this object.
     *
     * @param key the key
     * @return the value that was removed, or null if none; the value is wrapped/converted
     *         with the same rules as {@link #getValue(String)}
     */
    public Object remove(String key) {
        Objects.requireNonNull(key);
        return wrapJsonValue(map.remove(key));
    }
    /**
     * Merge in another JSON object.
     * <p>
     * This is the equivalent of putting all the entries of the other JSON object into this object. This is not a deep
     * merge, entries containing (sub) JSON objects will be replaced entirely.
     *
     * @param other the other JSON object
     * @return a reference to this, so the API can be used fluently
     */
    public JsonObject mergeIn(JsonObject other) {
        // shallow merge: depth of 1
        return mergeIn(other, false);
    }
    /**
     * Merge in another JSON object.
     * A deep merge (recursive) matches (sub) JSON objects in the existing tree and replaces all
     * matching entries. JsonArrays are treated like any other entry, i.e. replaced entirely.
     *
     * @param other the other JSON object
     * @param deep if true, a deep merge is performed
     * @return a reference to this, so the API can be used fluently
     */
    public JsonObject mergeIn(JsonObject other, boolean deep) {
        // deep == unlimited recursion depth; shallow == depth 1
        return mergeIn(other, deep ? Integer.MAX_VALUE : 1);
    }
    /**
     * Merge in another JSON object.
     * The merge is deep (recursive) to the specified level. If depth is 0, no merge is performed,
     * if depth is greater than the depth of one of the objects, a full deep merge is performed.
     *
     * @param other the other JSON object
     * @param depth depth of merge
     * @return a reference to this, so the API can be used fluently
     */
    @SuppressWarnings("unchecked")
    public JsonObject mergeIn(JsonObject other, int depth) {
        if (depth < 1) {
            // depth 0 (or negative): nothing to merge
            return this;
        }
        if (depth == 1) {
            // shallow merge: replace entries wholesale
            map.putAll(other.map);
            return this;
        }
        for (Map.Entry<String, Object> e : other.map.entrySet()) {
            if (e.getValue() == null) {
                // Map.merge rejects null values (it would remove the entry),
                // so null entries are copied with an explicit put
                map.put(e.getKey(), null);
            } else {
                map.merge(e.getKey(), e.getValue(), (oldVal, newVal) -> {
                    // normalize plain Maps to JsonObject so nested objects merge
                    // recursively regardless of their stored representation
                    if (oldVal instanceof Map) {
                        oldVal = new JsonObject((Map) oldVal);
                    }
                    if (newVal instanceof Map) {
                        newVal = new JsonObject((Map) newVal);
                    }
                    if (oldVal instanceof JsonObject && newVal instanceof JsonObject) {
                        // recurse one level deeper
                        return ((JsonObject) oldVal).mergeIn((JsonObject) newVal, depth - 1);
                    }
                    // non-object values (including JsonArray) are replaced entirely
                    return newVal;
                });
            }
        }
        return this;
    }
    /**
     * Encode this JSON object as a string.
     *
     * @return the string encoding.
     */
    public String encode() {
        return Json.CODEC.toString(this, false);
    }
    /**
     * Encode this JSON object as a string, with whitespace to make the object easier to read by a human, or other
     * sentient organism.
     *
     * @return the pretty string encoding.
     */
    public String encodePrettily() {
        return Json.CODEC.toString(this, true);
    }
    /**
     * Encode this JSON object as buffer.
     *
     * @return the buffer encoding.
     */
    public Buffer toBuffer() {
        return Json.CODEC.toBuffer(this, false);
    }
    /**
     * Deep copy of this JSON object.
     *
     * @return a copy where all elements have been copied recursively
     * @throws IllegalStateException when a nested element cannot be copied
     */
    @Override
    public JsonObject copy() {
        // DEFAULT_CLONER rejects unknown element types (hence the IllegalStateException above)
        return copy(DEFAULT_CLONER);
    }
/**
* Deep copy of this JSON object.
*
* <p> Unlike {@link #copy()} that can fail when an unknown element cannot be copied, this method
* delegates the copy of such element to the {@code cloner} function and will not fail.
*
* @param cloner a function that copies custom values not supported by the JSON implementation
* @return a copy where all elements have been copied recursively
*/
public JsonObject copy(Function<Object, ?> cloner) {
Map<String, Object> copiedMap;
if (map instanceof LinkedHashMap) {
copiedMap = new LinkedHashMap<>(map.size());
} else {
copiedMap = new HashMap<>(map.size());
}
for (Map.Entry<String, Object> entry : map.entrySet()) {
Object val = deepCopy(entry.getValue(), cloner);
copiedMap.put(entry.getKey(), val);
}
return new JsonObject(copiedMap);
}
    /**
     * Get the underlying {@code Map} as is.
     * <p>
     * This map may contain values that are not the types returned by the {@code JsonObject} and
     * with an unpredictable representation of the value, e.g you might get a JSON object
     * as a {@link JsonObject} or as a {@link Map}.
     *
     * @return the underlying Map; mutations of it are reflected in this JsonObject
     */
    public Map<String, Object> getMap() {
        return map;
    }
    /**
     * Get a Stream over the entries in the JSON object. The values in the stream will follow
     * the same rules as defined in {@link #getValue(String)}, respecting the JSON requirements.
     * <p>
     * To stream the raw values, use the storage object stream instead:
     * <pre>{@code
     *   jsonObject
     *     .getMap()
     *     .stream()
     * }</pre>
     *
     * @return a Stream
     */
    public Stream<Map.Entry<String, Object>> stream() {
        // iterator() wraps entries so values are converted on access
        return asStream(iterator());
    }
    /**
     * Get an Iterator of the entries in the JSON object.
     *
     * @return an Iterator of the entries; entry values are wrapped/converted on access
     *         following the rules of {@link #getValue(String)}
     */
    @Override
    public Iterator<Map.Entry<String, Object>> iterator() {
        return new Iter(map.entrySet().iterator());
    }
    /**
     * Get the number of entries in the JSON object.
     *
     * @return the number of entries
     */
    public int size() {
        return map.size();
    }
    /**
     * Remove all the entries in this JSON object.
     *
     * @return a reference to this, so the API can be used fluently
     */
    public JsonObject clear() {
        map.clear();
        return this;
    }
    /**
     * Is this object empty?
     *
     * @return true if it has zero entries, false if not.
     */
    public boolean isEmpty() {
        return map.isEmpty();
    }
    /**
     * Returns the compact JSON string encoding of this object, see {@link #encode()}.
     */
    @Override
    public String toString() {
        return encode();
    }
    /**
     * Deep equality: two JsonObjects are equal when they hold the same keys and, for every key,
     * the wrapped values (per {@link #getValue(String)}) compare equal via the shared
     * {@code compare} helper. This makes a nested {@code Map} equal to an equivalent
     * {@code JsonObject}.
     */
    @Override
    public boolean equals(Object o) {
        // null check
        if (o == null) {
            return false;
        }
        // self check
        if (this == o) {
            return true;
        }
        // type check and cast
        if (getClass() != o.getClass()) {
            return false;
        }
        JsonObject other = (JsonObject) o;
        // size check
        if (this.size() != other.size()) {
            return false;
        }
        // value comparison
        for (String key : map.keySet()) {
            if (!other.containsKey(key)) {
                return false;
            }
            Object thisValue = this.getValue(key);
            Object otherValue = other.getValue(key);
            // identity shortcut first (also handles both-null); fall back to the deep compare helper
            if (thisValue != otherValue && !compare(thisValue, otherValue)) {
                return false;
            }
        }
        // all checks passed
        return true;
    }
    /**
     * Hash code consistent with {@link #equals(Object)}: the order-independent sum of
     * per-entry hashes, each combining the key's hash with the value's JSON-aware hash.
     */
    @Override
    public int hashCode() {
        int h = 0;
        for (Map.Entry<String, ?> entry : this) {
            Object key = entry.getKey();
            Object value = entry.getValue();
            // JsonUtil.hashCode handles null and JSON-wrapped values uniformly
            h += (key.hashCode() ^ JsonUtil.hashCode(value));
        }
        return h;
    }
    /**
     * Serializes this object into the given buffer as a 4-byte length prefix followed by
     * the JSON encoding. Counterpart of {@link #readFromBuffer(int, Buffer)}.
     */
    @Override
    public void writeToBuffer(Buffer buffer) {
        Buffer buf = toBuffer();
        buffer.appendInt(buf.length());
        buffer.appendBuffer(buf);
    }
    /**
     * Reads a length-prefixed JSON encoding (as written by {@link #writeToBuffer(Buffer)})
     * from {@code buffer} starting at {@code pos}, replacing this object's contents.
     *
     * @return the position immediately after the consumed bytes (pos + 4 + payload length)
     */
    @Override
    public int readFromBuffer(int pos, Buffer buffer) {
        int length = buffer.getInt(pos);
        int start = pos + 4;
        Buffer buf = buffer.getBuffer(start, start + length);
        fromBuffer(buf);
        return pos + length + 4;
    }
    // Replaces the backing map by decoding the given JSON string.
    private void fromJson(String json) {
        map = Json.CODEC.fromString(json, Map.class);
    }
    // Replaces the backing map by decoding the given JSON buffer.
    private void fromBuffer(Buffer buf) {
        map = Json.CODEC.fromBuffer(buf, Map.class);
    }
private static
|
JsonObject
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/util/InstantiationUtil.java
|
{
"start": 11362,
"end": 11692
}
|
class ____ not be instantiated. The exception
* contains a detailed message about the reason why the instantiation failed.
*/
public static <T> T instantiate(Class<T> clazz, Class<? super T> castTo) {
if (clazz == null) {
throw new NullPointerException();
}
// check if the
|
could
|
java
|
apache__camel
|
core/camel-util/src/test/java/org/apache/camel/util/ScannerTest.java
|
{
"start": 1094,
"end": 2842
}
|
class ____ {
@Test
public void testScannerString() {
String d = "data1\ndata2\ndata3\n";
Scanner s = new Scanner(d, "\n");
Assertions.assertTrue(s.hasNext());
Assertions.assertEquals("data1", s.next());
Assertions.assertTrue(s.hasNext());
Assertions.assertEquals("data2", s.next());
Assertions.assertTrue(s.hasNext());
Assertions.assertEquals("data3", s.next());
Assertions.assertFalse(s.hasNext());
}
@Test
public void testScannerInputStream() {
String d = "data1\ndata2\ndata3\n";
InputStream is = new ByteArrayInputStream(d.getBytes(StandardCharsets.UTF_8));
Scanner s = new Scanner(is, "UTF-8", "\n");
Assertions.assertTrue(s.hasNext());
Assertions.assertEquals("data1", s.next());
Assertions.assertTrue(s.hasNext());
Assertions.assertEquals("data2", s.next());
Assertions.assertTrue(s.hasNext());
Assertions.assertEquals("data3", s.next());
Assertions.assertFalse(s.hasNext());
}
@Test
public void testPipedInputStream() throws Exception {
PipedOutputStream pos = new PipedOutputStream();
InputStream is = new PipedInputStream(pos);
pos.write("data1\n".getBytes());
pos.write("data2\n".getBytes());
pos.write("data3\n".getBytes());
pos.flush();
Scanner s = new Scanner(is, "UTF-8", "\n");
Assertions.assertTrue(s.hasNext());
Assertions.assertEquals("data1", s.next());
Assertions.assertTrue(s.hasNext());
Assertions.assertEquals("data2", s.next());
Assertions.assertTrue(s.hasNext());
Assertions.assertEquals("data3", s.next());
}
}
|
ScannerTest
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestSyncableDataOutputStream.java
|
{
"start": 992,
"end": 1635
}
|
class ____ {
@Test
public void testCloseWhenFlushThrowingIOException() throws Exception {
MockOutputStream out = new MockOutputStream();
SyncableDataOutputStream sdos = new SyncableDataOutputStream(out);
out.flushThrowIOE = true;
LambdaTestUtils.intercept(IOException.class, "An IOE from flush", () -> sdos.close());
MockOutputStream out2 = new MockOutputStream();
out2.flushThrowIOE = true;
LambdaTestUtils.intercept(IOException.class, "An IOE from flush", () -> {
try (SyncableDataOutputStream sdos2 = new SyncableDataOutputStream(out2)) {
}
});
}
private static
|
TestSyncableDataOutputStream
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/bugs/_1650/AMapper.java
|
{
"start": 340,
"end": 700
}
|
interface ____ {
AMapper INSTANCE = Mappers.getMapper( AMapper.class );
@Mapping(target = "cPrime", source = "b.c")
APrime toAPrime(A a, @MappingTarget APrime mappingTarget);
CPrime toCPrime(C c, @MappingTarget CPrime mappingTarget);
@Mapping(target = "cPrime", source = "b.c")
APrime toAPrime(A a);
CPrime toCPrime(C c);
}
|
AMapper
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/parser/JSONScannerTest_scanFieldDouble.java
|
{
"start": 182,
"end": 4526
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
String text = "{\"value\":1.0}";
VO obj = JSON.parseObject(text, VO.class);
Assert.assertTrue(1D == obj.getValue());
}
public void test_1() throws Exception {
String text = "{\"value\":\"1\"}";
VO obj = JSON.parseObject(text, VO.class);
Assert.assertTrue(1D == obj.getValue());
}
public void test_2() throws Exception {
String text = "{\"f1\":2,\"value\":1.0}";
VO obj = JSON.parseObject(text, VO.class);
Assert.assertTrue(1D == obj.getValue());
}
public void test_3() throws Exception {
String text = "{\"value\":1.01}";
VO obj = JSON.parseObject(text, VO.class);
Assert.assertTrue(1.01D == obj.getValue());
}
public void test_4() throws Exception {
String text = "{\"value\":1.}";
VO obj = JSON.parseObject(text, VO.class);
Assert.assertTrue(1D == obj.getValue());
}
public void test_5() throws Exception {
String text = "{\"value\":922337203685477580723}";
VO obj = JSON.parseObject(text, VO.class);
Assert.assertTrue(922337203685477580723D == obj.getValue());
}
public void test_error_2() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":32K}";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_3() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":32}{";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_4() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":中}";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_5() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":3.F";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_6() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":3.2]";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_7() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":3.2}]";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_8() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":3.2}}";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_9() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":3.2},";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_10() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":3.\\0}";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_error_11() throws Exception {
JSONException error = null;
try {
String text = "{\"value\":3.中}";
JSON.parseObject(text, VO.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public static
|
JSONScannerTest_scanFieldDouble
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/transport/ClusterConnectionManagerTests.java
|
{
"start": 2851,
"end": 18177
}
|
class ____ extends ESTestCase {
private ClusterConnectionManager connectionManager;
private ThreadPool threadPool;
private Transport transport;
private ConnectionProfile connectionProfile;
@Before
public void createConnectionManager() {
Settings settings = Settings.builder().put("node.name", ClusterConnectionManagerTests.class.getSimpleName()).build();
threadPool = new ThreadPool(settings, MeterRegistry.NOOP, new DefaultBuiltInExecutorBuilders());
transport = mock(Transport.class);
connectionManager = new ClusterConnectionManager(settings, transport, threadPool.getThreadContext());
TimeValue oneSecond = new TimeValue(1000);
TimeValue oneMinute = TimeValue.timeValueMinutes(1);
connectionProfile = ConnectionProfile.buildSingleChannelProfile(
TransportRequestOptions.Type.REG,
oneSecond,
oneSecond,
oneMinute,
Compression.Enabled.FALSE,
Compression.Scheme.DEFLATE
);
}
@After
public void stopThreadPool() {
ThreadPool.terminate(threadPool, 10L, TimeUnit.SECONDS);
}
public void testConnectAndDisconnect() {
AtomicInteger nodeConnectedCount = new AtomicInteger();
AtomicInteger nodeDisconnectedCount = new AtomicInteger();
connectionManager.addListener(new TransportConnectionListener() {
@Override
public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) {
nodeConnectedCount.incrementAndGet();
}
@Override
public void onNodeDisconnected(DiscoveryNode node, @Nullable Exception closeException) {
nodeDisconnectedCount.incrementAndGet();
}
});
DiscoveryNode node = DiscoveryNodeUtils.create("", new TransportAddress(InetAddress.getLoopbackAddress(), 0));
Transport.Connection connection = new TestConnect(node);
PlainActionFuture<Void> closeListener = new PlainActionFuture<>();
connection.addCloseListener(closeListener);
doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
ActionListener<Transport.Connection> listener = (ActionListener<Transport.Connection>) invocationOnMock.getArguments()[2];
listener.onResponse(connection);
return null;
}).when(transport).openConnection(eq(node), eq(connectionProfile), anyActionListener());
assertFalse(connectionManager.nodeConnected(node));
final var validatedConnectionRef = new AtomicReference<Transport.Connection>();
ConnectionManager.ConnectionValidator validator = (c, p, l) -> {
validatedConnectionRef.set(c);
l.onResponse(null);
};
safeAwait(listener -> connectionManager.connectToNode(node, connectionProfile, validator, listener.map(x -> null)));
assertFalse(connection.isClosed());
assertTrue(connectionManager.nodeConnected(node));
assertSame(connection, validatedConnectionRef.get());
assertSame(connection, connectionManager.getConnection(node));
assertEquals(1, connectionManager.size());
assertEquals(1, nodeConnectedCount.get());
assertEquals(0, nodeDisconnectedCount.get());
if (randomBoolean()) {
connectionManager.disconnectFromNode(node);
} else {
connection.close();
}
assertTrue(connection.isClosed());
assertThat(closeListener.actionGet(), nullValue());
assertEquals(0, connectionManager.size());
assertEquals(1, nodeConnectedCount.get());
assertEquals(1, nodeDisconnectedCount.get());
}
@TestLogging(
reason = "testing log messages emitted on disconnect",
value = "org.elasticsearch.transport.ClusterConnectionManager:TRACE"
)
public void testDisconnectLogging() {
final Supplier<DiscoveryNode> nodeFactory = () -> DiscoveryNodeUtils.create(
randomAlphaOfLength(10),
new TransportAddress(InetAddress.getLoopbackAddress(), 0),
Collections.singletonMap("attr", "val"),
DiscoveryNodeRole.roles()
);
final DiscoveryNode remoteClose = nodeFactory.get();
final DiscoveryNode localClose = nodeFactory.get();
final DiscoveryNode shutdownClose = nodeFactory.get();
doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
final ActionListener<Transport.Connection> listener = (ActionListener<Transport.Connection>) invocationOnMock.getArguments()[2];
final DiscoveryNode discoveryNode = (DiscoveryNode) invocationOnMock.getArguments()[0];
listener.onResponse(new TestConnect(discoveryNode));
return null;
}).when(transport).openConnection(any(), eq(connectionProfile), anyActionListener());
final ConnectionManager.ConnectionValidator validator = (c, p, l) -> l.onResponse(null);
final AtomicReference<Releasable> toClose = new AtomicReference<>();
safeAwait(l -> connectionManager.connectToNode(remoteClose, connectionProfile, validator, l.map(x -> null)));
safeAwait(l -> connectionManager.connectToNode(shutdownClose, connectionProfile, validator, l.map(x -> null)));
safeAwait(l -> connectionManager.connectToNode(localClose, connectionProfile, validator, l.map(toClose::getAndSet)));
final Releasable localConnectionRef = toClose.getAndSet(null);
assertThat(localConnectionRef, notNullValue());
try (var mockLog = MockLog.capture(ClusterConnectionManager.class)) {
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"locally-triggered close message",
ClusterConnectionManager.class.getCanonicalName(),
Level.DEBUG,
"closing unused transport connection to [" + localClose + "]"
)
);
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"remotely-triggered close message",
ClusterConnectionManager.class.getCanonicalName(),
Level.INFO,
"transport connection to ["
+ remoteClose.descriptionWithoutAttributes()
+ "] closed by remote; "
+ "if unexpected, see [https://www.elastic.co/docs/*] for troubleshooting guidance"
)
);
mockLog.addExpectation(
new MockLog.SeenEventExpectation(
"shutdown-triggered close message",
ClusterConnectionManager.class.getCanonicalName(),
Level.TRACE,
"connection manager shut down, closing transport connection to [" + shutdownClose + "]"
)
);
Releasables.close(localConnectionRef);
connectionManager.disconnectFromNode(remoteClose);
connectionManager.close();
mockLog.assertAllExpectationsMatched();
}
}
public void testConcurrentConnects() throws Exception {
Set<Transport.Connection> connections = ConcurrentCollections.newConcurrentSet();
DiscoveryNode node = DiscoveryNodeUtils.create("", new TransportAddress(InetAddress.getLoopbackAddress(), 0));
doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
ActionListener<Transport.Connection> listener = (ActionListener<Transport.Connection>) invocationOnMock.getArguments()[2];
boolean success = randomBoolean();
if (success) {
Transport.Connection connection = new TestConnect(node);
connections.add(connection);
if (randomBoolean()) {
listener.onResponse(connection);
} else {
threadPool.generic().execute(() -> listener.onResponse(connection));
}
} else {
threadPool.generic().execute(() -> listener.onFailure(new IllegalStateException("dummy exception")));
}
return null;
}).when(transport).openConnection(eq(node), eq(connectionProfile), anyActionListener());
assertFalse(connectionManager.nodeConnected(node));
ConnectionManager.ConnectionValidator validator = (c, p, l) -> {
boolean success = randomBoolean();
if (success) {
if (randomBoolean()) {
l.onResponse(null);
} else {
threadPool.generic().execute(() -> l.onResponse(null));
}
} else {
threadPool.generic().execute(() -> l.onFailure(new IllegalStateException("dummy exception")));
}
};
List<Thread> threads = new ArrayList<>();
AtomicInteger nodeConnectedCount = new AtomicInteger();
AtomicInteger nodeClosedCount = new AtomicInteger();
AtomicInteger nodeFailureCount = new AtomicInteger();
int threadCount = between(1, 10);
Releasable[] releasables = new Releasable[threadCount];
final ThreadContext threadContext = threadPool.getThreadContext();
final String contextHeader = "test-context-header";
CyclicBarrier barrier = new CyclicBarrier(threadCount + 1);
Semaphore pendingCloses = new Semaphore(threadCount);
for (int i = 0; i < threadCount; i++) {
final int threadIndex = i;
Thread thread = new Thread(() -> {
safeAwait(barrier);
CountDownLatch latch = new CountDownLatch(1);
try (ThreadContext.StoredContext ignored = threadContext.stashContext()) {
final String contextValue = randomAlphaOfLength(10);
threadContext.putHeader(contextHeader, contextValue);
connectionManager.connectToNode(node, connectionProfile, validator, ActionListener.wrap(c -> {
assert connectionManager.nodeConnected(node);
assertThat(threadContext.getHeader(contextHeader), equalTo(contextValue));
assertTrue(pendingCloses.tryAcquire());
connectionManager.getConnection(node).addRemovedListener(ActionListener.running(pendingCloses::release));
if (randomBoolean()) {
releasables[threadIndex] = c;
nodeConnectedCount.incrementAndGet();
} else {
Releasables.close(c);
nodeClosedCount.incrementAndGet();
}
assert latch.getCount() == 1;
latch.countDown();
}, e -> {
assertThat(threadContext.getHeader(contextHeader), equalTo(contextValue));
nodeFailureCount.incrementAndGet();
assert latch.getCount() == 1;
latch.countDown();
}));
}
safeAwait(latch);
});
threads.add(thread);
thread.start();
}
safeAwait(barrier);
threads.forEach(t -> {
try {
t.join();
} catch (InterruptedException e) {
throw new IllegalStateException(e);
}
});
assertEquals(threadCount, nodeConnectedCount.get() + nodeClosedCount.get() + nodeFailureCount.get());
if (nodeConnectedCount.get() == 0) {
// Any successful connections were closed
safeAcquire(threadCount, pendingCloses);
pendingCloses.release(threadCount);
assertTrue(connections.stream().allMatch(Transport.Connection::isClosed));
assertEquals(0, connectionManager.size());
} else {
assertEquals(1, connectionManager.size());
assertEquals(1L, connections.stream().filter(c -> c.isClosed() == false).count());
}
if (randomBoolean()) {
Releasables.close(releasables);
safeAcquire(threadCount, pendingCloses);
pendingCloses.release(threadCount);
assertEquals(0, connectionManager.size());
assertTrue(connections.stream().allMatch(Transport.Connection::isClosed));
}
connectionManager.close();
// The connection manager will close all open connections
for (Transport.Connection connection : connections) {
assertTrue(connection.isClosed());
}
}
public void testConcurrentConnectsDuringClose() throws Exception {
// This test ensures that closing the connection manager doesn't block forever, even if there's a constant stream of attempts to
// open connections. Note that closing the connection manager _does_ block while there are in-flight connection attempts, and in
// practice each attempt will (eventually) finish, so we're just trying to test that constant open attempts do not cause starvation.
//
// It works by spawning connection-open attempts in several concurrent loops, putting a Runnable to complete each attempt into a
// queue, and then consuming and completing the enqueued runnables in a separate thread. The consuming thread is throttled via a
// Semaphore, from which the main thread steals a permit which ensures that there's always at least one pending connection while the
// close is ongoing even though no connection attempt blocks forever.
final var pendingConnectionPermits = new Semaphore(0);
final var pendingConnections = ConcurrentCollections.<Runnable>newQueue();
// transport#openConnection enqueues a Runnable to complete the connection attempt
doAnswer(invocationOnMock -> {
@SuppressWarnings("unchecked")
final var listener = (ActionListener<Transport.Connection>) invocationOnMock.getArguments()[2];
final var targetNode = (DiscoveryNode) invocationOnMock.getArguments()[0];
pendingConnections.add(() -> listener.onResponse(new TestConnect(targetNode)));
pendingConnectionPermits.release();
return null;
}).when(transport).openConnection(any(), eq(connectionProfile), anyActionListener());
final ConnectionManager.ConnectionValidator validator = (c, p, l) -> l.onResponse(null);
// Once we start to see connections being rejected, we give back the stolen permit so that the last connection can complete
final var onConnectException = new RunOnce(pendingConnectionPermits::release);
// Create a few threads which open connections in a loop. Must be at least 2 so that there's always more connections incoming.
final var connectionLoops = between(2, 4);
final var connectionLoopCountDown = new CountDownLatch(connectionLoops);
final var expectConnectionFailures = new AtomicBoolean(); // unexpected failures would make this test pass vacuously
|
ClusterConnectionManagerTests
|
java
|
bumptech__glide
|
library/test/src/test/java/com/bumptech/glide/load/resource/bitmap/DefaultImageHeaderParserTest.java
|
{
"start": 30996,
"end": 31988
}
|
interface ____ {
void run(DefaultImageHeaderParser parser, InputStream is, ArrayPool byteArrayPool)
throws IOException;
void run(DefaultImageHeaderParser parser, ByteBuffer byteBuffer, ArrayPool byteArrayPool)
throws IOException;
}
private static void runTest(byte[] data, ParserTestCase test) throws IOException {
InputStream is = new ByteArrayInputStream(data);
DefaultImageHeaderParser parser = new DefaultImageHeaderParser();
test.run(parser, is, new LruArrayPool());
ByteBuffer buffer = ByteBuffer.wrap(data);
parser = new DefaultImageHeaderParser();
test.run(parser, buffer, new LruArrayPool());
}
private static byte[] generatePngHeaderWithIhdr(int bitDepth) {
byte[] result = new byte[PNG_HEADER_WITH_IHDR_CHUNK.length];
System.arraycopy(PNG_HEADER_WITH_IHDR_CHUNK, 0, result, 0, PNG_HEADER_WITH_IHDR_CHUNK.length);
result[result.length - 1] = (byte) bitDepth;
return result;
}
private static
|
ParserTestCase
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestShuffleProvider.java
|
{
"start": 5824,
"end": 6398
}
|
class ____ extends AuxiliaryService {
public static final String MAPREDUCE_TEST_SHUFFLE_SERVICEID = "test_shuffle1";
public TestShuffleHandler1() {
super("testshuffle1");
}
@Override
public void initializeApplication(ApplicationInitializationContext context) {
}
@Override
public void stopApplication(ApplicationTerminationContext context) {
}
@Override
public synchronized ByteBuffer getMetaData() {
return ByteBuffer.allocate(0); // Don't 'return null' because of YARN-1256
}
}
static public
|
TestShuffleHandler1
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/android/FragmentNotInstantiableTest.java
|
{
"start": 5993,
"end": 6084
}
|
class ____ extends android.support.v4.app.Fragment {}
public static
|
MyV4Fragment
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/flush_statement_npe/PersonMapper.java
|
{
"start": 715,
"end": 802
}
|
interface ____ {
Person selectById(int id);
void update(Person person);
}
|
PersonMapper
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.