language stringclasses 1 value | repo stringclasses 60 values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | components/camel-guava-eventbus/src/test/java/org/apache/camel/component/guava/eventbus/CustomMultiEventListener.java | {
"start": 932,
"end": 1088
} | interface ____ {
@Subscribe
void stringReceived(String stringEvent);
@Subscribe
void dateReceived(Date dateEvent);
}
| CustomMultiEventListener |
java | google__guava | android/guava/src/com/google/common/collect/AbstractMultiset.java | {
"start": 1247,
"end": 1785
} | class ____ implementing the {@link
* Multiset#entrySet()} method, plus optionally overriding {@link #add(Object, int)} and {@link
* #remove(Object, int)} to enable modifications to the multiset.
*
* <p>The {@link #count} and {@link #size} implementations all iterate across the set returned by
* {@link Multiset#entrySet()}, as do many methods acting on the set returned by {@link
* #elementSet()}. Override those methods for better performance.
*
* @author Kevin Bourrillion
* @author Louis Wasserman
*/
@GwtCompatible
abstract | and |
java | apache__kafka | metadata/src/test/java/org/apache/kafka/metadata/util/RecordRedactorTest.java | {
"start": 1442,
"end": 4391
} | class ____ {
public static final Map<ConfigResource.Type, ConfigDef> CONFIGS = new HashMap<>();
static {
CONFIGS.put(BROKER, new ConfigDef().
define("foobar", ConfigDef.Type.LIST, "1", ConfigDef.Importance.HIGH, "foo bar doc").
define("quux", ConfigDef.Type.PASSWORD, ConfigDef.Importance.HIGH, "quuux2 doc"));
}
private static final KafkaConfigSchema SCHEMA = new KafkaConfigSchema(CONFIGS, Map.of());
private static final RecordRedactor REDACTOR = new RecordRedactor(SCHEMA);
@Test
public void testTopicRecordToString() {
assertEquals("TopicRecord(name='foo', topicId=UOovKkohSU6AGdYW33ZUNg)",
REDACTOR.toLoggableString(new TopicRecord().
setTopicId(Uuid.fromString("UOovKkohSU6AGdYW33ZUNg")).
setName("foo")));
}
@Test
public void testUserScramCredentialRecordToString() {
assertEquals("UserScramCredentialRecord(name='bob', mechanism=0, " +
"salt=(redacted), storedKey=(redacted), serverKey=(redacted), iterations=128)",
REDACTOR.toLoggableString(new UserScramCredentialRecord().
setName("bob").
setMechanism((byte) 0).
setSalt(new byte[512]).
setServerKey(new byte[128]).
setStoredKey(new byte[128]).
setIterations(128)));
}
@Test
public void testUserScramCredentialRecordToStringWithNullName() {
assertEquals("UserScramCredentialRecord(name=null, mechanism=1, " +
"salt=(redacted), storedKey=(redacted), serverKey=(redacted), iterations=256)",
REDACTOR.toLoggableString(new UserScramCredentialRecord().
setName(null).
setMechanism((byte) 1).
setSalt(new byte[512]).
setServerKey(new byte[128]).
setStoredKey(new byte[128]).
setIterations(256)));
}
@Test
public void testSensitiveConfigRecordToString() {
assertEquals("ConfigRecord(resourceType=4, resourceName='0', name='quux', " +
"value='(redacted)')",
REDACTOR.toLoggableString(new ConfigRecord().
setResourceType(BROKER.id()).
setResourceName("0").
setName("quux").
setValue("mysecret")));
}
@Test
public void testNonSensitiveConfigRecordToString() {
assertEquals("ConfigRecord(resourceType=4, resourceName='0', name='foobar', " +
"value='item1,item2')",
REDACTOR.toLoggableString(new ConfigRecord().
setResourceType(BROKER.id()).
setResourceName("0").
setName("foobar").
setValue("item1,item2")));
}
}
| RecordRedactorTest |
java | redisson__redisson | redisson/src/main/java/org/redisson/spring/transaction/ReactiveRedissonResourceHolder.java | {
"start": 817,
"end": 1136
} | class ____ extends ResourceHolderSupport {
private RTransactionReactive transaction;
public RTransactionReactive getTransaction() {
return transaction;
}
public void setTransaction(RTransactionReactive transaction) {
this.transaction = transaction;
}
}
| ReactiveRedissonResourceHolder |
java | spring-projects__spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/mvc/method/annotation/ResponseBodyEmitterReturnValueHandler.java | {
"start": 10787,
"end": 13178
} | class ____ implements ResponseBodyEmitter.Handler {
private final List<HttpMessageConverter<?>> messageConverters;
private final FragmentHandler fragmentHandler;
private final ServerHttpResponse outputMessage;
private final DeferredResult<?> deferredResult;
public DefaultSseEmitterHandler(
List<HttpMessageConverter<?>> messageConverters, FragmentHandler fragmentHandler,
ServerHttpResponse outputMessage, DeferredResult<?> result) {
this.messageConverters = messageConverters;
this.fragmentHandler = fragmentHandler;
this.outputMessage = outputMessage;
this.deferredResult = result;
}
@Override
public void send(Object data, @Nullable MediaType mediaType) throws IOException {
sendInternal(data, mediaType);
this.outputMessage.flush();
}
@Override
public void send(Set<ResponseBodyEmitter.DataWithMediaType> items) throws IOException {
for (ResponseBodyEmitter.DataWithMediaType item : items) {
sendInternal(item.getData(), item.getMediaType());
}
this.outputMessage.flush();
}
@SuppressWarnings("unchecked")
private <T> void sendInternal(T data, @Nullable MediaType mediaType) throws IOException {
if (data instanceof ModelAndView mav) {
this.fragmentHandler.handle(mav);
return;
}
for (HttpMessageConverter<?> converter : this.messageConverters) {
if (converter.canWrite(data.getClass(), mediaType)) {
((HttpMessageConverter<T>) converter).write(data, mediaType, this.outputMessage);
return;
}
}
throw new IllegalArgumentException("No suitable converter for " + data.getClass());
}
@Override
public void complete() {
try {
this.outputMessage.flush();
this.deferredResult.setResult(null);
}
catch (IOException ex) {
this.deferredResult.setErrorResult(ex);
}
}
@Override
public void completeWithError(Throwable failure) {
this.deferredResult.setErrorResult(failure);
}
@Override
public void onTimeout(Runnable callback) {
this.deferredResult.onTimeout(callback);
}
@Override
public void onError(Consumer<Throwable> callback) {
this.deferredResult.onError(callback);
}
@Override
public void onCompletion(Runnable callback) {
this.deferredResult.onCompletion(callback);
}
}
/**
* Handler that renders ModelAndView fragments via FragmentsRendering.
*/
private static final | DefaultSseEmitterHandler |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/inheritance/Strawberry.java | {
"start": 264,
"end": 444
} | class ____ extends Fruit {
private Long size;
@Column(name="size_")
public Long getSize() {
return size;
}
public void setSize(Long size) {
this.size = size;
}
}
| Strawberry |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/property/access/internal/PropertyAccessStrategyIndexBackRefImpl.java | {
"start": 1230,
"end": 1861
} | class ____ implements PropertyAccess {
private final PropertyAccessStrategyIndexBackRefImpl strategy;
private final GetterImpl getter;
public PropertyAccessIndexBackRefImpl(PropertyAccessStrategyIndexBackRefImpl strategy) {
this.strategy = strategy;
this.getter = new GetterImpl( strategy.entityName, strategy.propertyName );
}
@Override
public PropertyAccessStrategy getPropertyAccessStrategy() {
return strategy;
}
@Override
public Getter getGetter() {
return getter;
}
@Override
public Setter getSetter() {
return SetterImpl.INSTANCE;
}
}
private static | PropertyAccessIndexBackRefImpl |
java | junit-team__junit5 | platform-tests/src/test/java/org/junit/platform/commons/support/ReflectionSupportTests.java | {
"start": 21284,
"end": 21377
} | class ____ {
@SuppressWarnings({ "InnerClassMayBeStatic", "unused" })
| ClassWithNestedClasses |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/OverrideThrowableToStringTest.java | {
"start": 5702,
"end": 6068
} | class ____ extends Throwable {
public String getMessage() {
return "";
}
}
}\
""")
.doTest(TestMode.AST_MATCH);
}
@Test
public void suppressionOnMethod() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| NoOverride |
java | spring-projects__spring-boot | module/spring-boot-tomcat/src/main/java/org/springframework/boot/tomcat/reactive/TomcatReactiveWebServerFactory.java | {
"start": 2056,
"end": 5040
} | class ____ extends TomcatWebServerFactory
implements ConfigurableTomcatWebServerFactory, ConfigurableReactiveWebServerFactory {
/**
* Create a new {@link TomcatReactiveWebServerFactory} instance.
*/
public TomcatReactiveWebServerFactory() {
}
/**
* Create a new {@link TomcatReactiveWebServerFactory} that listens for requests using
* the specified port.
* @param port the port to listen on
*/
public TomcatReactiveWebServerFactory(int port) {
super(port);
}
@Override
public WebServer getWebServer(HttpHandler httpHandler) {
Tomcat tomcat = createTomcat();
TomcatHttpHandlerAdapter servlet = new TomcatHttpHandlerAdapter(httpHandler);
prepareContext(tomcat.getHost(), servlet);
return getTomcatWebServer(tomcat);
}
protected void prepareContext(Host host, TomcatHttpHandlerAdapter servlet) {
File docBase = createTempDir("tomcat-docbase");
TomcatEmbeddedContext context = new TomcatEmbeddedContext();
WebResourceRoot resourceRoot = new StandardRoot(context);
ignoringNoSuchMethodError(() -> resourceRoot.setReadOnly(true));
context.setResources(resourceRoot);
context.setPath("");
context.setDocBase(docBase.getAbsolutePath());
context.addLifecycleListener(new Tomcat.FixContextListener());
ClassLoader parentClassLoader = ClassUtils.getDefaultClassLoader();
context.setParentClassLoader(parentClassLoader);
skipAllTldScanning(context);
WebappLoader loader = new WebappLoader();
loader.setLoaderInstance(new TomcatEmbeddedWebappClassLoader(parentClassLoader));
loader.setDelegate(true);
context.setLoader(loader);
Tomcat.addServlet(context, "httpHandlerServlet", servlet).setAsyncSupported(true);
context.addServletMappingDecoded("/", "httpHandlerServlet");
host.addChild(context);
configureContext(context);
}
private void ignoringNoSuchMethodError(Runnable method) {
try {
method.run();
}
catch (NoSuchMethodError ex) {
}
}
private void skipAllTldScanning(TomcatEmbeddedContext context) {
StandardJarScanFilter filter = new StandardJarScanFilter();
filter.setTldSkip("*.jar");
context.getJarScanner().setJarScanFilter(filter);
}
/**
* Configure the Tomcat {@link Context}.
* @param context the Tomcat context
*/
protected void configureContext(Context context) {
this.getContextLifecycleListeners().forEach(context::addLifecycleListener);
new DisableReferenceClearingContextCustomizer().customize(context);
this.getContextCustomizers().forEach((customizer) -> customizer.customize(context));
}
/**
* Factory method called to create the {@link TomcatWebServer}. Subclasses can
* override this method to return a different {@link TomcatWebServer} or apply
* additional processing to the Tomcat server.
* @param tomcat the Tomcat server.
* @return a new {@link TomcatWebServer} instance
*/
protected TomcatWebServer getTomcatWebServer(Tomcat tomcat) {
return new TomcatWebServer(tomcat, getPort() >= 0, getShutdown());
}
}
| TomcatReactiveWebServerFactory |
java | quarkusio__quarkus | extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/asynchronous/AsynchronousBean.java | {
"start": 298,
"end": 453
} | class ____ {
@Asynchronous
public CompletionStage<String> hello() {
return CompletableFuture.completedFuture("hello");
}
}
| AsynchronousBean |
java | apache__camel | components/camel-ibm/camel-ibm-secrets-manager/src/test/java/org/apache/camel/component/ibm/secrets/manager/integration/operations/IBMSecretsManagerListSecretsIT.java | {
"start": 2127,
"end": 5084
} | class ____ extends CamelTestSupport {
@EndpointInject("mock:result-write")
private MockEndpoint mockWrite;
@EndpointInject("mock:result-list")
private MockEndpoint mockList;
@EndpointInject("mock:result-delete")
private MockEndpoint mockDelete;
@Test
public void createSecretTest() throws InterruptedException {
mockWrite.expectedMessageCount(1);
mockList.expectedMessageCount(1);
mockDelete.expectedMessageCount(1);
Exchange createdSec = template.request("direct:createSecret", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getIn().setBody("test");
exchange.getMessage().setHeader(IBMSecretsManagerConstants.SECRET_NAME, "secret1");
}
});
Exchange listSec = template.request("direct:listSecrets", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getMessage().setHeader(IBMSecretsManagerConstants.SECRET_ID, createdSec.getMessage().getBody());
}
});
template.request("direct:deleteSecret", new Processor() {
@Override
public void process(Exchange exchange) {
exchange.getMessage().setHeader(IBMSecretsManagerConstants.SECRET_ID, createdSec.getMessage().getBody());
}
});
MockEndpoint.assertIsSatisfied(context);
Exchange ret = mockList.getExchanges().get(0);
assertNotNull(ret);
SecretMetadataPaginatedCollection collection = ret.getMessage().getBody(SecretMetadataPaginatedCollection.class);
assertEquals("secret1", collection.getSecrets().get(0).getName());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:createSecret")
.toF("ibm-secrets-manager://secret?operation=createArbitrarySecret&token=RAW(%s)&serviceUrl=%s",
System.getProperty("camel.ibm.sm.token"), System.getProperty("camel.ibm.sm.serviceurl"))
.to("mock:result-write");
from("direct:listSecrets")
.toF("ibm-secrets-manager://secret?operation=listSecrets&token=RAW(%s)&serviceUrl=%s",
System.getProperty("camel.ibm.sm.token"), System.getProperty("camel.ibm.sm.serviceurl"))
.to("mock:result-list");
from("direct:deleteSecret")
.toF("ibm-secrets-manager://secret?operation=deleteSecret&token=RAW(%s)&serviceUrl=%s",
System.getProperty("camel.ibm.sm.token"), System.getProperty("camel.ibm.sm.serviceurl"))
.to("mock:result-delete");
}
};
}
}
| IBMSecretsManagerListSecretsIT |
java | google__error-prone | check_api/src/test/java/com/google/errorprone/util/FindIdentifiersTest.java | {
"start": 38246,
"end": 38416
} | class ____ extends java.lang.Object {
// BUG: Diagnostic contains:
Test.A foo() {
return null;
}
| Test |
java | quarkusio__quarkus | integration-tests/native-config-profile/src/test/java/io/quarkus/it/nat/test/profile/BuiltTimeValueChangeManualIT.java | {
"start": 1369,
"end": 1622
} | class ____ implements QuarkusTestProfile {
@Override
public Map<String, String> getConfigOverrides() {
return Collections.singletonMap("quarkus.arc.remove-unused-beans", "all");
}
}
}
| BuildTimeValueChangeTestProfile |
java | netty__netty | handler/src/main/java/io/netty/handler/ssl/ExtendedOpenSslSession.java | {
"start": 6799,
"end": 8163
} | class ____ implements SSLSessionBindingListener {
final SSLSessionBindingListener delegate;
SSLSessionBindingListenerDecorator(SSLSessionBindingListener delegate) {
this.delegate = delegate;
}
@Override
public void valueBound(SSLSessionBindingEvent event) {
delegate.valueBound(new SSLSessionBindingEvent(ExtendedOpenSslSession.this, event.getName()));
}
@Override
public void valueUnbound(SSLSessionBindingEvent event) {
delegate.valueUnbound(new SSLSessionBindingEvent(ExtendedOpenSslSession.this, event.getName()));
}
}
@Override
public void handshakeFinished(byte[] id, String cipher, String protocol, byte[] peerCertificate,
byte[][] peerCertificateChain, long creationTime, long timeout) throws SSLException {
wrapped.handshakeFinished(id, cipher, protocol, peerCertificate, peerCertificateChain, creationTime, timeout);
}
@Override
public boolean equals(Object o) {
return wrapped.equals(o);
}
@Override
public int hashCode() {
return wrapped.hashCode();
}
@Override
public String toString() {
return "ExtendedOpenSslSession{" +
"wrapped=" + wrapped +
'}';
}
}
| SSLSessionBindingListenerDecorator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/ImmutableCheckerTest.java | {
"start": 3885,
"end": 4059
} | interface ____ {}
""")
.addSourceLines(
"MyTest.java",
"""
import java.lang.annotation.Annotation;
final | Test |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/errors/PrincipalDeserializationException.java | {
"start": 954,
"end": 1286
} | class ____ extends ApiException {
private static final long serialVersionUID = 1L;
public PrincipalDeserializationException(String message) {
super(message);
}
public PrincipalDeserializationException(String message, Throwable cause) {
super(message, cause);
}
}
| PrincipalDeserializationException |
java | elastic__elasticsearch | x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/LocalSourceOperator.java | {
"start": 1343,
"end": 2549
} | interface ____ extends Supplier<Page> {}
protected final PageSupplier supplier;
boolean finished;
public LocalSourceOperator(BlockFactory blockFactory, ObjectSupplier objectSupplier) {
this(() -> fromListRow(blockFactory, objectSupplier.get()));
}
public LocalSourceOperator(BlockFactory blockFactory, ListSupplier listSupplier) {
this(() -> fromList(blockFactory, listSupplier.get()));
}
public LocalSourceOperator(BlockSupplier blockSupplier) {
this(() -> {
var blocks = blockSupplier.get();
return CollectionUtils.isEmpty(blocks) ? new Page(0, blocks) : new Page(blocks);
});
}
public LocalSourceOperator(PageSupplier pageSupplier) {
this.supplier = pageSupplier;
}
@Override
public void finish() {
finished = true;
}
@Override
public boolean isFinished() {
return finished;
}
@Override
public Page getOutput() {
var page = supplier.get();
finished = true;
return page;
}
@Override
public void close() {}
@Override
public String toString() {
return "LocalSourceOperator";
}
}
| PageSupplier |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/operators/resettable/ReusingBlockResettableIteratorTest.java | {
"start": 1575,
"end": 7341
} | class ____ {
private static final int MEMORY_CAPACITY = 3 * 128 * 1024;
private static final int NUM_VALUES = 20000;
private MemoryManager memman;
private Iterator<Record> reader;
private List<Record> objects;
private final TypeSerializer<Record> serializer = RecordSerializer.get();
@BeforeEach
void startup() {
// set up IO and memory manager
this.memman = MemoryManagerBuilder.newBuilder().setMemorySize(MEMORY_CAPACITY).build();
// create test objects
this.objects = new ArrayList<Record>(20000);
for (int i = 0; i < NUM_VALUES; ++i) {
this.objects.add(new Record(new IntValue(i)));
}
// create the reader
this.reader = objects.iterator();
}
@AfterEach
void shutdown() {
this.objects = null;
// check that the memory manager got all segments back
assertThat(this.memman.verifyEmpty())
.withFailMessage(
"A memory leak has occurred: Not all memory was properly returned to the memory manager.")
.isTrue();
this.memman.shutdown();
this.memman = null;
}
@Test
void testSerialBlockResettableIterator() throws Exception {
final AbstractInvokable memOwner = new DummyInvokable();
// create the resettable Iterator
final ReusingBlockResettableIterator<Record> iterator =
new ReusingBlockResettableIterator<Record>(
this.memman, this.reader, this.serializer, 1, memOwner);
// open the iterator
iterator.open();
// now test walking through the iterator
int lower = 0;
int upper = 0;
do {
lower = upper;
upper = lower;
// find the upper bound
while (iterator.hasNext()) {
Record target = iterator.next();
int val = target.getField(0, IntValue.class).getValue();
assertThat(val).isEqualTo(upper++);
}
// now reset the buffer a few times
for (int i = 0; i < 5; ++i) {
iterator.reset();
int count = 0;
while (iterator.hasNext()) {
Record target = iterator.next();
int val = target.getField(0, IntValue.class).getValue();
assertThat(val).isEqualTo(lower + (count++));
}
assertThat(count).isEqualTo(upper - lower);
}
} while (iterator.nextBlock());
assertThat(upper).isEqualTo(NUM_VALUES);
// close the iterator
iterator.close();
}
@Test
void testDoubleBufferedBlockResettableIterator() throws Exception {
final AbstractInvokable memOwner = new DummyInvokable();
// create the resettable Iterator
final ReusingBlockResettableIterator<Record> iterator =
new ReusingBlockResettableIterator<Record>(
this.memman, this.reader, this.serializer, 2, memOwner);
// open the iterator
iterator.open();
// now test walking through the iterator
int lower = 0;
int upper = 0;
do {
lower = upper;
upper = lower;
// find the upper bound
while (iterator.hasNext()) {
Record target = iterator.next();
int val = target.getField(0, IntValue.class).getValue();
assertThat(val).isEqualTo(upper++);
}
// now reset the buffer a few times
for (int i = 0; i < 5; ++i) {
iterator.reset();
int count = 0;
while (iterator.hasNext()) {
Record target = iterator.next();
int val = target.getField(0, IntValue.class).getValue();
assertThat(val).isEqualTo(lower + (count++));
}
assertThat(count).isEqualTo(upper - lower);
}
} while (iterator.nextBlock());
assertThat(upper).isEqualTo(NUM_VALUES);
// close the iterator
iterator.close();
}
@Test
void testTwelveFoldBufferedBlockResettableIterator() throws Exception {
final AbstractInvokable memOwner = new DummyInvokable();
// create the resettable Iterator
final ReusingBlockResettableIterator<Record> iterator =
new ReusingBlockResettableIterator<Record>(
this.memman, this.reader, this.serializer, 12, memOwner);
// open the iterator
iterator.open();
// now test walking through the iterator
int lower = 0;
int upper = 0;
do {
lower = upper;
upper = lower;
// find the upper bound
while (iterator.hasNext()) {
Record target = iterator.next();
int val = target.getField(0, IntValue.class).getValue();
assertThat(val).isEqualTo(upper++);
}
// now reset the buffer a few times
for (int i = 0; i < 5; ++i) {
iterator.reset();
int count = 0;
while (iterator.hasNext()) {
Record target = iterator.next();
int val = target.getField(0, IntValue.class).getValue();
assertThat(val).isEqualTo(lower + (count++));
}
assertThat(count).isEqualTo(upper - lower);
}
} while (iterator.nextBlock());
assertThat(upper).isEqualTo(NUM_VALUES);
// close the iterator
iterator.close();
}
}
| ReusingBlockResettableIteratorTest |
java | apache__camel | components/camel-splunk/src/test/java/org/apache/camel/component/splunk/integration/RealtimeSearchManualTest.java | {
"start": 1321,
"end": 2686
} | class ____ extends SplunkTest {
@Test
public void testRealtimeSearch() throws Exception {
MockEndpoint searchMock = getMockEndpoint("mock:search-saved");
searchMock.expectedMessageCount(1);
MockEndpoint.assertIsSatisfied(context);
SplunkEvent received = searchMock.getReceivedExchanges().get(0).getIn().getBody(SplunkEvent.class);
assertNotNull(received);
Map<String, String> data = received.getEventData();
assertEquals("value1", data.get("key1"));
assertEquals("value2", data.get("key2"));
assertEquals("value3", data.get("key3"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:submit")
.to("splunk://submit?username=" + SPLUNK_USERNAME + "&password=" + SPLUNK_PASSWORD + "&index=" + INDEX
+ "&sourceType=testSource&source=test")
.to("mock:submit-result");
from("splunk://realtime?delay=5000&username=" + SPLUNK_USERNAME + "&password=" + SPLUNK_PASSWORD
+ "&initEarliestTime=rt-10s&search=search index=" + INDEX
+ " sourcetype=testSource").to("mock:search-saved");
}
};
}
}
| RealtimeSearchManualTest |
java | alibaba__nacos | client/src/main/java/com/alibaba/nacos/client/redo/data/RedoData.java | {
"start": 836,
"end": 3699
} | class ____<T> {
/**
* Expected states for finally.
*
* <ul>
* <li>{@code true} meas the cached data expect registered to server finally.</li>
* <li>{@code false} means unregistered from server.</li>
* </ul>
*/
private volatile boolean expectedRegistered;
/**
* If {@code true} means cached data has been registered to server successfully.
*/
private volatile boolean registered;
/**
* If {@code true} means cached data is unregistering from server.
*/
private volatile boolean unregistering;
private T data;
protected RedoData() {
this.expectedRegistered = true;
}
public void setExpectedRegistered(boolean registered) {
this.expectedRegistered = registered;
}
public boolean isExpectedRegistered() {
return expectedRegistered;
}
public boolean isRegistered() {
return registered;
}
public boolean isUnregistering() {
return unregistering;
}
public void setRegistered(boolean registered) {
this.registered = registered;
}
public void setUnregistering(boolean unregistering) {
this.unregistering = unregistering;
}
public T get() {
return data;
}
public void set(T data) {
this.data = data;
}
public void registered() {
this.registered = true;
this.unregistering = false;
}
public void unregistered() {
this.registered = false;
this.unregistering = true;
}
public boolean isNeedRedo() {
return !RedoType.NONE.equals(getRedoType());
}
/**
* Get redo type for current redo data without expected state.
*
* <ul>
* <li>{@code registered=true} & {@code unregistering=false} means data has registered, so redo should not do anything.</li>
* <li>{@code registered=true} & {@code unregistering=true} means data has registered and now need unregister.</li>
* <li>{@code registered=false} & {@code unregistering=false} means not registered yet, need register again.</li>
* <li>{@code registered=false} & {@code unregistering=true} means not registered yet and not continue to register.</li>
* </ul>
*
* @return redo type
*/
public RedoType getRedoType() {
if (isRegistered() && !isUnregistering()) {
return expectedRegistered ? RedoType.NONE : RedoType.UNREGISTER;
} else if (isRegistered() && isUnregistering()) {
return RedoType.UNREGISTER;
} else if (!isRegistered() && !isUnregistering()) {
return RedoType.REGISTER;
} else {
return expectedRegistered ? RedoType.REGISTER : RedoType.REMOVE;
}
}
public | RedoData |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/internal/ClassLoaderAccessImpl.java | {
"start": 1487,
"end": 1661
} | class ____ load cannot be null" );
}
if ( isSafeClass( name ) ) {
return classLoaderService.classForName( name );
}
else {
// Could not determine that the given | to |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/action/ResolvedIndices.java | {
"start": 1522,
"end": 14033
} | class ____ {
@Nullable
private final SearchContextId searchContextId;
private final Map<String, OriginalIndices> remoteClusterIndices;
@Nullable
private final OriginalIndices localIndices;
private final Map<Index, IndexMetadata> localIndexMetadata;
ResolvedIndices(
Map<String, OriginalIndices> remoteClusterIndices,
@Nullable OriginalIndices localIndices,
Map<Index, IndexMetadata> localIndexMetadata,
@Nullable SearchContextId searchContextId
) {
this.remoteClusterIndices = Collections.unmodifiableMap(remoteClusterIndices);
this.localIndices = localIndices;
this.localIndexMetadata = Collections.unmodifiableMap(localIndexMetadata);
this.searchContextId = searchContextId;
}
ResolvedIndices(
Map<String, OriginalIndices> remoteClusterIndices,
@Nullable OriginalIndices localIndices,
Map<Index, IndexMetadata> localIndexMetadata
) {
this(remoteClusterIndices, localIndices, localIndexMetadata, null);
}
/**
* Get the remote cluster indices, structured as a map where the key is the remote cluster alias.
* <br/>
* <br/>
* NOTE: The returned indices are *not* guaranteed to be concrete indices that exist.
* In addition to simple concrete index names, returned index names can be any combination of the following:
* <ul>
* <li>Aliases</li>
* <li>Wildcards</li>
* <li>Invalid index/alias names</li>
* </ul>
*
* @return The remote cluster indices map
*/
public Map<String, OriginalIndices> getRemoteClusterIndices() {
return remoteClusterIndices;
}
/**
* Get the local cluster indices.
* If the returned value is null, no local cluster indices are referenced.
* If the returned value is an {@link OriginalIndices} instance with an empty or null {@link OriginalIndices#indices()} array,
* potentially all local cluster indices are referenced, depending on if {@link OriginalIndices#indicesOptions()} is configured to
* expand wildcards.
* <br/>
* <br/>
* NOTE: The returned indices are *not* guaranteed to be concrete indices that exist.
* In addition to simple concrete index names, returned index names can be any combination of the following:
* <ul>
* <li>Aliases</li>
* <li>Wildcards</li>
* <li>Invalid index/alias names</li>
* </ul>
*
* @return The local cluster indices
*/
@Nullable
public OriginalIndices getLocalIndices() {
return localIndices;
}
/**
* Get metadata for concrete local cluster indices.
* All indices returned are guaranteed to be concrete indices that exist.
*
* @return Metadata for concrete local cluster indices
*/
public Map<Index, IndexMetadata> getConcreteLocalIndicesMetadata() {
return localIndexMetadata;
}
/**
* Get the concrete local cluster indices.
* All indices returned are guaranteed to be concrete indices that exist.
*
* @return The concrete local cluster indices
*/
public Index[] getConcreteLocalIndices() {
return localIndexMetadata.keySet().toArray(Index[]::new);
}
/**
* Get the search context ID.
* Returns a non-null value only when the instance is created using
* {@link ResolvedIndices#resolveWithPIT(PointInTimeBuilder, IndicesOptions, ProjectMetadata, NamedWriteableRegistry)}.
*
* @return The search context ID
*/
@Nullable
public SearchContextId getSearchContextId() {
return searchContextId;
}
/**
* Create a new {@link ResolvedIndices} instance from an {@link IndicesRequest}.
*
* @param request The indices request
* @param projectMetadata The project holding the indices
* @param indexNameExpressionResolver The index name expression resolver used to resolve concrete local indices
* @param remoteClusterService The remote cluster service used to group remote cluster indices
* @param startTimeInMillis The request start time in milliseconds
* @return a new {@link ResolvedIndices} instance
*/
public static ResolvedIndices resolveWithIndicesRequest(
IndicesRequest request,
ProjectMetadata projectMetadata,
IndexNameExpressionResolver indexNameExpressionResolver,
RemoteClusterService remoteClusterService,
long startTimeInMillis
) {
return resolveWithIndexNamesAndOptions(
request.indices(),
request.indicesOptions(),
projectMetadata,
indexNameExpressionResolver,
remoteClusterService,
startTimeInMillis
);
}
public static ResolvedIndices resolveWithIndexNamesAndOptions(
String[] indexNames,
IndicesOptions indicesOptions,
ProjectMetadata projectMetadata,
IndexNameExpressionResolver indexNameExpressionResolver,
RemoteClusterService remoteClusterService,
long startTimeInMillis
) {
final Map<String, OriginalIndices> remoteClusterIndices = remoteClusterService.groupIndices(indicesOptions, indexNames);
final OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
Index[] concreteLocalIndices = localIndices == null
? Index.EMPTY_ARRAY
: indexNameExpressionResolver.concreteIndices(projectMetadata, localIndices, startTimeInMillis);
// prevent using selectors with remote cluster patterns
for (final var indicesPerRemoteClusterAlias : remoteClusterIndices.entrySet()) {
final String[] indices = indicesPerRemoteClusterAlias.getValue().indices();
if (indices != null) {
for (final String index : indices) {
if (IndexNameExpressionResolver.hasSelectorSuffix(index)) {
throw new InvalidIndexNameException(index, "Selectors are not yet supported on remote cluster patterns");
}
}
}
}
return new ResolvedIndices(
remoteClusterIndices,
localIndices,
resolveLocalIndexMetadata(concreteLocalIndices, projectMetadata, true)
);
}
/**
* Create a new {@link ResolvedIndices} instance from a {@link PointInTimeBuilder}.
*
* @param pit The point-in-time builder
* @param indicesOptions The indices options to propagate to the new {@link ResolvedIndices} instance
* @param projectMetadata The project holding the indices
* @param namedWriteableRegistry The named writeable registry used to decode the search context ID
* @return a new {@link ResolvedIndices} instance
*/
public static ResolvedIndices resolveWithPIT(
PointInTimeBuilder pit,
IndicesOptions indicesOptions,
ProjectMetadata projectMetadata,
NamedWriteableRegistry namedWriteableRegistry
) {
final SearchContextId searchContextId = pit.getSearchContextId(namedWriteableRegistry);
final Map<String, Set<Index>> indicesFromSearchContext = new HashMap<>();
for (var entry : searchContextId.shards().entrySet()) {
String clusterAlias = entry.getValue().getClusterAlias();
if (clusterAlias == null) {
clusterAlias = RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY;
}
indicesFromSearchContext.computeIfAbsent(clusterAlias, s -> new HashSet<>()).add(entry.getKey().getIndex());
}
OriginalIndices localIndices;
Index[] concreteLocalIndices;
Set<Index> localIndicesSet = indicesFromSearchContext.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
if (localIndicesSet != null) {
concreteLocalIndices = localIndicesSet.toArray(Index[]::new);
localIndices = new OriginalIndices(localIndicesSet.stream().map(Index::getName).toArray(String[]::new), indicesOptions);
} else {
concreteLocalIndices = Index.EMPTY_ARRAY;
// Set localIndices to null because a non-null value with a null or 0-length indices array will be resolved to all indices by
// IndexNameExpressionResolver
localIndices = null;
}
Map<String, OriginalIndices> remoteClusterIndices = new HashMap<>();
for (var entry : indicesFromSearchContext.entrySet()) {
OriginalIndices originalIndices = new OriginalIndices(
entry.getValue().stream().map(Index::getName).toArray(String[]::new),
indicesOptions
);
remoteClusterIndices.put(entry.getKey(), originalIndices);
}
// Don't fail on missing indices to handle point-in-time requests that reference deleted indices
return new ResolvedIndices(
remoteClusterIndices,
localIndices,
resolveLocalIndexMetadata(concreteLocalIndices, projectMetadata, false),
searchContextId
);
}
/**
* Create a new {@link ResolvedIndices} instance from a Map of Projects to {@link ResolvedIndexExpressions}. This is intended to be
* used for Cross-Project Search (CPS).
*
* @param localIndices this value is set as-is in the resulting ResolvedIndices.
* @param localIndexMetadata this value is set as-is in the resulting ResolvedIndices.
* @param remoteExpressions the map of project names to {@link ResolvedIndexExpressions}. This map is used to create the
* {@link ResolvedIndices#getRemoteClusterIndices()} for the resulting ResolvedIndices. Each project keyed
* in the map is guaranteed to have at least one index for the index expression provided by the user.
* The resulting {@link ResolvedIndices#getRemoteClusterIndices()} will map to the original index expression
* provided by the user. For example, if the user requested "logs" and "project-1" resolved that to "logs-1",
* then the result will map "project-1" to "logs". We rely on the remote search request to expand "logs" back
* to "logs-1".
* @param indicesOptions this value is set as-is in the resulting ResolvedIndices.
*/
public static ResolvedIndices resolveWithIndexExpressions(
OriginalIndices localIndices,
Map<Index, IndexMetadata> localIndexMetadata,
Map<String, ResolvedIndexExpressions> remoteExpressions,
IndicesOptions indicesOptions
) {
Map<String, OriginalIndices> remoteIndices = remoteExpressions.entrySet().stream().collect(HashMap::new, (map, entry) -> {
var indices = entry.getValue().expressions().stream().filter(expression -> {
var resolvedExpressions = expression.localExpressions();
var successfulResolution = resolvedExpressions
.localIndexResolutionResult() == ResolvedIndexExpression.LocalIndexResolutionResult.SUCCESS;
// if the expression is a wildcard, it will be successful even if there are no indices, so filter for no indices
var hasResolvedIndices = resolvedExpressions.indices().isEmpty() == false;
return successfulResolution && hasResolvedIndices;
}).map(ResolvedIndexExpression::original).toArray(String[]::new);
if (indices.length > 0) {
map.put(entry.getKey(), new OriginalIndices(indices, indicesOptions));
}
}, Map::putAll);
return new ResolvedIndices(remoteIndices, localIndices, localIndexMetadata);
}
private static Map<Index, IndexMetadata> resolveLocalIndexMetadata(
Index[] concreteLocalIndices,
ProjectMetadata projectMetadata,
boolean failOnMissingIndex
) {
Map<Index, IndexMetadata> localIndexMetadata = new HashMap<>();
for (Index index : concreteLocalIndices) {
IndexMetadata indexMetadata = projectMetadata.index(index);
if (indexMetadata == null) {
if (failOnMissingIndex) {
throw new IndexNotFoundException(index);
}
continue;
}
localIndexMetadata.put(index, indexMetadata);
}
return localIndexMetadata;
}
}
| ResolvedIndices |
java | google__guava | android/guava-tests/test/com/google/common/primitives/ImmutableDoubleArrayTest.java | {
"start": 18875,
"end": 19182
} | class ____ extends TestDoubleListGenerator {
@Override
protected List<Double> create(Double[] elements) {
return makeArray(elements).asList();
}
}
@J2ktIncompatible
@GwtIncompatible // used only from suite
@AndroidIncompatible
public static final | ImmutableDoubleArrayAsListGenerator |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/JoinFetchInheritanceTest.java | {
"start": 7788,
"end": 8096
} | class ____ {
@Id
private Long id;
public Animal() {
}
public Animal(Long id) {
this.id = id;
}
@Column( name = "type", insertable = false, updatable = false )
private String type;
public Long getId() {
return id;
}
public String getType() {
return type;
}
}
public | Animal |
java | apache__rocketmq | broker/src/main/java/org/apache/rocketmq/broker/transaction/TransactionMetricsFlushService.java | {
"start": 1129,
"end": 2387
} | class ____ extends ServiceThread {
private static final Logger log = LoggerFactory.getLogger(LoggerName.TRANSACTION_LOGGER_NAME);
private BrokerController brokerController;
public TransactionMetricsFlushService(BrokerController brokerController) {
this.brokerController = brokerController;
}
@Override
public String getServiceName() {
return "TransactionFlushService";
}
@Override
public void run() {
log.info(this.getServiceName() + " service start");
long start = System.currentTimeMillis();
while (!this.isStopped()) {
try {
if (System.currentTimeMillis() - start > brokerController.getBrokerConfig().getTransactionMetricFlushInterval()) {
start = System.currentTimeMillis();
brokerController.getTransactionalMessageService().getTransactionMetrics().persist();
waitForRunning(brokerController.getBrokerConfig().getTransactionMetricFlushInterval());
}
} catch (Throwable e) {
log.error("Error occurred in " + getServiceName(), e);
}
}
log.info(this.getServiceName() + " service end");
}
} | TransactionMetricsFlushService |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestTimelineWriterHBaseDown.java | {
"start": 1720,
"end": 1780
} | class ____ HbaseTimelineWriter with HBase Down.
*/
public | tests |
java | micronaut-projects__micronaut-core | http-netty/src/main/java/io/micronaut/http/netty/body/NettyJsonStreamHandler.java | {
"start": 1833,
"end": 4537
} | class ____<T> implements MessageBodyHandler<T>, ChunkedMessageBodyReader<T>, CustomizableJsonHandler {
private final JsonMessageHandler<T> jsonMessageHandler;
public NettyJsonStreamHandler(JsonMapper jsonMapper) {
this(new JsonMessageHandler<>(jsonMapper));
}
private NettyJsonStreamHandler(JsonMessageHandler<T> jsonMessageHandler) {
this.jsonMessageHandler = jsonMessageHandler;
}
@Override
public CustomizableJsonHandler customize(JsonFeatures jsonFeatures) {
return new NettyJsonStreamHandler<>(jsonMessageHandler.getJsonMapper().cloneWithFeatures(jsonFeatures));
}
@Override
public boolean isReadable(Argument<T> type, MediaType mediaType) {
return mediaType.matches(MediaType.APPLICATION_JSON_STREAM_TYPE);
}
@Override
public T read(Argument<T> type, MediaType mediaType, Headers httpHeaders, ByteBuffer<?> byteBuffer) throws CodecException {
if (!type.getType().isAssignableFrom(List.class)) {
throw new IllegalArgumentException("Can only read json-stream to a Publisher or list type");
}
//noinspection unchecked
return (T) readChunked((Argument<T>) type.getFirstTypeVariable().orElse(type), mediaType, httpHeaders, Flux.just(byteBuffer)).collectList().block();
}
@Override
public T read(Argument<T> type, MediaType mediaType, Headers httpHeaders, InputStream inputStream) throws CodecException {
throw new UnsupportedOperationException("Reading from InputStream is not supported for json-stream");
}
@Override
public Flux<T> readChunked(Argument<T> type, MediaType mediaType, Headers httpHeaders, Publisher<ByteBuffer<?>> input) {
JsonChunkedProcessor processor = new JsonChunkedProcessor();
return processor.process(Flux.from(input).map(bb -> {
if (!(bb.asNativeBuffer() instanceof ByteBuf buf)) {
throw new IllegalArgumentException("Only netty buffers are supported");
}
return buf;
})).map(bb -> jsonMessageHandler.read(type, mediaType, httpHeaders, bb));
}
@Override
public void writeTo(Argument<T> type, MediaType mediaType, T object, MutableHeaders outgoingHeaders, OutputStream outputStream) throws CodecException {
jsonMessageHandler.writeTo(type, mediaType, object, outgoingHeaders, outputStream);
}
@Override
public ByteBuffer<?> writeTo(Argument<T> type, MediaType mediaType, T object, MutableHeaders outgoingHeaders, ByteBufferFactory<?, ?> bufferFactory) throws CodecException {
return jsonMessageHandler.writeTo(type, mediaType, object, outgoingHeaders, bufferFactory);
}
}
| NettyJsonStreamHandler |
java | apache__flink | flink-core/src/main/java/org/apache/flink/core/memory/MemorySegmentInputStreamWithPos.java | {
"start": 1025,
"end": 2909
} | class ____ extends InputStream {
private MemorySegment segment;
private int position;
private int count;
private int mark;
public MemorySegmentInputStreamWithPos(MemorySegment segment, int offset, int length) {
setSegment(segment, offset, length);
}
@Override
public int read() {
return (position < count) ? 0xFF & (segment.get(position++)) : -1;
}
@Override
public int read(@Nonnull byte[] b, int off, int len) {
if (position >= count) {
return -1; // signal EOF
}
if (len <= 0) {
return 0;
}
final int numBytes = Math.min(count - position, len);
segment.get(position, b, off, numBytes);
position += numBytes;
return numBytes;
}
@Override
public long skip(long toSkip) {
long remain = count - position;
if (toSkip < remain) {
remain = toSkip < 0 ? 0 : toSkip;
}
position += remain;
return remain;
}
@Override
public boolean markSupported() {
return true;
}
@Override
public void mark(int readAheadLimit) {
mark = position;
}
@Override
public void reset() {
position = mark;
}
@Override
public int available() {
return count - position;
}
@Override
public void close() {}
public int getPosition() {
return position;
}
public void setPosition(int pos) {
Preconditions.checkArgument(pos >= 0 && pos <= count, "Position out of bounds.");
this.position = pos;
}
public void setSegment(MemorySegment segment, int offset, int length) {
this.count = Math.min(segment.size(), offset + length);
setPosition(offset);
this.segment = segment;
this.mark = offset;
}
}
| MemorySegmentInputStreamWithPos |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/bean/BeanInfoTest.java | {
"start": 6160,
"end": 6305
} | interface ____ {
void inOutMethod();
@Pattern(ExchangePattern.InOnly)
void inOnlyMethod();
}
@InOnly
public | Foo |
java | spring-projects__spring-framework | spring-r2dbc/src/test/java/org/springframework/r2dbc/core/R2dbcDataClassRowMapperTests.java | {
"start": 3762,
"end": 4121
} | class ____ extends ConstructorPerson {
private final List<BigDecimal> balance;
public ConstructorPersonWithGenerics(String name, long age, Date birth_date, List<BigDecimal> balance) {
super(name, age, birth_date);
this.balance = balance;
}
public List<BigDecimal> balance() {
return this.balance;
}
}
static | ConstructorPersonWithGenerics |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/converter/json/MappingJacksonValue.java | {
"start": 1450,
"end": 3243
} | class ____ {
private Object value;
private @Nullable Class<?> serializationView;
private @Nullable FilterProvider filters;
/**
* Create a new instance wrapping the given POJO to be serialized.
* @param value the Object to be serialized
*/
public MappingJacksonValue(Object value) {
this.value = value;
}
/**
* Modify the POJO to serialize.
*/
public void setValue(Object value) {
this.value = value;
}
/**
* Return the POJO that needs to be serialized.
*/
public Object getValue() {
return this.value;
}
/**
* Set the serialization view to serialize the POJO with.
* @see com.fasterxml.jackson.databind.ObjectMapper#writerWithView(Class)
* @see com.fasterxml.jackson.annotation.JsonView
*/
public void setSerializationView(@Nullable Class<?> serializationView) {
this.serializationView = serializationView;
}
/**
* Return the serialization view to use.
* @see com.fasterxml.jackson.databind.ObjectMapper#writerWithView(Class)
* @see com.fasterxml.jackson.annotation.JsonView
*/
public @Nullable Class<?> getSerializationView() {
return this.serializationView;
}
/**
* Set the Jackson filter provider to serialize the POJO with.
* @since 4.2
* @see com.fasterxml.jackson.databind.ObjectMapper#writer(FilterProvider)
* @see com.fasterxml.jackson.annotation.JsonFilter
* @see Jackson2ObjectMapperBuilder#filters(FilterProvider)
*/
public void setFilters(@Nullable FilterProvider filters) {
this.filters = filters;
}
/**
* Return the Jackson filter provider to use.
* @since 4.2
* @see com.fasterxml.jackson.databind.ObjectMapper#writer(FilterProvider)
* @see com.fasterxml.jackson.annotation.JsonFilter
*/
public @Nullable FilterProvider getFilters() {
return this.filters;
}
}
| MappingJacksonValue |
java | spring-projects__spring-boot | module/spring-boot-micrometer-tracing-brave/src/test/java/org/springframework/boot/micrometer/tracing/brave/autoconfigure/zipkin/ZipkinWithBraveTracingAutoConfigurationTests.java | {
"start": 7183,
"end": 7375
} | class ____ {
@Bean
SpanHandler customSpanHandler() {
return mock(SpanHandler.class);
}
}
@Configuration(proxyBeanMethods = false)
private static final | CustomSpanHandlerConfiguration |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/ext/javatime/ser/LocalDateSerTest.java | {
"start": 1231,
"end": 1457
} | class ____ {
@JsonFormat(shape=JsonFormat.Shape.NUMBER_INT)
public LocalDate value;
public EpochDayWrapper() { }
public EpochDayWrapper(LocalDate v) { value = v; }
}
static | EpochDayWrapper |
java | FasterXML__jackson-databind | src/main/java/tools/jackson/databind/deser/impl/ExternalTypeHandler.java | {
"start": 488,
"end": 844
} | class ____ is used to flatten JSON structure when using
* "external type id" (see {@link com.fasterxml.jackson.annotation.JsonTypeInfo.As#EXTERNAL_PROPERTY}).
* This is needed to store temporary state and buffer tokens, as the structure is
* rearranged a bit so that actual type deserializer can resolve type and
* finalize deserialization.
*/
public | that |
java | elastic__elasticsearch | qa/smoke-test-http/src/javaRestTest/java/org/elasticsearch/http/AutoCreateIndexIT.java | {
"start": 1227,
"end": 6411
} | class ____ extends AbstractHttpSmokeTestIT {
/**
* Check that setting {@link AutoCreateIndex#AUTO_CREATE_INDEX_SETTING} to <code>false</code>
* disable the automatic creation on indices.
*/
public void testCannotAutoCreateIndexWhenDisabled() throws IOException {
configureAutoCreateIndex(false);
// Attempt to add a document to a non-existing index. Auto-creating the index should fail owing to the setting above.
final Request indexDocumentRequest = new Request("POST", "recipe_kr/_doc/123456");
indexDocumentRequest.setJsonEntity("{ \"name\": \"Kimchi\" }");
final ResponseException responseException = expectThrows(ResponseException.class, this::indexDocument);
assertThat(
Streams.copyToString(new InputStreamReader(responseException.getResponse().getEntity().getContent(), UTF_8)),
containsString("no such index [recipe_kr] and [action.auto_create_index] is [false]")
);
}
/**
* Check that automatically creating an index is allowed, even when {@link AutoCreateIndex#AUTO_CREATE_INDEX_SETTING}
* is <code>false</code>, when the index name matches a template and that template has <code>allow_auto_create</code>
* set to <code>true</code>.
*/
public void testCanAutoCreateIndexWhenAllowedByTemplate() throws IOException {
configureAutoCreateIndex(false);
createTemplateWithAllowAutoCreate(true);
// Attempt to add a document to a non-existing index. Auto-creating the index should succeed because the index name
// matches the template pattern
assertOK(this.indexDocument());
}
/**
* Check that automatically creating an index is disallowed when the index name matches a template and that template has
* <code>allow_auto_create</code> explicitly to <code>false</code>, even when {@link AutoCreateIndex#AUTO_CREATE_INDEX_SETTING}
* is set to <code>true</code>.
*/
public void testCannotAutoCreateIndexWhenDisallowedByTemplate() throws IOException {
configureAutoCreateIndex(true);
createTemplateWithAllowAutoCreate(false);
// Attempt to add a document to a non-existing index. Auto-creating the index should succeed because the index name
// matches the template pattern
final ResponseException responseException = expectThrows(ResponseException.class, this::indexDocument);
assertThat(
Streams.copyToString(new InputStreamReader(responseException.getResponse().getEntity().getContent(), UTF_8)),
containsString("no such index [composable template [recipe*] forbids index auto creation]")
);
}
public void testRequireAliasImplicitValueIsTrue() throws IOException {
final Request indexDocumentRequest = new Request("POST", "/_bulk?require_alias");
indexDocumentRequest.setJsonEntity("""
{ "index" : { "_index" : "test", "_id" : "1" } }
{ "field1" : "value1" }
""");
String resp = EntityUtils.toString(client().performRequest(indexDocumentRequest).getEntity());
Map<String, Object> respDoc = XContentHelper.convertToMap(JsonXContent.jsonXContent, resp, false);
assertTrue("there should be errors in the bulk response", ObjectPath.eval("errors", respDoc));
assertThat(
"there should be errors in the bulk response",
"" + ObjectPath.eval("items.0.index.error.reason", respDoc),
containsString("no such index [test] and [require_alias] request flag is [true] and [test] is not an alias")
);
}
private void configureAutoCreateIndex(boolean value) throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder()
.startObject()
.startObject("persistent")
.field(AutoCreateIndex.AUTO_CREATE_INDEX_SETTING.getKey(), value)
.endObject()
.endObject();
final Request settingsRequest = new Request("PUT", "_cluster/settings");
settingsRequest.setJsonEntity(Strings.toString(builder));
final Response settingsResponse = client().performRequest(settingsRequest);
assertOK(settingsResponse);
}
private void createTemplateWithAllowAutoCreate(Boolean allowAutoCreate) throws IOException {
XContentBuilder builder = JsonXContent.contentBuilder()
.startObject()
.array("index_patterns", "recipe*")
.field("allow_auto_create", allowAutoCreate)
.endObject();
final Request createTemplateRequest = new Request("PUT", "_index_template/recipe_template");
createTemplateRequest.setJsonEntity(Strings.toString(builder));
final Response createTemplateResponse = client().performRequest(createTemplateRequest);
assertOK(createTemplateResponse);
}
private Response indexDocument() throws IOException {
final Request indexDocumentRequest = new Request("POST", "recipe_kr/_doc/123456");
indexDocumentRequest.setJsonEntity("{ \"name\": \"Kimchi\" }");
return client().performRequest(indexDocumentRequest);
}
}
| AutoCreateIndexIT |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/constraint/api/ConstraintPlacementAlgorithmOutput.java | {
"start": 1372,
"end": 2056
} | class ____ {
private final ApplicationId applicationId;
public ConstraintPlacementAlgorithmOutput(ApplicationId applicationId) {
this.applicationId = applicationId;
}
private final List<PlacedSchedulingRequest> placedRequests =
new ArrayList<>();
private final List<SchedulingRequestWithPlacementAttempt> rejectedRequests =
new ArrayList<>();
public List<PlacedSchedulingRequest> getPlacedRequests() {
return placedRequests;
}
public List<SchedulingRequestWithPlacementAttempt> getRejectedRequests() {
return rejectedRequests;
}
public ApplicationId getApplicationId() {
return applicationId;
}
}
| ConstraintPlacementAlgorithmOutput |
java | alibaba__nacos | api/src/test/java/com/alibaba/nacos/api/config/remote/request/ClientConfigMetricRequestTest.java | {
"start": 1242,
"end": 4247
} | class ____ extends BasedConfigRequestTest {
@Override
@Test
public void testSerialize() throws JsonProcessingException {
ClientConfigMetricRequest clientMetrics = new ClientConfigMetricRequest();
clientMetrics.putAllHeader(HEADERS);
clientMetrics.getMetricsKeys()
.add(ClientConfigMetricRequest.MetricsKey.build(CACHE_DATA, String.join("+", KEY)));
clientMetrics.getMetricsKeys()
.add(ClientConfigMetricRequest.MetricsKey.build(SNAPSHOT_DATA, String.join("+", KEY)));
final String requestId = injectRequestUuId(clientMetrics);
String json = mapper.writeValueAsString(clientMetrics);
assertTrue(json.contains("\"type\":\"" + "cacheData" + "\""));
assertTrue(json.contains("\"type\":\"" + "snapshotData" + "\""));
assertTrue(json.contains("\"key\":\"" + String.join("+", KEY) + "\""));
assertTrue(json.contains("\"module\":\"" + Constants.Config.CONFIG_MODULE));
assertTrue(json.contains("\"requestId\":\"" + requestId));
}
@Override
@Test
public void testDeserialize() throws JsonProcessingException {
String json =
"{\"headers\":{\"header1\":\"test_header1\"}," + "\"metricsKeys\":[{\"type\":\"cacheData\",\"key\":"
+ "\"test_data+group+test_tenant\"},{\"type\":\"snapshotData\","
+ "\"key\":\"test_data+group+test_tenant\"}],\"module\":\"config\"}";
ClientConfigMetricRequest actual = mapper.readValue(json, ClientConfigMetricRequest.class);
assertEquals(2, actual.getMetricsKeys().size());
assertEquals(Constants.Config.CONFIG_MODULE, actual.getModule());
assertEquals(HEADER_VALUE, actual.getHeader(HEADER_KEY));
}
@Test
void testMetricsKeysEquals() {
String dataKey = String.join("+", KEY);
ClientConfigMetricRequest.MetricsKey key = ClientConfigMetricRequest.MetricsKey.build(CACHE_DATA, dataKey);
assertEquals(key, key);
assertNotEquals(null, key);
assertNotEquals(key, new ClientConfigMetricRequest());
ClientConfigMetricRequest.MetricsKey newOne = ClientConfigMetricRequest.MetricsKey.build(SNAPSHOT_DATA,
dataKey);
assertNotEquals(key, newOne);
newOne.setType(CACHE_DATA);
assertEquals(key, newOne);
}
@Test
void testMetricsHashCode() {
String dataKey = String.join("+", KEY);
ClientConfigMetricRequest.MetricsKey key = ClientConfigMetricRequest.MetricsKey.build(CACHE_DATA, dataKey);
assertEquals(Objects.hash(CACHE_DATA, dataKey), key.hashCode());
}
@Test
void testMetricsToString() {
ClientConfigMetricRequest.MetricsKey key = ClientConfigMetricRequest.MetricsKey.build(CACHE_DATA,
String.join("+", KEY));
assertEquals("MetricsKey{type='cacheData', key='test_data+group+test_tenant'}", key.toString());
}
}
| ClientConfigMetricRequestTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceBlacklistRequest.java | {
"start": 1465,
"end": 3128
} | class ____ {
@Public
@Stable
public static ResourceBlacklistRequest newInstance(
List<String> additions, List<String> removals) {
ResourceBlacklistRequest blacklistRequest =
Records.newRecord(ResourceBlacklistRequest.class);
blacklistRequest.setBlacklistAdditions(additions);
blacklistRequest.setBlacklistRemovals(removals);
return blacklistRequest;
}
/**
* Get the list of resource-names which should be added to the
* application blacklist.
*
* @return list of resource-names which should be added to the
* application blacklist
*/
@Public
@Stable
public abstract List<String> getBlacklistAdditions();
/**
* Set list of resource-names which should be added to the application blacklist.
*
* @param resourceNames list of resource-names which should be added to the
* application blacklist
*/
@Public
@Stable
public abstract void setBlacklistAdditions(List<String> resourceNames);
/**
* Get the list of resource-names which should be removed from the
* application blacklist.
*
* @return list of resource-names which should be removed from the
* application blacklist
*/
@Public
@Stable
public abstract List<String> getBlacklistRemovals();
/**
* Set list of resource-names which should be removed from the
* application blacklist.
*
* @param resourceNames list of resource-names which should be removed from the
* application blacklist
*/
@Public
@Stable
public abstract void setBlacklistRemovals(List<String> resourceNames);
}
| ResourceBlacklistRequest |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/ContextTestSupport.java | {
"start": 23040,
"end": 35995
} | class ____ any of the following annotations on the class-level.
*/
protected boolean hasClassAnnotation(String... names) {
for (String name : names) {
for (Annotation ann : getClass().getAnnotations()) {
String annName = ann.annotationType().getName();
if (annName.equals(name)) {
return true;
}
}
}
return false;
}
protected void stopCamelContext() {
doStopCamelContext(context, camelContextService);
}
protected void doStopCamelContext(CamelContext context, Service camelContextService) {
if (camelContextService != null) {
if (camelContextService == THREAD_SERVICE.get()) {
THREAD_SERVICE.remove();
}
camelContextService.stop();
} else {
if (context != null) {
if (context == THREAD_CAMEL_CONTEXT.get()) {
THREAD_CAMEL_CONTEXT.remove();
}
context.stop();
}
}
}
private static void doStopTemplates(
ConsumerTemplate consumer, ProducerTemplate template, FluentProducerTemplate fluentTemplate) {
if (consumer != null) {
if (consumer == THREAD_CONSUMER.get()) {
THREAD_CONSUMER.remove();
}
consumer.stop();
}
if (template != null) {
if (template == THREAD_TEMPLATE.get()) {
THREAD_TEMPLATE.remove();
}
template.stop();
}
if (fluentTemplate != null) {
if (fluentTemplate == THREAD_FLUENT_TEMPLATE.get()) {
THREAD_FLUENT_TEMPLATE.remove();
}
fluentTemplate.stop();
}
}
protected void startCamelContext() {
if (camelContextService != null) {
camelContextService.start();
} else {
if (context instanceof DefaultCamelContext defaultCamelContext) {
if (!defaultCamelContext.isStarted()) {
defaultCamelContext.start();
}
} else {
context.start();
}
}
}
protected CamelContext createCamelContext() throws Exception {
Registry registry = createCamelRegistry();
CamelContext retContext;
if (registry != null) {
retContext = new DefaultCamelContext(registry);
} else {
retContext = new DefaultCamelContext();
}
retContext.setLoadTypeConverters(isLoadTypeConverters());
return retContext;
}
protected Context createJndiContext() throws Exception {
return JndiTest.createInitialContext();
}
/**
* Allows to bind custom beans to the Camel {@link Registry}.
*/
protected void bindToRegistry(Registry registry) {
// noop
}
/**
* Override to use a custom {@link Registry}.
* <p>
* However if you need to bind beans to the registry then this is possible already with the bind method on registry,
* and there is no need to override this method.
*/
protected Registry createCamelRegistry() throws Exception {
return new DefaultRegistry();
}
/**
* Factory method which derived classes can use to create a {@link RouteBuilder} to define the routes for testing
*/
protected RoutesBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
@Override
public void configure() {
// no routes added by default
}
};
}
/**
* Factory method which derived classes can use to create an array of {@link org.apache.camel.builder.RouteBuilder}s
* to define the routes for testing
*
* @see #createRouteBuilder()
*/
protected RoutesBuilder[] createRouteBuilders() throws Exception {
return new RoutesBuilder[] { createRouteBuilder() };
}
/**
* Resolves a mandatory endpoint for the given URI or an exception is thrown
*
* @param uri the Camel <a href="">URI</a> to use to create or resolve an endpoint
* @return the endpoint
*/
protected Endpoint resolveMandatoryEndpoint(String uri) {
return TestSupport.resolveMandatoryEndpoint(context, uri);
}
/**
* Resolves a mandatory endpoint for the given URI and expected type or an exception is thrown
*
* @param uri the Camel <a href="">URI</a> to use to create or resolve an endpoint
* @return the endpoint
*/
protected <T extends Endpoint> T resolveMandatoryEndpoint(String uri, Class<T> endpointType) {
return TestSupport.resolveMandatoryEndpoint(context, uri, endpointType);
}
/**
* Resolves the mandatory Mock endpoint using a URI of the form <code>mock:someName</code>
*
* @param uri the URI which typically starts with "mock:" and has some name
* @return the mandatory mock endpoint or an exception is thrown if it could not be resolved
*/
protected MockEndpoint getMockEndpoint(String uri) {
return getMockEndpoint(uri, true);
}
/**
* Resolves the {@link MockEndpoint} using a URI of the form <code>mock:someName</code>, optionally creating it if
* it does not exist. This implementation will lookup existing mock endpoints and match on the mock queue name, eg
* mock:foo and mock:foo?retainFirst=5 would match as the queue name is foo.
*
* @param uri the URI which typically starts with "mock:" and has some name
* @param create whether or not to allow the endpoint to be created if it doesn't exist
* @return the mock endpoint or an {@link NoSuchEndpointException} is thrown if it could not
* be resolved
* @throws NoSuchEndpointException is the mock endpoint does not exist
*/
protected MockEndpoint getMockEndpoint(String uri, boolean create) throws NoSuchEndpointException {
// look for existing mock endpoints that have the same queue name, and
// to
// do that we need to normalize uri and strip out query parameters and
// whatnot
String n;
try {
n = URISupport.normalizeUri(uri);
} catch (URISyntaxException e) {
throw RuntimeCamelException.wrapRuntimeException(e);
}
// strip query
final String target = StringHelper.before(n, "?", n);
// lookup endpoints in registry and try to find it
MockEndpoint found = (MockEndpoint) context.getEndpointRegistry().values().stream()
.filter(e -> e instanceof MockEndpoint).filter(e -> {
String t = e.getEndpointUri();
// strip query
int idx2 = t.indexOf('?');
if (idx2 != -1) {
t = t.substring(0, idx2);
}
return t.equals(target);
}).findFirst().orElse(null);
if (found != null) {
return found;
}
if (create) {
return resolveMandatoryEndpoint(uri, MockEndpoint.class);
} else {
throw new NoSuchEndpointException(String.format("MockEndpoint %s does not exist.", uri));
}
}
/**
* Sends a message to the given endpoint URI with the body value
*
* @param endpointUri the URI of the endpoint to send to
* @param body the body for the message
*/
protected void sendBody(String endpointUri, final Object body) {
template.send(endpointUri, exchange -> {
Message in = exchange.getIn();
in.setBody(body);
});
}
/**
* Sends a message to the given endpoint URI with the body value and specified headers
*
* @param endpointUri the URI of the endpoint to send to
* @param body the body for the message
* @param headers any headers to set on the message
*/
protected void sendBody(String endpointUri, final Object body, final Map<String, Object> headers) {
template.send(endpointUri, exchange -> {
Message in = exchange.getIn();
in.setBody(body);
for (Map.Entry<String, Object> entry : headers.entrySet()) {
in.setHeader(entry.getKey(), entry.getValue());
}
});
}
/**
* Sends messages to the given endpoint for each of the specified bodies
*
* @param endpointUri the endpoint URI to send to
* @param bodies the bodies to send, one per message
*/
protected void sendBodies(String endpointUri, Object... bodies) {
for (Object body : bodies) {
sendBody(endpointUri, body);
}
}
    /**
     * Creates an exchange with the given body, using the current context.
     */
    protected Exchange createExchangeWithBody(Object body) {
        // delegate to the context-aware overload with this test's CamelContext
        return createExchangeWithBody(context, body);
    }
    /**
     * Reset all Mock endpoints.
     */
    protected void resetMocks() {
        // clears expectations and received exchanges on every MockEndpoint in the context
        MockEndpoint.resetMocks(context);
    }
    /**
     * Asserts that all the expectations of the Mock endpoints are valid.
     *
     * @throws InterruptedException if interrupted while waiting for the expectations to be met
     */
    protected void assertMockEndpointsSatisfied() throws InterruptedException {
        MockEndpoint.assertIsSatisfied(context);
    }
    /**
     * Asserts that all the expectations of the Mock endpoints are valid, waiting at most the given time.
     *
     * @param timeout timeout
     * @param unit    time unit
     * @throws InterruptedException if interrupted while waiting for the expectations to be met
     */
    protected void assertMockEndpointsSatisfied(long timeout, TimeUnit unit) throws InterruptedException {
        MockEndpoint.assertIsSatisfied(context, timeout, unit);
    }
/**
* Asserts that the given language name and expression evaluates to the given value on a specific exchange
*/
protected void assertExpression(Exchange exchange, String languageName, String expressionText, Object expectedValue) {
Language language = assertResolveLanguage(languageName);
Expression expression = language.createExpression(expressionText);
assertNotNull(expression, "No Expression could be created for text: " + expressionText + " language: " + language);
TestSupport.assertExpression(expression, exchange, expectedValue);
}
/**
* Asserts that the given language name and predicate expression evaluates to the expected value on the message
* exchange
*/
protected void assertPredicate(String languageName, String expressionText, Exchange exchange, boolean expected) {
Language language = assertResolveLanguage(languageName);
Predicate predicate = language.createPredicate(expressionText);
assertNotNull(predicate, "No Predicate could be created for text: " + expressionText + " language: " + language);
TestSupport.assertPredicate(predicate, exchange, expected);
}
/**
* Asserts that the language name can be resolved
*/
protected Language assertResolveLanguage(String languageName) {
Language language = context.resolveLanguage(languageName);
assertNotNull(language, "No language found for name: " + languageName);
return language;
}
    /**
     * Asserts that the given {@link CamelContext} is not null.
     */
    protected void assertValidContext(CamelContext context) {
        assertNotNull(context, "No context found!");
    }
protected <T extends Endpoint> T getMandatoryEndpoint(String uri, Class<T> type) {
T endpoint = context.getEndpoint(uri, type);
assertNotNull(endpoint, "No endpoint found for uri: " + uri);
return endpoint;
}
protected Endpoint getMandatoryEndpoint(String uri) {
Endpoint endpoint = context.getEndpoint(uri);
assertNotNull(endpoint, "No endpoint found for uri: " + uri);
return endpoint;
}
    /**
     * Disables the JMX agent. Must be called before the {@link #setUp()} method.
     */
    protected void disableJMX() {
        // note: this toggles a global (static) flag on DefaultCamelContext
        DefaultCamelContext.setDisableJmx(true);
    }
    /**
     * Enables the JMX agent. Must be called before the {@link #setUp()} method.
     */
    protected void enableJMX() {
        // note: this toggles a global (static) flag on DefaultCamelContext
        DefaultCamelContext.setDisableJmx(false);
    }
    /**
     * Single step debugs and Camel invokes this method before entering the given processor.
     * Intentionally empty: subclasses override this callback to hook into the debugger.
     */
    protected void debugBefore(
            Exchange exchange, Processor processor, ProcessorDefinition<?> definition, String id, String label) {
    }
    /**
     * Single step debugs and Camel invokes this method after processing the given processor.
     * Intentionally empty: subclasses override this callback to hook into the debugger.
     */
    protected void debugAfter(
            Exchange exchange, Processor processor, ProcessorDefinition<?> definition, String id, String label,
            long timeTaken) {
    }
/**
* To easily debug by overriding the <tt>debugBefore</tt> and <tt>debugAfter</tt> methods.
*/
private | have |
java | quarkusio__quarkus | test-framework/common/src/test/java/io/quarkus/test/common/TestResourceManagerTest.java | {
"start": 2926,
"end": 3622
} | class ____ implements QuarkusTestResourceLifecycleManager {
@Override
public Map<String, String> start() {
return Collections.emptyMap();
}
@Override
public void inject(Object instance) {
if (instance instanceof AtomicInteger) {
((AtomicInteger) instance).incrementAndGet();
}
}
@Override
public void stop() {
}
}
@WithTestResource(value = FirstSequentialQuarkusTestResource.class, scope = TestResourceScope.GLOBAL)
@WithTestResource(value = SecondSequentialQuarkusTestResource.class, scope = TestResourceScope.GLOBAL)
public static | SecondLifecycleManager |
java | elastic__elasticsearch | x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/request/GoogleVertexAiEmbeddingsRequest.java | {
"start": 998,
"end": 3361
} | class ____ implements GoogleVertexAiRequest {
private final Truncator truncator;
private final Truncator.TruncationResult truncationResult;
private final InputType inputType;
private final GoogleVertexAiEmbeddingsModel model;
public GoogleVertexAiEmbeddingsRequest(
Truncator truncator,
Truncator.TruncationResult input,
InputType inputType,
GoogleVertexAiEmbeddingsModel model
) {
this.truncator = Objects.requireNonNull(truncator);
this.truncationResult = Objects.requireNonNull(input);
this.inputType = inputType;
this.model = Objects.requireNonNull(model);
}
@Override
public HttpRequest createHttpRequest() {
HttpPost httpPost = new HttpPost(model.nonStreamingUri());
ByteArrayEntity byteEntity = new ByteArrayEntity(
Strings.toString(
new GoogleVertexAiEmbeddingsRequestEntity(
truncationResult.input(),
inputType,
model.getTaskSettings(),
model.getServiceSettings()
)
).getBytes(StandardCharsets.UTF_8)
);
httpPost.setEntity(byteEntity);
httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType());
decorateWithAuth(httpPost);
return new HttpRequest(httpPost, getInferenceEntityId());
}
public void decorateWithAuth(HttpPost httpPost) {
GoogleVertexAiRequest.decorateWithBearerToken(httpPost, model.getSecretSettings());
}
Truncator truncator() {
return truncator;
}
Truncator.TruncationResult truncationResult() {
return truncationResult;
}
GoogleVertexAiEmbeddingsModel model() {
return model;
}
@Override
public String getInferenceEntityId() {
return model.getInferenceEntityId();
}
@Override
public URI getURI() {
return model.nonStreamingUri();
}
@Override
public Request truncate() {
var truncatedInput = truncator.truncate(truncationResult.input());
return new GoogleVertexAiEmbeddingsRequest(truncator, truncatedInput, inputType, model);
}
@Override
public boolean[] getTruncationInfo() {
return truncationResult.truncated().clone();
}
}
| GoogleVertexAiEmbeddingsRequest |
java | micronaut-projects__micronaut-core | inject/src/main/java/io/micronaut/context/i18n/ResourceBundleMessageSource.java | {
"start": 1148,
"end": 4785
} | class ____ extends AbstractMessageSource {
private static final Logger LOG = LoggerFactory.getLogger(ResourceBundleMessageSource.class);
private static final int DEFAULT_ORDER = 0;
private final String baseName;
private final Map<MessageKey, Optional<String>> messageCache =
buildMessageCache();
private final Map<MessageKey, Optional<ResourceBundle>> bundleCache =
buildBundleCache();
private final @Nullable ResourceBundle defaultBundle;
private final int order;
/**
* Default constructor.
* @param baseName The base name of the message bundle
*/
public ResourceBundleMessageSource(@NonNull String baseName) {
this(baseName, null);
}
/**
* Default constructor.
* @param baseName The base name of the message bundle
* @param order used for the implementation of the {@link Ordered#getOrder()} method
*/
public ResourceBundleMessageSource(@NonNull String baseName, int order) {
this(baseName, null, order);
}
/**
* Default constructor.
* @param baseName The base name of the message bundle
* @param defaultLocale The default locale to use if no message is found for the given locale
*/
public ResourceBundleMessageSource(@NonNull String baseName, @Nullable Locale defaultLocale) {
this(baseName, defaultLocale, DEFAULT_ORDER);
}
/**
* Default constructor.
* @param baseName The base name of the message bundle
* @param defaultLocale The default locale to use if no message is found for the given locale
* @param order used for the implementation of the {@link Ordered#getOrder()} method
*/
public ResourceBundleMessageSource(@NonNull String baseName, @Nullable Locale defaultLocale, int order) {
this.order = order;
ArgumentUtils.requireNonNull("baseName", baseName);
this.baseName = baseName;
ResourceBundle defaultBundle;
try {
if (defaultLocale != null) {
defaultBundle = ResourceBundle.getBundle(baseName, defaultLocale, getClassLoader());
} else {
defaultBundle = ResourceBundle.getBundle(baseName);
}
} catch (MissingResourceException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("No default bundle (locale: {}) found for base name {}", defaultLocale, baseName);
}
defaultBundle = null;
}
this.defaultBundle = defaultBundle;
}
@Override
public int getOrder() {
return order;
}
@NonNull
@Override
@SuppressWarnings("java:S2789") // performance optimization
public Optional<String> getRawMessage(@NonNull String code, @NonNull MessageContext context) {
final Locale locale = defaultBundle != null ? context.getLocale(defaultBundle.getLocale()) : context.getLocale();
MessageKey messageKey = new MessageKey(locale, code);
Optional<String> opt = messageCache.get(messageKey);
//noinspection OptionalAssignedToNull
if (opt == null) {
try {
final Optional<ResourceBundle> bundle = resolveBundle(locale);
if (bundle.isPresent()) {
return bundle.map(b -> b.getString(code));
} else {
return resolveDefault(code);
}
} catch (MissingResourceException e) {
opt = resolveDefault(code);
}
messageCache.put(messageKey, opt);
}
return opt;
}
/**
* The | ResourceBundleMessageSource |
java | apache__flink | flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/operators/deduplicate/RowTimeDeduplicateKeepFirstRowFunctionTest.java | {
"start": 1595,
"end": 5887
} | class ____ extends RowTimeDeduplicateFunctionTestBase {
@Test
public void testRowTimeDeduplicateKeepFirstRow() throws Exception {
List<Object> expectedOutput = new ArrayList<>();
RowTimeDeduplicateKeepFirstRowFunction deduplicateFunction =
new RowTimeDeduplicateKeepFirstRowFunction(
inputRowType, minTtlTime.toMillis(), rowTimeIndex);
OneInputStreamOperatorTestHarness<RowData, RowData> testHarness =
createTestHarness(new KeyedProcessOperator<>(deduplicateFunction));
List<Object> actualOutput = new ArrayList<>();
testHarness.open();
testHarness.processWatermark(new Watermark(50));
// ignore late records
assertThat(deduplicateFunction.getNumLateRecordsDropped().getCount()).isEqualTo(0);
testHarness.processElement(insertRecord("key1", 0, 1L));
expectedOutput.add(new Watermark(50));
assertThat(deduplicateFunction.getNumLateRecordsDropped().getCount()).isEqualTo(1);
testHarness.processElement(insertRecord("key1", 14, 101L));
testHarness.processElement(insertRecord("key1", 13, 99L));
testHarness.processElement(insertRecord("key1", 15, 99L));
testHarness.processElement(insertRecord("key1", 12, 100L));
testHarness.processElement(insertRecord("key2", 11, 101L));
// test 1: keep first row with row time
testHarness.processWatermark(new Watermark(102));
actualOutput.addAll(testHarness.getOutput());
expectedOutput.add(record(RowKind.INSERT, "key1", 13, 99L));
expectedOutput.add(record(RowKind.INSERT, "key2", 11, 101L));
expectedOutput.add(new Watermark(102));
assertThat(deduplicateFunction.getNumLateRecordsDropped().getCount()).isEqualTo(1);
// do a snapshot, close and restore again
OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0);
testHarness.close();
deduplicateFunction =
new RowTimeDeduplicateKeepFirstRowFunction(
inputRowType, minTtlTime.toMillis(), rowTimeIndex);
testHarness = createTestHarness(new KeyedProcessOperator<>(deduplicateFunction));
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
testHarness.processElement(insertRecord("key1", 12, 300L));
testHarness.processElement(insertRecord("key2", 11, 301L));
testHarness.processElement(insertRecord("key3", 5, 299L));
// test 2: load snapshot state
testHarness.processWatermark(new Watermark(302));
expectedOutput.add(record(RowKind.INSERT, "key3", 5, 299L));
expectedOutput.add(new Watermark(302));
// test 3: expire the state
testHarness.setStateTtlProcessingTime(minTtlTime.toMillis() + 1);
testHarness.processElement(insertRecord("key1", 12, 400L));
testHarness.processElement(insertRecord("key2", 11, 401L));
// previously emitted records have expired so new records should be emitted
testHarness.processWatermark(402);
expectedOutput.add(record(RowKind.INSERT, "key1", 12, 400L));
expectedOutput.add(record(RowKind.INSERT, "key2", 11, 401L));
expectedOutput.add(new Watermark(402));
// test 4: expire the state again to test ttl not losing records
testHarness.setStateTtlProcessingTime(2 * minTtlTime.toMillis() + 1);
testHarness.processElement(insertRecord("key1", 22, 500L));
testHarness.processElement(insertRecord("key2", 21, 501L));
// we test that ttl doesn't expire records that are still in not-yet-fired timers
testHarness.setStateTtlProcessingTime(3 * minTtlTime.toMillis() + 1);
// previously emitted records have expired so new records should be emitted
testHarness.processWatermark(502);
expectedOutput.add(record(RowKind.INSERT, "key1", 22, 500L));
expectedOutput.add(record(RowKind.INSERT, "key2", 21, 501L));
expectedOutput.add(new Watermark(502));
actualOutput.addAll(testHarness.getOutput());
assertor.assertOutputEqualsSorted("output wrong.", expectedOutput, actualOutput);
testHarness.close();
}
}
| RowTimeDeduplicateKeepFirstRowFunctionTest |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configuration/AuthorizationManagerWebInvocationPrivilegeEvaluatorConfigTests.java | {
"start": 2108,
"end": 2680
} | class ____ {
public final SpringTestContext spring = new SpringTestContext(this);
@Autowired(required = false)
HttpServletRequestTransformer requestTransformer;
@Autowired
WebInvocationPrivilegeEvaluator wipe;
@Test
void webAndTransformerThenWIPEDelegatesToTransformer() {
this.spring.register(WebConfig.class, TransformerConfig.class).autowire();
this.wipe.isAllowed("/uri", TestAuthentication.authenticatedUser());
verify(this.requestTransformer).transform(any());
}
@Configuration
static | AuthorizationManagerWebInvocationPrivilegeEvaluatorConfigTests |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/jmx/export/annotation/EnableMBeanExportConfigurationTests.java | {
"start": 5796,
"end": 6224
} | class ____ {
@Bean
public MBeanServerFactoryBean server() {
return new MBeanServerFactoryBean();
}
@Bean
@Lazy
@Scope(proxyMode = ScopedProxyMode.TARGET_CLASS)
public AnnotationTestBean testBean() {
AnnotationTestBean bean = new AnnotationTestBean();
bean.setName("TEST");
bean.setAge(100);
return bean;
}
}
@Configuration
@EnableMBeanExport(server = "${serverName}")
static | ProxyConfiguration |
java | apache__camel | components/camel-fhir/camel-fhir-component/src/test/java/org/apache/camel/component/fhir/FhirReadIT.java | {
"start": 1615,
"end": 11150
} | class ____ extends AbstractFhirTestSupport {
private static final Logger LOG = LoggerFactory.getLogger(FhirReadIT.class);
private static final String PATH_PREFIX = FhirApiCollection.getCollection().getApiName(FhirReadApiMethod.class).getName();
@Test
public void testResourceById() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resource", Patient.class);
// parameter type is org.hl7.fhir.instance.model.api.IIdType
headers.put("CamelFhir.id", patient.getIdElement());
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_ID", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByLongId() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resource", Patient.class);
// parameter type is Long
headers.put("CamelFhir.longId", Long.valueOf(patient.getIdElement().getIdPart()));
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_LONG_ID", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByStringId() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resource", Patient.class);
// parameter type is Long
headers.put("CamelFhir.stringId", patient.getIdElement().getIdPart());
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_STRING_ID", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByIdAndStringResource() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resourceClass", "Patient");
// parameter type is org.hl7.fhir.instance.model.api.IIdType
headers.put("CamelFhir.id", patient.getIdElement());
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_ID_AND_STRING_RESOURCE", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByLongIdAndStringResource() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resource", Patient.class);
// parameter type is Long
headers.put("CamelFhir.longId", Long.valueOf(patient.getIdElement().getIdPart()));
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_LONG_ID_AND_STRING_RESOURCE", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByStringIdAndStringResource() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resource", Patient.class);
// parameter type is Long
headers.put("CamelFhir.stringId", patient.getIdElement().getIdPart());
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_STRING_ID_AND_STRING_RESOURCE", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByStringIdAndVersion() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resource", Patient.class);
// parameter type is Long
headers.put("CamelFhir.stringId", patient.getIdElement().getIdPart());
// parameter type is String
headers.put("CamelFhir.version", patient.getIdElement().getVersionIdPart());
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_STRING_ID_AND_VERSION", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByStringIdAndVersionWithResourceClass() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resourceClass", "Patient");
// parameter type is Long
headers.put("CamelFhir.stringId", patient.getIdElement().getIdPart());
// parameter type is String
headers.put("CamelFhir.version", patient.getIdElement().getVersionIdPart());
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_STRING_ID_AND_VERSION_AND_STRING_RESOURCE", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByiUrl() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resource", Patient.class);
// parameter type is org.hl7.fhir.instance.model.api.IIdType
headers.put("CamelFhir.iUrl", new IdType(this.patient.getId()));
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_IURL", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByUrl() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is Class
headers.put("CamelFhir.resource", Patient.class);
// parameter type is String
headers.put("CamelFhir.url", this.patient.getId());
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_URL", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByStringUrlAndStringResource() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is String
headers.put("CamelFhir.resourceClass", "Patient");
// parameter type is org.hl7.fhir.instance.model.api.IIdType
headers.put("CamelFhir.iUrl", new IdType(this.patient.getId()));
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_STRING_URL_AND_STRING_RESOURCE", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByUrlAndStringResource() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is String
headers.put("CamelFhir.resourceClass", "Patient");
// parameter type is String
headers.put("CamelFhir.url", this.patient.getId());
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_URL_AND_STRING_RESOURCE", null, headers);
assertValidResponse(result);
}
@Test
public void testResourceByUrlAndStringResourcePrettyPrint() {
final Map<String, Object> headers = new HashMap<>();
// parameter type is String
headers.put("CamelFhir.resourceClass", "Patient");
// parameter type is String
headers.put("CamelFhir.url", this.patient.getId());
headers.put(ExtraParameters.PRETTY_PRINT.getHeaderName(), Boolean.TRUE);
Patient result = requestBodyAndHeaders("direct://RESOURCE_BY_URL_AND_STRING_RESOURCE", null, headers);
assertValidResponse(result);
}
private void assertValidResponse(Patient result) {
LOG.debug("response: {}", result);
assertNotNull(result, "resourceByUrl result");
assertEquals("Freeman", result.getName().get(0).getFamily());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// test route for resourceById
from("direct://RESOURCE_BY_ID")
.to("fhir://" + PATH_PREFIX + "/resourceById");
// test route for resourceById
from("direct://RESOURCE_BY_LONG_ID")
.to("fhir://" + PATH_PREFIX + "/resourceById");
// test route for resourceById
from("direct://RESOURCE_BY_STRING_ID")
.to("fhir://" + PATH_PREFIX + "/resourceById");
// test route for resourceById
from("direct://RESOURCE_BY_ID_AND_STRING_RESOURCE")
.to("fhir://" + PATH_PREFIX + "/resourceById");
// test route for resourceById
from("direct://RESOURCE_BY_LONG_ID_AND_STRING_RESOURCE")
.to("fhir://" + PATH_PREFIX + "/resourceById");
// test route for resourceById
from("direct://RESOURCE_BY_STRING_ID_AND_STRING_RESOURCE")
.to("fhir://" + PATH_PREFIX + "/resourceById");
// test route for resourceById
from("direct://RESOURCE_BY_STRING_ID_AND_VERSION")
.to("fhir://" + PATH_PREFIX + "/resourceById");
// test route for resourceById
from("direct://RESOURCE_BY_STRING_ID_AND_VERSION_AND_STRING_RESOURCE")
.to("fhir://" + PATH_PREFIX + "/resourceById");
// test route for resourceByUrl
from("direct://RESOURCE_BY_IURL")
.to("fhir://" + PATH_PREFIX + "/resourceByUrl");
// test route for resourceByUrl
from("direct://RESOURCE_BY_URL")
.to("fhir://" + PATH_PREFIX + "/resourceByUrl");
// test route for resourceByUrl
from("direct://RESOURCE_BY_STRING_URL_AND_STRING_RESOURCE")
.to("fhir://" + PATH_PREFIX + "/resourceByUrl");
// test route for resourceByUrl
from("direct://RESOURCE_BY_URL_AND_STRING_RESOURCE")
.to("fhir://" + PATH_PREFIX + "/resourceByUrl");
}
};
}
}
| FhirReadIT |
java | apache__camel | components/camel-twitter/src/main/java/org/apache/camel/component/twitter/consumer/TwitterEventListener.java | {
"start": 932,
"end": 1027
} | interface ____ extends EventListener {
void onEvent(Exchange exchange);
}
| TwitterEventListener |
java | spring-projects__spring-data-jpa | spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/query/QueryEnhancer.java | {
"start": 842,
"end": 1269
} | interface ____ the API for enhancing a given Query. Query enhancers understand the syntax of the query and
* can introspect queries to determine aliases and projections. Enhancers can also rewrite queries to apply sorting and
* create count queries if the underlying query is a {@link #isSelectQuery() SELECT} query.
*
* @author Diego Krupitza
* @author Greg Turnquist
* @author Mark Paluch
* @since 2.7
*/
public | describes |
java | elastic__elasticsearch | server/src/test/java/org/elasticsearch/index/shard/IndexShardOperationPermitsTests.java | {
"start": 2402,
"end": 29155
} | class ____ extends ESTestCase {
private static ThreadPool threadPool;
private IndexShardOperationPermits permits;
private static final String REJECTING_EXECUTOR = "rejecting";
@BeforeClass
public static void setupThreadPool() {
int writeThreadPoolSize = randomIntBetween(1, 2);
int writeThreadPoolQueueSize = randomIntBetween(1, 2);
threadPool = new TestThreadPool(
"IndexShardOperationPermitsTests",
Settings.builder()
.put("thread_pool." + ThreadPool.Names.WRITE + ".size", writeThreadPoolSize)
.put("thread_pool." + ThreadPool.Names.WRITE + ".queue_size", writeThreadPoolQueueSize)
.build(),
new FixedExecutorBuilder(
Settings.EMPTY,
REJECTING_EXECUTOR,
1,
0,
REJECTING_EXECUTOR,
EsExecutors.TaskTrackingConfig.DO_NOT_TRACK
)
);
assertThat(threadPool.executor(ThreadPool.Names.WRITE), instanceOf(EsThreadPoolExecutor.class));
assertThat(((EsThreadPoolExecutor) threadPool.executor(ThreadPool.Names.WRITE)).getCorePoolSize(), equalTo(writeThreadPoolSize));
assertThat(((EsThreadPoolExecutor) threadPool.executor(ThreadPool.Names.WRITE)).getMaximumPoolSize(), equalTo(writeThreadPoolSize));
assertThat(
((EsThreadPoolExecutor) threadPool.executor(ThreadPool.Names.WRITE)).getQueue().remainingCapacity(),
equalTo(writeThreadPoolQueueSize)
);
}
@AfterClass
public static void shutdownThreadPool() {
ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS);
threadPool = null;
}
@Before
public void createIndexShardOperationsLock() {
permits = new IndexShardOperationPermits(new ShardId("blubb", "id", 0), threadPool);
}
@After
public void checkNoInflightOperations() {
assertThat(permits.semaphore.availablePermits(), equalTo(Integer.MAX_VALUE));
assertThat(permits.getActiveOperationsCount(), equalTo(0));
}
public void testAllOperationsInvoked() throws InterruptedException, TimeoutException {
int numThreads = 10;
List<PlainActionFuture<Releasable>> futures = new ArrayList<>();
List<Thread> operationThreads = new ArrayList<>();
CountDownLatch latch = new CountDownLatch(numThreads / 4);
boolean forceExecution = randomBoolean();
for (int i = 0; i < numThreads; i++) {
// the write thread pool uses a bounded size and can get rejections, see setupThreadPool
Executor executor = threadPool.executor(randomFrom(ThreadPool.Names.WRITE, ThreadPool.Names.GENERIC));
PlainActionFuture<Releasable> future = new PlainActionFuture<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
releasable.close();
super.onResponse(releasable);
}
};
Thread thread = new Thread(() -> {
latch.countDown();
permits.acquire(future, executor, forceExecution);
});
futures.add(future);
operationThreads.add(thread);
}
boolean closeAfterBlocking = randomBoolean();
CountDownLatch blockFinished = new CountDownLatch(1);
threadPool.generic().execute(() -> {
try {
latch.await();
blockAndWait().close();
blockFinished.countDown();
if (closeAfterBlocking) {
permits.close();
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
});
for (Thread thread : operationThreads) {
thread.start();
}
for (PlainActionFuture<Releasable> future : futures) {
try {
assertNotNull(future.get(1, TimeUnit.MINUTES));
} catch (ExecutionException e) {
if (closeAfterBlocking) {
assertThat(
e.getCause(),
either(instanceOf(EsRejectedExecutionException.class)).or(instanceOf(IndexShardClosedException.class))
);
} else {
assertThat(e.getCause(), instanceOf(EsRejectedExecutionException.class));
}
}
}
for (Thread thread : operationThreads) {
thread.join();
}
blockFinished.await();
}
public void testOperationsInvokedImmediatelyIfNoBlock() throws ExecutionException, InterruptedException {
PlainActionFuture<Releasable> future = new PlainActionFuture<>();
permits.acquire(future, threadPool.generic(), true);
assertTrue(future.isDone());
future.get().close();
}
public void testOperationsIfClosed() {
PlainActionFuture<Releasable> future = new PlainActionFuture<>();
permits.close();
permits.acquire(future, threadPool.generic(), true);
ExecutionException exception = expectThrows(ExecutionException.class, future::get);
assertThat(exception.getCause(), instanceOf(IndexShardClosedException.class));
}
public void testBlockIfClosed() {
permits.close();
expectThrows(
IndexShardClosedException.class,
() -> permits.blockOperations(
wrap(() -> { throw new IllegalArgumentException("fake error"); }),
randomInt(10),
TimeUnit.MINUTES,
threadPool.generic()
)
);
}
public void testOperationsDelayedIfBlock() throws ExecutionException, InterruptedException, TimeoutException {
PlainActionFuture<Releasable> future = new PlainActionFuture<>();
try (Releasable ignored = blockAndWait()) {
permits.acquire(future, threadPool.generic(), true);
assertFalse(future.isDone());
}
future.get(1, TimeUnit.HOURS).close();
}
public void testGetBlockWhenBlocked() throws ExecutionException, InterruptedException, TimeoutException {
PlainActionFuture<Releasable> future = new PlainActionFuture<>();
final CountDownLatch blockAcquired = new CountDownLatch(1);
final CountDownLatch releaseBlock = new CountDownLatch(1);
final AtomicBoolean blocked = new AtomicBoolean();
try (Releasable ignored = blockAndWait()) {
permits.acquire(future, threadPool.generic(), true);
permits.blockOperations(wrap(() -> {
blocked.set(true);
blockAcquired.countDown();
releaseBlock.await();
}), 30, TimeUnit.MINUTES, threadPool.generic());
assertFalse(blocked.get());
assertFalse(future.isDone());
}
blockAcquired.await();
assertTrue(blocked.get());
assertFalse(future.isDone());
releaseBlock.countDown();
future.get(1, TimeUnit.HOURS).close();
}
/**
* Tests that the ThreadContext is restored when a operation is executed after it has been delayed due to a block
*/
public void testThreadContextPreservedIfBlock() throws ExecutionException, InterruptedException, TimeoutException {
final ThreadContext context = threadPool.getThreadContext();
final Function<ActionListener<Releasable>, Boolean> contextChecker = (listener) -> {
if ("bar".equals(context.getHeader("foo")) == false) {
listener.onFailure(
new IllegalStateException(
"context did not have value [bar] for header [foo]. Actual value [" + context.getHeader("foo") + "]"
)
);
} else if ("baz".equals(context.getTransient("bar")) == false) {
listener.onFailure(
new IllegalStateException(
"context did not have value [baz] for transient [bar]. Actual value [" + context.getTransient("bar") + "]"
)
);
} else {
return true;
}
return false;
};
PlainActionFuture<Releasable> future = new PlainActionFuture<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
if (contextChecker.apply(this)) {
super.onResponse(releasable);
}
}
};
PlainActionFuture<Releasable> future2 = new PlainActionFuture<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
if (contextChecker.apply(this)) {
super.onResponse(releasable);
}
}
};
try (Releasable ignored = blockAndWait()) {
// we preserve the thread context here so that we have a different context in the call to acquire than the context present
// when the releasable is closed
try (ThreadContext.StoredContext ignore = context.newStoredContext()) {
context.putHeader("foo", "bar");
context.putTransient("bar", "baz");
// test both with and without a executor name
permits.acquire(future, threadPool.generic(), true);
permits.acquire(future2, null, true);
}
assertFalse(future.isDone());
}
future.get(1, TimeUnit.HOURS).close();
future2.get(1, TimeUnit.HOURS).close();
}
private Releasable blockAndWait() throws InterruptedException {
CountDownLatch blockAcquired = new CountDownLatch(1);
CountDownLatch releaseBlock = new CountDownLatch(1);
CountDownLatch blockReleased = new CountDownLatch(1);
boolean throwsException = randomBoolean();
IndexShardClosedException exception = new IndexShardClosedException(new ShardId("blubb", "id", 0));
permits.blockOperations(ActionListener.runAfter(new ActionListener<>() {
@Override
public void onResponse(Releasable releasable) {
try (releasable) {
blockAcquired.countDown();
releaseBlock.await();
if (throwsException) {
onFailure(exception);
}
} catch (InterruptedException e) {
throw new RuntimeException();
}
}
@Override
public void onFailure(Exception e) {
if (e != exception) {
throw new RuntimeException(e);
}
}
}, blockReleased::countDown), 1, TimeUnit.MINUTES, threadPool.generic());
blockAcquired.await();
return () -> {
releaseBlock.countDown();
try {
blockReleased.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
};
}
public void testAsyncBlockOperationsOperationWhileBlocked() throws InterruptedException {
final CountDownLatch blockAcquired = new CountDownLatch(1);
final CountDownLatch releaseBlock = new CountDownLatch(1);
final AtomicBoolean blocked = new AtomicBoolean();
permits.blockOperations(wrap(() -> {
blocked.set(true);
blockAcquired.countDown();
releaseBlock.await();
}), 30, TimeUnit.MINUTES, threadPool.generic());
blockAcquired.await();
assertTrue(blocked.get());
// an operation that is submitted while there is a delay in place should be delayed
final CountDownLatch delayedOperation = new CountDownLatch(1);
final AtomicBoolean delayed = new AtomicBoolean();
final Thread thread = new Thread(() -> permits.acquire(new ActionListener<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
delayed.set(true);
releasable.close();
delayedOperation.countDown();
}
@Override
public void onFailure(Exception e) {
}
}, threadPool.generic(), false));
thread.start();
assertFalse(delayed.get());
releaseBlock.countDown();
delayedOperation.await();
assertTrue(delayed.get());
thread.join();
}
public void testAsyncBlockOperationsOperationBeforeBlocked() throws InterruptedException, BrokenBarrierException {
final CyclicBarrier barrier = new CyclicBarrier(2);
final CountDownLatch operationExecutingLatch = new CountDownLatch(1);
final CountDownLatch firstOperationLatch = new CountDownLatch(1);
final CountDownLatch firstOperationCompleteLatch = new CountDownLatch(1);
final Thread firstOperationThread = new Thread(
controlledAcquire(barrier, operationExecutingLatch, firstOperationLatch, firstOperationCompleteLatch)
);
firstOperationThread.start();
barrier.await();
operationExecutingLatch.await();
// now we will delay operations while the first operation is still executing (because it is latched)
final CountDownLatch blockedLatch = new CountDownLatch(1);
final AtomicBoolean onBlocked = new AtomicBoolean();
permits.blockOperations(wrap(() -> {
onBlocked.set(true);
blockedLatch.countDown();
}), 30, TimeUnit.MINUTES, threadPool.generic());
assertFalse(onBlocked.get());
// if we submit another operation, it should be delayed
final CountDownLatch secondOperationExecuting = new CountDownLatch(1);
final CountDownLatch secondOperationComplete = new CountDownLatch(1);
final AtomicBoolean secondOperation = new AtomicBoolean();
final Thread secondOperationThread = new Thread(() -> {
secondOperationExecuting.countDown();
permits.acquire(new ActionListener<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
secondOperation.set(true);
releasable.close();
secondOperationComplete.countDown();
}
@Override
public void onFailure(Exception e) {
throw new RuntimeException(e);
}
}, threadPool.generic(), false);
});
secondOperationThread.start();
secondOperationExecuting.await();
assertFalse(secondOperation.get());
firstOperationLatch.countDown();
firstOperationCompleteLatch.await();
blockedLatch.await();
assertTrue(onBlocked.get());
secondOperationComplete.await();
assertTrue(secondOperation.get());
firstOperationThread.join();
secondOperationThread.join();
}
public void testAsyncBlockOperationsRace() throws Exception {
// we racily submit operations and a delay, and then ensure that all operations were actually completed
final int operations = scaledRandomIntBetween(1, 64);
final CyclicBarrier barrier = new CyclicBarrier(1 + 1 + operations);
final CountDownLatch operationLatch = new CountDownLatch(1 + operations);
final Set<Integer> values = ConcurrentCollections.newConcurrentSet();
final List<Thread> threads = new ArrayList<>();
for (int i = 0; i < operations; i++) {
final int value = i;
final Thread thread = new Thread(() -> {
try {
barrier.await();
} catch (final BrokenBarrierException | InterruptedException e) {
throw new RuntimeException(e);
}
permits.acquire(new ActionListener<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
values.add(value);
releasable.close();
operationLatch.countDown();
}
@Override
public void onFailure(Exception e) {
}
}, threadPool.generic(), false);
});
thread.start();
threads.add(thread);
}
final Thread blockingThread = new Thread(() -> {
try {
barrier.await();
} catch (final BrokenBarrierException | InterruptedException e) {
throw new RuntimeException(e);
}
permits.blockOperations(wrap(() -> {
values.add(operations);
operationLatch.countDown();
}), 30, TimeUnit.MINUTES, threadPool.generic());
});
blockingThread.start();
barrier.await();
operationLatch.await();
for (final Thread thread : threads) {
thread.join();
}
blockingThread.join();
// check that all operations completed
for (int i = 0; i < operations; i++) {
assertTrue(values.contains(i));
}
assertTrue(values.contains(operations));
/*
* The block operation is executed on another thread and the operations can have completed before this thread has returned all the
* permits to the semaphore. We wait here until all generic threads are idle as an indication that all permits have been returned to
* the semaphore.
*/
assertBusy(() -> {
for (final ThreadPoolStats.Stats stats : threadPool.stats()) {
if (ThreadPool.Names.GENERIC.equals(stats.name())) {
assertThat("Expected no active threads in GENERIC pool", stats.active(), equalTo(0));
return;
}
}
fail("Failed to find stats for the GENERIC thread pool");
});
}
public void testActiveOperationsCount() throws ExecutionException, InterruptedException {
PlainActionFuture<Releasable> future1 = new PlainActionFuture<>();
permits.acquire(future1, threadPool.generic(), true);
assertTrue(future1.isDone());
assertThat(permits.getActiveOperationsCount(), equalTo(1));
PlainActionFuture<Releasable> future2 = new PlainActionFuture<>();
permits.acquire(future2, threadPool.generic(), true);
assertTrue(future2.isDone());
assertThat(permits.getActiveOperationsCount(), equalTo(2));
future1.get().close();
assertThat(permits.getActiveOperationsCount(), equalTo(1));
future1.get().close(); // check idempotence
assertThat(permits.getActiveOperationsCount(), equalTo(1));
future2.get().close();
assertThat(permits.getActiveOperationsCount(), equalTo(0));
try (Releasable ignored = blockAndWait()) {
assertThat(permits.getActiveOperationsCount(), equalTo(IndexShard.OPERATIONS_BLOCKED));
}
PlainActionFuture<Releasable> future3 = new PlainActionFuture<>();
permits.acquire(future3, threadPool.generic(), true);
assertTrue(future3.isDone());
assertThat(permits.getActiveOperationsCount(), equalTo(1));
future3.get().close();
assertThat(permits.getActiveOperationsCount(), equalTo(0));
}
private Releasable acquirePermitImmediately() {
final var listener = SubscribableListener.<Releasable>newForked(l -> permits.acquire(l, threadPool.generic(), false));
assertTrue(listener.isDone());
return safeAwait(listener);
}
public void testAsyncBlockOperationsOnRejection() {
final PlainActionFuture<Void> threadBlock = new PlainActionFuture<>();
try (Releasable firstPermit = acquirePermitImmediately()) {
assertNotNull(firstPermit);
final var rejectingExecutor = threadPool.executor(REJECTING_EXECUTOR);
rejectingExecutor.execute(threadBlock::actionGet);
assertThat(
safeAwaitFailure(Releasable.class, l -> permits.blockOperations(l, 1, TimeUnit.HOURS, rejectingExecutor)),
instanceOf(EsRejectedExecutionException.class)
);
// ensure that the exception means no block was put in place
try (Releasable secondPermit = acquirePermitImmediately()) {
assertNotNull(secondPermit);
}
} finally {
threadBlock.onResponse(null);
}
// ensure that another block can still be acquired
try (Releasable block = safeAwait(l -> permits.blockOperations(l, 1, TimeUnit.HOURS, threadPool.generic()))) {
assertNotNull(block);
}
}
public void testAsyncBlockOperationsOnTimeout() {
final PlainActionFuture<Void> threadBlock = new PlainActionFuture<>();
try (Releasable firstPermit = acquirePermitImmediately()) {
assertNotNull(firstPermit);
assertEquals(
"timeout while blocking operations after [0s]",
safeAwaitFailure(
ElasticsearchTimeoutException.class,
Releasable.class,
f -> permits.blockOperations(f, 0, TimeUnit.SECONDS, threadPool.generic())
).getMessage()
);
// ensure that the exception means no block was put in place
try (Releasable secondPermit = acquirePermitImmediately()) {
assertNotNull(secondPermit);
}
} finally {
threadBlock.onResponse(null);
}
// ensure that another block can still be acquired
try (Releasable block = safeAwait(l -> permits.blockOperations(l, 1, TimeUnit.HOURS, threadPool.generic()))) {
assertNotNull(block);
}
}
public void testTimeout() throws BrokenBarrierException, InterruptedException {
final CyclicBarrier barrier = new CyclicBarrier(2);
final CountDownLatch operationExecutingLatch = new CountDownLatch(1);
final CountDownLatch operationLatch = new CountDownLatch(1);
final CountDownLatch operationCompleteLatch = new CountDownLatch(1);
final Thread thread = new Thread(controlledAcquire(barrier, operationExecutingLatch, operationLatch, operationCompleteLatch));
thread.start();
barrier.await();
operationExecutingLatch.await();
final AtomicReference<Exception> reference = new AtomicReference<>();
final CountDownLatch onFailureLatch = new CountDownLatch(1);
permits.blockOperations(new ActionListener<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
releasable.close();
}
@Override
public void onFailure(final Exception e) {
reference.set(e);
onFailureLatch.countDown();
}
}, 1, TimeUnit.MILLISECONDS, threadPool.generic());
onFailureLatch.await();
assertThat(reference.get(), hasToString(containsString("timeout while blocking operations")));
operationLatch.countDown();
operationCompleteLatch.await();
thread.join();
}
public void testNoPermitsRemaining() throws InterruptedException {
permits.semaphore.tryAcquire(IndexShardOperationPermits.TOTAL_PERMITS, 1, TimeUnit.SECONDS);
final IllegalStateException e = expectThrows(
IllegalStateException.class,
() -> this.permits.acquire(new ActionListener<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
assert false;
}
@Override
public void onFailure(Exception e) {
assert false;
}
}, threadPool.generic(), false)
);
assertThat(e, hasToString(containsString("failed to obtain permit but operations are not delayed")));
permits.semaphore.release(IndexShardOperationPermits.TOTAL_PERMITS);
}
/**
* Returns an operation that acquires a permit and synchronizes in the following manner:
* <ul>
* <li>waits on the {@code barrier} before acquiring a permit</li>
* <li>counts down the {@code operationExecutingLatch} when it acquires the permit</li>
* <li>waits on the {@code operationLatch} before releasing the permit</li>
* <li>counts down the {@code operationCompleteLatch} after releasing the permit</li>
* </ul>
*
* @param barrier the barrier to wait on
* @param operationExecutingLatch the latch to countdown after acquiring the permit
* @param operationLatch the latch to wait on before releasing the permit
* @param operationCompleteLatch the latch to countdown after releasing the permit
* @return a controllable runnable that acquires a permit
*/
private Runnable controlledAcquire(
final CyclicBarrier barrier,
final CountDownLatch operationExecutingLatch,
final CountDownLatch operationLatch,
final CountDownLatch operationCompleteLatch
) {
return () -> {
try {
barrier.await();
} catch (final BrokenBarrierException | InterruptedException e) {
throw new RuntimeException(e);
}
permits.acquire(new ActionListener<Releasable>() {
@Override
public void onResponse(Releasable releasable) {
operationExecutingLatch.countDown();
try {
operationLatch.await();
} catch (final InterruptedException e) {
throw new RuntimeException(e);
}
releasable.close();
operationCompleteLatch.countDown();
}
@Override
public void onFailure(Exception e) {
throw new RuntimeException(e);
}
}, threadPool.generic(), false);
};
}
private static ActionListener<Releasable> wrap(final CheckedRunnable<Exception> onResponse) {
return ActionTestUtils.assertNoFailureListener(releasable -> {
try (Releasable ignored = releasable) {
onResponse.run();
}
});
}
}
| IndexShardOperationPermitsTests |
java | elastic__elasticsearch | x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/DeleteAsyncSearchRequestTests.java | {
"start": 579,
"end": 1177
} | class ____ extends AbstractWireSerializingTestCase<DeleteAsyncResultRequest> {
@Override
protected Writeable.Reader<DeleteAsyncResultRequest> instanceReader() {
return DeleteAsyncResultRequest::new;
}
@Override
protected DeleteAsyncResultRequest createTestInstance() {
return new DeleteAsyncResultRequest(randomSearchId());
}
@Override
protected DeleteAsyncResultRequest mutateInstance(DeleteAsyncResultRequest instance) {
return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929
}
}
| DeleteAsyncSearchRequestTests |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-proxyexchange-webmvc/src/test/java/org/springframework/cloud/gateway/mvc/ProductionConfigurationTests.java | {
"start": 12188,
"end": 18046
} | class ____ {
private URI home;
public void setHome(URI home) {
this.home = home;
}
@GetMapping("/proxy/{id}")
public ResponseEntity<?> proxyFoos(@PathVariable Integer id, ProxyExchange<?> proxy) {
return proxy.uri(home.toString() + "/foos/" + id).get();
}
@GetMapping("/proxy/path/**")
public ResponseEntity<?> proxyPath(ProxyExchange<?> proxy, UriComponentsBuilder uri) {
String path = proxy.path("/proxy/path/");
return proxy.uri(home.toString() + "/foos/" + path).get();
}
@GetMapping("/proxy/html/**")
public ResponseEntity<String> proxyHtml(ProxyExchange<String> proxy, UriComponentsBuilder uri) {
String path = proxy.path("/proxy/html");
return proxy.uri(home.toString() + path).get();
}
@GetMapping("/proxy/typeless/**")
public ResponseEntity<?> proxyTypeless(ProxyExchange<byte[]> proxy, UriComponentsBuilder uri) {
String path = proxy.path("/proxy/typeless");
return proxy.uri(home.toString() + path).get();
}
@GetMapping("/proxy/missing/{id}")
public ResponseEntity<?> proxyMissing(@PathVariable Integer id, ProxyExchange<?> proxy) {
return proxy.uri(home.toString() + "/missing/" + id).get();
}
@GetMapping("/proxy")
public ResponseEntity<?> proxyUri(ProxyExchange<?> proxy) {
return proxy.uri(home.toString() + "/foos").get();
}
@PostMapping("/proxy/{id}")
public ResponseEntity<?> proxyBars(@PathVariable Integer id, @RequestBody Map<String, Object> body,
ProxyExchange<List<Object>> proxy) {
body.put("id", id);
return proxy.uri(home.toString() + "/bars").body(Arrays.asList(body)).post(this::first);
}
@PostMapping("/proxy")
public ResponseEntity<?> barsWithNoBody(ProxyExchange<?> proxy) {
return proxy.uri(home.toString() + "/bars").post();
}
@PostMapping("/proxy/entity")
public ResponseEntity<?> explicitEntity(@RequestBody Foo foo, ProxyExchange<?> proxy) {
return proxy.uri(home.toString() + "/bars").body(Arrays.asList(foo)).post();
}
@PostMapping("/proxy/type")
public ResponseEntity<List<Bar>> explicitEntityWithType(@RequestBody Foo foo,
ProxyExchange<List<Bar>> proxy) {
return proxy.uri(home.toString() + "/bars").body(Arrays.asList(foo)).post();
}
@PostMapping("/proxy/single")
public ResponseEntity<?> implicitEntity(@RequestBody Foo foo, ProxyExchange<List<Object>> proxy) {
return proxy.uri(home.toString() + "/bars").body(Arrays.asList(foo)).post(this::first);
}
@PostMapping("/proxy/converter")
public ResponseEntity<Bar> implicitEntityWithConverter(@RequestBody Foo foo,
ProxyExchange<List<Bar>> proxy) {
return proxy.uri(home.toString() + "/bars")
.body(Arrays.asList(foo))
.post(response -> ResponseEntity.status(response.getStatusCode())
.headers(response.getHeaders())
.body(response.getBody().iterator().next()));
}
@PostMapping("/proxy/no-body")
public ResponseEntity<Foo> noBody(ProxyExchange<Foo> proxy) {
return proxy.uri(home.toString() + "/foos").post();
}
@DeleteMapping("/proxy/{id}/no-body")
public ResponseEntity<?> deleteWithoutBody(@PathVariable Integer id, ProxyExchange<?> proxy) {
return proxy.uri(home.toString() + "/foos/" + id + "/no-body").delete();
}
@DeleteMapping("/proxy/{id}")
public ResponseEntity<?> deleteWithBody(@PathVariable Integer id, @RequestBody Foo foo,
ProxyExchange<?> proxy) {
return proxy.uri(home.toString() + "/foos/" + id)
.body(foo)
.delete(response -> ResponseEntity.status(response.getStatusCode())
.headers(response.getHeaders())
.body(response.getBody()));
}
@GetMapping("/forward/**")
public void forward(ProxyExchange<?> proxy) {
String path = proxy.path("/forward");
if (path.startsWith("/special")) {
proxy.header("X-Custom", "FOO");
path = proxy.path("/forward/special");
}
proxy.forward(path);
}
@PostMapping("/forward/**")
public void postForward(ProxyExchange<?> proxy) {
String path = proxy.path("/forward");
if (path.startsWith("/special")) {
proxy.header("X-Custom", "FOO");
path = proxy.path("/forward/special");
}
proxy.forward(path);
}
@PostMapping("/forward/body/**")
public void postForwardBody(@RequestBody byte[] body, ProxyExchange<?> proxy) {
String path = proxy.path("/forward/body");
proxy.body(body).forward(path);
}
@SuppressWarnings("unused")
@PostMapping("/forward/forget/**")
public void postForwardForgetBody(@RequestBody byte[] body, ProxyExchange<?> proxy) {
String path = proxy.path("/forward/forget");
proxy.forward(path);
}
@GetMapping("/proxy/headers")
@SuppressWarnings("Duplicates")
public ResponseEntity<Map<String, List<String>>> headers(ProxyExchange<Map<String, List<String>>> proxy) {
proxy.excluded("foo", "hello");
proxy.header("bar", "hello");
proxy.header("abc", "123");
proxy.header("hello", "world");
return proxy.uri(home.toString() + "/headers").get();
}
@GetMapping("/proxy/sensitive-headers-default")
public ResponseEntity<Map<String, List<String>>> defaultSensitiveHeaders(
ProxyExchange<Map<String, List<String>>> proxy) {
proxy.header("bar", "hello");
proxy.header("abc", "123");
proxy.header("hello", "world");
return proxy.uri(home.toString() + "/headers").get();
}
@PostMapping("/proxy/checkContentLength")
public ResponseEntity<?> checkContentLength(ProxyExchange<byte[]> proxy) {
return proxy.uri(home.toString() + "/checkContentLength").post();
}
private <T> ResponseEntity<T> first(ResponseEntity<List<T>> response) {
return ResponseEntity.status(response.getStatusCode())
.headers(response.getHeaders())
.body(response.getBody().iterator().next());
}
}
@RestController
static | ProxyController |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/annotations/BuildStep.java | {
"start": 2323,
"end": 3323
} | class ____ be a producer/consumer of these
* items, while method parameter injection is specific to an individual build step. In general method parameter injection
* should be the preferred approach as it is more fine-grained.
* <p>
* Note that a {@code BuildStep} will only be run if there is a consumer for items it produces. If nothing is
* interested in the produced item then it will not be run. A consequence of this is that it must be capable of producing
* at least one item (it does not actually have to produce anything, but it must have the ability to). A build step that
* cannot produce anything will never be run.
* <p>
* {@code BuildItem} instances must be immutable, as the producer/consumer model does not allow for mutating
* artifacts. Injecting a build item and modifying it is a bug waiting to happen, as this operation would not be accounted
* for in the dependency graph.
*
* @see Record
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @ | will |
java | elastic__elasticsearch | x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/SourceGenerator.java | {
"start": 1361,
"end": 5802
} | class ____ {
private SourceGenerator() {}
public static SearchSourceBuilder sourceBuilder(
QueryContainer container,
QueryBuilder filter,
List<FieldAndFormat> fetchFields,
Map<String, Object> runtimeMappings
) {
QueryBuilder finalQuery = null;
// add the source
if (container.query() != null) {
if (filter != null) {
finalQuery = boolQuery().must(container.query().asBuilder()).filter(filter);
} else {
finalQuery = container.query().asBuilder();
}
} else {
if (filter != null) {
finalQuery = boolQuery().filter(filter);
}
}
final SearchSourceBuilder source = new SearchSourceBuilder();
source.query(finalQuery);
// extract fields
QlSourceBuilder sourceBuilder = new QlSourceBuilder();
// Iterate through all the columns requested, collecting the fields that
// need to be retrieved from the result documents
// NB: the sortBuilder takes care of eliminating duplicates
container.fields().forEach(f -> f.v1().collectFields(sourceBuilder));
sourceBuilder.build(source);
sorting(container, source);
// disable the source, as we rely on "fields" API
source.fetchSource(false);
// add the "fields" to be fetched
if (fetchFields != null) {
fetchFields.forEach(source::fetchField);
}
// add the runtime fields
if (runtimeMappings != null) {
source.runtimeMappings(runtimeMappings);
}
if (container.limit() != null) {
// add size and from
source.size(container.limit().absLimit());
// this should be added only for event queries
if (container.limit().offset() > 0) {
source.from(container.limit().offset());
}
} else {
source.size(0);
}
optimize(container, source);
return source;
}
private static void sorting(QueryContainer container, SearchSourceBuilder source) {
for (Sort sortable : container.sort().values()) {
SortBuilder<?> sortBuilder = null;
if (sortable instanceof AttributeSort as) {
Attribute attr = as.attribute();
// sorting only works on not-analyzed fields - look for a multi-field replacement
if (attr instanceof FieldAttribute fieldAttribute) {
FieldAttribute fa = fieldAttribute.exactAttribute();
sortBuilder = fieldSort(fa.name()).missing(as.missing().searchOrder(as.direction()))
.unmappedType(fa.dataType().esType());
if (fa.isNested()) {
FieldSortBuilder fieldSort = fieldSort(fa.name()).missing(as.missing().searchOrder(as.direction()))
.unmappedType(fa.dataType().esType());
NestedSortBuilder newSort = new NestedSortBuilder(fa.nestedParent().name());
NestedSortBuilder nestedSort = fieldSort.getNestedSort();
if (nestedSort == null) {
fieldSort.setNestedSort(newSort);
} else {
while (nestedSort.getNestedSort() != null) {
nestedSort = nestedSort.getNestedSort();
}
nestedSort.setNestedSort(newSort);
}
nestedSort = newSort;
if (container.query() != null) {
container.query().enrichNestedSort(nestedSort);
}
sortBuilder = fieldSort;
}
}
} else if (sortable instanceof ScriptSort ss) {
sortBuilder = scriptSort(ss.script().toPainless(), ss.script().outputType().scriptSortType());
}
if (sortBuilder != null) {
sortBuilder.order(sortable.direction().asOrder());
source.sort(sortBuilder);
}
}
}
private static void optimize(QueryContainer query, SearchSourceBuilder builder) {
builder.trackTotalHits(query.shouldTrackHits());
}
}
| SourceGenerator |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/impl/engine/CamelPostProcessorHelperTest.java | {
"start": 28802,
"end": 29231
} | class ____ {
private PollingConsumer consumer;
@EndpointInject("seda:foo")
public void setConsumer(PollingConsumer consumer) {
this.consumer = consumer;
}
public PollingConsumer getConsumer() {
return consumer;
}
public Exchange consume() {
return consumer.receive(1000);
}
}
public static | MyEndpointBeanPollingConsumer |
java | mockito__mockito | mockito-core/src/main/java/org/mockito/stubbing/OngoingStubbing.java | {
"start": 4508,
"end": 9455
} | class ____ be instantiated for each method invocation.
* <p>
* If <code>throwableTypes</code> contain a checked exception then it has to
* match one of the checked exceptions of method signature.
* <p>
* You can specify <code>throwableTypes</code> to be thrown for consecutive calls.
* In that case the last throwable determines the behavior of further consecutive calls.
* <p>
* If throwable is null then exception will be thrown.
* <p>
* See examples in javadoc for {@link Mockito#when}
*
* <p>Note since JDK 7, invoking this method will raise a compiler warning "possible heap pollution",
* this API is safe to use. If you don't want to see this warning it is possible to chain {@link #thenThrow(Class)}
* <p>Note depending on the JVM, stack trace information may not be available in
* the generated throwable instance. If you require stack trace information,
* use {@link OngoingStubbing#thenThrow(Throwable...)} instead.
*
* @param toBeThrown to be thrown on method invocation
* @param nextToBeThrown next to be thrown on method invocation
*
* @return object that allows stubbing consecutive calls
* @since 2.1.0
*/
// Additional method helps users of JDK7+ to hide heap pollution / unchecked generics array
// creation warnings (on call site)
@SuppressWarnings({"unchecked", "varargs"})
OngoingStubbing<T> thenThrow(
Class<? extends Throwable> toBeThrown, Class<? extends Throwable>... nextToBeThrown);
/**
* Sets the real implementation to be called when the method is called on a mock object.
* <p>
* As usual you are going to read <b>the partial mock warning</b>:
* Object oriented programming is more less tackling complexity by dividing the complexity into separate, specific, SRPy objects.
* How does partial mock fit into this paradigm? Well, it just doesn't...
* Partial mock usually means that the complexity has been moved to a different method on the same object.
* In most cases, this is not the way you want to design your application.
* <p>
* However, there are rare cases when partial mocks come handy:
* dealing with code you cannot change easily (3rd party interfaces, interim refactoring of legacy code etc.)
* However, I wouldn't use partial mocks for new, test-driven and well-designed code.
* <pre class="code"><code class="java">
* // someMethod() must be safe (e.g. doesn't throw, doesn't have dependencies to the object state, etc.)
* // if it isn't safe then you will have trouble stubbing it using this api. Use Mockito.doCallRealMethod() instead.
* when(mock.someMethod()).thenCallRealMethod();
*
* // calls real method:
* mock.someMethod();
*
* </code></pre>
* See also javadoc {@link Mockito#spy(Object)} to find out more about partial mocks.
* <b>Mockito.spy() is a recommended way of creating partial mocks.</b>
* The reason is it guarantees real methods are called against correctly constructed object because you're responsible for constructing the object passed to spy() method.
* <p>
* See examples in javadoc for {@link Mockito#when}
*
* @return object that allows stubbing consecutive calls
*/
OngoingStubbing<T> thenCallRealMethod();
/**
* Sets a generic Answer for the method. E.g:
* <pre class="code"><code class="java">
* when(mock.someMethod(10)).thenAnswer(new Answer<Integer>() {
* public Integer answer(InvocationOnMock invocation) throws Throwable {
* return (Integer) invocation.getArguments()[0];
* }
* }
* </code></pre>
*
* @param answer the custom answer to execute.
*
* @return object that allows stubbing consecutive calls
*/
OngoingStubbing<T> thenAnswer(Answer<?> answer);
/**
* Sets a generic Answer for the method.
*
* This method is an alias of {@link #thenAnswer(Answer)}. This alias allows
* more readable tests on occasion, for example:
* <pre class="code"><code class="java">
* //using 'then' alias:
* when(mock.foo()).then(returnCoolValue());
*
* //versus good old 'thenAnswer:
* when(mock.foo()).thenAnswer(byReturningCoolValue());
* </code></pre>
*
* @param answer the custom answer to execute.
* @return object that allows stubbing consecutive calls
*
* @see #thenAnswer(Answer)
* @since 1.9.0
*/
OngoingStubbing<T> then(Answer<?> answer);
/**
* Returns the mock that was used for this stub.
* <p>
* It allows to create a stub in one line of code.
* This can be helpful to keep test code clean.
* For example, some boring stub can be created and stubbed at field initialization in a test:
* <pre class="code"><code class="java">
* public | will |
java | apache__hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/reduce/ShuffleSchedulerImpl.java | {
"start": 18785,
"end": 19443
} | class ____ implements Delayed {
MapHost host;
private long endTime;
Penalty(MapHost host, long delay) {
this.host = host;
this.endTime = Time.monotonicNow() + delay;
}
@Override
public long getDelay(TimeUnit unit) {
long remainingTime = endTime - Time.monotonicNow();
return unit.convert(remainingTime, TimeUnit.MILLISECONDS);
}
@Override
public int compareTo(Delayed o) {
long other = ((Penalty) o).endTime;
return endTime == other ? 0 : (endTime < other ? -1 : 1);
}
}
/**
* A thread that takes hosts off of the penalty list when the timer expires.
*/
private | Penalty |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/processor/DynamicRouterNoCacheTest.java | {
"start": 1350,
"end": 3517
} | class ____ extends ContextTestSupport {
@Test
public void testNoCache() throws Exception {
assertEquals(1, context.getEndpointRegistry().size());
sendBody("foo");
sendBody("bar");
// make sure its using an empty producer cache as the cache is disabled
List<Processor> list = getProcessors("foo");
DynamicRouter rl = (DynamicRouter) list.get(0);
assertNotNull(rl);
assertEquals(-1, rl.getCacheSize());
// check no additional endpoints added as cache was disabled
assertEquals(1, context.getEndpointRegistry().size());
// now send again with mocks which then add endpoints
MockEndpoint x = getMockEndpoint("mock:x");
MockEndpoint y = getMockEndpoint("mock:y");
MockEndpoint z = getMockEndpoint("mock:z");
x.expectedBodiesReceived("foo", "bar");
y.expectedBodiesReceived("foo", "bar");
z.expectedBodiesReceived("foo", "bar");
sendBody("foo");
sendBody("bar");
assertMockEndpointsSatisfied();
assertEquals(4, context.getEndpointRegistry().size());
}
protected void sendBody(String body) {
template.sendBodyAndHeader("direct:a", body, "recipientListHeader", "mock:x,mock:y,mock:z");
}
public String slip(@Headers Map headers) {
String header = (String) headers.get("recipientListHeader");
if (ObjectHelper.isEmpty(header)) {
return null;
}
if (header.contains(",")) {
String next = StringHelper.before(header, ",");
String rest = StringHelper.after(header, ",");
headers.put("recipientListHeader", rest);
return next;
} else {
// last slip
headers.put("recipientListHeader", "");
return header;
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:a").dynamicRouter(method(DynamicRouterNoCacheTest.class, "slip")).cacheSize(-1).id("foo");
}
};
}
}
| DynamicRouterNoCacheTest |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/intarrays/IntArrays_assertContainsOnly_Test.java | {
"start": 1756,
"end": 7041
} | class ____ extends IntArraysBaseTest {
@Test
void should_pass_if_actual_contains_given_values_only() {
arrays.assertContainsOnly(someInfo(), actual, arrayOf(6, 8, 10));
}
@Test
void should_pass_if_actual_contains_given_values_only_in_different_order() {
arrays.assertContainsOnly(someInfo(), actual, arrayOf(10, 8, 6));
}
@Test
void should_pass_if_actual_contains_given_values_only_more_than_once() {
actual = arrayOf(6, 8, 10, 8, 8, 8);
arrays.assertContainsOnly(someInfo(), actual, arrayOf(6, 8, 10));
}
@Test
void should_pass_if_actual_contains_given_values_only_even_if_duplicated() {
arrays.assertContainsOnly(someInfo(), actual, arrayOf(6, 8, 10, 6, 8, 10));
}
@Test
void should_pass_if_actual_and_given_values_are_empty() {
actual = emptyArray();
arrays.assertContainsOnly(someInfo(), actual, emptyArray());
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContainsOnly(someInfo(), actual, emptyArray()));
}
@Test
void should_throw_error_if_array_of_values_to_look_for_is_null() {
assertThatNullPointerException().isThrownBy(() -> arrays.assertContainsOnly(someInfo(), actual, null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_actual_is_null() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arrays.assertContainsOnly(someInfo(), null, arrayOf(8)))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_contain_given_values_only() {
AssertionInfo info = someInfo();
int[] expected = { 6, 8, 20 };
Throwable error = catchThrowable(() -> arrays.assertContainsOnly(info, actual, expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldContainOnly(actual, expected, newArrayList(20), newArrayList(10)));
}
@Test
void should_pass_if_actual_contains_given_values_only_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(), actual, arrayOf(6, -8, 10));
}
@Test
void should_pass_if_actual_contains_given_values_only_in_different_order_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(), actual, arrayOf(10, -8, 6));
}
@Test
void should_pass_if_actual_contains_given_values_only_more_than_once_according_to_custom_comparison_strategy() {
actual = arrayOf(6, -8, 10, -8, -8, -8);
arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(), actual, arrayOf(6, -8, 10));
}
@Test
void should_pass_if_actual_contains_given_values_only_even_if_duplicated_according_to_custom_comparison_strategy() {
arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(), actual, arrayOf(6, -8, 10, 6, -8, 10));
}
@Test
void should_fail_if_array_of_values_to_look_for_is_empty_and_actual_is_not_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(),
actual,
emptyArray()));
}
@Test
void should_throw_error_if_array_of_values_to_look_for_is_null_whatever_custom_comparison_strategy_is() {
assertThatNullPointerException().isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(),
actual,
null))
.withMessage(valuesToLookForIsNull());
}
@Test
void should_fail_if_actual_is_null_whatever_custom_comparison_strategy_is() {
assertThatExceptionOfType(AssertionError.class).isThrownBy(() -> arraysWithCustomComparisonStrategy.assertContainsOnly(someInfo(),
null,
arrayOf(-8)))
.withMessage(actualIsNull());
}
@Test
void should_fail_if_actual_does_not_contain_given_values_only_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
int[] expected = { 6, -8, 20 };
Throwable error = catchThrowable(() -> arraysWithCustomComparisonStrategy.assertContainsOnly(info, actual, expected));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info,
shouldContainOnly(actual, expected, newArrayList(20), newArrayList(10),
absValueComparisonStrategy));
}
}
| IntArrays_assertContainsOnly_Test |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/OverridesTest.java | {
"start": 11656,
"end": 11843
} | class ____ extends SubTwo {
@Override
abstract void arrayMethod(Object[] xs);
}
}\
""")
.doTest();
}
}
| SubThree |
java | alibaba__nacos | client/src/test/java/com/alibaba/nacos/client/logging/NacosLoggingTest.java | {
"start": 1227,
"end": 3190
} | class ____ {
@Mock
NacosLoggingAdapter loggingAdapter;
NacosLoggingProperties loggingProperties;
NacosLogging instance;
@BeforeEach
void setUp() throws NoSuchFieldException, IllegalAccessException {
loggingProperties = new NacosLoggingProperties("", new Properties());
instance = NacosLogging.getInstance();
Field loggingPropertiesField = NacosLogging.class.getDeclaredField("loggingProperties");
loggingPropertiesField.setAccessible(true);
loggingPropertiesField.set(instance, loggingProperties);
}
@Test
void testGetInstance() {
NacosLogging instance = NacosLogging.getInstance();
assertNotNull(instance);
}
@Test
void testLoadConfiguration() throws NoSuchFieldException, IllegalAccessException {
instance = NacosLogging.getInstance();
Field nacosLogging = NacosLogging.class.getDeclaredField("loggingAdapter");
nacosLogging.setAccessible(true);
nacosLogging.set(instance, loggingAdapter);
instance.loadConfiguration();
Mockito.verify(loggingAdapter, Mockito.times(1)).loadConfiguration(loggingProperties);
}
@Test
void testLoadConfigurationWithException() throws NoSuchFieldException, IllegalAccessException {
instance = NacosLogging.getInstance();
Field nacosLoggingField = NacosLogging.class.getDeclaredField("loggingAdapter");
nacosLoggingField.setAccessible(true);
NacosLoggingAdapter cachedLogging = (NacosLoggingAdapter) nacosLoggingField.get(instance);
try {
doThrow(new RuntimeException()).when(loggingAdapter).loadConfiguration(loggingProperties);
nacosLoggingField.set(instance, loggingAdapter);
instance.loadConfiguration();
// without exception thrown
} finally {
nacosLoggingField.set(instance, cachedLogging);
}
}
} | NacosLoggingTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServices.java | {
"start": 7121,
"end": 42005
} | class ____ extends AbstractBinder {
@Override
protected void configure() {
Configuration conf = new Configuration();
conf.set(YarnConfiguration.NM_LOCAL_DIRS, testRootDir.getAbsolutePath());
conf.set(YarnConfiguration.NM_LOG_DIRS, testLogDir.getAbsolutePath());
conf.setBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED, true);
conf.set(YarnConfiguration.NM_REMOTE_APP_LOG_DIR, testRemoteLogDir.getAbsolutePath());
conf.set(YarnConfiguration.YARN_LOG_SERVER_WEBSERVICE_URL, LOGSERVICEWSADDR);
dirsHandler = new LocalDirsHandlerService();
NodeHealthCheckerService healthChecker = new NodeHealthCheckerService(dirsHandler);
healthChecker.init(conf);
aclsManager = new ApplicationACLsManager(conf);
nmContext = new NodeManager.NMContext(null, null, dirsHandler,
aclsManager, null, false, conf);
NodeId nodeId = NodeId.newInstance("testhost.foo.com", 8042);
nmContext.setNodeId(nodeId);
resourceView = new ResourceView() {
@Override
public long getVmemAllocatedForContainers() {
// 15.5G in bytes
return new Long("16642998272");
}
@Override
public long getPmemAllocatedForContainers() {
// 16G in bytes
return new Long("17179869184");
}
@Override
public long getVCoresAllocatedForContainers() {
return new Long("4000");
}
@Override
public boolean isVmemCheckEnabled() {
return true;
}
@Override
public boolean isPmemCheckEnabled() {
return true;
}
};
nmWebApp = new NMWebApp(resourceView, aclsManager, dirsHandler);
final HttpServletRequest request = mock(HttpServletRequest.class);
when(request.getQueryString()).thenReturn("?user.name=user&nm.id=localhost:1111");
final HttpServletResponse response = mock(HttpServletResponse.class);
bind(nmContext).to(Context.class).named("nm");
bind(nmWebApp).to(WebApp.class).named("webapp");
bind(request).to(HttpServletRequest.class);
bind(response).to(HttpServletResponse.class);
bind(aclsManager).to(ApplicationACLsManager.class);
bind(dirsHandler).to(LocalDirsHandlerService.class);
bind(resourceView).to(ResourceView.class).named("view");
}
}
private void setupMockPluginsWithNmResourceInfo() throws YarnException {
ResourcePlugin mockPlugin1 = mock(ResourcePlugin.class);
NMResourceInfo nmResourceInfo1 = new NMResourceInfo();
nmResourceInfo1.setResourceValue(NM_RESOURCE_VALUE);
when(mockPlugin1.getNMResourceInfo()).thenReturn(nmResourceInfo1);
ResourcePluginManager pluginManager = createResourceManagerWithPlugins(
ImmutableMap.<String, ResourcePlugin>builder()
.put("resource-1", mockPlugin1)
.put("yarn.io/resource-1", mockPlugin1)
.put("resource-2", mock(ResourcePlugin.class))
.build()
);
nmContext.setResourcePluginManager(pluginManager);
}
private void setupMockPluginsWithGpuResourceInfo() throws YarnException {
GpuDeviceInformation gpuDeviceInformation = new GpuDeviceInformation();
gpuDeviceInformation.setDriverVersion("1.2.3");
gpuDeviceInformation.setGpus(Collections.singletonList(new PerGpuDeviceInformation()));
ResourcePlugin mockPlugin1 = mock(ResourcePlugin.class);
List<GpuDevice> totalGpuDevices = Arrays.asList(
new GpuDevice(1, 1), new GpuDevice(2, 2), new GpuDevice(3, 3));
List<AssignedGpuDevice> assignedGpuDevices = Arrays.asList(
new AssignedGpuDevice(2, 2, createContainerId(1)),
new AssignedGpuDevice(3, 3, createContainerId(2)));
NMResourceInfo nmResourceInfo1 = new NMGpuResourceInfo(gpuDeviceInformation,
totalGpuDevices,
assignedGpuDevices);
when(mockPlugin1.getNMResourceInfo()).thenReturn(nmResourceInfo1);
ResourcePluginManager pluginManager = createResourceManagerWithPlugins(
ImmutableMap.<String, ResourcePlugin>builder()
.put("resource-1", mockPlugin1)
.put("yarn.io/resource-1", mockPlugin1)
.put("resource-2", mock(ResourcePlugin.class))
.build()
);
nmContext.setResourcePluginManager(pluginManager);
}
private ResourcePluginManager createResourceManagerWithPlugins(
Map<String, ResourcePlugin> plugins) {
ResourcePluginManager pluginManager = mock(ResourcePluginManager.class);
when(pluginManager.getNameToPlugins()).thenReturn(plugins);
return pluginManager;
}
private void assertNMResourceInfoResponse(Response response, long value)
throws JSONException {
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
response.getMediaType().toString(),
"MediaType of the response is not the expected!");
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(value, json.getJSONObject("nmResourceInfo").getLong("resourceValue"),
"Unexpected value in the json response!");
}
private void assertEmptyNMResourceInfo(Response response) throws JSONException {
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
response.getMediaType().toString(),
"MediaType of the response is not the expected!");
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "Unexpected value in the json response!");
}
private Response getNMResourceResponse(WebTarget target, String resourceName) {
return target.path("ws").path("v1").path("node").path("resources")
.path(resourceName).request(MediaType.APPLICATION_JSON)
.get();
}
@BeforeEach
public void before() throws Exception {
testRemoteLogDir.mkdir();
testRootDir.mkdirs();
testLogDir.mkdir();
}
@AfterAll
static public void stop() {
FileUtil.fullyDelete(testRootDir);
FileUtil.fullyDelete(testLogDir);
FileUtil.fullyDelete(testRemoteLogDir);
}
public TestNMWebServices() {
}
@Test
public void testInvalidUri() throws JSONException, Exception {
WebTarget r = target();
String responseStr = "";
try {
Response response = r.path("ws").path("v1").path("node").path("bogus").request()
.accept(MediaType.APPLICATION_JSON).get();
throw new NotFoundException(response);
} catch (NotFoundException ue) {
Response response = ue.getResponse();
responseStr = response.readEntity(String.class);
assertEquals(Response.Status.NOT_FOUND.getStatusCode(),
response.getStatusInfo().getStatusCode());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
@Test
public void testInvalidAccept() throws JSONException, Exception {
WebTarget r = target();
String responseStr = "";
try {
Response response = r.path("ws").path("v1").path("node").request()
.accept(MediaType.TEXT_PLAIN).get();
throw new NotAcceptableException(response);
} catch (NotAcceptableException ue) {
Response response = ue.getResponse();
responseStr = response.readEntity(String.class);
assertEquals(Response.Status.NOT_ACCEPTABLE.getStatusCode(),
response.getStatusInfo().getStatusCode());
String reasonPhrase = response.getStatusInfo().getReasonPhrase();
assertEquals("Not Acceptable", reasonPhrase);
assertTrue(responseStr.contains("HTTP 406 Not Acceptable"));
}
}
@Test
public void testInvalidUri2() throws JSONException, Exception {
WebTarget r = target();
String responseStr = "";
try {
Response response = r.request().accept(MediaType.APPLICATION_JSON).get();
throw new NotFoundException(response);
} catch (NotFoundException ue) {
Response response = ue.getResponse();
assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
@Test
public void testNode() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Response response = r.path("ws").path("v1").path("node").request()
.accept(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
verifyNodeInfo(json);
}
@Test
public void testNodeSlash() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Response response = r.path("ws").path("v1").path("node/").request()
.accept(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
verifyNodeInfo(json);
}
// make sure default is json output
@Test
public void testNodeDefault() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Response response = r.path("ws").path("v1").path("node").request()
.accept(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
verifyNodeInfo(json);
}
@Test
public void testNodeInfo() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Response response = r.path("ws").path("v1").path("node").path("info").request()
.accept(MediaType.APPLICATION_JSON).get(Response.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
verifyNodeInfo(json);
}
@Test
public void testNodeInfoSlash() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Response response = r.path("ws").path("v1").path("node")
.path("info/").request().accept(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
verifyNodeInfo(json);
}
// make sure default is json output
@Test
public void testNodeInfoDefault() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Response response = r.path("ws").path("v1").path("node").path("info").request()
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
verifyNodeInfo(json);
}
@Test
public void testSingleNodesXML() throws JSONException, Exception {
WebTarget r = target();
Response response = r.path("ws").path("v1").path("node")
.path("info/").request(MediaType.APPLICATION_XML)
.get(Response.class);
assertEquals(MediaType.APPLICATION_XML+ ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
String xml = response.readEntity(String.class);
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("nodeInfo");
assertEquals(1, nodes.getLength(), "incorrect number of elements");
verifyNodesXML(nodes);
}
@Test
@Timeout(value = 5)
public void testContainerLogsWithNewAPI() throws Exception {
ContainerId containerId0 = BuilderUtils.newContainerId(0, 0, 0, 0);
WebTarget r0 = targetWithJsonObject();
r0 = r0.path("ws").path("v1").path("node").path("containers")
.path(containerId0.toString()).path("logs");
testContainerLogs(r0, containerId0, LOG_MESSAGE);
ContainerId containerId1 = BuilderUtils.newContainerId(0, 0, 0, 1);
WebTarget r1 = targetWithJsonObject();
r1 = r1.path("ws").path("v1").path("node").path("containers")
.path(containerId1.toString()).path("logs");
testContainerLogs(r1, containerId1, "");
}
@Test
@Timeout(value = 5)
public void testContainerLogsWithOldAPI() throws Exception {
final ContainerId containerId2 = BuilderUtils.newContainerId(1, 1, 0, 2);
WebTarget r = targetWithJsonObject();
r = r.path("ws").path("v1").path("node").path("containerlogs")
.path(containerId2.toString());
testContainerLogs(r, containerId2, LOG_MESSAGE);
}
@Test
public void testNMRedirect() {
ApplicationId noExistAppId = ApplicationId.newInstance(
System.currentTimeMillis(), 2000);
ApplicationAttemptId noExistAttemptId = ApplicationAttemptId.newInstance(
noExistAppId, 150);
ContainerId noExistContainerId = ContainerId.newContainerId(
noExistAttemptId, 250);
String fileName = "syslog";
WebTarget r = target();
// check the old api
URI requestURI = r.path("ws").path("v1").path("node")
.path("containerlogs").path(noExistContainerId.toString())
.path(fileName).queryParam("user.name", "user")
.queryParam(YarnWebServiceParams.NM_ID, "localhost:1111")
.getUri();
String redirectURL = getRedirectURL(requestURI.toString());
assertNotNull(redirectURL);
assertTrue(redirectURL.contains(LOGSERVICEWSADDR));
assertTrue(redirectURL.contains(noExistContainerId.toString()));
assertTrue(redirectURL.contains("/logs/" + fileName));
assertTrue(redirectURL.contains("user.name=" + "user"));
assertTrue(redirectURL.contains(
YarnWebServiceParams.REDIRECTED_FROM_NODE + "=true"));
assertFalse(redirectURL.contains(YarnWebServiceParams.NM_ID));
// check the new api
requestURI = r.path("ws").path("v1").path("node")
.path("containers").path(noExistContainerId.toString())
.path("logs").path(fileName).queryParam("user.name", "user")
.queryParam(YarnWebServiceParams.NM_ID, "localhost:1111")
.getUri();
redirectURL = getRedirectURL(requestURI.toString());
assertNotNull(redirectURL);
assertTrue(redirectURL.contains(LOGSERVICEWSADDR));
assertTrue(redirectURL.contains(noExistContainerId.toString()));
assertTrue(redirectURL.contains("/logs/" + fileName));
assertTrue(redirectURL.contains("user.name=" + "user"));
assertTrue(redirectURL.contains(
YarnWebServiceParams.REDIRECTED_FROM_NODE + "=true"));
assertFalse(redirectURL.contains(YarnWebServiceParams.NM_ID));
requestURI = r.path("ws").path("v1").path("node")
.path("containers").path(noExistContainerId.toString())
.path("logs").queryParam("user.name", "user")
.queryParam(YarnWebServiceParams.NM_ID, "localhost:1111")
.getUri();
redirectURL = getRedirectURL(requestURI.toString());
assertNotNull(redirectURL);
assertTrue(redirectURL.contains(LOGSERVICEWSADDR));
assertTrue(redirectURL.contains(noExistContainerId.toString()));
assertTrue(redirectURL.contains("user.name=" + "user"));
assertTrue(redirectURL.contains(
YarnWebServiceParams.REDIRECTED_FROM_NODE + "=true"));
assertFalse(redirectURL.contains(YarnWebServiceParams.NM_ID));
}
@Test
public void testGetNMResourceInfoSuccessful()
throws YarnException, JSONException {
setupMockPluginsWithNmResourceInfo();
WebTarget r = targetWithJsonObject();
Response response = getNMResourceResponse(r, "resource-1");
assertNMResourceInfoResponse(response, NM_RESOURCE_VALUE);
}
@Test
public void testGetNMResourceInfoEncodedIsSuccessful()
throws YarnException, JSONException {
setupMockPluginsWithNmResourceInfo();
//test encoded yarn.io/resource-1 path
WebTarget r = targetWithJsonObject();
Response response = getNMResourceResponse(r, "yarn.io%2Fresource-1");
assertNMResourceInfoResponse(response, NM_RESOURCE_VALUE);
}
@Test
public void testGetNMResourceInfoFailBecauseOfEmptyResourceInfo()
throws YarnException, JSONException {
setupMockPluginsWithNmResourceInfo();
WebTarget r = targetWithJsonObject();
Response response = getNMResourceResponse(r, "resource-2");
assertEmptyNMResourceInfo(response);
}
@Test
public void testGetNMResourceInfoWhenPluginIsUnknown()
throws YarnException, JSONException {
setupMockPluginsWithNmResourceInfo();
WebTarget r = targetWithJsonObject();
Response response = getNMResourceResponse(r, "resource-3");
assertEmptyNMResourceInfo(response);
}
private ContainerId createContainerId(int id) {
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
return ContainerId.newContainerId(appAttemptId, id);
}
@Test
public void testGetYarnGpuResourceInfo()
throws YarnException, JSONException {
setupMockPluginsWithGpuResourceInfo();
WebTarget r = targetWithJsonObject();
Response response = getNMResourceResponse(r, "resource-1");
assertEquals(MediaType.APPLICATION_JSON + ";" + JettyUtils.UTF_8,
response.getMediaType().toString(), "MediaType of the response is not the expected!");
JSONObject nmGpuResourceInfo = response.readEntity(JSONObject.class);
JSONObject json = nmGpuResourceInfo.getJSONObject("nmGpuResourceInfo");
assertEquals("1.2.3",
json.getJSONObject("gpuDeviceInformation").getString("driver_version"),
"Unexpected driverVersion in the json response!");
assertEquals(3, json.getJSONArray("totalGpuDevices").length(),
"Unexpected totalGpuDevices in the json response!");
assertEquals(2, json.getJSONArray("assignedGpuDevices").length(),
"Unexpected assignedGpuDevices in the json response!");
}
@SuppressWarnings("checkstyle:methodlength")
private void testContainerLogs(WebTarget target, ContainerId containerId,
String logMessage) throws Exception {
final String containerIdStr = containerId.toString();
final ApplicationAttemptId appAttemptId = containerId
.getApplicationAttemptId();
final ApplicationId appId = appAttemptId.getApplicationId();
final String appIdStr = appId.toString();
final String filename = "logfile1";
nmContext.getApplications().put(appId, new ApplicationImpl(null, "user",
appId, null, nmContext));
MockContainer container = new MockContainer(appAttemptId,
new AsyncDispatcher(), new Configuration(), "user", appId, 1);
container.setState(ContainerState.RUNNING);
nmContext.getContainers().put(containerId, container);
// write out log file
Path path = dirsHandler.getLogPathForWrite(
ContainerLaunch.getRelativeContainerLogDir(
appIdStr, containerIdStr) + "/" + filename, false);
File logFile = new File(path.toUri().getPath());
logFile.deleteOnExit();
if (logFile.getParentFile().exists()) {
FileUtils.deleteDirectory(logFile.getParentFile());
}
assertTrue(logFile.getParentFile().mkdirs(), "Failed to create log dir");
PrintWriter pw = new PrintWriter(logFile);
pw.print(logMessage);
pw.close();
// ask for it
Response response = target.path(filename)
.request(MediaType.TEXT_PLAIN).get(Response.class);
String responseText = response.readEntity(String.class);
String responseLogMessage = getLogContext(responseText);
assertEquals(logMessage, responseLogMessage);
int fullTextSize = responseLogMessage.getBytes().length;
// specify how many bytes we should get from logs
// specify a position number, it would get the first n bytes from
// container log
response = target.path(filename)
.queryParam("size", "5")
.request(MediaType.TEXT_PLAIN).get(Response.class);
responseText = response.readEntity(String.class);
responseLogMessage = getLogContext(responseText);
int truncatedLength = Math.min(5, logMessage.getBytes().length);
assertEquals(truncatedLength, responseLogMessage.getBytes().length);
assertEquals(new String(logMessage.getBytes(), 0, truncatedLength),
responseLogMessage);
assertTrue(fullTextSize >= responseLogMessage.getBytes().length);
// specify the bytes which is larger than the actual file size,
// we would get the full logs
response = target.path(filename)
.queryParam("size", "10000")
.request(MediaType.TEXT_PLAIN).get(Response.class);
responseText = response.readEntity(String.class);
responseLogMessage = getLogContext(responseText);
assertEquals(fullTextSize, responseLogMessage.getBytes().length);
assertEquals(logMessage, responseLogMessage);
// specify a negative number, it would get the last n bytes from
// container log
response = target.path(filename)
.queryParam("size", "-5")
.request(MediaType.TEXT_PLAIN).get(Response.class);
responseText = response.readEntity(String.class);
responseLogMessage = getLogContext(responseText);
assertEquals(truncatedLength, responseLogMessage.getBytes().length);
assertEquals(new String(logMessage.getBytes(),
logMessage.getBytes().length - truncatedLength, truncatedLength),
responseLogMessage);
assertTrue(fullTextSize >= responseLogMessage.getBytes().length);
response = target.path(filename)
.queryParam("size", "-10000")
.request(MediaType.TEXT_PLAIN).get(Response.class);
responseText = response.readEntity(String.class);
responseLogMessage = getLogContext(responseText);
assertEquals("text/plain;charset=utf-8", response.getMediaType().toString());
assertEquals(fullTextSize, responseLogMessage.getBytes().length);
assertEquals(logMessage, responseLogMessage);
// ask and download it
response = target.path(filename)
.queryParam("format", "octet-stream")
.request(MediaType.TEXT_PLAIN).get(Response.class);
responseText = response.readEntity(String.class);
responseLogMessage = getLogContext(responseText);
assertEquals(logMessage, responseLogMessage);
assertEquals(200, response.getStatus());
assertEquals("application/octet-stream;charset=utf-8",
response.getMediaType().toString());
// specify a invalid format value
response = target.path(filename)
.queryParam("format", "123")
.request(MediaType.TEXT_PLAIN).get(Response.class);
responseText = response.readEntity(String.class);
assertEquals("The valid values for the parameter : format are "
+ WebAppUtils.listSupportedLogContentType(), responseText);
assertEquals(400, response.getStatus());
// ask for file that doesn't exist and it will re-direct to
// the log server
URI requestURI = target.path("uhhh").getUri();
String redirectURL = getRedirectURL(requestURI.toString());
assertNotNull(redirectURL);
assertTrue(redirectURL.contains(LOGSERVICEWSADDR));
// Get container log files' name
WebTarget r1 = targetWithJsonObject();
response = r1.path("ws").path("v1").path("node")
.path("containers").path(containerIdStr)
.path("logs").request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(200, response.getStatus());
List<ContainerLogsInfo> responseList = readEntity(response);
assertEquals(1, responseList.size());
assertEquals(responseList.get(0).getLogType(),
ContainerLogAggregationType.LOCAL.toString());
List<ContainerLogFileInfo> logMeta = responseList.get(0)
.getContainerLogsInfo();
assertEquals(1, logMeta.size());
assertThat(logMeta.get(0).getFileName()).isEqualTo(filename);
// now create an aggregated log in Remote File system
File tempLogDir = new File("target",
TestNMWebServices.class.getSimpleName() + "temp-log-dir");
try {
String aggregatedLogFile = filename + "-aggregated";
String aggregatedLogMessage = "This is aggregated ;og.";
TestContainerLogsUtils.createContainerLogFileInRemoteFS(
nmContext.getConf(), FileSystem.get(nmContext.getConf()),
tempLogDir.getAbsolutePath(), appId,
Collections.singletonMap(containerId, aggregatedLogMessage),
nmContext.getNodeId(), aggregatedLogFile, "user", true);
r1 = targetWithJsonObject();
response = r1.path("ws").path("v1").path("node")
.path("containers").path(containerIdStr)
.path("logs").request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(200, response.getStatus());
responseList = readEntity(response);
assertThat(responseList).hasSize(2);
for (ContainerLogsInfo logInfo : responseList) {
if(logInfo.getLogType().equals(
ContainerLogAggregationType.AGGREGATED.toString())) {
List<ContainerLogFileInfo> meta = logInfo.getContainerLogsInfo();
assertEquals(1, meta.size());
assertThat(meta.get(0).getFileName()).isEqualTo(aggregatedLogFile);
} else {
assertEquals(logInfo.getLogType(),
ContainerLogAggregationType.LOCAL.toString());
List<ContainerLogFileInfo> meta = logInfo.getContainerLogsInfo();
assertEquals(1, meta.size());
assertThat(meta.get(0).getFileName()).isEqualTo(filename);
}
}
// Test whether we could get aggregated log as well
TestContainerLogsUtils.createContainerLogFileInRemoteFS(
nmContext.getConf(), FileSystem.get(nmContext.getConf()),
tempLogDir.getAbsolutePath(), appId,
Collections.singletonMap(containerId, aggregatedLogMessage),
nmContext.getNodeId(), filename, "user", true);
response = target.path(filename)
.request(MediaType.TEXT_PLAIN).get(Response.class);
responseText = response.readEntity(String.class);
assertTrue(responseText.contains("LogAggregationType: "
+ ContainerLogAggregationType.AGGREGATED));
assertTrue(responseText.contains(aggregatedLogMessage));
assertTrue(responseText.contains("LogAggregationType: "
+ ContainerLogAggregationType.LOCAL));
assertTrue(responseText.contains(logMessage));
} finally {
FileUtil.fullyDelete(tempLogDir);
}
// After container is completed, it is removed from nmContext
nmContext.getContainers().remove(containerId);
assertNull(nmContext.getContainers().get(containerId));
response = target.path(filename).request(MediaType.TEXT_PLAIN)
.get(Response.class);
responseText = response.readEntity((String.class));
assertTrue(responseText.contains(logMessage));
}
public void verifyNodesXML(NodeList nodes) {
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyNodeInfoGeneric(WebServicesTestUtils.getXmlString(element, "id"),
WebServicesTestUtils.getXmlString(element, "healthReport"),
WebServicesTestUtils.getXmlLong(element,
"totalVmemAllocatedContainersMB"),
WebServicesTestUtils.getXmlLong(element,
"totalPmemAllocatedContainersMB"),
WebServicesTestUtils.getXmlLong(element,
"totalVCoresAllocatedContainers"),
WebServicesTestUtils.getXmlBoolean(element, "vmemCheckEnabled"),
WebServicesTestUtils.getXmlBoolean(element, "pmemCheckEnabled"),
WebServicesTestUtils.getXmlLong(element, "lastNodeUpdateTime"),
WebServicesTestUtils.getXmlBoolean(element, "nodeHealthy"),
WebServicesTestUtils.getXmlString(element, "nodeHostName"),
WebServicesTestUtils.getXmlString(element, "hadoopVersionBuiltOn"),
WebServicesTestUtils.getXmlString(element, "hadoopBuildVersion"),
WebServicesTestUtils.getXmlString(element, "hadoopVersion"),
WebServicesTestUtils.getXmlString(element,
"nodeManagerVersionBuiltOn"), WebServicesTestUtils.getXmlString(
element, "nodeManagerBuildVersion"),
WebServicesTestUtils.getXmlString(element, "nodeManagerVersion"),
WebServicesTestUtils.getXmlString(element, "resourceTypes"));
}
}
public void verifyNodeInfo(JSONObject json) throws JSONException, Exception {
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("nodeInfo");
assertEquals(18, info.length(), "incorrect number of elements");
verifyNodeInfoGeneric(info.getString("id"), info.getString("healthReport"),
info.getLong("totalVmemAllocatedContainersMB"),
info.getLong("totalPmemAllocatedContainersMB"),
info.getLong("totalVCoresAllocatedContainers"),
info.getBoolean("vmemCheckEnabled"),
info.getBoolean("pmemCheckEnabled"),
info.getLong("lastNodeUpdateTime"), info.getBoolean("nodeHealthy"),
info.getString("nodeHostName"), info.getString("hadoopVersionBuiltOn"),
info.getString("hadoopBuildVersion"), info.getString("hadoopVersion"),
info.getString("nodeManagerVersionBuiltOn"),
info.getString("nodeManagerBuildVersion"),
info.getString("nodeManagerVersion"),
info.getString("resourceTypes")
);
}
/**
 * Verifies the individual node-info fields against the expected values for
 * the test NodeManager ("testhost.foo.com:8042", 15872 MB vmem, 16384 MB
 * pmem, 4000 vcores).
 *
 * <p>Note: the NodeManager version fields are checked against
 * {@link YarnVersionInfo} (the YARN build info) while the Hadoop version
 * fields are checked against {@link VersionInfo} (the common build info).
 * The last-update time is compared to the live value held by
 * {@code nmContext}.
 *
 * @param id node id, expected "testhost.foo.com:8042"
 * @param healthReport expected "Healthy"
 * @param totalVmemAllocatedContainersMB expected 15872
 * @param totalPmemAllocatedContainersMB expected 16384
 * @param totalVCoresAllocatedContainers expected 4000
 * @param vmemCheckEnabled expected true
 * @param pmemCheckEnabled expected true
 * @param lastNodeUpdateTime compared to the NM context's health report time
 * @param nodeHealthy expected true
 * @param nodeHostName expected "testhost.foo.com"
 * @param hadoopVersionBuiltOn Hadoop common build date
 * @param hadoopBuildVersion Hadoop common build version string
 * @param hadoopVersion Hadoop common version
 * @param nodeManagerVersionBuiltOn NM (YARN) build date
 * @param nodeManagerBuildVersion NM (YARN) build version string
 * @param nodeManagerVersion NM (YARN) version
 * @param resourceTypes expected "memory-mb (unit=Mi), vcores"
 */
public void verifyNodeInfoGeneric(String id, String healthReport,
    long totalVmemAllocatedContainersMB, long totalPmemAllocatedContainersMB,
    long totalVCoresAllocatedContainers,
    boolean vmemCheckEnabled, boolean pmemCheckEnabled,
    long lastNodeUpdateTime, Boolean nodeHealthy, String nodeHostName,
    String hadoopVersionBuiltOn, String hadoopBuildVersion,
    String hadoopVersion, String nodeManagerVersionBuiltOn,
    String nodeManagerBuildVersion, String nodeManagerVersion,
    String resourceTypes) {
  WebServicesTestUtils.checkStringMatch("id", "testhost.foo.com:8042", id);
  WebServicesTestUtils.checkStringMatch("healthReport", "Healthy",
      healthReport);
  assertEquals(15872,
      totalVmemAllocatedContainersMB, "totalVmemAllocatedContainersMB incorrect");
  assertEquals(16384,
      totalPmemAllocatedContainersMB, "totalPmemAllocatedContainersMB incorrect");
  assertEquals(4000,
      totalVCoresAllocatedContainers, "totalVCoresAllocatedContainers incorrect");
  assertTrue(vmemCheckEnabled, "vmemCheckEnabled incorrect");
  assertTrue(pmemCheckEnabled, "pmemCheckEnabled incorrect");
  assertEquals(lastNodeUpdateTime, nmContext
      .getNodeHealthStatus().getLastHealthReportTime(), "lastNodeUpdateTime incorrect");
  assertTrue(nodeHealthy, "nodeHealthy isn't true");
  WebServicesTestUtils.checkStringMatch("nodeHostName", "testhost.foo.com",
      nodeHostName);
  WebServicesTestUtils.checkStringMatch("hadoopVersionBuiltOn",
      VersionInfo.getDate(), hadoopVersionBuiltOn);
  WebServicesTestUtils.checkStringEqual("hadoopBuildVersion",
      VersionInfo.getBuildVersion(), hadoopBuildVersion);
  WebServicesTestUtils.checkStringMatch("hadoopVersion",
      VersionInfo.getVersion(), hadoopVersion);
  // These were historically labelled "resourceManagerVersion*" but the
  // callers pass the nodeManagerVersion* fields; the labels and parameter
  // names now reflect what is actually being checked.
  WebServicesTestUtils.checkStringMatch("nodeManagerVersionBuiltOn",
      YarnVersionInfo.getDate(), nodeManagerVersionBuiltOn);
  WebServicesTestUtils.checkStringEqual("nodeManagerBuildVersion",
      YarnVersionInfo.getBuildVersion(), nodeManagerBuildVersion);
  WebServicesTestUtils.checkStringMatch("nodeManagerVersion",
      YarnVersionInfo.getVersion(), nodeManagerVersion);
  assertEquals("memory-mb (unit=Mi), vcores", resourceTypes);
}
/**
 * Extracts the log body from a full aggregated-log response message: the
 * text between the "LogContents:\n" marker and the following
 * "End of LogType:" marker.
 *
 * <p>Made static since it uses no instance state. Fails fast with a clear
 * message when either marker is absent, instead of silently producing a
 * bogus substring (indexOf would return -1).
 *
 * @param fullMessage the complete log response text
 * @return the log content between the two markers
 * @throws IllegalArgumentException if either marker is missing
 */
private static String getLogContext(String fullMessage) {
  String prefix = "LogContents:\n";
  String postfix = "End of LogType:";
  int prefixStart = fullMessage.indexOf(prefix);
  int postfixIndex = fullMessage.indexOf(postfix);
  if (prefixStart < 0 || postfixIndex < 0) {
    throw new IllegalArgumentException(
        "Log markers not found in message: " + fullMessage);
  }
  return fullMessage.substring(prefixStart + prefix.length(), postfixIndex);
}
/**
 * Issues a single request to {@code url} without following redirects and
 * returns the redirect target, if any.
 *
 * @param url the URL to probe
 * @return the {@code Location} header value when the server answers with
 *         307 (temporary redirect); {@code null} when there is no redirect
 *         or the request fails for any reason
 */
private static String getRedirectURL(String url) {
  String redirectUrl = null;
  HttpURLConnection conn = null;
  try {
    conn = (HttpURLConnection) new URL(url).openConnection();
    // do not automatically follow the redirection
    // otherwise we get too many redirections exception
    conn.setInstanceFollowRedirects(false);
    if (conn.getResponseCode() == HttpServletResponse.SC_TEMPORARY_REDIRECT) {
      redirectUrl = conn.getHeaderField("Location");
    }
  } catch (Exception ignored) {
    // Best-effort: any failure is reported to the caller as "no redirect"
    // (null return), which is what the tests expect.
  } finally {
    if (conn != null) {
      // Release the underlying connection; the original leaked it.
      conn.disconnect();
    }
  }
  return redirectUrl;
}
/**
 * Reads the response body as JSON and collects every container-log entry it
 * contains into a flat list.
 *
 * <p>The body is a two-level map; each inner value is either a single log
 * entry (a JSON object) or an array of log entries. Both shapes are
 * flattened into {@link ContainerLogsInfo} instances via
 * {@code parseContainerLogsInfo}.
 *
 * @param response the web-service response to deserialize
 * @return all parsed log entries, in encounter order
 * @throws JSONException if the body does not match the expected structure
 */
private List<ContainerLogsInfo> readEntity(Response response) throws JSONException {
  JSONObject body = response.readEntity(JSONObject.class);
  List<ContainerLogsInfo> result = new ArrayList<>();
  for (Iterator<String> outer = body.keys(); outer.hasNext();) {
    JSONObject entry = body.getJSONObject(outer.next());
    for (Iterator<String> inner = entry.keys(); inner.hasNext();) {
      String innerKey = inner.next();
      Object value = entry.get(innerKey);
      if (value instanceof JSONObject) {
        // Single log entry.
        result.add(parseContainerLogsInfo(entry.getJSONObject(innerKey)));
      } else if (value instanceof JSONArray) {
        // Multiple log entries under one key.
        JSONArray entries = entry.getJSONArray(innerKey);
        for (int i = 0; i < entries.length(); i++) {
          result.add(parseContainerLogsInfo(entries.getJSONObject(i)));
        }
      }
    }
  }
  return result;
}
/**
 * Converts one JSON log entry into a {@link ContainerLogsInfo}.
 *
 * <p>Expects the keys "logAggregationType", "containerId", "nodeId" and a
 * nested "containerLogInfo" object carrying "fileName", "fileSize" and
 * "lastModifiedTime".
 *
 * @param json a single container-log JSON entry
 * @return the populated {@link ContainerLogsInfo}
 * @throws JSONException if a required key is missing or mistyped
 */
private ContainerLogsInfo parseContainerLogsInfo(JSONObject json)
    throws JSONException {
  String aggregationType = json.getString("logAggregationType");
  String containerId = json.getString("containerId");
  String nodeId = json.getString("nodeId");
  JSONObject logInfo = json.getJSONObject("containerLogInfo");
  ContainerLogMeta meta = new ContainerLogMeta(containerId, nodeId);
  meta.addLogMeta(
      logInfo.getString("fileName"),
      logInfo.getString("fileSize"),
      logInfo.getString("lastModifiedTime"));
  return new ContainerLogsInfo(meta, aggregationType);
}
} | JerseyBinder |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/factory/config/InstantiationAwareBeanPostProcessor.java | {
"start": 1317,
"end": 1478
} | interface ____ a special purpose interface, mainly for
* internal use within the framework. It is recommended to implement the plain
* {@link BeanPostProcessor} | is |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/common/runtime/src/main/java/org/jboss/resteasy/reactive/common/headers/LinkHeaders.java | {
"start": 235,
"end": 1360
} | class ____ {
private final Map<String, Link> linksByRelationship = new HashMap<>();
private final List<Link> links = new ArrayList<>();
public LinkHeaders(MultivaluedMap<String, Object> headers) {
List<Object> values = headers.get("Link");
if (values == null) {
return;
}
for (Object val : values) {
if (val instanceof Link) {
addLink((Link) val);
} else if (val instanceof String) {
for (String link : ((String) val).split(",")) {
addLink(Link.valueOf(link));
}
} else {
String str = HeaderUtil.headerToString(val);
addLink(Link.valueOf(str));
}
}
}
private void addLink(final Link link) {
links.add(link);
for (String rel : link.getRels()) {
linksByRelationship.put(rel, link);
}
}
public Link getLinkByRelationship(String rel) {
return linksByRelationship.get(rel);
}
public List<Link> getLinks() {
return links;
}
}
| LinkHeaders |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/sql/mysql/alterTable/MySqlAlterTableTest33.java | {
"start": 911,
"end": 2417
} | class ____ extends TestCase {
public void test_exchange_0() throws Exception {
String sql = "ALTER TABLE src.`part_tab` EXCHANGE PARTITION p9 WITH TABLE test_create_table without validation";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER TABLE src.`part_tab`\n" +
"\tEXCHANGE PARTITION p9 WITH TABLE test_create_table WITHOUT VALIDATION", SQLUtils.toMySqlString(stmt));
}
public void test_exchange_1() throws Exception {
String sql = "ALTER TABLE e EXCHANGE PARTITION p0 WITH TABLE e2 WITHOUT VALIDATION;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER TABLE e\n" +
"\tEXCHANGE PARTITION p0 WITH TABLE e2 WITHOUT VALIDATION;", SQLUtils.toMySqlString(stmt));
}
public void test_exchange_2() throws Exception {
String sql = "ALTER TABLE e EXCHANGE PARTITION p0 WITH TABLE e2 WITH VALIDATION;";
MySqlStatementParser parser = new MySqlStatementParser(sql);
SQLStatement stmt = parser.parseStatementList().get(0);
parser.match(Token.EOF);
assertEquals("ALTER TABLE e\n" +
"\tEXCHANGE PARTITION p0 WITH TABLE e2 WITH VALIDATION;", SQLUtils.toMySqlString(stmt));
}
}
| MySqlAlterTableTest33 |
java | quarkusio__quarkus | extensions/panache/hibernate-orm-rest-data-panache/deployment/src/test/java/io/quarkus/hibernate/orm/rest/data/panache/deployment/entity/AbstractItem.java | {
"start": 305,
"end": 521
} | class ____<IdType extends Number> extends AbstractEntity<IdType> {
public String name;
@ManyToOne(optional = false)
@JsonProperty(access = Access.WRITE_ONLY)
public Collection collection;
}
| AbstractItem |
java | spring-projects__spring-framework | spring-expression/src/main/java/org/springframework/expression/spel/ast/MethodReference.java | {
"start": 16092,
"end": 17863
} | class ____ implements ValueRef {
private final EvaluationContext evaluationContext;
private final @Nullable Object target;
private final @Nullable TypeDescriptor targetType;
private final @Nullable Object[] arguments;
public MethodValueRef(ExpressionState state, @Nullable Object[] arguments) {
this.evaluationContext = state.getEvaluationContext();
this.target = state.getActiveContextObject().getValue();
this.targetType = state.getActiveContextObject().getTypeDescriptor();
this.arguments = arguments;
}
@Override
public TypedValue getValue() {
TypedValue result = MethodReference.this.getValueInternal(
this.evaluationContext, this.target, this.targetType, this.arguments);
updateExitTypeDescriptor();
return result;
}
@Override
public void setValue(@Nullable Object newValue) {
throw new IllegalAccessError();
}
@Override
public boolean isWritable() {
return false;
}
}
private record MethodExecutorSearchResult(@Nullable MethodExecutor methodExecutor, @Nullable AccessException accessException) {
}
private record CachedMethodExecutor(MethodExecutor methodExecutor, @Nullable Class<?> staticClass,
@Nullable TypeDescriptor targetType, List<TypeDescriptor> argumentTypes) {
public boolean isSuitable(Object target, @Nullable TypeDescriptor targetType, List<TypeDescriptor> argumentTypes) {
return ((this.staticClass == null || this.staticClass == target) &&
ObjectUtils.nullSafeEquals(this.targetType, targetType) && this.argumentTypes.equals(argumentTypes));
}
public boolean hasProxyTarget() {
return (this.targetType != null && Proxy.isProxyClass(this.targetType.getType()));
}
public MethodExecutor get() {
return this.methodExecutor;
}
}
}
| MethodValueRef |
java | elastic__elasticsearch | x-pack/plugin/rank-rrf/src/main/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilder.java | {
"start": 2370,
"end": 15679
} | class ____ extends CompoundRetrieverBuilder<RRFRetrieverBuilder> {
public static final NodeFeature MULTI_FIELDS_QUERY_FORMAT_SUPPORT = new NodeFeature("rrf_retriever.multi_fields_query_format_support");
public static final NodeFeature WEIGHTED_SUPPORT = new NodeFeature("rrf_retriever.weighted_support");
public static final NodeFeature SIMPLIFIED_WEIGHTED_SUPPORT = new NodeFeature("rrf_retriever.simplified_weighted_support");
public static final NodeFeature MULTI_INDEX_SIMPLIFIED_FORMAT_SUPPORT = new NodeFeature(
"rrf_retriever.multi_index_simplified_format_support"
);
public static final String NAME = "rrf";
public static final ParseField RETRIEVERS_FIELD = new ParseField("retrievers");
public static final ParseField RANK_CONSTANT_FIELD = new ParseField("rank_constant");
public static final ParseField FIELDS_FIELD = new ParseField("fields");
public static final ParseField QUERY_FIELD = new ParseField("query");
public static final int DEFAULT_RANK_CONSTANT = 60;
private final float[] weights;
@SuppressWarnings("unchecked")
static final ConstructingObjectParser<RRFRetrieverBuilder, RetrieverParserContext> PARSER = new ConstructingObjectParser<>(
NAME,
false,
args -> {
List<RRFRetrieverComponent> retrieverComponents = args[0] == null ? List.of() : (List<RRFRetrieverComponent>) args[0];
List<String> fields = (List<String>) args[1];
String query = (String) args[2];
int rankWindowSize = args[3] == null ? RankBuilder.DEFAULT_RANK_WINDOW_SIZE : (int) args[3];
int rankConstant = args[4] == null ? DEFAULT_RANK_CONSTANT : (int) args[4];
int n = retrieverComponents.size();
List<RetrieverSource> innerRetrievers = new ArrayList<>(n);
float[] weights = new float[n];
for (int i = 0; i < n; i++) {
RRFRetrieverComponent component = retrieverComponents.get(i);
innerRetrievers.add(RetrieverSource.from(component.retriever()));
weights[i] = component.weight();
}
return new RRFRetrieverBuilder(innerRetrievers, fields, query, rankWindowSize, rankConstant, weights);
}
);
static {
PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), RRFRetrieverComponent::fromXContent, RETRIEVERS_FIELD);
PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), FIELDS_FIELD);
PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), QUERY_FIELD);
PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), RANK_WINDOW_SIZE_FIELD);
PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), RANK_CONSTANT_FIELD);
RetrieverBuilder.declareBaseParserFields(PARSER);
}
public static RRFRetrieverBuilder fromXContent(XContentParser parser, RetrieverParserContext context) throws IOException {
if (RRFRankPlugin.RANK_RRF_FEATURE.check(XPackPlugin.getSharedLicenseState()) == false) {
throw LicenseUtils.newComplianceException("Reciprocal Rank Fusion (RRF)");
}
return PARSER.apply(parser, context);
}
private final List<String> fields;
private final String query;
private final int rankConstant;
public RRFRetrieverBuilder(List<RetrieverSource> childRetrievers, int rankWindowSize, int rankConstant) {
this(childRetrievers, null, null, rankWindowSize, rankConstant, createDefaultWeights(childRetrievers));
}
private static float[] createDefaultWeights(List<?> retrievers) {
int size = retrievers == null ? 0 : retrievers.size();
float[] defaultWeights = new float[size];
Arrays.fill(defaultWeights, DEFAULT_WEIGHT);
return defaultWeights;
}
public RRFRetrieverBuilder(
List<RetrieverSource> childRetrievers,
List<String> fields,
String query,
int rankWindowSize,
int rankConstant,
float[] weights
) {
// Use a mutable list for childRetrievers so that we can use addChild
super(childRetrievers == null ? new ArrayList<>() : new ArrayList<>(childRetrievers), rankWindowSize);
this.fields = fields == null ? null : List.copyOf(fields);
this.query = query;
this.rankConstant = rankConstant;
Objects.requireNonNull(weights, "weights must not be null");
if (weights.length != innerRetrievers.size()) {
throw new IllegalArgumentException(
"weights array length [" + weights.length + "] must match retrievers count [" + innerRetrievers.size() + "]"
);
}
this.weights = weights;
}
public int rankConstant() {
return rankConstant;
}
public float[] weights() {
return weights;
}
@Override
public String getName() {
return NAME;
}
@Override
public ActionRequestValidationException validate(
SearchSourceBuilder source,
ActionRequestValidationException validationException,
boolean isScroll,
boolean allowPartialSearchResults
) {
validationException = super.validate(source, validationException, isScroll, allowPartialSearchResults);
return MultiFieldsInnerRetrieverUtils.validateParams(
innerRetrievers,
fields,
query,
getName(),
RETRIEVERS_FIELD.getPreferredName(),
FIELDS_FIELD.getPreferredName(),
QUERY_FIELD.getPreferredName(),
validationException
);
}
@Override
protected RRFRetrieverBuilder clone(List<RetrieverSource> newRetrievers, List<QueryBuilder> newPreFilterQueryBuilders) {
RRFRetrieverBuilder clone = new RRFRetrieverBuilder(
newRetrievers,
this.fields,
this.query,
this.rankWindowSize,
this.rankConstant,
this.weights
);
clone.preFilterQueryBuilders = newPreFilterQueryBuilders;
clone.retrieverName = retrieverName;
return clone;
}
@Override
protected RRFRankDoc[] combineInnerRetrieverResults(List<ScoreDoc[]> rankResults, boolean explain) {
// combine the disjointed sets of TopDocs into a single set or RRFRankDocs
// each RRFRankDoc will have both the position and score for each query where
// it was within the result set for that query
// if a doc isn't part of a result set its position will be NO_RANK [0] and
// its score is [0f]
int queries = rankResults.size();
Map<RankDoc.RankKey, RRFRankDoc> docsToRankResults = Maps.newMapWithExpectedSize(rankWindowSize);
int index = 0;
for (var rrfRankResult : rankResults) {
int rank = 1;
for (ScoreDoc scoreDoc : rrfRankResult) {
final int findex = index;
final int frank = rank;
docsToRankResults.compute(new RankDoc.RankKey(scoreDoc.doc, scoreDoc.shardIndex), (key, value) -> {
if (value == null) {
if (explain) {
value = new RRFRankDoc(scoreDoc.doc, scoreDoc.shardIndex, queries, rankConstant);
} else {
value = new RRFRankDoc(scoreDoc.doc, scoreDoc.shardIndex);
}
}
// calculate the current rrf score for this document
// later used to sort and covert to a rank
value.score += this.weights[findex] * (1.0f / (rankConstant + frank));
if (explain && value.positions != null && value.scores != null) {
// record the position for each query
// for explain and debugging
value.positions[findex] = frank - 1;
// record the score for each query
// used to later re-rank on the coordinator
value.scores[findex] = scoreDoc.score;
}
return value;
});
++rank;
}
++index;
}
// sort the results based on rrf score, tiebreaker based on smaller doc id
RRFRankDoc[] sortedResults = docsToRankResults.values().toArray(RRFRankDoc[]::new);
Arrays.sort(sortedResults);
// trim the results if needed, otherwise each shard will always return `rank_window_sieze` results.
RRFRankDoc[] topResults = new RRFRankDoc[Math.min(rankWindowSize, sortedResults.length)];
for (int rank = 0; rank < topResults.length; ++rank) {
topResults[rank] = sortedResults[rank];
topResults[rank].rank = rank + 1;
}
return topResults;
}
@Override
protected RetrieverBuilder doRewrite(QueryRewriteContext ctx) {
RetrieverBuilder rewritten = this;
ResolvedIndices resolvedIndices = ctx.getResolvedIndices();
if (resolvedIndices != null && query != null) {
// TODO: Refactor duplicate code
// Using the multi-fields query format
var localIndicesMetadata = resolvedIndices.getConcreteLocalIndicesMetadata();
if (resolvedIndices.getRemoteClusterIndices().isEmpty() == false) {
throw new IllegalArgumentException(
"[" + NAME + "] cannot specify [" + QUERY_FIELD.getPreferredName() + "] when querying remote indices"
);
}
List<RetrieverSource> fieldsInnerRetrievers = MultiFieldsInnerRetrieverUtils.generateInnerRetrievers(
fields,
query,
localIndicesMetadata.values(),
r -> createRRFFromWeightedRetrievers(r, rankWindowSize, rankConstant),
w -> validateNonNegativeWeight(w)
).stream().map(RetrieverSource::from).toList();
if (fieldsInnerRetrievers.isEmpty() == false) {
// TODO: This is a incomplete solution as it does not address other incomplete copy issues
// (such as dropping the retriever name and min score)
float[] weights = createDefaultWeights(fieldsInnerRetrievers);
rewritten = new RRFRetrieverBuilder(fieldsInnerRetrievers, null, null, rankWindowSize, rankConstant, weights);
rewritten.getPreFilterQueryBuilders().addAll(preFilterQueryBuilders);
} else {
// Inner retriever list can be empty when using an index wildcard pattern that doesn't match any indices
rewritten = new StandardRetrieverBuilder(new MatchNoneQueryBuilder());
}
}
return rewritten;
}
@Override
public void doToXContent(XContentBuilder builder, Params params) throws IOException {
if (innerRetrievers.isEmpty() == false) {
builder.startArray(RETRIEVERS_FIELD.getPreferredName());
for (int i = 0; i < innerRetrievers.size(); i++) {
RRFRetrieverComponent component = new RRFRetrieverComponent(innerRetrievers.get(i).retriever(), weights[i]);
component.toXContent(builder, params);
}
builder.endArray();
}
if (fields != null) {
builder.startArray(FIELDS_FIELD.getPreferredName());
for (String field : fields) {
builder.value(field);
}
builder.endArray();
}
if (query != null) {
builder.field(QUERY_FIELD.getPreferredName(), query);
}
builder.field(RANK_WINDOW_SIZE_FIELD.getPreferredName(), rankWindowSize);
builder.field(RANK_CONSTANT_FIELD.getPreferredName(), rankConstant);
}
// ---- FOR TESTING XCONTENT PARSING ----
@Override
public boolean doEquals(Object o) {
RRFRetrieverBuilder that = (RRFRetrieverBuilder) o;
return super.doEquals(o)
&& Objects.equals(fields, that.fields)
&& Objects.equals(query, that.query)
&& rankConstant == that.rankConstant
&& Arrays.equals(weights, that.weights);
}
@Override
public int doHashCode() {
return Objects.hash(super.doHashCode(), fields, query, rankConstant, Arrays.hashCode(weights));
}
private static RRFRetrieverBuilder createRRFFromWeightedRetrievers(
List<WeightedRetrieverSource> r,
int rankWindowSize,
int rankConstant
) {
int size = r.size();
List<RetrieverSource> retrievers = new ArrayList<>(size);
float[] weights = new float[size];
for (int i = 0; i < size; i++) {
var retriever = r.get(i);
retrievers.add(retriever.retrieverSource());
weights[i] = retriever.weight();
}
return new RRFRetrieverBuilder(retrievers, null, null, rankWindowSize, rankConstant, weights);
}
private static void validateNonNegativeWeight(float w) {
if (w < 0) {
throw new IllegalArgumentException("[" + NAME + "] per-field weights must be non-negative");
}
}
}
| RRFRetrieverBuilder |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/writing/ComponentRequirementExpressions.java | {
"start": 2094,
"end": 5461
} | class ____ {
// TODO(dpb,ronshapiro): refactor this and ComponentRequestRepresentations into a
// HierarchicalComponentMap<K, V>, or perhaps this use a flattened ImmutableMap, built from its
// parents? If so, maybe make ComponentRequirementExpression.Factory create it.
private final Optional<ComponentRequirementExpressions> parent;
private final Map<ComponentRequirement, ComponentRequirementExpression>
componentRequirementExpressions = new HashMap<>();
private final BindingGraph graph;
private final ShardImplementation componentShard;
private final CompilerOptions compilerOptions;
@Inject
ComponentRequirementExpressions(
@ParentComponent Optional<ComponentRequirementExpressions> parent,
BindingGraph graph,
ComponentImplementation componentImplementation,
CompilerOptions compilerOptions) {
this.parent = parent;
this.graph = graph;
// All component requirements go in the componentShard.
this.componentShard = componentImplementation.getComponentShard();
this.compilerOptions = compilerOptions;
}
/**
* Returns an expression for the {@code componentRequirement} to be used when implementing a
* component method. This may add a field or method to the component in order to reference the
* component requirement outside of the {@code initialize()} methods.
*/
XCodeBlock getExpression(ComponentRequirement componentRequirement, XClassName requestingClass) {
return getExpression(componentRequirement).getExpression(requestingClass);
}
private ComponentRequirementExpression getExpression(ComponentRequirement componentRequirement) {
if (graph.componentRequirements().contains(componentRequirement)) {
return componentRequirementExpressions.computeIfAbsent(
componentRequirement, this::createExpression);
}
if (parent.isPresent()) {
return parent.get().getExpression(componentRequirement);
}
throw new IllegalStateException(
"no component requirement expression found for " + componentRequirement);
}
/**
* Returns an expression for the {@code componentRequirement} to be used only within {@code
* initialize()} methods, where the component constructor parameters are available.
*
* <p>When accessing this expression from a subcomponent, this may cause a field to be initialized
* or a method to be added in the component that owns this {@link ComponentRequirement}.
*/
XCodeBlock getExpressionDuringInitialization(
ComponentRequirement componentRequirement, XClassName requestingClass) {
return getExpression(componentRequirement).getExpressionDuringInitialization(requestingClass);
}
/** Returns a field for a {@link ComponentRequirement}. */
private ComponentRequirementExpression createExpression(ComponentRequirement requirement) {
if (componentShard.componentDescriptor().hasCreator()
|| (graph.factoryMethod().isPresent()
&& graph.factoryMethodParameters().containsKey(requirement))) {
return new ComponentParameterField(requirement);
} else if (requirement.kind().isModule()) {
return new InstantiableModuleField(requirement);
} else {
throw new AssertionError(
String.format("Can't create %s in %s", requirement, componentShard.name()));
}
}
private abstract | ComponentRequirementExpressions |
java | alibaba__nacos | config/src/test/java/com/alibaba/nacos/config/server/utils/SimpleCacheTest.java | {
"start": 861,
"end": 1281
} | class ____ {
@Test
void testPutAndGet() throws InterruptedException {
SimpleCache<String> simpleCache = new SimpleCache<>();
simpleCache.put("key", "value", 1000);
assertEquals("value", simpleCache.get("key"));
//time expire
TimeUnit.MILLISECONDS.sleep(1100);
Object value = simpleCache.get("key");
assertNull(value);
}
}
| SimpleCacheTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/aop/aspectj/autoproxy/AspectJAutoProxyCreatorTests.java | {
"start": 20223,
"end": 20391
} | class ____ extends AbstractProxyTargetClassConfig {
}
@Configuration(proxyBeanMethods = false)
@EnableAspectJAutoProxy(proxyTargetClass = true)
| ProxyTargetClassFalseConfig |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/BackupState.java | {
"start": 1316,
"end": 2356
} | class ____ extends HAState {
public BackupState() {
super(HAServiceState.STANDBY);
}
@Override // HAState
public void checkOperation(HAContext context, OperationCategory op)
throws StandbyException {
context.checkOperation(op);
}
@Override // HAState
public boolean shouldPopulateReplQueues() {
return false;
}
@Override // HAState
public void enterState(HAContext context) throws ServiceFailedException {
try {
context.startActiveServices();
} catch (IOException e) {
throw new ServiceFailedException("Failed to start backup services", e);
}
}
@Override // HAState
public void exitState(HAContext context) throws ServiceFailedException {
try {
context.stopActiveServices();
} catch (IOException e) {
throw new ServiceFailedException("Failed to stop backup services", e);
}
}
@Override // HAState
public void prepareToExitState(HAContext context) throws ServiceFailedException {
context.prepareToStopStandbyServices();
}
}
| BackupState |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/sql/results/graph/BiDirectionalFetch.java | {
"start": 233,
"end": 522
} | interface ____ Fetches that are actually references to
* another fetch based on "normalized navigable path"
*
* The following query is used throughout the javadocs for these impls
* to help describe what it going on and why certain methods do certain things.
*
*
* ```
* @Entity
* | for |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/ObjectToStringTest.java | {
"start": 3307,
"end": 5125
} | class ____ {
@Override
public String toString() {
return "matata";
}
}
public void log(Object o) {
System.out.println(o.toString());
}
void directToStringCalls() {
NonFinalObjectClassWithoutToString nonFinalObjectClassWithoutToString =
new NonFinalObjectClassWithoutToString();
System.out.println(nonFinalObjectClassWithoutToString.toString());
FinalObjectClassWithToString finalObjectClassWithToString = new FinalObjectClassWithToString();
System.out.println(finalObjectClassWithToString.toString());
NonFinalObjectClassWithToString nonFinalObjectClassWithToString =
new NonFinalObjectClassWithToString();
System.out.println(nonFinalObjectClassWithToString.toString());
}
void callsTologMethod() {
FinalObjectClassWithoutToString finalObjectClassWithoutToString =
new FinalObjectClassWithoutToString();
log(finalObjectClassWithoutToString);
NonFinalObjectClassWithoutToString nonFinalObjectClassWithoutToString =
new NonFinalObjectClassWithoutToString();
log(nonFinalObjectClassWithoutToString);
FinalObjectClassWithToString finalObjectClassWithToString = new FinalObjectClassWithToString();
log(finalObjectClassWithToString);
NonFinalObjectClassWithToString nonFinalObjectClassWithToString =
new NonFinalObjectClassWithToString();
log(nonFinalObjectClassWithToString);
}
public void overridePresentInAbstractClassInHierarchy(Duration durationArg) {
String unusedString = Duration.standardSeconds(86400).toString();
System.out.println("test joda string " + Duration.standardSeconds(86400));
unusedString = durationArg.toString();
System.out.println("test joda string " + durationArg);
}
}\
""")
.doTest();
}
/** A | NonFinalObjectClassWithToString |
java | netty__netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2StreamChannelOption.java | {
"start": 898,
"end": 1847
} | class ____<T> extends ChannelOption<T> {
private Http2StreamChannelOption(String name) {
super(name);
}
/**
* When set to {@code true} {@link Http2WindowUpdateFrame}s will be automatically be generated and written for
* {@link Http2StreamChannel}s as soon as frames are passed to the user via
* {@link io.netty.channel.ChannelPipeline#fireChannelRead(Object)}. If the user wants more control on when a
* window update is send its possible to set it to {@code false}. In this case the user is responsible to
* generate the correct {@link Http2WindowUpdateFrame}s and eventually write these to the channel.
* <p>
* See <a href="https://datatracker.ietf.org/doc/html/rfc9113#section-5.2">RFC9113 5.2. Flow Control</a> for more
* details.
*/
public static final ChannelOption<Boolean> AUTO_STREAM_FLOW_CONTROL =
valueOf("AUTO_STREAM_FLOW_CONTROL");
}
| Http2StreamChannelOption |
java | elastic__elasticsearch | test/framework/src/main/java/org/elasticsearch/ingest/IngestPipelineTestUtils.java | {
"start": 2066,
"end": 11177
} | class ____ {
private static final Logger logger = LogManager.getLogger(IngestPipelineTestUtils.class);
private IngestPipelineTestUtils() { /* no instances */ }
/**
* @param id The pipeline id.
* @param source The body of the {@link PutPipelineRequest} as a JSON-formatted {@link BytesReference}.
* @return a new {@link PutPipelineRequest} with the given {@code id} and body.
*/
public static PutPipelineRequest putJsonPipelineRequest(String id, BytesReference source) {
return new PutPipelineRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, id, source, XContentType.JSON);
}
/**
* @param id The pipeline id.
* @param jsonString The body of the {@link PutPipelineRequest} as a JSON-formatted {@link String}.
* @return a new {@link PutPipelineRequest} with the given {@code id} and body.
*/
public static PutPipelineRequest putJsonPipelineRequest(String id, String jsonString) {
return putJsonPipelineRequest(id, new BytesArray(jsonString));
}
/**
* Create an ingest pipeline with the given ID and body, using the given {@link ElasticsearchClient}.
*
* @param client The client to use to execute the {@link PutPipelineTransportAction}.
* @param id The pipeline id.
* @param source The body of the {@link PutPipelineRequest} as a JSON-formatted {@link BytesReference}.
*/
public static void putJsonPipeline(ElasticsearchClient client, String id, BytesReference source) {
assertAcked(safeGet(client.execute(PutPipelineTransportAction.TYPE, putJsonPipelineRequest(id, source))));
}
/**
* Create an ingest pipeline with the given ID and body, using the given {@link ElasticsearchClient}.
*
* @param client The client to use to execute the {@link PutPipelineTransportAction}.
* @param id The pipeline id.
* @param jsonString The body of the {@link PutPipelineRequest} as a JSON-formatted {@link String}.
*/
public static void putJsonPipeline(ElasticsearchClient client, String id, String jsonString) {
putJsonPipeline(client, id, new BytesArray(jsonString));
}
/**
* Create an ingest pipeline with the given ID and body, using the given {@link ElasticsearchClient}.
*
* @param client The client to use to execute the {@link PutPipelineTransportAction}.
* @param id The pipeline id.
* @param toXContent The body of the {@link PutPipelineRequest} as a {@link ToXContentFragment}.
*/
public static void putJsonPipeline(ElasticsearchClient client, String id, ToXContentFragment toXContent) throws IOException {
try (var xContentBuilder = jsonBuilder()) {
xContentBuilder.startObject();
toXContent.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS);
xContentBuilder.endObject();
putJsonPipeline(client, id, BytesReference.bytes(xContentBuilder));
}
}
/**
* Attempt to delete the ingest pipeline with the given {@code id}, using the given {@link ElasticsearchClient}, and logging (but
* otherwise ignoring) the result.
*/
public static void deletePipelinesIgnoringExceptions(ElasticsearchClient client, Iterable<String> ids) {
for (final var id : ids) {
ESTestCase.safeAwait(
l -> client.execute(
DeletePipelineTransportAction.TYPE,
new DeletePipelineRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT, id),
new ActionListener<>() {
@Override
public void onResponse(AcknowledgedResponse acknowledgedResponse) {
logger.info("delete pipeline [{}] success [acknowledged={}]", id, acknowledgedResponse.isAcknowledged());
l.onResponse(null);
}
@Override
public void onFailure(Exception e) {
logger.warn(Strings.format("delete pipeline [%s] failure", id), e);
l.onResponse(null);
}
}
)
);
}
}
/**
* Construct a new {@link SimulatePipelineRequest} whose content is the given JSON document, represented as a {@link String}.
*/
public static SimulatePipelineRequest jsonSimulatePipelineRequest(String jsonString) {
return jsonSimulatePipelineRequest(new BytesArray(jsonString));
}
/**
* Construct a new {@link SimulatePipelineRequest} whose content is the given JSON document, represented as a {@link BytesReference}.
*/
public static SimulatePipelineRequest jsonSimulatePipelineRequest(BytesReference jsonBytes) {
return new SimulatePipelineRequest(ReleasableBytesReference.wrap(jsonBytes), XContentType.JSON);
}
/**
* Executes an action against an ingest document using a random access pattern. A synthetic pipeline instance with the provided
* access pattern is created and executed against the ingest document, thus updating its internal access pattern.
* @param document The document to operate on
* @param action A consumer which takes the updated ingest document during execution
* @throws Exception Any exception thrown from the provided consumer
*/
public static void doWithRandomAccessPattern(IngestDocument document, Consumer<IngestDocument> action) throws Exception {
doWithAccessPattern(randomFrom(IngestPipelineFieldAccessPattern.values()), document, action);
}
/**
* Executes an action against an ingest document using a random access pattern. A synthetic pipeline instance with the provided
* access pattern is created and executed against the ingest document, thus updating its internal access pattern.
* @param accessPattern The access pattern to use when executing the block of code
* @param document The document to operate on
* @param action A consumer which takes the updated ingest document during execution
* @throws Exception Any exception thrown from the provided consumer
*/
public static void doWithAccessPattern(
IngestPipelineFieldAccessPattern accessPattern,
IngestDocument document,
Consumer<IngestDocument> action
) throws Exception {
runWithAccessPattern(accessPattern, document, new TestProcessor(action));
}
/**
* Executes a processor against an ingest document using a random access pattern. A synthetic pipeline instance with the provided
* access pattern is created and executed against the ingest document, thus updating its internal access pattern.
* @param document The document to operate on
* @param processor A processor which takes the updated ingest document during execution
* @return the resulting ingest document instance
* @throws Exception Any exception thrown from the provided consumer
*/
public static IngestDocument runWithRandomAccessPattern(IngestDocument document, Processor processor) throws Exception {
return runWithAccessPattern(randomFrom(IngestPipelineFieldAccessPattern.values()), document, processor);
}
/**
* Executes a processor against an ingest document using the provided access pattern. A synthetic pipeline instance with the provided
* access pattern is created and executed against the ingest document, thus updating its internal access pattern.
* @param accessPattern The access pattern to use when executing the block of code
* @param document The document to operate on
* @param processor A processor which takes the updated ingest document during execution
* @return the resulting ingest document instance
* @throws Exception Any exception thrown from the provided consumer
*/
public static IngestDocument runWithAccessPattern(
IngestPipelineFieldAccessPattern accessPattern,
IngestDocument document,
Processor processor
) throws Exception {
IngestDocument[] ingestDocumentHolder = new IngestDocument[1];
Exception[] exceptionHolder = new Exception[1];
document.executePipeline(
new Pipeline(
randomAlphanumericOfLength(10),
null,
null,
null,
new CompoundProcessor(processor),
accessPattern,
null,
null,
null
),
(result, ex) -> {
ingestDocumentHolder[0] = result;
exceptionHolder[0] = ex;
}
);
Exception exception = exceptionHolder[0];
if (exception != null) {
if (exception instanceof IngestProcessorException ingestProcessorException) {
exception = ((Exception) ingestProcessorException.getCause());
}
throw exception;
}
return ingestDocumentHolder[0];
}
}
| IngestPipelineTestUtils |
java | apache__commons-lang | src/test/java/org/apache/commons/lang3/ClassUtilsTest.java | {
"start": 18543,
"end": 19481
} | class ____ {
// empty
}
assertEquals("org.apache.commons.lang3.ClassUtilsTest$3", ClassUtils.getName(new Object() {
// empty
}.getClass()));
assertEquals("org.apache.commons.lang3.ClassUtilsTest$3Named", ClassUtils.getName(Named.class));
assertEquals("org.apache.commons.lang3.ClassUtilsTest$Inner", ClassUtils.getName(Inner.class));
assertEquals(OBJECT_CANONICAL_NAME, ClassUtils.getName(new Object()));
}
@Test
void test_getName_Object() {
assertEquals("org.apache.commons.lang3.ClassUtils", ClassUtils.getName(new ClassUtils(), "<null>"));
assertEquals("org.apache.commons.lang3.ClassUtilsTest$Inner", ClassUtils.getName(new Inner(), "<null>"));
assertEquals("java.lang.String", ClassUtils.getName("hello", "<null>"));
assertEquals("<null>", ClassUtils.getName(null, "<null>"));
// Inner types
final | Named |
java | quarkusio__quarkus | extensions/oidc-redis-token-state-manager/deployment/src/main/java/io/quarkus/oidc/redis/token/state/manager/deployment/OidcRedisTokenStateManagerBuildConfig.java | {
"start": 406,
"end": 993
} | interface ____ {
/**
* Enables this extension.
* Set to 'false' if this extension should be disabled.
*/
@WithDefault("true")
boolean enabled();
/**
* Selects Redis client used to store the OIDC token state.
* The default Redis client is used if this property is not configured.
* Used Redis datasource must only be accessible by trusted parties,
* because Quarkus will not encrypt tokens before storing them.
*/
@WithDefault(RedisConfig.DEFAULT_CLIENT_NAME)
String redisClientName();
}
| OidcRedisTokenStateManagerBuildConfig |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/share/persister/PersisterStateBatch.java | {
"start": 1150,
"end": 4799
} | class ____ implements Comparable<PersisterStateBatch> {
private final long firstOffset;
private final long lastOffset;
private final short deliveryCount;
private final byte deliveryState;
public PersisterStateBatch(long firstOffset, long lastOffset, byte deliveryState, short deliveryCount) {
this.firstOffset = firstOffset;
this.lastOffset = lastOffset;
this.deliveryState = deliveryState;
this.deliveryCount = deliveryCount;
}
public long firstOffset() {
return firstOffset;
}
public long lastOffset() {
return lastOffset;
}
public byte deliveryState() {
return deliveryState;
}
public short deliveryCount() {
return deliveryCount;
}
public static PersisterStateBatch from(ReadShareGroupStateResponseData.StateBatch batch) {
return new PersisterStateBatch(
batch.firstOffset(),
batch.lastOffset(),
batch.deliveryState(),
batch.deliveryCount());
}
public static PersisterStateBatch from(WriteShareGroupStateRequestData.StateBatch batch) {
return new PersisterStateBatch(
batch.firstOffset(),
batch.lastOffset(),
batch.deliveryState(),
batch.deliveryCount());
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PersisterStateBatch that = (PersisterStateBatch) o;
return firstOffset == that.firstOffset &&
lastOffset == that.lastOffset &&
deliveryCount == that.deliveryCount &&
deliveryState == that.deliveryState;
}
@Override
public int hashCode() {
return Objects.hash(firstOffset, lastOffset, deliveryCount, deliveryState);
}
@Override
public String toString() {
return "PersisterStateBatch(" +
"firstOffset=" + firstOffset + "," +
"lastOffset=" + lastOffset + "," +
"deliveryCount=" + deliveryCount + "," +
"deliveryState=" + deliveryState +
")";
}
/**
* Compares 2 PersisterStateBatches in various dimensions.
* The priority of the dimensions are:
* - firstOffset
* - lastOffset
* - deliveryCount
* - deliveryState
* <p>
* Does not check all dimensions in every case. The first dimension
* check resulting in non-zero comparison result is returned.
* <p>
* In case the 2 objects are equal, all 4 dimension comparisons must
* be 0.
* <p>
* This method could be used for storing PersisterStateBatch objects
* in containers which allow a Comparator argument or various sort algorithms
* in the java library.
*
* @param other - object representing another PersisterStateBatch
* @return -INT, 0, +INT based on "this" being smaller, equal or larger than the argument.
*/
@Override
public int compareTo(PersisterStateBatch other) {
int deltaFirst = Long.compare(this.firstOffset(), other.firstOffset());
if (deltaFirst == 0) {
int deltaLast = Long.compare(this.lastOffset(), other.lastOffset());
if (deltaLast == 0) {
int deltaCount = this.deliveryCount() - other.deliveryCount();
if (deltaCount == 0) {
return Byte.compare(this.deliveryState(), other.deliveryState());
}
return deltaCount;
}
return deltaLast;
}
return deltaFirst;
}
}
| PersisterStateBatch |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/PerNodeTimelineCollectorsAuxService.java | {
"start": 9944,
"end": 10504
} | class ____ implements Runnable {
private final PerNodeTimelineCollectorsAuxService auxService;
public ShutdownHook(PerNodeTimelineCollectorsAuxService auxService) {
this.auxService = auxService;
}
public void run() {
auxService.stop();
}
}
public static void main(String[] args) {
Configuration conf = new YarnConfiguration();
conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
conf.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
launchServer(args, null, conf);
}
}
| ShutdownHook |
java | apache__camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/SpringEventComponentBuilderFactory.java | {
"start": 1363,
"end": 1838
} | interface ____ {
/**
* Spring Event (camel-spring)
* Listen for Spring Application Events.
*
* Category: messaging
* Since: 1.4
* Maven coordinates: org.apache.camel:camel-spring
*
* @return the dsl builder
*/
static SpringEventComponentBuilder springEvent() {
return new SpringEventComponentBuilderImpl();
}
/**
* Builder for the Spring Event component.
*/
| SpringEventComponentBuilderFactory |
java | apache__flink | flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/Schema.java | {
"start": 2992,
"end": 3157
} | class ____ unresolved, it should not be directly persisted. The {@link
* #toString()} shows only a summary of the contained objects.
*/
@PublicEvolving
public final | is |
java | dropwizard__dropwizard | dropwizard-hibernate/src/test/java/io/dropwizard/hibernate/AbstractDAOTest.java | {
"start": 1067,
"end": 6784
} | class ____ extends AbstractDAO<String> {
MockDAO(SessionFactory factory) {
super(factory);
}
@Override
public Session currentSession() {
return super.currentSession();
}
@Override
public Query<?> namedQuery(String queryName) throws HibernateException {
return super.namedQuery(queryName);
}
@Override
protected Query<String> namedTypedQuery(String queryName) throws HibernateException {
return super.namedTypedQuery(queryName);
}
@Override
public Class<String> getEntityClass() {
return super.getEntityClass();
}
@Override
public String uniqueResult(Query<String> query) throws HibernateException {
return super.uniqueResult(query);
}
@Override
public List<String> list(Query<String> query) throws HibernateException {
return super.list(query);
}
@Override
public String get(Object id) {
return super.get(id);
}
@Override
public String persist(String entity) throws HibernateException {
return super.persist(entity);
}
@Override
public <T> T initialize(T proxy) {
return super.initialize(proxy);
}
}
private final SessionFactory factory = mock(SessionFactory.class);
private final HibernateCriteriaBuilder criteriaBuilder = mock(HibernateCriteriaBuilder.class);
@SuppressWarnings("unchecked")
private final JpaCriteriaQuery<String> criteriaQuery = mock(JpaCriteriaQuery.class);
@SuppressWarnings("unchecked")
private final Query<String> query = mock(Query.class);
private final Session session = mock(Session.class);
private final MockDAO dao = new MockDAO(factory);
@BeforeEach
void setup() throws Exception {
when(criteriaBuilder.createQuery(same(String.class))).thenReturn(criteriaQuery);
when(factory.getCurrentSession()).thenReturn(session);
when(session.getCriteriaBuilder()).thenReturn(criteriaBuilder);
when(session.getNamedQuery(anyString())).thenReturn(query);
when(session.createQuery(anyString(), same(String.class))).thenReturn(query);
when(session.createNamedQuery(anyString(), same(String.class))).thenReturn(query);
}
@Test
void getsASessionFromTheSessionFactory() {
assertThat(dao.currentSession())
.isSameAs(session);
}
@Test
void hasAnEntityClass() {
assertThat(dao.getEntityClass())
.isEqualTo(String.class);
}
@Test
void getsNamedQueries() {
assertThat(dao.namedQuery("query-name"))
.isEqualTo(query);
verify(session).getNamedQuery("query-name");
}
@Test
void getsNamedTypedQueries() {
assertThat(dao.namedTypedQuery("query-name"))
.isEqualTo(query);
verify(session).createNamedQuery("query-name", String.class);
}
@Test
void getsTypedQueries() {
assertThat(dao.query("HQL"))
.isEqualTo(query);
verify(session).createQuery("HQL", String.class);
}
@Test
void createsNewCriteriaQueries() {
assertThat(dao.criteriaQuery())
.isEqualTo(criteriaQuery);
verify(session).getCriteriaBuilder();
verify(criteriaBuilder).createQuery(String.class);
}
@Test
void returnsUniqueResultsFromJpaCriteriaQueries() {
when(session.createQuery(criteriaQuery)).thenReturn(query);
when(query.getResultList()).thenReturn(Collections.singletonList("woo"));
assertThat(dao.uniqueResult(criteriaQuery))
.isEqualTo("woo");
}
@Test
void throwsOnNonUniqueResultsFromJpaCriteriaQueries() {
when(session.createQuery(criteriaQuery)).thenReturn(query);
when(query.getResultList()).thenReturn(Arrays.asList("woo", "boo"));
assertThatExceptionOfType(NonUniqueResultException.class).isThrownBy(() ->
dao.uniqueResult(criteriaQuery));
}
@Test
void returnsUniqueResultsFromQueries() {
when(query.uniqueResult()).thenReturn("woo");
assertThat(dao.uniqueResult(query))
.isEqualTo("woo");
}
@Test
void returnsUniqueListsFromJpaCriteriaQueries() {
when(session.createQuery(criteriaQuery)).thenReturn(query);
when(query.getResultList()).thenReturn(Collections.singletonList("woo"));
assertThat(dao.list(criteriaQuery))
.containsOnly("woo");
}
@Test
void returnsUniqueListsFromQueries() {
when(query.list()).thenReturn(Collections.singletonList("woo"));
assertThat(dao.list(query))
.containsOnly("woo");
}
@Test
void getsEntitiesById() {
when(session.get(String.class, 200)).thenReturn("woo!");
assertThat(dao.get(200))
.isEqualTo("woo!");
verify(session).get(String.class, 200);
}
@Test
void persistsEntities() {
assertThat(dao.persist("woo"))
.isEqualTo("woo");
verify(session).saveOrUpdate("woo");
}
@Test
void initializesProxies() throws Exception {
final LazyInitializer initializer = mock(LazyInitializer.class);
when(initializer.isUninitialized()).thenReturn(true);
final HibernateProxy proxy = mock(HibernateProxy.class);
doCallRealMethod().when(proxy).asHibernateProxy();
when(proxy.getHibernateLazyInitializer()).thenReturn(initializer);
dao.initialize(proxy);
verify(initializer).initialize();
}
}
| MockDAO |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/SingleTableNativeQueryTest.java | {
"start": 8996,
"end": 9807
} | class ____ extends Person {
private Color color;
private String job;
@OneToOne
private Man husband;
@OneToMany(mappedBy = "mother")
private List<Child> children = new ArrayList<>();
public Woman() {
}
public Woman(String name, String job) {
super( name );
this.job = job;
}
public String getJob() {
return job;
}
public void setJob(String job) {
this.job = job;
}
public Man getHusband() {
return husband;
}
public void setHusband(Man husband) {
this.husband = husband;
}
public List<Child> getChildren() {
return children;
}
public void setChildren(List<Child> children) {
this.children = children;
}
public Color getColor() {
return color;
}
public void setColor(final Color color) {
this.color = color;
}
}
}
| Woman |
java | elastic__elasticsearch | x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java | {
"start": 1457,
"end": 16360
} | class ____ extends AbstractRemoteClusterSecurityTestCase {
private static final AtomicReference<Map<String, Object>> API_KEY_MAP_REF = new AtomicReference<>();
static {
fulfillingCluster = ElasticsearchCluster.local()
.name("fulfilling-cluster")
.apply(commonClusterConfig)
.module("x-pack-ccr")
.setting("remote_cluster_server.enabled", "true")
.setting("remote_cluster.port", "0")
.setting("xpack.security.remote_cluster_server.ssl.enabled", "true")
.setting("xpack.security.remote_cluster_server.ssl.key", "remote-cluster.key")
.setting("xpack.security.remote_cluster_server.ssl.certificate", "remote-cluster.crt")
.keystore("xpack.security.remote_cluster_server.ssl.secure_key_passphrase", "remote-cluster-password")
.build();
queryCluster = ElasticsearchCluster.local()
.name("query-cluster")
.apply(commonClusterConfig)
.rolesFile(Resource.fromClasspath("roles.yml"))
.module("x-pack-ccr")
.setting("xpack.security.remote_cluster_client.ssl.enabled", "true")
.setting("xpack.security.remote_cluster_client.ssl.certificate_authorities", "remote-cluster-ca.crt")
.keystore("cluster.remote.my_remote_cluster.credentials", () -> {
API_KEY_MAP_REF.updateAndGet(v -> v != null ? v : createCrossClusterAccessApiKey("""
{
"replication": [
{
"names": ["leader-index", "leader-alias", "metrics-*"]
}
]
}"""));
return (String) API_KEY_MAP_REF.get().get("encoded");
})
.user("ccr_user", PASS.toString(), "ccr_user_role", false)
.build();
}
@ClassRule
// Use a RuleChain to ensure that fulfilling cluster is started before query cluster
public static TestRule clusterRule = RuleChain.outerRule(fulfillingCluster).around(queryCluster);
public void testFollow() throws Exception {
configureRemoteCluster();
// fulfilling cluster
{
final Request bulkRequest = new Request("POST", "/_bulk?refresh=true");
bulkRequest.setJsonEntity(Strings.format("""
{ "index": { "_index": "leader-index" } }
{ "name": "doc-1" }
{ "index": { "_index": "leader-index" } }
{ "name": "doc-2" }
{ "index": { "_index": "leader-index" } }
{ "name": "doc-3" }
{ "index": { "_index": "leader-index" } }
{ "name": "doc-4" }
{ "index": { "_index": "private-index" } }
{ "name": "doc-5" }
"""));
assertOK(performRequestAgainstFulfillingCluster(bulkRequest));
final Request putIndexRequest = new Request("PUT", "/shared-index");
putIndexRequest.setJsonEntity("""
{
"aliases": {
"shared-alias": {}
}
}
""");
assertOK(performRequestAgainstFulfillingCluster(putIndexRequest));
}
// query cluster
{
final String followIndexName = "follower-index";
final Request putCcrRequest = new Request("PUT", "/" + followIndexName + "/_ccr/follow?wait_for_active_shards=1");
putCcrRequest.setJsonEntity("""
{
"remote_cluster": "my_remote_cluster",
"leader_index": "leader-index"
}""");
final Response putCcrResponse = performRequestWithCcrUser(putCcrRequest);
assertOK(putCcrResponse);
final Map<String, Object> responseMap = responseAsMap(putCcrResponse);
responseMap.forEach((k, v) -> assertThat(k, v, is(true)));
// Ensure data is replicated
verifyReplicatedDocuments(4L, followIndexName);
assertFollowerInfo(followIndexName, "leader-index", "active");
assertFollowerStats(followIndexName);
// unfollow and then follow and then index a few docs in leader index:
pauseFollow(followIndexName);
assertFollowerInfo(followIndexName, "leader-index", "paused");
resumeFollow(followIndexName);
final Request bulkRequest = new Request("POST", "/_bulk?refresh=true");
bulkRequest.setJsonEntity(Strings.format("""
{ "index": { "_index": "leader-index" } }
{ "name": "doc-5" }
{ "index": { "_index": "leader-index" } }
{ "name": "doc-6" }
"""));
assertOK(performRequestAgainstFulfillingCluster(bulkRequest));
verifyReplicatedDocuments(6L, followIndexName);
pauseFollow(followIndexName);
closeIndex(followIndexName);
unfollow(followIndexName);
assertNoFollowerInfo(followIndexName);
final var e = expectThrows(ResponseException.class, () -> resumeFollow(followIndexName));
assertThat(e.getMessage(), containsString("follow index [" + followIndexName + "] does not have ccr metadata"));
}
// query cluster error cases - no privileges
{
final Request putCcrRequest = new Request("PUT", "/follower-index-2/_ccr/follow?wait_for_active_shards=1");
putCcrRequest.setJsonEntity("""
{
"remote_cluster": "my_remote_cluster",
"leader_index": "private-index"
}""");
final ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithCcrUser(putCcrRequest));
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(403));
assertThat(e.getMessage(), containsString("insufficient privileges to follow index [private-index]"));
}
// query cluster error cases - aliases not supported
{
final Request putCcrRequest = new Request("PUT", "/follower-index-3/_ccr/follow?wait_for_active_shards=1");
putCcrRequest.setJsonEntity("""
{
"remote_cluster": "my_remote_cluster",
"leader_index": "shared-alias"
}""");
final ResponseException e = expectThrows(ResponseException.class, () -> performRequestWithCcrUser(putCcrRequest));
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400));
assertThat(e.getMessage(), containsString("cannot follow [shared-alias], because it is a ALIAS"));
}
}
public void testAutoFollow() throws Exception {
configureRemoteCluster();
// follow cluster
{
final var putAllowFollowRequest = new Request("PUT", "/_ccr/auto_follow/my_auto_follow_pattern");
putAllowFollowRequest.setJsonEntity("""
{
"remote_cluster" : "my_remote_cluster",
"leader_index_patterns" : [ "metrics-*" ],
"leader_index_exclusion_patterns": [ "metrics-001" ]
}""");
final Response putAutoFollowResponse = performRequestWithCcrUser(putAllowFollowRequest);
assertOK(putAutoFollowResponse);
}
// leader cluster
{
final Request bulkRequest = new Request("POST", "/_bulk?refresh=true");
bulkRequest.setJsonEntity(Strings.format("""
{ "index": { "_index": "metrics-000" } }
{ "name": "doc-1" }
{ "index": { "_index": "metrics-000" } }
{ "name": "doc-2" }
{ "index": { "_index": "metrics-001" } }
{ "name": "doc-3" }
{ "index": { "_index": "metrics-002" } }
{ "name": "doc-4" }
"""));
assertOK(performRequestAgainstFulfillingCluster(bulkRequest));
}
// follow cluster
{
assertBusy(() -> {
ensureHealth("metrics-000,metrics-002", request -> {
request.addParameter("wait_for_status", "yellow");
request.addParameter("wait_for_active_shards", "2");
request.addParameter("wait_for_no_relocating_shards", "true");
request.addParameter("wait_for_no_initializing_shards", "true");
request.addParameter("timeout", "5s");
request.addParameter("level", "shards");
});
});
verifyReplicatedDocuments(3L, "metrics-000", "metrics-002");
final Response statsResponse = performRequestWithCcrUser(new Request("GET", "/_ccr/stats"));
assertOK(statsResponse);
assertThat(
ObjectPath.createFromResponse(statsResponse).evaluate("auto_follow_stats.number_of_successful_follow_indices"),
equalTo(2)
);
assertFollowerInfo("metrics-000", "metrics-000", "active");
assertFollowerInfo("metrics-002", "metrics-002", "active");
// Pause and resume
pauseAutoFollow("my_auto_follow_pattern");
resumeAutoFollow("my_auto_follow_pattern");
final Request bulkRequest = new Request("POST", "/_bulk?refresh=true");
bulkRequest.setJsonEntity(Strings.format("""
{ "index": { "_index": "metrics-000" } }
{ "name": "doc-5" }
{ "index": { "_index": "metrics-002" } }
{ "name": "doc-6" }
"""));
assertOK(performRequestAgainstFulfillingCluster(bulkRequest));
verifyReplicatedDocuments(5L, "metrics-000", "metrics-002");
// Delete
deleteAutoFollow("my_auto_follow_pattern");
final ResponseException e = expectThrows(
ResponseException.class,
() -> performRequestWithCcrUser(new Request("GET", "/_ccr/auto_follow/my_auto_follow_pattern"))
);
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404));
}
}
private Response performRequestWithCcrUser(final Request request) throws IOException {
request.setOptions(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", basicAuthHeaderValue("ccr_user", PASS)));
return client().performRequest(request);
}
private void verifyReplicatedDocuments(long numberOfDocs, String... indices) throws Exception {
final Request searchRequest = new Request("GET", "/" + arrayToCommaDelimitedString(indices) + "/_search");
assertBusy(() -> {
final Response response;
try {
response = performRequestWithCcrUser(searchRequest);
} catch (ResponseException e) {
throw new AssertionError(e);
}
assertOK(response);
final SearchResponse searchResponse;
try (var parser = responseAsParser(response)) {
searchResponse = SearchResponseUtils.parseSearchResponse(parser);
}
try {
assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs));
assertThat(
Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()),
equalTo(Set.of(indices))
);
} finally {
searchResponse.decRef();
}
});
}
private void assertFollowerInfo(String followIndexName, String leadIndexName, String status) throws IOException {
final Response response = performRequestWithCcrUser(new Request("GET", "/" + followIndexName + "/_ccr/info"));
assertOK(response);
final List<Map<String, Object>> followerIndices = ObjectPath.createFromResponse(response).evaluate("follower_indices");
assertThat(followerIndices, hasSize(1));
final Map<String, Object> follower = followerIndices.get(0);
assertThat(ObjectPath.evaluate(follower, "follower_index"), equalTo(followIndexName));
assertThat(ObjectPath.evaluate(follower, "leader_index"), equalTo(leadIndexName));
assertThat(ObjectPath.evaluate(follower, "remote_cluster"), equalTo("my_remote_cluster"));
assertThat(ObjectPath.evaluate(follower, "status"), equalTo(status));
}
private void assertNoFollowerInfo(String followIndexName) throws IOException {
final Response response = performRequestWithCcrUser(new Request("GET", "/" + followIndexName + "/_ccr/info"));
assertOK(response);
final List<Map<String, Object>> followerIndices = ObjectPath.createFromResponse(response).evaluate("follower_indices");
assertThat(followerIndices, empty());
}
private void assertFollowerStats(String followIndexName) throws IOException {
final Response response = performRequestWithCcrUser(new Request("GET", "/" + followIndexName + "/_ccr/stats"));
assertOK(response);
final List<Map<String, Object>> followerIndices = ObjectPath.createFromResponse(response).evaluate("indices");
assertThat(followerIndices, hasSize(1));
final Map<String, Object> follower = followerIndices.get(0);
assertThat(ObjectPath.evaluate(follower, "index"), equalTo(followIndexName));
}
private void pauseFollow(String followIndexName) throws IOException {
assertOK(performRequestWithCcrUser(new Request("POST", "/" + followIndexName + "/_ccr/pause_follow")));
}
private void resumeFollow(String followIndexName) throws IOException {
final Request resumeFollowRequest = new Request("POST", "/" + followIndexName + "/_ccr/resume_follow");
resumeFollowRequest.setJsonEntity("{\"read_poll_timeout\": \"10ms\"}");
assertOK(performRequestWithCcrUser(resumeFollowRequest));
}
private void unfollow(String followIndexName) throws IOException {
assertOK(performRequestWithCcrUser(new Request("POST", "/" + followIndexName + "/_ccr/unfollow")));
}
private void pauseAutoFollow(String name) throws IOException {
assertOK(performRequestWithCcrUser(new Request("POST", "/_ccr/auto_follow/" + name + "/pause")));
}
private void resumeAutoFollow(String name) throws IOException {
assertOK(performRequestWithCcrUser(new Request("POST", "/_ccr/auto_follow/" + name + "/resume")));
}
private void deleteAutoFollow(String name) throws IOException {
assertOK(performRequestWithCcrUser(new Request("DELETE", "/_ccr/auto_follow/" + name)));
}
}
| RemoteClusterSecurityCcrIT |
java | apache__camel | components/camel-spring-parent/camel-spring-ai/camel-spring-ai-vector-store/src/main/java/org/apache/camel/component/springai/vectorstore/SpringAiVectorStoreEndpoint.java | {
"start": 1575,
"end": 2744
} | class ____ extends DefaultEndpoint {
@Metadata(required = true)
@UriPath(description = "The id")
private final String storeId;
@UriParam
private SpringAiVectorStoreConfiguration configuration;
public SpringAiVectorStoreEndpoint(
String endpointUri,
Component component,
String storeId,
SpringAiVectorStoreConfiguration configuration) {
super(endpointUri, component);
this.storeId = storeId;
this.configuration = configuration;
}
public SpringAiVectorStoreConfiguration getConfiguration() {
return this.configuration;
}
public String getStoreId() {
return this.storeId;
}
@Override
public Producer createProducer() throws Exception {
return new SpringAiVectorStoreProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
throw new UnsupportedOperationException("Consumer is not implemented for this component");
}
}
| SpringAiVectorStoreEndpoint |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsVisitor.java | {
"start": 982,
"end": 1070
} | interface ____ metrics
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving
public | for |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java | {
"start": 27244,
"end": 29163
} | class ____ extends Modification {
protected final HashMap<Path, FileStatus> statusMap;
FileStatusChange(Path file, FileSystem fs, String type) {
super(file, fs, type);
statusMap = new HashMap<Path, FileStatus>();
}
@Override
void loadSnapshots() throws Exception {
for (Path snapshotRoot : snapshotList) {
Path snapshotFile = SnapshotTestHelper.getSnapshotFile(
snapshotRoot, file);
if (snapshotFile != null) {
if (fs.exists(snapshotFile)) {
FileStatus status = fs.getFileStatus(snapshotFile);
statusMap.put(snapshotFile, status);
} else {
statusMap.put(snapshotFile, null);
}
}
}
}
@Override
void checkSnapshots() throws Exception {
for (Path snapshotFile : statusMap.keySet()) {
FileStatus currentStatus = fs.exists(snapshotFile) ? fs
.getFileStatus(snapshotFile) : null;
FileStatus originalStatus = statusMap.get(snapshotFile);
assertEquals(currentStatus, originalStatus);
if (currentStatus != null) {
String s = null;
if (!currentStatus.toString().equals(originalStatus.toString())) {
s = "FAILED: " + getClass().getSimpleName()
+ ": file=" + file + ", snapshotFile" + snapshotFile
+ "\n\n currentStatus = " + currentStatus
+ "\noriginalStatus = " + originalStatus
+ "\n\nfile : " + fsdir.getINode(file.toString()).toDetailString()
+ "\n\nsnapshotFile: " + fsdir.getINode(snapshotFile.toString()).toDetailString();
SnapshotTestHelper.dumpTree(s, cluster);
}
assertEquals(currentStatus.toString(), originalStatus.toString(), s);
}
}
}
}
/**
* Change the file permission
*/
static | FileStatusChange |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/android/FragmentInjection.java | {
"start": 2961,
"end": 4426
} | class ____ extends BugChecker implements ClassTreeMatcher {
private static final Matcher<MethodTree> OVERRIDES_IS_VALID_FRAGMENT =
allOf(methodIsNamed("isValidFragment"), methodHasParameters(isSameType("java.lang.String")));
@Override
public Description matchClass(ClassTree tree, VisitorState state) {
if (!state.isAndroidCompatible()) {
return Description.NO_MATCH;
}
// Only examine classes that extend PreferenceActivity.
Type preferenceActivityType = ANDROID_PREFERENCE_PREFERENCEACTIVITY.get(state);
if (!isSubtype(getType(tree), preferenceActivityType, state)) {
return NO_MATCH;
}
// Examine each method in the class. Complain if isValidFragment not implemented.
TypeSymbol preferenceActivityTypeSymbol = preferenceActivityType.tsym;
boolean methodNotImplemented = true;
try {
MethodSymbol isValidFragmentMethodSymbol =
resolveExistingMethod(
state,
getSymbol(tree),
ISVALIDFRAGMENT.get(state),
ImmutableList.of(state.getSymtab().stringType),
ImmutableList.<Type>of());
methodNotImplemented = isValidFragmentMethodSymbol.owner.equals(preferenceActivityTypeSymbol);
} catch (FatalError e) {
// If isValidFragment method symbol is not found, then we must be compiling against an old SDK
// version (< 19) in which isValidFragment is not yet implemented, and neither this | FragmentInjection |
java | google__dagger | javatests/dagger/internal/codegen/ProductionGraphValidationTest.java | {
"start": 16941,
"end": 17430
} | interface ____");
});
}
@Test
public void cycleNotBrokenByProducerMap() {
Source component =
CompilerTests.javaSource(
"test.TestComponent",
"package test;",
"",
"import com.google.common.util.concurrent.ListenableFuture;",
"import dagger.producers.ProductionComponent;",
"",
"@ProductionComponent(modules = {ExecutorModule.class, TestModule.class})",
" | TestComponent |
java | apache__logging-log4j2 | log4j-core-java9/src/main/java/org/apache/logging/log4j/core/util/internal/UnsafeUtil.java | {
"start": 1226,
"end": 2422
} | class ____ {
private static final Logger LOGGER = StatusLogger.getLogger();
private static final Unsafe unsafe = findUnsafe();
private static Unsafe findUnsafe() {
try {
return AccessController.doPrivileged(new PrivilegedExceptionAction<Unsafe>() {
@Override
public Unsafe run() throws ReflectiveOperationException, SecurityException {
final Field unsafeField = Class.forName("sun.misc.Unsafe").getDeclaredField("theUnsafe");
unsafeField.setAccessible(true);
return (Unsafe) unsafeField.get(null);
}
});
} catch (PrivilegedActionException e) {
final Exception wrapped = e.getException();
if (wrapped instanceof SecurityException) {
throw (SecurityException) wrapped;
}
LOGGER.warn("sun.misc.Unsafe is not available. This will impact memory usage.", e);
}
return null;
}
public static void clean(final ByteBuffer bb) throws Exception {
if (unsafe != null && bb.isDirect()) {
unsafe.invokeCleaner(bb);
}
}
}
| UnsafeUtil |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/web/reactive/server/ApplicationContextSpecTests.java | {
"start": 2044,
"end": 2542
} | class ____ {
@Bean
public RouterFunction<?> handler() {
return RouterFunctions.route()
.GET("/sessionClassName", request ->
request.session().flatMap(session -> {
String className = session.getClass().getSimpleName();
return ServerResponse.ok().bodyValue(className);
}))
.build();
}
@Bean
public WebSessionManager webSessionManager() {
MockWebSession session = new MockWebSession();
return exchange -> Mono.just(session);
}
}
}
| WebConfig |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/bytearray/ByteArrayAssert_contains_at_Index_with_Integer_Argument_Test.java | {
"start": 1119,
"end": 1623
} | class ____ extends ByteArrayAssertBaseTest {
private Index index = someIndex();
@Override
protected ByteArrayAssert invoke_api_method() {
return assertions.contains(8, index);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertContains(getInfo(assertions), getActual(assertions), 8, index);
}
@Test
void invoke_api_like_user() {
assertThat(new byte[] { 1 }).contains(1, atIndex(0));
}
}
| ByteArrayAssert_contains_at_Index_with_Integer_Argument_Test |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/dialect/function/json/AbstractJsonRemoveFunction.java | {
"start": 727,
"end": 1270
} | class ____ extends AbstractSqmSelfRenderingFunctionDescriptor {
public AbstractJsonRemoveFunction(TypeConfiguration typeConfiguration) {
super(
"json_remove",
FunctionKind.NORMAL,
new ArgumentTypesValidator(
StandardArgumentsValidators.exactly( 2 ),
FunctionParameterType.IMPLICIT_JSON,
FunctionParameterType.STRING
),
StandardFunctionReturnTypeResolvers.invariant(
typeConfiguration.getBasicTypeRegistry().resolve( String.class, SqlTypes.JSON )
),
null
);
}
}
| AbstractJsonRemoveFunction |
java | apache__camel | components/camel-google/camel-google-mail/src/generated/java/org/apache/camel/component/google/mail/GmailUsersEndpointConfiguration.java | {
"start": 1286,
"end": 2527
} | class ____ extends GoogleMailConfiguration {
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "watch", description="The com.google.api.services.gmail.model.WatchRequest")})
private com.google.api.services.gmail.model.WatchRequest content;
@UriParam
@ApiParam(optional = false, apiMethods = {@ApiMethod(methodName = "getProfile", description="The user's email address. The special value me can be used to indicate the authenticated user. default: me"), @ApiMethod(methodName = "stop", description="The user's email address. The special value me can be used to indicate the authenticated user. default: me"), @ApiMethod(methodName = "watch", description="The user's email address. The special value me can be used to indicate the authenticated user. default: me")})
private String userId;
public com.google.api.services.gmail.model.WatchRequest getContent() {
return content;
}
public void setContent(com.google.api.services.gmail.model.WatchRequest content) {
this.content = content;
}
public String getUserId() {
return userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
}
| GmailUsersEndpointConfiguration |
java | apache__avro | lang/java/avro/src/main/java/org/apache/avro/util/ClassUtils.java | {
"start": 2665,
"end": 3333
} | class ____ could load the class.
*/
public static Class<?> forName(ClassLoader classLoader, String className) throws ClassNotFoundException {
Class<?> c = null;
if (classLoader != null) {
c = forName(className, classLoader);
}
if (c == null && Thread.currentThread().getContextClassLoader() != null) {
c = forName(className, Thread.currentThread().getContextClassLoader());
}
if (c == null) {
throw new ClassNotFoundException("Failed to load class " + className);
}
return c;
}
/**
* Loads a {@link Class} from the specified {@link ClassLoader} without throwing
* {@link ClassNotFoundException}. The | loader |
java | quarkusio__quarkus | extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/timeseries/Sample.java | {
"start": 211,
"end": 1151
} | class ____ {
public final long timestamp;
public final double value;
public Sample(long timestamp, double value) {
this.timestamp = positiveOrZero(timestamp, "timestamp");
this.value = value;
}
public long timestamp() {
return timestamp;
}
public double value() {
return value;
}
@Override
public String toString() {
return "Sample{" +
"timestamp=" + timestamp +
", value=" + value +
'}';
}
@Override
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
Sample sample = (Sample) o;
return timestamp == sample.timestamp && Double.compare(sample.value, value) == 0;
}
@Override
public int hashCode() {
return Objects.hash(timestamp, value);
}
}
| Sample |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 103577,
"end": 103797
} | class ____ implements Procedure {
public Number[] call(Object procedureContext, Number n) {
return null;
}
}
// extracted order is f(BIGINT) || f(INT)
private static | OrderedProcedure2 |
java | quarkusio__quarkus | extensions/qute/deployment/src/test/java/io/quarkus/qute/deployment/extensions/TemplateExtensionAttributeTest.java | {
"start": 1623,
"end": 2207
} | class ____ {
static String myAttr(Object any, @TemplateAttribute Object myAttribute) {
return myAttribute != null ? myAttribute.toString() : "NULL";
}
static String transform(String val, @TemplateAttribute("locale") Object loc) {
return val.toLowerCase() + "::" + loc.toString();
}
@TemplateExtension(namespace = "attr")
static String ping(@TemplateAttribute Object myAttribute) {
return myAttribute.toString();
}
}
@TemplateExtension(namespace = "ping")
public static | Extensions |
java | apache__hadoop | hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/V1ToV2AwsCredentialProviderAdapter.java | {
"start": 4939,
"end": 6188
} | class ____ implement one of the following means of construction, which are
* attempted in order:
*
* <ol>
* <li>a public constructor accepting java.net.URI and
* org.apache.hadoop.conf.Configuration</li>
* <li>a public constructor accepting
* org.apache.hadoop.conf.Configuration</li>
* <li>a public static method named getInstance that accepts no
* arguments and returns an instance of
* com.amazonaws.auth.AWSCredentialsProvider, or</li>
* <li>a public default constructor.</li>
* </ol>
* @param conf configuration
* @param className classname
* @param uri URI of the FS
* @return the instantiated class
* @throws InstantiationIOException on construction and instantiation failures,
* including v1 SDK exceptions.
* @throws IOException if raised by a constructor/factory method.
*/
static AwsCredentialsProvider create(
Configuration conf,
String className,
@Nullable URI uri) throws InstantiationIOException, IOException {
final AWSCredentialsProvider instance =
S3AUtils.getInstanceFromReflection(className, conf, uri, AWSCredentialsProvider.class,
"getInstance", AWS_CREDENTIALS_PROVIDER);
return create(instance);
}
}
| must |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.