language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/EventNotifierExchangeSentTest.java
|
{
"start": 1181,
"end": 2827
}
|
class ____ extends ContextTestSupport {
private final MySentEventNotifier notifier = new MySentEventNotifier();
@Override
protected CamelContext createCamelContext() {
DefaultCamelContext context = new DefaultCamelContext();
context.getManagementStrategy().addEventNotifier(notifier);
return context;
}
@Test
public void testExchangeSent() throws Exception {
assertEquals(0, notifier.getEvents().size());
getMockEndpoint("mock:result").expectedMessageCount(1);
getMockEndpoint("mock:bar").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
assertEquals(4, notifier.getEvents().size());
ExchangeSentEvent e = (ExchangeSentEvent) notifier.getEvents().get(0);
assertEquals("mock://bar", e.getEndpoint().getEndpointUri());
e = (ExchangeSentEvent) notifier.getEvents().get(1);
assertEquals("direct://bar", e.getEndpoint().getEndpointUri());
e = (ExchangeSentEvent) notifier.getEvents().get(2);
assertEquals("mock://result", e.getEndpoint().getEndpointUri());
e = (ExchangeSentEvent) notifier.getEvents().get(3);
assertEquals("direct://start", e.getEndpoint().getEndpointUri());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("direct:bar").to("mock:result");
from("direct:bar").to("mock:bar");
}
};
}
}
|
EventNotifierExchangeSentTest
|
java
|
apache__flink
|
flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/tasks/OneInputStreamTaskTest.java
|
{
"start": 55479,
"end": 56511
}
|
class ____ extends RichMapFunction<String, String> {
private static final long serialVersionUID = 1L;
public static boolean openCalled = false;
public static boolean closeCalled = false;
TestOpenCloseMapFunction() {
openCalled = false;
closeCalled = false;
}
@Override
public void open(OpenContext openContext) throws Exception {
super.open(openContext);
assertThat(openCalled).as("Close called before open.").isFalse();
openCalled = true;
}
@Override
public void close() throws Exception {
super.close();
assertThat(openCalled).as("Open was not called before close.").isTrue();
closeCalled = true;
}
@Override
public String map(String value) throws Exception {
assertThat(openCalled).as("Open was not called before run.").isTrue();
return value;
}
}
private static
|
TestOpenCloseMapFunction
|
java
|
alibaba__nacos
|
api/src/main/java/com/alibaba/nacos/api/selector/context/CmdbContext.java
|
{
"start": 1055,
"end": 1864
}
|
class ____<T extends Instance> {
/**
* consumer is the instance which provide the info.
*/
private CmdbInstance<T> consumer;
/**
* the providers which be selected.
*/
private List<CmdbInstance<T>> providers;
public CmdbInstance<T> getConsumer() {
return consumer;
}
public void setConsumer(CmdbInstance<T> consumer) {
this.consumer = consumer;
}
public List<CmdbInstance<T>> getProviders() {
return providers;
}
public void setProviders(List<CmdbInstance<T>> providers) {
this.providers = providers;
}
@Override
public String toString() {
return "CmdbContext{" + "consumer=" + consumer + ", providers=" + providers + '}';
}
public static
|
CmdbContext
|
java
|
spring-projects__spring-security
|
config/src/test/java/org/springframework/security/config/annotation/method/configuration/PrePostMethodSecurityConfigurationTests.java
|
{
"start": 83100,
"end": 83324
}
|
class ____ {
final String name;
AuthorizedPerson(String name) {
this.name = name;
}
@PostAuthorize("returnObject == authentication.name")
public String getName() {
return this.name;
}
}
}
|
AuthorizedPerson
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/analysis/CharFilterFactory.java
|
{
"start": 543,
"end": 706
}
|
interface ____ {
String name();
Reader create(Reader reader);
default Reader normalize(Reader reader) {
return reader;
}
}
|
CharFilterFactory
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/binding/Person.java
|
{
"start": 216,
"end": 374
}
|
class ____ {
@Id
String name;
Address address;
Person() {}
Person(String name, Address address) {
this.name = name;
this.address = address;
}
}
|
Person
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/DoNotCallChecker.java
|
{
"start": 11823,
"end": 12613
}
|
class ____ by this ClassInfo using getName or"
+ " load")
/*
* Users of TypeToken have to create a subclass. The static type of their instance is
* probably often still "TypeToken," but that may change as we see more usage of `var`. So
* let's check subclasses, too. If anyone defines an overload of getClass on such a
* subclass, this check will give that person a bad time in one additional way.
*/
.put(
instanceMethod()
.onDescendantOf("com.google.common.reflect.TypeToken")
.named("getClass"),
"Calling getClass on TypeToken returns the Class object for TypeToken, you probably"
+ " meant to retrieve the
|
described
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/processor/RecipientListWithStringDelimitedHeaderTest.java
|
{
"start": 1026,
"end": 2024
}
|
class ____ extends ContextTestSupport {
@Test
public void testSendingAMessageUsingMulticastReceivesItsOwnExchange() throws Exception {
MockEndpoint x = getMockEndpoint("mock:x");
MockEndpoint y = getMockEndpoint("mock:y");
MockEndpoint z = getMockEndpoint("mock:z");
x.expectedBodiesReceived("answer");
y.expectedBodiesReceived("answer");
z.expectedBodiesReceived("answer");
sendBody();
assertMockEndpointsSatisfied();
}
protected void sendBody() {
template.sendBodyAndHeader("direct:a", "answer", "myHeader", "mock:x, mock:y, mock:z");
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
// START SNIPPET: example
from("direct:a").recipientList(header("myHeader"));
// END SNIPPET: example
}
};
}
}
|
RecipientListWithStringDelimitedHeaderTest
|
java
|
apache__camel
|
components/camel-servicenow/camel-servicenow-component/src/main/java/org/apache/camel/component/servicenow/AbstractServiceNowProcessor.java
|
{
"start": 1392,
"end": 10951
}
|
class ____ implements Processor {
protected final ServiceNowEndpoint endpoint;
protected final ServiceNowConfiguration config;
protected final ObjectMapper mapper;
protected final ServiceNowClient client;
// Cache for JavaTypes
private final JavaTypeCache javaTypeCache;
private final List<ServiceNowDispatcher> dispatchers;
protected AbstractServiceNowProcessor(ServiceNowEndpoint endpoint) {
this.javaTypeCache = new JavaTypeCache();
this.endpoint = endpoint;
this.config = endpoint.getConfiguration();
this.mapper = config.getOrCreateMapper();
this.client = new ServiceNowClient(endpoint.getCamelContext(), config);
this.dispatchers = new ArrayList<>();
}
protected AbstractServiceNowProcessor setBodyAndHeaders(Message message, Class<?> responseModel, Response response)
throws Exception {
if (response != null) {
setHeaders(message, responseModel, response);
setBody(message, responseModel, response);
}
return this;
}
@Override
public void process(Exchange exchange) throws Exception {
final ServiceNowDispatcher dispatcher = findDispatcher(exchange);
if (dispatcher != null) {
dispatcher.process(exchange);
} else {
throw new IllegalArgumentException("Unable to process exchange");
}
}
// *********************************
// Header
// *********************************
protected AbstractServiceNowProcessor setHeaders(Message message, Class<?> responseModel, Response response)
throws Exception {
ServiceNowHelper.findOffsets(response, (k, v) -> message.setHeader(k, v));
String attachmentMeta = response.getHeaderString(ServiceNowConstants.ATTACHMENT_META_HEADER);
if (ObjectHelper.isNotEmpty(attachmentMeta)) {
message.setHeader(
ServiceNowConstants.CONTENT_META,
mapper.readValue(attachmentMeta, Map.class));
}
copyHeader(response, HttpHeaders.CONTENT_TYPE, message, ServiceNowConstants.CONTENT_TYPE);
copyHeader(response, HttpHeaders.CONTENT_ENCODING, message, ServiceNowConstants.CONTENT_ENCODING);
if (responseModel != null) {
message.getHeaders().putIfAbsent(ServiceNowConstants.MODEL, responseModel.getName());
message.getHeaders().putIfAbsent(ServiceNowConstants.RESPONSE_MODEL, responseModel.getName());
}
return this;
}
// *********************************
// Body
// *********************************
protected AbstractServiceNowProcessor setBody(Message message, Class<?> model, Response response) throws Exception {
if (message != null && response != null) {
if (ObjectHelper.isNotEmpty(response.getHeaderString(HttpHeaders.CONTENT_TYPE))) {
JsonNode root = response.readEntity(JsonNode.class);
Map<String, String> responseAttributes = null;
if (root != null) {
Iterator<Map.Entry<String, JsonNode>> fields = root.fields();
while (fields.hasNext()) {
final Map.Entry<String, JsonNode> entry = fields.next();
final String key = entry.getKey();
final JsonNode node = entry.getValue();
if (ObjectHelper.equal("result", key, true)) {
Object body = unwrap(node, model);
if (body != null) {
message.setHeader(ServiceNowConstants.RESPONSE_TYPE, body.getClass());
message.setBody(body);
}
} else {
if (responseAttributes == null) {
responseAttributes = new HashMap<>();
}
responseAttributes.put(key, node.textValue());
}
}
if (responseAttributes != null) {
message.setHeader(ServiceNowConstants.RESPONSE_META, responseAttributes);
}
}
}
}
return this;
}
protected AbstractServiceNowProcessor validateBody(Message message, Class<?> model) {
return validateBody(message.getBody(), model);
}
protected AbstractServiceNowProcessor validateBody(Object body, Class<?> model) {
ObjectHelper.notNull(body, "body");
if (!model.isAssignableFrom(body.getClass())) {
throw new IllegalArgumentException(
"Body is not compatible with model (body=" + body.getClass() + ", model=" + model);
}
return this;
}
protected Object unwrap(JsonNode node, Class<?> model) throws IOException {
if (model == String.class) {
// If the model is a String, let's just serialize it as
// a json string
return mapper.writeValueAsString(node);
}
final Object result;
if (node.isArray()) {
if (model.isInstance(Map.class)) {
// If the model is a Map, there's no need to use any
// specific JavaType to instruct Jackson about the
// expected element type
result = mapper.treeToValue(node, List.class);
} else {
result = mapper.readValue(node.traverse(), javaTypeCache.get(model));
}
} else {
result = mapper.treeToValue(node, model);
}
return result;
}
// *********************************
// Helpers
// *********************************
protected final void addDispatcher(ServiceNowDispatcher dispatcher) {
this.dispatchers.add(dispatcher);
}
protected final void addDispatcher(String action, Processor processor) {
addDispatcher(ServiceNowDispatcher.on(action, null, processor));
}
protected final void addDispatcher(String action, String subject, Processor processor) {
addDispatcher(ServiceNowDispatcher.on(action, subject, processor));
}
protected final ServiceNowDispatcher findDispatcher(Exchange exchange) {
ServiceNowDispatcher dispatcher = null;
for (int i = 0; i < dispatchers.size(); i++) {
dispatcher = dispatchers.get(i);
if (dispatcher.match(exchange)) {
return dispatcher;
}
}
return null;
}
// *********************************
// Helpers
// *********************************
protected Object getRequestParamFromHeader(ServiceNowParam sysParam, Message message) {
return message.getHeader(
sysParam.getHeader(),
sysParam.getDefaultValue(config),
sysParam.getType());
}
protected Object getMandatoryRequestParamFromHeader(ServiceNowParam sysParam, Message message) {
return ObjectHelper.notNull(
getRequestParamFromHeader(sysParam, message),
sysParam.getHeader());
}
protected void copyHeader(Response from, String fromId, Message to, String toId) {
Object fromValue = from.getHeaders().getFirst(fromId);
if (ObjectHelper.isNotEmpty(fromValue)) {
to.setHeader(toId, fromValue);
}
}
protected Class<?> getRequestModel(Message message) {
return getRequestModel(message, null);
}
protected Class<?> getRequestModel(Message message, String modelName) {
Class<?> model = null;
if (message != null) {
model = message.getHeader(ServiceNowConstants.REQUEST_MODEL, Class.class);
if (model == null) {
model = message.getHeader(ServiceNowConstants.MODEL, Class.class);
}
}
return model != null
? model
: ObjectHelper.isEmpty(modelName) ? Map.class : config.getRequestModel(modelName, Map.class);
}
protected Class<?> getResponseModel(Message message) {
return getRequestModel(message, null);
}
protected Class<?> getResponseModel(Message message, String modelName) {
Class<?> model = null;
if (message != null) {
model = message.getHeader(ServiceNowConstants.RESPONSE_MODEL, Class.class);
if (model == null) {
model = message.getHeader(ServiceNowConstants.MODEL, Class.class);
}
}
return model != null
? model
: ObjectHelper.isEmpty(modelName) ? Map.class : config.getResponseModel(modelName, Map.class);
}
protected String getApiVersion(Message message) {
return message.getHeader(ServiceNowConstants.API_VERSION, config.getApiVersion(), String.class);
}
protected String getTableName(Message message) {
return message.getHeader(ServiceNowParams.PARAM_TABLE_NAME.getHeader(), config.getTable(), String.class);
}
protected String getSysID(Message message) {
return message.getHeader(ServiceNowParams.PARAM_SYS_ID.getHeader(), String.class);
}
// *************************************************************************
// Use ClassValue to lazy create and cache JavaType
// *************************************************************************
private
|
AbstractServiceNowProcessor
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/CompileTimeConstantCheckerTest.java
|
{
"start": 14678,
"end": 15451
}
|
class ____ {
CompileTimeConstantTestCase(@CompileTimeConstant String s) {}
public static Function<String, CompileTimeConstantTestCase> r(String x) {
// BUG: Diagnostic contains: Method with @CompileTimeConstant parameter
return CompileTimeConstantTestCase::new;
}
}
""")
.doTest();
}
@Test
public void matches_methodReferenceCorrectOverrideMethod() {
compilationHelper
.addSourceLines(
"test/CompileTimeConstantTestCase.java",
"""
package test;
import com.google.errorprone.annotations.CompileTimeConstant;
import java.util.function.Consumer;
public
|
CompileTimeConstantTestCase
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/MultipleApplicationClassesWithBuildProfileTest.java
|
{
"start": 444,
"end": 978
}
|
class ____ {
@RegisterExtension
static QuarkusUnitTest runner = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(
Application1.class, Application2.class, TestResource.class));
@Test
public void testNoAnnotation() {
get("/1/test")
.then()
.statusCode(200)
.body(Matchers.equalTo("test"));
}
@ApplicationPath("1")
public static
|
MultipleApplicationClassesWithBuildProfileTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/throughput/BufferDebloater.java
|
{
"start": 5228,
"end": 5914
}
|
class ____ can not be less than min or greater than max
// buffer size but if considering this method independently the behaviour for the small or
// big value should be the same as for min and max buffer size correspondingly.
if (newSize <= minBufferSize || newSize >= maxBufferSize) {
return false;
}
int delta = (int) (lastBufferSize * bufferDebloatThresholdFactor);
return Math.abs(newSize - lastBufferSize) < delta;
}
public int getLastBufferSize() {
return lastBufferSize;
}
public Duration getLastEstimatedTimeToConsumeBuffers() {
return lastEstimatedTimeToConsumeBuffers;
}
}
|
newSize
|
java
|
apache__maven
|
impl/maven-impl/src/main/java/org/apache/maven/impl/DefaultJavaToolchainFactory.java
|
{
"start": 6347,
"end": 6773
}
|
class ____ implements Predicate<String> {
final String provides;
ExactMatcher(String provides) {
this.provides = provides;
}
@Override
public boolean test(String requirement) {
return provides.equalsIgnoreCase(requirement);
}
@Override
public String toString() {
return provides;
}
}
static final
|
ExactMatcher
|
java
|
grpc__grpc-java
|
gcp-observability/src/test/java/io/grpc/gcp/observability/GcpObservabilityTest.java
|
{
"start": 2051,
"end": 6339
}
|
class ____ {
private final StaticTestingClassLoader classLoader =
new StaticTestingClassLoader(
getClass().getClassLoader(),
Pattern.compile(
"io\\.grpc\\.InternalConfigurator|io\\.grpc\\.Configurator|"
+ "io\\.grpc\\.InternalConfiguratorRegistry|io\\.grpc\\.ConfiguratorRegistry|"
+ "io\\.grpc\\.gcp\\.observability\\.[^.]+|"
+ "io\\.grpc\\.gcp\\.observability\\.interceptors\\.[^.]+|"
+ "io\\.grpc\\.gcp\\.observability\\.GcpObservabilityTest\\$.*"));
@Test
public void initFinish() throws Exception {
Class<?> runnable =
classLoader.loadClass(StaticTestingClassInitFinish.class.getName());
((Runnable) runnable.getDeclaredConstructor().newInstance()).run();
}
@Test
public void enableObservability() throws Exception {
Class<?> runnable =
classLoader.loadClass(StaticTestingClassEnableObservability.class.getName());
((Runnable) runnable.getDeclaredConstructor().newInstance()).run();
}
@Test
public void disableObservability() throws Exception {
Class<?> runnable =
classLoader.loadClass(StaticTestingClassDisableObservability.class.getName());
((Runnable) runnable.getDeclaredConstructor().newInstance()).run();
}
@Test
@SuppressWarnings("unchecked")
public void conditionalInterceptor() {
ClientInterceptor delegate = mock(ClientInterceptor.class);
Channel channel = mock(Channel.class);
ClientCall<?, ?> returnedCall = mock(ClientCall.class);
ConditionalClientInterceptor conditionalClientInterceptor
= GcpObservability.getConditionalInterceptor(
delegate);
MethodDescriptor<?, ?> method = MethodDescriptor.newBuilder()
.setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.logging.v2.LoggingServiceV2/method")
.setRequestMarshaller(mock(MethodDescriptor.Marshaller.class))
.setResponseMarshaller(mock(MethodDescriptor.Marshaller.class))
.build();
doReturn(returnedCall).when(channel).newCall(method, CallOptions.DEFAULT);
ClientCall<?, ?> clientCall = conditionalClientInterceptor.interceptCall(method,
CallOptions.DEFAULT, channel);
verifyNoInteractions(delegate);
assertThat(clientCall).isSameInstanceAs(returnedCall);
method = MethodDescriptor.newBuilder().setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.monitoring.v3.MetricService/method2")
.setRequestMarshaller(mock(MethodDescriptor.Marshaller.class))
.setResponseMarshaller(mock(MethodDescriptor.Marshaller.class))
.build();
doReturn(returnedCall).when(channel).newCall(method, CallOptions.DEFAULT);
clientCall = conditionalClientInterceptor.interceptCall(method, CallOptions.DEFAULT, channel);
verifyNoInteractions(delegate);
assertThat(clientCall).isSameInstanceAs(returnedCall);
method = MethodDescriptor.newBuilder().setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("google.devtools.cloudtrace.v2.TraceService/method3")
.setRequestMarshaller(mock(MethodDescriptor.Marshaller.class))
.setResponseMarshaller(mock(MethodDescriptor.Marshaller.class))
.build();
doReturn(returnedCall).when(channel).newCall(method, CallOptions.DEFAULT);
clientCall = conditionalClientInterceptor.interceptCall(method, CallOptions.DEFAULT, channel);
verifyNoInteractions(delegate);
assertThat(clientCall).isSameInstanceAs(returnedCall);
reset(channel);
ClientCall<?, ?> interceptedCall = mock(ClientCall.class);
method = MethodDescriptor.newBuilder().setType(MethodDescriptor.MethodType.UNARY)
.setFullMethodName("some.other.random.service/method4")
.setRequestMarshaller(mock(MethodDescriptor.Marshaller.class))
.setResponseMarshaller(mock(MethodDescriptor.Marshaller.class))
.build();
doReturn(interceptedCall).when(delegate).interceptCall(method, CallOptions.DEFAULT, channel);
clientCall = conditionalClientInterceptor.interceptCall(method, CallOptions.DEFAULT, channel);
verifyNoInteractions(channel);
assertThat(clientCall).isSameInstanceAs(interceptedCall);
}
// UsedReflectively
public static final
|
GcpObservabilityTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/model/TableMapping.java
|
{
"start": 496,
"end": 1711
}
|
interface ____ extends TableDetails {
/**
* The name of the mapped table
*/
String getTableName();
default boolean containsTableName(String tableName) {
return getTableName().equals( tableName );
}
/**
* The position of the table relative to others for the {@link MutationTarget}
*/
int getRelativePosition();
/**
* Whether the table is mapped as optional
*/
boolean isOptional();
/**
* Whether the table is mapped as inverse
*/
boolean isInverse();
/**
* Whether this table holds the identifier for the {@link MutationTarget}
*/
boolean isIdentifierTable();
/**
* Details for insertion into this table
*/
MutationDetails getInsertDetails();
/**
* Details for updating this table
*/
MutationDetails getUpdateDetails();
/**
* Whether deletions are cascaded to this table at the database level.
*
* @apiNote When {@code true}, {@link #isIdentifierTable()} will generally
* be {@code false}
*
* @see org.hibernate.annotations.OnDelete
*/
boolean isCascadeDeleteEnabled();
/**
* Details for deleting from this table
*/
MutationDetails getDeleteDetails();
/**
* Details for the {@linkplain MutationType mutation} of a table
*/
|
TableMapping
|
java
|
jhy__jsoup
|
src/test/java/org/jsoup/select/ElementsTest.java
|
{
"start": 589,
"end": 24685
}
|
class ____ {
@Test public void filter() {
String h = "<p>Excl</p><div class=headline><p>Hello</p><p>There</p></div><div class=headline><h1>Headline</h1></div>";
Document doc = Jsoup.parse(h);
Elements els = doc.select(".headline").select("p");
assertEquals(2, els.size());
assertEquals("Hello", els.get(0).text());
assertEquals("There", els.get(1).text());
}
@Test public void attributes() {
String h = "<p title=foo><p title=bar><p class=foo><p class=bar>";
Document doc = Jsoup.parse(h);
Elements withTitle = doc.select("p[title]");
assertEquals(2, withTitle.size());
assertTrue(withTitle.hasAttr("title"));
assertFalse(withTitle.hasAttr("class"));
assertEquals("foo", withTitle.attr("title"));
withTitle.removeAttr("title");
assertEquals(2, withTitle.size()); // existing Elements are not reevaluated
assertEquals(0, doc.select("p[title]").size());
Elements ps = doc.select("p").attr("style", "classy");
assertEquals(4, ps.size());
assertEquals("classy", ps.last().attr("style"));
assertEquals("bar", ps.last().attr("class"));
}
@Test public void hasAttr() {
Document doc = Jsoup.parse("<p title=foo><p title=bar><p class=foo><p class=bar>");
Elements ps = doc.select("p");
assertTrue(ps.hasAttr("class"));
assertFalse(ps.hasAttr("style"));
}
@Test public void hasAbsAttr() {
Document doc = Jsoup.parse("<a id=1 href='/foo'>One</a> <a id=2 href='https://jsoup.org'>Two</a>");
Elements one = doc.select("#1");
Elements two = doc.select("#2");
Elements both = doc.select("a");
assertFalse(one.hasAttr("abs:href"));
assertTrue(two.hasAttr("abs:href"));
assertTrue(both.hasAttr("abs:href")); // hits on #2
}
@Test public void attr() {
Document doc = Jsoup.parse("<p title=foo><p title=bar><p class=foo><p class=bar>");
String classVal = doc.select("p").attr("class");
assertEquals("foo", classVal);
}
@Test public void absAttr() {
Document doc = Jsoup.parse("<a id=1 href='/foo'>One</a> <a id=2 href='https://jsoup.org'>Two</a>");
Elements one = doc.select("#1");
Elements two = doc.select("#2");
Elements both = doc.select("a");
assertEquals("", one.attr("abs:href"));
assertEquals("https://jsoup.org", two.attr("abs:href"));
assertEquals("https://jsoup.org", both.attr("abs:href"));
}
@Test public void classes() {
Document doc = Jsoup.parse("<div><p class='mellow yellow'></p><p class='red green'></p>");
Elements els = doc.select("p");
assertTrue(els.hasClass("red"));
assertFalse(els.hasClass("blue"));
els.addClass("blue");
els.removeClass("yellow");
els.toggleClass("mellow");
assertEquals("blue", els.get(0).className());
assertEquals("red green blue mellow", els.get(1).className());
}
@Test public void hasClassCaseInsensitive() {
Elements els = Jsoup.parse("<p Class=One>One <p class=Two>Two <p CLASS=THREE>THREE").select("p");
Element one = els.get(0);
Element two = els.get(1);
Element thr = els.get(2);
assertTrue(one.hasClass("One"));
assertTrue(one.hasClass("ONE"));
assertTrue(two.hasClass("TWO"));
assertTrue(two.hasClass("Two"));
assertTrue(thr.hasClass("ThreE"));
assertTrue(thr.hasClass("three"));
}
@Test public void text() {
String h = "<div><p>Hello<p>there<p>world</div>";
Document doc = Jsoup.parse(h);
assertEquals("Hello there world", doc.select("div > *").text());
}
@Test public void hasText() {
Document doc = Jsoup.parse("<div><p>Hello</p></div><div><p></p></div>");
Elements divs = doc.select("div");
assertTrue(divs.hasText());
assertFalse(doc.select("div + div").hasText());
}
@Test public void html() {
Document doc = Jsoup.parse("<div><p>Hello</p></div><div><p>There</p></div>");
Elements divs = doc.select("div");
assertEquals("<p>Hello</p>\n<p>There</p>", divs.html());
}
@Test public void outerHtml() {
Document doc = Jsoup.parse("<div><p>Hello</p></div><div><p>There</p></div>");
Elements divs = doc.select("div");
assertEquals("<div><p>Hello</p></div><div><p>There</p></div>", TextUtil.stripNewlines(divs.outerHtml()));
}
@Test public void setHtml() {
Document doc = Jsoup.parse("<p>One</p><p>Two</p><p>Three</p>");
Elements ps = doc.select("p");
ps.prepend("<b>Bold</b>").append("<i>Ital</i>");
assertEquals("<p><b>Bold</b>Two<i>Ital</i></p>", TextUtil.stripNewlines(ps.get(1).outerHtml()));
ps.html("<span>Gone</span>");
assertEquals("<p><span>Gone</span></p>", TextUtil.stripNewlines(ps.get(1).outerHtml()));
}
@Test public void val() {
Document doc = Jsoup.parse("<input value='one' /><textarea>two</textarea>");
Elements els = doc.select("input, textarea");
assertEquals(2, els.size());
assertEquals("one", els.val());
assertEquals("two", els.last().val());
els.val("three");
assertEquals("three", els.first().val());
assertEquals("three", els.last().val());
assertEquals("<textarea>three</textarea>", els.last().outerHtml());
}
@Test public void before() {
Document doc = Jsoup.parse("<p>This <a>is</a> <a>jsoup</a>.</p>");
doc.select("a").before("<span>foo</span>");
assertEquals("<p>This <span>foo</span><a>is</a> <span>foo</span><a>jsoup</a>.</p>", TextUtil.stripNewlines(doc.body().html()));
}
@Test public void after() {
Document doc = Jsoup.parse("<p>This <a>is</a> <a>jsoup</a>.</p>");
doc.select("a").after("<span>foo</span>");
assertEquals("<p>This <a>is</a><span>foo</span> <a>jsoup</a><span>foo</span>.</p>", TextUtil.stripNewlines(doc.body().html()));
}
@Test public void wrap() {
String h = "<p><b>This</b> is <b>jsoup</b></p>";
Document doc = Jsoup.parse(h);
doc.select("b").wrap("<i></i>");
assertEquals("<p><i><b>This</b></i> is <i><b>jsoup</b></i></p>", doc.body().html());
}
@Test public void wrapDiv() {
String h = "<p><b>This</b> is <b>jsoup</b>.</p> <p>How do you like it?</p>";
Document doc = Jsoup.parse(h);
doc.select("p").wrap("<div></div>");
assertEquals(
"<div>\n <p><b>This</b> is <b>jsoup</b>.</p>\n</div>\n<div>\n <p>How do you like it?</p>\n</div>",
doc.body().html());
}
@Test public void unwrap() {
String h = "<div><font>One</font> <font><a href=\"/\">Two</a></font></div";
Document doc = Jsoup.parse(h);
doc.select("font").unwrap();
assertEquals("<div>\n" +
" One <a href=\"/\">Two</a>\n" +
"</div>", doc.body().html());
}
@Test public void unwrapP() {
String h = "<p><a>One</a> Two</p> Three <i>Four</i> <p>Fix <i>Six</i></p>";
Document doc = Jsoup.parse(h);
doc.select("p").unwrap();
assertEquals("<a>One</a> Two Three <i>Four</i> Fix <i>Six</i>", TextUtil.stripNewlines(doc.body().html()));
}
@Test public void unwrapKeepsSpace() {
String h = "<p>One <span>two</span> <span>three</span> four</p>";
Document doc = Jsoup.parse(h);
doc.select("span").unwrap();
assertEquals("<p>One two three four</p>", doc.body().html());
}
@Test public void empty() {
Document doc = Jsoup.parse("<div><p>Hello <b>there</b></p> <p>now!</p></div>");
doc.outputSettings().prettyPrint(false);
doc.select("p").empty();
assertEquals("<div><p></p> <p></p></div>", doc.body().html());
}
@Test public void remove() {
Document doc = Jsoup.parse("<div><p>Hello <b>there</b></p> jsoup <p>now!</p></div>");
doc.outputSettings().prettyPrint(false);
doc.select("p").remove();
assertEquals("<div> jsoup </div>", doc.body().html());
}
@Test public void eq() {
String h = "<p>Hello<p>there<p>world";
Document doc = Jsoup.parse(h);
assertEquals("there", doc.select("p").eq(1).text());
assertEquals("there", doc.select("p").get(1).text());
}
@Test public void is() {
String h = "<p>Hello<p title=foo>there<p>world";
Document doc = Jsoup.parse(h);
Elements ps = doc.select("p");
assertTrue(ps.is("[title=foo]"));
assertFalse(ps.is("[title=bar]"));
}
@Test public void parents() {
Document doc = Jsoup.parse("<div><p>Hello</p></div><p>There</p>");
Elements parents = doc.select("p").parents();
assertEquals(3, parents.size());
assertEquals("div", parents.get(0).tagName());
assertEquals("body", parents.get(1).tagName());
assertEquals("html", parents.get(2).tagName());
}
@Test public void not() {
Document doc = Jsoup.parse("<div id=1><p>One</p></div> <div id=2><p><span>Two</span></p></div>");
Elements div1 = doc.select("div").not(":has(p > span)");
assertEquals(1, div1.size());
assertEquals("1", div1.first().id());
Elements div2 = doc.select("div").not("#1");
assertEquals(1, div2.size());
assertEquals("2", div2.first().id());
}
@Test public void tagNameSet() {
Document doc = Jsoup.parse("<p>Hello <i>there</i> <i>now</i></p>");
doc.select("i").tagName("em");
assertEquals("<p>Hello <em>there</em> <em>now</em></p>", doc.body().html());
}
@Test public void traverse() {
Document doc = Jsoup.parse("<div><p>Hello</p></div><div>There</div>");
final StringBuilder accum = new StringBuilder();
doc.select("div").traverse(new NodeVisitor() {
@Override
public void head(Node node, int depth) {
accum.append("<").append(node.nodeName()).append(">");
}
@Override
public void tail(Node node, int depth) {
accum.append("</").append(node.nodeName()).append(">");
}
});
assertEquals("<div><p><#text></#text></p></div><div><#text></#text></div>", accum.toString());
}
@Test public void forms() {
Document doc = Jsoup.parse("<form id=1><input name=q></form><div /><form id=2><input name=f></form>");
Elements els = doc.select("form, div");
assertEquals(3, els.size());
List<FormElement> forms = els.forms();
assertEquals(2, forms.size());
assertNotNull(forms.get(0));
assertNotNull(forms.get(1));
assertEquals("1", forms.get(0).id());
assertEquals("2", forms.get(1).id());
}
@Test public void comments() {
Document doc = Jsoup.parse("<!-- comment1 --><p><!-- comment2 --><p class=two><!-- comment3 -->");
List<Comment> comments = doc.select("p").comments();
assertEquals(2, comments.size());
assertEquals(" comment2 ", comments.get(0).getData());
assertEquals(" comment3 ", comments.get(1).getData());
List<Comment> comments1 = doc.select("p.two").comments();
assertEquals(1, comments1.size());
assertEquals(" comment3 ", comments1.get(0).getData());
}
@Test public void textNodes() {
Document doc = Jsoup.parse("One<p>Two<a>Three</a><p>Four</p>Five");
List<TextNode> textNodes = doc.select("p").textNodes();
assertEquals(2, textNodes.size());
assertEquals("Two", textNodes.get(0).text());
assertEquals("Four", textNodes.get(1).text());
}
@Test public void dataNodes() {
Document doc = Jsoup.parse("<p>One</p><script>Two</script><style>Three</style>");
List<DataNode> dataNodes = doc.select("p, script, style").dataNodes();
assertEquals(2, dataNodes.size());
assertEquals("Two", dataNodes.get(0).getWholeData());
assertEquals("Three", dataNodes.get(1).getWholeData());
doc = Jsoup.parse("<head><script type=application/json><crux></script><script src=foo>Blah</script>");
Elements script = doc.select("script[type=application/json]");
List<DataNode> scriptNode = script.dataNodes();
assertEquals(1, scriptNode.size());
DataNode dataNode = scriptNode.get(0);
assertEquals("<crux>", dataNode.getWholeData());
// check if they're live
dataNode.setWholeData("<cromulent>");
assertEquals("<script type=\"application/json\"><cromulent></script>", script.outerHtml());
}
@Test public void nodesEmpty() {
Document doc = Jsoup.parse("<p>");
assertEquals(0, doc.select("form").textNodes().size());
}
@Test public void classWithHyphen() {
Document doc = Jsoup.parse("<p class='tab-nav'>Check</p>");
Elements els = doc.getElementsByClass("tab-nav");
assertEquals(1, els.size());
assertEquals("Check", els.text());
}
@Test public void siblings() {
Document doc = Jsoup.parse("<div><p>1<p>2<p>3<p>4<p>5<p>6</div><div><p>7<p>8<p>9<p>10<p>11<p>12</div>");
Elements els = doc.select("p:eq(3)"); // gets p4 and p10
assertEquals(2, els.size());
Elements next = els.next();
assertEquals(2, next.size());
assertEquals("5", next.first().text());
assertEquals("11", next.last().text());
assertEquals(0, els.next("p:contains(6)").size());
final Elements nextF = els.next("p:contains(5)");
assertEquals(1, nextF.size());
assertEquals("5", nextF.first().text());
Elements nextA = els.nextAll();
assertEquals(4, nextA.size());
assertEquals("5", nextA.first().text());
assertEquals("12", nextA.last().text());
Elements nextAF = els.nextAll("p:contains(6)");
assertEquals(1, nextAF.size());
assertEquals("6", nextAF.first().text());
Elements prev = els.prev();
assertEquals(2, prev.size());
assertEquals("3", prev.first().text());
assertEquals("9", prev.last().text());
assertEquals(0, els.prev("p:contains(1)").size());
final Elements prevF = els.prev("p:contains(3)");
assertEquals(1, prevF.size());
assertEquals("3", prevF.first().text());
Elements prevA = els.prevAll();
assertEquals(6, prevA.size());
assertEquals("3", prevA.first().text());
assertEquals("7", prevA.last().text());
Elements prevAF = els.prevAll("p:contains(1)");
assertEquals(1, prevAF.size());
assertEquals("1", prevAF.first().text());
}
@Test public void eachText() {
Document doc = Jsoup.parse("<div><p>1<p>2<p>3<p>4<p>5<p>6</div><div><p>7<p>8<p>9<p>10<p>11<p>12<p></p></div>");
List<String> divText = doc.select("div").eachText();
assertEquals(2, divText.size());
assertEquals("1 2 3 4 5 6", divText.get(0));
assertEquals("7 8 9 10 11 12", divText.get(1));
List<String> pText = doc.select("p").eachText();
Elements ps = doc.select("p");
assertEquals(13, ps.size());
assertEquals(12, pText.size()); // not 13, as last doesn't have text
assertEquals("1", pText.get(0));
assertEquals("2", pText.get(1));
assertEquals("5", pText.get(4));
assertEquals("7", pText.get(6));
assertEquals("12", pText.get(11));
}
@Test public void eachAttr() {
Document doc = Jsoup.parse(
"<div><a href='/foo'>1</a><a href='http://example.com/bar'>2</a><a href=''>3</a><a>4</a>",
"http://example.com");
List<String> hrefAttrs = doc.select("a").eachAttr("href");
assertEquals(3, hrefAttrs.size());
assertEquals("/foo", hrefAttrs.get(0));
assertEquals("http://example.com/bar", hrefAttrs.get(1));
assertEquals("", hrefAttrs.get(2));
assertEquals(4, doc.select("a").size());
List<String> absAttrs = doc.select("a").eachAttr("abs:href");
assertEquals(3, absAttrs.size());
assertEquals(3, absAttrs.size());
assertEquals("http://example.com/foo", absAttrs.get(0));
assertEquals("http://example.com/bar", absAttrs.get(1));
assertEquals("http://example.com", absAttrs.get(2));
}
@Test public void setElementByIndex() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three");
Element newP = doc.createElement("p").text("New").attr("id", "new");
Elements ps = doc.select("p");
Element two = ps.get(1);
Element old = ps.set(1, newP);
assertSame(old, two);
assertSame(newP, ps.get(1)); // replaced in list
assertEquals("<p>One</p>\n<p id=\"new\">New</p>\n<p>Three</p>", doc.body().html()); // replaced in dom
}
@Test public void removeElementByIndex() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three");
Elements ps = doc.select("p");
Element two = ps.get(1);
assertTrue(ps.contains(two));
Element old = ps.remove(1);
assertSame(old, two);
assertEquals(2, ps.size()); // removed from list
assertFalse(ps.contains(old));
assertEquals("<p>One</p>\n<p>Three</p>", doc.body().html()); // removed from dom
}
@Test public void removeElementByObject() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three");
Elements ps = doc.select("p");
Element two = ps.get(1);
assertTrue(ps.contains(two));
boolean removed = ps.remove(two);
assertTrue(removed);
assertEquals(2, ps.size()); // removed from list
assertFalse(ps.contains(two));
assertEquals("<p>One</p>\n<p>Three</p>", doc.body().html()); // removed from dom
}
@Test public void removeElementObjectNoops() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three");
String origHtml = doc.html();
Element newP = doc.createElement("p").text("New");
Elements ps = doc.select("p");
int size = ps.size();
assertFalse(ps.remove(newP));
assertFalse(ps.remove(newP.childNodes()));
assertEquals(origHtml, doc.html());
assertEquals(size, ps.size());
}
@Test public void clear() {
Document doc = Jsoup.parse("<p>One</p><p>Two</p><div>Three</div>");
Elements ps = doc.select("p");
assertEquals(2, ps.size());
ps.clear();
assertEquals(0, ps.size());
assertEquals(0, doc.select("p").size());
}
@Test public void removeAll() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three<p>Four</p><div>Div");
Elements ps = doc.select("p");
assertEquals(4, ps.size());
Elements midPs = doc.select("p:gt(0):lt(3)"); //Two and Three
assertEquals(2, midPs.size());
boolean removed = ps.removeAll(midPs);
assertEquals(2, ps.size());
assertTrue(removed);
assertEquals(2, midPs.size());
Elements divs = doc.select("div");
assertEquals(1, divs.size());
assertFalse(ps.removeAll(divs));
assertEquals(2, ps.size());
assertEquals("<p>One</p>\n<p>Four</p>\n<div>Div</div>", doc.body().html());
}
@Test public void retainAll() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three<p>Four</p><div>Div");
Elements ps = doc.select("p");
assertEquals(4, ps.size());
Elements midPs = doc.select("p:gt(0):lt(3)"); //Two and Three
assertEquals(2, midPs.size());
boolean removed = ps.retainAll(midPs);
assertEquals(2, ps.size());
assertTrue(removed);
assertEquals(2, midPs.size());
assertEquals("<p>Two</p>\n<p>Three</p>\n<div>Div</div>", doc.body().html());
Elements psAgain = doc.select("p");
assertFalse(midPs.retainAll(psAgain));
assertEquals("<p>Two</p>\n<p>Three</p>\n<div>Div</div>", doc.body().html());
}
@Test public void iteratorRemovesFromDom() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three<p>Four");
Elements ps = doc.select("p");
assertEquals(4, ps.size());
for (Iterator<Element> it = ps.iterator(); it.hasNext(); ) {
Element el = it.next();
if (el.text().contains("Two"))
it.remove();
}
assertEquals(3, ps.size());
assertEquals("<p>One</p>\n<p>Three</p>\n<p>Four</p>", doc.body().html());
}
@Test public void removeIf() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three<p>Four");
Elements ps = doc.select("p");
assertEquals(4, ps.size());
boolean removed = ps.removeIf(el -> el.text().contains("Two"));
assertTrue(removed);
assertEquals(3, ps.size());
assertEquals("<p>One</p>\n<p>Three</p>\n<p>Four</p>", doc.body().html());
assertFalse(ps.removeIf(el -> el.text().contains("Five")));
assertEquals("<p>One</p>\n<p>Three</p>\n<p>Four</p>", doc.body().html());
}
@Test public void removeIfSupportsConcurrentRead() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three<p>Four");
Elements ps = doc.select("p");
assertEquals(4, ps.size());
boolean removed = ps.removeIf(el -> ps.contains(el));
assertTrue(removed);
assertEquals(0, ps.size());
assertEquals("", doc.body().html());
}
@Test public void replaceAll() {
Document doc = Jsoup.parse("<p>One<p>Two<p>Three<p>Four");
Elements ps = doc.select("p");
assertEquals(4, ps.size());
ps.replaceAll(el -> {
Element div = doc.createElement("div");
div.text(el.text());
return div;
});
// Check Elements
for (Element p : ps) {
assertEquals("div", p.tagName());
}
// check dom
assertEquals("<div>One</div><div>Two</div><div>Three</div><div>Four</div>", TextUtil.normalizeSpaces(doc.body().html()));
}
@Test void selectFirst() {
Document doc = Jsoup.parse("<p>One</p><p>Two <span>Jsoup</span></p><p><span>Three</span></p>");
Element span = doc.children().selectFirst("span");
assertNotNull(span);
assertEquals("Jsoup", span.text());
}
@Test void selectFirstNullOnNoMatch() {
Document doc = Jsoup.parse("<p>One</p><p>Two</p><p>Three</p>");
Element span = doc.children().selectFirst("span");
assertNull(span);
}
@Test void expectFirst() {
Document doc = Jsoup.parse("<p>One</p><p>Two <span>Jsoup</span></p><p><span>Three</span></p>");
Element span = doc.children().expectFirst("span");
assertNotNull(span);
assertEquals("Jsoup", span.text());
}
@Test void expectFirstThrowsOnNoMatch() {
Document doc = Jsoup.parse("<p>One</p><p>Two</p><p>Three</p>");
boolean threw = false;
try {
Element span = doc.children().expectFirst("span");
} catch (IllegalArgumentException e) {
threw = true;
assertEquals("No elements matched the query 'span' in the elements.", e.getMessage());
}
assertTrue(threw);
}
@Test void selectFirstFromPreviousSelect() {
Document doc = Jsoup.parse("<div><p>One</p></div><div><p><span>Two</span></p></div><div><p><span>Three</span></p></div>");
Elements divs = doc.select("div");
assertEquals(3, divs.size());
Element span = divs.selectFirst("p span");
assertNotNull(span);
assertEquals("Two", span.text());
// test roots
assertNotNull(span.selectFirst("span")); // reselect self
assertNull(span.selectFirst(">span")); // no span>span
assertNotNull(divs.selectFirst("div")); // reselect self, similar to element.select
assertNull(divs.selectFirst(">div")); // no div>div
}
}
|
ElementsTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/fleet/src/test/java/org/elasticsearch/xpack/fleet/action/DeleteSecretRequestTests.java
|
{
"start": 425,
"end": 989
}
|
class ____ extends AbstractWireSerializingTestCase<DeleteSecretRequest> {
@Override
protected Writeable.Reader<DeleteSecretRequest> instanceReader() {
return DeleteSecretRequest::new;
}
@Override
protected DeleteSecretRequest createTestInstance() {
return new DeleteSecretRequest(randomAlphaOfLengthBetween(2, 10));
}
@Override
protected DeleteSecretRequest mutateInstance(DeleteSecretRequest instance) {
return new DeleteSecretRequest(instance.id() + randomAlphaOfLength(1));
}
}
|
DeleteSecretRequestTests
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/config/status/StatusConfiguration.java
|
{
"start": 1516,
"end": 1890
}
|
class ____ {
private static final StatusLogger LOGGER = StatusLogger.getLogger();
private final Lock lock = new ReentrantLock();
private volatile boolean initialized;
@Nullable
private PrintStream output;
@Nullable
private Level level;
/**
* Specifies how verbose the StatusLogger should be.
* @deprecated This
|
StatusConfiguration
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/dataformat/SwiftMtDataFormat.java
|
{
"start": 1468,
"end": 2685
}
|
class ____ extends DataFormatDefinition {
@XmlAttribute
@Metadata(javaType = "java.lang.Boolean")
private String writeInJson;
public SwiftMtDataFormat() {
super("swiftMt");
}
protected SwiftMtDataFormat(SwiftMtDataFormat source) {
super(source);
this.writeInJson = source.writeInJson;
}
public SwiftMtDataFormat(String writeInJson) {
this();
this.writeInJson = writeInJson;
}
private SwiftMtDataFormat(Builder builder) {
this();
this.writeInJson = builder.writeInJson;
}
@Override
public SwiftMtDataFormat copyDefinition() {
return new SwiftMtDataFormat(this);
}
public String getWriteInJson() {
return writeInJson;
}
/**
* The flag indicating that messages must be marshalled in a JSON format.
*
* @param writeInJson {@code true} if messages must be marshalled in a JSON format, {@code false} otherwise.
*/
public void setWriteInJson(String writeInJson) {
this.writeInJson = writeInJson;
}
/**
* {@code Builder} is a specific builder for {@link SwiftMtDataFormat}.
*/
@XmlTransient
public static
|
SwiftMtDataFormat
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/ext/TestPropertyCreatorSubtypesExternalPropertyMissingProperty.java
|
{
"start": 1688,
"end": 2136
}
|
class ____ extends Fruit {
private int seedCount;
Apple(String name, int b) {
super(name);
seedCount = b;
}
public int getSeedCount() {
return seedCount;
}
@JsonCreator
public static Apple getApple(@JsonProperty("name") String name, @JsonProperty("seedCount") int seedCount) {
return new Apple(name, seedCount);
}
}
static
|
Apple
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/utils/UrlUtils.java
|
{
"start": 990,
"end": 1326
}
|
class ____ {
private UrlUtils() {
}
// Encode the URL up to the point of the query. Do not pass the query portion into this method.
public static String encodePath(String path) throws UnsupportedEncodingException {
return URLEncoder.encode(path, StandardCharsets.UTF_8.name()).replace("+", "%20");
}
}
|
UrlUtils
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/TypeParameterShadowingTest.java
|
{
"start": 11339,
"end": 11439
}
|
class ____<T> {
<B extends Object & Comparable> void something(B b) {
|
Test
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/aop/hotswap/HotswappableProxyingClass.java
|
{
"start": 859,
"end": 1174
}
|
class ____ {
public int invocationCount = 0;
@Mutating("name")
public String test(String name) {
invocationCount++;
return "Name is " + name;
}
public String test2(String another) {
invocationCount++;
return "Name is " + another;
}
}
|
HotswappableProxyingClass
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader-tools/src/main/java/org/springframework/boot/loader/tools/MainClassFinder.java
|
{
"start": 10496,
"end": 12286
}
|
class ____ extends ClassVisitor {
private final Set<String> annotationNames = new LinkedHashSet<>();
private boolean mainMethodFound;
private boolean java25OrLater;
ClassDescriptor() {
super(SpringAsmInfo.ASM_VERSION);
}
@Override
public void visit(int version, int access, String name, String signature, String superName,
String[] interfaces) {
if (version >= 69) {
this.java25OrLater = true;
}
}
@Override
public @Nullable AnnotationVisitor visitAnnotation(String desc, boolean visible) {
this.annotationNames.add(Type.getType(desc).getClassName());
return null;
}
@Override
public @Nullable MethodVisitor visitMethod(int access, String name, String desc, String signature,
String[] exceptions) {
if (hasRequiredAccess(access) && MAIN_METHOD_NAME.equals(name)) {
if (MAIN_METHOD_TYPE.getDescriptor().equals(desc)
|| (this.java25OrLater && PARAMETERLESS_MAIN_METHOD_TYPE.getDescriptor().equals(desc))) {
this.mainMethodFound = true;
}
}
return null;
}
private boolean hasRequiredAccess(int access) {
if (this.java25OrLater) {
return !isAccess(access, Opcodes.ACC_PRIVATE) && isAccess(access, Opcodes.ACC_STATIC);
}
else {
return isAccess(access, Opcodes.ACC_PUBLIC, Opcodes.ACC_STATIC);
}
}
private boolean isAccess(int access, int... requiredOpsCodes) {
for (int requiredOpsCode : requiredOpsCodes) {
if ((access & requiredOpsCode) == 0) {
return false;
}
}
return true;
}
boolean isMainMethodFound() {
return this.mainMethodFound;
}
Set<String> getAnnotationNames() {
return this.annotationNames;
}
}
/**
* Callback for handling {@link MainClass MainClasses}.
*
* @param <T> the callback's return type
*/
|
ClassDescriptor
|
java
|
micronaut-projects__micronaut-core
|
http/src/main/java/io/micronaut/http/ssl/SslConfigurationException.java
|
{
"start": 735,
"end": 1300
}
|
class ____ extends RuntimeException {
/**
* @param message The message
*/
public SslConfigurationException(String message) {
super(message);
}
/**
* @param message The message
* @param cause The throwable
*/
public SslConfigurationException(String message, Throwable cause) {
super(message, cause);
}
/**
* @param cause The throwable
*/
public SslConfigurationException(Throwable cause) {
super("An error occurred configuring SSL", cause);
}
}
|
SslConfigurationException
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/io/parsing/SkipParser.java
|
{
"start": 943,
"end": 1048
}
|
class ____ used
* by decoders who (unlink encoders) are required to implement methods to skip.
*/
public
|
is
|
java
|
junit-team__junit5
|
platform-tooling-support-tests/src/test/java/platform/tooling/support/tests/ReflectionCompatibilityTests.java
|
{
"start": 964,
"end": 1763
}
|
class ____ {
@Test
void gradle_wrapper(@TempDir Path workspace, @FilePrefix("gradle") OutputFiles outputFiles) throws Exception {
var result = ProcessStarters.gradlew() //
.workingDir(copyToWorkspace(Projects.REFLECTION_TESTS, workspace)) //
.addArguments("-Dmaven.repo=" + MavenRepo.dir()) //
.addArguments("build", "--no-daemon", "--stacktrace", "--no-build-cache", "--warning-mode=fail") //
.putEnvironment("JDK17", Helper.getJavaHome(17).orElseThrow(TestAbortedException::new).toString()) //
.redirectOutput(outputFiles) //
.startAndWait();
assertEquals(0, result.exitCode());
assertTrue(result.stdOut().lines().anyMatch(line -> line.contains("BUILD SUCCESSFUL")));
assertThat(result.stdOut()).contains("Using Java version: 17");
}
}
|
ReflectionCompatibilityTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/ingest/GeoGridProcessor.java
|
{
"start": 6740,
"end": 7096
}
|
class ____ {
abstract Geometry makeGeometry();
abstract List<String> makeChildren();
// Most tilers have no intersecting non-children
List<String> makeNonChildren() {
return Collections.emptyList();
}
abstract int getPrecision();
abstract String getParent();
}
static
|
TileHandler
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/DataTypeExtractorTest.java
|
{
"start": 48707,
"end": 49020
}
|
class ____ {
@DataTypeHint("INT")
public ListView<?> listView;
}
// --------------------------------------------------------------------------------------------
/** Table function that uses a big tuple with constructor defined field order. */
public static
|
AccumulatorWithInvalidView
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inlineme/ValidatorTest.java
|
{
"start": 9705,
"end": 10291
}
|
class ____ {
private final String str = null;
@InlineMe(replacement = "str;")
@Deprecated
// BUG: Diagnostic contains: deprecated or less visible API elements: str
public String before() {
return str;
}
}
""")
.doTest();
}
@Test
public void instanceMethod_publicVariable() {
helper
.addSourceLines(
"Client.java",
"""
import com.google.errorprone.annotations.InlineMe;
public final
|
Client
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptive/AdaptiveScheduler.java
|
{
"start": 75754,
"end": 77404
}
|
class ____ {@link CheckpointStatsListener} in a way that checkpoint-related
* events are actually executed in the {@code AdaptiveScheduler}'s main thread.
*/
private CheckpointStatsListener createCheckpointStatsListener() {
return new CheckpointStatsListener() {
@Override
public void onFailedCheckpoint() {
runIfSupported(CheckpointStatsListener::onFailedCheckpoint, "onFailedCheckpoint");
}
@Override
public void onCompletedCheckpoint() {
runIfSupported(
CheckpointStatsListener::onCompletedCheckpoint, "onCompletedCheckpoint");
}
private void runIfSupported(
ThrowingConsumer<CheckpointStatsListener, RuntimeException> callback,
String callbackLabel) {
AdaptiveScheduler.this
.getMainThreadExecutor()
.execute(
() ->
state.tryRun(
CheckpointStatsListener.class,
callback,
logger ->
logger.debug(
"{} is not supported by {}.",
callbackLabel,
state.getClass().getName())));
}
};
}
}
|
implementing
|
java
|
apache__flink
|
flink-filesystems/flink-s3-fs-base/src/main/java/org/apache/flink/fs/s3/common/AbstractS3FileSystemFactory.java
|
{
"start": 1824,
"end": 11472
}
|
class ____ implements FileSystemFactory {
public static final ConfigOption<String> ACCESS_KEY =
ConfigOptions.key("s3.access-key")
.stringType()
.noDefaultValue()
.withFallbackKeys("s3.access.key")
.withDescription("This optionally defines S3 access key.");
public static final ConfigOption<String> SECRET_KEY =
ConfigOptions.key("s3.secret-key")
.stringType()
.noDefaultValue()
.withFallbackKeys("s3.secret.key")
.withDescription("This optionally defines S3 secret key.");
public static final ConfigOption<String> ENDPOINT =
ConfigOptions.key("s3.endpoint")
.stringType()
.noDefaultValue()
.withDescription("This optionally defines S3 endpoint.");
public static final ConfigOption<String> S5CMD_PATH =
ConfigOptions.key("s3.s5cmd.path")
.stringType()
.noDefaultValue()
.withDescription(
"When specified, s5cmd will be used for coping files to/from S3. Currently supported only "
+ "during RocksDB Incremental state recovery.");
public static final ConfigOption<String> S5CMD_EXTRA_ARGS =
ConfigOptions.key("s3.s5cmd.args")
.stringType()
.defaultValue("-r 0")
.withDescription(
"Extra arguments to be passed to s5cmd. For example, --no-sign-request for public buckets and -r 10 for 10 retries");
public static final ConfigOption<MemorySize> S5CMD_BATCH_MAX_SIZE =
ConfigOptions.key("s3.s5cmd.batch.max-size")
.memoryType()
.defaultValue(MemorySize.ofMebiBytes(1024))
.withDescription("Maximum size of files to download per one call to s5cmd.");
public static final ConfigOption<Integer> S5CMD_BATCH_MAX_FILES =
ConfigOptions.key("s3.s5cmd.batch.max-files")
.intType()
.defaultValue(100)
.withDescription("Maximum number of files to download per one call to s5cmd");
public static final ConfigOption<Long> PART_UPLOAD_MIN_SIZE =
ConfigOptions.key("s3.upload.min.part.size")
.longType()
.defaultValue(FlinkS3FileSystem.S3_MULTIPART_MIN_PART_SIZE)
.withDescription(
"This option is relevant to the Recoverable Writer and sets the min size of data that "
+ "buffered locally, before being sent to S3. Flink also takes care of checkpointing locally "
+ "buffered data. This value cannot be less than 5MB or greater than 5GB (limits set by Amazon).");
public static final ConfigOption<Integer> MAX_CONCURRENT_UPLOADS =
ConfigOptions.key("s3.upload.max.concurrent.uploads")
.intType()
.defaultValue(Runtime.getRuntime().availableProcessors())
.withDescription(
"This option is relevant to the Recoverable Writer and limits the number of "
+ "parts that can be concurrently in-flight. By default, this is set to "
+ Runtime.getRuntime().availableProcessors()
+ ".");
/** The substring to be replaced by random entropy in checkpoint paths. */
public static final ConfigOption<String> ENTROPY_INJECT_KEY_OPTION =
ConfigOptions.key("s3.entropy.key")
.stringType()
.noDefaultValue()
.withDescription(
"This option can be used to improve performance due to sharding issues on Amazon S3. "
+ "For file creations with entropy injection, this key will be replaced by random "
+ "alphanumeric characters. For other file creations, the key will be filtered out.");
/** The number of entropy characters, in case entropy injection is configured. */
public static final ConfigOption<Integer> ENTROPY_INJECT_LENGTH_OPTION =
ConfigOptions.key("s3.entropy.length")
.intType()
.defaultValue(4)
.withDescription(
"When '"
+ ENTROPY_INJECT_KEY_OPTION.key()
+ "' is set, this option defines the number of "
+ "random characters to replace the entropy key with.");
// ------------------------------------------------------------------------
private static final String INVALID_ENTROPY_KEY_CHARS = "^.*[~#@*+%{}<>\\[\\]|\"\\\\].*$";
private static final Logger LOG = LoggerFactory.getLogger(AbstractS3FileSystemFactory.class);
/** Name of this factory for logging. */
private final String name;
private final HadoopConfigLoader hadoopConfigLoader;
private Configuration flinkConfig;
protected AbstractS3FileSystemFactory(String name, HadoopConfigLoader hadoopConfigLoader) {
this.name = name;
this.hadoopConfigLoader = hadoopConfigLoader;
}
// ------------------------------------------------------------------------
@Override
public void configure(Configuration config) {
flinkConfig = config;
hadoopConfigLoader.setFlinkConfig(config);
}
@Override
public FileSystem create(URI fsUri) throws IOException {
Configuration flinkConfig = this.flinkConfig;
if (flinkConfig == null) {
LOG.warn(
"Creating S3 FileSystem without configuring the factory. All behavior will be default.");
flinkConfig = new Configuration();
}
LOG.debug("Creating S3 file system backed by {}", name);
LOG.debug("Loading Hadoop configuration for {}", name);
try {
// create the Hadoop FileSystem
org.apache.hadoop.conf.Configuration hadoopConfig =
hadoopConfigLoader.getOrLoadHadoopConfig();
AbstractS3DelegationTokenReceiver.updateHadoopConfig(hadoopConfig);
org.apache.hadoop.fs.FileSystem fs = createHadoopFileSystem();
fs.initialize(getInitURI(fsUri, hadoopConfig), hadoopConfig);
// load the entropy injection settings
String entropyInjectionKey = flinkConfig.get(ENTROPY_INJECT_KEY_OPTION);
int numEntropyChars = -1;
if (entropyInjectionKey != null) {
if (entropyInjectionKey.matches(INVALID_ENTROPY_KEY_CHARS)) {
throw new IllegalConfigurationException(
"Invalid character in value for "
+ ENTROPY_INJECT_KEY_OPTION.key()
+ " : "
+ entropyInjectionKey);
}
numEntropyChars = flinkConfig.get(ENTROPY_INJECT_LENGTH_OPTION);
if (numEntropyChars <= 0) {
throw new IllegalConfigurationException(
ENTROPY_INJECT_LENGTH_OPTION.key() + " must configure a value > 0");
}
}
final String[] localTmpDirectories =
ConfigurationUtils.parseTempDirectories(flinkConfig);
Preconditions.checkArgument(localTmpDirectories.length > 0);
final String localTmpDirectory = localTmpDirectories[0];
final long s3minPartSize = flinkConfig.get(PART_UPLOAD_MIN_SIZE);
final int maxConcurrentUploads = flinkConfig.get(MAX_CONCURRENT_UPLOADS);
final S3AccessHelper s3AccessHelper = getS3AccessHelper(fs);
return createFlinkFileSystem(
fs,
S5CmdConfiguration.of(flinkConfig).orElse(null),
localTmpDirectory,
entropyInjectionKey,
numEntropyChars,
s3AccessHelper,
s3minPartSize,
maxConcurrentUploads);
} catch (IOException e) {
throw e;
} catch (Exception e) {
throw new IOException(e.getMessage(), e);
}
}
protected FileSystem createFlinkFileSystem(
org.apache.hadoop.fs.FileSystem fs,
@Nullable S5CmdConfiguration s5CmdConfiguration,
String localTmpDirectory,
@Nullable String entropyInjectionKey,
int numEntropyChars,
@Nullable S3AccessHelper s3AccessHelper,
long s3minPartSize,
int maxConcurrentUploads) {
return new FlinkS3FileSystem(
fs,
s5CmdConfiguration,
localTmpDirectory,
entropyInjectionKey,
numEntropyChars,
s3AccessHelper,
s3minPartSize,
maxConcurrentUploads);
}
protected abstract org.apache.hadoop.fs.FileSystem createHadoopFileSystem();
protected abstract URI getInitURI(URI fsUri, org.apache.hadoop.conf.Configuration hadoopConfig);
@Nullable
protected abstract S3AccessHelper getS3AccessHelper(org.apache.hadoop.fs.FileSystem fs);
}
|
AbstractS3FileSystemFactory
|
java
|
spring-projects__spring-boot
|
module/spring-boot-kafka/src/main/java/org/springframework/boot/kafka/testcontainers/ApacheKafkaContainerConnectionDetailsFactory.java
|
{
"start": 1490,
"end": 1933
}
|
class ____
extends ContainerConnectionDetailsFactory<KafkaContainer, KafkaConnectionDetails> {
@Override
protected KafkaConnectionDetails getContainerConnectionDetails(ContainerConnectionSource<KafkaContainer> source) {
return new ApacheKafkaContainerConnectionDetails(source);
}
/**
* {@link KafkaConnectionDetails} backed by a {@link ContainerConnectionSource}.
*/
private static final
|
ApacheKafkaContainerConnectionDetailsFactory
|
java
|
netty__netty
|
microbench/src/main/java/io/netty/buffer/AllocationPatternSimulator.java
|
{
"start": 1652,
"end": 15522
}
|
class ____ {
/**
* An allocation pattern derived from a web socket proxy service.
*/
private static final int[] WEB_SOCKET_PROXY_PATTERN = {
// Size, Frequency
9, 316,
13, 3,
15, 10344,
17, 628,
21, 316,
36, 338,
48, 338,
64, 23,
128, 17,
256, 21272,
287, 69,
304, 65,
331, 11,
332, 7,
335, 2,
343, 2,
362, 1,
363, 16,
365, 17,
370, 11,
371, 51,
392, 11,
393, 4,
396, 3,
401, 1,
402, 3,
413, 1,
414, 2,
419, 16,
421, 1,
423, 16,
424, 46,
433, 1,
435, 1,
439, 3,
441, 13,
444, 3,
449, 1,
450, 1,
453, 2,
455, 3,
458, 3,
462, 7,
463, 8,
464, 1,
466, 59,
470, 1,
472, 2,
475, 1,
478, 2,
480, 12,
481, 16,
482, 2,
483, 2,
486, 1,
489, 2,
493, 2,
494, 1,
495, 1,
497, 14,
498, 1,
499, 2,
500, 58,
503, 1,
507, 1,
509, 2,
510, 2,
511, 13,
512, 3,
513, 4,
516, 1,
519, 2,
520, 1,
522, 5,
523, 1,
525, 15,
526, 1,
527, 55,
528, 2,
529, 1,
530, 1,
531, 3,
533, 1,
534, 1,
535, 1,
536, 10,
538, 4,
539, 3,
540, 2,
541, 1,
542, 3,
543, 10,
545, 5,
546, 1,
547, 14,
548, 1,
549, 53,
551, 1,
552, 1,
553, 1,
554, 1,
555, 2,
556, 11,
557, 3,
558, 7,
559, 4,
561, 3,
562, 1,
563, 6,
564, 3,
565, 13,
566, 31,
567, 24,
568, 1,
569, 1,
570, 4,
571, 2,
572, 9,
573, 7,
574, 3,
575, 2,
576, 4,
577, 2,
578, 7,
579, 12,
580, 38,
581, 22,
582, 1,
583, 3,
584, 5,
585, 9,
586, 9,
587, 6,
588, 3,
589, 5,
590, 8,
591, 23,
592, 42,
593, 3,
594, 5,
595, 11,
596, 10,
597, 7,
598, 5,
599, 13,
600, 26,
601, 41,
602, 8,
603, 14,
604, 18,
605, 14,
606, 16,
607, 35,
608, 57,
609, 74,
610, 13,
611, 24,
612, 22,
613, 52,
614, 88,
615, 28,
616, 23,
617, 37,
618, 70,
619, 74,
620, 31,
621, 59,
622, 110,
623, 37,
624, 67,
625, 110,
626, 55,
627, 140,
628, 71,
629, 141,
630, 141,
631, 147,
632, 190,
633, 254,
634, 349,
635, 635,
636, 5443,
637, 459,
639, 1,
640, 2,
642, 1,
644, 2,
645, 1,
647, 1,
649, 1,
650, 1,
652, 1,
655, 3,
656, 1,
658, 4,
659, 2,
660, 1,
661, 1,
662, 6,
663, 8,
664, 9,
665, 4,
666, 5,
667, 62,
668, 5,
693, 1,
701, 2,
783, 1,
941, 1,
949, 1,
958, 16,
988, 1,
1024, 29289,
1028, 1,
1086, 1,
1249, 2,
1263, 1,
1279, 24,
1280, 11,
1309, 1,
1310, 1,
1311, 2,
1343, 1,
1360, 2,
1483, 1,
1567, 1,
1957, 1,
2048, 2636,
2060, 1,
2146, 1,
2190, 1,
2247, 1,
2273, 1,
2274, 1,
2303, 106,
2304, 45,
2320, 1,
2333, 10,
2334, 14,
2335, 7,
2367, 7,
2368, 2,
2384, 7,
2399, 1,
2400, 14,
2401, 6,
2423, 1,
2443, 9,
2444, 1,
2507, 3,
3039, 1,
3140, 1,
3891, 1,
3893, 1,
4096, 26,
4118, 1,
4321, 1,
4351, 226,
4352, 15,
4370, 1,
4381, 1,
4382, 11,
4383, 10,
4415, 4,
4416, 3,
4432, 5,
4447, 1,
4448, 31,
4449, 14,
4471, 1,
4491, 42,
4492, 16,
4555, 26,
4556, 19,
4571, 1,
4572, 2,
4573, 53,
4574, 165,
5770, 1,
5803, 2,
6026, 1,
6144, 2,
6249, 1,
6278, 1,
6466, 1,
6680, 1,
6726, 2,
6728, 1,
6745, 1,
6746, 1,
6759, 1,
6935, 1,
6978, 1,
6981, 2,
6982, 1,
7032, 1,
7081, 1,
7086, 1,
7110, 1,
7172, 3,
7204, 2,
7236, 2,
7238, 1,
7330, 1,
7427, 3,
7428, 1,
7458, 1,
7459, 1,
7650, 2,
7682, 6,
7765, 1,
7937, 3,
7969, 1,
8192, 2,
8415, 1,
8447, 555,
8478, 3,
8479, 5,
8511, 2,
8512, 1,
8528, 1,
8543, 2,
8544, 9,
8545, 8,
8567, 1,
8587, 16,
8588, 12,
8650, 1,
8651, 9,
8652, 9,
8668, 3,
8669, 46,
8670, 195,
8671, 6,
10240, 4,
14336, 1,
14440, 4,
14663, 3,
14919, 1,
14950, 2,
15002, 1,
15159, 1,
15173, 2,
15205, 1,
15395, 1,
15396, 1,
15397, 2,
15428, 1,
15446, 1,
15619, 7,
15651, 5,
15683, 2,
15874, 8,
15906, 8,
15907, 2,
16128, 2,
16129, 37,
16161, 3,
16352, 2,
16383, 1,
16384, 42,
16610, 2,
16639, 9269,
16704, 2,
16736, 3,
16737, 2,
16779, 2,
16780, 7,
16843, 2,
16844, 5,
16860, 6,
16861, 67,
16862, 281,
16863, 13,
18432, 6,
};
private static final int CONCURRENCY_LEVEL = 4;
private static final int RUNNING_TIME_SECONDS = 120;
private static final ConcurrentHashMap<String, Integer> THREAD_NAMES = new ConcurrentHashMap<>();
AdaptiveByteBufAllocator adaptive128;
PooledByteBufAllocator pooled128;
AdaptiveByteBufAllocator adaptive2048;
PooledByteBufAllocator pooled2048;
int[] size;
int[] cumulativeFrequency;
int sumFrequency;
int count;
public static void main(String[] args) throws Exception {
int[] pattern = args.length == 0 ? WEB_SOCKET_PROXY_PATTERN : buildPattern(args[0]);
AllocationPatternSimulator runner = new AllocationPatternSimulator();
runner.setUp(pattern);
runner.run(CONCURRENCY_LEVEL, RUNNING_TIME_SECONDS);
}
private static int[] buildPattern(String jfrFile) throws IOException {
Path path = toAbsolutePath(jfrFile);
TreeMap<Integer, Integer> summation = new TreeMap<>();
try (RecordingFile eventReader = new RecordingFile(path)) {
while (eventReader.hasMoreEvents()) {
RecordedEvent event = eventReader.readEvent();
String name = event.getEventType().getName();
if (("AllocateBufferEvent".equals(name) || "io.netty.AllocateBuffer".equals(name)) &&
event.hasField("size")) {
int size = event.getInt("size");
summation.compute(size, (k, v) -> v == null ? 1 : v + 1);
}
}
}
if (summation.isEmpty()) {
throw new IllegalStateException("No 'AllocateBufferEvent' records found in JFR file: " + jfrFile);
}
int[] pattern = new int[summation.size() * 2];
int index = 0;
for (Map.Entry<Integer, Integer> entry : summation.entrySet()) {
pattern[index++] = entry.getKey();
pattern[index++] = entry.getValue();
}
return pattern;
}
@SuppressWarnings("JvmTaintAnalysis")
private static Path toAbsolutePath(String jfrFile) {
return Paths.get(jfrFile).toAbsolutePath();
}
void setUp(int[] pattern) {
adaptive128 = new AdaptiveByteBufAllocator();
pooled128 = new PooledByteBufAllocator();
adaptive2048 = new AdaptiveByteBufAllocator();
pooled2048 = new PooledByteBufAllocator();
PatternItr itr = new PatternItr(pattern);
size = new int[pattern.length >> 1];
cumulativeFrequency = new int[pattern.length >> 1];
sumFrequency = 0;
count = 0;
while (itr.next()) {
sumFrequency += itr.frequency();
size[count] = itr.size();
cumulativeFrequency[count] = sumFrequency;
count++;
}
}
void run(int concurrencyLevel, int runningTimeSeconds) throws Exception {
AllocConfig[] allocs = {
new AllocConfig(true, 128),
new AllocConfig(false, 128),
new AllocConfig(true, 512),
new AllocConfig(false, 512),
new AllocConfig(true, 1024),
new AllocConfig(false, 1024),
};
CountDownLatch startLatch = new CountDownLatch(1);
AtomicBoolean stopCondition = new AtomicBoolean();
List<Thread> threads = new ArrayList<>();
for (int i = 0; i < concurrencyLevel; i++) {
for (AllocConfig alloc : allocs) {
threads.add(alloc.start(startLatch, stopCondition));
}
}
DefaultCategoryDataset dataset = new DefaultCategoryDataset();
JFreeChart chart = ChartFactory.createLineChart("Memory Usage", "Time", "Bytes", dataset);
for (int i = 0; i < allocs.length; i++) {
chart.getCategoryPlot().getRenderer().setSeriesStroke(i, new BasicStroke(3.0f));
}
int windowWidth = 1400;
int windowHeight = 1050;
ImageIcon image = new ImageIcon(chart.createBufferedImage(windowWidth, windowHeight - 30));
JFrame frame = new JFrame("Results");
frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
frame.add(new JLabel(image));
frame.setBounds(0, 0, windowWidth, windowHeight);
frame.setVisible(true);
Runnable updateImage = () -> {
Rectangle bounds = frame.getBounds();
image.setImage(chart.createBufferedImage(bounds.width, bounds.height - 30));
frame.repaint();
};
frame.addComponentListener(new ComponentAdapter() {
@Override
public void componentResized(ComponentEvent e) {
updateImage.run();
}
});
startLatch.countDown();
System.out.println("Time," + Stream.of(allocs)
.map(AllocConfig::name)
.collect(Collectors.joining("\",\"", "\"", "\"")));
for (int i = 0; i < runningTimeSeconds; i++) {
Thread.sleep(1000);
Integer iteration = Integer.valueOf(i);
long[] usages = new long[allocs.length];
for (int j = 0; j < usages.length; j++) {
usages[j] = allocs[j].usedMemory();
}
System.out.println(iteration + "," + LongStream.of(usages)
.mapToObj(String::valueOf).collect(Collectors.joining(",")));
SwingUtilities.invokeLater(() -> {
for (int j = 0; j < usages.length; j++) {
dataset.addValue(usages[j], allocs[j].name(), iteration);
}
updateImage.run();
});
}
stopCondition.set(true);
for (Thread thread : threads) {
thread.join();
}
System.out.println("\nDone");
}
private final
|
AllocationPatternSimulator
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/type/TypeFactoryTest.java
|
{
"start": 1448,
"end": 1610
}
|
class ____<V> extends HashMap<String,V> { }
// And one more, now with obfuscated type names; essentially it's just Map<Int,Long>
static abstract
|
MyStringXMap
|
java
|
grpc__grpc-java
|
authz/src/main/java/io/grpc/authz/FileWatcherAuthorizationServerInterceptor.java
|
{
"start": 1587,
"end": 4345
}
|
class ____ implements ServerInterceptor {
private static final Logger logger =
Logger.getLogger(FileWatcherAuthorizationServerInterceptor.class.getName());
private volatile AuthorizationServerInterceptor internalAuthzServerInterceptor;
private final File policyFile;
private String policyContents;
private FileWatcherAuthorizationServerInterceptor(File policyFile) throws IOException {
this.policyFile = policyFile;
updateInternalInterceptor();
}
@Override
public <ReqT, RespT> ServerCall.Listener<ReqT> interceptCall(
ServerCall<ReqT, RespT> call, Metadata headers,
ServerCallHandler<ReqT, RespT> next) {
return internalAuthzServerInterceptor.interceptCall(call, headers, next);
}
void updateInternalInterceptor() throws IOException {
String currentPolicyContents = new String(Files.readAllBytes(policyFile.toPath()), UTF_8);
if (currentPolicyContents.equals(policyContents)) {
return;
}
policyContents = currentPolicyContents;
internalAuthzServerInterceptor = AuthorizationServerInterceptor.create(policyContents);
}
/**
* Policy is reloaded periodically as per the provided refresh interval. Unlike the
* constructor, exception thrown during reload will be caught and logged and the
* previous AuthorizationServerInterceptor will be used to make authorization
* decisions.
*
* @param period the period between successive file load executions.
* @param unit the time unit for period parameter
* @param executor the execute service we use to read and update authorization policy
* @return an object that caller should close when the file refreshes are not needed
*/
public Closeable scheduleRefreshes(
long period, TimeUnit unit, ScheduledExecutorService executor) throws IOException {
checkNotNull(executor, "scheduledExecutorService");
if (period <= 0) {
throw new IllegalArgumentException("Refresh interval must be greater than 0");
}
final ScheduledFuture<?> future =
executor.scheduleWithFixedDelay(new Runnable() {
@Override
public void run() {
try {
updateInternalInterceptor();
} catch (Exception e) {
logger.log(Level.WARNING, "Authorization Policy file reload failed", e);
}
}
}, period, period, unit);
return new Closeable() {
@Override public void close() {
future.cancel(false);
}
};
}
public static FileWatcherAuthorizationServerInterceptor create(File policyFile)
throws IOException {
checkNotNull(policyFile, "policyFile");
return new FileWatcherAuthorizationServerInterceptor(policyFile);
}
}
|
FileWatcherAuthorizationServerInterceptor
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/stream/ReactiveTransactionalStreamCommands.java
|
{
"start": 767,
"end": 31168
}
|
interface ____<K, F, V> extends ReactiveTransactionalRedisCommands {
/**
* Execute the command <a href="https://redis.io/commands/xack">XACK</a>.
* Summary: Marks a pending message as correctly processed, effectively removing it from the pending entries list
* of the consumer group. Return value of the command is the number of messages successfully acknowledged, that is,
* the IDs we were actually able to resolve in the PEL.
* <p>
* The {@code XACK} command removes one or multiple messages from the Pending Entries List (PEL) of a stream consumer
* group. A message is pending, and as such stored inside the PEL, when it was delivered to some consumer, normally
* as a side effect of calling {@code XREADGROUP}, or when a consumer took ownership of a message
* calling {@code XCLAIM}. The pending message was delivered to some consumer but the server is yet not sure it was
* processed at least once. So new calls to {@code XREADGROUP} to grab the messages history for a consumer
* (for instance using an ID of 0), will return such message. Similarly, the pending message will be listed by the
* {@code XPENDING} command, that inspects the PEL.
* <p>
* Once a consumer successfully processes a message, it should call {@code XACK} so that such message does not get
* processed again, and as a side effect, the PEL entry about this message is also purged, releasing memory from
* the Redis server.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key the key
* @param group the name of the consumer group
* @param ids the message ids to acknowledge
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xack(K key, String group, String... ids);
/**
* Execute the command <a href="https://redis.io/commands/xadd">XADD</a>.
* Summary: Appends the specified stream entry to the stream at the specified key. If the key does not exist, as a
* side effect of running this command the key is created with a stream value. The creation of stream's key can be
* disabled with the {@code NOMKSTREAM} option.
* <p>
* An entry is composed of a list of field-value pairs. The field-value pairs are stored in the same order they are
* given by the user. Commands that read the stream, such as {@code XRANGE} or {@code XREAD}, are guaranteed to
* return the fields and values exactly in the same order they were added by {@code XADD}.
* <p>
* {@code XADD} is the only Redis command that can add data to a stream, but there are other commands, such as
* {@code XDEL} and {@code XTRIM}, that are able to remove data from a stream.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key the key
* @param payload the payload to write to the stream, must not be {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xadd(K key, Map<F, V> payload);
/**
* Execute the command <a href="https://redis.io/commands/xadd">XADD</a>.
* Summary: Appends the specified stream entry to the stream at the specified key. If the key does not exist, as a
* side effect of running this command the key is created with a stream value. The creation of stream's key can be
* disabled with the {@code NOMKSTREAM} option.
* <p>
* An entry is composed of a list of field-value pairs. The field-value pairs are stored in the same order they are
* given by the user. Commands that read the stream, such as {@code XRANGE} or {@code XREAD}, are guaranteed to
* return the fields and values exactly in the same order they were added by {@code XADD}.
* <p>
* {@code XADD} is the only Redis command that can add data to a stream, but there are other commands, such as
* {@code XDEL} and {@code XTRIM}, that are able to remove data from a stream.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key the key
* @param args the extra parameters
* @param payload the payload to write to the stream, must not be {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xadd(K key, XAddArgs args, Map<F, V> payload);
/**
* Execute the command <a href="https://redis.io/commands/xautoclaim">XAUTOCLAIM</a>.
* Summary: Changes (or acquires) ownership of messages in a consumer group, as if the messages were delivered to
* the specified consumer.
* <p>
* This command transfers ownership of pending stream entries that match the specified criteria. Conceptually,
* {@code XAUTOCLAIM} is equivalent to calling {@code XPENDING} and then {@code XCLAIM}, but provides a more
* straightforward way to deal with message delivery failures via {@code SCAN}-like semantics.
* <p>
* Like {@code XCLAIM}, the command operates on the stream entries at {@code key} and in the context of the provided
* {@code group}. It transfers ownership to {@code consumer} of messages pending for more than {@code min-idle-time}
* milliseconds and having an equal or greater ID than {@code start}.
* <p>
* Group: stream
* Requires Redis 6.2.0+
* <p>
*
* @param key key the key
* @param group string the consumer group
* @param consumer string the consumer id
* @param minIdleTime the min pending time of the message to claim
* @param start the min id of the message to claim
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xautoclaim(K key, String group, String consumer, Duration minIdleTime, String start);
/**
* Execute the command <a href="https://redis.io/commands/xautoclaim">XAUTOCLAIM</a>.
* Summary: Changes (or acquires) ownership of messages in a consumer group, as if the messages were delivered to
* the specified consumer.
* <p>
* This command transfers ownership of pending stream entries that match the specified criteria. Conceptually,
* {@code XAUTOCLAIM} is equivalent to calling {@code XPENDING} and then {@code XCLAIM}, but provides a more
* straightforward way to deal with message delivery failures via {@code SCAN}-like semantics.
* <p>
* Like {@code XCLAIM}, the command operates on the stream entries at {@code key} and in the context of the provided
* {@code group}. It transfers ownership to {@code consumer} of messages pending for more than {@code min-idle-time}
* milliseconds and having an equal or greater ID than {@code start}.
* <p>
* Group: stream
* Requires Redis 6.2.0+
* <p>
*
* @param key key the key
* @param group string the consumer group
* @param consumer string the consumer id
* @param minIdleTime the min pending time of the message to claim
* @param start the min id of the message to claim
* @param count the upper limit of the number of entries to claim, default is 100.
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xautoclaim(K key, String group, String consumer, Duration minIdleTime, String start, int count);
/**
* Execute the command <a href="https://redis.io/commands/xautoclaim">XAUTOCLAIM</a>.
* Summary: Changes (or acquires) ownership of messages in a consumer group, as if the messages were delivered to
* the specified consumer.
* <p>
* This command transfers ownership of pending stream entries that match the specified criteria. Conceptually,
* {@code XAUTOCLAIM} is equivalent to calling {@code XPENDING} and then {@code XCLAIM}, but provides a more
* straightforward way to deal with message delivery failures via {@code SCAN}-like semantics.
* <p>
* Like {@code XCLAIM}, the command operates on the stream entries at {@code key} and in the context of the provided
* {@code group}. It transfers ownership to {@code consumer} of messages pending for more than {@code min-idle-time}
* milliseconds and having an equal or greater ID than {@code start}.
* <p>
* Group: stream
* Requires Redis 6.2.0+
* <p>
*
* @param key key the key
* @param group string the consumer group
* @param consumer string the consumer id
* @param minIdleTime the min pending time of the message to claim
* @param start the min id of the message to claim
* @param count the upper limit of the number of entries to claim, default is 100.
* @param justId if {@code true} the returned structure would only contain the id of the messages and not the payloads
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xautoclaim(K key, String group, String consumer, Duration minIdleTime, String start, int count, boolean justId);
/**
* Execute the command <a href="https://redis.io/commands/xclaim">XCLAIM</a>.
* Summary: In the context of a stream consumer group, this command changes the ownership of a pending message, so
* that the new owner is the consumer specified as the command argument.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param group string the consumer group
* @param consumer string the consumer id
* @param minIdleTime the min pending time of the message to claim
* @param id the message ids to claim, must not be empty, must not contain {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xclaim(K key, String group, String consumer, Duration minIdleTime, String... id);
/**
* Execute the command <a href="https://redis.io/commands/xclaim">XCLAIM</a>.
* Summary: In the context of a stream consumer group, this command changes the ownership of a pending message, so
* that the new owner is the consumer specified as the command argument.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param group string the consumer group
* @param consumer string the consumer id
* @param minIdleTime the min pending time of the message to claim
* @param args the extra command parameters
* @param id the message ids to claim, must not be empty, must not contain {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xclaim(K key, String group, String consumer, Duration minIdleTime, XClaimArgs args, String... id);
/**
* Execute the command <a href="https://redis.io/commands/xdel">XDEL</a>.
* Summary: Removes the specified entries from a stream, and returns the number of entries deleted. This number may
* be less than the number of IDs passed to the command in the case where some of the specified IDs do not exist in
* the stream.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param id the message ids, must not be empty, must not contain {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xdel(K key, String... id);
/**
* Execute the command <a href="https://redis.io/commands/xgroup-create">XGROUP CREATE</a>.
* Summary: Create a new consumer group uniquely identified by {@code groupname} for the stream stored at {@code key}
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param groupname the name of the group, must be unique, and not {@code null}
* @param from the last delivered entry in the stream from the new group's perspective. The special ID {@code $}
* is the ID of the last entry in the stream, but you can substitute it with any valid ID.
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xgroupCreate(K key, String groupname, String from);
/**
* Execute the command <a href="https://redis.io/commands/xgroup-create">XGROUP CREATE</a>.
* Summary: Create a new consumer group uniquely identified by {@code groupname} for the stream stored at {@code key}
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param groupname the name of the group, must be unique, and not {@code null}
* @param from the last delivered entry in the stream from the new group's perspective. The special ID {@code $}
* is the ID of the last entry in the stream, but you can substitute it with any valid ID.
* @param args the extra command parameters
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xgroupCreate(K key, String groupname, String from, XGroupCreateArgs args);
/**
* Execute the command <a href="https://redis.io/commands/xgroup-createconsumer">XGROUP CREATECONSUMER</a>.
* Summary: Create a consumer named {@code consumername} in the consumer group {@code groupname} of the stream
* that's stored at {@code key}.
* <p>
* Consumers are also created automatically whenever an operation, such as {@code XREADGROUP}, references a consumer
* that doesn't exist.
* <p>
* Group: stream
* Requires Redis 6.2.0+
* <p>
*
* @param key key the key
* @param groupname the name of the group, must be unique, and not {@code null}
* @param consumername the consumer name
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xgroupCreateConsumer(K key, String groupname, String consumername);
/**
* Execute the command <a href="https://redis.io/commands/xgroup-delconsumer">XGROUP DELCONSUMER</a>.
* Summary: Deletes a consumer from the consumer group.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param groupname the name of the group, must be unique, and not {@code null}
* @param consumername the consumer name
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xgroupDelConsumer(K key, String groupname, String consumername);
/**
* Execute the command <a href="https://redis.io/commands/xgroup-destroy">XGROUP DESTROY</a>.
* Summary: Completely destroys a consumer group. The consumer group will be destroyed even if there are active
* consumers, and pending messages, so make sure to call this command only when really needed.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param groupname the name of the group, must be unique, and not {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xgroupDestroy(K key, String groupname);
/**
* Execute the command <a href="https://redis.io/commands/xgroup-setid">XGROUP SETID</a>.
* Summary: Set the last delivered ID for a consumer group.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param groupname the name of the group, must be unique, and not {@code null}
* @param from the last delivered entry in the stream from the new group's perspective. The special ID {@code $}
* is the ID of the last entry in the stream, but you can substitute it with any valid ID.
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xgroupSetId(K key, String groupname, String from);
/**
* Execute the command <a href="https://redis.io/commands/xgroup-setid">XGROUP SETID</a>.
* Summary: Set the last delivered ID for a consumer group.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param groupname the name of the group, must be unique, and not {@code null}
* @param from the last delivered entry in the stream from the new group's perspective. The special ID {@code $}
* is the ID of the last entry in the stream, but you can substitute it with any valid ID.
* @param args the extra command parameters
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xgroupSetId(K key, String groupname, String from, XGroupSetIdArgs args);
/**
* Execute the command <a href="https://redis.io/commands/xlen">XLEN</a>.
* Summary: Returns the number of entries inside a stream.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xlen(K key);
/**
* Execute the command <a href="https://redis.io/commands/xrange">XRANGE</a>.
* Summary: The command returns the stream entries matching a given range of IDs.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param range the range, must not be {@code null}
* @param count the max number of entries to return
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xrange(K key, StreamRange range, int count);
/**
* Execute the command <a href="https://redis.io/commands/xrange">XRANGE</a>.
* Summary: The command returns the stream entries matching a given range of IDs.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param range the range, must not be {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xrange(K key, StreamRange range);
/**
* Execute the command <a href="https://redis.io/commands/xread">XREAD</a>.
* Summary: Read data from one or multiple streams, only returning entries with an ID greater than the last received
* ID reported by the caller. This command has an option to block if items are not available, in a similar fashion
* to {@code BRPOP} or {@code BZPOPMIN} and others.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key of the stream
* @param id the last read id
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xread(K key, String id);
/**
* Execute the command <a href="https://redis.io/commands/xread">XREAD</a>.
* Summary: Read data from one or multiple streams, only returning entries with an ID greater than the last received
* ID reported by the caller. This command has an option to block if items are not available, in a similar fashion
* to {@code BRPOP} or {@code BZPOPMIN} and others.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param lastIdsPerStream the map of key -> id indicating the last received id per stream to read
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xread(Map<K, String> lastIdsPerStream);
/**
* Execute the command <a href="https://redis.io/commands/xread">XREAD</a>.
* Summary: Read data from one or multiple streams, only returning entries with an ID greater than the last received
* ID reported by the caller. This command has an option to block if items are not available, in a similar fashion
* to {@code BRPOP} or {@code BZPOPMIN} and others.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key of the stream
* @param id the last read id
* @param args the extra parameter
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xread(K key, String id, XReadArgs args);
/**
* Execute the command <a href="https://redis.io/commands/xread">XREAD</a>.
* Summary: Read data from one or multiple streams, only returning entries with an ID greater than the last received
* ID reported by the caller. This command has an option to block if items are not available, in a similar fashion
* to {@code BRPOP} or {@code BZPOPMIN} and others.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param lastIdsPerStream the map of key -> id indicating the last received id per stream to read
* @param args the extra parameter
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xread(Map<K, String> lastIdsPerStream, XReadArgs args);
/**
* Execute the command <a href="https://redis.io/commands/xreadgroup">XREADGROUP</a>.
* Summary: The {@code XREADGROUP} command is a special version of the {@code XREAD} command with support for
* consumer groups.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param group the group name
* @param consumer the consumer name
* @param key the stream key
* @param id the last read id
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xreadgroup(String group, String consumer, K key, String id);
/**
* Execute the command <a href="https://redis.io/commands/xreadgroup">XREADGROUP</a>.
* Summary: The {@code XREADGROUP} command is a special version of the {@code XREAD} command with support for
* consumer groups.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param group the group name
* @param consumer the consumer name
* @param lastIdsPerStream the map of key -> id indicating the last received id per stream to read
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xreadgroup(String group, String consumer, Map<K, String> lastIdsPerStream);
/**
* Execute the command <a href="https://redis.io/commands/xreadgroup">XREADGROUP</a>.
* Summary: The {@code XREADGROUP} command is a special version of the {@code XREAD} command with support for
* consumer groups.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param group the group name
* @param consumer the consumer name
* @param key the stream key
* @param id the last read id
* @param args the extra parameter
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xreadgroup(String group, String consumer, K key, String id, XReadGroupArgs args);
/**
* Execute the command <a href="https://redis.io/commands/xreadgroup">XREADGROUP</a>.
* Summary: The {@code XREADGROUP} command is a special version of the {@code XREAD} command with support for
* consumer groups.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param group the group name
* @param consumer the consumer name
* @param lastIdsPerStream the map of key -> id indicating the last received id per stream to read
* @param args the extra parameter
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xreadgroup(String group, String consumer, Map<K, String> lastIdsPerStream, XReadGroupArgs args);
/**
* Execute the command <a href="https://redis.io/commands/xrevrange">XREVRANGE</a>.
* Summary: This command is exactly like {@code XRANGE}, but with the notable difference of returning the entries
* in reverse order, and also taking the start-end range in reverse order: in {@code XREVRANGE} you need to state
* the end ID and later the start ID, and the command will produce all the element between (or exactly like) the
* two IDs, starting from the end side.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param range the range, must not be {@code null}
* @param count the max number of entries to return
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xrevrange(K key, StreamRange range, int count);
/**
* Execute the command <a href="https://redis.io/commands/xrange">XRANGE</a>.
* Summary: This command is exactly like {@code XRANGE}, but with the notable difference of returning the entries
* in reverse order, and also taking the start-end range in reverse order: in {@code XREVRANGE} you need to state
* the end ID and later the start ID, and the command will produce all the element between (or exactly like) the
* two IDs, starting from the end side.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key key the key
* @param range the range, must not be {@code null}
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xrevrange(K key, StreamRange range);
/**
* Execute the command <a href="https://redis.io/commands/xtrim">XTRIM</a>.
* Summary: Trims the stream by evicting older entries (entries with lower IDs) if needed.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key the key
* @param threshold the threshold
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xtrim(K key, String threshold);
/**
* Execute the command <a href="https://redis.io/commands/xtrim">XTRIM</a>.
* Summary: Trims the stream by evicting older entries (entries with lower IDs) if needed.
* <p>
* Group: stream
* Requires Redis 5.0.0+
* <p>
*
* @param key the key
* @param args the extra parameters
* @return A {@code Uni} emitting {@code null} when the command has been enqueued successfully in the transaction, a failure
* otherwise. In the case of failure, the transaction is discarded.
*/
Uni<Void> xtrim(K key, XTrimArgs args);
/**
* Execute the command <a href="https://redis.io/commands/xpending">XPENDING</a>.
* Summary: The XPENDING command is the
|
ReactiveTransactionalStreamCommands
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/RPriorityBlockingQueue.java
|
{
"start": 782,
"end": 868
}
|
interface ____<V> extends RBlockingQueue<V>, RPriorityQueue<V> {
}
|
RPriorityBlockingQueue
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/mover/TestStorageMover.java
|
{
"start": 5172,
"end": 6874
}
|
class ____ {
final List<Path> dirs;
final List<Path> files;
final long fileSize;
final Map<Path, List<String>> snapshotMap;
final Map<Path, BlockStoragePolicy> policyMap;
NamespaceScheme(List<Path> dirs, List<Path> files, long fileSize,
Map<Path,List<String>> snapshotMap,
Map<Path, BlockStoragePolicy> policyMap) {
this.dirs = dirs == null? Collections.<Path>emptyList(): dirs;
this.files = files == null? Collections.<Path>emptyList(): files;
this.fileSize = fileSize;
this.snapshotMap = snapshotMap == null ?
Collections.<Path, List<String>>emptyMap() : snapshotMap;
this.policyMap = policyMap;
}
/**
* Create files/directories/snapshots.
*/
void prepare(DistributedFileSystem dfs, short repl) throws Exception {
for (Path d : dirs) {
dfs.mkdirs(d);
}
for (Path file : files) {
DFSTestUtil.createFile(dfs, file, fileSize, repl, 0L);
}
for (Map.Entry<Path, List<String>> entry : snapshotMap.entrySet()) {
for (String snapshot : entry.getValue()) {
SnapshotTestHelper.createSnapshot(dfs, entry.getKey(), snapshot);
}
}
}
/**
* Set storage policies according to the corresponding scheme.
*/
void setStoragePolicy(DistributedFileSystem dfs) throws Exception {
for (Map.Entry<Path, BlockStoragePolicy> entry : policyMap.entrySet()) {
dfs.setStoragePolicy(entry.getKey(), entry.getValue().getName());
}
}
}
/**
* This scheme defines DataNodes and their storage, including storage types
* and remaining capacities.
*/
static
|
NamespaceScheme
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/records/MergeRecordPropertyTestCase.java
|
{
"start": 4166,
"end": 4858
}
|
class ____ {
@Id
Long id;
@Embedded
MyRecord record;
public MyEntity() {
}
public MyEntity(Long id) {
this.id = id;
}
public MyEntity(Long id, MyRecord record) {
this.id = id;
this.record = record;
}
public Long getId() {
return id;
}
public MyRecord getRecord() {
return record;
}
public void setId(Long id) {
this.id = id;
}
public void setRecord(MyRecord record) {
this.record = record;
}
}
@Embeddable
public static record MyRecord(String name, String description, @ManyToOne(fetch = FetchType.LAZY) MyEntity assoc) {
public MyRecord(String name, String description) {
this( name, description, null );
}
}
}
|
MyEntity
|
java
|
processing__processing4
|
java/src/processing/mode/java/RuntimePathBuilder.java
|
{
"start": 7832,
"end": 10728
}
|
class ____ at sketch execution
sketchClassPathStrategies.add(javaRuntimePathFactory);
sketchClassPathStrategies.add(javaFxRuntimePathFactory);
sketchClassPathStrategies.add(modeSketchPathFactory);
sketchClassPathStrategies.add(librarySketchPathFactory);
sketchClassPathStrategies.add(coreLibraryPathFactory);
sketchClassPathStrategies.add(codeFolderPathFactory);
// Strategies required for import suggestions
searchClassPathStrategies.add(javaRuntimePathFactory);
searchClassPathStrategies.add(javaFxRuntimePathFactory);
searchClassPathStrategies.add(modeSearchPathFactory);
searchClassPathStrategies.add(librarySearchPathFactory);
searchClassPathStrategies.add(coreLibraryPathFactory);
searchClassPathStrategies.add(codeFolderPathFactory);
// Assign strategies to collections for cache invalidation on library events.
libraryDependentCaches.add(coreLibraryPathFactory);
libraryImportsDependentCaches.add(librarySketchPathFactory);
libraryImportsDependentCaches.add(librarySearchPathFactory);
// Assign strategies to collections for cache invalidation on code folder changes.
codeFolderDependentCaches.add(codeFolderPathFactory);
}
/**
* Invalidate all the runtime path caches associated with sketch libraries.
*/
public void markLibrariesChanged() {
invalidateAll(libraryDependentCaches);
}
/**
* Invalidate all the runtime path caches associated with sketch library imports.
*/
public void markLibraryImportsChanged() {
invalidateAll(libraryImportsDependentCaches);
}
/**
* Invalidate all the runtime path caches associated with the code folder having changed.
*/
public void markCodeFolderChanged() {
invalidateAll(codeFolderDependentCaches);
}
/**
* Generate a classpath and inject it into a {PreprocessedSketch.Builder}.
*
* @param result The {PreprocessedSketch.Builder} into which the classpath should be inserted.
* @param mode The {JavaMode} for which the classpath should be generated.
*/
public void prepareClassPath(PreprocSketch.Builder result, JavaMode mode) {
List<ImportStatement> programImports = result.programImports;
Sketch sketch = result.sketch;
prepareSketchClassPath(result, mode, programImports, sketch);
prepareSearchClassPath(result, mode, programImports, sketch);
}
/**
* Invalidate all of the caches in a provided collection.
*
* @param caches The caches to invalidate so that, when their value is requested again, the value
* is generated again.
*/
static private void invalidateAll(List<CachedRuntimePathFactory> caches) {
for (CachedRuntimePathFactory cache : caches) {
cache.invalidateCache();
}
}
/**
* Prepare the classpath required for the sketch's execution.
*
* @param result The PreprocessedSketch builder into which the classpath and
|
path
|
java
|
quarkusio__quarkus
|
extensions/vertx-http/deployment/src/test/java/io/quarkus/vertx/http/proxy/DotDotSemicolonSegmentTest.java
|
{
"start": 557,
"end": 1222
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().withApplicationRoot(jar -> {
jar.addAsResource(new StringAsset("Hello"), "META-INF/resources/index.html");
});
@Test
public void testPathIsNotResolved() {
given()
.get("/index.html")
.then()
.statusCode(200);
given()
.get("/something/../index.html")
.then()
.statusCode(200);
given()
.get("/something/..;/index.html")
.then()
.statusCode(404);
}
}
|
DotDotSemicolonSegmentTest
|
java
|
FasterXML__jackson-databind
|
src/main/java/tools/jackson/databind/ext/OptionalHandlerFactory.java
|
{
"start": 327,
"end": 471
}
|
class ____ for isolating details of handling optional+external types
* (javax.xml classes) from standard factories that offer them.
*/
public
|
used
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/api/geo/OptionalGeoSearch.java
|
{
"start": 765,
"end": 1501
}
|
interface ____ extends GeoSearchArgs {
/**
* Defines limit of search result
*
* @param value - result limit
* @return search conditions object
*/
OptionalGeoSearch count(int value);
/**
* Defines limit of search result.
* Returns as soon as enough matches are found.
* Result size might be not closest to defined limit,
* but works faster.
*
* @param value - result limit
* @return search conditions object
*/
OptionalGeoSearch countAny(int value);
/**
* Defines order of search result
*
* @param geoOrder - result order
* @return search conditions object
*/
OptionalGeoSearch order(GeoOrder geoOrder);
}
|
OptionalGeoSearch
|
java
|
google__guice
|
core/test/com/google/inject/MembersInjectorTest.java
|
{
"start": 1150,
"end": 9772
}
|
class ____ extends TestCase {
private static final long DEADLOCK_TIMEOUT_SECONDS = 1;
private static final A<C> uninjectableA =
new A<C>() {
@Inject
@Override
void doNothing() {
throw new AssertionFailedError();
}
};
private static final B uninjectableB =
new B() {
@Inject
@Override
void doNothing() {
throw new AssertionFailedError();
}
};
private static final C myFavouriteC = new C();
public void testMembersInjectorFromBinder() {
final AtomicReference<MembersInjector<A<C>>> aMembersInjectorReference =
new AtomicReference<MembersInjector<A<C>>>();
final AtomicReference<MembersInjector<B>> bMembersInjectorReference =
new AtomicReference<MembersInjector<B>>();
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
MembersInjector<A<C>> aMembersInjector = getMembersInjector(new TypeLiteral<A<C>>() {});
try {
aMembersInjector.injectMembers(uninjectableA);
fail();
} catch (IllegalStateException expected) {
assertContains(
expected.getMessage(),
"This MembersInjector cannot be used until the Injector has been created.");
}
MembersInjector<B> bMembersInjector = getMembersInjector(B.class);
try {
bMembersInjector.injectMembers(uninjectableB);
fail();
} catch (IllegalStateException expected) {
assertContains(
expected.getMessage(),
"This MembersInjector cannot be used until the Injector has been created.");
}
aMembersInjectorReference.set(aMembersInjector);
bMembersInjectorReference.set(bMembersInjector);
assertEquals(
"MembersInjector<java.lang.String>", getMembersInjector(String.class).toString());
bind(C.class).toInstance(myFavouriteC);
}
});
A<C> injectableA = new A<>();
aMembersInjectorReference.get().injectMembers(injectableA);
assertSame(myFavouriteC, injectableA.t);
assertSame(myFavouriteC, injectableA.b.c);
B injectableB = new B();
bMembersInjectorReference.get().injectMembers(injectableB);
assertSame(myFavouriteC, injectableB.c);
B anotherInjectableB = new B();
bMembersInjectorReference.get().injectMembers(anotherInjectableB);
assertSame(myFavouriteC, anotherInjectableB.c);
}
public void testMembersInjectorFromInjector() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(C.class).toInstance(myFavouriteC);
}
});
MembersInjector<A<C>> aMembersInjector =
injector.getMembersInjector(new TypeLiteral<A<C>>() {});
MembersInjector<B> bMembersInjector = injector.getMembersInjector(B.class);
A<C> injectableA = new A<>();
aMembersInjector.injectMembers(injectableA);
assertSame(myFavouriteC, injectableA.t);
assertSame(myFavouriteC, injectableA.b.c);
B injectableB = new B();
bMembersInjector.injectMembers(injectableB);
assertSame(myFavouriteC, injectableB.c);
B anotherInjectableB = new B();
bMembersInjector.injectMembers(anotherInjectableB);
assertSame(myFavouriteC, anotherInjectableB.c);
assertEquals(
"MembersInjector<java.lang.String>", injector.getMembersInjector(String.class).toString());
}
public void testMembersInjectorWithNonInjectedTypes() {
Injector injector = Guice.createInjector();
MembersInjector<NoInjectedMembers> membersInjector =
injector.getMembersInjector(NoInjectedMembers.class);
membersInjector.injectMembers(new NoInjectedMembers());
membersInjector.injectMembers(new NoInjectedMembers());
}
public void testInjectionFailure() {
Injector injector = Guice.createInjector();
MembersInjector<InjectionFailure> membersInjector =
injector.getMembersInjector(InjectionFailure.class);
try {
membersInjector.injectMembers(new InjectionFailure());
fail();
} catch (ProvisionException expected) {
assertContains(expected.getMessage(), "ClassCastException: whoops, failure #1");
}
}
public void testInjectionAppliesToSpecifiedType() {
Injector injector = Guice.createInjector();
MembersInjector<Object> membersInjector = injector.getMembersInjector(Object.class);
membersInjector.injectMembers(new InjectionFailure());
}
public void testInjectingMembersInjector() {
InjectsMembersInjector injectsMembersInjector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(C.class).toInstance(myFavouriteC);
}
})
.getInstance(InjectsMembersInjector.class);
A<C> a = new A<>();
injectsMembersInjector.aMembersInjector.injectMembers(a);
assertSame(myFavouriteC, a.t);
assertSame(myFavouriteC, a.b.c);
}
public void testCannotBindMembersInjector() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(MembersInjector.class).toProvider(Providers.of(null));
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"Binding to core guice framework type is not allowed: MembersInjector.");
}
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(new TypeLiteral<MembersInjector<A<C>>>() {})
.toProvider(Providers.<MembersInjector<A<C>>>of(null));
}
});
fail();
} catch (CreationException expected) {
assertContains(
expected.getMessage(),
"Binding to core guice framework type is not allowed: MembersInjector.");
}
}
public void testInjectingMembersInjectorWithErrorsInDependencies() {
try {
Guice.createInjector().getInstance(InjectsBrokenMembersInjector.class);
fail();
} catch (ConfigurationException expected) {
assertContains(
expected.getMessage(),
"No implementation for MembersInjectorTest$Unimplemented was bound.",
"MembersInjectorTest$A.t(MembersInjectorTest.java:",
"for field t",
"at MembersInjectorTest$InjectsBrokenMembersInjector.aMembersInjector("
+ "MembersInjectorTest.java:",
"for field aMembersInjector",
"while locating MembersInjectorTest$InjectsBrokenMembersInjector");
}
}
public void testLookupMembersInjectorBinding() {
Injector injector =
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(C.class).toInstance(myFavouriteC);
}
});
MembersInjector<A<C>> membersInjector =
injector.getInstance(new Key<MembersInjector<A<C>>>() {});
A<C> a = new A<>();
membersInjector.injectMembers(a);
assertSame(myFavouriteC, a.t);
assertSame(myFavouriteC, a.b.c);
assertEquals(
"MembersInjector<java.lang.String>",
injector.getInstance(new Key<MembersInjector<String>>() {}).toString());
}
public void testGettingRawMembersInjector() {
Injector injector = Guice.createInjector();
try {
injector.getInstance(MembersInjector.class);
fail();
} catch (ConfigurationException expected) {
assertContains(
expected.getMessage(), "Cannot inject a MembersInjector that has no type parameter");
}
}
public void testGettingAnnotatedMembersInjector() {
Injector injector = Guice.createInjector();
try {
injector.getInstance(new Key<MembersInjector<String>>(Names.named("foo")) {});
fail();
} catch (ConfigurationException expected) {
assertContains(
expected.getMessage(),
"No implementation for MembersInjector<String> annotated with @Named("
+ Annotations.memberValueString("value", "foo")
+ ") was bound.");
}
}
/** Callback for member injection. Uses a static type to be referable by getInstance(). */
abstract static
|
MembersInjectorTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/issues/MockValueBuilderIssueTest.java
|
{
"start": 1144,
"end": 2845
}
|
class ____ extends ContextTestSupport {
@Test
public void testMockValueBuilder() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.message(0).exchangeProperty("foo").convertTo(String.class).contains("2");
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testMockValueBuilderFail() {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.message(0).exchangeProperty("foo").convertTo(String.class).contains("4");
template.sendBody("direct:start", "Hello World");
Throwable e = assertThrows(Throwable.class, this::assertMockEndpointsSatisfied,
"Should fail");
String s = "Assertion error at index 0 on mock mock://result with predicate: exchangeProperty(foo) contains 4 evaluated as: 123 contains 4";
assertTrue(e.getMessage().startsWith(s));
}
@Test
public void testMockValueBuilderNotSatisfied() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.message(0).exchangeProperty("foo").convertTo(String.class).contains("4");
template.sendBody("direct:start", "Hello World");
mock.assertIsNotSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").setProperty("foo", constant(123)).to("mock:result");
}
};
}
}
|
MockValueBuilderIssueTest
|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/support/AbstractTestContextBootstrapper.java
|
{
"start": 9383,
"end": 11419
}
|
class ____ declared the
// configuration for the current level in the context hierarchy.
Assert.notEmpty(reversedList, "ContextConfigurationAttributes list must not be empty");
Class<?> declaringClass = reversedList.get(0).getDeclaringClass();
mergedConfig = buildMergedContextConfiguration(
declaringClass, reversedList, parentConfig, cacheAwareContextLoaderDelegate, true);
parentConfig = mergedConfig;
}
// Return the last level in the context hierarchy
Assert.state(mergedConfig != null, "No merged context configuration");
return mergedConfig;
}
else {
return buildMergedContextConfiguration(testClass,
ContextLoaderUtils.resolveContextConfigurationAttributes(testClass),
null, cacheAwareContextLoaderDelegate, true);
}
}
private MergedContextConfiguration buildDefaultMergedContextConfiguration(Class<?> testClass,
CacheAwareContextLoaderDelegate cacheAwareContextLoaderDelegate) {
List<ContextConfigurationAttributes> defaultConfigAttributesList =
Collections.singletonList(new ContextConfigurationAttributes(testClass));
ContextLoader contextLoader = resolveContextLoader(testClass, defaultConfigAttributesList);
if (logger.isTraceEnabled()) {
logger.trace(String.format(
"Neither @ContextConfiguration nor @ContextHierarchy found for test class [%s]: using %s",
testClass.getName(), contextLoader.getClass().getName()));
}
else if (logger.isDebugEnabled()) {
logger.debug(String.format(
"Neither @ContextConfiguration nor @ContextHierarchy found for test class [%s]: using %s",
testClass.getSimpleName(), contextLoader.getClass().getSimpleName()));
}
return buildMergedContextConfiguration(testClass, defaultConfigAttributesList, null,
cacheAwareContextLoaderDelegate, false);
}
/**
* Build the {@link MergedContextConfiguration merged context configuration}
* for the supplied {@link Class testClass}, context configuration attributes,
* and parent context configuration.
* @param testClass the test
|
that
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/CustomEntityDirtinessStrategy.java
|
{
"start": 1034,
"end": 1222
}
|
interface ____ {
* Set<String> changes();
* }
* </pre>
* Then the following implementation of {@code CustomEntityDirtinessStrategy} would be used:
* <pre>
* public
|
DirtyTracker
|
java
|
apache__commons-lang
|
src/test/java/org/apache/commons/lang3/builder/TestClassBuilder.java
|
{
"start": 1175,
"end": 1448
}
|
class ____ extends ClassLoader {
public Class<?> defineClass(final String name, final byte[] b) {
return defineClass(name, b, 0, b.length);
}
}
/**
* Defines the simplest possible class.
*
* @param name The
|
DynamicClassLoader
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/AutoValueCompilationTest.java
|
{
"start": 86369,
"end": 87420
}
|
interface ____ {",
" Builder setNotNull(@Nullable String x);",
" Baz build();",
" }",
"}");
Compilation compilation =
javac()
.withProcessors(new AutoValueProcessor(), new AutoValueBuilderProcessor())
.compile(javaFileObject, nullableFileObject);
assertThat(compilation)
.hadErrorContaining(
"Parameter of setter method is @Nullable but property method"
+ " foo.bar.Baz.notNull() is not")
.inFile(javaFileObject)
.onLineContaining("setNotNull");
}
// Check that we get a helpful error message if some of your properties look like getters but
// others don't.
@Test
public void autoValueBuilderBeansConfusion() {
JavaFileObject javaFileObject =
JavaFileObjects.forSourceLines(
"foo.bar.Item",
"package foo.bar;",
"",
"import com.google.auto.value.AutoValue;",
"",
"@AutoValue",
"public abstract
|
Builder
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/test/java/org/springframework/messaging/core/MessageReceivingTemplateTests.java
|
{
"start": 4075,
"end": 4708
}
|
class ____ extends AbstractMessagingTemplate<String> {
private String destination;
private Message<?> receiveMessage;
private void setReceiveMessage(Message<?> receiveMessage) {
this.receiveMessage = receiveMessage;
}
@Override
protected void doSend(String destination, Message<?> message) {
}
@Override
protected Message<?> doReceive(String destination) {
this.destination = destination;
return this.receiveMessage;
}
@Override
protected Message<?> doSendAndReceive(String destination, Message<?> requestMessage) {
this.destination = destination;
return null;
}
}
}
|
TestMessagingTemplate
|
java
|
FasterXML__jackson-databind
|
src/test/java/perf/MediaItem.java
|
{
"start": 1167,
"end": 2249
}
|
class ____
{
private String _uri;
private String _title;
private int _width;
private int _height;
private Size _size;
public Photo() {}
public Photo(String uri, String title, int w, int h, Size s)
{
_uri = uri;
_title = title;
_width = w;
_height = h;
_size = s;
}
public String getUri() { return _uri; }
public String getTitle() { return _title; }
public int getWidth() { return _width; }
public int getHeight() { return _height; }
public Size getSize() { return _size; }
public void setUri(String u) { _uri = u; }
public void setTitle(String t) { _title = t; }
public void setWidth(int w) { _width = w; }
public void setHeight(int h) { _height = h; }
public void setSize(Size s) { _size = s; }
}
@JsonFormat(shape=JsonFormat.Shape.ARRAY)
@JsonPropertyOrder({"uri","title","width","height","format","duration","size","bitrate","persons","player","copyright"})
public static
|
Photo
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ComponentProcessorTest.java
|
{
"start": 70746,
"end": 70965
}
|
interface ____ extends ChildInterface.Factory {}");
Source childInterface =
CompilerTests.javaSource(
"test.ChildInterface",
"package test;",
"",
"
|
ParentInterface
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/dynamic/ReactiveTypes.java
|
{
"start": 466,
"end": 691
}
|
class ____ whether a reactive wrapper is supported in
* general and whether a particular type is suitable for no-value/single-value/multi-value usage.
* <p>
* Supported types are discovered by their availability on the
|
exposes
|
java
|
hibernate__hibernate-orm
|
tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/constant/CookBook.java
|
{
"start": 401,
"end": 475
}
|
class ____ {
@Id
String isbn;
String title;
BookType bookType;
}
|
CookBook
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/support/TestConstructorUtilsTests.java
|
{
"start": 3963,
"end": 4113
}
|
class ____ {
}
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@TestConstructor(autowireMode = ALL)
@
|
TestConstructorAnnotationTestCase
|
java
|
quarkusio__quarkus
|
independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/ObserverConfigurator.java
|
{
"start": 4766,
"end": 5682
}
|
class ____ the generated {@code notify} method.
*
* @see #notifyMethod()
*/
ClassCreator observerClass();
/**
* {@return the {@link BlockCreator} for the generated {@code notify} method}
* This method is supposed to contain the logic of the observer.
*/
BlockCreator notifyMethod();
/**
* {@return the field on the {@link #observerClass()} that contains the parameter map}
*/
default Var paramsMap() {
ClassCreator cc = observerClass();
return cc.this_().field(FieldDesc.of(cc.type(), "params", Map.class));
}
/**
* {@return the parameter of the generated notification method that contains
* the {@link jakarta.enterprise.inject.spi.EventContext}}
*
* @see #notifyMethod()
*/
Var eventContext();
}
}
|
contains
|
java
|
apache__avro
|
lang/java/avro/src/main/java/org/apache/avro/data/TimeConversions.java
|
{
"start": 6267,
"end": 7704
}
|
class ____ extends Conversion<Instant> {
@Override
public Class<Instant> getConvertedType() {
return Instant.class;
}
@Override
public String getLogicalTypeName() {
return "timestamp-nanos";
}
@Override
public String adjustAndSetValue(String varName, String valParamName) {
return varName + " = " + valParamName + ".truncatedTo(java.time.temporal.ChronoUnit.NANOS);";
}
@Override
public Instant fromLong(Long microsFromEpoch, Schema schema, LogicalType type) {
long epochSeconds = microsFromEpoch / 1_000_000_000L;
long nanoAdjustment = microsFromEpoch % 1_000_000_000L;
return Instant.ofEpochSecond(epochSeconds, nanoAdjustment);
}
@Override
public Long toLong(Instant instant, Schema schema, LogicalType type) {
long seconds = instant.getEpochSecond();
int nanos = instant.getNano();
if (seconds < 0 && nanos > 0) {
long micros = Math.multiplyExact(seconds + 1, 1_000_000_000L);
long adjustment = nanos - 1_000_000;
return Math.addExact(micros, adjustment);
} else {
long micros = Math.multiplyExact(seconds, 1_000_000_000L);
return Math.addExact(micros, nanos);
}
}
@Override
public Schema getRecommendedSchema() {
return LogicalTypes.timestampNanos().addToSchema(Schema.create(Schema.Type.LONG));
}
}
public static
|
TimestampNanosConversion
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/event/EventListenerSupport.java
|
{
"start": 1848,
"end": 2502
}
|
class ____ {
*
* private EventListenerSupport<ActionListener> actionListeners = EventListenerSupport.create(ActionListener.class);
*
* public void someMethodThatFiresAction() {
* ActionEvent e = new ActionEvent(this, ActionEvent.ACTION_PERFORMED, "something");
* actionListeners.fire().actionPerformed(e);
* }
* }
* }</pre>
* <p>
* Events are fired
* <p>
* Serializing an {@link EventListenerSupport} instance will result in any non-{@link Serializable} listeners being silently dropped.
* </p>
*
* @param <L> the type of event listener that is supported by this proxy.
* @since 3.0
*/
public
|
MyActionEventSource
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/factory/config/CustomScopeConfigurerTests.java
|
{
"start": 1221,
"end": 3778
}
|
class ____ {
private static final String FOO_SCOPE = "fooScope";
private final ConfigurableListableBeanFactory factory = new DefaultListableBeanFactory();
@Test
void testWithNoScopes() {
CustomScopeConfigurer figurer = new CustomScopeConfigurer();
figurer.postProcessBeanFactory(factory);
}
@Test
void testSunnyDayWithBonaFideScopeInstance() {
Scope scope = mock();
factory.registerScope(FOO_SCOPE, scope);
Map<String, Object> scopes = new HashMap<>();
scopes.put(FOO_SCOPE, scope);
CustomScopeConfigurer figurer = new CustomScopeConfigurer();
figurer.setScopes(scopes);
figurer.postProcessBeanFactory(factory);
}
@Test
void testSunnyDayWithBonaFideScopeClass() {
Map<String, Object> scopes = new HashMap<>();
scopes.put(FOO_SCOPE, NoOpScope.class);
CustomScopeConfigurer figurer = new CustomScopeConfigurer();
figurer.setScopes(scopes);
figurer.postProcessBeanFactory(factory);
assertThat(factory.getRegisteredScope(FOO_SCOPE)).isInstanceOf(NoOpScope.class);
}
@Test
void testSunnyDayWithBonaFideScopeClassName() {
Map<String, Object> scopes = new HashMap<>();
scopes.put(FOO_SCOPE, NoOpScope.class.getName());
CustomScopeConfigurer figurer = new CustomScopeConfigurer();
figurer.setScopes(scopes);
figurer.postProcessBeanFactory(factory);
assertThat(factory.getRegisteredScope(FOO_SCOPE)).isInstanceOf(NoOpScope.class);
}
@Test
void testWhereScopeMapHasNullScopeValueInEntrySet() {
Map<String, Object> scopes = new HashMap<>();
scopes.put(FOO_SCOPE, null);
CustomScopeConfigurer figurer = new CustomScopeConfigurer();
figurer.setScopes(scopes);
assertThatIllegalArgumentException().isThrownBy(() ->
figurer.postProcessBeanFactory(factory));
}
@Test
void testWhereScopeMapHasNonScopeInstanceInEntrySet() {
Map<String, Object> scopes = new HashMap<>();
scopes.put(FOO_SCOPE, this); // <-- not a valid value...
CustomScopeConfigurer figurer = new CustomScopeConfigurer();
figurer.setScopes(scopes);
assertThatIllegalArgumentException().isThrownBy(() ->
figurer.postProcessBeanFactory(factory));
}
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
void testWhereScopeMapHasNonStringTypedScopeNameInKeySet() {
Map scopes = new HashMap();
scopes.put(this, new NoOpScope()); // <-- not a valid value (the key)...
CustomScopeConfigurer figurer = new CustomScopeConfigurer();
figurer.setScopes(scopes);
assertThatExceptionOfType(ClassCastException.class).isThrownBy(() ->
figurer.postProcessBeanFactory(factory));
}
}
|
CustomScopeConfigurerTests
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/io/network/buffer/BufferProvider.java
|
{
"start": 1259,
"end": 4073
}
|
interface ____ extends AvailabilityProvider {
/**
* Returns a {@link Buffer} instance from the buffer provider, if one is available.
*
* @return {@code null} if no buffer is available or the buffer provider has been destroyed.
*/
@Nullable
Buffer requestBuffer();
/**
* Returns a {@link BufferBuilder} instance from the buffer provider. This equals to {@link
* #requestBufferBuilder(int)} with unknown target channel.
*
* @return {@code null} if no buffer is available or the buffer provider has been destroyed.
*/
@Nullable
BufferBuilder requestBufferBuilder();
/**
* Returns a {@link BufferBuilder} instance from the buffer provider.
*
* @param targetChannel to which the request will be accounted to.
* @return {@code null} if no buffer is available or the buffer provider has been destroyed.
*/
@Nullable
BufferBuilder requestBufferBuilder(int targetChannel);
/**
* Returns a {@link BufferBuilder} instance from the buffer provider. This equals to {@link
* #requestBufferBuilderBlocking(int)} with unknown target channel.
*
* <p>If there is no buffer available, the call will block until one becomes available again or
* the buffer provider has been destroyed.
*/
BufferBuilder requestBufferBuilderBlocking() throws InterruptedException;
/**
* Returns a {@link BufferBuilder} instance from the buffer provider.
*
* <p>If there is no buffer available, the call will block until one becomes available again or
* the buffer provider has been destroyed.
*
* @param targetChannel to which the request will be accounted to.
*/
BufferBuilder requestBufferBuilderBlocking(int targetChannel) throws InterruptedException;
/**
* Adds a buffer availability listener to the buffer provider.
*
* <p>The operation fails with return value <code>false</code>, when there is a buffer available
* or the buffer provider has been destroyed.
*/
boolean addBufferListener(BufferListener listener);
/** Returns whether the buffer provider has been destroyed. */
boolean isDestroyed();
/**
* Returns a {@link MemorySegment} instance from the buffer provider.
*
* @return {@code null} if no memory segment is available or the buffer provider has been
* destroyed.
*/
@Nullable
MemorySegment requestMemorySegment();
/**
* Returns a {@link MemorySegment} instance from the buffer provider.
*
* <p>If there is no memory segment available, the call will block until one becomes available
* again or the buffer provider has been destroyed.
*/
MemorySegment requestMemorySegmentBlocking() throws InterruptedException;
}
|
BufferProvider
|
java
|
alibaba__nacos
|
sys/src/test/java/com/alibaba/nacos/sys/env/EnvUtilTest.java
|
{
"start": 2721,
"end": 17283
}
|
class ____ {
MockedStatic<OperatingSystemBeanManager> systemBeanManagerMocked;
MockEnvironment environment;
@BeforeEach
void before() {
systemBeanManagerMocked = Mockito.mockStatic(OperatingSystemBeanManager.class);
environment = new MockEnvironment();
EnvUtil.setEnvironment(environment);
}
@AfterEach
void after() {
if (!systemBeanManagerMocked.isClosed()) {
systemBeanManagerMocked.close();
}
EnvUtil.setEnvironment(null);
}
@Test
void testCustomEnvironment() {
environment.setProperty("nacos.custom.environment.enabled", "true");
List<CustomEnvironmentPluginService> pluginServices = (List<CustomEnvironmentPluginService>) ReflectionTestUtils.getField(
CustomEnvironmentPluginManager.getInstance(), "SERVICE_LIST");
pluginServices.add(new CustomEnvironmentPluginService() {
@Override
public Map<String, Object> customValue(Map<String, Object> property) {
return Collections.emptyMap();
}
@Override
public Set<String> propertyKey() {
return Collections.singleton("nacos.custom.environment.enabled");
}
@Override
public Integer order() {
return 0;
}
@Override
public String pluginName() {
return "";
}
});
MutablePropertySources mock = Mockito.mock(MutablePropertySources.class);
ReflectionTestUtils.setField(environment, "propertySources", mock);
EnvUtil.customEnvironment();
verify(mock).addFirst(any(MapPropertySource.class));
}
@Test
void testGetEnvironment() {
assertEquals(environment, EnvUtil.getEnvironment());
}
@Test
void testContainsProperty() {
assertFalse(EnvUtil.containsProperty("nacos.custom.environment.enabled"));
environment.setProperty("nacos.custom.environment.enabled", "true");
assertTrue(EnvUtil.containsProperty("nacos.custom.environment.enabled"));
}
@Test
void testGetProperty() {
assertNull(EnvUtil.getProperty("nacos.custom.environment.enabled"));
environment.setProperty("nacos.custom.environment.enabled", "true");
assertEquals("true", EnvUtil.getProperty("nacos.custom.environment.enabled"));
}
@Test
void testGetPropertyWithDefault() {
assertEquals("false", EnvUtil.getProperty("nacos.custom.environment.enabled", "false"));
environment.setProperty("nacos.custom.environment.enabled", "true");
assertEquals("true", EnvUtil.getProperty("nacos.custom.environment.enabled"));
}
@Test
void testGetPropertyWithType() {
assertNull(EnvUtil.getProperty("nacos.custom.environment.enabled", Boolean.class));
environment.setProperty("nacos.custom.environment.enabled", "true");
assertTrue(EnvUtil.getProperty("nacos.custom.environment.enabled", Boolean.class));
}
@Test
void testGetRequiredProperty() {
assertThrows(IllegalStateException.class,
() -> EnvUtil.getRequiredProperty("nacos.custom.environment.enabled"));
environment.setProperty("nacos.custom.environment.enabled", "true");
assertEquals("true", EnvUtil.getRequiredProperty("nacos.custom.environment.enabled"));
}
@Test
void testGetRequiredPropertyWithType() {
environment.setProperty("nacos.custom.environment.enabled", "true");
assertTrue(EnvUtil.getRequiredProperty("nacos.custom.environment.enabled", Boolean.class));
}
@Test
void testGetProperties() {
environment.setProperty("nacos.custom.environment.enabled", "true");
Properties properties = EnvUtil.getProperties();
assertEquals(1, properties.size());
assertEquals("true", properties.getProperty("nacos.custom.environment.enabled"));
}
@Test
void testResolvePlaceholders() {
environment.setProperty("nacos.custom.environment.enabled", "true");
assertEquals("true", EnvUtil.resolvePlaceholders("${nacos.custom.environment.enabled}"));
}
@Test
void testResolveRequiredPlaceholders() {
assertThrows(IllegalArgumentException.class,
() -> EnvUtil.resolveRequiredPlaceholders("${nacos.custom.environment.enabled}"));
environment.setProperty("nacos.custom.environment.enabled", "true");
assertEquals("true", EnvUtil.resolvePlaceholders("${nacos.custom.environment.enabled}"));
}
@Test
void testGetPropertyList() {
environment.setProperty("nacos.properties[0]", "value1");
environment.setProperty("nacos.properties[1]", "value2");
assertEquals(Arrays.asList("value1", "value2"), EnvUtil.getPropertyList("nacos.properties"));
}
@Test
void testGetLocalAddress() {
System.setProperty(NACOS_SERVER_IP, "1.1.1.1");
System.setProperty(Constants.AUTO_REFRESH_TIME, "100");
try {
EnvUtil.setLocalAddress(null);
assertEquals(InetUtils.getSelfIP() + ":8848", EnvUtil.getLocalAddress());
EnvUtil.setLocalAddress("testLocalAddress:8848");
assertEquals("testLocalAddress:8848", EnvUtil.getLocalAddress());
} finally {
System.clearProperty(NACOS_SERVER_IP);
System.clearProperty(Constants.AUTO_REFRESH_TIME);
}
}
@Test
void testGetPort() {
assertEquals(8848, EnvUtil.getPort());
EnvUtil.setPort(3306);
assertEquals(3306, EnvUtil.getPort());
}
@Test
void testGetContextPath() {
EnvUtil.setContextPath(null);
assertEquals("/nacos", EnvUtil.getContextPath());
EnvUtil.setContextPath(null);
environment.setProperty(Constants.WEB_CONTEXT_PATH, "/");
assertEquals("", EnvUtil.getContextPath());
EnvUtil.setContextPath(null);
environment.setProperty(Constants.WEB_CONTEXT_PATH, "/other");
assertEquals("/other", EnvUtil.getContextPath());
}
@Test
void testGetStandaloneMode() {
EnvUtil.setIsStandalone(false);
assertFalse(EnvUtil.getStandaloneMode());
EnvUtil.setIsStandalone(true);
assertTrue(EnvUtil.getStandaloneMode());
}
@Test
void testGetFunctionMode() {
try {
assertNull(EnvUtil.getFunctionMode());
System.setProperty(Constants.FUNCTION_MODE_PROPERTY_NAME, EnvUtil.FUNCTION_MODE_CONFIG);
assertEquals(EnvUtil.FUNCTION_MODE_CONFIG, EnvUtil.getFunctionMode());
} finally {
System.clearProperty(Constants.FUNCTION_MODE_PROPERTY_NAME);
ReflectionTestUtils.setField(EnvUtil.class, "functionModeType", null);
}
}
@Test
void testGetNacosTmpDir() {
assertEquals(EnvUtil.getNacosHome() + "/data/tmp", EnvUtil.getNacosTmpDir());
}
@Test
void testGetNacosHome() {
try {
assertEquals(System.getProperty("user.home") + "/nacos", EnvUtil.getNacosHome());
EnvUtil.setNacosHomePath(null);
System.setProperty(EnvUtil.NACOS_HOME_KEY, "/home/admin/nacos");
assertEquals("/home/admin/nacos", EnvUtil.getNacosHome());
EnvUtil.setNacosHomePath("/tmp/nacos");
assertEquals("/tmp/nacos", EnvUtil.getNacosHome());
} finally {
System.clearProperty(EnvUtil.NACOS_HOME_KEY);
}
}
@Test
void testGetSystemEnv() {
assertDoesNotThrow(() -> EnvUtil.getSystemEnv("test"));
}
@Test
void testGetLoad() {
OperatingSystemMXBean operatingSystemMxBean = mock(OperatingSystemMXBean.class);
systemBeanManagerMocked.when(OperatingSystemBeanManager::getOperatingSystemBean)
.thenReturn(operatingSystemMxBean);
when(operatingSystemMxBean.getSystemLoadAverage()).thenReturn(100.0d);
assertEquals(100d, EnvUtil.getLoad());
}
@Test
void testGetCpu() {
systemBeanManagerMocked.when(OperatingSystemBeanManager::getSystemCpuUsage).thenReturn(50.0d);
assertEquals(50.0d, EnvUtil.getCpu());
}
@Test
public void testGetMem() {
systemBeanManagerMocked.when(OperatingSystemBeanManager::getFreePhysicalMem).thenReturn(123L);
systemBeanManagerMocked.when(OperatingSystemBeanManager::getTotalPhysicalMem).thenReturn(2048L);
assertEquals(EnvUtil.getMem(), 1 - ((double) 123L / (double) 2048L));
systemBeanManagerMocked.when(OperatingSystemBeanManager::getFreePhysicalMem).thenReturn(0L);
assertEquals(EnvUtil.getMem(), 1 - ((double) 0L / (double) 2048L));
}
@Test
void testGetConfPath() {
try {
assertEquals(EnvUtil.getNacosHome() + "/conf", EnvUtil.getConfPath());
EnvUtil.setConfPath("/tmp/nacos/conf");
assertEquals("/tmp/nacos/conf", EnvUtil.getConfPath());
} finally {
EnvUtil.setConfPath(null);
}
}
@Test
void testGetClusterConfFilePath() {
assertEquals(EnvUtil.getNacosHome() + "/conf/cluster.conf", EnvUtil.getClusterConfFilePath());
}
@Test
void testReadClusterConfFromFile() throws URISyntaxException, IOException {
try {
File file = new File(EnvUtilTest.class.getClassLoader().getResource("conf/cluster.conf").toURI());
EnvUtil.setNacosHomePath(file.getParentFile().getParentFile().getAbsolutePath());
List<String> actual = EnvUtil.readClusterConf();
assertEquals(3, actual.size());
assertEquals("127.0.0.1", actual.get(0));
assertEquals("127.0.0.2", actual.get(1));
assertEquals("127.0.0.3", actual.get(2));
} finally {
EnvUtil.setNacosHomePath(null);
}
}
@Test
void testReadClusterConfFromProperties() throws IOException {
try {
EnvUtil.setNacosHomePath("/non/exist/path");
environment.setProperty("nacos.member.list", "127.0.0.1,127.0.0.2");
assertEquals(Arrays.asList("127.0.0.1", "127.0.0.2"), EnvUtil.readClusterConf());
} finally {
EnvUtil.setNacosHomePath(null);
}
}
@Test
void testReadClusterConfFromSystem() throws IOException {
try {
EnvUtil.setNacosHomePath("/non/exist/path");
EnvUtil.setEnvironment(null);
System.setProperty("nacos.member.list", "127.0.0.1,127.0.0.2");
assertEquals(Arrays.asList("127.0.0.1", "127.0.0.2"), EnvUtil.readClusterConf());
} finally {
EnvUtil.setNacosHomePath(null);
}
}
@Test
void testWriteClusterConf() throws IOException {
DiskUtils.forceMkdir(EnvUtil.getNacosHome() + "/conf");
EnvUtil.writeClusterConf("127.0.0.1");
File file = new File(EnvUtil.getNacosHome() + "/conf/cluster.conf");
assertTrue(file.exists());
assertEquals("127.0.0.1", FileUtils.readFileToString(file, "UTF-8"));
}
@Test
void testLoadProperties() throws IOException {
String path = "test-properties.properties";
Map<String, ?> actual = EnvUtil.loadProperties(new ClassPathResource(path));
assertFalse(actual.isEmpty());
}
@Test
void testGetApplicationConfFileResourceDefault() throws IOException {
Resource resource = EnvUtil.getApplicationConfFileResource();
assertNotNull(resource);
assertInstanceOf(BufferedInputStream.class, resource.getInputStream());
}
@Test
void testGetApplicationConfFileResourceCustom() throws IOException {
String path = new ClassPathResource("test-properties.properties").getFile().getParentFile().getAbsolutePath();
environment.setProperty("spring.config.additional-location",
"file:test-properties-malformed-unicode.properties,file:" + path);
Resource resource = EnvUtil.getApplicationConfFileResource();
assertNotNull(resource);
assertInstanceOf(FileInputStream.class, resource.getInputStream());
}
@Test
void testGetApplicationConfFileResourceCustomButFileNotExist() throws IOException {
environment.setProperty("spring.config.additional-location",
"file:test-properties-malformed-unicode.properties,file:test-properties.properties");
Resource resource = EnvUtil.getApplicationConfFileResource();
assertNotNull(resource);
assertInstanceOf(BufferedInputStream.class, resource.getInputStream());
}
@Test
void testGetAvailableProcessorsDefaultMultiple() {
assertEquals(ThreadUtils.getSuitableThreadCount(1), EnvUtil.getAvailableProcessors());
environment.setProperty(Constants.AVAILABLE_PROCESSORS_BASIC, "0");
assertEquals(1, EnvUtil.getAvailableProcessors());
environment.setProperty(Constants.AVAILABLE_PROCESSORS_BASIC, "2");
assertEquals(2, EnvUtil.getAvailableProcessors());
}
@Test
void testGetAvailableProcessorsWithMultiple() {
assertThrows(IllegalArgumentException.class, () -> EnvUtil.getAvailableProcessors(0));
assertEquals(ThreadUtils.getSuitableThreadCount(2), EnvUtil.getAvailableProcessors(2));
environment.setProperty(Constants.AVAILABLE_PROCESSORS_BASIC, "0");
assertEquals(ThreadUtils.getSuitableThreadCount(2), EnvUtil.getAvailableProcessors(2));
environment.setProperty(Constants.AVAILABLE_PROCESSORS_BASIC, "2");
assertEquals(4, EnvUtil.getAvailableProcessors(2));
}
@Test
void testGetAvailableProcessorsWithScale() {
assertThrows(IllegalArgumentException.class, () -> EnvUtil.getAvailableProcessors(-1.0d));
assertThrows(IllegalArgumentException.class, () -> EnvUtil.getAvailableProcessors(1.1d));
int defaultValue = (int) (ThreadUtils.getSuitableThreadCount(1) * 0.5);
defaultValue = Math.max(defaultValue, 1);
assertEquals(defaultValue, EnvUtil.getAvailableProcessors(0.5d));
environment.setProperty(Constants.AVAILABLE_PROCESSORS_BASIC, "0");
assertEquals(1, EnvUtil.getAvailableProcessors(0.5d));
environment.setProperty(Constants.AVAILABLE_PROCESSORS_BASIC, "4");
assertEquals(2, EnvUtil.getAvailableProcessors(0.5d));
}
}
|
EnvUtilTest
|
java
|
google__auto
|
value/src/test/java/com/google/auto/value/processor/TemplateVarsTest.java
|
{
"start": 3805,
"end": 4168
}
|
class ____ extends TemplateVars {
int integer;
String string;
@Override
Template parsedTemplate() {
throw new UnsupportedOperationException();
}
}
@Test
public void testPrimitive() {
try {
new Primitive();
fail("Did not get expected exception");
} catch (IllegalArgumentException expected) {
}
}
}
|
Primitive
|
java
|
spring-projects__spring-security
|
core/src/test/java/org/springframework/security/core/annotation/UniqueSecurityAnnotationScannerTests.java
|
{
"start": 21006,
"end": 21205
}
|
class ____
implements AnnotationOnInterface, AnnotationOnInterfaceMethod {
@Override
public String method() {
return "ok";
}
}
private static
|
ClassOverridingInterfaceAndMethodInheritance
|
java
|
apache__dubbo
|
dubbo-rpc/dubbo-rpc-triple/src/test/java/org/apache/dubbo/rpc/protocol/tri/service/HealthStatusManagerTest.java
|
{
"start": 1186,
"end": 3195
}
|
class ____ {
private final TriHealthImpl health = new TriHealthImpl();
private final HealthStatusManager manager = new HealthStatusManager(health);
@Test
void getHealthService() {
Assertions.assertNotNull(manager.getHealthService());
}
@Test
void setStatus() {
String service = "serv0";
manager.setStatus(service, ServingStatus.SERVING);
ServingStatus stored = manager.getHealthService()
.check(HealthCheckRequest.newBuilder().setService(service).build())
.getStatus();
Assertions.assertEquals(ServingStatus.SERVING, stored);
}
@Test
void clearStatus() {
String service = "serv1";
manager.setStatus(service, ServingStatus.SERVING);
ServingStatus stored = manager.getHealthService()
.check(HealthCheckRequest.newBuilder().setService(service).build())
.getStatus();
Assertions.assertEquals(ServingStatus.SERVING, stored);
manager.clearStatus(service);
try {
manager.getHealthService()
.check(HealthCheckRequest.newBuilder().setService(service).build());
fail();
} catch (StatusRpcException e) {
Assertions.assertEquals(Code.NOT_FOUND, e.getStatus().code);
}
}
@Test
void enterTerminalState() {
String service = "serv2";
manager.setStatus(service, ServingStatus.SERVING);
ServingStatus stored = manager.getHealthService()
.check(HealthCheckRequest.newBuilder().setService(service).build())
.getStatus();
Assertions.assertEquals(ServingStatus.SERVING, stored);
manager.enterTerminalState();
ServingStatus stored2 = manager.getHealthService()
.check(HealthCheckRequest.newBuilder().setService(service).build())
.getStatus();
Assertions.assertEquals(ServingStatus.NOT_SERVING, stored2);
}
}
|
HealthStatusManagerTest
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-jackson2-mixed/src/main/java/smoketest/jackson2/mixed/SampleJackson2MixedApplication.java
|
{
"start": 812,
"end": 969
}
|
class ____ {
public static void main(String[] args) {
SpringApplication.run(SampleJackson2MixedApplication.class, args);
}
}
|
SampleJackson2MixedApplication
|
java
|
spring-projects__spring-boot
|
module/spring-boot-restclient/src/test/java/org/springframework/boot/restclient/RootUriBuilderFactoryTests.java
|
{
"start": 1107,
"end": 1475
}
|
class ____ {
@Test
void uriStringPrefixesRoot() throws URISyntaxException {
UriBuilderFactory builderFactory = new RootUriBuilderFactory("https://example.com",
mock(UriTemplateHandler.class));
UriBuilder builder = builderFactory.uriString("/hello");
assertThat(builder.build()).isEqualTo(new URI("https://example.com/hello"));
}
}
|
RootUriBuilderFactoryTests
|
java
|
apache__rocketmq
|
common/src/main/java/org/apache/rocketmq/common/stats/StatsItemSet.java
|
{
"start": 1302,
"end": 10362
}
|
class ____ {
private static final Logger COMMERCIAL_LOG = LoggerFactory.getLogger(LoggerName.COMMERCIAL_LOGGER_NAME);
private final ConcurrentMap<String/* key */, StatsItem> statsItemTable =
new ConcurrentHashMap<>(128);
private final String statsName;
private final ScheduledExecutorService scheduledExecutorService;
private final Logger logger;
public StatsItemSet(String statsName, ScheduledExecutorService scheduledExecutorService, Logger logger) {
this.logger = logger;
this.statsName = statsName;
this.scheduledExecutorService = scheduledExecutorService;
this.init();
}
public void init() {
this.scheduledExecutorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
samplingInSeconds();
} catch (Throwable ignored) {
}
}
}, 0, 10, TimeUnit.SECONDS);
this.scheduledExecutorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
samplingInMinutes();
} catch (Throwable ignored) {
}
}
}, 0, 10, TimeUnit.MINUTES);
this.scheduledExecutorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
samplingInHour();
} catch (Throwable ignored) {
}
}
}, 0, 1, TimeUnit.HOURS);
this.scheduledExecutorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
printAtMinutes();
} catch (Throwable ignored) {
}
}
}, Math.abs(UtilAll.computeNextMinutesTimeMillis() - System.currentTimeMillis()), 1000 * 60, TimeUnit.MILLISECONDS);
this.scheduledExecutorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
printAtHour();
} catch (Throwable ignored) {
}
}
}, Math.abs(UtilAll.computeNextHourTimeMillis() - System.currentTimeMillis()), 1000 * 60 * 60, TimeUnit.MILLISECONDS);
this.scheduledExecutorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
try {
printAtDay();
} catch (Throwable ignored) {
}
}
}, Math.abs(UtilAll.computeNextMorningTimeMillis() - System.currentTimeMillis()), 1000 * 60 * 60 * 24, TimeUnit.MILLISECONDS);
}
private void samplingInSeconds() {
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
next.getValue().samplingInSeconds();
}
}
private void samplingInMinutes() {
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
next.getValue().samplingInMinutes();
}
}
private void samplingInHour() {
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
next.getValue().samplingInHour();
}
}
private void printAtMinutes() {
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
next.getValue().printAtMinutes();
}
}
private void printAtHour() {
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
next.getValue().printAtHour();
}
}
private void printAtDay() {
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
next.getValue().printAtDay();
}
}
public void addValue(final String statsKey, final int incValue, final int incTimes) {
StatsItem statsItem = this.getAndCreateStatsItem(statsKey);
statsItem.getValue().add(incValue);
statsItem.getTimes().add(incTimes);
statsItem.setLastUpdateTimestamp(System.currentTimeMillis());
}
public void addRTValue(final String statsKey, final int incValue, final int incTimes) {
StatsItem statsItem = this.getAndCreateRTStatsItem(statsKey);
statsItem.getValue().add(incValue);
statsItem.getTimes().add(incTimes);
statsItem.setLastUpdateTimestamp(System.currentTimeMillis());
}
public void delValue(final String statsKey) {
StatsItem statsItem = this.statsItemTable.get(statsKey);
if (null != statsItem) {
this.statsItemTable.remove(statsKey);
}
}
public void delValueByPrefixKey(final String statsKey, String separator) {
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
if (next.getKey().startsWith(statsKey + separator)) {
it.remove();
}
}
}
public void delValueByInfixKey(final String statsKey, String separator) {
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
if (next.getKey().contains(separator + statsKey + separator)) {
it.remove();
}
}
}
public void delValueBySuffixKey(final String statsKey, String separator) {
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
if (next.getKey().endsWith(separator + statsKey)) {
it.remove();
}
}
}
public StatsItem getAndCreateStatsItem(final String statsKey) {
return getAndCreateItem(statsKey, false);
}
public StatsItem getAndCreateRTStatsItem(final String statsKey) {
return getAndCreateItem(statsKey, true);
}
public StatsItem getAndCreateItem(final String statsKey, boolean rtItem) {
StatsItem statsItem = this.statsItemTable.get(statsKey);
if (null == statsItem) {
if (rtItem) {
statsItem = new RTStatsItem(this.statsName, statsKey, this.scheduledExecutorService, logger);
} else {
statsItem = new StatsItem(this.statsName, statsKey, this.scheduledExecutorService, logger);
}
StatsItem prev = this.statsItemTable.putIfAbsent(statsKey, statsItem);
if (null != prev) {
statsItem = prev;
// statsItem.init();
}
}
return statsItem;
}
public StatsSnapshot getStatsDataInMinute(final String statsKey) {
StatsItem statsItem = this.statsItemTable.get(statsKey);
if (null != statsItem) {
return statsItem.getStatsDataInMinute();
}
return new StatsSnapshot();
}
public StatsSnapshot getStatsDataInHour(final String statsKey) {
StatsItem statsItem = this.statsItemTable.get(statsKey);
if (null != statsItem) {
return statsItem.getStatsDataInHour();
}
return new StatsSnapshot();
}
public StatsSnapshot getStatsDataInDay(final String statsKey) {
StatsItem statsItem = this.statsItemTable.get(statsKey);
if (null != statsItem) {
return statsItem.getStatsDataInDay();
}
return new StatsSnapshot();
}
public StatsItem getStatsItem(final String statsKey) {
return this.statsItemTable.get(statsKey);
}
public void cleanResource(int maxStatsIdleTimeInMinutes) {
COMMERCIAL_LOG.info("CleanStatisticItemOld: kind:{}, size:{}", statsName, this.statsItemTable.size());
Iterator<Entry<String, StatsItem>> it = this.statsItemTable.entrySet().iterator();
while (it.hasNext()) {
Entry<String, StatsItem> next = it.next();
StatsItem statsItem = next.getValue();
if (System.currentTimeMillis() - statsItem.getLastUpdateTimestamp() > maxStatsIdleTimeInMinutes * 60 * 1000L) {
it.remove();
COMMERCIAL_LOG.info("CleanStatisticItemOld: removeKind:{}, removeKey:{}", statsName, statsItem.getStatsKey());
}
}
}
}
|
StatsItemSet
|
java
|
apache__camel
|
components/camel-univocity-parsers/src/main/java/org/apache/camel/dataformat/univocity/Marshaller.java
|
{
"start": 1525,
"end": 4423
}
|
class ____<W extends AbstractWriter<?>> {
private final Lock headersLock = new ReentrantLock();
private final LinkedHashSet<String> headers = new LinkedHashSet<>();
private final boolean adaptHeaders;
/**
* Creates a new instance.
*
* @param headers the base headers to use
* @param adaptHeaders whether or not we can add headers on the fly depending on the data
*/
Marshaller(String[] headers, boolean adaptHeaders) {
if (headers != null) {
this.headers.addAll(Arrays.asList(headers));
}
this.adaptHeaders = adaptHeaders;
}
/**
* Marshals the given body.
*
* @param exchange exchange to use (for type conversion)
* @param body body to marshal
* @param writer uniVocity writer to use
* @throws NoTypeConversionAvailableException when it's not possible to convert the body as list and maps.
*/
public void marshal(Exchange exchange, Object body, W writer) throws NoTypeConversionAvailableException {
try {
List<?> list = convertToType(exchange, List.class, body);
if (list != null) {
for (Object row : list) {
writeRow(exchange, row, writer);
}
} else {
writeRow(exchange, body, writer);
}
} finally {
writer.close();
}
}
/**
* Writes the given row.
*
* @param exchange exchange to use (for type conversion)
* @param row row to write
* @param writer uniVocity writer to use
* @throws NoTypeConversionAvailableException when it's not possible to convert the row as map.
*/
private void writeRow(Exchange exchange, Object row, W writer) throws NoTypeConversionAvailableException {
Map<?, ?> map = convertToMandatoryType(exchange, Map.class, row);
if (adaptHeaders) {
headersLock.lock();
try {
for (Object key : map.keySet()) {
headers.add(convertToMandatoryType(exchange, String.class, key));
}
writeRow(map, writer);
} finally {
headersLock.unlock();
}
} else {
writeRow(map, writer);
}
}
/**
* Writes the given map as row.
*
* @param map row values by header
* @param writer uniVocity writer to use
*/
private void writeRow(Map<?, ?> map, W writer) {
Object[] values = new Object[headers.size()];
int index = 0;
for (String header : headers) {
values[index++] = map.get(header);
}
writer.writeRow(values);
}
}
|
Marshaller
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/creators/TestCreators2.java
|
{
"start": 4207,
"end": 4275
}
|
interface ____ extends java.util.Set<Object> { }
static
|
Issue700Set
|
java
|
grpc__grpc-java
|
services/src/generated/main/grpc/io/grpc/reflection/v1alpha/ServerReflectionGrpc.java
|
{
"start": 11563,
"end": 12176
}
|
class ____
implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier {
ServerReflectionBaseDescriptorSupplier() {}
@java.lang.Override
public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
return io.grpc.reflection.v1alpha.ServerReflectionProto.getDescriptor();
}
@java.lang.Override
public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
return getFileDescriptor().findServiceByName("ServerReflection");
}
}
private static final
|
ServerReflectionBaseDescriptorSupplier
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilderTests.java
|
{
"start": 532,
"end": 799
}
|
class ____ extends AbstractNumericMetricTestCase<MinAggregationBuilder> {
@Override
protected MinAggregationBuilder doCreateTestAggregatorFactory() {
return new MinAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
}
}
|
MinAggregationBuilderTests
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/checkpoint/filemerging/FileMergingSnapshotManagerTestBase.java
|
{
"start": 3390,
"end": 39295
}
|
class ____ {
final String tmId = "Testing";
final JobID jobID = new JobID();
final JobVertexID vertexID = new JobVertexID(289347923L, 75893479L);
SubtaskKey subtaskKey1;
SubtaskKey subtaskKey2;
Path checkpointBaseDir;
Path sharedStateDir;
Path taskOwnedStateDir;
int writeBufferSize;
abstract FileMergingType getFileMergingType();
@BeforeEach
public void setup(@TempDir java.nio.file.Path tempFolder) {
subtaskKey1 =
new SubtaskKey(jobID, vertexID, new TaskInfoImpl("TestingTask", 128, 0, 128, 3));
subtaskKey2 =
new SubtaskKey(jobID, vertexID, new TaskInfoImpl("TestingTask", 128, 1, 128, 3));
checkpointBaseDir = new Path(tempFolder.toString(), jobID.toHexString());
sharedStateDir = new Path(checkpointBaseDir, CHECKPOINT_SHARED_STATE_DIR);
taskOwnedStateDir = new Path(checkpointBaseDir, CHECKPOINT_TASK_OWNED_STATE_DIR);
writeBufferSize = 4096;
}
@Test
void testCreateFileMergingSnapshotManager() throws IOException {
try (FileMergingSnapshotManagerBase fmsm =
(FileMergingSnapshotManagerBase)
createFileMergingSnapshotManager(checkpointBaseDir)) {
fmsm.registerSubtaskForSharedStates(subtaskKey1);
String expectManagerId = String.format("job_%s_tm_%s", jobID, tmId);
assertThat(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.EXCLUSIVE))
.isEqualTo(new Path(taskOwnedStateDir, expectManagerId));
assertThat(fmsm.getManagedDir(subtaskKey1, CheckpointedStateScope.SHARED))
.isEqualTo(new Path(sharedStateDir, subtaskKey1.getManagedDirName()));
}
}
@Test
public void testSpecialCharactersInPath() throws IOException {
FileSystem fs = LocalFileSystem.getSharedInstance();
if (!fs.exists(checkpointBaseDir)) {
fs.mkdirs(checkpointBaseDir);
fs.mkdirs(sharedStateDir);
fs.mkdirs(taskOwnedStateDir);
}
// No exception will throw.
try (FileMergingSnapshotManager fmsm =
new FileMergingSnapshotManagerBuilder(
jobID,
new ResourceID("localhost:53424-,;:$&+=?/[]@#qqq"),
getFileMergingType())
.setMetricGroup(
new UnregisteredMetricGroups
.UnregisteredTaskManagerJobMetricGroup())
.build()) {
fmsm.initFileSystem(
LocalFileSystem.getSharedInstance(),
checkpointBaseDir,
sharedStateDir,
taskOwnedStateDir,
writeBufferSize);
assertThat(fmsm).isNotNull();
fmsm.registerSubtaskForSharedStates(
new SubtaskKey(jobID.toString(), ",;:$&+=?/[]@#www", 0, 1));
}
}
@Test
void testRefCountBetweenLogicalAndPhysicalFiles() throws IOException {
try (FileMergingSnapshotManagerBase fmsm =
(FileMergingSnapshotManagerBase)
createFileMergingSnapshotManager(checkpointBaseDir)) {
fmsm.registerSubtaskForSharedStates(subtaskKey1);
fmsm.registerSubtaskForSharedStates(subtaskKey2);
PhysicalFile physicalFile1 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 0, CheckpointedStateScope.SHARED);
assertThat(physicalFile1.isOpen()).isTrue();
LogicalFile logicalFile1 = fmsm.createLogicalFile(physicalFile1, 0, 10, subtaskKey1);
assertThat(logicalFile1.getSubtaskKey()).isEqualTo(subtaskKey1);
assertThat(logicalFile1.getPhysicalFile()).isEqualTo(physicalFile1);
assertThat(logicalFile1.getStartOffset()).isZero();
assertThat(logicalFile1.getLength()).isEqualTo(10);
assertThat(physicalFile1.getRefCount()).isOne();
assertThat(logicalFile1.isDiscarded()).isFalse();
logicalFile1.advanceLastCheckpointId(2);
assertThat(logicalFile1.getLastUsedCheckpointID()).isEqualTo(2);
logicalFile1.advanceLastCheckpointId(1);
assertThat(logicalFile1.getLastUsedCheckpointID()).isEqualTo(2);
logicalFile1.discardWithCheckpointId(1);
assertThat(logicalFile1.isDiscarded()).isFalse();
logicalFile1.discardWithCheckpointId(2);
assertThat(logicalFile1.isDiscarded()).isTrue();
// the stream is still open for reuse
assertThat(physicalFile1.isOpen()).isTrue();
assertThat(physicalFile1.isDeleted()).isFalse();
assertThat(physicalFile1.getRefCount()).isZero();
// duplicated discard takes no effect
logicalFile1.discardWithCheckpointId(2);
assertThat(physicalFile1.getRefCount()).isZero();
physicalFile1.close();
assertThat(physicalFile1.isOpen()).isFalse();
assertThat(physicalFile1.isDeleted()).isTrue();
// try close physical file but not deleted
PhysicalFile physicalFile2 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 0, CheckpointedStateScope.SHARED);
LogicalFile logicalFile2 = fmsm.createLogicalFile(physicalFile2, 0, 10, subtaskKey1);
assertThat(logicalFile2.getPhysicalFile()).isEqualTo(physicalFile2);
assertThat(logicalFile2.getStartOffset()).isZero();
assertThat(logicalFile2.getLength()).isEqualTo(10);
assertThat(physicalFile2.getRefCount()).isOne();
logicalFile2.advanceLastCheckpointId(2);
assertThat(physicalFile2.isOpen()).isTrue();
assertThat(physicalFile2.isDeleted()).isFalse();
physicalFile2.close();
assertThat(physicalFile2.isOpen()).isFalse();
assertThat(physicalFile2.isDeleted()).isFalse();
assertThat(physicalFile2.getRefCount()).isOne();
logicalFile2.discardWithCheckpointId(2);
assertThat(logicalFile2.isDiscarded()).isTrue();
assertThat(physicalFile2.isDeleted()).isTrue();
assertThat(physicalFile2.getRefCount()).isZero();
}
}
@Test
void testSizeStatsInPhysicalFile() throws IOException {
try (FileMergingSnapshotManagerBase fmsm =
(FileMergingSnapshotManagerBase)
createFileMergingSnapshotManager(checkpointBaseDir)) {
fmsm.registerSubtaskForSharedStates(subtaskKey1);
fmsm.registerSubtaskForSharedStates(subtaskKey2);
PhysicalFile physicalFile =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 0, CheckpointedStateScope.SHARED);
assertThat(physicalFile.getSize()).isZero();
physicalFile.incSize(123);
assertThat(physicalFile.getSize()).isEqualTo(123);
physicalFile.incSize(456);
assertThat(physicalFile.getSize()).isEqualTo(123 + 456);
}
}
@Test
void testSpaceStat() throws IOException {
try (FileMergingSnapshotManagerBase fmsm =
(FileMergingSnapshotManagerBase)
createFileMergingSnapshotManager(checkpointBaseDir)) {
fmsm.registerSubtaskForSharedStates(subtaskKey1);
fmsm.registerSubtaskForSharedStates(subtaskKey2);
PhysicalFile physicalFile1 =
fmsm.getOrCreatePhysicalFileForCheckpoint(
subtaskKey1, 0, CheckpointedStateScope.SHARED);
assertThat(physicalFile1.isOpen()).isTrue();
LogicalFile logicalFile1 = fmsm.createLogicalFile(physicalFile1, 0, 123, subtaskKey1);
assertThat(fmsm.spaceStat.physicalFileSize.get()).isEqualTo(123);
assertThat(fmsm.spaceStat.logicalFileSize.get()).isEqualTo(123);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(1);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(1);
assertThat(physicalFile1.getSize()).isEqualTo(123);
LogicalFile logicalFile2 = fmsm.createLogicalFile(physicalFile1, 0, 456, subtaskKey1);
assertThat(fmsm.spaceStat.physicalFileSize.get()).isEqualTo(123 + 456);
assertThat(fmsm.spaceStat.logicalFileSize.get()).isEqualTo(123 + 456);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(1);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(2);
assertThat(physicalFile1.getSize()).isEqualTo(123 + 456);
logicalFile1.discardWithCheckpointId(1);
fmsm.discardSingleLogicalFile(logicalFile1, 1);
assertThat(fmsm.spaceStat.physicalFileSize.get()).isEqualTo(123 + 456);
assertThat(fmsm.spaceStat.logicalFileSize.get()).isEqualTo(456);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(1);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(1);
physicalFile1.close();
fmsm.discardSingleLogicalFile(logicalFile2, 1);
assertThat(fmsm.spaceStat.physicalFileSize.get()).isEqualTo(0);
assertThat(fmsm.spaceStat.logicalFileSize.get()).isEqualTo(0);
assertThat(fmsm.spaceStat.physicalFileCount.get()).isEqualTo(0);
assertThat(fmsm.spaceStat.logicalFileCount.get()).isEqualTo(0);
}
}
    /**
     * Writes several segments through repeated get-write-close stream cycles and verifies that
     * they all land in the same reused physical file and can be read back byte-for-byte.
     */
    @Test
    public void testReusedFileWriting() throws Exception {
        long checkpointId = 1;
        int streamNum = 10;
        int perStreamWriteNum = 128;
        // write random bytes and then read them from the file
        byte[] bytes = new byte[streamNum * perStreamWriteNum];
        Random rd = new Random();
        rd.nextBytes(bytes);
        int byteIndex = 0;
        SegmentFileStateHandle[] handles = new SegmentFileStateHandle[streamNum];
        try (FileMergingSnapshotManager fmsm = createFileMergingSnapshotManager(checkpointBaseDir);
                CloseableRegistry closeableRegistry = new CloseableRegistry()) {
            // repeatedly get-write-close streams
            for (int i = 0; i < streamNum; i++) {
                FileMergingCheckpointStateOutputStream stream =
                        fmsm.createCheckpointStateOutputStream(
                                subtaskKey1, checkpointId, CheckpointedStateScope.EXCLUSIVE);
                try {
                    closeableRegistry.registerCloseable(stream);
                    for (int j = 0; j < perStreamWriteNum; j++) {
                        stream.write(bytes[byteIndex++]);
                    }
                    handles[i] = stream.closeAndGetHandle();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
            // assert the streams writes to the same file correctly
            byteIndex = 0;
            Path filePath = null;
            for (SegmentFileStateHandle handle : handles) {
                // check file path: every handle must point at the same (reused) physical file
                Path thisFilePath = handle.getFilePath();
                assertThat(filePath == null || filePath.equals(thisFilePath)).isTrue();
                filePath = thisFilePath;
                // check file content: each segment reads back exactly what was written into it
                FSDataInputStream is = handle.openInputStream();
                closeableRegistry.registerCloseable(is);
                int readValue;
                while ((readValue = is.read()) != -1) {
                    assertThat((byte) readValue).isEqualTo(bytes[byteIndex++]);
                }
            }
        }
    }
    /**
     * Opens many checkpoint output streams from concurrent threads and verifies that segments
     * sharing a physical file were serialized: each segment reads back as the uninterleaved
     * sequence 0..N-1 that its writer produced.
     */
    @Test
    public void testConcurrentWriting() throws Exception {
        long checkpointId = 1;
        int numThreads = 12;
        int perStreamWriteNum = 128;
        Set<Future<SegmentFileStateHandle>> futures = new HashSet<>();
        try (FileMergingSnapshotManager fmsm = createFileMergingSnapshotManager(checkpointBaseDir);
                CloseableRegistry closeableRegistry = new CloseableRegistry()) {
            // write data concurrently
            for (int i = 0; i < numThreads; i++) {
                futures.add(
                        CompletableFuture.supplyAsync(
                                () -> {
                                    FileMergingCheckpointStateOutputStream stream =
                                            fmsm.createCheckpointStateOutputStream(
                                                    subtaskKey1,
                                                    checkpointId,
                                                    CheckpointedStateScope.EXCLUSIVE);
                                    try {
                                        closeableRegistry.registerCloseable(stream);
                                        for (int j = 0; j < perStreamWriteNum; j++) {
                                            stream.write(j);
                                        }
                                        return stream.closeAndGetHandle();
                                    } catch (IOException e) {
                                        throw new RuntimeException(e);
                                    }
                                }));
            }
            // assert that multiple segments in the same file were not written concurrently
            for (Future<SegmentFileStateHandle> future : futures) {
                SegmentFileStateHandle segmentFileStateHandle = future.get();
                FSDataInputStream is = segmentFileStateHandle.openInputStream();
                closeableRegistry.registerCloseable(is);
                int readValue;
                int expected = 0;
                while ((readValue = is.read()) != -1) {
                    assertThat(readValue).isEqualTo(expected++);
                }
            }
        }
    }
    /**
     * Verifies physical-file reuse with a blocking file pool: a returned file is handed out
     * again, but a file that has grown past {@code maxPhysicalFileSize} is never reused, for
     * both SHARED and EXCLUSIVE scopes.
     */
    @Test
    public void testConcurrentFileReusingWithBlockingPool() throws Exception {
        try (FileMergingSnapshotManagerBase fmsm =
                (FileMergingSnapshotManagerBase)
                        createFileMergingSnapshotManager(
                                checkpointBaseDir,
                                32,
                                PhysicalFilePool.Type.BLOCKING,
                                Float.MAX_VALUE)) {
            fmsm.registerSubtaskForSharedStates(subtaskKey1);
            // test reusing a physical file
            PhysicalFile file1 =
                    fmsm.getOrCreatePhysicalFileForCheckpoint(
                            subtaskKey1, 0, CheckpointedStateScope.SHARED);
            fmsm.returnPhysicalFileForNextReuse(subtaskKey1, 0, file1);
            PhysicalFile file2 =
                    fmsm.getOrCreatePhysicalFileForCheckpoint(
                            subtaskKey1, 0, CheckpointedStateScope.SHARED);
            assertThat(file2).isEqualTo(file1);
            // a physical file whose size is bigger than maxPhysicalFileSize cannot be reused
            file2.incSize(fmsm.maxPhysicalFileSize);
            fmsm.returnPhysicalFileForNextReuse(subtaskKey1, 0, file2);
            PhysicalFile file3 =
                    fmsm.getOrCreatePhysicalFileForCheckpoint(
                            subtaskKey1, 0, CheckpointedStateScope.SHARED);
            assertThat(file3).isNotEqualTo(file2);
            // test for exclusive scope
            PhysicalFile file4 =
                    fmsm.getOrCreatePhysicalFileForCheckpoint(
                            subtaskKey1, 0, CheckpointedStateScope.EXCLUSIVE);
            fmsm.returnPhysicalFileForNextReuse(subtaskKey1, 0, file4);
            PhysicalFile file5 =
                    fmsm.getOrCreatePhysicalFileForCheckpoint(
                            subtaskKey1, 0, CheckpointedStateScope.EXCLUSIVE);
            assertThat(file5).isEqualTo(file4);
            file5.incSize(fmsm.maxPhysicalFileSize);
            fmsm.returnPhysicalFileForNextReuse(subtaskKey1, 0, file5);
            PhysicalFile file6 =
                    fmsm.getOrCreatePhysicalFileForCheckpoint(
                            subtaskKey1, 0, CheckpointedStateScope.EXCLUSIVE);
            assertThat(file6).isNotEqualTo(file5);
        }
    }
    /**
     * Verifies that reusing a state handle in a later checkpoint advances the owning logical
     * file's last-used-checkpoint watermark, and that handles NOT reused are discarded once the
     * previous checkpoint is subsumed.
     */
    @Test
    public void testReuseCallbackAndAdvanceWatermark() throws Exception {
        long checkpointId = 1;
        int streamNum = 20;
        int perStreamWriteNum = 128;
        // write random bytes and then read them from the file
        byte[] bytes = new byte[streamNum * perStreamWriteNum];
        Random rd = new Random();
        rd.nextBytes(bytes);
        int byteIndex = 0;
        SegmentFileStateHandle[] handles = new SegmentFileStateHandle[streamNum];
        try (FileMergingSnapshotManager fmsm = createFileMergingSnapshotManager(checkpointBaseDir);
                CloseableRegistry closeableRegistry = new CloseableRegistry()) {
            fmsm.registerSubtaskForSharedStates(subtaskKey1);
            // repeatedly get-write-close streams
            for (int i = 0; i < streamNum; i++) {
                FileMergingCheckpointStateOutputStream stream =
                        fmsm.createCheckpointStateOutputStream(
                                subtaskKey1, checkpointId, CheckpointedStateScope.SHARED);
                try {
                    closeableRegistry.registerCloseable(stream);
                    for (int j = 0; j < perStreamWriteNum; j++) {
                        stream.write(bytes[byteIndex++]);
                    }
                    handles[i] = stream.closeAndGetHandle();
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
            // start reuse: each checkpoint cp reuses a shrinking prefix of the handles
            for (long cp = checkpointId + 1; cp <= 10; cp++) {
                ArrayList<SegmentFileStateHandle> reuse = new ArrayList<>();
                for (int j = 0; j <= 10 - cp; j++) {
                    reuse.add(handles[j]);
                }
                fmsm.reusePreviousStateHandle(cp, reuse);
                // assert the reusing affects the watermark
                for (SegmentFileStateHandle handle : reuse) {
                    assertThat(
                                    ((FileMergingSnapshotManagerBase) fmsm)
                                            .getLogicalFile(handle.getLogicalFileId())
                                            .getLastUsedCheckpointID())
                            .isEqualTo(cp);
                }
                // subsumed
                fmsm.notifyCheckpointSubsumed(subtaskKey1, cp - 1);
                // assert the other files discarded.
                for (int j = 10 - (int) cp + 1; j < streamNum; j++) {
                    assertThat(
                                    ((FileMergingSnapshotManagerBase) fmsm)
                                            .getLogicalFile(handles[j].getLogicalFileId()))
                            .isNull();
                }
            }
        }
    }
    /**
     * End-to-end restore test: builds a {@link TaskStateSnapshot} with one manager instance, then
     * restores it into a fresh manager and checks that logical files, space statistics, and
     * physical-file cleanup on subsumption are all reconstructed correctly.
     */
    @Test
    public void testRestore() throws Exception {
        TaskStateSnapshot taskStateSnapshot;
        long checkpointId = 222;
        SpaceStat oldSpaceStat;
        // Step1: build TaskStateSnapshot using FileMergingSnapshotManagerBase;
        try (FileMergingSnapshotManagerBase fmsm =
                        (FileMergingSnapshotManagerBase)
                                createFileMergingSnapshotManager(checkpointBaseDir);
                CloseableRegistry closeableRegistry = new CloseableRegistry()) {
            fmsm.registerSubtaskForSharedStates(subtaskKey1);
            fmsm.notifyCheckpointStart(subtaskKey1, checkpointId);
            Map<OperatorID, OperatorSubtaskState> subtaskStatesByOperatorID = new HashMap<>();
            // Here, we simulate a task with 2 operators, each operator has one keyed state and one
            // operator state. The second operator's id is the same as the vertexID.
            // first operator
            subtaskStatesByOperatorID.put(
                    new OperatorID(777L, 75893479L),
                    buildOperatorSubtaskState(checkpointId, fmsm, closeableRegistry));
            // second operator
            subtaskStatesByOperatorID.put(
                    OperatorID.fromJobVertexID(vertexID),
                    buildOperatorSubtaskState(checkpointId, fmsm, closeableRegistry));
            taskStateSnapshot = new TaskStateSnapshot(subtaskStatesByOperatorID);
            oldSpaceStat = fmsm.spaceStat;
            fmsm.notifyCheckpointComplete(subtaskKey1, checkpointId);
        }
        assertThat(taskStateSnapshot).isNotNull();
        // Step 2: restore FileMergingSnapshotManagerBase from the TaskStateSnapshot.
        try (FileMergingSnapshotManagerBase fmsm =
                (FileMergingSnapshotManagerBase)
                        createFileMergingSnapshotManager(checkpointBaseDir)) {
            fmsm.registerSubtaskForSharedStates(subtaskKey1);
            TaskInfo taskInfo =
                    new TaskInfoImpl(
                            "test restore",
                            128,
                            subtaskKey1.subtaskIndex,
                            subtaskKey1.parallelism,
                            0);
            for (Map.Entry<OperatorID, OperatorSubtaskState> entry :
                    taskStateSnapshot.getSubtaskStateMappings()) {
                SubtaskFileMergingManagerRestoreOperation restoreOperation =
                        new SubtaskFileMergingManagerRestoreOperation(
                                checkpointId, fmsm, jobID, taskInfo, vertexID, entry.getValue());
                restoreOperation.restore();
            }
            TreeMap<Long, Set<LogicalFile>> stateFiles = fmsm.getUploadedStates();
            assertThat(stateFiles.size()).isEqualTo(1);
            Set<LogicalFile> restoreFileSet = stateFiles.get(checkpointId);
            assertThat(restoreFileSet).isNotNull();
            // 2 operators * (2 keyed state + 2 operator state)
            assertThat(restoreFileSet.size()).isEqualTo(8);
            assertThat(fmsm.spaceStat).isEqualTo(oldSpaceStat);
            for (LogicalFile file : restoreFileSet) {
                assertThat(fmsm.getLogicalFile(file.getFileId())).isEqualTo(file);
            }
            Set<Path> physicalFileSet =
                    restoreFileSet.stream()
                            .map(LogicalFile::getPhysicalFile)
                            .map(PhysicalFile::getFilePath)
                            .collect(Collectors.toSet());
            // Subsuming the restored checkpoint must delete all restored physical files.
            fmsm.notifyCheckpointSubsumed(subtaskKey1, checkpointId);
            for (Path path : physicalFileSet) {
                assertThat(path.getFileSystem().exists(path)).isFalse();
            }
        }
    }
    /**
     * Verifies when the managed (shared + exclusive) directories are cleaned up on manager close:
     * removed when no checkpoint ever succeeded (none triggered, or all aborted), kept when a
     * checkpoint completed, and kept when RPC loss makes the outcome ambiguous.
     */
    @Test
    public void testManagedDirCleanup() throws Exception {
        FileSystem fs = LocalFileSystem.getSharedInstance();
        Path sharedDirOfSubtask1 = new Path(sharedStateDir, subtaskKey1.getManagedDirName());
        Path sharedDirOfSubtask2 = new Path(sharedStateDir, subtaskKey2.getManagedDirName());
        Path exclusiveDir;
        // 1. Test clean up managed dir after non checkpoint triggered
        emptyCheckpointBaseDir();
        try (FileMergingSnapshotManagerBase fmsm =
                (FileMergingSnapshotManagerBase)
                        createFileMergingSnapshotManager(
                                checkpointBaseDir,
                                32,
                                PhysicalFilePool.Type.BLOCKING,
                                Float.MAX_VALUE)) {
            fmsm.registerSubtaskForSharedStates(subtaskKey1);
            fmsm.registerSubtaskForSharedStates(subtaskKey2);
            assertThat(fs.exists(sharedDirOfSubtask1)).isTrue();
            assertThat(fs.exists(sharedDirOfSubtask2)).isTrue();
            exclusiveDir = new Path(taskOwnedStateDir, fmsm.getId());
            assertThat(fs.exists(exclusiveDir)).isTrue();
        }
        assertThat(fs.exists(sharedDirOfSubtask1)).isFalse();
        assertThat(fs.exists(sharedDirOfSubtask2)).isFalse();
        assertThat(fs.exists(exclusiveDir)).isFalse();
        // 2. Test clean up managed dir after all checkpoint abort
        emptyCheckpointBaseDir();
        try (FileMergingSnapshotManagerBase fmsm =
                (FileMergingSnapshotManagerBase)
                        createFileMergingSnapshotManager(
                                checkpointBaseDir,
                                32,
                                PhysicalFilePool.Type.BLOCKING,
                                Float.MAX_VALUE)) {
            fmsm.registerSubtaskForSharedStates(subtaskKey1);
            fmsm.registerSubtaskForSharedStates(subtaskKey2);
            // record reference from checkpoint 1
            fmsm.notifyCheckpointStart(subtaskKey1, 1L);
            fmsm.notifyCheckpointStart(subtaskKey2, 1L);
            // checkpoint 1 aborted
            fmsm.notifyCheckpointAborted(subtaskKey1, 1L);
            fmsm.notifyCheckpointAborted(subtaskKey2, 1L);
            assertThat(fs.exists(sharedDirOfSubtask1)).isTrue();
            assertThat(fs.exists(sharedDirOfSubtask2)).isTrue();
            exclusiveDir = new Path(taskOwnedStateDir, fmsm.getId());
            assertThat(fs.exists(exclusiveDir)).isTrue();
        }
        assertThat(fs.exists(sharedDirOfSubtask1)).isFalse();
        assertThat(fs.exists(sharedDirOfSubtask2)).isFalse();
        assertThat(fs.exists(exclusiveDir)).isFalse();
        // 3. Test not clean up managed dir after checkpoint complete
        emptyCheckpointBaseDir();
        try (FileMergingSnapshotManagerBase fmsm =
                (FileMergingSnapshotManagerBase)
                        createFileMergingSnapshotManager(
                                checkpointBaseDir,
                                32,
                                PhysicalFilePool.Type.BLOCKING,
                                Float.MAX_VALUE)) {
            fmsm.registerSubtaskForSharedStates(subtaskKey1);
            fmsm.registerSubtaskForSharedStates(subtaskKey2);
            // record reference from checkpoint 1
            fmsm.notifyCheckpointStart(subtaskKey1, 1L);
            fmsm.notifyCheckpointStart(subtaskKey2, 1L);
            // checkpoint 1 complete
            fmsm.notifyCheckpointComplete(subtaskKey1, 1L);
            fmsm.notifyCheckpointComplete(subtaskKey2, 1L);
            assertThat(fs.exists(sharedDirOfSubtask1)).isTrue();
            assertThat(fs.exists(sharedDirOfSubtask2)).isTrue();
            exclusiveDir = new Path(taskOwnedStateDir, fmsm.getId());
            assertThat(fs.exists(exclusiveDir)).isTrue();
        }
        assertThat(fs.exists(sharedDirOfSubtask1)).isTrue();
        assertThat(fs.exists(sharedDirOfSubtask2)).isTrue();
        assertThat(fs.exists(exclusiveDir)).isTrue();
        // 4. Test not clean up managed dir when rpc loss
        emptyCheckpointBaseDir();
        try (FileMergingSnapshotManagerBase fmsm =
                (FileMergingSnapshotManagerBase)
                        createFileMergingSnapshotManager(
                                checkpointBaseDir,
                                32,
                                PhysicalFilePool.Type.BLOCKING,
                                Float.MAX_VALUE)) {
            fmsm.registerSubtaskForSharedStates(subtaskKey1);
            fmsm.registerSubtaskForSharedStates(subtaskKey2);
            // record reference from checkpoint 1
            fmsm.notifyCheckpointStart(subtaskKey1, 1L);
            fmsm.notifyCheckpointStart(subtaskKey2, 1L);
            // checkpoint 1 complete rpc loss
            // checkpoint 2 start rpc loss
            // checkpoint 2 aborted
            fmsm.notifyCheckpointAborted(subtaskKey1, 2L);
            fmsm.notifyCheckpointAborted(subtaskKey2, 2L);
            assertThat(fs.exists(sharedDirOfSubtask1)).isTrue();
            assertThat(fs.exists(sharedDirOfSubtask2)).isTrue();
            exclusiveDir = new Path(taskOwnedStateDir, fmsm.getId());
            assertThat(fs.exists(exclusiveDir)).isTrue();
        }
        assertThat(fs.exists(sharedDirOfSubtask1)).isTrue();
        assertThat(fs.exists(sharedDirOfSubtask2)).isTrue();
        assertThat(fs.exists(exclusiveDir)).isTrue();
    }
private void emptyCheckpointBaseDir() throws IOException {
FileSystem fs = checkpointBaseDir.getFileSystem();
FileStatus[] sub = fs.listStatus(checkpointBaseDir);
if (sub != null) {
for (FileStatus subFile : sub) {
fs.delete(subFile.getPath(), true);
}
}
}
    /**
     * Builds an {@link OperatorSubtaskState} containing four file-merging-backed handles: managed
     * keyed (SHARED scope), raw keyed, managed operator, and raw operator state (all EXCLUSIVE).
     * Each handle is a fresh one-segment file written through the given manager.
     */
    private OperatorSubtaskState buildOperatorSubtaskState(
            long checkpointId, FileMergingSnapshotManager fmsm, CloseableRegistry closeableRegistry)
            throws Exception {
        IncrementalRemoteKeyedStateHandle keyedStateHandle1 =
                new IncrementalRemoteKeyedStateHandle(
                        UUID.randomUUID(),
                        new KeyGroupRange(0, 8),
                        checkpointId,
                        Collections.singletonList(
                                IncrementalKeyedStateHandle.HandleAndLocalPath.of(
                                        buildOneSegmentFileHandle(
                                                checkpointId,
                                                fmsm,
                                                CheckpointedStateScope.SHARED,
                                                closeableRegistry),
                                        "localPath")),
                        Collections.emptyList(),
                        null);
        KeyGroupsStateHandle keyedStateHandle2 =
                new KeyGroupsStateHandle(
                        new KeyGroupRangeOffsets(0, 8),
                        buildOneSegmentFileHandle(
                                checkpointId,
                                fmsm,
                                CheckpointedStateScope.EXCLUSIVE,
                                closeableRegistry));
        OperatorStateHandle operatorStateHandle1 =
                new FileMergingOperatorStreamStateHandle(
                        null,
                        null,
                        Collections.emptyMap(),
                        buildOneSegmentFileHandle(
                                checkpointId,
                                fmsm,
                                CheckpointedStateScope.EXCLUSIVE,
                                closeableRegistry));
        OperatorStateHandle operatorStateHandle2 =
                new FileMergingOperatorStreamStateHandle(
                        null,
                        null,
                        Collections.emptyMap(),
                        buildOneSegmentFileHandle(
                                checkpointId,
                                fmsm,
                                CheckpointedStateScope.EXCLUSIVE,
                                closeableRegistry));
        return OperatorSubtaskState.builder()
                .setManagedKeyedState(keyedStateHandle1)
                .setRawKeyedState(keyedStateHandle2)
                .setManagedOperatorState(operatorStateHandle1)
                .setRawOperatorState(operatorStateHandle2)
                .build();
    }
private SegmentFileStateHandle buildOneSegmentFileHandle(
long checkpointId,
FileMergingSnapshotManager fmsm,
CheckpointedStateScope scope,
CloseableRegistry closeableRegistry)
throws Exception {
FileMergingCheckpointStateOutputStream outputStream =
writeCheckpointAndGetStream(checkpointId, fmsm, scope, closeableRegistry);
return outputStream.closeAndGetHandle();
}
FileMergingSnapshotManager createFileMergingSnapshotManager(Path checkpointBaseDir)
throws IOException {
return createFileMergingSnapshotManager(
checkpointBaseDir, 32 * 1024 * 1024, PhysicalFilePool.Type.NON_BLOCKING, 2f);
}
    /**
     * Creates and initializes a {@link FileMergingSnapshotManager} on the local file system,
     * creating the checkpoint, shared, and task-owned directories if needed.
     *
     * @param checkpointBaseDir base directory for checkpoint data
     * @param maxFileSize max size a physical file may reach before it stops being reused
     * @param filePoolType blocking or non-blocking physical file pool
     * @param spaceAmplification max allowed ratio of physical to logical space
     */
    FileMergingSnapshotManager createFileMergingSnapshotManager(
            Path checkpointBaseDir,
            long maxFileSize,
            PhysicalFilePool.Type filePoolType,
            float spaceAmplification)
            throws IOException {
        FileSystem fs = LocalFileSystem.getSharedInstance();
        if (!fs.exists(checkpointBaseDir)) {
            fs.mkdirs(checkpointBaseDir);
            fs.mkdirs(sharedStateDir);
            fs.mkdirs(taskOwnedStateDir);
        }
        FileMergingSnapshotManager fmsm =
                new FileMergingSnapshotManagerBuilder(
                                jobID, new ResourceID(tmId), getFileMergingType())
                        .setMaxFileSize(maxFileSize)
                        .setFilePoolType(filePoolType)
                        .setMaxSpaceAmplification(spaceAmplification)
                        .setMetricGroup(
                                new UnregisteredMetricGroups
                                        .UnregisteredTaskManagerJobMetricGroup())
                        .build();
        fmsm.initFileSystem(
                LocalFileSystem.getSharedInstance(),
                checkpointBaseDir,
                sharedStateDir,
                taskOwnedStateDir,
                writeBufferSize);
        assertThat(fmsm).isNotNull();
        return fmsm;
    }
FileMergingCheckpointStateOutputStream writeCheckpointAndGetStream(
long checkpointId,
FileMergingSnapshotManager fmsm,
CheckpointedStateScope scope,
CloseableRegistry closeableRegistry)
throws IOException {
return writeCheckpointAndGetStream(
subtaskKey1, checkpointId, scope, fmsm, closeableRegistry, 32);
}
    /** Writes a default 32-byte segment for the given subtask and returns the still-open stream. */
    FileMergingCheckpointStateOutputStream writeCheckpointAndGetStream(
            SubtaskKey subtaskKey,
            long checkpointId,
            CheckpointedStateScope scope,
            FileMergingSnapshotManager fmsm,
            CloseableRegistry closeableRegistry)
            throws IOException {
        return writeCheckpointAndGetStream(
                subtaskKey, checkpointId, scope, fmsm, closeableRegistry, 32);
    }
FileMergingCheckpointStateOutputStream writeCheckpointAndGetStream(
SubtaskKey subtaskKey,
long checkpointId,
CheckpointedStateScope scope,
FileMergingSnapshotManager fmsm,
CloseableRegistry closeableRegistry,
int numBytes)
throws IOException {
FileMergingCheckpointStateOutputStream stream =
fmsm.createCheckpointStateOutputStream(subtaskKey, checkpointId, scope);
closeableRegistry.registerCloseable(stream);
for (int i = 0; i < numBytes; i++) {
stream.write(i);
}
return stream;
}
void assertFileInManagedDir(
FileMergingSnapshotManager fmsm, SegmentFileStateHandle stateHandle) {
assertThat(fmsm instanceof FileMergingSnapshotManagerBase).isTrue();
assertThat(stateHandle).isNotNull();
Path filePath = stateHandle.getFilePath();
assertThat(filePath).isNotNull();
assertThat(((FileMergingSnapshotManagerBase) fmsm).isResponsibleForFile(filePath)).isTrue();
}
boolean fileExists(SegmentFileStateHandle stateHandle) throws IOException {
assertThat(stateHandle).isNotNull();
Path filePath = stateHandle.getFilePath();
assertThat(filePath).isNotNull();
return filePath.getFileSystem().exists(filePath);
}
}
|
FileMergingSnapshotManagerTestBase
|
java
|
spring-projects__spring-security
|
config/src/main/java/org/springframework/security/config/http/OAuth2ClientBeanDefinitionParserUtils.java
|
{
"start": 1156,
"end": 3306
}
|
class ____ {

	private static final String ATT_CLIENT_REGISTRATION_REPOSITORY_REF = "client-registration-repository-ref";

	private static final String ATT_AUTHORIZED_CLIENT_REPOSITORY_REF = "authorized-client-repository-ref";

	private static final String ATT_AUTHORIZED_CLIENT_SERVICE_REF = "authorized-client-service-ref";

	private OAuth2ClientBeanDefinitionParserUtils() {
	}

	/**
	 * Returns the repository configured via {@code client-registration-repository-ref},
	 * falling back to a by-type reference when the attribute is absent.
	 */
	static BeanMetadataElement getClientRegistrationRepository(Element element) {
		BeanMetadataElement reference = getBeanReference(element, ATT_CLIENT_REGISTRATION_REPOSITORY_REF);
		return (reference != null) ? reference : new RuntimeBeanReference(ClientRegistrationRepository.class);
	}

	/** Returns the configured authorized-client repository reference, or {@code null} if not set. */
	static BeanMetadataElement getAuthorizedClientRepository(Element element) {
		return getBeanReference(element, ATT_AUTHORIZED_CLIENT_REPOSITORY_REF);
	}

	/** Returns the configured authorized-client service reference, or {@code null} if not set. */
	static BeanMetadataElement getAuthorizedClientService(Element element) {
		return getBeanReference(element, ATT_AUTHORIZED_CLIENT_SERVICE_REF);
	}

	/**
	 * Resolves a {@code *-ref} attribute to a {@link RuntimeBeanReference}, or {@code null}
	 * when the attribute is absent or empty. (Shared by the three lookups above.)
	 */
	private static BeanMetadataElement getBeanReference(Element element, String attributeName) {
		String beanName = element.getAttribute(attributeName);
		return StringUtils.hasLength(beanName) ? new RuntimeBeanReference(beanName) : null;
	}

	/**
	 * Builds the default {@code AuthenticatedPrincipalOAuth2AuthorizedClientRepository},
	 * first creating an in-memory authorized-client service when none was supplied.
	 */
	static BeanDefinition createDefaultAuthorizedClientRepository(BeanMetadataElement clientRegistrationRepository,
			BeanMetadataElement authorizedClientService) {
		if (authorizedClientService == null) {
			authorizedClientService = BeanDefinitionBuilder
				.rootBeanDefinition("org.springframework.security.oauth2.client.InMemoryOAuth2AuthorizedClientService")
				.addConstructorArgValue(clientRegistrationRepository)
				.getBeanDefinition();
		}
		return BeanDefinitionBuilder.rootBeanDefinition(
				"org.springframework.security.oauth2.client.web.AuthenticatedPrincipalOAuth2AuthorizedClientRepository")
			.addConstructorArgValue(authorizedClientService)
			.getBeanDefinition();
	}

}
|
OAuth2ClientBeanDefinitionParserUtils
|
java
|
micronaut-projects__micronaut-core
|
http/src/main/java/io/micronaut/http/bind/binders/QueryValueArgumentBinder.java
|
{
"start": 1707,
"end": 8958
}
|
class ____<T> extends AbstractArgumentBinder<T> implements AnnotatedRequestArgumentBinder<QueryValue, T> {

    /**
     * Constructor.
     *
     * @param conversionService conversion service
     */
    public QueryValueArgumentBinder(ConversionService conversionService) {
        super(conversionService);
    }

    /**
     * Constructor.
     *
     * @param conversionService conversion service
     * @param argument The argument
     */
    public QueryValueArgumentBinder(ConversionService conversionService, Argument<T> argument) {
        super(conversionService, argument);
    }

    @Override
    public RequestArgumentBinder<T> createSpecific(Argument<T> argument) {
        return new QueryValueArgumentBinder<>(conversionService, argument);
    }

    @Override
    public Class<QueryValue> getAnnotationType() {
        return QueryValue.class;
    }

    /**
     * Binds the argument with {@link QueryValue} annotation to the request
     * (Also binds without annotation if request body is not permitted).
     * <p>
     * It will first try to convert to ConvertibleMultiValues type and if conversion is successful, add the
     * corresponding parameters to the request. (By default the conversion will be successful if the {@link Format}
     * annotation is present and has one of the supported values - see
     * {@link io.micronaut.core.convert.converters.MultiValuesConverterFactory} for specific converters). Otherwise,
     * the uri template will be used to deduce what will be done with the request. For example, simple parameters are
     * converted to {@link String}
     */
    @Override
    public BindingResult<T> bind(ArgumentConversionContext<T> context, HttpRequest<?> source) {
        ConvertibleMultiValues<String> parameters = source.getParameters();
        Argument<T> argument = context.getArgument();
        AnnotationMetadata annotationMetadata = argument.getAnnotationMetadata();
        if (source.getMethod().permitsRequestBody() && !annotationMetadata.hasAnnotation(QueryValue.class)) {
            // During the unmatched check avoid requests that don't allow bodies
            return BindingResult.unsatisfied();
        }
        // Try simple (per-parameter) binding first; fall back to POJO binding via introspection.
        BindingResult<T> bindSimpleResult = bindSimple(context, source, annotationMetadata, parameters, argument);
        if (bindSimpleResult.isSatisfied()) {
            return bindSimpleResult;
        }
        return bindPojo(context, parameters, argument);
    }

    /**
     * Attempts direct binding: first via a ConvertibleMultiValues conversion (when {@link Format}
     * is present), then — for exploded URI variables ({@code ?pojo*}) — by binding all request
     * parameters, and finally via the standard single-parameter lookup.
     */
    private BindingResult<T> bindSimple(ArgumentConversionContext<T> context,
                                        HttpRequest<?> source,
                                        AnnotationMetadata annotationMetadata,
                                        ConvertibleMultiValues<String> parameters,
                                        Argument<T> argument) {
        // First try converting from the ConvertibleMultiValues type and if conversion is successful, return it.
        // Otherwise, use the given uri template to deduce what to do with the variable
        Optional<T> multiValueConversion;
        if (annotationMetadata.hasAnnotation(Format.class)) {
            multiValueConversion = conversionService.convert(parameters, context);
        } else {
            multiValueConversion = Optional.empty();
        }
        if (multiValueConversion.isPresent()) {
            return () -> multiValueConversion;
        }

        // If we need to bind all request params to command object
        // checks if the variable is defined with modifier char *, e.g. ?pojo*
        String parameterName = resolvedParameterName(argument);
        boolean bindAll = BasicHttpAttributes.getRouteMatchInfo(source)
            .map(umi -> {
                UriMatchVariable uriMatchVariable = umi.getVariableMap().get(parameterName);
                return uriMatchVariable != null && uriMatchVariable.isExploded();
            }).orElse(false);

        if (bindAll) {
            Object value;
            // Only maps and POJOs will "bindAll", lists work like normal
            if (Iterable.class.isAssignableFrom(argument.getType())) {
                value = doResolve(context, parameters);
                if (value == null) {
                    value = Collections.emptyList();
                }
            } else {
                value = parameters.asMap();
            }
            return doConvert(value, context);
        }
        return doBind(context, parameters, BindingResult.unsatisfied());
    }

    /**
     * Binds query parameters onto an introspected bean: each builder argument is populated from
     * the parameter of the same name (or its {@code @Bindable} defaultValue), then the bean is
     * built. Conversion or construction failures reject the context and report unsatisfied.
     */
    private BindingResult<T> bindPojo(ArgumentConversionContext<T> context,
                                      ConvertibleMultiValues<String> parameters,
                                      Argument<T> argument) {
        Optional<BeanIntrospection<T>> introspectionOpt = BeanIntrospector.SHARED.findIntrospection(argument.getType());
        if (introspectionOpt.isEmpty()) {
            return BindingResult.unsatisfied();
        }
        BeanIntrospection<T> introspection = introspectionOpt.get();
        BeanIntrospection.Builder<T> introspectionBuilder = introspection.builder();
        Argument<?>[] builderArguments = introspectionBuilder.getBuilderArguments();

        for (int index = 0; index < builderArguments.length; index++) {
            Argument<?> builderArg = builderArguments[index];
            String propertyName = builderArg.getName();
            List<String> values = parameters.getAll(propertyName);
            boolean hasNoValue = values.isEmpty();
            // The default value only applies when the parameter is entirely absent.
            @Nullable String defaultValue = hasNoValue ? builderArg
                .getAnnotationMetadata()
                .stringValue(Bindable.class, "defaultValue").orElse(null) : null;
            ArgumentConversionContext<?> conversionContext = context.with(builderArg);
            Optional<?> converted = hasNoValue ? conversionService.convert(defaultValue, conversionContext) : conversionService.convert(values, conversionContext);
            if (converted.isPresent()) {
                try {
                    @SuppressWarnings({"unchecked"})
                    Argument<Object> rawArg = (Argument<Object>) builderArg;
                    introspectionBuilder.with(index, rawArg, converted.get());
                } catch (Exception e) {
                    context.reject(builderArg, e);
                    return BindingResult.unsatisfied();
                }
            } else if (conversionContext.hasErrors()) {
                ConversionError conversionError = conversionContext.getLastError().orElse(null);
                if (conversionError != null) {
                    Exception cause = conversionError.getCause();
                    context.reject(builderArg, cause);
                    return BindingResult.unsatisfied();
                }
            }
        }
        try {
            T instance = introspectionBuilder.build();
            return () -> Optional.of(instance);
        } catch (Exception e) {
            context.reject(argument, e);
            return BindingResult.unsatisfied();
        }
    }

    @Override
    protected String getParameterName(Argument<T> argument) {
        return argument.getAnnotationMetadata().stringValue(QueryValue.class).orElse(argument.getName());
    }
}
|
QueryValueArgumentBinder
|
java
|
netty__netty
|
handler/src/test/java/io/netty/handler/ssl/SslErrorTest.java
|
{
"start": 14861,
"end": 15096
}
|
// Test-only CertificateException wrapper; carries the original failure as its cause.
// NOTE(review): presumably thrown from a test trust manager so the cause chain can be asserted
// on the peer — confirm against the enclosing test's usage.
class ____ extends CertificateException {
    private static final long serialVersionUID = -5816338303868751410L;
    TestCertificateException(Throwable cause) {
        super(cause);
    }
}
}
|
TestCertificateException
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/ReservedContainerCandidatesSelector.java
|
{
"start": 1890,
"end": 11708
}
|
class ____ {
  // Cost of preempting the selected containers on this node; used to order candidate nodes.
  private float preemptionCost;
  // Node hosting the reserved container this preemption attempt tries to satisfy.
  private FiCaSchedulerNode schedulerNode;
  // Running containers chosen for preemption on this node.
  private List<RMContainer> selectedContainers;

  public NodeForPreemption(float preemptionCost,
      FiCaSchedulerNode schedulerNode, List<RMContainer> selectedContainers) {
    this.preemptionCost = preemptionCost;
    this.schedulerNode = schedulerNode;
    this.selectedContainers = selectedContainers;
  }
}
  // NOTE(review): the boolean args to PreemptableResourceCalculator are positional flags
  // (presumably isReservedPreemptionCandidatesSelector=true and a second policy toggle=false)
  // — confirm against the PreemptableResourceCalculator constructor.
  ReservedContainerCandidatesSelector(
      CapacitySchedulerPreemptionContext preemptionContext) {
    super(preemptionContext);
    preemptableAmountCalculator = new PreemptableResourceCalculator(
        preemptionContext, true, false);
  }
  /**
   * Selects running containers to preempt so that reserved containers can be allocated.
   * Computes the ideal allocation, gathers per-queue/per-partition preemptable amounts, ranks
   * nodes with reservations by preemption cost, and marks the containers chosen on each node.
   */
  @Override
  public Map<ApplicationAttemptId, Set<RMContainer>> selectCandidates(
      Map<ApplicationAttemptId, Set<RMContainer>> selectedCandidates,
      Resource clusterResource,
      Resource totalPreemptedResourceAllowed) {
    Map<ApplicationAttemptId, Set<RMContainer>> curCandidates = new HashMap<>();
    // Calculate how much resources we need to preempt
    preemptableAmountCalculator.computeIdealAllocation(clusterResource,
        totalPreemptedResourceAllowed);

    // Get queue to preemptable resource by partition
    Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition =
        new HashMap<>();
    for (String leafQueue : preemptionContext.getLeafQueueNames()) {
      queueToPreemptableResourceByPartition.put(leafQueue,
          CapacitySchedulerPreemptionUtils
              .getResToObtainByPartitionForLeafQueue(preemptionContext,
                  leafQueue, clusterResource));
    }

    // Get list of nodes for preemption, ordered by preemption cost
    List<NodeForPreemption> nodesForPreemption = getNodesForPreemption(
        queueToPreemptableResourceByPartition, selectedCandidates,
        totalPreemptedResourceAllowed);

    for (NodeForPreemption nfp : nodesForPreemption) {
      RMContainer reservedContainer = nfp.schedulerNode.getReservedContainer();
      // The reservation may have been released since the node list was built.
      if (null == reservedContainer) {
        continue;
      }

      // readOnly=false here: quotas are actually deducted for the selected containers.
      NodeForPreemption preemptionResult = getPreemptionCandidatesOnNode(
          nfp.schedulerNode, queueToPreemptableResourceByPartition,
          selectedCandidates, totalPreemptedResourceAllowed, false);
      if (null != preemptionResult) {
        for (RMContainer c : preemptionResult.selectedContainers) {
          // Add to preemptMap
          CapacitySchedulerPreemptionUtils.addToPreemptMap(selectedCandidates,
              curCandidates, c.getApplicationAttemptId(), c);
          LOG.debug("{} Marked container={} from queue={} to be preemption"
              + " candidates", this.getClass().getName(), c.getContainerId(),
              c.getQueueName());
        }
      }
    }
    return curCandidates;
  }
private Resource getPreemptableResource(String queueName,
String partitionName,
Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition) {
Map<String, Resource> partitionToPreemptable =
queueToPreemptableResourceByPartition.get(queueName);
if (null == partitionToPreemptable) {
return null;
}
Resource preemptable = partitionToPreemptable.get(partitionName);
return preemptable;
}
  /**
   * Checks whether {@code required} can be preempted from the given queue/partition within both
   * the per-queue quota and the overall preemption budget. When {@code readOnly} is false, the
   * amount is deducted from both quotas (i.e. the preemption is committed).
   */
  private boolean tryToPreemptFromQueue(String queueName, String partitionName,
      Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition,
      Resource required, Resource totalPreemptionAllowed, boolean readOnly) {
    Resource preemptable = getPreemptableResource(queueName, partitionName,
        queueToPreemptableResourceByPartition);
    if (null == preemptable) {
      return false;
    }

    if (!Resources.fitsIn(rc, required, preemptable)) {
      return false;
    }

    if (!Resources.fitsIn(rc, required, totalPreemptionAllowed)) {
      return false;
    }

    if (!readOnly) {
      // Commit: shrink both the per-queue quota and the global budget in place.
      Resources.subtractFrom(preemptable, required);
      Resources.subtractFrom(totalPreemptionAllowed, required);
    }
    return true;
  }
/**
* Try to check if we can preempt resources for reserved container in given node
* @param node
* @param queueToPreemptableResourceByPartition it's a map of
* <queueName, <partition, preemptable-resource>>
* @param readOnly do we want to modify preemptable resource after we selected
* candidates
* @return NodeForPreemption if it's possible to preempt containers on the node
* to satisfy reserved resource
*/
private NodeForPreemption getPreemptionCandidatesOnNode(
FiCaSchedulerNode node,
Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition,
Map<ApplicationAttemptId, Set<RMContainer>> selectedCandidates,
Resource totalPreemptionAllowed, boolean readOnly) {
RMContainer reservedContainer = node.getReservedContainer();
if (reservedContainer == null) {
return null;
}
Resource available = Resources.clone(node.getUnallocatedResource());
Resource totalSelected = Resources.createResource(0);
List<RMContainer> sortedRunningContainers =
node.getCopiedListOfRunningContainers();
List<RMContainer> selectedContainers = new ArrayList<>();
Map<ContainerId, RMContainer> killableContainers =
node.getKillableContainers();
// Sort running container by launch time, we preferred to preempt recent
// launched preempt container
Collections.sort(sortedRunningContainers, new Comparator<RMContainer>() {
@Override public int compare(RMContainer o1, RMContainer o2) {
return -1 * o1.getContainerId().compareTo(o2.getContainerId());
}
});
// First check: can we preempt containers to allocate the
// reservedContainer?
boolean canAllocateReservedContainer = false;
// At least, we can get available + killable resources from this node
Resource cur = Resources.add(available, node.getTotalKillableResources());
String partition = node.getPartition();
// Avoid preempt any container if required <= available + killable
if (Resources.fitsIn(rc, reservedContainer.getReservedResource(), cur)) {
return null;
}
// Extra cost of am container preemption
float amPreemptionCost = 0f;
for (RMContainer c : sortedRunningContainers) {
String containerQueueName = c.getQueueName();
// Skip container if it is already marked killable
if (killableContainers.containsKey(c.getContainerId())) {
continue;
}
// An alternative approach is add a "penalty cost" if AM container is
// selected. Here for safety, avoid preempt AM container in any cases
if (c.isAMContainer()) {
LOG.debug("Skip selecting AM container on host={} AM container={}",
node.getNodeID(), c.getContainerId());
continue;
}
// Can we preempt container c?
// Check if we have quota to preempt this container
boolean canPreempt = tryToPreemptFromQueue(containerQueueName, partition,
queueToPreemptableResourceByPartition, c.getAllocatedResource(),
totalPreemptionAllowed, readOnly);
// If we can, add to selected container, and change resource accordingly.
if (canPreempt) {
if (!CapacitySchedulerPreemptionUtils.isContainerAlreadySelected(c,
selectedCandidates)) {
if (!readOnly) {
selectedContainers.add(c);
}
Resources.addTo(totalSelected, c.getAllocatedResource());
}
Resources.addTo(cur, c.getAllocatedResource());
if (Resources.fitsIn(rc,
reservedContainer.getReservedResource(), cur)) {
canAllocateReservedContainer = true;
break;
}
}
}
if (!canAllocateReservedContainer) {
if (!readOnly) {
// Revert queue preemption quotas
for (RMContainer c : selectedContainers) {
Resource res = getPreemptableResource(c.getQueueName(), partition,
queueToPreemptableResourceByPartition);
if (null == res) {
// This shouldn't happen in normal cases, one possible cause is
// container moved to different queue while executing preemption logic.
// Ignore such failures.
continue;
}
Resources.addTo(res, c.getAllocatedResource());
}
}
return null;
}
float ratio = Resources.ratio(rc, totalSelected,
reservedContainer.getReservedResource());
// Compute preemption score
NodeForPreemption nfp = new NodeForPreemption(ratio + amPreemptionCost,
node, selectedContainers);
return nfp;
}
private List<NodeForPreemption> getNodesForPreemption(
Map<String, Map<String, Resource>> queueToPreemptableResourceByPartition,
Map<ApplicationAttemptId, Set<RMContainer>> selectedCandidates,
Resource totalPreemptionAllowed) {
List<NodeForPreemption> nfps = new ArrayList<>();
// get nodes have reserved container
for (FiCaSchedulerNode node : preemptionContext.getScheduler()
.getAllNodes()) {
if (node.getReservedContainer() != null) {
NodeForPreemption nfp = getPreemptionCandidatesOnNode(node,
queueToPreemptableResourceByPartition, selectedCandidates,
totalPreemptionAllowed, true);
if (null != nfp) {
// Null means we cannot preempt containers on the node to satisfy
// reserved container
nfps.add(nfp);
}
}
}
// Return sorted node-for-preemptions (by cost)
Collections.sort(nfps, new Comparator<NodeForPreemption>() {
@Override
public int compare(NodeForPreemption o1, NodeForPreemption o2) {
return Float.compare(o1.preemptionCost, o2.preemptionCost);
}
});
return nfps;
}
}
|
NodeForPreemption
|
java
|
spring-projects__spring-boot
|
module/spring-boot-actuator-autoconfigure/src/test/java/org/springframework/boot/actuate/autoconfigure/web/ManagementContextConfigurationTests.java
|
{
"start": 1755,
"end": 1873
}
|
class ____ {
}
@ManagementContextConfiguration(proxyBeanMethods = false)
static
|
DefaultManagementContextConfiguration
|
java
|
elastic__elasticsearch
|
x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/SchemaUtilTests.java
|
{
"start": 2724,
"end": 12055
}
|
class ____ extends ESTestCase {
public void testInsertNestedObjectMappings() {
Map<String, String> fieldMappings = new HashMap<>();
// creates: a.b, a
fieldMappings.put("a.b.c", "long");
fieldMappings.put("a.b.d", "double");
// creates: c.b, c
fieldMappings.put("c.b.a", "double");
// creates: c.d
fieldMappings.put("c.d.e", "object");
fieldMappings.put("d", "long");
fieldMappings.put("e.f.g", "long");
// cc: already there
fieldMappings.put("e.f", "object");
// cc: already there but different type (should not be possible)
fieldMappings.put("e", "long");
// cc: start with . (should not be possible)
fieldMappings.put(".x", "long");
// cc: start and ends with . (should not be possible), creates: .y
fieldMappings.put(".y.", "long");
// cc: ends with . (should not be possible), creates: .z
fieldMappings.put(".z.", "long");
SchemaUtil.insertNestedObjectMappings(fieldMappings);
assertEquals(18, fieldMappings.size());
assertEquals("long", fieldMappings.get("a.b.c"));
assertEquals("object", fieldMappings.get("a.b"));
assertEquals("double", fieldMappings.get("a.b.d"));
assertEquals("object", fieldMappings.get("a"));
assertEquals("object", fieldMappings.get("c.d"));
assertEquals("object", fieldMappings.get("e.f"));
assertEquals("long", fieldMappings.get("e"));
assertEquals("object", fieldMappings.get(".y"));
assertEquals("object", fieldMappings.get(".z"));
assertFalse(fieldMappings.containsKey("."));
assertFalse(fieldMappings.containsKey(""));
}
public void testConvertToIntegerTypeIfNeeded() {
assertEquals(33L, SchemaUtil.dropFloatingPointComponentIfTypeRequiresIt("unsigned_long", 33.0));
assertEquals(33L, SchemaUtil.dropFloatingPointComponentIfTypeRequiresIt("long", 33.0));
assertEquals(33.0, SchemaUtil.dropFloatingPointComponentIfTypeRequiresIt("double", 33.0));
assertEquals(33.0, SchemaUtil.dropFloatingPointComponentIfTypeRequiresIt("half_float", 33.0));
assertEquals(33.0, SchemaUtil.dropFloatingPointComponentIfTypeRequiresIt("unknown", 33.0));
assertEquals(33.0, SchemaUtil.dropFloatingPointComponentIfTypeRequiresIt(null, 33.0));
Object value = SchemaUtil.dropFloatingPointComponentIfTypeRequiresIt("unsigned_long", 1.8446744073709551615E19);
assertThat(value, instanceOf(BigInteger.class));
assertEquals(new BigInteger("18446744073709551615").doubleValue(), ((BigInteger) value).doubleValue(), 0.0);
}
public void testGetSourceFieldMappings() throws InterruptedException {
try (var threadPool = createThreadPool()) {
final var client = new FieldCapsMockClient(threadPool, emptySet());
// fields is null
this.<Map<String, String>>assertAsync(
listener -> SchemaUtil.getSourceFieldMappings(
client,
emptyMap(),
new SourceConfig(new String[] { "index-1", "index-2" }),
null,
listener
),
mappings -> assertThat(mappings, anEmptyMap())
);
// fields is empty
this.<Map<String, String>>assertAsync(
listener -> SchemaUtil.getSourceFieldMappings(
client,
emptyMap(),
new SourceConfig(new String[] { "index-1", "index-2" }),
new String[] {},
listener
),
mappings -> assertThat(mappings, anEmptyMap())
);
// good use
this.<Map<String, String>>assertAsync(
listener -> SchemaUtil.getSourceFieldMappings(
client,
emptyMap(),
new SourceConfig(new String[] { "index-1", "index-2" }),
new String[] { "field-1", "field-2" },
listener
),
mappings -> assertThat(mappings, matchesMap(Map.of("field-1", "long", "field-2", "long")))
);
}
}
public void testGetSourceFieldMappingsWithRuntimeMappings() throws InterruptedException {
Map<String, Object> runtimeMappings = Map.of("field-2", Map.of("type", "keyword"), "field-3", Map.of("type", "boolean"));
try (var threadPool = createThreadPool()) {
final var client = new FieldCapsMockClient(threadPool, emptySet());
this.<Map<String, String>>assertAsync(
listener -> SchemaUtil.getSourceFieldMappings(
client,
emptyMap(),
new SourceConfig(new String[] { "index-1", "index-2" }, QueryConfig.matchAll(), runtimeMappings),
new String[] { "field-1", "field-2" },
listener
),
mappings -> {
assertThat(mappings, is(aMapWithSize(3)));
assertThat(
mappings,
allOf(hasEntry("field-1", "long"), hasEntry("field-2", "keyword"), hasEntry("field-3", "boolean"))
);
}
);
}
}
public void testIsNumericType() {
assertFalse(SchemaUtil.isNumericType(null));
assertFalse(SchemaUtil.isNumericType("non-existing"));
assertTrue(SchemaUtil.isNumericType("double"));
assertTrue(SchemaUtil.isNumericType("integer"));
assertTrue(SchemaUtil.isNumericType("long"));
assertFalse(SchemaUtil.isNumericType("date"));
assertFalse(SchemaUtil.isNumericType("date_nanos"));
assertFalse(SchemaUtil.isNumericType("keyword"));
}
public void testIsDateType() {
assertFalse(SchemaUtil.isDateType(null));
assertFalse(SchemaUtil.isDateType("non-existing"));
assertFalse(SchemaUtil.isDateType("double"));
assertFalse(SchemaUtil.isDateType("integer"));
assertFalse(SchemaUtil.isDateType("long"));
assertTrue(SchemaUtil.isDateType("date"));
assertTrue(SchemaUtil.isDateType("date_nanos"));
assertFalse(SchemaUtil.isDateType("keyword"));
}
public void testDeduceMappings_AllMappingsArePresent() throws InterruptedException {
testDeduceMappings(
emptySet(),
Map.of("by-day", "long", "by-user", "long", "by-business", "long", "timestamp", "long", "review_score", "double")
);
}
public void testDeduceMappings_GroupByFieldMappingIsMissing() throws InterruptedException {
testDeduceMappings(
Set.of("business_id"),
// Note that the expected mapping of the "by-business" target field is "keyword"
Map.of("by-day", "long", "by-user", "long", "by-business", "keyword", "timestamp", "long", "review_score", "double")
);
}
public void testDeduceMappings_AggregationFieldMappingIsMissing() throws InterruptedException {
testDeduceMappings(
Set.of("review_score"),
Map.of("by-day", "long", "by-user", "long", "by-business", "long", "timestamp", "long", "review_score", "double")
);
}
private void testDeduceMappings(Set<String> fieldsWithoutMappings, Map<String, String> expectedMappings) throws InterruptedException {
try (var threadPool = createThreadPool()) {
final var client = new FieldCapsMockClient(threadPool, fieldsWithoutMappings);
var groups = Map.of(
"by-day",
new DateHistogramGroupSource(
"timestamp",
null,
false,
new DateHistogramGroupSource.CalendarInterval(DateHistogramInterval.DAY),
null,
null
),
"by-user",
new TermsGroupSource("user_id", null, false),
"by-business",
new TermsGroupSource("business_id", null, false)
);
var aggs = AggregatorFactories.builder()
.addAggregator(AggregationBuilders.avg("review_score").field("stars"))
.addAggregator(AggregationBuilders.max("timestamp").field("timestamp"));
var groupConfig = new GroupConfig(emptyMap() /* unused anyway */, groups);
var aggregationConfig = new AggregationConfig(emptyMap() /* unused anyway */, aggs);
var pivotConfig = new PivotConfig(groupConfig, aggregationConfig, null);
this.<Map<String, String>>assertAsync(
listener -> SchemaUtil.deduceMappings(
client,
emptyMap(),
"my-transform",
new SettingsConfig.Builder().setDeduceMappings(randomBoolean() ? randomBoolean() : null).build(),
pivotConfig,
new SourceConfig(new String[] { "index-1", "index-2" }),
listener
),
mappings -> assertThat(mappings, is(equalTo(expectedMappings)))
);
}
}
private static
|
SchemaUtilTests
|
java
|
spring-projects__spring-security
|
acl/src/main/java/org/springframework/security/acls/domain/AclAuthorizationStrategy.java
|
{
"start": 913,
"end": 1081
}
|
interface ____ {
int CHANGE_OWNERSHIP = 0;
int CHANGE_AUDITING = 1;
int CHANGE_GENERAL = 2;
void securityCheck(Acl acl, int changeType);
}
|
AclAuthorizationStrategy
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/collect/AbstractFilteredMapTest.java
|
{
"start": 1061,
"end": 7357
}
|
class ____ extends TestCase {
private static final Predicate<@Nullable String> NOT_LENGTH_3 =
input -> input == null || input.length() != 3;
private static final Predicate<@Nullable Integer> EVEN = input -> input == null || input % 2 == 0;
static final Predicate<Entry<String, Integer>> CORRECT_LENGTH =
input -> input.getKey().length() == input.getValue();
abstract Map<String, Integer> createUnfiltered();
public void testFilteredKeysIllegalPut() {
Map<String, Integer> unfiltered = createUnfiltered();
Map<String, Integer> filtered = Maps.filterKeys(unfiltered, NOT_LENGTH_3);
filtered.put("a", 1);
filtered.put("b", 2);
assertEquals(ImmutableMap.of("a", 1, "b", 2), filtered);
assertThrows(IllegalArgumentException.class, () -> filtered.put("yyy", 3));
}
public void testFilteredKeysIllegalPutAll() {
Map<String, Integer> unfiltered = createUnfiltered();
Map<String, Integer> filtered = Maps.filterKeys(unfiltered, NOT_LENGTH_3);
filtered.put("a", 1);
filtered.put("b", 2);
assertEquals(ImmutableMap.of("a", 1, "b", 2), filtered);
assertThrows(
IllegalArgumentException.class,
() -> filtered.putAll(ImmutableMap.of("c", 3, "zzz", 4, "b", 5)));
assertEquals(ImmutableMap.of("a", 1, "b", 2), filtered);
}
public void testFilteredKeysFilteredReflectsBackingChanges() {
Map<String, Integer> unfiltered = createUnfiltered();
Map<String, Integer> filtered = Maps.filterKeys(unfiltered, NOT_LENGTH_3);
unfiltered.put("two", 2);
unfiltered.put("three", 3);
unfiltered.put("four", 4);
assertEquals(ImmutableMap.of("two", 2, "three", 3, "four", 4), unfiltered);
assertEquals(ImmutableMap.of("three", 3, "four", 4), filtered);
unfiltered.remove("three");
assertEquals(ImmutableMap.of("two", 2, "four", 4), unfiltered);
assertEquals(ImmutableMap.of("four", 4), filtered);
unfiltered.clear();
assertEquals(ImmutableMap.of(), unfiltered);
assertEquals(ImmutableMap.of(), filtered);
}
public void testFilteredValuesIllegalPut() {
Map<String, Integer> unfiltered = createUnfiltered();
Map<String, Integer> filtered = Maps.filterValues(unfiltered, EVEN);
filtered.put("a", 2);
unfiltered.put("b", 4);
unfiltered.put("c", 5);
assertEquals(ImmutableMap.of("a", 2, "b", 4), filtered);
assertThrows(IllegalArgumentException.class, () -> filtered.put("yyy", 3));
assertEquals(ImmutableMap.of("a", 2, "b", 4), filtered);
}
public void testFilteredValuesIllegalPutAll() {
Map<String, Integer> unfiltered = createUnfiltered();
Map<String, Integer> filtered = Maps.filterValues(unfiltered, EVEN);
filtered.put("a", 2);
unfiltered.put("b", 4);
unfiltered.put("c", 5);
assertEquals(ImmutableMap.of("a", 2, "b", 4), filtered);
assertThrows(
IllegalArgumentException.class,
() -> filtered.putAll(ImmutableMap.of("c", 4, "zzz", 5, "b", 6)));
assertEquals(ImmutableMap.of("a", 2, "b", 4), filtered);
}
public void testFilteredValuesIllegalSetValue() {
Map<String, Integer> unfiltered = createUnfiltered();
Map<String, Integer> filtered = Maps.filterValues(unfiltered, EVEN);
filtered.put("a", 2);
filtered.put("b", 4);
assertEquals(ImmutableMap.of("a", 2, "b", 4), filtered);
Entry<String, Integer> entry = filtered.entrySet().iterator().next();
assertThrows(IllegalArgumentException.class, () -> entry.setValue(5));
assertEquals(ImmutableMap.of("a", 2, "b", 4), filtered);
}
public void testFilteredValuesClear() {
Map<String, Integer> unfiltered = createUnfiltered();
unfiltered.put("one", 1);
unfiltered.put("two", 2);
unfiltered.put("three", 3);
unfiltered.put("four", 4);
Map<String, Integer> filtered = Maps.filterValues(unfiltered, EVEN);
assertEquals(ImmutableMap.of("one", 1, "two", 2, "three", 3, "four", 4), unfiltered);
assertEquals(ImmutableMap.of("two", 2, "four", 4), filtered);
filtered.clear();
assertEquals(ImmutableMap.of("one", 1, "three", 3), unfiltered);
assertTrue(filtered.isEmpty());
}
public void testFilteredEntriesIllegalPut() {
Map<String, Integer> unfiltered = createUnfiltered();
unfiltered.put("cat", 3);
unfiltered.put("dog", 2);
unfiltered.put("horse", 5);
Map<String, Integer> filtered = Maps.filterEntries(unfiltered, CORRECT_LENGTH);
assertEquals(ImmutableMap.of("cat", 3, "horse", 5), filtered);
filtered.put("chicken", 7);
assertEquals(ImmutableMap.of("cat", 3, "horse", 5, "chicken", 7), filtered);
assertThrows(IllegalArgumentException.class, () -> filtered.put("cow", 7));
assertEquals(ImmutableMap.of("cat", 3, "horse", 5, "chicken", 7), filtered);
}
public void testFilteredEntriesIllegalPutAll() {
Map<String, Integer> unfiltered = createUnfiltered();
unfiltered.put("cat", 3);
unfiltered.put("dog", 2);
unfiltered.put("horse", 5);
Map<String, Integer> filtered = Maps.filterEntries(unfiltered, CORRECT_LENGTH);
assertEquals(ImmutableMap.of("cat", 3, "horse", 5), filtered);
filtered.put("chicken", 7);
assertEquals(ImmutableMap.of("cat", 3, "horse", 5, "chicken", 7), filtered);
assertThrows(
IllegalArgumentException.class,
() -> filtered.putAll(ImmutableMap.of("sheep", 5, "cow", 7)));
assertEquals(ImmutableMap.of("cat", 3, "horse", 5, "chicken", 7), filtered);
}
public void testFilteredEntriesObjectPredicate() {
Map<String, Integer> unfiltered = createUnfiltered();
unfiltered.put("cat", 3);
unfiltered.put("dog", 2);
unfiltered.put("horse", 5);
Predicate<Object> predicate = Predicates.alwaysFalse();
Map<String, Integer> filtered = Maps.filterEntries(unfiltered, predicate);
assertTrue(filtered.isEmpty());
}
public void testFilteredEntriesWildCardEntryPredicate() {
Map<String, Integer> unfiltered = createUnfiltered();
unfiltered.put("cat", 3);
unfiltered.put("dog", 2);
unfiltered.put("horse", 5);
Predicate<Entry<?, ?>> predicate = e -> e.getKey().equals("cat") || e.getValue().equals(2);
Map<String, Integer> filtered = Maps.filterEntries(unfiltered, predicate);
assertEquals(ImmutableMap.of("cat", 3, "dog", 2), filtered);
}
}
|
AbstractFilteredMapTest
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_3100/Issue3150.java
|
{
"start": 1467,
"end": 1756
}
|
class ____ {
private String name;
public Category(String name){
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
}
|
Category
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_doesNotContain_CharSequence_Test.java
|
{
"start": 927,
"end": 1296
}
|
class ____ extends CharSequenceAssertBaseTest {
@Override
protected CharSequenceAssert invoke_api_method() {
return assertions.doesNotContain("Luke");
}
@Override
protected void verify_internal_effects() {
verify(strings).assertDoesNotContain(getInfo(assertions), getActual(assertions), "Luke");
}
}
|
CharSequenceAssert_doesNotContain_CharSequence_Test
|
java
|
google__dagger
|
javatests/dagger/internal/codegen/ConflictingEntryPointsTest.java
|
{
"start": 3175,
"end": 3382
}
|
interface ____<T> {",
" T foo();",
"}");
Source base2 =
CompilerTests.javaSource(
"test.Base2", //
"package test;",
"",
"
|
Base1
|
java
|
playframework__playframework
|
core/play/src/test/java/play/utils/BigNumericJavaPojo.java
|
{
"start": 383,
"end": 1376
}
|
class ____ {
private final BigInteger intValue;
private final BigDecimal floatValue;
@JsonCreator
public BigNumericJavaPojo(
@JsonProperty("intValue") BigInteger intValue,
@JsonProperty("floatValue") BigDecimal floatValue) {
this.intValue = intValue;
this.floatValue = floatValue;
}
public BigInteger getIntValue() {
return intValue;
}
public BigDecimal getFloatValue() {
return floatValue;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
BigNumericJavaPojo that = (BigNumericJavaPojo) o;
return Objects.equals(intValue, that.intValue) && Objects.equals(floatValue, that.floatValue);
}
@Override
public int hashCode() {
return Objects.hash(intValue, floatValue);
}
@Override
public String toString() {
return "BigNumericJavaPojo{" + "intValue=" + intValue + ", floatValue=" + floatValue + '}';
}
}
|
BigNumericJavaPojo
|
java
|
spring-projects__spring-boot
|
module/spring-boot-data-jpa/src/test/java/org/springframework/boot/data/jpa/autoconfigure/domain/country/Country.java
|
{
"start": 926,
"end": 1309
}
|
class ____ implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue
private Long id;
@Audited
@Column
private String name;
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
}
|
Country
|
java
|
bumptech__glide
|
samples/giphy/src/main/java/com/bumptech/glide/samples/giphy/MainActivity.java
|
{
"start": 3055,
"end": 5768
}
|
class ____ extends RecyclerView.Adapter<GifViewHolder>
implements ListPreloader.PreloadModelProvider<Api.GifResult> {
private static final Api.GifResult[] EMPTY_RESULTS = new Api.GifResult[0];
private final Activity activity;
private final RequestBuilder<Drawable> requestBuilder;
private final ViewPreloadSizeProvider<Api.GifResult> preloadSizeProvider;
private Api.GifResult[] results = EMPTY_RESULTS;
GifAdapter(
Activity activity,
RequestBuilder<Drawable> requestBuilder,
ViewPreloadSizeProvider<Api.GifResult> preloadSizeProvider) {
this.activity = activity;
this.requestBuilder = requestBuilder;
this.preloadSizeProvider = preloadSizeProvider;
}
void setResults(Api.GifResult[] results) {
if (results != null) {
this.results = results;
} else {
this.results = EMPTY_RESULTS;
}
notifyDataSetChanged();
}
@Override
public GifViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = activity.getLayoutInflater().inflate(R.layout.gif_list_item, parent, false);
return new GifViewHolder(view);
}
@Override
public void onBindViewHolder(GifViewHolder holder, int position) {
final Api.GifResult result = results[position];
holder.gifView.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View view) {
ClipboardManager clipboard =
(ClipboardManager) activity.getSystemService(Context.CLIPBOARD_SERVICE);
ClipData clip = ClipData.newPlainText("giphy_url", result.images.fixed_height.url);
Preconditions.checkNotNull(clipboard).setPrimaryClip(clip);
Intent fullscreenIntent = FullscreenActivity.getIntent(activity, result);
activity.startActivity(fullscreenIntent);
}
});
// clearOnDetach let's us stop animating GifDrawables that RecyclerView hasn't yet recycled
// but that are currently off screen.
requestBuilder.load(result).into(holder.gifView).clearOnDetach();
preloadSizeProvider.setView(holder.gifView);
}
@Override
public long getItemId(int i) {
return 0;
}
@Override
public int getItemCount() {
return results.length;
}
@NonNull
@Override
public List<Api.GifResult> getPreloadItems(int position) {
return Collections.singletonList(results[position]);
}
@Nullable
@Override
public RequestBuilder<Drawable> getPreloadRequestBuilder(@NonNull Api.GifResult item) {
return requestBuilder.load(item);
}
}
private static
|
GifAdapter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/array/PrimitiveByteArrayIdCollectionTest.java
|
{
"start": 1642,
"end": 3376
}
|
class ____ {
@BeforeEach
public void prepare(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent entity = new Parent();
entity.id = new byte[] {
(byte) ( 1 ),
(byte) ( 2 ),
(byte) ( 3 ),
(byte) ( 4 )
};
entity.name = "Simple name";
for ( int j = 1; j <= 2; j++ ) {
Child child = new Child();
child.id = j;
entity.name = "Child name " + j;
child.parent = entity;
entity.children.add(child);
}
session.persist( entity );
}
);
}
@AfterEach
public void cleanup(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
@JiraKey(value = "HHH-7180")
public void testReattach(SessionFactoryScope scope) {
// Since reattachment was removed in ORM 7,
// but the code path to trigger the bug is still reachable through removing a detached entity,
// construct a scenario that shows a problem
final Parent parent = scope.fromTransaction(
session -> session.createQuery( "from Parent p", Parent.class ).getSingleResult()
);
// Copy the byte-array id which will make it a different instance, yet equal to the collection key
parent.id = parent.id.clone();
final StatisticsImplementor statistics = scope.getSessionFactory().getStatistics();
statistics.setStatisticsEnabled( true );
statistics.clear();
scope.inTransaction(
session -> {
session.remove( parent );
session.flush();
// The collection will be removed twice if the collection key can't be matched to the entity id
assertEquals( 1L, statistics.getCollectionRemoveCount() );
}
);
}
@Entity(name = "Parent")
public static
|
PrimitiveByteArrayIdCollectionTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/odps/OdpsAsNumberFirstTest.java
|
{
"start": 121,
"end": 392
}
|
class ____ extends TestCase {
public void test_0() throws Exception {
String sql = "select id as 39dd"
+ "\n from t1";
assertEquals("SELECT id AS 39dd"
+ "\nFROM t1", SQLUtils.formatOdps(sql));
}
}
|
OdpsAsNumberFirstTest
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/util/internal/Status.java
|
{
"start": 909,
"end": 985
}
|
class ____ be considered internal to the Log4j implementation.
*/
public
|
should
|
java
|
grpc__grpc-java
|
census/src/main/java/io/grpc/census/InternalCensusTracingAccessor.java
|
{
"start": 923,
"end": 1818
}
|
class ____ {
// Prevent instantiation.
private InternalCensusTracingAccessor() {
}
/**
* Returns a {@link ClientInterceptor} with default tracing implementation.
*/
public static ClientInterceptor getClientInterceptor() {
CensusTracingModule censusTracing =
new CensusTracingModule(
Tracing.getTracer(),
Tracing.getPropagationComponent().getBinaryFormat());
return censusTracing.getClientInterceptor();
}
/**
* Returns a {@link ServerStreamTracer.Factory} with default tracing implementation.
*/
public static ServerStreamTracer.Factory getServerStreamTracerFactory() {
CensusTracingModule censusTracing =
new CensusTracingModule(
Tracing.getTracer(),
Tracing.getPropagationComponent().getBinaryFormat());
return censusTracing.getServerTracerFactory();
}
}
|
InternalCensusTracingAccessor
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/checkpointing/RegionFailoverITCase.java
|
{
"start": 9366,
"end": 15906
}
|
class ____
extends RichParallelSourceFunction<Tuple2<Integer, Integer>>
implements CheckpointedFunction {
private static final long serialVersionUID = 1L;
private final long numElements;
private final long checkpointLatestAt;
private int index = -1;
private int lastRegionIndex = -1;
private volatile boolean isRunning = true;
private ListState<Integer> listState;
private static final ListStateDescriptor<Integer> stateDescriptor =
new ListStateDescriptor<>("list-1", Integer.class);
private ListState<Integer> unionListState;
private static final ListStateDescriptor<Integer> unionStateDescriptor =
new ListStateDescriptor<>("list-2", Integer.class);
StringGeneratingSourceFunction(long numElements, long checkpointLatestAt) {
this.numElements = numElements;
this.checkpointLatestAt = checkpointLatestAt;
}
@Override
public void run(SourceContext<Tuple2<Integer, Integer>> ctx) throws Exception {
if (index < 0) {
// not been restored, so initialize
index = 0;
}
int subTaskIndex = getRuntimeContext().getTaskInfo().getIndexOfThisSubtask();
while (isRunning && index < numElements) {
synchronized (ctx.getCheckpointLock()) {
int key = index / 2;
int forwardTaskIndex =
KeyGroupRangeAssignment.assignKeyToParallelOperator(
key, MAX_PARALLELISM, NUM_OF_REGIONS);
// pre-partition output keys
if (forwardTaskIndex == subTaskIndex) {
// we would send data with the same key twice.
ctx.collect(Tuple2.of(key, index));
}
index += 1;
}
if (numCompletedCheckpoints.get() < 3) {
// not yet completed enough checkpoints, so slow down
if (index < checkpointLatestAt) {
// mild slow down
Thread.sleep(1);
} else {
// wait until the checkpoints are completed
while (isRunning && numCompletedCheckpoints.get() < 3) {
Thread.sleep(300);
}
}
}
if (jobFailedCnt.get() < NUM_OF_RESTARTS) {
// slow down if job has not failed for 'NUM_OF_RESTARTS' times.
Thread.sleep(1);
}
}
}
@Override
public void cancel() {
isRunning = false;
}
@Override
public void snapshotState(FunctionSnapshotContext context) throws Exception {
int indexOfThisSubtask = getRuntimeContext().getTaskInfo().getIndexOfThisSubtask();
if (indexOfThisSubtask != 0) {
listState.update(Collections.singletonList(index));
if (indexOfThisSubtask == NUM_OF_REGIONS - 1) {
lastRegionIndex = index;
snapshotIndicesOfSubTask.put(context.getCheckpointId(), lastRegionIndex);
}
}
unionListState.update(Collections.singletonList(indexOfThisSubtask));
}
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
int indexOfThisSubtask = getRuntimeContext().getTaskInfo().getIndexOfThisSubtask();
if (context.isRestored()) {
restoredState = true;
unionListState =
context.getOperatorStateStore().getUnionListState(unionStateDescriptor);
Set<Integer> actualIndices =
StreamSupport.stream(unionListState.get().spliterator(), false)
.collect(Collectors.toSet());
if (getRuntimeContext()
.getTaskInfo()
.getTaskName()
.contains(SINGLE_REGION_SOURCE_NAME)) {
Assert.assertTrue(
CollectionUtils.isEqualCollection(
EXPECTED_INDICES_SINGLE_REGION, actualIndices));
} else {
Assert.assertTrue(
CollectionUtils.isEqualCollection(
EXPECTED_INDICES_MULTI_REGION, actualIndices));
}
if (indexOfThisSubtask == 0) {
listState = context.getOperatorStateStore().getListState(stateDescriptor);
Assert.assertTrue(
"list state should be empty for subtask-0",
((List<Integer>) listState.get()).isEmpty());
} else {
listState = context.getOperatorStateStore().getListState(stateDescriptor);
Assert.assertTrue(
"list state should not be empty for subtask-" + indexOfThisSubtask,
((List<Integer>) listState.get()).size() > 0);
if (indexOfThisSubtask == NUM_OF_REGIONS - 1) {
index = listState.get().iterator().next();
if (index
!= snapshotIndicesOfSubTask.get(lastCompletedCheckpointId.get())) {
throw new RuntimeException(
"Test failed due to unexpected recovered index: "
+ index
+ ", while last completed checkpoint record index: "
+ snapshotIndicesOfSubTask.get(
lastCompletedCheckpointId.get()));
}
}
}
} else {
unionListState =
context.getOperatorStateStore().getUnionListState(unionStateDescriptor);
if (indexOfThisSubtask != 0) {
listState = context.getOperatorStateStore().getListState(stateDescriptor);
}
}
}
}
private static
|
StringGeneratingSourceFunction
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/PrivateConstructorForUtilityClassTest.java
|
{
"start": 2167,
"end": 2582
}
|
class ____ extends Foo<Boolean> {
private static final long serialVersionUID = 123456789012L;
}
}
""")
.expectUnchanged()
.doTest();
}
@Test
public void implementingClassesGetLeftAlone() {
testHelper
.addInputLines(
"in/Foo.java",
"""
import java.io.Serializable;
public
|
BooleanFoo
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cache/FullLayoutQueryCacheTest.java
|
{
"start": 4477,
"end": 4669
}
|
class ____ extends BaseEntity {
@OneToOne
private FirstEntity firstEntity;
public SecondEntity() {
}
public SecondEntity(String baseName) {
super( baseName );
}
}
}
|
SecondEntity
|
java
|
google__dagger
|
hilt-compiler/main/java/dagger/hilt/android/processor/internal/androidentrypoint/Generators.java
|
{
"start": 20656,
"end": 21900
}
|
interface ____). But it is probably better to be consistent about only optionally
// injected classes extend the interface.
if (metadata.allowsOptionalInjection()) {
typeSpecBuilder.addSuperinterface(AndroidClassNames.INJECTED_BY_HILT);
}
}
typeSpecBuilder.addMethod(methodSpecBuilder.build());
}
private static CodeBlock getParentCodeBlock(AndroidEntryPointMetadata metadata) {
switch (metadata.androidType()) {
case ACTIVITY:
case SERVICE:
return CodeBlock.of("$T.getApplication(getApplicationContext())", ClassNames.CONTEXTS);
case FRAGMENT:
return CodeBlock.of("getHost()");
case VIEW:
return CodeBlock.of(
"$L.maybeGetParentComponentManager()", componentManagerCallBlock(metadata));
case BROADCAST_RECEIVER:
// Broadcast receivers receive a "context" parameter that make it so this code block
// isn't really usable anywhere
throw new AssertionError("BroadcastReceiver types should not get here");
default:
throw new AssertionError();
}
}
/**
* Returns the call to {@code generatedComponent()} with casts if needed.
*
* <p>A cast is required when the root generated Hilt
|
anyway
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/common/utils/MemberUtils.java
|
{
"start": 1098,
"end": 2400
}
|
interface ____ {
/**
* check the specified {@link Member member} is static or not ?
*
* @param member {@link Member} instance, e.g, {@link Constructor}, {@link Method} or {@link Field}
* @return Iff <code>member</code> is static one, return <code>true</code>, or <code>false</code>
*/
static boolean isStatic(Member member) {
return member != null && Modifier.isStatic(member.getModifiers());
}
/**
* check the specified {@link Member member} is private or not ?
*
* @param member {@link Member} instance, e.g, {@link Constructor}, {@link Method} or {@link Field}
* @return Iff <code>member</code> is private one, return <code>true</code>, or <code>false</code>
*/
static boolean isPrivate(Member member) {
return member != null && Modifier.isPrivate(member.getModifiers());
}
/**
* check the specified {@link Member member} is public or not ?
*
* @param member {@link Member} instance, e.g, {@link Constructor}, {@link Method} or {@link Field}
* @return Iff <code>member</code> is public one, return <code>true</code>, or <code>false</code>
*/
static boolean isPublic(Member member) {
return member != null && Modifier.isPublic(member.getModifiers());
}
}
|
MemberUtils
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/files/SinglePendingCommit.java
|
{
"start": 2787,
"end": 13273
}
|
class ____ extends PersistentCommitData<SinglePendingCommit>
implements Iterable<UploadEtag> {
/**
* Serialization ID: {@value}.
*/
private static final long serialVersionUID = 0x10000 + VERSION;
/** Version marker. */
private int version = VERSION;
/**
* This is the filename of the pending file itself.
* Used during processing; it's persistent value, if any, is ignored.
*/
private String filename;
/** Path URI of the destination. */
private String uri = "";
/** ID of the upload. */
private String uploadId;
/** Destination bucket. */
private String bucket;
/** Destination key in the bucket. */
private String destinationKey;
/** When was the upload created? */
private long created;
/** When was the upload saved? */
private long saved;
/** timestamp as date; no expectation of parseability. */
private String date;
/** Job ID, if known. */
private String jobId = "";
/** Task ID, if known. */
private String taskId = "";
/** Arbitrary notes. */
private String text = "";
/** Ordered list of etags. */
private List<UploadEtag> etags;
/**
* Any custom extra data committer subclasses may choose to add.
*/
private Map<String, String> extraData = new HashMap<>(0);
/**
* IOStatistics.
*/
@JsonProperty("iostatistics")
private IOStatisticsSnapshot iostats = new IOStatisticsSnapshot();
/** Destination file size. */
private long length;
public SinglePendingCommit() {
}
/**
* Get a JSON serializer for this class.
* @return a serializer.
*/
public static JsonSerialization<SinglePendingCommit> serializer() {
return new JsonSerialization<>(SinglePendingCommit.class, false, false);
}
/**
* Load an instance from a file, then validate it.
* @param fs filesystem
* @param path path
* @param status nullable status of file to load
* @param serDeser serializer; if null use the shared static one.
* @return the loaded instance
* @throws IOException IO failure
* @throws ValidationFailure if the data is invalid
*/
public static SinglePendingCommit load(FileSystem fs,
Path path,
FileStatus status,
JsonSerialization<SinglePendingCommit> serDeser)
throws IOException {
return load(fs, path, serDeser, null);
}
/**
* Load an instance from a file, then validate it.
* @param fs filesystem
* @param path path
* @param serDeser deserializer
* @param status status of file to load or null
* @return the loaded instance
* @throws IOException IO failure
* @throws ValidationFailure if the data is invalid
*/
public static SinglePendingCommit load(FileSystem fs,
Path path,
JsonSerialization<SinglePendingCommit> serDeser,
@Nullable FileStatus status)
throws IOException {
JsonSerialization<SinglePendingCommit> jsonSerialization =
serDeser != null ? serDeser : serializer();
SinglePendingCommit instance = jsonSerialization.load(fs, path, status);
instance.filename = path.toString();
instance.validate();
return instance;
}
/**
* Deserialize via java Serialization API: deserialize the instance
* and then call {@link #validate()} to verify that the deserialized
* data is valid.
* @param inStream input stream
* @throws IOException IO problem
* @throws ClassNotFoundException reflection problems
* @throws ValidationFailure validation failure
*/
private void readObject(ObjectInputStream inStream) throws IOException,
ClassNotFoundException {
inStream.defaultReadObject();
validate();
}
/**
* Set the various timestamp fields to the supplied value.
* @param millis time in milliseconds
*/
public void touch(long millis) {
created = millis;
saved = millis;
date = new Date(millis).toString();
}
/**
* Set the commit data.
* @param parts ordered list of etags.
* @throws ValidationFailure if the data is invalid
*/
public void bindCommitData(List<CompletedPart> parts) throws ValidationFailure {
etags = new ArrayList<>(parts.size());
int counter = 1;
for (CompletedPart part : parts) {
verify(part.partNumber() == counter,
"Expected part number %s but got %s", counter, part.partNumber());
etags.add(UploadEtag.fromCompletedPart(part));
counter++;
}
}
@Override
public void validate() throws ValidationFailure {
verify(version == VERSION, "Wrong version: %s", version);
verify(StringUtils.isNotEmpty(bucket), "Empty bucket");
verify(StringUtils.isNotEmpty(destinationKey),
"Empty destination");
verify(StringUtils.isNotEmpty(uploadId), "Empty uploadId");
verify(length >= 0, "Invalid length: " + length);
destinationPath();
verify(etags != null, "No etag list");
validateCollectionClass(etags, UploadEtag.class);
for (UploadEtag etag : etags) {
verify(etag != null && StringUtils.isNotEmpty(etag.getEtag()),
"Empty etag");
}
if (extraData != null) {
validateCollectionClass(extraData.keySet(), String.class);
validateCollectionClass(extraData.values(), String.class);
}
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder(
"SinglePendingCommit{");
sb.append("version=").append(version);
sb.append(", uri='").append(uri).append('\'');
sb.append(", destination='").append(destinationKey).append('\'');
sb.append(", uploadId='").append(uploadId).append('\'');
sb.append(", created=").append(created);
sb.append(", saved=").append(saved);
sb.append(", size=").append(length);
sb.append(", date='").append(date).append('\'');
sb.append(", jobId='").append(jobId).append('\'');
sb.append(", taskId='").append(taskId).append('\'');
sb.append(", notes='").append(text).append('\'');
if (etags != null) {
sb.append(", etags=[");
sb.append(join(",", etags));
sb.append(']');
} else {
sb.append(", etags=null");
}
sb.append('}');
return sb.toString();
}
@Override
public byte[] toBytes(JsonSerialization<SinglePendingCommit> serializer) throws IOException {
validate();
return serializer.toBytes(this);
}
@Override
public IOStatistics save(final FileSystem fs,
final Path path,
final JsonSerialization<SinglePendingCommit> serializer) throws IOException {
return saveFile(fs, path, this, serializer, true);
}
/**
* Build the destination path of the object.
* @return the path
* @throws IllegalStateException if the URI is invalid
*/
public Path destinationPath() {
Preconditions.checkState(StringUtils.isNotEmpty(uri), "Empty uri");
try {
return new Path(new URI(uri));
} catch (URISyntaxException e) {
throw new IllegalStateException("Cannot parse URI " + uri);
}
}
/**
* Get the number of etags.
* @return the size of the etag list.
*/
public int getPartCount() {
return etags.size();
}
/**
* Iterate over the etags.
* @return an iterator.
*/
@Override
public Iterator<UploadEtag> iterator() {
return etags.iterator();
}
/** @return version marker. */
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
/**
* This is the filename of the pending file itself.
* Used during processing; it's persistent value, if any, is ignored.
* @return filename
*/
public String getFilename() {
return filename;
}
public void setFilename(String filename) {
this.filename = filename;
}
/** @return path URI of the destination. */
public String getUri() {
return uri;
}
public void setUri(String uri) {
this.uri = uri;
}
/** @return ID of the upload. */
public String getUploadId() {
return uploadId;
}
public void setUploadId(String uploadId) {
this.uploadId = uploadId;
}
/** @return destination bucket. */
public String getBucket() {
return bucket;
}
public void setBucket(String bucket) {
this.bucket = bucket;
}
/** @return destination key in the bucket. */
public String getDestinationKey() {
return destinationKey;
}
public void setDestinationKey(String destinationKey) {
this.destinationKey = destinationKey;
}
/**
* When was the upload created?
* @return timestamp
*/
public long getCreated() {
return created;
}
public void setCreated(long created) {
this.created = created;
}
/**
* When was the upload saved?
* @return timestamp
*/
public long getSaved() {
return saved;
}
public void setSaved(long saved) {
this.saved = saved;
}
/**
* Timestamp as date; no expectation of parseability.
* @return date string
*/
public String getDate() {
return date;
}
public void setDate(String date) {
this.date = date;
}
/** @return Job ID, if known. */
public String getJobId() {
return jobId;
}
public void setJobId(String jobId) {
this.jobId = jobId;
}
/** @return Task ID, if known. */
public String getTaskId() {
return taskId;
}
public void setTaskId(String taskId) {
this.taskId = taskId;
}
/**
* Arbitrary notes.
* @return any notes
*/
public String getText() {
return text;
}
public void setText(String text) {
this.text = text;
}
/** @return ordered list of etags. */
public List<UploadEtag> getEtags() {
return etags;
}
public void setEtags(List<UploadEtag> etags) {
this.etags = etags;
}
/**
* Any custom extra data committer subclasses may choose to add.
* @return custom data
*/
public Map<String, String> getExtraData() {
return extraData;
}
public void setExtraData(Map<String, String> extraData) {
this.extraData = extraData;
}
/**
* Set/Update an extra data entry.
* @param key key
* @param value value
*/
public void putExtraData(String key, String value) {
extraData.put(key, value);
}
/**
* Destination file size.
* @return size of destination object
*/
public long getLength() {
return length;
}
public void setLength(long length) {
this.length = length;
}
@Override
public IOStatisticsSnapshot getIOStatistics() {
return iostats;
}
public void setIOStatistics(final IOStatisticsSnapshot ioStatistics) {
this.iostats = ioStatistics;
}
}
|
SinglePendingCommit
|
java
|
spring-projects__spring-security
|
core/src/main/java/org/springframework/security/authorization/AllAuthoritiesAuthorizationManager.java
|
{
"start": 1514,
"end": 6521
}
|
class ____<T> implements AuthorizationManager<T> {
private static final String ROLE_PREFIX = "ROLE_";
private RoleHierarchy roleHierarchy = new NullRoleHierarchy();
private final List<String> requiredAuthorities;
/**
* Creates a new instance.
* @param requiredAuthorities the authorities that are required.
*/
private AllAuthoritiesAuthorizationManager(String... requiredAuthorities) {
Assert.notEmpty(requiredAuthorities, "requiredAuthorities cannot be empty");
this.requiredAuthorities = Arrays.asList(requiredAuthorities);
}
/**
* Sets the {@link RoleHierarchy} to be used. Default is {@link NullRoleHierarchy}.
* Cannot be null.
* @param roleHierarchy the {@link RoleHierarchy} to use
*/
public void setRoleHierarchy(RoleHierarchy roleHierarchy) {
Assert.notNull(roleHierarchy, "roleHierarchy cannot be null");
this.roleHierarchy = roleHierarchy;
}
/**
* Determines if the current user is authorized by evaluating if the
* {@link Authentication} contains any of specified authorities.
* @param authentication the {@link Supplier} of the {@link Authentication} to check
* @param object the object to check authorization on (not used).
* @return an {@link AuthorityAuthorizationDecision}
*/
@Override
public AuthorityAuthorizationDecision authorize(Supplier<? extends @Nullable Authentication> authentication,
T object) {
List<String> authenticatedAuthorities = getGrantedAuthorities(authentication.get());
List<String> missingAuthorities = new ArrayList<>(this.requiredAuthorities);
missingAuthorities.removeIf(authenticatedAuthorities::contains);
return new AuthorityAuthorizationDecision(missingAuthorities.isEmpty(),
AuthorityUtils.createAuthorityList(missingAuthorities));
}
private List<String> getGrantedAuthorities(Authentication authentication) {
if (authentication == null || !authentication.isAuthenticated()) {
return Collections.emptyList();
}
return this.roleHierarchy.getReachableGrantedAuthorities(authentication.getAuthorities())
.stream()
.map(GrantedAuthority::getAuthority)
.toList();
}
/**
* Creates an instance of {@link AllAuthoritiesAuthorizationManager} with the provided
* authorities.
* @param roles the authorities to check for prefixed with "ROLE_". Each role should
* not start with "ROLE_" since it is automatically prepended already.
* @param <T> the type of object being authorized
* @return the new instance
*/
public static <T> AllAuthoritiesAuthorizationManager<T> hasAllRoles(String... roles) {
return hasAllPrefixedAuthorities(ROLE_PREFIX, roles);
}
/**
* Creates an instance of {@link AllAuthoritiesAuthorizationManager} with the provided
* authorities.
* @param prefix the prefix for <code>authorities</code>
* @param authorities the authorities to check for prefixed with <code>prefix</code>
* @param <T> the type of object being authorized
* @return the new instance
*/
public static <T> AllAuthoritiesAuthorizationManager<T> hasAllPrefixedAuthorities(String prefix,
String... authorities) {
Assert.notNull(prefix, "rolePrefix cannot be null");
Assert.notEmpty(authorities, "roles cannot be empty");
Assert.noNullElements(authorities, "roles cannot contain null values");
return hasAllAuthorities(toNamedRolesArray(prefix, authorities));
}
/**
* Creates an instance of {@link AllAuthoritiesAuthorizationManager} with the provided
* authorities.
* @param authorities the authorities to check for
* @param <T> the type of object being authorized
* @return the new instance
*/
public static <T> AllAuthoritiesAuthorizationManager<T> hasAllAuthorities(String... authorities) {
Assert.notEmpty(authorities, "authorities cannot be empty");
Assert.noNullElements(authorities, "authorities cannot contain null values");
return new AllAuthoritiesAuthorizationManager<>(authorities);
}
/**
* Creates an instance of {@link AllAuthoritiesAuthorizationManager} with the provided
* authorities.
* @param authorities the authorities to check for
* @param <T> the type of object being authorized
* @return the new instance
*/
public static <T> AllAuthoritiesAuthorizationManager<T> hasAllAuthorities(List<String> authorities) {
Assert.notEmpty(authorities, "authorities cannot be empty");
Assert.noNullElements(authorities, "authorities cannot contain null values");
return new AllAuthoritiesAuthorizationManager<>(authorities.toArray(new String[0]));
}
private static String[] toNamedRolesArray(String rolePrefix, String[] roles) {
String[] result = new String[roles.length];
for (int i = 0; i < roles.length; i++) {
String role = roles[i];
Assert.isTrue(rolePrefix.isEmpty() || !role.startsWith(rolePrefix), () -> role + " should not start with "
+ rolePrefix + " since " + rolePrefix
+ " is automatically prepended when using hasAnyRole. Consider using hasAnyAuthority instead.");
result[i] = rolePrefix + role;
}
return result;
}
}
|
AllAuthoritiesAuthorizationManager
|
java
|
apache__camel
|
components/camel-salesforce/camel-salesforce-maven-plugin/src/main/java/org/apache/camel/maven/AbstractSalesforceMojo.java
|
{
"start": 1656,
"end": 8915
}
|
class ____ extends AbstractMojo {
/**
* Salesforce client id.
*/
@Parameter(property = "camelSalesforce.clientId", required = true)
String clientId;
/**
* Salesforce client secret.
*/
@Parameter(property = "camelSalesforce.clientSecret")
String clientSecret;
/**
* HTTP client properties.
*/
@Parameter
Map<String, Object> httpClientProperties;
/**
* Proxy authentication URI.
*/
@Parameter(property = "camelSalesforce.httpProxyAuthUri")
String httpProxyAuthUri;
/**
* Addresses to NOT Proxy.
*/
@Parameter(property = "camelSalesforce.httpProxyExcludedAddresses")
Set<String> httpProxyExcludedAddresses;
/**
* HTTP Proxy host.
*/
@Parameter(property = "camelSalesforce.httpProxyHost")
String httpProxyHost;
/**
* Addresses to Proxy.
*/
@Parameter(property = "camelSalesforce.httpProxyIncludedAddresses")
Set<String> httpProxyIncludedAddresses;
/**
* Proxy authentication password.
*/
@Parameter(property = "camelSalesforce.httpProxyPassword")
String httpProxyPassword;
/**
* HTTP Proxy port.
*/
@Parameter(property = "camelSalesforce.httpProxyPort")
Integer httpProxyPort;
/**
* Proxy authentication realm.
*/
@Parameter(property = "camelSalesforce.httpProxyRealm")
String httpProxyRealm;
/**
* Proxy uses Digest authentication.
*/
@Parameter(property = "camelSalesforce.httpProxyUseDigestAuth")
boolean httpProxyUseDigestAuth;
/**
* Proxy authentication username.
*/
@Parameter(property = "camelSalesforce.httpProxyUsername")
String httpProxyUsername;
/**
* Is HTTP Proxy secure, i.e. using secure sockets, true by default.
*/
@Parameter(property = "camelSalesforce.isHttpProxySecure")
boolean isHttpProxySecure = true;
/**
* Is it a SOCKS4 Proxy?
*/
@Parameter(property = "camelSalesforce.isHttpProxySocks4")
boolean isHttpProxySocks4;
/**
* Salesforce login URL, defaults to https://login.salesforce.com.
*/
@Parameter(property = "camelSalesforce.loginUrl", defaultValue = SalesforceLoginConfig.DEFAULT_LOGIN_URL)
String loginUrl;
/**
* Salesforce password.
*/
@Parameter(property = "camelSalesforce.password")
String password;
/**
* SSL Context parameters.
*/
@Parameter(property = "camelSalesforce.sslContextParameters")
final SSLContextParameters sslContextParameters = new SSLContextParameters();
/**
* Salesforce username.
*/
@Parameter(property = "camelSalesforce.userName", required = true)
String userName;
/**
* Salesforce JWT Audience.
*/
@Parameter(property = "camelSalesforce.jwtAudience", defaultValue = "https://login.salesforce.com")
String jwtAudience;
/**
* Salesforce Keystore Path.
*/
@Parameter(property = "camelSalesforce.keystore.resource")
String keystoreResource;
/**
* Salesforce Keystore Password.
*/
@Parameter(property = "camelSalesforce.keystore.password")
String keystorePassword;
/**
* Salesforce Keystore Type.
*/
@Parameter(property = "camelSalesforce.keystore.type", defaultValue = "jks")
String keystoreType;
/**
* Salesforce API version.
*/
@Parameter(property = "camelSalesforce.version", defaultValue = SalesforceEndpointConfig.DEFAULT_VERSION)
String version;
private AbstractSalesforceExecution execution;
@Override
public final void execute() throws MojoExecutionException, MojoFailureException {
try {
validateAuthenticationParameters();
setup();
execution.execute();
} catch (Exception e) {
throw new MojoExecutionException(e.getMessage(), e);
}
}
protected abstract AbstractSalesforceExecution getSalesforceExecution();
protected void setup() {
execution = getSalesforceExecution();
execution.setClientId(clientId);
execution.setClientSecret(clientSecret);
execution.setHttpClientProperties(httpClientProperties);
execution.setHttpProxyAuthUri(httpProxyAuthUri);
execution.setHttpProxyHost(httpProxyHost);
execution.setHttpProxyPort(httpProxyPort);
execution.setHttpProxyRealm(httpProxyRealm);
execution.setHttpProxyUsername(httpProxyUsername);
execution.setHttpProxyPassword(httpProxyPassword);
execution.setHttpProxyExcludedAddresses(httpProxyExcludedAddresses);
execution.setHttpProxyIncludedAddresses(httpProxyIncludedAddresses);
execution.setHttpProxySocks4(isHttpProxySocks4);
execution.setHttpProxySecure(isHttpProxySecure);
execution.setHttpProxyUseDigestAuth(httpProxyUseDigestAuth);
execution.setLoginUrl(loginUrl);
execution.setUserName(userName);
execution.setPassword(password);
execution.setVersion(version);
execution.setSslContextParameters(sslContextParameters);
execution.setJwtAudience(jwtAudience);
execution.setKeyStoreParameters(generateKeyStoreParameters());
}
private void validateAuthenticationParameters() throws MojoExecutionException {
if (clientSecret == null && keystoreResource == null) {
throw new MojoExecutionException(
"Either property: clientSecret or property: keystoreResource must be provided.");
} else if (clientSecret != null && keystoreResource != null) {
throw new MojoExecutionException(
"Property: clientSecret or property: keystoreResource must be provided.");
}
if (clientSecret != null) {
if (password == null) {
throw new MojoExecutionException(
generateRequiredErrorMessage("password", "clientSecret"));
}
}
if (keystoreResource != null) {
if (keystorePassword == null) {
throw new MojoExecutionException(
generateRequiredErrorMessage("keystorePassword", "keystoreResource"));
}
}
}
private String generateRequiredErrorMessage(String parameter1, String parameter2) {
return String.format("Property: %s must be provided when property: %s was provided.", parameter1, parameter2);
}
private KeyStoreParameters generateKeyStoreParameters() {
if (keystoreResource == null) {
return null;
}
KeyStoreParameters keyStoreParameters = new KeyStoreParameters();
keyStoreParameters.setResource(keystoreResource);
keyStoreParameters.setPassword(keystorePassword);
keyStoreParameters.setType(keystoreType);
try (InputStream is = new FileInputStream(keystoreResource)) {
KeyStore ks = KeyStore.getInstance(keystoreType);
ks.load(is, keystorePassword.toCharArray());
keyStoreParameters.setKeyStore(ks);
} catch (IOException | GeneralSecurityException e) {
throw new RuntimeException(e);
}
return keyStoreParameters;
}
}
|
AbstractSalesforceMojo
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-shuffle/src/test/java/org/apache/hadoop/mapred/TestShuffleChannelHandler.java
|
{
"start": 4626,
"end": 12124
}
|
class ____ extends TestShuffleHandlerBase {
private static final org.slf4j.Logger LOG =
LoggerFactory.getLogger(TestShuffleChannelHandler.class);
@Test
public void testGetMapsFileRegion() throws IOException {
final ShuffleTest t = createShuffleTest();
final EmbeddedChannel shuffle = t.createShuffleHandlerChannelFileRegion();
t.testGetAllAttemptsForReduce0NoKeepAlive(shuffle.outboundMessages(), shuffle);
}
@Test
public void testGetMapsChunkedFileSSl() throws Exception {
final ShuffleTest t = createShuffleTest();
final LinkedList<Object> unencryptedMessages = new LinkedList<>();
final EmbeddedChannel shuffle = t.createShuffleHandlerSSL(unencryptedMessages);
t.testGetAllAttemptsForReduce0NoKeepAlive(unencryptedMessages, shuffle);
drainChannel(shuffle);
}
@Test
public void testKeepAlive() throws Exception {
// TODO: problems with keep-alive
// current behaviour:
// a) mapreduce.shuffle.connection-keep-alive.enable=false
// + client request with &keepAlive=true
// ==> connection is kept
// b) mapreduce.shuffle.connection-keep-alive.enable=true
// ==> connection is kept
//
// a) seems like a bug
// b) might be ok, because it's the default in HTTP/1.1
Configuration conf = new Configuration();
conf.set(SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED, "false");
conf.set(SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT, "15");
final ShuffleTest t = createShuffleTest(conf);
final EmbeddedChannel shuffle = t.createShuffleHandlerChannelFileRegion();
t.testKeepAlive(shuffle.outboundMessages(), shuffle);
}
@Test
public void testKeepAliveSSL() throws Exception {
Configuration conf = new Configuration();
conf.set(SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED, "false");
conf.set(SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT, "15");
final ShuffleTest t = createShuffleTest(conf);
final LinkedList<Object> unencryptedMessages = new LinkedList<>();
final EmbeddedChannel shuffle = t.createShuffleHandlerSSL(unencryptedMessages);
t.testKeepAlive(unencryptedMessages, shuffle);
}
@Test
public void tetKeepAliveTimeout() throws InterruptedException, IOException {
Configuration conf = new Configuration();
conf.set(SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED, "true");
conf.set(SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT, "1");
final ShuffleTest t = createShuffleTest(conf);
final EmbeddedChannel shuffle = t.createShuffleHandlerChannelFileRegion();
FullHttpRequest req = t.createRequest(getUri(TEST_JOB_ID, 0,
Collections.singletonList(TEST_ATTEMPT_1), true));
shuffle.writeInbound(req);
t.assertResponse(shuffle.outboundMessages(),
t.getExpectedHttpResponse(req, true, 46),
t.getAttemptData(new Attempt(TEST_ATTEMPT_1, TEST_DATA_A))
);
assertTrue(shuffle.isActive(), "keep-alive");
TimeUnit.SECONDS.sleep(3);
shuffle.runScheduledPendingTasks();
assertFalse(shuffle.isActive(), "closed");
}
@Test
public void testIncompatibleShuffleVersion() {
Configuration conf = new Configuration();
conf.set(SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED, "true");
final ShuffleTest t = createShuffleTest(conf);
final EmbeddedChannel shuffle = t.createShuffleHandlerChannelFileRegion();
FullHttpRequest req = t.createRequest(getUri(TEST_JOB_ID, 0,
Collections.singletonList(TEST_ATTEMPT_1), true));
req.headers().set(ShuffleHeader.HTTP_HEADER_NAME, "invalid");
shuffle.writeInbound(req);
final EmbeddedChannel decoder = t.createHttpResponseChannel();
for (Object obj : shuffle.outboundMessages()) {
decoder.writeInbound(obj);
}
DefaultHttpResponse actual = decoder.readInbound();
assertFalse(actual.headers().get(CONTENT_LENGTH).isEmpty());
actual.headers().set(CONTENT_LENGTH, 0);
assertEquals(getExpectedHttpResponse(HttpResponseStatus.BAD_REQUEST).toString(),
actual.toString());
tryRelease(actual);
assertFalse(shuffle.isActive(), "closed"); // known-issue
drainChannel(decoder);
}
@Test
public void testInvalidMapNoIndexFile() {
final ShuffleTest t = createShuffleTest();
final EmbeddedChannel shuffle = t.createShuffleHandlerChannelFileRegion();
FullHttpRequest req = t.createRequest(getUri(TEST_JOB_ID, 0,
Arrays.asList(TEST_ATTEMPT_1, "non-existing"), true));
shuffle.writeInbound(req);
final EmbeddedChannel decoder = t.createHttpResponseChannel();
for (Object obj : shuffle.outboundMessages()) {
decoder.writeInbound(obj);
}
DefaultHttpResponse actual = decoder.readInbound();
drainChannel(decoder);
assertFalse(actual.headers().get(CONTENT_LENGTH).isEmpty());
actual.headers().set(CONTENT_LENGTH, 0);
assertEquals(getExpectedHttpResponse(HttpResponseStatus.INTERNAL_SERVER_ERROR).toString(),
actual.toString());
tryRelease(actual);
assertFalse(shuffle.isActive(), "closed");
}
@Test
public void testInvalidMapNoDataFile() {
final ShuffleTest t = createShuffleTest();
final EmbeddedChannel shuffle = t.createShuffleHandlerChannelFileRegion();
String dataFile = getDataFile(TEST_USER, tempDir.toAbsolutePath().toString(), TEST_ATTEMPT_2);
assertTrue(new File(dataFile).delete(), "should delete");
FullHttpRequest req = t.createRequest(getUri(TEST_JOB_ID, 0,
Arrays.asList(TEST_ATTEMPT_1, TEST_ATTEMPT_2), false));
shuffle.writeInbound(req);
final EmbeddedChannel decoder = t.createHttpResponseChannel();
for (Object obj : shuffle.outboundMessages()) {
decoder.writeInbound(obj);
}
DefaultHttpResponse actual = decoder.readInbound();
drainChannel(decoder);
assertFalse(actual.headers().get(CONTENT_LENGTH).isEmpty());
actual.headers().set(CONTENT_LENGTH, 0);
assertEquals(getExpectedHttpResponse(HttpResponseStatus.INTERNAL_SERVER_ERROR).toString(),
actual.toString());
tryRelease(actual);
assertFalse(shuffle.isActive(), "closed");
}
private void drainChannel(EmbeddedChannel ch) {
Object o;
while((o = ch.readInbound())!=null) {
tryRelease(o);
}
while((o = ch.readOutbound())!=null) {
tryRelease(o);
}
}
private void tryRelease(Object obj) {
if (obj instanceof ReferenceCounted) {
ReferenceCounted bb = (ReferenceCounted) obj;
if (bb.refCnt() > 0) {
bb.release(bb.refCnt());
}
}
}
private DefaultHttpResponse getExpectedHttpResponse(HttpResponseStatus status) {
DefaultHttpResponse response = new DefaultHttpResponse(HTTP_1_1, status);
response.headers().set(CONTENT_TYPE, "text/plain; charset=UTF-8");
response.headers().set(ShuffleHeader.HTTP_HEADER_NAME,
ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
response.headers().set(ShuffleHeader.HTTP_HEADER_VERSION,
ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
response.headers().set(CONTENT_LENGTH, 0);
return response;
}
private ShuffleTest createShuffleTest() {
return createShuffleTest(new Configuration());
}
private ShuffleTest createShuffleTest(Configuration conf) {
return new ShuffleTest(conf);
}
private File getResourceFile(String resourceName) {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
return new File(Objects.requireNonNull(classLoader.getResource(resourceName)).getFile());
}
@SuppressWarnings("checkstyle:VisibilityModifier")
static
|
TestShuffleChannelHandler
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.