language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationClientProtocol.java
|
{
"start": 20272,
"end": 22350
}
|
interface ____ by clients to get the list of reservations in a plan.
* The reservationId will be used to search for reservations to list if it is
* provided. Otherwise, it will select active reservations within the
* startTime and endTime (inclusive).
* </p>
*
* @param request to list reservations in a plan. Contains fields to select
* String queue, ReservationId reservationId, long startTime,
* long endTime, and a bool includeReservationAllocations.
*
* queue: Required. Cannot be null or empty. Refers to the
* reservable queue in the scheduler that was selected when
* creating a reservation submission
* {@link ReservationSubmissionRequest}.
*
* reservationId: Optional. If provided, other fields will
* be ignored.
*
* startTime: Optional. If provided, only reservations that
* end after the startTime will be selected. This defaults
* to 0 if an invalid number is used.
*
* endTime: Optional. If provided, only reservations that
* start on or before endTime will be selected. This defaults
* to Long.MAX_VALUE if an invalid number is used.
*
* includeReservationAllocations: Optional. Flag that
* determines whether the entire reservation allocations are
* to be returned. Reservation allocations are subject to
* change in the event of re-planning as described by
* {@code ReservationDefinition}.
*
* @return response that contains information about reservations that are
* being searched for.
* @throws YarnException if the request is invalid
* @throws IOException on IO failures
*
*/
@Public
@Unstable
ReservationListResponse listReservations(
ReservationListRequest request) throws YarnException, IOException;
/**
* <p>
* The
|
used
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/android/WakelockReleasedDangerouslyTest.java
|
{
"start": 5726,
"end": 6533
}
|
class ____ {
void foo(WakeLock wakelock) {
wakelock.acquire(100);
doThing(
() -> {
try {
wakelock.release();
} catch (RuntimeException unused) {
// Ignore: wakelock already released by timeout.
// TODO: Log this exception.
}
});
}
void doThing(Runnable thing) {}
}
""")
.doTest();
}
@Test
public void acquiredWithoutTimeout_shouldBeOkay() {
compilationHelper
.addSourceLines(
"WithoutTimeout.java",
"""
import android.os.PowerManager.WakeLock;
public
|
TestApp
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/notfound/OptionalLazyNotFoundTest.java
|
{
"start": 9266,
"end": 9792
}
|
class ____ extends Person {
@Id
private Long id;
@OneToOne(fetch = FetchType.LAZY, cascade = CascadeType.PERSIST)
@JoinColumn(foreignKey = @ForeignKey(ConstraintMode.NO_CONSTRAINT))
private City city;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public City getCity() {
return city;
}
@Override
public void setCity(City city) {
this.city = city;
}
}
@Entity
@Table(name = "PersonOneToOneSelectIgnore")
public static
|
PersonOneToOneSelectException
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/accept/ContentNegotiationManager.java
|
{
"start": 1265,
"end": 1716
}
|
class ____ determine requested {@linkplain MediaType media types}
* for a request. This is done by delegating to a list of configured
* {@code ContentNegotiationStrategy} instances.
*
* <p>Also provides methods to look up file extensions for a media type.
* This is done by delegating to the list of configured
* {@code MediaTypeFileExtensionResolver} instances.
*
* @author Rossen Stoyanchev
* @author Juergen Hoeller
* @since 3.2
*/
public
|
to
|
java
|
google__dagger
|
dagger-compiler/main/java/dagger/internal/codegen/model/BindingGraph.java
|
{
"start": 15083,
"end": 15597
}
|
interface ____ extends Edge {
/**
* The modules that {@linkplain Module#subcomponents() declare the subcomponent} that generated
* this edge. Empty if the parent component has a subcomponent creator method and there are no
* declaring modules.
*/
ImmutableSet<DaggerTypeElement> declaringModules();
}
/** A node in the binding graph. Either a {@link Binding} or a {@link ComponentNode}. */
// TODO(dpb): Make all the node/edge types top-level.
public
|
SubcomponentCreatorBindingEdge
|
java
|
redisson__redisson
|
redisson/src/test/java/org/redisson/rx/RedissonTopicRxTest.java
|
{
"start": 591,
"end": 3502
}
|
class ____ implements Serializable {
private String name;
public Message() {
}
public Message(String name) {
this.name = name;
}
public String getName() {
return name;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Message other = (Message) obj;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
return true;
}
}
@Test
public void testRemoveListenerById() throws InterruptedException {
RTopicRx topic1 = redisson.getTopic("topic1");
MessageListener listener = new MessageListener() {
@Override
public void onMessage(CharSequence channel, Object msg) {
Assertions.fail();
}
};
Single<Integer> res = topic1.addListener(Message.class, listener);
Integer listenerId = res.blockingGet();
topic1 = redisson.getTopic("topic1");
topic1.removeListener(listenerId);
topic1.publish(new Message("123"));
}
@Test
public void testRemoveListenerByInstance() throws InterruptedException {
RTopicRx topic1 = redisson.getTopic("topic1");
MessageListener listener = new MessageListener() {
@Override
public void onMessage(CharSequence channel, Object msg) {
Assertions.fail();
}
};
topic1.addListener(Message.class, listener);
topic1 = redisson.getTopic("topic1");
topic1.removeListener(listener);
topic1.publish(new Message("123"));
}
@Test
public void testLong() throws InterruptedException {
RTopicRx topic = redisson.getTopic("test");
Flowable<String> messages = topic.getMessages(String.class);
List<String> list = new ArrayList<>();
messages.subscribe(new Subscriber<String>() {
@Override
public void onSubscribe(Subscription s) {
s.request(10);
}
@Override
public void onNext(String t) {
list.add(t);
}
@Override
public void onError(Throwable t) {
}
@Override
public void onComplete() {
}
});
for (int i = 0; i < 15; i++) {
sync(topic.publish("" + i));
}
Awaitility.waitAtMost(Duration.ofSeconds(10)).until(() -> list.equals(Arrays.asList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")));
}
}
|
Message
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/inheritance/SingleTableConstraintsTest.java
|
{
"start": 5387,
"end": 5559
}
|
class ____ extends Publication {
@ManyToOne(optional = false)
Journal journal;
@ManyToOne(optional = false)
Author reviewer;
}
@Entity(name = "Journal")
static
|
Paper
|
java
|
google__dagger
|
hilt-compiler/main/java/dagger/hilt/processor/internal/aliasof/AliasOfProcessor.java
|
{
"start": 1132,
"end": 1325
}
|
class ____ extends JavacBaseProcessingStepProcessor {
@Override
public AliasOfProcessingStep processingStep() {
return new AliasOfProcessingStep(getXProcessingEnv());
}
}
|
AliasOfProcessor
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/localdatetime/LocalDateTimeAssert_isBetween_with_String_parameters_Test.java
|
{
"start": 1021,
"end": 2172
}
|
class ____ extends AbstractLocalDateTimeAssertBaseTest {
@Override
protected LocalDateTimeAssert invoke_api_method() {
return assertions.isBetween(YESTERDAY.toString(), TOMORROW.toString());
}
@Override
protected void verify_internal_effects() {
verify(getComparables(assertions)).assertIsBetween(getInfo(assertions), getActual(assertions), YESTERDAY, TOMORROW, true,
true);
}
@Test
void should_throw_a_DateTimeParseException_if_start_String_parameter_cant_be_converted() {
// GIVEN
String abc = "abc";
// WHEN
Throwable thrown = catchThrowable(() -> assertions.isBetween(abc, TOMORROW.toString()));
// THEN
then(thrown).isInstanceOf(DateTimeParseException.class);
}
@Test
void should_throw_a_DateTimeParseException_if_end_String_parameter_cant_be_converted() {
// GIVEN
String abc = "abc";
// WHEN
Throwable thrown = catchThrowable(() -> assertions.isBetween(YESTERDAY.toString(), abc));
// THEN
then(thrown).isInstanceOf(DateTimeParseException.class);
}
}
|
LocalDateTimeAssert_isBetween_with_String_parameters_Test
|
java
|
mybatis__mybatis-3
|
src/main/java/org/apache/ibatis/parsing/XNode.java
|
{
"start": 1015,
"end": 10251
}
|
class ____ {
private final Node node;
private final String name;
private final String body;
private final Properties attributes;
private final Properties variables;
private final XPathParser xpathParser;
public XNode(XPathParser xpathParser, Node node, Properties variables) {
this.xpathParser = xpathParser;
this.node = node;
this.name = node.getNodeName();
this.variables = variables;
this.attributes = parseAttributes(node);
this.body = parseBody(node);
}
public XNode newXNode(Node node) {
return new XNode(xpathParser, node, variables);
}
public XNode getParent() {
Node parent = node.getParentNode();
if (!(parent instanceof Element)) {
return null;
}
return new XNode(xpathParser, parent, variables);
}
public String getPath() {
StringBuilder builder = new StringBuilder();
Node current = node;
while (current instanceof Element) {
if (current != node) {
builder.insert(0, "/");
}
builder.insert(0, current.getNodeName());
current = current.getParentNode();
}
return builder.toString();
}
public String getValueBasedIdentifier() {
StringBuilder builder = new StringBuilder();
XNode current = this;
while (current != null) {
if (current != this) {
builder.insert(0, "_");
}
String value = current.getStringAttribute("id",
current.getStringAttribute("value", current.getStringAttribute("property", (String) null)));
if (value != null) {
value = value.replace('.', '_');
builder.insert(0, "]");
builder.insert(0, value);
builder.insert(0, "[");
}
builder.insert(0, current.getName());
current = current.getParent();
}
return builder.toString();
}
public String evalString(String expression) {
return xpathParser.evalString(node, expression);
}
public Boolean evalBoolean(String expression) {
return xpathParser.evalBoolean(node, expression);
}
public Double evalDouble(String expression) {
return xpathParser.evalDouble(node, expression);
}
public List<XNode> evalNodes(String expression) {
return xpathParser.evalNodes(node, expression);
}
public XNode evalNode(String expression) {
return xpathParser.evalNode(node, expression);
}
public Node getNode() {
return node;
}
public String getName() {
return name;
}
public String getStringBody() {
return getStringBody(null);
}
public String getStringBody(String def) {
return body == null ? def : body;
}
public Boolean getBooleanBody() {
return getBooleanBody(null);
}
public Boolean getBooleanBody(Boolean def) {
return body == null ? def : Boolean.valueOf(body);
}
public Integer getIntBody() {
return getIntBody(null);
}
public Integer getIntBody(Integer def) {
return body == null ? def : Integer.valueOf(body);
}
public Long getLongBody() {
return getLongBody(null);
}
public Long getLongBody(Long def) {
return body == null ? def : Long.valueOf(body);
}
public Double getDoubleBody() {
return getDoubleBody(null);
}
public Double getDoubleBody(Double def) {
return body == null ? def : Double.valueOf(body);
}
public Float getFloatBody() {
return getFloatBody(null);
}
public Float getFloatBody(Float def) {
return body == null ? def : Float.valueOf(body);
}
public <T extends Enum<T>> T getEnumAttribute(Class<T> enumType, String name) {
return getEnumAttribute(enumType, name, null);
}
public <T extends Enum<T>> T getEnumAttribute(Class<T> enumType, String name, T def) {
String value = getStringAttribute(name);
return value == null ? def : Enum.valueOf(enumType, value);
}
/**
* Return a attribute value as String.
* <p>
* If attribute value is absent, return value that provided from supplier of default value.
*
* @param name
* attribute name
* @param defSupplier
* a supplier of default value
*
* @return the string attribute
*
* @since 3.5.4
*/
public String getStringAttribute(String name, Supplier<String> defSupplier) {
String value = attributes.getProperty(name);
return value == null ? defSupplier.get() : value;
}
public String getStringAttribute(String name) {
return getStringAttribute(name, (String) null);
}
public String getStringAttribute(String name, String def) {
String value = attributes.getProperty(name);
return value == null ? def : value;
}
public Boolean getBooleanAttribute(String name) {
return getBooleanAttribute(name, null);
}
public Boolean getBooleanAttribute(String name, Boolean def) {
String value = attributes.getProperty(name);
return value == null ? def : Boolean.valueOf(value);
}
public Integer getIntAttribute(String name) {
return getIntAttribute(name, null);
}
public Integer getIntAttribute(String name, Integer def) {
String value = attributes.getProperty(name);
return value == null ? def : Integer.valueOf(value);
}
public Long getLongAttribute(String name) {
return getLongAttribute(name, null);
}
public Long getLongAttribute(String name, Long def) {
String value = attributes.getProperty(name);
return value == null ? def : Long.valueOf(value);
}
public Double getDoubleAttribute(String name) {
return getDoubleAttribute(name, null);
}
public Double getDoubleAttribute(String name, Double def) {
String value = attributes.getProperty(name);
return value == null ? def : Double.valueOf(value);
}
public Float getFloatAttribute(String name) {
return getFloatAttribute(name, null);
}
public Float getFloatAttribute(String name, Float def) {
String value = attributes.getProperty(name);
return value == null ? def : Float.valueOf(value);
}
public List<XNode> getChildren() {
List<XNode> children = new ArrayList<>();
NodeList nodeList = node.getChildNodes();
if (nodeList != null) {
for (int i = 0, n = nodeList.getLength(); i < n; i++) {
Node node = nodeList.item(i);
if (node.getNodeType() == Node.ELEMENT_NODE) {
children.add(new XNode(xpathParser, node, variables));
}
}
}
return children;
}
public Properties getChildrenAsProperties() {
Properties properties = new Properties();
for (XNode child : getChildren()) {
String name = child.getStringAttribute("name");
String value = child.getStringAttribute("value");
if (name != null && value != null) {
properties.setProperty(name, value);
}
}
return properties;
}
@Override
public String toString() {
return buildToString(new StringBuilder(), 0).toString();
}
private StringBuilder buildToString(StringBuilder builder, int indentLevel) {
indent(builder, indentLevel).append("<").append(name);
for (Map.Entry<Object, Object> entry : attributes.entrySet()) {
builder.append(" ");
builder.append(entry.getKey());
builder.append("=\"");
builder.append(entry.getValue());
builder.append("\"");
}
NodeList nodeList = node.getChildNodes();
if (nodeList == null || nodeList.getLength() == 0) {
builder.append(" />\n");
} else {
builder.append(">\n");
for (int i = 0, n = nodeList.getLength(); i < n; i++) {
Node node = nodeList.item(i);
short nodeType = node.getNodeType();
if (nodeType == Node.ELEMENT_NODE) {
new XNode(xpathParser, node, variables).buildToString(builder, indentLevel + 1);
} else {
String text = getBodyData(node).trim();
if (text.length() > 0) {
indent(builder, indentLevel + 1).append(text).append("\n");
}
}
}
indent(builder, indentLevel).append("</").append(name).append(">\n");
}
return builder;
}
private StringBuilder indent(StringBuilder builder, int level) {
for (int i = 0; i < level; i++) {
builder.append(" ");
}
return builder;
}
private Properties parseAttributes(Node n) {
Properties attributes = new Properties();
NamedNodeMap attributeNodes = n.getAttributes();
if (attributeNodes != null) {
for (int i = 0; i < attributeNodes.getLength(); i++) {
Node attribute = attributeNodes.item(i);
String value = PropertyParser.parse(attribute.getNodeValue(), variables);
attributes.put(attribute.getNodeName(), value);
}
}
return attributes;
}
private String parseBody(Node node) {
String data = getBodyData(node);
if (data == null) {
NodeList children = node.getChildNodes();
for (int i = 0; i < children.getLength(); i++) {
Node child = children.item(i);
data = getBodyData(child);
if (data != null) {
break;
}
}
}
return data;
}
private String getBodyData(Node child) {
if (child.getNodeType() == Node.CDATA_SECTION_NODE || child.getNodeType() == Node.TEXT_NODE) {
String data = ((CharacterData) child).getData();
return PropertyParser.parse(data, variables);
}
return null;
}
}
|
XNode
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ReservationRequestsInfo.java
|
{
"start": 1463,
"end": 2618
}
|
class ____ {
@XmlElement(name = "reservation-request-interpreter")
private int reservationRequestsInterpreter;
@XmlElement(name = "reservation-request")
private ArrayList<ReservationRequestInfo> reservationRequest;
public ReservationRequestsInfo() {
}
public ReservationRequestsInfo(ReservationRequests requests) {
reservationRequest = new ArrayList<>();
for (ReservationRequest request : requests.getReservationResources()) {
reservationRequest.add(new ReservationRequestInfo(request));
}
reservationRequestsInterpreter = requests.getInterpreter().ordinal();
}
public int getReservationRequestsInterpreter() {
return reservationRequestsInterpreter;
}
public void setReservationRequestsInterpreter(
int reservationRequestsInterpreter) {
this.reservationRequestsInterpreter = reservationRequestsInterpreter;
}
public ArrayList<ReservationRequestInfo> getReservationRequest() {
return reservationRequest;
}
public void setReservationRequest(
ArrayList<ReservationRequestInfo> reservationRequest) {
this.reservationRequest = reservationRequest;
}
}
|
ReservationRequestsInfo
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/annotation/Autowire.java
|
{
"start": 1318,
"end": 2067
}
|
enum ____ {
/**
* Constant that indicates no autowiring at all.
*/
NO(AutowireCapableBeanFactory.AUTOWIRE_NO),
/**
* Constant that indicates autowiring bean properties by name.
*/
BY_NAME(AutowireCapableBeanFactory.AUTOWIRE_BY_NAME),
/**
* Constant that indicates autowiring bean properties by type.
*/
BY_TYPE(AutowireCapableBeanFactory.AUTOWIRE_BY_TYPE);
private final int value;
Autowire(int value) {
this.value = value;
}
public int value() {
return this.value;
}
/**
* Return whether this represents an actual autowiring value.
* @return whether actual autowiring was specified
* (either BY_NAME or BY_TYPE)
*/
public boolean isAutowire() {
return (this == BY_NAME || this == BY_TYPE);
}
}
|
Autowire
|
java
|
apache__camel
|
core/camel-core-model/src/main/java/org/apache/camel/model/rest/RestConfigurationDefinition.java
|
{
"start": 10878,
"end": 23534
}
|
class ____ etc. Not all 3rd party API gateways
* and tools supports vendor-extensions when importing your API docs.
*/
public void setApiVendorExtension(String apiVendorExtension) {
this.apiVendorExtension = apiVendorExtension;
}
public RestHostNameResolver getHostNameResolver() {
return hostNameResolver;
}
/**
* If no hostname has been explicit configured, then this resolver is used to compute the hostname the REST service
* will be using.
*/
public void setHostNameResolver(RestHostNameResolver hostNameResolver) {
this.hostNameResolver = hostNameResolver;
}
public RestBindingMode getBindingMode() {
return bindingMode;
}
/**
* Sets the binding mode to use.
* <p/>
* The default value is off
*/
public void setBindingMode(RestBindingMode bindingMode) {
this.bindingMode = bindingMode;
}
public String getBindingPackageScan() {
return bindingPackageScan;
}
/**
* Package name to use as base (offset) for classpath scanning of POJO classes are located when using binding mode
* is enabled for JSon or XML. Multiple package names can be separated by comma.
*/
public void setBindingPackageScan(String bindingPackageScan) {
this.bindingPackageScan = bindingPackageScan;
}
public String getSkipBindingOnErrorCode() {
return skipBindingOnErrorCode;
}
/**
* Whether to skip binding on output if there is a custom HTTP error code header. This allows to build custom error
* messages that do not bind to json / xml etc, as success messages otherwise will do.
*/
public void setSkipBindingOnErrorCode(String skipBindingOnErrorCode) {
this.skipBindingOnErrorCode = skipBindingOnErrorCode;
}
public String getClientRequestValidation() {
return clientRequestValidation;
}
/**
* Whether to enable validation of the client request to check:
*
* 1) Content-Type header matches what the Rest DSL consumes; returns HTTP Status 415 if validation error. 2) Accept
* header matches what the Rest DSL produces; returns HTTP Status 406 if validation error. 3) Missing required data
* (query parameters, HTTP headers, body); returns HTTP Status 400 if validation error. 4) Parsing error of the
* message body (JSon, XML or Auto binding mode must be enabled); returns HTTP Status 400 if validation error.
*/
public void setClientRequestValidation(String clientRequestValidation) {
this.clientRequestValidation = clientRequestValidation;
}
public String getClientResponseValidation() {
return clientResponseValidation;
}
/**
* Whether to check what Camel is returning as response to the client:
*
* 1) Status-code and Content-Type matches Rest DSL response messages. 2) Check whether expected headers is included
* according to the Rest DSL repose message headers. 3) If the response body is JSon then check whether its valid
* JSon. Returns 500 if validation error detected.
*/
public void setClientResponseValidation(String clientResponseValidation) {
this.clientResponseValidation = clientResponseValidation;
}
public String getEnableCORS() {
return enableCORS;
}
/**
* Whether to enable CORS headers in the HTTP response.
* <p/>
* The default value is false.
*/
public void setEnableCORS(String enableCORS) {
this.enableCORS = enableCORS;
}
public String getEnableNoContentResponse() {
return enableNoContentResponse;
}
/**
* Whether to return HTTP 204 with an empty body when a response contains an empty JSON object or XML root object.
* <p/>
* The default value is false.
*/
public void setEnableNoContentResponse(String enableNoContentResponse) {
this.enableNoContentResponse = enableNoContentResponse;
}
public String getInlineRoutes() {
return inlineRoutes;
}
/**
* Inline routes in rest-dsl which are linked using direct endpoints.
*
* Each service in Rest DSL is an individual route, meaning that you would have at least two routes per service
* (rest-dsl, and the route linked from rest-dsl). By inlining (default) allows Camel to optimize and inline this as
* a single route, however this requires to use direct endpoints, which must be unique per service. If a route is
* not using direct endpoint then the rest-dsl is not inlined, and will become an individual route.
*
* This option is default <tt>true</tt>.
*/
public void setInlineRoutes(String inlineRoutes) {
this.inlineRoutes = inlineRoutes;
}
public String getJsonDataFormat() {
return jsonDataFormat;
}
/**
* Name of specific json data format to use. By default, jackson will be used. Important: This option is only for
* setting a custom name of the data format, not to refer to an existing data format instance.
*/
public void setJsonDataFormat(String jsonDataFormat) {
this.jsonDataFormat = jsonDataFormat;
}
public String getXmlDataFormat() {
return xmlDataFormat;
}
/**
* Name of specific XML data format to use. By default jaxb will be used, but jacksonXml is also supported.
* Important: This option is only for setting a custom name of the data format, not to refer to an existing data
* format instance.
*/
public void setXmlDataFormat(String xmlDataFormat) {
this.xmlDataFormat = xmlDataFormat;
}
public List<RestPropertyDefinition> getComponentProperties() {
return componentProperties;
}
/**
* Allows to configure as many additional properties for the rest component in use.
*/
public void setComponentProperties(List<RestPropertyDefinition> componentProperties) {
this.componentProperties = componentProperties;
}
public List<RestPropertyDefinition> getEndpointProperties() {
return endpointProperties;
}
/**
* Allows to configure as many additional properties for the rest endpoint in use.
*/
public void setEndpointProperties(List<RestPropertyDefinition> endpointProperties) {
this.endpointProperties = endpointProperties;
}
public List<RestPropertyDefinition> getConsumerProperties() {
return consumerProperties;
}
/**
* Allows to configure as many additional properties for the rest consumer in use.
*/
public void setConsumerProperties(List<RestPropertyDefinition> consumerProperties) {
this.consumerProperties = consumerProperties;
}
public List<RestPropertyDefinition> getDataFormatProperties() {
return dataFormatProperties;
}
/**
* Allows to configure as many additional properties for the data formats in use. For example set property
* prettyPrint to true to have json outputted in pretty mode. The properties can be prefixed to denote the option is
* only for either JSON or XML and for either the IN or the OUT. The prefixes are:
* <ul>
* <li>json.in.</li>
* <li>json.out.</li>
* <li>xml.in.</li>
* <li>xml.out.</li>
* </ul>
* For example a key with value "xml.out.mustBeJAXBElement" is only for the XML data format for the outgoing. A key
* without a prefix is a common key for all situations.
*/
public void setDataFormatProperties(List<RestPropertyDefinition> dataFormatProperties) {
this.dataFormatProperties = dataFormatProperties;
}
public List<RestPropertyDefinition> getApiProperties() {
return apiProperties;
}
/**
* Allows to configure as many additional properties for the api documentation. For example set property api.title
* to my cool stuff
*/
public void setApiProperties(List<RestPropertyDefinition> apiProperties) {
this.apiProperties = apiProperties;
}
public List<RestPropertyDefinition> getCorsHeaders() {
return corsHeaders;
}
/**
* Allows to configure custom CORS headers.
*/
public void setCorsHeaders(List<RestPropertyDefinition> corsHeaders) {
this.corsHeaders = corsHeaders;
}
public List<RestPropertyDefinition> getValidationLevels() {
return validationLevels;
}
/**
* Allows to configure custom validation levels when using camel-openapi-validator with client request/response
* validator.
*/
public void setValidationLevels(List<RestPropertyDefinition> validationLevels) {
this.validationLevels = validationLevels;
}
public String getUseXForwardHeaders() {
return useXForwardHeaders;
}
/**
* Whether to use X-Forward headers to set host etc. for OpenApi.
*
* This may be needed in special cases involving reverse-proxy and networking going from HTTP to HTTPS etc. Then the
* proxy can send X-Forward headers (X-Forwarded-Proto) that influences the host names in the OpenAPI schema that
* camel-openapi-java generates from Rest DSL routes.
*/
public void setUseXForwardHeaders(String useXForwardHeaders) {
this.useXForwardHeaders = useXForwardHeaders;
}
// Fluent API
// -------------------------------------------------------------------------
/**
* To use a specific Camel rest component (consumer)
*/
public RestConfigurationDefinition component(String componentId) {
setComponent(componentId);
return this;
}
/**
* To use a specific Camel rest API component
*/
public RestConfigurationDefinition apiComponent(String componentId) {
setApiComponent(componentId);
return this;
}
/**
* To use a specific Camel rest component (producer)
*/
public RestConfigurationDefinition producerComponent(String componentId) {
setProducerComponent(componentId);
return this;
}
/**
* To use a specific scheme such as http/https
*/
public RestConfigurationDefinition scheme(String scheme) {
setScheme(scheme);
return this;
}
/**
* To define the host to use, such as 0.0.0.0 or localhost
*/
public RestConfigurationDefinition host(String host) {
setHost(host);
return this;
}
/**
* To define a specific host to use for API documentation instead of using a generated API hostname that is relative
* to the REST service host.
*/
public RestConfigurationDefinition apiHost(String host) {
setApiHost(host);
return this;
}
/**
* To specify the port number to use for the REST service
*/
public RestConfigurationDefinition port(int port) {
setPort(Integer.toString(port));
return this;
}
/**
* To specify the port number to use for the REST service
*/
public RestConfigurationDefinition port(String port) {
setPort(port);
return this;
}
/**
* Sets the location of the api document the REST producer will use to validate the REST uri and query parameters
* are valid accordingly to the api document.
* <p/>
* The location of the api document is loaded from classpath by default, but you can use <tt>file:</tt> or
* <tt>http:</tt> to refer to resources to load from file or http url.
*/
public RestConfigurationDefinition producerApiDoc(String apiDoc) {
setProducerApiDoc(apiDoc);
return this;
}
/**
* Sets a leading context-path the REST API will be using.
* <p/>
* This can be used when using components such as <tt>camel-servlet</tt> where the deployed web application is
* deployed using a context-path. Or for components such as <tt>camel-jetty</tt> or <tt>camel-netty-http</tt> that
* includes a HTTP server.
*/
public RestConfigurationDefinition apiContextPath(String contextPath) {
setApiContextPath(contextPath);
return this;
}
/**
* Sets the route id to use for the route that services the REST API.
* <p/>
* The route will by default use an auto assigned route id.
*/
public RestConfigurationDefinition apiContextRouteId(String apiContextRouteId) {
setApiContextRouteId(apiContextRouteId);
return this;
}
/**
* Whether vendor extension is enabled in the Rest APIs. If enabled then Camel will include additional information
* as vendor extension (eg keys starting with x-) such as route ids,
|
names
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregator.java
|
{
"start": 1443,
"end": 5012
}
|
class ____ extends NumericMetricsAggregator.SingleDoubleValue {
private final DocValueFormat format;
private final double compression;
private final TDigestExecutionHint executionHint;
private ObjectArray<TDigestState> valueSketches;
MedianAbsoluteDeviationAggregator(
String name,
ValuesSourceConfig config,
DocValueFormat format,
AggregationContext context,
Aggregator parent,
Map<String, Object> metadata,
double compression,
TDigestExecutionHint executionHint
) throws IOException {
super(name, config, context, parent, metadata);
assert config.hasValues();
this.format = Objects.requireNonNull(format);
this.compression = compression;
this.executionHint = executionHint;
this.valueSketches = context.bigArrays().newObjectArray(1);
}
private boolean hasDataForBucket(long bucketOrd) {
return bucketOrd < valueSketches.size() && valueSketches.get(bucketOrd) != null;
}
@Override
public double metric(long owningBucketOrd) {
if (hasDataForBucket(owningBucketOrd)) {
return computeMedianAbsoluteDeviation(valueSketches.get(owningBucketOrd));
} else {
return Double.NaN;
}
}
@Override
protected LeafBucketCollector getLeafCollector(SortedNumericDoubleValues values, LeafBucketCollector sub) {
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
if (values.advanceExact(doc)) {
final TDigestState valueSketch = getExistingOrNewHistogram(bigArrays(), bucket);
for (int i = 0; i < values.docValueCount(); i++) {
valueSketch.add(values.nextValue());
}
}
}
};
}
@Override
protected LeafBucketCollector getLeafCollector(DoubleValues values, LeafBucketCollector sub) {
return new LeafBucketCollectorBase(sub, values) {
@Override
public void collect(int doc, long bucket) throws IOException {
if (values.advanceExact(doc)) {
final TDigestState valueSketch = getExistingOrNewHistogram(bigArrays(), bucket);
valueSketch.add(values.doubleValue());
}
}
};
}
private TDigestState getExistingOrNewHistogram(final BigArrays bigArrays, long bucket) {
valueSketches = bigArrays.grow(valueSketches, bucket + 1);
TDigestState state = valueSketches.get(bucket);
if (state == null) {
state = TDigestState.createWithoutCircuitBreaking(compression, executionHint);
valueSketches.set(bucket, state);
}
return state;
}
@Override
public InternalAggregation buildAggregation(long bucket) throws IOException {
if (hasDataForBucket(bucket)) {
final TDigestState valueSketch = valueSketches.get(bucket);
return new InternalMedianAbsoluteDeviation(name, metadata(), format, valueSketch);
} else {
return buildEmptyAggregation();
}
}
@Override
public InternalAggregation buildEmptyAggregation() {
return InternalMedianAbsoluteDeviation.empty(name, metadata(), format, compression, executionHint);
}
@Override
public void doClose() {
Releasables.close(valueSketches);
}
}
|
MedianAbsoluteDeviationAggregator
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/main/java/io/quarkus/redis/datasource/cuckoo/CuckooCommands.java
|
{
"start": 614,
"end": 6895
}
|
interface ____<K, V> extends RedisCommands {
/**
* Execute the command <a href="https://redis.io/commands/cf.add">CF.ADD</a>.
* Summary: Adds the specified element to the specified Cuckoo filter.
* Group: cuckoo
* <p>
* If the cuckoo filter does not exist, it creates a new one.
*
* @param key the key
* @param value the value, must not be {@code null}
*/
void cfadd(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.addnx">CF.ADDNX</a>.
* Summary: Adds an item to a cuckoo filter if the item did not exist previously.
* Group: cuckoo
* <p>
* If the cuckoo filter does not exist, it creates a new one.
*
* @param key the key
* @param value the value, must not be {@code null}
* @return {@code true} if the value was added to the filter, {@code false} otherwise
*/
boolean cfaddnx(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.count">CF.COUNT</a>.
* Summary: Returns the number of times an item may be in the filter. Because this is a probabilistic data structure,
* this may not necessarily be accurate.
* Group: cuckoo
* <p>
*
* @param key the key
* @param value the value, must not be {@code null}
* @return a Uni producing the count of possible matching copies of the value in the filter
*/
long cfcount(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.del">CF.DEL</a>.
* Summary: Deletes an item once from the filter. If the item exists only once, it will be removed from the filter.
* If the item was added multiple times, it will still be present.
* Group: cuckoo
* <p>
*
* @param key the key
* @param value the value, must not be {@code null}
* @return a Uni producing {@code true} if the value was removed from the filter, {@code false} otherwise
* (the value was not found in the filter)
*/
boolean cfdel(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.exists">CF.EXISTS</a>.
* Summary: Check if an item exists in a Cuckoo filter
* Group: cuckoo
* <p>
*
* @param key the key
* @param value the value, must not be {@code null}
* @return a Uni producing {@code true} if the value was found in the filter, {@code false} otherwise.
*/
boolean cfexists(K key, V value);
/**
* Execute the command <a href="https://redis.io/commands/cf.insert">CF.INSERT</a>.
* Summary: Adds one or more items to a cuckoo filter, allowing the filter to be created with a custom capacity if
* it does not exist yet.
* Group: cuckoo
* <p>
*
* @param key the key
* @param values the values, must not be {@code null}, must not be empty, must not contain {@code null}
*/
void cfinsert(K key, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.insert">CF.INSERT</a>.
* Summary: Adds one or more items to a cuckoo filter, allowing the filter to be created with a custom capacity if
* it does not exist yet.
* Group: cuckoo
* <p>
*
* @param key the key
* @param args the extra arguments
* @param values the values, must not be {@code null}, must not be empty, must not contain {@code null}
*/
void cfinsert(K key, CfInsertArgs args, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.insertnx">CF.INSERTNX</a>.
* Summary: Adds one or more items to a cuckoo filter, allowing the filter to be created with a custom capacity if
* it does not exist yet.
* Group: cuckoo
* <p>
*
* @param key the key
* @param values the values, must not be {@code null}, must not be empty, must not contain {@code null}
* @return a list of boolean. For each added value, the corresponding boolean is {@code true} if the
* value has been added (non-existing) or {@code false} if the value was already present in the filter.
*/
List<Boolean> cfinsertnx(K key, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.insertnx">CF.INSERTNX</a>.
* Summary: Adds one or more items to a cuckoo filter, allowing the filter to be created with a custom capacity if
* it does not exist yet.
* Group: cuckoo
* <p>
*
* @param key the key
* @param args the extra arguments
* @param values the values, must not be {@code null}, must not be empty, must not contain {@code null}
* @return a list of boolean. For each added value, the corresponding boolean is {@code true} if the
* value has been added (non-existing) or {@code false} if the value was already present in the filter.
*/
List<Boolean> cfinsertnx(K key, CfInsertArgs args, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.mexists">CF.MEXISTS</a>.
* Summary: Check if an item exists in a Cuckoo filter
* Group: cuckoo
* <p>
*
* @param key the key
* @param values the values, must not be {@code null}, must not contain {@code null}, must not be empty
* @return a list of boolean indicating, for each corresponding value, if the value exists in the
* filter or not.
*/
List<Boolean> cfmexists(K key, V... values);
/**
* Execute the command <a href="https://redis.io/commands/cf.reserve">CF.RESERVE</a>.
* Summary: Create a Cuckoo Filter as key with a single sub-filter for the initial amount of capacity for items.
* Group: cuckoo
* <p>
*
* @param key the key
* @param capacity the capacity
*/
void cfreserve(K key, long capacity);
/**
* Execute the command <a href="https://redis.io/commands/cf.reserve">CF.RESERVE</a>.
* Summary: Create a Cuckoo Filter as key with a single sub-filter for the initial amount of capacity for items.
* Group: cuckoo
* <p>
*
* @param key the key
* @param capacity the capacity
* @param args the extra parameters
*/
void cfreserve(K key, long capacity, CfReserveArgs args);
}
|
CuckooCommands
|
java
|
quarkusio__quarkus
|
integration-tests/smallrye-jwt-oidc-webapp/src/main/java/io/quarkus/it/keycloak/PublicResource.java
|
{
"start": 181,
"end": 561
}
|
class ____ {
@Inject
Pool pool;
@Path("/token-state-count")
@GET
public int tokenStateCount() {
return pool
.query("SELECT COUNT(*) FROM oidc_db_token_state_manager")
.execute()
.map(rs -> rs.iterator().next().getInteger(0))
.await()
.indefinitely();
}
}
|
PublicResource
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/rest/messages/EmptyResponseBody.java
|
{
"start": 913,
"end": 1232
}
|
class ____ implements ResponseBody {
private static final EmptyResponseBody INSTANCE = new EmptyResponseBody();
private EmptyResponseBody() {}
private Object readResolve() {
return INSTANCE;
}
public static EmptyResponseBody getInstance() {
return INSTANCE;
}
}
|
EmptyResponseBody
|
java
|
alibaba__nacos
|
k8s-sync/src/main/java/com/alibaba/nacos/k8s/sync/Loggers.java
|
{
"start": 770,
"end": 891
}
|
class ____ {
public static final Logger MAIN = LoggerFactory.getLogger("com.alibaba.nacos.k8s.sync.main");
}
|
Loggers
|
java
|
apache__flink
|
flink-streaming-java/src/test/java/org/apache/flink/streaming/api/operators/async/AsyncWaitOperatorTest.java
|
{
"start": 63974,
"end": 64571
}
|
class ____<IN> implements AsyncFunction<IN, IN> {
private static final long serialVersionUID = -4214078239267288636L;
private transient CompletableFuture<Void> trigger;
private ControllableAsyncFunction(CompletableFuture<Void> trigger) {
this.trigger = Preconditions.checkNotNull(trigger);
}
@Override
public void asyncInvoke(IN input, ResultFuture<IN> resultFuture) throws Exception {
trigger.thenAccept(v -> resultFuture.complete(Collections.singleton(input)));
}
}
private static
|
ControllableAsyncFunction
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/ReflectionUtils.java
|
{
"start": 7443,
"end": 7627
}
|
class ____ the supplied name
* and no parameters. Searches all superclasses up to {@code Object}.
* <p>Returns {@code null} if no {@link Method} can be found.
* @param clazz the
|
with
|
java
|
apache__kafka
|
metadata/src/test/java/org/apache/kafka/controller/metrics/QuorumControllerMetricsTest.java
|
{
"start": 1221,
"end": 11171
}
|
class ____ {
@Test
public void testMetricNames() {
MetricsRegistry registry = new MetricsRegistry();
MockTime time = new MockTime();
try {
try (QuorumControllerMetrics metrics = new QuorumControllerMetrics(
Optional.of(registry),
time,
9000)) {
metrics.addTimeSinceLastHeartbeatMetric(1);
Set<String> expected = Set.of(
"kafka.controller:type=ControllerEventManager,name=EventQueueProcessingTimeMs",
"kafka.controller:type=ControllerEventManager,name=EventQueueTimeMs",
"kafka.controller:type=ControllerEventManager,name=AvgIdleRatio",
"kafka.controller:type=KafkaController,name=ActiveControllerCount",
"kafka.controller:type=KafkaController,name=EventQueueOperationsStartedCount",
"kafka.controller:type=KafkaController,name=EventQueueOperationsTimedOutCount",
"kafka.controller:type=KafkaController,name=LastAppliedRecordLagMs",
"kafka.controller:type=KafkaController,name=LastAppliedRecordOffset",
"kafka.controller:type=KafkaController,name=LastAppliedRecordTimestamp",
"kafka.controller:type=KafkaController,name=LastCommittedRecordOffset",
"kafka.controller:type=KafkaController,name=NewActiveControllersCount",
"kafka.controller:type=KafkaController,name=TimedOutBrokerHeartbeatCount",
"kafka.controller:type=KafkaController,name=TimeSinceLastHeartbeatReceivedMs,broker=1"
);
ControllerMetricsTestUtils.assertMetricsForTypeEqual(registry, "kafka.controller", expected);
}
ControllerMetricsTestUtils.assertMetricsForTypeEqual(registry, "kafka.controller",
Set.of());
} finally {
registry.shutdown();
}
}
@Test
public void testUpdateEventQueueTime() {
MetricsRegistry registry = new MetricsRegistry();
MockTime time = new MockTime();
try (QuorumControllerMetrics metrics = new QuorumControllerMetrics(Optional.of(registry), time, 9000)) {
metrics.updateEventQueueTime(1000);
assertMetricHistogram(registry, metricName("ControllerEventManager", "EventQueueTimeMs"), 1, 1000);
} finally {
registry.shutdown();
}
}
@Test
public void testUpdateEventQueueProcessingTime() {
MetricsRegistry registry = new MetricsRegistry();
MockTime time = new MockTime();
try (QuorumControllerMetrics metrics = new QuorumControllerMetrics(Optional.of(registry), time, 9000)) {
metrics.updateEventQueueProcessingTime(1000);
assertMetricHistogram(registry, metricName("ControllerEventManager", "EventQueueProcessingTimeMs"), 1, 1000);
} finally {
registry.shutdown();
}
}
@Test
public void testLastAppliedRecordMetrics() {
MetricsRegistry registry = new MetricsRegistry();
MockTime time = new MockTime();
time.sleep(1000);
try (QuorumControllerMetrics metrics = new QuorumControllerMetrics(Optional.of(registry), time, 9000)) {
metrics.setLastAppliedRecordOffset(100);
metrics.setLastAppliedRecordTimestamp(500);
metrics.setLastCommittedRecordOffset(50);
metrics.setActive(true);
for (int i = 0; i < 2; i++) {
metrics.incrementTimedOutHeartbeats();
}
for (int i = 0; i < 3; i++) {
metrics.incrementOperationsStarted();
}
for (int i = 0; i < 4; i++) {
metrics.incrementOperationsTimedOut();
}
for (int i = 0; i < 5; i++) {
metrics.incrementNewActiveControllers();
}
@SuppressWarnings("unchecked")
Gauge<Long> lastAppliedRecordOffset = (Gauge<Long>) registry
.allMetrics()
.get(metricName("KafkaController", "LastAppliedRecordOffset"));
assertEquals(100, lastAppliedRecordOffset.value());
@SuppressWarnings("unchecked")
Gauge<Long> lastAppliedRecordTimestamp = (Gauge<Long>) registry
.allMetrics()
.get(metricName("KafkaController", "LastAppliedRecordTimestamp"));
assertEquals(500, lastAppliedRecordTimestamp.value());
@SuppressWarnings("unchecked")
Gauge<Long> lastAppliedRecordLagMs = (Gauge<Long>) registry
.allMetrics()
.get(metricName("KafkaController", "LastAppliedRecordLagMs"));
assertEquals(time.milliseconds() - 500, lastAppliedRecordLagMs.value());
@SuppressWarnings("unchecked")
Gauge<Long> lastCommittedRecordOffset = (Gauge<Long>) registry
.allMetrics()
.get(metricName("KafkaController", "LastCommittedRecordOffset"));
assertEquals(50, lastCommittedRecordOffset.value());
@SuppressWarnings("unchecked")
Gauge<Long> timedOutBrokerHeartbeats = (Gauge<Long>) registry
.allMetrics()
.get(metricName("KafkaController", "TimedOutBrokerHeartbeatCount"));
assertEquals(2L, timedOutBrokerHeartbeats.value());
@SuppressWarnings("unchecked")
Gauge<Long> operationsStarted = (Gauge<Long>) registry
.allMetrics()
.get(metricName("KafkaController", "EventQueueOperationsStartedCount"));
assertEquals(3L, operationsStarted.value());
@SuppressWarnings("unchecked")
Gauge<Long> operationsTimedOut = (Gauge<Long>) registry
.allMetrics()
.get(metricName("KafkaController", "EventQueueOperationsTimedOutCount"));
assertEquals(4L, operationsTimedOut.value());
@SuppressWarnings("unchecked")
Gauge<Long> newActiveControllers = (Gauge<Long>) registry
.allMetrics()
.get(metricName("KafkaController", "NewActiveControllersCount"));
assertEquals(5L, newActiveControllers.value());
} finally {
registry.shutdown();
}
}
@SuppressWarnings("unchecked")
@Test
public void testTimeSinceLastHeartbeatReceivedMs() {
MetricsRegistry registry = new MetricsRegistry();
MockTime time = new MockTime();
int brokerId = 1;
int sessionTimeoutMs = 9000;
try (QuorumControllerMetrics metrics = new QuorumControllerMetrics(Optional.of(registry), time, sessionTimeoutMs)) {
metrics.addTimeSinceLastHeartbeatMetric(1);
int numMetrics = registry.allMetrics().size();
Gauge<Integer> timeSinceLastHeartbeatReceivedMs = (Gauge<Integer>) registry.allMetrics().get(metricName("KafkaController", "TimeSinceLastHeartbeatReceivedMs", "broker=1"));
metrics.updateBrokerContactTime(brokerId);
time.sleep(1000);
assertEquals(1000, timeSinceLastHeartbeatReceivedMs.value());
metrics.updateBrokerContactTime(brokerId);
assertEquals(0, timeSinceLastHeartbeatReceivedMs.value());
time.sleep(100000);
assertEquals(sessionTimeoutMs, timeSinceLastHeartbeatReceivedMs.value());
metrics.removeTimeSinceLastHeartbeatMetrics();
assertEquals(numMetrics - 1, registry.allMetrics().size());
} finally {
registry.shutdown();
}
}
@SuppressWarnings("unchecked") // do not warn about Gauge typecast.
@Test
public void testAvgIdleRatio() {
final double delta = 0.001;
MetricsRegistry registry = new MetricsRegistry();
MockTime time = new MockTime();
try (QuorumControllerMetrics metrics = new QuorumControllerMetrics(Optional.of(registry), time, 9000)) {
Gauge<Double> avgIdleRatio = (Gauge<Double>) registry.allMetrics().get(metricName("ControllerEventManager", "AvgIdleRatio"));
// No idle time recorded yet; returns default ratio of 1.0
assertEquals(1.0, avgIdleRatio.value(), delta);
// First recording is dropped to establish the interval start time
// This is because TimeRatio needs an initial timestamp to measure intervals from
metrics.updateIdleTime(10, time.milliseconds());
time.sleep(40);
metrics.updateIdleTime(20, time.milliseconds());
// avgIdleRatio = (20ms idle) / (40ms interval) = 0.5
assertEquals(0.5, avgIdleRatio.value(), delta);
time.sleep(20);
metrics.updateIdleTime(1, time.milliseconds());
// avgIdleRatio = (1ms idle) / (20ms interval) = 0.05
assertEquals(0.05, avgIdleRatio.value(), delta);
} finally {
registry.shutdown();
}
}
private static void assertMetricHistogram(MetricsRegistry registry, MetricName metricName, long count, double sum) {
Histogram histogram = (Histogram) registry.allMetrics().get(metricName);
assertEquals(count, histogram.count());
assertEquals(sum, histogram.sum(), .1);
}
private static MetricName metricName(String type, String name) {
String mBeanName = String.format("kafka.controller:type=%s,name=%s", type, name);
return new MetricName("kafka.controller", type, name, null, mBeanName);
}
private static MetricName metricName(String type, String name, String scope) {
String mBeanName = String.format("kafka.controller:type=%s,name=%s,%s", type, name, scope);
return new MetricName("kafka.controller", type, name, scope, mBeanName);
}
}
|
QuorumControllerMetricsTest
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableBuffer.java
|
{
"start": 1107,
"end": 1960
}
|
class ____<T, U extends Collection<? super T>> extends AbstractObservableWithUpstream<T, U> {
final int count;
final int skip;
final Supplier<U> bufferSupplier;
public ObservableBuffer(ObservableSource<T> source, int count, int skip, Supplier<U> bufferSupplier) {
super(source);
this.count = count;
this.skip = skip;
this.bufferSupplier = bufferSupplier;
}
@Override
protected void subscribeActual(Observer<? super U> t) {
if (skip == count) {
BufferExactObserver<T, U> bes = new BufferExactObserver<>(t, count, bufferSupplier);
if (bes.createBuffer()) {
source.subscribe(bes);
}
} else {
source.subscribe(new BufferSkipObserver<>(t, count, skip, bufferSupplier));
}
}
static final
|
ObservableBuffer
|
java
|
quarkusio__quarkus
|
integration-tests/opentelemetry-reactive-messaging/src/main/java/io/quarkus/it/opentelemetry/TracedKafkaProcessor.java
|
{
"start": 271,
"end": 494
}
|
class ____ {
@Inject
TracedService tracedService;
@Incoming("traces-in2")
@Outgoing("traces-processed")
String process(String msg) {
return tracedService.call() + msg;
}
}
|
TracedKafkaProcessor
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-sls/src/main/java/org/apache/hadoop/yarn/sls/scheduler/SchedulerWrapper.java
|
{
"start": 1622,
"end": 2225
}
|
interface ____ {
SchedulerMetrics getSchedulerMetrics();
Tracker getTracker();
String getRealQueueName(String queue) throws YarnException;
void propagatedHandle(SchedulerEvent schedulerEvent);
Allocation allocatePropagated(ApplicationAttemptId attemptId,
List<ResourceRequest> resourceRequests,
List<SchedulingRequest> schedulingRequests,
List<ContainerId> containerIds,
List<String> blacklistAdditions,
List<String> blacklistRemovals,
ContainerUpdates updateRequests);
void setSLSRunner(SLSRunner runner);
SLSRunner getSLSRunner();
}
|
SchedulerWrapper
|
java
|
elastic__elasticsearch
|
x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/planner/QueryFolder.java
|
{
"start": 2515,
"end": 3094
}
|
class ____ extends QueryFoldingRule<FilterExec> {
@Override
protected PhysicalPlan rule(FilterExec plan, EsQueryExec exec) {
QueryContainer qContainer = exec.queryContainer();
Query query = QueryTranslator.toQuery(plan.condition());
if (qContainer.query() != null || query != null) {
query = ExpressionTranslators.and(plan.source(), qContainer.query(), query);
}
qContainer = qContainer.with(query);
return exec.with(qContainer);
}
}
private static
|
FoldFilter
|
java
|
processing__processing4
|
java/src/processing/mode/java/RuntimePathBuilder.java
|
{
"start": 19665,
"end": 21793
}
|
class ____ does not contain empty string (home dir)
return Arrays.stream(classPathString.split(File.pathSeparator))
.filter(p -> p != null && !p.trim().isEmpty())
.distinct()
.collect(Collectors.toList());
}
/**
* Determine if a package is ignorable because it is standard.
* This is different from being ignorable in imports recommendations.
*
* @param packageName The name of the package to evaluate.
* @return True if the package is part of standard Java (like java.lang.*). False otherwise.
*/
protected boolean isIgnorableForSketchPath(String packageName) {
return (packageName.startsWith("java.") || packageName.startsWith("javax."));
}
/**
* Find a fully qualified jar name.
*
* @param jarName The jar name like "javafx.base.jar" for which a
* fully qualified entry should be created.
* @return The fully qualified classpath entry like ".../Processing.app/Contents/PlugIns/
* adoptopenjdk-11.0.1.jdk/Contents/Home/lib/javafx.base.jar"
*/
protected String findFullyQualifiedJarName(String jarName) {
StringJoiner joiner = new StringJoiner(File.separator);
joiner.add(Platform.getJavaHome().getAbsolutePath());
joiner.add("lib");
joiner.add(jarName);
return joiner.toString();
}
/**
* Build a classpath entry for a module.
*
* @param moduleName The name of the module like "java.base.jmod".
* @return The fully qualified classpath entry like ".../Processing.app/Contents/PlugIns/
* adoptopenjdk-11.0.1.jdk/Contents/Home/jmods/java.base.jmod"
*/
protected String buildForModule(String moduleName) {
StringJoiner jmodPathJoiner = new StringJoiner(File.separator);
jmodPathJoiner.add(Platform.getJavaHome().getAbsolutePath());
jmodPathJoiner.add("jmods");
jmodPathJoiner.add(moduleName);
return jmodPathJoiner.toString();
}
/*
* ============================================
* === Interface definitions and utilities. ===
* ============================================
*
* Note that these are protected so that they can be tested. The
|
path
|
java
|
elastic__elasticsearch
|
x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/analyze/RepositoryAnalyzeAction.java
|
{
"start": 36124,
"end": 46118
}
|
class ____ implements Runnable {
private final Random random;
private final String registerName;
private final List<DiscoveryNode> nodes;
private final AtomicBoolean otherAnalysisComplete;
private int currentValue; // actions run in strict sequence so no need for synchronization
UncontendedRegisterAnalysis(Random random, List<DiscoveryNode> nodes, AtomicBoolean otherAnalysisComplete) {
this.random = random;
this.registerName = UNCONTENDED_REGISTER_NAME_PREFIX + UUIDs.randomBase64UUID(random);
this.nodes = nodes;
this.otherAnalysisComplete = otherAnalysisComplete;
}
private final ActionListener<ActionResponse.Empty> stepListener = new ActionListener<>() {
@Override
public void onResponse(ActionResponse.Empty ignored) {
currentValue += 1;
run();
}
@Override
public void onFailure(Exception e) {
fail(e);
}
};
@Override
public void run() {
if (isRunning() == false) {
return;
}
if (currentValue <= request.getRegisterOperationCount() || otherAnalysisComplete.get() == false) {
// complete at least request.getRegisterOperationCount() steps, but we may as well keep running for longer too
logger.trace("[{}] incrementing uncontended register [{}] from [{}]", blobPath, registerName, currentValue);
transportService.sendChildRequest(
nodes.get(currentValue < nodes.size() ? currentValue : random.nextInt(nodes.size())),
UncontendedRegisterAnalyzeAction.NAME,
new UncontendedRegisterAnalyzeAction.Request(request.getRepositoryName(), blobPath, registerName, currentValue),
task,
TransportRequestOptions.EMPTY,
new ActionListenerResponseHandler<>(
ActionListener.releaseAfter(stepListener, requestRefs.acquire()),
in -> ActionResponse.Empty.INSTANCE,
TransportResponseHandler.TRANSPORT_WORKER
)
);
} else {
logger.trace("[{}] resetting uncontended register [{}] from [{}]", blobPath, registerName, currentValue);
transportService.getThreadPool()
.executor(ThreadPool.Names.SNAPSHOT)
.execute(
ActionRunnable.<Void>wrap(
ActionListener.releaseAfter(
ActionListener.wrap(
r -> logger.trace("[{}] uncontended register [{}] analysis succeeded", blobPath, registerName),
AsyncAction.this::fail
),
requestRefs.acquire()
),
l -> UncontendedRegisterAnalyzeAction.verifyFinalValue(
new UncontendedRegisterAnalyzeAction.Request(
request.getRepositoryName(),
blobPath,
registerName,
currentValue
),
repository,
l
)
)
);
}
}
}
private void runCleanUp() {
transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT).execute(ActionRunnable.wrap(listener, l -> {
final long listingStartTimeNanos = System.nanoTime();
ensureConsistentListing();
final long deleteStartTimeNanos = System.nanoTime();
deleteContainer();
sendResponse(listingStartTimeNanos, deleteStartTimeNanos);
}));
}
private void ensureConsistentListing() {
if (timeoutTimeMillis < currentTimeMillisSupplier.getAsLong() || task.isCancelled()) {
logger.warn(
"analysis of repository [{}] failed before cleanup phase, attempting best-effort cleanup "
+ "but you may need to manually remove [{}]",
request.getRepositoryName(),
blobPath
);
isRunning(); // set failure if not already set
} else {
logger.trace(
"all tasks completed, checking expected blobs exist in [{}:{}] before cleanup",
request.repositoryName,
blobPath
);
try {
final BlobContainer blobContainer = getBlobContainer();
final Set<String> missingBlobs = new HashSet<>(expectedBlobs);
final Map<String, BlobMetadata> blobsMap = blobContainer.listBlobs(OperationPurpose.REPOSITORY_ANALYSIS);
missingBlobs.removeAll(blobsMap.keySet());
if (missingBlobs.isEmpty()) {
logger.trace("all expected blobs found, cleaning up [{}:{}]", request.getRepositoryName(), blobPath);
} else {
final RepositoryVerificationException repositoryVerificationException = new RepositoryVerificationException(
request.repositoryName,
"expected blobs " + missingBlobs + " missing in [" + request.repositoryName + ":" + blobPath + "]"
);
logger.debug("failing due to missing blobs", repositoryVerificationException);
fail(repositoryVerificationException);
}
} catch (Exception e) {
logger.debug(() -> format("failure during cleanup of [%s:%s]", request.getRepositoryName(), blobPath), e);
fail(e);
}
}
}
private void deleteContainer() {
try {
final BlobContainer blobContainer = getBlobContainer();
blobContainer.delete(OperationPurpose.REPOSITORY_ANALYSIS);
if (failure.get() != null) {
return;
}
final Map<String, BlobMetadata> blobsMap = blobContainer.listBlobs(OperationPurpose.REPOSITORY_ANALYSIS);
if (blobsMap.isEmpty() == false) {
final RepositoryVerificationException repositoryVerificationException = new RepositoryVerificationException(
request.repositoryName,
"failed to clean up blobs " + blobsMap.keySet()
);
logger.debug("failing due to leftover blobs", repositoryVerificationException);
fail(repositoryVerificationException);
}
} catch (Exception e) {
fail(e);
}
}
private void sendResponse(final long listingStartTimeNanos, final long deleteStartTimeNanos) {
final Exception exception = failure.get();
if (exception == null) {
final long completionTimeNanos = System.nanoTime();
logger.trace("[{}] completed successfully", request.getDescription());
listener.onResponse(
new Response(
transportService.getLocalNode().getId(),
transportService.getLocalNode().getName(),
request.getRepositoryName(),
request.blobCount,
request.concurrency,
request.readNodeCount,
request.earlyReadNodeCount,
request.maxBlobSize,
request.maxTotalDataSize,
request.seed,
request.rareActionProbability,
blobPath,
summary.build(),
responses,
deleteStartTimeNanos - listingStartTimeNanos,
completionTimeNanos - deleteStartTimeNanos
)
);
} else {
logger.debug(() -> "analysis of repository [" + request.repositoryName + "] failed", exception);
final String failureDetail;
if (exception == analysisCancelledException) {
failureDetail = "Repository analysis was cancelled.";
} else if (exception == analysisTimedOutException) {
failureDetail = Strings.format("""
Repository analysis timed out. Consider specifying a longer timeout using the [?timeout] request parameter. See \
[%s] for more information.""", ReferenceDocs.SNAPSHOT_REPOSITORY_ANALYSIS);
} else {
failureDetail = repository.getAnalysisFailureExtraDetail();
}
listener.onFailure(new RepositoryVerificationException(request.getRepositoryName(), Strings.format("""
%s Elasticsearch attempted to remove the data it wrote at [%s] but may have left some behind. If so, \
please now remove this data manually.""", failureDetail, blobPath), exception));
}
}
}
public static
|
UncontendedRegisterAnalysis
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java
|
{
"start": 2035,
"end": 22837
}
|
class ____ extends ESSingleNodeTestCase {
private static final int REQUEST_TIMEOUT_SECONDS = 10;
private QueryRulesIndexService queryRulesIndexService;
@Before
public void setup() {
Set<Setting<?>> settingsSet = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
settingsSet.addAll(QueryRulesConfig.getSettings());
ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, settingsSet);
this.queryRulesIndexService = new QueryRulesIndexService(client(), clusterSettings);
}
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
List<Class<? extends Plugin>> plugins = new ArrayList<>(super.getPlugins());
plugins.add(TestPlugin.class);
return plugins;
}
public void testEmptyState() throws Exception {
expectThrows(ResourceNotFoundException.class, () -> awaitGetQueryRuleset("i-dont-exist"));
expectThrows(ResourceNotFoundException.class, () -> awaitDeleteQueryRuleset("i-dont-exist"));
QueryRulesIndexService.QueryRulesetResult listResults = awaitListQueryRulesets(0, 10);
assertThat(listResults.totalResults(), equalTo(0L));
}
public void testUpdateQueryRuleset() throws Exception {
{
final QueryRule myQueryRule1 = new QueryRule(
"my_rule1",
QueryRuleType.PINNED,
List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("foo"))),
Map.of("ids", List.of("id1", "id2")),
EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
);
final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", Collections.singletonList(myQueryRule1));
DocWriteResponse resp = awaitPutQueryRuleset(myQueryRuleset);
assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK)));
assertThat(resp.getIndex(), equalTo(QUERY_RULES_CONCRETE_INDEX_NAME));
QueryRuleset getQueryRuleset = awaitGetQueryRuleset(myQueryRuleset.id());
assertThat(getQueryRuleset, equalTo(myQueryRuleset));
}
final QueryRule myQueryRule1 = new QueryRule(
"my_rule1",
QueryRuleType.PINNED,
List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("foo"))),
Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id1"), Map.of("_index", "my_index2", "_id", "id2"))),
EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
);
final QueryRule myQueryRule2 = new QueryRule(
"my_rule2",
QueryRuleType.PINNED,
List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("bar"))),
Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id3"), Map.of("_index", "my_index2", "_id", "id4"))),
EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
);
final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", List.of(myQueryRule1, myQueryRule2));
DocWriteResponse newResp = awaitPutQueryRuleset(myQueryRuleset);
assertThat(newResp.status(), equalTo(RestStatus.OK));
assertThat(newResp.getIndex(), equalTo(QUERY_RULES_CONCRETE_INDEX_NAME));
QueryRuleset getQueryRuleset = awaitGetQueryRuleset(myQueryRuleset.id());
assertThat(getQueryRuleset, equalTo(myQueryRuleset));
}
/**
 * Verifies single-rule CRUD semantics: putting a rule into a missing ruleset
 * implicitly creates the ruleset (201 CREATED), re-putting the same rule id
 * updates it in place (200 OK), and putting a new rule id appends to the
 * existing ruleset (201 CREATED).
 */
public void testUpdateQueryRule() throws Exception {
    // Creating a rule in a nonexistent ruleset creates the ruleset
    final QueryRule myQueryRule1 = new QueryRule(
        "my_rule1",
        QueryRuleType.PINNED,
        List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("foo"))),
        Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id1"), Map.of("_index", "my_index2", "_id", "id2"))),
        EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
    );
    final String rulesetId = "my_ruleset";
    PutQueryRuleAction.Response newResp = awaitPutQueryRule(rulesetId, myQueryRule1);
    assertThat(newResp.status(), equalTo(RestStatus.CREATED));
    // The implicitly-created ruleset contains exactly the rule we put.
    QueryRuleset getQueryRuleset = awaitGetQueryRuleset(rulesetId);
    assertThat(getQueryRuleset, equalTo(new QueryRuleset("my_ruleset", List.of(myQueryRule1))));
    QueryRule getQueryRule = awaitGetQueryRule(rulesetId, "my_rule1");
    assertThat(getQueryRule, equalTo(myQueryRule1));
    // Updating the same query rule in the ruleset returns OK instead of CREATED
    final QueryRule updatedQueryRule1 = new QueryRule(
        "my_rule1",
        QueryRuleType.PINNED,
        List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("bar"))),
        Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id2"), Map.of("_index", "my_index2", "_id", "id1"))),
        EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
    );
    PutQueryRuleAction.Response updateResp = awaitPutQueryRule(rulesetId, updatedQueryRule1);
    assertThat(updateResp.status(), equalTo(RestStatus.OK));
    // The update fully replaces the previous rule content.
    QueryRuleset getUpdatedQueryRuleset = awaitGetQueryRuleset(rulesetId);
    assertThat(getUpdatedQueryRuleset, equalTo(new QueryRuleset("my_ruleset", List.of(updatedQueryRule1))));
    QueryRule getUpdatedQueryRule = awaitGetQueryRule(rulesetId, "my_rule1");
    assertThat(getUpdatedQueryRule, equalTo(updatedQueryRule1));
    // Creating a new rule in an existing ruleset
    final QueryRule myQueryRule2 = new QueryRule(
        "my_rule2",
        QueryRuleType.PINNED,
        List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("bar"))),
        Map.of("docs", List.of(Map.of("_index", "my_index1", "_id", "id3"), Map.of("_index", "my_index2", "_id", "id4"))),
        EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
    );
    PutQueryRuleAction.Response addResp = awaitPutQueryRule(rulesetId, myQueryRule2);
    assertThat(addResp.status(), equalTo(RestStatus.CREATED));
    // The ruleset now contains both rules, with the first one still updated.
    QueryRuleset getQueryRuleset2 = awaitGetQueryRuleset(rulesetId);
    assertThat(getQueryRuleset2, equalTo(new QueryRuleset("my_ruleset", List.of(updatedQueryRule1, myQueryRule2))));
    QueryRule getQueryRule2 = awaitGetQueryRule(rulesetId, "my_rule2");
    assertThat(getQueryRule2, equalTo(myQueryRule2));
}
/**
 * Verifies paginated listing of rulesets: seeds 10 rulesets (two rules each),
 * then checks a full first page (from=0) and a partial second page (from=5),
 * including the per-ruleset criteria-type counts reported by the listing.
 */
public void testListQueryRulesets() throws Exception {
    int numRulesets = 10;
    for (int i = 0; i < numRulesets; i++) {
        // Each ruleset gets one EXACT+GTE rule and one FUZZY+GTE rule, so the
        // criteria-type count map below expects EXACT=1, FUZZY=1, GTE=2.
        final List<QueryRule> rules = List.of(
            new QueryRule(
                "my_rule_" + i,
                QueryRuleType.PINNED,
                List.of(
                    new QueryRuleCriteria(EXACT, "query_string", List.of("foo" + i)),
                    new QueryRuleCriteria(GTE, "query_string", List.of(i))
                ),
                Map.of("ids", List.of("id1", "id2")),
                EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
            ),
            new QueryRule(
                "my_rule_" + i + "_" + (i + 1),
                QueryRuleType.PINNED,
                List.of(
                    new QueryRuleCriteria(FUZZY, "query_string", List.of("bar" + i)),
                    new QueryRuleCriteria(GTE, "user.age", List.of(i))
                ),
                Map.of("ids", List.of("id3", "id4")),
                EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
            )
        );
        final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset_" + i, rules);
        DocWriteResponse resp = awaitPutQueryRuleset(myQueryRuleset);
        assertThat(resp.status(), equalTo(RestStatus.CREATED));
        assertThat(resp.getIndex(), equalTo(QUERY_RULES_CONCRETE_INDEX_NAME));
    }
    {
        // Full page: all 10 rulesets returned, in seeded order.
        QueryRulesIndexService.QueryRulesetResult searchResponse = awaitListQueryRulesets(0, 10);
        final List<QueryRulesetListItem> rulesets = searchResponse.rulesets();
        assertNotNull(rulesets);
        assertThat(rulesets.size(), equalTo(10));
        assertThat(searchResponse.totalResults(), equalTo(10L));
        for (int i = 0; i < numRulesets; i++) {
            String rulesetId = rulesets.get(i).rulesetId();
            assertThat(rulesetId, equalTo("my_ruleset_" + i));
        }
    }
    {
        // Offset page: from=5 yields the last 5 rulesets; total count is unchanged.
        QueryRulesIndexService.QueryRulesetResult searchResponse = awaitListQueryRulesets(5, 10);
        final List<QueryRulesetListItem> rulesets = searchResponse.rulesets();
        assertNotNull(rulesets);
        assertThat(rulesets.size(), equalTo(5));
        assertThat(searchResponse.totalResults(), equalTo(10L));
        for (int i = 0; i < 5; i++) {
            int index = i + 5;
            QueryRulesetListItem ruleset = rulesets.get(i);
            String rulesetId = ruleset.rulesetId();
            assertThat(rulesetId, equalTo("my_ruleset_" + index));
            Map<QueryRuleCriteriaType, Integer> criteriaTypeCountMap = ruleset.criteriaTypeToCountMap();
            assertThat(criteriaTypeCountMap.size(), equalTo(3));
            assertThat(criteriaTypeCountMap.get(EXACT), equalTo(1));
            assertThat(criteriaTypeCountMap.get(FUZZY), equalTo(1));
            assertThat(criteriaTypeCountMap.get(GTE), equalTo(2));
        }
    }
}
/**
 * Verifies ruleset deletion: repeatedly upserting the same ruleset succeeds
 * (CREATED first, OK on overwrite), and after deletion a subsequent get
 * fails with {@link ResourceNotFoundException}.
 */
public void testDeleteQueryRuleset() throws Exception {
    for (int i = 0; i < 5; i++) {
        final QueryRule myQueryRule1 = new QueryRule(
            "my_rule1",
            QueryRuleType.PINNED,
            List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("foo"))),
            Map.of("ids", List.of("id1", "id2")),
            EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
        );
        final QueryRule myQueryRule2 = new QueryRule(
            "my_rule2",
            QueryRuleType.PINNED,
            List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("bar"))),
            Map.of("ids", List.of("id3", "id4")),
            EnterpriseSearchModuleTestUtils.randomQueryRulePriority()
        );
        final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", List.of(myQueryRule1, myQueryRule2));
        // First iteration creates the ruleset; later iterations overwrite it.
        DocWriteResponse resp = awaitPutQueryRuleset(myQueryRuleset);
        assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK)));
        assertThat(resp.getIndex(), equalTo(QUERY_RULES_CONCRETE_INDEX_NAME));
        QueryRuleset getQueryRuleset = awaitGetQueryRuleset(myQueryRuleset.id());
        assertThat(getQueryRuleset, equalTo(myQueryRuleset));
    }
    DeleteResponse resp = awaitDeleteQueryRuleset("my_ruleset");
    assertThat(resp.status(), equalTo(RestStatus.OK));
    // The ruleset is gone once deleted.
    expectThrows(ResourceNotFoundException.class, () -> awaitGetQueryRuleset("my_ruleset"));
}
/**
 * Verifies single-rule deletion: deleting one rule leaves the other rules in
 * the ruleset intact, and deleting the last remaining rule removes the
 * ruleset itself.
 */
public void testDeleteQueryRule() throws Exception {
    for (int i = 0; i < 5; i++) {
        final QueryRule myQueryRule1 = new QueryRule(
            "my_rule1",
            QueryRuleType.PINNED,
            List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("foo"))),
            Map.of("ids", List.of("id1", "id2")),
            randomBoolean() ? randomIntBetween(0, 100) : null
        );
        final QueryRule myQueryRule2 = new QueryRule(
            "my_rule2",
            QueryRuleType.PINNED,
            List.of(new QueryRuleCriteria(EXACT, "query_string", List.of("bar"))),
            Map.of("ids", List.of("id3", "id4")),
            randomBoolean() ? randomIntBetween(0, 100) : null
        );
        final QueryRuleset myQueryRuleset = new QueryRuleset("my_ruleset", List.of(myQueryRule1, myQueryRule2));
        // Each iteration recreates both rules, so deleting my_rule1 below can repeat.
        DocWriteResponse resp = awaitPutQueryRuleset(myQueryRuleset);
        assertThat(resp.status(), anyOf(equalTo(RestStatus.CREATED), equalTo(RestStatus.OK)));
        assertThat(resp.getIndex(), equalTo(QUERY_RULES_CONCRETE_INDEX_NAME));
        QueryRule getQueryRule = awaitGetQueryRule("my_ruleset", "my_rule1");
        assertThat(getQueryRule, equalTo(myQueryRule1));
        DeleteQueryRuleAction.Response deleteResp = awaitDeleteQueryRule("my_ruleset", "my_rule1");
        assertThat(deleteResp.isAcknowledged(), equalTo(true));
        // Deleted rule is gone; the sibling rule is untouched.
        expectThrows(ResourceNotFoundException.class, () -> awaitGetQueryRule("my_ruleset", "my_rule1"));
        QueryRule getQueryRule2 = awaitGetQueryRule("my_ruleset", "my_rule2");
        assertThat(getQueryRule2, equalTo(myQueryRule2));
    }
    // Deleting the last rule in the ruleset should delete the ruleset
    DeleteQueryRuleAction.Response deleteResp = awaitDeleteQueryRule("my_ruleset", "my_rule2");
    assertThat(deleteResp.isAcknowledged(), equalTo(true));
    expectThrows(ResourceNotFoundException.class, () -> awaitGetQueryRule("my_ruleset", "my_rule2"));
    expectThrows(ResourceNotFoundException.class, () -> awaitGetQueryRuleset("my_ruleset"));
}
/**
 * Synchronously puts a ruleset: blocks on the async service call until it
 * completes (or times out), rethrows any failure, and returns the response.
 */
private DocWriteResponse awaitPutQueryRuleset(QueryRuleset queryRuleset) throws Exception {
    final CountDownLatch done = new CountDownLatch(1);
    final AtomicReference<DocWriteResponse> result = new AtomicReference<>();
    final AtomicReference<Exception> failure = new AtomicReference<>();
    // Capture either outcome and release the latch either way.
    queryRulesIndexService.putQueryRuleset(queryRuleset, ActionListener.wrap(r -> {
        result.set(r);
        done.countDown();
    }, e -> {
        failure.set(e);
        done.countDown();
    }));
    assertTrue("Timeout waiting for put request", done.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS));
    if (failure.get() != null) {
        throw failure.get();
    }
    assertNotNull("Received null response from put request", result.get());
    return result.get();
}
/**
 * Synchronously puts a single rule into a ruleset: blocks on the async
 * service call, rethrows any failure, and returns the put response.
 */
private PutQueryRuleAction.Response awaitPutQueryRule(String queryRulesetId, QueryRule queryRule) throws Exception {
    final CountDownLatch done = new CountDownLatch(1);
    final AtomicReference<PutQueryRuleAction.Response> result = new AtomicReference<>();
    final AtomicReference<Exception> failure = new AtomicReference<>();
    // Capture either outcome and release the latch either way.
    queryRulesIndexService.putQueryRule(queryRulesetId, queryRule, ActionListener.wrap(r -> {
        result.set(r);
        done.countDown();
    }, e -> {
        failure.set(e);
        done.countDown();
    }));
    assertTrue("Timeout waiting for put request", done.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS));
    if (failure.get() != null) {
        throw failure.get();
    }
    assertNotNull("Received null response from put request", result.get());
    return result.get();
}
/**
 * Synchronously fetches a ruleset by name: blocks on the async service call,
 * rethrows any failure (e.g. ResourceNotFoundException), and returns it.
 */
private QueryRuleset awaitGetQueryRuleset(String name) throws Exception {
    final CountDownLatch done = new CountDownLatch(1);
    final AtomicReference<QueryRuleset> result = new AtomicReference<>();
    final AtomicReference<Exception> failure = new AtomicReference<>();
    // Capture either outcome and release the latch either way.
    queryRulesIndexService.getQueryRuleset(name, ActionListener.wrap(r -> {
        result.set(r);
        done.countDown();
    }, e -> {
        failure.set(e);
        done.countDown();
    }));
    assertTrue("Timeout waiting for get request", done.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS));
    if (failure.get() != null) {
        throw failure.get();
    }
    assertNotNull("Received null response from get request", result.get());
    return result.get();
}
/**
 * Synchronously fetches a single rule from a ruleset: blocks on the async
 * service call, rethrows any failure, and returns the rule.
 */
private QueryRule awaitGetQueryRule(String rulesetId, String ruleId) throws Exception {
    final CountDownLatch done = new CountDownLatch(1);
    final AtomicReference<QueryRule> result = new AtomicReference<>();
    final AtomicReference<Exception> failure = new AtomicReference<>();
    // Capture either outcome and release the latch either way.
    queryRulesIndexService.getQueryRule(rulesetId, ruleId, ActionListener.wrap(r -> {
        result.set(r);
        done.countDown();
    }, e -> {
        failure.set(e);
        done.countDown();
    }));
    assertTrue("Timeout waiting for get request", done.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS));
    if (failure.get() != null) {
        throw failure.get();
    }
    assertNotNull("Received null response from get request", result.get());
    return result.get();
}
/**
 * Synchronously deletes a ruleset by name: blocks on the async service call,
 * rethrows any failure, and returns the delete response.
 */
private DeleteResponse awaitDeleteQueryRuleset(String name) throws Exception {
    final CountDownLatch done = new CountDownLatch(1);
    final AtomicReference<DeleteResponse> result = new AtomicReference<>();
    final AtomicReference<Exception> failure = new AtomicReference<>();
    // Capture either outcome and release the latch either way.
    queryRulesIndexService.deleteQueryRuleset(name, ActionListener.wrap(r -> {
        result.set(r);
        done.countDown();
    }, e -> {
        failure.set(e);
        done.countDown();
    }));
    assertTrue("Timeout waiting for delete request", done.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS));
    if (failure.get() != null) {
        throw failure.get();
    }
    assertNotNull("Received null response from delete request", result.get());
    return result.get();
}
/**
 * Synchronously deletes a single rule from a ruleset: blocks on the async
 * service call, rethrows any failure, and returns the delete response.
 */
private DeleteQueryRuleAction.Response awaitDeleteQueryRule(String rulesetId, String ruleId) throws Exception {
    final CountDownLatch done = new CountDownLatch(1);
    final AtomicReference<DeleteQueryRuleAction.Response> result = new AtomicReference<>();
    final AtomicReference<Exception> failure = new AtomicReference<>();
    // Capture either outcome and release the latch either way.
    queryRulesIndexService.deleteQueryRule(rulesetId, ruleId, ActionListener.wrap(r -> {
        result.set(r);
        done.countDown();
    }, e -> {
        failure.set(e);
        done.countDown();
    }));
    assertTrue("Timeout waiting for delete request", done.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS));
    if (failure.get() != null) {
        throw failure.get();
    }
    assertNotNull("Received null response from delete request", result.get());
    return result.get();
}
/**
 * Synchronously lists rulesets with pagination: blocks on the async service
 * call, rethrows any failure, and returns the page of results.
 */
private QueryRulesIndexService.QueryRulesetResult awaitListQueryRulesets(int from, int size) throws Exception {
    final CountDownLatch done = new CountDownLatch(1);
    final AtomicReference<QueryRulesIndexService.QueryRulesetResult> result = new AtomicReference<>();
    final AtomicReference<Exception> failure = new AtomicReference<>();
    // Capture either outcome and release the latch either way.
    queryRulesIndexService.listQueryRulesets(from, size, ActionListener.wrap(r -> {
        result.set(r);
        done.countDown();
    }, e -> {
        failure.set(e);
        done.countDown();
    }));
    assertTrue("Timeout waiting for list request", done.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS));
    if (failure.get() != null) {
        throw failure.get();
    }
    assertNotNull("Received null response from list request", result.get());
    return result.get();
}
/**
* Test plugin to register the {@link QueryRulesIndexService} system index descriptor.
*/
public static
|
QueryRulesIndexServiceTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/idmanytoone/Customers.java
|
{
"start": 435,
"end": 1193
}
|
/**
 * JPA entity mapping a customer row keyed by {@code customerID}, owning a
 * one-to-many collection of {@link ShoppingBaskets} (mapped by "owner",
 * cascading all operations). Accessor style suggests tool-generated code.
 */
class ____ implements Serializable {
    private static final long serialVersionUID = -885167444315163039L;
    // Primary key, mapped to the non-nullable "customerID" column.
    @Column(name="customerID", nullable=false)
    @Id
    private int customerID;
    // Inverse side of ShoppingBaskets.owner; all operations cascade.
    @OneToMany(mappedBy="owner", cascade= CascadeType.ALL, targetEntity=ShoppingBaskets.class)
    private java.util.Set<ShoppingBaskets> shoppingBasketses = new java.util.HashSet<>();
    public void setCustomerID(int value) {
        this.customerID = value;
    }
    public int getCustomerID() {
        return customerID;
    }
    // Alias for the identifier — presumably an ORM-tool convention; TODO confirm.
    public int getORMID() {
        return getCustomerID();
    }
    public void setShoppingBasketses(java.util.Set<ShoppingBaskets> value) {
        this.shoppingBasketses = value;
    }
    public java.util.Set<ShoppingBaskets> getShoppingBasketses() {
        return shoppingBasketses;
    }
}
|
Customers
|
java
|
micronaut-projects__micronaut-core
|
inject-java/src/test/groovy/io/micronaut/inject/lifecycle/proxybeanwithpredestroy/C.java
|
{
"start": 756,
"end": 916
}
|
/**
 * Test bean whose {@code close()} doubles as a {@code @PreDestroy} hook;
 * the static counter records how many times the container invoked it.
 */
class ____ implements AutoCloseable {
    // Incremented on every close() call; inspected by the test harness.
    static int closed;
    @Override
    @PreDestroy
    public void close() throws Exception {
        closed++;
    }
}
|
C
|
java
|
apache__camel
|
components/camel-kubernetes/src/test/java/org/apache/camel/component/openshift/producer/OpenshiftBuildsProducerTest.java
|
{
"start": 1644,
"end": 3778
}
|
/**
 * Tests the openshift-builds producer against a mock Kubernetes API server:
 * listing all builds and listing builds filtered by label selector, each
 * backed by a one-shot mock expectation returning two build items.
 */
class ____ extends KubernetesTestSupport {
    KubernetesMockServer server;
    NamespacedKubernetesClient client;
    @BindToRegistry("client")
    public NamespacedKubernetesClient loadClient() throws Exception {
        // One-shot expectation for the unfiltered list: two builds.
        server.expect().withPath("/apis/build.openshift.io/v1/builds")
            .andReturn(200, new BuildListBuilder().addNewItem().and().addNewItem().and().build()).once();
        // One-shot expectation for the label-filtered list: same two builds.
        server.expect().withPath("/apis/build.openshift.io/v1/builds?labelSelector=" + toUrlEncoded("key1=value1,key2=value2"))
            .andReturn(200, new BuildListBuilder().addNewItem().and().addNewItem().and().build()).once();
        // API-group discovery is answered on every call ("always") so the
        // client recognizes the OpenShift security group.
        server.expect().withPath("/apis")
            .andReturn(200,
                new APIGroupListBuilder().addNewGroup().withApiVersion("v1").withName("autoscaling.k8s.io").endGroup()
                    .addNewGroup()
                    .withApiVersion("v1").withName("security.openshift.io").endGroup().build())
            .always();
        return client;
    }
    @Test
    void listTest() {
        List<?> result = template.requestBody("direct:list", "", List.class);
        assertEquals(2, result.size());
    }
    @Test
    void listByLabelsTest() {
        Exchange ex = template.request("direct:listByLabels", exchange -> {
            // Labels are passed via the Camel header and become the label selector.
            Map<String, String> labels = new HashMap<>();
            labels.put("key1", "value1");
            labels.put("key2", "value2");
            exchange.getIn().setHeader(KubernetesConstants.KUBERNETES_BUILDS_LABELS, labels);
        });
        List<?> result = ex.getMessage().getBody(List.class);
        assertEquals(2, result.size());
    }
    @Override
    protected RouteBuilder createRouteBuilder() {
        // Two routes, one per producer operation under test.
        return new RouteBuilder() {
            @Override
            public void configure() {
                from("direct:list").to("openshift-builds:///?operation=listBuilds&kubernetesClient=#client");
                from("direct:listByLabels").to("openshift-builds:///?operation=listBuildsByLabels&kubernetesClient=#client");
            }
        };
    }
}
|
OpenshiftBuildsProducerTest
|
java
|
apache__flink
|
flink-state-backends/flink-statebackend-heap-spillable/src/test/java/org/apache/flink/runtime/state/heap/SkipListSerializerTest.java
|
{
"start": 1487,
"end": 5101
}
|
/**
 * Unit tests for the skip-list key and value serializers: round-trips
 * key/namespace and state values through serialization at a non-zero offset
 * inside a larger buffer, both once and repeatedly (statelessness check).
 */
class ____ {

    private static final TypeSerializer<String> keySerializer = StringSerializer.INSTANCE;
    private static final TypeSerializer<String> namespaceSerializer = StringSerializer.INSTANCE;
    private static final SkipListKeySerializer<String, String> skipListKeySerializer =
            new SkipListKeySerializer<>(keySerializer, namespaceSerializer);
    private static final TypeSerializer<String> stateSerializer = StringSerializer.INSTANCE;
    private static final SkipListValueSerializer<String> skipListValueSerializer =
            new SkipListValueSerializer<>(stateSerializer);

    @Test
    void testSkipListKeySerializerBasicOp() throws IOException {
        testSkipListKeySerializer(0);
    }

    @Test
    void testSkipListKeySerializerStateless() throws IOException {
        // Repeated use with distinct inputs must not be affected by prior calls.
        for (int i = 0; i < 10; i++) {
            testSkipListKeySerializer(i);
        }
    }

    /**
     * Serializes a (key, namespace) pair, embeds it at an offset inside a
     * larger buffer, and checks that key, namespace, and the combined
     * serialized forms all deserialize back to the originals.
     *
     * @param delta varies the key/namespace between invocations
     */
    private void testSkipListKeySerializer(int delta) throws IOException {
        String key = "key-abcdedg" + delta;
        String namespace = "namespace-dfsfdafd" + delta;
        byte[] skipListKey = skipListKeySerializer.serialize(key, namespace);
        // Place the payload at a non-zero offset to verify offset handling.
        int offset = 10;
        byte[] data = new byte[offset + skipListKey.length];
        System.arraycopy(skipListKey, 0, data, offset, skipListKey.length);
        MemorySegment skipListKeySegment = MemorySegmentFactory.wrap(data);
        assertThat(
                        skipListKeySerializer.deserializeKey(
                                skipListKeySegment, offset, skipListKey.length))
                .isEqualTo(key);
        assertThat(
                        skipListKeySerializer.deserializeNamespace(
                                skipListKeySegment, offset, skipListKey.length))
                .isEqualTo(namespace);
        Tuple2<byte[], byte[]> serializedKeyAndNamespace =
                skipListKeySerializer.getSerializedKeyAndNamespace(skipListKeySegment, offset);
        assertThat(deserialize(keySerializer, serializedKeyAndNamespace.f0)).isEqualTo(key);
        assertThat(deserialize(namespaceSerializer, serializedKeyAndNamespace.f1))
                .isEqualTo(namespace);
        byte[] serializedNamespace = skipListKeySerializer.serializeNamespace(namespace);
        assertThat(deserialize(namespaceSerializer, serializedNamespace)).isEqualTo(namespace);
    }

    @Test
    void testSkipListValueSerializerBasicOp() throws IOException {
        testSkipListValueSerializer(0);
    }

    @Test
    void testSkipListValueSerializerStateless() throws IOException {
        for (int i = 0; i < 10; i++) {
            testSkipListValueSerializer(i);
        }
    }

    /**
     * Serializes a state value, embeds it at an offset inside a larger
     * buffer, and checks both direct and offset-based deserialization.
     */
    private void testSkipListValueSerializer(int i) throws IOException {
        String state = "value-" + i;
        byte[] value = skipListValueSerializer.serialize(state);
        int offset = 10;
        // Size with offset (was a duplicated magic literal 10) so the test
        // stays correct if the offset is ever changed.
        byte[] data = new byte[offset + value.length];
        System.arraycopy(value, 0, data, offset, value.length);
        assertThat(deserialize(stateSerializer, value)).isEqualTo(state);
        assertThat(
                        skipListValueSerializer.deserializeState(
                                MemorySegmentFactory.wrap(data), offset, value.length))
                .isEqualTo(state);
    }

    /** Round-trips raw bytes back through the given serializer. */
    private <T> T deserialize(TypeSerializer<T> serializer, byte[] data) throws IOException {
        ByteArrayInputStreamWithPos inputStream = new ByteArrayInputStreamWithPos(data);
        DataInputViewStreamWrapper inputView = new DataInputViewStreamWrapper(inputStream);
        return serializer.deserialize(inputView);
    }
}
|
SkipListSerializerTest
|
java
|
elastic__elasticsearch
|
modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/ConditionalNode.java
|
{
"start": 616,
"end": 1534
}
|
/**
 * IR node for a conditional expression: a condition plus the two branch
 * expressions inherited from {@link BinaryNode} (left/right). Visitors are
 * dispatched via {@code visitConditional}; children are visited in
 * condition, left, right order.
 */
class ____ extends BinaryNode {

    /* ---- begin tree structure ---- */

    // The boolean condition selecting between the left and right branches.
    private ExpressionNode conditionNode;

    public void setConditionNode(ExpressionNode conditionNode) {
        this.conditionNode = conditionNode;
    }

    public ExpressionNode getConditionNode() {
        return conditionNode;
    }

    /* ---- end tree structure, begin visitor ---- */

    @Override
    public <Scope> void visit(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
        irTreeVisitor.visitConditional(this, scope);
    }

    @Override
    public <Scope> void visitChildren(IRTreeVisitor<Scope> irTreeVisitor, Scope scope) {
        // Condition first, then the two branch expressions.
        conditionNode.visit(irTreeVisitor, scope);
        getLeftNode().visit(irTreeVisitor, scope);
        getRightNode().visit(irTreeVisitor, scope);
    }

    /* ---- end visitor ---- */

    public ConditionalNode(Location location) {
        super(location);
    }
}
|
ConditionalNode
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/processor/internals/StateUpdater.java
|
{
"start": 2349,
"end": 8823
}
|
/**
 * Result of removing a task from the state updater: the removed task plus an
 * optional exception if removal failed. Equality is defined by task id and
 * exception, not by Task instance identity.
 */
class ____ {
    private final Task task;
    private final Optional<RuntimeException> exception;

    public RemovedTaskResult(final Task task) {
        this(task, null);
    }

    /**
     * @param task      the removed task; must not be {@code null}
     * @param exception failure encountered during removal, or {@code null} if none
     */
    public RemovedTaskResult(final Task task, final RuntimeException exception) {
        this.task = Objects.requireNonNull(task);
        this.exception = Optional.ofNullable(exception);
    }

    public Task task() {
        return task;
    }

    public Optional<RuntimeException> exception() {
        return exception;
    }

    @Override
    public boolean equals(final Object o) {
        if (this == o) return true;
        if (!(o instanceof RemovedTaskResult)) return false;
        final RemovedTaskResult that = (RemovedTaskResult) o;
        return Objects.equals(task.id(), that.task.id()) && Objects.equals(exception, that.exception);
    }

    @Override
    public int hashCode() {
        // Hash the same fields equals() compares. equals() uses task.id(), so
        // hashing the Task instance (previous behavior) broke the
        // equals/hashCode contract for distinct Task objects sharing an id.
        return Objects.hash(task.id(), exception);
    }

    @Override
    public String toString() {
        return "RemovedTaskResult{" +
            "task=" + task.id() +
            ", exception=" + exception +
            '}';
    }
}
/**
* Starts the state updater.
*/
void start();
/**
* Shuts down the state updater.
*
* @param timeout duration how long to wait until the state updater is shut down
*
* @throws
* org.apache.kafka.streams.errors.StreamsException if the state updater thread cannot shutdown within the timeout
*/
void shutdown(final Duration timeout);
/**
* Adds a task (active or standby) to the state updater.
*
* This method does not block until the task is added to the state updater.
*
* @param task task to add
*/
void add(final Task task);
/**
* Removes a task (active or standby) from the state updater.
*
* This method does not block until the removed task is removed from the state updater. But it returns a future on
* which processing can be blocked. The task to remove is removed from the updating tasks, paused tasks,
* restored tasks, or failed tasks.
*
* @param taskId ID of the task to remove
*/
CompletableFuture<RemovedTaskResult> remove(final TaskId taskId);
/**
* Wakes up the state updater if it is currently dormant, to check if a paused task should be resumed.
*/
void signalResume();
/**
* Drains the restored active tasks from the state updater.
*
* The returned active tasks are removed from the state updater.
*
* With a timeout of zero the method tries to drain the restored active tasks at least once.
*
* @param timeout duration how long the calling thread should wait for restored active tasks
*
* @return set of active tasks with up-to-date states
*/
Set<StreamTask> drainRestoredActiveTasks(final Duration timeout);
/**
* Drains the failed tasks and the corresponding exceptions.
*
* The returned failed tasks are removed from the state updater
*
* @return list of failed tasks and the corresponding exceptions
*/
List<ExceptionAndTask> drainExceptionsAndFailedTasks();
/**
* Checks if the state updater has any failed tasks that should be returned to the StreamThread
* using `drainExceptionsAndFailedTasks`.
*
* @return true if a subsequent call to `drainExceptionsAndFailedTasks` would return a non-empty collection.
*/
boolean hasExceptionsAndFailedTasks();
/**
* Gets all tasks that are managed by the state updater.
*
* The state updater manages all tasks that were added with the {@link StateUpdater#add(Task)} and that have
* not been removed from the state updater with one of the following methods:
* <ul>
* <li>{@link StateUpdater#drainRestoredActiveTasks(Duration)}</li>
* <li>{@link StateUpdater#drainExceptionsAndFailedTasks()}</li>
* <li>{@link StateUpdater#remove(org.apache.kafka.streams.processor.TaskId)}</li>
* </ul>
*
* @return set of all tasks managed by the state updater
*/
Set<Task> tasks();
/**
* Gets all tasks that are currently being restored inside the state updater.
*
* Tasks that have just being added into the state updater via {@link StateUpdater#add(Task)}
* or have restored completely or removed will not be returned; tasks that have just being
* removed via {@link StateUpdater#remove(TaskId)} may still be returned.
*
* @return set of all updating tasks inside the state updater
*/
Set<Task> updatingTasks();
/**
* Returns if the state updater restores active tasks.
*
* The state updater restores active tasks if at least one active task was added with {@link StateUpdater#add(Task)},
* and the task was not removed from the state updater with one of the following methods:
* <ul>
* <li>{@link StateUpdater#drainRestoredActiveTasks(Duration)}</li>
* <li>{@link StateUpdater#drainExceptionsAndFailedTasks()}</li>
* <li>{@link StateUpdater#remove(org.apache.kafka.streams.processor.TaskId)}</li>
* </ul>
*
* @return {@code true} if the state updater restores active tasks, {@code false} otherwise
*/
// TODO: We would still return true if all active tasks to be restored
// are paused, in order to keep consistent behavior compared with
// state updater disabled. In the future we would modify this criterion
// with state updater always enabled to allow mixed processing / restoration.
boolean restoresActiveTasks();
/**
* Gets standby tasks that are managed by the state updater.
*
* The state updater manages all standby tasks that were added with the {@link StateUpdater#add(Task)} and that have
* not been removed from the state updater with one of the following methods:
* <ul>
* <li>{@link StateUpdater#drainExceptionsAndFailedTasks()}</li>
* </ul>
*
* @return set of all tasks managed by the state updater
*/
Set<StandbyTask> standbyTasks();
/**
* Get the restore consumer instance id for telemetry, and complete the given future to return it.
*/
KafkaFutureImpl<Uuid> restoreConsumerInstanceId(final Duration timeout);
}
|
RemovedTaskResult
|
java
|
micronaut-projects__micronaut-core
|
http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/FilterErrorTest.java
|
{
"start": 7158,
"end": 8068
}
|
/**
 * Test filter that counts its executions and either passes the request
 * through (recording the downstream status) when the "X-Passthru" header is
 * true, or short-circuits the chain by emitting a {@link FilterException}.
 */
class ____ implements HttpServerFilter {
    // Number of times doFilter was invoked.
    AtomicInteger executedCount = new AtomicInteger(0);
    // Status of the downstream response, recorded only on pass-through.
    AtomicReference<HttpStatus> responseStatus = new AtomicReference<>();
    private void setResponse(MutableHttpResponse<?> r) {
        responseStatus.set(r.status());
    }
    @Override
    public Publisher<MutableHttpResponse<?>> doFilter(HttpRequest<?> request, ServerFilterChain chain) {
        executedCount.incrementAndGet();
        if (StringUtils.isTrue(request.getHeaders().get("X-Passthru"))) {
            // Proceed down the chain, then capture the resulting status.
            return Publishers.then(chain.proceed(request), this::setResponse);
        }
        // Short-circuit: emit the exception without invoking the chain.
        return Publishers.just(new FilterException());
    }
    @Override
    public int getOrder() {
        return 10;
    }
}
@Requires(property = "spec.name", value = SPEC_NAME)
@Filter(Filter.MATCH_ALL_PATTERN)
static
|
First
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/impl/ProducerCacheNonSingletonTest.java
|
{
"start": 2701,
"end": 3213
}
|
/**
 * Test endpoint that is deliberately non-singleton, so each producer lookup
 * creates a fresh {@link MyDummyProducer}; consumers are unsupported.
 */
class ____ extends DefaultEndpoint {
    @Override
    public Producer createProducer() {
        return new MyDummyProducer(this);
    }
    @Override
    public Consumer createConsumer(Processor processor) {
        // Consumers are not exercised by this test.
        return null;
    }
    @Override
    public boolean isSingleton() {
        // Non-singleton: the point of the test is per-request producer creation.
        return false;
    }
    @Override
    protected String createEndpointUri() {
        return "dummy://foo";
    }
}
private static
|
MyDummyEndpoint
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/sql/ast/tree/expression/Overflow.java
|
{
"start": 495,
"end": 1636
}
|
/**
 * SQL AST expression for an overflow sub-clause — presumably the
 * {@code ON OVERFLOW} clause of a string aggregation (TODO confirm) —
 * carrying a separator expression, an optional filler expression, and a
 * with-count flag. The JDBC mapping is borrowed from the separator.
 */
class ____ implements Expression, SqlExpressible, SqlAstNode {
    private final Expression separatorExpression;
    private final Expression fillerExpression;
    private final boolean withCount;

    public Overflow(Expression separatorExpression, Expression fillerExpression, boolean withCount) {
        this.separatorExpression = separatorExpression;
        this.fillerExpression = fillerExpression;
        this.withCount = withCount;
    }

    public Expression getSeparatorExpression() {
        return separatorExpression;
    }

    public Expression getFillerExpression() {
        return fillerExpression;
    }

    public boolean isWithCount() {
        return withCount;
    }

    @Override
    public JdbcMapping getJdbcMapping() {
        // Delegates to the separator; assumes it is always a SqlExpressible.
        return ( (SqlExpressible) separatorExpression ).getJdbcMapping();
    }

    @Override
    public JdbcMappingContainer getExpressionType() {
        return separatorExpression.getExpressionType();
    }

    @Override
    public void accept(SqlAstWalker sqlTreeWalker) {
        sqlTreeWalker.visitOverflow( this );
    }

    @Override
    public int forEachJdbcType(int offset, IndexedConsumer<JdbcMapping> action) {
        action.accept( offset, getJdbcMapping() );
        return getJdbcTypeCount();
    }
}
|
Overflow
|
java
|
elastic__elasticsearch
|
qa/restricted-loggers/src/test/java/org/elasticsearch/common/logging/LoggersTests.java
|
{
"start": 1127,
"end": 5586
}
|
/**
 * Verifies that restricted loggers reject verbose levels: settings
 * validation flags DEBUG and below, {@code checkRestrictedLoggers} reports
 * violations for restricted loggers and their descendants, and
 * {@code Loggers.setLevel} enforces the restriction even against parent or
 * root-logger level inheritance.
 */
class ____ extends ESTestCase {
    public void testClusterUpdateSettingsRequestValidationForLoggers() {
        assertThat(Loggers.RESTRICTED_LOGGERS, hasSize(greaterThan(0)));
        ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest(TEST_REQUEST_TIMEOUT, TEST_REQUEST_TIMEOUT);
        for (String logger : Loggers.RESTRICTED_LOGGERS) {
            // DEBUG on a restricted logger must fail request validation.
            var validation = request.persistentSettings(Map.of("logger." + logger, org.elasticsearch.logging.Level.DEBUG)).validate();
            assertNotNull(validation);
            assertThat(validation.validationErrors(), contains("Level [DEBUG] is not permitted for logger [" + logger + "]"));
            // INFO is permitted
            assertNull(request.persistentSettings(Map.of("logger." + logger, org.elasticsearch.logging.Level.INFO)).validate());
        }
    }
    public void testCheckRestrictedLoggers() {
        assertThat(Loggers.RESTRICTED_LOGGERS, hasSize(greaterThan(0)));
        Settings settings;
        for (String restricted : Loggers.RESTRICTED_LOGGERS) {
            // Check both the restricted logger itself and a descendant (".xyz").
            for (String suffix : List.of("", ".xyz")) {
                String logger = restricted + suffix;
                for (Level level : List.of(Level.ALL, Level.TRACE, Level.DEBUG)) {
                    settings = Settings.builder().put("logger." + logger, level).build();
                    List<String> errors = checkRestrictedLoggers(settings);
                    assertThat(errors, contains("Level [" + level + "] is not permitted for logger [" + logger + "]"));
                }
                for (Level level : List.of(Level.ERROR, Level.WARN, Level.INFO)) {
                    settings = Settings.builder().put("logger." + logger, level).build();
                    assertThat(checkRestrictedLoggers(settings), hasSize(0));
                }
                // Unparseable or absent levels are ignored rather than rejected.
                settings = Settings.builder().put("logger." + logger, "INVALID").build();
                assertThat(checkRestrictedLoggers(settings), hasSize(0));
                settings = Settings.builder().put("logger." + logger, (String) null).build();
                assertThat(checkRestrictedLoggers(settings), hasSize(0));
            }
        }
    }
    public void testSetLevelWithRestrictions() {
        assertThat(Loggers.RESTRICTED_LOGGERS, hasSize(greaterThan(0)));
        for (String restricted : Loggers.RESTRICTED_LOGGERS) {
            TestLoggers.runWithLoggersRestored(() -> {
                // 'org.apache.http' is an example of a restricted logger,
                // a restricted component logger would be `org.apache.http.client.HttpClient` for instance,
                // and the parent logger is `org.apache`.
                Logger restrictedLogger = LogManager.getLogger(restricted);
                Logger restrictedComponent = LogManager.getLogger(restricted + ".component");
                Logger parentLogger = LogManager.getLogger(restricted.substring(0, restricted.lastIndexOf('.')));
                Loggers.setLevel(restrictedLogger, Level.INFO);
                assertHasINFO(restrictedLogger, restrictedComponent);
                for (Logger log : List.of(restrictedComponent, restrictedLogger)) {
                    // DEBUG is rejected due to restriction
                    Loggers.setLevel(log, Level.DEBUG);
                    assertHasINFO(restrictedComponent, restrictedLogger);
                }
                // OK for parent `org.apache`, but restriction is enforced for restricted descendants
                Loggers.setLevel(parentLogger, Level.DEBUG);
                assertEquals(Level.DEBUG, parentLogger.getLevel());
                assertHasINFO(restrictedComponent, restrictedLogger);
                // Inheriting DEBUG of parent `org.apache` is rejected
                Loggers.setLevel(restrictedLogger, (Level) null);
                assertHasINFO(restrictedComponent, restrictedLogger);
                // DEBUG of root logger isn't propagated to restricted loggers
                Loggers.setLevel(LogManager.getRootLogger(), Level.DEBUG);
                assertEquals(Level.DEBUG, LogManager.getRootLogger().getLevel());
                assertHasINFO(restrictedComponent, restrictedLogger);
            });
        }
    }
    // Asserts every given logger currently sits at INFO.
    private static void assertHasINFO(Logger... loggers) {
        for (Logger log : loggers) {
            assertThat("Unexpected log level for [" + log.getName() + "]", log.getLevel(), is(Level.INFO));
        }
    }
}
|
LoggersTests
|
java
|
spring-projects__spring-boot
|
core/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/condition/ConditionalOnMissingClassTests.java
|
{
"start": 1083,
"end": 1924
}
|
class ____ {
private final AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
@Test
void testVanillaOnClassCondition() {
this.context.register(BasicConfiguration.class, FooConfiguration.class);
this.context.refresh();
assertThat(this.context.containsBean("bar")).isFalse();
assertThat(this.context.getBean("foo")).isEqualTo("foo");
}
@Test
void testMissingOnClassCondition() {
this.context.register(MissingConfiguration.class, FooConfiguration.class);
this.context.refresh();
assertThat(this.context.containsBean("bar")).isTrue();
assertThat(this.context.getBean("foo")).isEqualTo("foo");
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingClass("org.springframework.boot.autoconfigure.condition.ConditionalOnMissingClassTests")
static
|
ConditionalOnMissingClassTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/ValueExtractorForExponentialHistogram.java
|
{
"start": 1779,
"end": 2365
}
|
class ____ implements ExponentialHistogramBlock.SerializedOutput {
BreakingBytesRefBuilder target;
@Override
public void appendDouble(double value) {
TopNEncoder.DEFAULT_UNSORTABLE.encodeDouble(value, target);
}
@Override
public void appendLong(long value) {
TopNEncoder.DEFAULT_UNSORTABLE.encodeLong(value, target);
}
@Override
public void appendBytesRef(BytesRef value) {
TopNEncoder.DEFAULT_UNSORTABLE.encodeBytesRef(value, target);
}
}
}
|
ReusableTopNEncoderOutput
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/web/filter/DelegatingFilterProxy.java
|
{
"start": 3238,
"end": 3886
}
|
class ____ originally inspired by Spring Security's {@code FilterToBeanProxy}
* class, written by Ben Alex.
*
* @author Juergen Hoeller
* @author Sam Brannen
* @author Chris Beams
* @since 1.2
* @see #setTargetBeanName
* @see #setTargetFilterLifecycle
* @see jakarta.servlet.Filter#doFilter
* @see jakarta.servlet.Filter#init
* @see jakarta.servlet.Filter#destroy
* @see #DelegatingFilterProxy(Filter)
* @see #DelegatingFilterProxy(String)
* @see #DelegatingFilterProxy(String, WebApplicationContext)
* @see jakarta.servlet.ServletContext#addFilter(String, Filter)
* @see org.springframework.web.WebApplicationInitializer
*/
public
|
was
|
java
|
apache__flink
|
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java
|
{
"start": 128853,
"end": 129128
}
|
class ____
extends ProcessTableFunction<Integer> {
public int eval(
@StateHint Integer i, @ArgumentHint(ArgumentTrait.ROW_SEMANTIC_TABLE) Row t) {
return 0;
}
}
private static
|
NonCompositeStateProcessTableFunction
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/generator/values/GeneratedValueBasicResultBuilder.java
|
{
"start": 1212,
"end": 4222
}
|
class ____ implements ResultBuilder {
private final NavigablePath navigablePath;
private final BasicValuedModelPart modelPart;
private final Integer valuesArrayPosition;
private final TableGroup tableGroup;
public GeneratedValueBasicResultBuilder(
NavigablePath navigablePath,
BasicValuedModelPart modelPart,
TableGroup tableGroup,
Integer valuesArrayPosition) {
this.navigablePath = navigablePath;
this.modelPart = modelPart;
this.valuesArrayPosition = valuesArrayPosition;
this.tableGroup = tableGroup;
}
@Override
public Class<?> getJavaType() {
return modelPart.getExpressibleJavaType().getJavaTypeClass();
}
@Override
public ResultBuilder cacheKeyInstance() {
return this;
}
@Override
public BasicResult<?> buildResult(
JdbcValuesMetadata jdbcResultsMetadata,
int resultPosition,
DomainResultCreationState domainResultCreationState) {
return new BasicResult<>(
sqlSelection( jdbcResultsMetadata, domainResultCreationState )
.getValuesArrayPosition(),
null,
modelPart.getJdbcMapping(),
navigablePath,
false,
false
);
}
private SqlSelection sqlSelection(
JdbcValuesMetadata jdbcResultsMetadata, DomainResultCreationState domainResultCreationState) {
final var creationStateImpl = impl( domainResultCreationState );
return sqlSelection( jdbcResultsMetadata, creationStateImpl, tableReference( creationStateImpl ) );
}
private TableReference tableReference(DomainResultCreationStateImpl creationStateImpl) {
return creationStateImpl.getFromClauseAccess()
.resolveTableGroup( navigablePath.getParent(), path -> this.tableGroup )
.resolveTableReference( navigablePath, modelPart, "t" );
}
private SqlSelection sqlSelection(
JdbcValuesMetadata jdbcResultsMetadata,
DomainResultCreationStateImpl creationStateImpl,
TableReference tableReference) {
return creationStateImpl.resolveSqlSelection(
ResultsHelper.resolveSqlExpression(
creationStateImpl,
tableReference,
modelPart,
valuesArrayPosition != null
? valuesArrayPosition
: columnIndex( jdbcResultsMetadata, modelPart )
),
modelPart.getJdbcMapping().getJdbcJavaType(),
null,
creationStateImpl.getSessionFactory().getTypeConfiguration()
);
}
public BasicValuedModelPart getModelPart() {
return modelPart;
}
private static int columnIndex(JdbcValuesMetadata jdbcResultsMetadata, BasicValuedModelPart modelPart) {
if ( jdbcResultsMetadata.getColumnCount() == 1 ) {
assert modelPart.isEntityIdentifierMapping()
|| getColumnPosition( jdbcResultsMetadata, modelPart ) == 1;
return 0;
}
else {
return jdbcPositionToValuesArrayPosition( getColumnPosition( jdbcResultsMetadata, modelPart ) );
}
}
private static int getColumnPosition(JdbcValuesMetadata valuesMetadata, BasicValuedModelPart modelPart) {
return valuesMetadata.resolveColumnPosition( getActualGeneratedModelPart( modelPart ).getSelectionExpression() );
}
}
|
GeneratedValueBasicResultBuilder
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/objectid/TestObjectIdDeserialization.java
|
{
"start": 2851,
"end": 3155
}
|
class ____
{
public int value;
protected int customId;
public IdWrapperExt next;
public ValueNodeExt() { this(0); }
public ValueNodeExt(int v) { value = v; }
public void setCustomId(int i) {
customId = i;
}
}
static
|
ValueNodeExt
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/RequestResponseBodyMethodProcessorTests.java
|
{
"start": 57249,
"end": 57353
}
|
class ____<A> {
public abstract A handle(@RequestBody A arg);
}
static
|
MyControllerWithAbstractMethod
|
java
|
apache__camel
|
test-infra/camel-test-infra-triton/src/test/java/org/apache/camel/test/infra/triton/services/TritonServiceFactory.java
|
{
"start": 1408,
"end": 1547
}
|
class ____ extends TritonLocalContainerInfraService
implements TritonService {
}
public static
|
TritonLocalContainerService
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/TemporalJoinTestPrograms.java
|
{
"start": 1285,
"end": 9362
}
|
class ____ {
static final SourceTestStep ORDERS =
SourceTestStep.newBuilder("Orders")
.addSchema(
"amount bigint",
"currency STRING",
"order_time STRING",
"rowtime as TO_TIMESTAMP(order_time) ",
"WATERMARK FOR rowtime AS rowtime")
.producedBeforeRestore(
Row.of(2L, "Euro", "2020-10-10 00:00:42"),
Row.of(1L, "USD", "2020-10-10 00:00:43"),
Row.of(50L, "Yen", "2020-10-10 00:00:44"),
Row.of(3L, "Euro", "2020-10-10 00:00:45"))
.producedAfterRestore(
Row.of(1L, "Euro", "2020-10-10 00:00:58"),
Row.of(1L, "USD", "2020-10-10 00:00:58"))
.build();
static final SourceTestStep ORDERS_WITH_NESTED_ID =
SourceTestStep.newBuilder("OrdersNestedId")
.addSchema(
"amount bigint",
"nested_row ROW<currency STRING>",
"nested_map MAP<STRING NOT NULL, STRING>",
"order_time STRING",
"rowtime as TO_TIMESTAMP(order_time) ",
"WATERMARK FOR rowtime AS rowtime")
.producedBeforeRestore(
Row.of(
2L,
Row.of("Euro"),
mapOf("currency", "Euro"),
"2020-10-10 00:00:42"),
Row.of(
1L,
Row.of("usd"),
mapOf("currency", "USD"),
"2020-10-10 00:00:43"),
Row.of(
50L,
Row.of("Yen"),
mapOf("currency", "Yen"),
"2020-10-10 00:00:44"),
Row.of(
3L,
Row.of("Euro"),
mapOf("currency", "Euro"),
"2020-10-10 00:00:45"))
.producedAfterRestore(
Row.of(
1L,
Row.of("Euro"),
mapOf("currency", "Euro"),
"2020-10-10 00:00:58"),
Row.of(
1L,
Row.of("usd"),
mapOf("currency", "USD"),
"2020-10-10 00:00:58"))
.build();
static final SourceTestStep RATES =
SourceTestStep.newBuilder("RatesHistory")
.addSchema(
"currency STRING",
"rate bigint",
"rate_time STRING",
"rowtime as TO_TIMESTAMP(rate_time) ",
"WATERMARK FOR rowtime AS rowtime",
"PRIMARY KEY(currency) NOT ENFORCED")
.producedBeforeRestore(
Row.of("USD", 102L, "2020-10-10 00:00:41"),
Row.of("Euro", 114L, "2020-10-10 00:00:41"),
Row.of("Yen", 1L, "2020-10-10 00:00:41"),
Row.of("Euro", 116L, "2020-10-10 00:00:45"),
Row.of("Euro", 119L, "2020-10-10 00:00:47"))
.producedAfterRestore(
Row.of("USD", 103L, "2020-10-10 00:00:58"),
Row.of("Euro", 120L, "2020-10-10 00:00:59"))
.build();
static final SinkTestStep AMOUNTS =
SinkTestStep.newBuilder("MySink")
.addSchema("amount bigint")
.consumedBeforeRestore("+I[102]", "+I[228]", "+I[348]", "+I[50]")
.consumedAfterRestore("+I[103]", "+I[119]")
.build();
static final TableTestProgram TEMPORAL_JOIN_TABLE_JOIN =
TableTestProgram.of("temporal-join-table-join", "validates temporal join with a table")
.setupTableSource(ORDERS)
.setupTableSource(RATES)
.setupTableSink(AMOUNTS)
.runSql(
"INSERT INTO MySink "
+ "SELECT amount * r.rate "
+ "FROM Orders AS o "
+ "JOIN RatesHistory FOR SYSTEM_TIME AS OF o.rowtime AS r "
+ "ON o.currency = r.currency ")
.build();
static final TableTestProgram TEMPORAL_JOIN_TABLE_JOIN_NESTED_KEY =
TableTestProgram.of(
"temporal-join-table-join-nested-key",
"validates temporal join with a table when the join keys comes from a nested row")
.setupTableSource(ORDERS_WITH_NESTED_ID)
.setupTableSource(RATES)
.setupTableSink(AMOUNTS)
.runSql(
"INSERT INTO MySink "
+ "SELECT amount * r.rate "
+ "FROM OrdersNestedId AS o "
+ "JOIN RatesHistory FOR SYSTEM_TIME AS OF o.rowtime AS r "
+ "ON (case when o.nested_row.currency = 'usd' then upper(o.nested_row.currency) ELSE o.nested_row.currency END) = r.currency ")
.build();
static final TableTestProgram TEMPORAL_JOIN_TABLE_JOIN_KEY_FROM_MAP =
TableTestProgram.of(
"temporal-join-table-join-key-from-map",
"validates temporal join with a table when the join key comes from a map value")
.setupTableSource(ORDERS_WITH_NESTED_ID)
.setupTableSource(RATES)
.setupTableSink(AMOUNTS)
.runSql(
"INSERT INTO MySink "
+ "SELECT amount * r.rate "
+ "FROM OrdersNestedId AS o "
+ "JOIN RatesHistory FOR SYSTEM_TIME AS OF o.rowtime AS r "
+ "ON o.nested_map['currency'] = r.currency ")
.build();
static final TableTestProgram TEMPORAL_JOIN_TEMPORAL_FUNCTION =
TableTestProgram.of(
"temporal-join-temporal-function",
"validates temporal join with a temporal function")
.setupTableSource(ORDERS)
.setupTableSource(RATES)
.setupTemporarySystemTemporalTableFunction(
"Rates", "RatesHistory", $("rowtime"), $("currency"))
.setupTableSink(AMOUNTS)
.runSql(
"INSERT INTO MySink "
+ "SELECT amount * r.rate "
+ "FROM Orders AS o, "
+ "LATERAL TABLE (Rates(o.rowtime)) AS r "
+ "WHERE o.currency = r.currency ")
.build();
private static Map<String, String> mapOf(String key, String value) {
final HashMap<String, String> map = new HashMap<>();
map.put(key, value);
return map;
}
}
|
TemporalJoinTestPrograms
|
java
|
apache__camel
|
components/camel-debezium/camel-debezium-common/camel-debezium-common-component/src/main/java/org/apache/camel/component/debezium/DebeziumTypeConverter.java
|
{
"start": 1026,
"end": 1965
}
|
class ____ {
private DebeziumTypeConverter() {
}
/**
* Convert {@link Struct} to {@link HashMap}, this only works with flat fields and it doesn't handle nested
* structure. Also as a result of the conversion, the schema data will be lost which is expected.
*
* @param struct
* @return {@link Map}
*/
@Converter
public static Map<String, Object> toMap(final Struct struct) {
final HashMap<String, Object> fieldsToValues = new HashMap<>();
struct.schema().fields().forEach(field -> {
Object value = struct.get(field);
// recursive call if we have nested structs
if (value instanceof Struct) {
fieldsToValues.put(field.name(), toMap((Struct) value));
} else {
fieldsToValues.put(field.name(), value);
}
});
return fieldsToValues;
}
}
|
DebeziumTypeConverter
|
java
|
quarkusio__quarkus
|
extensions/grpc/runtime/src/main/java/io/quarkus/grpc/runtime/config/GrpcServerConfiguration.java
|
{
"start": 6091,
"end": 7009
}
|
interface ____ {
/**
* Sets a custom keep-alive duration. This configures the time before sending a `keepalive` ping
* when there is no read activity.
*/
Optional<Duration> keepAliveTime();
/**
* Sets a custom permit-keep-alive duration. This configures the most aggressive keep-alive time clients
* are permitted to configure.
* The server will try to detect clients exceeding this rate and when detected will forcefully close the connection.
*
* @see #permitKeepAliveWithoutCalls
*/
Optional<Duration> permitKeepAliveTime();
/**
* Sets whether to allow clients to send keep-alive HTTP/2 PINGs even if
* there are no outstanding RPCs on the connection.
*/
Optional<Boolean> permitKeepAliveWithoutCalls();
}
@ConfigGroup
public
|
GrpcServerNettyConfig
|
java
|
apache__camel
|
components/camel-file/src/main/java/org/apache/camel/component/file/GenericFileDefaultSorter.java
|
{
"start": 1113,
"end": 3964
}
|
class ____ {
private GenericFileDefaultSorter() {
}
/**
* Returns a new sory by file language expression
*
* @param context the camel context
* @param expression the file language expression
* @param reverse true to reverse order
* @return the comparator
*/
public static Comparator<Exchange> sortByFileLanguage(CamelContext context, String expression, boolean reverse) {
return sortByFileLanguage(context, expression, reverse, false, null);
}
/**
* Returns a new sory by file language expression
*
* @param context the camel context
* @param expression the file language expression
* @param reverse true to reverse order
* @param ignoreCase ignore case if comparing strings
* @return the comparator
*/
public static Comparator<Exchange> sortByFileLanguage(
CamelContext context, String expression, boolean reverse, boolean ignoreCase) {
return sortByFileLanguage(context, expression, reverse, ignoreCase, null);
}
/**
* Returns a new sort by file language expression
*
* @param context the camel context
* @param expression the file language expression
* @param reverse true to reverse order
* @param ignoreCase ignore case if comparing strings
* @param nested nested comparator for sub group sorting, can be null
* @return the comparator
*/
public static Comparator<Exchange> sortByFileLanguage(
final CamelContext context, final String expression, final boolean reverse, final boolean ignoreCase,
final Comparator<Exchange> nested) {
// the expression should be enclosed by ${ }
String text = expression;
if (!expression.startsWith("${")) {
text = "${" + text;
}
if (!expression.endsWith("}")) {
text = text + "}";
}
Language language = context.resolveLanguage("file");
final Expression exp = language.createExpression(text);
return new Comparator<Exchange>() {
public int compare(Exchange o1, Exchange o2) {
Object result1 = exp.evaluate(o1, Object.class);
Object result2 = exp.evaluate(o2, Object.class);
int answer = ObjectHelper.compare(result1, result2, ignoreCase);
// if equal then sub sort by nested comparator
if (answer == 0 && nested != null) {
answer = nested.compare(o1, o2);
}
return reverse ? -1 * answer : answer;
}
public String toString() {
return expression + (nested != null ? ";" + nested.toString() : "");
}
};
}
}
|
GenericFileDefaultSorter
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterRpc.java
|
{
"start": 8265,
"end": 9277
}
|
class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(TestRouterRpc.class);
protected static final int NUM_SUBCLUSTERS = 2;
// We need at least 6 DNs to test Erasure Coding with RS-6-3-64k
protected static final int NUM_DNS = 6;
protected static final Comparator<ErasureCodingPolicyInfo> EC_POLICY_CMP =
new Comparator<ErasureCodingPolicyInfo>() {
public int compare(
ErasureCodingPolicyInfo ec0,
ErasureCodingPolicyInfo ec1) {
String name0 = ec0.getPolicy().getName();
String name1 = ec1.getPolicy().getName();
return name0.compareTo(name1);
}
};
/** Federated HDFS cluster. */
private static MiniRouterDFSCluster cluster;
/** Random Router for this federated cluster. */
private RouterContext router;
/** Random nameservice in the federated cluster. */
private String ns;
/** First namenode in the nameservice. */
private NamenodeContext namenode;
/** Client
|
TestRouterRpc
|
java
|
elastic__elasticsearch
|
modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/SearchCancellationIT.java
|
{
"start": 2289,
"end": 6880
}
|
class ____ extends AbstractSearchCancellationTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
plugins.add(AggregationsPlugin.class);
return List.copyOf(plugins);
}
public void testCancellationDuringTimeSeriesAggregation() throws Exception {
List<ScriptedBlockPlugin> plugins = initBlockFactory();
int numberOfShards = between(2, 5);
long now = Instant.now().toEpochMilli();
int numberOfRefreshes = between(1, 5);
// After a few initial checks we check every 2048 - number of shards records so we need to ensure all
// shards have enough records to trigger a check
int numberOfDocsPerRefresh = numberOfShards * between(3000, 3500) / numberOfRefreshes;
assertAcked(
prepareCreate("test").setSettings(
indexSettings(numberOfShards, 0).put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name())
.put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "dim")
.put(TIME_SERIES_START_TIME.getKey(), now)
.put(TIME_SERIES_END_TIME.getKey(), now + (long) numberOfRefreshes * numberOfDocsPerRefresh + 1)
).setMapping("""
{
"properties": {
"@timestamp": {"type": "date", "format": "epoch_millis"},
"dim": {"type": "keyword", "time_series_dimension": true}
}
}
""")
);
for (int i = 0; i < numberOfRefreshes; i++) {
// Make sure we sometimes have a few segments
BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
for (int j = 0; j < numberOfDocsPerRefresh; j++) {
bulkRequestBuilder.add(
prepareIndex("test").setOpType(DocWriteRequest.OpType.CREATE)
.setSource(
"@timestamp",
now + (long) i * numberOfDocsPerRefresh + j,
"val",
(double) j,
"dim",
String.valueOf(j % 100)
)
);
}
assertNoFailures(bulkRequestBuilder.get());
}
logger.info("Executing search");
Client client = client();
TimeSeriesAggregationBuilder timeSeriesAggregationBuilder = new TimeSeriesAggregationBuilder("test_agg");
ActionFuture<SearchResponse> searchResponse = client.prepareSearch("test")
.setQuery(matchAllQuery())
.addAggregation(
timeSeriesAggregationBuilder.subAggregation(
new ScriptedMetricAggregationBuilder("sub_agg").initScript(
new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.INIT_SCRIPT_NAME, Collections.emptyMap())
)
.mapScript(
new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.MAP_BLOCK_SCRIPT_NAME, Collections.emptyMap())
)
.combineScript(
new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.COMBINE_SCRIPT_NAME, Collections.emptyMap())
)
.reduceScript(
new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.REDUCE_FAIL_SCRIPT_NAME, Collections.emptyMap())
)
)
)
.execute();
awaitForBlock(plugins);
cancelSearch(TransportSearchAction.TYPE.name());
disableBlocks(plugins);
SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, searchResponse::actionGet);
assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.BAD_REQUEST));
logger.info("All shards failed with", ex);
if (lowLevelCancellation) {
// Ensure that we cancelled in TimeSeriesIndexSearcher and not in reduce phase
assertThat(ExceptionsHelper.stackTrace(ex), not(containsString("not building sub-aggregations due to task cancellation")));
} else {
assertThat(ExceptionsHelper.stackTrace(ex), containsString("not building sub-aggregations due to task cancellation"));
}
}
}
|
SearchCancellationIT
|
java
|
elastic__elasticsearch
|
libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java
|
{
"start": 1789,
"end": 1947
}
|
class ____ extends TestTargetIntermediateClass {
@Override
public void instanceMethod(int x, String y) {}
}
|
TestTargetImplementationClass
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/mvdedupe/BatchEncoder.java
|
{
"start": 21750,
"end": 22702
}
|
class ____ implements Decoder {
@Override
public void decode(Block.Builder builder, IsNull isNull, BytesRef[] encoded, int count) {
BytesRef scratch = new BytesRef();
BytesRefBlock.Builder b = (BytesRefBlock.Builder) builder;
for (int i = 0; i < count; i++) {
if (isNull.isNull(i)) {
b.appendNull();
} else {
BytesRef e = encoded[i];
scratch.bytes = e.bytes;
scratch.length = (int) intHandle.get(e.bytes, e.offset);
e.offset += Integer.BYTES;
e.length -= Integer.BYTES;
scratch.offset = e.offset;
b.appendBytesRef(scratch);
e.offset += scratch.length;
e.length -= scratch.length;
}
}
}
}
protected static final
|
BytesRefsDecoder
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/request/target/BaseTarget.java
|
{
"start": 1375,
"end": 2110
}
|
class ____<Z> implements Target<Z> {
private Request request;
@Override
public void setRequest(@Nullable Request request) {
this.request = request;
}
@Override
@Nullable
public Request getRequest() {
return request;
}
@Override
public void onLoadCleared(@Nullable Drawable placeholder) {
// Do nothing.
}
@Override
public void onLoadStarted(@Nullable Drawable placeholder) {
// Do nothing.
}
@Override
public void onLoadFailed(@Nullable Drawable errorDrawable) {
// Do nothing.
}
@Override
public void onStart() {
// Do nothing.
}
@Override
public void onStop() {
// Do nothing.
}
@Override
public void onDestroy() {
// Do nothing.
}
}
|
BaseTarget
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/single/SingleFlatMap.java
|
{
"start": 1009,
"end": 1561
}
|
class ____<T, R> extends Single<R> {
final SingleSource<? extends T> source;
final Function<? super T, ? extends SingleSource<? extends R>> mapper;
public SingleFlatMap(SingleSource<? extends T> source, Function<? super T, ? extends SingleSource<? extends R>> mapper) {
this.mapper = mapper;
this.source = source;
}
@Override
protected void subscribeActual(SingleObserver<? super R> downstream) {
source.subscribe(new SingleFlatMapCallback<T, R>(downstream, mapper));
}
static final
|
SingleFlatMap
|
java
|
google__guice
|
extensions/assistedinject/test/com/google/inject/assistedinject/FactoryProvider2Test.java
|
{
"start": 21609,
"end": 23762
}
|
interface ____ {
Car createCar(Provider<Color> colorProvider, Provider<String> stringProvider);
Mustang createMustang(@Assisted("color") Provider<Color> colorProvider);
}
@Test
public void testAssistedProviderIsDisallowed() {
try {
Guice.createInjector(
new AbstractModule() {
@Override
protected void configure() {
bind(ProviderBasedColoredCarFactory.class)
.toProvider(
FactoryProvider.newFactory(
ProviderBasedColoredCarFactory.class, Subaru.class));
}
});
fail();
} catch (CreationException expected) {
assertEquals(expected.getMessage(), 4, expected.getErrorMessages().size());
// Assert each method individually, because JDK7 doesn't guarantee method ordering.
assertContains(
expected.getMessage(),
"A Provider may not be a type in a factory method of an AssistedInject.",
"Offending instance is parameter [1] with key [Provider<FactoryProvider2Test$Color>"
+ " annotated with @Assisted("
+ Annotations.memberValueString("value", "color")
+ ")] on method"
+ " [FactoryProvider2Test$ProviderBasedColoredCarFactory.createMustang()]");
assertContains(
expected.getMessage(),
"A Provider may not be a type in a factory method of an AssistedInject.",
"Offending instance is parameter [1] with key [Provider<FactoryProvider2Test$Color>] on"
+ " method [FactoryProvider2Test$ProviderBasedColoredCarFactory.createCar()]");
assertContains(
expected.getMessage(),
"A Provider may not be a type in a factory method of an AssistedInject.",
"Offending instance is parameter [2] with key [Provider<String>] on method"
+ " [FactoryProvider2Test$ProviderBasedColoredCarFactory.createCar()]");
assertContains(
expected.getMessage(),
"No implementation for FactoryProvider2Test$ProviderBasedColoredCarFactory was bound.");
}
}
|
ProviderBasedColoredCarFactory
|
java
|
apache__camel
|
core/camel-util/src/main/java/org/apache/camel/util/ObjectHelper.java
|
{
"start": 16622,
"end": 18966
}
|
class ____ <tt>null</tt> if it could not be loaded
*/
public static Class<?> loadSimpleType(String name) {
// special for byte[] or Object[] as its common to use
if ("java.lang.byte[]".equals(name) || "byte[]".equals(name)) {
return byte[].class;
} else if ("java.lang.Byte[]".equals(name) || "Byte[]".equals(name)) {
return Byte[].class;
} else if ("java.lang.Object[]".equals(name) || "Object[]".equals(name)) {
return Object[].class;
} else if ("java.lang.String[]".equals(name) || "String[]".equals(name)) {
return String[].class;
// and these are common as well
} else if ("java.lang.String".equals(name) || "String".equals(name)) {
return String.class;
} else if ("java.lang.Boolean".equals(name) || "Boolean".equals(name)) {
return Boolean.class;
} else if ("boolean".equals(name)) {
return boolean.class;
} else if ("java.lang.Integer".equals(name) || "Integer".equals(name)) {
return Integer.class;
} else if ("int".equals(name)) {
return int.class;
} else if ("java.lang.Long".equals(name) || "Long".equals(name)) {
return Long.class;
} else if ("long".equals(name)) {
return long.class;
} else if ("java.lang.Short".equals(name) || "Short".equals(name)) {
return Short.class;
} else if ("short".equals(name)) {
return short.class;
} else if ("java.lang.Byte".equals(name) || "Byte".equals(name)) {
return Byte.class;
} else if ("byte".equals(name)) {
return byte.class;
} else if ("java.lang.Float".equals(name) || "Float".equals(name)) {
return Float.class;
} else if ("float".equals(name)) {
return float.class;
} else if ("java.lang.Double".equals(name) || "Double".equals(name)) {
return Double.class;
} else if ("double".equals(name)) {
return double.class;
} else if ("java.lang.Character".equals(name) || "Character".equals(name)) {
return Character.class;
} else if ("char".equals(name)) {
return char.class;
}
return null;
}
/**
* Loads the given
|
or
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/ServiceComponentBuilderFactory.java
|
{
"start": 1931,
"end": 5397
}
|
interface ____ extends ComponentBuilder<ServiceComponent> {
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions (if possible) occurred while the Camel
* consumer is trying to pickup incoming messages, or the likes, will
* now be processed as a message and handled by the routing Error
* Handler. Important: This is only possible if the 3rd party component
* allows Camel to be alerted if an exception was thrown. Some
* components handle this internally only, and therefore
* bridgeErrorHandler is not possible. In other situations we may
* improve the Camel component to hook into the 3rd party component and
* make this possible for future releases. By default the consumer will
* use the org.apache.camel.spi.ExceptionHandler to deal with
* exceptions, that will be logged at WARN or ERROR level and ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*
* @param bridgeErrorHandler the value to set
* @return the dsl builder
*/
default ServiceComponentBuilder bridgeErrorHandler(boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Whether autowiring is enabled. This is used for automatic autowiring
* options (the option must be marked as autowired) by looking up in the
* registry to find if there is a single instance of matching type,
* which then gets configured on the component. This can be used for
* automatic configuring JDBC data sources, JMS connection factories,
* AWS Clients, etc.
*
* The option is a: <code>boolean</code> type.
*
* Default: true
* Group: advanced
*
* @param autowiredEnabled the value to set
* @return the dsl builder
*/
default ServiceComponentBuilder autowiredEnabled(boolean autowiredEnabled) {
doSetProperty("autowiredEnabled", autowiredEnabled);
return this;
}
/**
* Inject the service to use.
*
* The option is a:
* <code>org.apache.camel.cloud.ServiceRegistry</code> type.
*
* Group: advanced
*
* @param service the value to set
* @return the dsl builder
*/
default ServiceComponentBuilder service(org.apache.camel.cloud.ServiceRegistry service) {
doSetProperty("service", service);
return this;
}
/**
* Inject the service selector used to lookup the ServiceRegistry to
* use.
*
* The option is a:
* <code>org.apache.camel.cloud.ServiceRegistry.Selector</code> type.
*
* Group: advanced
*
* @param serviceSelector the value to set
* @return the dsl builder
*/
default ServiceComponentBuilder serviceSelector(org.apache.camel.cloud.ServiceRegistry.Selector serviceSelector) {
doSetProperty("serviceSelector", serviceSelector);
return this;
}
}
|
ServiceComponentBuilder
|
java
|
elastic__elasticsearch
|
build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ChangelogEntry.java
|
{
"start": 1294,
"end": 4929
}
|
class ____ {
private static final Logger LOGGER = Logging.getLogger(GenerateReleaseNotesTask.class);
private Integer pr;
private String summary;
private String area;
private String type;
private List<Integer> issues;
private Breaking breaking;
private Highlight highlight;
private Deprecation deprecation;
private String entryOverride;
private static final ObjectMapper yamlMapper = new ObjectMapper(new YAMLFactory());
/**
* Create a new instance by parsing the supplied file
* @param file the YAML file to parse
* @return a new instance
*/
public static ChangelogEntry parse(File file) {
try {
return yamlMapper.readValue(file, ChangelogEntry.class);
} catch (IOException e) {
LOGGER.error("Failed to parse changelog from " + file.getAbsolutePath(), e);
throw new UncheckedIOException(e);
}
}
public Integer getPr() {
return pr;
}
public void setPr(Integer pr) {
this.pr = pr;
}
public List<Integer> getIssues() {
return issues;
}
public void setIssues(List<Integer> issues) {
this.issues = issues;
}
public String getArea() {
return area;
}
public void setArea(String area) {
this.area = area;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getSummary() {
return summary;
}
public void setSummary(String summary) {
this.summary = summary;
}
public Highlight getHighlight() {
return highlight;
}
public void setHighlight(Highlight highlight) {
this.highlight = highlight;
if (this.highlight != null) this.highlight.pr = this.pr;
}
public Breaking getBreaking() {
return breaking;
}
public void setBreaking(Breaking breaking) {
this.breaking = breaking;
}
public Deprecation getDeprecation() {
return deprecation;
}
public void setDeprecation(Deprecation deprecation) {
this.deprecation = deprecation;
}
public String getEntryOverride() {
return entryOverride;
}
public void setEntryOverride(String entryOverride) {
this.entryOverride = entryOverride;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ChangelogEntry that = (ChangelogEntry) o;
return Objects.equals(pr, that.pr)
&& Objects.equals(issues, that.issues)
&& Objects.equals(area, that.area)
&& Objects.equals(type, that.type)
&& Objects.equals(summary, that.summary)
&& Objects.equals(highlight, that.highlight)
&& Objects.equals(breaking, that.breaking)
&& Objects.equals(entryOverride, that.entryOverride);
}
@Override
public int hashCode() {
return Objects.hash(pr, issues, area, type, summary, highlight, breaking, entryOverride);
}
@Override
public String toString() {
return String.format(
Locale.ROOT,
"ChangelogEntry{pr=%d, issues=%s, area='%s', type='%s', summary='%s', highlight=%s, breaking=%s, deprecation=%s}",
pr,
issues,
area,
type,
summary,
highlight,
breaking,
deprecation
);
}
public static
|
ChangelogEntry
|
java
|
grpc__grpc-java
|
api/src/main/java/io/grpc/Channel.java
|
{
"start": 1289,
"end": 1454
}
|
class ____ but rather work with stubs that have been bound to a
* Channel that was decorated during application initialization.
*/
@ThreadSafe
public abstract
|
directly
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/util/OptionConverter.java
|
{
"start": 1408,
"end": 5813
}
|
class ____ {
private static final Logger LOGGER = StatusLogger.getLogger();
private static final String DELIM_START = "${";
private static final char DELIM_STOP = '}';
private static final int DELIM_START_LEN = 2;
private static final int DELIM_STOP_LEN = 1;
private static final int ONE_K = 1024;
/**
* OptionConverter is a static class.
*/
private OptionConverter() {}
public static String[] concatenateArrays(final String[] l, final String[] r) {
final int len = l.length + r.length;
final String[] a = new String[len];
System.arraycopy(l, 0, a, 0, l.length);
System.arraycopy(r, 0, a, l.length, r.length);
return a;
}
public static String convertSpecialChars(final String s) {
char c;
final int len = s.length();
final StringBuilder sbuf = new StringBuilder(len);
int i = 0;
while (i < len) {
c = s.charAt(i++);
if (c == '\\') {
c = s.charAt(i++);
switch (c) {
case 'n':
c = '\n';
break;
case 'r':
c = '\r';
break;
case 't':
c = '\t';
break;
case 'f':
c = '\f';
break;
case 'b':
c = '\b';
break;
case '"':
c = '\"';
break;
case '\'':
c = '\'';
break;
case '\\':
c = '\\';
break;
default:
// there is no default case.
}
}
sbuf.append(c);
}
return sbuf.toString();
}
public static Object instantiateByKey(
final Properties props, final String key, final Class<?> superClass, final Object defaultValue) {
// Get the value of the property in string form
final String className = findAndSubst(key, props);
if (className == null) {
LOGGER.error("Could not find value for key {}", key);
return defaultValue;
}
// Trim className to avoid trailing spaces that cause problems.
return OptionConverter.instantiateByClassName(className.trim(), superClass, defaultValue);
}
/**
* If <code>value</code> is "true", then {@code true} is
* returned. If <code>value</code> is "false", then
* {@code false} is returned. Otherwise, <code>default</code> is
* returned.
*
* <p>Case of value is unimportant.</p>
* @param value The value to convert.
* @param defaultValue The default value.
* @return true or false, depending on the value and/or default.
*/
public static boolean toBoolean(final String value, final boolean defaultValue) {
if (value == null) {
return defaultValue;
}
final String trimmedVal = value.trim();
if ("true".equalsIgnoreCase(trimmedVal)) {
return true;
}
if ("false".equalsIgnoreCase(trimmedVal)) {
return false;
}
return defaultValue;
}
/**
* Convert the String value to an int.
* @param value The value as a String.
* @param defaultValue The default value.
* @return The value as an int.
*/
public static int toInt(final String value, final int defaultValue) {
if (value != null) {
final String s = value;
try {
return Integers.parseInt(s);
} catch (final NumberFormatException e) {
LOGGER.error("[{}] is not in proper int form.", s, e);
}
}
return defaultValue;
}
public static Level toLevel(String value, Level defaultValue) {
if (value == null) {
return defaultValue;
}
value = value.trim();
final int hashIndex = value.indexOf('#');
if (hashIndex == -1) {
if ("NULL".equalsIgnoreCase(value)) {
return null;
} else {
// no
|
OptionConverter
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/core/publisher/MonoHasElement.java
|
{
"start": 1372,
"end": 2349
}
|
class ____<T>
extends Operators.BaseFluxToMonoOperator<T, Boolean> {
boolean done;
HasElementSubscriber(CoreSubscriber<? super Boolean> actual) {
super(actual);
}
@Override
public @Nullable Object scanUnsafe(Attr key) {
if (key == Attr.TERMINATED) return done;
return super.scanUnsafe(key);
}
@Override
public void onNext(T t) {
if (done) {
Operators.onNextDropped(t, currentContext());
return;
}
this.done = true;
Operators.onDiscard(t, currentContext());
this.actual.onNext(true);
this.actual.onComplete();
}
@Override
public void onError(Throwable t) {
if (done) {
Operators.onErrorDropped(t, currentContext());
return;
}
this.done = true;
this.actual.onError(t);
}
@Override
public void onComplete() {
if (done) {
return;
}
this.done = true;
completePossiblyEmpty();
}
@Override
Boolean accumulatedValue() {
return false;
}
}
}
|
HasElementSubscriber
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/basic/LocalTimeMappingTests.java
|
{
"start": 1011,
"end": 2015
}
|
class ____ {
@Test
public void verifyMappings(SessionFactoryScope scope) {
final MappingMetamodelImplementor mappingMetamodel = scope.getSessionFactory()
.getRuntimeMetamodels()
.getMappingMetamodel();
final EntityPersister entityDescriptor = mappingMetamodel.findEntityDescriptor(EntityWithLocalTime.class);
final BasicAttributeMapping duration = (BasicAttributeMapping) entityDescriptor.findAttributeMapping("localTime");
final JdbcMapping jdbcMapping = duration.getJdbcMapping();
assertThat(jdbcMapping.getJavaTypeDescriptor().getJavaTypeClass(), equalTo(LocalTime.class));
assertThat( jdbcMapping.getJdbcType().getJdbcTypeCode(), equalTo( Types.TIME));
scope.inTransaction(
(session) -> {
session.persist(new EntityWithLocalTime(1, LocalTime.now()));
}
);
scope.inTransaction(
(session) -> session.find(EntityWithLocalTime.class, 1)
);
}
@Entity(name = "EntityWithLocalTime")
@Table(name = "EntityWithLocalTime")
public static
|
LocalTimeMappingTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/boot/models/annotations/internal/PropertyRefAnnotation.java
|
{
"start": 469,
"end": 1323
}
|
class ____ implements PropertyRef {
private String value;
/**
* Used in creating dynamic annotation instances (e.g. from XML)
*/
public PropertyRefAnnotation(ModelsContext modelContext) {
}
/**
* Used in creating annotation instances from JDK variant
*/
public PropertyRefAnnotation(PropertyRef annotation, ModelsContext modelContext) {
this.value = annotation.value();
}
/**
* Used in creating annotation instances from Jandex variant
*/
public PropertyRefAnnotation(Map<String, Object> attributeValues, ModelsContext modelContext) {
this.value = (String) attributeValues.get( "value" );
}
@Override
public Class<? extends Annotation> annotationType() {
return PropertyRef.class;
}
@Override
public String value() {
return value;
}
public void value(String value) {
this.value = value;
}
}
|
PropertyRefAnnotation
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/launcher/TestContainerLauncher.java
|
{
"start": 16600,
"end": 18000
}
|
class ____ extends MRApp {
private NMTokenSecretManagerInNM tokenSecretManager;
public MRAppWithSlowNM(NMTokenSecretManagerInNM tokenSecretManager) {
super(1, 0, false, "TestContainerLauncher", true);
this.tokenSecretManager = tokenSecretManager;
}
@Override
protected ContainerLauncher
createContainerLauncher(final AppContext context) {
return new ContainerLauncherImpl(context) {
@Override
public ContainerManagementProtocolProxyData getCMProxy(
String containerMgrBindAddr, ContainerId containerId)
throws IOException {
InetSocketAddress addr = NetUtils.getConnectAddress(server);
String containerManagerBindAddr =
addr.getHostName() + ":" + addr.getPort();
Token token =
tokenSecretManager.createNMToken(
containerId.getApplicationAttemptId(),
NodeId.newInstance(addr.getHostName(), addr.getPort()), "user");
ContainerManagementProtocolProxy cmProxy =
new ContainerManagementProtocolProxy(conf);
ContainerManagementProtocolProxyData proxy =
cmProxy.new ContainerManagementProtocolProxyData(
YarnRPC.create(conf), containerManagerBindAddr, containerId,
token);
return proxy;
}
};
};
}
public
|
MRAppWithSlowNM
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/Ordering.java
|
{
"start": 14020,
"end": 38422
}
|
class ____ extends Ordering<@Nullable Object> {
private final AtomicInteger counter = new AtomicInteger(0);
private final ConcurrentMap<Object, Integer> uids =
Platform.tryWeakKeys(new MapMaker()).makeMap();
private Integer getUid(Object obj) {
Integer uid = uids.get(obj);
if (uid == null) {
// One or more integer values could be skipped in the event of a race
// to generate a UID for the same object from multiple threads, but
// that shouldn't be a problem.
uid = counter.getAndIncrement();
Integer alreadySet = uids.putIfAbsent(obj, uid);
if (alreadySet != null) {
uid = alreadySet;
}
}
return uid;
}
@Override
public int compare(@Nullable Object left, @Nullable Object right) {
if (left == right) {
return 0;
} else if (left == null) {
return -1;
} else if (right == null) {
return 1;
}
int leftCode = identityHashCode(left);
int rightCode = identityHashCode(right);
if (leftCode != rightCode) {
return leftCode < rightCode ? -1 : 1;
}
// identityHashCode collision (rare, but not as rare as you'd think)
int result = getUid(left).compareTo(getUid(right));
if (result == 0) {
throw new AssertionError(); // extremely, extremely unlikely.
}
return result;
}
@Override
public String toString() {
return "Ordering.arbitrary()";
}
/*
* We need to be able to mock identityHashCode() calls for tests, because it
* can take 1-10 seconds to find colliding objects. Mocking frameworks that
* can do magic to mock static method calls still can't do so for a system
* class, so we need the indirection. In production, Hotspot should still
* recognize that the call is 1-morphic and should still be willing to
* inline it if necessary.
*/
int identityHashCode(Object object) {
return System.identityHashCode(object);
}
}
// Constructor
/**
* Constructs a new instance of this class (only invokable by the subclass constructor, typically
* implicit).
*/
protected Ordering() {}
// Instance-based factories (and any static equivalents)
/**
* Returns the reverse of this ordering; the {@code Ordering} equivalent to {@link
* Collections#reverseOrder(Comparator)}.
*
* <p><b>Java 8+ users:</b> Use {@code thisComparator.reversed()} instead.
*/
// type parameter <S> lets us avoid the extra <String> in statements like:
// Ordering<String> o = Ordering.<String>natural().reverse();
public <S extends T> Ordering<S> reverse() {
return new ReverseOrdering<>(this);
}
/**
* Returns an ordering that treats {@code null} as less than all other values and uses {@code
* this} to compare non-null values.
*
* <p>The returned object is serializable if this object is serializable.
*
* <p><b>Java 8+ users:</b> Use {@code Comparator.nullsFirst(thisComparator)} instead.
*/
// type parameter <S> lets us avoid the extra <String> in statements like:
// Ordering<String> o = Ordering.<String>natural().nullsFirst();
public <S extends T> Ordering<@Nullable S> nullsFirst() {
return new NullsFirstOrdering<S>(this);
}
/**
* Returns an ordering that treats {@code null} as greater than all other values and uses this
* ordering to compare non-null values.
*
* <p>The returned object is serializable if this object is serializable.
*
* <p><b>Java 8+ users:</b> Use {@code Comparator.nullsLast(thisComparator)} instead.
*/
// type parameter <S> lets us avoid the extra <String> in statements like:
// Ordering<String> o = Ordering.<String>natural().nullsLast();
public <S extends T> Ordering<@Nullable S> nullsLast() {
return new NullsLastOrdering<S>(this);
}
/**
* Returns a new ordering on {@code F} which orders elements by first applying a function to them,
* then comparing those results using {@code this}. For example, to compare objects by their
* string forms, in a case-insensitive manner, use:
*
* {@snippet :
* Ordering.from(String.CASE_INSENSITIVE_ORDER)
* .onResultOf(Functions.toStringFunction())
* }
*
* <p><b>Java 8+ users:</b> Use {@code Comparator.comparing(function, thisComparator)} instead
* (you can omit the comparator if it is the natural order).
*/
public <F extends @Nullable Object> Ordering<F> onResultOf(Function<F, ? extends T> function) {
return new ByFunctionOrdering<>(function, this);
}
<T2 extends T> Ordering<Entry<T2, ?>> onKeys() {
return onResultOf(Entry::getKey);
}
/**
* Returns an ordering which first uses the ordering {@code this}, but which in the event of a
* "tie", then delegates to {@code secondaryComparator}. For example, to sort a bug list first by
* status and second by priority, you might use {@code byStatus.compound(byPriority)}. For a
* compound ordering with three or more components, simply chain multiple calls to this method.
*
* <p>An ordering produced by this method, or a chain of calls to this method, is equivalent to
* one created using {@link Ordering#compound(Iterable)} on the same component comparators.
*
* <p>The returned object is serializable if this object and {@code secondaryComparator} are both
* serializable.
*
* <p><b>Java 8+ users:</b> Use {@code thisComparator.thenComparing(secondaryComparator)} instead.
* Depending on what {@code secondaryComparator} is, one of the other overloads of {@code
* thenComparing} may be even more useful.
*/
public <U extends T> Ordering<U> compound(Comparator<? super U> secondaryComparator) {
return new CompoundOrdering<>(this, checkNotNull(secondaryComparator));
}
/**
* Returns an ordering which tries each given comparator in order until a non-zero result is
* found, returning that result, and returning zero only if all comparators return zero. The
* returned ordering is based on the state of the {@code comparators} iterable at the time it was
* provided to this method.
*
* <p>The returned ordering is equivalent to that produced using {@code
* Ordering.from(comp1).compound(comp2).compound(comp3) . . .}.
*
* <p>The returned object is serializable if each of the {@code comparators} is serializable.
*
* <p><b>Warning:</b> Supplying an argument with undefined iteration order, such as a {@link
* HashSet}, will produce non-deterministic results.
*
* <p><b>Java 8+ users:</b> Use a chain of calls to {@link Comparator#thenComparing(Comparator)},
* or {@code comparatorCollection.stream().reduce(Comparator::thenComparing).get()} (if the
* collection might be empty, also provide a default comparator as the {@code identity} parameter
* to {@code reduce}).
*
* @param comparators the comparators to try in order
*/
public static <T extends @Nullable Object> Ordering<T> compound(
Iterable<? extends Comparator<? super T>> comparators) {
return new CompoundOrdering<>(comparators);
}
/**
* Returns a new ordering which sorts iterables by comparing corresponding elements pairwise until
* a nonzero result is found; imposes "dictionary order". If the end of one iterable is reached,
* but not the other, the shorter iterable is considered to be less than the longer one. For
* example, a lexicographical natural ordering over integers considers {@code [] < [1] < [1, 1] <
* [1, 2] < [2]}.
*
* <p>Note that {@code ordering.lexicographical().reverse()} is not equivalent to {@code
* ordering.reverse().lexicographical()} (consider how each would order {@code [1]} and {@code [1,
* 1]}).
*
* <p><b>Java 8+ users:</b> Use {@link Comparators#lexicographical(Comparator)} instead.
*
* @since 2.0
*/
// type parameter <S> lets us avoid the extra <String> in statements like:
// Ordering<Iterable<String>> o =
// Ordering.<String>natural().lexicographical();
public <S extends T> Ordering<Iterable<S>> lexicographical() {
/*
* Note that technically the returned ordering should be capable of
* handling not just {@code Iterable<S>} instances, but also any {@code
* Iterable<? extends S>}. However, the need for this comes up so rarely
* that it doesn't justify making everyone else deal with the very ugly
* wildcard.
*/
return new LexicographicalOrdering<S>(this);
}
// Regular instance methods
@Override
public abstract int compare(@ParametricNullness T left, @ParametricNullness T right);
/**
* Returns the least of the specified values according to this ordering. If there are multiple
* least values, the first of those is returned. The iterator will be left exhausted: its {@code
* hasNext()} method will return {@code false}.
*
* <p><b>Java 8+ users:</b> Use {@code Streams.stream(iterator).min(thisComparator).get()} instead
* (but note that it does not guarantee which tied minimum element is returned).
*
* @param iterator the iterator whose minimum element is to be determined
* @throws NoSuchElementException if {@code iterator} is empty
* @throws ClassCastException if the parameters are not <i>mutually comparable</i> under this
* ordering.
* @since 11.0
*/
@ParametricNullness
public <E extends T> E min(Iterator<E> iterator) {
// let this throw NoSuchElementException as necessary
E minSoFar = iterator.next();
while (iterator.hasNext()) {
minSoFar = this.<E>min(minSoFar, iterator.next());
}
return minSoFar;
}
/**
* Returns the least of the specified values according to this ordering. If there are multiple
* least values, the first of those is returned.
*
* <p><b>Java 8+ users:</b> If {@code iterable} is a {@link Collection}, use {@code
* Collections.min(collection, thisComparator)} instead. Otherwise, use {@code
* Streams.stream(iterable).min(thisComparator).get()} instead. Note that these alternatives do
* not guarantee which tied minimum element is returned.
*
* @param iterable the iterable whose minimum element is to be determined
* @throws NoSuchElementException if {@code iterable} is empty
* @throws ClassCastException if the parameters are not <i>mutually comparable</i> under this
* ordering.
*/
@ParametricNullness
public <E extends T> E min(Iterable<E> iterable) {
return min(iterable.iterator());
}
/**
* Returns the lesser of the two values according to this ordering. If the values compare as 0,
* the first is returned.
*
* <p><b>Implementation note:</b> this method is invoked by the default implementations of the
* other {@code min} overloads, so overriding it will affect their behavior.
*
* <p><b>Note:</b> Consider using {@code Comparators.min(a, b, thisComparator)} instead. If {@code
* thisComparator} is {@link Ordering#natural}, then use {@code Comparators.min(a, b)}.
*
* @param a value to compare, returned if less than or equal to b.
* @param b value to compare.
* @throws ClassCastException if the parameters are not <i>mutually comparable</i> under this
* ordering.
*/
@ParametricNullness
public <E extends T> E min(@ParametricNullness E a, @ParametricNullness E b) {
return (compare(a, b) <= 0) ? a : b;
}
/**
* Returns the least of the specified values according to this ordering. If there are multiple
* least values, the first of those is returned.
*
* <p><b>Java 8+ users:</b> Use {@code Collections.min(Arrays.asList(a, b, c...), thisComparator)}
* instead (but note that it does not guarantee which tied minimum element is returned).
*
* @param a value to compare, returned if less than or equal to the rest.
* @param b value to compare
* @param c value to compare
* @param rest values to compare
* @throws ClassCastException if the parameters are not <i>mutually comparable</i> under this
* ordering.
*/
@ParametricNullness
public <E extends T> E min(
@ParametricNullness E a, @ParametricNullness E b, @ParametricNullness E c, E... rest) {
E minSoFar = min(min(a, b), c);
for (E r : rest) {
minSoFar = min(minSoFar, r);
}
return minSoFar;
}
/**
* Returns the greatest of the specified values according to this ordering. If there are multiple
* greatest values, the first of those is returned. The iterator will be left exhausted: its
* {@code hasNext()} method will return {@code false}.
*
* <p><b>Java 8+ users:</b> Use {@code Streams.stream(iterator).max(thisComparator).get()} instead
* (but note that it does not guarantee which tied maximum element is returned).
*
* @param iterator the iterator whose maximum element is to be determined
* @throws NoSuchElementException if {@code iterator} is empty
* @throws ClassCastException if the parameters are not <i>mutually comparable</i> under this
* ordering.
* @since 11.0
*/
@ParametricNullness
public <E extends T> E max(Iterator<E> iterator) {
// let this throw NoSuchElementException as necessary
E maxSoFar = iterator.next();
while (iterator.hasNext()) {
maxSoFar = this.<E>max(maxSoFar, iterator.next());
}
return maxSoFar;
}
/**
* Returns the greatest of the specified values according to this ordering. If there are multiple
* greatest values, the first of those is returned.
*
* <p><b>Java 8+ users:</b> If {@code iterable} is a {@link Collection}, use {@code
* Collections.max(collection, thisComparator)} instead. Otherwise, use {@code
* Streams.stream(iterable).max(thisComparator).get()} instead. Note that these alternatives do
* not guarantee which tied maximum element is returned.
*
* @param iterable the iterable whose maximum element is to be determined
* @throws NoSuchElementException if {@code iterable} is empty
* @throws ClassCastException if the parameters are not <i>mutually comparable</i> under this
* ordering.
*/
@ParametricNullness
public <E extends T> E max(Iterable<E> iterable) {
return max(iterable.iterator());
}
/**
* Returns the greater of the two values according to this ordering. If the values compare as 0,
* the first is returned.
*
* <p><b>Implementation note:</b> this method is invoked by the default implementations of the
* other {@code max} overloads, so overriding it will affect their behavior.
*
* <p><b>Note:</b> Consider using {@code Comparators.max(a, b, thisComparator)} instead. If {@code
* thisComparator} is {@link Ordering#natural}, then use {@code Comparators.max(a, b)}.
*
* @param a value to compare, returned if greater than or equal to b.
* @param b value to compare.
* @throws ClassCastException if the parameters are not <i>mutually comparable</i> under this
* ordering.
*/
@ParametricNullness
public <E extends T> E max(@ParametricNullness E a, @ParametricNullness E b) {
return (compare(a, b) >= 0) ? a : b;
}
/**
* Returns the greatest of the specified values according to this ordering. If there are multiple
* greatest values, the first of those is returned.
*
* <p><b>Java 8+ users:</b> Use {@code Collections.max(Arrays.asList(a, b, c...), thisComparator)}
* instead (but note that it does not guarantee which tied maximum element is returned).
*
* @param a value to compare, returned if greater than or equal to the rest.
* @param b value to compare
* @param c value to compare
* @param rest values to compare
* @throws ClassCastException if the parameters are not <i>mutually comparable</i> under this
* ordering.
*/
@ParametricNullness
public <E extends T> E max(
@ParametricNullness E a, @ParametricNullness E b, @ParametricNullness E c, E... rest) {
E maxSoFar = max(max(a, b), c);
for (E r : rest) {
maxSoFar = max(maxSoFar, r);
}
return maxSoFar;
}
/**
* Returns the {@code k} least elements of the given iterable according to this ordering, in order
* from least to greatest. If there are fewer than {@code k} elements present, all will be
* included.
*
* <p>The implementation does not necessarily use a <i>stable</i> sorting algorithm; when multiple
* elements are equivalent, it is undefined which will come first.
*
* <p><b>Java 8+ users:</b> Use {@code Streams.stream(iterable).collect(Comparators.least(k,
* thisComparator))} instead.
*
* @return an immutable {@code RandomAccess} list of the {@code k} least elements in ascending
* order
* @throws IllegalArgumentException if {@code k} is negative
* @since 8.0
*/
public <E extends T> List<E> leastOf(Iterable<E> iterable, int k) {
if (iterable instanceof Collection) {
Collection<E> collection = (Collection<E>) iterable;
if (collection.size() <= 2L * k) {
// In this case, just dumping the collection to an array and sorting is
// faster than using the implementation for Iterator, which is
// specialized for k much smaller than n.
@SuppressWarnings("unchecked") // c only contains E's and doesn't escape
E[] array = (E[]) collection.toArray();
sort(array, this);
if (array.length > k) {
array = Arrays.copyOf(array, k);
}
return unmodifiableList(asList(array));
}
}
return leastOf(iterable.iterator(), k);
}
/**
* Returns the {@code k} least elements from the given iterator according to this ordering, in
* order from least to greatest. If there are fewer than {@code k} elements present, all will be
* included.
*
* <p>The implementation does not necessarily use a <i>stable</i> sorting algorithm; when multiple
* elements are equivalent, it is undefined which will come first.
*
* <p><b>Java 8+ users:</b> Use {@code Streams.stream(iterator).collect(Comparators.least(k,
* thisComparator))} instead.
*
* @return an immutable {@code RandomAccess} list of the {@code k} least elements in ascending
* order
* @throws IllegalArgumentException if {@code k} is negative
* @since 14.0
*/
@SuppressWarnings("EmptyList") // ImmutableList doesn't support nullable element types
public <E extends T> List<E> leastOf(Iterator<E> iterator, int k) {
checkNotNull(iterator);
checkNonnegative(k, "k");
if (k == 0 || !iterator.hasNext()) {
return emptyList();
} else if (k >= Integer.MAX_VALUE / 2) {
// k is really large; just do a straightforward sorted-copy-and-sublist
ArrayList<E> list = Lists.newArrayList(iterator);
sort(list, this);
if (list.size() > k) {
list.subList(k, list.size()).clear();
}
list.trimToSize();
return unmodifiableList(list);
} else {
TopKSelector<E> selector = TopKSelector.least(k, this);
selector.offerAll(iterator);
return selector.topK();
}
}
/**
* Returns the {@code k} greatest elements of the given iterable according to this ordering, in
* order from greatest to least. If there are fewer than {@code k} elements present, all will be
* included.
*
* <p>The implementation does not necessarily use a <i>stable</i> sorting algorithm; when multiple
* elements are equivalent, it is undefined which will come first.
*
* <p><b>Java 8+ users:</b> Use {@code Streams.stream(iterable).collect(Comparators.greatest(k,
* thisComparator))} instead.
*
* @return an immutable {@code RandomAccess} list of the {@code k} greatest elements in
* <i>descending order</i>
* @throws IllegalArgumentException if {@code k} is negative
* @since 8.0
*/
public <E extends T> List<E> greatestOf(Iterable<E> iterable, int k) {
// TODO(kevinb): see if delegation is hurting performance noticeably
// TODO(kevinb): if we change this implementation, add full unit tests.
return this.<E>reverse().leastOf(iterable, k);
}
/**
* Returns the {@code k} greatest elements from the given iterator according to this ordering, in
* order from greatest to least. If there are fewer than {@code k} elements present, all will be
* included.
*
* <p>The implementation does not necessarily use a <i>stable</i> sorting algorithm; when multiple
* elements are equivalent, it is undefined which will come first.
*
* <p><b>Java 8+ users:</b> Use {@code Streams.stream(iterator).collect(Comparators.greatest(k,
* thisComparator))} instead.
*
* @return an immutable {@code RandomAccess} list of the {@code k} greatest elements in
* <i>descending order</i>
* @throws IllegalArgumentException if {@code k} is negative
* @since 14.0
*/
public <E extends T> List<E> greatestOf(Iterator<E> iterator, int k) {
return this.<E>reverse().leastOf(iterator, k);
}
/**
* Returns a <b>mutable</b> list containing {@code elements} sorted by this ordering; use this
* only when the resulting list may need further modification, or may contain {@code null}. The
* input is not modified. The returned list is serializable and has random access.
*
* <p>Unlike {@link Sets#newTreeSet(Iterable)}, this method does not discard elements that are
* duplicates according to the comparator. The sort performed is <i>stable</i>, meaning that such
* elements will appear in the returned list in the same order they appeared in {@code elements}.
*
* <p><b>Performance note:</b> According to our
* benchmarking
* on Open JDK 7, {@link #immutableSortedCopy} generally performs better (in both time and space)
* than this method, and this method in turn generally performs better than copying the list and
* calling {@link Collections#sort(List)}.
*/
// TODO(kevinb): rerun benchmarks including new options
public <E extends T> List<E> sortedCopy(Iterable<E> elements) {
@SuppressWarnings("unchecked") // does not escape, and contains only E's
E[] array = (E[]) Iterables.toArray(elements);
sort(array, this);
return new ArrayList<>(asList(array));
}
/**
* Returns an <b>immutable</b> list containing {@code elements} sorted by this ordering. The input
* is not modified.
*
* <p>Unlike {@link Sets#newTreeSet(Iterable)}, this method does not discard elements that are
* duplicates according to the comparator. The sort performed is <i>stable</i>, meaning that such
* elements will appear in the returned list in the same order they appeared in {@code elements}.
*
* <p><b>Performance note:</b> According to our
* benchmarking
* on Open JDK 7, this method is the most efficient way to make a sorted copy of a collection.
*
* @throws NullPointerException if any element of {@code elements} is {@code null}
* @since 3.0
*/
// TODO(kevinb): rerun benchmarks including new options
public <E extends @NonNull T> ImmutableList<E> immutableSortedCopy(Iterable<E> elements) {
return ImmutableList.sortedCopyOf(this, elements);
}
/**
* Returns {@code true} if each element in {@code iterable} after the first is greater than or
* equal to the element that preceded it, according to this ordering. Note that this is always
* true when the iterable has fewer than two elements.
*
* <p><b>Java 8+ users:</b> Use the equivalent {@link Comparators#isInOrder(Iterable, Comparator)}
* instead, since the rest of {@code Ordering} is mostly obsolete (as explained in the class
* documentation).
*/
public boolean isOrdered(Iterable<? extends T> iterable) {
Iterator<? extends T> it = iterable.iterator();
if (it.hasNext()) {
T prev = it.next();
while (it.hasNext()) {
T next = it.next();
if (compare(prev, next) > 0) {
return false;
}
prev = next;
}
}
return true;
}
/**
* Returns {@code true} if each element in {@code iterable} after the first is <i>strictly</i>
* greater than the element that preceded it, according to this ordering. Note that this is always
* true when the iterable has fewer than two elements.
*
* <p><b>Java 8+ users:</b> Use the equivalent {@link Comparators#isInStrictOrder(Iterable,
* Comparator)} instead, since the rest of {@code Ordering} is mostly obsolete (as explained in
* the
|
ArbitraryOrdering
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/mapper/vectors/VectorDVLeafFieldData.java
|
{
"start": 6175,
"end": 8026
}
|
class ____ implements FormattedDocValues {
private float[] vector = new float[dims];
private FloatVectorValues floatVectorValues; // use when indexed
private KnnVectorValues.DocIndexIterator iterator; // use when indexed
private BinaryDocValues binary; // use when not indexed
FloatDocValues() {
try {
if (indexed) {
floatVectorValues = reader.getFloatVectorValues(field);
iterator = (floatVectorValues == null) ? null : floatVectorValues.iterator();
} else {
binary = DocValues.getBinary(reader, field);
}
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
}
@Override
public boolean advanceExact(int docId) throws IOException {
if (indexed) {
if (iteratorAdvanceExact(iterator, docId) == false) {
return false;
}
vector = floatVectorValues.vectorValue(iterator.index());
} else {
if (binary == null || binary.advanceExact(docId) == false) {
return false;
}
BytesRef ref = binary.binaryValue();
decodeDenseVector(indexVersion, ref, vector);
}
return true;
}
void decodeDenseVector(IndexVersion indexVersion, BytesRef ref, float[] vector) {
VectorEncoderDecoder.decodeDenseVector(indexVersion, ref, vector);
}
@Override
public int docValueCount() {
return 1;
}
@Override
public Object nextValue() {
return Arrays.copyOf(vector, vector.length);
}
}
private
|
FloatDocValues
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/subprotocol/SubprotocolSelectedTest.java
|
{
"start": 1551,
"end": 2142
}
|
class ____ {
@Inject
WebSocketConnection connection;
@OnOpen
Uni<Void> open() {
if (connection.handshakeRequest().header(SEC_WEBSOCKET_PROTOCOL) == null) {
return connection.sendText("Sec-WebSocket-Protocol not set: " + connection.handshakeRequest().headers());
} else if ("oak".equals(connection.subprotocol())) {
return connection.sendText("ok");
} else {
return connection.sendText("Invalid protocol: " + connection.subprotocol());
}
}
}
}
|
Endpoint
|
java
|
elastic__elasticsearch
|
test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
|
{
"start": 32532,
"end": 33934
}
|
class ____ implements AggregationScript.Factory {
private final MockDeterministicScript script;
MockAggregationScript(MockDeterministicScript script) {
this.script = script;
}
@Override
public boolean isResultDeterministic() {
return script.isResultDeterministic();
}
@Override
public AggregationScript.LeafFactory newFactory(Map<String, Object> params, SearchLookup lookup) {
return new AggregationScript.LeafFactory() {
@Override
public AggregationScript newInstance(final LeafReaderContext ctx) {
return new AggregationScript(params, lookup, ctx) {
@Override
public Object execute() {
Map<String, Object> vars = new HashMap<>(params);
vars.put("params", params);
vars.put("doc", getDoc());
vars.put("_score", get_score());
vars.put("_value", get_value());
return script.apply(vars);
}
};
}
@Override
public boolean needs_score() {
return true;
}
};
}
}
|
MockAggregationScript
|
java
|
quarkusio__quarkus
|
extensions/spring-security/deployment/src/test/java/io/quarkus/spring/security/deployment/app/SpringSecuredSubClass.java
|
{
"start": 196,
"end": 388
}
|
class ____ extends BeanWithSpringSecurityMethodAnnotations {
@Override
@Secured("user")
public String restricted() {
return "restrictedOnMethod";
}
}
|
SpringSecuredSubClass
|
java
|
apache__camel
|
components/camel-jgroups-raft/src/main/java/org/apache/camel/component/jgroups/raft/JGroupsRaftEndpoint.java
|
{
"start": 1773,
"end": 8023
}
|
class ____ extends DefaultEndpoint {
private static final Logger LOG = LoggerFactory.getLogger(JGroupsRaftEndpoint.class);
private AtomicInteger connectCount = new AtomicInteger();
private RaftHandle raftHandle;
private RaftHandle resolvedRaftHandle;
private StateMachine stateMachine;
private String raftId;
private String channelProperties;
@UriPath
@Metadata(required = true)
private String clusterName;
@UriParam(label = "consumer", defaultValue = "false")
private boolean enableRoleChangeEvents;
public JGroupsRaftEndpoint(String endpointUri, String clusterName, Component component,
String raftId, String channelProperties, StateMachine stateMachine, RaftHandle raftHandle) {
super(endpointUri, component);
this.clusterName = clusterName;
this.raftId = raftId;
this.channelProperties = channelProperties;
this.stateMachine = stateMachine;
this.raftHandle = raftHandle;
}
@Override
public Producer createProducer() {
return new JGroupsRaftProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
JGroupsRaftConsumer consumer = new JGroupsRaftConsumer(this, processor, clusterName, enableRoleChangeEvents);
configureConsumer(consumer);
return consumer;
}
public void populateJGroupsRaftHeaders(Exchange exchange) {
exchange.getIn().setHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_COMMIT_INDEX, resolvedRaftHandle.commitIndex());
exchange.getIn().setHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_CURRENT_TERM, resolvedRaftHandle.currentTerm());
exchange.getIn().setHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_IS_LEADER, resolvedRaftHandle.isLeader());
exchange.getIn().setHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_LAST_APPLIED, resolvedRaftHandle.lastApplied());
exchange.getIn().setHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_LOG_SIZE, resolvedRaftHandle.logSize());
exchange.getIn().setHeader(JGroupsRaftConstants.HEADER_JGROUPSRAFT_RAFT_ID, resolvedRaftHandle.raftId());
}
@Override
protected void doStart() throws Exception {
LOG.info("Resolving JGroupsraft handle {}", getEndpointUri());
super.doStart();
resolvedRaftHandle = resolveHandle();
}
@Override
protected void doStop() throws Exception {
LOG.info("Closing JGroupsraft Channel {}", getEndpointUri());
if (resolvedRaftHandle != null && resolvedRaftHandle.channel() != null) {
resolvedRaftHandle.channel().close();
LOG.info("Closed JGroupsraft Channel {}", getEndpointUri());
}
LOG.info("Closing Log {}", getEndpointUri());
if (resolvedRaftHandle != null && resolvedRaftHandle.log() != null) {
resolvedRaftHandle.log().close();
LOG.info("Closed Log Channel {}", getEndpointUri());
}
super.doStop();
}
private RaftHandle resolveHandle() throws Exception {
if (raftHandle != null) {
LOG.trace("Raft Handle resolved as passed by Component: {}", raftHandle);
return raftHandle;
}
if (channelProperties != null && !channelProperties.isEmpty()) {
LOG.trace("Raft Handle created with configured channelProperties: {} and state machine: {}", channelProperties,
stateMachine);
return new RaftHandle(new JChannel(channelProperties).name(raftId), stateMachine).raftId(raftId);
}
LOG.trace("Raft Handle created with defaults: {}, {},", JGroupsRaftConstants.DEFAULT_JGROUPSRAFT_CONFIG, stateMachine);
return new RaftHandle(new JChannel(JGroupsRaftConstants.DEFAULT_JGROUPSRAFT_CONFIG).name(raftId), stateMachine)
.raftId(raftId);
}
/**
* Connect shared RaftHandle channel, called by producer and consumer.
*
* @throws Exception
*/
public void connect() throws Exception {
connectCount.incrementAndGet();
LOG.trace("Connecting JGroups-raft Channel {} with cluster name: {}, raftHandle: {} and using config: {}",
getEndpointUri(), clusterName, resolvedRaftHandle, channelProperties == null ? "default" : channelProperties);
resolvedRaftHandle.channel().connect(clusterName);
}
/**
* Disconnect shared RaftHandle channel, called by producer and consumer.
*/
public void disconnect() {
if (connectCount.decrementAndGet() == 0) {
LOG.trace("Disconnecting JGroupsraft Channel {}", getEndpointUri());
resolvedRaftHandle.channel().disconnect();
}
}
public String getClusterName() {
return clusterName;
}
/**
* The name of the JGroupsraft cluster the component should connect to.
*/
public void setClusterName(String clusterName) {
this.clusterName = clusterName;
}
public boolean isEnableRoleChangeEvents() {
return enableRoleChangeEvents;
}
/**
* If set to true, the consumer endpoint will receive roleChange event as well (not just connecting and/or using the
* state machine). By default it is set to false.
*/
public void setEnableRoleChangeEvents(boolean enableRoleChangeEvents) {
this.enableRoleChangeEvents = enableRoleChangeEvents;
}
public String getChannelProperties() {
return channelProperties;
}
public void setChannelProperties(String channelProperties) {
this.channelProperties = channelProperties;
}
public String getRaftId() {
return raftId;
}
public void setRaftId(String raftId) {
this.raftId = raftId;
}
public RaftHandle getRaftHandle() {
return raftHandle;
}
public void setRaftHandle(RaftHandle raftHandle) {
this.raftHandle = raftHandle;
}
public StateMachine getStateMachine() {
return stateMachine;
}
public void setStateMachine(StateMachine stateMachine) {
this.stateMachine = stateMachine;
}
public RaftHandle getResolvedRaftHandle() {
return resolvedRaftHandle;
}
}
|
JGroupsRaftEndpoint
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/contract/AbfsFileSystemContract.java
|
{
"start": 1304,
"end": 2173
}
|
class ____ extends AbstractBondedFSContract {
public static final String CONTRACT_XML = "abfs.xml";
private final boolean isSecure;
public AbfsFileSystemContract(final Configuration conf, boolean secure) {
super(conf);
//insert the base features
addConfResource(CONTRACT_XML);
this.isSecure = secure;
}
@Override
public String getScheme() {
return isSecure ? FileSystemUriSchemes.ABFS_SECURE_SCHEME
: FileSystemUriSchemes.ABFS_SCHEME;
}
@Override
public Path getTestPath() {
return new Path(UriUtils.generateUniqueTestPath());
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder(
"AbfsFileSystemContract{");
sb.append("isSecureScheme=").append(isSecure);
sb.append(super.toString());
sb.append('}');
return sb.toString();
}
}
|
AbfsFileSystemContract
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/CharValueSerializer.java
|
{
"start": 2922,
"end": 3119
}
|
class ____
extends SimpleTypeSerializerSnapshot<CharValue> {
public CharValueSerializerSnapshot() {
super(() -> INSTANCE);
}
}
}
|
CharValueSerializerSnapshot
|
java
|
apache__kafka
|
connect/runtime/src/test/java/org/apache/kafka/connect/cli/AbstractConnectCliTest.java
|
{
"start": 3045,
"end": 3307
}
|
class
____ restrictedClassLoader = new RestrictedClassLoader(providerClassName);
Thread.currentThread().setContextClassLoader(restrictedClassLoader);
// Verify the restricted classloader cannot load the ConfigProvider
|
ClassLoader
|
java
|
reactor__reactor-core
|
reactor-core/src/test/java/reactor/core/publisher/ContextTests.java
|
{
"start": 1062,
"end": 10365
}
|
class ____ {
@Test
public void contextPassing() throws InterruptedException {
AtomicReference<Context> innerC = new AtomicReference<>();
Flux.range(1, 1000)
.log()
.flatMapSequential(d -> Mono.just(d)
//ctx: test=baseSubscriber_take_range
//return: old (discarded since inner)
.contextWrite(ctx -> {
if (innerC.get() == null) {
innerC.set(ctx.put("test", ctx.get("test") + "_innerFlatmap"));
}
return ctx;
})
.log())
.take(10, false)
//ctx: test=baseSubscriber_take
//return: test=baseSubscriber_take_range
.contextWrite(ctx -> ctx.put("test", ctx.get("test") + "_range"))
//ctx: test=baseSubscriber
//return: test=baseSubscriber_take
.contextWrite(ctx -> ctx.put("test", ctx.get("test") + "_take"))
.log()
.subscribe(new BaseSubscriber<Integer>() {
@Override
public Context currentContext() {
return Context.empty()
.put("test", "baseSubscriber");
}
});
assertThat(innerC.get().getOrDefault("test", "defaultUnexpected")).isEqualTo("baseSubscriber_take_range_innerFlatmap");
}
@Test
public void contextPassing2() throws InterruptedException {
AtomicReference<String> innerC = new AtomicReference<>();
Flux.range(1, 1000)
.log()
.flatMapSequential(d ->
Mono.just(d)
.contextWrite(ctx -> {
if (innerC.get() == null) {
innerC.set(""+ ctx.get("test") + ctx.get("test2"));
}
return ctx;
})
.log())
.map(d -> d)
.take(10, false)
.contextWrite(ctx -> ctx.put("test", "foo"))
.contextWrite(ctx -> ctx.put("test2", "bar"))
.log()
.subscribe();
assertThat(innerC).hasValue("foobar");
}
@Test
void contextGet() {
StepVerifier.create(Flux.range(1, 1000)
.log()
.handle((d, c) -> c.next(c.contextView().get("test") + "" + d))
.skip(3)
.take(3, false)
.handle((d, c) -> c.next(c.contextView().get("test2") + "" + d))
.contextWrite(ctx -> ctx.put("test", "foo"))
.contextWrite(ctx -> ctx.put("test2", "bar"))
.log())
.expectNext("barfoo4")
.expectNext("barfoo5")
.expectNext("barfoo6")
.verifyComplete();
}
@Test
public void currentContext() throws InterruptedException {
StepVerifier.create(Mono.just("foo")
.flatMap(d -> Mono.deferContextual(Mono::just)
.map(c -> d + c.get(Integer.class)))
.contextWrite(ctx ->
ctx.put(Integer.class, ctx.get(Integer.class) + 1))
.flatMapMany(Mono::just)
.contextWrite(ctx -> ctx.put(Integer.class, 0)))
.expectNext("foo1")
.verifyComplete();
}
@Test
public void currentContextWithEmpty() throws InterruptedException {
StepVerifier.create(Mono.just("foo")
.flatMap(d -> Mono.deferContextual(Mono::just)
.map(c -> d + c.get(Integer.class))))
.verifyErrorMatches(t -> t instanceof NoSuchElementException
&& "Context is empty".equals(t.getMessage()));
}
@Test
public void contextGetHide() throws InterruptedException {
StepVerifier.create(Flux.range(1, 1000)
.hide()
.log()
.map(d -> d)
.handle((d, c) -> c.next(c.contextView().get("test") + "" + d))
.skip(3)
.take(3, false)
.handle((d, c) -> c.next(c.contextView().get("test2") + "" + d))
.contextWrite(ctx -> ctx.put("test", "foo"))
.contextWrite(ctx -> ctx.put("test2", "bar"))
.log())
.expectNext("barfoo4")
.expectNext("barfoo5")
.expectNext("barfoo6")
.verifyComplete();
}
@Test
public void contextGetMono() throws InterruptedException {
StepVerifier.create(Mono.just(1)
.log()
.handle((d, c) -> c.next(c.contextView().get("test") + "" + d))
.handle((d, c) -> c.next(c.contextView().get("test2") + "" + d))
.contextWrite(ctx -> ctx.put("test2", "bar"))
.contextWrite(ctx -> ctx.put("test", "foo"))
.log())
.expectNext("barfoo1")
.verifyComplete();
}
@Test
public void contextGetHideMono() throws InterruptedException {
StepVerifier.create(Mono.just(1)
.hide()
.log()
.handle((d, c) -> c.next(c.contextView().get("test") + "" + d))
.handle((d, c) -> c.next(c.contextView().get("test2") + "" + d))
.contextWrite(ctx -> ctx.put("test", "foo"))
.contextWrite(ctx -> ctx.put("test2", "bar"))
.log())
.expectNext("barfoo1")
.verifyComplete();
}
@Test
public void monoSubscriberContextPutsAll() {
StepVerifier.create(
Mono.just("foo")
.flatMap(v -> Mono.deferContextual(Mono::just))
.contextWrite(Context.of("foo", "bar", 1, "baz"))
.contextWrite(Context.of("initial", "value"))
)
.expectNextMatches(c -> c.hasKey("foo") && c.hasKey(1) && c.hasKey("initial"))
.verifyComplete();
}
@Test
public void monoSubscriberContextWithMergedEmpty() {
StepVerifier.create(
Mono.just("foo")
.flatMap(v -> Mono.deferContextual(Mono::just))
.contextWrite(Context.empty())
.contextWrite(Context.of("initial", "value"))
)
.expectNextMatches(c -> c.hasKey("initial"))
.verifyComplete();
}
@Test
public void monoSubscriberContextWithBothEmpty() {
StepVerifier.create(
Mono.just("foo")
.flatMap(v -> Mono.deferContextual(Mono::just))
.contextWrite(Context.empty())
.contextWrite(Context.empty())
)
.expectNextMatches(ContextView::isEmpty)
.verifyComplete();
}
@Test
public void fluxSubscriberContextPutsAll() {
StepVerifier.create(
Flux.just("foo")
.flatMap(v -> Mono.deferContextual(Mono::just))
.contextWrite(Context.of("foo", "bar", 1, "baz"))
.contextWrite(Context.of("initial", "value"))
)
.expectNextMatches(c -> c.hasKey("foo") && c.hasKey(1) && c.hasKey("initial"))
.verifyComplete();
}
@Test
public void fluxSubscriberContextWithMergedEmpty() {
StepVerifier.create(
Flux.just("foo")
.flatMap(v -> Mono.deferContextual(Mono::just))
.contextWrite(Context.empty())
.contextWrite(Context.of("initial", "value"))
)
.expectNextMatches(c -> c.hasKey("initial"))
.verifyComplete();
}
@Test
public void fluxSubscriberContextWithBothEmpty() {
StepVerifier.create(
Flux.just("foo")
.flatMap(v -> Mono.deferContextual(Mono::just))
.contextWrite(Context.empty())
.contextWrite(Context.empty())
)
.expectNextMatches(ContextView::isEmpty)
.verifyComplete();
}
@Test
public void expectAccessibleContextWithInitialContext() {
StepVerifierOptions stepVerifierOptions = StepVerifierOptions.create()
.withInitialContext(Context.of("foo", "bar"));
StepVerifier.create(Mono.just(1), stepVerifierOptions)
.expectAccessibleContext()
.contains("foo", "bar")
.then()
.expectNext(1)
.verifyComplete();
}
@Test
public void contextAccessibleWithEmptySubscriptionAndOperator1() {
StepVerifier.create(Flux.empty()
.contextWrite(Context.of("a", "b")))
.expectAccessibleContext()
.contains("a", "b")
.then()
.verifyComplete();
}
@Test
public void contextAccessibleWithEmptySubscriptionAndOperator2() {
StepVerifier.create(Flux.empty()
.map(i -> i), StepVerifierOptions.create().withInitialContext(Context.of("a", "b")))
.expectAccessibleContext()
.contains("a", "b")
.then()
.verifyComplete();
}
@Test
public void contextNotAccessibleWithEmptySubscriptionOnly() {
StepVerifier.create(Flux.empty(), StepVerifierOptions.create().withInitialContext(Context.of("a", "b")))
.expectNoAccessibleContext()
.verifyComplete();
}
}
|
ContextTests
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-plugin-kubernetes/src/test/java/org/apache/camel/dsl/jbang/core/commands/kubernetes/KubernetesCommandTest.java
|
{
"start": 1716,
"end": 4196
}
|
class ____ extends KubernetesBaseTest {
@Test
public void shouldResolvePlugin() {
FactoryFinder factoryFinder
= new DefaultFactoryFinder(new DefaultClassResolver(), FactoryFinder.DEFAULT_PATH + "camel-jbang-plugin/");
Assertions.assertTrue(factoryFinder.newInstance("camel-jbang-plugin-kubernetes").isPresent());
}
@Test
public void shouldPrintKubernetesManifest() {
CamelJBangMain.run(createMain(), "kubernetes", "run", "classpath:route.yaml",
"--disable-auto=true", "--image-group", "camel-test", "--output", "yaml");
List<HasMetadata> resources = kubernetesClient.load(getKubernetesManifestAsStream(printer.getOutput())).items();
Assertions.assertEquals(2, resources.size());
Deployment deployment = resources.stream()
.filter(it -> Deployment.class.isAssignableFrom(it.getClass()))
.map(Deployment.class::cast)
.findFirst()
.orElseThrow(() -> new RuntimeCamelException("Missing deployment in Kubernetes manifest"));
var matchLabels = deployment.getSpec().getSelector().getMatchLabels();
Assertions.assertEquals("route", deployment.getMetadata().getName());
Assertions.assertEquals(1, deployment.getSpec().getTemplate().getSpec().getContainers().size());
Assertions.assertEquals("route", deployment.getMetadata().getLabels().get(BaseTrait.KUBERNETES_LABEL_NAME));
Assertions.assertEquals("route", deployment.getSpec().getTemplate().getSpec().getContainers().get(0).getName());
Assertions.assertEquals("route", matchLabels.get(BaseTrait.KUBERNETES_LABEL_NAME));
Assertions.assertEquals("jkube", matchLabels.get(BaseTrait.KUBERNETES_LABEL_MANAGED_BY));
Assertions.assertEquals("camel-test/route:1.0-SNAPSHOT",
deployment.getSpec().getTemplate().getSpec().getContainers().get(0).getImage());
Assertions.assertEquals("IfNotPresent",
deployment.getSpec().getTemplate().getSpec().getContainers().get(0).getImagePullPolicy());
}
private CamelJBangMain createMain() {
return new CamelJBangMain() {
@Override
public void quit(int exitCode) {
Assertions.assertEquals(0, exitCode,
"Main finished with exit code %d:%n%s".formatted(exitCode, printer.getOutput()));
}
}.withPrinter(printer);
}
}
|
KubernetesCommandTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/balancer/Dispatcher.java
|
{
"start": 20066,
"end": 20169
}
|
class ____ {
/** A group of storages in a datanode with the same storage type. */
public
|
DDatanode
|
java
|
spring-projects__spring-boot
|
loader/spring-boot-loader-tools/src/main/java/org/springframework/boot/loader/tools/Layouts.java
|
{
"start": 1778,
"end": 2586
}
|
class ____ implements RepackagingLayout {
@Override
public @Nullable String getLauncherClassName() {
return "org.springframework.boot.loader.launch.JarLauncher";
}
@Override
public String getLibraryLocation(String libraryName, @Nullable LibraryScope scope) {
return "BOOT-INF/lib/";
}
@Override
public String getClassesLocation() {
return "";
}
@Override
public String getRepackagedClassesLocation() {
return "BOOT-INF/classes/";
}
@Override
public String getClasspathIndexFileLocation() {
return "BOOT-INF/classpath.idx";
}
@Override
public String getLayersIndexFileLocation() {
return "BOOT-INF/layers.idx";
}
@Override
public boolean isExecutable() {
return true;
}
}
/**
* Executable expanded archive layout.
*/
public static
|
Jar
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/internal/util/concurrent/DetachedThreadLocal.java
|
{
"start": 380,
"end": 3975
}
|
class ____<T> implements Runnable {
final WeakConcurrentMap<Thread, T> map;
public DetachedThreadLocal(Cleaner cleaner) {
switch (cleaner) {
case THREAD:
case MANUAL:
map =
new WeakConcurrentMap<Thread, T>(cleaner == Cleaner.THREAD) {
@Override
protected T defaultValue(Thread key) {
return DetachedThreadLocal.this.initialValue(key);
}
};
break;
case INLINE:
map =
new WeakConcurrentMap.WithInlinedExpunction<Thread, T>() {
@Override
protected T defaultValue(Thread key) {
return DetachedThreadLocal.this.initialValue(key);
}
};
break;
default:
throw new AssertionError();
}
}
public T get() {
return map.get(Thread.currentThread());
}
/**
* @param thread The thread for which to set a thread-local value.
* @return The value associated with this thread.
*/
public T get(Thread thread) {
return map.get(thread);
}
public void set(T value) {
map.put(Thread.currentThread(), value);
}
public void clear() {
map.remove(Thread.currentThread());
}
/**
* Clears all thread local references for all threads.
*/
public void clearAll() {
map.clear();
}
/**
* @param thread The thread to which this thread's thread local value should be pushed.
* @return The value being set.
*/
public T pushTo(Thread thread) {
T value = get();
if (value != null) {
map.put(thread, inheritValue(value));
}
return value;
}
/**
* @param thread The thread from which the thread thread local value should be fetched.
* @return The value being set.
*/
public T fetchFrom(Thread thread) {
T value = map.get(thread);
if (value != null) {
set(inheritValue(value));
}
return value;
}
/**
* @param thread The thread for which to set a thread-local value.
* @param value The value to set.
*/
public void define(Thread thread, T value) {
map.put(thread, value);
}
/**
* @param thread The thread for which an initial value is created.
* @return The initial value for any thread local. If no default is set, the default value is {@code null}.
*/
protected T initialValue(Thread thread) {
return null;
}
/**
* @param value The value that is inherited.
* @return The inherited value.
*/
protected T inheritValue(T value) {
return value;
}
/**
* @return The weak map that backs this detached thread local.
*/
public WeakConcurrentMap<Thread, T> getBackingMap() {
return map;
}
@Override
public void run() {
map.run();
}
/**
* Determines the cleaning format. A reference is removed either by an explicitly started cleaner thread
* associated with this instance ({@link Cleaner#THREAD}), as a result of interacting with this thread local
* from any thread ({@link Cleaner#INLINE} or manually by submitting the detached thread local to a thread
* ({@link Cleaner#MANUAL}).
*/
public
|
DetachedThreadLocal
|
java
|
apache__camel
|
components/camel-fhir/camel-fhir-component/src/generated/java/org/apache/camel/component/fhir/internal/FhirValidateApiMethod.java
|
{
"start": 662,
"end": 1862
}
|
enum ____ implements ApiMethod {
RESOURCE(
ca.uhn.fhir.rest.api.MethodOutcome.class,
"resource",
arg("resource", org.hl7.fhir.instance.model.api.IBaseResource.class),
arg("extraParameters", java.util.Map.class)),
RESOURCE_1(
ca.uhn.fhir.rest.api.MethodOutcome.class,
"resource",
arg("resourceAsString", String.class),
arg("extraParameters", java.util.Map.class));
private final ApiMethod apiMethod;
FhirValidateApiMethod(Class<?> resultType, String name, ApiMethodArg... args) {
this.apiMethod = new ApiMethodImpl(FhirValidate.class, resultType, name, args);
}
@Override
public String getName() { return apiMethod.getName(); }
@Override
public Class<?> getResultType() { return apiMethod.getResultType(); }
@Override
public List<String> getArgNames() { return apiMethod.getArgNames(); }
@Override
public List<String> getSetterArgNames() { return apiMethod.getSetterArgNames(); }
@Override
public List<Class<?>> getArgTypes() { return apiMethod.getArgTypes(); }
@Override
public Method getMethod() { return apiMethod.getMethod(); }
}
|
FhirValidateApiMethod
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/sequence/NegativeValueSequenceTest.java
|
{
"start": 13751,
"end": 14045
}
|
class ____ {
@Id
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "ID_GENERATOR")
@SequenceGenerator(name = "ID_GENERATOR", sequenceName = "NEG_TWO_INCR_POS_START_SEQ", initialValue= 5, allocationSize = -2)
public Integer id;
}
}
|
NegativeTwoIncrementSizePositiveInitialValue
|
java
|
apache__flink
|
flink-streaming-java/src/test/java/org/apache/flink/asyncprocessing/operators/AsyncKeyedCoProcessOperatorTest.java
|
{
"start": 2556,
"end": 17235
}
|
class ____ {
@Test
void testDeclareProcessor() throws Exception {
TestChainDeclarationFunction function = new TestChainDeclarationFunction();
AsyncKeyedCoProcessOperator<String, Integer, String, String> operator =
new AsyncKeyedCoProcessOperator<>(function);
AsyncKeyedTwoInputStreamOperatorTestHarness<String, Integer, String, String> testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
ArrayList<StreamRecord<String>> expectedOutput = new ArrayList<>();
testHarness.open();
testHarness.processElement1(new StreamRecord<>(5));
expectedOutput.add(new StreamRecord<>("11"));
assertThat(function.value.get()).isEqualTo(11);
testHarness.processElement2(new StreamRecord<>("6"));
expectedOutput.add(new StreamRecord<>("6"));
assertThat(function.value.get()).isEqualTo(17);
assertThat(testHarness.getOutput()).containsExactly(expectedOutput.toArray());
}
@Test
void testTimestampAndWatermarkQuerying() throws Exception {
AsyncKeyedCoProcessOperator<String, Integer, String, String> operator =
new AsyncKeyedCoProcessOperator<>(new WatermarkQueryingProcessFunction());
AsyncKeyedTwoInputStreamOperatorTestHarness<String, Integer, String, String> testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
testHarness.processWatermark1(new Watermark(17));
testHarness.processWatermark2(new Watermark(17));
testHarness.processElement1(new StreamRecord<>(5, 12L));
testHarness.processWatermark1(new Watermark(42));
testHarness.processWatermark2(new Watermark(42));
testHarness.processElement2(new StreamRecord<>("6", 13L));
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new Watermark(17L));
expectedOutput.add(new StreamRecord<>("5WM:17 TS:12", 12L));
expectedOutput.add(new Watermark(42L));
expectedOutput.add(new StreamRecord<>("6WM:42 TS:13", 13L));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
testHarness.close();
}
@Test
void testTimestampAndProcessingTimeQuerying() throws Exception {
AsyncKeyedCoProcessOperator<String, Integer, String, String> operator =
new AsyncKeyedCoProcessOperator<>(new ProcessingTimeQueryingProcessFunction());
AsyncKeyedTwoInputStreamOperatorTestHarness<String, Integer, String, String> testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
testHarness.setProcessingTime(17);
testHarness.processElement1(new StreamRecord<>(5));
testHarness.setProcessingTime(42);
testHarness.processElement2(new StreamRecord<>("6"));
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new StreamRecord<>("5PT:17 TS:null"));
expectedOutput.add(new StreamRecord<>("6PT:42 TS:null"));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
testHarness.close();
}
@Test
void testEventTimeTimers() throws Exception {
AsyncKeyedCoProcessOperator<String, Integer, String, String> operator =
new AsyncKeyedCoProcessOperator<>(new EventTimeTriggeringProcessFunction());
AsyncKeyedTwoInputStreamOperatorTestHarness<String, Integer, String, String> testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
testHarness.processElement1(new StreamRecord<>(17, 42L));
testHarness.processElement2(new StreamRecord<>("18", 42L));
testHarness.processWatermark1(new Watermark(5));
testHarness.processWatermark2(new Watermark(5));
testHarness.processWatermark1(new Watermark(6));
testHarness.processWatermark2(new Watermark(6));
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new StreamRecord<>("INPUT1:17", 42L));
expectedOutput.add(new StreamRecord<>("INPUT2:18", 42L));
expectedOutput.add(new StreamRecord<>("17:1777", 5L));
expectedOutput.add(new Watermark(5L));
expectedOutput.add(new StreamRecord<>("18:1777", 6L));
expectedOutput.add(new Watermark(6L));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
testHarness.close();
}
@Test
void testProcessingTimeTimers() throws Exception {
AsyncKeyedCoProcessOperator<String, Integer, String, String> operator =
new AsyncKeyedCoProcessOperator<>(new ProcessingTimeTriggeringProcessFunction());
AsyncKeyedTwoInputStreamOperatorTestHarness<String, Integer, String, String> testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
testHarness.processElement1(new StreamRecord<>(17));
testHarness.processElement2(new StreamRecord<>("18"));
testHarness.setProcessingTime(5);
testHarness.setProcessingTime(6);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new StreamRecord<>("INPUT1:17"));
expectedOutput.add(new StreamRecord<>("INPUT2:18"));
expectedOutput.add(new StreamRecord<>("1777"));
expectedOutput.add(new StreamRecord<>("1777"));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
testHarness.close();
}
/** Verifies that we don't have leakage between different keys. */
@Test
void testEventTimeTimerWithState() throws Exception {
AsyncKeyedCoProcessOperator<String, Integer, String, String> operator =
new AsyncKeyedCoProcessOperator<>(new EventTimeTriggeringStatefulProcessFunction());
AsyncKeyedTwoInputStreamOperatorTestHarness<String, Integer, String, String> testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
testHarness.processWatermark1(new Watermark(1));
testHarness.processWatermark2(new Watermark(1));
testHarness.processElement1(new StreamRecord<>(17, 0L)); // should set timer for 6
testHarness.processElement1(new StreamRecord<>(13, 0L)); // should set timer for 6
testHarness.processWatermark1(new Watermark(2));
testHarness.processWatermark2(new Watermark(2));
testHarness.processElement1(new StreamRecord<>(13, 1L)); // should delete timer
testHarness.processElement2(new StreamRecord<>("42", 1L)); // should set timer for 7
testHarness.processWatermark1(new Watermark(6));
testHarness.processWatermark2(new Watermark(6));
testHarness.processWatermark1(new Watermark(7));
testHarness.processWatermark2(new Watermark(7));
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new Watermark(1L));
expectedOutput.add(new StreamRecord<>("INPUT1:17", 0L));
expectedOutput.add(new StreamRecord<>("INPUT1:13", 0L));
expectedOutput.add(new Watermark(2L));
expectedOutput.add(new StreamRecord<>("INPUT2:42", 1L));
expectedOutput.add(new StreamRecord<>("STATE:17", 6L));
expectedOutput.add(new Watermark(6L));
expectedOutput.add(new StreamRecord<>("STATE:42", 7L));
expectedOutput.add(new Watermark(7L));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
testHarness.close();
}
/** Verifies that we don't have leakage between different keys. */
@Test
void testProcessingTimeTimerWithState() throws Exception {
AsyncKeyedCoProcessOperator<String, Integer, String, String> operator =
new AsyncKeyedCoProcessOperator<>(
new ProcessingTimeTriggeringStatefulProcessFunction());
AsyncKeyedTwoInputStreamOperatorTestHarness<String, Integer, String, String> testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
testHarness.setProcessingTime(1);
testHarness.processElement1(new StreamRecord<>(17)); // should set timer for 6
testHarness.processElement1(new StreamRecord<>(13)); // should set timer for 6
testHarness.setProcessingTime(2);
testHarness.processElement1(new StreamRecord<>(13)); // should delete timer again
testHarness.processElement2(new StreamRecord<>("42")); // should set timer for 7
testHarness.setProcessingTime(6);
testHarness.setProcessingTime(7);
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new StreamRecord<>("INPUT1:17"));
expectedOutput.add(new StreamRecord<>("INPUT1:13"));
expectedOutput.add(new StreamRecord<>("INPUT2:42"));
expectedOutput.add(new StreamRecord<>("STATE:17"));
expectedOutput.add(new StreamRecord<>("STATE:42"));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
testHarness.close();
}
@Test
void testSnapshotAndRestore() throws Exception {
AsyncKeyedCoProcessOperator<String, Integer, String, String> operator =
new AsyncKeyedCoProcessOperator<>(new BothTriggeringProcessFunction());
AsyncKeyedTwoInputStreamOperatorTestHarness<String, Integer, String, String> testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
testHarness.processElement1(new StreamRecord<>(5, 12L));
testHarness.processElement2(new StreamRecord<>("5", 12L));
// snapshot and restore from scratch
OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);
testHarness.close();
operator = new AsyncKeyedCoProcessOperator<>(new BothTriggeringProcessFunction());
testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
testHarness.setProcessingTime(5);
testHarness.processWatermark1(new Watermark(6));
testHarness.processWatermark2(new Watermark(6));
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new StreamRecord<>("PROC:1777"));
expectedOutput.add(new StreamRecord<>("EVENT:1777", 6L));
expectedOutput.add(new Watermark(6));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
testHarness.close();
}
@Test
void testGetCurrentKeyFromContext() throws Exception {
AsyncKeyedCoProcessOperator<String, Integer, String, String> operator =
new AsyncKeyedCoProcessOperator<>(new AppendCurrentKeyProcessFunction());
AsyncKeyedTwoInputStreamOperatorTestHarness<String, Integer, String, String> testHarness =
AsyncKeyedTwoInputStreamOperatorTestHarness.create(
operator,
new IntToStringKeySelector<>(),
new IdentityKeySelector<>(),
BasicTypeInfo.STRING_TYPE_INFO);
testHarness.setup();
testHarness.open();
testHarness.processElement1(new StreamRecord<>(5));
testHarness.processElement1(new StreamRecord<>(6));
testHarness.processElement2(new StreamRecord<>("hello"));
testHarness.processElement2(new StreamRecord<>("world"));
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
expectedOutput.add(new StreamRecord<>("5,5"));
expectedOutput.add(new StreamRecord<>("6,6"));
expectedOutput.add(new StreamRecord<>("hello,hello"));
expectedOutput.add(new StreamRecord<>("world,world"));
TestHarnessUtil.assertOutputEquals(
"Output was not correct.", expectedOutput, testHarness.getOutput());
testHarness.close();
}
private static
|
AsyncKeyedCoProcessOperatorTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/refcolnames/misc/Misc4Test.java
|
{
"start": 4749,
"end": 5298
}
|
class ____ implements Serializable {
private int a1;
private String a2;
private String a3;
@Column(name = "A1", nullable = false, length = 15)
@Id
public int getA1() {
return a1;
}
public void setA1(int a1) {
this.a1 = a1;
}
@Column(name = "A2", nullable = false, length = 15)
@Id
public String getA2() {
return a2;
}
public void setA2(String a2) {
this.a2 = a2;
}
@Column(name = "A3", nullable = false, length = 15)
@Id
public String getA3() {
return a3;
}
public void setA3(String a3) {
this.a3 = a3;
}
}
}
|
BPK
|
java
|
apache__commons-lang
|
src/main/java/org/apache/commons/lang3/CharUtils.java
|
{
"start": 18267,
"end": 18567
}
|
class ____ be used as {@code CharUtils.toString('c');}.
*
* <p>This constructor is public to permit tools that require a JavaBean instance
* to operate.</p>
*
* @deprecated TODO Make private in 4.0.
*/
@Deprecated
public CharUtils() {
// empty
}
}
|
should
|
java
|
elastic__elasticsearch
|
modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldBlockLoaderTests.java
|
{
"start": 762,
"end": 1489
}
|
class ____ extends NumberFieldBlockLoaderTestCase<Double> {
public ScaledFloatFieldBlockLoaderTests(Params params) {
super(FieldType.SCALED_FLOAT, params);
}
@Override
protected Double convert(Number value, Map<String, Object> fieldMapping) {
var scalingFactor = ((Number) fieldMapping.get("scaling_factor")).doubleValue();
// Adjust values coming from source to the way they are stored in doc_values.
// See mapper implementation.
return Math.round(value.doubleValue() * scalingFactor) / scalingFactor;
}
@Override
protected Collection<? extends Plugin> getPlugins() {
return List.of(new MapperExtrasPlugin());
}
}
|
ScaledFloatFieldBlockLoaderTests
|
java
|
apache__camel
|
dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/OpenstackNovaComponentBuilderFactory.java
|
{
"start": 1379,
"end": 1881
}
|
interface ____ {
/**
* OpenStack Nova (camel-openstack)
* Access OpenStack to manage compute resources.
*
* Category: container
* Since: 2.19
* Maven coordinates: org.apache.camel:camel-openstack
*
* @return the dsl builder
*/
static OpenstackNovaComponentBuilder openstackNova() {
return new OpenstackNovaComponentBuilderImpl();
}
/**
* Builder for the OpenStack Nova component.
*/
|
OpenstackNovaComponentBuilderFactory
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java
|
{
"start": 14224,
"end": 15521
}
|
class ____ in any other state, implying that
* the subclass should do its close operations.
*/
protected synchronized boolean enterClosedState() {
if (!state.equals(Closed)) {
enterState(null, Closed);
return true;
} else {
return false;
}
}
@Override
public void close() throws IOException {
if (enterClosedState()) {
LOG.debug("Closed {}", this);
innerClose();
}
}
/**
* Inner close logic for subclasses to implement.
*
* @throws IOException raised on errors performing I/O.
*/
protected void innerClose() throws IOException {
}
/**
* A block has been allocated.
*/
protected void blockAllocated() {
if (statistics != null) {
statistics.blockAllocated();
}
}
/**
* A block has been released.
*/
protected void blockReleased() {
if (statistics != null) {
statistics.blockReleased();
}
}
protected BlockUploadStatistics getStatistics() {
return statistics;
}
public long getIndex() {
return index;
}
}
// ====================================================================
/**
* Use byte arrays on the heap for storage.
*/
static
|
was
|
java
|
spring-projects__spring-framework
|
spring-messaging/src/main/java/org/springframework/messaging/support/HeaderMapper.java
|
{
"start": 1093,
"end": 1570
}
|
interface ____<T> {
/**
* Map from the given {@link MessageHeaders} to the specified target message.
* @param headers the abstracted MessageHeaders
* @param target the native target message
*/
void fromHeaders(MessageHeaders headers, T target);
/**
* Map from the given target message to abstracted {@link MessageHeaders}.
* @param source the native target message
* @return the abstracted MessageHeaders
*/
MessageHeaders toHeaders(T source);
}
|
HeaderMapper
|
java
|
micronaut-projects__micronaut-core
|
test-suite/src/test/java/io/micronaut/docs/http/client/bind/type/MetadataClientArgumentBinder.java
|
{
"start": 437,
"end": 1106
}
|
class ____ implements TypedClientArgumentRequestBinder<Metadata> {
@Override
@NonNull
public Argument<Metadata> argumentType() {
return Argument.of(Metadata.class);
}
@Override
public void bind(@NonNull ArgumentConversionContext<Metadata> context,
@NonNull ClientRequestUriContext uriContext,
@NonNull Metadata value,
@NonNull MutableHttpRequest<?> request) {
request.header("X-Metadata-Version", value.getVersion().toString());
request.header("X-Metadata-Deployment-Id", value.getDeploymentId().toString());
}
}
//end::clazz[]
|
MetadataClientArgumentBinder
|
java
|
spring-projects__spring-boot
|
cli/spring-boot-cli/src/main/java/org/springframework/boot/cli/command/init/ProjectGenerationRequest.java
|
{
"start": 1092,
"end": 11040
}
|
class ____ {
public static final String DEFAULT_SERVICE_URL = "https://start.spring.io";
private String serviceUrl = DEFAULT_SERVICE_URL;
private @Nullable String output;
private boolean extract;
private @Nullable String groupId;
private @Nullable String artifactId;
private @Nullable String version;
private @Nullable String name;
private @Nullable String description;
private @Nullable String packageName;
private @Nullable String type;
private @Nullable String packaging;
private @Nullable String build;
private @Nullable String format;
private boolean detectType;
private @Nullable String javaVersion;
private @Nullable String language;
private @Nullable String bootVersion;
private final List<String> dependencies = new ArrayList<>();
/**
* The URL of the service to use.
* @return the service URL
* @see #DEFAULT_SERVICE_URL
*/
String getServiceUrl() {
return this.serviceUrl;
}
void setServiceUrl(String serviceUrl) {
this.serviceUrl = serviceUrl;
}
/**
* The location of the generated project.
* @return the location of the generated project
*/
@Nullable String getOutput() {
return this.output;
}
void setOutput(@Nullable String output) {
if (output != null && output.endsWith("/")) {
this.output = output.substring(0, output.length() - 1);
this.extract = true;
}
else {
this.output = output;
}
}
/**
* Whether the project archive should be extracted in the output location. If the
* {@link #getOutput() output} ends with "/", the project is extracted automatically.
* @return {@code true} if the archive should be extracted, otherwise {@code false}
*/
boolean isExtract() {
return this.extract;
}
void setExtract(boolean extract) {
this.extract = extract;
}
/**
* The groupId to use or {@code null} if it should not be customized.
* @return the groupId or {@code null}
*/
@Nullable String getGroupId() {
return this.groupId;
}
void setGroupId(@Nullable String groupId) {
this.groupId = groupId;
}
/**
* The artifactId to use or {@code null} if it should not be customized.
* @return the artifactId or {@code null}
*/
@Nullable String getArtifactId() {
return this.artifactId;
}
void setArtifactId(@Nullable String artifactId) {
this.artifactId = artifactId;
}
/**
* The artifact version to use or {@code null} if it should not be customized.
* @return the artifact version or {@code null}
*/
@Nullable String getVersion() {
return this.version;
}
void setVersion(@Nullable String version) {
this.version = version;
}
/**
* The name to use or {@code null} if it should not be customized.
* @return the name or {@code null}
*/
@Nullable String getName() {
return this.name;
}
void setName(@Nullable String name) {
this.name = name;
}
/**
* The description to use or {@code null} if it should not be customized.
* @return the description or {@code null}
*/
@Nullable String getDescription() {
return this.description;
}
void setDescription(@Nullable String description) {
this.description = description;
}
/**
* Return the package name or {@code null} if it should not be customized.
* @return the package name or {@code null}
*/
@Nullable String getPackageName() {
return this.packageName;
}
void setPackageName(@Nullable String packageName) {
this.packageName = packageName;
}
/**
* The type of project to generate. Should match one of the advertised type that the
* service supports. If not set, the default is retrieved from the service metadata.
* @return the project type
*/
@Nullable String getType() {
return this.type;
}
void setType(@Nullable String type) {
this.type = type;
}
/**
* The packaging type or {@code null} if it should not be customized.
* @return the packaging type or {@code null}
*/
@Nullable String getPackaging() {
return this.packaging;
}
void setPackaging(@Nullable String packaging) {
this.packaging = packaging;
}
/**
* The build type to use. Ignored if a type is set. Can be used alongside the
* {@link #getFormat() format} to identify the type to use.
* @return the build type
*/
@Nullable String getBuild() {
return this.build;
}
void setBuild(@Nullable String build) {
this.build = build;
}
/**
* The project format to use. Ignored if a type is set. Can be used alongside the
* {@link #getBuild() build} to identify the type to use.
* @return the project format
*/
@Nullable String getFormat() {
return this.format;
}
void setFormat(@Nullable String format) {
this.format = format;
}
/**
* Whether the type should be detected based on the build and format value.
* @return {@code true} if type detection will be performed, otherwise {@code false}
*/
boolean isDetectType() {
return this.detectType;
}
void setDetectType(boolean detectType) {
this.detectType = detectType;
}
/**
* The Java version to use or {@code null} if it should not be customized.
* @return the Java version or {@code null}
*/
@Nullable String getJavaVersion() {
return this.javaVersion;
}
void setJavaVersion(@Nullable String javaVersion) {
this.javaVersion = javaVersion;
}
/**
* The programming language to use or {@code null} if it should not be customized.
* @return the programming language or {@code null}
*/
@Nullable String getLanguage() {
return this.language;
}
void setLanguage(@Nullable String language) {
this.language = language;
}
/**
* The Spring Boot version to use or {@code null} if it should not be customized.
* @return the Spring Boot version or {@code null}
*/
@Nullable String getBootVersion() {
return this.bootVersion;
}
void setBootVersion(@Nullable String bootVersion) {
this.bootVersion = bootVersion;
}
/**
* The identifiers of the dependencies to include in the project.
* @return the dependency identifiers
*/
List<String> getDependencies() {
return this.dependencies;
}
/**
* Generates the URI to use to generate a project represented by this request.
* @param metadata the metadata that describes the service
* @return the project generation URI
*/
URI generateUrl(InitializrServiceMetadata metadata) {
try {
URIBuilder builder = new URIBuilder(this.serviceUrl);
StringBuilder sb = new StringBuilder();
if (builder.getPath() != null) {
sb.append(builder.getPath());
}
ProjectType projectType = determineProjectType(metadata);
this.type = projectType.getId();
sb.append(projectType.getAction());
builder.setPath(sb.toString());
if (!this.dependencies.isEmpty()) {
builder.setParameter("dependencies", StringUtils.collectionToCommaDelimitedString(this.dependencies));
}
if (this.groupId != null) {
builder.setParameter("groupId", this.groupId);
}
String resolvedArtifactId = resolveArtifactId();
if (resolvedArtifactId != null) {
builder.setParameter("artifactId", resolvedArtifactId);
}
if (this.version != null) {
builder.setParameter("version", this.version);
}
if (this.name != null) {
builder.setParameter("name", this.name);
}
if (this.description != null) {
builder.setParameter("description", this.description);
}
if (this.packageName != null) {
builder.setParameter("packageName", this.packageName);
}
if (this.type != null) {
builder.setParameter("type", projectType.getId());
}
if (this.packaging != null) {
builder.setParameter("packaging", this.packaging);
}
if (this.javaVersion != null) {
builder.setParameter("javaVersion", this.javaVersion);
}
if (this.language != null) {
builder.setParameter("language", this.language);
}
if (this.bootVersion != null) {
builder.setParameter("bootVersion", this.bootVersion);
}
return builder.build();
}
catch (URISyntaxException ex) {
throw new ReportableException("Invalid service URL (" + ex.getMessage() + ")");
}
}
protected ProjectType determineProjectType(InitializrServiceMetadata metadata) {
if (this.type != null) {
ProjectType result = metadata.getProjectTypes().get(this.type);
if (result == null) {
throw new ReportableException(
("No project type with id '" + this.type + "' - check the service capabilities (--list)"));
}
return result;
}
else if (isDetectType()) {
Map<String, ProjectType> types = new HashMap<>(metadata.getProjectTypes());
if (this.build != null) {
filter(types, "build", this.build);
}
if (this.format != null) {
filter(types, "format", this.format);
}
if (types.size() == 1) {
return types.values().iterator().next();
}
else if (types.isEmpty()) {
throw new ReportableException("No type found with build '" + this.build + "' and format '" + this.format
+ "' check the service capabilities (--list)");
}
else {
throw new ReportableException("Multiple types found with build '" + this.build + "' and format '"
+ this.format + "' use --type with a more specific value " + types.keySet());
}
}
else {
ProjectType defaultType = metadata.getDefaultType();
if (defaultType == null) {
throw new ReportableException(("No project type is set and no default is defined. "
+ "Check the service capabilities (--list)"));
}
return defaultType;
}
}
/**
* Resolve the artifactId to use or {@code null} if it should not be customized.
* @return the artifactId
*/
protected @Nullable String resolveArtifactId() {
if (this.artifactId != null) {
return this.artifactId;
}
if (this.output != null) {
int i = this.output.lastIndexOf('.');
return (i != -1) ? this.output.substring(0, i) : this.output;
}
return null;
}
private static void filter(Map<String, ProjectType> projects, String tag, String tagValue) {
projects.entrySet().removeIf((entry) -> !tagValue.equals(entry.getValue().getTags().get(tag)));
}
}
|
ProjectGenerationRequest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AbstractSqlBlockingIntegTestCase.java
|
{
"start": 2157,
"end": 5536
}
|
class ____ extends ESIntegTestCase {
@Override
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) {
Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings));
settings.put(XPackSettings.SECURITY_ENABLED.getKey(), false);
settings.put(XPackSettings.WATCHER_ENABLED.getKey(), false);
settings.put(XPackSettings.GRAPH_ENABLED.getKey(), false);
settings.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false);
settings.put(LicenseSettings.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
return settings.build();
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(LocalStateSQLXPackPlugin.class, SearchBlockPlugin.class);
}
protected List<SearchBlockPlugin> initBlockFactory(boolean searchBlock, boolean fieldCapsBlock) {
List<SearchBlockPlugin> plugins = new ArrayList<>();
for (PluginsService pluginsService : internalCluster().getInstances(PluginsService.class)) {
pluginsService.filterPlugins(SearchBlockPlugin.class).forEach(plugins::add);
}
for (SearchBlockPlugin plugin : plugins) {
plugin.reset();
if (searchBlock) {
plugin.enableSearchBlock();
}
if (fieldCapsBlock) {
plugin.enableFieldCapBlock();
}
}
return plugins;
}
protected void disableBlocks(List<SearchBlockPlugin> plugins) {
disableFieldCapBlocks(plugins);
disableSearchBlocks(plugins);
}
protected void disableSearchBlocks(List<SearchBlockPlugin> plugins) {
for (SearchBlockPlugin plugin : plugins) {
plugin.disableSearchBlock();
}
}
protected void disableFieldCapBlocks(List<SearchBlockPlugin> plugins) {
for (SearchBlockPlugin plugin : plugins) {
plugin.disableFieldCapBlock();
}
}
protected void awaitForBlockedSearches(List<SearchBlockPlugin> plugins, String index) throws Exception {
int numberOfShards = getNumShards(index).numPrimaries;
assertBusy(() -> {
int numberOfBlockedPlugins = getNumberOfContexts(plugins);
logger.trace("The plugin blocked on {} out of {} shards", numberOfBlockedPlugins, numberOfShards);
assertThat(numberOfBlockedPlugins, greaterThan(0));
});
}
protected int getNumberOfContexts(List<SearchBlockPlugin> plugins) throws Exception {
int count = 0;
for (SearchBlockPlugin plugin : plugins) {
count += plugin.contexts.get();
}
return count;
}
protected int getNumberOfFieldCaps(List<SearchBlockPlugin> plugins) throws Exception {
int count = 0;
for (SearchBlockPlugin plugin : plugins) {
count += plugin.fieldCaps.get();
}
return count;
}
protected void awaitForBlockedFieldCaps(List<SearchBlockPlugin> plugins) throws Exception {
assertBusy(() -> {
int numberOfBlockedPlugins = getNumberOfFieldCaps(plugins);
logger.trace("The plugin blocked on {} nodes", numberOfBlockedPlugins);
assertThat(numberOfBlockedPlugins, greaterThan(0));
});
}
public static
|
AbstractSqlBlockingIntegTestCase
|
java
|
spring-projects__spring-boot
|
module/spring-boot-devtools/src/test/java/org/springframework/boot/devtools/autoconfigure/DevToolsR2dbcAutoConfigurationTests.java
|
{
"start": 6534,
"end": 6796
}
|
class ____ {
@Bean
ConnectionFactory connectionFactoryOne() {
return new SimpleTestConnectionFactory();
}
@Bean
ConnectionFactory connectionFactoryTwo() {
return new SimpleTestConnectionFactory();
}
}
}
|
MultipleConnectionFactoriesConfiguration
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/handlers/MatrixParamHandler.java
|
{
"start": 206,
"end": 489
}
|
class ____ implements ServerRestHandler {
@Override
public void handle(ResteasyReactiveRequestContext requestContext) throws Exception {
if (requestContext.getPath().indexOf(';') != -1) {
requestContext.initPathSegments();
}
}
}
|
MatrixParamHandler
|
java
|
elastic__elasticsearch
|
x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java
|
{
"start": 4335,
"end": 34656
}
|
class ____ extends AggregatorTestCase {
public void testNoDocs() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {}, true, doubleFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(emptyList()));
}
public void testUnmappedMetric() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {
writer.addDocument(singletonList(doubleField("s", 1.0)));
}, true, numberFieldType(NumberType.DOUBLE, "s"));
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), hasSize(1));
assertThat(result.getTopMetrics().get(0).getSortValue(), equalTo(SortValue.from(1.0)));
assertThat(result.getTopMetrics().get(0).getMetricValues(), equalTo(singletonList(null)));
}
public void testMissingValueForDoubleMetric() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {
writer.addDocument(singletonList(doubleField("s", 1.0)));
}, true, doubleFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), hasSize(1));
assertThat(result.getTopMetrics().get(0).getSortValue(), equalTo(SortValue.from(1.0)));
assertThat(result.getTopMetrics().get(0).getMetricValues(), equalTo(singletonList(null)));
}
public void testMissingValueForLongMetric() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {
writer.addDocument(singletonList(longField("s", 1)));
}, true, longFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), hasSize(1));
assertThat(result.getTopMetrics().get(0).getSortValue(), equalTo(SortValue.from(1)));
assertThat(result.getTopMetrics().get(0).getMetricValues(), equalTo(singletonList(null)));
}
public void testActualValueForDoubleMetric() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0)));
}, true, doubleFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.0, 2.0))));
}
public void testActualValueForLongMetric() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(longField("s", 1), longField("m", 2)));
}, true, longFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(1, 2))));
}
private InternalTopMetrics collectFromDoubles(TopMetricsAggregationBuilder builder) throws IOException {
return collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(doubleField("s", 2.0), doubleField("m", 3.0)));
}, true, doubleFields());
}
public void testSortByDoubleAscending() throws IOException {
InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC)));
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.0, 2.0))));
}
public void testSortByDoubleDescending() throws IOException {
InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC)));
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(2.0, 3.0))));
}
public void testSortByDoubleCastToLong() throws IOException {
InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").setNumericType("long")));
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(1, 2.0))));
}
public void testSortByDoubleTwoHits() throws IOException {
InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC), 2));
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(List.of(top(1.0, 2.0), top(2.0, 3.0))));
}
public void testSortByFloatAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0)));
}, true, floatAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.0, 2.0d))));
}
public void testSortByFloatDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0)));
}, true, floatAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(2.0, 3.0))));
}
public void testSortByLongAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0)));
}, true, longAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(10, 2.0))));
}
public void testSortByLongDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0)));
}, true, longAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(20, 3.0))));
}
public void testSortByScoreDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new ScoreSortBuilder().order(SortOrder.DESC));
InternalTopMetrics result = collect(builder, boostFoo(), writer -> {
writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0)));
}, true, textAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(2.0, 2.0))));
}
public void testSortByScoreAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new ScoreSortBuilder().order(SortOrder.ASC));
InternalTopMetrics result = collect(builder, boostFoo(), writer -> {
writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0)));
}, true, textAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.0, 3.0))));
}
public void testSortByScriptDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(scriptSortOnS().order(SortOrder.DESC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0)));
}, false, doubleFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(2.0, 2.0))));
}
public void testSortByScriptAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(scriptSortOnS().order(SortOrder.ASC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0)));
}, false, doubleFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.0, 3.0))));
}
/**
 * A STRING-typed script sort is rejected: top_metrics only supports numeric
 * script sorts, and the thrown message says exactly that.
 */
public void testSortByStringScriptFails() throws IOException {
Script script = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "s", emptyMap());
TopMetricsAggregationBuilder builder = simpleBuilder(new ScriptSortBuilder(script, ScriptSortType.STRING));
Exception e = expectThrows(IllegalArgumentException.class, () -> collect(builder, boostFoo(), writer -> {
writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0)));
}, true, textAndDoubleField()));
assertThat(
e.getMessage(),
equalTo("error building sort for [_script]: script sorting only supported on [numeric] scripts but was [string]")
);
}
/**
 * Indexes one geo_point near New York (40.7128, -74.0060) with metric 2.0 and
 * one near Los Angeles (34.0522, -118.2437) with metric 3.0, then collects
 * {@code builder} over them.
 */
private InternalTopMetrics collectFromNewYorkAndLA(TopMetricsAggregationBuilder builder) throws IOException {
return collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(geoPointField("s", 40.7128, -74.0060), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(geoPointField("s", 34.0522, -118.2437), doubleField("m", 3.0)));
}, true, geoPointAndDoubleField());
}
/**
 * Geo-distance sort descending keeps the point farther from the reference
 * point — the LA doc (distance ~1.205e7 m) with metric 3.0.
 */
public void testSortByGeoDistanceDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new GeoDistanceSortBuilder("s", 35.7796, 78.6382).order(SortOrder.DESC));
InternalTopMetrics result = collectFromNewYorkAndLA(builder);
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.2054632268631617E7, 3.0))));
}
/**
 * Geo-distance sort ascending keeps the nearer point — the New York doc
 * (distance ~1.106e7 m) with metric 2.0.
 */
public void testSortByGeoDistanceAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new GeoDistanceSortBuilder("s", 35.7796, 78.6382).order(SortOrder.ASC));
InternalTopMetrics result = collectFromNewYorkAndLA(builder);
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getTopMetrics(), equalTo(singletonList(top(1.1062351376961706E7, 2.0))));
}
/**
 * With size=2 both documents are returned, farthest first because the
 * geo-distance sort is descending.
 */
public void testSortByGeoDistanceTwoHits() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new GeoDistanceSortBuilder("s", 35.7796, 78.6382).order(SortOrder.DESC), 2);
InternalTopMetrics result = collectFromNewYorkAndLA(builder);
assertThat(result.getSize(), equalTo(2));
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getTopMetrics(), equalTo(List.of(top(1.2054632268631617E7, 3.0), top(1.1062351376961706E7, 2.0))));
}
/**
 * top_metrics works as a sub-aggregation of terms: each "c" bucket gets its
 * own winner by ascending "s" — bucket c=1.0 keeps (s=1, m=2), bucket c=2.0
 * keeps its only doc (s=4, m=9).
 */
public void testInsideTerms() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC));
TermsAggregationBuilder terms = new TermsAggregationBuilder("terms").field("c").subAggregation(builder);
Terms result = (Terms) collect(terms, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 1.0), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 2.0), doubleField("m", 3.0)));
writer.addDocument(Arrays.asList(doubleField("c", 2.0), doubleField("s", 4.0), doubleField("m", 9.0)));
}, true, numberFieldType(NumberType.DOUBLE, "c"), numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m"));
Terms.Bucket bucket1 = result.getBuckets().get(0);
assertThat(bucket1.getKey(), equalTo(1.0));
InternalTopMetrics top1 = bucket1.getAggregations().get("test");
assertThat(top1.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(top1.getTopMetrics(), equalTo(singletonList(top(1.0, 2.0))));
Terms.Bucket bucket2 = result.getBuckets().get(1);
assertThat(bucket2.getKey(), equalTo(2.0));
InternalTopMetrics top2 = bucket2.getAggregations().get("test");
assertThat(top2.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(top2.getTopMetrics(), equalTo(singletonList(top(4.0, 9.0))));
}
/**
 * A terms aggregation can order its buckets by the "m" metric of a size-1
 * top_metrics sub-aggregation: ascending on "test.m" puts bucket c=2 (its
 * winner has m=2) before bucket c=1 (winner m=9).
 */
public void testTermsSortedBySingle() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC));
TermsAggregationBuilder terms = new TermsAggregationBuilder("terms").field("c")
.subAggregation(builder)
.order(BucketOrder.aggregation("test", "m", true));
Terms result = (Terms) collect(terms, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 1.0), doubleField("m", 9.0)));
writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 2.0), doubleField("m", 3.0)));
writer.addDocument(Arrays.asList(doubleField("c", 2.0), doubleField("s", 4.0), doubleField("m", 2.0)));
}, true, numberFieldType(NumberType.DOUBLE, "c"), numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m"));
Terms.Bucket bucket1 = result.getBuckets().get(0);
assertThat(bucket1.getKey(), equalTo(2.0));
InternalTopMetrics top1 = bucket1.getAggregations().get("test");
assertThat(top1.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(top1.getTopMetrics(), equalTo(singletonList(top(4.0, 2.0))));
Terms.Bucket bucket2 = result.getBuckets().get(1);
assertThat(bucket2.getKey(), equalTo(1.0));
InternalTopMetrics top2 = bucket2.getAggregations().get("test");
assertThat(top2.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(top2.getTopMetrics(), equalTo(singletonList(top(1.0, 9.0))));
}
/**
 * Bucket ordering by a top_metrics metric also works when the sub-aggregation
 * keeps more than one hit (size=2): bucket c=2 still sorts first, and bucket
 * c=1 reports both of its hits in ascending "s" order.
 */
public void testTermsSortedByMulti() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC), 2);
TermsAggregationBuilder terms = new TermsAggregationBuilder("terms").field("c")
.subAggregation(builder)
.order(BucketOrder.aggregation("test", "m", true));
Terms result = (Terms) collect(terms, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 1.0), doubleField("m", 9.0)));
writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 2.0), doubleField("m", 3.0)));
writer.addDocument(Arrays.asList(doubleField("c", 2.0), doubleField("s", 4.0), doubleField("m", 2.0)));
}, true, numberFieldType(NumberType.DOUBLE, "c"), numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m"));
Terms.Bucket bucket1 = result.getBuckets().get(0);
assertThat(bucket1.getKey(), equalTo(2.0));
InternalTopMetrics top1 = bucket1.getAggregations().get("test");
assertThat(top1.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(top1.getTopMetrics(), equalTo(singletonList(top(4.0, 2.0))));
Terms.Bucket bucket2 = result.getBuckets().get(1);
assertThat(bucket2.getKey(), equalTo(1.0));
InternalTopMetrics top2 = bucket2.getAggregations().get("test");
assertThat(top2.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(top2.getTopMetrics(), equalTo(List.of(top(1.0, 9.0), top(2.0, 3.0))));
}
/**
 * Collecting the same top_metrics aggregator into many buckets must
 * eventually trip the request circuit breaker rather than OOM. Uses a mocked
 * breaker limited to 20kb and collects buckets one at a time until it breaks.
 */
public void testTonsOfBucketsTriggersBreaker() throws IOException {
// Build a "simple" circuit breaker that trips at 20k
CircuitBreakerService breaker = mock(CircuitBreakerService.class);
ByteSizeValue max = ByteSizeValue.of(20, ByteSizeUnit.KB);
when(breaker.getBreaker(CircuitBreaker.REQUEST)).thenReturn(new MockBigArrays.LimitedBreaker(CircuitBreaker.REQUEST, max));
// Collect some buckets with it
try (Directory directory = newDirectory()) {
try (RandomIndexWriter writer = new RandomIndexWriter(random(), directory)) {
writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0)));
}
try (DirectoryReader indexReader = DirectoryReader.open(directory)) {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC));
try (
AggregationContext context = createAggregationContext(
indexReader,
createIndexSettings(),
new MatchAllDocsQuery(),
breaker,
builder.bytesToPreallocate(),
MultiBucketConsumerService.DEFAULT_MAX_BUCKETS,
true,
false,
doubleFields()
)
) {
// Drive the aggregator by hand so we control exactly which bucket ordinal
// each collect call lands in.
Aggregator aggregator = builder.build(context, null).create(null, CardinalityUpperBound.ONE);
aggregator.preCollection();
assertThat(indexReader.leaves(), hasSize(1));
LeafBucketCollector leaf = aggregator.getLeafCollector(
new AggregationExecutionContext(indexReader.leaves().get(0), null, null, null)
);
/*
 * Collect some number of buckets that we *know* fit in the
 * breaker. The number of buckets feels fairly arbitrary but
 * it comes from:
 * budget = 15k = 20k - 5k for the "default weight" of every agg
 * The 646th bucket causes a resize which puts the total
 * just over 15k. This works out to more like 190 bits per bucket
 * when we're fairly sure this should take about 129 bits per
 * bucket. The difference is because, for arrays in of this size,
 * BigArrays allocates the new array before freeing the old one.
 * That causes us to trip when we're about 2/3 of the way to the
 * limit. And 2/3 of 190 is 126. Which is pretty much what we
 * expect. Sort of.
 */
int bucketThatBreaks = 646;
for (int b = 0; b < bucketThatBreaks; b++) {
try {
leaf.collect(0, b);
} catch (CircuitBreakingException e) {
throw new AssertionError("Unexpected circuit break at [" + b + "]. Expected at [" + bucketThatBreaks + "]", e);
}
}
CircuitBreakingException e = expectThrows(CircuitBreakingException.class, () -> leaf.collect(0, bucketThatBreaks));
assertThat(e.getMessage(), equalTo(MockBigArrays.ERROR_MESSAGE));
assertThat(e.getByteLimit(), equalTo(max.getBytes()));
assertThat(e.getBytesWanted(), equalTo(5872L));
}
}
}
}
/**
 * A single top_metrics aggregation can read several metric fields (m1 double,
 * m2 long, m3 double); the winning doc (lowest "s") reports all three values
 * in the order they were configured.
 */
public void testManyMetrics() throws IOException {
List<SortBuilder<?>> sorts = singletonList(new FieldSortBuilder("s").order(SortOrder.ASC));
TopMetricsAggregationBuilder builder = new TopMetricsAggregationBuilder(
"test",
sorts,
1,
List.of(
new MultiValuesSourceFieldConfig.Builder().setFieldName("m1").build(),
new MultiValuesSourceFieldConfig.Builder().setFieldName("m2").build(),
new MultiValuesSourceFieldConfig.Builder().setFieldName("m3").build()
)
);
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m1", 12.0), longField("m2", 22), doubleField("m3", 32.0)));
writer.addDocument(Arrays.asList(doubleField("s", 2.0), doubleField("m1", 13.0), longField("m2", 23), doubleField("m3", 33.0)));
}, true, manyMetricsFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(
result.getTopMetrics(),
equalTo(
singletonList(
new InternalTopMetrics.TopMetric(
DocValueFormat.RAW,
SortValue.from(1.0),
metricValues(SortValue.from(12.0), SortValue.from(22), SortValue.from(32.0))
)
)
)
);
}
/** Size-1 top_metrics builder sorting on "s" and reading metric "m". */
private TopMetricsAggregationBuilder simpleBuilder() {
    return simpleBuilder(new FieldSortBuilder("s"), 1);
}
/** Size-1 top_metrics builder using the given sort and reading metric "m". */
private TopMetricsAggregationBuilder simpleBuilder(SortBuilder<?> sort) {
    final int defaultSize = 1;
    return simpleBuilder(sort, defaultSize);
}
/**
 * Builds a top_metrics aggregation named "test" with the given sort and size
 * that reads the single metric field "m".
 */
private TopMetricsAggregationBuilder simpleBuilder(SortBuilder<?> sort, int size) {
    MultiValuesSourceFieldConfig metric = new MultiValuesSourceFieldConfig.Builder().setFieldName("m").build();
    return new TopMetricsAggregationBuilder("test", singletonList(sort), size, singletonList(metric));
}
/**
 * Build a query that matches all documents but adds 1 to the score of
 * all docs that contain "foo". We use this instead of a term query
 * directly because the score that can come from the term query can
 * vary quite a bit but this is super predictable.
 */
private Query boostFoo() {
// MUST match-all keeps every doc; the SHOULD constant-score clause (boost 1.0)
// bumps docs whose "s" field contains "foo".
return new BooleanQuery.Builder().add(new BooleanClause(new MatchAllDocsQuery(), Occur.MUST))
.add(new BooleanClause(new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("s", "foo"))), 1.0f), Occur.SHOULD))
.build();
}
/** Field types: double sort field "s" and double metric field "m". */
private MappedFieldType[] doubleFields() {
    MappedFieldType sortField = numberFieldType(NumberType.DOUBLE, "s");
    MappedFieldType metricField = numberFieldType(NumberType.DOUBLE, "m");
    return new MappedFieldType[] { sortField, metricField };
}
/** Field types: long sort field "s" and long metric field "m". */
private MappedFieldType[] longFields() {
    MappedFieldType sortField = numberFieldType(NumberType.LONG, "s");
    MappedFieldType metricField = numberFieldType(NumberType.LONG, "m");
    return new MappedFieldType[] { sortField, metricField };
}
/**
 * Field types for the multi-metric test: double sort "s" plus metrics
 * m1 (double), m2 (long) and m3 (double).
 */
private MappedFieldType[] manyMetricsFields() {
    MappedFieldType[] fields = new MappedFieldType[4];
    fields[0] = numberFieldType(NumberType.DOUBLE, "s");
    fields[1] = numberFieldType(NumberType.DOUBLE, "m1");
    fields[2] = numberFieldType(NumberType.LONG, "m2");
    fields[3] = numberFieldType(NumberType.DOUBLE, "m3");
    return fields;
}
/** Field types: float sort field "s" and double metric field "m". */
private MappedFieldType[] floatAndDoubleField() {
    MappedFieldType sortField = numberFieldType(NumberType.FLOAT, "s");
    MappedFieldType metricField = numberFieldType(NumberType.DOUBLE, "m");
    return new MappedFieldType[] { sortField, metricField };
}
/** Field types: long sort field "s" and double metric field "m". */
private MappedFieldType[] longAndDoubleField() {
    MappedFieldType sortField = numberFieldType(NumberType.LONG, "s");
    MappedFieldType metricField = numberFieldType(NumberType.DOUBLE, "m");
    return new MappedFieldType[] { sortField, metricField };
}
/** Field types: text sort field "s" and double metric field "m". */
private MappedFieldType[] textAndDoubleField() {
    MappedFieldType sortField = textFieldType("s");
    MappedFieldType metricField = numberFieldType(NumberType.DOUBLE, "m");
    return new MappedFieldType[] { sortField, metricField };
}
/** Field types: geo_point sort field "s" and double metric field "m". */
private MappedFieldType[] geoPointAndDoubleField() {
    MappedFieldType sortField = geoPointFieldType("s");
    MappedFieldType metricField = numberFieldType(NumberType.DOUBLE, "m");
    return new MappedFieldType[] { sortField, metricField };
}
/** Builds a numeric mapped field type for {@code name} with the given {@link NumberType}. */
private MappedFieldType numberFieldType(NumberType numberType, String name) {
return new NumberFieldMapper.NumberFieldType(name, numberType);
}
/**
 * Builds a text mapped field type for {@code name}. The second constructor
 * flag is randomized per run; the third is fixed to false.
 * NOTE(review): the meaning of those two flags is not visible here — confirm
 * against TextFieldMapper.TextFieldType before relying on them.
 */
private MappedFieldType textFieldType(String name) {
return new TextFieldMapper.TextFieldType(name, randomBoolean(), false);
}
/** Builds a geo_point mapped field type for {@code name}. */
private MappedFieldType geoPointFieldType(String name) {
return new GeoPointFieldMapper.GeoPointFieldType(name);
}
/** Doc-values field holding {@code value} encoded as a sortable long. */
private IndexableField doubleField(String name, double value) {
    long sortable = NumericUtils.doubleToSortableLong(value);
    return new SortedNumericDocValuesField(name, sortable);
}
/** Doc-values field holding {@code value} encoded as a sortable int. */
private IndexableField floatField(String name, float value) {
    int sortable = NumericUtils.floatToSortableInt(value);
    return new SortedNumericDocValuesField(name, sortable);
}
/** Doc-values field holding the raw long {@code value}. */
private IndexableField longField(String name, long value) {
return new SortedNumericDocValuesField(name, value);
}
/** Indexed (not stored) text field for {@code value}. */
private IndexableField textField(String name, String value) {
return new TextField(name, value, Field.Store.NO);
}
/** Lat/lon doc-values field for a geo point. */
private IndexableField geoPointField(String name, double lat, double lon) {
return new LatLonDocValuesField(name, lat, lon);
}
/**
 * Runs {@code builder} through the generic
 * {@link #collect(AggregationBuilder, Query, CheckedConsumer, boolean, MappedFieldType...)}
 * and additionally checks the result reports the same metric names, in order,
 * as the builder's configured metric fields.
 */
private InternalTopMetrics collect(
TopMetricsAggregationBuilder builder,
Query query,
CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
boolean shouldBeCached,
MappedFieldType... fields
) throws IOException {
InternalTopMetrics result = (InternalTopMetrics) collect((AggregationBuilder) builder, query, buildIndex, shouldBeCached, fields);
List<String> expectedFieldNames = builder.getMetricFields()
.stream()
.map(MultiValuesSourceFieldConfig::getFieldName)
.collect(toList());
assertThat(result.getMetricNames(), equalTo(expectedFieldNames));
return result;
}
/**
 * Builds a throwaway index via {@code buildIndex}, runs the aggregation over
 * it with {@code query}, verifies output field names, and returns the reduced
 * result. {@code shouldBeCached} asserts whether the request is expected to be
 * cacheable. All Lucene resources are closed via try-with-resources.
 */
private InternalAggregation collect(
AggregationBuilder builder,
Query query,
CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
boolean shouldBeCached,
MappedFieldType... fields
) throws IOException {
try (Directory directory = newDirectory()) {
// Writer must be closed before the reader opens so all docs are visible.
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
buildIndex.accept(indexWriter);
}
try (DirectoryReader indexReader = DirectoryReader.open(directory)) {
InternalAggregation agg = searchAndReduce(
indexReader,
new AggTestConfig(builder, fields).withShouldBeCached(shouldBeCached).withQuery(query)
);
verifyOutputFieldNames(builder, agg);
return agg;
}
}
}
/** Expected top-metric entry with a long sort key and double metric values. */
private InternalTopMetrics.TopMetric top(long sortValue, double... metricValues) {
    SortValue sort = SortValue.from(sortValue);
    return new InternalTopMetrics.TopMetric(DocValueFormat.RAW, sort, metricValues(metricValues));
}
/** Expected top-metric entry with a long sort key and long metric values. */
private InternalTopMetrics.TopMetric top(long sortValue, long... metricValues) {
    SortValue sort = SortValue.from(sortValue);
    return new InternalTopMetrics.TopMetric(DocValueFormat.RAW, sort, metricValues(metricValues));
}
/** Expected top-metric entry with a double sort key and double metric values. */
private InternalTopMetrics.TopMetric top(double sortValue, double... metricValues) {
    SortValue sort = SortValue.from(sortValue);
    return new InternalTopMetrics.TopMetric(DocValueFormat.RAW, sort, metricValues(metricValues));
}
/** Wraps each double in a {@link SortValue} and delegates to the SortValue overload. */
private List<InternalTopMetrics.MetricValue> metricValues(double... metricValues) {
return metricValues(Arrays.stream(metricValues).mapToObj(SortValue::from).toArray(SortValue[]::new));
}
/** Wraps each long in a {@link SortValue} and delegates to the SortValue overload. */
private List<InternalTopMetrics.MetricValue> metricValues(long... metricValues) {
return metricValues(Arrays.stream(metricValues).mapToObj(SortValue::from).toArray(SortValue[]::new));
}
/** Converts sort values into RAW-formatted metric values, preserving order. */
private List<InternalTopMetrics.MetricValue> metricValues(SortValue... metricValues) {
return Arrays.stream(metricValues).map(v -> new InternalTopMetrics.MetricValue(DocValueFormat.RAW, v)).collect(toList());
}
/**
 * Builds a numeric script sort whose script simply reads the "s" field
 * (the script is registered in {@link #getMockScriptService()}).
 */
private ScriptSortBuilder scriptSortOnS() {
    Script readS = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "s", emptyMap());
    return new ScriptSortBuilder(readS, ScriptSortType.NUMBER);
}
@Override
protected ScriptService getMockScriptService() {
// Mock engine exposing a single script named "s" that returns the double
// doc value of field "s" for the current document.
MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME, singletonMap("s", args -> {
@SuppressWarnings("unchecked")
Map<String, ScriptDocValues<?>> fields = (Map<String, ScriptDocValues<?>>) args.get("doc");
ScriptDocValues.Doubles field = (ScriptDocValues.Doubles) fields.get("s");
return field.getValue();
}), emptyMap());
Map<String, ScriptEngine> engines = singletonMap(scriptEngine.getType(), scriptEngine);
return new ScriptService(
Settings.EMPTY,
engines,
ScriptModule.CORE_CONTEXTS,
() -> 1L,
TestProjectResolvers.singleProject(randomProjectIdOrDefault())
);
}
/** Registers the analytics plugin so the top_metrics aggregation is available. */
@Override
protected List<SearchPlugin> getSearchPlugins() {
    return List.of(new AnalyticsPlugin());
}
}
|
TopMetricsAggregatorTests
|
java
|
square__moshi
|
examples/src/main/java/com/squareup/moshi/recipes/CustomAdapterFactory.java
|
{
"start": 1569,
"end": 1897
}
|
class ____ an adapter for any element type into an adapter for a sorted set of those
* elements. For example, given a {@code JsonAdapter<MovieTicket>}, use this to get a {@code
* JsonAdapter<SortedSet<MovieTicket>>}. It works by looping over the input elements when both
* reading and writing.
*/
static final
|
composes
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/onetoone/cache/OneToOneCacheTest.java
|
{
"start": 1194,
"end": 4230
}
|
/**
 * Verifies second-level caching of one-to-one associations: after one warm-up
 * load of every person, a second load must be served entirely from the cache
 * (zero misses).
 */
class ____ {
/**
 * Shared driver: seeds persons and details, evicts both entity regions and
 * the query regions, loads once to populate the cache (expecting zero hits),
 * then loads again and asserts zero misses.
 */
private <TPerson extends Person, TDetails extends Details> void OneToOneTest(
Class<TPerson> personClass,
Class<TDetails> detailsClass,
SessionFactoryScope scope) throws Exception {
// Initialize the database with data.
List<Object> ids = createPersonsAndDetails( personClass, detailsClass, scope );
// Clear the second level cache and the statistics.
SessionFactoryImplementor sfi = scope.getSessionFactory();
CacheImplementor cache = sfi.getCache();
StatisticsImplementor statistics = sfi.getStatistics();
cache.evictEntityData( personClass );
cache.evictEntityData( detailsClass );
cache.evictQueryRegions();
statistics.clear();
// Fill the empty caches with data.
this.getPersons( personClass, ids, scope );
// Verify that no data was retrieved from the cache.
assertEquals( 0, statistics.getSecondLevelCacheHitCount(), "Second level cache hit count" );
statistics.clear();
this.getPersons( personClass, ids, scope );
// Verify that all data was retrieved from the cache.
assertEquals( 0, statistics.getSecondLevelCacheMissCount(), "Second level cache miss count" );
}
/**
 * Persists six persons (reflectively constructed); even-indexed ones also get
 * a Details instance. Returns the generated ids in insertion order.
 */
private <TPerson extends Person, TDetails extends Details> List<Object> createPersonsAndDetails(
Class<TPerson> personClass,
Class<TDetails> detailsClass,
SessionFactoryScope scope) throws Exception {
Constructor<TPerson> ctorPerson = personClass.getConstructor();
Constructor<TDetails> ctorDetails = detailsClass.getConstructor();
List<Object> ids = new ArrayList<>();
return scope.fromTransaction(
session -> {
for ( int i = 0; i < 6; i++ ) {
Person person;
try {
person = ctorPerson.newInstance();
if ( i % 2 == 0 ) {
Details details = ctorDetails.newInstance();
details.setData( String.format( "%s%d", detailsClass.getName(), i ) );
person.setDetails( details );
}
person.setName( String.format( "%s%d", personClass.getName(), i ) );
session.persist( person );
ids.add( person.getId() );
}
catch (Exception e) {
// Reflection/persist failures abort the transaction.
throw new RuntimeException( e );
}
}
return ids;
}
);
}
// Loads each person by id inside a single transaction.
private <TPerson extends Person> List<TPerson> getPersons(
Class<TPerson> personClass,
List<Object> ids, SessionFactoryScope scope) {
return scope.fromTransaction(
session -> {
List<TPerson> people = new ArrayList<>();
for ( Object id : ids ) {
people.add( session.get( personClass, id ) );
}
return people;
}
);
}
@Test
@FailureExpected( jiraKey = "HHH-14216", reason = "The changes introduces by HHH-14216 have been reverted see https://github.com/hibernate/hibernate-orm/pull/5061 discussion")
public void OneToOneCacheByForeignKey(SessionFactoryScope scope) throws Exception {
OneToOneTest( PersonByFK.class, DetailsByFK.class, scope );
}
@Test
public void OneToOneCacheByRef(SessionFactoryScope scope) throws Exception {
OneToOneTest( PersonByRef.class, DetailsByRef.class, scope );
}
}
|
OneToOneCacheTest
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/inference/InputType.java
|
{
"start": 873,
"end": 5178
}
|
/**
 * The kind of input being sent to an inference endpoint. The INTERNAL_* values
 * are reserved for internal callers and are rejected when parsed from REST.
 */
enum ____ {
INGEST,
SEARCH,
UNSPECIFIED,
CLASSIFICATION,
CLUSTERING,
// Use the following enums when calling the inference API internally
INTERNAL_SEARCH,
INTERNAL_INGEST;
// The subset of values accepted in user requests (no internal/unspecified).
private static final EnumSet<InputType> SUPPORTED_REQUEST_VALUES = EnumSet.of(
InputType.CLASSIFICATION,
InputType.CLUSTERING,
InputType.INGEST,
InputType.SEARCH
);
@Override
public String toString() {
// Locale.ROOT keeps the lowercase form locale-independent.
return name().toLowerCase(Locale.ROOT);
}
// Parses a case-insensitive, whitespace-tolerant name into an InputType.
public static InputType fromString(String name) {
return valueOf(name.trim().toUpperCase(Locale.ROOT));
}
// Like fromString but rejects the internal-only values for REST input.
public static InputType fromRestString(String name) {
var inputType = InputType.fromString(name);
if (inputType == InputType.INTERNAL_INGEST || inputType == InputType.INTERNAL_SEARCH) {
throw new IllegalArgumentException(format("Unrecognized input_type [%s]", inputType));
}
return inputType;
}
/** True for the internal values or UNSPECIFIED. */
public static boolean isInternalTypeOrUnspecified(InputType inputType) {
return inputType == InputType.INTERNAL_INGEST || inputType == InputType.INTERNAL_SEARCH || inputType == InputType.UNSPECIFIED;
}
/** True for either ingest flavor (external or internal). */
public static boolean isIngest(InputType inputType) {
return inputType == InputType.INGEST || inputType == InputType.INTERNAL_INGEST;
}
/** True when a concrete (non-null, non-UNSPECIFIED) input type was provided. */
public static boolean isSpecified(InputType inputType) {
return inputType != null && inputType != InputType.UNSPECIFIED;
}
public static String invalidInputTypeMessage(InputType inputType) {
return Strings.format("received invalid input type value [%s]", inputType.toString());
}
/**
 * Ensures that a map used for translating input types is valid. The keys of the map are the external representation,
 * and the values correspond to the values in this class.
 * Throws a {@link ValidationException} if any value is not a valid InputType.
 *
 * @param inputTypeTranslation the map of input type translations to validate
 * @param validationException a ValidationException to which errors will be added
 */
public static Map<InputType, String> validateInputTypeTranslationValues(
Map<String, Object> inputTypeTranslation,
ValidationException validationException
) {
if (inputTypeTranslation == null || inputTypeTranslation.isEmpty()) {
return Map.of();
}
var translationMap = new HashMap<InputType, String>();
for (var entry : inputTypeTranslation.entrySet()) {
var key = entry.getKey();
var value = entry.getValue();
// Values must be non-null, non-empty strings.
if (value instanceof String == false || Strings.isNullOrEmpty((String) value)) {
validationException.addValidationError(
Strings.format(
"Input type translation value for key [%s] must be a String that is "
+ "not null and not empty, received: [%s], type: [%s].",
key,
value,
value == null ? "null" : value.getClass().getSimpleName()
)
);
throw validationException;
}
// Keys must parse to one of the supported request values.
try {
var inputTypeKey = InputType.fromStringValidateSupportedRequestValue(key);
translationMap.put(inputTypeKey, (String) value);
} catch (Exception e) {
validationException.addValidationError(
Strings.format(
"Invalid input type translation for key: [%s], is not a valid value. Must be one of %s",
key,
SUPPORTED_REQUEST_VALUES
)
);
throw validationException;
}
}
return translationMap;
}
// Parses a REST name and additionally requires it to be a supported request value.
private static InputType fromStringValidateSupportedRequestValue(String name) {
var inputType = fromRestString(name);
if (SUPPORTED_REQUEST_VALUES.contains(inputType) == false) {
throw new IllegalArgumentException(
format("Unrecognized input_type [%s], must be one of %s", inputType, SUPPORTED_REQUEST_VALUES)
);
}
return inputType;
}
}
|
InputType
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/extension/ClassTemplateInvocationContextProvider.java
|
{
"start": 1797,
"end": 2383
}
|
class ____ method will be invoked using the contexts of all active
* providers.
*
* <p>An active provider may return zero invocation contexts from its
* {@link #provideClassTemplateInvocationContexts} method if it overrides
* {@link #mayReturnZeroClassTemplateInvocationContexts} to return {@code true}.
*
* <h2>Constructor Requirements</h2>
*
* <p>Consult the documentation in {@link Extension} for details on constructor
* requirements.
*
* @since 5.13
* @see ClassTemplate
* @see ClassTemplateInvocationContext
*/
@API(status = EXPERIMENTAL, since = "6.0")
public
|
template
|
java
|
bumptech__glide
|
library/src/main/java/com/bumptech/glide/load/engine/DecodeJob.java
|
{
"start": 25339,
"end": 26383
}
|
/**
 * Defers writing a decoded resource to the disk cache, holding the cache key,
 * encoder and locked resource until {@link #encode} is called.
 */
class ____<Z> {
private Key key;
private ResourceEncoder<Z> encoder;
private LockedResource<Z> toEncode;
@Synthetic
DeferredEncodeManager() {}
// We just need the encoder and resource type to match, which this will enforce.
@SuppressWarnings("unchecked")
<X> void init(Key key, ResourceEncoder<X> encoder, LockedResource<X> toEncode) {
this.key = key;
this.encoder = (ResourceEncoder<Z>) encoder;
this.toEncode = (LockedResource<Z>) toEncode;
}
// Writes the held resource to the disk cache; the resource is always
// unlocked afterwards, even if the write throws.
void encode(DiskCacheProvider diskCacheProvider, Options options) {
GlideTrace.beginSection("DecodeJob.encode");
try {
diskCacheProvider
.getDiskCache()
.put(key, new DataCacheWriter<>(encoder, toEncode, options));
} finally {
toEncode.unlock();
GlideTrace.endSection();
}
}
// True once init(...) has stored a resource awaiting encoding.
boolean hasResourceToEncode() {
return toEncode != null;
}
// Releases all references so the manager can be reused/pooled.
void clear() {
key = null;
encoder = null;
toEncode = null;
}
}
|
DeferredEncodeManager
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.