language
stringclasses
1 value
repo
stringclasses
60 values
path
stringlengths
22
294
class_span
dict
source
stringlengths
13
1.16M
target
stringlengths
1
113
java
hibernate__hibernate-orm
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/graphs/Course.java
{ "start": 544, "end": 1763 }
class ____ { @Id @GeneratedValue private int id; private String name; @ManyToMany(mappedBy="courses", cascade=CascadeType.ALL) private List<Student> students = new ArrayList<>(); public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public List<Student> getStudents() { return students; } public void setStudents(List<Student> students) { this.students = students; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + getId(); result = prime * result + ((name == null) ? 0 : name.hashCode()); return result; } @Override public boolean equals(Object obj) { int id = getId(); if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; Course other = (Course) obj; if (id != other.id) return false; if (name == null) { if (other.name != null) return false; } else if (!name.equals(other.name)) return false; return true; } @Override public String toString() { return "Course [name=" + name + "]"; } }
Course
java
apache__camel
components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/nodes/KubernetesNodesEndpoint.java
{ "start": 1670, "end": 2263 }
class ____ extends AbstractKubernetesEndpoint { public KubernetesNodesEndpoint(String uri, KubernetesNodesComponent component, KubernetesConfiguration config) { super(uri, component, config); } @Override public Producer createProducer() throws Exception { return new KubernetesNodesProducer(this); } @Override public Consumer createConsumer(Processor processor) throws Exception { Consumer consumer = new KubernetesNodesConsumer(this, processor); configureConsumer(consumer); return consumer; } }
KubernetesNodesEndpoint
java
spring-projects__spring-framework
spring-oxm/src/test/java/org/springframework/oxm/xstream/XStreamMarshallerTests.java
{ "start": 2700, "end": 15333 }
class ____ { private static final String EXPECTED_STRING = "<flight><flightNumber>42</flightNumber></flight>"; private final Flight flight = new Flight(); private XStreamMarshaller marshaller; @BeforeEach void createMarshaller() { marshaller = new XStreamMarshaller(); marshaller.setTypePermissions(AnyTypePermission.ANY); marshaller.setAliases(Collections.singletonMap("flight", Flight.class.getName())); flight.setFlightNumber(42L); } @Test void marshalDOMResult() throws Exception { DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = documentBuilderFactory.newDocumentBuilder(); Document document = builder.newDocument(); DOMResult domResult = new DOMResult(document); marshaller.marshal(flight, domResult); Document expected = builder.newDocument(); Element flightElement = expected.createElement("flight"); expected.appendChild(flightElement); Element numberElement = expected.createElement("flightNumber"); flightElement.appendChild(numberElement); Text text = expected.createTextNode("42"); numberElement.appendChild(text); assertThat(XmlContent.of(document)).isSimilarTo(expected); } // see SWS-392 @Test void marshalDOMResultToExistentDocument() throws Exception { DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = documentBuilderFactory.newDocumentBuilder(); Document existent = builder.newDocument(); Element rootElement = existent.createElement("root"); Element flightsElement = existent.createElement("flights"); rootElement.appendChild(flightsElement); existent.appendChild(rootElement); // marshall into the existent document DOMResult domResult = new DOMResult(flightsElement); marshaller.marshal(flight, domResult); Document expected = builder.newDocument(); Element eRootElement = expected.createElement("root"); Element eFlightsElement = expected.createElement("flights"); Element eFlightElement = expected.createElement("flight"); 
eRootElement.appendChild(eFlightsElement); eFlightsElement.appendChild(eFlightElement); expected.appendChild(eRootElement); Element eNumberElement = expected.createElement("flightNumber"); eFlightElement.appendChild(eNumberElement); Text text = expected.createTextNode("42"); eNumberElement.appendChild(text); assertThat(XmlContent.of(existent)).isSimilarTo(expected); } @Test void marshalStreamResultWriter() throws Exception { StringWriter writer = new StringWriter(); StreamResult result = new StreamResult(writer); marshaller.marshal(flight, result); assertThat(XmlContent.from(writer)).isSimilarToIgnoringWhitespace(EXPECTED_STRING); } @Test void marshalStreamResultOutputStream() throws Exception { ByteArrayOutputStream os = new ByteArrayOutputStream(); StreamResult result = new StreamResult(os); marshaller.marshal(flight, result); String s = os.toString(StandardCharsets.UTF_8); assertThat(XmlContent.of(s)).isSimilarToIgnoringWhitespace(EXPECTED_STRING); } @Test void marshalSaxResult() throws Exception { ContentHandler contentHandler = mock(); SAXResult result = new SAXResult(contentHandler); marshaller.marshal(flight, result); InOrder ordered = inOrder(contentHandler); ordered.verify(contentHandler).startDocument(); ordered.verify(contentHandler).startElement(eq(""), eq("flight"), eq("flight"), isA(Attributes.class)); ordered.verify(contentHandler).startElement(eq(""), eq("flightNumber"), eq("flightNumber"), isA(Attributes.class)); ordered.verify(contentHandler).characters(isA(char[].class), eq(0), eq(2)); ordered.verify(contentHandler).endElement("", "flightNumber", "flightNumber"); ordered.verify(contentHandler).endElement("", "flight", "flight"); ordered.verify(contentHandler).endDocument(); } @Test void marshalStaxResultXMLStreamWriter() throws Exception { XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); StringWriter writer = new StringWriter(); XMLStreamWriter streamWriter = outputFactory.createXMLStreamWriter(writer); Result result = 
StaxUtils.createStaxResult(streamWriter); marshaller.marshal(flight, result); assertThat(XmlContent.from(writer)).isSimilarTo(EXPECTED_STRING); } @Test void marshalStaxResultXMLStreamWriterDefaultNamespace() throws Exception { QNameMap map = new QNameMap(); map.setDefaultNamespace("https://example.com"); map.setDefaultPrefix("spr"); StaxDriver driver = new StaxDriver(map); marshaller.setStreamDriver(driver); XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); StringWriter writer = new StringWriter(); XMLStreamWriter streamWriter = outputFactory.createXMLStreamWriter(writer); Result result = StaxUtils.createStaxResult(streamWriter); marshaller.marshal(flight, result); assertThat(XmlContent.from(writer)).isSimilarTo( "<spr:flight xmlns:spr=\"https://example.com\"><spr:flightNumber>42</spr:flightNumber></spr:flight>"); } @Test void marshalStaxResultXMLEventWriter() throws Exception { XMLOutputFactory outputFactory = XMLOutputFactory.newInstance(); StringWriter writer = new StringWriter(); XMLEventWriter eventWriter = outputFactory.createXMLEventWriter(writer); Result result = StaxUtils.createStaxResult(eventWriter); marshaller.marshal(flight, result); assertThat(XmlContent.from(writer)).isSimilarTo(EXPECTED_STRING); } @Test void converters() { marshaller.setConverters(new EncodedByteArrayConverter()); byte[] buf = {0x1, 0x2}; // Execute multiple times concurrently to ensure there are no concurrency issues. 
// See https://github.com/spring-projects/spring-framework/issues/25017 IntStream.rangeClosed(1, 100).parallel().forEach(n -> { try { Writer writer = new StringWriter(); marshaller.marshal(buf, new StreamResult(writer)); assertThat(XmlContent.from(writer)).isSimilarTo("<byte-array>AQI=</byte-array>"); Reader reader = new StringReader(writer.toString()); byte[] bufResult = (byte[]) marshaller.unmarshal(new StreamSource(reader)); assertThat(bufResult).as("Invalid result").isEqualTo(buf); } catch (Exception ex) { throw new RuntimeException(ex); } }); } @Test void useAttributesFor() throws Exception { marshaller.setUseAttributeForTypes(long.class); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); String expected = "<flight flightNumber=\"42\" />"; assertThat(XmlContent.from(writer)).isSimilarTo(expected); } @Test void useAttributesForStringClassMap() throws Exception { marshaller.setUseAttributeFor(Collections.singletonMap("flightNumber", long.class)); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); String expected = "<flight flightNumber=\"42\" />"; assertThat(XmlContent.from(writer)).isSimilarTo(expected); } @Test void useAttributesForClassStringMap() throws Exception { marshaller.setUseAttributeFor(Collections.singletonMap(Flight.class, "flightNumber")); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); String expected = "<flight flightNumber=\"42\" />"; assertThat(XmlContent.from(writer)).isSimilarTo(expected); } @Test void useAttributesForClassStringListMap() throws Exception { marshaller.setUseAttributeFor(Collections.singletonMap(Flight.class, Collections.singletonList("flightNumber"))); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); String expected = "<flight flightNumber=\"42\" />"; assertThat(XmlContent.from(writer)).isSimilarTo(expected); } @Test void aliasesByTypeStringClassMap() throws 
Exception { Map<String, Class<?>> aliases = new HashMap<>(); aliases.put("flight", Flight.class); FlightSubclass flight = new FlightSubclass(); flight.setFlightNumber(42); marshaller.setAliasesByType(aliases); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); assertThat(XmlContent.from(writer)).isSimilarToIgnoringWhitespace(EXPECTED_STRING); } @Test void aliasesByTypeStringStringMap() throws Exception { Map<String, String> aliases = new HashMap<>(); aliases.put("flight", Flight.class.getName()); FlightSubclass flight = new FlightSubclass(); flight.setFlightNumber(42); marshaller.setAliasesByType(aliases); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); assertThat(XmlContent.from(writer)).isSimilarToIgnoringWhitespace(EXPECTED_STRING); } @Test void fieldAliases() throws Exception { marshaller.setFieldAliases(Collections.singletonMap("org.springframework.oxm.xstream.Flight.flightNumber", "flightNo")); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); String expected = "<flight><flightNo>42</flightNo></flight>"; assertThat(XmlContent.from(writer)).isSimilarToIgnoringWhitespace(expected); } @Test @SuppressWarnings({ "rawtypes", "unchecked" }) void omitFields() throws Exception { Map omittedFieldsMap = Collections.singletonMap(Flight.class, "flightNumber"); marshaller.setOmittedFields(omittedFieldsMap); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); assertXpathDoesNotExist("/flight/flightNumber", writer.toString()); } @Test @SuppressWarnings({ "rawtypes", "unchecked" }) void implicitCollections() throws Exception { Flights flights = new Flights(); flights.getFlights().add(flight); flights.getStrings().add("42"); Map<String, Class<?>> aliases = new HashMap<>(); aliases.put("flight", Flight.class); aliases.put("flights", Flights.class); marshaller.setAliases(aliases); Map implicitCollections = 
Collections.singletonMap(Flights.class, "flights,strings"); marshaller.setImplicitCollections(implicitCollections); Writer writer = new StringWriter(); marshaller.marshal(flights, new StreamResult(writer)); String result = writer.toString(); assertXpathDoesNotExist("/flights/flights", result); assertXpathExists("/flights/flight", result); assertXpathDoesNotExist("/flights/strings", result); assertXpathExists("/flights/string", result); } @Test void jettisonDriver() throws Exception { marshaller.setStreamDriver(new JettisonMappedXmlDriver()); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); assertThat(writer.toString()).as("Invalid result").isEqualTo("{\"flight\":{\"flightNumber\":42}}"); Object o = marshaller.unmarshal(new StreamSource(new StringReader(writer.toString()))); assertThat(o).as("Unmarshalled object is not Flights").isInstanceOf(Flight.class); Flight unflight = (Flight) o; assertThat(unflight).as("Flight is null").isNotNull(); assertThat(unflight.getFlightNumber()).as("Number is invalid").isEqualTo(42L); } @Test void jsonDriver() throws Exception { marshaller.setStreamDriver(new JsonHierarchicalStreamDriver() { @Override public HierarchicalStreamWriter createWriter(Writer writer) { return new JsonWriter(writer, JsonWriter.DROP_ROOT_MODE, new JsonWriter.Format(new char[0], new char[0], JsonWriter.Format.SPACE_AFTER_LABEL | JsonWriter.Format.COMPACT_EMPTY_ELEMENT)); } }); Writer writer = new StringWriter(); marshaller.marshal(flight, new StreamResult(writer)); assertThat(writer.toString()).as("Invalid result").isEqualTo("{\"flightNumber\": 42}"); } @Test void annotatedMarshalStreamResultWriter() throws Exception { marshaller.setAnnotatedClasses(Flight.class); StringWriter writer = new StringWriter(); StreamResult result = new StreamResult(writer); Flight flight = new Flight(); flight.setFlightNumber(42); marshaller.marshal(flight, result); String expected = "<flight><number>42</number></flight>"; 
assertThat(XmlContent.from(writer)).isSimilarToIgnoringWhitespace(expected); } private static void assertXpathExists(String xPathExpression, String inXMLString){ Source source = Input.fromString(inXMLString).build(); Iterable<Node> nodes = new JAXPXPathEngine().selectNodes(xPathExpression, source); assertThat(nodes).as("Expecting to find matches for Xpath " + xPathExpression).isNotEmpty(); } private static void assertXpathDoesNotExist(String xPathExpression, String inXMLString){ Source source = Input.fromString(inXMLString).build(); Iterable<Node> nodes = new JAXPXPathEngine().selectNodes(xPathExpression, source); assertThat(nodes).as("Should be zero matches for Xpath " + xPathExpression).isEmpty(); } }
XStreamMarshallerTests
java
apache__camel
components/camel-netty-http/src/test/java/org/apache/camel/component/netty/http/rest/RestNettyHttpGetWildcardsTest.java
{ "start": 1174, "end": 3095 }
class ____ extends BaseNettyTest { @BindToRegistry("mybinding") private RestNettyHttpBinding binding = new RestNettyHttpBinding(); @Test public void testProducerGet() { String out = template.requestBody("netty-http:http://localhost:{{port}}/users/123/basic", null, String.class); assertEquals("123;Donald Duck", out); } @Test public void testServletProducerGetWildcards() { String out = template.requestBody("netty-http:http://localhost:{{port}}/users/456/name=g*", null, String.class); assertEquals("456;Goofy", out); } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { @Override public void configure() { // configure to use netty-http on localhost with the given port restConfiguration().component("netty-http").host("localhost").port(getPort()) .endpointProperty("nettyHttpBinding", "#mybinding"); // use the rest DSL to define the rest services rest("/users/") .get("{id}/{query}").to("direct:query") .get("{id}/basic").to("direct:basic"); from("direct:query") .to("log:query") .process(exchange -> { String id = exchange.getIn().getHeader("id", String.class); exchange.getMessage().setBody(id + ";Goofy"); }); from("direct:basic") .to("log:input") .process(exchange -> { String id = exchange.getIn().getHeader("id", String.class); exchange.getMessage().setBody(id + ";Donald Duck"); }); } }; } }
RestNettyHttpGetWildcardsTest
java
junit-team__junit5
junit-platform-suite-api/src/main/java/org/junit/platform/suite/api/SelectFiles.java
{ "start": 1159, "end": 1293 }
interface ____ { /** * An array of one or more {@link SelectFile @SelectFile} declarations. */ SelectFile[] value(); }
SelectFiles
java
alibaba__nacos
maintainer-client/src/main/java/com/alibaba/nacos/maintainer/client/naming/NamingMaintainerService.java
{ "start": 1265, "end": 4535 }
interface ____ extends CoreMaintainerService, ServiceMaintainerService, InstanceMaintainerService, NamingClientMaintainerService, Closeable { /** * Get system metrics. * * @param onlyStatus whether to return only status information * @return the metrics information * @throws NacosException if an error occurs */ MetricsInfo getMetrics(boolean onlyStatus) throws NacosException; /** * Set the log level. * * @param logName the name of the logger * @param logLevel the new log level * @return the result of the operation * @throws NacosException if an error occurs */ String setLogLevel(String logName, String logLevel) throws NacosException; /** * Update the health status of a persistent instance. * * <p> * This API is designed to work with the persistent instance which don't need to auto-check and maintainer by admin * to change the health status. * So This API works at following several condition: * <ul> * <li>1. instance should be persistent instance</li> * <li>2. health checker for cluster of this instance should be `NONE` </li> * </ul> * </p> * <p> * How to change the health checker for cluster: see {@link #updateCluster(Service, ClusterInfo)}. * </p> * * @param service service need to be updated, {@link Service#getNamespaceId()}, {@link Service#getGroupName()} * and {@link Service#getName()} are required. * {@link Service#isEphemeral()} must be `false`. * @param instance instance need to be updated, {@link Instance#getIp()}, {@link Instance#getPort()} * and {@link Instance#getClusterName()} are required. * {@link Instance#isEphemeral()} must be `false`. * @return the result of the operation * @throws NacosException if an error occurs */ String updateInstanceHealthStatus(Service service, Instance instance) throws NacosException; /** * Get all health checkers for current nacos cluster. 
* * @return a map of health checkers * @throws NacosException if an error occurs */ Map<String, AbstractHealthChecker> getHealthCheckers() throws NacosException; /** * Update cluster metadata in target service. * * @param service the service of updated cluster * @param cluster the new cluster metadata. {@link ClusterInfo#getClusterName()} is required and can't be changed, * used to locate which cluster need to be updated. * {@link ClusterInfo#getHealthChecker()} is from {@link #getHealthCheckers()}. * {@link ClusterInfo#getMetadata()}, {@link ClusterInfo#getHealthyCheckPort()} * and {@link ClusterInfo#isUseInstancePortForCheck()} will full replace server side. * {@link ClusterInfo#getHosts()} will be ignored in this API. * @return the result of the operation * @throws NacosException if an error occurs */ String updateCluster(Service service, ClusterInfo cluster) throws NacosException; }
NamingMaintainerService
java
apache__flink
flink-kubernetes/src/test/java/org/apache/flink/kubernetes/highavailability/KubernetesLeaderElectionAndRetrievalITCase.java
{ "start": 2418, "end": 7460 }
class ____ { private static final String LEADER_CONFIGMAP_NAME = "leader-test-cluster"; private static final String LEADER_ADDRESS = "pekko.tcp://flink@172.20.1.21:6123/user/rpc/dispatcher"; @RegisterExtension static final KubernetesExtension KUBERNETES_EXTENSION = new KubernetesExtension(); @RegisterExtension static final TestExecutorExtension<ExecutorService> EXECUTOR_EXTENSION = new TestExecutorExtension<>(Executors::newCachedThreadPool); @Test void testLeaderElectionAndRetrieval() throws Exception { final String componentId = "component-id"; final String leaderAddress = "random-address"; final String configMapName = LEADER_CONFIGMAP_NAME + UUID.randomUUID(); final FlinkKubeClient flinkKubeClient = KUBERNETES_EXTENSION.getFlinkKubeClient(); final Configuration configuration = KUBERNETES_EXTENSION.getConfiguration(); final String clusterId = configuration.get(KubernetesConfigOptions.CLUSTER_ID); // This will make the leader election retrieval time out if we won't process already // existing leader information when starting it up. 
configuration.set( KubernetesHighAvailabilityOptions.KUBERNETES_LEASE_DURATION, Duration.ofHours(1)); configuration.set( KubernetesHighAvailabilityOptions.KUBERNETES_RETRY_PERIOD, Duration.ofHours(1)); configuration.set( KubernetesHighAvailabilityOptions.KUBERNETES_RENEW_DEADLINE, Duration.ofHours(1)); final List<AutoCloseable> closeables = new ArrayList<>(); final KubernetesConfigMapSharedWatcher configMapSharedWatcher = flinkKubeClient.createConfigMapSharedWatcher(configMapName); closeables.add(configMapSharedWatcher); final TestingLeaderElectionListener electionEventHandler = new TestingLeaderElectionListener(); try { final KubernetesLeaderElectionDriver leaderElectionDriver = new KubernetesLeaderElectionDriver( new KubernetesLeaderElectionConfiguration( configMapName, UUID.randomUUID().toString(), configuration), flinkKubeClient, electionEventHandler, configMapSharedWatcher, EXECUTOR_EXTENSION.getExecutor()); closeables.add(leaderElectionDriver); final KubernetesLeaderRetrievalDriverFactory driverFactory = new KubernetesLeaderRetrievalDriverFactory( configMapSharedWatcher, EXECUTOR_EXTENSION.getExecutor(), configMapName, componentId); final TestingFatalErrorHandler fatalErrorHandler = new TestingFatalErrorHandler(); final TestingLeaderRetrievalEventHandler firstLeaderRetrievalEventHandler = new TestingLeaderRetrievalEventHandler(); closeables.add( driverFactory.createLeaderRetrievalDriver( firstLeaderRetrievalEventHandler, fatalErrorHandler)); // Wait for the driver to obtain leadership. 
electionEventHandler.await(LeaderElectionEvent.IsLeaderEvent.class); final LeaderInformation leaderInformation = LeaderInformation.known(UUID.randomUUID(), leaderAddress); leaderElectionDriver.publishLeaderInformation(componentId, leaderInformation); // Check if the leader retrieval driver is notified about the leader address awaitLeadership(firstLeaderRetrievalEventHandler, leaderInformation); // Start a second leader retrieval that should be notified immediately because we // already know who the leader is. final TestingLeaderRetrievalEventHandler secondRetrievalEventHandler = new TestingLeaderRetrievalEventHandler(); closeables.add( driverFactory.createLeaderRetrievalDriver( secondRetrievalEventHandler, fatalErrorHandler)); awaitLeadership(secondRetrievalEventHandler, leaderInformation); } finally { for (AutoCloseable closeable : closeables) { closeable.close(); } flinkKubeClient.deleteConfigMap(configMapName).get(); electionEventHandler.failIfErrorEventHappened(); } } private static void awaitLeadership( TestingLeaderRetrievalEventHandler handler, LeaderInformation leaderInformation) throws Exception { handler.waitForNewLeader(); assertThat(handler.getLeaderSessionID()).isEqualTo(leaderInformation.getLeaderSessionID()); assertThat(handler.getAddress()).isEqualTo(leaderInformation.getLeaderAddress()); } }
KubernetesLeaderElectionAndRetrievalITCase
java
apache__kafka
connect/runtime/src/test/resources/test-plugins/bad-packaging/test/plugins/NoDefaultConstructorOverridePolicy.java
{ "start": 1301, "end": 1744 }
class ____ implements ConnectorClientConfigOverridePolicy { public NoDefaultConstructorOverridePolicy(int ignored) { } @Override public void close() throws IOException { } @Override public void configure(Map<String, ?> configs) { } @Override public List<ConfigValue> validate(ConnectorClientConfigRequest connectorClientConfigRequest) { return null; } }
NoDefaultConstructorOverridePolicy
java
micronaut-projects__micronaut-core
http-server-netty/src/main/java/io/micronaut/http/server/netty/handler/PipeliningServerHandler.java
{ "start": 21338, "end": 24082 }
interface ____ if (message.getClass() == DefaultLastHttpContent.class || message instanceof LastHttpContent) { // we got the full message before readComplete ByteBuf fullBody; if (buffer.size() == 0) { fullBody = Unpooled.EMPTY_BUFFER; } else if (buffer.size() == 1) { fullBody = buffer.get(0).content(); } else { CompositeByteBuf composite = ctx.alloc().compositeBuffer(); for (HttpContent c : buffer) { composite.addComponent(true, c.content()); } fullBody = composite; } buffer.clear(); HttpRequest request = this.request; this.request = null; OutboundAccess outboundAccess = this.outboundAccess; this.outboundAccess = null; requestHandler.accept(ctx, request, byteBodyFactory().createChecked(bodySizeLimits, fullBody), outboundAccess); inboundHandler = baseInboundHandler; } } @Override void readComplete() { devolveToStreaming(); inboundHandler.readComplete(); } @Override void handleUpstreamError(Throwable cause) { devolveToStreaming(); inboundHandler.handleUpstreamError(cause); } private void devolveToStreaming() { HttpRequest request = this.request; OutboundAccessImpl outboundAccess = this.outboundAccess; this.request = null; this.outboundAccess = null; StreamingInboundHandler streamingInboundHandler = new StreamingInboundHandler(outboundAccess, HttpUtil.is100ContinueExpected(request)); for (HttpContent content : buffer) { streamingInboundHandler.read(content); } buffer.clear(); if (inboundHandler == this) { inboundHandler = streamingInboundHandler; } else { ((DecompressingInboundHandler) inboundHandler).delegate = streamingInboundHandler; } streamingInboundHandler.dest.setExpectedLengthFrom(request.headers()); requestHandler.accept(ctx, request, new StreamingNettyByteBody(streamingInboundHandler.dest), outboundAccess); } @Override void discard() { for (HttpContent content : buffer) { content.release(); } buffer.clear(); } } /** * Handler that exposes incoming content as a {@link Flux}. */ private final
instanceof
java
google__dagger
javatests/dagger/internal/codegen/DependencyCycleValidationTest.java
{ "start": 9146, "end": 9720 }
interface ____"); }); } @Test public void cyclicDependencyNotBrokenByMapBinding() { Source component = CompilerTests.javaSource( "test.Outer", "package test;", "", "import dagger.Component;", "import dagger.Module;", "import dagger.Provides;", "import dagger.multibindings.IntoMap;", "import dagger.multibindings.StringKey;", "import java.util.Map;", "import javax.inject.Inject;", "", "final
DComponent
java
quarkusio__quarkus
extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/customexceptions/UnwrapExceptionTest.java
{ "start": 727, "end": 2182 }
class ____ { @RegisterExtension static QuarkusUnitTest test = new QuarkusUnitTest() .setArchiveProducer(new Supplier<>() { @Override public JavaArchive get() { return ShrinkWrap.create(JavaArchive.class) .addClasses(FirstException.class, SecondException.class, ThirdException.class, FourthException.class, FifthException.class, SixthException.class, Mappers.class, Resource.class, ExceptionUtil.class); } }); @Test public void testWrapperWithUnmappedException() { when().get("/hello/iaeInSecond") .then().statusCode(500); } @Test public void testMappedExceptionWithoutUnwrappedWrapper() { when().get("/hello/iseInFirst") .then().statusCode(500); when().get("/hello/iseInThird") .then().statusCode(500); when().get("/hello/iseInSixth") .then().statusCode(500); } @Test public void testWrapperWithMappedException() { when().get("/hello/iseInSecond") .then().statusCode(900); when().get("/hello/iseInFourth") .then().statusCode(900); when().get("/hello/iseInFifth") .then().statusCode(900); } @Path("hello") public static
UnwrapExceptionTest
java
spring-projects__spring-security
itest/context/src/main/java/org/springframework/security/integration/multiannotation/SecuredService.java
{ "start": 795, "end": 869 }
interface ____ { @Secured("ROLE_A") void securedMethod(); }
SecuredService
java
apache__camel
components/camel-kudu/src/main/java/org/apache/camel/component/kudu/KuduEndpoint.java
{ "start": 1850, "end": 5915 }
class ____ extends DefaultEndpoint implements EndpointServiceLocation { private static final Logger LOG = LoggerFactory.getLogger(KuduEndpoint.class); private KuduClient kuduClient; private boolean userManagedClient; @UriPath(name = "host", displayName = "Host", label = "common", description = "Host of the server to connect to") @Metadata(required = true) private String host; @UriPath(name = "port", displayName = "Port", label = "common", description = "Port of the server to connect to") @Metadata(required = true) private String port; @UriParam(description = "Operation to perform") private KuduOperations operation; @UriPath(name = "tableName", displayName = "Table Name", label = "common", description = "Table to connect to") private String tableName; public KuduEndpoint(String uri, KuduComponent component) { super(uri, component); Pattern p = Pattern.compile("^(\\S+)\\:(\\d+)\\/(\\S+)$"); Matcher m = p.matcher(uri); if (!m.matches()) { throw new RuntimeException("Unrecognizable url: " + uri); } this.setHost(m.group(1)); this.setPort(m.group(2)); this.setTableName(m.group(3)); } @Override public String getServiceUrl() { return host + ":" + port; } @Override public String getServiceProtocol() { return "kudu"; } @Override protected void doStart() throws Exception { LOG.trace("Connection: {}, {}", getHost(), getPort()); //To facilitate tests, if the client is already created, do not recreate. 
if (this.getKuduClient() == null) { setKuduClient(new KuduClient.KuduClientBuilder(getHost() + ":" + getPort()).build()); } LOG.debug("Resolved the host with the name {} as {}", getHost(), getKuduClient()); super.doStart(); } @Override protected void doStop() throws Exception { // Only shut down clients created by this endpoint if (!isUserManagedClient()) { KuduClient client = getKuduClient(); if (client != null) { LOG.debug("Shutting down kudu client"); try { client.shutdown(); } catch (Exception e) { LOG.error("Unable to shutdown kudu client", e); } } } super.doStop(); } public String getHost() { return host; } /** * Kudu master to connect to */ public void setHost(String host) { this.host = host; } public String getPort() { return port; } public KuduClient getKuduClient() { return kuduClient; } /** * Set the client to connect to a kudu resource */ public void setKuduClient(KuduClient kuduClient) { this.kuduClient = kuduClient; } /** * Port where kudu service is listening */ public void setPort(String port) { this.port = port; } @Override public Producer createProducer() { return new KuduProducer(this); } @Override public Consumer createConsumer(Processor processor) throws UnsupportedOperationException { throw new UnsupportedOperationException("You cannot create consumers on this endpoint"); } public String getTableName() { return tableName; } /** * The name of the table where the rows are stored */ public void setTableName(String tableName) { this.tableName = tableName; } public KuduOperations getOperation() { return operation; } /** * What kind of operation is to be performed in the table */ public void setOperation(KuduOperations operation) { this.operation = operation; } public boolean isUserManagedClient() { return userManagedClient; } public void setUserManagedClient(boolean userManagedClient) { this.userManagedClient = userManagedClient; } }
KuduEndpoint
java
elastic__elasticsearch
x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/BaseRestSqlTestCase.java
{ "start": 2862, "end": 3391 }
class ____ extends RemoteClusterAwareSqlRestTestCase { private static final String TEST_INDEX = "test"; private static final String DATA_STREAM_TEMPLATE = "test-ds-index-template"; /** * What's the version of the server that the clients should be compatible with? * This will be either the stack version, or SqlVersions.getLatestVersion() if the stack version is not available. */ private static final SqlVersion SERVER_COMPAT_VERSION = getServerCompatVersion(); public static
BaseRestSqlTestCase
java
spring-projects__spring-framework
spring-webflux/src/main/java/org/springframework/web/reactive/function/client/WebClientException.java
{ "start": 938, "end": 1499 }
class ____ extends NestedRuntimeException { private static final long serialVersionUID = 472776714118912855L; /** * Construct a new instance of {@code WebClientException} with the given message. * @param msg the message */ public WebClientException(String msg) { super(msg); } /** * Construct a new instance of {@code WebClientException} with the given message * and exception. * @param msg the message * @param ex the exception */ public WebClientException(@Nullable String msg, Throwable ex) { super(msg, ex); } }
WebClientException
java
apache__camel
components/camel-kafka/src/test/java/org/apache/camel/component/kafka/KafkaProducerTest.java
{ "start": 2663, "end": 26032 }
class ____ { private static final String SOME_INDIVIDUAL_HEADER = "someIndividualHeader"; private final KafkaProducer producer; private final KafkaEndpoint endpoint; private final KafkaEndpoint fromEndpoint; private final TypeConverter converter = Mockito.mock(TypeConverter.class); private final CamelContext context = Mockito.mock(DefaultCamelContext.class); private final Exchange exchange = Mockito.mock(Exchange.class); private final ExtendedCamelContext ecc = Mockito.mock(ExtendedCamelContext.class); private final Message in = new DefaultMessage(context); private final AsyncCallback callback = Mockito.mock(AsyncCallback.class); @SuppressWarnings({ "unchecked" }) public KafkaProducerTest() throws Exception { KafkaComponent kafka = new KafkaComponent(new DefaultCamelContext()); kafka.getConfiguration().setBrokers("broker1:1234,broker2:4567"); kafka.getConfiguration().setRecordMetadata(true); kafka.init(); endpoint = kafka.createEndpoint("kafka:sometopic", "sometopic", new HashMap()); endpoint.doBuild(); assertInstanceOf(DefaultKafkaClientFactory.class, endpoint.getKafkaClientFactory()); producer = new KafkaProducer(endpoint); fromEndpoint = kafka.createEndpoint("kafka:fromtopic", "fromtopic", new HashMap()); fromEndpoint.doBuild(); assertInstanceOf(DefaultKafkaClientFactory.class, fromEndpoint.getKafkaClientFactory()); RecordMetadata rm = new RecordMetadata(null, 0, 0, 0, 0, 0); Future future = Mockito.mock(Future.class); Mockito.when(future.get()).thenReturn(rm); org.apache.kafka.clients.producer.KafkaProducer kp = Mockito.mock(org.apache.kafka.clients.producer.KafkaProducer.class); Mockito.when(kp.send(any(ProducerRecord.class))).thenReturn(future); Mockito.when(exchange.getContext()).thenReturn(context); Mockito.when(context.getTypeConverter()).thenReturn(converter); Mockito.when(converter.tryConvertTo(String.class, exchange, null)).thenReturn(null); Mockito.when(context.getCamelContextExtension()).thenReturn(ecc); Mockito.when(ecc.getHeadersMapFactory()) 
.thenReturn(new DefaultHeadersMapFactory()); Mockito.when(context.getTypeConverter()).thenReturn(converter); producer.setKafkaProducer(kp); producer.setWorkerPool(Executors.newFixedThreadPool(1)); } @Test public void testPropertyBuilder() { Properties props = producer.getProps(); assertEquals("broker1:1234,broker2:4567", props.getProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG)); } @Test @SuppressWarnings({ "unchecked" }) public void processSendsMessage() throws Exception { endpoint.getConfiguration().setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); producer.process(exchange); Mockito.verify(producer.getKafkaProducer()).send(any(ProducerRecord.class)); assertRecordMetadataExists(); } @Test @SuppressWarnings({ "unchecked" }) public void processSendsMessageWithException() { endpoint.getConfiguration().setTopic("sometopic"); // set up the exception here org.apache.kafka.clients.producer.Producer kp = producer.getKafkaProducer(); Mockito.when(kp.send(any(ProducerRecord.class))).thenThrow(new ApiException()); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); assertThrows(ApiException.class, () -> producer.process(exchange)); } @Test public void processAsyncSendsMessage() { endpoint.getConfiguration().setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); producer.process(exchange, callback); ArgumentCaptor<Callback> callBackCaptor = ArgumentCaptor.forClass(Callback.class); Mockito.verify(producer.getKafkaProducer()).send(any(ProducerRecord.class), callBackCaptor.capture()); Callback kafkaCallback = callBackCaptor.getValue(); kafkaCallback.onCompletion(new RecordMetadata(null, 0, 0, 0, 0, 0), null); assertRecordMetadataExists(); } @Test public 
void processAsyncSendsMessageWithException() { endpoint.getConfiguration().setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); // set up the exception here org.apache.kafka.clients.producer.Producer kp = producer.getKafkaProducer(); Mockito.when(kp.send(any(ProducerRecord.class), any(Callback.class))).thenThrow(new ApiException()); in.setHeader(KafkaConstants.PARTITION_KEY, 4); producer.process(exchange, callback); ArgumentCaptor<Callback> callBackCaptor = ArgumentCaptor.forClass(Callback.class); Mockito.verify(producer.getKafkaProducer()).send(any(ProducerRecord.class), callBackCaptor.capture()); Mockito.verify(exchange).setException(isA(ApiException.class)); Mockito.verify(callback).done(eq(true)); Callback kafkaCallback = callBackCaptor.getValue(); kafkaCallback.onCompletion(new RecordMetadata(null, 0, 0, 0, 0, 0), null); assertRecordMetadataExists(); } @Test public void processSendsMessageWithTopicHeaderAndNoTopicInEndPoint() throws Exception { endpoint.getConfiguration().setTopic(null); Mockito.when(exchange.getIn()).thenReturn(in); in.setHeader(KafkaConstants.TOPIC, "anotherTopic"); Mockito.when(exchange.getMessage()).thenReturn(in); producer.process(exchange); verifySendMessage("sometopic"); assertRecordMetadataExists(); } @Test public void processSendsMessageWithTopicHeaderAndEndPoint() throws Exception { endpoint.getConfiguration().setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); in.setHeader(KafkaConstants.TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); // the header is preserved assertNotNull(in.getHeader(KafkaConstants.TOPIC)); verifySendMessage(4, "sometopic", "someKey"); assertRecordMetadataExists(); } @Test public void processSendsMessageWithOverrideTopicHeaderAndEndPoint() throws Exception { 
endpoint.getConfiguration().setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); in.setHeader(KafkaConstants.OVERRIDE_TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); // test using a string value instead of long String time = String.valueOf(LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli()); in.setHeader(KafkaConstants.OVERRIDE_TIMESTAMP, time); producer.process(exchange); // the header is now removed assertNull(in.getHeader(KafkaConstants.OVERRIDE_TOPIC)); verifySendMessage(4, "anotherTopic", "someKey"); assertRecordMetadataExists(); } @Test public void processRequiresTopicInEndpointOrInHeader() throws Exception { endpoint.getConfiguration().setTopic(null); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage("sometopic", "someKey"); assertRecordMetadataExists(); } @Test public void processRequiresTopicInConfiguration() throws Exception { endpoint.getConfiguration().setTopic("configTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage("configTopic", "someKey"); assertRecordMetadataExists(); } @Test public void processDoesNotRequirePartitionHeader() throws Exception { endpoint.getConfiguration().setTopic("sometopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); producer.process(exchange); assertRecordMetadataExists(); } @Test public void processSendsMessageWithPartitionKeyHeader() throws Exception { endpoint.getConfiguration().setTopic("someTopic"); 
Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage(4, "someTopic", "someKey"); assertRecordMetadataExists(); } @Test public void processSendsMessageWithPartitionKeyHeaderOnly() throws Exception { endpoint.getConfiguration().setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.PARTITION_KEY, 4); producer.process(exchange); verifySendMessage(4, "someTopic"); assertRecordMetadataExists(); } @Test public void processSendsMessageWithMessageKeyHeader() throws Exception { endpoint.getConfiguration().setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.KEY, "someKey"); producer.process(exchange); verifySendMessage("someTopic", "someKey"); assertRecordMetadataExists(); } @Test public void processSendsMessageWithMessageTimestampHeader() throws Exception { endpoint.getConfiguration().setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.KEY, "someKey"); in.setHeader(KafkaConstants.OVERRIDE_TIMESTAMP, LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant().toEpochMilli()); producer.process(exchange); verifySendMessage("someTopic", "someKey"); assertRecordMetadataTimestampExists(); } @Test public void processSendMessageWithTopicHeader() throws Exception { endpoint.getConfiguration().setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); in.setHeader(KafkaConstants.TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); in.setHeader(KafkaConstants.PARTITION_KEY, 4); producer.process(exchange); verifySendMessage(4, "someTopic", 
"someKey"); assertRecordMetadataExists(); } @Test // Message and Topic Name alone public void processSendsMessageWithMessageTopicName() throws Exception { endpoint.getConfiguration().setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); producer.process(exchange); verifySendMessage("someTopic"); assertRecordMetadataExists(); } @Test public void processSendsMessageWithListOfExchangesWithOverrideTopicHeaderOnEveryExchange() throws Exception { endpoint.getConfiguration().setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); // we set the initial topic in.setHeader(KafkaConstants.OVERRIDE_TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); // we add our exchanges in order to aggregate final List<Exchange> nestedExchanges = createListOfExchangesWithTopics(Arrays.asList("overridenTopic1", "overridenTopic2", "overridenTopic3")); // aggregate final Exchange finalAggregatedExchange = aggregateExchanges(nestedExchanges, new GroupedExchangeAggregationStrategy()); in.setBody(finalAggregatedExchange.getIn().getBody()); in.setHeaders(finalAggregatedExchange.getIn().getHeaders()); producer.process(exchange); // assert results verifySendMessages(Arrays.asList("overridenTopic1", "overridenTopic2", "overridenTopic3"), null); assertRecordMetadataExists(3); assertRecordMetadataExistsForEachAggregatedExchange(); } @Test public void processSendsMessageWithListOfMessagesWithOverrideTopicHeaderOnEveryExchange() throws Exception { endpoint.getConfiguration().setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); // we set the initial topic in.setHeader(KafkaConstants.OVERRIDE_TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); // we add our exchanges in order to aggregate final List<Exchange> nestedExchanges = 
createListOfExchangesWithTopics(Arrays.asList("overridenTopic1", "overridenTopic2", "overridenTopic3")); // aggregate final Exchange finalAggregatedExchange = aggregateExchanges(nestedExchanges, new GroupedMessageAggregationStrategy()); in.setBody(finalAggregatedExchange.getIn().getBody()); in.setHeaders(finalAggregatedExchange.getIn().getHeaders()); producer.process(exchange); // assert results verifySendMessages(Arrays.asList("overridenTopic1", "overridenTopic2", "overridenTopic3"), Arrays.asList("", "", "")); assertRecordMetadataExists(3); assertRecordMetadataExistsForEachAggregatedMessage(); } @Test public void processSendsMessageWithListOfExchangesWithIndividualHeaders() throws Exception { endpoint.getConfiguration().setBatchWithIndividualHeaders(true); endpoint.getConfiguration().setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); // we set the initial topic in.setHeader(KafkaConstants.OVERRIDE_TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); in.setHeader(SOME_INDIVIDUAL_HEADER, "default"); // we add our exchanges in order to aggregate final List<Exchange> nestedExchanges = createListOfExchangesWithTopics(Arrays.asList("overridenTopic1", "overridenTopic2", "overridenTopic3")); // aggregate final Exchange finalAggregatedExchange = aggregateExchanges(nestedExchanges, new GroupedExchangeAggregationStrategy()); in.setBody(finalAggregatedExchange.getIn().getBody()); in.setHeaders(finalAggregatedExchange.getIn().getHeaders()); producer.process(exchange); // assert results verifySendMessages(Arrays.asList("overridenTopic1", "overridenTopic2", "overridenTopic3"), Arrays.asList("value-1", "value-2", "value-3")); assertRecordMetadataExists(3); assertRecordMetadataExistsForEachAggregatedExchange(); } @Test public void processSendsMessageWithListOfMessagesWithIndividualHeaders() throws Exception { endpoint.getConfiguration().setBatchWithIndividualHeaders(true); 
endpoint.getConfiguration().setTopic("someTopic"); Mockito.when(exchange.getIn()).thenReturn(in); Mockito.when(exchange.getMessage()).thenReturn(in); // we set the initial topic in.setHeader(KafkaConstants.OVERRIDE_TOPIC, "anotherTopic"); in.setHeader(KafkaConstants.KEY, "someKey"); // we add our exchanges in order to aggregate final List<Exchange> nestedExchanges = createListOfExchangesWithTopics(Arrays.asList("overridenTopic1", "overridenTopic2", "overridenTopic3")); // aggregate messages final Exchange finalAggregatedExchange = aggregateExchanges(nestedExchanges, new GroupedMessageAggregationStrategy()); in.setBody(finalAggregatedExchange.getIn().getBody()); in.setHeaders(finalAggregatedExchange.getIn().getHeaders()); producer.process(exchange); // assert results verifySendMessages(Arrays.asList("overridenTopic1", "overridenTopic2", "overridenTopic3"), Arrays.asList("value-1", "value-2", "value-3")); assertRecordMetadataExists(3); assertRecordMetadataExistsForEachAggregatedMessage(); } @SuppressWarnings({ "unchecked", "rawtypes" }) protected void verifySendMessage(Integer partitionKey, String topic, String messageKey) { ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(partitionKey, captor.getValue().partition()); assertEquals(messageKey, captor.getValue().key()); assertEquals(topic, captor.getValue().topic()); } @SuppressWarnings({ "unchecked", "rawtypes" }) protected void verifySendMessage(Integer partitionKey, String topic) { ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(partitionKey, captor.getValue().partition()); assertEquals(topic, captor.getValue().topic()); } @SuppressWarnings({ "unchecked", "rawtypes" }) protected void verifySendMessage(String topic, String messageKey) { ArgumentCaptor<ProducerRecord> captor = 
ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(messageKey, captor.getValue().key()); assertEquals(topic, captor.getValue().topic()); } @SuppressWarnings({ "unchecked", "rawtypes" }) protected void verifySendMessage(String topic) { ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer()).send(captor.capture()); assertEquals(topic, captor.getValue().topic()); } @SuppressWarnings({ "unchecked", "rawtypes" }) protected void verifySendMessages(final List<String> expectedTopics, final List<String> expectedIndividualHeaderValues) { final ArgumentCaptor<ProducerRecord> captor = ArgumentCaptor.forClass(ProducerRecord.class); Mockito.verify(producer.getKafkaProducer(), Mockito.atLeast(expectedTopics.size())).send(captor.capture()); final List<ProducerRecord> records = captor.getAllValues(); final List<String> actualTopics = records.stream().map(ProducerRecord::topic).toList(); assertEquals(expectedTopics, actualTopics); if (expectedIndividualHeaderValues == null) { return; } final List<String> actualIndividualHeaderValues = records.stream() .map(ProducerRecord::headers) .map(headers -> headers.lastHeader(SOME_INDIVIDUAL_HEADER)) .map(header -> header == null ? 
"" : new String(header.value(), StandardCharsets.UTF_8)) .collect(Collectors.toList()); assertEquals(expectedIndividualHeaderValues, actualIndividualHeaderValues); } private void assertRecordMetadataTimestampExists() { List<RecordMetadata> recordMetaData1 = (List<RecordMetadata>) in.getHeader(KafkaConstants.KAFKA_RECORD_META); assertNotNull(recordMetaData1); assertEquals(1, recordMetaData1.size(), "Expected one recordMetaData"); assertNotNull(recordMetaData1.get(0)); } private void assertRecordMetadataExists() { List<RecordMetadata> recordMetaData1 = (List<RecordMetadata>) in.getHeader(KafkaConstants.KAFKA_RECORD_META); assertNotNull(recordMetaData1); assertEquals(1, recordMetaData1.size(), "Expected one recordMetaData"); assertNotNull(recordMetaData1.get(0)); } private void assertRecordMetadataExists(final int numMetadata) { List<RecordMetadata> recordMetaData1 = (List<RecordMetadata>) in.getHeader(KafkaConstants.KAFKA_RECORD_META); assertNotNull(recordMetaData1); assertEquals(recordMetaData1.size(), numMetadata, "Expected one recordMetaData"); assertNotNull(recordMetaData1.get(0)); } private void assertRecordMetadataExistsForEachAggregatedExchange() { List<Exchange> exchanges = (List<Exchange>) in.getBody(); for (Exchange ex : exchanges) { List<RecordMetadata> recordMetaData = (List<RecordMetadata>) ex.getMessage().getHeader(KafkaConstants.KAFKA_RECORD_META); assertNotNull(recordMetaData); assertEquals(1, recordMetaData.size(), "Expected one recordMetaData"); assertNotNull(recordMetaData.get(0)); } } private void assertRecordMetadataExistsForEachAggregatedMessage() { List<Message> messages = (List<Message>) in.getBody(); for (Message msg : messages) { List<RecordMetadata> recordMetaData = (List<RecordMetadata>) msg.getHeader(KafkaConstants.KAFKA_RECORD_META); assertNotNull(recordMetaData); assertEquals(1, recordMetaData.size(), "Expected one recordMetaData"); assertNotNull(recordMetaData.get(0)); } } private Exchange aggregateExchanges(final List<Exchange> 
exchangesToAggregate, final AggregationStrategy strategy) { Exchange exchangeHolder = new DefaultExchange(context); for (final Exchange innerExchange : exchangesToAggregate) { exchangeHolder = strategy.aggregate(exchangeHolder, innerExchange); } strategy.onCompletion(exchangeHolder); return exchangeHolder; } private List<Exchange> createListOfExchangesWithTopics(final List<String> topics) { final List<Exchange> resultLists = new LinkedList<>(); int index = 1; for (String topic : topics) { final Exchange innerExchange = new DefaultExchange(context); innerExchange.setExchangeId("exchange-" + index); final Message msg = innerExchange.getIn(); msg.setMessageId("message-" + index); msg.setHeader(KafkaConstants.OVERRIDE_TOPIC, topic); msg.setHeader(SOME_INDIVIDUAL_HEADER, "value-" + index++); resultLists.add(innerExchange); } return resultLists; } }
KafkaProducerTest
java
spring-projects__spring-boot
module/spring-boot-r2dbc/src/dockerTest/java/org/springframework/boot/r2dbc/docker/compose/OracleXeR2dbcDockerComposeConnectionDetailsFactoryIntegrationTests.java
{ "start": 1584, "end": 2688 }
class ____ { @DockerComposeTest(composeFile = "oracle-compose.yaml", image = TestImage.ORACLE_XE) void runCreatesConnectionDetailsThatCanBeUsedToAccessDatabase(R2dbcConnectionDetails connectionDetails) { ConnectionFactoryOptions connectionFactoryOptions = connectionDetails.getConnectionFactoryOptions(); assertThat(connectionFactoryOptions.toString()).contains("database=xepdb1", "driver=oracle", "password=REDACTED", "user=app_user"); assertThat(connectionFactoryOptions.getRequiredValue(ConnectionFactoryOptions.PASSWORD)) .isEqualTo("app_user_secret"); Awaitility.await().atMost(Duration.ofMinutes(1)).ignoreExceptions().untilAsserted(() -> { String validationQuery = DatabaseDriver.ORACLE.getValidationQuery(); assertThat(validationQuery).isNotNull(); Object result = DatabaseClient.create(ConnectionFactories.get(connectionFactoryOptions)) .sql(validationQuery) .map((row, metadata) -> row.get(0)) .first() .block(Duration.ofSeconds(30)); assertThat(result).isEqualTo("Hello"); }); } }
OracleXeR2dbcDockerComposeConnectionDetailsFactoryIntegrationTests
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/nullness/RedundantNullCheckTest.java
{ "start": 16517, "end": 16897 }
class ____ { public static String getString() { return null; } // Implicitly nullable return } """) .addSourceLines( "Test.java", """ import org.jspecify.annotations.NullMarked; import mylib.NullUnmarkedLib; @NullMarked
NullUnmarkedLib
java
spring-projects__spring-security
config/src/test/java/org/springframework/security/config/annotation/ObjectPostProcessorTests.java
{ "start": 1371, "end": 1520 }
class ____ { static List<?> perform(ArrayList<?> l) { return new ListToLinkedListObjectPostProcessor().postProcess(l); } } }
PerformConversion
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/DoNotClaimAnnotationsTest.java
{ "start": 928, "end": 1449 }
class ____ { private final BugCheckerRefactoringTestHelper testHelper = BugCheckerRefactoringTestHelper.newInstance(DoNotClaimAnnotations.class, getClass()); @Test public void positive() { testHelper .addInputLines( "Test.java", """ import java.util.Set; import javax.annotation.processing.Processor; import javax.annotation.processing.RoundEnvironment; import javax.annotation.processing.RoundEnvironment; import javax.lang.model.element.TypeElement; abstract
DoNotClaimAnnotationsTest
java
apache__maven
its/core-it-suite/src/test/java/org/apache/maven/it/MavenITmng5840RelativePathReactorMatching.java
{ "start": 903, "end": 1623 }
class ____ extends AbstractMavenIntegrationTestCase { public MavenITmng5840RelativePathReactorMatching() {} @Test public void testRelativePathPointsToWrongVersion() throws Exception { File testDir = extractResources("/mng-5840-relative-path-reactor-matching"); Verifier verifier = newVerifier(new File(testDir, "parent-1").getAbsolutePath()); verifier.addCliArgument("install"); verifier.execute(); verifier.verifyErrorFreeLog(); verifier = newVerifier(new File(testDir, "child").getAbsolutePath()); verifier.addCliArgument("validate"); verifier.execute(); verifier.verifyErrorFreeLog(); } }
MavenITmng5840RelativePathReactorMatching
java
bumptech__glide
library/test/src/test/java/com/bumptech/glide/load/model/stream/BaseGlideUrlLoaderTest.java
{ "start": 1243, "end": 4841 }
class ____ { @Mock private ModelCache<Object, GlideUrl> modelCache; @Mock private ModelLoader<GlideUrl, InputStream> wrapped; @Mock private DataFetcher<InputStream> fetcher; private TestLoader urlLoader; private Options options; @SuppressWarnings("unchecked") @Before public void setUp() { MockitoAnnotations.initMocks(this); options = new Options(); urlLoader = new TestLoader(wrapped, modelCache); } @Test public void testReturnsNullIfUrlIsNull() { urlLoader.resultUrl = null; assertNull(urlLoader.buildLoadData(new Object(), 100, 100, options)); } @Test public void testReturnsNullIfUrlIsEmpty() { urlLoader.resultUrl = " "; assertNull(urlLoader.buildLoadData(new Object(), 100, 100, options)); } @Test public void testReturnsUrlFromCacheIfPresent() { Object model = new Object(); int width = 100; int height = 200; GlideUrl expectedUrl = mock(GlideUrl.class); when(modelCache.get(eq(model), eq(width), eq(height))).thenReturn(expectedUrl); when(wrapped.buildLoadData(eq(expectedUrl), eq(width), eq(height), eq(options))) .thenReturn(new ModelLoader.LoadData<>(mock(Key.class), fetcher)); assertEquals( fetcher, Preconditions.checkNotNull(urlLoader.buildLoadData(model, width, height, options)).fetcher); } @Test public void testBuildsNewUrlIfNotPresentInCache() { int width = 10; int height = 11; urlLoader.resultUrl = "fakeUrl"; when(wrapped.buildLoadData(any(GlideUrl.class), eq(width), eq(height), eq(options))) .thenAnswer( new Answer<ModelLoader.LoadData<InputStream>>() { @Override public ModelLoader.LoadData<InputStream> answer(InvocationOnMock invocationOnMock) { GlideUrl glideUrl = (GlideUrl) invocationOnMock.getArguments()[0]; assertEquals(urlLoader.resultUrl, glideUrl.toStringUrl()); return new ModelLoader.LoadData<>(mock(Key.class), fetcher); } }); assertEquals( fetcher, Preconditions.checkNotNull( urlLoader.buildLoadData(new GlideUrl(urlLoader.resultUrl), width, height, options)) .fetcher); } @Test public void testAddsNewUrlToCacheIfNotPresentInCache() { 
urlLoader.resultUrl = "fakeUrl"; Object model = new Object(); int width = 400; int height = 500; doAnswer( new Answer<Void>() { @Override public Void answer(InvocationOnMock invocationOnMock) { GlideUrl glideUrl = (GlideUrl) invocationOnMock.getArguments()[3]; assertEquals(urlLoader.resultUrl, glideUrl.toStringUrl()); return null; } }) .when(modelCache) .put(eq(model), eq(width), eq(height), any(GlideUrl.class)); urlLoader.buildLoadData(model, width, height, options); verify(modelCache).put(eq(model), eq(width), eq(height), any(GlideUrl.class)); } @Test public void testDoesNotInteractWithModelCacheIfNull() { TestLoader urlLoader = new TestLoader(wrapped, null); urlLoader.resultUrl = "fakeUrl"; int width = 456; int height = 789; when(wrapped.buildLoadData(any(GlideUrl.class), eq(width), eq(height), eq(options))) .thenReturn(new ModelLoader.LoadData<>(mock(Key.class), fetcher)); assertEquals( fetcher, Preconditions.checkNotNull(urlLoader.buildLoadData(new Object(), width, height, options)) .fetcher); } private static final
BaseGlideUrlLoaderTest
java
elastic__elasticsearch
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/GreaterThanIntsEvaluator.java
{ "start": 1200, "end": 4988 }
class ____ implements EvalOperator.ExpressionEvaluator { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(GreaterThanIntsEvaluator.class); private final Source source; private final EvalOperator.ExpressionEvaluator lhs; private final EvalOperator.ExpressionEvaluator rhs; private final DriverContext driverContext; private Warnings warnings; public GreaterThanIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, EvalOperator.ExpressionEvaluator rhs, DriverContext driverContext) { this.source = source; this.lhs = lhs; this.rhs = rhs; this.driverContext = driverContext; } @Override public Block eval(Page page) { try (IntBlock lhsBlock = (IntBlock) lhs.eval(page)) { try (IntBlock rhsBlock = (IntBlock) rhs.eval(page)) { IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { return eval(page.getPositionCount(), lhsBlock, rhsBlock); } IntVector rhsVector = rhsBlock.asVector(); if (rhsVector == null) { return eval(page.getPositionCount(), lhsBlock, rhsBlock); } return eval(page.getPositionCount(), lhsVector, rhsVector).asBlock(); } } } @Override public long baseRamBytesUsed() { long baseRamBytesUsed = BASE_RAM_BYTES_USED; baseRamBytesUsed += lhs.baseRamBytesUsed(); baseRamBytesUsed += rhs.baseRamBytesUsed(); return baseRamBytesUsed; } public BooleanBlock eval(int positionCount, IntBlock lhsBlock, IntBlock rhsBlock) { try(BooleanBlock.Builder result = driverContext.blockFactory().newBooleanBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { switch (lhsBlock.getValueCount(p)) { case 0: result.appendNull(); continue position; case 1: break; default: warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); result.appendNull(); continue position; } switch (rhsBlock.getValueCount(p)) { case 0: result.appendNull(); continue position; case 1: break; default: warnings().registerException(new IllegalArgumentException("single-value 
function encountered multi-value")); result.appendNull(); continue position; } int lhs = lhsBlock.getInt(lhsBlock.getFirstValueIndex(p)); int rhs = rhsBlock.getInt(rhsBlock.getFirstValueIndex(p)); result.appendBoolean(GreaterThan.processInts(lhs, rhs)); } return result.build(); } } public BooleanVector eval(int positionCount, IntVector lhsVector, IntVector rhsVector) { try(BooleanVector.FixedBuilder result = driverContext.blockFactory().newBooleanVectorFixedBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { int lhs = lhsVector.getInt(p); int rhs = rhsVector.getInt(p); result.appendBoolean(p, GreaterThan.processInts(lhs, rhs)); } return result.build(); } } @Override public String toString() { return "GreaterThanIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; } @Override public void close() { Releasables.closeExpectNoException(lhs, rhs); } private Warnings warnings() { if (warnings == null) { this.warnings = Warnings.createWarnings( driverContext.warningsMode(), source.source().getLineNumber(), source.source().getColumnNumber(), source.text() ); } return warnings; } static
GreaterThanIntsEvaluator
java
spring-projects__spring-framework
spring-webmvc/src/main/java/org/springframework/web/servlet/view/DefaultFragmentsRenderingBuilder.java
{ "start": 1205, "end": 2807 }
class ____ implements FragmentsRendering.Builder { private @Nullable HttpStatusCode status; private @Nullable HttpHeaders headers; private final Collection<ModelAndView> fragments = new ArrayList<>(); @Override public FragmentsRendering.Builder status(HttpStatusCode status) { this.status = status; return this; } @Override public FragmentsRendering.Builder header(String headerName, String... headerValues) { initHeaders().put(headerName, Arrays.asList(headerValues)); return this; } @Override public FragmentsRendering.Builder headers(Consumer<HttpHeaders> headersConsumer) { headersConsumer.accept(initHeaders()); return this; } private HttpHeaders initHeaders() { if (this.headers == null) { this.headers = new HttpHeaders(); } return this.headers; } @Override public DefaultFragmentsRenderingBuilder fragment(String viewName, Map<String, Object> model) { return fragment(new ModelAndView(viewName, model)); } @Override public DefaultFragmentsRenderingBuilder fragment(String viewName) { return fragment(new ModelAndView(viewName)); } @Override public DefaultFragmentsRenderingBuilder fragment(ModelAndView fragment) { this.fragments.add(fragment); return this; } @Override public DefaultFragmentsRenderingBuilder fragments(Collection<ModelAndView> fragments) { this.fragments.addAll(fragments); return this; } @Override public FragmentsRendering build() { return new DefaultFragmentsRendering( this.status, (this.headers != null ? this.headers : HttpHeaders.EMPTY), this.fragments); } }
DefaultFragmentsRenderingBuilder
java
square__retrofit
retrofit-converters/scalars/src/test/java/retrofit2/converter/scalars/ScalarsConverterFactoryTest.java
{ "start": 10386, "end": 10629 }
class ____.lang.Object\n" + " for method Service.object"); assertThat(e.getCause()) .hasMessageThat() .isEqualTo( "" + "Could not locate ResponseBody converter for
java
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/utils/AdaptiveBatchAbstractTestBase.java
{ "start": 1351, "end": 2260 }
class ____ { protected static final int DEFAULT_PARALLELISM = 3; @RegisterExtension private static final MiniClusterExtension MINI_CLUSTER_EXTENSION = new MiniClusterExtension( new MiniClusterResourceConfiguration.Builder() .setConfiguration(getConfiguration()) .setNumberTaskManagers(1) .setNumberSlotsPerTaskManager(DEFAULT_PARALLELISM) .build()); private static Configuration getConfiguration() { Configuration config = new Configuration(); config.set(TaskManagerOptions.MANAGED_MEMORY_SIZE, MemorySize.parse("100m")); config.set( BatchExecutionOptions.ADAPTIVE_AUTO_PARALLELISM_AVG_DATA_VOLUME_PER_TASK, MemorySize.parse("100k")); return config; } }
AdaptiveBatchAbstractTestBase
java
spring-projects__spring-framework
spring-tx/src/test/java/org/springframework/transaction/annotation/AnnotationTransactionInterceptorTests.java
{ "start": 15704, "end": 16477 }
class ____ { public void doSomething() { assertThat(TransactionSynchronizationManager.isActualTransactionActive()).isTrue(); assertThat(TransactionSynchronizationManager.isCurrentTransactionReadOnly()).isFalse(); } @Transactional(readOnly = true) public void doSomethingElse() { assertThat(TransactionSynchronizationManager.isActualTransactionActive()).isTrue(); assertThat(TransactionSynchronizationManager.isCurrentTransactionReadOnly()).isTrue(); } public void doSomethingCompletelyElse() { assertThat(TransactionSynchronizationManager.isActualTransactionActive()).isTrue(); assertThat(TransactionSynchronizationManager.isCurrentTransactionReadOnly()).isFalse(); } } @Transactional(readOnly = true) static
TestWithSingleMethodOverride
java
spring-projects__spring-boot
module/spring-boot-jersey/src/main/java/org/springframework/boot/jersey/autoconfigure/JerseyAutoConfiguration.java
{ "start": 3678, "end": 7242 }
class ____ implements ServletContextAware { private static final Log logger = LogFactory.getLog(JerseyAutoConfiguration.class); private final JerseyProperties jersey; private final ResourceConfig config; JerseyAutoConfiguration(JerseyProperties jersey, ResourceConfig config, ObjectProvider<ResourceConfigCustomizer> customizers) { this.jersey = jersey; this.config = config; customizers.orderedStream().forEach((customizer) -> customizer.customize(this.config)); } @Bean @ConditionalOnMissingFilterBean FilterRegistrationBean<RequestContextFilter> requestContextFilter() { FilterRegistrationBean<RequestContextFilter> registration = new FilterRegistrationBean<>(); registration.setFilter(new RequestContextFilter()); registration.setOrder(this.jersey.getFilter().getOrder() - 1); registration.setName("requestContextFilter"); return registration; } @Bean @ConditionalOnMissingBean JerseyApplicationPath jerseyApplicationPath() { return new DefaultJerseyApplicationPath(this.jersey.getApplicationPath(), this.config); } @Bean @ConditionalOnMissingBean(name = "jerseyFilterRegistration") @ConditionalOnProperty(name = "spring.jersey.type", havingValue = "filter") FilterRegistrationBean<ServletContainer> jerseyFilterRegistration(JerseyApplicationPath applicationPath) { FilterRegistrationBean<ServletContainer> registration = new FilterRegistrationBean<>(); registration.setFilter(new ServletContainer(this.config)); registration.setUrlPatterns(Collections.singletonList(applicationPath.getUrlMapping())); registration.setOrder(this.jersey.getFilter().getOrder()); registration.addInitParameter(ServletProperties.FILTER_CONTEXT_PATH, stripPattern(applicationPath.getPath())); addInitParameters(registration); registration.setName("jerseyFilter"); registration.setDispatcherTypes(EnumSet.allOf(DispatcherType.class)); return registration; } private String stripPattern(String path) { if (path.endsWith("/*")) { path = path.substring(0, path.lastIndexOf("/*")); } return path; } @Bean 
@ConditionalOnMissingBean(name = "jerseyServletRegistration") @ConditionalOnProperty(name = "spring.jersey.type", havingValue = "servlet", matchIfMissing = true) ServletRegistrationBean<ServletContainer> jerseyServletRegistration(JerseyApplicationPath applicationPath) { ServletRegistrationBean<ServletContainer> registration = new ServletRegistrationBean<>( new ServletContainer(this.config), applicationPath.getUrlMapping()); addInitParameters(registration); registration.setName(getServletRegistrationName()); registration.setLoadOnStartup(this.jersey.getServlet().getLoadOnStartup()); registration.setIgnoreRegistrationFailure(true); return registration; } private String getServletRegistrationName() { return ClassUtils.getUserClass(this.config.getClass()).getName(); } private void addInitParameters(DynamicRegistrationBean<?> registration) { this.jersey.getInit().forEach(registration::addInitParameter); } @Override public void setServletContext(ServletContext servletContext) { String servletRegistrationName = getServletRegistrationName(); ServletRegistration registration = servletContext.getServletRegistration(servletRegistrationName); if (registration != null) { if (logger.isInfoEnabled()) { logger.info("Configuring existing registration for Jersey servlet '" + servletRegistrationName + "'"); } registration.setInitParameters(this.jersey.getInit()); } } @Order(Ordered.HIGHEST_PRECEDENCE) public static final
JerseyAutoConfiguration
java
google__dagger
javatests/dagger/functional/guava/a/OptionalBindingComponentsWithInaccessibleTypes.java
{ "start": 1025, "end": 1188 }
interface ____ extends OptionalBindingComponent {} @Component(modules = {OptionalBindingModule.class, ConcreteBindingModule.class})
AbsentOptionalBindingComponent
java
apache__camel
core/camel-core/src/test/java/org/apache/camel/impl/engine/CamelPostProcessorHelperTest.java
{ "start": 32616, "end": 33288 }
class ____ { private int timeout; private String greeting; public String doSomething(String body) { return greeting + " " + body + " with timeout=" + timeout; } public int getTimeout() { return timeout; } @PropertyInject("myTimeout") public void setTimeout(int timeout) { this.timeout = timeout; } public String getGreeting() { return greeting; } @PropertyInject("Hello {{myApp}}") public void setGreeting(String greeting) { this.greeting = greeting; } } public static
MyPropertyMethodBean
java
apache__kafka
streams/integration-tests/src/test/java/org/apache/kafka/streams/integration/KStreamRepartitionIntegrationTest.java
{ "start": 4021, "end": 15533 }
class ____ { private static final int NUM_BROKERS = 1; public static final EmbeddedKafkaCluster CLUSTER = new EmbeddedKafkaCluster(NUM_BROKERS); @BeforeAll public static void startCluster() throws IOException { CLUSTER.start(); } @AfterAll public static void closeCluster() { CLUSTER.stop(); } private String topicB; private String inputTopic; private String outputTopic; private String applicationId; private String safeTestName; private List<KafkaStreams> kafkaStreamsInstances; private final File testFolder = TestUtils.tempDirectory(); @BeforeEach public void before(final TestInfo testInfo) throws InterruptedException { kafkaStreamsInstances = new ArrayList<>(); safeTestName = safeUniqueTestName(testInfo); topicB = "topic-b-" + safeTestName; inputTopic = "input-topic-" + safeTestName; outputTopic = "output-topic-" + safeTestName; applicationId = "app-" + safeTestName; CLUSTER.createTopic(inputTopic, 4, 1); CLUSTER.createTopic(outputTopic, 1, 1); } private Properties createStreamsConfig(final String topologyOptimization, final boolean useNewProtocol) { final Properties streamsConfiguration = new Properties(); streamsConfiguration.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId); streamsConfiguration.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers()); streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, testFolder.getPath()); streamsConfiguration.put(StreamsConfig.STATESTORE_CACHE_MAX_BYTES_CONFIG, 0); streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 100L); streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.Integer().getClass()); streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass()); streamsConfiguration.put(StreamsConfig.TOPOLOGY_OPTIMIZATION_CONFIG, topologyOptimization); if (useNewProtocol) { streamsConfiguration.put(StreamsConfig.GROUP_PROTOCOL_CONFIG, GroupProtocol.STREAMS.name().toLowerCase(Locale.getDefault())); } return 
streamsConfiguration; } private static Stream<Arguments> protocolAndOptimizationParameters() { return Stream.of( Arguments.of(StreamsConfig.OPTIMIZE, false), // OPTIMIZE with CLASSIC protocol Arguments.of(StreamsConfig.OPTIMIZE, true), // OPTIMIZE with STREAMS protocol Arguments.of(StreamsConfig.NO_OPTIMIZATION, false), // NO_OPTIMIZATION with CLASSIC protocol Arguments.of(StreamsConfig.NO_OPTIMIZATION, true) // NO_OPTIMIZATION with STREAMS protocol ); } @AfterEach public void whenShuttingDown() throws IOException { kafkaStreamsInstances.stream() .filter(Objects::nonNull) .forEach(ks -> ks.close(Duration.ofSeconds(60))); Utils.delete(testFolder); } @ParameterizedTest @MethodSource("protocolAndOptimizationParameters") public void shouldThrowAnExceptionWhenNumberOfPartitionsOfRepartitionOperationDoNotMatchSourceTopicWhenJoining(final String topologyOptimization, final boolean useNewProtocol) throws InterruptedException { final int topicBNumberOfPartitions = 6; final String inputTopicRepartitionName = "join-repartition-test"; final AtomicReference<Throwable> expectedThrowable = new AtomicReference<>(); final int inputTopicRepartitionedNumOfPartitions = 2; CLUSTER.createTopic(topicB, topicBNumberOfPartitions, 1); final StreamsBuilder builder = new StreamsBuilder(); final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned .<Integer, String>as(inputTopicRepartitionName) .withNumberOfPartitions(inputTopicRepartitionedNumOfPartitions); final KStream<Integer, String> topicBStream = builder .stream(topicB, Consumed.with(Serdes.Integer(), Serdes.String())); builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String())) .repartition(inputTopicRepartitioned) .join(topicBStream, (value1, value2) -> value2, JoinWindows.ofTimeDifferenceWithNoGrace(Duration.ofSeconds(10))) .to(outputTopic); final Properties streamsConfiguration = createStreamsConfig(topologyOptimization, useNewProtocol); try (final KafkaStreams ks = new 
KafkaStreams(builder.build(streamsConfiguration), streamsConfiguration)) { ks.setUncaughtExceptionHandler(exception -> { expectedThrowable.set(exception); System.out.println(String.format("[%s Protocol] Exception caught: %s", useNewProtocol ? "STREAMS" : "CLASSIC", exception.getMessage())); return SHUTDOWN_CLIENT; }); ks.start(); TestUtils.waitForCondition(() -> ks.state() == ERROR, 30_000, "Kafka Streams never went into error state"); final String expectedMsg = String.format("Number of partitions [%s] of repartition topic [%s] " + "doesn't match number of partitions [%s] of the source topic.", inputTopicRepartitionedNumOfPartitions, toRepartitionTopicName(inputTopicRepartitionName), topicBNumberOfPartitions); assertNotNull(expectedThrowable.get()); assertTrue(expectedThrowable.get().getMessage().contains(expectedMsg)); } } @ParameterizedTest @MethodSource("protocolAndOptimizationParameters") public void shouldDeductNumberOfPartitionsFromRepartitionOperation(final String topologyOptimization, final boolean useNewProtocol) throws Exception { final String topicBMapperName = "topic-b-mapper"; final int topicBNumberOfPartitions = 6; final String inputTopicRepartitionName = "join-repartition-test"; final int inputTopicRepartitionedNumOfPartitions = 3; final long timestamp = System.currentTimeMillis(); CLUSTER.createTopic(topicB, topicBNumberOfPartitions, 1); final List<KeyValue<Integer, String>> expectedRecords = Arrays.asList( new KeyValue<>(1, "A"), new KeyValue<>(2, "B") ); sendEvents(timestamp, expectedRecords); sendEvents(topicB, timestamp, expectedRecords); final StreamsBuilder builder = new StreamsBuilder(); final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned .<Integer, String>as(inputTopicRepartitionName) .withNumberOfPartitions(inputTopicRepartitionedNumOfPartitions); final KStream<Integer, String> topicBStream = builder .stream(topicB, Consumed.with(Serdes.Integer(), Serdes.String())) .map(KeyValue::new, Named.as(topicBMapperName)); 
builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String())) .repartition(inputTopicRepartitioned) .join(topicBStream, (value1, value2) -> value2, JoinWindows.of(Duration.ofSeconds(10))) .to(outputTopic); final Properties streamsConfiguration = createStreamsConfig(topologyOptimization, useNewProtocol); builder.build(streamsConfiguration); startStreams(builder, streamsConfiguration); assertEquals(inputTopicRepartitionedNumOfPartitions, getNumberOfPartitionsForTopic(toRepartitionTopicName(inputTopicRepartitionName))); assertEquals(inputTopicRepartitionedNumOfPartitions, getNumberOfPartitionsForTopic(toRepartitionTopicName(topicBMapperName))); validateReceivedMessages( new IntegerDeserializer(), new StringDeserializer(), expectedRecords ); } @ParameterizedTest @MethodSource("protocolAndOptimizationParameters") public void shouldDoProperJoiningWhenNumberOfPartitionsAreValidWhenUsingRepartitionOperation(final String topologyOptimization, final boolean useNewProtocol) throws Exception { final String topicBRepartitionedName = "topic-b-scale-up"; final String inputTopicRepartitionedName = "input-topic-scale-up"; final long timestamp = System.currentTimeMillis(); CLUSTER.createTopic(topicB, 1, 1); final List<KeyValue<Integer, String>> expectedRecords = Arrays.asList( new KeyValue<>(1, "A"), new KeyValue<>(2, "B") ); final List<KeyValue<Integer, String>> recordsToSend = new ArrayList<>(expectedRecords); recordsToSend.add(new KeyValue<>(null, "C")); sendEvents(timestamp, recordsToSend); sendEvents(topicB, timestamp, recordsToSend); final StreamsBuilder builder = new StreamsBuilder(); final Repartitioned<Integer, String> inputTopicRepartitioned = Repartitioned .<Integer, String>as(inputTopicRepartitionedName) .withNumberOfPartitions(4); final Repartitioned<Integer, String> topicBRepartitioned = Repartitioned .<Integer, String>as(topicBRepartitionedName) .withNumberOfPartitions(4); final KStream<Integer, String> topicBStream = builder .stream(topicB, 
Consumed.with(Serdes.Integer(), Serdes.String())) .repartition(topicBRepartitioned); builder.stream(inputTopic, Consumed.with(Serdes.Integer(), Serdes.String())) .repartition(inputTopicRepartitioned) .join(topicBStream, (value1, value2) -> value2, JoinWindows.of(Duration.ofSeconds(10))) .to(outputTopic); startStreams(builder, createStreamsConfig(topologyOptimization, useNewProtocol)); assertEquals(4, getNumberOfPartitionsForTopic(toRepartitionTopicName(topicBRepartitionedName))); assertEquals(4, getNumberOfPartitionsForTopic(toRepartitionTopicName(inputTopicRepartitionedName))); validateReceivedMessages( new IntegerDeserializer(), new StringDeserializer(), expectedRecords ); } @ParameterizedTest @MethodSource("protocolAndOptimizationParameters") public void shouldRepartitionToMultiplePartitions(final String topologyOptimization, final boolean useNewProtocol) throws Exception { final String repartitionName = "broadcasting-partitioner-test"; final long timestamp = System.currentTimeMillis(); final AtomicInteger partitionerInvocation = new AtomicInteger(0); // This test needs to write to an output topic with 4 partitions. Hence, creating a new one final String broadcastingOutputTopic = "broadcast-output-topic-" + safeTestName; CLUSTER.createTopic(broadcastingOutputTopic, 4, 1); final List<KeyValue<Integer, String>> expectedRecordsOnRepartition = Arrays.asList( new KeyValue<>(1, "A"), new KeyValue<>(1, "A"), new KeyValue<>(1, "A"), new KeyValue<>(1, "A"), new KeyValue<>(2, "B"), new KeyValue<>(2, "B"), new KeyValue<>(2, "B"), new KeyValue<>(2, "B") ); final List<KeyValue<Integer, String>> expectedRecords = expectedRecordsOnRepartition.subList(3, 5);
KStreamRepartitionIntegrationTest
java
spring-projects__spring-framework
framework-docs/src/main/java/org/springframework/docs/integration/jms/jmsreceivingasyncmessagelisteneradapter/MessageListenerConfiguration.java
{ "start": 925, "end": 1369 }
class ____ { // tag::snippet[] @Bean MessageListenerAdapter messageListener(DefaultTextMessageDelegate messageDelegate) { MessageListenerAdapter messageListener = new MessageListenerAdapter(messageDelegate); messageListener.setDefaultListenerMethod("receive"); // We don't want automatic message context extraction messageListener.setMessageConverter(null); return messageListener; } // end::snippet[] }
MessageListenerConfiguration
java
elastic__elasticsearch
server/src/main/java/org/elasticsearch/index/codec/vectors/es93/ES93HnswVectorsFormat.java
{ "start": 1137, "end": 2798 }
class ____ extends AbstractHnswVectorsFormat { static final String NAME = "ES93HnswVectorsFormat"; private final FlatVectorsFormat flatVectorsFormat; public ES93HnswVectorsFormat() { super(NAME); flatVectorsFormat = new ES93GenericFlatVectorsFormat(); } public ES93HnswVectorsFormat(DenseVectorFieldMapper.ElementType elementType) { super(NAME); flatVectorsFormat = new ES93GenericFlatVectorsFormat(elementType, false); } public ES93HnswVectorsFormat(int maxConn, int beamWidth, DenseVectorFieldMapper.ElementType elementType) { super(NAME, maxConn, beamWidth); flatVectorsFormat = new ES93GenericFlatVectorsFormat(elementType, false); } public ES93HnswVectorsFormat( int maxConn, int beamWidth, DenseVectorFieldMapper.ElementType elementType, int numMergeWorkers, ExecutorService mergeExec ) { super(NAME, maxConn, beamWidth, numMergeWorkers, mergeExec); flatVectorsFormat = new ES93GenericFlatVectorsFormat(elementType, false); } @Override protected FlatVectorsFormat flatVectorsFormat() { return flatVectorsFormat; } @Override public KnnVectorsWriter fieldsWriter(SegmentWriteState state) throws IOException { return new Lucene99HnswVectorsWriter(state, maxConn, beamWidth, flatVectorsFormat.fieldsWriter(state), numMergeWorkers, mergeExec); } @Override public KnnVectorsReader fieldsReader(SegmentReadState state) throws IOException { return new Lucene99HnswVectorsReader(state, flatVectorsFormat.fieldsReader(state)); } }
ES93HnswVectorsFormat
java
apache__flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/ModelProviderFactory.java
{ "start": 1504, "end": 1775 }
interface ____ extends Factory { /** Create ModelProvider based on provider. */ ModelProvider createModelProvider(Context context); /** Provides catalog and session information describing the model to be accessed. */ @PublicEvolving
ModelProviderFactory
java
quarkusio__quarkus
integration-tests/awt/src/test/java/io/quarkus/awt/it/ImageGeometryFontsTest.java
{ "start": 801, "end": 5697 }
class ____ { /** * When comparing pixel colour values, how much difference * from the expected value is allowed. * 0 means no difference is tolerated. * * e.g. When building with -Dquarkus.native.builder-image=registry.access.redhat.com/quarkus/mandrel-21-rhel8:latest * a different set of static libraries is used for HotSpot (system ones from RPM) and a different set * for Native Image (those bundled in JDK static libs installation). * * The discrepancy between imaging libraries could cause slight difference. * In this case, it is fonts antialiasing: * * TIFF: Wrong pixel. Expected: [46,14,32,255] Actual: [46,14,30,255] * GIF: Wrong pixel. Expected: [2,0,0,0] Actual: [1,0,0,0] * PNG: Wrong pixel. Expected: [46,14,32,255] Actual: [46,14,30,255] * JPG: Wrong pixel. Expected: [73,0,44,0] Actual: [72,0,39,0] * BMP: Wrong pixel. Expected: [46,14,32,0] Actual: [46,14,30,0] * * JPEG compression behaves differently between Linux and Windows, * so that also begs for pixel difference tolerance. * * Hence, the tolerance below is higher than 0: */ private static final int[] PIXEL_DIFFERENCE_THRESHOLD_RGBA_VEC = new int[] { 25, 25, 40, 0 }; @ParameterizedTest // @formatter:off @ValueSource(strings = { // Image format name followed by expected pixel values "TIFF █46,14,32,255 █229,195,83,255 █255,200,0,255 █255,0,0,255 █255,0,0,255", "GIF █2,0,0,0 █214,0,0,0 █204,0,0,0 █121,0,0,0 █121,0,0,0", "PNG █46,14,32,255 █229,195,83,255 █255,200,0,255 █255,0,0,255 █255,0,0,255", "JPG █73,0,44,0 █225,162,170,0 █255,199,20,0 █239,7,0,0 █249,1,2,0", "BMP █46,14,32,0 █229,195,83,0 █255,200,0,0 █255,0,0,0 █255,0,0,0", "WBMP █0,0,0,0 █0,0,0,0 █1,0,0,0 █0,0,0,0 █0,0,0,0" }) // @formatter:on public void testGeometryAndFonts(String testData) throws IOException { final String[] formatPixels = testData.split("\\█"); final String formatName = formatPixels[0].trim(); final byte[] imgBytes = given() .when() .get("/graphics/" + formatName) .asByteArray(); final BufferedImage image = ImageIO.read(new 
ByteArrayInputStream(imgBytes)); // Sanity Assertions.assertNotNull(image, formatName + ": The data returned are not a valid image."); Assertions.assertTrue(image.getWidth() == 350 && image.getHeight() == 300, String.format("%s: image's expected dimension is %d x %d, but was %d x %d.", formatName, 350, 300, image.getWidth(), image.getHeight())); // Test pixels final int[][] pixelsCoordinates = new int[][] { { 80, 56 }, { 34, 103 }, { 50, 186 }, { 246, 204 }, { 294, 205 } }; for (int i = 0; i < pixelsCoordinates.length; i++) { final int[] expected = decodeArray4(formatPixels[i + 1].trim()); final int[] actual = new int[4]; //4BYTE RGBA image.getData().getPixel(pixelsCoordinates[i][0], pixelsCoordinates[i][1], actual); Assertions.assertTrue(compareArrays(expected, actual, PIXEL_DIFFERENCE_THRESHOLD_RGBA_VEC), String.format("%s: Wrong pixel at %dx%d. Expected: [%d,%d,%d,%d] Actual: [%d,%d,%d,%d]", formatName, pixelsCoordinates[i][0], pixelsCoordinates[i][1], expected[0], expected[1], expected[2], expected[3], actual[0], actual[1], actual[2], actual[3])); } checkLog(null, "Geometry and Fonts"); } /** * At least some system fonts are expected to be found. The number may vary wildly * depending on the system, so it doesn't make sense to be checking for a particular * number or even for font family names. */ @Test void checkFonts() { final String fonts = given() .when() .get("/fonts") .asString(); final String[] actual = fonts.substring(1, fonts.length() - 1).split(", "); Assertions.assertTrue(actual.length > 3, "There are supposed to be at least some system fonts found beside those installed by this TS."); final Map<String, Boolean> expected = new HashMap<>(Map.of( "MyFreeMono", Boolean.FALSE, "MyFreeSerif", Boolean.FALSE, "DejaVu Sans Mono X", Boolean.FALSE)); for (String f : actual) { expected.replace(f, Boolean.TRUE); } Assertions.assertTrue(expected.values().stream().allMatch(Boolean.TRUE::equals), "Not all expected fonts were found: " + expected + ". 
" + "These fonts were found though: " + Arrays.toString(actual)); } }
ImageGeometryFontsTest
java
apache__hadoop
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/DtUtilShell.java
{ "start": 10876, "end": 11481 }
class ____ extends SubCommand { public static final String RENEW_USAGE = "dtutil renew -alias <alias> filename..."; @Override public boolean validate() { if (alias == null) { LOG.error("-alias flag is not optional for renew"); return false; } return true; } @Override public void execute() throws Exception { for (File tokenFile : tokenFiles) { DtFileOperations.renewTokenFile(tokenFile, format, alias, getConf()); } } @Override public String getUsage() { return RENEW_USAGE; } } private
Renew
java
apache__hadoop
hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/util/PlatformName.java
{ "start": 3209, "end": 3510 }
class ____ by the * vendor rather than using the vendor method. For example if on JVM provides a * different Kerberos login module testing for that login module being loadable * before configuring to use it is preferable to using the vendor data. * * @param className the name of a
provided
java
resilience4j__resilience4j
resilience4j-circuitbreaker/src/main/java/io/github/resilience4j/circuitbreaker/internal/CircuitBreakerStateMachine.java
{ "start": 18706, "end": 21553 }
class ____ extends EventProcessor<CircuitBreakerEvent> implements EventConsumer<CircuitBreakerEvent>, EventPublisher { @Override public EventPublisher onSuccess( EventConsumer<CircuitBreakerOnSuccessEvent> onSuccessEventConsumer) { registerConsumer(CircuitBreakerOnSuccessEvent.class.getName(), onSuccessEventConsumer); return this; } @Override public EventPublisher onError( EventConsumer<CircuitBreakerOnErrorEvent> onErrorEventConsumer) { registerConsumer(CircuitBreakerOnErrorEvent.class.getName(), onErrorEventConsumer); return this; } @Override public EventPublisher onStateTransition( EventConsumer<CircuitBreakerOnStateTransitionEvent> onStateTransitionEventConsumer) { registerConsumer(CircuitBreakerOnStateTransitionEvent.class.getName(), onStateTransitionEventConsumer); return this; } @Override public EventPublisher onReset( EventConsumer<CircuitBreakerOnResetEvent> onResetEventConsumer) { registerConsumer(CircuitBreakerOnResetEvent.class.getName(), onResetEventConsumer); return this; } @Override public EventPublisher onIgnoredError( EventConsumer<CircuitBreakerOnIgnoredErrorEvent> onIgnoredErrorEventConsumer) { registerConsumer(CircuitBreakerOnIgnoredErrorEvent.class.getName(), onIgnoredErrorEventConsumer); return this; } @Override public EventPublisher onCallNotPermitted( EventConsumer<CircuitBreakerOnCallNotPermittedEvent> onCallNotPermittedEventConsumer) { registerConsumer(CircuitBreakerOnCallNotPermittedEvent.class.getName(), onCallNotPermittedEventConsumer); return this; } @Override public EventPublisher onFailureRateExceeded( EventConsumer<CircuitBreakerOnFailureRateExceededEvent> onFailureRateExceededConsumer) { registerConsumer(CircuitBreakerOnFailureRateExceededEvent.class.getName(), onFailureRateExceededConsumer); return this; } @Override public EventPublisher onSlowCallRateExceeded( EventConsumer<CircuitBreakerOnSlowCallRateExceededEvent> onSlowCallRateExceededConsumer) { registerConsumer(CircuitBreakerOnSlowCallRateExceededEvent.class.getName(), 
onSlowCallRateExceededConsumer); return this; } @Override public void consumeEvent(CircuitBreakerEvent event) { super.processEvent(event); } } private
CircuitBreakerEventProcessor
java
apache__flink
flink-connectors/flink-connector-files/src/main/java/org/apache/flink/connector/file/table/stream/StreamingSink.java
{ "start": 2821, "end": 8753 }
class ____ { private StreamingSink() {} /** * Create a file writer by input stream. This is similar to {@link StreamingFileSink}, in * addition, it can emit {@link PartitionCommitInfo} to down stream. */ public static <T> DataStream<PartitionCommitInfo> writer( ProviderContext providerContext, DataStream<T> inputStream, long bucketCheckInterval, StreamingFileSink.BucketsBuilder< T, String, ? extends StreamingFileSink.BucketsBuilder<T, String, ?>> bucketsBuilder, int parallelism, List<String> partitionKeys, Configuration conf, boolean parallelismConfigured) { StreamingFileWriter<T> fileWriter = new StreamingFileWriter<>(bucketCheckInterval, bucketsBuilder, partitionKeys, conf); SingleOutputStreamOperator<PartitionCommitInfo> writerStream = inputStream.transform( StreamingFileWriter.class.getSimpleName(), TypeInformation.of(PartitionCommitInfo.class), fileWriter); writerStream.getTransformation().setParallelism(parallelism, parallelismConfigured); providerContext.generateUid("streaming-writer").ifPresent(writerStream::uid); return writerStream; } /** * Create a file writer with compaction operators by input stream. In addition, it can emit * {@link PartitionCommitInfo} to down stream. */ public static <T> DataStream<PartitionCommitInfo> compactionWriter( ProviderContext providerContext, DataStream<T> inputStream, long bucketCheckInterval, StreamingFileSink.BucketsBuilder< T, String, ? 
extends StreamingFileSink.BucketsBuilder<T, String, ?>> bucketsBuilder, FileSystemFactory fsFactory, Path path, CompactReader.Factory<T> readFactory, long targetFileSize, int parallelism, boolean parallelismConfigured) { CompactFileWriter<T> writer = new CompactFileWriter<>(bucketCheckInterval, bucketsBuilder); SupplierWithException<FileSystem, IOException> fsSupplier = (SupplierWithException<FileSystem, IOException> & Serializable) () -> fsFactory.create(path.toUri()); CompactCoordinator coordinator = new CompactCoordinator(fsSupplier, targetFileSize); SingleOutputStreamOperator<CoordinatorInput> writerStream = inputStream.transform( "streaming-writer", TypeInformation.of(CoordinatorInput.class), writer); writerStream.getTransformation().setParallelism(parallelism, parallelismConfigured); providerContext.generateUid("streaming-writer").ifPresent(writerStream::uid); SingleOutputStreamOperator<CoordinatorOutput> coordinatorStream = writerStream .transform( "compact-coordinator", TypeInformation.of(CoordinatorOutput.class), coordinator) .setParallelism(1) .setMaxParallelism(1); providerContext.generateUid("compact-coordinator").ifPresent(coordinatorStream::uid); CompactWriter.Factory<T> writerFactory = CompactBucketWriter.factory( (SupplierWithException<BucketWriter<T, String>, IOException> & Serializable) bucketsBuilder::createBucketWriter); CompactOperator<T> compacter = new CompactOperator<>(fsSupplier, readFactory, writerFactory); SingleOutputStreamOperator<PartitionCommitInfo> operatorStream = coordinatorStream .broadcast() .transform( "compact-operator", TypeInformation.of(PartitionCommitInfo.class), compacter); operatorStream.getTransformation().setParallelism(parallelism, parallelismConfigured); providerContext.generateUid("compact-operator").ifPresent(operatorStream::uid); return operatorStream; } /** * Create a sink from file writer. Decide whether to add the node to commit partitions according * to options. 
*/ public static DataStreamSink<?> sink( ProviderContext providerContext, DataStream<PartitionCommitInfo> writer, Path locationPath, ObjectIdentifier identifier, List<String> partitionKeys, TableMetaStoreFactory msFactory, FileSystemFactory fsFactory, Configuration options) { DataStream<?> stream = writer; if (partitionKeys.size() > 0 && options.contains(SINK_PARTITION_COMMIT_POLICY_KIND)) { PartitionCommitter committer = new PartitionCommitter( locationPath, identifier, partitionKeys, msFactory, fsFactory, options); SingleOutputStreamOperator<Void> committerStream = writer.transform( PartitionCommitter.class.getSimpleName(), Types.VOID, committer) .setParallelism(1) .setMaxParallelism(1); providerContext.generateUid("partition-committer").ifPresent(committerStream::uid); stream = committerStream; } DataStreamSink<?> discardingSink = stream.sinkTo(new DiscardingSink<>()).name("end").setParallelism(1); providerContext.generateUid("discarding-sink").ifPresent(discardingSink::uid); return discardingSink; } }
StreamingSink
java
redisson__redisson
redisson/src/main/java/org/redisson/RedissonReference.java
{ "start": 4201, "end": 4655 }
class ____ be located */ public Class<? extends Codec> getCodecType() throws ClassNotFoundException { if (codec != null) { return (Class<? extends Codec>) Class.forName(codec); } return null; } private boolean isAvailable(String type) { try { Class.forName(type); return true; } catch (ClassNotFoundException e) { return false; } } }
cannot
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/LangChain4jEmbeddingsEndpointBuilderFactory.java
{ "start": 1414, "end": 1568 }
interface ____ { /** * Builder for endpoint for the LangChain4j Embeddings component. */ public
LangChain4jEmbeddingsEndpointBuilderFactory
java
apache__avro
lang/java/ipc/src/main/java/org/apache/avro/ipc/DatagramServer.java
{ "start": 1235, "end": 2546 }
class ____ extends Thread implements Server { private static final Logger LOG = LoggerFactory.getLogger(DatagramServer.class); private final Responder responder; private final DatagramChannel channel; private final Transceiver transceiver; public DatagramServer(Responder responder, SocketAddress addr) throws IOException { String name = "DatagramServer on " + addr; this.responder = responder; this.channel = DatagramChannel.open(); channel.socket().bind(addr); this.transceiver = new DatagramTransceiver(channel); setName(name); setDaemon(true); } @Override public int getPort() { return channel.socket().getLocalPort(); } @Override public void run() { while (true) { try { transceiver.writeBuffers(responder.respond(transceiver.readBuffers())); } catch (ClosedChannelException e) { return; } catch (IOException e) { LOG.warn("unexpected error", e); throw new RuntimeException(e); } } } @Override public void close() { this.interrupt(); } public static void main(String[] arg) throws Exception { DatagramServer server = new DatagramServer(null, new InetSocketAddress(0)); server.start(); System.out.println("started"); server.join(); } }
DatagramServer
java
micronaut-projects__micronaut-core
core-processor/src/main/java/io/micronaut/inject/annotation/AnnotationMetadataGenUtils.java
{ "start": 8301, "end": 9485 }
class ____ expression function. * * @param declaringType The declaring type * @param loadTypeMethods The load type methods * @return The function */ @NonNull public static Function<String, ExpressionDef> createLoadClassValueExpressionFn(ClassTypeDef declaringType, Map<String, MethodDef> loadTypeMethods) { return typeName -> invokeLoadClassValueMethod(declaringType, loadTypeMethods, typeName); } /** * Creates a `getAnnotationMetadata` method. * * @param owningType The owning type * @param annotationMetadata The annotation metadata * @return The new method */ @NonNull public static MethodDef createGetAnnotationMetadataMethodDef(ClassTypeDef owningType, AnnotationMetadata annotationMetadata) { return MethodDef.builder("getAnnotationMetadata").returns(TYPE_ANNOTATION_METADATA) .addModifiers(Modifier.PUBLIC) .build((aThis, methodParameters) -> { // in order to save memory of a method doesn't have any annotations of its own but merely references
value
java
apache__kafka
connect/runtime/src/main/java/org/apache/kafka/connect/runtime/WorkerConnector.java
{ "start": 23207, "end": 23660 }
class ____ extends WorkerConnectorContext implements SourceConnectorContext { private final OffsetStorageReader offsetStorageReader; WorkerSourceConnectorContext(OffsetStorageReader offsetStorageReader) { this.offsetStorageReader = offsetStorageReader; } @Override public OffsetStorageReader offsetStorageReader() { return offsetStorageReader; } } }
WorkerSourceConnectorContext
java
apache__dubbo
dubbo-common/src/test/java/org/apache/dubbo/common/beanutil/JavaBeanSerializeUtilTest.java
{ "start": 16182, "end": 28426 }
class ____ { public String gender; public int age; String toy; Parent parent; private String name; public String getName() { return name; } public void setName(String name) { this.name = name; } public int getAge() { return age; } public void setAge(int age) { this.age = age; } public String getToy() { return toy; } public void setToy(String toy) { this.toy = toy; } public Parent getParent() { return parent; } public void setParent(Parent parent) { this.parent = parent; } } @Test void testBeanSerialize() { Bean bean = new Bean(); bean.setDate(new Date()); bean.setStatus(PersonStatus.ENABLED); bean.setType(Bean.class); bean.setArray(new Phone[] {}); Collection<Phone> collection = new ArrayList<Phone>(); bean.setCollection(collection); Phone phone = new Phone(); collection.add(phone); Map<String, FullAddress> map = new HashMap<String, FullAddress>(); FullAddress address = new FullAddress(); map.put("first", address); bean.setAddresses(map); JavaBeanDescriptor descriptor = JavaBeanSerializeUtil.serialize(bean, JavaBeanAccessor.METHOD); Assertions.assertTrue(descriptor.isBeanType()); assertEqualsPrimitive(bean.getDate(), descriptor.getProperty("date")); assertEqualsEnum(bean.getStatus(), descriptor.getProperty("status")); Assertions.assertTrue(((JavaBeanDescriptor) descriptor.getProperty("type")).isClassType()); Assertions.assertEquals( Bean.class.getName(), ((JavaBeanDescriptor) descriptor.getProperty("type")).getClassNameProperty()); Assertions.assertTrue(((JavaBeanDescriptor) descriptor.getProperty("array")).isArrayType()); Assertions.assertEquals(0, ((JavaBeanDescriptor) descriptor.getProperty("array")).propertySize()); JavaBeanDescriptor property = (JavaBeanDescriptor) descriptor.getProperty("collection"); Assertions.assertTrue(property.isCollectionType()); Assertions.assertEquals(1, property.propertySize()); property = (JavaBeanDescriptor) property.getProperty(0); Assertions.assertTrue(property.isBeanType()); Assertions.assertEquals(Phone.class.getName(), 
property.getClassName()); Assertions.assertEquals(0, property.propertySize()); property = (JavaBeanDescriptor) descriptor.getProperty("addresses"); Assertions.assertTrue(property.isMapType()); Assertions.assertEquals(bean.getAddresses().getClass().getName(), property.getClassName()); Assertions.assertEquals(1, property.propertySize()); Map.Entry<Object, Object> entry = property.iterator().next(); Assertions.assertTrue(((JavaBeanDescriptor) entry.getKey()).isPrimitiveType()); Assertions.assertEquals("first", ((JavaBeanDescriptor) entry.getKey()).getPrimitiveProperty()); Assertions.assertTrue(((JavaBeanDescriptor) entry.getValue()).isBeanType()); Assertions.assertEquals(FullAddress.class.getName(), ((JavaBeanDescriptor) entry.getValue()).getClassName()); Assertions.assertEquals(0, ((JavaBeanDescriptor) entry.getValue()).propertySize()); } @Test void testDeserializeBean() { Bean bean = new Bean(); bean.setDate(new Date()); bean.setStatus(PersonStatus.ENABLED); bean.setType(Bean.class); bean.setArray(new Phone[] {}); Collection<Phone> collection = new ArrayList<Phone>(); bean.setCollection(collection); Phone phone = new Phone(); collection.add(phone); Map<String, FullAddress> map = new HashMap<String, FullAddress>(); FullAddress address = new FullAddress(); map.put("first", address); bean.setAddresses(map); JavaBeanDescriptor beanDescriptor = JavaBeanSerializeUtil.serialize(bean, JavaBeanAccessor.METHOD); Object deser = JavaBeanSerializeUtil.deserialize(beanDescriptor); Assertions.assertTrue(deser instanceof Bean); Bean deserBean = (Bean) deser; Assertions.assertEquals(bean.getDate(), deserBean.getDate()); Assertions.assertEquals(bean.getStatus(), deserBean.getStatus()); Assertions.assertEquals(bean.getType(), deserBean.getType()); Assertions.assertEquals( bean.getCollection().size(), deserBean.getCollection().size()); Assertions.assertEquals( bean.getCollection().iterator().next().getClass(), deserBean.getCollection().iterator().next().getClass()); 
Assertions.assertEquals( bean.getAddresses().size(), deserBean.getAddresses().size()); Assertions.assertEquals( bean.getAddresses().entrySet().iterator().next().getKey(), deserBean.getAddresses().entrySet().iterator().next().getKey()); Assertions.assertEquals( bean.getAddresses().entrySet().iterator().next().getValue().getClass(), deserBean.getAddresses().entrySet().iterator().next().getValue().getClass()); } @Test @SuppressWarnings("unchecked") public void testSerializeJavaBeanDescriptor() { JavaBeanDescriptor descriptor = new JavaBeanDescriptor(); JavaBeanDescriptor result = JavaBeanSerializeUtil.serialize(descriptor); Assertions.assertSame(descriptor, result); Map map = new HashMap(); map.put("first", descriptor); result = JavaBeanSerializeUtil.serialize(map); Assertions.assertTrue(result.isMapType()); Assertions.assertEquals(HashMap.class.getName(), result.getClassName()); Assertions.assertEquals(map.size(), result.propertySize()); Object object = result.iterator().next().getValue(); Assertions.assertTrue(object instanceof JavaBeanDescriptor); JavaBeanDescriptor actual = (JavaBeanDescriptor) object; Assertions.assertEquals(map.get("first"), actual); } static void assertEqualsEnum(Enum<?> expected, Object obj) { JavaBeanDescriptor descriptor = (JavaBeanDescriptor) obj; Assertions.assertTrue(descriptor.isEnumType()); Assertions.assertEquals(expected.getClass().getName(), descriptor.getClassName()); Assertions.assertEquals(expected.name(), descriptor.getEnumPropertyName()); } static void assertEqualsPrimitive(Object expected, Object obj) { if (expected == null) { return; } JavaBeanDescriptor descriptor = (JavaBeanDescriptor) obj; Assertions.assertTrue(descriptor.isPrimitiveType()); Assertions.assertEquals(expected, descriptor.getPrimitiveProperty()); } static void assertEqualsBigPerson(BigPerson person, Object obj) { JavaBeanDescriptor descriptor = (JavaBeanDescriptor) obj; Assertions.assertTrue(descriptor.isBeanType()); assertEqualsPrimitive(person.getPersonId(), 
descriptor.getProperty("personId")); assertEqualsPrimitive(person.getLoginName(), descriptor.getProperty("loginName")); assertEqualsEnum(person.getStatus(), descriptor.getProperty("status")); assertEqualsPrimitive(person.getEmail(), descriptor.getProperty("email")); assertEqualsPrimitive(person.getPersonName(), descriptor.getProperty("personName")); JavaBeanDescriptor infoProfile = (JavaBeanDescriptor) descriptor.getProperty("infoProfile"); Assertions.assertTrue(infoProfile.isBeanType()); JavaBeanDescriptor phones = (JavaBeanDescriptor) infoProfile.getProperty("phones"); Assertions.assertTrue(phones.isCollectionType()); assertEqualsPhone(person.getInfoProfile().getPhones().get(0), phones.getProperty(0)); assertEqualsPhone(person.getInfoProfile().getPhones().get(1), phones.getProperty(1)); assertEqualsPhone(person.getInfoProfile().getFax(), infoProfile.getProperty("fax")); assertEqualsFullAddress(person.getInfoProfile().getFullAddress(), infoProfile.getProperty("fullAddress")); assertEqualsPrimitive(person.getInfoProfile().getMobileNo(), infoProfile.getProperty("mobileNo")); assertEqualsPrimitive(person.getInfoProfile().getName(), infoProfile.getProperty("name")); assertEqualsPrimitive(person.getInfoProfile().getDepartment(), infoProfile.getProperty("department")); assertEqualsPrimitive(person.getInfoProfile().getJobTitle(), infoProfile.getProperty("jobTitle")); assertEqualsPrimitive(person.getInfoProfile().getHomepageUrl(), infoProfile.getProperty("homepageUrl")); assertEqualsPrimitive(person.getInfoProfile().isFemale(), infoProfile.getProperty("female")); assertEqualsPrimitive(person.getInfoProfile().isMale(), infoProfile.getProperty("male")); } static void assertEqualsPhone(Phone expected, Object obj) { JavaBeanDescriptor descriptor = (JavaBeanDescriptor) obj; Assertions.assertTrue(descriptor.isBeanType()); if (expected.getArea() != null) { assertEqualsPrimitive(expected.getArea(), descriptor.getProperty("area")); } if (expected.getCountry() != null) { 
assertEqualsPrimitive(expected.getCountry(), descriptor.getProperty("country")); } if (expected.getExtensionNumber() != null) { assertEqualsPrimitive(expected.getExtensionNumber(), descriptor.getProperty("extensionNumber")); } if (expected.getNumber() != null) { assertEqualsPrimitive(expected.getNumber(), descriptor.getProperty("number")); } } static void assertEqualsFullAddress(FullAddress expected, Object obj) { JavaBeanDescriptor descriptor = (JavaBeanDescriptor) obj; Assertions.assertTrue(descriptor.isBeanType()); if (expected.getCityId() != null) { assertEqualsPrimitive(expected.getCityId(), descriptor.getProperty("cityId")); } if (expected.getCityName() != null) { assertEqualsPrimitive(expected.getCityName(), descriptor.getProperty("cityName")); } if (expected.getCountryId() != null) { assertEqualsPrimitive(expected.getCountryId(), descriptor.getProperty("countryId")); } if (expected.getCountryName() != null) { assertEqualsPrimitive(expected.getCountryName(), descriptor.getProperty("countryName")); } if (expected.getProvinceName() != null) { assertEqualsPrimitive(expected.getProvinceName(), descriptor.getProperty("provinceName")); } if (expected.getStreetAddress() != null) { assertEqualsPrimitive(expected.getStreetAddress(), descriptor.getProperty("streetAddress")); } if (expected.getZipCode() != null) { assertEqualsPrimitive(expected.getZipCode(), descriptor.getProperty("zipCode")); } } static BigPerson createBigPerson() { BigPerson bigPerson; bigPerson = new BigPerson(); bigPerson.setPersonId("superman111"); bigPerson.setLoginName("superman"); bigPerson.setStatus(PersonStatus.ENABLED); bigPerson.setEmail("sm@1.com"); bigPerson.setPersonName("pname"); ArrayList<Phone> phones = new ArrayList<Phone>(); Phone phone1 = new Phone("86", "0571", "87654321", "001"); Phone phone2 = new Phone("86", "0571", "87654322", "002"); phones.add(phone1); phones.add(phone2); PersonInfo pi = new PersonInfo(); pi.setPhones(phones); Phone fax = new Phone("86", "0571", "87654321", 
null); pi.setFax(fax); FullAddress addr = new FullAddress("CN", "zj", "3480", "wensanlu", "315000"); pi.setFullAddress(addr); pi.setMobileNo("13584652131"); pi.setMale(true); pi.setDepartment("b2b"); pi.setHomepageUrl("www.capcom.com"); pi.setJobTitle("qa"); pi.setName("superman"); bigPerson.setInfoProfile(pi); return bigPerson; } }
Child
java
spring-projects__spring-framework
spring-aop/src/main/java/org/springframework/aop/framework/adapter/AdvisorAdapter.java
{ "start": 1328, "end": 2393 }
interface ____ { /** * Does this adapter understand this advice object? Is it valid to * invoke the {@code getInterceptors} method with an Advisor that * contains this advice as an argument? * @param advice an Advice such as a BeforeAdvice * @return whether this adapter understands the given advice object * @see #getInterceptor(org.springframework.aop.Advisor) * @see org.springframework.aop.BeforeAdvice */ boolean supportsAdvice(Advice advice); /** * Return an AOP Alliance MethodInterceptor exposing the behavior of * the given advice to an interception-based AOP framework. * <p>Don't worry about any Pointcut contained in the Advisor; * the AOP framework will take care of checking the pointcut. * @param advisor the Advisor. The supportsAdvice() method must have * returned true on this object * @return an AOP Alliance interceptor for this Advisor. There's * no need to cache instances for efficiency, as the AOP framework * caches advice chains. */ MethodInterceptor getInterceptor(Advisor advisor); }
AdvisorAdapter
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/StatementSwitchToExpressionSwitchTest.java
{ "start": 94448, "end": 94538 }
interface ____ { String v() default ""; } @
MyAnno
java
apache__flink
flink-tests/src/test/java/org/apache/flink/test/checkpointing/UdfStreamOperatorCheckpointingITCase.java
{ "start": 2149, "end": 5063 }
class ____ extends StreamFaultToleranceTestBase { private static final long NUM_INPUT = 500_000L; private static final int NUM_OUTPUT = 1_000; /** * Assembles a stream of a grouping field and some long data. Applies reduce functions on this * stream. */ @Override public void testProgram(StreamExecutionEnvironment env) { // base stream KeyedStream<Tuple2<Integer, Long>, Integer> stream = env.addSource(new StatefulMultipleSequence()).keyBy(x -> x.f0); stream // testing built-in aggregate .min(1) // failure generation .map(new OnceFailingIdentityMapFunction(NUM_INPUT)) .keyBy(x -> x.f0) .addSink(new MinEvictingQueueSink()); stream // testing UDF reducer .reduce( new ReduceFunction<Tuple2<Integer, Long>>() { @Override public Tuple2<Integer, Long> reduce( Tuple2<Integer, Long> value1, Tuple2<Integer, Long> value2) throws Exception { return Tuple2.of(value1.f0, value1.f1 + value2.f1); } }) .keyBy(x -> x.f0) .addSink(new SumEvictingQueueSink()); } @Override public void postSubmit() { // Note that these checks depend on the ordering of the input // Checking the result of the built-in aggregate for (int i = 0; i < PARALLELISM; i++) { for (Long value : MinEvictingQueueSink.queues[i]) { Assert.assertTrue("Value different from 1 found, was " + value + ".", value == 1); } } // Checking the result of the UDF reducer for (int i = 0; i < PARALLELISM; i++) { long prevCount = NUM_INPUT - NUM_OUTPUT; long sum = prevCount * (prevCount + 1) / 2; while (!SumEvictingQueueSink.queues[i].isEmpty()) { sum += ++prevCount; Long value = SumEvictingQueueSink.queues[i].remove(); Assert.assertTrue( "Unexpected reduce value " + value + " instead of " + sum + ".", value == sum); } } } // -------------------------------------------------------------------------------------------- // Custom Functions // -------------------------------------------------------------------------------------------- /** * Produces a sequence multiple times for each parallelism instance of downstream operators, * augmented 
by the designated parallel subtaskId. The source is not parallel to ensure order. */ private static
UdfStreamOperatorCheckpointingITCase
java
spring-projects__spring-boot
module/spring-boot-web-server/src/test/java/org/springframework/boot/web/server/servlet/context/WebServletHandlerTests.java
{ "start": 5923, "end": 6064 }
class ____ extends HttpServlet { } @WebServlet(value = { "alpha", "bravo" }, urlPatterns = { "alpha", "bravo" })
UrlPatternsFromValueServlet
java
google__error-prone
core/src/main/java/com/google/errorprone/bugpatterns/flogger/FloggerPerWithoutRateLimit.java
{ "start": 1602, "end": 2893 }
class ____ extends BugChecker implements MethodInvocationTreeMatcher { private static final Matcher<ExpressionTree> LOG_METHOD = instanceMethod().onDescendantOf("com.google.common.flogger.LoggingApi").named("log"); private static final Matcher<ExpressionTree> PER_METHOD = instanceMethod().onDescendantOf("com.google.common.flogger.LoggingApi").named("per"); private static final Matcher<ExpressionTree> RATE_LIMITING_METHOD = instanceMethod() .onDescendantOf("com.google.common.flogger.LoggingApi") .namedAnyOf("atMostEvery", "every", "onAverageEvery"); @Override public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) { if (!LOG_METHOD.matches(tree, state)) { return Description.NO_MATCH; } ExpressionTree perNode = null; for (ExpressionTree receiver = tree; receiver instanceof MethodInvocationTree; receiver = getReceiver(receiver)) { if (RATE_LIMITING_METHOD.matches(receiver, state)) { return Description.NO_MATCH; } if (PER_METHOD.matches(receiver, state)) { perNode = receiver; } } if (perNode != null) { return describeMatch(perNode); } return Description.NO_MATCH; } }
FloggerPerWithoutRateLimit
java
apache__camel
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ThymeleafEndpointBuilderFactory.java
{ "start": 20086, "end": 20418 }
class ____ extends AbstractEndpointBuilder implements ThymeleafEndpointBuilder, AdvancedThymeleafEndpointBuilder { public ThymeleafEndpointBuilderImpl(String path) { super(componentName, path); } } return new ThymeleafEndpointBuilderImpl(path); } }
ThymeleafEndpointBuilderImpl
java
google__guava
android/guava-tests/test/com/google/common/reflect/TypeTokenSubtypeTest.java
{ "start": 20380, "end": 20519 }
class ____<ProductT> extends Shop<ProductT> implements Comparator<ProductT>, ConsumerFacing<ProductT> {} } private static
Retailer
java
spring-projects__spring-framework
spring-test/src/test/java/org/springframework/test/context/TestContextAnnotationUtilsTests.java
{ "start": 12545, "end": 24034 }
class ____ { @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesWithNoAnnotationPresent() { assertThat(findAnnotationDescriptorForTypes(NonAnnotatedInterface.class, Transactional.class, Component.class)).isNull(); assertThat(findAnnotationDescriptorForTypes(NonAnnotatedClass.class, Transactional.class, Order.class)).isNull(); } @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesWithInheritedAnnotationOnClass() { // Note: @Transactional is inherited assertThat(findAnnotationDescriptorForTypes(InheritedAnnotationClass.class, Transactional.class).getRootDeclaringClass()).isEqualTo(InheritedAnnotationClass.class); assertThat(findAnnotationDescriptorForTypes(SubInheritedAnnotationClass.class, Transactional.class).getRootDeclaringClass()).isEqualTo(InheritedAnnotationClass.class); } @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesWithInheritedAnnotationOnInterface() { // Note: @Transactional is inherited Transactional rawAnnotation = InheritedAnnotationInterface.class.getAnnotation(Transactional.class); UntypedAnnotationDescriptor descriptor = findAnnotationDescriptorForTypes(InheritedAnnotationInterface.class, Transactional.class); assertThat(descriptor).isNotNull(); assertThat(descriptor.getRootDeclaringClass()).isEqualTo(InheritedAnnotationInterface.class); assertThat(descriptor.getDeclaringClass()).isEqualTo(InheritedAnnotationInterface.class); assertThat(descriptor.getAnnotation()).isEqualTo(rawAnnotation); descriptor = findAnnotationDescriptorForTypes(SubInheritedAnnotationInterface.class, Transactional.class); assertThat(descriptor).isNotNull(); assertThat(descriptor.getRootDeclaringClass()).isEqualTo(SubInheritedAnnotationInterface.class); assertThat(descriptor.getDeclaringClass()).isEqualTo(InheritedAnnotationInterface.class); assertThat(descriptor.getAnnotation()).isEqualTo(rawAnnotation); descriptor = findAnnotationDescriptorForTypes(SubSubInheritedAnnotationInterface.class, 
Transactional.class); assertThat(descriptor).isNotNull(); assertThat(descriptor.getRootDeclaringClass()).isEqualTo(SubSubInheritedAnnotationInterface.class); assertThat(descriptor.getDeclaringClass()).isEqualTo(InheritedAnnotationInterface.class); assertThat(descriptor.getAnnotation()).isEqualTo(rawAnnotation); } @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesForNonInheritedAnnotationOnClass() { // Note: @Order is not inherited. assertThat(findAnnotationDescriptorForTypes(NonInheritedAnnotationClass.class, Order.class).getRootDeclaringClass()).isEqualTo(NonInheritedAnnotationClass.class); assertThat(findAnnotationDescriptorForTypes(SubNonInheritedAnnotationClass.class, Order.class).getRootDeclaringClass()).isEqualTo(NonInheritedAnnotationClass.class); } @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesForNonInheritedAnnotationOnInterface() { // Note: @Order is not inherited. Order rawAnnotation = NonInheritedAnnotationInterface.class.getAnnotation(Order.class); UntypedAnnotationDescriptor descriptor = findAnnotationDescriptorForTypes(NonInheritedAnnotationInterface.class, Order.class); assertThat(descriptor).isNotNull(); assertThat(descriptor.getRootDeclaringClass()).isEqualTo(NonInheritedAnnotationInterface.class); assertThat(descriptor.getDeclaringClass()).isEqualTo(NonInheritedAnnotationInterface.class); assertThat(descriptor.getAnnotation()).isEqualTo(rawAnnotation); descriptor = findAnnotationDescriptorForTypes(SubNonInheritedAnnotationInterface.class, Order.class); assertThat(descriptor).isNotNull(); assertThat(descriptor.getRootDeclaringClass()).isEqualTo(SubNonInheritedAnnotationInterface.class); assertThat(descriptor.getDeclaringClass()).isEqualTo(NonInheritedAnnotationInterface.class); assertThat(descriptor.getAnnotation()).isEqualTo(rawAnnotation); } @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesWithLocalAndMetaComponentAnnotation() { Class<Component> annotationType = 
Component.class; UntypedAnnotationDescriptor descriptor = findAnnotationDescriptorForTypes( HasLocalAndMetaComponentAnnotation.class, Transactional.class, annotationType, Order.class); assertThat(descriptor.getRootDeclaringClass()).isEqualTo(HasLocalAndMetaComponentAnnotation.class); assertThat(descriptor.getAnnotationType()).isEqualTo(annotationType); } @Test void findAnnotationDescriptorForTypesWithMetaComponentAnnotation() { Class<?> startClass = HasMetaComponentAnnotation.class; assertAtComponentOnComposedAnnotationForMultipleCandidateTypes(startClass, "meta1", Meta1.class); } @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesWithMetaAnnotationWithDefaultAttributes() { Class<?> startClass = MetaConfigWithDefaultAttributesTestCase.class; Class<ContextConfiguration> annotationType = ContextConfiguration.class; UntypedAnnotationDescriptor descriptor = findAnnotationDescriptorForTypes(startClass, Service.class, ContextConfiguration.class, Order.class, Transactional.class); assertThat(descriptor).isNotNull(); assertThat(descriptor.getRootDeclaringClass()).isEqualTo(startClass); assertThat(descriptor.getAnnotationType()).isEqualTo(annotationType); assertThat(((ContextConfiguration) descriptor.getAnnotation()).value()).isEmpty(); assertThat(((ContextConfiguration) descriptor.getAnnotation()).classes()) .containsExactly(MetaConfig.DevConfig.class, MetaConfig.ProductionConfig.class); } @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesWithMetaAnnotationWithOverriddenAttributes() { Class<?> startClass = MetaConfigWithOverriddenAttributesTestCase.class; Class<ContextConfiguration> annotationType = ContextConfiguration.class; UntypedAnnotationDescriptor descriptor = findAnnotationDescriptorForTypes( startClass, Service.class, ContextConfiguration.class, Order.class, Transactional.class); assertThat(descriptor).isNotNull(); assertThat(descriptor.getRootDeclaringClass()).isEqualTo(startClass); 
assertThat(descriptor.getAnnotationType()).isEqualTo(annotationType); assertThat(((ContextConfiguration) descriptor.getAnnotation()).value()).isEmpty(); assertThat(((ContextConfiguration) descriptor.getAnnotation()).classes()) .containsExactly(TestContextAnnotationUtilsTests.class); } @Test void findAnnotationDescriptorForTypesForInterfaceWithMetaAnnotation() { Class<?> startClass = InterfaceWithMetaAnnotation.class; assertAtComponentOnComposedAnnotationForMultipleCandidateTypes(startClass, "meta1", Meta1.class); } @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesForClassWithMetaAnnotatedInterface() { Component rawAnnotation = AnnotationUtils.findAnnotation(ClassWithMetaAnnotatedInterface.class, Component.class); UntypedAnnotationDescriptor descriptor = findAnnotationDescriptorForTypes( ClassWithMetaAnnotatedInterface.class, Service.class, Component.class, Order.class, Transactional.class); assertThat(descriptor).isNotNull(); assertThat(descriptor.getRootDeclaringClass()).isEqualTo(ClassWithMetaAnnotatedInterface.class); assertThat(descriptor.getDeclaringClass()).isEqualTo(Meta1.class); assertThat(descriptor.getAnnotation()).isEqualTo(rawAnnotation); } @Test void findAnnotationDescriptorForTypesForClassWithLocalMetaAnnotationAndMetaAnnotatedInterface() { Class<?> startClass = ClassWithLocalMetaAnnotationAndMetaAnnotatedInterface.class; assertAtComponentOnComposedAnnotationForMultipleCandidateTypes(startClass, "meta2", Meta2.class); } @Test void findAnnotationDescriptorForTypesForSubClassWithLocalMetaAnnotationAndMetaAnnotatedInterface() { assertAtComponentOnComposedAnnotationForMultipleCandidateTypes( SubClassWithLocalMetaAnnotationAndMetaAnnotatedInterface.class, ClassWithLocalMetaAnnotationAndMetaAnnotatedInterface.class, "meta2", Meta2.class); } /** * @since 4.0.3 */ @Test void findAnnotationDescriptorForTypesOnMetaMetaAnnotatedClass() { Class<?> startClass = MetaMetaAnnotatedClass.class; 
assertAtComponentOnComposedAnnotationForMultipleCandidateTypes( startClass, startClass, Meta2.class, "meta2"); } /** * @since 4.0.3 */ @Test void findAnnotationDescriptorForTypesOnMetaMetaMetaAnnotatedClass() { Class<?> startClass = MetaMetaMetaAnnotatedClass.class; assertAtComponentOnComposedAnnotationForMultipleCandidateTypes( startClass, startClass, Meta2.class, "meta2"); } /** * @since 4.0.3 */ @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesOnAnnotatedClassWithMissingTargetMetaAnnotation() { // InheritedAnnotationClass is NOT annotated or meta-annotated with @Component, // @Service, or @Order, but it is annotated with @Transactional. UntypedAnnotationDescriptor descriptor = findAnnotationDescriptorForTypes( InheritedAnnotationClass.class, Service.class, Component.class, Order.class); assertThat(descriptor).as("Should not find @Component on InheritedAnnotationClass").isNull(); } /** * @since 4.0.3 */ @Test @SuppressWarnings("unchecked") void findAnnotationDescriptorForTypesOnMetaCycleAnnotatedClassWithMissingTargetMetaAnnotation() { UntypedAnnotationDescriptor descriptor = findAnnotationDescriptorForTypes( MetaCycleAnnotatedClass.class, Service.class, Component.class, Order.class); assertThat(descriptor).as("Should not find @Component on MetaCycleAnnotatedClass").isNull(); } private void assertAtComponentOnComposedAnnotationForMultipleCandidateTypes( Class<?> startClass, String name, Class<? extends Annotation> composedAnnotationType) { assertAtComponentOnComposedAnnotationForMultipleCandidateTypes( startClass, startClass, name, composedAnnotationType); } private void assertAtComponentOnComposedAnnotationForMultipleCandidateTypes(Class<?> startClass, Class<?> rootDeclaringClass, String name, Class<? 
extends Annotation> composedAnnotationType) { assertAtComponentOnComposedAnnotationForMultipleCandidateTypes( startClass, rootDeclaringClass, composedAnnotationType, name); } @SuppressWarnings("unchecked") private void assertAtComponentOnComposedAnnotationForMultipleCandidateTypes(Class<?> startClass, Class<?> rootDeclaringClass, Class<?> declaringClass, String name) { Class<Component> annotationType = Component.class; UntypedAnnotationDescriptor descriptor = findAnnotationDescriptorForTypes( startClass, Service.class, annotationType, Order.class, Transactional.class); assertThat(descriptor).as("UntypedAnnotationDescriptor should not be null").isNotNull(); assertThat(descriptor.getRootDeclaringClass()).as("rootDeclaringClass").isEqualTo(rootDeclaringClass); assertThat(descriptor.getDeclaringClass()).as("declaringClass").isEqualTo(declaringClass); assertThat(descriptor.getAnnotationType()).as("annotationType").isEqualTo(annotationType); assertThat(((Component) descriptor.getAnnotation()).value()).as("component name").isEqualTo(name); } } // ------------------------------------------------------------------------- @Component(value = "meta1") @Order @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @
FindAnnotationDescriptorForTypesTests
java
elastic__elasticsearch
test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java
{ "start": 30650, "end": 52400 }
class ____ { List<UpdateCheck> updateChecks = new ArrayList<>(); Map<String, ConflictCheck> conflictChecks = new HashMap<>(); /** * Register a check that a parameter can be updated, using the minimal mapping as a base * * @param update a field builder applied on top of the minimal mapping * @param check a check that the updated parameter has been applied to the FieldMapper */ public void registerUpdateCheck(CheckedConsumer<XContentBuilder, IOException> update, Consumer<FieldMapper> check) throws IOException { updateChecks.add(new UpdateCheck(update, check)); } /** * Register a check that a parameter can be updated * * @param init the initial mapping * @param update the updated mapping * @param check a check that the updated parameter has been applied to the FieldMapper */ public void registerUpdateCheck( CheckedConsumer<XContentBuilder, IOException> init, CheckedConsumer<XContentBuilder, IOException> update, Consumer<FieldMapper> check ) throws IOException { updateChecks.add(new UpdateCheck(init, update, check)); } /** * Register a check that a parameter update will cause a conflict, using the minimal mapping as a base * * @param param the parameter name, expected to appear in the error message * @param update a field builder applied on top of the minimal mapping */ public void registerConflictCheck(String param, CheckedConsumer<XContentBuilder, IOException> update) throws IOException { conflictChecks.put(param, new ConflictCheck(fieldMapping(MapperTestCase.this::minimalMapping), fieldMapping(b -> { minimalMapping(b); update.accept(b); }))); } /** * Register a check that a parameter update will cause a conflict * * @param param the parameter name, expected to appear in the error message * @param init the initial mapping * @param update the updated mapping */ public void registerConflictCheck(String param, XContentBuilder init, XContentBuilder update) { conflictChecks.put(param, new ConflictCheck(init, update)); } } protected abstract void 
registerParameters(ParameterChecker checker) throws IOException; public void testUpdates() throws IOException { ParameterChecker checker = new ParameterChecker(); registerParameters(checker); if (supportsIgnoreMalformed()) { checker.registerUpdateCheck(b -> b.field("ignore_malformed", true), m -> assertTrue(m.ignoreMalformed())); } else { MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping)); Exception e = expectThrows( MapperParsingException.class, "No conflict when setting parameter [ignore_malformed]", () -> merge(mapperService, fieldMapping(b -> { minimalMapping(b); b.field("ignore_malformed", true); })) ); assertThat(e.getMessage(), containsString("unknown parameter [ignore_malformed] on mapper [field]")); } for (UpdateCheck updateCheck : checker.updateChecks) { MapperService mapperService = createMapperService(updateCheck.init); merge(mapperService, updateCheck.update); FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field"); updateCheck.check.accept(mapper); // do it again to ensure that we don't get conflicts the second time merge(mapperService, updateCheck.update); mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("field"); updateCheck.check.accept(mapper); } for (String param : checker.conflictChecks.keySet()) { MapperService mapperService = createMapperService(checker.conflictChecks.get(param).init); // merging the same change is fine merge(mapperService, checker.conflictChecks.get(param).init); // merging the conflicting update should throw an exception Exception e = expectThrows( IllegalArgumentException.class, "No conflict when updating parameter [" + param + "]", () -> merge(mapperService, checker.conflictChecks.get(param).update) ); assertThat( e.getMessage(), anyOf( containsString("Cannot update parameter [" + param + "]"), containsString("different [" + param + "]"), containsString("[" + param + "] cannot be ") ) ); } assertParseMaximalWarnings(); } 
public final void testTextSearchInfoConsistency() throws IOException { MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping)); MappedFieldType fieldType = mapperService.fieldType("field"); if (fieldType.getTextSearchInfo() == TextSearchInfo.NONE) { expectThrows(IllegalArgumentException.class, () -> fieldType.termQuery(null, null)); } else { SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); assertNotNull(fieldType.termQuery(getSampleValueForQuery(), searchExecutionContext)); } assertSearchable(fieldType); assertParseMinimalWarnings(); } protected void assertSearchable(MappedFieldType fieldType) { assertEquals(fieldType.isSearchable(), fieldType.getTextSearchInfo() != TextSearchInfo.NONE); } /** * Asserts that fetching a single value from doc values and from the native * {@link MappedFieldType#valueFetcher} produce the same results. * <p> * Generally this method covers many many random cases but rarely. So if * it fails its generally a good idea to capture its randomized * parameters into a new method so we can be sure we consistently test * any unique and interesting failure case. See the tests for * {@link DateFieldMapper} for some examples. */ public final void testFetch() throws IOException { MapperService mapperService = randomFetchTestMapper(); try { MappedFieldType ft = mapperService.fieldType("field"); assertFetch(mapperService, "field", generateRandomInputValue(ft), randomFetchTestFormat()); } finally { assertParseMinimalWarnings(); } } /** * Asserts that fetching many values from doc values and from the native * {@link MappedFieldType#valueFetcher} produce the same results. * <p> * Generally this method covers many many random cases but rarely. So if * it fails its generally a good idea to capture its randomized * parameters into a new method so we can be sure we consistently test * any unique and interesting failure case. See the tests for * {@link DateFieldMapper} for some examples. 
*/ public final void testFetchMany() throws IOException { MapperService mapperService = randomFetchTestMapper(); try { MappedFieldType ft = mapperService.fieldType("field"); int count = between(2, 10); List<Object> values = new ArrayList<>(count); while (values.size() < count) { values.add(generateRandomInputValue(ft)); } assertFetchMany(mapperService, "field", values, randomFetchTestFormat(), count); } finally { assertParseMinimalWarnings(); } } protected final MapperService randomFetchTestMapper() throws IOException { return createMapperService(mapping(b -> { b.startObject("field"); randomFetchTestFieldConfig(b); b.endObject(); })); } /** * Field configuration for {@link #testFetch} and {@link #testFetchMany}. * Default implementation delegates to {@link #minimalMapping} but can * be overridden to randomize the field type and options. */ protected void randomFetchTestFieldConfig(XContentBuilder b) throws IOException { minimalMapping(b); } /** * A random format to use when tripping in {@link #testFetch} and * {@link #testFetchMany}. */ protected String randomFetchTestFormat() { return null; } /** * Test that dimension parameter is not updateable */ protected void registerDimensionChecks(ParameterChecker checker) throws IOException { // dimension cannot be updated checker.registerConflictCheck("time_series_dimension", b -> b.field("time_series_dimension", true)); checker.registerConflictCheck("time_series_dimension", b -> b.field("time_series_dimension", false)); checker.registerConflictCheck("time_series_dimension", fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", false); }), fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", true); })); checker.registerConflictCheck("time_series_dimension", fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", true); }), fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", false); })); } /** * Create a random {@code _source} value for this field. 
Must be compatible * with {@link XContentBuilder#value(Object)} and the field's parser. */ protected abstract Object generateRandomInputValue(MappedFieldType ft); /** * Assert that fetching many values using {@link MappedFieldType#valueFetcher} * produces the same values as fetching using doc values. */ protected void assertFetchMany(MapperService mapperService, String field, Object value, String format, int count) throws IOException { assertFetch(mapperService, field, value, format); } /** * Assert that fetching a value using {@link MappedFieldType#valueFetcher} * produces the same value as fetching using doc values. */ protected void assertFetch(MapperService mapperService, String field, Object value, String format) throws IOException { MappedFieldType ft = mapperService.fieldType(field); MappedFieldType.FielddataOperation fdt = MappedFieldType.FielddataOperation.SEARCH; SourceToParse source = source(b -> b.field(ft.name(), value)); ValueFetcher docValueFetcher = new DocValueFetcher( ft.docValueFormat(format, null), ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")) .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) ); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); when(searchExecutionContext.getIndexSettings()).thenReturn(mapperService.getIndexSettings()); when(searchExecutionContext.isSourceEnabled()).thenReturn(true); when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field)); when(searchExecutionContext.getForField(ft, fdt)).thenAnswer(inv -> fieldDataLookup(mapperService).apply(ft, () -> { throw new UnsupportedOperationException(); }, fdt)); ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format); ParsedDocument doc = mapperService.documentMapper().parse(source); withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> { Source s = SourceProvider.fromLookup(mapperService.mappingLookup(), null, 
mapperService.getMapperMetrics().sourceFieldMetrics()) .getSource(ir.leaves().get(0), 0); docValueFetcher.setNextReader(ir.leaves().get(0)); nativeFetcher.setNextReader(ir.leaves().get(0)); List<Object> fromDocValues = docValueFetcher.fetchValues(s, 0, new ArrayList<>()); List<Object> fromNative = nativeFetcher.fetchValues(s, 0, new ArrayList<>()); /* * The native fetcher uses byte, short, etc but doc values always * uses long or double. This difference is fine because on the outside * users can't see it. */ fromNative = fromNative.stream().map(o -> { if (o instanceof Integer || o instanceof Short || o instanceof Byte) { return ((Number) o).longValue(); } if (o instanceof Float) { return ((Float) o).doubleValue(); } return o; }).collect(toList()); if (dedupAfterFetch()) { fromNative = fromNative.stream().distinct().collect(Collectors.toList()); } /* * Doc values sort according to something appropriate to the field * and the native fetchers usually don't sort. We're ok with this * difference. But we have to convince the test we're ok with it. */ assertThat("fetching " + value, fromNative, containsInAnyOrder(fromDocValues.toArray())); }); } /** * A few field types (e.g. keyword fields) don't allow duplicate values, so in those cases we need to de-dup our expected values. * Field types where this is the case should overwrite this. The default is to not de-duplicate though. 
*/ protected boolean dedupAfterFetch() { return false; } /** * @return whether or not this field type supports access to its values from a SearchLookup */ protected boolean supportsSearchLookup() { return true; } /** * Checks that field data from this field produces the same values for query-time * scripts and for index-time scripts */ public final void testIndexTimeFieldData() throws IOException { assumeTrue("Field type does not support access via search lookup", supportsSearchLookup()); MapperService mapperService = createMapperService(fieldMapping(this::minimalMapping)); assertParseMinimalWarnings(); MappedFieldType fieldType = mapperService.fieldType("field"); if (fieldType.isAggregatable() == false) { return; // No field data available, so we ignore } SourceToParse source = source(this::writeField); ParsedDocument doc = mapperService.documentMapper().parse(source); withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), ir -> { LeafReaderContext ctx = ir.leaves().get(0); DocValuesScriptFieldFactory docValuesFieldSource = fieldType.fielddataBuilder(FieldDataContext.noRuntimeFields("test")) .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) .load(ctx) .getScriptFieldFactory("test"); docValuesFieldSource.setNextDocId(0); DocumentLeafReader reader = new DocumentLeafReader(doc.rootDoc(), Collections.emptyMap()); DocValuesScriptFieldFactory indexData = fieldType.fielddataBuilder(FieldDataContext.noRuntimeFields("test")) .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) .load(reader.getContext()) .getScriptFieldFactory("test"); indexData.setNextDocId(0); // compare index and search time fielddata assertThat(docValuesFieldSource.toScriptDocValues(), equalTo(indexData.toScriptDocValues())); }); } protected boolean supportsStoredFields() { return true; } protected void minimalStoreMapping(XContentBuilder b) throws IOException { minimalMapping(b); b.field("store", true); } /** * Checks that loading stored fields for 
this field produces the same set of values * for query time scripts and index time scripts */ public final void testIndexTimeStoredFieldsAccess() throws IOException { assumeTrue("Field type does not support stored fields", supportsStoredFields()); MapperService mapperService = createMapperService(fieldMapping(this::minimalStoreMapping)); assertParseMinimalWarnings(); MappedFieldType fieldType = mapperService.fieldType("field"); SourceToParse source = source(this::writeField); ParsedDocument doc = mapperService.documentMapper().parse(source); SearchLookup lookup = new SearchLookup( f -> fieldType, (f, s, t) -> { throw new UnsupportedOperationException(); }, (ctx, docid) -> Source.fromBytes(doc.source()) ); withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), ir -> { LeafReaderContext ctx = ir.leaves().get(0); LeafStoredFieldsLookup storedFields = lookup.getLeafSearchLookup(ctx).fields(); storedFields.setDocument(0); DocumentLeafReader reader = new DocumentLeafReader(doc.rootDoc(), Collections.emptyMap()); LeafStoredFieldsLookup indexStoredFields = lookup.getLeafSearchLookup(reader.getContext()).fields(); indexStoredFields.setDocument(0); // compare index and search time stored fields assertThat(storedFields.get("field").getValues(), equalTo(indexStoredFields.get("field").getValues())); }); } public final void testNullInput() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); if (allowsNullValues()) { ParsedDocument doc = mapper.parse(source(b -> b.nullField("field"))); assertThat(doc.docs().get(0).getFields("field"), empty()); assertThat(doc.docs().get(0).getFields("_field_names"), empty()); } else { expectThrows(DocumentParsingException.class, () -> mapper.parse(source(b -> b.nullField("field")))); } assertWarnings(getParseMinimalWarnings()); } protected boolean allowsNullValues() { return true; } public final void testMinimalIsInvalidInRoutingPath() throws IOException { MapperService mapper = 
createMapperService(fieldMapping(this::minimalMapping)); try { IndexSettings settings = createIndexSettings( IndexVersion.current(), Settings.builder() .put(IndexSettings.MODE.getKey(), "time_series") .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "field") .put(IndexSettings.TIME_SERIES_START_TIME.getKey(), "2021-04-28T00:00:00Z") .put(IndexSettings.TIME_SERIES_END_TIME.getKey(), "2021-04-29T00:00:00Z") .build() ); Exception e = expectThrows(IllegalArgumentException.class, () -> mapper.documentMapper().validate(settings, false)); assertThat(e.getMessage(), equalTo(minimalIsInvalidRoutingPathErrorMessage(mapper.mappingLookup().getMapper("field")))); } finally { assertParseMinimalWarnings(); } } protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { if (mapper instanceof FieldMapper fieldMapper && fieldMapper.fieldType().isDimension() == false) { return "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. [" + mapper.fullPath() + "] was not a dimension."; } return "All fields that match routing_path must be configured with [time_series_dimension: true] " + "or flattened fields with a list of dimensions in [time_series_dimensions] and " + "without the [script] parameter. 
[" + mapper.fullPath() + "] was [" + mapper.typeName() + "]."; } public record SyntheticSourceExample( CheckedConsumer<XContentBuilder, IOException> inputValue, CheckedConsumer<XContentBuilder, IOException> expectedForSyntheticSource, CheckedConsumer<XContentBuilder, IOException> mapping ) { public SyntheticSourceExample(Object inputValue, Object result, CheckedConsumer<XContentBuilder, IOException> mapping) { this(b -> b.value(inputValue), b -> b.value(result), mapping); } public void buildInput(XContentBuilder b) throws IOException { b.field("field"); inputValue.accept(b); } public void buildInputArray(XContentBuilder b, int elementCount) throws IOException { b.startArray("field"); for (int i = 0; i < elementCount; i++) { inputValue.accept(b); } b.endArray(); } private String expected() throws IOException { XContentBuilder b = JsonXContent.contentBuilder().startObject().field("field"); expectedForSyntheticSource.accept(b); return Strings.toString(b.endObject()); } } public record SyntheticSourceInvalidExample(Matcher<String> error, CheckedConsumer<XContentBuilder, IOException> mapping) {} public
ParameterChecker
java
google__auto
value/src/it/functional/src/test/java/com/google/auto/value/AutoBuilderTest.java
{ "start": 17396, "end": 17931 }
interface ____<E> { SingletonSetBuilder<E> setElement(E element); SingletonSet<E> build(); } static <E> SingletonSetBuilder<E> singletonSetBuilder() { return new AutoBuilder_AutoBuilderTest_SingletonSetBuilder<>(); } @Test public void genericClass() { ImmutableSet<String> expected = ImmutableSet.of("foo"); SingletonSetBuilder<String> builder = singletonSetBuilder(); Set<String> actual = builder.setElement("foo").build(); assertThat(actual).isEqualTo(expected); } static
SingletonSetBuilder
java
netty__netty
codec-http/src/main/java/io/netty/handler/codec/http/HttpMessage.java
{ "start": 881, "end": 1394 }
interface ____ extends HttpObject {
    /**
     * Returns the protocol version of this message.
     *
     * @return the HTTP protocol version
     * @deprecated Use {@link #protocolVersion()} instead.
     */
    @Deprecated
    HttpVersion getProtocolVersion();

    /**
     * Returns the protocol version of this {@link HttpMessage}.
     *
     * @return the HTTP protocol version
     */
    HttpVersion protocolVersion();

    /**
     * Sets the protocol version of this {@link HttpMessage}.
     *
     * @param version the HTTP protocol version to use
     * @return this message, to allow fluent call chaining
     */
    HttpMessage setProtocolVersion(HttpVersion version);

    /**
     * Returns the headers of this message.
     *
     * @return the header collection of this message
     */
    HttpHeaders headers();
}
HttpMessage
java
elastic__elasticsearch
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/AllLastLongByTimestampAggregatorFunction.java
{ "start": 1013, "end": 5647 }
class ____ implements AggregatorFunction { private static final List<IntermediateStateDesc> INTERMEDIATE_STATE_DESC = List.of( new IntermediateStateDesc("timestamps", ElementType.LONG), new IntermediateStateDesc("values", ElementType.LONG), new IntermediateStateDesc("seen", ElementType.BOOLEAN), new IntermediateStateDesc("hasValue", ElementType.BOOLEAN) ); private final DriverContext driverContext; private final AllLongLongState state; private final List<Integer> channels; public AllLastLongByTimestampAggregatorFunction(DriverContext driverContext, List<Integer> channels, AllLongLongState state) { this.driverContext = driverContext; this.channels = channels; this.state = state; } public static AllLastLongByTimestampAggregatorFunction create(DriverContext driverContext, List<Integer> channels) { return new AllLastLongByTimestampAggregatorFunction(driverContext, channels, AllLastLongByTimestampAggregator.initSingle(driverContext)); } public static List<IntermediateStateDesc> intermediateStateDesc() { return INTERMEDIATE_STATE_DESC; } @Override public int intermediateBlockCount() { return INTERMEDIATE_STATE_DESC.size(); } @Override public void addRawInput(Page page, BooleanVector mask) { if (mask.allFalse()) { // Entire page masked away } else if (mask.allTrue()) { addRawInputNotMasked(page); } else { addRawInputMasked(page, mask); } } private void addRawInputMasked(Page page, BooleanVector mask) { LongBlock valueBlock = page.getBlock(channels.get(0)); LongBlock timestampBlock = page.getBlock(channels.get(1)); addRawBlock(valueBlock, timestampBlock, mask); } private void addRawInputNotMasked(Page page) { LongBlock valueBlock = page.getBlock(channels.get(0)); LongBlock timestampBlock = page.getBlock(channels.get(1)); addRawBlock(valueBlock, timestampBlock); } private void addRawBlock(LongBlock valueBlock, LongBlock timestampBlock) { for (int p = 0; p < valueBlock.getPositionCount(); p++) { AllLastLongByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); 
} } private void addRawBlock(LongBlock valueBlock, LongBlock timestampBlock, BooleanVector mask) { for (int p = 0; p < valueBlock.getPositionCount(); p++) { if (mask.getBoolean(p) == false) { continue; } AllLastLongByTimestampAggregator.combine(state, p, valueBlock, timestampBlock); } } @Override public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); Block timestampsUncast = page.getBlock(channels.get(0)); if (timestampsUncast.areAllValuesNull()) { return; } LongVector timestamps = ((LongBlock) timestampsUncast).asVector(); assert timestamps.getPositionCount() == 1; Block valuesUncast = page.getBlock(channels.get(1)); if (valuesUncast.areAllValuesNull()) { return; } LongVector values = ((LongBlock) valuesUncast).asVector(); assert values.getPositionCount() == 1; Block seenUncast = page.getBlock(channels.get(2)); if (seenUncast.areAllValuesNull()) { return; } BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); assert seen.getPositionCount() == 1; Block hasValueUncast = page.getBlock(channels.get(3)); if (hasValueUncast.areAllValuesNull()) { return; } BooleanVector hasValue = ((BooleanBlock) hasValueUncast).asVector(); assert hasValue.getPositionCount() == 1; AllLastLongByTimestampAggregator.combineIntermediate(state, timestamps.getLong(0), values.getLong(0), seen.getBoolean(0), hasValue.getBoolean(0)); } @Override public void evaluateIntermediate(Block[] blocks, int offset, DriverContext driverContext) { state.toIntermediate(blocks, offset, driverContext); } @Override public void evaluateFinal(Block[] blocks, int offset, DriverContext driverContext) { if (state.seen() == false) { blocks[offset] = driverContext.blockFactory().newConstantNullBlock(1); return; } blocks[offset] = AllLastLongByTimestampAggregator.evaluateFinal(state, driverContext); } @Override public String toString() { StringBuilder sb = new StringBuilder(); 
sb.append(getClass().getSimpleName()).append("["); sb.append("channels=").append(channels); sb.append("]"); return sb.toString(); } @Override public void close() { state.close(); } }
AllLastLongByTimestampAggregatorFunction
java
apache__camel
core/camel-api/src/main/java/org/apache/camel/spi/ContextServicePlugin.java
{ "start": 2163, "end": 2985 }
interface ____ { /** * Called during CamelContext initialization to allow the plugin to configure or customize the context. * <p> * This method is invoked after the CamelContext has been created but before routes are started. Implementations * should perform any necessary setup operations such as registering beans, adding event notifiers, or configuring * global settings. * * @param camelContext the CamelContext being initialized, never {@code null} */ void load(CamelContext camelContext); /** * Called during CamelContext stop. Use it to free allocated resources. * * @param camelContext the CamelContext being uninitialized, never {@code null} */ default void unload(CamelContext camelContext) { // NO-OP } }
ContextServicePlugin
java
elastic__elasticsearch
test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/ErrorQueryBuilder.java
{ "start": 6028, "end": 8264 }
class ____ extends Query { private final IndexError indexError; private volatile boolean sleepCompleted; private final MatchAllDocsQuery matchAllQuery; ErrorQuery(IndexError error, SearchExecutionContext context) { this.indexError = error; this.sleepCompleted = false; this.matchAllQuery = new MatchAllDocsQuery(); if (error.getShardIds() != null) { boolean match = false; for (int shardId : error.getShardIds()) { if (context.getShardId() == shardId) { match = true; break; } } if (match == false) { return; } } final String header = "[" + context.index().getName() + "][" + context.getShardId() + "]"; if (error.getErrorType() == IndexError.ERROR_TYPE.WARNING) { HeaderWarning.addWarning(header + " " + error.getMessage()); } else if (error.getErrorType() == IndexError.ERROR_TYPE.EXCEPTION) { if (indexError.getStallTimeSeconds() > 0) { sleep(indexError.getStallTimeSeconds() * 1000L); } throw new RuntimeException(header + " " + error.getMessage()); } } @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { if (indexError.getStallTimeSeconds() > 0 && sleepCompleted == false) { sleep(indexError.getStallTimeSeconds() * 1000L); sleepCompleted = true; } return matchAllQuery.createWeight(searcher, scoreMode, boost); } @Override public String toString(String field) { return "ErrorQuery MatchAll *:*"; } @Override public boolean equals(Object o) { return sameClassAs(o); } @Override public int hashCode() { return classHash(); } @Override public void visit(QueryVisitor visitor) { matchAllQuery.visit(visitor); } } }
ErrorQuery
java
apache__camel
components/camel-aws/camel-aws2-textract/src/main/java/org/apache/camel/component/aws2/textract/Textract2Operations.java
{ "start": 861, "end": 1119 }
enum ____ { detectDocumentText, analyzeDocument, analyzeExpense, startDocumentTextDetection, startDocumentAnalysis, startExpenseAnalysis, getDocumentTextDetection, getDocumentAnalysis, getExpenseAnalysis }
Textract2Operations
java
apache__kafka
streams/src/test/java/org/apache/kafka/streams/processor/internals/ReadOnlyTaskTest.java
{ "start": 1717, "end": 7823 }
class ____ { private final List<String> readOnlyMethods = List.of( "needsInitializationOrRestoration", "inputPartitions", "changelogPartitions", "commitRequested", "commitNeeded", "isActive", "changelogOffsets", "state", "id", "store" ); private final List<String> objectMethods = List.of( "wait", "equals", "getClass", "hashCode", "notify", "notifyAll", "toString" ); final Task task = statelessTask(new TaskId(1, 0)).build(); @Test public void shouldDelegateNeedsInitializationOrRestoration() { final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task); readOnlyTask.needsInitializationOrRestoration(); verify(task).needsInitializationOrRestoration(); } @Test public void shouldDelegateId() { final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task); readOnlyTask.id(); verify(task).id(); } @Test public void shouldDelegateIsActive() { final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task); readOnlyTask.isActive(); verify(task).isActive(); } @Test public void shouldDelegateInputPartitions() { final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task); readOnlyTask.inputPartitions(); verify(task).inputPartitions(); } @Test public void shouldDelegateChangelogPartitions() { final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task); readOnlyTask.changelogPartitions(); verify(task).changelogPartitions(); } @Test public void shouldDelegateCommitRequested() { final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task); readOnlyTask.commitRequested(); verify(task).commitRequested(); } @Test public void shouldDelegateState() { final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task); readOnlyTask.state(); verify(task).state(); } @Test public void shouldDelegateCommitNeededIfStandby() { final StandbyTask standbyTask = standbyTask(new TaskId(1, 0), Set.of(new TopicPartition("topic", 0))).build(); final ReadOnlyTask readOnlyTask = new ReadOnlyTask(standbyTask); readOnlyTask.commitNeeded(); verify(standbyTask).commitNeeded(); } @Test public void 
shouldThrowUnsupportedOperationExceptionForCommitNeededIfActive() { final StreamTask statefulTask = statefulTask(new TaskId(1, 0), Set.of(new TopicPartition("topic", 0))).build(); final ReadOnlyTask readOnlyTask = new ReadOnlyTask(statefulTask); final Exception exception = assertThrows(UnsupportedOperationException.class, readOnlyTask::commitNeeded); assertEquals("This task is read-only", exception.getMessage()); } @Test public void shouldThrowUnsupportedOperationExceptionForForbiddenMethods() { final ReadOnlyTask readOnlyTask = new ReadOnlyTask(task); for (final Method method : ReadOnlyTask.class.getMethods()) { final String methodName = method.getName(); if (!readOnlyMethods.contains(methodName) && !objectMethods.contains(methodName)) { shouldThrowUnsupportedOperationException(readOnlyTask, method); } } } private void shouldThrowUnsupportedOperationException(final ReadOnlyTask readOnlyTask, final Method method) { final Exception exception = assertThrows( UnsupportedOperationException.class, () -> { try { method.invoke(readOnlyTask, getParameters(method.getParameterTypes())); } catch (final InvocationTargetException invocationTargetException) { throw invocationTargetException.getCause(); } }, "Something unexpected happened during invocation of method '" + method.getName() + "'!" 
); assertEquals("This task is read-only", exception.getMessage()); } private Object[] getParameters(final Class<?>[] parameterTypes) throws Exception { final Object[] parameters = new Object[parameterTypes.length]; for (int i = 0; i < parameterTypes.length; ++i) { switch (parameterTypes[i].getName()) { case "boolean": parameters[i] = true; break; case "long": parameters[i] = 0; break; case "java.util.Set": parameters[i] = Collections.emptySet(); break; case "java.util.Collection": parameters[i] = Collections.emptySet(); break; case "java.util.Map": parameters[i] = Collections.emptyMap(); break; case "org.apache.kafka.common.TopicPartition": parameters[i] = new TopicPartition("topic", 0); break; case "org.apache.kafka.clients.consumer.OffsetAndMetadata": parameters[i] = new OffsetAndMetadata(0, Optional.empty(), ""); break; case "java.lang.Exception": parameters[i] = new IllegalStateException(); break; case "java.util.function.Consumer": parameters[i] = (Consumer) ignored -> { }; break; case "java.lang.Iterable": parameters[i] = Collections.emptySet(); break; case "org.apache.kafka.common.utils.Time": parameters[i] = Time.SYSTEM; break; default: parameters[i] = parameterTypes[i].getConstructor().newInstance(); } } return parameters; } }
ReadOnlyTaskTest
java
apache__flink
flink-metrics/flink-metrics-core/src/test/java/org/apache/flink/metrics/util/TestMetricGroup.java
{ "start": 1388, "end": 4120 }
class ____ implements MetricGroup, LogicalScopeProvider { private final String[] scopeComponents; private final Map<String, String> variables; private final BiFunction<String, Optional<CharacterFilter>, String> metricIdentifierFunction; private final BiFunction<CharacterFilter, Optional<Character>, String> logicalScopeFunction; public TestMetricGroup( String[] scopeComponents, Map<String, String> variables, BiFunction<String, Optional<CharacterFilter>, String> metricIdentifierFunction, BiFunction<CharacterFilter, Optional<Character>, String> logicalScopeFunction) { this.scopeComponents = scopeComponents; this.variables = variables; this.metricIdentifierFunction = metricIdentifierFunction; this.logicalScopeFunction = logicalScopeFunction; } public static TestMetricGroupBuilder newBuilder() { return new TestMetricGroupBuilder(); } @Override public Counter counter(String name) { return new SimpleCounter(); } @Override public <C extends Counter> C counter(String name, C counter) { return counter; } @Override public <T, G extends Gauge<T>> G gauge(String name, G gauge) { return gauge; } @Override public <H extends Histogram> H histogram(String name, H histogram) { return histogram; } @Override public <M extends Meter> M meter(String name, M meter) { return meter; } @Override public MetricGroup addGroup(String name) { return this; } @Override public MetricGroup addGroup(String key, String value) { return this; } @Override public String[] getScopeComponents() { return scopeComponents; } @Override public Map<String, String> getAllVariables() { return variables; } @Override public String getMetricIdentifier(String metricName) { return metricIdentifierFunction.apply(metricName, Optional.empty()); } @Override public String getMetricIdentifier(String metricName, CharacterFilter filter) { return metricIdentifierFunction.apply(metricName, Optional.of(filter)); } @Override public String getLogicalScope(CharacterFilter filter) { return logicalScopeFunction.apply(filter, 
Optional.empty()); } @Override public String getLogicalScope(CharacterFilter filter, char delimiter) { return logicalScopeFunction.apply(filter, Optional.of(delimiter)); } @Override public MetricGroup getWrappedMetricGroup() { return this; } /** Builder for {@link TestMetricGroup}. */ public static final
TestMetricGroup
java
apache__hadoop
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryFileManager.java
{ "start": 6022, "end": 7222 }
class ____ { private ConcurrentSkipListMap<JobId, HistoryFileInfo> cache; private AtomicInteger mapSize; JobIdHistoryFileInfoMap() { cache = new ConcurrentSkipListMap<JobId, HistoryFileInfo>(); mapSize = new AtomicInteger(); } public HistoryFileInfo putIfAbsent(JobId key, HistoryFileInfo value) { HistoryFileInfo ret = cache.putIfAbsent(key, value); if (ret == null) { mapSize.incrementAndGet(); } return ret; } public HistoryFileInfo remove(JobId key) { HistoryFileInfo ret = cache.remove(key); if (ret != null) { mapSize.decrementAndGet(); } return ret; } /** * Returns the recorded size of the internal map. Note that this could be out * of sync with the actual size of the map * @return "recorded" size */ public int size() { return mapSize.get(); } public HistoryFileInfo get(JobId key) { return cache.get(key); } public NavigableSet<JobId> navigableKeySet() { return cache.navigableKeySet(); } public Collection<HistoryFileInfo> values() { return cache.values(); } } static
JobIdHistoryFileInfoMap
java
google__error-prone
core/src/test/java/com/google/errorprone/matchers/AnnotationHasArgumentWithValueTest.java
{ "start": 1113, "end": 1289 }
class ____ extends CompilerBasedAbstractTest { @Before public void setUp() { writeFile( "Thing.java", """ public @
AnnotationHasArgumentWithValueTest
java
spring-projects__spring-boot
loader/spring-boot-loader/src/test/java/org/springframework/boot/loader/launch/LauncherTests.java
{ "start": 3917, "end": 4191 }
class ____ { static void main(String[] args) { System.out.println("Launched static void main(String[] args)"); } static void main() { System.out.println("Launched static void main()"); } } } /** * Jar Mode tests. */ @Nested
MultipleMainMethods
java
google__guice
core/test/com/google/inject/CircularDependencyTest.java
{ "start": 13531, "end": 16223 }
class ____ implements G { private final F f; @Inject RealG(F f) { this.f = f; } @Override public F f() { return f; } @Override public String toString() { return "G"; } } /** * Tests that ProviderInternalFactory can detect circular dependencies before it gets to * Scopes.SINGLETON. This is especially important because the failure in Scopes.SINGLETON doesn't * have enough context to provide a decent error message. */ public void testCircularDependenciesDetectedEarlyWhenDependenciesHaveDifferentTypes() { Injector injector = Guice.createInjector( new AbstractModule() { @Override protected void configure() { bind(Number.class).to(Integer.class); } @Provides @Singleton Integer provideInteger(List<Object> list) { return 2; } @Provides List<Object> provideList(Integer integer) { return new ArrayList<>(); } }); try { injector.getInstance(Number.class); fail(); } catch (ProvisionException expected) { assertContains( expected.getMessage(), "Tried proxying Integer to support a circular dependency, ", "but it is not an interface."); } } public void testPrivateModulesDontTriggerCircularErrorsInProviders() { Injector injector = Guice.createInjector( new AbstractModule() { @Override protected void configure() { install( new PrivateModule() { @Override protected void configure() { bind(Foo.class); expose(Foo.class); } @Provides String provideString(Bar bar) { return new String("private 1, " + bar.string); } }); install( new PrivateModule() { @Override protected void configure() { bind(Bar.class); expose(Bar.class); } @Provides String provideString() { return new String("private 2"); } }); } }); Foo foo = injector.getInstance(Foo.class); assertEquals("private 1, private 2", foo.string); } static
RealG
java
spring-projects__spring-boot
module/spring-boot-transaction/src/main/java/org/springframework/boot/transaction/autoconfigure/TransactionProperties.java
{ "start": 1242, "end": 2436 }
class ____ implements TransactionManagerCustomizer<AbstractPlatformTransactionManager> { /** * Default transaction timeout. If a duration suffix is not specified, seconds will be * used. */ @DurationUnit(ChronoUnit.SECONDS) private @Nullable Duration defaultTimeout; /** * Whether to roll back on commit failures. */ private @Nullable Boolean rollbackOnCommitFailure; public @Nullable Duration getDefaultTimeout() { return this.defaultTimeout; } public void setDefaultTimeout(@Nullable Duration defaultTimeout) { this.defaultTimeout = defaultTimeout; } public @Nullable Boolean getRollbackOnCommitFailure() { return this.rollbackOnCommitFailure; } public void setRollbackOnCommitFailure(@Nullable Boolean rollbackOnCommitFailure) { this.rollbackOnCommitFailure = rollbackOnCommitFailure; } @Override public void customize(AbstractPlatformTransactionManager transactionManager) { if (this.defaultTimeout != null) { transactionManager.setDefaultTimeout((int) this.defaultTimeout.getSeconds()); } if (this.rollbackOnCommitFailure != null) { transactionManager.setRollbackOnCommitFailure(this.rollbackOnCommitFailure); } } }
TransactionProperties
java
apache__camel
components/camel-servicenow/camel-servicenow-component/src/test/java/org/apache/camel/component/servicenow/model/IncidentWithParms.java
{ "start": 1352, "end": 2000 }
class ____ { @JsonProperty("sys_id") private String id; @JsonProperty("number") private String number; @JsonProperty("caller_id") private String callerId; public IncidentWithParms() { } public String getId() { return id; } public void setId(String id) { this.id = id; } public String getNumber() { return number; } public void setNumber(String number) { this.number = number; } public String getCallerId() { return callerId; } public void setCallerId(String callerId) { this.callerId = callerId; } }
IncidentWithParms
java
quarkusio__quarkus
extensions/resteasy-classic/resteasy-client/deployment/src/test/java/io/quarkus/restclient/configuration/MPRestClientsTest.java
{ "start": 228, "end": 665 }
class ____ extends AbstractRestClientsTest { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(EchoResource.class, EchoClient.class, EchoClientWithConfigKey.class, ShortNameEchoClient.class)) .withConfigurationResource("mp-restclients-test-application.properties"); }
MPRestClientsTest
java
apache__kafka
streams/test-utils/src/main/java/org/apache/kafka/streams/processor/api/MockProcessorContext.java
{ "start": 3165, "end": 4115 }
class ____<KForward, VForward> implements ProcessorContext<KForward, VForward>, RecordCollector.Supplier { // Immutable fields ================================================ private final StreamsMetricsImpl metrics; private final TaskId taskId; private final StreamsConfig config; private final File stateDir; // settable record metadata ================================================ private MockRecordMetadata recordMetadata; private Long currentSystemTimeMs; private Long currentStreamTimeMs; // mocks ================================================ private final Map<String, StateStore> stateStores = new HashMap<>(); private final List<CapturedPunctuator> punctuators = new LinkedList<>(); private final List<CapturedForward<? extends KForward, ? extends VForward>> capturedForwards = new LinkedList<>(); private boolean committed = false; private static final
MockProcessorContext
java
elastic__elasticsearch
x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/FileSettingsRoleMappingsRestartIT.java
{ "start": 1912, "end": 13875 }
class ____ extends SecurityIntegTestCase { private static final int MAX_WAIT_TIME_SECONDS = 20; private final AtomicLong versionCounter = new AtomicLong(1); @Before public void resetVersion() { versionCounter.set(1); } private static final String testJSONOnlyRoleMappings = """ { "metadata": { "version": "%s", "compatibility": "8.4.0" }, "state": { "role_mappings": { "everyone_kibana_alone": { "enabled": true, "roles": [ "kibana_user" ], "rules": { "field": { "username": "*" } }, "metadata": { "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo": "something" } }, "everyone_fleet_alone": { "enabled": false, "roles": [ "fleet_user" ], "rules": { "field": { "username": "*" } }, "metadata": { "uuid" : "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo": "something_else" } } } } }"""; private static final String testJSONOnlyUpdatedRoleMappings = """ { "metadata": { "version": "%s", "compatibility": "8.4.0" }, "state": { "role_mappings": { "everyone_kibana_together": { "enabled": true, "roles": [ "kibana_user", "kibana_admin" ], "rules": { "field": { "username": "*" } }, "metadata": { "uuid" : "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo": "something" } } } } }"""; private static final String emptyJSON = """ { "metadata": { "version": "%s", "compatibility": "8.4.0" }, "state": { "cluster_settings": {}, "role_mappings": {} } }"""; public void testReservedStatePersistsOnRestart() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); final String masterNode = internalCluster().startMasterOnlyNode(); awaitMasterNode(); var savedClusterState = setupClusterStateListener(masterNode, "everyone_kibana_alone"); awaitFileSettingsWatcher(); logger.info("--> write some role mappings, no other file settings"); RoleMappingFileSettingsIT.writeJSONFile(masterNode, testJSONOnlyRoleMappings, logger, versionCounter.incrementAndGet()); assertRoleMappingsInClusterStateWithAwait( savedClusterState, new ExpressionRoleMapping( "everyone_kibana_alone", new FieldExpression("username", 
List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user"), List.of(), Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), true ), new ExpressionRoleMapping( "everyone_fleet_alone", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("fleet_user"), List.of(), Map.of( "uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else", METADATA_NAME_FIELD, "everyone_fleet_alone" ), false ) ); logger.info("--> restart master"); internalCluster().restartNode(masterNode); ensureGreen(); awaitFileSettingsWatcher(); assertRoleMappingsInClusterState( new ExpressionRoleMapping( "everyone_kibana_alone", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user"), List.of(), Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), true ), new ExpressionRoleMapping( "everyone_fleet_alone", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("fleet_user"), List.of(), Map.of( "uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else", METADATA_NAME_FIELD, "everyone_fleet_alone" ), false ) ); // now remove the role mappings via an empty settings file cleanupClusterStateAndAssertNoMappings(masterNode); // and restart the master to confirm the role mappings are all gone logger.info("--> restart master again"); internalCluster().restartNode(masterNode); ensureGreen(); // no role mappings assertRoleMappingsInClusterState(); } public void testFileSettingsReprocessedOnRestartWithoutVersionChange() throws Exception { internalCluster().setBootstrapMasterNodeIndex(0); final String masterNode = internalCluster().startMasterOnlyNode(); awaitMasterNode(); Tuple<CountDownLatch, AtomicLong> savedClusterState = setupClusterStateListener(masterNode, "everyone_kibana_alone"); awaitFileSettingsWatcher(); logger.info("--> 
write some role mappings, no other file settings"); RoleMappingFileSettingsIT.writeJSONFile(masterNode, testJSONOnlyRoleMappings, logger, versionCounter.incrementAndGet()); assertRoleMappingsInClusterStateWithAwait( savedClusterState, new ExpressionRoleMapping( "everyone_kibana_alone", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user"), List.of(), Map.of("uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_alone"), true ), new ExpressionRoleMapping( "everyone_fleet_alone", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("fleet_user"), List.of(), Map.of( "uuid", "b9a59ba9-6b92-4be3-bb8d-02bb270cb3a7", "_foo", "something_else", METADATA_NAME_FIELD, "everyone_fleet_alone" ), false ) ); // write without version increment and assert that change gets applied on restart RoleMappingFileSettingsIT.writeJSONFile(masterNode, testJSONOnlyUpdatedRoleMappings, logger, versionCounter.get()); logger.info("--> restart master"); internalCluster().restartNode(masterNode); ensureGreen(); awaitFileSettingsWatcher(); // Assert busy to give mappings time to update assertBusy( () -> assertRoleMappingsInClusterState( new ExpressionRoleMapping( "everyone_kibana_together", new FieldExpression("username", List.of(new FieldExpression.FieldValue("*"))), List.of("kibana_user", "kibana_admin"), List.of(), Map.of( "uuid", "b9a59ba9-6b92-4be2-bb8d-02bb270cb3a7", "_foo", "something", METADATA_NAME_FIELD, "everyone_kibana_together" ), true ) ), MAX_WAIT_TIME_SECONDS, TimeUnit.SECONDS ); cleanupClusterStateAndAssertNoMappings(masterNode); } private void assertRoleMappingsInClusterStateWithAwait( Tuple<CountDownLatch, AtomicLong> latchWithClusterStateVersion, ExpressionRoleMapping... 
expectedRoleMappings ) throws InterruptedException { boolean awaitSuccessful = latchWithClusterStateVersion.v1().await(MAX_WAIT_TIME_SECONDS, TimeUnit.SECONDS); assertTrue(awaitSuccessful); var clusterState = clusterAdmin().state( new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(latchWithClusterStateVersion.v2().get()) ).actionGet().getState(); assertRoleMappingsInClusterState(clusterState, expectedRoleMappings); } private void assertRoleMappingsInClusterState(ClusterState clusterState, ExpressionRoleMapping... expectedRoleMappings) { final var project = clusterState.metadata().getProject(ProjectId.DEFAULT); String[] expectedRoleMappingNames = Arrays.stream(expectedRoleMappings).map(ExpressionRoleMapping::getName).toArray(String[]::new); assertRoleMappingReservedMetadata(clusterState, expectedRoleMappingNames); var actualRoleMappings = new ArrayList<>(RoleMappingMetadata.getFromProject(project).getRoleMappings()); assertThat(actualRoleMappings, containsInAnyOrder(expectedRoleMappings)); } private void assertRoleMappingsInClusterState(ExpressionRoleMapping... 
expectedRoleMappings) { assertRoleMappingsInClusterState( clusterAdmin().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet().getState(), expectedRoleMappings ); } private void cleanupClusterStateAndAssertNoMappings(String masterNode) throws Exception { var savedClusterState = setupClusterStateListenerForCleanup(masterNode); awaitFileSettingsWatcher(); logger.info("--> remove the role mappings with an empty settings file"); RoleMappingFileSettingsIT.writeJSONFile(masterNode, emptyJSON, logger, versionCounter.incrementAndGet()); boolean awaitSuccessful = savedClusterState.v1().await(MAX_WAIT_TIME_SECONDS, TimeUnit.SECONDS); assertTrue(awaitSuccessful); // ensure cluster-state update got propagated to expected version var clusterState = clusterAdmin().state( new ClusterStateRequest(TEST_REQUEST_TIMEOUT).waitForMetadataVersion(savedClusterState.v2().get()) ).actionGet(); assertRoleMappingsInClusterState(clusterState.getState()); } private void assertRoleMappingReservedMetadata(ClusterState clusterState, String... names) { assertThat( clusterState.metadata() .reservedStateMetadata() .get(FileSettingsService.NAMESPACE) .handlers() .get(ReservedRoleMappingAction.NAME) .keys(), containsInAnyOrder(names) ); } private void awaitFileSettingsWatcher() throws Exception { final String masterNode = internalCluster().getMasterName(); FileSettingsService masterFileSettingsService = internalCluster().getInstance(FileSettingsService.class, masterNode); assertBusy(() -> assertTrue(masterFileSettingsService.watching())); } }
FileSettingsRoleMappingsRestartIT
java
elastic__elasticsearch
x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettingsTests.java
{ "start": 1183, "end": 8934 }
class ____ extends AbstractBWCWireSerializationTestCase<RateLimitSettings> { private static final TransportVersion INFERENCE_API_DISABLE_EIS_RATE_LIMITING = TransportVersion.fromName( "inference_api_disable_eis_rate_limiting" ); public static RateLimitSettings createRandom() { return new RateLimitSettings(randomLongBetween(1, 1000000)); } public void testThrows_WhenGiven0() { expectThrows(IllegalArgumentException.class, () -> new RateLimitSettings(0)); } public void testThrows_WhenGivenNegativeValue() { expectThrows(IllegalArgumentException.class, () -> new RateLimitSettings(-3)); } public void testThrows_WhenGiven0_WithTimeUnit() { expectThrows(IllegalArgumentException.class, () -> new RateLimitSettings(0, TimeUnit.MILLISECONDS)); } public void testThrows_WhenGivenNegativeValue_WithTimeUnit() { expectThrows(IllegalArgumentException.class, () -> new RateLimitSettings(-3, TimeUnit.MILLISECONDS)); } public void testOf() { var validation = new ValidationException(); Map<String, Object> settings = new HashMap<>( Map.of(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 100))) ); var res = RateLimitSettings.of(settings, new RateLimitSettings(1), validation, "test", ConfigurationParseContext.PERSISTENT); assertThat(res, is(new RateLimitSettings(100))); assertTrue(res.isEnabled()); assertTrue(validation.validationErrors().isEmpty()); } public void testOf_UsesDefaultValue_WhenRateLimit_IsAbsent() { var validation = new ValidationException(); Map<String, Object> settings = new HashMap<>( Map.of("abc", new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 100))) ); var res = RateLimitSettings.of(settings, new RateLimitSettings(1), validation, "test", ConfigurationParseContext.PERSISTENT); assertThat(res, is(new RateLimitSettings(1))); assertTrue(res.isEnabled()); assertTrue(validation.validationErrors().isEmpty()); } public void testOf_UsesDefaultValue_WhenRequestsPerMinute_IsAbsent() { var validation = new 
ValidationException(); Map<String, Object> settings = new HashMap<>(Map.of(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of("abc", 100)))); var res = RateLimitSettings.of(settings, new RateLimitSettings(1), validation, "test", ConfigurationParseContext.PERSISTENT); assertThat(res, is(new RateLimitSettings(1))); assertTrue(res.isEnabled()); assertTrue(validation.validationErrors().isEmpty()); } public void testOf_ThrowsException_WithUnknownField_InRequestContext() { var validation = new ValidationException(); Map<String, Object> settings = new HashMap<>(Map.of(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of("abc", 100)))); var exception = expectThrows( ElasticsearchStatusException.class, () -> RateLimitSettings.of(settings, new RateLimitSettings(1), validation, "test", ConfigurationParseContext.REQUEST) ); assertThat(exception.getMessage(), is("Configuration contains settings [{abc=100}] unknown to the [test] service")); } public void testToXContent() throws IOException { var settings = new RateLimitSettings(100); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject(); settings.toXContent(builder, null); builder.endObject(); String xContentResult = Strings.toString(builder); assertThat(xContentResult, is(""" {"rate_limit":{"requests_per_minute":100}}""")); } public void testToXContent_WhenDisabled() throws IOException { var settings = new RateLimitSettings(1, TimeUnit.MINUTES, false); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject(); settings.toXContent(builder, null); builder.endObject(); String xContentResult = Strings.toString(builder); assertThat(xContentResult, is(XContentHelper.stripWhitespace(""" { }"""))); } public void testRejectRateLimitFieldForRequestContext_DoesNotAddError_WhenRateLimitFieldDoesNotExist() { var mapWithoutRateLimit = new HashMap<String, Object>(Map.of("abc", 100)); var validation = new ValidationException(); 
RateLimitSettings.rejectRateLimitFieldForRequestContext( mapWithoutRateLimit, "scope", "service", TaskType.CHAT_COMPLETION, ConfigurationParseContext.REQUEST, validation ); assertTrue(validation.validationErrors().isEmpty()); } public void testRejectRateLimitFieldForRequestContext_DoesNotAddError_WhenRateLimitFieldDoesExist_PersistentContext() { var mapWithRateLimit = new HashMap<String, Object>( Map.of(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 100))) ); var validation = new ValidationException(); RateLimitSettings.rejectRateLimitFieldForRequestContext( mapWithRateLimit, "scope", "service", TaskType.CHAT_COMPLETION, ConfigurationParseContext.PERSISTENT, validation ); assertTrue(validation.validationErrors().isEmpty()); } public void testRejectRateLimitFieldForRequestContext_DoesAddError_WhenRateLimitFieldDoesExist() { var mapWithRateLimit = new HashMap<String, Object>( Map.of(RateLimitSettings.FIELD_NAME, new HashMap<>(Map.of(RateLimitSettings.REQUESTS_PER_MINUTE_FIELD, 100))) ); var validation = new ValidationException(); RateLimitSettings.rejectRateLimitFieldForRequestContext( mapWithRateLimit, "scope", "service", TaskType.CHAT_COMPLETION, ConfigurationParseContext.REQUEST, validation ); assertThat( validation.getMessage(), containsString("[scope] rate limit settings are not permitted for service [service] and task type [chat_completion]") ); } @Override protected Writeable.Reader<RateLimitSettings> instanceReader() { return RateLimitSettings::new; } @Override protected RateLimitSettings createTestInstance() { return createRandom(); } @Override protected RateLimitSettings mutateInstance(RateLimitSettings instance) throws IOException { var requestsPerTimeUnit = instance.requestsPerTimeUnit(); var timeUnit = instance.timeUnit(); var enabled = instance.isEnabled(); switch (randomInt(2)) { case 0 -> requestsPerTimeUnit = randomValueOtherThan(requestsPerTimeUnit, () -> randomLongBetween(1, 1000000)); case 1 -> 
timeUnit = randomValueOtherThan(timeUnit, () -> randomFrom(TimeUnit.values())); case 2 -> enabled = enabled == false; default -> throw new AssertionError("Illegal randomisation branch"); } return new RateLimitSettings(requestsPerTimeUnit, timeUnit, enabled); } @Override protected RateLimitSettings mutateInstanceForVersion(RateLimitSettings instance, TransportVersion version) { if (version.supports(INFERENCE_API_DISABLE_EIS_RATE_LIMITING) == false) { return new RateLimitSettings(instance.requestsPerTimeUnit(), instance.timeUnit(), true); } else { return instance; } } }
RateLimitSettingsTests
java
apache__maven
compat/maven-model/src/main/java/org/apache/maven/model/merge/ModelMerger.java
{ "start": 93101, "end": 93228 }
interface ____<T> { T merge(T u, T v); } /** * KeyComputer for Dependency */ private final
Remapping
java
apache__flink
flink-python/src/main/java/org/apache/flink/table/runtime/typeutils/serializers/python/DecimalDataSerializer.java
{ "start": 1578, "end": 4202 }
class ____ extends TypeSerializer<DecimalData> { private static final long serialVersionUID = 1L; private final int precision; private final int scale; public DecimalDataSerializer(int precision, int scale) { this.precision = precision; this.scale = scale; } @Override public boolean isImmutableType() { return false; } @Override public TypeSerializer<DecimalData> duplicate() { return new DecimalDataSerializer(precision, scale); } @Override public DecimalData createInstance() { return DecimalData.zero(precision, scale); } @Override public DecimalData copy(DecimalData from) { return from.copy(); } @Override public DecimalData copy(DecimalData from, DecimalData reuse) { return copy(from); } @Override public int getLength() { return -1; } @Override public void serialize(DecimalData record, DataOutputView target) throws IOException { byte[] bytes = StringUtf8Utils.encodeUTF8(record.toBigDecimal().toString()); target.writeInt(bytes.length); target.write(bytes); } @Override public DecimalData deserialize(DataInputView source) throws IOException { final int size = source.readInt(); byte[] bytes = new byte[size]; source.readFully(bytes); BigDecimal bigDecimal = new BigDecimal(StringUtf8Utils.decodeUTF8(bytes, 0, size)); return DecimalData.fromBigDecimal(bigDecimal, precision, scale); } @Override public DecimalData deserialize(DecimalData reuse, DataInputView source) throws IOException { return deserialize(source); } @Override public void copy(DataInputView source, DataOutputView target) throws IOException { serialize(deserialize(source), target); } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DecimalDataSerializer that = (DecimalDataSerializer) o; return precision == that.precision && scale == that.scale; } @Override public int hashCode() { int result = precision; result = 31 * result + scale; return result; } @Override public TypeSerializerSnapshot<DecimalData> 
snapshotConfiguration() { return new DecimalSerializerSnapshot(precision, scale); } /** {@link TypeSerializerSnapshot} for {@link DecimalDataSerializer}. */ public static final
DecimalDataSerializer
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/offload/ThreadOffloadInterceptorTest.java
{ "start": 1127, "end": 1352 }
class ____ { @Simple CompletableFuture<String> doSomething() { return CompletableFuture.completedFuture("hello"); } } @Simple @Priority(1) @Interceptor public static
MyBean
java
spring-projects__spring-security
config/src/test/java/org/springframework/security/config/annotation/web/configurers/CsrfConfigurerIgnoringRequestMatchersTests.java
{ "start": 6762, "end": 7099 }
class ____ { @Bean SecurityFilterChain filterChain(HttpSecurity http) throws Exception { // @formatter:off http .csrf((csrf) -> csrf .ignoringRequestMatchers("/no-csrf")); // @formatter:on return http.build(); } } @Configuration @EnableWebSecurity @EnableWebMvc static
IgnoringPathsAndMatchersPatternConfig
java
micronaut-projects__micronaut-core
http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/ErrorHandlerTest.java
{ "start": 14303, "end": 14618 }
class ____ implements ExceptionHandler<MyException, HttpResponse> { @Override public HttpResponse handle(HttpRequest request, MyException exception) { return HttpResponse.ok("Exception Handled") .contentType(MediaType.TEXT_PLAIN); } } static
MyErrorHandler
java
apache__camel
components/camel-kamelet/src/test/java/org/apache/camel/component/kamelet/KameletEipAggregateGroovyTest.java
{ "start": 2232, "end": 3088 }
class ____ { String agg(b1, b2) { b1 + ',' + b2 } }; new MyAgg()") // the groovy is evaluated as a script so must return an instance of the class .templateParameter("count") .from("kamelet:source") .aggregate(constant(true)) .completionSize("{{count}}") // use the groovy script bean for aggregation .aggregationStrategy("{{myAgg}}") .to("log:aggregate") .to("kamelet:sink") .end(); from("direct:start") .kamelet("my-aggregate?count=5") .to("log:info") .to("mock:result"); } }; } }
MyAgg
java
apache__flink
flink-table/flink-table-planner/src/test/java/org/apache/flink/connector/file/table/FileSystemStatisticsReportTest.java
{ "start": 2243, "end": 21641 }
class ____ extends StatisticsReportTestBase { @BeforeEach public void setup(@TempDir File file) throws Exception { super.setup(file); String filePath1 = createFileAndWriteData( file, "00-00.tmp", Arrays.asList("1,1,hi", "2,1,hello", "3,2,hello world")); String ddl1 = String.format( "CREATE TABLE NonPartTable (\n" + " a bigint,\n" + " b int,\n" + " c varchar\n" + ") with (\n" + " 'connector' = 'filesystem'," + " 'format' = 'testcsv'," + " 'path' = '%s')", filePath1); tEnv.executeSql(ddl1); File partitionDataPath = new File(file, "partitionData"); partitionDataPath.mkdirs(); writeData(new File(partitionDataPath, "b=1"), Arrays.asList("1,1,hi", "2,1,hello")); writeData(new File(partitionDataPath, "b=2"), Collections.singletonList("3,2,hello world")); writeData(new File(partitionDataPath, "b=3"), Collections.singletonList("4,3,hello")); String ddl2 = String.format( "CREATE TABLE PartTable (\n" + " a bigint,\n" + " b int,\n" + " c varchar\n" + ") partitioned by(b) with (\n" + " 'connector' = 'filesystem'," + " 'format' = 'testcsv'," + " 'path' = '%s')", partitionDataPath.toURI()); tEnv.executeSql(ddl2); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .createPartition( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "1")), new CatalogPartitionImpl(new HashMap<>(), ""), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .createPartition( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "2")), new CatalogPartitionImpl(new HashMap<>(), ""), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .createPartition( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "3")), new CatalogPartitionImpl(new HashMap<>(), ""), false); String filePath2 = createFileAndWriteData( file, "00-01.tmp", Arrays.asList("1,1,hi", 
"2,1,hello", "3,2,hello world")); String ddl3 = String.format( "CREATE TABLE DisableSourceReportTable (\n" + " a bigint,\n" + " b int,\n" + " c varchar\n" + ") with (\n" + " 'connector' = 'filesystem'," + " 'format' = 'testcsv'," + " 'source.report-statistics' = 'NONE'," + " 'path' = '%s')", filePath2); tEnv.executeSql(ddl3); String emptyPath = createFileAndWriteData(file, "00-02.tmp", Collections.emptyList()); String ddl4 = String.format( "CREATE TABLE emptyTable (\n" + " a bigint,\n" + " b int,\n" + " c varchar\n" + ") with (\n" + " 'connector' = 'filesystem'," + " 'format' = 'testcsv'," + " 'path' = '%s')", emptyPath); tEnv.executeSql(ddl4); } @Override protected String[] properties() { return new String[0]; } private String createFileAndWriteData(File path, String fileName, List<String> data) throws IOException { String file = path.getAbsolutePath() + "/" + fileName; Files.write(new File(file).toPath(), String.join("\n", data).getBytes()); return file; } private void writeData(File file, List<String> data) throws IOException { Files.write(file.toPath(), String.join("\n", data).getBytes()); } @Test public void testCatalogStatisticsExist() throws Exception { tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterTableStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "NonPartTable"), new CatalogTableStatistics(10L, 1, 100L, 100L), false); FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from NonPartTable"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(10)); } @Test public void testCatalogStatisticsDoNotExist() { FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from NonPartTable"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(3)); } @Test public void testDisableSourceReport() { FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from DisableSourceReportTable"); assertThat(statistic.getTableStats()).isEqualTo(TableStats.UNKNOWN); } @Test public void 
testFilterPushDownAndCatalogStatisticsExist() throws Exception { tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterTableStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "NonPartTable"), new CatalogTableStatistics(10L, 1, 100L, 100L), false); FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from NonPartTable where a > 10"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(10)); } @Test public void testFilterPushDownAndCatalogStatisticsDoNotExist() { FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from NonPartTable where a > 10"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(3)); } @Test public void testFilterPushDownAndReportStatisticsDisabled() { tEnv.getConfig() .set( OptimizerConfigOptions.TABLE_OPTIMIZER_SOURCE_REPORT_STATISTICS_ENABLED, false); FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from NonPartTable where a > 10"); assertThat(statistic.getTableStats()).isEqualTo(TableStats.UNKNOWN); } @Test public void testLimitPushDownAndCatalogStatisticsDoNotExist() { FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from NonPartTable limit 1"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(1)); } @Test public void testNoPartitionPushDownAndCatalogStatisticsExist() throws Exception { tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "1")), new CatalogTableStatistics(6L, 1, 100L, 100L), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "2")), new CatalogTableStatistics(3L, 1, 100L, 100L), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( 
new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "3")), new CatalogTableStatistics(3L, 1, 100L, 100L), false); FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from PartTable"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(12)); } @Test public void tesNoPartitionPushDownAndCatalogStatisticsPartialExist() throws Exception { tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "1")), new CatalogTableStatistics(6L, 1, 100L, 100L), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "2")), new CatalogTableStatistics(3L, 1, 100L, 100L), false); // For partition table 'PartTable', partition 'b=3' have no catalog statistics, so get // partition table stats from catalog will return TableStats.UNKNOWN. So we will recompute // stats from source. FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from PartTable"); // there are four rows in file system. 
assertThat(statistic.getTableStats()).isEqualTo(new TableStats(4)); } @Test public void testNoPartitionPushDownAndReportStatisticsDisabled() { tEnv.getConfig() .set( OptimizerConfigOptions.TABLE_OPTIMIZER_SOURCE_REPORT_STATISTICS_ENABLED, false); FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from PartTable"); assertThat(statistic.getTableStats()).isEqualTo(TableStats.UNKNOWN); } @Test public void testPartitionPushDownAndCatalogStatisticsExist() throws Exception { tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "1")), new CatalogTableStatistics(6L, 1, 100L, 100L), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "2")), new CatalogTableStatistics(3L, 1, 100L, 100L), false); FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from PartTable where b = 1"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(6)); } @Test public void testPartitionPushDownAndCatalogColumnStatisticsExist() throws Exception { // The purpose of this test case is to test the correctness of stats after partition push // down, and recompute partition table and column stats. For partition table, merged Ndv for // columns which are partition keys using sum instead of max (other columns using max). 
tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "1")), new CatalogTableStatistics(6L, 1, 100L, 100L), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "2")), new CatalogTableStatistics(3L, 1, 100L, 100L), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "3")), new CatalogTableStatistics(3L, 1, 100L, 100L), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionColumnStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "1")), createSinglePartitionColumnStats(), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionColumnStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "2")), createSinglePartitionColumnStats(), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionColumnStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "3")), createSinglePartitionColumnStats(), false); FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from PartTable where b < 3"); assertThat(statistic.getTableStats()) .isEqualTo(new TableStats(9, createMergedPartitionColumnStats())); } @Test public void testFilterPartitionPushDownPushDownAndCatalogStatisticsExist() throws Exception { tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) 
.alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "1")), new CatalogTableStatistics(6L, 1, 100L, 100L), false); tEnv.getCatalog(tEnv.getCurrentCatalog()) .orElseThrow(Exception::new) .alterPartitionStatistics( new ObjectPath(tEnv.getCurrentDatabase(), "PartTable"), new CatalogPartitionSpec(Collections.singletonMap("b", "2")), new CatalogTableStatistics(3L, 1, 100L, 100L), false); FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from PartTable where a > 10 and b = 1"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(6)); } @Test public void testFilterPartitionPushDownAndCatalogStatisticsDoNotExist() { FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from PartTable where a > 10 and b = 1"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(2)); } @Test public void testFilterPartitionPushDownAndReportStatisticsDisabled() { tEnv.getConfig() .set( OptimizerConfigOptions.TABLE_OPTIMIZER_SOURCE_REPORT_STATISTICS_ENABLED, false); FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from PartTable where a > 10 and b = 1"); assertThat(statistic.getTableStats()).isEqualTo(TableStats.UNKNOWN); } @Test public void testFileSystemSourceWithoutData() { FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from emptyTable"); assertThat(statistic.getTableStats()).isEqualTo(TableStats.UNKNOWN); } @Test public void testFileSystemSourceWithoutDataWithLimitPushDown() { // TODO for source support limit push down and query have limit condition, In // PushLimitIntoTableSourceScanRule will give stats a new rowCount value even if this table // have no data. 
FlinkStatistic statistic = getStatisticsFromOptimizedPlan("select * from emptyTable limit 1"); assertThat(statistic.getTableStats()).isEqualTo(new TableStats(1)); } private CatalogColumnStatistics createSinglePartitionColumnStats() { Map<String, CatalogColumnStatisticsDataBase> colStatsMap = new HashMap<>(); CatalogColumnStatisticsDataLong longColStats = new CatalogColumnStatisticsDataLong(1L, 10L, 5L, 5L); colStatsMap.put("a", longColStats); colStatsMap.put("b", longColStats); CatalogColumnStatisticsDataString stringColStats = new CatalogColumnStatisticsDataString(10L, 10D, 5L, 5L); colStatsMap.put("c", stringColStats); return new CatalogColumnStatistics(colStatsMap); } private Map<String, ColumnStats> createMergedPartitionColumnStats() { Map<String, CatalogColumnStatisticsDataBase> colStatsMap = new HashMap<>(); CatalogColumnStatisticsDataLong longColStats = new CatalogColumnStatisticsDataLong(1L, 10L, 5L, 10L); colStatsMap.put("a", longColStats); // Merged Ndv for columns which are partition keys using sum instead of max. CatalogColumnStatisticsDataLong longColStats2 = new CatalogColumnStatisticsDataLong(1L, 10L, 10L, 10L); colStatsMap.put("b", longColStats2); CatalogColumnStatisticsDataString stringColStats = new CatalogColumnStatisticsDataString(10L, 10D, 5L, 10L); colStatsMap.put("c", stringColStats); return CatalogTableStatisticsConverter.convertToColumnStatsMap(colStatsMap); } }
FileSystemStatisticsReportTest
java
apache__kafka
clients/src/main/java/org/apache/kafka/clients/consumer/internals/ConsumerCoordinator.java
{ "start": 5074, "end": 5161 }
class ____ the coordination process with the consumer coordinator. */ public final
manages
java
quarkusio__quarkus
extensions/resteasy-reactive/rest-jaxb/deployment/src/test/java/io/quarkus/resteasy/reactive/jaxb/deployment/test/MultipartTest.java
{ "start": 6119, "end": 6522 }
class ____ { private String name; private Integer age; public String getName() { return name; } public void setName(String name) { this.name = name; } public Integer getAge() { return age; } public void setAge(Integer age) { this.age = age; } } private static
Person
java
spring-projects__spring-boot
module/spring-boot-liquibase/src/main/java/org/springframework/boot/liquibase/autoconfigure/LiquibaseProperties.java
{ "start": 9411, "end": 9455 }
enum ____ * not used directly. */ public
is
java
elastic__elasticsearch
x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIpErrorTests.java
{ "start": 851, "end": 1496 }
class ____ extends ErrorsForCasesWithoutExamplesTestCase { @Override protected List<TestCaseSupplier> cases() { return Iterators.toList(Iterators.map(ToIpTests.parameters().iterator(), p -> (TestCaseSupplier) p[0])); } @Override protected Expression build(Source source, List<Expression> args) { return new ToIp(source, args.getFirst(), null); } @Override protected Matcher<String> expectedTypeErrorMatcher(List<Set<DataType>> validPerPosition, List<DataType> signature) { return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "ip or string")); } }
ToIpErrorTests
java
spring-projects__spring-boot
core/spring-boot/src/main/java/org/springframework/boot/web/servlet/support/ErrorPageFilterConfiguration.java
{ "start": 1027, "end": 1496 }
class ____ { @Bean ErrorPageFilter errorPageFilter() { return new ErrorPageFilter(); } @Bean FilterRegistrationBean<ErrorPageFilter> errorPageFilterRegistration(ErrorPageFilter filter) { FilterRegistrationBean<ErrorPageFilter> registration = new FilterRegistrationBean<>(filter); registration.setOrder(filter.getOrder()); registration.setDispatcherTypes(DispatcherType.REQUEST, DispatcherType.ASYNC); return registration; } }
ErrorPageFilterConfiguration
java
google__error-prone
core/src/test/java/com/google/errorprone/bugpatterns/checkreturnvalue/CheckReturnValueWellKnownLibrariesTest.java
{ "start": 10947, "end": 11192 }
class ____ { public int f() { return 42; } } """) .addSourceLines( "Test.java", """ import static org.junit.Assert.fail;
Foo
java
alibaba__druid
core/src/test/java/com/alibaba/druid/bvt/pool/DruidDataSourceTest3.java
{ "start": 441, "end": 2890 }
class ____ extends TestCase { private DruidDataSource dataSource; private volatile Exception error; private volatile Exception errorB; protected void setUp() throws Exception { dataSource = new DruidDataSource(); dataSource.setUrl("jdbc:mock:xxx"); dataSource.setTestOnBorrow(false); dataSource.setInitialSize(1); dataSource.getProxyFilters().add(new FilterAdapter() { public ConnectionProxy connection_connect(FilterChain chain, Properties info) throws SQLException { try { Thread.sleep(Long.MAX_VALUE); } catch (InterruptedException e) { // skip } return null; } }); } protected void tearDown() throws Exception { dataSource.close(); } public void test_error() throws Exception { final CountDownLatch startedLatch = new CountDownLatch(1); final CountDownLatch endLatch = new CountDownLatch(1); Thread threadA = new Thread("A") { public void run() { try { startedLatch.countDown(); dataSource.init(); } catch (SQLException e) { error = e; } finally { endLatch.countDown(); } } }; threadA.start(); startedLatch.await(); Thread.sleep(10); assertFalse(dataSource.isInited()); final CountDownLatch startedLatchB = new CountDownLatch(1); final CountDownLatch endLatchB = new CountDownLatch(1); Thread threadB = new Thread("B") { public void run() { try { startedLatchB.countDown(); dataSource.init(); } catch (SQLException e) { errorB = e; } finally { endLatchB.countDown(); } } }; threadB.start(); startedLatchB.await(); threadB.interrupt(); endLatchB.await(); assertNotNull(errorB); assertTrue(errorB.getCause() instanceof InterruptedException); threadA.interrupt(); endLatch.await(); endLatchB.await(); assertNotNull(error); assertEquals(1, dataSource.getCreateErrorCount()); } }
DruidDataSourceTest3
java
elastic__elasticsearch
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/extractor/ExtractedFields.java
{ "start": 906, "end": 4876 }
class ____ { private final List<ExtractedField> allFields; private final List<ExtractedField> docValueFields; private final List<ProcessedField> processedFields; private final String[] sourceFields; private final Map<String, Long> cardinalitiesForFieldsWithConstraints; public ExtractedFields( List<ExtractedField> allFields, List<ProcessedField> processedFields, Map<String, Long> cardinalitiesForFieldsWithConstraints ) { this.allFields = new ArrayList<>(allFields); this.docValueFields = filterFields(ExtractedField.Method.DOC_VALUE, allFields); this.sourceFields = filterFields(ExtractedField.Method.SOURCE, allFields).stream() .map(ExtractedField::getSearchField) .toArray(String[]::new); this.cardinalitiesForFieldsWithConstraints = Collections.unmodifiableMap(cardinalitiesForFieldsWithConstraints); this.processedFields = processedFields == null ? Collections.emptyList() : processedFields; } public List<ProcessedField> getProcessedFields() { return processedFields; } public List<ExtractedField> getAllFields() { return allFields; } public Set<String> getProcessedFieldInputs() { return processedFields.stream().map(ProcessedField::getInputFieldNames).flatMap(List::stream).collect(Collectors.toSet()); } public String[] getSourceFields() { return sourceFields; } public List<ExtractedField> getDocValueFields() { return docValueFields; } public Map<String, Long> getCardinalitiesForFieldsWithConstraints() { return cardinalitiesForFieldsWithConstraints; } public String[] extractOrganicFeatureNames() { Set<String> processedFieldInputs = getProcessedFieldInputs(); return allFields.stream() .map(ExtractedField::getName) .filter(f -> processedFieldInputs.contains(f) == false) .toArray(String[]::new); } public String[] extractProcessedFeatureNames() { return processedFields.stream().map(ProcessedField::getOutputFieldNames).flatMap(List::stream).toArray(String[]::new); } private static List<ExtractedField> filterFields(ExtractedField.Method method, List<ExtractedField> fields) { 
return fields.stream().filter(field -> field.getMethod() == method).collect(Collectors.toList()); } public static ExtractedFields build( Set<String> allFields, Set<String> scriptFields, Set<String> searchRuntimeFields, FieldCapabilitiesResponse fieldsCapabilities, Map<String, Long> cardinalitiesForFieldsWithConstraints, List<ProcessedField> processedFields ) { ExtractionMethodDetector extractionMethodDetector = new ExtractionMethodDetector( scriptFields, fieldsCapabilities, searchRuntimeFields ); return new ExtractedFields( allFields.stream().map(extractionMethodDetector::detect).collect(Collectors.toList()), processedFields, cardinalitiesForFieldsWithConstraints ); } public static ExtractedFields build( Set<String> allFields, Set<String> scriptFields, FieldCapabilitiesResponse fieldsCapabilities, Map<String, Long> cardinalitiesForFieldsWithConstraints, List<ProcessedField> processedFields ) { return build( allFields, scriptFields, Collections.emptySet(), fieldsCapabilities, cardinalitiesForFieldsWithConstraints, processedFields ); } public static TimeField newTimeField(String name, ExtractedField.Method method) { return new TimeField(name, method); } public static ExtractedField applyBooleanMapping(ExtractedField field) { return new BooleanMapper<>(field, 1, 0); } public static
ExtractedFields
java
eclipse-vertx__vert.x
vertx-core/src/main/java/io/vertx/core/impl/future/Eventually.java
{ "start": 702, "end": 1266 }
class ____<T, U> extends Operation<T> implements Completable<T> { private final Supplier<Future<U>> supplier; Eventually(ContextInternal context, Supplier<Future<U>> supplier) { super(context); this.supplier = supplier; } @Override public void complete(T result, Throwable failure) { FutureBase<U> future; try { future = (FutureBase<U>) supplier.get(); } catch (Throwable e) { completeInternal(null, failure); return; } future.addListener((ignore, err1) -> completeInternal(result, failure)); } }
Eventually
java
apache__hadoop
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java
{ "start": 17854, "end": 18884 }
class ____ extends AtMostOnceOp { private final String oldName; private final String newName; Rename2Op(DFSClient client, String oldName, String newName) { super("rename2", client); this.oldName = oldName; this.newName = newName; } @Override void prepare() throws Exception { final Path filePath = new Path(oldName); if (!dfs.exists(filePath)) { DFSTestUtil.createFile(dfs, filePath, BlockSize, DataNodes, 0); } } @Override void invoke() throws Exception { client.rename(oldName, newName, Rename.OVERWRITE); } @Override boolean checkNamenodeBeforeReturn() throws Exception { Path targetPath = new Path(newName); boolean renamed = dfs.exists(targetPath); for (int i = 0; i < CHECKTIMES && !renamed; i++) { Thread.sleep(1000); renamed = dfs.exists(targetPath); } return renamed; } @Override Object getResult() { return null; } } /** concat */
Rename2Op
java
spring-projects__spring-framework
spring-aop/src/test/java/org/springframework/aop/target/LazyCreationTargetSourceTests.java
{ "start": 943, "end": 1494 }
class ____ { @Test void testCreateLazy() { TargetSource targetSource = new AbstractLazyCreationTargetSource() { @Override protected Object createObject() { return new InitCountingBean(); } @Override public Class<?> getTargetClass() { return InitCountingBean.class; } }; InitCountingBean proxy = (InitCountingBean) ProxyFactory.getProxy(targetSource); assertThat(InitCountingBean.initCount).as("Init count should be 0").isEqualTo(0); assertThat(targetSource.getTargetClass()).as("Target
LazyCreationTargetSourceTests
java
assertj__assertj-core
assertj-core/src/test/java/org/assertj/core/error/ShouldHaveAtIndex_create_Test.java
{ "start": 1255, "end": 1819 }
class ____ { @Test void should_create_error_message() { // GIVEN ErrorMessageFactory factory = shouldHaveAtIndex(list("Yoda", "Luke"), new TestCondition<>("red lightsaber"), atIndex(1), "Luke"); // WHEN String message = factory.create(new TextDescription("Test"), new StandardRepresentation()); // THEN then(message).isEqualTo("[Test] %nExpecting actual:%n \"Luke\"%nat index 1 to have:%n red lightsaber%nin:%n [\"Yoda\", \"Luke\"]%n".formatted()); } }
ShouldHaveAtIndex_create_Test
java
quarkusio__quarkus
extensions/security-jpa/deployment/src/test/java/io/quarkus/security/jpa/EagerAuthMultiTenantPersistenceUnitTest.java
{ "start": 211, "end": 1579 }
class ____ extends JpaSecurityRealmTest { @RegisterExtension static final QuarkusUnitTest config = new QuarkusUnitTest() .withApplicationRoot((jar) -> jar .addClasses(testClasses) .addClass(MinimalUserEntity.class) .addClass(CustomHibernateTenantResolver.class) .addAsResource("minimal-config/import.sql", "import.sql") .addAsResource("multitenant-persistence-unit/application.properties", "application.properties")); @Test public void testRoutingCtxAccessInsideTenantResolver() { // RoutingContext is not used inside TenantResolver to resolve tenant RestAssured.given().auth().preemptive().basic("user", "user").when().get("/jaxrs-secured/roles-class/routing-context") .then().statusCode(200); // RoutingContext is used and proactive auth is enabled => expect error CustomHibernateTenantResolver.useRoutingContext = true; try { RestAssured.given().auth().preemptive().basic("user", "user").queryParam("tenant", "two").when() .get("/jaxrs-secured/roles-class") .then().statusCode(500); } finally { CustomHibernateTenantResolver.useRoutingContext = false; } } }
EagerAuthMultiTenantPersistenceUnitTest
java
alibaba__nacos
naming/src/main/java/com/alibaba/nacos/naming/healthcheck/extend/AbstractHealthCheckProcessorExtend.java
{ "start": 1567, "end": 1928 }
class ____"); } return String.valueOf(simpleName.charAt(0)).toLowerCase() + simpleName.substring(1); } @Override public void setBeanFactory(BeanFactory beanFactory) throws BeansException { if (beanFactory instanceof SingletonBeanRegistry) { this.registry = (SingletonBeanRegistry) beanFactory; } } }
name
java
spring-projects__spring-framework
spring-tx/src/test/java/org/springframework/transaction/annotation/AnnotationTransactionAttributeSourceTests.java
{ "start": 21036, "end": 21124 }
interface ____ { @Transactional int getAge(); void setAge(int age); }
ITestBean2
java
apache__camel
components/camel-mail/src/test/java/org/apache/camel/component/mail/MailNoRecipientsTest.java
{ "start": 1266, "end": 1992 }
class ____ extends CamelTestSupport { @Test public void testMailNoRecipients() { try { template.sendBody("direct:a", "Hello World"); fail("Should have thrown exception"); } catch (CamelExecutionException e) { IllegalArgumentException iae = assertIsInstanceOf(IllegalArgumentException.class, e.getCause()); assertEquals("The mail message does not have any recipients set.", iae.getMessage()); } } @Override protected RouteBuilder createRouteBuilder() { return new RouteBuilder() { public void configure() { from("direct:a").to("smtp://localhost"); } }; } }
MailNoRecipientsTest
java
apache__camel
components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/queue/HazelcastQueueComponent.java
{ "start": 1192, "end": 1878 }
class ____ extends HazelcastDefaultComponent { public HazelcastQueueComponent() { } public HazelcastQueueComponent(final CamelContext context) { super(context); } @Override protected HazelcastDefaultEndpoint doCreateEndpoint( String uri, String remaining, Map<String, Object> parameters, HazelcastInstance hzInstance) throws Exception { final HazelcastQueueConfiguration config = new HazelcastQueueConfiguration(); HazelcastQueueEndpoint answer = new HazelcastQueueEndpoint(hzInstance, uri, this, remaining, config); setProperties(answer, parameters); return answer; } }
HazelcastQueueComponent
java
apache__hadoop
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/TestFSConfigToCSConfigConverterMain.java
{ "start": 7021, "end": 7184 }
class ____ implements Consumer<Integer> { int exitCode; @Override public void accept(Integer t) { this.exitCode = t.intValue(); } } }
ExitFunc
java
apache__flink
flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/io/checkpointing/UnalignedCheckpointsInterruptibleTimersTest.java
{ "start": 2665, "end": 7849 }
class ____ { @Test void testSingleWatermarkHoldingOperatorInTheChain() throws Exception { final Instant firstWindowEnd = Instant.ofEpochMilli(1000L); final int numFirstWindowTimers = 2; final Instant secondWindowEnd = Instant.ofEpochMilli(2000L); final int numSecondWindowTimers = 2; try (final StreamTaskMailboxTestHarness<String> harness = new StreamTaskMailboxTestHarnessBuilder<>(OneInputStreamTask::new, Types.STRING) .addJobConfig( CheckpointingOptions.CHECKPOINTING_INTERVAL, Duration.ofSeconds(1)) .addJobConfig(CheckpointingOptions.ENABLE_UNALIGNED, true) .addJobConfig( CheckpointingOptions.ENABLE_UNALIGNED_INTERRUPTIBLE_TIMERS, true) .modifyStreamConfig( UnalignedCheckpointsInterruptibleTimersTest::setupStreamConfig) .addInput(Types.STRING) .setupOperatorChain( SimpleOperatorFactory.of( new MultipleTimersAtTheSameTimestamp() .withTimers(firstWindowEnd, numFirstWindowTimers) .withTimers( secondWindowEnd, numSecondWindowTimers))) .name("first") .finishForSingletonOperatorChain(StringSerializer.INSTANCE) .build()) { harness.processElement(new StreamRecord<>("register timers")); harness.processAll(); harness.processElement(asWatermark(firstWindowEnd)); harness.processElement(asWatermark(secondWindowEnd)); assertThat(harness.getOutput()) .containsExactly( asFiredRecord("key-0"), asMailRecord("key-0"), asFiredRecord("key-1"), asMailRecord("key-1"), asWatermark(firstWindowEnd), asFiredRecord("key-0"), asMailRecord("key-0"), asFiredRecord("key-1"), asMailRecord("key-1"), asWatermark(secondWindowEnd)); } } @Test void testWatermarkProgressWithNoTimers() throws Exception { final Instant firstWindowEnd = Instant.ofEpochMilli(1000L); final Instant secondWindowEnd = Instant.ofEpochMilli(2000L); try (final StreamTaskMailboxTestHarness<String> harness = new StreamTaskMailboxTestHarnessBuilder<>(OneInputStreamTask::new, Types.STRING) .addJobConfig( CheckpointingOptions.CHECKPOINTING_INTERVAL, Duration.ofSeconds(1)) .addJobConfig(CheckpointingOptions.ENABLE_UNALIGNED, true) 
.addJobConfig( CheckpointingOptions.ENABLE_UNALIGNED_INTERRUPTIBLE_TIMERS, true) .modifyStreamConfig( UnalignedCheckpointsInterruptibleTimersTest::setupStreamConfig) .addInput(Types.STRING) .setupOperatorChain( SimpleOperatorFactory.of(new MultipleTimersAtTheSameTimestamp())) .name("first") .finishForSingletonOperatorChain(StringSerializer.INSTANCE) .build()) { harness.setAutoProcess(false); harness.processElement(new StreamRecord<>("impulse")); harness.processAll(); harness.processElement(asWatermark(firstWindowEnd)); harness.processElement(asWatermark(secondWindowEnd)); final List<Watermark> seenWatermarks = new ArrayList<>(); while (seenWatermarks.size() < 2) { harness.processSingleStep(); Object outputElement; while ((outputElement = harness.getOutput().poll()) != null) { if (outputElement instanceof Watermark) { seenWatermarks.add((Watermark) outputElement); } } } assertThat(seenWatermarks) .containsExactly(asWatermark(firstWindowEnd), asWatermark(secondWindowEnd)); } } private static Watermark asWatermark(Instant timestamp) { return new Watermark(timestamp.toEpochMilli()); } private static StreamRecord<String> asFiredRecord(String key) { return new StreamRecord("fired-" + key); } private static StreamRecord<String> asMailRecord(String key) { return new StreamRecord("mail-" + key); } private static void setupStreamConfig(StreamConfig cfg) { cfg.setStateKeySerializer(StringSerializer.INSTANCE); } private static
UnalignedCheckpointsInterruptibleTimersTest
java
quarkusio__quarkus
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/cdi/bcextensions/SyntheticObserverTest.java
{ "start": 3333, "end": 3671 }
class ____ { @Inject Event<MyEvent> unqualifiedEvent; @Inject @MyQualifier Event<MyEvent> qualifiedEvent; void fireEvent() { unqualifiedEvent.fire(new MyEvent("Hello World")); qualifiedEvent.fire(new MyEvent("Hello SynObserver")); } } static
MyService
java
hibernate__hibernate-orm
hibernate-envers/src/test/java/org/hibernate/orm/test/envers/integration/query/NotAuditedQueryTest.java
{ "start": 758, "end": 3015 }
class ____ { @Test @JiraKey(value = "HHH-11558") public void testRevisionsOfEntityNotAuditedMultipleResults(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> assertThrows( NotAuditedException.class, () -> AuditReaderFactory.get( em ).createQuery() .forRevisionsOfEntity( NonAuditedEntity.class, false, false ) .getResultList() ) ); } @Test @JiraKey(value = "HHH-11558") public void testRevisionsOfEntityNotAuditedSingleResult(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> assertThrows( NotAuditedException.class, () -> AuditReaderFactory.get( em ).createQuery() .forRevisionsOfEntity( NonAuditedEntity.class, false, false ) .setMaxResults( 1 ) .getSingleResult() ) ); } @Test @JiraKey(value = "HHH-11558") public void testForEntitiesAtRevisionNotAuditedMultipleResults(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> assertThrows( NotAuditedException.class, () -> AuditReaderFactory.get( em ).createQuery() .forEntitiesAtRevision( NonAuditedEntity.class, 1 ) .getResultList() ) ); } @Test @JiraKey(value = "HHH-11558") public void testForEntitiesAtRevisionNotAuditedSingleResult(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> assertThrows( NotAuditedException.class, () -> AuditReaderFactory.get( em ).createQuery() .forEntitiesAtRevision( NonAuditedEntity.class, 1 ) .setMaxResults( 1 ) .getSingleResult() ) ); } @Test @JiraKey(value = "HHH-11558") public void testForEntitiesModifiedAtRevisionNotAuditedMultipleResults(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> assertThrows( NotAuditedException.class, () -> AuditReaderFactory.get( em ).createQuery() .forEntitiesModifiedAtRevision( NonAuditedEntity.class, 1 ) .getResultList() ) ); } @Test @JiraKey(value = "HHH-11558") public void testForEntitiesModifiedAtRevisionNotAuditedSingleResult(EntityManagerFactoryScope scope) { scope.inEntityManager( em -> assertThrows( NotAuditedException.class, () -> AuditReaderFactory.get( em ).createQuery() 
.forEntitiesModifiedAtRevision( NonAuditedEntity.class, 1 ) .setMaxResults( 1 ) .getSingleResult() ) ); } @Entity(name = "NonAuditedEntity") public static
NotAuditedQueryTest