language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/test/proxy/Socks4Proxy.java
|
{
"start": 1188,
"end": 5813
}
|
class ____ extends ProxyBase<Socks4Proxy> {
private static final Logger log = LoggerFactory.getLogger(Socks4Proxy.class);
private static final Buffer clientRequest = Buffer.buffer(new byte[] { 4, 1 });
private static final Buffer connectResponse = Buffer.buffer(new byte[] { 0, 90, 0, 0, 0, 0, 0, 0 });
private static final Buffer errorResponse = Buffer.buffer(new byte[] { 0, 91, 0, 0, 0, 0, 0, 0 });
public static final int DEFAULT_PORT = 11080;
private NetServer server;
private NetClient client;
@Override
public int defaultPort() {
return DEFAULT_PORT;
}
/**
* Start the server.
*
* @param vertx
* Vertx instance to use for creating the server and client
*/
@Override
public Socks4Proxy start(Vertx vertx) throws Exception {
NetServerOptions options = new NetServerOptions();
options.setHost("localhost").setPort(port);
client = vertx.createNetClient(new NetClientOptions());
server = vertx.createNetServer(options);
server.connectHandler(socket -> {
socket.handler(buffer -> {
if (!buffer.getBuffer(0, clientRequest.length()).equals(clientRequest)) {
throw new IllegalStateException("expected " + toHex(clientRequest) + ", got " + toHex(buffer));
}
log.debug("got request: " + toHex(buffer));
int port = buffer.getUnsignedShort(2);
String ip = getByte4(buffer.getBuffer(4, 8));
String authUsername = getString(buffer.getBuffer(8, buffer.length()));
String username = nextUserName();
if (username != null && !authUsername.equals(username)) {
log.debug("auth failed");
log.debug("writing: " + toHex(errorResponse));
socket.write(errorResponse);
socket.close();
} else {
String host;
if (ip.equals("0.0.0.1")) {
host = getString(buffer.getBuffer(9 + authUsername.length(), buffer.length()));
} else {
host = ip;
}
log.debug("connect: " + host + ":" + port);
socket.handler(null);
lastUri = host + ":" + port;
if (forceUri != null) {
host = forceUri.substring(0, forceUri.indexOf(':'));
port = Integer.valueOf(forceUri.substring(forceUri.indexOf(':') + 1));
}
log.debug("connecting to " + host + ":" + port);
client.connect(port, host).onComplete(result -> {
if (result.succeeded()) {
localAddresses.add(result.result().localAddress().toString());
log.debug("writing: " + toHex(connectResponse));
if (successDelayMillis > 0) {
vertx.setTimer(successDelayMillis, tid -> socket.write(connectResponse));
} else {
socket.write(connectResponse);
}
log.debug("connected, starting pump");
NetSocket clientSocket = result.result();
socket.closeHandler(v -> clientSocket.close());
clientSocket.closeHandler(v -> socket.close());
socket.pipeTo(clientSocket);
clientSocket.pipeTo(socket);
} else {
log.error("exception", result.cause());
socket.handler(null);
log.debug("writing: " + toHex(errorResponse));
socket.write(errorResponse);
socket.close();
}
});
}
});
});
CompletableFuture<Void> fut = new CompletableFuture<>();
server.listen().onComplete(ar -> {
if (ar.succeeded()) {
fut.complete(null);
} else {
fut.completeExceptionally(ar.cause());
}
});
fut.get(10, TimeUnit.SECONDS);
log.debug("socks4a server started");
return this;
}
private String getString(Buffer buffer) {
String string = buffer.toString();
return string.substring(0, string.indexOf('\0'));
}
private String getByte4(Buffer buffer) {
return String.format("%d.%d.%d.%d", buffer.getByte(0), buffer.getByte(1), buffer.getByte(2), buffer.getByte(3));
}
private String toHex(Buffer buffer) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < buffer.length(); i++) {
sb.append(String.format("%02X ", buffer.getByte(i)));
}
return sb.toString();
}
/**
* Stop the server.
*
* <p>Doesn't wait for the close operation to finish
*/
@Override
public void stop() {
if (server != null) {
server.close().await();
server = null;
}
if (client != null) {
client.close().await();
client = null;
}
}
}
|
Socks4Proxy
|
java
|
spring-projects__spring-boot
|
test-support/spring-boot-test-support/src/test/java/org/springframework/boot/testsupport/classpath/resources/OnSuperClassWithResourceTests.java
|
{
"start": 1291,
"end": 1601
}
|
class ____");
}
@Test
@WithResource(name = "method-resource", content = "method")
void whenWithResourceIsUsedOnASuperClassAndMethodThenBothResourcesAreAvailable() throws IOException {
assertThat(new ClassPathResource("on-super-class").getContentAsString(StandardCharsets.UTF_8))
.isEqualTo("super-
|
content
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/condition/DoesNotHave_matches_Test.java
|
{
"start": 1033,
"end": 1572
}
|
class ____ {
private TestCondition<Object> condition;
private Condition<Object> doesNotHave;
@BeforeEach
public void setUp() {
condition = new TestCondition<>();
doesNotHave = doesNotHave(condition);
}
@Test
void should_match_if_Condition_not_match() {
condition.shouldMatch(false);
assertThat(doesNotHave.matches("Yoda")).isTrue();
}
@Test
void should_not_match_Conditions_match() {
condition.shouldMatch(true);
assertThat(doesNotHave.matches("Yoda")).isFalse();
}
}
|
DoesNotHave_matches_Test
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/launcher/TestPostDiscoveryTagFilter.java
|
{
"start": 499,
"end": 787
}
|
class ____ implements PostDiscoveryFilter {
@Override
public FilterResult apply(final TestDescriptor object) {
var include = object.getTags().stream().map(TestTag::getName).anyMatch("test-post-discovery"::equals);
return FilterResult.includedIf(include);
}
}
|
TestPostDiscoveryTagFilter
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoSpyBeanOverrideHandlerTests.java
|
{
"start": 1248,
"end": 3440
}
|
class ____ {
@Test
void beanNameIsSetToNullIfAnnotationNameIsEmpty() {
List<BeanOverrideHandler> list = BeanOverrideTestUtils.findHandlers(SampleOneSpy.class);
assertThat(list).singleElement().satisfies(handler -> assertThat(handler.getBeanName()).isNull());
}
@Test
void beanNameIsSetToAnnotationName() {
List<BeanOverrideHandler> list = BeanOverrideTestUtils.findHandlers(SampleOneSpyWithName.class);
assertThat(list).singleElement().satisfies(handler -> assertThat(handler.getBeanName()).isEqualTo("anotherService"));
}
@Test
void isEqualToWithSameInstance() {
MockitoSpyBeanOverrideHandler handler = handlerFor("service");
assertThat(handler).isEqualTo(handler);
assertThat(handler).hasSameHashCodeAs(handler);
}
@Test
void isEqualToWithSameMetadata() {
MockitoSpyBeanOverrideHandler handler1 = handlerFor("service");
MockitoSpyBeanOverrideHandler handler2 = handlerFor("service");
assertThat(handler1).isEqualTo(handler2);
assertThat(handler1).hasSameHashCodeAs(handler2);
}
@Test
void isNotEqualToByTypeLookupWithSameMetadataButDifferentField() {
assertThat(handlerFor("service")).isNotEqualTo(handlerFor("service2"));
}
@Test
void isEqualToByNameLookupWithSameMetadataButDifferentField() {
MockitoSpyBeanOverrideHandler handler1 = handlerFor("service3");
MockitoSpyBeanOverrideHandler handler2 = handlerFor("service4");
assertThat(handler1).isEqualTo(handler2);
assertThat(handler1).hasSameHashCodeAs(handler2);
}
@Test
void isNotEqualToWithSameMetadataButDifferentBeanName() {
assertThat(handlerFor("service")).isNotEqualTo(handlerFor("service3"));
}
@Test
void isNotEqualToWithSameMetadataButDifferentReset() {
assertThat(handlerFor("service")).isNotEqualTo(handlerFor("service5"));
}
private static MockitoSpyBeanOverrideHandler handlerFor(String fieldName) {
Field field = ReflectionUtils.findField(Sample.class, fieldName);
assertThat(field).isNotNull();
MockitoSpyBean annotation = AnnotatedElementUtils.getMergedAnnotation(field, MockitoSpyBean.class);
return new MockitoSpyBeanOverrideHandler(field, ResolvableType.forClass(field.getType()), annotation);
}
static
|
MockitoSpyBeanOverrideHandlerTests
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/OptionalMapToOptionalTest.java
|
{
"start": 2980,
"end": 3350
}
|
class ____ {
public Optional<Integer> test(Optional<Integer> optional) {
return optional.map(i -> 1);
}
}
""")
.doTest();
}
@Test
public void rawOptional() {
helper
.addSourceLines(
"Test.java",
"""
import java.util.Optional;
|
Test
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webclient-test/src/main/java/org/springframework/boot/webclient/test/autoconfigure/AutoConfigureWebClient.java
|
{
"start": 1184,
"end": 1448
}
|
class ____ enable auto-configuration of a
* {@link Builder WebClient.Builder}.
*
* @author Andy Wilkinson
* @since 4.0.0
*/
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Inherited
@AutoConfigureJson
@ImportAutoConfiguration
public @
|
to
|
java
|
apache__camel
|
components/camel-bean/src/main/java/org/apache/camel/component/bean/AbstractBeanProcessor.java
|
{
"start": 1440,
"end": 8399
}
|
class ____ extends AsyncProcessorSupport {
private static final Logger LOG = LoggerFactory.getLogger(AbstractBeanProcessor.class);
private final BeanHolder beanHolder;
private transient Processor processor;
private transient Object bean;
private transient boolean lookupProcessorDone;
private final Lock lock = new ReentrantLock();
private BeanScope scope;
private String method;
private boolean shorthandMethod;
protected AbstractBeanProcessor(Object pojo, BeanInfo beanInfo) {
this(new ConstantBeanHolder(pojo, beanInfo));
}
protected AbstractBeanProcessor(BeanHolder beanHolder) {
this.beanHolder = beanHolder;
}
@Override
public String toString() {
return "BeanProcessor[" + beanHolder + (method != null ? "#" + method : "") + "]";
}
@Override
public boolean process(Exchange exchange, AsyncCallback callback) {
// do we have an explicit method name we always should invoke
final String explicitMethodName = method;
final Object beanInstance;
final BeanInfo beanInfo;
try {
beanInstance = beanHolder.getBean(exchange);
// get bean info for this bean instance (to avoid thread issue)
beanInfo = doGetBeanInfo(beanInstance);
} catch (Exception e) {
exchange.setException(e);
callback.done(true);
return true;
}
// do we have a custom adapter for this POJO to a Processor
// but only do this if allowed
// we need to check beanHolder is Processor is support, to avoid the bean cached issue
if (allowProcessor(explicitMethodName, beanInfo)) {
final Processor target = getCustomAdapter(exchange, beanInstance);
if (target != null) {
useCustomAdapter(exchange, callback, target);
return true;
}
}
return useMethodInvocation(exchange, callback, explicitMethodName, beanInfo, beanInstance);
}
private static boolean useMethodInvocation(
Exchange exchange, AsyncCallback callback, String explicitMethodName, BeanInfo beanInfo, Object beanInstance) {
final Message in = exchange.getIn();
// set explicit method name to invoke as a exchange property, which is how BeanInfo can detect it
if (explicitMethodName != null) {
exchange.setProperty(BeanConstants.BEAN_METHOD_NAME, explicitMethodName);
}
final MethodInvocation invocation;
try {
invocation = beanInfo.createInvocation(beanInstance, exchange, explicitMethodName);
} catch (Exception e) {
exchange.setException(e);
callback.done(true);
return true;
} finally {
// must remove property as they were provisional
if (explicitMethodName != null) {
exchange.removeProperty(BeanConstants.BEAN_METHOD_NAME);
}
}
if (invocation == null) {
exchange.setException(new IllegalStateException(
"No method invocation could be created, no matching method could be found on: " + beanInstance));
callback.done(true);
return true;
}
// invoke invocation
return invocation.proceed(callback);
}
private Processor getCustomAdapter(Exchange exchange, Object beanTmp) {
Processor target = getProcessor();
if (target == null) {
// only attempt to lookup the processor once or nearly once
// allow cache by default or if the scope is singleton
boolean allowCache = scope == null || scope == BeanScope.Singleton;
if (allowCache) {
if (!lookupProcessorDone) {
lock.lock();
try {
lookupProcessorDone = true;
// so if there is a custom type converter for the bean to processor
target = exchange.getContext().getTypeConverter().tryConvertTo(Processor.class, exchange, beanTmp);
processor = target;
} finally {
lock.unlock();
}
}
} else {
// so if there is a custom type converter for the bean to processor
target = exchange.getContext().getTypeConverter().tryConvertTo(Processor.class, exchange, beanTmp);
}
}
return target;
}
private static void useCustomAdapter(Exchange exchange, AsyncCallback callback, Processor target) {
if (LOG.isTraceEnabled()) {
LOG.trace("Using a custom adapter as bean invocation: {}", target);
}
try {
target.process(exchange);
} catch (AssertionError | Exception e) {
exchange.setException(e);
} finally {
callback.done(true);
}
}
private BeanInfo doGetBeanInfo(Object beanTmp) {
BeanInfo beanInfo = beanHolder.getBeanInfo(beanTmp);
if (beanInfo == null) {
// fallback and use old way
beanInfo = beanHolder.getBeanInfo();
}
return beanInfo;
}
protected Processor getProcessor() {
return processor;
}
protected BeanHolder getBeanHolder() {
return this.beanHolder;
}
public Object getBean() {
return beanHolder.getBean(null);
}
// Properties
// -----------------------------------------------------------------------
public String getMethod() {
return method;
}
public BeanScope getScope() {
return scope;
}
public void setScope(BeanScope scope) {
this.scope = scope;
}
/**
* Sets the method name to use
*/
public void setMethod(String method) {
this.method = method;
}
public boolean isShorthandMethod() {
return shorthandMethod;
}
/**
* Sets whether to support getter style method name, so you can say the method is called 'name' but it will invoke
* the 'getName' method.
* <p/>
* Is by default turned off.
*/
public void setShorthandMethod(boolean shorthandMethod) {
this.shorthandMethod = shorthandMethod;
}
// Implementation methods
//-------------------------------------------------------------------------
@Override
protected void doInit() throws Exception {
// optimize to only get (create) a processor if really needed
if (beanHolder.supportProcessor() && allowProcessor(method, beanHolder.getBeanInfo())) {
processor = beanHolder.getProcessor();
ServiceHelper.initService(processor);
} else if (beanHolder instanceof ConstantBeanHolder) {
try {
// Start the bean if it implements Service
|
AbstractBeanProcessor
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/jackson/Initializers.java
|
{
"start": 3468,
"end": 4410
}
|
class ____ extends AbstractInitializer {
@Override
void setupModule(
final SetupContext context, final boolean includeStacktrace, final boolean stacktraceAsString) {
super.setupModule(context, includeStacktrace, stacktraceAsString);
// These classes don't have public constructors, so we instantiate them directly.
// See https://github.com/apache/logging-log4j2/issues/2814
context.addSerializers(new SimpleSerializers(singletonList(new ContextDataAsEntryListSerializer())));
context.addDeserializers(
new SimpleDeserializers(singletonMap(StringMap.class, new ContextDataAsEntryListDeserializer())));
}
}
/**
* Used to set up {@link SetupContext} from different {@link SimpleModule}s.
* <p>
* Serializes the context map as object.
* </p>
*/
static
|
SetupContextAsEntryListInitializer
|
java
|
netty__netty
|
buffer/src/main/java/io/netty/buffer/AdaptivePoolingAllocator.java
|
{
"start": 22949,
"end": 24161
}
|
class ____ implements ChunkControllerFactory {
// To amortize activation/deactivation of chunks, we should have a minimum number of segments per chunk.
// We choose 32 because it seems neither too small nor too big.
// For segments of 16 KiB, the chunks will be half a megabyte.
private static final int MIN_SEGMENTS_PER_CHUNK = 32;
private final int segmentSize;
private final int chunkSize;
private final int[] segmentOffsets;
private SizeClassChunkControllerFactory(int segmentSize) {
this.segmentSize = ObjectUtil.checkPositive(segmentSize, "segmentSize");
chunkSize = Math.max(MIN_CHUNK_SIZE, segmentSize * MIN_SEGMENTS_PER_CHUNK);
int segmentsCount = chunkSize / segmentSize;
segmentOffsets = new int[segmentsCount];
for (int i = 0; i < segmentsCount; i++) {
segmentOffsets[i] = i * segmentSize;
}
}
@Override
public ChunkController create(MagazineGroup group) {
return new SizeClassChunkController(group, segmentSize, chunkSize, segmentOffsets);
}
}
private static final
|
SizeClassChunkControllerFactory
|
java
|
quarkusio__quarkus
|
extensions/security/deployment/src/test/java/io/quarkus/security/test/cdi/inheritance/AuthenticatedBean.java
|
{
"start": 189,
"end": 307
}
|
class ____ {
public String ping() {
return AuthenticatedBean.class.getSimpleName();
}
}
|
AuthenticatedBean
|
java
|
quarkusio__quarkus
|
integration-tests/jpa/src/main/java/io/quarkus/it/jpa/generatedvalue/EntityWithCustomGenericGeneratorReferencedAsClassName.java
|
{
"start": 220,
"end": 524
}
|
class ____ {
@Id
@GeneratedValue(generator = "referenced-as-class-name")
@GenericGenerator(name = "referenced-as-class-name", strategy = "io.quarkus.it.jpa.generatedvalue.MyCustomGenericGeneratorReferencedAsClassName")
public String id;
}
|
EntityWithCustomGenericGeneratorReferencedAsClassName
|
java
|
spring-projects__spring-framework
|
spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/UriTemplateServletAnnotationControllerHandlerMethodTests.java
|
{
"start": 22829,
"end": 23076
}
|
class ____ {
@RequestMapping("type/{var}")
void getFirstLevelFunctionNodes(@PathVariable("var") String var, Writer writer) throws IOException {
writer.write(var);
}
}
@Controller
@RequestMapping("/test")
public static
|
MenuTreeController
|
java
|
alibaba__nacos
|
console/src/test/java/com/alibaba/nacos/console/handler/impl/noop/config/ConfigNoopHandlerTest.java
|
{
"start": 919,
"end": 4329
}
|
class ____ {
ConfigNoopHandler configNoopHandler;
@BeforeEach
void setUp() {
configNoopHandler = new ConfigNoopHandler();
}
@AfterEach
void tearDown() {
}
@Test
void getConfigList() {
assertThrows(NacosApiException.class, () -> configNoopHandler.getConfigList(1, 1, "dataId", "group", "", null),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void getConfigDetail() {
assertThrows(NacosApiException.class, () -> configNoopHandler.getConfigDetail("dataId", "group", ""),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void publishConfig() {
assertThrows(NacosApiException.class, () -> configNoopHandler.publishConfig(null, null),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void deleteConfig() {
assertThrows(NacosApiException.class, () -> configNoopHandler.deleteConfig("", "", "", "", "", ""),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void batchDeleteConfigs() {
assertThrows(NacosApiException.class, () -> configNoopHandler.batchDeleteConfigs(null, "", ""),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void getConfigListByContent() {
assertThrows(NacosApiException.class,
() -> configNoopHandler.getConfigListByContent("", 1, 1, "", "", "", null),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void getListeners() {
assertThrows(NacosApiException.class, () -> configNoopHandler.getListeners("", "", "", true),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void getAllSubClientConfigByIp() {
assertThrows(NacosApiException.class, () -> configNoopHandler.getAllSubClientConfigByIp("", true, "", true),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void exportConfig() {
assertThrows(NacosApiException.class, () -> configNoopHandler.exportConfig("", "", "", "", null),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void importAndPublishConfig() {
assertThrows(NacosApiException.class,
() -> configNoopHandler.importAndPublishConfig(null, "", null, null, null, ""),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void cloneConfig() {
assertThrows(NacosApiException.class, () -> configNoopHandler.cloneConfig(null, "", null, null, "", ""),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void removeBetaConfig() {
assertThrows(NacosApiException.class, () -> configNoopHandler.removeBetaConfig(null, "", "", "", "", ""),
"Current functionMode is `naming`, config module is disabled.");
}
@Test
void queryBetaConfig() {
assertThrows(NacosApiException.class, () -> configNoopHandler.queryBetaConfig(null, "", ""),
"Current functionMode is `naming`, config module is disabled.");
}
}
|
ConfigNoopHandlerTest
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/ValidatorExternalResourceTest.java
|
{
"start": 972,
"end": 2947
}
|
class ____ extends ContextTestSupport {
@Test
public void testExternalResource() throws InterruptedException {
final MockEndpoint mock = getMockEndpoint("mock:out");
mock.expectedMessageCount(1);
template.sendBody("direct:start", "<ord:order xmlns:ord=\"http://example.org/ord\"\n" +
" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xsi:schemaLocation=\"http://example.org/ord order.xsd\">\n" +
" <customer>\n" +
" <name>Priscilla Walmsley</name>\n" +
" <number>12345</number>\n" +
" </customer>\n" +
" <items>\n" +
" <product>\n" +
" <number>98765</number>\n" +
" <name>Short-Sleeved Linen Blouse</name>\n" +
" <size system=\"US-DRESS\">10</size>\n" +
" <color value=\"blue\"/>\n" +
" </product>\n" +
" </items>\n" +
"</ord:order>");
mock.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start")
.toD("validator:https://raw.githubusercontent.com/apache/camel/main/core/camel-core/src/test/resources/org/apache/camel/component/validator/xsds/order.xsd")
.to("mock:out");
}
};
}
}
|
ValidatorExternalResourceTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/hql/SubQueryTest.java
|
{
"start": 1123,
"end": 6685
}
|
class ____ {
@AfterEach
void dropTestData(SessionFactoryScope sessions) {
sessions.dropData();
}
@Test
@JiraKey( value = "HHH-9090" )
public void testCorrelatedJoin(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
Root root = new Root();
root.rootName = "root name";
root.branch = new Branch();
root.branch.branchName = "branch";
root.branch.leaves = new ArrayList<Leaf>();
Leaf leaf1 = new Leaf();
leaf1.leafName = "leaf1";
Leaf leaf2 = new Leaf();
leaf2.leafName = "leaf2";
root.branch.leaves.add( leaf1 );
root.branch.leaves.add( leaf2 );
session.persist( leaf1 );
session.persist( leaf2 );
session.persist( root.branch );
session.persist( root );
Root otherRoot = new Root();
otherRoot.rootName = "other root name";
otherRoot.branch = new Branch();
otherRoot.branch.branchName = "other branch";
otherRoot.branch.leaves = new ArrayList<Leaf>();
Leaf otherLeaf1 = new Leaf();
otherLeaf1.leafName = "leaf1";
Leaf otherLeaf3 = new Leaf();
otherLeaf3.leafName = "leaf3";
otherRoot.branch.leaves.add( otherLeaf1 );
otherRoot.branch.leaves.add( otherLeaf3 );
session.persist( otherLeaf1 );
session.persist( otherLeaf3 );
session.persist( otherRoot.branch );
session.persist( otherRoot );
} );
sessions.inTransaction( (session) -> {
var qry = """
from Root as r
where r.branch.branchName = 'branch'
and exists( from r.branch.leaves as s where s.leafName = 'leaf1')
""";
Root rootQueried = session.createQuery( qry, Root.class ).uniqueResult();
Assertions.assertEquals( "root name", rootQueried.rootName );
Assertions.assertEquals( "branch", rootQueried.branch.branchName );
Assertions.assertEquals( "leaf1", rootQueried.branch.leaves.get( 0 ).leafName );
Assertions.assertEquals( "leaf2", rootQueried.branch.leaves.get( 1 ).leafName );
} );
sessions.inTransaction( (session) -> {
var qry = """
from Root as r
where r.branch.branchName = 'branch'
and exists( from r.branch.leaves as s where s.leafName = 'leaf3')
""";
Assertions.assertNull( session.createQuery( qry ).uniqueResult() );
} );
sessions.inTransaction( (session) -> {
var qry = """
from Root as r
where exists( from r.branch.leaves as s where r.branch.branchName = 'branch'
and s.leafName = 'leaf1')
""";
var rootQueried = session.createQuery( qry, Root.class ).uniqueResult();
Assertions.assertEquals( "root name", rootQueried.rootName );
Assertions.assertEquals( "branch", rootQueried.branch.branchName );
Assertions.assertEquals( "leaf1", rootQueried.branch.leaves.get( 0 ).leafName );
Assertions.assertEquals( "leaf2", rootQueried.branch.leaves.get( 1 ).leafName );
} );
sessions.inTransaction( (session) -> {
var qry = """
from Root as r
where exists( from Root r1 where r1.branch.branchName = r.branch.branchName and r1.branch.branchName != 'other branch')
""";
var rootQueried = session.createQuery( qry, Root.class ).uniqueResult();
Assertions.assertEquals( "root name", rootQueried.rootName );
Assertions.assertEquals( "branch", rootQueried.branch.branchName );
Assertions.assertEquals( "leaf1", rootQueried.branch.leaves.get( 0 ).leafName );
Assertions.assertEquals( "leaf2", rootQueried.branch.leaves.get( 1 ).leafName );
} );
}
@Test
@JiraKey( value = "HHH-1689" )
@JiraKey( value = "SQM-30" )
public void testSubQueryAsSearchedCaseResultExpression(SessionFactoryScope factoryScope) {
factoryScope.inTransaction( (session) -> {
final String query = """
SELECT CASE
WHEN l.id IS NOT NULL
THEN (SELECT COUNT(r.id) FROM Root r)
ELSE 0
END
FROM Leaf l
""";
// simple syntax check
session.createQuery( query ).list();
} );
}
@Test
@JiraKey( value = "HHH-1689" )
@JiraKey( value = "SQM-30" )
public void testSubQueryAsSearchedCaseExpression(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
final String query = """
SELECT CASE
WHEN (SELECT COUNT(r.id) FROM Root r) > 1 THEN 1
ELSE 0
END
FROM Leaf l
""";
// simple syntax check
session.createQuery( query ).list();
} );
}
@Test
@JiraKey( value = "HHH-1689" )
@JiraKey( value = "SQM-30" )
public void testSubQueryAsCaseElseResultExpression(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
final String query = """
SELECT CASE WHEN l.id > 1 THEN 1
ELSE (SELECT COUNT(r.id) FROM Root r)
END FROM Leaf l
""";
// simple syntax check
session.createQuery( query ).list();
} );
}
@Test
@JiraKey( value = "HHH-1689" )
@JiraKey( value = "SQM-30" )
public void testSubQueryAsSimpleCaseTestExpression(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
final String query = """
SELECT CASE (SELECT COUNT(r.id) FROM Root r)
WHEN 1 THEN 1
ELSE 0
END
FROM Leaf l
""";
// simple syntax check
session.createQuery( query ).list();
} );
}
@Test
@JiraKey( value = "HHH-1689" )
@JiraKey( value = "SQM-30" )
public void testSubQueryAsSimpleCaseWhenExpression(SessionFactoryScope sessions) {
sessions.inTransaction( (session) -> {
final String query = """
SELECT CASE l.id
WHEN (SELECT COUNT(r.id) FROM Root r) THEN 1
ELSE 0
END
FROM Leaf l
""";
// simple syntax check
session.createQuery( query ).list();
} );
}
@Entity( name = "Root" )
@Table( name = "ROOT" )
public static
|
SubQueryTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/util/TestByteArrayManager.java
|
{
"start": 12765,
"end": 16450
}
|
class ____ implements Runnable {
static final int NUM_RUNNERS = 5;
static int index2arrayLength(int index) {
return ByteArrayManager.MIN_ARRAY_LENGTH << (index - 1);
}
private final ByteArrayManager bam;
final int maxArrayLength;
final int countThreshold;
final int maxArrays;
final ExecutorService pool;
final List<Future<byte[]>> arrays = new ArrayList<Future<byte[]>>();
final AtomicInteger count = new AtomicInteger();
final int p;
private int n;
final List<AssertionError> assertionErrors = new ArrayList<AssertionError>();
Runner(int index, int countThreshold, int maxArrays,
ExecutorService pool, int p, ByteArrayManager bam) {
this.maxArrayLength = index2arrayLength(index);
this.countThreshold = countThreshold;
this.maxArrays = maxArrays;
this.pool = pool;
this.p = p;
this.bam = bam;
}
boolean isEmpty() {
synchronized (arrays) {
return arrays.isEmpty();
}
}
Future<byte[]> submitAllocate() {
count.incrementAndGet();
final Future<byte[]> f = pool.submit(new Callable<byte[]>() {
@Override
public byte[] call() throws Exception {
final int lower = maxArrayLength == ByteArrayManager.MIN_ARRAY_LENGTH?
0: maxArrayLength >> 1;
final int arrayLength = ThreadLocalRandom.current().nextInt(
maxArrayLength - lower) + lower + 1;
final byte[] array = bam.newByteArray(arrayLength);
try {
assertEquals(maxArrayLength, array.length, "arrayLength=" + arrayLength +
", lower=" + lower);
} catch(AssertionError e) {
assertionErrors.add(e);
}
return array;
}
});
synchronized (arrays) {
arrays.add(f);
}
return f;
}
Future<byte[]> removeFirst() throws Exception {
synchronized (arrays) {
return remove(arrays, 0);
}
}
void recycle() throws Exception {
final Future<byte[]> f = removeFirst();
if (f != null) {
printf("randomRecycler: ");
try {
recycle(f.get(10, TimeUnit.MILLISECONDS));
} catch(TimeoutException e) {
recycle(new byte[maxArrayLength]);
printf("timeout, new byte[%d]\n", maxArrayLength);
}
}
}
int recycle(final byte[] array) {
return bam.release(array);
}
Future<Integer> submitRecycle(final byte[] array) {
count.decrementAndGet();
final Future<Integer> f = pool.submit(new Callable<Integer>() {
@Override
public Integer call() throws Exception {
return recycle(array);
}
});
return f;
}
@Override
public void run() {
for(int i = 0; i < n; i++) {
final boolean isAllocate = ThreadLocalRandom.current()
.nextInt(NUM_RUNNERS) < p;
if (isAllocate) {
submitAllocate();
} else {
try {
final Future<byte[]> f = removeFirst();
if (f != null) {
submitRecycle(f.get());
}
} catch (Exception e) {
e.printStackTrace();
fail(this + " has " + e);
}
}
if ((i & 0xFF) == 0) {
sleepMs(100);
}
}
}
Thread start(int n) {
this.n = n;
final Thread t = new SubjectInheritingThread(this);
t.start();
return t;
}
@Override
public String toString() {
return getClass().getSimpleName() + ": max=" + maxArrayLength
+ ", count=" + count;
}
}
static
|
Runner
|
java
|
quarkusio__quarkus
|
integration-tests/opentelemetry-jdbc-instrumentation/src/test/java/io/quarkus/it/opentelemetry/MariaDbLifecycleManager.java
|
{
"start": 1849,
"end": 2328
}
|
class ____ extends MariaDBContainer<StartedMariaDBContainer> {
public StartedMariaDBContainer() {
super(DockerImageName
.parse(MARIADB_IMAGE)
.asCompatibleSubstituteFor(DockerImageName.parse(MariaDBContainer.NAME)));
withDatabaseName(QUARKUS);
withUsername(QUARKUS);
withPassword(QUARKUS);
addExposedPort(3306);
start();
}
}
}
|
StartedMariaDBContainer
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlShowCatalogsConverter.java
|
{
"start": 1220,
"end": 1702
}
|
class ____ implements SqlNodeConverter<SqlShowCatalogs> {
@Override
public Operation convertSqlNode(SqlShowCatalogs sqlShowCatalogs, ConvertContext context) {
final ShowLikeOperator likeOp =
ShowLikeOperator.of(
LikeType.of(sqlShowCatalogs.getLikeType(), sqlShowCatalogs.isNotLike()),
sqlShowCatalogs.getLikeSqlPattern());
return new ShowCatalogsOperation(likeOp);
}
}
|
SqlShowCatalogsConverter
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/query/ReloadWithPreviousRowEntityTest2.java
|
{
"start": 2793,
"end": 3325
}
|
class ____ {
private Integer id;
private Teacher teacher;
public Student() {
}
public Student(Integer id) {
this.id = id;
}
@Id
@Column(name = "student_id")
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@ManyToOne(optional = false)
@JoinColumn(name = "teacher_fk_id")
public Teacher getTeacher() {
return teacher;
}
public void setTeacher(Teacher teacher) {
this.teacher = teacher;
}
}
@Entity(name = "Teacher")
public static
|
Student
|
java
|
quarkusio__quarkus
|
extensions/websockets-next/deployment/src/test/java/io/quarkus/websockets/next/test/codec/ArrayBinaryCodecTest.java
|
{
"start": 734,
"end": 1446
}
|
class ____ {
@RegisterExtension
public static final QuarkusUnitTest test = new QuarkusUnitTest()
.withApplicationRoot(root -> {
root.addClasses(Item.class, Endpoint.class, WSClient.class, ItemArrayBinaryCodec.class);
});
@Inject
Vertx vertx;
@TestHTTPResource("end")
URI testUri;
@Test
public void testCodec() {
try (WSClient client = new WSClient(vertx)) {
client.connect(testUri);
client.sendAndAwait(Buffer.buffer("Foo"));
client.waitForMessages(1);
assertEquals("Foo", client.getMessages().get(0).toString());
}
}
@Singleton
public static
|
ArrayBinaryCodecTest
|
java
|
apache__maven
|
api/maven-api-core/src/main/java/org/apache/maven/api/plugin/Log.java
|
{
"start": 1561,
"end": 5570
}
|
interface ____ {
    /**
     * {@return true if the <b>debug</b> log level is enabled}
     */
    boolean isDebugEnabled();
    /**
     * Sends a message to the user in the <b>debug</b> log level.
     *
     * @param content the message to log
     */
    void debug(CharSequence content);
    /**
     * Sends a message (and accompanying exception) to the user in the <b>debug</b> log level.
     * The error's stacktrace will be output when this log level is enabled.
     *
     * @param content the message to log
     * @param error the error that caused this log
     */
    void debug(CharSequence content, Throwable error);
    /**
     * Sends an exception to the user in the <b>debug</b> log level.
     * The stack trace for this exception will be output when this log level is enabled.
     *
     * @param error the error that caused this log
     */
    void debug(Throwable error);
    /**
     * Sends a lazily built message to the user in the <b>debug</b> log level,
     * allowing implementations to defer constructing the message until the
     * level is known to be enabled.
     *
     * @param content supplier of the message to log
     */
    void debug(Supplier<String> content);
    /**
     * Sends a lazily built message (and accompanying exception) to the user
     * in the <b>debug</b> log level.
     *
     * @param content supplier of the message to log
     * @param error the error that caused this log
     */
    void debug(Supplier<String> content, Throwable error);
    /**
     * {@return true if the <b>info</b> log level is enabled}
     */
    boolean isInfoEnabled();
    /**
     * Sends a message to the user in the <b>info</b> log level.
     *
     * @param content the message to log
     */
    void info(CharSequence content);
    /**
     * Sends a message (and accompanying exception) to the user in the <b>info</b> log level.
     * The error's stacktrace will be output when this log level is enabled.
     *
     * @param content the message to log
     * @param error the error that caused this log
     */
    void info(CharSequence content, Throwable error);
    /**
     * Sends an exception to the user in the <b>info</b> log level.
     * The stack trace for this exception will be output when this log level is enabled.
     *
     * @param error the error that caused this log
     */
    void info(Throwable error);
    /**
     * Sends a lazily built message to the user in the <b>info</b> log level,
     * allowing implementations to defer constructing the message until the
     * level is known to be enabled.
     *
     * @param content supplier of the message to log
     */
    void info(Supplier<String> content);
    /**
     * Sends a lazily built message (and accompanying exception) to the user
     * in the <b>info</b> log level.
     *
     * @param content supplier of the message to log
     * @param error the error that caused this log
     */
    void info(Supplier<String> content, Throwable error);
    /**
     * {@return true if the <b>warn</b> log level is enabled}
     */
    boolean isWarnEnabled();
    /**
     * Sends a message to the user in the <b>warn</b> log level.
     *
     * @param content the message to log
     */
    void warn(CharSequence content);
    /**
     * Sends a message (and accompanying exception) to the user in the <b>warn</b> log level.
     * The error's stacktrace will be output when this log level is enabled.
     *
     * @param content the message to log
     * @param error the error that caused this log
     */
    void warn(CharSequence content, Throwable error);
    /**
     * Sends an exception to the user in the <b>warn</b> log level.
     * The stack trace for this exception will be output when this log level is enabled.
     *
     * @param error the error that caused this log
     */
    void warn(Throwable error);
    /**
     * Sends a lazily built message to the user in the <b>warn</b> log level,
     * allowing implementations to defer constructing the message until the
     * level is known to be enabled.
     *
     * @param content supplier of the message to log
     */
    void warn(Supplier<String> content);
    /**
     * Sends a lazily built message (and accompanying exception) to the user
     * in the <b>warn</b> log level.
     *
     * @param content supplier of the message to log
     * @param error the error that caused this log
     */
    void warn(Supplier<String> content, Throwable error);
    /**
     * {@return true if the <b>error</b> log level is enabled}
     */
    boolean isErrorEnabled();
    /**
     * Sends a message to the user in the <b>error</b> log level.
     *
     * @param content the message to log
     */
    void error(CharSequence content);
    /**
     * Sends a message (and accompanying exception) to the user in the <b>error</b> log level.
     * The error's stacktrace will be output when this log level is enabled.
     *
     * @param content the message to log
     * @param error the error that caused this log
     */
    void error(CharSequence content, Throwable error);
    /**
     * Sends an exception to the user in the <b>error</b> log level.
     * The stack trace for this exception will be output when this log level is enabled.
     *
     * @param error the error that caused this log
     */
    void error(Throwable error);
    /**
     * Sends a lazily built message to the user in the <b>error</b> log level,
     * allowing implementations to defer constructing the message until the
     * level is known to be enabled.
     *
     * @param content supplier of the message to log
     */
    void error(Supplier<String> content);
    /**
     * Sends a lazily built message (and accompanying exception) to the user
     * in the <b>error</b> log level.
     *
     * @param content supplier of the message to log
     * @param error the error that caused this log
     */
    void error(Supplier<String> content, Throwable error);
}
|
Log
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/test/java/io/vertx/test/fakeresolver/FakeEndpointResolver.java
|
{
"start": 801,
"end": 1025
}
|
class ____ {

  // Socket addresses this fake endpoint resolves to.
  final List<SocketAddress> addresses;
  // Whether the resolver should treat this endpoint as usable.
  final boolean valid;

  public Endpoint(List<SocketAddress> addresses, boolean valid) {
    this.valid = valid;
    this.addresses = addresses;
  }
}
|
Endpoint
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/cache/spi/entry/CacheEntryStructure.java
|
{
"start": 313,
"end": 843
}
|
interface ____ {
	/**
	 * Convert the cache item into its "structured" form. Perfectly valid to return the item as-is.
	 *
	 * @param item The item to structure.
	 *
	 * @return The structured form.
	 */
	Object structure(Object item);
	/**
	 * Convert the previous structured form of the item back into its item form.
	 * Inverse of {@link #structure(Object)}.
	 *
	 * @param structured The structured form.
	 * @param factory The session factory.
	 *
	 * @return The destructured item form.
	 */
	Object destructure(Object structured, SessionFactoryImplementor factory);
}
|
CacheEntryStructure
|
java
|
mockito__mockito
|
mockito-core/src/main/java/org/mockito/internal/stubbing/defaultanswers/ReturnsMocks.java
|
{
"start": 469,
"end": 1806
}
|
class ____ implements Answer<Object>, Serializable {

    private static final long serialVersionUID = -6755257986994634579L;

    /** Tried first; a mock is only created when no "more empty" value applies. */
    private final Answer<Object> delegate = new ReturnsMoreEmptyValues();

    @Override
    public Object answer(final InvocationOnMock invocation) throws Throwable {
        final Object emptyValue = delegate.answer(invocation);
        if (emptyValue != null) {
            return emptyValue;
        }
        // No empty value available: fall back to mocking the generics-resolved
        // return type of the invoked method.
        return RetrieveGenericsForDefaultAnswers.returnTypeForMockWithCorrectGenerics(
                invocation,
                new RetrieveGenericsForDefaultAnswers.AnswerCallback() {
                    @Override
                    public Object apply(Class<?> type) {
                        if (type == null) {
                            return null;
                        }
                        // Reuse the parent mock's mock maker so nested mocks are
                        // built consistently, and keep this answer as the default
                        // so deep chains keep returning mocks.
                        MockCreationSettings<?> parentSettings =
                                MockUtil.getMockSettings(invocation.getMock());
                        return Mockito.mock(
                                type,
                                new MockSettingsImpl<>()
                                        .defaultAnswer(ReturnsMocks.this)
                                        .mockMaker(parentSettings.getMockMaker()));
                    }
                });
    }
}
|
ReturnsMocks
|
java
|
apache__flink
|
flink-tests/src/test/java/org/apache/flink/test/windowing/sessionwindows/SessionConfiguration.java
|
{
"start": 1036,
"end": 3949
}
|
class ____<K, E> {

    // Key the session belongs to.
    private final K key;
    // Ordinal of this session within its key.
    private final int sessionId;
    // Maximum allowed gap between events of the same session.
    private final long gap;
    // Smallest event timestamp in the session.
    private final long minEventTimestamp;
    // How many timely (non-late) events the session contains.
    private final int numberOfTimelyEvents;
    // Factory turning timestamps and other metadata into concrete events.
    private final GeneratorEventFactory<K, E> eventFactory;

    public SessionConfiguration(
            K key,
            int sessionId,
            long gap,
            long minEventTimestamp,
            int numberOfTimelyEvents,
            GeneratorEventFactory<K, E> eventFactory) {
        Preconditions.checkNotNull(key);
        Preconditions.checkNotNull(eventFactory);
        Preconditions.checkArgument(numberOfTimelyEvents > 0);
        Preconditions.checkArgument(gap > 0);
        this.key = key;
        this.sessionId = sessionId;
        this.gap = gap;
        this.minEventTimestamp = minEventTimestamp;
        this.numberOfTimelyEvents = numberOfTimelyEvents;
        this.eventFactory = eventFactory;
    }

    public K getKey() {
        return key;
    }

    public GeneratorEventFactory<K, E> getEventFactory() {
        return eventFactory;
    }

    public long getGap() {
        return gap;
    }

    public long getMinEventTimestamp() {
        return minEventTimestamp;
    }

    public int getNumberOfTimelyEvents() {
        return numberOfTimelyEvents;
    }

    public int getSessionId() {
        return sessionId;
    }

    /** Static factory mirroring the constructor. */
    public static <K, E> SessionConfiguration<K, E> of(
            K key,
            int sessionId,
            long timeout,
            long startTimestamp,
            int numberOfEvents,
            GeneratorEventFactory<K, E> eventFactory) {
        return new SessionConfiguration<>(
                key, sessionId, timeout, startTimestamp, numberOfEvents, eventFactory);
    }

    @Override
    public String toString() {
        return "SessionConfiguration{"
                + "key=" + key
                + ", sessionId=" + sessionId
                + ", gap=" + gap
                + ", minEventTimestamp=" + minEventTimestamp
                + ", numberOfTimelyEvents=" + numberOfTimelyEvents
                + ", eventFactory=" + eventFactory
                + '}';
    }

    /**
     * Derives the configuration for the next session of the same key: the
     * session id is incremented and the start timestamp replaced; every other
     * property is carried over unchanged.
     */
    public SessionConfiguration<K, E> getFollowupSessionConfiguration(long startTimestamp) {
        return new SessionConfiguration<>(
                key, sessionId + 1, gap, startTimestamp, numberOfTimelyEvents, eventFactory);
    }
}
|
SessionConfiguration
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/testutil/runner/EclipseCompilingExtension.java
|
{
"start": 4108,
"end": 4714
}
|
interface ____ {
        /**
         * Performs the compilation described by {@code compilationRequest} and reports its
         * outcome.
         * <p>
         * NOTE(review): per its name, implementations are expected to run the compilation
         * inside a separate classloader — confirm against the enclosing extension.
         *
         * @param compilationRequest describes what to compile and with which processor options
         * @param testCompilationClasspath classpath entries for the compilation
         * @param sourceFiles the source files to compile
         * @param sourceDir root directory of the sources
         * @param sourceOutputDir directory for generated sources
         * @param classOutputDir directory for compiled classes
         */
        CompilationOutcomeDescriptor compileInOtherClassloader(CompilationRequest compilationRequest,
                                                            List<String> testCompilationClasspath,
                                                            Set<File> sourceFiles,
                                                            String sourceDir,
                                                            String sourceOutputDir,
                                                            String classOutputDir);
    }
public static final
|
ClassLoaderHelper
|
java
|
spring-projects__spring-framework
|
spring-core/src/test/java/org/springframework/core/ResolvableTypeTests.java
|
{
"start": 75622,
"end": 75684
}
|
/** Test fixture binding the type variable of {@code Methods} to {@code String}. */
interface ____ extends Methods<String> {
}
static
|
TypedMethods
|
java
|
apache__kafka
|
streams/src/main/java/org/apache/kafka/streams/kstream/internals/GroupedInternal.java
|
{
"start": 962,
"end": 1294
}
|
class ____<K, V> extends Grouped<K, V> {
    // Copy constructor: adopts all state of the given Grouped via the superclass.
    public GroupedInternal(final Grouped<K, V> grouped) {
        super(grouped);
    }
    // Accessors below expose state inherited from Grouped (the fields are
    // referenced without local declarations, so they live in the superclass).
    public Serde<K> keySerde() {
        return keySerde;
    }
    public Serde<V> valueSerde() {
        return valueSerde;
    }
    public String name() {
        return name;
    }
}
|
GroupedInternal
|
java
|
google__error-prone
|
check_api/src/main/java/com/google/errorprone/bugpatterns/BugChecker.java
|
{
"start": 19512,
"end": 19676
}
|
interface ____ extends Suppressible {
  /**
   * Invoked for each parameterized type tree encountered by the checker.
   *
   * @param tree the parameterized type under inspection
   * @param state the current visitor state
   * @return the {@link Description} of the match result
   */
  Description matchParameterizedType(ParameterizedTypeTree tree, VisitorState state);
}
public
|
ParameterizedTypeTreeMatcher
|
java
|
apache__maven
|
api/maven-api-core/src/main/java/org/apache/maven/api/services/Request.java
|
{
"start": 1931,
"end": 2428
}
|
interface ____ designed to be extended by specific request types that handle
* different Maven operations. All implementations must be immutable to ensure thread safety
* and predictable behavior in concurrent environments.
*
* @param <S> the type of ProtoSession associated with this request, allowing for
* type-safe session handling in specific request implementations
*
* @see ProtoSession
* @see RequestTrace
* @see Result
* @since 4.0.0
*/
@Experimental
@Immutable
public
|
is
|
java
|
quarkusio__quarkus
|
extensions/devui/deployment/src/main/java/io/quarkus/devui/deployment/MvnpmBuildItem.java
|
{
"start": 185,
"end": 433
}
|
class ____ extends SimpleBuildItem {
    // Locations of the mvnpm jars carried by this build item.
    // NOTE(review): the set is stored and returned as-is (no defensive copy or
    // unmodifiable wrapper), so callers could mutate it — confirm intended.
    private final Set<URL> mvnpmJars;
    public MvnpmBuildItem(Set<URL> mvnpmJars) {
        this.mvnpmJars = mvnpmJars;
    }
    public Set<URL> getMvnpmJars() {
        return mvnpmJars;
    }
}
|
MvnpmBuildItem
|
java
|
apache__logging-log4j2
|
log4j-core-test/src/test/java/org/apache/logging/log4j/core/LoggerDateTest.java
|
{
"start": 1196,
"end": 1630
}
|
class ____ {

    private final FileAppender fileApp;

    public LoggerDateTest(@Named("File") final FileAppender fileApp) {
        this.fileApp = fileApp;
    }

    /**
     * The appender's configured file name contains a date lookup; verify the
     * lookup was substituted by checking for the current calendar year.
     */
    @Test
    void testFileName() {
        final int currentYear = Calendar.getInstance().get(Calendar.YEAR);
        final String resolvedName = fileApp.getFileName();
        assertTrue(resolvedName.contains(String.valueOf(currentYear)),
                "Date was not substituted: " + resolvedName);
    }
}
|
LoggerDateTest
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/select/MySqlSelectTest_159.java
|
{
"start": 555,
"end": 37943
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "SELECT COUNT(*) AS COUNT \n" +
"FROM ( \n" +
"\tSELECT dcm.name AS 渠道, 大组, dcm1.name AS 城市, 供应商ID, 供应商名称 , 品牌ID, 品牌名称, round(成本汇总.订单实付 / 100, 6) AS 订单实付, round(成本汇总.理论成交 / 100, 6) AS 理论成交, round((成本汇总.折后毛利 + 成本汇总.猫超承担店铺优惠券金额 + 成本汇总.猫超承担店铺优惠券其他金额 + 成本汇总.猫超承担跨店优惠非金本位 + 成本汇总.猫超承担跨店优惠金本位 + 成本汇总.超级会员折扣 + 成本汇总.猫超承担补贴金额) / 100, 6) AS 折前毛利 , concat(round((成本汇总.折后毛利 + 成本汇总.猫超承担店铺优惠券金额 + 成本汇总.猫超承担店铺优惠券其他金额 + 成本汇总.猫超承担跨店优惠非金本位 + 成本汇总.猫超承担跨店优惠金本位 + 成本汇总.超级会员折扣 + 成本汇总.猫超承担补贴金额) / 成本汇总.理论成交 * 100, 6), '%') AS 折前毛利率, round(成本汇总.猫超承担店铺优惠券金额 / 100, 6) AS 猫超承担店铺优惠券金额, concat(round(成本汇总.猫超承担店铺优惠券金额 / 成本汇总.理论成交, 6), '%') AS 店铺优惠券占比, round(成本汇总.猫超承担店铺优惠券其他金额 / 100, 6) AS 猫超承担店铺优惠券其他金额, concat(round(成本汇总.猫超承担店铺优惠券其他金额 / 成本汇总.理论成交, 6), '%') AS 店铺优惠其他占比 , round(成本汇总.猫超承担跨店优惠非金本位 / 100, 6) AS 猫超承担跨店优惠非金本位, concat(round(成本汇总.猫超承担跨店优惠非金本位 / 成本汇总.理论成交 * 100, 6), '%') AS 跨店优惠非金本位占比, round(成本汇总.猫超承担跨店优惠金本位 / 100, 6) AS 猫超承担跨店优惠金本位, concat(round(成本汇总.猫超承担跨店优惠金本位 / 成本汇总.理论成交 * 100, 6), '%') AS 跨店优惠金本位占比, round(成本汇总.超级会员折扣 / 100, 6) AS 超级会员折扣 , concat(round(成本汇总.超级会员折扣 / 成本汇总.理论成交 * 100, 6), '%') AS 超级会员折扣占比, round(成本汇总.猫超承担补贴金额 / 100, 6) AS 猫超承担补贴金额, concat(round(成本汇总.猫超承担补贴金额 / 成本汇总.理论成交 * 100, 6), '%') AS 补贴占比, round(成本汇总.折后毛利 / 100, 6) AS 折后毛利, concat(round(成本汇总.折后毛利 / 成本汇总.理论成交 * 100, 6), '%') AS 折后毛利率 , 物流成本, 物流成本占比, round(成本汇总.折后毛利 / 100, 6) AS 运营毛利, concat(round(成本汇总.折后毛利 / 成本汇总.理论成交 * 100, 6), '%') AS 运营毛利率, round(成本汇总.物流收入 / 100, 6) AS 物流收入 , round(成本汇总.淘客收入 / 100, 6) AS 淘客收入, 免费货收入, round((成本汇总.折后毛利 + 成本汇总.物流收入) / 100, 6) AS 综合毛利, concat(round((成本汇总.折后毛利 + 成本汇总.物流收入) / 成本汇总.理论成交 * 100, 6), '%') AS 综合毛利率, round(成本汇总.正向分账金额 / 100, 6) AS 正向分账金额 , round(成本汇总.逆向分账金额 / 100, 6) AS 逆向分账金额, round(成本汇总.正向营销费用 / 100, 6) AS 正向营销费用, round(成本汇总.逆向营销费用 / 100, 6) AS 逆向营销费用 \n" +
"FROM (\n" +
"\tSELECT CASE WHEN '0' <> '0' THEN channel ELSE CAST(-1 AS bigint) END AS 渠道\n" +
"\t\t, CASE WHEN '0' <> '0' THEN group_name ELSE '-' END AS 大组\n" +
"\t\t, CASE WHEN '0' <> '0' THEN city ELSE '-1' END AS 城市\n" +
"\t\t, CASE WHEN '0' <> '0' THEN supplier_code ELSE '-' END AS 供应商ID\n" +
"\t\t, CASE WHEN '0' <> '0' THEN supplier_name ELSE '-' END AS 供应商名称 \n" +
"\t\t, CASE WHEN '0' <> '0' THEN CAST(brand_id AS bigint) ELSE CAST(-1 AS bigint) END AS 品牌ID\n" +
"\t\t, CASE WHEN '0' <> '0' THEN brand_name ELSE '-' END AS 品牌名称, SUM(abs(trade_paid_money) - abs(refund_paid_money)) AS 订单实付, SUM(abs(trade_paid_money) - abs(refund_paid_money) + abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee) + abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee) + abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee) + abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee) + abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee) + abs(trade_susidy_fee) - abs(refund_susidy_fee) + abs(trade_shop_coupon_supp_burden_fee) - abs(refund_shop_coupon_supp_burden_fee) + abs(trade_shop_coupon_other_supp_burden_fee) - abs(refund_shop_coupon_other_supp_burden_fee) + abs(trade_tmall_coupon_not_gold_supp_burden_fee) - abs(refund_tmall_coupon_not_gold_supp_burden_fee) + abs(trade_tmall_coupon_gold1_supp_burden_fee) - abs(refund_tmall_coupon_gold1_supp_burden_fee) + abs(trade_tmall_coupon_gold2_supp_burden_fee) - abs(refund_tmall_coupon_gold2_supp_burden_fee) + (abs(trade_tmall_vip_supp_burden_fee) - abs(refund_tmall_vip_supp_burden_fee))) AS 理论成交, SUM(abs(trade_paid_money) - abs(refund_paid_money) - abs(trade_payment_amount) + abs(trade_marketing_fee) + abs(taoke_trade_money) + abs(refund_payment_amount) - abs(refund_marketing_fee) - abs(taoke_refund_money) + abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee) + abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee) + abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee) + abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - 
abs(refund_tmall_coupon_gold2_biz_burden_fee) + abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee) + (abs(trade_susidy_fee) - abs(refund_susidy_fee))) AS 折前毛利 , SUM(abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee)) AS 猫超承担店铺优惠券金额, SUM(abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee)) AS 猫超承担店铺优惠券其他金额, SUM(abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee)) AS 猫超承担跨店优惠非金本位, SUM(abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee)) AS 猫超承担跨店优惠金本位, SUM(abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee)) AS 超级会员折扣 , SUM(abs(trade_susidy_fee) - abs(refund_susidy_fee)) AS 猫超承担补贴金额, SUM(abs(trade_paid_money) - abs(refund_paid_money) - abs(trade_payment_amount) + abs(trade_marketing_fee) + abs(taoke_trade_money) + abs(refund_payment_amount) - abs(refund_marketing_fee) - abs(taoke_refund_money)) AS 折后毛利, '-' AS 物流成本, '-' AS 物流成本占比, SUM(abs(trade_postfee_share)) AS 物流收入 , SUM(abs(taoke_trade_money) - abs(taoke_refund_money)) AS 淘客收入, '-' AS 免费货收入, SUM(trade_payment_amount) AS 正向分账金额, SUM(refund_payment_amount) AS 逆向分账金额, SUM(trade_marketing_fee) AS 正向营销费用 , SUM(refund_marketing_fee) AS 逆向营销费用 \n" +
"FROM dws_ascm_cost_di WHERE 1 = 1 AND 1 = 1 AND 1 = 1 AND 1 = 1 AND 1 = 1 AND brand_id = '119079' AND stat_date >= '20170401' AND stat_date <= '20180228' GROUP BY 1, CASE WHEN '0' <> '0' THEN channel ELSE CAST(-1 AS bigint) END, CASE WHEN '0' <> '0' THEN group_name ELSE '-' END, CASE WHEN '0' <> '0' THEN city ELSE '-1' END, CASE WHEN '0' <> '0' THEN supplier_code ELSE '-' END, CASE WHEN '0' <> '0' THEN supplier_name ELSE '-' END, CASE WHEN '0' <> '0' THEN CAST(brand_id AS bigint) ELSE CAST(-1 AS bigint) END, CASE WHEN '0' <> '0' THEN brand_name ELSE '-' END ) 成本汇总 LEFT JOIN dim_channel_maochao dcm ON 成本汇总.渠道 = dcm.id LEFT JOIN dim_city_maochao dcm1 ON CAST(成本汇总.城市 AS bigint) = dcm1.id ) quark_t1";
List<SQLStatement> statementList = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL, SQLParserFeature.TDDLHint);
SQLSelectStatement stmt = (SQLSelectStatement) statementList.get(0);
assertEquals(1, statementList.size());
assertEquals("SELECT COUNT(*) AS COUNT\n" +
"FROM (\n" +
"\tSELECT dcm.name AS 渠道, 大组, dcm1.name AS 城市, 供应商ID, 供应商名称\n" +
"\t\t, 品牌ID, 品牌名称\n" +
"\t\t, round(成本汇总.订单实付 / 100, 6) AS 订单实付\n" +
"\t\t, round(成本汇总.理论成交 / 100, 6) AS 理论成交\n" +
"\t\t, round((成本汇总.折后毛利 + 成本汇总.猫超承担店铺优惠券金额 + 成本汇总.猫超承担店铺优惠券其他金额 + 成本汇总.猫超承担跨店优惠非金本位 + 成本汇总.猫超承担跨店优惠金本位 + 成本汇总.超级会员折扣 + 成本汇总.猫超承担补贴金额) / 100, 6) AS 折前毛利\n" +
"\t\t, concat(round((成本汇总.折后毛利 + 成本汇总.猫超承担店铺优惠券金额 + 成本汇总.猫超承担店铺优惠券其他金额 + 成本汇总.猫超承担跨店优惠非金本位 + 成本汇总.猫超承担跨店优惠金本位 + 成本汇总.超级会员折扣 + 成本汇总.猫超承担补贴金额) / 成本汇总.理论成交 * 100, 6), '%') AS 折前毛利率\n" +
"\t\t, round(成本汇总.猫超承担店铺优惠券金额 / 100, 6) AS 猫超承担店铺优惠券金额\n" +
"\t\t, concat(round(成本汇总.猫超承担店铺优惠券金额 / 成本汇总.理论成交, 6), '%') AS 店铺优惠券占比\n" +
"\t\t, round(成本汇总.猫超承担店铺优惠券其他金额 / 100, 6) AS 猫超承担店铺优惠券其他金额\n" +
"\t\t, concat(round(成本汇总.猫超承担店铺优惠券其他金额 / 成本汇总.理论成交, 6), '%') AS 店铺优惠其他占比\n" +
"\t\t, round(成本汇总.猫超承担跨店优惠非金本位 / 100, 6) AS 猫超承担跨店优惠非金本位\n" +
"\t\t, concat(round(成本汇总.猫超承担跨店优惠非金本位 / 成本汇总.理论成交 * 100, 6), '%') AS 跨店优惠非金本位占比\n" +
"\t\t, round(成本汇总.猫超承担跨店优惠金本位 / 100, 6) AS 猫超承担跨店优惠金本位\n" +
"\t\t, concat(round(成本汇总.猫超承担跨店优惠金本位 / 成本汇总.理论成交 * 100, 6), '%') AS 跨店优惠金本位占比\n" +
"\t\t, round(成本汇总.超级会员折扣 / 100, 6) AS 超级会员折扣\n" +
"\t\t, concat(round(成本汇总.超级会员折扣 / 成本汇总.理论成交 * 100, 6), '%') AS 超级会员折扣占比\n" +
"\t\t, round(成本汇总.猫超承担补贴金额 / 100, 6) AS 猫超承担补贴金额\n" +
"\t\t, concat(round(成本汇总.猫超承担补贴金额 / 成本汇总.理论成交 * 100, 6), '%') AS 补贴占比\n" +
"\t\t, round(成本汇总.折后毛利 / 100, 6) AS 折后毛利\n" +
"\t\t, concat(round(成本汇总.折后毛利 / 成本汇总.理论成交 * 100, 6), '%') AS 折后毛利率\n" +
"\t\t, 物流成本, 物流成本占比\n" +
"\t\t, round(成本汇总.折后毛利 / 100, 6) AS 运营毛利\n" +
"\t\t, concat(round(成本汇总.折后毛利 / 成本汇总.理论成交 * 100, 6), '%') AS 运营毛利率\n" +
"\t\t, round(成本汇总.物流收入 / 100, 6) AS 物流收入\n" +
"\t\t, round(成本汇总.淘客收入 / 100, 6) AS 淘客收入\n" +
"\t\t, 免费货收入\n" +
"\t\t, round((成本汇总.折后毛利 + 成本汇总.物流收入) / 100, 6) AS 综合毛利\n" +
"\t\t, concat(round((成本汇总.折后毛利 + 成本汇总.物流收入) / 成本汇总.理论成交 * 100, 6), '%') AS 综合毛利率\n" +
"\t\t, round(成本汇总.正向分账金额 / 100, 6) AS 正向分账金额\n" +
"\t\t, round(成本汇总.逆向分账金额 / 100, 6) AS 逆向分账金额\n" +
"\t\t, round(成本汇总.正向营销费用 / 100, 6) AS 正向营销费用\n" +
"\t\t, round(成本汇总.逆向营销费用 / 100, 6) AS 逆向营销费用\n" +
"\tFROM (\n" +
"\t\tSELECT CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN channel\n" +
"\t\t\t\tELSE CAST(-1 AS bigint)\n" +
"\t\t\tEND AS 渠道\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN group_name\n" +
"\t\t\t\tELSE '-'\n" +
"\t\t\tEND AS 大组\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN city\n" +
"\t\t\t\tELSE '-1'\n" +
"\t\t\tEND AS 城市\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN supplier_code\n" +
"\t\t\t\tELSE '-'\n" +
"\t\t\tEND AS 供应商ID\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN supplier_name\n" +
"\t\t\t\tELSE '-'\n" +
"\t\t\tEND AS 供应商名称\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN CAST(brand_id AS bigint)\n" +
"\t\t\t\tELSE CAST(-1 AS bigint)\n" +
"\t\t\tEND AS 品牌ID\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN brand_name\n" +
"\t\t\t\tELSE '-'\n" +
"\t\t\tEND AS 品牌名称\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money)) AS 订单实付\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money) + abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee) + abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee) + abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee) + abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee) + abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee) + abs(trade_susidy_fee) - abs(refund_susidy_fee) + abs(trade_shop_coupon_supp_burden_fee) - abs(refund_shop_coupon_supp_burden_fee) + abs(trade_shop_coupon_other_supp_burden_fee) - abs(refund_shop_coupon_other_supp_burden_fee) + abs(trade_tmall_coupon_not_gold_supp_burden_fee) - abs(refund_tmall_coupon_not_gold_supp_burden_fee) + abs(trade_tmall_coupon_gold1_supp_burden_fee) - abs(refund_tmall_coupon_gold1_supp_burden_fee) + abs(trade_tmall_coupon_gold2_supp_burden_fee) - abs(refund_tmall_coupon_gold2_supp_burden_fee) + (abs(trade_tmall_vip_supp_burden_fee) - abs(refund_tmall_vip_supp_burden_fee))) AS 理论成交\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money) - abs(trade_payment_amount) + abs(trade_marketing_fee) + abs(taoke_trade_money) + abs(refund_payment_amount) - abs(refund_marketing_fee) - abs(taoke_refund_money) + abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee) + abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee) + abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee) + abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee) + abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee) + (abs(trade_susidy_fee) - abs(refund_susidy_fee))) AS 折前毛利\n" +
"\t\t\t, SUM(abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee)) AS 猫超承担店铺优惠券金额\n" +
"\t\t\t, SUM(abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee)) AS 猫超承担店铺优惠券其他金额\n" +
"\t\t\t, SUM(abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee)) AS 猫超承担跨店优惠非金本位\n" +
"\t\t\t, SUM(abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee)) AS 猫超承担跨店优惠金本位\n" +
"\t\t\t, SUM(abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee)) AS 超级会员折扣\n" +
"\t\t\t, SUM(abs(trade_susidy_fee) - abs(refund_susidy_fee)) AS 猫超承担补贴金额\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money) - abs(trade_payment_amount) + abs(trade_marketing_fee) + abs(taoke_trade_money) + abs(refund_payment_amount) - abs(refund_marketing_fee) - abs(taoke_refund_money)) AS 折后毛利\n" +
"\t\t\t, '-' AS 物流成本, '-' AS 物流成本占比, SUM(abs(trade_postfee_share)) AS 物流收入\n" +
"\t\t\t, SUM(abs(taoke_trade_money) - abs(taoke_refund_money)) AS 淘客收入\n" +
"\t\t\t, '-' AS 免费货收入, SUM(trade_payment_amount) AS 正向分账金额, SUM(refund_payment_amount) AS 逆向分账金额\n" +
"\t\t\t, SUM(trade_marketing_fee) AS 正向营销费用, SUM(refund_marketing_fee) AS 逆向营销费用\n" +
"\t\tFROM dws_ascm_cost_di\n" +
"\t\tWHERE 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND brand_id = '119079'\n" +
"\t\t\tAND stat_date >= '20170401'\n" +
"\t\t\tAND stat_date <= '20180228'\n" +
"\t\tGROUP BY 1, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN channel\n" +
"\t\t\t\tELSE CAST(-1 AS bigint)\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN group_name\n" +
"\t\t\t\tELSE '-'\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN city\n" +
"\t\t\t\tELSE '-1'\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN supplier_code\n" +
"\t\t\t\tELSE '-'\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN supplier_name\n" +
"\t\t\t\tELSE '-'\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN CAST(brand_id AS bigint)\n" +
"\t\t\t\tELSE CAST(-1 AS bigint)\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN '0' <> '0' THEN brand_name\n" +
"\t\t\t\tELSE '-'\n" +
"\t\t\tEND\n" +
"\t) 成本汇总\n" +
"\t\tLEFT JOIN dim_channel_maochao dcm ON 成本汇总.渠道 = dcm.id\n" +
"\t\tLEFT JOIN dim_city_maochao dcm1 ON CAST(成本汇总.城市 AS bigint) = dcm1.id\n" +
") quark_t1", stmt.toString());
assertEquals("SELECT COUNT(*) AS COUNT\n" +
"FROM (\n" +
"\tSELECT dcm.name AS 渠道, 大组, dcm1.name AS 城市, 供应商ID, 供应商名称\n" +
"\t\t, 品牌ID, 品牌名称\n" +
"\t\t, round(成本汇总.订单实付 / ?, ?) AS 订单实付\n" +
"\t\t, round(成本汇总.理论成交 / ?, ?) AS 理论成交\n" +
"\t\t, round((成本汇总.折后毛利 + 成本汇总.猫超承担店铺优惠券金额 + 成本汇总.猫超承担店铺优惠券其他金额 + 成本汇总.猫超承担跨店优惠非金本位 + 成本汇总.猫超承担跨店优惠金本位 + 成本汇总.超级会员折扣 + 成本汇总.猫超承担补贴金额) / ?, ?) AS 折前毛利\n" +
"\t\t, concat(round((成本汇总.折后毛利 + 成本汇总.猫超承担店铺优惠券金额 + 成本汇总.猫超承担店铺优惠券其他金额 + 成本汇总.猫超承担跨店优惠非金本位 + 成本汇总.猫超承担跨店优惠金本位 + 成本汇总.超级会员折扣 + 成本汇总.猫超承担补贴金额) / 成本汇总.理论成交 * ?, ?), ?) AS 折前毛利率\n" +
"\t\t, round(成本汇总.猫超承担店铺优惠券金额 / ?, ?) AS 猫超承担店铺优惠券金额\n" +
"\t\t, concat(round(成本汇总.猫超承担店铺优惠券金额 / 成本汇总.理论成交, ?), ?) AS 店铺优惠券占比\n" +
"\t\t, round(成本汇总.猫超承担店铺优惠券其他金额 / ?, ?) AS 猫超承担店铺优惠券其他金额\n" +
"\t\t, concat(round(成本汇总.猫超承担店铺优惠券其他金额 / 成本汇总.理论成交, ?), ?) AS 店铺优惠其他占比\n" +
"\t\t, round(成本汇总.猫超承担跨店优惠非金本位 / ?, ?) AS 猫超承担跨店优惠非金本位\n" +
"\t\t, concat(round(成本汇总.猫超承担跨店优惠非金本位 / 成本汇总.理论成交 * ?, ?), ?) AS 跨店优惠非金本位占比\n" +
"\t\t, round(成本汇总.猫超承担跨店优惠金本位 / ?, ?) AS 猫超承担跨店优惠金本位\n" +
"\t\t, concat(round(成本汇总.猫超承担跨店优惠金本位 / 成本汇总.理论成交 * ?, ?), ?) AS 跨店优惠金本位占比\n" +
"\t\t, round(成本汇总.超级会员折扣 / ?, ?) AS 超级会员折扣\n" +
"\t\t, concat(round(成本汇总.超级会员折扣 / 成本汇总.理论成交 * ?, ?), ?) AS 超级会员折扣占比\n" +
"\t\t, round(成本汇总.猫超承担补贴金额 / ?, ?) AS 猫超承担补贴金额\n" +
"\t\t, concat(round(成本汇总.猫超承担补贴金额 / 成本汇总.理论成交 * ?, ?), ?) AS 补贴占比\n" +
"\t\t, round(成本汇总.折后毛利 / ?, ?) AS 折后毛利\n" +
"\t\t, concat(round(成本汇总.折后毛利 / 成本汇总.理论成交 * ?, ?), ?) AS 折后毛利率\n" +
"\t\t, 物流成本, 物流成本占比\n" +
"\t\t, round(成本汇总.折后毛利 / ?, ?) AS 运营毛利\n" +
"\t\t, concat(round(成本汇总.折后毛利 / 成本汇总.理论成交 * ?, ?), ?) AS 运营毛利率\n" +
"\t\t, round(成本汇总.物流收入 / ?, ?) AS 物流收入\n" +
"\t\t, round(成本汇总.淘客收入 / ?, ?) AS 淘客收入\n" +
"\t\t, 免费货收入\n" +
"\t\t, round((成本汇总.折后毛利 + 成本汇总.物流收入) / ?, ?) AS 综合毛利\n" +
"\t\t, concat(round((成本汇总.折后毛利 + 成本汇总.物流收入) / 成本汇总.理论成交 * ?, ?), ?) AS 综合毛利率\n" +
"\t\t, round(成本汇总.正向分账金额 / ?, ?) AS 正向分账金额\n" +
"\t\t, round(成本汇总.逆向分账金额 / ?, ?) AS 逆向分账金额\n" +
"\t\t, round(成本汇总.正向营销费用 / ?, ?) AS 正向营销费用\n" +
"\t\t, round(成本汇总.逆向营销费用 / ?, ?) AS 逆向营销费用\n" +
"\tFROM (\n" +
"\t\tSELECT CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN channel\n" +
"\t\t\t\tELSE CAST(? AS bigint)\n" +
"\t\t\tEND AS 渠道\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN group_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 大组\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN city\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 城市\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN supplier_code\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 供应商ID\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN supplier_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 供应商名称\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN CAST(brand_id AS bigint)\n" +
"\t\t\t\tELSE CAST(? AS bigint)\n" +
"\t\t\tEND AS 品牌ID\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN brand_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 品牌名称\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money)) AS 订单实付\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money) + abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee) + abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee) + abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee) + abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee) + abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee) + abs(trade_susidy_fee) - abs(refund_susidy_fee) + abs(trade_shop_coupon_supp_burden_fee) - abs(refund_shop_coupon_supp_burden_fee) + abs(trade_shop_coupon_other_supp_burden_fee) - abs(refund_shop_coupon_other_supp_burden_fee) + abs(trade_tmall_coupon_not_gold_supp_burden_fee) - abs(refund_tmall_coupon_not_gold_supp_burden_fee) + abs(trade_tmall_coupon_gold1_supp_burden_fee) - abs(refund_tmall_coupon_gold1_supp_burden_fee) + abs(trade_tmall_coupon_gold2_supp_burden_fee) - abs(refund_tmall_coupon_gold2_supp_burden_fee) + (abs(trade_tmall_vip_supp_burden_fee) - abs(refund_tmall_vip_supp_burden_fee))) AS 理论成交\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money) - abs(trade_payment_amount) + abs(trade_marketing_fee) + abs(taoke_trade_money) + abs(refund_payment_amount) - abs(refund_marketing_fee) - abs(taoke_refund_money) + abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee) + abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee) + abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee) + abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee) + abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee) + (abs(trade_susidy_fee) - abs(refund_susidy_fee))) AS 折前毛利\n" +
"\t\t\t, SUM(abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee)) AS 猫超承担店铺优惠券金额\n" +
"\t\t\t, SUM(abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee)) AS 猫超承担店铺优惠券其他金额\n" +
"\t\t\t, SUM(abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee)) AS 猫超承担跨店优惠非金本位\n" +
"\t\t\t, SUM(abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee)) AS 猫超承担跨店优惠金本位\n" +
"\t\t\t, SUM(abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee)) AS 超级会员折扣\n" +
"\t\t\t, SUM(abs(trade_susidy_fee) - abs(refund_susidy_fee)) AS 猫超承担补贴金额\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money) - abs(trade_payment_amount) + abs(trade_marketing_fee) + abs(taoke_trade_money) + abs(refund_payment_amount) - abs(refund_marketing_fee) - abs(taoke_refund_money)) AS 折后毛利\n" +
"\t\t\t, ? AS 物流成本, ? AS 物流成本占比, SUM(abs(trade_postfee_share)) AS 物流收入\n" +
"\t\t\t, SUM(abs(taoke_trade_money) - abs(taoke_refund_money)) AS 淘客收入\n" +
"\t\t\t, ? AS 免费货收入, SUM(trade_payment_amount) AS 正向分账金额, SUM(refund_payment_amount) AS 逆向分账金额\n" +
"\t\t\t, SUM(trade_marketing_fee) AS 正向营销费用, SUM(refund_marketing_fee) AS 逆向营销费用\n" +
"\t\tFROM dws_ascm_cost_di\n" +
"\t\tWHERE 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND brand_id = ?\n" +
"\t\t\tAND stat_date >= ?\n" +
"\t\t\tAND stat_date <= ?\n" +
"\t\tGROUP BY 1, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN channel\n" +
"\t\t\t\tELSE CAST(? AS bigint)\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN group_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN city\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN supplier_code\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN supplier_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN CAST(brand_id AS bigint)\n" +
"\t\t\t\tELSE CAST(? AS bigint)\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN brand_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND\n" +
"\t) 成本汇总\n" +
"\t\tLEFT JOIN dim_channel_maochao dcm ON 成本汇总.渠道 = dcm.id\n" +
"\t\tLEFT JOIN dim_city_maochao dcm1 ON CAST(成本汇总.城市 AS bigint) = dcm1.id\n" +
") quark_t1",
ParameterizedOutputVisitorUtils.parameterize(sql, JdbcConstants.MYSQL, VisitorFeature.OutputParameterizedZeroReplaceNotUseOriginalSql));
List<Object> params = new ArrayList<Object>();
assertEquals("SELECT COUNT(*) AS COUNT\n" +
"FROM (\n" +
"\tSELECT dcm.name AS 渠道, 大组, dcm1.name AS 城市, 供应商ID, 供应商名称\n" +
"\t\t, 品牌ID, 品牌名称\n" +
"\t\t, round(成本汇总.订单实付 / ?, ?) AS 订单实付\n" +
"\t\t, round(成本汇总.理论成交 / ?, ?) AS 理论成交\n" +
"\t\t, round((成本汇总.折后毛利 + 成本汇总.猫超承担店铺优惠券金额 + 成本汇总.猫超承担店铺优惠券其他金额 + 成本汇总.猫超承担跨店优惠非金本位 + 成本汇总.猫超承担跨店优惠金本位 + 成本汇总.超级会员折扣 + 成本汇总.猫超承担补贴金额) / ?, ?) AS 折前毛利\n" +
"\t\t, concat(round((成本汇总.折后毛利 + 成本汇总.猫超承担店铺优惠券金额 + 成本汇总.猫超承担店铺优惠券其他金额 + 成本汇总.猫超承担跨店优惠非金本位 + 成本汇总.猫超承担跨店优惠金本位 + 成本汇总.超级会员折扣 + 成本汇总.猫超承担补贴金额) / 成本汇总.理论成交 * ?, ?), ?) AS 折前毛利率\n" +
"\t\t, round(成本汇总.猫超承担店铺优惠券金额 / ?, ?) AS 猫超承担店铺优惠券金额\n" +
"\t\t, concat(round(成本汇总.猫超承担店铺优惠券金额 / 成本汇总.理论成交, ?), ?) AS 店铺优惠券占比\n" +
"\t\t, round(成本汇总.猫超承担店铺优惠券其他金额 / ?, ?) AS 猫超承担店铺优惠券其他金额\n" +
"\t\t, concat(round(成本汇总.猫超承担店铺优惠券其他金额 / 成本汇总.理论成交, ?), ?) AS 店铺优惠其他占比\n" +
"\t\t, round(成本汇总.猫超承担跨店优惠非金本位 / ?, ?) AS 猫超承担跨店优惠非金本位\n" +
"\t\t, concat(round(成本汇总.猫超承担跨店优惠非金本位 / 成本汇总.理论成交 * ?, ?), ?) AS 跨店优惠非金本位占比\n" +
"\t\t, round(成本汇总.猫超承担跨店优惠金本位 / ?, ?) AS 猫超承担跨店优惠金本位\n" +
"\t\t, concat(round(成本汇总.猫超承担跨店优惠金本位 / 成本汇总.理论成交 * ?, ?), ?) AS 跨店优惠金本位占比\n" +
"\t\t, round(成本汇总.超级会员折扣 / ?, ?) AS 超级会员折扣\n" +
"\t\t, concat(round(成本汇总.超级会员折扣 / 成本汇总.理论成交 * ?, ?), ?) AS 超级会员折扣占比\n" +
"\t\t, round(成本汇总.猫超承担补贴金额 / ?, ?) AS 猫超承担补贴金额\n" +
"\t\t, concat(round(成本汇总.猫超承担补贴金额 / 成本汇总.理论成交 * ?, ?), ?) AS 补贴占比\n" +
"\t\t, round(成本汇总.折后毛利 / ?, ?) AS 折后毛利\n" +
"\t\t, concat(round(成本汇总.折后毛利 / 成本汇总.理论成交 * ?, ?), ?) AS 折后毛利率\n" +
"\t\t, 物流成本, 物流成本占比\n" +
"\t\t, round(成本汇总.折后毛利 / ?, ?) AS 运营毛利\n" +
"\t\t, concat(round(成本汇总.折后毛利 / 成本汇总.理论成交 * ?, ?), ?) AS 运营毛利率\n" +
"\t\t, round(成本汇总.物流收入 / ?, ?) AS 物流收入\n" +
"\t\t, round(成本汇总.淘客收入 / ?, ?) AS 淘客收入\n" +
"\t\t, 免费货收入\n" +
"\t\t, round((成本汇总.折后毛利 + 成本汇总.物流收入) / ?, ?) AS 综合毛利\n" +
"\t\t, concat(round((成本汇总.折后毛利 + 成本汇总.物流收入) / 成本汇总.理论成交 * ?, ?), ?) AS 综合毛利率\n" +
"\t\t, round(成本汇总.正向分账金额 / ?, ?) AS 正向分账金额\n" +
"\t\t, round(成本汇总.逆向分账金额 / ?, ?) AS 逆向分账金额\n" +
"\t\t, round(成本汇总.正向营销费用 / ?, ?) AS 正向营销费用\n" +
"\t\t, round(成本汇总.逆向营销费用 / ?, ?) AS 逆向营销费用\n" +
"\tFROM (\n" +
"\t\tSELECT CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN channel\n" +
"\t\t\t\tELSE CAST(? AS bigint)\n" +
"\t\t\tEND AS 渠道\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN group_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 大组\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN city\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 城市\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN supplier_code\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 供应商ID\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN supplier_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 供应商名称\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN CAST(brand_id AS bigint)\n" +
"\t\t\t\tELSE CAST(? AS bigint)\n" +
"\t\t\tEND AS 品牌ID\n" +
"\t\t\t, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN brand_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND AS 品牌名称\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money)) AS 订单实付\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money) + abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee) + abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee) + abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee) + abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee) + abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee) + abs(trade_susidy_fee) - abs(refund_susidy_fee) + abs(trade_shop_coupon_supp_burden_fee) - abs(refund_shop_coupon_supp_burden_fee) + abs(trade_shop_coupon_other_supp_burden_fee) - abs(refund_shop_coupon_other_supp_burden_fee) + abs(trade_tmall_coupon_not_gold_supp_burden_fee) - abs(refund_tmall_coupon_not_gold_supp_burden_fee) + abs(trade_tmall_coupon_gold1_supp_burden_fee) - abs(refund_tmall_coupon_gold1_supp_burden_fee) + abs(trade_tmall_coupon_gold2_supp_burden_fee) - abs(refund_tmall_coupon_gold2_supp_burden_fee) + (abs(trade_tmall_vip_supp_burden_fee) - abs(refund_tmall_vip_supp_burden_fee))) AS 理论成交\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money) - abs(trade_payment_amount) + abs(trade_marketing_fee) + abs(taoke_trade_money) + abs(refund_payment_amount) - abs(refund_marketing_fee) - abs(taoke_refund_money) + abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee) + abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee) + abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee) + abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee) + abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee) + (abs(trade_susidy_fee) - abs(refund_susidy_fee))) AS 折前毛利\n" +
"\t\t\t, SUM(abs(trade_shop_coupon_biz_burden_fee) - abs(refund_shop_coupon_biz_burden_fee)) AS 猫超承担店铺优惠券金额\n" +
"\t\t\t, SUM(abs(trade_shop_coupon_other_biz_burden_fee) - abs(refund_shop_coupon_other_biz_burden_fee)) AS 猫超承担店铺优惠券其他金额\n" +
"\t\t\t, SUM(abs(trade_tmall_coupon_not_gold_biz_burden_fee) - abs(refund_tmall_coupon_not_gold_biz_burden_fee)) AS 猫超承担跨店优惠非金本位\n" +
"\t\t\t, SUM(abs(trade_tmall_coupon_gold1_biz_burden_fee) - abs(refund_tmall_coupon_gold1_biz_burden_fee) + abs(trade_tmall_coupon_gold2_biz_burden_fee) - abs(refund_tmall_coupon_gold2_biz_burden_fee)) AS 猫超承担跨店优惠金本位\n" +
"\t\t\t, SUM(abs(trade_tmall_vip_biz_burden_fee) - abs(refund_tmall_vip_biz_burden_fee)) AS 超级会员折扣\n" +
"\t\t\t, SUM(abs(trade_susidy_fee) - abs(refund_susidy_fee)) AS 猫超承担补贴金额\n" +
"\t\t\t, SUM(abs(trade_paid_money) - abs(refund_paid_money) - abs(trade_payment_amount) + abs(trade_marketing_fee) + abs(taoke_trade_money) + abs(refund_payment_amount) - abs(refund_marketing_fee) - abs(taoke_refund_money)) AS 折后毛利\n" +
"\t\t\t, ? AS 物流成本, ? AS 物流成本占比, SUM(abs(trade_postfee_share)) AS 物流收入\n" +
"\t\t\t, SUM(abs(taoke_trade_money) - abs(taoke_refund_money)) AS 淘客收入\n" +
"\t\t\t, ? AS 免费货收入, SUM(trade_payment_amount) AS 正向分账金额, SUM(refund_payment_amount) AS 逆向分账金额\n" +
"\t\t\t, SUM(trade_marketing_fee) AS 正向营销费用, SUM(refund_marketing_fee) AS 逆向营销费用\n" +
"\t\tFROM dws_ascm_cost_di\n" +
"\t\tWHERE 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND 1 = 1\n" +
"\t\t\tAND brand_id = ?\n" +
"\t\t\tAND stat_date >= ?\n" +
"\t\t\tAND stat_date <= ?\n" +
"\t\tGROUP BY 1, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN channel\n" +
"\t\t\t\tELSE CAST(? AS bigint)\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN group_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN city\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN supplier_code\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN supplier_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN CAST(brand_id AS bigint)\n" +
"\t\t\t\tELSE CAST(? AS bigint)\n" +
"\t\t\tEND, CASE\n" +
"\t\t\t\tWHEN ? <> ? THEN brand_name\n" +
"\t\t\t\tELSE ?\n" +
"\t\t\tEND\n" +
"\t) 成本汇总\n" +
"\t\tLEFT JOIN dim_channel_maochao dcm ON 成本汇总.渠道 = dcm.id\n" +
"\t\tLEFT JOIN dim_city_maochao dcm1 ON CAST(成本汇总.城市 AS bigint) = dcm1.id\n" +
") quark_t1",
ParameterizedOutputVisitorUtils.parameterize(sql, JdbcConstants.MYSQL, params, VisitorFeature.OutputParameterizedZeroReplaceNotUseOriginalSql));
assertEquals(112, params.size());
assertEquals("[100,6,100,6,100,6,100,6,\"%\",100,6,6,\"%\",100,6,6,\"%\",100,6,100,6,\"%\",100,6,100,6,\"%\",100,6,100,6,\"%\",100,6,100,6,\"%\",100,6,100,6,\"%\",100,6,100,6,\"%\",100,6,100,6,100,6,100,6,\"%\",100,6,100,6,100,6,100,6,\"0\",\"0\",-1,\"0\",\"0\",\"-\",\"0\",\"0\",\"-1\",\"0\",\"0\",\"-\",\"0\",\"0\",\"-\",\"0\",\"0\",-1,\"0\",\"0\",\"-\",\"-\",\"-\",\"-\",\"119079\",\"20170401\",\"20180228\",\"0\",\"0\",-1,\"0\",\"0\",\"-\",\"0\",\"0\",\"-1\",\"0\",\"0\",\"-\",\"0\",\"0\",\"-\",\"0\",\"0\",-1,\"0\",\"0\",\"-\"]", JSON.toJSONString(params));
}
}
|
MySqlSelectTest_159
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/TaskCounter.java
|
{
"start": 1061,
"end": 1742
}
|
enum ____ {
MAP_INPUT_RECORDS,
MAP_OUTPUT_RECORDS,
MAP_SKIPPED_RECORDS,
MAP_OUTPUT_BYTES,
MAP_OUTPUT_MATERIALIZED_BYTES,
SPLIT_RAW_BYTES,
COMBINE_INPUT_RECORDS,
COMBINE_OUTPUT_RECORDS,
REDUCE_INPUT_GROUPS,
REDUCE_SHUFFLE_BYTES,
REDUCE_INPUT_RECORDS,
REDUCE_OUTPUT_RECORDS,
REDUCE_SKIPPED_GROUPS,
REDUCE_SKIPPED_RECORDS,
SPILLED_RECORDS,
SHUFFLED_MAPS,
FAILED_SHUFFLE,
MERGED_MAP_OUTPUTS,
GC_TIME_MILLIS,
CPU_MILLISECONDS,
PHYSICAL_MEMORY_BYTES,
VIRTUAL_MEMORY_BYTES,
COMMITTED_HEAP_BYTES,
MAP_PHYSICAL_MEMORY_BYTES_MAX,
MAP_VIRTUAL_MEMORY_BYTES_MAX,
REDUCE_PHYSICAL_MEMORY_BYTES_MAX,
REDUCE_VIRTUAL_MEMORY_BYTES_MAX;
}
|
TaskCounter
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/RollupJob.java
|
{
"start": 1288,
"end": 4199
}
|
class ____ implements SimpleDiffable<RollupJob>, PersistentTaskParams {
public static final String NAME = "xpack/rollup/job";
private final Map<String, String> headers;
private final RollupJobConfig config;
private static final ParseField CONFIG = new ParseField("config");
private static final ParseField HEADERS = new ParseField("headers");
@SuppressWarnings("unchecked")
public static final ConstructingObjectParser<RollupJob, Void> PARSER = new ConstructingObjectParser<>(
NAME,
a -> new RollupJob((RollupJobConfig) a[0], (Map<String, String>) a[1])
);
static {
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.fromXContent(p, null), CONFIG);
PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HEADERS);
}
public RollupJob(RollupJobConfig config, Map<String, String> headers) {
this.config = Objects.requireNonNull(config);
this.headers = headers == null ? Collections.emptyMap() : headers;
}
public RollupJob(StreamInput in) throws IOException {
this.config = new RollupJobConfig(in);
headers = in.readMap(StreamInput::readString);
}
public RollupJobConfig getConfig() {
return config;
}
public Map<String, String> getHeaders() {
return headers;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(CONFIG.getPreferredName(), config);
assertNoAuthorizationHeader(headers);
builder.field(HEADERS.getPreferredName(), headers);
builder.endObject();
return builder;
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
config.writeTo(out);
out.writeMap(headers, StreamOutput::writeString);
}
static Diff<RollupJob> readJobDiffFrom(StreamInput in) throws IOException {
return SimpleDiffable.readDiffFrom(RollupJob::new, in);
}
public static RollupJob fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (other == null || getClass() != other.getClass()) {
return false;
}
RollupJob that = (RollupJob) other;
return Objects.equals(this.config, that.config) && Objects.equals(this.headers, that.headers);
}
@Override
public int hashCode() {
return Objects.hash(config, headers);
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.minimumCompatible();
}
}
|
RollupJob
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/issue_1300/Issue1306.java
|
{
"start": 822,
"end": 1109
}
|
class ____<ID extends Serializable> implements Cloneable, Serializable{
private static final long serialVersionUID = 4877536176216854937L;
public IdEntity() {}
public abstract ID getId();
public abstract void setId(ID id);
}
public static
|
IdEntity
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/query/NativeQueryResultTypeAutoDiscoveryTest.java
|
{
"start": 23187,
"end": 23350
}
|
class ____ extends TestedEntity<Boolean> {
@JdbcTypeCode(Types.BIT)
public Boolean getTestedProperty() {
return testedProperty;
}
}
public static
|
BitEntity
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemaupdate/foreignkeys/ForeignKeyDropTest.java
|
{
"start": 2070,
"end": 4006
}
|
class ____ {
@Test
@JiraKey(value = "HHH-11236")
public void testForeignKeyDropIsCorrectlyGenerated(
ServiceRegistryScope registryScope,
DomainModelScope modelScope,
@TempDir File tmpDir) throws Exception {
final var metadata = modelScope.getDomainModel();
metadata.orderColumns( false );
metadata.validate();
final var scriptFile = new File( tmpDir, "script.sql" );
final var schemaExport = new SchemaExport().setHaltOnError( false ).setOutputFile( scriptFile.getAbsolutePath() );
schemaExport.drop( EnumSet.of( TargetType.SCRIPT, TargetType.DATABASE ), metadata );
final Dialect dialect = registryScope.getRegistry().requireService( JdbcEnvironment.class ).getDialect();
MatcherAssert.assertThat( "The ddl foreign key drop command has not been properly generated",
checkDropForeignKeyConstraint( "CHILD_ENTITY", scriptFile, dialect ), is( true ) );
}
private boolean checkDropForeignKeyConstraint(
String tableName,
File scriptFile,
Dialect dialect) throws IOException {
boolean matches = false;
String regex = dialect.getAlterTableString( tableName );
regex += " " + dialect.getDropForeignKeyString() + " ";
if ( dialect.supportsIfExistsBeforeConstraintName() ) {
regex += "if exists ";
}
regex += "fk(.)*";
if ( dialect.supportsIfExistsAfterConstraintName() ) {
regex += " if exists";
}
return isMatching( matches, regex.toLowerCase(), scriptFile );
}
private boolean isMatching(boolean matches, String regex, File scriptFile) throws IOException {
List<String> commands = Files.readAllLines( scriptFile.toPath() );
Pattern p = Pattern.compile( regex );
for ( String line : commands ) {
final Matcher matcher = p.matcher( line.toLowerCase() );
if ( matcher.matches() ) {
matches = true;
}
}
return matches;
}
@SuppressWarnings("unused")
@Entity(name = "ParentEntity")
@Table(name = "PARENT_ENTITY")
public static
|
ForeignKeyDropTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inject/guice/AssistedParametersTest.java
|
{
"start": 1573,
"end": 2094
}
|
class ____ {
@Inject
// BUG: Diagnostic contains: java.lang.String: x, y
public TestClass1(int n, @Assisted String x, @Assisted String y, int z) {}
@Inject
// BUG: Diagnostic contains: java.lang.String, @Assisted("baz"): x, z
public TestClass1(
@Assisted("foo") int a,
@Assisted("foo") int b,
@Assisted("baz") String x,
@Assisted("baz") String z) {}
}
/** Class has constructor with two @Assisted parameters of the same type and same value. */
public
|
TestClass1
|
java
|
apache__dubbo
|
dubbo-cluster/src/main/java/org/apache/dubbo/rpc/cluster/Merger.java
|
{
"start": 900,
"end": 950
}
|
interface ____<T> {
T merge(T... items);
}
|
Merger
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerFileExpressionTest.java
|
{
"start": 1165,
"end": 3651
}
|
class ____ extends ContextTestSupport {
@Override
protected Registry createCamelRegistry() throws Exception {
Registry jndi = super.createCamelRegistry();
jndi.bind("counter", new MyGuidGenerator());
return jndi;
}
@Test
public void testConsumeFileBasedOnBeanName() throws Exception {
template.sendBodyAndHeader(fileUri("bean"), "Hello World", Exchange.FILE_NAME, "122.txt");
template.sendBodyAndHeader(fileUri("bean"), "Goodday World", Exchange.FILE_NAME, "123.txt");
template.sendBodyAndHeader(fileUri("bean"), "Bye World", Exchange.FILE_NAME, "124.txt");
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
from(fileUri("bean"
+ "?initialDelay=0&delay=10&fileName=${bean:counter.next}.txt&delete=true"))
.to("mock:result");
}
});
// we should only get one as we only poll a single file using the file
// expression
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Goodday World");
assertMockEndpointsSatisfied();
}
@Test
public void testConsumeFileBasedOnDatePattern() throws Exception {
template.sendBodyAndHeader(fileUri("date"), "Bye World", Exchange.FILE_NAME,
"myfile-20081128.txt");
template.sendBodyAndHeader(fileUri("date"), "Hello World", Exchange.FILE_NAME,
"myfile-20081129.txt");
template.sendBodyAndHeader(fileUri("date"), "Goodday World", Exchange.FILE_NAME,
context.resolveLanguage("simple").createExpression("myfile-${date:now:yyyyMMdd}.txt"));
context.addRoutes(new RouteBuilder() {
@Override
public void configure() {
// START SNIPPET: e1
from(fileUri("date"
+ "?initialDelay=0&delay=10&fileName=myfile-${date:now:yyyyMMdd}.txt"))
.convertBodyTo(String.class)
.to("mock:result");
// END SNIPPET: e1
}
});
// we should only get one as we only poll a single file using the file
// expression
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedBodiesReceived("Goodday World");
assertMockEndpointsSatisfied();
}
public static
|
FileConsumerFileExpressionTest
|
java
|
mapstruct__mapstruct
|
core/src/main/java/org/mapstruct/Mapper.java
|
{
"start": 642,
"end": 858
}
|
class ____ a mapper and activates the generation of a implementation of that type via
* MapStruct.
*
* <p>
* <strong>Example 1:</strong> Creating mapper
* </p>
* <pre><code class='java'>
* @Mapper
* public
|
as
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/script/ScriptCacheStats.java
|
{
"start": 957,
"end": 4948
}
|
class ____ deprecated in favor of ScriptStats and ScriptContextStats
public record ScriptCacheStats(Map<String, ScriptStats> context, ScriptStats general) implements Writeable, ToXContentFragment {
public ScriptCacheStats(Map<String, ScriptStats> context) {
this(Collections.unmodifiableMap(context), null);
}
public ScriptCacheStats(ScriptStats general) {
this(null, Objects.requireNonNull(general));
}
public static ScriptCacheStats read(StreamInput in) throws IOException {
boolean isContext = in.readBoolean();
if (isContext == false) {
return new ScriptCacheStats(ScriptStats.read(in));
}
int size = in.readInt();
Map<String, ScriptStats> context = Maps.newMapWithExpectedSize(size);
for (int i = 0; i < size; i++) {
String name = in.readString();
context.put(name, ScriptStats.read(in));
}
return new ScriptCacheStats(context);
}
private Map.Entry<String, ScriptStats>[] sortedContextStats() {
@SuppressWarnings("unchecked")
Map.Entry<String, ScriptStats>[] stats = context.entrySet().toArray(Map.Entry[]::new);
Arrays.sort(stats, Map.Entry.comparingByKey());
return stats;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
if (general != null) {
out.writeBoolean(false);
general.writeTo(out);
return;
}
out.writeBoolean(true);
out.writeInt(context.size());
for (Map.Entry<String, ScriptStats> stats : sortedContextStats()) {
out.writeString(stats.getKey());
stats.getValue().writeTo(out);
}
}
private static void scriptStatsToXContent(ScriptStats s, XContentBuilder builder) throws IOException {
builder.field(ScriptStats.Fields.COMPILATIONS, s.getCompilations());
builder.field(ScriptStats.Fields.CACHE_EVICTIONS, s.getCacheEvictions());
builder.field(ScriptStats.Fields.COMPILATION_LIMIT_TRIGGERED, s.getCompilationLimitTriggered());
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.SCRIPT_CACHE_STATS);
builder.startObject(Fields.SUM);
if (general != null) {
scriptStatsToXContent(general, builder);
builder.endObject().endObject();
return builder;
}
scriptStatsToXContent(sum(), builder);
builder.endObject();
builder.startArray(Fields.CONTEXTS);
for (Map.Entry<String, ScriptStats> stats : sortedContextStats()) {
builder.startObject();
builder.field(Fields.CONTEXT, stats.getKey());
scriptStatsToXContent(stats.getValue(), builder);
builder.endObject();
}
builder.endArray();
builder.endObject();
return builder;
}
/**
* Get the context specific stats, null if using general cache
*/
public Map<String, ScriptStats> getContextStats() {
return context;
}
/**
* Get the general stats, null if using context cache
*/
public ScriptStats getGeneralStats() {
return general;
}
/**
* The sum of all script stats, either the general stats or the sum of all stats of the context stats.
*/
public ScriptStats sum() {
if (general != null) {
return general;
}
long compilations = 0;
long cacheEvictions = 0;
long compilationLimitTriggered = 0;
for (ScriptStats stat : context.values()) {
compilations += stat.getCompilations();
cacheEvictions += stat.getCacheEvictions();
compilationLimitTriggered += stat.getCompilationLimitTriggered();
}
return new ScriptStats(compilations, cacheEvictions, compilationLimitTriggered, null, null);
}
static final
|
is
|
java
|
elastic__elasticsearch
|
x-pack/plugin/migrate/src/main/java/org/elasticsearch/system_indices/task/SystemIndexMigrationExecutor.java
|
{
"start": 1651,
"end": 5844
}
|
class ____ extends PersistentTasksExecutor<SystemIndexMigrationTaskParams> {
private final Client client; // NOTE: *NOT* an OriginSettingClient. We have to do that later.
private final ClusterService clusterService;
private final SystemIndices systemIndices;
private final IndexScopedSettings indexScopedSettings;
private final ThreadPool threadPool;
private final ProjectResolver projectResolver;
public SystemIndexMigrationExecutor(
Client client,
ClusterService clusterService,
SystemIndices systemIndices,
IndexScopedSettings indexScopedSettings,
ThreadPool threadPool
) {
super(SYSTEM_INDEX_UPGRADE_TASK_NAME, clusterService.threadPool().generic());
this.client = client;
this.clusterService = clusterService;
this.systemIndices = systemIndices;
this.indexScopedSettings = indexScopedSettings;
this.threadPool = threadPool;
this.projectResolver = client.projectResolver();
}
@Override
protected void nodeOperation(AllocatedPersistentTask task, SystemIndexMigrationTaskParams params, PersistentTaskState state) {
SystemIndexMigrator upgrader = (SystemIndexMigrator) task;
SystemIndexMigrationTaskState upgraderState = (SystemIndexMigrationTaskState) state;
upgrader.run(upgraderState);
}
@Override
protected AllocatedPersistentTask createTask(
long id,
String type,
String action,
TaskId parentTaskId,
PersistentTasksCustomMetadata.PersistentTask<SystemIndexMigrationTaskParams> taskInProgress,
Map<String, String> headers
) {
return new SystemIndexMigrator(
client,
id,
type,
action,
parentTaskId,
headers,
clusterService,
systemIndices,
indexScopedSettings,
threadPool,
projectResolver.getProjectId()
);
}
@Override
protected PersistentTasksCustomMetadata.Assignment doGetAssignment(
SystemIndexMigrationTaskParams params,
Collection<DiscoveryNode> candidateNodes,
ClusterState clusterState,
@Nullable ProjectId projectId
) {
// This should select from master-eligible nodes because we already require all master-eligible nodes to have all plugins installed.
// However, due to a misunderstanding, this code as-written needs to run on the master node in particular. This is not a fundamental
// problem, but more that you can't submit cluster state update tasks from non-master nodes. If we translate the process of updating
// the cluster state to a Transport action, we can revert this to selecting any master-eligible node.
DiscoveryNode discoveryNode = clusterState.nodes().getMasterNode();
if (discoveryNode == null) {
return NO_NODE_FOUND;
} else {
return new PersistentTasksCustomMetadata.Assignment(discoveryNode.getId(), "");
}
}
public static List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
return List.of(
new NamedXContentRegistry.Entry(
PersistentTaskParams.class,
new ParseField(SystemIndexMigrationTaskParams.SYSTEM_INDEX_UPGRADE_TASK_NAME),
SystemIndexMigrationTaskParams::fromXContent
),
new NamedXContentRegistry.Entry(
PersistentTaskState.class,
new ParseField(SystemIndexMigrationTaskParams.SYSTEM_INDEX_UPGRADE_TASK_NAME),
SystemIndexMigrationTaskState::fromXContent
)
);
}
public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
return List.of(
new NamedWriteableRegistry.Entry(PersistentTaskState.class, SYSTEM_INDEX_UPGRADE_TASK_NAME, SystemIndexMigrationTaskState::new),
new NamedWriteableRegistry.Entry(
PersistentTaskParams.class,
SYSTEM_INDEX_UPGRADE_TASK_NAME,
SystemIndexMigrationTaskParams::new
)
);
}
}
|
SystemIndexMigrationExecutor
|
java
|
spring-projects__spring-framework
|
spring-test/src/test/java/org/springframework/test/context/support/BootstrapTestUtilsContextInitializerTests.java
|
{
"start": 4292,
"end": 4423
}
|
class ____ {
}
@ContextConfiguration(classes = BarConfig.class, initializers = BarInitializer.class)
private static
|
InitializersFoo
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
|
{
"start": 56267,
"end": 56984
}
|
class ____ extends
RMAppTransition {
@Override
public void transition(RMAppImpl app, RMAppEvent event) {
if (app.targetedFinalState.equals(RMAppState.FAILED)
|| app.targetedFinalState.equals(RMAppState.KILLED)) {
// Ignore Attempt_Finished event if we were supposed to reach FAILED
// FINISHED state
return;
}
// pass in the earlier attempt_unregistered event, as it is needed in
// AppFinishedFinalStateSavedTransition later on
app.rememberTargetTransitions(event,
new AppFinishedFinalStateSavedTransition(app.eventCausingFinalSaving),
RMAppState.FINISHED);
};
}
private static
|
AttemptFinishedAtFinalSavingTransition
|
java
|
elastic__elasticsearch
|
client/rest/src/test/java/org/elasticsearch/client/HasAttributeNodeSelectorTests.java
|
{
"start": 1245,
"end": 2794
}
|
class ____ extends RestClientTestCase {
public void testHasAttribute() {
Node hasAttributeValue = dummyNode(singletonMap("attr", singletonList("val")));
Node hasAttributeButNotValue = dummyNode(singletonMap("attr", singletonList("notval")));
Node hasAttributeValueInList = dummyNode(singletonMap("attr", Arrays.asList("val", "notval")));
Node notHasAttribute = dummyNode(singletonMap("notattr", singletonList("val")));
List<Node> nodes = new ArrayList<>();
nodes.add(hasAttributeValue);
nodes.add(hasAttributeButNotValue);
nodes.add(hasAttributeValueInList);
nodes.add(notHasAttribute);
List<Node> expected = new ArrayList<>();
expected.add(hasAttributeValue);
expected.add(hasAttributeValueInList);
new HasAttributeNodeSelector("attr", "val").select(nodes);
assertEquals(expected, nodes);
}
private static Node dummyNode(Map<String, List<String>> attributes) {
final Set<String> roles = new TreeSet<>();
if (randomBoolean()) {
roles.add("master");
}
if (randomBoolean()) {
roles.add("data");
}
if (randomBoolean()) {
roles.add("ingest");
}
return new Node(
new HttpHost("dummy"),
Collections.<HttpHost>emptySet(),
randomAsciiAlphanumOfLength(5),
randomAsciiAlphanumOfLength(5),
new Roles(roles),
attributes
);
}
}
|
HasAttributeNodeSelectorTests
|
java
|
hibernate__hibernate-orm
|
hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/MariaDBLegacySqlAstTranslator.java
|
{
"start": 2050,
"end": 13680
}
|
class ____<T extends JdbcOperation> extends AbstractSqlAstTranslator<T> {
private final MariaDBLegacyDialect dialect;
public MariaDBLegacySqlAstTranslator(SessionFactoryImplementor sessionFactory, Statement statement, MariaDBLegacyDialect dialect) {
super( sessionFactory, statement );
this.dialect = dialect;
}
@Override
protected void visitInsertSource(InsertSelectStatement statement) {
if ( statement.getSourceSelectStatement() != null ) {
if ( statement.getConflictClause() != null ) {
final List<ColumnReference> targetColumnReferences = statement.getTargetColumns();
final List<String> columnNames = new ArrayList<>( targetColumnReferences.size() );
for ( ColumnReference targetColumnReference : targetColumnReferences ) {
columnNames.add( targetColumnReference.getColumnExpression() );
}
appendSql( "select * from " );
emulateQueryPartTableReferenceColumnAliasing(
new QueryPartTableReference(
new SelectStatement( statement.getSourceSelectStatement() ),
"excluded",
columnNames,
false,
getSessionFactory()
)
);
}
else {
statement.getSourceSelectStatement().accept( this );
}
}
else {
visitValuesList( statement.getValuesList() );
}
}
@Override
public void visitColumnReference(ColumnReference columnReference) {
final Statement currentStatement;
if ( "excluded".equals( columnReference.getQualifier() )
&& ( currentStatement = getStatementStack().getCurrent() ) instanceof InsertSelectStatement
&& ( (InsertSelectStatement) currentStatement ).getSourceSelectStatement() == null ) {
// Accessing the excluded row for an insert-values statement in the conflict clause requires the values qualifier
appendSql( "values(" );
columnReference.appendReadExpression( this, null );
append( ')' );
}
else {
super.visitColumnReference( columnReference );
}
}
@Override
protected void renderDeleteClause(DeleteStatement statement) {
appendSql( "delete" );
final Stack<Clause> clauseStack = getClauseStack();
try {
clauseStack.push( Clause.DELETE );
renderTableReferenceIdentificationVariable( statement.getTargetTable() );
if ( statement.getFromClause().getRoots().isEmpty() ) {
appendSql( " from " );
renderDmlTargetTableExpression( statement.getTargetTable() );
}
else {
visitFromClause( statement.getFromClause() );
}
}
finally {
clauseStack.pop();
}
}
@Override
protected void renderUpdateClause(UpdateStatement updateStatement) {
if ( updateStatement.getFromClause().getRoots().isEmpty() ) {
super.renderUpdateClause( updateStatement );
}
else {
appendSql( "update " );
renderFromClauseSpaces( updateStatement.getFromClause() );
}
}
@Override
protected void renderDmlTargetTableExpression(NamedTableReference tableReference) {
super.renderDmlTargetTableExpression( tableReference );
if ( getClauseStack().getCurrent() != Clause.INSERT ) {
renderTableReferenceIdentificationVariable( tableReference );
}
}
@Override
protected JdbcOperationQueryInsert translateInsert(InsertSelectStatement sqlAst) {
visitInsertStatement( sqlAst );
return new JdbcOperationQueryInsertImpl(
getSql(),
getParameterBinders(),
getAffectedTableNames(),
getUniqueConstraintNameThatMayFail(sqlAst)
);
}
@Override
protected void visitConflictClause(ConflictClause conflictClause) {
visitOnDuplicateKeyConflictClause( conflictClause );
}
	@Override
	protected String determineColumnReferenceQualifier(ColumnReference columnReference) {
		final DmlTargetColumnQualifierSupport qualifierSupport = getDialect().getDmlTargetColumnQualifierSupport();
		final MutationStatement currentDmlStatement;
		final String dmlAlias;
		// Since MariaDB does not support aliasing the insert target table,
		// we must detect column reference that are used in the conflict clause
		// and use the table expression as qualifier instead
		// Note: the short-circuiting assignments below only bind currentDmlStatement
		// and dmlAlias when the preceding conditions already passed
		if ( getClauseStack().getCurrent() != Clause.SET
				|| !( ( currentDmlStatement = getCurrentDmlStatement() ) instanceof InsertSelectStatement )
				|| ( dmlAlias = currentDmlStatement.getTargetTable().getIdentificationVariable() ) == null
				|| !dmlAlias.equals( columnReference.getQualifier() ) ) {
			// Not a conflict-clause SET against the insert target: keep the original qualifier
			return columnReference.getQualifier();
		}
		// Qualify the column reference with the table expression also when in subqueries
		else if ( qualifierSupport != DmlTargetColumnQualifierSupport.NONE || !getQueryPartStack().isEmpty() ) {
			return getCurrentDmlStatement().getTargetTable().getTableExpression();
		}
		else {
			// Qualifier neither supported nor needed: drop it entirely
			return null;
		}
	}
	@Override
	protected void renderExpressionAsClauseItem(Expression expression) {
		// Render the expression directly — no wrapping is needed as a clause item here
		expression.accept( this );
	}
	@Override
	protected void visitRecursivePath(Expression recursivePath, int sizeEstimate) {
		// MariaDB determines the type and size of a column in a recursive CTE based on the expression of the non-recursive part
		// Due to that, we have to cast the path in the non-recursive path to a varchar of appropriate size to avoid data truncation errors
		if ( sizeEstimate == -1 ) {
			// Unknown size: fall back to the default rendering
			super.visitRecursivePath( recursivePath, sizeEstimate );
		}
		else {
			// Cast to char(sizeEstimate) so the CTE column is wide enough
			appendSql( "cast(" );
			recursivePath.accept( this );
			appendSql( " as char(" );
			appendSql( sizeEstimate );
			appendSql( "))" );
		}
	}
@Override
public void visitBooleanExpressionPredicate(BooleanExpressionPredicate booleanExpressionPredicate) {
final boolean isNegated = booleanExpressionPredicate.isNegated();
if ( isNegated ) {
appendSql( "not(" );
}
booleanExpressionPredicate.getExpression().accept( this );
if ( isNegated ) {
appendSql( CLOSE_PARENTHESIS );
}
}
	// Decides whether offset/fetch must be emulated via window-function row numbering:
	// requires the offset/fetch rendering mode, window function support (10.2+),
	// and a fetch type that a plain limit clause cannot express
	protected boolean shouldEmulateFetchClause(QueryPart queryPart) {
		// Check if current query part is already row numbering to avoid infinite recursion
		return useOffsetFetchClause( queryPart ) && getQueryPartForRowNumbering() != queryPart && supportsWindowFunctions() && !isRowsOnlyFetchClauseType( queryPart );
	}
	@Override
	// Whether lateral subqueries may be emulated via intersect on this dialect
	protected boolean shouldEmulateLateralWithIntersect(QueryPart queryPart) {
		// Intersect emulation requires nested correlation when no simple query grouping is possible
		// and the query has an offset/fetch clause, so we have to disable the emulation in this case,
		// because nested correlation is not supported though
		return getDialect().supportsSimpleQueryGrouping() || !queryPart.hasOffsetOrFetchClause();
	}
@Override
public void visitQueryGroup(QueryGroup queryGroup) {
if ( shouldEmulateFetchClause( queryGroup ) ) {
emulateFetchOffsetWithWindowFunctions( queryGroup, true );
}
else {
super.visitQueryGroup( queryGroup );
}
}
@Override
public void visitQuerySpec(QuerySpec querySpec) {
if ( shouldEmulateFetchClause( querySpec ) ) {
emulateFetchOffsetWithWindowFunctions( querySpec, true );
}
else {
super.visitQuerySpec( querySpec );
}
}
	@Override
	public void visitQueryPartTableReference(QueryPartTableReference tableReference) {
		// Column aliases on derived tables are emulated rather than rendered natively
		emulateQueryPartTableReferenceColumnAliasing( tableReference );
	}
	@Override
	protected void renderDerivedTableReferenceIdentificationVariable(DerivedTableReference tableReference) {
		// Render only the plain alias, without a column alias list
		renderTableReferenceIdentificationVariable( tableReference );
	}
	@Override
	public void visitOffsetFetchClause(QueryPart queryPart) {
		// When this query part is being emulated via row numbering, the limit is
		// applied by the wrapping query, so nothing is rendered here
		if ( !isRowNumberingCurrentQueryPart() ) {
			renderCombinedLimitClause( queryPart );
		}
	}
@Override
protected void renderComparison(Expression lhs, ComparisonOperator operator, Expression rhs) {
final JdbcMappingContainer lhsExpressionType = lhs.getExpressionType();
if ( lhsExpressionType != null && lhsExpressionType.getJdbcTypeCount() == 1
&& lhsExpressionType.getSingleJdbcMapping().getJdbcType().isJson() ) {
switch ( operator ) {
case DISTINCT_FROM:
appendSql( "case when json_equals(" );
lhs.accept( this );
appendSql( ',' );
rhs.accept( this );
appendSql( ")=1 or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=1" );
break;
case NOT_DISTINCT_FROM:
appendSql( "case when json_equals(" );
lhs.accept( this );
appendSql( ',' );
rhs.accept( this );
appendSql( ")=1 or " );
lhs.accept( this );
appendSql( " is null and " );
rhs.accept( this );
appendSql( " is null then 0 else 1 end=0" );
break;
case NOT_EQUAL:
appendSql( "json_equals(" );
lhs.accept( this );
appendSql( ',' );
rhs.accept( this );
appendSql( ")=0" );
break;
case EQUAL:
appendSql( "json_equals(" );
lhs.accept( this );
appendSql( ',' );
rhs.accept( this );
appendSql( ")=1" );
break;
default:
renderComparisonDistinctOperator( lhs, operator, rhs );
break;
}
}
else {
renderComparisonDistinctOperator( lhs, operator, rhs );
}
}
@Override
protected void renderPartitionItem(Expression expression) {
if ( expression instanceof Literal ) {
appendSql( "'0'" );
}
else if ( expression instanceof Summarization ) {
Summarization summarization = (Summarization) expression;
renderCommaSeparated( summarization.getGroupings() );
appendSql( " with " );
appendSql( summarization.getKind().sqlText() );
}
else {
expression.accept( this );
}
}
	@Override
	public void visitLikePredicate(LikePredicate likePredicate) {
		if ( likePredicate.isCaseSensitive() ) {
			likePredicate.getMatchExpression().accept( this );
			if ( likePredicate.isNegated() ) {
				appendSql( " not" );
			}
			appendSql( " like " );
			// Pattern rendering accounts for the server's backslash-escape mode
			renderBackslashEscapedLikePattern(
					likePredicate.getPattern(),
					likePredicate.getEscapeCharacter(),
					dialect.isNoBackslashEscapesEnabled()
			);
		}
		else {
			// Case-insensitive matching is emulated by lower()-ing both sides
			appendSql( dialect.getLowercaseFunction() );
			appendSql( OPEN_PARENTHESIS );
			likePredicate.getMatchExpression().accept( this );
			appendSql( CLOSE_PARENTHESIS );
			if ( likePredicate.isNegated() ) {
				appendSql( " not" );
			}
			appendSql( " like " );
			appendSql( dialect.getLowercaseFunction() );
			appendSql( OPEN_PARENTHESIS );
			renderBackslashEscapedLikePattern(
					likePredicate.getPattern(),
					likePredicate.getEscapeCharacter(),
					dialect.isNoBackslashEscapesEnabled()
			);
			appendSql( CLOSE_PARENTHESIS );
		}
		if ( likePredicate.getEscapeCharacter() != null ) {
			appendSql( " escape " );
			likePredicate.getEscapeCharacter().accept( this );
		}
	}
	@Override
	// Covariant override narrowing the dialect type for MariaDB-specific checks
	public MariaDBLegacyDialect getDialect() {
		return dialect;
	}
	private boolean supportsWindowFunctions() {
		// Window functions are available starting with MariaDB 10.2
		return dialect.getVersion().isSameOrAfter( 10, 2 );
	}
@Override
public void visitCastTarget(CastTarget castTarget) {
String sqlType = MySQLSqlAstTranslator.getSqlType( castTarget, getSessionFactory() );
if ( sqlType != null ) {
appendSql( sqlType );
}
else {
super.visitCastTarget( castTarget );
}
}
	@Override
	protected void renderStringContainsExactlyPredicate(Expression haystack, Expression needle) {
		// MariaDB can't cope with NUL characters in the position function, so we use a like predicate instead
		// The needle's '~', '?' and '%' characters are escaped with '~' so the
		// pattern matches them literally
		haystack.accept( this );
		appendSql( " like concat('%',replace(replace(replace(" );
		needle.accept( this );
		appendSql( ",'~','~~'),'?','~?'),'%','~%'),'%') escape '~'" );
	}
	@Override
	protected void appendAssignmentColumn(ColumnReference column) {
		// Qualify the assignment column only for multi-table, non-insert statements;
		// otherwise render it unqualified
		column.appendColumnForWrite(
				this,
				getAffectedTableNames().size() > 1 && !(getStatement() instanceof InsertSelectStatement)
						? determineColumnReferenceQualifier( column )
						: null );
	}
}
|
MariaDBLegacySqlAstTranslator
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/transaction/operation/set/AddOperation.java
|
{
"start": 895,
"end": 2178
}
|
class ____ extends SetOperation {

    // Value to add and the name of the read lock guarding the whole set.
    private final String readLockName;
    private final Object value;

    public AddOperation(RObject set, Object value, String readLockName, String transactionId, long threadId) {
        this(set.getName(), set.getCodec(), readLockName, value, transactionId, threadId);
    }

    public AddOperation(String name, Codec codec, String readLockName, Object value, String transactionId, long threadId) {
        super(name, codec, transactionId, threadId);
        this.value = value;
        this.readLockName = readLockName;
    }

    @Override
    public void commit(CommandAsyncExecutor commandExecutor) {
        RSet<Object> set = new RedissonSet<>(codec, commandExecutor, name, null);
        // Apply the buffered add, then release the locks taken by the transaction
        set.addAsync(value);
        releaseLocks(set, commandExecutor);
    }

    @Override
    public void rollback(CommandAsyncExecutor commandExecutor) {
        // The add was never applied, so rolling back only releases the locks
        RSet<Object> set = new RedissonSet<>(codec, commandExecutor, name, null);
        releaseLocks(set, commandExecutor);
    }

    // Releases the per-value lock and the set-wide read lock held by this thread.
    private void releaseLocks(RSet<Object> set, CommandAsyncExecutor commandExecutor) {
        getLock(set, commandExecutor, value).unlockAsync(threadId);
        getReadLock(readLockName, commandExecutor).unlockAsync(threadId);
    }

    public Object getValue() {
        return value;
    }
}
|
AddOperation
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/StateBackend.java
|
{
"start": 10058,
"end": 10155
}
|
interface ____ {
    // Records a single named metric value gathered during initialization.
    void addMetric(String name, long value);
}
}
|
CustomInitializationMetrics
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java
|
{
"start": 3332,
"end": 9425
}
|
/**
 * ESQL scalar conversion function that turns a value of any supported input
 * type into a keyword string. Conversion is dispatched per input DataType
 * through the EVALUATORS factory map.
 */
class ____ extends AbstractConvertFunction implements EvaluatorMapper {
    public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "ToString", ToString::new);
    // Per-type evaluator factories; KEYWORD and TEXT pass through unchanged.
    private static final Map<DataType, BuildFactory> EVALUATORS = Map.ofEntries(
        Map.entry(KEYWORD, (source, fieldEval) -> fieldEval),
        Map.entry(BOOLEAN, ToStringFromBooleanEvaluator.Factory::new),
        Map.entry(DATETIME, ToStringFromDatetimeEvaluator.Factory::new),
        Map.entry(DATE_NANOS, ToStringFromDateNanosEvaluator.Factory::new),
        Map.entry(IP, ToStringFromIPEvaluator.Factory::new),
        Map.entry(DENSE_VECTOR, ToStringFromFloatEvaluator.Factory::new),
        Map.entry(DOUBLE, ToStringFromDoubleEvaluator.Factory::new),
        Map.entry(LONG, ToStringFromLongEvaluator.Factory::new),
        Map.entry(INTEGER, ToStringFromIntEvaluator.Factory::new),
        Map.entry(TEXT, (source, fieldEval) -> fieldEval),
        Map.entry(VERSION, ToStringFromVersionEvaluator.Factory::new),
        Map.entry(UNSIGNED_LONG, ToStringFromUnsignedLongEvaluator.Factory::new),
        Map.entry(GEO_POINT, ToStringFromGeoPointEvaluator.Factory::new),
        Map.entry(CARTESIAN_POINT, ToStringFromCartesianPointEvaluator.Factory::new),
        Map.entry(CARTESIAN_SHAPE, ToStringFromCartesianShapeEvaluator.Factory::new),
        Map.entry(GEO_SHAPE, ToStringFromGeoShapeEvaluator.Factory::new),
        Map.entry(GEOHASH, (source, fieldEval) -> new ToStringFromGeoGridEvaluator.Factory(source, fieldEval, GEOHASH)),
        Map.entry(GEOTILE, (source, fieldEval) -> new ToStringFromGeoGridEvaluator.Factory(source, fieldEval, GEOTILE)),
        Map.entry(GEOHEX, (source, fieldEval) -> new ToStringFromGeoGridEvaluator.Factory(source, fieldEval, GEOHEX)),
        Map.entry(AGGREGATE_METRIC_DOUBLE, ToStringFromAggregateMetricDoubleEvaluator.Factory::new)
    );
    @FunctionInfo(
        returnType = "keyword",
        description = "Converts an input value into a string.",
        examples = {
            @Example(file = "string", tag = "to_string"),
            @Example(description = "It also works fine on multivalued fields:", file = "string", tag = "to_string_multivalue") }
    )
    public ToString(
        Source source,
        @Param(
            name = "field",
            type = {
                "aggregate_metric_double",
                "boolean",
                "cartesian_point",
                "cartesian_shape",
                "date",
                "date_nanos",
                "dense_vector",
                "double",
                "geo_point",
                "geo_shape",
                "geohash",
                "geotile",
                "geohex",
                "integer",
                "ip",
                "keyword",
                "long",
                "text",
                "unsigned_long",
                "version" },
            description = "Input value. The input can be a single- or multi-valued column or an expression."
        ) Expression v
    ) {
        super(source, v);
    }
    private ToString(StreamInput in) throws IOException {
        super(in);
    }
    @Override
    public String getWriteableName() {
        return ENTRY.name;
    }
    @Override
    protected Map<DataType, BuildFactory> factories() {
        return EVALUATORS;
    }
    @Override
    public DataType dataType() {
        // The result is always a keyword, regardless of the input type
        return KEYWORD;
    }
    @Override
    public Expression replaceChildren(List<Expression> newChildren) {
        return new ToString(source(), newChildren.get(0));
    }
    @Override
    protected NodeInfo<? extends Expression> info() {
        return NodeInfo.create(this, ToString::new, field());
    }
    // The static methods below are referenced by code generated from the
    // @ConvertEvaluator annotations; their extraName determines the generated
    // evaluator class name.
    @ConvertEvaluator(extraName = "FromBoolean")
    static BytesRef fromBoolean(boolean bool) {
        return numericBooleanToString(bool);
    }
    @ConvertEvaluator(extraName = "FromIP")
    static BytesRef fromIP(BytesRef ip) {
        return new BytesRef(ipToString(ip));
    }
    @ConvertEvaluator(extraName = "FromDatetime")
    static BytesRef fromDatetime(long datetime) {
        return new BytesRef(dateTimeToString(datetime));
    }
    @ConvertEvaluator(extraName = "FromDateNanos")
    static BytesRef fromDateNanos(long datetime) {
        return new BytesRef(nanoTimeToString(datetime));
    }
    @ConvertEvaluator(extraName = "FromDouble")
    static BytesRef fromDouble(double dbl) {
        return numericBooleanToString(dbl);
    }
    @ConvertEvaluator(extraName = "FromFloat")
    static BytesRef fromFloat(float flt) {
        return numericBooleanToString(flt);
    }
    // NOTE(review): named fromDouble but converts a long — the extraName
    // ("FromLong") is what drives code generation; renaming would touch
    // generated callers, so the name is left as-is.
    @ConvertEvaluator(extraName = "FromLong")
    static BytesRef fromDouble(long lng) {
        return numericBooleanToString(lng);
    }
    // NOTE(review): likewise misnamed for an int input; see note above.
    @ConvertEvaluator(extraName = "FromInt")
    static BytesRef fromDouble(int integer) {
        return numericBooleanToString(integer);
    }
    @ConvertEvaluator(extraName = "FromVersion")
    static BytesRef fromVersion(BytesRef version) {
        return new BytesRef(versionToString(version));
    }
    @ConvertEvaluator(extraName = "FromUnsignedLong")
    static BytesRef fromUnsignedLong(long lng) {
        return unsignedLongToString(lng);
    }
    @ConvertEvaluator(extraName = "FromGeoPoint")
    static BytesRef fromGeoPoint(BytesRef wkb) {
        return new BytesRef(spatialToString(wkb));
    }
    @ConvertEvaluator(extraName = "FromCartesianPoint")
    static BytesRef fromCartesianPoint(BytesRef wkb) {
        return new BytesRef(spatialToString(wkb));
    }
    @ConvertEvaluator(extraName = "FromCartesianShape")
    static BytesRef fromCartesianShape(BytesRef wkb) {
        return new BytesRef(spatialToString(wkb));
    }
    @ConvertEvaluator(extraName = "FromGeoShape")
    static BytesRef fromGeoShape(BytesRef wkb) {
        return new BytesRef(spatialToString(wkb));
    }
    @ConvertEvaluator(extraName = "FromGeoGrid")
    static BytesRef fromGeoGrid(long gridId, @Fixed DataType dataType) {
        return new BytesRef(geoGridToString(gridId, dataType));
    }
}
|
ToString
|
java
|
netty__netty
|
common/src/main/java/io/netty/util/internal/SWARUtil.java
|
{
"start": 749,
"end": 5871
}
|
/**
 * SWAR (SIMD-within-a-register) helpers for byte search and ASCII case
 * conversion on packed 32/64-bit words.
 */
class ____ {
    /**
     * Compiles given byte into a long pattern suitable for SWAR operations:
     * the byte replicated into every byte lane of the word.
     */
    public static long compilePattern(byte byteToFind) {
        return (byteToFind & 0xFFL) * 0x101010101010101L;
    }

    /**
     * Applies a compiled pattern to given word.
     * Returns a word where each byte that matches the pattern has the highest bit set.
     *
     * @param word the word to apply the pattern to
     * @param pattern the pattern to apply, as produced by {@link #compilePattern(byte)}
     * @return a word where each byte that matches the pattern has the highest bit set
     */
    public static long applyPattern(final long word, final long pattern) {
        // Classic SWAR zero-byte detection applied to word XOR pattern:
        // a lane is zero (i.e. matched) iff its high bit ends up set below.
        long input = word ^ pattern;
        long tmp = (input & 0x7F7F7F7F7F7F7F7FL) + 0x7F7F7F7F7F7F7F7FL;
        return ~(tmp | input | 0x7F7F7F7F7F7F7F7FL);
    }

    /**
     * Returns the index of the first occurrence of the byte specified in the pattern.
     * If no pattern is found, returns 8.
     *
     * @param word the return value of {@link #applyPattern(long, long)}
     * @param isBigEndian if true, the given word is big endian;
     * if false, the given word is little endian
     * @return the index of the first occurrence of the specified pattern in the specified word.
     * If no pattern is found, returns 8.
     */
    public static int getIndex(final long word, final boolean isBigEndian) {
        // Each lane is 8 bits, so the zero count divided by 8 is the byte index
        final int zeros = isBigEndian? Long.numberOfLeadingZeros(word) : Long.numberOfTrailingZeros(word);
        return zeros >>> 3;
    }

    /**
     * Returns a word where each ASCII uppercase byte has the highest bit set.
     */
    private static long applyUpperCasePattern(final long word) {
        // Inspired by https://github.com/facebook/folly/blob/add4049dd6c2371eac05b92b6fd120fd6dd74df5/folly/String.cpp
        long rotated = word & 0x7F7F7F7F7F7F7F7FL;
        rotated += 0x2525252525252525L;
        rotated &= 0x7F7F7F7F7F7F7F7FL;
        rotated += 0x1A1A1A1A1A1A1A1AL;
        rotated &= ~word;
        rotated &= 0x8080808080808080L;
        return rotated;
    }

    /**
     * Returns a word where each ASCII uppercase byte has the highest bit set.
     */
    private static int applyUpperCasePattern(final int word) {
        int rotated = word & 0x7F7F7F7F;
        rotated += 0x25252525;
        rotated &= 0x7F7F7F7F;
        rotated += 0x1A1A1A1A;
        rotated &= ~word;
        rotated &= 0x80808080;
        return rotated;
    }

    /**
     * Returns a word where each ASCII lowercase byte has the highest bit set.
     */
    private static long applyLowerCasePattern(final long word) {
        long rotated = word & 0x7F7F7F7F7F7F7F7FL;
        rotated += 0x0505050505050505L;
        rotated &= 0x7F7F7F7F7F7F7F7FL;
        rotated += 0x1A1A1A1A1A1A1A1AL;
        rotated &= ~word;
        rotated &= 0x8080808080808080L;
        return rotated;
    }

    /**
     * Returns a word where each lowercase ASCII byte has the highest bit set.
     */
    private static int applyLowerCasePattern(final int word) {
        int rotated = word & 0x7F7F7F7F;
        rotated += 0x05050505;
        rotated &= 0x7F7F7F7F;
        rotated += 0x1A1A1A1A;
        rotated &= ~word;
        rotated &= 0x80808080;
        return rotated;
    }

    /**
     * Returns true if the given word contains at least one ASCII uppercase byte.
     */
    public static boolean containsUpperCase(final long word) {
        return applyUpperCasePattern(word) != 0;
    }

    /**
     * Returns true if the given word contains at least one ASCII uppercase byte.
     */
    public static boolean containsUpperCase(final int word) {
        return applyUpperCasePattern(word) != 0;
    }

    /**
     * Returns true if the given word contains at least one ASCII lowercase byte.
     */
    public static boolean containsLowerCase(final long word) {
        return applyLowerCasePattern(word) != 0;
    }

    /**
     * Returns true if the given word contains at least one ASCII lowercase byte.
     */
    public static boolean containsLowerCase(final int word) {
        return applyLowerCasePattern(word) != 0;
    }

    /**
     * Returns a word with all bytes converted to lowercase ASCII.
     */
    public static long toLowerCase(final long word) {
        // The 0x80 match bit shifted right twice lands on the 0x20 ASCII case bit
        final long mask = applyUpperCasePattern(word) >>> 2;
        return word | mask;
    }

    /**
     * Returns a word with all bytes converted to lowercase ASCII.
     */
    public static int toLowerCase(final int word) {
        final int mask = applyUpperCasePattern(word) >>> 2;
        return word | mask;
    }

    /**
     * Returns a word with all bytes converted to uppercase ASCII.
     */
    public static long toUpperCase(final long word) {
        final long mask = applyLowerCasePattern(word) >>> 2;
        return word & ~mask;
    }

    /**
     * Returns a word with all bytes converted to uppercase ASCII.
     */
    public static int toUpperCase(final int word) {
        final int mask = applyLowerCasePattern(word) >>> 2;
        return word & ~mask;
    }

    private ____() {
        // Utility class; no instances
    }
}
|
SWARUtil
|
java
|
micronaut-projects__micronaut-core
|
http-server-tck/src/main/java/io/micronaut/http/server/tck/tests/cors/SimpleRequestWithCorsNotEnabledTest.java
|
{
"start": 1773,
"end": 6229
}
|
class ____ {
private static final String SPECNAME = "SimpleRequestWithCorsNotEnabledTest";
private static final String PROPERTY_MICRONAUT_SERVER_CORS_LOCALHOST_PASS_THROUGH = "micronaut.server.cors.localhost-pass-through";
    /**
     * @see <a href="https://github.com/micronaut-projects/micronaut-core/security/advisories/GHSA-583g-g682-crxf">GHSA-583g-g682-crxf</a>
     * A malicious/compromised website can make HTTP requests to localhost. This test verifies a CORS simple request is denied when invoked against a Micronaut application running in localhost without cors enabled.
     * @throws IOException scenario step fails
     */
    @Test
    void corsSimpleRequestNotAllowedForLocalhostAndAny() throws IOException {
        asserts(SPECNAME,
            createRequest("https://sdelamo.github.io"),
            (server, request) -> {
                RefreshCounter refreshCounter = server.getApplicationContext().getBean(RefreshCounter.class);
                // Controller must not have executed before the request
                assertEquals(0, refreshCounter.getRefreshCount());
                AssertionUtils.assertThrows(server, request, HttpResponseAssertion.builder()
                    .status(HttpStatus.FORBIDDEN)
                    .assertResponse(response -> assertFalse(response.getHeaders().contains("Vary")))
                    .build());
                // Denied request: the controller must still not have executed
                assertEquals(0, refreshCounter.getRefreshCount());
            });
    }
    /**
     * This test verifies a CORS simple request is allowed when invoked against a Micronaut application running in localhost without cors enabled but with localhost-pass-through switched on.
     * @see <a href="https://github.com/micronaut-projects/micronaut-core/pull/8751">PR-8751</a>
     *
     * @throws IOException scenario step fails
     */
    @Test
    void corsSimpleRequestAllowedForLocalhostAndAnyWhenConfiguredToAllowIt() throws IOException {
        asserts(SPECNAME,
            Collections.singletonMap(PROPERTY_MICRONAUT_SERVER_CORS_LOCALHOST_PASS_THROUGH, StringUtils.TRUE),
            createRequest("https://sdelamo.github.io"),
            (server, request) -> {
                RefreshCounter refreshCounter = server.getApplicationContext().getBean(RefreshCounter.class);
                assertEquals(0, refreshCounter.getRefreshCount());
                AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
                    .status(HttpStatus.OK)
                    .build());
                // Allowed request: the controller executed exactly once
                assertEquals(1, refreshCounter.getRefreshCount());
            });
    }
    /**
     * It should not deny a cors request coming from a localhost origin if the micronaut application resolved host is localhost.
     * @throws IOException scenario step fails
     */
    @Test
    void corsSimpleRequestAllowedForLocalhostAndOriginLocalhost() throws IOException {
        asserts(SPECNAME,
            createRequest("http://localhost:8000"),
            (server, request) -> {
                RefreshCounter refreshCounter = server.getApplicationContext().getBean(RefreshCounter.class);
                assertEquals(0, refreshCounter.getRefreshCount());
                AssertionUtils.assertDoesNotThrow(server, request, HttpResponseAssertion.builder()
                    .status(HttpStatus.OK)
                    .build());
                // Localhost origin is accepted: the controller executed once
                assertEquals(1, refreshCounter.getRefreshCount());
            });
    }
    /**
     * Builds a browser-like CORS simple POST request to /refresh carrying the
     * given Origin header plus typical Chrome request headers.
     */
    private static HttpRequest<?> createRequest(String origin) {
        return HttpRequest.POST("/refresh", Collections.emptyMap())
            .header("Accept", "*/*")
            .header("Accept-Encoding", "gzip, deflate, br")
            .header("Accept-Language", "en-GB,en-US;q=0.9,en;q=0.8")
            .header("Connection", "keep-alive")
            .header("Content-Length", "0")
            .header("Host", "localhost:8080")
            .header("Origin", origin)
            .header("sec-ch-ua", "\"Not?A_Brand\";v=\"8\", \"Chromium\";v=\"108\", \"Google Chrome\";v=\"108\"")
            .header("sec-ch-ua-mobile", "?0")
            .header("sec-ch-ua-platform", "\"macOS\"")
            .header("Sec-Fetch-Dest", "empty")
            .header("Sec-Fetch-Mode", "cors")
            .header("Sec-Fetch-Site", "cross-site")
            .header("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36");
    }
@Requires(property = "spec.name", value = SPECNAME)
@Controller
static
|
SimpleRequestWithCorsNotEnabledTest
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java
|
{
"start": 3797,
"end": 8730
}
|
class ____ {
private static final Logger LOG = LoggerFactory.getLogger(
SaslDataTransferServer.class);
private final BlockPoolTokenSecretManager blockPoolTokenSecretManager;
private final DNConf dnConf;
// Store the most recent successfully negotiated QOP,
// for testing purpose only
private String negotiatedQOP;
  /**
   * Creates a new SaslDataTransferServer.
   *
   * @param dnConf configuration of DataNode
   * @param blockPoolTokenSecretManager used for checking block access tokens
   *   and encryption keys
   */
  public SaslDataTransferServer(DNConf dnConf,
      BlockPoolTokenSecretManager blockPoolTokenSecretManager) {
    // Collaborators are stored for use during per-connection negotiation
    this.blockPoolTokenSecretManager = blockPoolTokenSecretManager;
    this.dnConf = dnConf;
  }
  /**
   * Receives SASL negotiation from a peer on behalf of a server.
   *
   * @param peer connection peer
   * @param underlyingOut connection output stream
   * @param underlyingIn connection input stream
   * @param xferPort data transfer port of DataNode accepting connection
   * @param datanodeId ID of DataNode accepting connection
   * @return new pair of streams, wrapped after SASL negotiation
   * @throws IOException for any error
   */
  public IOStreamPair receive(Peer peer, OutputStream underlyingOut,
      InputStream underlyingIn, int xferPort, DatanodeID datanodeId)
      throws IOException {
    // Branch order matters: encrypted transfer takes precedence, then the
    // unsecured / privileged-port skips, then the general SASL handshake.
    if (dnConf.getEncryptDataTransfer()) {
      LOG.debug(
          "SASL server doing encrypted handshake for peer = {}, datanodeId = {}",
          peer, datanodeId);
      return getEncryptedStreams(peer, underlyingOut, underlyingIn);
    } else if (!UserGroupInformation.isSecurityEnabled()) {
      LOG.debug(
          "SASL server skipping handshake in unsecured configuration for "
          + "peer = {}, datanodeId = {}", peer, datanodeId);
      return new IOStreamPair(underlyingIn, underlyingOut);
    } else if (SecurityUtil.isPrivilegedPort(xferPort)) {
      LOG.debug(
          "SASL server skipping handshake in secured configuration for "
          + "peer = {}, datanodeId = {}", peer, datanodeId);
      return new IOStreamPair(underlyingIn, underlyingOut);
    } else if (dnConf.getSaslPropsResolver() != null) {
      LOG.debug(
          "SASL server doing general handshake for peer = {}, datanodeId = {}",
          peer, datanodeId);
      return getSaslStreams(peer, underlyingOut, underlyingIn);
    } else if (dnConf.getIgnoreSecurePortsForTesting()) {
      // It's a secured cluster using non-privileged ports, but no SASL. The
      // only way this can happen is if the DataNode has
      // ignore.secure.ports.for.testing configured, so this is a rare edge case.
      LOG.debug(
          "SASL server skipping handshake in secured configuration with no SASL "
          + "protection configured for peer = {}, datanodeId = {}",
          peer, datanodeId);
      return new IOStreamPair(underlyingIn, underlyingOut);
    } else {
      // The error message here intentionally does not mention
      // ignore.secure.ports.for.testing. That's intended for dev use only.
      // This code path is not expected to execute ever, because DataNode startup
      // checks for invalid configuration and aborts.
      throw new IOException(String.format("Cannot create a secured " +
          "connection if DataNode listens on unprivileged port (%d) and no " +
          "protection is defined in configuration property %s.",
          datanodeId.getXferPort(), DFS_DATA_TRANSFER_PROTECTION_KEY));
    }
  }
  /**
   * Receives SASL negotiation for specialized encrypted handshake.
   *
   * @param peer connection peer
   * @param underlyingOut connection output stream
   * @param underlyingIn connection input stream
   * @return new pair of streams, wrapped after SASL negotiation
   * @throws IOException for any error
   */
  private IOStreamPair getEncryptedStreams(Peer peer,
      OutputStream underlyingOut, InputStream underlyingIn) throws IOException {
    // A channel that is already secure or explicitly trusted needs no extra
    // encryption handshake
    if (peer.hasSecureChannel() ||
        dnConf.getTrustedChannelResolver().isTrusted(getPeerAddress(peer))) {
      return new IOStreamPair(underlyingIn, underlyingOut);
    }
    Map<String, String> saslProps = createSaslPropertiesForEncryption(
        dnConf.getEncryptionAlgorithm());
    if (LOG.isDebugEnabled()) {
      LOG.debug("Server using encryption algorithm " +
          dnConf.getEncryptionAlgorithm());
    }
    // The SASL password is derived from the encryption key encoded in the
    // negotiated user name
    final CallbackHandler callbackHandler = new SaslServerCallbackHandler(dnConf.getConf(),
        new PasswordFunction() {
          @Override
          public char[] apply(String userName) throws IOException {
            return encryptionKeyToPassword(getEncryptionKeyFromUserName(userName));
          }
        });
    return doSaslHandshake(peer, underlyingOut, underlyingIn, saslProps,
        callbackHandler);
  }
/**
* The SASL handshake for encrypted vs. general-purpose uses different logic
* for determining the password. This
|
SaslDataTransferServer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/function/scalar/UnaryScalarFunction.java
|
{
"start": 847,
"end": 1968
}
|
class ____ extends ScalarFunction {
private final Expression field;
protected UnaryScalarFunction(Source source) {
super(source);
this.field = null;
}
protected UnaryScalarFunction(Source source, Expression field) {
super(source, singletonList(field));
this.field = field;
}
@Override
public final UnaryScalarFunction replaceChildren(List<Expression> newChildren) {
return replaceChild(newChildren.get(0));
}
protected abstract UnaryScalarFunction replaceChild(Expression newChild);
public Expression field() {
return field;
}
@Override
public final Pipe makePipe() {
return new UnaryPipe(source(), this, Expressions.pipe(field()), makeProcessor());
}
protected abstract Processor makeProcessor();
@Override
public boolean foldable() {
return field.foldable();
}
@Override
public Object fold() {
return makeProcessor().process(field().fold());
}
@Override
public ScriptTemplate asScript() {
return asScript(field);
}
}
|
UnaryScalarFunction
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
|
{
"start": 168031,
"end": 170630
}
|
/**
 * IndexWriter subclass that notifies the thread-pool merge scheduler of tragic
 * (unrecoverable) events and, when assertions are enabled, rejects direct
 * document deletion calls.
 */
class ____ extends IndexWriter {
    private final Logger logger;
    ElasticsearchIndexWriter(Directory directory, IndexWriterConfig indexWriterConfig, Logger logger) throws IOException {
        super(directory, indexWriterConfig);
        this.logger = logger;
    }
    @Override
    public void onTragicEvent(Throwable tragedy, String location) {
        assert tragedy != null;
        try {
            if (getConfig().getMergeScheduler() instanceof ThreadPoolMergeScheduler mergeScheduler) {
                try {
                    // Must be executed before calling IndexWriter#onTragicEvent
                    mergeScheduler.onTragicEvent(tragedy);
                } catch (Exception e) {
                    // A failing notification must not mask the original tragedy
                    logger.warn("Exception thrown when notifying the merge scheduler of a tragic event", e);
                    if (tragedy != e) {
                        tragedy.addSuppressed(e);
                    }
                }
            }
        } finally {
            super.onTragicEvent(tragedy, location);
        }
    }
    @Override
    public long deleteDocuments(Term... terms) throws IOException {
        // Hard deletes are rejected when assertions are enabled
        if (Assertions.ENABLED) {
            throw new AssertionError("must not hard delete documents");
        }
        return super.deleteDocuments(terms);
    }
    @Override
    public long tryDeleteDocument(IndexReader readerIn, int docID) throws IOException {
        if (Assertions.ENABLED) {
            throw new AssertionError("tryDeleteDocument is not supported. See Lucene#DirectoryReaderWithAllLiveDocs");
        }
        return super.tryDeleteDocument(readerIn, docID);
    }
}
    /**
     * Returns the last local checkpoint value that has been refreshed internally.
     */
    final long lastRefreshedCheckpoint() {
        return lastRefreshedCheckpointListener.refreshedCheckpoint.get();
    }
private final Object refreshIfNeededMutex = new Object();
    /**
     * Refresh this engine **internally** iff the requesting seq_no is greater than the last refreshed checkpoint.
     */
    protected final void refreshIfNeeded(String source, long requestingSeqNo) {
        // Double-checked under the mutex so concurrent callers trigger at most one refresh
        if (lastRefreshedCheckpoint() < requestingSeqNo) {
            synchronized (refreshIfNeededMutex) {
                if (lastRefreshedCheckpoint() < requestingSeqNo) {
                    refreshInternalSearcher(source, true);
                }
            }
        }
    }
private final
|
ElasticsearchIndexWriter
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/fs/contract/router/TestRouterHDFSContractConcat.java
|
{
"start": 1242,
"end": 1819
}
|
// Runs the filesystem contract tests for concat() against an HDFS Router cluster.
class ____ extends AbstractContractConcatTest {
  @BeforeAll
  public static void createCluster() throws IOException {
    RouterHDFSContract.createCluster();
    // perform a simple operation on the cluster to verify it is up
    RouterHDFSContract.getFileSystem().getDefaultBlockSize(new Path("/"));
  }
  @AfterAll
  public static void teardownCluster() throws IOException {
    RouterHDFSContract.destroyCluster();
  }
  @Override
  protected AbstractFSContract createContract(Configuration conf) {
    return new RouterHDFSContract(conf);
  }
}
|
TestRouterHDFSContractConcat
|
java
|
apache__camel
|
core/camel-support/src/main/java/org/apache/camel/support/task/Tasks.java
|
{
"start": 938,
"end": 2096
}
|
// Static factory entry points for the task builders.
class ____ {

    private Tasks() {
    }

    /**
     * Creates a new background task builder.
     *
     * @return an instance of a background task builder
     */
    public static BackgroundTask.BackgroundTaskBuilder backgroundTask() {
        return new BackgroundTask.BackgroundTaskBuilder();
    }

    /**
     * Creates a new foreground task builder.
     *
     * @return an instance of a foreground task builder
     */
    public static ForegroundTask.ForegroundTaskBuilder foregroundTask() {
        return new ForegroundTask.ForegroundTaskBuilder();
    }

    /**
     * Generic builder factory: obtains a builder instance from the given
     * supplier (usually a parameterless constructor reference, Builder::new).
     *
     * @param taskBuilderSupplier supplier of the task builder to create
     * @param <T> the type of tasks that the builder builds
     * @param <Y> the type of the task builder to provide
     * @return a new instance of the given task builder
     */
    public static <T extends BlockingTask, Y extends TaskBuilder<T>> Y task(Supplier<Y> taskBuilderSupplier) {
        return taskBuilderSupplier.get();
    }
}
|
Tasks
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClientCluster.java
|
{
"start": 2071,
"end": 7885
}
|
class ____ {
}
@BeforeAll
public static void setup() throws IOException {
final Configuration conf = new Configuration();
final Path TEST_ROOT_DIR = new Path(System.getProperty("test.build.data",
"/tmp"));
testdir = new Path(TEST_ROOT_DIR, "TestMiniMRClientCluster");
inDir = new Path(testdir, "in");
outDir = new Path(testdir, "out");
FileSystem fs = FileSystem.getLocal(conf);
if (fs.exists(testdir) && !fs.delete(testdir, true)) {
throw new IOException("Could not delete " + testdir);
}
if (!fs.mkdirs(inDir)) {
throw new IOException("Mkdirs failed to create " + inDir);
}
for (int i = 0; i < inFiles.length; i++) {
inFiles[i] = new Path(inDir, "part_" + i);
createFile(inFiles[i], conf);
}
// create the mini cluster to be used for the tests
mrCluster = MiniMRClientClusterFactory.create(
InternalClass.class, 1, new Configuration());
}
@AfterAll
public static void cleanup() throws IOException {
// clean up the input and output files
final Configuration conf = new Configuration();
final FileSystem fs = testdir.getFileSystem(conf);
if (fs.exists(testdir)) {
fs.delete(testdir, true);
}
// stopping the mini cluster
mrCluster.stop();
}
@Test
public void testRestart() throws Exception {
String rmAddress1 = mrCluster.getConfig().get(YarnConfiguration.RM_ADDRESS);
String rmAdminAddress1 = mrCluster.getConfig().get(
YarnConfiguration.RM_ADMIN_ADDRESS);
String rmSchedAddress1 = mrCluster.getConfig().get(
YarnConfiguration.RM_SCHEDULER_ADDRESS);
String rmRstrackerAddress1 = mrCluster.getConfig().get(
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS);
String rmWebAppAddress1 = mrCluster.getConfig().get(
YarnConfiguration.RM_WEBAPP_ADDRESS);
String mrHistAddress1 = mrCluster.getConfig().get(
JHAdminConfig.MR_HISTORY_ADDRESS);
String mrHistWebAppAddress1 = mrCluster.getConfig().get(
JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS);
mrCluster.restart();
String rmAddress2 = mrCluster.getConfig().get(YarnConfiguration.RM_ADDRESS);
String rmAdminAddress2 = mrCluster.getConfig().get(
YarnConfiguration.RM_ADMIN_ADDRESS);
String rmSchedAddress2 = mrCluster.getConfig().get(
YarnConfiguration.RM_SCHEDULER_ADDRESS);
String rmRstrackerAddress2 = mrCluster.getConfig().get(
YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS);
String rmWebAppAddress2 = mrCluster.getConfig().get(
YarnConfiguration.RM_WEBAPP_ADDRESS);
String mrHistAddress2 = mrCluster.getConfig().get(
JHAdminConfig.MR_HISTORY_ADDRESS);
String mrHistWebAppAddress2 = mrCluster.getConfig().get(
JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS);
assertEquals(rmAddress1, rmAddress2, "Address before restart: " + rmAddress1
+ " is different from new address: " + rmAddress2);
assertEquals(rmAdminAddress1, rmAdminAddress2,
"Address before restart: " + rmAdminAddress1
+ " is different from new address: " + rmAdminAddress2);
assertEquals(rmSchedAddress1, rmSchedAddress2,
"Address before restart: " + rmSchedAddress1
+ " is different from new address: " + rmSchedAddress2);
assertEquals(rmRstrackerAddress1, rmRstrackerAddress2,
"Address before restart: " + rmRstrackerAddress1
+ " is different from new address: " + rmRstrackerAddress2);
assertEquals(rmWebAppAddress1, rmWebAppAddress2,
"Address before restart: " + rmWebAppAddress1
+ " is different from new address: " + rmWebAppAddress2);
assertEquals(mrHistAddress1, mrHistAddress2,
"Address before restart: " + mrHistAddress1
+ " is different from new address: " + mrHistAddress2);
assertEquals(mrHistWebAppAddress1, mrHistWebAppAddress2,
"Address before restart: " + mrHistWebAppAddress1
+ " is different from new address: " + mrHistWebAppAddress2);
}
@Test
public void testJob() throws Exception {
final Job job = createJob();
org.apache.hadoop.mapreduce.lib.input.FileInputFormat.setInputPaths(job,
inDir);
org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.setOutputPath(job,
new Path(outDir, "testJob"));
assertTrue(job.waitForCompletion(true));
validateCounters(job.getCounters(), 5, 25, 5, 5);
}
private void validateCounters(Counters counters, long mapInputRecords,
long mapOutputRecords, long reduceInputGroups, long reduceOutputRecords) {
assertEquals(mapInputRecords, counters.findCounter("MyCounterGroup",
"MAP_INPUT_RECORDS").getValue(), "MapInputRecords");
assertEquals(mapOutputRecords, counters.findCounter("MyCounterGroup",
"MAP_OUTPUT_RECORDS").getValue(), "MapOutputRecords");
assertEquals(reduceInputGroups, counters.findCounter("MyCounterGroup",
"REDUCE_INPUT_GROUPS").getValue(), "ReduceInputGroups");
assertEquals(reduceOutputRecords, counters.findCounter("MyCounterGroup",
"REDUCE_OUTPUT_RECORDS").getValue(), "ReduceOutputRecords");
}
private static void createFile(Path inFile, Configuration conf)
throws IOException {
final FileSystem fs = inFile.getFileSystem(conf);
if (fs.exists(inFile)) {
return;
}
FSDataOutputStream out = fs.create(inFile);
out.writeBytes("This is a test file");
out.close();
}
public static Job createJob() throws IOException {
final Job baseJob = Job.getInstance(mrCluster.getConfig());
baseJob.setOutputKeyClass(Text.class);
baseJob.setOutputValueClass(IntWritable.class);
baseJob.setMapperClass(MyMapper.class);
baseJob.setReducerClass(MyReducer.class);
baseJob.setNumReduceTasks(1);
return baseJob;
}
public static
|
InternalClass
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/server/federation/policies/amrmproxy/TestBroadcastAMRMProxyFederationPolicy.java
|
{
"start": 1893,
"end": 4452
}
|
class ____
extends BaseFederationPoliciesTest {
@BeforeEach
public void setUp() throws Exception {
setPolicy(new BroadcastAMRMProxyPolicy());
// needed for base test to work
setPolicyInfo(mock(WeightedPolicyInfo.class));
for (int i = 1; i <= 2; i++) {
SubClusterIdInfo sc = new SubClusterIdInfo("sc" + i);
SubClusterInfo sci = SubClusterInfo.newInstance(
sc.toId(), "dns1:80", "dns1:81", "dns1:82", "dns1:83", SubClusterState.SC_RUNNING,
System.currentTimeMillis(), "something");
getActiveSubclusters().put(sc.toId(), sci);
}
FederationPoliciesTestUtil.initializePolicyContext(getPolicy(),
mock(WeightedPolicyInfo.class), getActiveSubclusters());
}
@Test
public void testSplitAllocateRequest() throws Exception {
// verify the request is broadcasted to all subclusters
String[] hosts = new String[] {"host1", "host2" };
List<ResourceRequest> resourceRequests = FederationPoliciesTestUtil
.createResourceRequests(hosts, 2 * 1024, 2, 1, 3, null, false);
Map<SubClusterId, List<ResourceRequest>> response =
((FederationAMRMProxyPolicy) getPolicy()).splitResourceRequests(
resourceRequests, new HashSet<SubClusterId>());
assertTrue(response.size() == 2);
for (Map.Entry<SubClusterId, List<ResourceRequest>> entry : response
.entrySet()) {
assertTrue(getActiveSubclusters().get(entry.getKey()) != null);
for (ResourceRequest r : entry.getValue()) {
assertTrue(resourceRequests.contains(r));
}
}
for (SubClusterId subClusterId : getActiveSubclusters().keySet()) {
for (ResourceRequest r : response.get(subClusterId)) {
assertTrue(resourceRequests.contains(r));
}
}
}
@Test
public void testNotifyOfResponseFromUnknownSubCluster() throws Exception {
String[] hosts = new String[] {"host1", "host2" };
List<ResourceRequest> resourceRequests = FederationPoliciesTestUtil
.createResourceRequests(hosts, 2 * 1024, 2, 1, 3, null, false);
Map<SubClusterId, List<ResourceRequest>> response =
((FederationAMRMProxyPolicy) getPolicy()).splitResourceRequests(
resourceRequests, new HashSet<SubClusterId>());
((FederationAMRMProxyPolicy) getPolicy()).notifyOfResponse(
SubClusterId.newInstance("sc3"), mock(AllocateResponse.class));
((FederationAMRMProxyPolicy) getPolicy()).notifyOfResponse(
SubClusterId.newInstance("sc1"), mock(AllocateResponse.class));
}
}
|
TestBroadcastAMRMProxyFederationPolicy
|
java
|
mockito__mockito
|
mockito-core/src/test/java/org/mockitousage/verification/BasicVerificationInOrderTest.java
|
{
"start": 1215,
"end": 13977
}
|
class ____ extends TestBase {
private IMethods mockOne;
private IMethods mockTwo;
private IMethods mockThree;
private InOrder inOrder;
@Before
public void setUp() {
mockOne = mock(IMethods.class);
mockTwo = mock(IMethods.class);
mockThree = mock(IMethods.class);
inOrder = inOrder(mockOne, mockTwo, mockThree);
mockOne.simpleMethod(1);
mockTwo.simpleMethod(2);
mockTwo.simpleMethod(2);
mockThree.simpleMethod(3);
mockTwo.simpleMethod(2);
mockOne.simpleMethod(4);
}
@Test
public void shouldVerifyInOrder() {
inOrder.verify(mockOne).simpleMethod(1);
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
inOrder.verify(mockThree).simpleMethod(3);
inOrder.verify(mockTwo).simpleMethod(2);
inOrder.verify(mockOne).simpleMethod(4);
verifyNoMoreInteractions(mockOne, mockTwo, mockThree);
}
@Test
public void shouldVerifyInOrderUsingAtLeastOnce() {
inOrder.verify(mockOne, atLeastOnce()).simpleMethod(1);
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
inOrder.verify(mockThree).simpleMethod(3);
inOrder.verify(mockTwo).simpleMethod(2);
inOrder.verify(mockOne, atLeastOnce()).simpleMethod(4);
verifyNoMoreInteractions(mockOne, mockTwo, mockThree);
}
@Test
public void shouldVerifyInOrderWhenExpectingSomeInvocationsToBeCalledZeroTimes() {
inOrder.verify(mockOne, times(0)).oneArg(false);
inOrder.verify(mockOne).simpleMethod(1);
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
inOrder.verify(mockTwo, times(0)).simpleMethod(22);
inOrder.verify(mockThree).simpleMethod(3);
inOrder.verify(mockTwo).simpleMethod(2);
inOrder.verify(mockOne).simpleMethod(4);
inOrder.verify(mockThree, times(0)).oneArg(false);
verifyNoMoreInteractions(mockOne, mockTwo, mockThree);
}
@Test
public void shouldFailWhenFirstMockCalledTwice() {
inOrder.verify(mockOne).simpleMethod(1);
try {
inOrder.verify(mockOne).simpleMethod(1);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailWhenLastMockCalledTwice() {
inOrder.verify(mockOne).simpleMethod(1);
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
inOrder.verify(mockThree).simpleMethod(3);
inOrder.verify(mockTwo).simpleMethod(2);
inOrder.verify(mockOne).simpleMethod(4);
try {
inOrder.verify(mockOne).simpleMethod(4);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailOnFirstMethodBecauseOneInvocationWanted() {
assertThatThrownBy(
() -> {
inOrder.verify(mockOne, times(0)).simpleMethod(1);
})
.isInstanceOf(VerificationInOrderFailure.class)
.hasMessageContainingAll(
"Verification in order failure:",
"iMethods.simpleMethod(1);",
"Wanted 0 times:",
"-> at ",
"But was 1 time:",
"-> at ");
}
@Test
public void shouldFailOnFirstMethodBecauseOneInvocationWantedAgain() {
assertThatThrownBy(
() -> {
inOrder.verify(mockOne, times(2)).simpleMethod(1);
})
.isInstanceOf(VerificationInOrderFailure.class)
.hasMessageContainingAll(
"Verification in order failure:",
"iMethods.simpleMethod(1);",
"Wanted 2 times:",
"-> at ",
"But was 1 time:",
"-> at ");
}
@Test
public void shouldFailOnSecondMethodBecauseFourInvocationsWanted() {
inOrder.verify(mockOne, times(1)).simpleMethod(1);
try {
inOrder.verify(mockTwo, times(4)).simpleMethod(2);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailOnSecondMethodBecauseTwoInvocationsWantedAgain() {
inOrder.verify(mockOne, times(1)).simpleMethod(1);
try {
inOrder.verify(mockTwo, times(0)).simpleMethod(2);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailOnLastMethodBecauseOneInvocationWanted() {
inOrder.verify(mockOne, atLeastOnce()).simpleMethod(1);
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
inOrder.verify(mockThree, atLeastOnce()).simpleMethod(3);
inOrder.verify(mockTwo, atLeastOnce()).simpleMethod(2);
try {
inOrder.verify(mockOne, times(0)).simpleMethod(4);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailOnLastMethodBecauseOneInvocationWantedAgain() {
inOrder.verify(mockOne, atLeastOnce()).simpleMethod(1);
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
inOrder.verify(mockThree, atLeastOnce()).simpleMethod(3);
inOrder.verify(mockTwo, atLeastOnce()).simpleMethod(2);
try {
inOrder.verify(mockOne, times(2)).simpleMethod(4);
fail();
} catch (VerificationInOrderFailure e) {
}
}
/* ------------- */
@Test
public void shouldFailOnFirstMethodBecauseDifferentArgsWanted() {
assertThatThrownBy(
() -> {
inOrder.verify(mockOne).simpleMethod(100);
})
.isInstanceOf(ArgumentsAreDifferent.class)
.hasMessageContainingAll(
"Argument(s) are different! Wanted:",
"iMethods.simpleMethod(100);",
"-> at ",
"Actual invocations have different arguments:",
"iMethods.simpleMethod(1);",
"-> at ",
"iMethods.simpleMethod(2);",
"-> at ",
"iMethods.simpleMethod(2);",
"-> at ",
"iMethods.simpleMethod(3);",
"-> at ",
"iMethods.simpleMethod(2);",
"-> at ",
"iMethods.simpleMethod(4);",
"-> at ");
}
@Test
public void shouldFailOnFirstMethodBecauseDifferentMethodWanted() {
assertThatThrownBy(
() -> {
inOrder.verify(mockOne).oneArg(true);
})
.isInstanceOf(WantedButNotInvoked.class)
.hasMessageContainingAll(
"Wanted but not invoked:",
"iMethods.oneArg(true);",
"-> at ",
"However, there were exactly 6 interactions with this mock:",
"iMethods.simpleMethod(1);",
"-> at ",
"iMethods.simpleMethod(2);",
"-> at ",
"iMethods.simpleMethod(2);",
"-> at ",
"iMethods.simpleMethod(3);",
"-> at ",
"iMethods.simpleMethod(2);",
"-> at ",
"iMethods.simpleMethod(4);",
"-> at ");
}
@Test
public void shouldFailOnSecondMethodBecauseDifferentArgsWanted() {
inOrder.verify(mockOne).simpleMethod(1);
try {
inOrder.verify(mockTwo, times(2)).simpleMethod(-999);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailOnSecondMethodBecauseDifferentMethodWanted() {
inOrder.verify(mockOne, times(1)).simpleMethod(1);
try {
inOrder.verify(mockTwo, times(2)).oneArg(true);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailOnLastMethodBecauseDifferentArgsWanted() {
inOrder.verify(mockOne).simpleMethod(1);
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
inOrder.verify(mockThree).simpleMethod(3);
inOrder.verify(mockTwo).simpleMethod(2);
try {
inOrder.verify(mockOne).simpleMethod(-666);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailOnLastMethodBecauseDifferentMethodWanted() {
inOrder.verify(mockOne).simpleMethod(1);
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
inOrder.verify(mockThree).simpleMethod(3);
inOrder.verify(mockTwo).simpleMethod(2);
try {
inOrder.verify(mockOne).oneArg(false);
fail();
} catch (VerificationInOrderFailure e) {
}
}
/* -------------- */
@Test
public void shouldFailWhenLastMethodVerifiedFirst() {
inOrder.verify(mockOne).simpleMethod(4);
try {
inOrder.verify(mockOne).simpleMethod(1);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailWhenMiddleMethodVerifiedFirst() {
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
try {
inOrder.verify(mockOne).simpleMethod(1);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailWhenMiddleMethodVerifiedFirstInAtLeastOnceMode() {
inOrder.verify(mockTwo, atLeastOnce()).simpleMethod(2);
try {
inOrder.verify(mockOne).simpleMethod(1);
fail();
} catch (VerificationInOrderFailure e) {
}
}
@Test
public void shouldFailOnVerifyNoMoreInteractions() {
inOrder.verify(mockOne).simpleMethod(1);
inOrder.verify(mockTwo, times(2)).simpleMethod(2);
inOrder.verify(mockThree).simpleMethod(3);
inOrder.verify(mockTwo).simpleMethod(2);
try {
verifyNoMoreInteractions(mockOne, mockTwo, mockThree);
fail();
} catch (NoInteractionsWanted e) {
}
}
@Test
public void shouldFailOnVerifyNoInteractions() {
assertThatThrownBy(
() -> {
verifyNoInteractions(mockOne);
})
.isInstanceOf(NoInteractionsWanted.class)
.hasMessageContainingAll(
"No interactions wanted here:",
"-> at ",
"But found these interactions on mock 'iMethods':",
"-> at ",
"-> at ",
"***",
"For your reference, here is the list of all invocations ([?] - means unverified).",
"1. [?]-> at ",
"2. [?]-> at ");
}
@SuppressWarnings({"all", "CheckReturnValue", "MockitoUsage"})
@Test
public void shouldScreamWhenNullPassed() {
assertThatThrownBy(
() -> {
inOrder((Object[]) null);
})
.isInstanceOf(MockitoException.class)
.hasMessageContainingAll(
"Method requires argument(s)!",
"Pass mocks that require verification in order.",
"For example:",
" InOrder inOrder = inOrder(mockOne, mockTwo);");
}
@Test
public void shouldThrowNullPassedToVerifyException() {
try {
inOrder.verify(null);
fail();
} catch (NullInsteadOfMockException e) {
assertThat(e)
.hasMessageContaining(
"Argument passed to verify() should be a mock but is null!");
}
}
@Test
public void shouldThrowNotAMockPassedToVerifyException() {
Object object = new Object();
try {
inOrder.verify(object);
fail();
} catch (NotAMockException e) {
assertThat(e)
.hasMessageContaining(
"Argument passed to verify() is of type Object and is not a mock!");
}
}
}
|
BasicVerificationInOrderTest
|
java
|
spring-cloud__spring-cloud-gateway
|
spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/LoadBalancerServiceInstanceCookieFilterTests.java
|
{
"start": 1835,
"end": 5063
}
|
class ____ {
private final LoadBalancerProperties properties = new LoadBalancerProperties();
private final GatewayFilterChain chain = mock(GatewayFilterChain.class);
private final ServerWebExchange exchange = MockServerWebExchange
.from(MockServerHttpRequest.get("http://localhost/get").build());
private final LoadBalancerServiceInstanceCookieFilter filter = new LoadBalancerServiceInstanceCookieFilter(
properties);
@BeforeEach
void setUp() {
properties.getStickySession().setAddServiceInstanceCookie(true);
}
@Test
void shouldAddServiceInstanceCookieHeader() {
exchange.getAttributes()
.put(GATEWAY_LOADBALANCER_RESPONSE_ATTR,
new DefaultResponse(new DefaultServiceInstance("test-01", "test", "host", 8080, false)));
ServerWebExchange filteredExchange = testFilter(exchange);
assertThat(filteredExchange.getRequest().getHeaders().get(HttpHeaders.COOKIE)).hasSize(1);
assertThat(filteredExchange.getRequest().getHeaders().get(HttpHeaders.COOKIE))
.containsExactly("sc-lb-instance-id=test-01");
}
@Test
void shouldAppendServiceInstanceCookieHeaderIfCookiesPresent() {
ServerWebExchange exchange = MockServerWebExchange.from(MockServerHttpRequest.get("http://localhost/get")
.cookie(new HttpCookie("testCookieName", "testCookieValue"))
.build());
exchange.getAttributes()
.put(GATEWAY_LOADBALANCER_RESPONSE_ATTR,
new DefaultResponse(new DefaultServiceInstance("test-01", "test", "host", 8080, false)));
ServerWebExchange filteredExchange = testFilter(exchange);
assertThat(filteredExchange.getRequest().getHeaders().get(HttpHeaders.COOKIE))
.containsExactly("testCookieName=testCookieValue", "sc-lb-instance-id=test-01");
}
@Test
void shouldContinueChainWhenNoServiceInstanceResponse() {
ServerWebExchange filteredExchange = testFilter(exchange);
assertThat(filteredExchange.getRequest().getHeaders().isEmpty()).isTrue();
}
@Test
void shouldContinueChainWhenNullServiceInstanceCookieName() {
exchange.getAttributes()
.put(GATEWAY_LOADBALANCER_RESPONSE_ATTR,
new DefaultResponse(new DefaultServiceInstance("test-01", "test", "host", 8080, false)));
properties.getStickySession().setInstanceIdCookieName(null);
ServerWebExchange filteredExchange = testFilter(exchange);
assertThat(filteredExchange.getRequest().getHeaders().isEmpty()).isTrue();
}
@Test
void shouldContinueChainWhenEmptyServiceInstanceCookieName() {
exchange.getAttributes()
.put(GATEWAY_LOADBALANCER_RESPONSE_ATTR,
new DefaultResponse(new DefaultServiceInstance("test-01", "test", "host", 8080, false)));
properties.getStickySession().setInstanceIdCookieName("");
ServerWebExchange filteredExchange = testFilter(exchange);
assertThat(filteredExchange.getRequest().getHeaders().isEmpty()).isTrue();
}
private ServerWebExchange testFilter(ServerWebExchange exchange) {
ArgumentCaptor<ServerWebExchange> captor = ArgumentCaptor.forClass(ServerWebExchange.class);
when(chain.filter(captor.capture())).thenReturn(Mono.empty());
filter.filter(exchange, chain).block();
verify(chain).filter(any(ServerWebExchange.class));
verifyNoMoreInteractions(chain);
return captor.getValue();
}
}
|
LoadBalancerServiceInstanceCookieFilterTests
|
java
|
apache__camel
|
components/camel-jsch/src/main/java/org/apache/camel/component/scp/ScpEndpoint.java
|
{
"start": 2415,
"end": 3885
}
|
class ____ extends RemoteFileEndpoint<ScpFile> {
@UriParam
private ScpConfiguration configuration;
public ScpEndpoint() {
}
public ScpEndpoint(String uri, ScpComponent component, ScpConfiguration configuration) {
super(uri, component, configuration);
this.configuration = configuration;
}
@Override
public ScpConfiguration getConfiguration() {
return configuration;
}
@Override
protected RemoteFileConsumer<ScpFile> buildConsumer(Processor processor) {
throw new UnsupportedOperationException("This component does not support consuming from this endpoint");
}
@Override
protected GenericFileProducer<ScpFile> buildProducer() {
return new ScpProducer(this, createRemoteFileOperations());
}
@Override
public RemoteFileOperations<ScpFile> createRemoteFileOperations() {
ScpOperations operations = new ScpOperations();
operations.setEndpoint(this);
return operations;
}
@Override
protected GenericFileProcessStrategy<ScpFile> createGenericFileStrategy() {
return new ScpProcessStrategyFactory().createGenericFileProcessStrategy(getCamelContext(), getParamsAsMap());
}
@Override
public String getScheme() {
return "scp";
}
@Override
public Expression getTempFileName() {
// creation of temporary files not supported by the scp: protocol
return null;
}
}
|
ScpEndpoint
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/converted/enums/VarcharEnumTypeTest.java
|
{
"start": 1164,
"end": 5369
}
|
class ____ {
@BeforeEach
protected void createTestData(SessionFactoryScope scope) {
scope.inTransaction(
(session) -> {
final Person person = Person.person( Gender.MALE, HairColor.BROWN );
session.persist( person );
session.persist( Person.person( Gender.MALE, HairColor.BLACK ) );
session.persist( Person.person( Gender.FEMALE, HairColor.BROWN ) );
session.persist( Person.person( Gender.FEMALE, HairColor.BLACK ) );
}
);
}
@AfterEach
public void dropTestData(SessionFactoryScope scope) {
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
@JiraKey("HHH-12978")
public void testEnumAsBindParameterAndExtract(SessionFactoryScope scope, MessageKeyWatcher loggingWatcher) {
scope.inTransaction(
(session) -> {
session.createQuery( "select p.id from Person p where p.id = :id", Long.class )
.setParameter( "id", 1L )
.list();
assertTrue( loggingWatcher.wasTriggered() );
}
);
loggingWatcher.reset();
scope.inTransaction(
(session) -> {
final String qry = "select p.gender from Person p where p.gender = :gender and p.hairColor = :hairColor";
session.createQuery( qry, Gender.class )
.setParameter( "gender", Gender.MALE )
.setParameter( "hairColor", HairColor.BROWN )
.getSingleResult();
assertTrue( loggingWatcher.wasTriggered() );
}
);
}
@Test
@JiraKey("HHH-10282")
public void hqlTestEnumShortHandSyntax(SessionFactoryScope scope, MessageKeyWatcher loggingWatcher) {
scope.inTransaction(
(session) -> {
session.createQuery(
"select id from Person where originalHairColor = BLONDE")
.getResultList();
assertTrue( loggingWatcher.wasTriggered() );
}
);
}
@Test
@JiraKey("HHH-10282")
public void hqlTestEnumQualifiedShortHandSyntax(SessionFactoryScope scope, MessageKeyWatcher loggingWatcher) {
final String qry = "select id from Person where originalHairColor = HairColor.BLONDE";
scope.inTransaction(
(session) -> {
session.createQuery( qry ).getResultList();
assertTrue( loggingWatcher.wasTriggered() );
}
);
}
@Test
@JiraKey("HHH-10282")
public void hqlTestEnumShortHandSyntaxInPredicate(SessionFactoryScope scope, MessageKeyWatcher loggingWatcher) {
scope.inTransaction(
(session) -> {
final String qry = "select id from Person where originalHairColor in (BLONDE, BROWN)";
session.createQuery( qry ).getResultList();
assertTrue( loggingWatcher.wasTriggered() );
}
);
}
@Test
@JiraKey("HHH-10282")
public void hqlTestEnumQualifiedShortHandSyntaxInPredicate(SessionFactoryScope scope, MessageKeyWatcher loggingWatcher) {
scope.inTransaction(
(session) -> {
final String qry = "select id from Person where originalHairColor in (HairColor.BLONDE, HairColor.BROWN)";
session.createQuery( qry ).getResultList();
assertTrue( loggingWatcher.wasTriggered() );
}
);
}
@Test
@JiraKey("HHH-16739")
public void testCompareEnumParameterWithDifferentTypedExpressions(SessionFactoryScope scope) {
scope.inSession(
s -> {
s.createQuery( "select p.id from Person p where p.gender = :gender and :gender = 'MALE'", Long.class )
.setParameter( "gender", Gender.MALE )
.getResultList();
s.createQuery( "select p.id from Person p where p.gender = :gender and :gender = org.hibernate.orm.test.mapping.converted.enums.Gender.MALE", Long.class )
.setParameter( "gender", Gender.MALE )
.getResultList();
s.createQuery( "select p.id from Person p where :gender = org.hibernate.orm.test.mapping.converted.enums.Gender.MALE and p.gender = :gender", Long.class )
.setParameter( "gender", Gender.MALE )
.getResultList();
s.createQuery( "select p.id from Person p where :gender = 'MALE' and p.gender = :gender", Long.class )
.setParameter( "gender", Gender.MALE )
.getResultList();
s.createQuery( "select p.id from Person p where :gender = 'MALE' or :gender = 'FEMALE' and p.gender = :gender", Long.class )
.setParameter( "gender", Gender.MALE )
.getResultList();
}
);
}
@Entity(name = "Person")
public static
|
VarcharEnumTypeTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/TransportTermsEnumAction.java
|
{
"start": 36031,
"end": 36749
}
|
class ____ implements Iterator<String>, Comparable<TermIterator> {
private final Iterator<String> iterator;
private String current;
private TermIterator(Iterator<String> iterator) {
this.iterator = iterator;
this.current = iterator.next();
}
public String term() {
return current;
}
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public String next() {
return current = iterator.next();
}
@Override
public int compareTo(TermIterator o) {
return current.compareTo(o.term());
}
}
}
|
TermIterator
|
java
|
netty__netty
|
transport-native-io_uring/src/test/java/io/netty/channel/uring/IoUringEventLoopTest.java
|
{
"start": 1349,
"end": 4008
}
|
class ____ extends AbstractSingleThreadEventLoopTest {
public static final Runnable EMPTY_RUNNABLE = () -> {
};
@BeforeAll
public static void loadJNI() {
assumeTrue(IoUring.isAvailable());
}
protected IoHandlerFactory newIoHandlerFactory() {
return IoUringIoHandler.newFactory();
}
@Override
protected EventLoopGroup newEventLoopGroup() {
return new MultiThreadIoEventLoopGroup(1, newIoHandlerFactory());
}
@Override
protected EventLoopGroup newAutoScalingEventLoopGroup() {
return new MultiThreadIoEventLoopGroup(SCALING_MAX_THREADS, (Executor) null, AUTO_SCALING_CHOOSER_FACTORY,
IoUringIoHandler.newFactory(new IoUringIoHandlerConfig().setSingleIssuer(false)));
}
@Override
protected Channel newChannel() {
return new IoUringServerSocketChannel();
}
@Override
protected Class<? extends ServerChannel> serverChannelClass() {
return IoUringServerSocketChannel.class;
}
@Test
public void testSubmitMultipleTasksAndEnsureTheseAreExecuted() throws Exception {
EventLoopGroup group = new MultiThreadIoEventLoopGroup(1, newIoHandlerFactory());
try {
EventLoop loop = group.next();
loop.submit(EMPTY_RUNNABLE).sync();
loop.submit(EMPTY_RUNNABLE).sync();
loop.submit(EMPTY_RUNNABLE).sync();
loop.submit(EMPTY_RUNNABLE).sync();
} finally {
group.shutdownGracefully();
}
}
@RepeatedTest(100)
public void shutdownNotSoGracefully() throws Exception {
EventLoopGroup group = new MultiThreadIoEventLoopGroup(1, newIoHandlerFactory());
CountDownLatch latch = new CountDownLatch(1);
group.submit(() -> latch.countDown());
latch.await(5, TimeUnit.SECONDS);
assertTrue(group.shutdownGracefully(0L, 0L, TimeUnit.NANOSECONDS)
.await(1500L, TimeUnit.MILLISECONDS));
}
@Test
public void shutsDownGracefully() throws Exception {
EventLoopGroup group = new MultiThreadIoEventLoopGroup(1, newIoHandlerFactory());
assertTrue(group.shutdownGracefully(1L, 1L, TimeUnit.MILLISECONDS)
.await(1500L, TimeUnit.MILLISECONDS));
}
@Test
public void testSchedule() throws Exception {
EventLoopGroup group = new MultiThreadIoEventLoopGroup(1, IoUringIoHandler.newFactory());
try {
EventLoop loop = group.next();
loop.schedule(EMPTY_RUNNABLE, 1, TimeUnit.SECONDS).sync();
} finally {
group.shutdownGracefully();
}
}
}
|
IoUringEventLoopTest
|
java
|
mockito__mockito
|
mockito-integration-tests/extensions-tests/src/test/java/org/mockitousage/plugins/logger/MyMockitoLogger.java
|
{
"start": 319,
"end": 1390
}
|
class ____ implements MockitoLogger {
private static final ThreadLocal<Boolean> enabled =
new ThreadLocal<Boolean>() {
@Override
protected Boolean initialValue() {
return false;
}
};
private static final ThreadLocal<List<Object>> loggedItems =
new ThreadLocal<List<Object>>() {
@Override
protected List<Object> initialValue() {
return new ArrayList<Object>();
}
};
private final MockitoLogger defaultLogger = new ConsoleMockitoLogger();
@Override
public void log(Object what) {
if (enabled.get()) {
loggedItems.get().add(what);
} else {
defaultLogger.log(what);
}
}
static void enable() {
enabled.set(true);
}
static List<Object> getLoggedItems() {
return loggedItems.get();
}
static void clear() {
enabled.remove();
loggedItems.remove();
}
}
|
MyMockitoLogger
|
java
|
spring-projects__spring-security
|
crypto/src/test/java/org/springframework/security/crypto/encrypt/EncryptorsTests.java
|
{
"start": 782,
"end": 2807
}
|
class ____ {
@Test
public void stronger() throws Exception {
CryptoAssumptions.assumeGCMJCE();
BytesEncryptor encryptor = Encryptors.stronger("password", "5c0744940b5c369b");
byte[] result = encryptor.encrypt("text".getBytes("UTF-8"));
assertThat(result).isNotNull();
assertThat(new String(result).equals("text")).isFalse();
assertThat(new String(encryptor.decrypt(result))).isEqualTo("text");
assertThat(new String(result)).isNotEqualTo(new String(encryptor.encrypt("text".getBytes())));
}
@Test
public void standard() throws Exception {
CryptoAssumptions.assumeCBCJCE();
BytesEncryptor encryptor = Encryptors.standard("password", "5c0744940b5c369b");
byte[] result = encryptor.encrypt("text".getBytes("UTF-8"));
assertThat(result).isNotNull();
assertThat(new String(result).equals("text")).isFalse();
assertThat(new String(encryptor.decrypt(result))).isEqualTo("text");
assertThat(new String(result)).isNotEqualTo(new String(encryptor.encrypt("text".getBytes())));
}
@Test
public void preferred() {
CryptoAssumptions.assumeGCMJCE();
TextEncryptor encryptor = Encryptors.delux("password", "5c0744940b5c369b");
String result = encryptor.encrypt("text");
assertThat(result).isNotNull();
assertThat(result.equals("text")).isFalse();
assertThat(encryptor.decrypt(result)).isEqualTo("text");
assertThat(result.equals(encryptor.encrypt("text"))).isFalse();
}
@Test
public void text() {
CryptoAssumptions.assumeCBCJCE();
TextEncryptor encryptor = Encryptors.text("password", "5c0744940b5c369b");
String result = encryptor.encrypt("text");
assertThat(result).isNotNull();
assertThat(result.equals("text")).isFalse();
assertThat(encryptor.decrypt(result)).isEqualTo("text");
assertThat(result.equals(encryptor.encrypt("text"))).isFalse();
}
@Test
public void noOpText() {
TextEncryptor encryptor = Encryptors.noOpText();
assertThat(encryptor.encrypt("text")).isEqualTo("text");
assertThat(encryptor.decrypt("text")).isEqualTo("text");
}
}
|
EncryptorsTests
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorProcessor.java
|
{
"start": 1064,
"end": 5699
}
|
class ____ implements Processor {
private ProcessingEnvironment env;
@Override
public Set<String> getSupportedOptions() {
return Set.of();
}
@Override
public Set<String> getSupportedAnnotationTypes() {
return Set.of(Evaluator.class.getName(), MvEvaluator.class.getName(), ConvertEvaluator.class.getName());
}
@Override
public SourceVersion getSupportedSourceVersion() {
return SourceVersion.RELEASE_21;
}
@Override
public void init(ProcessingEnvironment processingEnvironment) {
this.env = processingEnvironment;
}
@Override
public Iterable<? extends Completion> getCompletions(
Element element,
AnnotationMirror annotationMirror,
ExecutableElement executableElement,
String s
) {
return List.of();
}
@Override
public boolean process(Set<? extends TypeElement> set, RoundEnvironment roundEnvironment) {
for (TypeElement ann : set) {
for (Element evaluatorMethod : roundEnvironment.getElementsAnnotatedWith(ann)) {
var warnExceptionsTypes = Annotations.listAttributeValues(
evaluatorMethod,
Set.of(Evaluator.class, MvEvaluator.class, ConvertEvaluator.class),
"warnExceptions"
);
Evaluator evaluatorAnn = evaluatorMethod.getAnnotation(Evaluator.class);
if (evaluatorAnn != null) {
try {
AggregatorProcessor.write(
evaluatorMethod,
"evaluator",
new EvaluatorImplementer(
env.getElementUtils(),
env.getTypeUtils(),
(ExecutableElement) evaluatorMethod,
evaluatorAnn.extraName(),
warnExceptionsTypes,
evaluatorAnn.allNullsIsNull()
).sourceFile(),
env
);
} catch (Exception e) {
env.getMessager().printMessage(Diagnostic.Kind.ERROR, "failed to build " + evaluatorMethod.getEnclosingElement());
throw e;
}
}
MvEvaluator mvEvaluatorAnn = evaluatorMethod.getAnnotation(MvEvaluator.class);
if (mvEvaluatorAnn != null) {
try {
AggregatorProcessor.write(
evaluatorMethod,
"evaluator",
new MvEvaluatorImplementer(
env.getElementUtils(),
(ExecutableElement) evaluatorMethod,
mvEvaluatorAnn.extraName(),
mvEvaluatorAnn.finish(),
mvEvaluatorAnn.single(),
mvEvaluatorAnn.ascending(),
warnExceptionsTypes
).sourceFile(),
env
);
} catch (Exception e) {
env.getMessager().printMessage(Diagnostic.Kind.ERROR, "failed to build " + evaluatorMethod.getEnclosingElement());
throw e;
}
}
ConvertEvaluator convertEvaluatorAnn = evaluatorMethod.getAnnotation(ConvertEvaluator.class);
if (convertEvaluatorAnn != null) {
try {
AggregatorProcessor.write(
evaluatorMethod,
"evaluator",
new ConvertEvaluatorImplementer(
env.getElementUtils(),
env.getTypeUtils(),
(ExecutableElement) evaluatorMethod,
convertEvaluatorAnn.extraName(),
warnExceptionsTypes
).sourceFile(),
env
);
} catch (Exception e) {
env.getMessager().printMessage(Diagnostic.Kind.ERROR, "failed to build " + evaluatorMethod.getEnclosingElement());
throw e;
}
}
}
}
return true;
}
}
|
EvaluatorProcessor
|
java
|
grpc__grpc-java
|
testing/src/main/java/io/grpc/testing/TestMethodDescriptors.java
|
{
"start": 961,
"end": 1806
}
|
class ____ {
private TestMethodDescriptors() {}
/**
* Creates a new method descriptor that always creates zero length messages, and always parses to
* null objects.
*
* @since 1.1.0
*/
public static MethodDescriptor<Void, Void> voidMethod() {
return MethodDescriptor.<Void, Void>newBuilder()
.setType(MethodType.UNARY)
.setFullMethodName(MethodDescriptor.generateFullMethodName("service_foo", "method_bar"))
.setRequestMarshaller(TestMethodDescriptors.voidMarshaller())
.setResponseMarshaller(TestMethodDescriptors.voidMarshaller())
.build();
}
/**
* Creates a new marshaller that does nothing.
*
* @since 1.1.0
*/
public static MethodDescriptor.Marshaller<Void> voidMarshaller() {
return new NoopMarshaller();
}
private static final
|
TestMethodDescriptors
|
java
|
google__error-prone
|
core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByExpression.java
|
{
"start": 5704,
"end": 7664
}
|
class ____ extends VarSymbol {
EnclosingInstanceSymbol(Names names, Symbol curr) {
super(
Flags.SYNTHETIC,
names.fromString(GuardedByExpression.ENCLOSING_INSTANCE_NAME),
curr.type,
curr);
}
@Override
public int hashCode() {
return Objects.hash(ENCLOSING_INSTANCE_NAME, owner.hashCode());
}
@Override
public boolean equals(Object other) {
if (!(other instanceof VarSymbol that)) {
return false;
}
if (!that.getSimpleName().contentEquals(ENCLOSING_INSTANCE_NAME)) {
return false;
}
return owner.equals(that.owner);
}
}
ClassLiteral classLiteral(Symbol clazz) {
return ClassLiteral.create(clazz);
}
TypeLiteral typeLiteral(Symbol type) {
return TypeLiteral.create(type);
}
Select select(GuardedByExpression base, Symbol member) {
if (member instanceof VarSymbol varSymbol) {
return select(base, varSymbol);
}
if (member instanceof MethodSymbol methodSymbol) {
return select(base, methodSymbol);
}
throw new IllegalStateException("Bad select expression: expected symbol " + member.getKind());
}
Select select(GuardedByExpression base, Symbol.VarSymbol member) {
return Select.create(base, member, member.type);
}
Select select(GuardedByExpression base, Symbol.MethodSymbol member) {
return Select.create(base, member, member.getReturnType());
}
GuardedByExpression select(GuardedByExpression base, Select select) {
return Select.create(base, select.sym(), select.type());
}
LocalVariable localVariable(Symbol.VarSymbol varSymbol) {
return LocalVariable.create(varSymbol);
}
Erroneous error(String guardString) {
return new Erroneous(guardString);
}
}
/** {@link GuardedByExpression} kind. */
public
|
EnclosingInstanceSymbol
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassUtils.java
|
{
"start": 2558,
"end": 3339
}
|
class ____.
* @since 6.0.10
*/
static final String CANDIDATE_ATTRIBUTE =
Conventions.getQualifiedAttributeName(ConfigurationClassPostProcessor.class, "candidate");
static final String CONFIGURATION_CLASS_ATTRIBUTE =
Conventions.getQualifiedAttributeName(ConfigurationClassPostProcessor.class, "configurationClass");
static final String ORDER_ATTRIBUTE =
Conventions.getQualifiedAttributeName(ConfigurationClassPostProcessor.class, "order");
private static final Log logger = LogFactory.getLog(ConfigurationClassUtils.class);
private static final Set<String> candidateIndicators = Set.of(
Component.class.getName(),
ComponentScan.class.getName(),
Import.class.getName(),
ImportResource.class.getName());
/**
* Initialize a configuration
|
candidate
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/ClassOrdererContext.java
|
{
"start": 864,
"end": 1677
}
|
class ____; never {@code null}
*/
List<? extends ClassDescriptor> getClassDescriptors();
/**
* Get the configuration parameter stored under the specified {@code key}.
*
* <p>If no such key is present in the {@code ConfigurationParameters} for
* the JUnit Platform, an attempt will be made to look up the value as a
* JVM system property. If no such system property exists, an attempt will
* be made to look up the value in the JUnit Platform properties file.
*
* @param key the key to look up; never {@code null} or blank
* @return an {@code Optional} containing the value; never {@code null}
* but potentially empty
*
* @see System#getProperty(String)
* @see org.junit.platform.engine.ConfigurationParameters
*/
Optional<String> getConfigurationParameter(String key);
}
|
descriptors
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/id/uuid/generator/UUID2GeneratorStringUniqueIdentifierIdTest.java
|
{
"start": 2265,
"end": 2780
}
|
class ____ {
@Id
@GenericGenerator(name = "uuid", strategy = "uuid2")
@GeneratedValue(generator = "uuid")
@Column(columnDefinition = "UNIQUEIDENTIFIER")
private String id;
@ElementCollection
@JoinTable(name = "foo_values")
@Column(name = "foo_value")
private final Set<String> fooValues = new HashSet<>();
public String getId() {
return id.toUpperCase();
}
public void setId(String id) {
this.id = id;
}
public Set<String> getFooValues() {
return fooValues;
}
}
}
|
FooEntity
|
java
|
reactor__reactor-core
|
reactor-core/src/main/java/reactor/util/retry/Retry.java
|
{
"start": 4507,
"end": 11733
}
|
interface ____ {
/**
* The total number of retries since the source first was subscribed to (in other words the number of errors -1
* since the source was first subscribed to).
*
* @return the total number of retries since the source first was subscribed to.
*/
long totalRetries();
/**
* Retry counter resetting after each {@link org.reactivestreams.Subscriber#onNext(Object) onNext} (in other
* words the number of errors -1 since the latest {@link org.reactivestreams.Subscriber#onNext(Object) onNext}).
*
* @return the number of retries since the latest {@link org.reactivestreams.Subscriber#onNext(Object) onNext},
* or the number of retries since the source first was subscribed to if there hasn't been any
* {@link org.reactivestreams.Subscriber#onNext(Object) onNext} signals (in which case
* {@link RetrySignal#totalRetries()} and {@link RetrySignal#totalRetriesInARow()} are equivalent).
*/
long totalRetriesInARow();
/**
* The {@link Throwable} that caused the current {@link org.reactivestreams.Subscriber#onError(Throwable) onError} signal.
*
* @return the current failure.
*/
Throwable failure();
/**
* Return a read-only view of the user provided context, which may be used to store
* objects to be reset/rolled-back or otherwise mutated before or after a retry.
*
* @return a read-only view of a user provided context.
*/
default ContextView retryContextView() {
return Context.empty();
}
/**
* An immutable copy of this {@link RetrySignal} which is guaranteed to give a consistent view
* of the state at the time at which this method is invoked.
* This is especially useful if this {@link RetrySignal} is a transient view of the state of the underlying
* retry subscriber.
*
* @return an immutable copy of the current {@link RetrySignal}, always safe to use
*/
default RetrySignal copy() {
return new ImmutableRetrySignal(totalRetries(), totalRetriesInARow(), failure(), retryContextView());
}
}
/**
* A {@link RetryBackoffSpec} preconfigured for exponential backoff strategy with jitter, given a maximum number of retry attempts
* and a minimum {@link Duration} for the backoff.
* <p>
* <img class="marble" src="doc-files/marbles/retrySpecBackoff.svg" alt="">
*
* @param maxAttempts the maximum number of retry attempts to allow
* @param minBackoff the minimum {@link Duration} for the first backoff
* @return the exponential backoff spec for further configuration
* @see RetryBackoffSpec#maxAttempts(long)
* @see RetryBackoffSpec#minBackoff(Duration)
*/
public static RetryBackoffSpec backoff(long maxAttempts, Duration minBackoff) {
return new RetryBackoffSpec(Context.empty(), maxAttempts, t -> true, false, minBackoff, MAX_BACKOFF, 2, 0.5d, Schedulers::parallel,
NO_OP_CONSUMER, NO_OP_CONSUMER, NO_OP_BIFUNCTION, NO_OP_BIFUNCTION,
RetryBackoffSpec.BACKOFF_EXCEPTION_GENERATOR);
}
/**
* A {@link RetryBackoffSpec} preconfigured for fixed delays (min backoff equals max backoff, no jitter), given a maximum number of retry attempts
* and the fixed {@link Duration} for the backoff.
* <p>
* <img class="marble" src="doc-files/marbles/retrySpecFixed.svg" alt="">
* <p>
* Note that calling {@link RetryBackoffSpec#minBackoff(Duration)} or {@link RetryBackoffSpec#maxBackoff(Duration)} would switch
* back to an exponential backoff strategy.
*
* @param maxAttempts the maximum number of retry attempts to allow
* @param fixedDelay the {@link Duration} of the fixed delays
* @return the fixed delays spec for further configuration
* @see RetryBackoffSpec#maxAttempts(long)
* @see RetryBackoffSpec#minBackoff(Duration)
* @see RetryBackoffSpec#maxBackoff(Duration)
*/
public static RetryBackoffSpec fixedDelay(long maxAttempts, Duration fixedDelay) {
return new RetryBackoffSpec(Context.empty(), maxAttempts, t -> true, false, fixedDelay, fixedDelay, 2, 0d, Schedulers::parallel,
NO_OP_CONSUMER, NO_OP_CONSUMER, NO_OP_BIFUNCTION, NO_OP_BIFUNCTION,
RetryBackoffSpec.BACKOFF_EXCEPTION_GENERATOR);
}
/**
* A {@link RetrySpec} preconfigured for a simple strategy with maximum number of retry attempts.
* <p>
* <img class="marble" src="doc-files/marbles/retrySpecAttempts.svg" alt="">
*
* @param max the maximum number of retry attempts to allow
* @return the max attempt spec for further configuration
* @see RetrySpec#maxAttempts(long)
*/
public static RetrySpec max(long max) {
return new RetrySpec(Context.empty(), max, t -> true, false, NO_OP_CONSUMER, NO_OP_CONSUMER, NO_OP_BIFUNCTION, NO_OP_BIFUNCTION,
RetrySpec.RETRY_EXCEPTION_GENERATOR);
}
/**
* A {@link RetrySpec} preconfigured for a simple strategy with maximum number of retry attempts over
* subsequent transient errors. An {@link org.reactivestreams.Subscriber#onNext(Object)} between
* errors resets the counter (see {@link RetrySpec#transientErrors(boolean)}).
* <p>
* <img class="marble" src="doc-files/marbles/retrySpecInARow.svg" alt="">
*
* @param maxInARow the maximum number of retry attempts to allow in a row, reset by successful onNext
* @return the max in a row spec for further configuration
* @see RetrySpec#maxAttempts(long)
* @see RetrySpec#transientErrors(boolean)
*/
public static RetrySpec maxInARow(long maxInARow) {
return new RetrySpec(Context.empty(), maxInARow, t -> true, true, NO_OP_CONSUMER, NO_OP_CONSUMER, NO_OP_BIFUNCTION, NO_OP_BIFUNCTION,
RETRY_EXCEPTION_GENERATOR);
}
/**
* A {@link RetrySpec} preconfigured for the most simplistic retry strategy: retry immediately and indefinitely
* (similar to {@link Flux#retry()}).
*
* @return the retry indefinitely spec for further configuration
*/
public static RetrySpec indefinitely() {
return new RetrySpec(Context.empty(), Long.MAX_VALUE, t -> true, false, NO_OP_CONSUMER, NO_OP_CONSUMER, NO_OP_BIFUNCTION, NO_OP_BIFUNCTION,
RetrySpec.RETRY_EXCEPTION_GENERATOR);
}
/**
* A wrapper around {@link Function} to provide {@link Retry} by using lambda expressions.
*
* @param function the {@link Function} representing the desired {@link Retry} strategy as a lambda
* @return the {@link Retry} strategy adapted from the {@link Function}
*/
public static final Retry from(Function<Flux<RetrySignal>, ? extends Publisher<?>> function) {
return new Retry(Context.empty()) {
@Override
public Publisher<?> generateCompanion(Flux<RetrySignal> retrySignalCompanion) {
return function.apply(retrySignalCompanion);
}
};
}
/**
* An adapter for {@link Flux} of {@link Throwable}-based {@link Function} to provide {@link Retry}
* from a legacy retryWhen {@link Function}.
*
* @param function the {@link Function} representing the desired {@link Retry} strategy as a lambda
* @return the {@link Retry} strategy adapted from the {@link Function}
*/
public static final Retry withThrowable(Function<Flux<Throwable>, ? extends Publisher<?>> function) {
return new Retry(Context.empty()) {
@Override
public Publisher<?> generateCompanion(Flux<RetrySignal> retrySignals) {
return function.apply(retrySignals.map(RetrySignal::failure));
}
};
}
}
|
RetrySignal
|
java
|
google__error-prone
|
check_api/src/main/java/com/google/errorprone/fixes/SuggestedFix.java
|
{
"start": 14027,
"end": 14506
}
|
class ____ extends InsertionFix {
public static PostfixInsertion create(DiagnosticPosition position, String insertion) {
checkArgument(position.getStartPosition() >= 0, "invalid start position");
return new AutoValue_SuggestedFix_PostfixInsertion(position, insertion);
}
@Override
protected int getInsertionIndex(EndPosTable endPositions) {
return position().getEndPosition(endPositions);
}
}
@AutoValue
abstract static
|
PostfixInsertion
|
java
|
elastic__elasticsearch
|
x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/enrollment/TransportKibanaEnrollmentActionTests.java
|
{
"start": 2307,
"end": 7345
}
|
class ____ extends ESTestCase {
private List<CreateServiceAccountTokenRequest> createServiceAccountTokenRequests;
private TransportKibanaEnrollmentAction action;
private Client client;
private static final String TOKEN_NAME = TransportKibanaEnrollmentAction.getTokenName();
private static final SecureString TOKEN_VALUE = new SecureString("token-value".toCharArray());
@BeforeClass
public static void muteInFips() {
assumeFalse("Enrollment is not supported in FIPS 140-2 as we are using PKCS#12 keystores", inFipsJvm());
}
@Before
@SuppressWarnings("unchecked")
public void setup() throws Exception {
createServiceAccountTokenRequests = new ArrayList<>();
final Environment env = mock(Environment.class);
final Path tempDir = createTempDir();
final Path httpCaPath = tempDir.resolve("httpCa.p12");
Files.copy(getDataPath("/org/elasticsearch/xpack/security/action/enrollment/httpCa.p12"), httpCaPath);
when(env.configDir()).thenReturn(tempDir);
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("keystore.secure_password", "password");
final Settings settings = Settings.builder().put("keystore.path", httpCaPath).setSecureSettings(secureSettings).build();
when(env.settings()).thenReturn(settings);
final SSLService sslService = mock(SSLService.class);
final SslConfiguration sslConfiguration = SslSettingsLoader.load(settings, null, env);
when(sslService.getHttpTransportSSLConfiguration()).thenReturn(sslConfiguration);
final ThreadContext threadContext = new ThreadContext(settings);
final ThreadPool threadPool = mock(ThreadPool.class);
when(threadPool.getThreadContext()).thenReturn(threadContext);
client = mock(Client.class);
when(client.threadPool()).thenReturn(threadPool);
doAnswer(invocation -> {
CreateServiceAccountTokenRequest createServiceAccountTokenRequest = (CreateServiceAccountTokenRequest) invocation
.getArguments()[1];
createServiceAccountTokenRequests.add(createServiceAccountTokenRequest);
ActionListener<CreateServiceAccountTokenResponse> listener = (ActionListener) invocation.getArguments()[2];
listener.onResponse(CreateServiceAccountTokenResponse.created(TOKEN_NAME, TOKEN_VALUE));
return null;
}).when(client).execute(eq(CreateServiceAccountTokenAction.INSTANCE), any(), any());
final TransportService transportService = new TransportService(
Settings.EMPTY,
mock(Transport.class),
threadPool,
TransportService.NOOP_TRANSPORT_INTERCEPTOR,
x -> null,
null,
Collections.emptySet()
);
action = new TransportKibanaEnrollmentAction(transportService, client, sslService, mock(ActionFilters.class));
}
public void testKibanaEnrollment() {
assertThat(TOKEN_NAME, startsWith("enroll-process-token-"));
final KibanaEnrollmentRequest request = new KibanaEnrollmentRequest();
final PlainActionFuture<KibanaEnrollmentResponse> future = new PlainActionFuture<>();
action.doExecute(mock(Task.class), request, future);
final KibanaEnrollmentResponse response = future.actionGet();
assertThat(
response.getHttpCa(),
startsWith(
"MIIDSjCCAjKgAwIBAgIVALCgZXvbceUrjJaQMheDCX0kXnRJMA0GCSqGSIb3DQEBCwUAMDQxMjAw"
+ "BgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2VuZXJhdGVkIENBMB4XDTIx"
+ "MDQyODEyNTY0MVoXDTI0MDQyNzEyNTY0MVowNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZp"
+ "Y2F0ZSBUb29sIEF1dG9nZW5lcmF0ZWQgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK"
+ "AoIBAQCCJbOU4JvxDD/F"
)
);
assertThat(response.getTokenValue(), equalTo(TOKEN_VALUE));
assertThat(createServiceAccountTokenRequests, hasSize(1));
}
public void testKibanaEnrollmentFailedTokenCreation() {
// Override change password mock
doAnswer(invocation -> {
@SuppressWarnings("unchecked")
ActionListener<CreateServiceAccountTokenResponse> listener = (ActionListener) invocation.getArguments()[2];
listener.onFailure(new IllegalStateException());
return null;
}).when(client).execute(eq(CreateServiceAccountTokenAction.INSTANCE), any(), any());
final KibanaEnrollmentRequest request = new KibanaEnrollmentRequest();
final PlainActionFuture<KibanaEnrollmentResponse> future = new PlainActionFuture<>();
action.doExecute(mock(Task.class), request, future);
ElasticsearchException e = expectThrows(ElasticsearchException.class, future::actionGet);
assertThat(e.getDetailedMessage(), containsString("Failed to create token for the [elastic/kibana] service account"));
}
}
|
TransportKibanaEnrollmentActionTests
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/buildextension/beans/BeanRegistrarTest.java
|
{
"start": 9527,
"end": 9798
}
|
class ____ implements BeanCreator<List<String>> {
@Override
public List<String> create(SyntheticCreationalContext<List<String>> context) {
return List.of(context.getInjectedReference(String.class));
}
}
public static
|
ListCreator
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java
|
{
"start": 1538,
"end": 11401
}
|
class ____ extends MockScriptPlugin {
@Override
public Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
return Collections.singletonMap("dummy", params -> null);
}
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(DummyScriptPlugin.class);
}
@Override
public void setupSuiteScopeCluster() throws Exception {
assertAcked(prepareCreate("idx").setMapping("ip", "type=ip", "ips", "type=ip"));
waitForRelocation(ClusterHealthStatus.GREEN);
indexRandom(
true,
prepareIndex("idx").setId("1").setSource("ip", "192.168.1.7", "ips", Arrays.asList("192.168.0.13", "192.168.1.2")),
prepareIndex("idx").setId("2").setSource("ip", "192.168.1.10", "ips", Arrays.asList("192.168.1.25", "192.168.1.28")),
prepareIndex("idx").setId("3")
.setSource("ip", "2001:db8::ff00:42:8329", "ips", Arrays.asList("2001:db8::ff00:42:8329", "2001:db8::ff00:42:8380"))
);
assertAcked(prepareCreate("idx_unmapped"));
waitForRelocation(ClusterHealthStatus.GREEN);
refresh();
}
public void testSingleValuedField() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
AggregationBuilders.ipRange("my_range")
.field("ip")
.addUnboundedTo("192.168.1.0")
.addRange("192.168.1.0", "192.168.1.10")
.addUnboundedFrom("192.168.1.10")
),
response -> {
Range range = response.getAggregations().get("my_range");
assertEquals(3, range.getBuckets().size());
Range.Bucket bucket1 = range.getBuckets().get(0);
assertNull(bucket1.getFrom());
assertEquals("192.168.1.0", bucket1.getTo());
assertEquals("*-192.168.1.0", bucket1.getKey());
assertEquals(0, bucket1.getDocCount());
Range.Bucket bucket2 = range.getBuckets().get(1);
assertEquals("192.168.1.0", bucket2.getFrom());
assertEquals("192.168.1.10", bucket2.getTo());
assertEquals("192.168.1.0-192.168.1.10", bucket2.getKey());
assertEquals(1, bucket2.getDocCount());
Range.Bucket bucket3 = range.getBuckets().get(2);
assertEquals("192.168.1.10", bucket3.getFrom());
assertNull(bucket3.getTo());
assertEquals("192.168.1.10-*", bucket3.getKey());
assertEquals(2, bucket3.getDocCount());
}
);
}
public void testMultiValuedField() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
AggregationBuilders.ipRange("my_range")
.field("ips")
.addUnboundedTo("192.168.1.0")
.addRange("192.168.1.0", "192.168.1.10")
.addUnboundedFrom("192.168.1.10")
),
response -> {
Range range = response.getAggregations().get("my_range");
assertEquals(3, range.getBuckets().size());
Range.Bucket bucket1 = range.getBuckets().get(0);
assertNull(bucket1.getFrom());
assertEquals("192.168.1.0", bucket1.getTo());
assertEquals("*-192.168.1.0", bucket1.getKey());
assertEquals(1, bucket1.getDocCount());
Range.Bucket bucket2 = range.getBuckets().get(1);
assertEquals("192.168.1.0", bucket2.getFrom());
assertEquals("192.168.1.10", bucket2.getTo());
assertEquals("192.168.1.0-192.168.1.10", bucket2.getKey());
assertEquals(1, bucket2.getDocCount());
Range.Bucket bucket3 = range.getBuckets().get(2);
assertEquals("192.168.1.10", bucket3.getFrom());
assertNull(bucket3.getTo());
assertEquals("192.168.1.10-*", bucket3.getKey());
assertEquals(2, bucket3.getDocCount());
}
);
}
public void testIpMask() {
assertNoFailuresAndResponse(
prepareSearch("idx").addAggregation(
AggregationBuilders.ipRange("my_range")
.field("ips")
.addMaskRange("::/0")
.addMaskRange("0.0.0.0/0")
.addMaskRange("2001:db8::/64")
),
response -> {
Range range = response.getAggregations().get("my_range");
assertEquals(3, range.getBuckets().size());
Range.Bucket bucket1 = range.getBuckets().get(0);
assertEquals("::/0", bucket1.getKey());
assertEquals(3, bucket1.getDocCount());
Range.Bucket bucket2 = range.getBuckets().get(1);
assertEquals("0.0.0.0/0", bucket2.getKey());
assertEquals(2, bucket2.getDocCount());
Range.Bucket bucket3 = range.getBuckets().get(2);
assertEquals("2001:db8::/64", bucket3.getKey());
assertEquals(1, bucket3.getDocCount());
}
);
}
public void testPartiallyUnmapped() {
assertNoFailuresAndResponse(
prepareSearch("idx", "idx_unmapped").addAggregation(
AggregationBuilders.ipRange("my_range")
.field("ip")
.addUnboundedTo("192.168.1.0")
.addRange("192.168.1.0", "192.168.1.10")
.addUnboundedFrom("192.168.1.10")
),
response -> {
Range range = response.getAggregations().get("my_range");
assertEquals(3, range.getBuckets().size());
Range.Bucket bucket1 = range.getBuckets().get(0);
assertNull(bucket1.getFrom());
assertEquals("192.168.1.0", bucket1.getTo());
assertEquals("*-192.168.1.0", bucket1.getKey());
assertEquals(0, bucket1.getDocCount());
Range.Bucket bucket2 = range.getBuckets().get(1);
assertEquals("192.168.1.0", bucket2.getFrom());
assertEquals("192.168.1.10", bucket2.getTo());
assertEquals("192.168.1.0-192.168.1.10", bucket2.getKey());
assertEquals(1, bucket2.getDocCount());
Range.Bucket bucket3 = range.getBuckets().get(2);
assertEquals("192.168.1.10", bucket3.getFrom());
assertNull(bucket3.getTo());
assertEquals("192.168.1.10-*", bucket3.getKey());
assertEquals(2, bucket3.getDocCount());
}
);
}
public void testUnmapped() {
assertNoFailuresAndResponse(
prepareSearch("idx_unmapped").addAggregation(
AggregationBuilders.ipRange("my_range")
.field("ip")
.addUnboundedTo("192.168.1.0")
.addRange("192.168.1.0", "192.168.1.10")
.addUnboundedFrom("192.168.1.10")
),
response -> {
Range range = response.getAggregations().get("my_range");
assertEquals(3, range.getBuckets().size());
Range.Bucket bucket1 = range.getBuckets().get(0);
assertNull(bucket1.getFrom());
assertEquals("192.168.1.0", bucket1.getTo());
assertEquals("*-192.168.1.0", bucket1.getKey());
assertEquals(0, bucket1.getDocCount());
Range.Bucket bucket2 = range.getBuckets().get(1);
assertEquals("192.168.1.0", bucket2.getFrom());
assertEquals("192.168.1.10", bucket2.getTo());
assertEquals("192.168.1.0-192.168.1.10", bucket2.getKey());
assertEquals(0, bucket2.getDocCount());
Range.Bucket bucket3 = range.getBuckets().get(2);
assertEquals("192.168.1.10", bucket3.getFrom());
assertNull(bucket3.getTo());
assertEquals("192.168.1.10-*", bucket3.getKey());
assertEquals(0, bucket3.getDocCount());
}
);
}
public void testRejectsScript() {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> prepareSearch("idx").addAggregation(
AggregationBuilders.ipRange("my_range").script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap()))
).get()
);
assertThat(e.getMessage(), containsString("[ip_range] does not support scripts"));
}
public void testRejectsValueScript() {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> prepareSearch("idx").addAggregation(
AggregationBuilders.ipRange("my_range")
.field("ip")
.script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap()))
).get()
);
assertThat(e.getMessage(), containsString("[ip_range] does not support scripts"));
}
public void testNoRangesInQuery() {
try {
prepareSearch("idx").addAggregation(AggregationBuilders.ipRange("my_range").field("ip")).get();
fail();
} catch (SearchPhaseExecutionException spee) {
Throwable rootCause = spee.getCause().getCause();
assertThat(rootCause, instanceOf(IllegalArgumentException.class));
assertEquals(rootCause.getMessage(), "No [ranges] specified for the [my_range] aggregation");
}
}
}
|
DummyScriptPlugin
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/EventTimeTemporalJoinRewriteRule.java
|
{
"start": 2959,
"end": 8130
}
|
class ____
extends RelRule<EventTimeTemporalJoinRewriteRule.Config> {
public static final RuleSet EVENT_TIME_TEMPORAL_JOIN_REWRITE_RULES =
RuleSets.ofList(
Config.JOIN_CALC_SNAPSHOT_CALC_WMA_CALC_TS.toRule(),
Config.JOIN_CALC_SNAPSHOT_CALC_WMA_TS.toRule(),
Config.JOIN_CALC_SNAPSHOT_WMA_CALC_TS.toRule(),
Config.JOIN_CALC_SNAPSHOT_WMA_TS.toRule(),
Config.JOIN_SNAPSHOT_CALC_WMA_CALC_TS.toRule(),
Config.JOIN_SNAPSHOT_CALC_WMA_TS.toRule(),
Config.JOIN_SNAPSHOT_WMA_CALC_TS.toRule(),
Config.JOIN_SNAPSHOT_WMA_TS.toRule());
public EventTimeTemporalJoinRewriteRule(Config config) {
super(config);
}
@Override
public boolean matches(RelOptRuleCall call) {
FlinkLogicalJoin join = call.rel(0);
RexNode joinCondition = join.getCondition();
// only matches event time temporal join
return joinCondition != null
&& TemporalTableJoinUtil.isEventTimeTemporalJoin(joinCondition);
}
@Override
public void onMatch(RelOptRuleCall call) {
FlinkLogicalJoin join = call.rel(0);
FlinkLogicalRel joinRightChild = call.rel(2);
RelNode newRight = transmitSnapshotRequirement(joinRightChild);
call.transformTo(
join.copy(join.getTraitSet(), Lists.newArrayList(join.getLeft(), newRight)));
}
private RelNode transmitSnapshotRequirement(RelNode node) {
if (node instanceof FlinkLogicalCalc) {
final FlinkLogicalCalc calc = (FlinkLogicalCalc) node;
// filter is not allowed because it will corrupt the version table
if (null != calc.getProgram().getCondition()) {
throw new TableException(
"Filter is not allowed for right changelog input of event time temporal join,"
+ " it will corrupt the versioning of data. Please consider removing the filter before joining.");
}
final RelNode child = calc.getInput();
final RelNode newChild = transmitSnapshotRequirement(child);
if (newChild != child) {
return calc.copy(calc.getTraitSet(), newChild, calc.getProgram());
}
return calc;
}
if (node instanceof FlinkLogicalSnapshot) {
final FlinkLogicalSnapshot snapshot = (FlinkLogicalSnapshot) node;
assert isEventTime(snapshot.getPeriod().getType());
final RelNode child = snapshot.getInput();
final RelNode newChild = transmitSnapshotRequirement(child);
if (newChild != child) {
return snapshot.copy(snapshot.getTraitSet(), newChild, snapshot.getPeriod());
}
return snapshot;
}
if (node instanceof HepRelVertex) {
return transmitSnapshotRequirement(((HepRelVertex) node).getCurrentRel());
}
if (node instanceof FlinkLogicalWatermarkAssigner) {
final FlinkLogicalWatermarkAssigner wma = (FlinkLogicalWatermarkAssigner) node;
final RelNode child = wma.getInput();
final RelNode newChild = transmitSnapshotRequirement(child);
if (newChild != child) {
return wma.copy(
wma.getTraitSet(),
newChild,
wma.getHints(),
wma.rowtimeFieldIndex(),
wma.watermarkExpr());
}
return wma;
}
if (node instanceof FlinkLogicalTableSourceScan) {
final FlinkLogicalTableSourceScan ts = (FlinkLogicalTableSourceScan) node;
// update eventTimeSnapshotRequired to true
return ts.copy(ts.getTraitSet(), ts.relOptTable(), true);
}
return node;
}
private boolean isEventTime(RelDataType period) {
if (period instanceof TimeIndicatorRelDataType) {
return ((TimeIndicatorRelDataType) period).isEventTime();
}
return false;
}
/**
* Configuration for {@link EventTimeTemporalJoinRewriteRule}.
*
* <p>Operator tree:
*
* <pre>{@code
* Join (event time temporal)
* / \
* RelNode [Calc]
* \
* Snapshot
* \
* [Calc]
* \
* WatermarkAssigner
* \
* [Calc]
* \
* TableScan
* }</pre>
*
* <p>8 variants:
*
* <ul>
* <li>JOIN_CALC_SNAPSHOT_CALC_WMA_CALC_TS
* <li>JOIN_CALC_SNAPSHOT_CALC_WMA_TS
* <li>JOIN_CALC_SNAPSHOT_WMA_CALC_TS
* <li>JOIN_CALC_SNAPSHOT_WMA_TS
* <li>JOIN_SNAPSHOT_CALC_WMA_CALC_TS
* <li>JOIN_SNAPSHOT_CALC_WMA_TS
* <li>JOIN_SNAPSHOT_WMA_CALC_TS
* <li>JOIN_SNAPSHOT_WMA_TS
* </ul>
*/
@Value.Immutable(singleton = false)
public
|
EventTimeTemporalJoinRewriteRule
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/dispatcher/runner/DispatcherRunner.java
|
{
"start": 1180,
"end": 1551
}
|
/**
 * Runner that executes a dispatcher and exposes its termination as a future.
 * NOTE(review): assumes implementations complete the shut down future exactly
 * once when they decide to terminate -- confirm with concrete runners.
 */
interface ____ extends AutoCloseableAsync {
/**
* Return shut down future of this runner. The shut down future is being completed with the
* final {@link ApplicationStatus} once the runner wants to shut down.
*
* @return future with the final application status
*/
CompletableFuture<ApplicationStatus> getShutDownFuture();
}
|
DispatcherRunner
|
java
|
processing__processing4
|
core/src/processing/opengl/PGraphicsOpenGL.java
|
{
"start": 165917,
"end": 210727
}
|
class ____ {
// PImage formats used internally to offload
// color format conversion to save threads
static final int OPENGL_NATIVE = -1;
static final int OPENGL_NATIVE_OPAQUE = -2;
static final int BUFFER_COUNT = 3;
int[] pbos;
long[] fences;
File[] files;
int[] widths;
int[] heights;
int head;
int tail;
int size;
boolean supportsAsyncTransfers;
boolean calledThisFrame;
/// PGRAPHICS API //////////////////////////////////////////////////////////
public AsyncPixelReader() {
supportsAsyncTransfers = pgl.hasPBOs() && pgl.hasSynchronization();
if (supportsAsyncTransfers) {
pbos = new int[BUFFER_COUNT];
fences = new long[BUFFER_COUNT];
files = new File[BUFFER_COUNT];
widths = new int[BUFFER_COUNT];
heights = new int[BUFFER_COUNT];
IntBuffer intBuffer = PGL.allocateIntBuffer(BUFFER_COUNT);
intBuffer.rewind();
pgl.genBuffers(BUFFER_COUNT, intBuffer);
for (int i = 0; i < BUFFER_COUNT; i++) {
pbos[i] = intBuffer.get(i);
}
}
}
public void dispose() {
if (fences != null) {
while (size > 0) {
pgl.deleteSync(fences[tail]);
size--;
tail = (tail + 1) % BUFFER_COUNT;
}
fences = null;
}
if (pbos != null) {
for (int i = 0; i < BUFFER_COUNT; i++) {
IntBuffer intBuffer = PGL.allocateIntBuffer(pbos);
pgl.deleteBuffers(BUFFER_COUNT, intBuffer);
}
pbos = null;
}
files = null;
widths = null;
heights = null;
size = 0;
head = 0;
tail = 0;
calledThisFrame = false;
ongoingPixelTransfers.remove(this);
}
public void readAndSaveAsync(final File file) {
if (size > 0) {
boolean shouldRead = (size == BUFFER_COUNT);
if (!shouldRead) shouldRead = isLastTransferComplete();
if (shouldRead) endTransfer();
} else {
ongoingPixelTransfers.add(this);
}
beginTransfer(file);
calledThisFrame = true;
}
public void completeFinishedTransfers() {
if (size <= 0 || !asyncImageSaver.hasAvailableTarget()) return;
boolean needEndDraw = false;
if (!drawing) {
beginDraw();
needEndDraw = true;
}
while (asyncImageSaver.hasAvailableTarget() &&
isLastTransferComplete()) {
endTransfer();
}
// make sure to always unregister if there are no ongoing transfers
// so that PGraphics can be GC'd if needed
if (size <= 0) ongoingPixelTransfers.remove(this);
if (needEndDraw) endDraw();
}
protected void completeAllTransfers() {
if (size <= 0) return;
completeTransfers(size);
}
protected void completeTransfers(int count) {
if (size <= 0) return;
if (count <= 0) return;
boolean needEndDraw = false;
if (!drawing) {
beginDraw();
needEndDraw = true;
}
while (size > 0 && count > 0) {
endTransfer();
count--;
}
// make sure to always unregister if there are no ongoing transfers
// so that PGraphics can be GC'd if needed
if (size <= 0) {
ongoingPixelTransfers.remove(this);
}
if (needEndDraw) endDraw();
}
protected void awaitTransferCompletion(File file) {
if (size <= 0) return;
int i = tail; // tail -> head, wraps around (we have circular queue)
int j = 0; // 0 -> size, simple counter
int lastIndex = 0;
do {
if (file.equals(files[i])) {
lastIndex = j; // no 'break' here, we need last index for this filename
}
i = (i + 1) % BUFFER_COUNT;
j++;
} while (i != head);
if (lastIndex <= 0) return;
// Saving this file is in progress, block until transfers complete
completeTransfers(lastIndex + 1);
}
/// TRANSFERS //////////////////////////////////////////////////////////////
public boolean isLastTransferComplete() {
if (size <= 0) return false;
int status = pgl.clientWaitSync(fences[tail], 0, 0);
return (status == PGL.ALREADY_SIGNALED) ||
(status == PGL.CONDITION_SATISFIED);
}
public void beginTransfer(File file) {
// check the size of the buffer
if (widths[head] != pixelWidth || heights[head] != pixelHeight) {
if (widths[head] * heights[head] != pixelWidth * pixelHeight) {
pgl.bindBuffer(PGL.PIXEL_PACK_BUFFER, pbos[head]);
pgl.bufferData(PGL.PIXEL_PACK_BUFFER,
Integer.SIZE/8 * pixelWidth * pixelHeight,
null, PGL.STREAM_READ);
}
widths[head] = pixelWidth;
heights[head] = pixelHeight;
pgl.bindBuffer(PGL.PIXEL_PACK_BUFFER, 0);
}
pgl.bindBuffer(PGL.PIXEL_PACK_BUFFER, pbos[head]);
pgl.readPixels(0, 0, pixelWidth, pixelHeight, PGL.RGBA, PGL.UNSIGNED_BYTE, 0);
pgl.bindBuffer(PGL.PIXEL_PACK_BUFFER, 0);
fences[head] = pgl.fenceSync(PGL.SYNC_GPU_COMMANDS_COMPLETE, 0);
files[head] = file;
head = (head + 1) % BUFFER_COUNT;
size++;
}
public void endTransfer() {
pgl.deleteSync(fences[tail]);
pgl.bindBuffer(PGL.PIXEL_PACK_BUFFER, pbos[tail]);
ByteBuffer readBuffer = pgl.mapBuffer(PGL.PIXEL_PACK_BUFFER,
PGL.READ_ONLY);
if (readBuffer != null) {
int format = primaryGraphics ? OPENGL_NATIVE_OPAQUE : OPENGL_NATIVE;
PImage target = asyncImageSaver.getAvailableTarget(widths[tail],
heights[tail],
format);
if (target == null) return;
readBuffer.rewind();
readBuffer.asIntBuffer().get(target.pixels);
pgl.unmapBuffer(PGL.PIXEL_PACK_BUFFER);
asyncImageSaver.saveTargetAsync(PGraphicsOpenGL.this, target,
files[tail]);
}
pgl.bindBuffer(PGL.PIXEL_PACK_BUFFER, 0);
size--;
tail = (tail + 1) % BUFFER_COUNT;
}
}
//////////////////////////////////////////////////////////////
// LOAD/UPDATE TEXTURE
// Load the current contents of the drawing surface into a texture.
// Loads the current contents of the drawing surface into the screen texture.
// Three cases: FBO-backed primary surface (sync only), non-FBO primary
// surface (slow CPU readback path), and multisampled offscreen (MSAA blit).
public void loadTexture() {
boolean needEndDraw = false;
if (!drawing) {
beginDraw();
needEndDraw = true;
}
flush(); // To make sure the color buffer is updated.
if (primaryGraphics) {
updatePixelSize();
if (pgl.isFBOBacked()) {
// In the case of MSAA, this is needed so the back buffer is in sync
// with the rendering.
pgl.syncBackTexture();
} else {
loadTextureImpl(Texture.POINT, false);
// Here we go the slow route: we first copy the contents of the color
// buffer into a pixels array (but we keep it in native format) and
// then copy this array into the texture.
if (nativePixels == null || nativePixels.length < pixelWidth * pixelHeight) {
nativePixels = new int[pixelWidth * pixelHeight];
nativePixelBuffer = PGL.allocateIntBuffer(nativePixels);
}
beginPixelsOp(OP_READ);
try {
// See comments in readPixels() for the reason for this try/catch.
pgl.readPixelsImpl(0, 0, pixelWidth, pixelHeight, PGL.RGBA, PGL.UNSIGNED_BYTE,
nativePixelBuffer);
} catch (IndexOutOfBoundsException e) {
// Deliberately swallowed: readPixels() documents this driver quirk;
// the texture simply keeps its previous contents.
}
endPixelsOp();
if (texture != null) {
texture.setNative(nativePixelBuffer, 0, 0, pixelWidth, pixelHeight);
}
}
} else if (offscreenMultisample) {
// We need to copy the contents of the multisampled buffer to the color
// buffer, so the later is up-to-date with the last drawing.
FrameBuffer ofb = offscreenFramebuffer;
FrameBuffer mfb = multisampleFramebuffer;
if (ofb != null && mfb != null) {
mfb.copyColor(ofb);
}
}
if (needEndDraw) {
endDraw();
}
}
// Just marks the whole texture as updated
/** Marks every texel of the screen texture as needing an update. */
public void updateTexture() {
  if (texture == null) return;
  texture.updateTexels();
}
// Marks the specified rectangular subregion in the texture as
// updated.
/** Marks the given rectangular region of the screen texture as updated. */
public void updateTexture(int x, int y, int w, int h) {
  if (texture == null) return;
  texture.updateTexels(x, y, w, h);
}
// Draws wherever it is in the screen texture right now to the display.
public void updateDisplay() {
flush();
// OP_WRITE targets the visible framebuffer; drawTexture() then blits the
// current screen texture onto it.
beginPixelsOp(OP_WRITE);
drawTexture();
endPixelsOp();
}
// Lazily (re)creates the screen texture mirroring the drawing surface.
// 'sampling' selects the filtering mode, 'mipmap' enables mipmapping.
protected void loadTextureImpl(int sampling, boolean mipmap) {
updatePixelSize();
if (pixelWidth == 0 || pixelHeight == 0) return;
if (texture == null || texture.contextIsOutdated()) {
Texture.Parameters params = new Texture.Parameters(ARGB,
sampling, mipmap);
texture = new Texture(this, pixelWidth, pixelHeight, params);
// GL textures are bottom-up with respect to Processing's Y axis.
texture.invertedY(true);
texture.colorBuffer(true);
setCache(this, texture);
}
}
// Creates the texture holding the previous frame ("front" texture),
// mirroring the parameters of the current screen texture.
protected void createPTexture() {
updatePixelSize();
if (texture != null) {
ptexture = new Texture(this, pixelWidth, pixelHeight, texture.getParameters());
ptexture.invertedY(true);
ptexture.colorBuffer(true);
}
}
// Swaps the GL names of the current and previous-frame textures so the last
// frame becomes the "front" texture, then re-attaches the (new) back texture
// as the offscreen framebuffer's color buffer.
protected void swapOffscreenTextures() {
FrameBuffer ofb = offscreenFramebuffer;
if (texture != null && ptexture != null && ofb != null) {
int temp = texture.glName;
texture.glName = ptexture.glName;
ptexture.glName = temp;
ofb.setColorBuffer(texture);
}
}
// Blits the full screen texture over the entire surface.
protected void drawTexture() {
if (texture != null) {
// No blend so the texture replaces whatever is on the screen,
// irrespective of the alpha
pgl.disable(PGL.BLEND);
pgl.drawTexture(texture.glTarget, texture.glName,
texture.glWidth, texture.glHeight,
0, 0, width, height);
pgl.enable(PGL.BLEND);
}
}
// Blits the (x, y, w, h) rectangle of the screen texture onto the same
// rectangle of the surface, flipping Y to match GL's orientation.
protected void drawTexture(int x, int y, int w, int h) {
if (texture != null) {
// Processing Y axis is inverted with respect to OpenGL, so we need to
// invert the y coordinates of the screen rectangle.
pgl.disable(PGL.BLEND);
pgl.drawTexture(texture.glTarget, texture.glName,
texture.glWidth, texture.glHeight,
0, 0, pixelWidth, pixelHeight, 1,
x, y, x + w, y + h,
x, pixelHeight - (y + h), x + w, pixelHeight - y);
pgl.enable(PGL.BLEND);
}
}
// Blits the previous-frame ("front") texture over the entire surface; used
// to restore the last frame as the background of an offscreen surface.
protected void drawPTexture() {
if (ptexture != null) {
// No blend so the texture replaces whatever is on the screen,
// irrespective of the alpha
pgl.disable(PGL.BLEND);
pgl.drawTexture(ptexture.glTarget, ptexture.glName,
ptexture.glWidth, ptexture.glHeight,
0, 0, width, height);
pgl.enable(PGL.BLEND);
}
}
//////////////////////////////////////////////////////////////
// MASK
// @Override
// public void mask(int alpha[]) {
// PImage temp = get();
// temp.mask(alpha);
// set(0, 0, temp);
// }
@Override
public void mask(PImage alpha) {
updatePixelSize();
// The mask image must match the surface size exactly.
if (alpha.pixelWidth != pixelWidth || alpha.pixelHeight != pixelHeight) {
throw new RuntimeException("The PImage used with mask() must be " +
"the same size as the applet.");
}
PGraphicsOpenGL ppg = getPrimaryPG();
// The mask shader is created once and cached on the primary renderer.
if (ppg.maskShader == null) {
ppg.maskShader = new PShader(parent, defTextureShaderVertURL,
maskShaderFragURL);
}
ppg.maskShader.set("mask", alpha);
// Masking is implemented as a full-screen shader filter pass.
filter(ppg.maskShader);
}
//////////////////////////////////////////////////////////////
// FILTER
/**
* This is really inefficient and not a good idea in OpenGL. Use get() and
* set() with a smaller image area, or call the filter on an image instead,
* and then draw that.
*/
@Override
public void filter(int kind) {
  // Round-trip through a CPU-side snapshot: grab, filter, write back.
  PImage snapshot = get();
  snapshot.filter(kind);
  set(0, 0, snapshot);
}
/**
* This is really inefficient and not a good idea in OpenGL. Use get() and
* set() with a smaller image area, or call the filter on an image instead,
* and then draw that.
*/
@Override
public void filter(int kind, float param) {
  // Round-trip through a CPU-side snapshot: grab, filter, write back.
  PImage snapshot = get();
  snapshot.filter(kind, param);
  set(0, 0, snapshot);
}
@Override
public void filter(PShader shader) {
// Applies 'shader' as a full-screen filter pass over the current contents
// of the drawing surface, saving and restoring all affected draw state.
if (!shader.isPolyShader()) {
PGraphics.showWarning(INVALID_FILTER_SHADER_ERROR);
return;
}
boolean needEndDraw = false;
if (primaryGraphics) {
pgl.enableFBOLayer();
} else if (!drawing) {
beginDraw();
needEndDraw = true;
}
loadTexture();
// Scratch texture/image holding a copy of the screen to sample from.
if (filterTexture == null || filterTexture.contextIsOutdated()) {
filterTexture = new Texture(this, texture.width, texture.height,
texture.getParameters());
filterTexture.invertedY(true);
filterImage = wrapTexture(filterTexture);
}
filterTexture.set(texture);
// Disable writing to the depth buffer, so that after applying the filter we
// can still use the depth information to keep adding geometry to the scene.
pgl.depthMask(false);
// Also disabling depth testing so the texture is drawn on top of everything
// that has been drawn before.
pgl.disable(PGL.DEPTH_TEST);
// Drawing a textured quad in 2D, covering the entire screen,
// with the filter shader applied to it:
begin2D();
// Changing light configuration and shader after begin2D()
// because it calls flush().
boolean prevLights = lights;
lights = false;
int prevTextureMode = textureMode;
textureMode = NORMAL;
boolean prevStroke = stroke;
stroke = false;
int prevBlendMode = blendMode;
blendMode(REPLACE);
PShader prevShader = polyShader;
polyShader = shader;
beginShape(QUADS);
texture(filterImage);
vertex(0, 0, 0, 0);
vertex(width, 0, 1, 0);
vertex(width, height, 1, 1);
vertex(0, height, 0, 1);
endShape();
end2D();
// Restoring previous configuration.
polyShader = prevShader;
stroke = prevStroke;
lights = prevLights;
textureMode = prevTextureMode;
blendMode(prevBlendMode);
if (!hints[DISABLE_DEPTH_TEST]) {
pgl.enable(PGL.DEPTH_TEST);
}
if (!hints[DISABLE_DEPTH_MASK]) {
pgl.depthMask(true);
}
if (needEndDraw) {
endDraw();
}
}
//////////////////////////////////////////////////////////////
// COPY
@Override
public void copy(int sx, int sy, int sw, int sh,
int dx, int dy, int dw, int dh) {
// Copies a rectangle of the surface onto itself, using the filter texture
// as a scratch copy of the screen. Y coordinates are flipped because GL
// textures are bottom-up relative to Processing's axes.
if (primaryGraphics) pgl.enableFBOLayer();
loadTexture();
if (filterTexture == null || filterTexture.contextIsOutdated()) {
filterTexture = new Texture(this, texture.width, texture.height, texture.getParameters());
filterTexture.invertedY(true);
filterImage = wrapTexture(filterTexture);
}
// NOTE(review): the last two args read like a width/height pair but are
// (sw, height - sy) -- verify against Texture.put()'s parameter meaning.
filterTexture.put(texture, sx, height - (sy + sh), sw, height - sy);
copy(filterImage, sx, sy, sw, sh, dx, dy, dw, dh);
}
@Override
public void copy(PImage src,
int sx, int sy, int sw, int sh,
int dx, int dy, int dw, int dh) {
// Draws the (sx, sy, sw, sh) region of 'src' into the (dx, dy, dw, dh)
// region of this surface, honoring the source texture's X/Y inversion
// flags and the source image's pixel density.
boolean needEndDraw = false;
if (!drawing) {
beginDraw();
needEndDraw = true;
}
flush(); // make sure that the screen contents are up to date.
Texture tex = getTexture(src);
boolean invX = tex.invertedX();
boolean invY = tex.invertedY();
int scrX0, scrX1;
int scrY0, scrY1;
if (invX) {
scrX0 = (dx + dw) / src.pixelDensity;
scrX1 = dx / src.pixelDensity;
} else {
scrX0 = dx / src.pixelDensity;
scrX1 = (dx + dw) / src.pixelDensity;
}
int texX0 = sx;
int texX1 = sx + sw;
int texY0, texY1;
if (invY) {
scrY0 = height - (dy + dh) / src.pixelDensity;
scrY1 = height - dy / src.pixelDensity;
texY0 = tex.height - (sy + sh);
texY1 = tex.height - sy;
} else {
// Because drawTexture uses bottom-to-top orientation of Y axis.
scrY0 = height - dy / src.pixelDensity;
scrY1 = height - (dy + dh) / src.pixelDensity;
texY0 = sy;
texY1 = sy + sh;
}
pgl.drawTexture(tex.glTarget, tex.glName, tex.glWidth, tex.glHeight,
0, 0, width, height,
texX0, texY0, texX1, texY1,
scrX0, scrY0, scrX1, scrY1);
if (needEndDraw) {
endDraw();
}
}
//////////////////////////////////////////////////////////////
// BLEND
/**
* Allows to set custom blend modes for the entire scene, using openGL.
* Reference article about blending modes:
* http://www.pegtop.net/delphi/articles/blendmodes/
* DIFFERENCE, HARD_LIGHT, SOFT_LIGHT, OVERLAY, DODGE, BURN modes cannot be
* implemented in fixed-function pipeline because they require
* conditional blending and non-linear blending equations.
*/
@Override
protected void blendModeImpl() {
// Maps Processing's blendMode constant onto GL blend equations/functions.
// Alpha channel always uses (ONE, ONE) so destination alpha accumulates.
// Modes needing non-linear blending warn instead of approximating.
if (blendMode != lastBlendMode) {
// Flush any geometry that uses a different blending mode.
flush();
}
pgl.enable(PGL.BLEND);
if (blendMode == REPLACE) {
if (blendEqSupported) {
pgl.blendEquation(PGL.FUNC_ADD);
}
pgl.blendFunc(PGL.ONE, PGL.ZERO);
} else if (blendMode == BLEND) {
if (blendEqSupported) {
pgl.blendEquationSeparate(PGL.FUNC_ADD,
PGL.FUNC_ADD);
}
pgl.blendFuncSeparate(PGL.SRC_ALPHA, PGL.ONE_MINUS_SRC_ALPHA,
PGL.ONE, PGL.ONE);
} else if (blendMode == ADD) {
if (blendEqSupported) {
pgl.blendEquationSeparate(PGL.FUNC_ADD,
PGL.FUNC_ADD);
}
pgl.blendFuncSeparate(PGL.SRC_ALPHA, PGL.ONE,
PGL.ONE, PGL.ONE);
} else if (blendMode == SUBTRACT) {
// SUBTRACT/LIGHTEST/DARKEST need a non-ADD blend equation, so they are
// only available when blendEquationSeparate is supported.
if (blendEqSupported) {
pgl.blendEquationSeparate(PGL.FUNC_REVERSE_SUBTRACT,
PGL.FUNC_ADD);
pgl.blendFuncSeparate(PGL.SRC_ALPHA, PGL.ONE,
PGL.ONE, PGL.ONE);
} else {
PGraphics.showWarning(BLEND_DRIVER_ERROR, "SUBTRACT");
}
} else if (blendMode == LIGHTEST) {
if (blendEqSupported) {
pgl.blendEquationSeparate(PGL.FUNC_MAX,
PGL.FUNC_ADD);
pgl.blendFuncSeparate(PGL.ONE, PGL.ONE,
PGL.ONE, PGL.ONE);
} else {
PGraphics.showWarning(BLEND_DRIVER_ERROR, "LIGHTEST");
}
} else if (blendMode == DARKEST) {
if (blendEqSupported) {
pgl.blendEquationSeparate(PGL.FUNC_MIN,
PGL.FUNC_ADD);
pgl.blendFuncSeparate(PGL.ONE, PGL.ONE,
PGL.ONE, PGL.ONE);
} else {
PGraphics.showWarning(BLEND_DRIVER_ERROR, "DARKEST");
}
} else if (blendMode == EXCLUSION) {
if (blendEqSupported) {
pgl.blendEquationSeparate(PGL.FUNC_ADD,
PGL.FUNC_ADD);
}
pgl.blendFuncSeparate(PGL.ONE_MINUS_DST_COLOR, PGL.ONE_MINUS_SRC_COLOR,
PGL.ONE, PGL.ONE);
} else if (blendMode == MULTIPLY) {
if (blendEqSupported) {
pgl.blendEquationSeparate(PGL.FUNC_ADD,
PGL.FUNC_ADD);
}
pgl.blendFuncSeparate(PGL.ZERO, PGL.SRC_COLOR,
PGL.ONE, PGL.ONE);
} else if (blendMode == SCREEN) {
if (blendEqSupported) {
pgl.blendEquationSeparate(PGL.FUNC_ADD,
PGL.FUNC_ADD);
}
pgl.blendFuncSeparate(PGL.ONE_MINUS_DST_COLOR, PGL.ONE,
PGL.ONE, PGL.ONE);
} else if (blendMode == DIFFERENCE) {
PGraphics.showWarning(BLEND_RENDERER_ERROR, "DIFFERENCE");
} else if (blendMode == OVERLAY) {
PGraphics.showWarning(BLEND_RENDERER_ERROR, "OVERLAY");
} else if (blendMode == HARD_LIGHT) {
PGraphics.showWarning(BLEND_RENDERER_ERROR, "HARD_LIGHT");
} else if (blendMode == SOFT_LIGHT) {
PGraphics.showWarning(BLEND_RENDERER_ERROR, "SOFT_LIGHT");
} else if (blendMode == DODGE) {
PGraphics.showWarning(BLEND_RENDERER_ERROR, "DODGE");
} else if (blendMode == BURN) {
PGraphics.showWarning(BLEND_RENDERER_ERROR, "BURN");
}
lastBlendMode = blendMode;
}
//////////////////////////////////////////////////////////////
// SAVE
// public void save(String filename) // PImage calls loadPixels()
//////////////////////////////////////////////////////////////
// TEXTURE UTILS
/**
* Not an approved function, this will change or be removed in the future.
* This utility method returns the texture associated to the renderer's.
* drawing surface, making sure is updated to reflect the current contents
* off the screen (or offscreen drawing surface).
*/
public Texture getTexture() {
// Refreshes the texture from the surface before handing it out.
return getTexture(true);
}
/**
* Not an approved function either, don't use it.
*/
public Texture getTexture(boolean load) {
  // Optionally refresh the texture from the surface before returning it.
  if (load) {
    loadTexture();
  }
  return texture;
}
/**
* Not an approved function, this will change or be removed in the future.
* This utility method returns the texture associated to the image.
* creating and/or updating it if needed.
*
* @param img the image to have a texture metadata associated to it
*/
public Texture getTexture(PImage img) {
// Fetch (or lazily create) the texture cached on 'img'.
Texture tex = (Texture)initCache(img);
if (tex == null) return null;
if (img.isModified()) {
// Image was resized since the texture was created: reinitialize storage.
if (img.pixelWidth != tex.width || img.pixelHeight != tex.height) {
tex.init(img.pixelWidth, img.pixelHeight);
}
updateTexture(img, tex);
}
if (tex.hasBuffers()) {
tex.bufferUpdate();
}
// Re-sync sampling/wrap settings with the renderer's current hints.
checkTexture(tex);
return tex;
}
/**
* Not an approved function, test its use in libraries to grab the FB objects
* for offscreen PGraphics.
*/
public FrameBuffer getFrameBuffer() {
// Defaults to the single-sample offscreen framebuffer.
return getFrameBuffer(false);
}
public FrameBuffer getFrameBuffer(boolean multi) {
  // 'multi' selects the MSAA framebuffer; otherwise the plain offscreen one.
  return multi ? multisampleFramebuffer : offscreenFramebuffer;
}
// Returns the texture cached on 'img', creating and uploading it on first
// use (or when the GL context was lost). Must run on the GL thread.
protected Object initCache(PImage img) {
if (!checkGLThread()) {
return null;
}
Texture tex = (Texture)getCache(img);
if (tex == null || tex.contextIsOutdated()) {
tex = addTexture(img);
if (tex != null) {
boolean dispose = img.pixels == null;
img.loadPixels();
tex.set(img.pixels, img.format);
img.setModified();
if (dispose) {
// We only used the pixels to load the image into the texture and the user did not request
// to load the pixels, so we should dispose the pixels array to avoid wasting memory
img.pixels = null;
img.loaded = false;
}
}
}
return tex;
}
// Binds the previous-frame texture: the window-system front texture for the
// primary surface, or the lazily-created ptexture for offscreen surfaces.
protected void bindFrontTexture() {
if (primaryGraphics) {
pgl.bindFrontTexture();
} else {
if (ptexture == null) {
createPTexture();
}
ptexture.bind();
}
}
// Unbinds the previous-frame texture bound by bindFrontTexture().
protected void unbindFrontTexture() {
  if (primaryGraphics) {
    pgl.unbindFrontTexture();
  } else if (ptexture != null) {
    // FIX: guard against null -- ptexture is created lazily inside
    // bindFrontTexture(), so unbinding before any bind would NPE here.
    ptexture.unbind();
  }
}
/**
* This utility method creates a texture for the provided image, and adds it
* to the metadata cache of the image.
* @param img the image to have a texture metadata associated to it
*/
protected Texture addTexture(PImage img) {
// Default parameters: ARGB format, current sampling, mipmaps per hint,
// current wrap mode.
Texture.Parameters params =
new Texture.Parameters(ARGB, textureSampling,
getHint(ENABLE_TEXTURE_MIPMAPS), textureWrap);
return addTexture(img, params);
}
// Creates a texture for 'img' with the given parameters and caches it on
// the image. Returns null for zero-sized images.
protected Texture addTexture(PImage img, Texture.Parameters params) {
if (img.width == 0 || img.height == 0) {
// Cannot add textures of size 0
return null;
}
if (img.parent == null) {
img.parent = parent;
}
Texture tex = new Texture(this, img.pixelWidth, img.pixelHeight, params);
setCache(img, tex);
return tex;
}
// Re-syncs a cached texture's mipmap/sampling/wrap settings with the
// renderer's current hints; color-buffer textures keep their settings.
protected void checkTexture(Texture tex) {
if (!tex.colorBuffer() &&
(tex.usingMipmaps == hints[DISABLE_TEXTURE_MIPMAPS] ||
tex.currentSampling() != textureSampling)) {
tex.usingMipmaps(!hints[DISABLE_TEXTURE_MIPMAPS], textureSampling);
}
if ((tex.usingRepeat && textureWrap == CLAMP) ||
(!tex.usingRepeat && textureWrap == REPEAT)) {
tex.usingRepeat(textureWrap != CLAMP);
}
}
// Wraps an existing texture in a PImage (no pixel array allocated) so it
// can be used with the image-drawing API.
protected PImage wrapTexture(Texture tex) {
// We don't use the PImage(int width, int height, int mode) constructor to
// avoid initializing the pixels array.
PImage img = new PImage();
img.parent = parent;
img.width = img.pixelWidth = tex.width;
img.height = img.pixelHeight = tex.height;
img.format = ARGB;
setCache(img, tex);
return img;
}
// Uploads the modified region of 'img' into its cached texture and clears
// the image's modified flag.
protected void updateTexture(PImage img, Texture tex) {
if (tex != null) {
if (img.isModified()) {
int x = img.getModifiedX1();
int y = img.getModifiedY1();
int w = img.getModifiedX2() - x;
int h = img.getModifiedY2() - y;
tex.set(img.pixels, x, y, w, h, img.format);
}
}
// NOTE(review): the modified flag is cleared even when tex == null (no
// upload happened) -- confirm this is intentional.
img.setModified(false);
}
/** Disposes the screen, previous-frame and filter textures, if present. */
protected void deleteSurfaceTextures() {
  Texture[] surfaceTextures = { texture, ptexture, filterTexture };
  for (Texture t : surfaceTextures) {
    if (t != null) {
      t.dispose();
    }
  }
}
/** Returns true when called on the GL thread; warns and returns false otherwise. */
protected boolean checkGLThread() {
  if (!pgl.threadIsCurrent()) {
    PGraphics.showWarning(OPENGL_THREAD_ERROR);
    return false;
  }
  return true;
}
//////////////////////////////////////////////////////////////
// RESIZE
@Override
public void resize(int wide, int high) {
// Resizing the GL drawing surface directly is not supported.
PGraphics.showMethodWarning("resize");
}
//////////////////////////////////////////////////////////////
// INITIALIZATION ROUTINES
// Initializes the primary (onscreen) surface, discarding any textures that
// belonged to a previous GL context.
protected void initPrimary() {
pgl.initSurface(smooth);
if (texture != null) {
removeCache(this);
texture = null;
ptexture = null;
}
initialized = true;
}
// Per-frame setup for the primary surface: wires the window system's
// draw/read framebuffers and, for FBO-backed surfaces, the back/front textures.
protected void beginOnscreenDraw() {
updatePixelSize();
pgl.beginRender();
if (drawFramebuffer == null) {
drawFramebuffer = new FrameBuffer(this, pixelWidth, pixelHeight, true);
}
drawFramebuffer.setFBO(pgl.getDrawFramebuffer());
if (readFramebuffer == null) {
readFramebuffer = new FrameBuffer(this, pixelWidth, pixelHeight, true);
}
readFramebuffer.setFBO(pgl.getReadFramebuffer());
if (currentFramebuffer == null) {
setFramebuffer(drawFramebuffer);
}
if (pgl.isFBOBacked()) {
texture = pgl.wrapBackTexture(texture);
ptexture = pgl.wrapFrontTexture(ptexture);
}
}
protected void endOnscreenDraw() {
// Finishes the frame; the window color is used to clear exposed regions.
pgl.endRender(parent.sketchWindowColor());
}
// (Re)creates the offscreen framebuffers: a multisampled FBO when MSAA is
// requested and supported, plus the single-sample FBO that backs the texture.
protected void initOffscreen() {
// Getting the context and capabilities from the main renderer.
loadTextureImpl(textureSampling, false);
FrameBuffer ofb = offscreenFramebuffer;
FrameBuffer mfb = multisampleFramebuffer;
// In case of re-initialization (for example, when the smooth level
// is changed), we make sure that all the OpenGL resources associated
// to the surface are released by calling delete().
if (ofb != null) {
ofb.dispose();
ofb = null;
}
if (mfb != null) {
mfb.dispose();
mfb = null;
}
boolean packed = depthBits == 24 && stencilBits == 8 &&
packedDepthStencilSupported;
if (PGraphicsOpenGL.fboMultisampleSupported && 1 < PGL.smoothToSamples(smooth)) {
mfb = new FrameBuffer(this, texture.glWidth, texture.glHeight, PGL.smoothToSamples(smooth), 0,
depthBits, stencilBits, packed, false);
mfb.clear();
multisampleFramebuffer = mfb;
offscreenMultisample = true;
// The offscreen framebuffer where the multisampled image is finally drawn
// to. If depth reading is disabled it doesn't need depth and stencil buffers
// since they are part of the multisampled framebuffer.
if (hints[ENABLE_BUFFER_READING]) {
ofb = new FrameBuffer(this, texture.glWidth, texture.glHeight, 1, 1,
depthBits, stencilBits, packed, false);
} else {
ofb = new FrameBuffer(this, texture.glWidth, texture.glHeight, 1, 1,
0, 0, false, false);
}
} else {
// No MSAA available: fall back to a plain single-sample framebuffer.
smooth = 0;
ofb = new FrameBuffer(this, texture.glWidth, texture.glHeight, 1, 1,
depthBits, stencilBits, packed, false);
offscreenMultisample = false;
}
ofb.setColorBuffer(texture);
ofb.clear();
offscreenFramebuffer = ofb;
initialized = true;
}
// Per-frame setup for an offscreen surface: (re)initializes framebuffers if
// needed, swaps front/back textures, restores the previous frame and clip.
protected void beginOffscreenDraw() {
if (!initialized) {
initOffscreen();
} else {
FrameBuffer ofb = offscreenFramebuffer;
FrameBuffer mfb = multisampleFramebuffer;
boolean outdated = ofb != null && ofb.contextIsOutdated();
boolean outdatedMulti = mfb != null && mfb.contextIsOutdated();
if (outdated || outdatedMulti) {
// GL context was lost: rebuild everything.
restartPGL();
initOffscreen();
} else {
// The back texture of the past frame becomes the front,
// and the front texture becomes the new back texture where the
// new frame is drawn to.
swapOffscreenTextures();
}
}
pushFramebuffer();
if (offscreenMultisample) {
FrameBuffer mfb = multisampleFramebuffer;
if (mfb != null) {
setFramebuffer(mfb);
}
} else {
FrameBuffer ofb = offscreenFramebuffer;
if (ofb != null) {
setFramebuffer(ofb);
}
}
// Render previous back texture (now is the front) as background
drawPTexture();
// Restoring the clipping configuration of the offscreen surface.
if (clip) {
pgl.enable(PGL.SCISSOR_TEST);
pgl.scissor(clipRect[0], clipRect[1], clipRect[2], clipRect[3]);
} else {
pgl.disable(PGL.SCISSOR_TEST);
}
}
// Per-frame teardown for an offscreen surface: resolves MSAA into the color
// buffer, restores the framebuffer stack, and forces opaque alpha if needed.
protected void endOffscreenDraw() {
if (offscreenMultisample) {
FrameBuffer ofb = offscreenFramebuffer;
FrameBuffer mfb = multisampleFramebuffer;
if (ofb != null && mfb != null) {
mfb.copyColor(ofb);
}
}
popFramebuffer();
if (backgroundA == 1) {
// Set alpha channel to opaque in order to match behavior of JAVA2D, not
// on the multisampled FBO because it leads to wrong background color
// on some Macbooks with AMD graphics.
pgl.colorMask(false, false, false, true);
pgl.clearColor(0, 0, 0, backgroundA);
pgl.clear(PGL.COLOR_BUFFER_BIT);
pgl.colorMask(true, true, true, true);
}
if (texture != null) {
texture.updateTexels(); // Mark all texels in screen texture as modified.
}
getPrimaryPG().restoreGL();
}
/** Sets the GL viewport to cover the full drawing surface. */
protected void setViewport() {
  viewport.put(0, 0);
  viewport.put(1, 0);
  viewport.put(2, width);
  viewport.put(3, height);
  pgl.viewport(viewport.get(0), viewport.get(1),
               viewport.get(2), viewport.get(3));
}
@Override
protected void checkSettings() {
super.checkSettings();
// After the generic settings check, apply the GL-specific per-frame state.
setGLSettings();
}
// Applies the renderer's per-frame GL state: clears geometry caches,
// restores blend/depth/multisample state, handles surface resize, and
// resets the camera, lights and face-culling configuration.
protected void setGLSettings() {
inGeo.clear();
tessGeo.clear();
texCache.clear();
// Each frame starts with textures disabled.
super.noTexture();
// Making sure that OpenGL is using the last blend mode set by the user.
blendModeImpl();
// this is necessary for 3D drawing
if (hints[DISABLE_DEPTH_TEST]) {
pgl.disable(PGL.DEPTH_TEST);
} else {
pgl.enable(PGL.DEPTH_TEST);
}
// use <= since that's what processing.core does
pgl.depthFunc(PGL.LEQUAL);
if (hints[DISABLE_OPTIMIZED_STROKE]) {
flushMode = FLUSH_CONTINUOUSLY;
} else {
flushMode = FLUSH_WHEN_FULL;
}
if (primaryGraphics) {
// pgl.getIntegerv(PGL.SAMPLES, intBuffer);
// int temp = intBuffer.get(0);
// if (smooth != temp && 1 < temp && 1 < smooth) {
// TODO check why the samples is higher that initialized smooth level.
// quality = temp;
// }
}
if (pgl.isES()) {
// neither GL_MULTISAMPLE nor GL_POLYGON_SMOOTH are part of GLES2 or GLES3
} else if (smooth < 1) {
pgl.disable(PGL.MULTISAMPLE);
} else {
pgl.enable(PGL.MULTISAMPLE);
}
if (!pgl.isES()) {
pgl.disable(PGL.POLYGON_SMOOTH);
}
if (sized) {
// reapplySettings();
// To avoid having garbage in the screen after a resize,
// in the case background is not called in draw().
if (primaryGraphics) {
background(backgroundColor);
} else {
// offscreen surfaces are transparent by default.
background(backgroundColor & 0xFFFFFF);
// Recreate offscreen FBOs
restartPGL();
}
// Sets the default projection and camera (initializes modelview).
// If the user has setup up their own projection, they'll need
// to fix it after resize anyway. This helps the people who haven't
// set up their own projection.
defaultPerspective();
defaultCamera();
// clear the flag
sized = false;
} else {
// Eliminating any user's transformations by going back to the
// original camera setup.
modelview.set(camera);
modelviewInv.set(cameraInv);
updateProjmodelview();
}
if (is3D()) {
noLights();
lightFalloff(1, 0, 0);
lightSpecular(0, 0, 0);
}
// Vertices should be specified by user in CW order (left-handed)
// That is CCW order (right-handed). Vertex shader inverts
// Y-axis and outputs vertices in CW order (right-handed).
// Culling occurs after the vertex shader, so FRONT FACE
// has to be set to CW (right-handed) for OpenGL to correctly
// recognize FRONT and BACK faces.
pgl.frontFace(PGL.CW);
pgl.disable(PGL.CULL_FACE);
// Processing uses only one texture unit.
pgl.activeTexture(PGL.TEXTURE0);
// The current normal vector is set to be parallel to the Z axis.
normalX = normalY = 0;
normalZ = 1;
pgl.clearDepthStencil();
if (hints[DISABLE_DEPTH_MASK]) {
pgl.depthMask(false);
} else {
pgl.depthMask(true);
}
pixelsOp = OP_NONE;
modified = false;
loaded = false;
}
// Queries the GL driver's identification strings, capability flags and
// limits, and selects vendor-specific default shaders where required.
// Must run with a current GL context; results are cached via glParamsRead.
protected void getGLParameters() {
  OPENGL_VENDOR = pgl.getString(PGL.VENDOR);
  OPENGL_RENDERER = pgl.getString(PGL.RENDERER);
  OPENGL_VERSION = pgl.getString(PGL.VERSION);
  OPENGL_EXTENSIONS = pgl.getString(PGL.EXTENSIONS);
  GLSL_VERSION = pgl.getString(PGL.SHADING_LANGUAGE_VERSION);
  npotTexSupported = pgl.hasNpotTexSupport();
  autoMipmapGenSupported = pgl.hasAutoMipmapGenSupport();
  fboMultisampleSupported = pgl.hasFboMultisampleSupport();
  packedDepthStencilSupported = pgl.hasPackedDepthStencilSupport();
  anisoSamplingSupported = pgl.hasAnisoSamplingSupport();
  readBufferSupported = pgl.hasReadBuffer();
  drawBufferSupported = pgl.hasDrawBuffer();
  // Probe blendEquation support by simply trying it.
  try {
    pgl.blendEquation(PGL.FUNC_ADD);
    blendEqSupported = true;
  } catch (Exception e) {
    blendEqSupported = false;
  }
  depthBits = pgl.getDepthBits();
  stencilBits = pgl.getStencilBits();
  pgl.getIntegerv(PGL.MAX_TEXTURE_SIZE, intBuffer);
  maxTextureSize = intBuffer.get(0);
  // work around runtime exceptions in Broadcom's VC IV driver
  // (idiom fix: replaced the original Yoda condition `false == ...`)
  if (!OPENGL_RENDERER.equals("VideoCore IV HW")) {
    pgl.getIntegerv(PGL.MAX_SAMPLES, intBuffer);
    maxSamples = intBuffer.get(0);
  }
  if (anisoSamplingSupported) {
    pgl.getFloatv(PGL.MAX_TEXTURE_MAX_ANISOTROPY, floatBuffer);
    maxAnisoAmount = floatBuffer.get(0);
  }
  // overwrite the default shaders with vendor specific versions
  // if needed
  if (OPENGL_RENDERER.equals("VideoCore IV HW")) { // Broadcom's binary driver for Raspberry Pi
    defLightShaderVertURL =
      PGraphicsOpenGL.class.getResource("/processing/opengl/shaders/LightVert-brcm.glsl");
    defTexlightShaderVertURL =
      PGraphicsOpenGL.class.getResource("/processing/opengl/shaders/TexLightVert-brcm.glsl");
  } else if (OPENGL_RENDERER.contains("VC4")) { // Mesa driver for same hardware
    defLightShaderVertURL =
      PGraphicsOpenGL.class.getResource("/processing/opengl/shaders/LightVert-vc4.glsl");
    defTexlightShaderVertURL =
      PGraphicsOpenGL.class.getResource("/processing/opengl/shaders/TexLightVert-vc4.glsl");
  }
  glParamsRead = true;
}
//////////////////////////////////////////////////////////////
// SHADER HANDLING
@Override
public PShader loadShader(String fragFilename) {
// Builds a shader from a fragment file only; the matching default vertex
// shader is chosen based on the fragment shader's declared type.
if (fragFilename == null || fragFilename.equals("")) {
PGraphics.showWarning(MISSING_FRAGMENT_SHADER);
return null;
}
int type = PShader.getShaderType(parent.loadStrings(fragFilename),
PShader.POLY);
PShader shader = new PShader(parent);
shader.setType(type);
shader.setFragmentShader(fragFilename);
if (type == PShader.POINT) {
String[] vertSource = pgl.loadVertexShader(defPointShaderVertURL);
shader.setVertexShader(vertSource);
} else if (type == PShader.LINE) {
String[] vertSource = pgl.loadVertexShader(defLineShaderVertURL);
shader.setVertexShader(vertSource);
} else if (type == PShader.TEXLIGHT) {
String[] vertSource = pgl.loadVertexShader(defTexlightShaderVertURL);
shader.setVertexShader(vertSource);
} else if (type == PShader.LIGHT) {
String[] vertSource = pgl.loadVertexShader(defLightShaderVertURL);
shader.setVertexShader(vertSource);
} else if (type == PShader.TEXTURE) {
String[] vertSource = pgl.loadVertexShader(defTextureShaderVertURL);
shader.setVertexShader(vertSource);
} else if (type == PShader.COLOR) {
String[] vertSource = pgl.loadVertexShader(defColorShaderVertURL);
shader.setVertexShader(vertSource);
} else {
// Unknown type: fall back to the texture vertex shader.
String[] vertSource = pgl.loadVertexShader(defTextureShaderVertURL);
shader.setVertexShader(vertSource);
}
return shader;
}
/**
 * Builds a PShader from an explicit vertex/fragment shader file pair.
 * Shows a warning and returns null when either filename is missing.
 */
@Override
public PShader loadShader(String fragFilename, String vertFilename) {
  // Guard clauses: both stages must be supplied.
  if (fragFilename == null || fragFilename.equals("")) {
    PGraphics.showWarning(MISSING_FRAGMENT_SHADER);
    return null;
  }
  if (vertFilename == null || vertFilename.equals("")) {
    PGraphics.showWarning(MISSING_VERTEX_SHADER);
    return null;
  }
  return new PShader(parent, vertFilename, fragFilename);
}
/**
 * Installs a user shader, routing it to the poly, line, or point slot
 * depending on which kind the shader reports itself to be.
 *
 * Fix: the original guarded {@code shader.init()} with a null check but
 * then unconditionally called {@code shader.isPolyShader()}, so passing
 * null threw a NullPointerException. A null shader is now reported as an
 * unknown shader kind instead of crashing.
 */
@Override
public void shader(PShader shader) {
  flush(); // Flushing geometry drawn with a different shader.
  if (shader == null) {
    // NOTE(review): original code NPE'd here on null; warn instead.
    PGraphics.showWarning(UNKNOWN_SHADER_KIND_ERROR);
    return;
  }
  shader.init();
  if (shader.isPolyShader()) polyShader = shader;
  else if (shader.isLineShader()) lineShader = shader;
  else if (shader.isPointShader()) pointShader = shader;
  else PGraphics.showWarning(UNKNOWN_SHADER_KIND_ERROR);
}
/**
 * Installs a user shader into an explicitly chosen slot: TRIANGLES for
 * poly geometry, LINES for stroke lines, POINTS for stroke points.
 * An unrecognized kind triggers a warning and leaves all slots untouched.
 */
@Override
public void shader(PShader shader, int kind) {
  flush(); // Flushing geometry drawn with a different shader.
  if (shader != null) {
    shader.init();
  }
  switch (kind) {
    case TRIANGLES:
      polyShader = shader;
      break;
    case LINES:
      lineShader = shader;
      break;
    case POINTS:
      pointShader = shader;
      break;
    default:
      PGraphics.showWarning(UNKNOWN_SHADER_KIND_ERROR);
  }
}
/**
 * Restores the default poly shader; equivalent to
 * {@code resetShader(TRIANGLES)}.
 */
@Override
public void resetShader() {
  resetShader(TRIANGLES);
}
/**
 * Clears the user shader for one geometry kind so the built-in default
 * is used again. TRIANGLES, QUADS, and POLYGON all map to the poly slot;
 * an unrecognized kind triggers a warning and changes nothing.
 */
@Override
public void resetShader(int kind) {
  flush(); // Flushing geometry drawn with a different shader.
  switch (kind) {
    case TRIANGLES:
    case QUADS:
    case POLYGON:
      polyShader = null;
      break;
    case LINES:
      lineShader = null;
      break;
    case POINTS:
      pointShader = null;
      break;
    default:
      PGraphics.showWarning(UNKNOWN_SHADER_KIND_ERROR);
  }
}
/**
 * Returns the shader to use for poly (fill) geometry given the current
 * lighting/texturing state. A user-set polyShader is used when it matches
 * the required poly type; otherwise one of the four built-in defaults
 * (texlight / light / texture / color) is lazily created and cached on
 * the primary surface so offscreen surfaces share a single instance.
 */
protected PShader getPolyShader(boolean lit, boolean tex) {
  PShader shader;
  PGraphicsOpenGL ppg = getPrimaryPG();
  // No user shader installed -> always fall back to a default.
  boolean useDefault = polyShader == null;
  if (polyShader != null) {
    // Bind the user shader to this renderer and refresh its GL handles.
    polyShader.setRenderer(this);
    polyShader.loadAttributes();
    polyShader.loadUniforms();
  }
  if (lit) {
    if (tex) {
      // Lit + textured: needs a TEXLIGHT-capable shader.
      if (useDefault || !polyShader.checkPolyType(PShader.TEXLIGHT)) {
        if (ppg.defTexlightShader == null) {
          String[] vertSource = pgl.loadVertexShader(defTexlightShaderVertURL);
          String[] fragSource = pgl.loadFragmentShader(defTexlightShaderFragURL);
          ppg.defTexlightShader = new PShader(parent, vertSource, fragSource);
        }
        shader = ppg.defTexlightShader;
      } else {
        shader = polyShader;
      }
    } else {
      // Lit, no texture: needs a LIGHT-capable shader.
      if (useDefault || !polyShader.checkPolyType(PShader.LIGHT)) {
        if (ppg.defLightShader == null) {
          String[] vertSource = pgl.loadVertexShader(defLightShaderVertURL);
          String[] fragSource = pgl.loadFragmentShader(defLightShaderFragURL);
          ppg.defLightShader = new PShader(parent, vertSource, fragSource);
        }
        shader = ppg.defLightShader;
      } else {
        shader = polyShader;
      }
    }
  } else {
    // Unlit geometry: a user shader that reads lighting attributes
    // cannot be used here, so force the default and warn.
    if (polyShader != null && polyShader.accessLightAttribs()) {
      PGraphics.showWarning(SHADER_NEED_LIGHT_ATTRIBS);
      useDefault = true;
    }
    if (tex) {
      // Unlit + textured: needs a TEXTURE-capable shader.
      if (useDefault || !polyShader.checkPolyType(PShader.TEXTURE)) {
        if (ppg.defTextureShader == null) {
          String[] vertSource = pgl.loadVertexShader(defTextureShaderVertURL);
          String[] fragSource = pgl.loadFragmentShader(defTextureShaderFragURL);
          ppg.defTextureShader = new PShader(parent, vertSource, fragSource);
        }
        shader = ppg.defTextureShader;
      } else {
        shader = polyShader;
      }
    } else {
      // Unlit, untextured: plain COLOR shader.
      if (useDefault || !polyShader.checkPolyType(PShader.COLOR)) {
        if (ppg.defColorShader == null) {
          String[] vertSource = pgl.loadVertexShader(defColorShaderVertURL);
          String[] fragSource = pgl.loadFragmentShader(defColorShaderFragURL);
          ppg.defColorShader = new PShader(parent, vertSource, fragSource);
        }
        shader = ppg.defColorShader;
      } else {
        shader = polyShader;
      }
    }
  }
  // Default shaders were not initialized above; do it before returning.
  if (shader != polyShader) {
    shader.setRenderer(this);
    shader.loadAttributes();
    shader.loadUniforms();
  }
  return shader;
}
/**
 * Returns the shader used for stroke-line geometry: the user-set line
 * shader if present, otherwise the built-in default, which is lazily
 * created and cached on the primary surface.
 */
protected PShader getLineShader() {
  PGraphicsOpenGL ppg = getPrimaryPG();
  PShader shader = lineShader;
  if (shader == null) {
    // Lazily build the shared default line shader.
    if (ppg.defLineShader == null) {
      ppg.defLineShader = new PShader(parent,
          pgl.loadVertexShader(defLineShaderVertURL),
          pgl.loadFragmentShader(defLineShaderFragURL));
    }
    shader = ppg.defLineShader;
  }
  // Bind to this renderer and refresh GL attribute/uniform handles.
  shader.setRenderer(this);
  shader.loadAttributes();
  shader.loadUniforms();
  return shader;
}
/**
 * Returns the shader used for stroke-point geometry: the user-set point
 * shader if present, otherwise the built-in default, which is lazily
 * created and cached on the primary surface.
 */
protected PShader getPointShader() {
  PGraphicsOpenGL ppg = getPrimaryPG();
  PShader shader = pointShader;
  if (shader == null) {
    // Lazily build the shared default point shader.
    if (ppg.defPointShader == null) {
      ppg.defPointShader = new PShader(parent,
          pgl.loadVertexShader(defPointShaderVertURL),
          pgl.loadFragmentShader(defPointShaderFragURL));
    }
    shader = ppg.defPointShader;
  }
  // Bind to this renderer and refresh GL attribute/uniform handles.
  shader.setRenderer(this);
  shader.loadAttributes();
  shader.loadUniforms();
  return shader;
}
//////////////////////////////////////////////////////////////
// Utils
/**
 * Returns the smallest power-of-two multiple of {@code currSize} that is
 * at least {@code newMinSize}, by repeatedly doubling the current size.
 *
 * Fix: the original looped forever when {@code currSize <= 0} and a
 * larger size was requested, because {@code 0 << 1 == 0} (and a negative
 * size shifts down to 0 after 32 iterations and then spins). The size is
 * now bumped to 1 before doubling in exactly that case; all previously
 * terminating inputs return the same value as before.
 *
 * @param currSize   current capacity (doubling seed)
 * @param newMinSize minimum capacity required
 * @return a capacity >= newMinSize obtained by doubling currSize
 */
static protected int expandArraySize(int currSize, int newMinSize) {
  int newSize = currSize;
  if (newSize < newMinSize && newSize <= 0) {
    newSize = 1; // doubling from <= 0 would never terminate
  }
  while (newSize < newMinSize) {
    newSize <<= 1;
  }
  return newSize;
}
//////////////////////////////////////////////////////////////
// Generic vertex attributes.
// Factory for the map that stores user-defined (generic) vertex attributes.
static protected AttributeMap newAttributeMap() {
  return new AttributeMap();
}
static protected
|
AsyncPixelReader
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/jsontype/PolymorphicViaRefTypeTest.java
|
{
"start": 526,
"end": 695
}
|
class ____ extends DatabindTestUtil
{
@JsonSubTypes({
@JsonSubTypes.Type(name = "impl5", value = ImplForAtomic.class)
})
static
|
PolymorphicViaRefTypeTest
|
java
|
apache__camel
|
components/camel-ignite/src/test/java/org/apache/camel/component/ignite/IgniteComputeTest.java
|
{
"start": 1666,
"end": 9727
}
|
class ____ extends AbstractIgniteTest {
private static final List<Ignite> ADDITIONAL_INSTANCES = Lists.newArrayList();
private static final List<UUID> LISTENERS = Lists.newArrayList();
@Override
protected String getScheme() {
return "ignite-compute";
}
@Override
protected AbstractIgniteComponent createComponent() {
return IgniteComputeComponent.fromConfiguration(createConfiguration());
}
@Test
public void testExecuteWithWrongPayload() {
try {
template.requestBody("ignite-compute:" + resourceUid + "?executionType=EXECUTE",
TestIgniteComputeResources.TEST_CALLABLE, String.class);
} catch (Exception e) {
Assertions.assertThat(ObjectHelper.getException(RuntimeCamelException.class, e).getMessage())
.startsWith("Ignite Compute endpoint with EXECUTE");
return;
}
fail();
}
@Test
@SuppressWarnings("unchecked")
public void testCall() {
TestIgniteComputeResources.COUNTER.set(0);
// Single Callable.
String result = template.requestBody("ignite-compute:" + resourceUid + "?executionType=CALL",
TestIgniteComputeResources.TEST_CALLABLE, String.class);
Assertions.assertThat(result).isEqualTo("hello");
// Collection of Callables.
Object[] callables = new Object[5];
Arrays.fill(callables, TestIgniteComputeResources.TEST_CALLABLE);
Collection<String> colResult = template.requestBody("ignite-compute:" + resourceUid + "?executionType=CALL",
Lists.newArrayList(callables), Collection.class);
Assertions.assertThat(colResult).containsExactly("hello", "hello", "hello", "hello", "hello");
// Callables with a Reducer.
String reduced = template.requestBodyAndHeader("ignite-compute:" + resourceUid + "?executionType=CALL",
Lists.newArrayList(callables),
IgniteConstants.IGNITE_COMPUTE_REDUCER, TestIgniteComputeResources.STRING_JOIN_REDUCER, String.class);
Assertions.assertThat(reduced).isEqualTo("hellohellohellohellohello");
}
@Test
public void testRun() {
TestIgniteComputeResources.COUNTER.set(0);
// Single Runnable.
Object result = template.requestBody("ignite-compute:" + resourceUid + "?executionType=RUN",
TestIgniteComputeResources.TEST_RUNNABLE_COUNTER, Object.class);
Assertions.assertThat(result).isNull();
Assertions.assertThat(TestIgniteComputeResources.COUNTER.get()).isEqualTo(1);
// Multiple Runnables.
Object[] runnables = new Object[5];
Arrays.fill(runnables, TestIgniteComputeResources.TEST_RUNNABLE_COUNTER);
result = template.requestBody("ignite-compute:" + resourceUid + "?executionType=RUN", Lists.newArrayList(runnables),
Collection.class);
Assertions.assertThat(result).isNull();
Assertions.assertThat(TestIgniteComputeResources.COUNTER.get()).isEqualTo(6);
}
@Test
@SuppressWarnings("unchecked")
public void testBroadcast() {
TestIgniteComputeResources.COUNTER.set(0);
startAdditionalGridInstance();
startAdditionalGridInstance();
ignite().events().enableLocal(EventType.EVT_JOB_FINISHED);
LISTENERS.add(
ignite().events().remoteListen(null, TestIgniteComputeResources.EVENT_COUNTER, EventType.EVT_JOB_FINISHED));
// Single Runnable.
Object result = template.requestBody("ignite-compute:" + resourceUid + "?executionType=BROADCAST",
TestIgniteComputeResources.TEST_RUNNABLE, Object.class);
Assertions.assertThat(result).isNull();
Assertions.assertThat(TestIgniteComputeResources.COUNTER.get()).isEqualTo(3);
// Single Callable.
Collection<String> colResult = template.requestBody("ignite-compute:" + resourceUid + "?executionType=BROADCAST",
TestIgniteComputeResources.TEST_CALLABLE,
Collection.class);
Assertions.assertThat(colResult).isNotNull().containsExactly("hello", "hello", "hello");
// Single Closure.
colResult = template.requestBodyAndHeader("ignite-compute:" + resourceUid + "?executionType=BROADCAST",
TestIgniteComputeResources.TEST_CLOSURE,
IgniteConstants.IGNITE_COMPUTE_PARAMS, "Camel", Collection.class);
Assertions.assertThat(colResult).isNotNull().containsExactly("hello Camel", "hello Camel", "hello Camel");
}
@Test
public void testExecute() {
TestIgniteComputeResources.COUNTER.set(0);
startAdditionalGridInstance();
startAdditionalGridInstance();
ignite().events().enableLocal(EventType.EVT_JOB_RESULTED);
LISTENERS.add(
ignite().events().remoteListen(null, TestIgniteComputeResources.EVENT_COUNTER, EventType.EVT_JOB_RESULTED));
// ComputeTask instance.
String result = template.requestBodyAndHeader("ignite-compute:" + resourceUid + "?executionType=EXECUTE",
TestIgniteComputeResources.COMPUTE_TASK,
IgniteConstants.IGNITE_COMPUTE_PARAMS, 10, String.class);
Assertions.assertThat(result).isNotNull();
Assertions.assertThat(Splitter.on(",").splitToList(result)).contains("a0", "a1", "a2", "a3", "a4", "a5", "a6", "a7",
"a8", "a9");
// ComputeTask class.
result = template.requestBodyAndHeader("ignite-compute:" + resourceUid + "?executionType=EXECUTE",
TestIgniteComputeResources.COMPUTE_TASK.getClass(),
IgniteConstants.IGNITE_COMPUTE_PARAMS, 10, String.class);
Assertions.assertThat(result).isNotNull();
Assertions.assertThat(Splitter.on(",").splitToList(result)).contains("a0", "a1", "a2", "a3", "a4", "a5", "a6", "a7",
"a8", "a9");
}
@Test
@SuppressWarnings("unchecked")
public void testApply() {
TestIgniteComputeResources.COUNTER.set(0);
// Closure with a single parameter.
String result = template.requestBodyAndHeader("ignite-compute:" + resourceUid + "?executionType=APPLY",
TestIgniteComputeResources.TEST_CLOSURE,
IgniteConstants.IGNITE_COMPUTE_PARAMS, "Camel", String.class);
Assertions.assertThat(result).isEqualTo("hello Camel");
// Closure with a Collection of parameters.
Collection<String> colResult = template.requestBodyAndHeader("ignite-compute:" + resourceUid + "?executionType=APPLY",
TestIgniteComputeResources.TEST_CLOSURE,
IgniteConstants.IGNITE_COMPUTE_PARAMS, Lists.newArrayList("Camel1", "Camel2", "Camel3"), Collection.class);
Assertions.assertThat(colResult).contains("hello Camel1", "hello Camel2", "hello Camel3");
// Closure with a Collection of parameters and a Reducer.
Map<String, Object> headers = ImmutableMap.<String, Object> of(IgniteConstants.IGNITE_COMPUTE_PARAMS,
Lists.newArrayList("Camel1", "Camel2", "Camel3"),
IgniteConstants.IGNITE_COMPUTE_REDUCER, TestIgniteComputeResources.STRING_JOIN_REDUCER);
result = template.requestBodyAndHeaders("ignite-compute:" + resourceUid + "?executionType=APPLY",
TestIgniteComputeResources.TEST_CLOSURE, headers, String.class);
Assertions.assertThat(result).isEqualTo("hello Camel1hello Camel2hello Camel3");
}
private void startAdditionalGridInstance() {
ADDITIONAL_INSTANCES.add(Ignition.start(createConfiguration()));
}
@AfterEach
public void stopAdditionalIgniteInstances() {
for (Ignite ignite : ADDITIONAL_INSTANCES) {
ignite.close();
}
ADDITIONAL_INSTANCES.clear();
}
@AfterEach
public void stopRemoteListeners() {
for (UUID uuid : LISTENERS) {
ignite().events().stopRemoteListen(uuid);
}
LISTENERS.clear();
}
}
|
IgniteComputeTest
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/node/NodeTests.java
|
{
"start": 4065,
"end": 18285
}
|
class ____ extends Plugin {
public static final BootstrapCheck CHECK = new BootstrapCheck() {
@Override
public BootstrapCheckResult check(BootstrapContext context) {
return BootstrapCheck.BootstrapCheckResult.success();
}
@Override
public ReferenceDocs referenceDocs() {
return ReferenceDocs.BOOTSTRAP_CHECKS;
}
};
@Override
public List<BootstrapCheck> getBootstrapChecks() {
return Collections.singletonList(CHECK);
}
}
private List<Class<? extends Plugin>> basePlugins() {
List<Class<? extends Plugin>> plugins = new ArrayList<>();
plugins.add(getTestTransportPlugin());
plugins.add(MockHttpTransport.TestPlugin.class);
return plugins;
}
public void testLoadPluginBootstrapChecks() throws IOException {
final String name = randomBoolean() ? randomAlphaOfLength(10) : null;
Settings.Builder settings = baseSettings();
if (name != null) {
settings.put(Node.NODE_NAME_SETTING.getKey(), name);
}
AtomicBoolean executed = new AtomicBoolean(false);
List<Class<? extends Plugin>> plugins = basePlugins();
plugins.add(CheckPlugin.class);
try (Node node = new MockNode(settings.build(), plugins) {
@Override
protected void validateNodeBeforeAcceptingRequests(
BootstrapContext context,
BoundTransportAddress boundTransportAddress,
List<BootstrapCheck> bootstrapChecks
) throws NodeValidationException {
assertEquals(1, bootstrapChecks.size());
assertSame(CheckPlugin.CHECK, bootstrapChecks.get(0));
executed.set(true);
throw new NodeValidationException("boom");
}
}) {
expectThrows(NodeValidationException.class, () -> node.start());
assertTrue(executed.get());
}
}
public void testNodeAttributes() throws IOException {
String attr = randomAlphaOfLength(5);
Settings.Builder settings = baseSettings().put(Node.NODE_ATTRIBUTES.getKey() + "test_attr", attr);
try (Node node = new MockNode(settings.build(), basePlugins())) {
final Settings nodeSettings = randomBoolean() ? node.settings() : node.getEnvironment().settings();
assertEquals(attr, Node.NODE_ATTRIBUTES.getAsMap(nodeSettings).get("test_attr"));
}
// leading whitespace not allowed
attr = " leading";
settings = baseSettings().put(Node.NODE_ATTRIBUTES.getKey() + "test_attr", attr);
try (Node node = new MockNode(settings.build(), basePlugins())) {
fail("should not allow a node attribute with leading whitespace");
} catch (IllegalArgumentException e) {
assertEquals("node.attr.test_attr cannot have leading or trailing whitespace [ leading]", e.getMessage());
}
// trailing whitespace not allowed
attr = "trailing ";
settings = baseSettings().put(Node.NODE_ATTRIBUTES.getKey() + "test_attr", attr);
try (Node node = new MockNode(settings.build(), basePlugins())) {
fail("should not allow a node attribute with trailing whitespace");
} catch (IllegalArgumentException e) {
assertEquals("node.attr.test_attr cannot have leading or trailing whitespace [trailing ]", e.getMessage());
}
}
public void testServerNameNodeAttribute() throws IOException {
String attr = "valid-hostname";
Settings.Builder settings = baseSettings().put(Node.NODE_ATTRIBUTES.getKey() + "server_name", attr);
int i = 0;
try (Node node = new MockNode(settings.build(), basePlugins())) {
final Settings nodeSettings = randomBoolean() ? node.settings() : node.getEnvironment().settings();
assertEquals(attr, Node.NODE_ATTRIBUTES.getAsMap(nodeSettings).get("server_name"));
}
// non-LDH hostname not allowed
attr = "invalid_hostname";
settings = baseSettings().put(Node.NODE_ATTRIBUTES.getKey() + "server_name", attr);
try (Node node = new MockNode(settings.build(), basePlugins())) {
fail("should not allow a server_name attribute with an underscore");
} catch (IllegalArgumentException e) {
assertEquals("invalid node.attr.server_name [invalid_hostname]", e.getMessage());
}
}
private static Settings.Builder baseSettings() {
final Path tempDir = createTempDir();
return Settings.builder()
.put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong()))
.put(Environment.PATH_HOME_SETTING.getKey(), tempDir)
.put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType())
// default the watermarks low values to prevent tests from failing on nodes without enough disk space
.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b")
.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b")
.put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "1b")
.put(dataNode());
}
public void testCloseOnOutstandingTask() throws Exception {
Node node = new MockNode(baseSettings().build(), basePlugins());
node.start();
ThreadPool threadpool = node.injector().getInstance(ThreadPool.class);
AtomicBoolean shouldRun = new AtomicBoolean(true);
final CountDownLatch threadRunning = new CountDownLatch(1);
threadpool.executor(ThreadPool.Names.SEARCH).execute(() -> {
threadRunning.countDown();
while (shouldRun.get())
;
});
threadRunning.await();
node.close();
shouldRun.set(false);
assertTrue(node.awaitClose(10L, TimeUnit.SECONDS));
}
public void testCloseRaceWithTaskExecution() throws Exception {
Node node = new MockNode(baseSettings().build(), basePlugins());
node.start();
ThreadPool threadpool = node.injector().getInstance(ThreadPool.class);
AtomicBoolean shouldRun = new AtomicBoolean(true);
final CountDownLatch running = new CountDownLatch(3);
Thread submitThread = new Thread(() -> {
running.countDown();
try {
running.await();
} catch (InterruptedException e) {
throw new AssertionError("interrupted while waiting", e);
}
try {
threadpool.executor(ThreadPool.Names.SEARCH).execute(() -> {
while (shouldRun.get())
;
});
} catch (RejectedExecutionException e) {
assertThat(e.getMessage(), containsString("[Terminated,"));
}
});
Thread closeThread = new Thread(() -> {
running.countDown();
try {
running.await();
} catch (InterruptedException e) {
throw new AssertionError("interrupted while waiting", e);
}
try {
node.close();
} catch (IOException e) {
throw new AssertionError("node close failed", e);
}
});
submitThread.start();
closeThread.start();
running.countDown();
running.await();
submitThread.join();
closeThread.join();
shouldRun.set(false);
assertTrue(node.awaitClose(10L, TimeUnit.SECONDS));
}
public void testAwaitCloseTimeoutsOnNonInterruptibleTask() throws Exception {
Node node = new MockNode(baseSettings().build(), basePlugins());
node.start();
ThreadPool threadpool = node.injector().getInstance(ThreadPool.class);
AtomicBoolean shouldRun = new AtomicBoolean(true);
final CountDownLatch threadRunning = new CountDownLatch(1);
threadpool.executor(ThreadPool.Names.SEARCH).execute(() -> {
threadRunning.countDown();
while (shouldRun.get())
;
});
threadRunning.await();
node.close();
assertFalse(node.awaitClose(0, TimeUnit.MILLISECONDS));
shouldRun.set(false);
assertTrue(node.awaitClose(10L, TimeUnit.SECONDS));
}
public void testCloseOnInterruptibleTask() throws Exception {
Node node = new MockNode(baseSettings().build(), basePlugins());
node.start();
ThreadPool threadpool = node.injector().getInstance(ThreadPool.class);
final CountDownLatch threadRunning = new CountDownLatch(1);
final CountDownLatch latch = new CountDownLatch(1);
final CountDownLatch finishLatch = new CountDownLatch(1);
final AtomicBoolean interrupted = new AtomicBoolean(false);
threadpool.executor(ThreadPool.Names.SEARCH).execute(() -> {
threadRunning.countDown();
try {
latch.await();
} catch (InterruptedException e) {
interrupted.set(true);
Thread.currentThread().interrupt();
} finally {
finishLatch.countDown();
}
});
threadRunning.await();
node.close();
// close should not interrupt ongoing tasks
assertFalse(interrupted.get());
// but awaitClose should
node.awaitClose(0, TimeUnit.SECONDS);
finishLatch.await();
assertTrue(interrupted.get());
}
public void testCloseOnLeakedIndexReaderReference() throws Exception {
Node node = new MockNode(baseSettings().build(), basePlugins());
node.start();
IndicesService indicesService = node.injector().getInstance(IndicesService.class);
assertAcked(node.client().admin().indices().prepareCreate("test").setSettings(indexSettings(1, 0)));
IndexService indexService = indicesService.iterator().next();
IndexShard shard = indexService.getShard(0);
Searcher searcher = shard.acquireSearcher("test");
node.close();
IllegalStateException e = expectThrows(IllegalStateException.class, () -> node.awaitClose(10L, TimeUnit.SECONDS));
searcher.close();
assertThat(e.getMessage(), containsString("Something is leaking index readers or store references"));
}
public void testCloseOnLeakedStoreReference() throws Exception {
Node node = new MockNode(baseSettings().build(), basePlugins());
node.start();
IndicesService indicesService = node.injector().getInstance(IndicesService.class);
assertAcked(node.client().admin().indices().prepareCreate("test").setSettings(indexSettings(1, 0)));
IndexService indexService = indicesService.iterator().next();
IndexShard shard = indexService.getShard(0);
shard.store().incRef();
node.close();
IllegalStateException e = expectThrows(IllegalStateException.class, () -> node.awaitClose(10L, TimeUnit.SECONDS));
shard.store().decRef();
assertThat(e.getMessage(), containsString("Something is leaking index readers or store references"));
}
public void testStartOnClosedTransport() throws IOException {
try (Node node = new MockNode(baseSettings().build(), basePlugins())) {
node.prepareForClose();
expectThrows(AssertionError.class, node::start); // this would be IllegalStateException in a real Node with assertions off
}
}
public void testCreateWithCircuitBreakerPlugins() throws IOException {
Settings.Builder settings = baseSettings().put("breaker.test_breaker.limit", "50b");
List<Class<? extends Plugin>> plugins = basePlugins();
plugins.add(MockCircuitBreakerPlugin.class);
try (Node node = new MockNode(settings.build(), plugins)) {
CircuitBreakerService service = node.injector().getInstance(CircuitBreakerService.class);
assertThat(service.getBreaker("test_breaker"), is(not(nullValue())));
assertThat(service.getBreaker("test_breaker").getLimit(), equalTo(50L));
CircuitBreakerPlugin breakerPlugin = node.getPluginsService().filterPlugins(CircuitBreakerPlugin.class).findFirst().get();
assertTrue(breakerPlugin instanceof MockCircuitBreakerPlugin);
assertSame(
"plugin circuit breaker instance is not the same as breaker service's instance",
((MockCircuitBreakerPlugin) breakerPlugin).myCircuitBreaker.get(),
service.getBreaker("test_breaker")
);
}
}
/**
* TODO: Remove this test once classpath plugins are fully moved to MockNode.
* In production, plugin name clashes are checked in a completely different way.
* See {@link PluginsServiceTests#testPluginNameClash()}
*/
public void testNodeFailsToStartWhenThereAreMultipleRecoveryPlannerPluginsLoaded() {
List<Class<? extends Plugin>> plugins = basePlugins();
plugins.add(MockRecoveryPlannerPlugin.class);
plugins.add(MockRecoveryPlannerPlugin.class);
IllegalStateException exception = expectThrows(IllegalStateException.class, () -> new MockNode(baseSettings().build(), plugins));
assertThat(exception.getMessage(), containsString("Duplicate key org.elasticsearch.node.NodeTests$MockRecoveryPlannerPlugin"));
}
public void testHeadersToCopyInTaskManagerAreTheSameAsDeclaredInTask() throws IOException {
Settings.Builder settings = baseSettings();
try (Node node = new MockNode(settings.build(), basePlugins())) {
final TransportService transportService = node.injector().getInstance(TransportService.class);
final Set<String> taskHeaders = transportService.getTaskManager().getTaskHeaders();
assertThat(taskHeaders, containsInAnyOrder(Task.HEADERS_TO_COPY.toArray()));
}
}
public static
|
CheckPlugin
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricType.java
|
{
"start": 850,
"end": 1032
}
|
enum ____ {
/**
* A monotonically increasing metric that can be used
* to calculate throughput
*/
COUNTER,
/**
* An arbitrary varying metric
*/
GAUGE
}
|
MetricType
|
java
|
apache__camel
|
dsl/camel-yaml-dsl/camel-yaml-dsl-deserializers/src/generated/java/org/apache/camel/dsl/yaml/deserializers/ModelDeserializers.java
|
{
"start": 1032411,
"end": 1036007
}
|
class ____ extends YamlDeserializerBase<SetHeaderDefinition> {
public SetHeaderDefinitionDeserializer() {
super(SetHeaderDefinition.class);
}
@Override
protected SetHeaderDefinition newInstance() {
return new SetHeaderDefinition();
}
@Override
protected boolean setProperty(SetHeaderDefinition target, String propertyKey,
String propertyName, Node node) {
propertyKey = org.apache.camel.util.StringHelper.dashToCamelCase(propertyKey);
switch(propertyKey) {
case "disabled": {
String val = asText(node);
target.setDisabled(val);
break;
}
case "expression": {
org.apache.camel.model.language.ExpressionDefinition val = asType(node, org.apache.camel.model.language.ExpressionDefinition.class);
target.setExpression(val);
break;
}
case "name": {
String val = asText(node);
target.setName(val);
break;
}
case "id": {
String val = asText(node);
target.setId(val);
break;
}
case "description": {
String val = asText(node);
target.setDescription(val);
break;
}
case "note": {
String val = asText(node);
target.setNote(val);
break;
}
default: {
ExpressionDefinition ed = target.getExpressionType();
if (ed != null) {
throw new org.apache.camel.dsl.yaml.common.exception.DuplicateFieldException(node, propertyName, "as an expression");
}
ed = ExpressionDeserializers.constructExpressionType(propertyKey, node);
if (ed != null) {
target.setExpressionType(ed);
} else {
return false;
}
}
}
return true;
}
}
@YamlType(
nodes = "setHeaders",
types = org.apache.camel.model.SetHeadersDefinition.class,
order = org.apache.camel.dsl.yaml.common.YamlDeserializerResolver.ORDER_LOWEST - 1,
displayName = "Set Headers",
description = "Allows setting multiple headers on the message at the same time.",
deprecated = false,
properties = {
@YamlProperty(name = "description", type = "string", description = "Sets the description of this node", displayName = "Description"),
@YamlProperty(name = "disabled", type = "boolean", defaultValue = "false", description = "Disables this EIP from the route.", displayName = "Disabled"),
@YamlProperty(name = "headers", type = "array:org.apache.camel.model.SetHeaderDefinition", description = "Contains the headers to be set", displayName = "Headers"),
@YamlProperty(name = "id", type = "string", description = "Sets the id of this node", displayName = "Id"),
@YamlProperty(name = "note", type = "string", description = "Sets the note of this node", displayName = "Note")
}
)
public static
|
SetHeaderDefinitionDeserializer
|
java
|
elastic__elasticsearch
|
x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/GetTopNFunctionsAction.java
|
{
"start": 362,
"end": 679
}
|
class ____ extends ActionType<GetTopNFunctionsResponse> {
public static final GetTopNFunctionsAction INSTANCE = new GetTopNFunctionsAction();
public static final String NAME = "indices:data/read/profiling/topn/functions";
private GetTopNFunctionsAction() {
super(NAME);
}
}
|
GetTopNFunctionsAction
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/typehandler/User.java
|
{
"start": 707,
"end": 1267
}
|
class ____ {
private Integer id;
private String name;
private String city;
private String state;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getCity() {
return city;
}
public void setCity(String city) {
this.city = city;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
}
|
User
|
java
|
micronaut-projects__micronaut-core
|
inject-groovy/src/test/groovy/io/micronaut/inject/configurations/requiresconditiontrue/TrueEnvCondition.java
|
{
"start": 782,
"end": 926
}
|
class ____ implements Condition {
@Override
public boolean matches(ConditionContext context) {
return true;
}
}
|
TrueEnvCondition
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/test/java/org/springframework/data/jpa/repository/query/JpqlQueryBuilderUnitTests.java
|
{
"start": 12033,
"end": 12085
}
|
class ____ {
@ManyToOne Person person;
}
}
|
GroupId
|
java
|
google__gson
|
test-jpms/src/test/java/com/google/gson/jpms_test/opened/ReflectionTest.java
|
{
"start": 1034,
"end": 1480
}
|
class ____ {
int i;
}
@Test
public void testDeserialization() {
Gson gson = new Gson();
MyClass deserialized = gson.fromJson("{\"i\":1}", MyClass.class);
assertThat(deserialized.i).isEqualTo(1);
}
@Test
public void testSerialization() {
Gson gson = new Gson();
MyClass obj = new MyClass();
obj.i = 1;
String serialized = gson.toJson(obj);
assertThat(serialized).isEqualTo("{\"i\":1}");
}
}
|
MyClass
|
java
|
processing__processing4
|
app/src/processing/app/syntax/InputHandler.java
|
{
"start": 26773,
"end": 27054
}
|
class ____ implements ActionListener
{
public void actionPerformed(ActionEvent evt)
{
JEditTextArea textArea = getTextArea(evt);
textArea.setOverwriteEnabled(
!textArea.isOverwriteEnabled());
}
}
public static
|
overwrite
|
java
|
apache__camel
|
components/camel-zookeeper/src/test/java/org/apache/camel/component/zookeeper/ZooKeeperUtilsTest.java
|
{
"start": 1336,
"end": 2940
}
|
class ____ {
private CamelContext camelContext = new DefaultCamelContext();
@Test
public void testCreateModeExtraction() {
assertEquals(CreateMode.EPHEMERAL, getCreateModeFromString("EPHEMERAL", CreateMode.EPHEMERAL));
assertEquals(CreateMode.EPHEMERAL_SEQUENTIAL, getCreateModeFromString("EPHEMERAL_SEQUENTIAL", CreateMode.EPHEMERAL));
assertEquals(CreateMode.PERSISTENT, getCreateModeFromString("PERSISTENT", CreateMode.EPHEMERAL));
assertEquals(CreateMode.PERSISTENT_SEQUENTIAL, getCreateModeFromString("PERSISTENT_SEQUENTIAL", CreateMode.EPHEMERAL));
assertEquals(CreateMode.EPHEMERAL, getCreateModeFromString("DOESNOTEXIST", CreateMode.EPHEMERAL));
}
@Test
public void testCreateModeExtractionFromMessageHeader() {
assertEquals(CreateMode.EPHEMERAL, testModeInMessage("EPHEMERAL", CreateMode.EPHEMERAL));
assertEquals(CreateMode.EPHEMERAL_SEQUENTIAL, testModeInMessage("EPHEMERAL_SEQUENTIAL", CreateMode.EPHEMERAL));
assertEquals(CreateMode.PERSISTENT, testModeInMessage("PERSISTENT", CreateMode.EPHEMERAL));
assertEquals(CreateMode.PERSISTENT_SEQUENTIAL, testModeInMessage("PERSISTENT_SEQUENTIAL", CreateMode.EPHEMERAL));
assertEquals(CreateMode.EPHEMERAL, testModeInMessage("DOESNOTEXIST", CreateMode.EPHEMERAL));
}
private CreateMode testModeInMessage(String mode, CreateMode defaultMode) {
Message m = new DefaultMessage(camelContext);
m.setHeader(ZooKeeperMessage.ZOOKEEPER_CREATE_MODE, mode);
return getCreateMode(m, defaultMode);
}
}
|
ZooKeeperUtilsTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/generics/GenericBasicValuedPathTest.java
|
{
"start": 3347,
"end": 3500
}
|
class ____ extends MySuper<Integer, String> {
public MyEntity() {
}
public MyEntity(Integer id, String data) {
super( id, data );
}
}
}
|
MyEntity
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/utils/StreamExchangeModeUtils.java
|
{
"start": 1219,
"end": 1277
}
|
class ____ load job-wide exchange mode. */
@Internal
public
|
to
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/output/ReplayOutput.java
|
{
"start": 2989,
"end": 3272
}
|
class ____ extends Signal {
final long message;
Integer(long message) {
this.message = message;
}
@Override
protected void replay(CommandOutput<?, ?, ?> target) {
target.set(message);
}
}
static
|
Integer
|
java
|
spring-projects__spring-boot
|
core/spring-boot/src/main/java/org/springframework/boot/task/ThreadPoolTaskExecutorCustomizer.java
|
{
"start": 756,
"end": 946
}
|
interface ____ can be used to customize a {@link ThreadPoolTaskExecutor}.
*
* @author Stephane Nicoll
* @since 3.2.0
* @see ThreadPoolTaskExecutorBuilder
*/
@FunctionalInterface
public
|
that
|
java
|
alibaba__nacos
|
client/src/main/java/com/alibaba/nacos/client/lock/remote/grpc/LockGrpcClient.java
|
{
"start": 2250,
"end": 6068
}
|
class ____ extends AbstractLockClient {
private final String uuid;
private final Long requestTimeout;
private final RpcClient rpcClient;
public LockGrpcClient(NacosClientProperties properties, ServerListFactory serverListFactory,
SecurityProxy securityProxy) throws NacosException {
super(securityProxy);
this.uuid = UUID.randomUUID().toString();
this.requestTimeout = Long.parseLong(properties.getProperty(PropertyConstants.LOCK_REQUEST_TIMEOUT, "-1"));
Map<String, String> labels = new HashMap<>();
labels.put(RemoteConstants.LABEL_SOURCE, RemoteConstants.LABEL_SOURCE_SDK);
labels.put(RemoteConstants.LABEL_MODULE, RemoteConstants.LABEL_MODULE_LOCK);
labels.put(Constants.APPNAME, AppNameUtils.getAppName());
this.rpcClient = RpcClientFactory.createClient(uuid, ConnectionType.GRPC, labels,
RpcClientTlsConfigFactory.getInstance().createSdkConfig(properties.asProperties()));
start(serverListFactory);
}
private void start(ServerListFactory serverListFactory) throws NacosException {
rpcClient.serverListFactory(serverListFactory);
rpcClient.start();
}
@Override
public Boolean lock(LockInstance instance) throws NacosException {
if (!isAbilitySupportedByServer()) {
throw new NacosRuntimeException(NacosException.SERVER_NOT_IMPLEMENTED,
"Request Nacos server version is too low, not support lock feature.");
}
LockOperationRequest request = new LockOperationRequest();
request.setLockInstance(instance);
request.setLockOperationEnum(LockOperationEnum.ACQUIRE);
LockOperationResponse acquireLockResponse = requestToServer(request, LockOperationResponse.class);
return (Boolean) acquireLockResponse.getResult();
}
@Override
public Boolean unLock(LockInstance instance) throws NacosException {
if (!isAbilitySupportedByServer()) {
throw new NacosRuntimeException(NacosException.SERVER_NOT_IMPLEMENTED,
"Request Nacos server version is too low, not support lock feature.");
}
LockOperationRequest request = new LockOperationRequest();
request.setLockInstance(instance);
request.setLockOperationEnum(LockOperationEnum.RELEASE);
LockOperationResponse acquireLockResponse = requestToServer(request, LockOperationResponse.class);
return (Boolean) acquireLockResponse.getResult();
}
@Override
public void shutdown() throws NacosException {
rpcClient.shutdown();
}
private <T extends Response> T requestToServer(AbstractLockRequest request, Class<T> responseClass)
throws NacosException {
try {
request.putAllHeader(getSecurityHeaders());
Response response =
requestTimeout < 0 ? rpcClient.request(request) : rpcClient.request(request, requestTimeout);
if (ResponseCode.SUCCESS.getCode() != response.getResultCode()) {
throw new NacosException(response.getErrorCode(), response.getMessage());
}
if (responseClass.isAssignableFrom(response.getClass())) {
return (T) response;
}
} catch (NacosException e) {
throw e;
} catch (Exception e) {
throw new NacosException(NacosException.SERVER_ERROR, "Request nacos server failed: ", e);
}
throw new NacosException(NacosException.SERVER_ERROR, "Server return invalid response");
}
private boolean isAbilitySupportedByServer() {
return rpcClient.getConnectionAbility(AbilityKey.SERVER_DISTRIBUTED_LOCK) == AbilityStatus.SUPPORTED;
}
}
|
LockGrpcClient
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.