language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
spring-projects__spring-framework
|
spring-test/src/main/java/org/springframework/test/context/transaction/TransactionContextHolder.java
|
{
"start": 931,
"end": 1635
}
|
class ____ {
private static final ThreadLocal<TransactionContext> currentTransactionContext =
new NamedInheritableThreadLocal<>("Test Transaction Context");
private TransactionContextHolder() {
}
static void setCurrentTransactionContext(TransactionContext transactionContext) {
currentTransactionContext.set(transactionContext);
}
static @Nullable TransactionContext getCurrentTransactionContext() {
return currentTransactionContext.get();
}
static @Nullable TransactionContext removeCurrentTransactionContext() {
TransactionContext transactionContext = currentTransactionContext.get();
currentTransactionContext.remove();
return transactionContext;
}
}
|
TransactionContextHolder
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/json/bvt/WriteClassNameTest.java
|
{
"start": 239,
"end": 854
}
|
class ____ extends TestCase {
protected void setUp() throws Exception {
ParserConfig.global.addAccept("com.alibaba.json.bvt.WriteClassNameTest.");
}
public void test_0() throws Exception {
Entity entity = new Entity(3, "jobs");
String text = JSON.toJSONString(entity, SerializerFeature.WriteClassName);
System.out.println(text);
Entity entity2 = (Entity) JSON.parseObject(text, Object.class);
Assert.assertEquals(entity.getId(), entity2.getId());
Assert.assertEquals(entity.getName(), entity2.getName());
}
public static
|
WriteClassNameTest
|
java
|
quarkusio__quarkus
|
extensions/smallrye-fault-tolerance/deployment/src/test/java/io/quarkus/smallrye/faulttolerance/test/asynchronous/context/propagation/FaultToleranceContextPropagationTest.java
|
{
"start": 718,
"end": 1492
}
|
class ____ {
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(MyAppService.class, MyReqService.class));
@Inject
MyAppService appService;
@Inject
MyReqService reqService;
@Test
@ActivateRequestContext
public void test() throws ExecutionException, InterruptedException {
String data = UUID.randomUUID().toString();
reqService.setState(data);
String result = appService.call().toCompletableFuture().get();
assertThat(result).startsWith(data + "|");
assertThat(result).isNotEqualTo(data + "|" + Thread.currentThread().getName());
}
@ApplicationScoped
public static
|
FaultToleranceContextPropagationTest
|
java
|
google__dagger
|
examples/bazel/java/example/dagger/PumpModule.java
|
{
"start": 754,
"end": 832
}
|
class ____ {
@Binds
abstract Pump providePump(Thermosiphon pump);
}
|
PumpModule
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/event/collection/BrokenCollectionEventTest.java
|
{
"start": 1550,
"end": 7311
}
|
class ____ {
@AfterEach
protected void cleanupTest(SessionFactoryScope scope) {
scope.inTransaction( s -> {
s.getSessionFactory().getSchemaManager().truncateMappedObjects();
} );
}
public ParentWithCollection createParent(String name) {
return new ParentWithCollectionOfEntities( name );
}
public Collection createCollection() {
return new HashSet<>();
}
@Test
@FailureExpected(jiraKey = "unknown")
public void testUpdateDetachedParentNoChildrenToNull(SessionFactoryScope scope) {
CollectionListeners listeners = new CollectionListeners( scope.getSessionFactory() );
ParentWithCollection parent = createParentWithNoChildren( scope, "parent" );
listeners.clear();
assertEquals( 0, parent.getChildren().size() );
var oldCollection = scope.fromTransaction( s -> {
Collection oc = parent.getChildren();
parent.newChildren( null );
s.merge( parent );
return oc;
} );
int index = 0;
checkResult( listeners, listeners.getPreCollectionRemoveListener(), parent, oldCollection, index++ );
checkResult( listeners, listeners.getPostCollectionRemoveListener(), parent, oldCollection, index++ );
// pre- and post- collection recreate events should be created when updating an entity with a "null" collection
checkResult( listeners, listeners.getPreCollectionRecreateListener(), parent, index++ );
checkResult( listeners, listeners.getPostCollectionRecreateListener(), parent, index++ );
checkNumberOfResults( listeners, index );
}
@Test
@FailureExpected(jiraKey = "unknown")
public void testSaveParentNullChildren(SessionFactoryScope scope) {
CollectionListeners listeners = new CollectionListeners( scope.getSessionFactory() );
ParentWithCollection parent = createParentWithNullChildren( scope, "parent" );
assertNull( parent.getChildren() );
int index = 0;
// pre- and post- collection recreate events should be created when creating an entity with a "null" collection
checkResult( listeners, listeners.getPreCollectionRecreateListener(), parent, index++ );
checkResult( listeners, listeners.getPostCollectionRecreateListener(), parent, index++ );
checkNumberOfResults( listeners, index );
listeners.clear();
var p = scope.fromTransaction( s -> {
return (ParentWithCollection) s.get( parent.getClass(), parent.getId() );
} );
assertNotNull( p.getChildren() );
checkNumberOfResults( listeners, 0 );
}
@Test
@FailureExpected(jiraKey = "unknown")
public void testUpdateParentNoChildrenToNull(SessionFactoryScope scope) {
CollectionListeners listeners = new CollectionListeners( scope.getSessionFactory() );
ParentWithCollection parent = createParentWithNoChildren( scope, "parent" );
listeners.clear();
assertEquals( 0, parent.getChildren().size() );
Long id = parent.getId();
var data = scope.fromTransaction( s -> {
var p = (ParentWithCollection) s.get( ParentWithCollection.class, id );
Collection oldCollection = p.getChildren();
p.newChildren( null );
return List.of( p, oldCollection );
} );
int index = 0;
Collection oldCollection = (Collection) data.get( 1 );
parent = (ParentWithCollection) data.get( 0 );
if ( ((PersistentCollection) oldCollection).wasInitialized() ) {
checkResult( listeners, listeners.getInitializeCollectionListener(), parent, oldCollection, index++ );
}
checkResult( listeners, listeners.getPreCollectionRemoveListener(), parent, oldCollection, index++ );
checkResult( listeners, listeners.getPostCollectionRemoveListener(), parent, oldCollection, index++ );
// pre- and post- collection recreate events should be created when updating an entity with a "null" collection
checkResult( listeners, listeners.getPreCollectionRecreateListener(), parent, index++ );
checkResult( listeners, listeners.getPostCollectionRecreateListener(), parent, index++ );
checkNumberOfResults( listeners, index );
}
private ParentWithCollection createParentWithNullChildren(SessionFactoryScope scope, String parentName) {
return scope.fromTransaction( s -> {
ParentWithCollection parent = createParent( parentName );
s.persist( parent );
return parent;
} );
}
private ParentWithCollection createParentWithNoChildren(SessionFactoryScope scope, String parentName) {
return scope.fromTransaction( s -> {
ParentWithCollection parent = createParent( parentName );
parent.setChildren( createCollection() );
s.persist( parent );
return parent;
} );
}
protected void checkResult(CollectionListeners listeners,
CollectionListeners.Listener listenerExpected,
ParentWithCollection parent,
int index) {
checkResult( listeners, listenerExpected, parent, parent.getChildren(), index );
}
protected void checkResult(CollectionListeners listeners,
CollectionListeners.Listener listenerExpected,
Entity ownerExpected,
Collection collExpected,
int index) {
assertSame( listenerExpected, listeners.getListenersCalled().get( index ) );
assertSame(
ownerExpected,
((AbstractCollectionEvent) listeners.getEvents().get( index )).getAffectedOwnerOrNull()
);
assertEquals(
ownerExpected.getId(),
((AbstractCollectionEvent) listeners.getEvents().get( index )).getAffectedOwnerIdOrNull()
);
assertEquals(
ownerExpected.getClass().getName(),
((AbstractCollectionEvent) listeners.getEvents().get( index )).getAffectedOwnerEntityName()
);
assertSame(
collExpected, ((AbstractCollectionEvent) listeners.getEvents().get( index )).getCollection()
);
}
private void checkNumberOfResults(CollectionListeners listeners, int nEventsExpected) {
assertEquals( nEventsExpected, listeners.getListenersCalled().size() );
assertEquals( nEventsExpected, listeners.getEvents().size() );
}
}
|
BrokenCollectionEventTest
|
java
|
apache__camel
|
components/camel-jetty/src/test/java/org/apache/camel/component/jetty/HttpHeaderTest.java
|
{
"start": 1327,
"end": 4279
}
|
class ____ extends BaseJettyTest {
@Test
public void testHttpHeaders() {
String result = template.requestBody("direct:start", "hello", String.class);
assertEquals("Find the key!", result, "Should send a right http header to the server.");
}
@Test
public void testServerHeader() {
Exchange ex = template.request("http://localhost:{{port}}/server/mytest", new Processor() {
@Override
public void process(Exchange exchange) {
// Do nothing here
}
});
assertNotNull(ex.getMessage().getHeader("Server"));
assertNull(ex.getMessage().getHeader("Date"));
ex = template.request("http://localhost:{{port2}}/server/mytest", new Processor() {
@Override
public void process(Exchange exchange) {
// Do nothing here
}
});
assertNull(ex.getMessage().getHeader("Server"));
assertNotNull(ex.getMessage().getHeader("Date"));
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start").setHeader("SOAPAction", constant("http://xxx.com/interfaces/ticket"))
.setHeader("Content-Type", constant("text/xml; charset=utf-8"))
.setHeader(Exchange.HTTP_PROTOCOL_VERSION, constant("HTTP/1.1"))
.to("http://localhost:{{port}}/myapp/mytest");
from("jetty:http://localhost:{{port}}/myapp/mytest").process(new Processor() {
public void process(Exchange exchange) {
Map<String, Object> headers = exchange.getIn().getHeaders();
ServletRequest request = exchange.getIn(HttpMessage.class).getRequest();
assertNotNull(request);
assertEquals("HTTP/1.1", request.getProtocol(), "Get a wong http protocol version");
for (Entry<String, Object> entry : headers.entrySet()) {
if ("SOAPAction".equals(entry.getKey())
&& "http://xxx.com/interfaces/ticket".equals(entry.getValue())) {
exchange.getMessage().setBody("Find the key!");
return;
}
}
exchange.getMessage().setBody("Cannot find the key!");
}
});
from("jetty:http://localhost:{{port}}/server/mytest").transform(constant("Response!"));
// The setting only effect on a new server endpoint
from("jetty:http://localhost:{{port2}}/server/mytest?sendServerVersion=false&sendDateHeader=true")
.transform(constant("Response!"));
}
};
}
}
|
HttpHeaderTest
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/cache/QueryCacheWithObjectParameterTest.java
|
{
"start": 1541,
"end": 10014
}
|
class ____ {
@BeforeAll
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
Parent p = new Parent( "John" );
Address address = new Address( "via Milano", "Roma" );
p.setAddress( address );
session.persist( p );
Child c = new Child( "Alex", p );
session.persist( c );
}
);
}
@Test
public void testQueryWithEmbeddableParameter(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
evictQueryRegion( session );
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.address = :address",
Parent.class
);
queryParent.setParameter( "address", new Address( "via Milano", "Roma" ) );
queryParent.setCacheable( true );
List<Parent> resultList = queryParent.getResultList();
assertThat( resultList ).hasSize( 1 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 0 );
}
);
scope.inTransaction(
session -> {
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.address = :address",
Parent.class
);
queryParent.setParameter( "address", new Address( "via Milano", "Roma" ) );
queryParent.setCacheable( true );
List<Parent> resultList = queryParent.getResultList();
assertThat( resultList ).hasSize( 1 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 1 );
}
);
}
@Test
public void testQueryWithEmbeddableParameterWithANull(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
evictQueryRegion( session );
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.address = :address",
Parent.class
);
queryParent.setParameter( "address", new Address( "via Milano", null ) );
queryParent.setCacheable( true );
List<Parent> resultList = queryParent.getResultList();
assertThat( resultList ).hasSize( 0 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 0 );
}
);
scope.inTransaction(
session -> {
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.address = :address",
Parent.class
);
queryParent.setParameter( "address", new Address( "via Milano", null ) );
queryParent.setCacheable( true );
List<Parent> resultList = queryParent.getResultList();
assertThat( resultList ).hasSize( 0 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 1 );
}
);
}
@Test
public void testQueryCacheHits(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
evictQueryRegion( session );
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.name = 'John'",
Parent.class
);
List<Parent> p = queryParent.getResultList();
assertThat( p ).hasSize( 1 );
Query<Child> queryChildren = session.createQuery( "from Child c where c.parent = ?1", Child.class );
queryChildren.setParameter( 1, p.get( 0 ) );
queryChildren.setCacheable( true );
List<Child> c = queryChildren.getResultList();
assertThat( c ).hasSize( 1 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 0 );
}
);
scope.inTransaction(
session -> {
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.name = 'John'",
Parent.class
);
List<Parent> p = queryParent.getResultList();
assertThat( p ).hasSize( 1 );
Query<Child> queryChildren = session.createQuery( "from Child c where c.parent = ?1", Child.class );
queryChildren.setParameter( 1, p.get( 0 ) );
queryChildren.setCacheable( true );
List<Child> c = queryChildren.getResultList();
assertThat( c ).hasSize( 1 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 1 );
}
);
}
@Test
public void testQueryCacheHits2(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
evictQueryRegion( session );
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.name = 'John'",
Parent.class
);
List<Parent> p = queryParent.getResultList();
assertThat( p ).hasSize( 1 );
Query<Child> queryChildren = session.createQuery(
"from Child c where c.parent.id = ?1",
Child.class
);
queryChildren.setParameter( 1, p.get( 0 ).getId() );
queryChildren.setCacheable( true );
List<Child> c = queryChildren.getResultList();
assertThat( c ).hasSize( 1 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 0 );
}
);
scope.inTransaction(
session -> {
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.name = 'John'",
Parent.class
);
List<Parent> p = queryParent.getResultList();
assertThat( p ).hasSize( 1 );
Query<Child> queryChildren = session.createQuery(
"from Child c where c.parent.id = ?1",
Child.class
);
queryChildren.setParameter( 1, p.get( 0 ).getId() );
queryChildren.setCacheable( true );
List<Child> c = queryChildren.getResultList();
assertThat( c ).hasSize( 1 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 1 );
}
);
}
@Test
public void testQueryCacheHitsNullParameter(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
evictQueryRegion( session );
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.name = 'John'",
Parent.class
);
List<Parent> p = queryParent.getResultList();
assertThat( p ).hasSize( 1 );
Query<Child> queryChildren = session.createQuery(
"from Child c where c.parent.id = ?1",
Child.class
);
queryChildren.setParameter( 1, null );
queryChildren.setCacheable( true );
List<Child> c = queryChildren.getResultList();
assertThat( c ).hasSize( 0 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 0 );
}
);
scope.inTransaction(
session -> {
Query<Parent> queryParent = session.createQuery(
"from Parent p where p.name = 'John'",
Parent.class
);
List<Parent> p = queryParent.getResultList();
assertThat( p ).hasSize( 1 );
Query<Child> queryChildren = session.createQuery(
"from Child c where c.parent.id = ?1",
Child.class
);
queryChildren.setParameter( 1, null );
queryChildren.setCacheable( true );
List<Child> c = queryChildren.getResultList();
assertThat( c ).hasSize( 0 );
CacheRegionStatistics defaultQueryCacheRegionStatistics = getQueryCacheRegionStatistics( session );
assertThat( defaultQueryCacheRegionStatistics.getHitCount() ).isEqualTo( 1 );
}
);
}
private static void evictQueryRegion(SessionImplementor session) {
session.getSessionFactory()
.getCache()
.evictQueryRegion( RegionFactory.DEFAULT_QUERY_RESULTS_REGION_UNQUALIFIED_NAME );
session.getSessionFactory().getStatistics().clear();
}
private static CacheRegionStatistics getQueryCacheRegionStatistics(SessionImplementor session) {
StatisticsImplementor statistics = session.getSessionFactory().getStatistics();
return statistics.getQueryRegionStatistics( RegionFactory.DEFAULT_QUERY_RESULTS_REGION_UNQUALIFIED_NAME );
}
@Entity(name = "Parent")
public static
|
QueryCacheWithObjectParameterTest
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/integer_/IntegerAssert_isEqualTo_int_Test.java
|
{
"start": 892,
"end": 1209
}
|
class ____ extends IntegerAssertBaseTest {
@Override
protected IntegerAssert invoke_api_method() {
return assertions.isEqualTo(8);
}
@Override
protected void verify_internal_effects() {
verify(integers).assertEqual(getInfo(assertions), getActual(assertions), 8);
}
}
|
IntegerAssert_isEqualTo_int_Test
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/PriorityTests.java
|
{
"start": 413,
"end": 686
}
|
class ____ extends ESTestCase {
public void testToAndFromString() {
for (Priority priority : Priority.values()) {
String value = priority.toString();
assertThat(Priority.fromString(value), equalTo(priority));
}
}
}
|
PriorityTests
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/HazelcastSedaEndpointBuilderFactory.java
|
{
"start": 40602,
"end": 40934
}
|
interface ____
extends
AdvancedHazelcastSedaEndpointConsumerBuilder,
AdvancedHazelcastSedaEndpointProducerBuilder {
default HazelcastSedaEndpointBuilder basic() {
return (HazelcastSedaEndpointBuilder) this;
}
}
public
|
AdvancedHazelcastSedaEndpointBuilder
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/security/oauthbearer/internals/OAuthBearerClientInitialResponseTest.java
|
{
"start": 1285,
"end": 6783
}
|
class ____ {
/*
Test how a client would build a response
*/
@Test
public void testBuildClientResponseToBytes() throws Exception {
String expectedMessage = "n,,\u0001auth=Bearer 123.345.567\u0001nineteen=42\u0001\u0001";
Map<String, String> extensions = new HashMap<>();
extensions.put("nineteen", "42");
OAuthBearerClientInitialResponse response = new OAuthBearerClientInitialResponse("123.345.567", new SaslExtensions(extensions));
String message = new String(response.toBytes(), StandardCharsets.UTF_8);
assertEquals(expectedMessage, message);
}
@Test
public void testBuildServerResponseToBytes() throws Exception {
String serverMessage = "n,,\u0001auth=Bearer 123.345.567\u0001nineteen=42\u0001\u0001";
OAuthBearerClientInitialResponse response = new OAuthBearerClientInitialResponse(serverMessage.getBytes(StandardCharsets.UTF_8));
String message = new String(response.toBytes(), StandardCharsets.UTF_8);
assertEquals(serverMessage, message);
}
@Test
public void testThrowsSaslExceptionOnInvalidExtensionKey() {
Map<String, String> extensions = new HashMap<>();
extensions.put("19", "42"); // keys can only be a-z
assertThrows(SaslException.class, () -> new OAuthBearerClientInitialResponse("123.345.567", new SaslExtensions(extensions)));
}
@Test
public void testToken() throws Exception {
String message = "n,,\u0001auth=Bearer 123.345.567\u0001\u0001";
OAuthBearerClientInitialResponse response = new OAuthBearerClientInitialResponse(message.getBytes(StandardCharsets.UTF_8));
assertEquals("123.345.567", response.tokenValue());
assertEquals("", response.authorizationId());
}
@Test
public void testAuthorizationId() throws Exception {
String message = "n,a=myuser,\u0001auth=Bearer 345\u0001\u0001";
OAuthBearerClientInitialResponse response = new OAuthBearerClientInitialResponse(message.getBytes(StandardCharsets.UTF_8));
assertEquals("345", response.tokenValue());
assertEquals("myuser", response.authorizationId());
}
@Test
public void testExtensions() throws Exception {
String message = "n,,\u0001propA=valueA1, valueA2\u0001auth=Bearer 567\u0001propB=valueB\u0001\u0001";
OAuthBearerClientInitialResponse response = new OAuthBearerClientInitialResponse(message.getBytes(StandardCharsets.UTF_8));
assertEquals("567", response.tokenValue());
assertEquals("", response.authorizationId());
assertEquals("valueA1, valueA2", response.extensions().map().get("propA"));
assertEquals("valueB", response.extensions().map().get("propB"));
}
// The example in the RFC uses `vF9dft4qmTc2Nvb3RlckBhbHRhdmlzdGEuY29tCg==` as the token
// But since we use Base64Url encoding, padding is omitted. Hence this test verifies without '='.
@Test
public void testRfc7688Example() throws Exception {
String message = "n,a=user@example.com,\u0001host=server.example.com\u0001port=143\u0001" +
"auth=Bearer vF9dft4qmTc2Nvb3RlckBhbHRhdmlzdGEuY29tCg\u0001\u0001";
OAuthBearerClientInitialResponse response = new OAuthBearerClientInitialResponse(message.getBytes(StandardCharsets.UTF_8));
assertEquals("vF9dft4qmTc2Nvb3RlckBhbHRhdmlzdGEuY29tCg", response.tokenValue());
assertEquals("user@example.com", response.authorizationId());
assertEquals("server.example.com", response.extensions().map().get("host"));
assertEquals("143", response.extensions().map().get("port"));
}
// RFC 6750 token format 1*( ALPHA / DIGIT /"-" / "." / "_" / "~" / "+" / "/" ) *"="
@Test
public void testCharSupportForRfc6750Token() throws Exception {
String message = "n,a=user@example.com,\u0001host=server.example.com\u0001port=143\u0001" +
"auth=Bearer vF-9.df_t4qm~Tc2Nvb3RlckBhbHR+hdmlzdGEuY29/tCg==\u0001\u0001";
OAuthBearerClientInitialResponse response = new OAuthBearerClientInitialResponse(message.getBytes(StandardCharsets.UTF_8));
assertEquals("vF-9.df_t4qm~Tc2Nvb3RlckBhbHR+hdmlzdGEuY29/tCg==", response.tokenValue());
assertEquals("user@example.com", response.authorizationId());
assertEquals("server.example.com", response.extensions().map().get("host"));
assertEquals("143", response.extensions().map().get("port"));
}
@Test
public void testNoExtensionsFromByteArray() throws Exception {
String message = "n,a=user@example.com,\u0001" +
"auth=Bearer vF9dft4qmTc2Nvb3RlckBhbHRhdmlzdGEuY29tCg\u0001\u0001";
OAuthBearerClientInitialResponse response = new OAuthBearerClientInitialResponse(message.getBytes(StandardCharsets.UTF_8));
assertEquals("vF9dft4qmTc2Nvb3RlckBhbHRhdmlzdGEuY29tCg", response.tokenValue());
assertEquals("user@example.com", response.authorizationId());
assertTrue(response.extensions().map().isEmpty());
}
@Test
public void testNoExtensionsFromTokenAndNullExtensions() throws Exception {
OAuthBearerClientInitialResponse response = new OAuthBearerClientInitialResponse("token", null);
assertTrue(response.extensions().map().isEmpty());
}
@Test
public void testValidateNullExtensions() throws Exception {
OAuthBearerClientInitialResponse.validateExtensions(null);
}
}
|
OAuthBearerClientInitialResponseTest
|
java
|
spring-projects__spring-framework
|
spring-beans/src/main/java/org/springframework/beans/factory/xml/NamespaceHandler.java
|
{
"start": 917,
"end": 1156
}
|
interface ____ by the {@link DefaultBeanDefinitionDocumentReader}
* for handling custom namespaces in a Spring XML configuration file.
*
* <p>Implementations are expected to return implementations of the
* {@link BeanDefinitionParser}
|
used
|
java
|
apache__camel
|
core/camel-main/src/main/java/org/apache/camel/main/MainCommandLineSupport.java
|
{
"start": 12577,
"end": 13627
}
|
class ____ extends Option {
private final String parameterName;
public ParameterOption(String abbreviation, String fullName, String description, String parameterName) {
super(abbreviation, fullName, description);
this.parameterName = parameterName;
}
@Override
protected void doProcess(String arg, LinkedList<String> remainingArgs) {
if (remainingArgs.isEmpty()) {
System.err.println("Expected fileName for ");
showOptions();
completed();
} else {
String parameter = remainingArgs.removeFirst();
doProcess(arg, parameter, remainingArgs);
}
}
@Override
public String getInformation() {
return " " + getAbbreviation() + " or " + getFullName() + " <" + parameterName + "> = " + getDescription();
}
protected abstract void doProcess(String arg, String parameter, LinkedList<String> remainingArgs);
}
}
|
ParameterOption
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/batch/CompositeIdAndElementCollectionBatchingTest.java
|
{
"start": 1476,
"end": 2804
}
|
class ____ {
private static final EntityB ENTITY_B = new EntityB( 1L );
private static final EntityA ENTITY_A = new EntityA(
new EntityId( "EntityA", ENTITY_B ),
Collections.singleton( new EmbeddableA( "EmbeddableA" ) )
);
private static final EntityA ENTITY_A2 = new EntityA(
new EntityId( "EntityA2", ENTITY_B ),
Collections.singleton( new EmbeddableA( "EmbeddableB" ) )
);
@BeforeEach
public void setUp(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
session.persist( ENTITY_B );
session.persist( ENTITY_A );
session.persist( ENTITY_A2 );
}
);
}
@AfterEach
public void tearDown(SessionFactoryScope scope){
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
void testSelect(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
EntityA a = session.find( EntityA.class, ENTITY_A.id );
assertThat( a.elementCollection ).hasSize( 1 );
}
);
}
@Test
void testSelect2(SessionFactoryScope scope) {
scope.inTransaction(
session -> {
EntityA a = session.find( EntityA.class, ENTITY_A.id );
EntityA a2 = session.find( EntityA.class, ENTITY_A2.id );
assertThat( a.elementCollection ).hasSize( 1 );
}
);
}
@Entity(name = "EntityA")
public static
|
CompositeIdAndElementCollectionBatchingTest
|
java
|
resilience4j__resilience4j
|
resilience4j-circuitbreaker/src/test/java/io/github/resilience4j/circuitbreaker/event/CircuitBreakerEventTest.java
|
{
"start": 1166,
"end": 5667
}
|
class ____ {
@Test
public void testCircuitBreakerOnErrorEvent() {
CircuitBreakerOnErrorEvent circuitBreakerEvent = new CircuitBreakerOnErrorEvent("test",
Duration.ofSeconds(1), new IOException());
assertThat(circuitBreakerEvent.getCircuitBreakerName()).isEqualTo("test");
assertThat(circuitBreakerEvent.getElapsedDuration().getSeconds()).isEqualTo(1);
assertThat(circuitBreakerEvent.getThrowable()).isInstanceOf(IOException.class);
assertThat(circuitBreakerEvent.getEventType()).isEqualTo(Type.ERROR);
assertThat(circuitBreakerEvent.toString())
.contains("CircuitBreaker 'test' recorded an error: 'java.io.IOException'.");
}
@Test
public void testCircuitBreakerOnIgnoredErrorEvent() {
CircuitBreakerOnIgnoredErrorEvent circuitBreakerEvent = new CircuitBreakerOnIgnoredErrorEvent(
"test",
Duration.ofSeconds(1), new IOException());
assertThat(circuitBreakerEvent.getCircuitBreakerName()).isEqualTo("test");
assertThat(circuitBreakerEvent.getElapsedDuration().getSeconds()).isEqualTo(1);
assertThat(circuitBreakerEvent.getThrowable()).isInstanceOf(IOException.class);
assertThat(circuitBreakerEvent.getEventType()).isEqualTo(Type.IGNORED_ERROR);
assertThat(circuitBreakerEvent.toString()).contains(
"CircuitBreaker 'test' recorded an error which has been ignored: 'java.io.IOException'.");
}
@Test
public void testCircuitBreakerOnStateTransitionEvent() {
CircuitBreakerOnStateTransitionEvent circuitBreakerEvent = new CircuitBreakerOnStateTransitionEvent(
"test",
StateTransition.CLOSED_TO_OPEN);
assertThat(circuitBreakerEvent.getCircuitBreakerName()).isEqualTo("test");
assertThat(circuitBreakerEvent.getStateTransition())
.isEqualTo(StateTransition.CLOSED_TO_OPEN);
assertThat(circuitBreakerEvent.getEventType()).isEqualTo(Type.STATE_TRANSITION);
assertThat(circuitBreakerEvent.toString())
.contains("CircuitBreaker 'test' changed state from CLOSED to OPEN");
}
@Test
public void testCircuitBreakerOnResetEvent() {
CircuitBreakerOnResetEvent circuitBreakerEvent = new CircuitBreakerOnResetEvent("test");
assertThat(circuitBreakerEvent.getCircuitBreakerName()).isEqualTo("test");
assertThat(circuitBreakerEvent.getEventType()).isEqualTo(Type.RESET);
assertThat(circuitBreakerEvent.toString()).contains("CircuitBreaker 'test' reset");
}
@Test
public void testCircuitBreakerOnSuccessEvent() {
CircuitBreakerOnSuccessEvent circuitBreakerEvent = new CircuitBreakerOnSuccessEvent("test",
Duration.ofSeconds(1));
assertThat(circuitBreakerEvent.getCircuitBreakerName()).isEqualTo("test");
assertThat(circuitBreakerEvent.getElapsedDuration().getSeconds()).isEqualTo(1);
assertThat(circuitBreakerEvent.getEventType()).isEqualTo(Type.SUCCESS);
assertThat(circuitBreakerEvent.toString())
.contains("CircuitBreaker 'test' recorded a successful call.");
}
@Test
public void testCircuitBreakerOnCallNotPermittedEvent() {
CircuitBreakerOnCallNotPermittedEvent circuitBreakerEvent = new CircuitBreakerOnCallNotPermittedEvent(
"test");
assertThat(circuitBreakerEvent.getCircuitBreakerName()).isEqualTo("test");
assertThat(circuitBreakerEvent.getEventType()).isEqualTo(Type.NOT_PERMITTED);
assertThat(circuitBreakerEvent.toString())
.contains("CircuitBreaker 'test' recorded a call which was not permitted.");
}
@Test
public void name() throws InterruptedException {
final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");
final ZonedDateTime now = ZonedDateTime.now();
System.out.println(now.format(formatter));
final Clock clock = Clock.systemDefaultZone();
final Instant instant0 = clock.instant();
assertThat(clock.millis()).isEqualTo(instant0.toEpochMilli());
SECONDS.sleep(1);
final Instant instant1 = clock.instant();
assertThat(instant1.toEpochMilli() - instant0.toEpochMilli()).isGreaterThanOrEqualTo(1000L);
final ZonedDateTime dateTime = ZonedDateTime.ofInstant(instant1, clock.getZone());
assertThat(dateTime.format(formatter)).isNotEqualTo(now.format(formatter));
}
}
|
CircuitBreakerEventTest
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ChangeCaseEvaluator.java
|
{
"start": 5279,
"end": 6110
}
|
class ____ implements EvalOperator.ExpressionEvaluator.Factory {
private final Source source;
private final EvalOperator.ExpressionEvaluator.Factory val;
private final Locale locale;
private final ChangeCase.Case caseType;
public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val, Locale locale,
ChangeCase.Case caseType) {
this.source = source;
this.val = val;
this.locale = locale;
this.caseType = caseType;
}
@Override
public ChangeCaseEvaluator get(DriverContext context) {
return new ChangeCaseEvaluator(source, val.get(context), locale, caseType, context);
}
@Override
public String toString() {
return "ChangeCaseEvaluator[" + "val=" + val + ", locale=" + locale + ", caseType=" + caseType + "]";
}
}
}
|
Factory
|
java
|
spring-projects__spring-framework
|
spring-core-test/src/main/java/org/springframework/core/test/tools/DynamicClassLoader.java
|
{
"start": 1403,
"end": 5588
}
|
class ____ extends ClassLoader {
private final ClassFiles classFiles;
private final ResourceFiles resourceFiles;
private final Map<String, DynamicClassFileObject> dynamicClassFiles;
private final Map<String, DynamicResourceFileObject> dynamicResourceFiles;
private final @Nullable Method defineClassMethod;
public DynamicClassLoader(ClassLoader parent, ClassFiles classFiles, ResourceFiles resourceFiles,
Map<String, DynamicClassFileObject> dynamicClassFiles,
Map<String, DynamicResourceFileObject> dynamicResourceFiles) {
super(parent);
this.classFiles = classFiles;
this.resourceFiles = resourceFiles;
this.dynamicClassFiles = dynamicClassFiles;
this.dynamicResourceFiles = dynamicResourceFiles;
Class<? extends ClassLoader> parentClass = parent.getClass();
if (parentClass.getName().equals(CompileWithForkedClassLoaderClassLoader.class.getName())) {
Method setClassResourceLookupMethod = lookupMethod(parentClass,
"setClassResourceLookup", Function.class);
ReflectionUtils.makeAccessible(setClassResourceLookupMethod);
ReflectionUtils.invokeMethod(setClassResourceLookupMethod,
getParent(), (Function<String, byte @Nullable []>) this::findClassBytes);
this.defineClassMethod = lookupMethod(parentClass,
"defineDynamicClass", String.class, byte[].class, int.class, int.class);
ReflectionUtils.makeAccessible(this.defineClassMethod);
this.dynamicClassFiles.forEach((name, file) -> defineClass(name, file.getBytes()));
}
else {
this.defineClassMethod = null;
}
}
@Override
protected Class<?> findClass(String name) throws ClassNotFoundException {
Class<?> clazz = defineClass(name, findClassBytes(name));
return (clazz != null ? clazz : super.findClass(name));
}
private @Nullable Class<?> defineClass(String name, byte @Nullable [] bytes) {
if (bytes == null) {
return null;
}
if (this.defineClassMethod != null) {
return (Class<?>) ReflectionUtils.invokeMethod(this.defineClassMethod,
getParent(), name, bytes, 0, bytes.length);
}
return defineClass(name, bytes, 0, bytes.length);
}
@Override
protected Enumeration<URL> findResources(String name) throws IOException {
URL resource = findResource(name);
if (resource != null) {
return new SingletonEnumeration<>(resource);
}
return super.findResources(name);
}
@Override
protected @Nullable URL findResource(String name) {
if (name.endsWith(ClassUtils.CLASS_FILE_SUFFIX)) {
String className = ClassUtils.convertResourcePathToClassName(name.substring(0,
name.length() - ClassUtils.CLASS_FILE_SUFFIX.length()));
byte[] classBytes = findClassBytes(className);
if (classBytes != null) {
return createResourceUrl(name, () -> classBytes);
}
}
DynamicResourceFileObject dynamicResourceFile = this.dynamicResourceFiles.get(name);
if (dynamicResourceFile != null && dynamicResourceFile.getBytes() != null) {
return createResourceUrl(dynamicResourceFile.getName(), dynamicResourceFile::getBytes);
}
ResourceFile resourceFile = this.resourceFiles.get(name);
if (resourceFile != null) {
return createResourceUrl(resourceFile.getPath(), resourceFile::getBytes);
}
return super.findResource(name);
}
private byte @Nullable [] findClassBytes(String name) {
ClassFile classFile = this.classFiles.get(name);
if (classFile != null) {
return classFile.getContent();
}
DynamicClassFileObject dynamicClassFile = this.dynamicClassFiles.get(name);
return (dynamicClassFile != null ? dynamicClassFile.getBytes() : null);
}
@SuppressWarnings("deprecation") // on JDK 20
private URL createResourceUrl(String name, Supplier<byte[]> bytesSupplier) {
try {
return new URL(null, "resource:///" + name,
new ResourceFileHandler(bytesSupplier));
}
catch (MalformedURLException ex) {
throw new IllegalStateException(ex);
}
}
private static Method lookupMethod(Class<?> target, String name, Class<?>... parameterTypes) {
Method method = ReflectionUtils.findMethod(target, name, parameterTypes);
Assert.notNull(method, () -> "Could not find method '%s' on '%s'".formatted(name, target.getName()));
return method;
}
private static
|
DynamicClassLoader
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxBytesRefAggregatorFunctionSupplier.java
|
{
"start": 650,
"end": 1611
}
|
class ____ implements AggregatorFunctionSupplier {
public MaxBytesRefAggregatorFunctionSupplier() {
}
@Override
public List<IntermediateStateDesc> nonGroupingIntermediateStateDesc() {
return MaxBytesRefAggregatorFunction.intermediateStateDesc();
}
@Override
public List<IntermediateStateDesc> groupingIntermediateStateDesc() {
return MaxBytesRefGroupingAggregatorFunction.intermediateStateDesc();
}
@Override
public MaxBytesRefAggregatorFunction aggregator(DriverContext driverContext,
List<Integer> channels) {
return MaxBytesRefAggregatorFunction.create(driverContext, channels);
}
@Override
public MaxBytesRefGroupingAggregatorFunction groupingAggregator(DriverContext driverContext,
List<Integer> channels) {
return MaxBytesRefGroupingAggregatorFunction.create(channels, driverContext);
}
@Override
public String describe() {
return "max of bytes";
}
}
|
MaxBytesRefAggregatorFunctionSupplier
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/NamenodeHeartbeatService.java
|
{
"start": 20976,
"end": 23493
}
|
interface ____ the Namenode to monitor.
*/
private void getFsNamesystemMetrics(String address) {
String query = "Hadoop:service=NameNode,name=FSNamesystem*";
this.fsNamesystemMetrics = FederationUtil.getJmx(query, address, connectionFactory, scheme);
}
/**
* Populates FSNamesystem* metrics into report.
* @param aux FSNamesystem* metrics from namenode.
* @param report Namenode status report to update with JMX data.
* @throws JSONException When invalid JSONObject is found.
*/
private void populateFsNamesystemMetrics(JSONArray aux, NamenodeStatusReport report)
throws JSONException {
if (aux != null) {
for (int i = 0; i < aux.length(); i++) {
JSONObject jsonObject = aux.getJSONObject(i);
String name = jsonObject.getString("name");
if (name.equals("Hadoop:service=NameNode,name=FSNamesystemState")) {
report.setDatanodeInfo(
jsonObject.getInt("NumLiveDataNodes"),
jsonObject.getInt("NumDeadDataNodes"),
jsonObject.getInt("NumStaleDataNodes"),
jsonObject.getInt("NumDecommissioningDataNodes"),
jsonObject.getInt("NumDecomLiveDataNodes"),
jsonObject.getInt("NumDecomDeadDataNodes"),
jsonObject.optInt("NumInMaintenanceLiveDataNodes"),
jsonObject.optInt("NumInMaintenanceDeadDataNodes"),
jsonObject.optInt("NumEnteringMaintenanceDataNodes"),
jsonObject.optLong("ScheduledReplicationBlocks"));
} else if (name.equals(
"Hadoop:service=NameNode,name=FSNamesystem")) {
report.setNamesystemInfo(
jsonObject.getLong("CapacityRemaining"),
jsonObject.getLong("CapacityTotal"),
jsonObject.getLong("FilesTotal"),
jsonObject.getLong("BlocksTotal"),
jsonObject.getLong("MissingBlocks"),
jsonObject.getLong("PendingReplicationBlocks"),
jsonObject.getLong("UnderReplicatedBlocks"),
jsonObject.getLong("PendingDeletionBlocks"),
jsonObject.optLong("ProvidedCapacityTotal"),
jsonObject.getInt("PendingSPSPaths"));
}
}
}
}
@Override
protected void serviceStop() throws Exception {
LOG.info("Stopping NamenodeHeartbeat service for, NS {} NN {} ",
this.nameserviceId, this.namenodeId);
if (this.connectionFactory != null) {
this.connectionFactory.destroy();
}
super.serviceStop();
}
}
|
of
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/clients/consumer/RangeAssignorTest.java
|
{
"start": 2723,
"end": 34081
}
|
class ____ {
private final RangeAssignor assignor = new RangeAssignor();
// For plural tests
private final String topic1 = "topic1";
private final String topic2 = "topic2";
private final String consumer1 = "consumer1";
private final String instance1 = "instance1";
private final String consumer2 = "consumer2";
private final String instance2 = "instance2";
private final String consumer3 = "consumer3";
private final String instance3 = "instance3";
private int numBrokerRacks;
private boolean hasConsumerRack;
private List<MemberInfo> staticMemberInfos;
private int replicationFactor = 3;
@BeforeEach
public void setUp() {
staticMemberInfos = new ArrayList<>();
staticMemberInfos.add(new MemberInfo(consumer1, Optional.of(instance1)));
staticMemberInfos.add(new MemberInfo(consumer2, Optional.of(instance2)));
staticMemberInfos.add(new MemberInfo(consumer3, Optional.of(instance3)));
}
@ParameterizedTest(name = TEST_NAME_WITH_CONSUMER_RACK)
@ValueSource(booleans = {true, false})
public void testOneConsumerNoTopic(boolean hasConsumerRack) {
initializeRacks(hasConsumerRack ? RackConfig.BROKER_AND_CONSUMER_RACK : RackConfig.NO_CONSUMER_RACK);
Map<String, List<PartitionInfo>> partitionsPerTopic = new HashMap<>();
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic,
Collections.singletonMap(consumer1, subscription(Collections.emptyList(), 0)));
assertEquals(Collections.singleton(consumer1), assignment.keySet());
assertTrue(assignment.get(consumer1).isEmpty());
}
@ParameterizedTest(name = TEST_NAME_WITH_CONSUMER_RACK)
@ValueSource(booleans = {true, false})
public void testOneConsumerNonexistentTopic(boolean hasConsumerRack) {
initializeRacks(hasConsumerRack ? RackConfig.BROKER_AND_CONSUMER_RACK : RackConfig.NO_CONSUMER_RACK);
Map<String, List<PartitionInfo>> partitionsPerTopic = new HashMap<>();
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic,
Collections.singletonMap(consumer1, subscription(topics(topic1), 0)));
assertEquals(Collections.singleton(consumer1), assignment.keySet());
assertTrue(assignment.get(consumer1).isEmpty());
}
@ParameterizedTest(name = "{displayName}.rackConfig = {0}")
@EnumSource(RackConfig.class)
public void testOneConsumerOneTopic(RackConfig rackConfig) {
initializeRacks(rackConfig);
Map<String, List<PartitionInfo>> partitionsPerTopic = new HashMap<>();
partitionsPerTopic.put(topic1, partitionInfos(topic1, 3));
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic,
Collections.singletonMap(consumer1, subscription(topics(topic1), 0)));
assertEquals(Collections.singleton(consumer1), assignment.keySet());
assertAssignment(partitions(tp(topic1, 0), tp(topic1, 1), tp(topic1, 2)), assignment.get(consumer1));
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testOnlyAssignsPartitionsFromSubscribedTopics(RackConfig rackConfig) {
initializeRacks(rackConfig);
String otherTopic = "other";
Map<String, List<PartitionInfo>> partitionsPerTopic = new HashMap<>();
partitionsPerTopic.put(topic1, partitionInfos(topic1, 3));
partitionsPerTopic.put(otherTopic, partitionInfos(otherTopic, 3));
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic,
Collections.singletonMap(consumer1, subscription(topics(topic1), 0)));
assertEquals(Collections.singleton(consumer1), assignment.keySet());
assertAssignment(partitions(tp(topic1, 0), tp(topic1, 1), tp(topic1, 2)), assignment.get(consumer1));
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testOneConsumerMultipleTopics(RackConfig rackConfig) {
initializeRacks(rackConfig);
Map<String, List<PartitionInfo>> partitionsPerTopic = setupPartitionsPerTopicWithTwoTopics(1, 2);
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic,
Collections.singletonMap(consumer1, subscription(topics(topic1, topic2), 0)));
assertEquals(Collections.singleton(consumer1), assignment.keySet());
assertAssignment(partitions(tp(topic1, 0), tp(topic2, 0), tp(topic2, 1)), assignment.get(consumer1));
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testTwoConsumersOneTopicOnePartition(RackConfig rackConfig) {
initializeRacks(rackConfig);
Map<String, List<PartitionInfo>> partitionsPerTopic = new HashMap<>();
partitionsPerTopic.put(topic1, partitionInfos(topic1, 1));
Map<String, Subscription> consumers = new HashMap<>();
consumers.put(consumer1, subscription(topics(topic1), 0));
consumers.put(consumer2, subscription(topics(topic1), 1));
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic, consumers);
assertAssignment(partitions(tp(topic1, 0)), assignment.get(consumer1));
assertAssignment(Collections.emptyList(), assignment.get(consumer2));
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testTwoConsumersOneTopicTwoPartitions(RackConfig rackConfig) {
initializeRacks(rackConfig);
Map<String, List<PartitionInfo>> partitionsPerTopic = new HashMap<>();
partitionsPerTopic.put(topic1, partitionInfos(topic1, 2));
Map<String, Subscription> consumers = new HashMap<>();
consumers.put(consumer1, subscription(topics(topic1), 0));
consumers.put(consumer2, subscription(topics(topic1), 1));
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic, consumers);
assertAssignment(partitions(tp(topic1, 0)), assignment.get(consumer1));
assertAssignment(partitions(tp(topic1, 1)), assignment.get(consumer2));
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testMultipleConsumersMixedTopics(RackConfig rackConfig) {
initializeRacks(rackConfig);
Map<String, List<PartitionInfo>> partitionsPerTopic = setupPartitionsPerTopicWithTwoTopics(3, 2);
Map<String, Subscription> consumers = new HashMap<>();
consumers.put(consumer1, subscription(topics(topic1), 0));
consumers.put(consumer2, subscription(topics(topic1, topic2), 1));
consumers.put(consumer3, subscription(topics(topic1), 2));
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic, consumers);
assertAssignment(partitions(tp(topic1, 0)), assignment.get(consumer1));
assertAssignment(partitions(tp(topic1, 1), tp(topic2, 0), tp(topic2, 1)), assignment.get(consumer2));
assertAssignment(partitions(tp(topic1, 2)), assignment.get(consumer3));
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testTwoConsumersTwoTopicsSixPartitions(RackConfig rackConfig) {
initializeRacks(rackConfig);
String topic1 = "topic1";
String topic2 = "topic2";
String consumer1 = "consumer1";
String consumer2 = "consumer2";
Map<String, List<PartitionInfo>> partitionsPerTopic = setupPartitionsPerTopicWithTwoTopics(3, 3);
Map<String, Subscription> consumers = new HashMap<>();
consumers.put(consumer1, subscription(topics(topic1, topic2), 0));
consumers.put(consumer2, subscription(topics(topic1, topic2), 1));
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic, consumers);
assertAssignment(partitions(tp(topic1, 0), tp(topic1, 1), tp(topic2, 0), tp(topic2, 1)), assignment.get(consumer1));
assertAssignment(partitions(tp(topic1, 2), tp(topic2, 2)), assignment.get(consumer2));
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testTwoStaticConsumersTwoTopicsSixPartitions(RackConfig rackConfig) {
initializeRacks(rackConfig);
// although consumer high has a higher rank than consumer low, the comparison happens on
// instance id level.
String consumerIdLow = "consumer-b";
String consumerIdHigh = "consumer-a";
Map<String, List<PartitionInfo>> partitionsPerTopic = setupPartitionsPerTopicWithTwoTopics(3, 3);
Map<String, Subscription> consumers = new HashMap<>();
Subscription consumerLowSubscription = subscription(topics(topic1, topic2), 0);
consumerLowSubscription.setGroupInstanceId(Optional.of(instance1));
consumers.put(consumerIdLow, consumerLowSubscription);
Subscription consumerHighSubscription = subscription(topics(topic1, topic2), 1);
consumerHighSubscription.setGroupInstanceId(Optional.of(instance2));
consumers.put(consumerIdHigh, consumerHighSubscription);
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic, consumers);
assertAssignment(partitions(tp(topic1, 0), tp(topic1, 1), tp(topic2, 0), tp(topic2, 1)), assignment.get(consumerIdLow));
assertAssignment(partitions(tp(topic1, 2), tp(topic2, 2)), assignment.get(consumerIdHigh));
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testOneStaticConsumerAndOneDynamicConsumerTwoTopicsSixPartitions(RackConfig rackConfig) {
initializeRacks(rackConfig);
// although consumer high has a higher rank than low, consumer low will win the comparison
// because it has instance id while consumer 2 doesn't.
String consumerIdLow = "consumer-b";
String consumerIdHigh = "consumer-a";
Map<String, List<PartitionInfo>> partitionsPerTopic = setupPartitionsPerTopicWithTwoTopics(3, 3);
Map<String, Subscription> consumers = new HashMap<>();
Subscription consumerLowSubscription = subscription(topics(topic1, topic2), 0);
consumerLowSubscription.setGroupInstanceId(Optional.of(instance1));
consumers.put(consumerIdLow, consumerLowSubscription);
consumers.put(consumerIdHigh, subscription(topics(topic1, topic2), 1));
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic, consumers);
assertAssignment(partitions(tp(topic1, 0), tp(topic1, 1), tp(topic2, 0), tp(topic2, 1)), assignment.get(consumerIdLow));
assertAssignment(partitions(tp(topic1, 2), tp(topic2, 2)), assignment.get(consumerIdHigh));
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testStaticMemberRangeAssignmentPersistent(RackConfig rackConfig) {
initializeRacks(rackConfig, 5);
Map<String, List<PartitionInfo>> partitionsPerTopic = setupPartitionsPerTopicWithTwoTopics(5, 4);
Map<String, Subscription> consumers = new HashMap<>();
int consumerIndex = 0;
for (MemberInfo m : staticMemberInfos) {
Subscription subscription = subscription(topics(topic1, topic2), consumerIndex++);
subscription.setGroupInstanceId(m.groupInstanceId);
consumers.put(m.memberId, subscription);
}
// Consumer 4 is a dynamic member.
String consumer4 = "consumer4";
consumers.put(consumer4, subscription(topics(topic1, topic2), consumerIndex++));
Map<String, List<TopicPartition>> expectedAssignment = new HashMap<>();
// Have 3 static members instance1, instance2, instance3 to be persistent
// across generations. Their assignment shall be the same.
expectedAssignment.put(consumer1, partitions(tp(topic1, 0), tp(topic1, 1), tp(topic2, 0)));
expectedAssignment.put(consumer2, partitions(tp(topic1, 2), tp(topic2, 1)));
expectedAssignment.put(consumer3, partitions(tp(topic1, 3), tp(topic2, 2)));
expectedAssignment.put(consumer4, partitions(tp(topic1, 4), tp(topic2, 3)));
Map<String, List<TopicPartition>> assignment = assignor.assignPartitions(partitionsPerTopic, consumers);
assertEquals(expectedAssignment, assignment);
// Replace dynamic member 4 with a new dynamic member 5.
consumers.remove(consumer4);
String consumer5 = "consumer5";
consumers.put(consumer5, subscription(topics(topic1, topic2), consumerIndex++));
expectedAssignment.remove(consumer4);
expectedAssignment.put(consumer5, partitions(tp(topic1, 4), tp(topic2, 3)));
assignment = assignor.assignPartitions(partitionsPerTopic, consumers);
assertEquals(expectedAssignment, assignment);
}
@ParameterizedTest(name = TEST_NAME_WITH_RACK_CONFIG)
@EnumSource(RackConfig.class)
public void testStaticMemberRangeAssignmentPersistentAfterMemberIdChanges(RackConfig rackConfig) {
initializeRacks(rackConfig);
Map<String, List<PartitionInfo>> partitionsPerTopic = setupPartitionsPerTopicWithTwoTopics(5, 5);
Map<String, Subscription> consumers = new HashMap<>();
int consumerIndex = 0;
for (MemberInfo m : staticMemberInfos) {
Subscription subscription = subscription(topics(topic1, topic2), consumerIndex++);
subscription.setGroupInstanceId(m.groupInstanceId);
consumers.put(m.memberId, subscription);
}
Map<String, List<TopicPartition>> expectedInstanceAssignment = new HashMap<>();
expectedInstanceAssignment.put(instance1,
partitions(tp(topic1, 0), tp(topic1, 1), tp(topic2, 0), tp(topic2, 1)));
expectedInstanceAssignment.put(instance2,
partitions(tp(topic1, 2), tp(topic1, 3), tp(topic2, 2), tp(topic2, 3)));
expectedInstanceAssignment.put(instance3,
partitions(tp(topic1, 4), tp(topic2, 4)));
Map<String, List<TopicPartition>> staticAssignment =
checkStaticAssignment(assignor, partitionsPerTopic, consumers);
assertEquals(expectedInstanceAssignment, staticAssignment);
// Now switch the member.id fields for each member info, the assignment should
// stay the same as last time.
String consumer4 = "consumer4";
String consumer5 = "consumer5";
consumers.put(consumer4, consumers.get(consumer3));
consumers.remove(consumer3);
consumers.put(consumer5, consumers.get(consumer2));
consumers.remove(consumer2);
Map<String, List<TopicPartition>> newStaticAssignment =
checkStaticAssignment(assignor, partitionsPerTopic, consumers);
assertEquals(staticAssignment, newStaticAssignment);
}
@Test
public void testRackAwareStaticMemberRangeAssignmentPersistentAfterMemberIdChanges() {
initializeRacks(RackConfig.BROKER_AND_CONSUMER_RACK);
Map<String, List<PartitionInfo>> partitionsPerTopic = new HashMap<>();
int replicationFactor = 2;
int numBrokerRacks = 3;
partitionsPerTopic.put(topic1, AbstractPartitionAssignorTest.partitionInfos(topic1, 5, replicationFactor, numBrokerRacks, 0));
partitionsPerTopic.put(topic2, AbstractPartitionAssignorTest.partitionInfos(topic2, 5, replicationFactor, numBrokerRacks, 0));
List<MemberInfo> staticMemberInfos = new ArrayList<>();
staticMemberInfos.add(new MemberInfo(consumer1, Optional.of(instance1), Optional.of(ALL_RACKS[0])));
staticMemberInfos.add(new MemberInfo(consumer2, Optional.of(instance2), Optional.of(ALL_RACKS[1])));
staticMemberInfos.add(new MemberInfo(consumer3, Optional.of(instance3), Optional.of(ALL_RACKS[2])));
Map<String, Subscription> consumers = new HashMap<>();
int consumerIndex = 0;
for (MemberInfo m : staticMemberInfos) {
Subscription subscription = subscription(topics(topic1, topic2), consumerIndex++);
subscription.setGroupInstanceId(m.groupInstanceId);
consumers.put(m.memberId, subscription);
}
Map<String, List<TopicPartition>> expectedInstanceAssignment = new HashMap<>();
expectedInstanceAssignment.put(instance1,
partitions(tp(topic1, 0), tp(topic1, 2), tp(topic2, 0), tp(topic2, 2)));
expectedInstanceAssignment.put(instance2,
partitions(tp(topic1, 1), tp(topic1, 3), tp(topic2, 1), tp(topic2, 3)));
expectedInstanceAssignment.put(instance3,
partitions(tp(topic1, 4), tp(topic2, 4)));
Map<String, List<TopicPartition>> staticAssignment =
checkStaticAssignment(assignor, partitionsPerTopic, consumers);
assertEquals(expectedInstanceAssignment, staticAssignment);
// Now switch the member.id fields for each member info, the assignment should
// stay the same as last time.
String consumer4 = "consumer4";
String consumer5 = "consumer5";
consumers.put(consumer4, consumers.get(consumer3));
consumers.remove(consumer3);
consumers.put(consumer5, consumers.get(consumer2));
consumers.remove(consumer2);
Map<String, List<TopicPartition>> newStaticAssignment =
checkStaticAssignment(assignor, partitionsPerTopic, consumers);
assertEquals(staticAssignment, newStaticAssignment);
}
@Test
public void testRackAwareAssignmentWithUniformSubscription() {
Map<String, Integer> topics = mkMap(mkEntry("t1", 6), mkEntry("t2", 7), mkEntry("t3", 2));
List<String> allTopics = asList("t1", "t2", "t3");
List<List<String>> consumerTopics = asList(allTopics, allTopics, allTopics);
// Verify combinations where rack-aware logic is not used.
verifyNonRackAwareAssignment(topics, consumerTopics,
asList("t1-0, t1-1, t2-0, t2-1, t2-2, t3-0", "t1-2, t1-3, t2-3, t2-4, t3-1", "t1-4, t1-5, t2-5, t2-6"));
// Verify best-effort rack-aware assignment for lower replication factor where racks have a subset of partitions.
verifyRackAssignment(assignor, topics, 1, racks(3), racks(3), consumerTopics,
asList("t1-0, t1-3, t2-0, t2-3, t2-6", "t1-1, t1-4, t2-1, t2-4, t3-0", "t1-2, t1-5, t2-2, t2-5, t3-1"), 0);
verifyRackAssignment(assignor, topics, 2, racks(3), racks(3), consumerTopics,
asList("t1-0, t1-2, t2-0, t2-2, t2-3, t3-1", "t1-1, t1-3, t2-1, t2-4, t3-0", "t1-4, t1-5, t2-5, t2-6"), 1);
// One consumer on a rack with no partitions
verifyRackAssignment(assignor, topics, 3, racks(2), racks(3), consumerTopics,
asList("t1-0, t1-1, t2-0, t2-1, t2-2, t3-0", "t1-2, t1-3, t2-3, t2-4, t3-1", "t1-4, t1-5, t2-5, t2-6"), 4);
}
@Test
public void testRackAwareAssignmentWithNonEqualSubscription() {
Map<String, Integer> topics = mkMap(mkEntry("t1", 6), mkEntry("t2", 7), mkEntry("t3", 2));
List<String> allTopics = asList("t1", "t2", "t3");
List<List<String>> consumerTopics = asList(allTopics, allTopics, asList("t1", "t3"));
// Verify combinations where rack-aware logic is not used.
verifyNonRackAwareAssignment(topics, consumerTopics,
asList("t1-0, t1-1, t2-0, t2-1, t2-2, t2-3, t3-0", "t1-2, t1-3, t2-4, t2-5, t2-6, t3-1", "t1-4, t1-5"));
// Verify best-effort rack-aware assignment for lower replication factor where racks have a subset of partitions.
verifyRackAssignment(assignor, topics, 1, racks(3), racks(3), consumerTopics,
asList("t1-0, t1-3, t2-0, t2-2, t2-3, t2-6", "t1-1, t1-4, t2-1, t2-4, t2-5, t3-0", "t1-2, t1-5, t3-1"), 2);
verifyRackAssignment(assignor, topics, 2, racks(3), racks(3), consumerTopics,
asList("t1-0, t1-2, t2-0, t2-2, t2-3, t2-5, t3-1", "t1-1, t1-3, t2-1, t2-4, t2-6, t3-0", "t1-4, t1-5"), 0);
// One consumer on a rack with no partitions
verifyRackAssignment(assignor, topics, 3, racks(2), racks(3), consumerTopics,
asList("t1-0, t1-1, t2-0, t2-1, t2-2, t2-3, t3-0", "t1-2, t1-3, t2-4, t2-5, t2-6, t3-1", "t1-4, t1-5"), 2);
}
@Test
public void testRackAwareAssignmentWithUniformPartitions() {
Map<String, Integer> topics = mkMap(mkEntry("t1", 5), mkEntry("t2", 5), mkEntry("t3", 5));
List<String> allTopics = asList("t1", "t2", "t3");
List<List<String>> consumerTopics = asList(allTopics, allTopics, allTopics);
List<String> nonRackAwareAssignment = asList(
"t1-0, t1-1, t2-0, t2-1, t3-0, t3-1",
"t1-2, t1-3, t2-2, t2-3, t3-2, t3-3",
"t1-4, t2-4, t3-4"
);
// Verify combinations where rack-aware logic is not used.
verifyNonRackAwareAssignment(topics, consumerTopics, nonRackAwareAssignment);
// Verify that co-partitioning is prioritized over rack-alignment
verifyRackAssignment(assignor, topics, 1, racks(3), racks(3), consumerTopics, nonRackAwareAssignment, 10);
verifyRackAssignment(assignor, topics, 2, racks(3), racks(3), consumerTopics, nonRackAwareAssignment, 5);
verifyRackAssignment(assignor, topics, 3, racks(2), racks(3), consumerTopics, nonRackAwareAssignment, 3);
}
@Test
public void testRackAwareAssignmentWithUniformPartitionsNonEqualSubscription() {
Map<String, Integer> topics = mkMap(mkEntry("t1", 5), mkEntry("t2", 5), mkEntry("t3", 5));
List<String> allTopics = asList("t1", "t2", "t3");
List<List<String>> consumerTopics = asList(allTopics, allTopics, asList("t1", "t3"));
// Verify combinations where rack-aware logic is not used.
verifyNonRackAwareAssignment(topics, consumerTopics,
asList("t1-0, t1-1, t2-0, t2-1, t2-2, t3-0, t3-1", "t1-2, t1-3, t2-3, t2-4, t3-2, t3-3", "t1-4, t3-4"));
// Verify that co-partitioning is prioritized over rack-alignment for topics with equal subscriptions
verifyRackAssignment(assignor, topics, 1, racks(3), racks(3), consumerTopics,
asList("t1-0, t1-1, t2-0, t2-1, t2-4, t3-0, t3-1", "t1-2, t1-3, t2-2, t2-3, t3-2, t3-3", "t1-4, t3-4"), 9);
verifyRackAssignment(assignor, topics, 2, racks(3), racks(3), consumerTopics,
asList("t1-2, t2-0, t2-1, t2-3, t3-2", "t1-0, t1-3, t2-2, t2-4, t3-0, t3-3", "t1-1, t1-4, t3-1, t3-4"), 0);
// One consumer on a rack with no partitions
verifyRackAssignment(assignor, topics, 3, racks(2), racks(3), consumerTopics,
asList("t1-0, t1-1, t2-0, t2-1, t2-2, t3-0, t3-1", "t1-2, t1-3, t2-3, t2-4, t3-2, t3-3", "t1-4, t3-4"), 2);
}
@Test
public void testRackAwareAssignmentWithCoPartitioning() {
Map<String, Integer> topics = mkMap(mkEntry("t1", 6), mkEntry("t2", 6), mkEntry("t3", 2), mkEntry("t4", 2));
List<List<String>> consumerTopics = asList(asList("t1", "t2"), asList("t1", "t2"), asList("t3", "t4"), asList("t3", "t4"));
List<String> consumerRacks = asList(ALL_RACKS[0], ALL_RACKS[1], ALL_RACKS[1], ALL_RACKS[0]);
List<String> nonRackAwareAssignment = asList(
"t1-0, t1-1, t1-2, t2-0, t2-1, t2-2",
"t1-3, t1-4, t1-5, t2-3, t2-4, t2-5",
"t3-0, t4-0",
"t3-1, t4-1"
);
verifyRackAssignment(assignor, topics, 3, racks(2), consumerRacks, consumerTopics, nonRackAwareAssignment, -1);
verifyRackAssignment(assignor, topics, 3, racks(2), consumerRacks, consumerTopics, nonRackAwareAssignment, -1);
verifyRackAssignment(assignor, topics, 2, racks(2), consumerRacks, consumerTopics, nonRackAwareAssignment, 0);
verifyRackAssignment(assignor, topics, 1, racks(2), consumerRacks, consumerTopics,
asList("t1-0, t1-2, t1-4, t2-0, t2-2, t2-4", "t1-1, t1-3, t1-5, t2-1, t2-3, t2-5", "t3-1, t4-1", "t3-0, t4-0"), 0);
List<String> allTopics = asList("t1", "t2", "t3", "t4");
consumerTopics = asList(allTopics, allTopics, allTopics, allTopics);
nonRackAwareAssignment = asList(
"t1-0, t1-1, t2-0, t2-1, t3-0, t4-0",
"t1-2, t1-3, t2-2, t2-3, t3-1, t4-1",
"t1-4, t2-4",
"t1-5, t2-5"
);
verifyRackAssignment(assignor, topics, 3, racks(2), consumerRacks, consumerTopics, nonRackAwareAssignment, -1);
verifyRackAssignment(assignor, topics, 3, racks(2), consumerRacks, consumerTopics, nonRackAwareAssignment, -1);
verifyRackAssignment(assignor, topics, 2, racks(2), consumerRacks, consumerTopics, nonRackAwareAssignment, 0);
verifyRackAssignment(assignor, topics, 1, racks(2), consumerRacks, consumerTopics,
asList("t1-0, t1-2, t2-0, t2-2, t3-0, t4-0", "t1-1, t1-3, t2-1, t2-3, t3-1, t4-1", "t1-5, t2-5", "t1-4, t2-4"), 0);
verifyRackAssignment(assignor, topics, 1, racks(3), consumerRacks, consumerTopics,
asList("t1-0, t1-3, t2-0, t2-3, t3-0, t4-0", "t1-1, t1-4, t2-1, t2-4, t3-1, t4-1", "t1-2, t2-2", "t1-5, t2-5"), 6);
}
@Test
public void testCoPartitionedAssignmentWithSameSubscription() {
Map<String, Integer> topics = mkMap(mkEntry("t1", 6), mkEntry("t2", 6),
mkEntry("t3", 2), mkEntry("t4", 2),
mkEntry("t5", 4), mkEntry("t6", 4));
List<String> topicList = asList("t1", "t2", "t3", "t4", "t5", "t6", "t7", "t8", "t9");
List<List<String>> consumerTopics = asList(topicList, topicList, topicList);
List<String> consumerRacks = asList(ALL_RACKS[0], ALL_RACKS[1], ALL_RACKS[2]);
List<String> nonRackAwareAssignment = asList(
"t1-0, t1-1, t2-0, t2-1, t3-0, t4-0, t5-0, t5-1, t6-0, t6-1",
"t1-2, t1-3, t2-2, t2-3, t3-1, t4-1, t5-2, t6-2",
"t1-4, t1-5, t2-4, t2-5, t5-3, t6-3"
);
verifyRackAssignment(assignor, topics, 3, nullRacks(3), consumerRacks, consumerTopics, nonRackAwareAssignment, -1);
AbstractPartitionAssignorTest.preferRackAwareLogic(assignor, true);
verifyRackAssignment(assignor, topics, 3, racks(3), consumerRacks, consumerTopics, nonRackAwareAssignment, 0);
List<String> rackAwareAssignment = asList(
"t1-0, t1-2, t2-0, t2-2, t3-0, t4-0, t5-1, t6-1",
"t1-1, t1-3, t2-1, t2-3, t3-1, t4-1, t5-2, t6-2",
"t1-4, t1-5, t2-4, t2-5, t5-0, t5-3, t6-0, t6-3"
);
verifyRackAssignment(assignor, topics, 2, racks(3), consumerRacks, consumerTopics, rackAwareAssignment, 0);
}
private void verifyNonRackAwareAssignment(Map<String, Integer> topics, List<List<String>> consumerTopics, List<String> nonRackAwareAssignment) {
verifyRackAssignment(assignor, topics, 3, nullRacks(3), racks(3), consumerTopics, nonRackAwareAssignment, -1);
verifyRackAssignment(assignor, topics, 3, racks(3), nullRacks(3), consumerTopics, nonRackAwareAssignment, -1);
verifyRackAssignment(assignor, topics, 3, racks(3), racks(3), consumerTopics, nonRackAwareAssignment, 0);
verifyRackAssignment(assignor, topics, 4, racks(4), racks(3), consumerTopics, nonRackAwareAssignment, 0);
verifyRackAssignment(assignor, topics, 3, racks(3), asList("d", "e", "f"), consumerTopics, nonRackAwareAssignment, -1);
verifyRackAssignment(assignor, topics, 3, racks(3), asList(null, "e", "f"), consumerTopics, nonRackAwareAssignment, -1);
AbstractPartitionAssignorTest.preferRackAwareLogic(assignor, true);
verifyRackAssignment(assignor, topics, 3, racks(3), racks(3), consumerTopics, nonRackAwareAssignment, 0);
AbstractPartitionAssignorTest.preferRackAwareLogic(assignor, false);
}
private static Map<String, List<TopicPartition>> checkStaticAssignment(AbstractPartitionAssignor assignor,
Map<String, List<PartitionInfo>> partitionsPerTopic,
Map<String, Subscription> consumers) {
Map<String, List<TopicPartition>> assignmentByMemberId = assignor.assignPartitions(partitionsPerTopic, consumers);
Map<String, List<TopicPartition>> assignmentByInstanceId = new HashMap<>();
for (Map.Entry<String, Subscription> entry : consumers.entrySet()) {
String memberId = entry.getKey();
Optional<String> instanceId = entry.getValue().groupInstanceId();
instanceId.ifPresent(id -> assignmentByInstanceId.put(id, assignmentByMemberId.get(memberId)));
}
return assignmentByInstanceId;
}
private void assertAssignment(List<TopicPartition> expected, List<TopicPartition> actual) {
// order doesn't matter for assignment, so convert to a set
assertEquals(new HashSet<>(expected), new HashSet<>(actual));
}
private Map<String, List<PartitionInfo>> setupPartitionsPerTopicWithTwoTopics(int numberOfPartitions1, int numberOfPartitions2) {
Map<String, List<PartitionInfo>> partitionsPerTopic = new HashMap<>();
partitionsPerTopic.put(topic1, partitionInfos(topic1, numberOfPartitions1));
partitionsPerTopic.put(topic2, partitionInfos(topic2, numberOfPartitions2));
return partitionsPerTopic;
}
private List<PartitionInfo> partitionInfos(String topic, int numberOfPartitions) {
return AbstractPartitionAssignorTest.partitionInfos(topic, numberOfPartitions, replicationFactor, numBrokerRacks, 0);
}
private Subscription subscription(List<String> topics, int consumerIndex) {
int numRacks = numBrokerRacks > 0 ? numBrokerRacks : ALL_RACKS.length;
Optional<String> rackId = Optional.ofNullable(hasConsumerRack ? ALL_RACKS[consumerIndex % numRacks] : null);
return new Subscription(topics, null, Collections.emptyList(), -1, rackId);
}
private static List<String> topics(String... topics) {
return Arrays.asList(topics);
}
private static List<TopicPartition> partitions(TopicPartition... partitions) {
return Arrays.asList(partitions);
}
private static TopicPartition tp(String topic, int partition) {
return new TopicPartition(topic, partition);
}
void initializeRacks(RackConfig rackConfig) {
initializeRacks(rackConfig, 3);
}
void initializeRacks(RackConfig rackConfig, int maxConsumers) {
this.replicationFactor = maxConsumers;
this.numBrokerRacks = rackConfig != RackConfig.NO_BROKER_RACK ? maxConsumers : 0;
this.hasConsumerRack = rackConfig != RackConfig.NO_CONSUMER_RACK;
// Rack and consumer ordering are the same in all the tests, so we can verify
// rack-aware logic using the same tests.
AbstractPartitionAssignorTest.preferRackAwareLogic(assignor, true);
}
}
|
RangeAssignorTest
|
java
|
apache__camel
|
components/camel-smb/src/test/java/org/apache/camel/component/smb/PollDynamicFileNameTest.java
|
{
"start": 1129,
"end": 4043
}
|
class ____ extends SmbServerTestSupport {
protected String getSmbUrl() {
return String.format(
"smb:%s/%s/noop?username=%s&password=%s&noop=true",
service.address(), service.shareName(), service.userName(), service.password());
}
@Override
public void doPostSetup() throws Exception {
prepareSmbServer();
}
@Test
public void testPollEnrichFileOne() throws Exception {
getMockEndpoint("mock:result").expectedMessageCount(2);
getMockEndpoint("mock:result").message(0).body().isEqualTo("Hello World");
getMockEndpoint("mock:result").message(1).body().isNull();
template.sendBodyAndHeader("direct:start", "Foo", "target", "myfile.txt");
template.sendBodyAndHeader("direct:start", "Bar", "target", "unknown.txt");
MockEndpoint.assertIsSatisfied(context);
// there should only be 1 file endpoint
long c = context.getEndpoints().stream()
.filter(e -> e.getEndpointKey().startsWith("smb") && e.getEndpointUri().contains("?fileName=")).count();
Assertions.assertEquals(1, c, "There should only be 1 smb endpoint");
}
@Test
public void testPollEnrichFileTwo() throws Exception {
getMockEndpoint("mock:result").expectedBodiesReceivedInAnyOrder("Hello World", "Bye World");
template.sendBodyAndHeader(getSmbUrl(), "Bye World", Exchange.FILE_NAME, "myfile2.txt");
template.sendBodyAndHeader("direct:start", "Foo", "target", "myfile.txt");
template.sendBodyAndHeader("direct:start", "Bar", "target", "myfile2.txt");
MockEndpoint.assertIsSatisfied(context);
// there should only be 1 file endpoint
long c = context.getEndpoints().stream()
.filter(e -> e.getEndpointKey().startsWith("smb") && e.getEndpointUri().contains("?fileName=")).count();
Assertions.assertEquals(1, c, "There should only be 1 smb endpoint");
}
private void prepareSmbServer() throws Exception {
// prepares the smb Server by creating a file on the server that we want
// to unit test that we can pool and store as a local file
Endpoint endpoint = context.getEndpoint(getSmbUrl());
Exchange exchange = endpoint.createExchange();
exchange.getIn().setBody("Hello World");
exchange.getIn().setHeader(Exchange.FILE_NAME, "myfile.txt");
Producer producer = endpoint.createProducer();
producer.start();
producer.process(exchange);
producer.stop();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
from("direct:start")
.poll(getSmbUrl() + "&fileName=${header.target}", 2000)
.to("mock:result");
}
};
}
}
|
PollDynamicFileNameTest
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/RedisCommandBuilder.java
|
{
"start": 2365,
"end": 175822
}
|
class ____<K, V> extends BaseRedisCommandBuilder<K, V> {
RedisCommandBuilder(RedisCodec<K, V> codec) {
super(codec);
}
Command<K, V, Set<AclCategory>> aclCat() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(CAT);
return createCommand(ACL, new EnumSetOutput<>(codec, AclCategory.class, String::toUpperCase, it -> null), args);
}
Command<K, V, Set<CommandType>> aclCat(AclCategory category) {
LettuceAssert.notNull(category, "Category " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(CAT).add(category.name().toLowerCase());
return createCommand(ACL, new EnumSetOutput<>(codec, CommandType.class, String::toUpperCase, it -> null), args);
}
Command<K, V, Long> aclDeluser(String... usernames) {
notEmpty(usernames);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(DELUSER);
for (String username : usernames) {
args.add(username);
}
return createCommand(ACL, new IntegerOutput<>(codec), args);
}
Command<K, V, String> aclDryRun(String username, String command, String... commandArgs) {
LettuceAssert.notNull(username, "username " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(command, "command " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(DRYRUN).add(username).add(command);
for (String commandArg : commandArgs) {
args.add(commandArg);
}
return createCommand(ACL, new StatusOutput<>(codec), args);
}
Command<K, V, String> aclDryRun(String username, RedisCommand<K, V, ?> command) {
LettuceAssert.notNull(username, "username " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(command, "command " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(DRYRUN).add(username).add(command.getType()).addAll(command.getArgs());
return createCommand(ACL, new StatusOutput<>(codec), args);
}
Command<K, V, String> aclGenpass() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(GENPASS);
return createCommand(ACL, new StatusOutput<>(codec), args);
}
Command<K, V, String> aclGenpass(int bits) {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(GENPASS).add(bits);
return createCommand(ACL, new StatusOutput<>(codec), args);
}
Command<K, V, List<Object>> aclGetuser(String username) {
LettuceAssert.notNull(username, "Username " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(GETUSER).add(username);
return createCommand(ACL, new NestedMultiOutput<>(codec), args);
}
Command<K, V, List<String>> aclList() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(LIST);
return createCommand(ACL, new StringListOutput<>(codec), args);
}
Command<K, V, String> aclLoad() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(LOAD);
return createCommand(ACL, new StatusOutput<>(codec), args);
}
Command<K, V, List<Map<String, Object>>> aclLog() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(LOG);
return new Command(ACL, new ListOfGenericMapsOutput<>(StringCodec.ASCII), args);
}
Command<K, V, List<Map<String, Object>>> aclLog(int count) {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(LOG).add(count);
return new Command(ACL, new ListOfGenericMapsOutput<>(StringCodec.ASCII), args);
}
Command<K, V, String> aclLogReset() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(LOG).add(RESET);
return createCommand(ACL, new StatusOutput<>(codec), args);
}
Command<K, V, String> aclSave() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(CommandKeyword.SAVE);
return createCommand(ACL, new StatusOutput<>(codec), args);
}
Command<K, V, String> aclSetuser(String username, AclSetuserArgs setuserArgs) {
notNullKey(username);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(SETUSER).add(username);
setuserArgs.build(args);
return createCommand(ACL, new StatusOutput<>(codec), args);
}
Command<K, V, List<String>> aclUsers() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(USERS);
return createCommand(ACL, new StringListOutput<>(codec), args);
}
Command<K, V, String> aclWhoami() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(WHOAMI);
return createCommand(ACL, new StatusOutput<>(codec), args);
}
Command<K, V, Long> append(K key, V value) {
notNullKey(key);
return createCommand(APPEND, new IntegerOutput<>(codec), key, value);
}
Command<K, V, String> asking() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
return createCommand(ASKING, new StatusOutput<>(codec), args);
}
Command<K, V, String> auth(CharSequence password) {
LettuceAssert.notNull(password, "Password " + MUST_NOT_BE_NULL);
char[] chars = new char[password.length()];
for (int i = 0; i < password.length(); i++) {
chars[i] = password.charAt(i);
}
return auth(chars);
}
Command<K, V, String> auth(char[] password) {
LettuceAssert.notNull(password, "Password " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(password);
return createCommand(AUTH, new StatusOutput<>(codec), args);
}
Command<K, V, String> auth(String username, CharSequence password) {
LettuceAssert.notNull(username, "Username " + MUST_NOT_BE_NULL);
LettuceAssert.isTrue(!username.isEmpty(), "Username " + MUST_NOT_BE_EMPTY);
LettuceAssert.notNull(password, "Password " + MUST_NOT_BE_NULL);
char[] chars = new char[password.length()];
for (int i = 0; i < password.length(); i++) {
chars[i] = password.charAt(i);
}
return auth(username, chars);
}
Command<K, V, String> auth(String username, char[] password) {
LettuceAssert.notNull(username, "Username " + MUST_NOT_BE_NULL);
LettuceAssert.isTrue(!username.isEmpty(), "Username " + MUST_NOT_BE_EMPTY);
LettuceAssert.notNull(password, "Password " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(username).add(password);
return createCommand(AUTH, new StatusOutput<>(codec), args);
}
Command<K, V, String> bgrewriteaof() {
return createCommand(BGREWRITEAOF, new StatusOutput<>(codec));
}
Command<K, V, String> bgsave() {
return createCommand(BGSAVE, new StatusOutput<>(codec));
}
Command<K, V, Long> bitcount(K key) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
return createCommand(BITCOUNT, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitcount(K key, long start, long end) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(start).add(end);
return createCommand(BITCOUNT, new IntegerOutput<>(codec), args);
}
Command<K, V, List<Long>> bitfield(K key, BitFieldArgs bitFieldArgs) {
notNullKey(key);
LettuceAssert.notNull(bitFieldArgs, "BitFieldArgs must not be null");
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key);
bitFieldArgs.build(args);
return createCommand(BITFIELD, (CommandOutput) new ArrayOutput<>(codec), args);
}
Command<K, V, List<Value<Long>>> bitfieldValue(K key, BitFieldArgs bitFieldArgs) {
notNullKey(key);
LettuceAssert.notNull(bitFieldArgs, "BitFieldArgs must not be null");
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key);
bitFieldArgs.build(args);
return createCommand(BITFIELD, (CommandOutput) new ValueValueListOutput<>(codec), args);
}
Command<K, V, Long> bitopAnd(K destination, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(AND).addKey(destination).addKeys(keys);
return createCommand(BITOP, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitopNot(K destination, K source) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(NOT).addKey(destination).addKey(source);
return createCommand(BITOP, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitopOr(K destination, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(OR).addKey(destination).addKeys(keys);
return createCommand(BITOP, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitopXor(K destination, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(XOR).addKey(destination).addKeys(keys);
return createCommand(BITOP, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitopDiff(K destination, K sourceKey, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(sourceKey, "Source key " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(DIFF).addKey(destination).addKey(sourceKey).addKeys(keys);
return createCommand(BITOP, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitopDiff1(K destination, K sourceKey, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(sourceKey, "Source key " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(DIFF1).addKey(destination).addKey(sourceKey).addKeys(keys);
return createCommand(BITOP, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitopAndor(K destination, K sourceKey, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(sourceKey, "Source key " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(ANDOR).addKey(destination).addKey(sourceKey).addKeys(keys);
return createCommand(BITOP, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitopOne(K destination, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(ONE).addKey(destination).addKeys(keys);
return createCommand(BITOP, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitpos(K key, boolean state) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(state ? 1 : 0);
return createCommand(BITPOS, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitpos(K key, boolean state, long start) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(state ? 1 : 0).add(start);
return createCommand(BITPOS, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> bitpos(K key, boolean state, long start, long end) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(state ? 1 : 0).add(start).add(end);
return createCommand(BITPOS, new IntegerOutput<>(codec), args);
}
Command<K, V, V> blmove(K source, K destination, LMoveArgs lMoveArgs, long timeout) {
LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(lMoveArgs, "LMoveArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(source).addKey(destination);
lMoveArgs.build(args);
args.add(timeout);
return createCommand(BLMOVE, new ValueOutput<>(codec), args);
}
Command<K, V, V> blmove(K source, K destination, LMoveArgs lMoveArgs, double timeout) {
LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(lMoveArgs, "LMoveArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(source).addKey(destination);
lMoveArgs.build(args);
args.add(timeout);
return createCommand(BLMOVE, new ValueOutput<>(codec), args);
}
Command<K, V, KeyValue<K, List<V>>> blmpop(long timeout, LMPopArgs lmPopArgs, K... keys) {
LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(lmPopArgs, "LMPopArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(timeout).add(keys.length).addKeys(keys);
lmPopArgs.build(args);
return createCommand(BLMPOP, new KeyValueValueListOutput<>(codec), args);
}
Command<K, V, KeyValue<K, List<V>>> blmpop(double timeout, LMPopArgs lmPopArgs, K... keys) {
LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(lmPopArgs, "LMPopArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(timeout).add(keys.length).addKeys(keys);
lmPopArgs.build(args);
return createCommand(BLMPOP, new KeyValueValueListOutput<>(codec), args);
}
Command<K, V, KeyValue<K, V>> blpop(long timeout, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys).add(timeout);
return createCommand(BLPOP, new KeyValueOutput<>(codec), args);
}
Command<K, V, KeyValue<K, V>> blpop(double timeout, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys).add(timeout);
return createCommand(BLPOP, new KeyValueOutput<>(codec), args);
}
Command<K, V, KeyValue<K, V>> brpop(long timeout, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys).add(timeout);
return createCommand(BRPOP, new KeyValueOutput<>(codec), args);
}
Command<K, V, KeyValue<K, V>> brpop(double timeout, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys).add(timeout);
return createCommand(BRPOP, new KeyValueOutput<>(codec), args);
}
Command<K, V, V> brpoplpush(long timeout, K source, K destination) {
LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(source).addKey(destination).add(timeout);
return createCommand(BRPOPLPUSH, new ValueOutput<>(codec), args);
}
Command<K, V, V> brpoplpush(double timeout, K source, K destination) {
LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(source).addKey(destination).add(timeout);
return createCommand(BRPOPLPUSH, new ValueOutput<>(codec), args);
}
Command<K, V, String> clientCaching(boolean enabled) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(CACHING).add(enabled ? YES : NO);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, K> clientGetname() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(GETNAME);
return createCommand(CLIENT, new KeyOutput<>(codec), args);
}
Command<K, V, Long> clientGetredir() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(GETREDIR);
return createCommand(CLIENT, new IntegerOutput<>(codec), args);
}
Command<K, V, String> clientKill(String addr) {
LettuceAssert.notNull(addr, "Addr " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(addr, "Addr " + MUST_NOT_BE_EMPTY);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(KILL).add(addr);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, Long> clientKill(KillArgs killArgs) {
LettuceAssert.notNull(killArgs, "KillArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(KILL);
killArgs.build(args);
return createCommand(CLIENT, new IntegerOutput<>(codec), args);
}
Command<K, V, String> clientList() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(LIST);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, String> clientList(ClientListArgs clientListArgs) {
LettuceAssert.notNull(clientListArgs, "ClientListArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(LIST);
clientListArgs.build(args);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, String> clientInfo() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(CommandKeyword.INFO);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, String> clientNoEvict(boolean on) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add("NO-EVICT").add(on ? ON : OFF);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, Long> clientId() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(ID);
return createCommand(CLIENT, new IntegerOutput<>(codec), args);
}
Command<K, V, String> clientPause(long timeout) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(PAUSE).add(timeout);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, String> clientSetname(K name) {
LettuceAssert.notNull(name, "Name " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SETNAME).addKey(name);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, String> clientSetinfo(String key, String value) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SETINFO).add(key).add(value);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, String> clientTracking(TrackingArgs trackingArgs) {
LettuceAssert.notNull(trackingArgs, "TrackingArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(TRACKING);
trackingArgs.build(args);
return createCommand(CLIENT, new StatusOutput<>(codec), args);
}
Command<K, V, TrackingInfo> clientTrackinginfo() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(TRACKINGINFO);
return new Command<>(CLIENT, new ComplexOutput<>(codec, TrackingInfoParser.INSTANCE), args);
}
Command<K, V, Long> clientUnblock(long id, UnblockType type) {
LettuceAssert.notNull(type, "UnblockType " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(UNBLOCK).add(id).add(type);
return createCommand(CLIENT, new IntegerOutput<>(codec), args);
}
Command<K, V, String> clusterAddslots(int[] slots) {
notEmptySlots(slots);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(ADDSLOTS);
for (int slot : slots) {
args.add(slot);
}
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterAddSlotsRange(Range<Integer>... ranges) {
notEmptyRanges(ranges);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(ADDSLOTSRANGE);
for (Range<Integer> range : ranges) {
args.add(range.getLower().getValue());
args.add(range.getUpper().getValue());
}
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterBumpepoch() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(BUMPEPOCH);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, Long> clusterCountFailureReports(String nodeId) {
assertNodeId(nodeId);
CommandArgs<K, V> args = new CommandArgs<>(codec).add("COUNT-FAILURE-REPORTS").add(nodeId);
return createCommand(CLUSTER, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> clusterCountKeysInSlot(int slot) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(COUNTKEYSINSLOT).add(slot);
return createCommand(CLUSTER, new IntegerOutput<>(codec), args);
}
Command<K, V, String> clusterDelslots(int[] slots) {
notEmptySlots(slots);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(DELSLOTS);
for (int slot : slots) {
args.add(slot);
}
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterDelSlotsRange(Range<Integer>... ranges) {
notEmptyRanges(ranges);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(DELSLOTSRANGE);
for (Range<Integer> range : ranges) {
args.add(range.getLower().getValue());
args.add(range.getUpper().getValue());
}
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterFailover(boolean force) {
return clusterFailover(force, false);
}
Command<K, V, String> clusterFailover(boolean force, boolean takeOver) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(FAILOVER);
if (force) {
args.add(FORCE);
} else if (takeOver) {
args.add(TAKEOVER);
}
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterFlushslots() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(FLUSHSLOTS);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterForget(String nodeId) {
assertNodeId(nodeId);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(FORGET).add(nodeId);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, List<K>> clusterGetKeysInSlot(int slot, int count) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(GETKEYSINSLOT).add(slot).add(count);
return createCommand(CLUSTER, new KeyListOutput<>(codec), args);
}
Command<K, V, String> clusterInfo() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(CommandType.INFO);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, Long> clusterKeyslot(K key) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(KEYSLOT).addKey(key);
return createCommand(CLUSTER, new IntegerOutput<>(codec), args);
}
Command<K, V, String> clusterMeet(String ip, int port) {
LettuceAssert.notNull(ip, "IP " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(ip, "IP " + MUST_NOT_BE_EMPTY);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(MEET).add(ip).add(port);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterMyId() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(MYID);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterMyShardId() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(MYSHARDID);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterNodes() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(NODES);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterReplicate(String nodeId) {
assertNodeId(nodeId);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(REPLICATE).add(nodeId);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, List<String>> clusterReplicas(String nodeId) {
assertNodeId(nodeId);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(REPLICAS).add(nodeId);
return createCommand(CLUSTER, new StringListOutput<>(codec), args);
}
Command<K, V, String> clusterReset(boolean hard) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(RESET);
if (hard) {
args.add(HARD);
} else {
args.add(SOFT);
}
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterSaveconfig() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SAVECONFIG);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterSetConfigEpoch(long configEpoch) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add("SET-CONFIG-EPOCH").add(configEpoch);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterSetSlotImporting(int slot, String nodeId) {
assertNodeId(nodeId);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SETSLOT).add(slot).add(IMPORTING).add(nodeId);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterSetSlotMigrating(int slot, String nodeId) {
assertNodeId(nodeId);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SETSLOT).add(slot).add(MIGRATING).add(nodeId);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterSetSlotNode(int slot, String nodeId) {
assertNodeId(nodeId);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SETSLOT).add(slot).add(NODE).add(nodeId);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, String> clusterSetSlotStable(int slot) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SETSLOT).add(slot).add(STABLE);
return createCommand(CLUSTER, new StatusOutput<>(codec), args);
}
Command<K, V, List<Object>> clusterShards() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SHARDS);
return createCommand(CLUSTER, new ArrayOutput<>(codec), args);
}
Command<K, V, List<String>> clusterSlaves(String nodeId) {
assertNodeId(nodeId);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SLAVES).add(nodeId);
return createCommand(CLUSTER, new StringListOutput<>(codec), args);
}
Command<K, V, List<Object>> clusterSlots() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SLOTS);
return createCommand(CLUSTER, new ArrayOutput<>(codec), args);
}
Command<K, V, List<Object>> command() {
CommandArgs<String, String> args = new CommandArgs<>(StringCodec.UTF8);
return Command.class.cast(new Command(COMMAND, new ArrayOutput<>(StringCodec.UTF8), args));
}
Command<K, V, Long> commandCount() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(COUNT);
return createCommand(COMMAND, new IntegerOutput<>(codec), args);
}
Command<K, V, List<Object>> commandInfo(String... commands) {
LettuceAssert.notNull(commands, "Commands " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(commands, "Commands " + MUST_NOT_BE_EMPTY);
LettuceAssert.noNullElements(commands, "Commands " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
CommandArgs<String, String> args = new CommandArgs<>(StringCodec.UTF8);
args.add(CommandKeyword.INFO);
for (String command : commands) {
args.add(command);
}
return Command.class.cast(new Command<>(COMMAND, new ArrayOutput<>(StringCodec.UTF8), args));
}
    // CONFIG GET <parameter>: single-parameter lookup; configuration names/values
    // are textual, so the UTF-8 codec is used regardless of the connection codec.
    Command<K, V, Map<String, String>> configGet(String parameter) {
        LettuceAssert.notNull(parameter, "Parameter " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(parameter, "Parameter " + MUST_NOT_BE_EMPTY);
        CommandArgs<String, String> args = new CommandArgs<>(StringCodec.UTF8).add(GET).add(parameter);
        return Command.class.cast(new Command<>(CONFIG, new MapOutput<>(StringCodec.UTF8), args));
    }

    // CONFIG GET <parameter...>: multi-parameter (glob-capable) lookup.
    Command<K, V, Map<String, String>> configGet(String... parameters) {
        LettuceAssert.notNull(parameters, "Parameters " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(parameters, "Parameters " + MUST_NOT_BE_EMPTY);
        CommandArgs<String, String> args = new CommandArgs<>(StringCodec.UTF8).add(GET);
        for (String parameter : parameters) {
            args.add(parameter);
        }
        return Command.class.cast(new Command<>(CONFIG, new MapOutput<>(StringCodec.UTF8), args));
    }

    // CONFIG RESETSTAT: reset INFO statistics counters.
    Command<K, V, String> configResetstat() {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(RESETSTAT);
        return createCommand(CONFIG, new StatusOutput<>(codec), args);
    }

    // CONFIG REWRITE: persist the in-memory configuration to redis.conf.
    Command<K, V, String> configRewrite() {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(REWRITE);
        return createCommand(CONFIG, new StatusOutput<>(codec), args);
    }

    // CONFIG SET <parameter> <value>: set a single configuration parameter.
    Command<K, V, String> configSet(String parameter, String value) {
        LettuceAssert.notNull(parameter, "Parameter " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(parameter, "Parameter " + MUST_NOT_BE_EMPTY);
        LettuceAssert.notNull(value, "Value " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(SET).add(parameter).add(value);
        return createCommand(CONFIG, new StatusOutput<>(codec), args);
    }
Command<K, V, String> configSet(Map<String, String> configValues) {
LettuceAssert.notNull(configValues, "ConfigValues " + MUST_NOT_BE_NULL);
LettuceAssert.isTrue(!configValues.isEmpty(), "ConfigValues " + MUST_NOT_BE_EMPTY);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SET);
configValues.forEach((parameter, value) -> {
args.add(parameter);
args.add(value);
});
return createCommand(CONFIG, new StatusOutput<>(codec), args);
}
    // DBSIZE: number of keys in the current database.
    Command<K, V, Long> dbsize() {
        return createCommand(DBSIZE, new IntegerOutput<>(codec));
    }

    // DEBUG CRASH-AND-RECOVER [delay]: crash the server and restart after an
    // optional delay (delay is appended only when provided).
    Command<K, V, String> debugCrashAndRecover(Long delay) {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add("CRASH-AND-RECOVER");
        if (delay != null) {
            args.add(delay);
        }
        return createCommand(DEBUG, new StatusOutput<>(codec), args);
    }

    // DEBUG HTSTATS <db>: hash-table statistics for the given database index.
    Command<K, V, String> debugHtstats(int db) {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(HTSTATS).add(db);
        return createCommand(DEBUG, new StatusOutput<>(codec), args);
    }

    // DEBUG OBJECT <key>: low-level object encoding information for a key.
    Command<K, V, String> debugObject(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(OBJECT).addKey(key);
        return createCommand(DEBUG, new StatusOutput<>(codec), args);
    }

    // DEBUG OOM: force an out-of-memory crash; no reply is expected (null output).
    Command<K, V, Void> debugOom() {
        return createCommand(DEBUG, null, new CommandArgs<>(codec).add("OOM"));
    }

    // DEBUG RELOAD: save the dataset and reload it from disk.
    Command<K, V, String> debugReload() {
        return createCommand(DEBUG, new StatusOutput<>(codec), new CommandArgs<>(codec).add(RELOAD));
    }

    // DEBUG RESTART [delay]: restart the server after an optional delay.
    Command<K, V, String> debugRestart(Long delay) {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(RESTART);
        if (delay != null) {
            args.add(delay);
        }
        return createCommand(DEBUG, new StatusOutput<>(codec), args);
    }

    // DEBUG SDSLEN <key>: SDS string length details for a key's value.
    Command<K, V, String> debugSdslen(K key) {
        notNullKey(key);
        return createCommand(DEBUG, new StatusOutput<>(codec), new CommandArgs<>(codec).add("SDSLEN").addKey(key));
    }

    // DEBUG SEGFAULT: crash the server; no reply is expected (null output).
    Command<K, V, Void> debugSegfault() {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(SEGFAULT);
        return createCommand(DEBUG, null, args);
    }
    // DECR <key>: decrement the integer value of a key by one.
    Command<K, V, Long> decr(K key) {
        notNullKey(key);
        return createCommand(DECR, new IntegerOutput<>(codec), key);
    }

    // DECRBY <key> <amount>: decrement the integer value of a key by amount.
    Command<K, V, Long> decrby(K key, long amount) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(amount);
        return createCommand(DECRBY, new IntegerOutput<>(codec), args);
    }

    // DEL <key...>: delete one or more keys; returns the number removed.
    Command<K, V, Long> del(K... keys) {
        notEmpty(keys);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(DEL, new IntegerOutput<>(codec), args);
    }

    // DEL variant accepting an Iterable of keys.
    Command<K, V, Long> del(Iterable<K> keys) {
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(DEL, new IntegerOutput<>(codec), args);
    }

    // DELEX <key> <condition>: conditional delete; the CompareCondition appends
    // its own comparison arguments to the command.
    Command<K, V, Long> delex(K key, CompareCondition<V> condition) {
        notNullKey(key);
        LettuceAssert.notNull(condition, "ValueCondition " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        condition.build(args);
        return createCommand(DELEX, new IntegerOutput<>(codec), args);
    }

    // DISCARD: abort a MULTI transaction.
    Command<K, V, String> discard() {
        return createCommand(DISCARD, new StatusOutput<>(codec));
    }

    // DUMP <key>: serialized (RDB-format) representation of a key's value.
    Command<K, V, byte[]> dump(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        return createCommand(DUMP, new ByteArrayOutput<>(codec), args);
    }

    // ECHO <message>: returns the given message back from the server.
    Command<K, V, V> echo(V msg) {
        LettuceAssert.notNull(msg, "message " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addValue(msg);
        return createCommand(ECHO, new ValueOutput<>(codec), args);
    }
    // EVAL convenience overload: delegates to the read/write form (readonly = false).
    <T> Command<K, V, T> eval(byte[] script, ScriptOutputType type, K[] keys, V... values) {
        return eval(script, type, false, keys, values);
    }

    // EVAL / EVAL_RO <script> <numkeys> <keys...> <values...>: run a Lua script.
    // The output parser is chosen from the declared ScriptOutputType.
    <T> Command<K, V, T> eval(byte[] script, ScriptOutputType type, boolean readonly, K[] keys, V... values) {
        LettuceAssert.notNull(script, "Script " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(type, "ScriptOutputType " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(values, "Values " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.add(script).add(keys.length).addKeys(keys).addValues(values);
        CommandOutput<K, V, T> output = newScriptOutput(codec, type);
        return createCommand(readonly ? EVAL_RO : EVAL, output, args);
    }

    // EVALSHA convenience overload: delegates to the read/write form (readonly = false).
    <T> Command<K, V, T> evalsha(String digest, ScriptOutputType type, K[] keys, V... values) {
        return evalsha(digest, type, false, keys, values);
    }

    // EVALSHA / EVALSHA_RO <sha1> <numkeys> <keys...> <values...>: run a cached
    // script by its SHA1 digest.
    <T> Command<K, V, T> evalsha(String digest, ScriptOutputType type, boolean readonly, K[] keys, V... values) {
        LettuceAssert.notNull(digest, "Digest " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(digest, "Digest " + MUST_NOT_BE_EMPTY);
        LettuceAssert.notNull(type, "ScriptOutputType " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(values, "Values " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.add(digest).add(keys.length).addKeys(keys).addValues(values);
        CommandOutput<K, V, T> output = newScriptOutput(codec, type);
        return createCommand(readonly ? EVALSHA_RO : EVALSHA, output, args);
    }
    // EXISTS <key>: single-key form returning a boolean.
    Command<K, V, Boolean> exists(K key) {
        notNullKey(key);
        return createCommand(EXISTS, new BooleanOutput<>(codec), key);
    }

    // EXISTS <key...>: multi-key form returning the count of existing keys.
    Command<K, V, Long> exists(K... keys) {
        notEmpty(keys);
        return createCommand(EXISTS, new IntegerOutput<>(codec), new CommandArgs<>(codec).addKeys(keys));
    }

    // EXISTS variant accepting an Iterable of keys.
    Command<K, V, Long> exists(Iterable<K> keys) {
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        return createCommand(EXISTS, new IntegerOutput<>(codec), new CommandArgs<>(codec).addKeys(keys));
    }

    // EXPIRE <key> <seconds> [NX|XX|GT|LT]: set a relative TTL; optional
    // ExpireArgs contributes the condition flags.
    Command<K, V, Boolean> expire(K key, long seconds, ExpireArgs expireArgs) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(seconds);
        if (expireArgs != null) {
            expireArgs.build(args);
        }
        return createCommand(EXPIRE, new BooleanOutput<>(codec), args);
    }
    // HEXPIRE <key> <seconds> [args] FIELDS <n> <field...>: relative TTL per hash
    // field; returns one status code per field.
    Command<K, V, List<Long>> hexpire(K key, long seconds, ExpireArgs expireArgs, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(seconds);
        if (expireArgs != null) {
            expireArgs.build(args);
        }
        args.add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HEXPIRE, new IntegerListOutput<>(codec), args);
    }

    // HEXPIREAT: absolute Unix-time (seconds) expiry per hash field.
    Command<K, V, List<Long>> hexpireat(K key, long seconds, ExpireArgs expireArgs, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(seconds);
        if (expireArgs != null) {
            expireArgs.build(args);
        }
        args.add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HEXPIREAT, new IntegerListOutput<>(codec), args);
    }

    // HTTL: remaining TTL in seconds per hash field.
    Command<K, V, List<Long>> httl(K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        args.add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HTTL, new IntegerListOutput<>(codec), args);
    }

    // HPEXPIRE: relative TTL in milliseconds per hash field.
    Command<K, V, List<Long>> hpexpire(K key, long milliseconds, ExpireArgs expireArgs, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(milliseconds);
        if (expireArgs != null) {
            expireArgs.build(args);
        }
        args.add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HPEXPIRE, new IntegerListOutput<>(codec), args);
    }

    // HPEXPIREAT: absolute Unix-time (milliseconds) expiry per hash field.
    Command<K, V, List<Long>> hpexpireat(K key, long timestamp, ExpireArgs expireArgs, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(timestamp);
        if (expireArgs != null) {
            expireArgs.build(args);
        }
        args.add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HPEXPIREAT, new IntegerListOutput<>(codec), args);
    }

    // HPEXPIRETIME: absolute expiry time in milliseconds per hash field.
    Command<K, V, List<Long>> hpexpiretime(K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        args.add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HPEXPIRETIME, new IntegerListOutput<>(codec), args);
    }

    // HPTTL: remaining TTL in milliseconds per hash field.
    Command<K, V, List<Long>> hpttl(K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        args.add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HPTTL, new IntegerListOutput<>(codec), args);
    }
    // EXPIREAT <key> <timestamp> [NX|XX|GT|LT]: set an absolute Unix-time expiry.
    Command<K, V, Boolean> expireat(K key, long timestamp, ExpireArgs expireArgs) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(timestamp);
        if (expireArgs != null) {
            expireArgs.build(args);
        }
        return createCommand(EXPIREAT, new BooleanOutput<>(codec), args);
    }

    // EXPIRETIME <key>: absolute Unix time (seconds) at which the key expires.
    Command<K, V, Long> expiretime(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        return createCommand(EXPIRETIME, new IntegerOutput<>(codec), args);
    }
Command<K, V, List<Long>> hexpiretime(K key, K... fields) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(FIELDS).add(fields.length).addKeys(fields);
return createCommand(HEXPIRETIME, new IntegerListOutput<>(codec), args);
}
    // FLUSHALL: remove all keys from all databases.
    Command<K, V, String> flushall() {
        return createCommand(FLUSHALL, new StatusOutput<>(codec));
    }

    // FLUSHALL [ASYNC|SYNC]: flush-mode-qualified variant.
    Command<K, V, String> flushall(FlushMode flushMode) {
        LettuceAssert.notNull(flushMode, "FlushMode " + MUST_NOT_BE_NULL);
        return createCommand(FLUSHALL, new StatusOutput<>(codec), new CommandArgs<>(codec).add(flushMode));
    }

    // FLUSHDB: remove all keys from the current database.
    Command<K, V, String> flushdb() {
        return createCommand(FLUSHDB, new StatusOutput<>(codec));
    }

    // FLUSHDB [ASYNC|SYNC]: flush-mode-qualified variant.
    Command<K, V, String> flushdb(FlushMode flushMode) {
        LettuceAssert.notNull(flushMode, "FlushMode " + MUST_NOT_BE_NULL);
        return createCommand(FLUSHDB, new StatusOutput<>(codec), new CommandArgs<>(codec).add(flushMode));
    }

    // FCALL / FCALL_RO <function> <numkeys> <keys...> <values...>: invoke a
    // server-side function; output parser chosen from ScriptOutputType.
    <T> Command<K, V, T> fcall(String function, ScriptOutputType type, boolean readonly, K[] keys, V... values) {
        LettuceAssert.notEmpty(function, "Function " + MUST_NOT_BE_EMPTY);
        LettuceAssert.notNull(type, "ScriptOutputType " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(values, "Values " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.add(function).add(keys.length).addKeys(keys).addValues(values);
        CommandOutput<K, V, T> output = newScriptOutput(codec, type);
        return createCommand(readonly ? FCALL_RO : FCALL, output, args);
    }
    // FUNCTION LOAD [REPLACE] <code>: load a function library; REPLACE is
    // appended before the code only when requested.
    Command<K, V, String> functionLoad(byte[] functionCode, boolean replace) {
        LettuceAssert.notNull(functionCode, "Function code " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(LOAD);
        if (replace) {
            args.add(REPLACE);
        }
        args.add(functionCode);
        return createCommand(FUNCTION, new StatusOutput<>(codec), args);
    }

    // FUNCTION DUMP: serialized payload of all loaded libraries.
    Command<K, V, byte[]> functionDump() {
        return createCommand(FUNCTION, new ByteArrayOutput<>(codec), new CommandArgs<>(codec).add(DUMP));
    }
Command<K, V, String> functionRestore(byte dump[], FunctionRestoreMode mode) {
LettuceAssert.notNull(dump, "Function dump " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(RESTORE).add(dump);
if (mode != null) {
args.add(mode);
}
return createCommand(FUNCTION, new StatusOutput<>(codec), args);
}
    // FUNCTION FLUSH [ASYNC|SYNC]: remove all loaded libraries.
    Command<K, V, String> functionFlush(FlushMode mode) {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(FLUSH);
        if (mode != null) {
            args.add(mode);
        }
        return createCommand(FUNCTION, new StatusOutput<>(codec), args);
    }

    // FUNCTION KILL: kill the currently executing function.
    Command<K, V, String> functionKill() {
        return createCommand(FUNCTION, new StatusOutput<>(codec), new CommandArgs<>(codec).add(KILL));
    }

    // FUNCTION LIST [LIBRARYNAME <pattern>]: list loaded libraries; the library
    // metadata is textual, hence the UTF-8 output codec and raw cast.
    Command<K, V, List<Map<String, Object>>> functionList(String pattern) {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(LIST);
        if (pattern != null) {
            args.add("LIBRARYNAME").add(pattern);
        }
        return createCommand(FUNCTION, (CommandOutput) new ObjectOutput<>(StringCodec.UTF8), args);
    }
    // GEOADD <key> [args] <lng> <lat> <member>: single-member form; optional
    // GeoAddArgs (e.g. NX/XX/CH) are emitted before the coordinates.
    Command<K, V, Long> geoadd(K key, double longitude, double latitude, V member, GeoAddArgs geoArgs) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        if (geoArgs != null) {
            geoArgs.build(args);
        }
        args.add(longitude).add(latitude).addValue(member);
        return createCommand(GEOADD, new IntegerOutput<>(codec), args);
    }

    // GEOADD bulk form: flat array of (lng, lat, member) triples, hence the
    // length-multiple-of-3 precondition and stride-3 loop with casts.
    Command<K, V, Long> geoadd(K key, Object[] lngLatMember, GeoAddArgs geoArgs) {
        notNullKey(key);
        LettuceAssert.notNull(lngLatMember, "LngLatMember " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(lngLatMember, "LngLatMember " + MUST_NOT_BE_EMPTY);
        LettuceAssert.noNullElements(lngLatMember, "LngLatMember " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
        LettuceAssert.isTrue(lngLatMember.length % 3 == 0, "LngLatMember.length must be a multiple of 3 and contain a "
                + "sequence of longitude1, latitude1, member1, longitude2, latitude2, member2, ... longitudeN, latitudeN, memberN");
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        if (geoArgs != null) {
            geoArgs.build(args);
        }
        for (int i = 0; i < lngLatMember.length; i += 3) {
            args.add((Double) lngLatMember[i]);
            args.add((Double) lngLatMember[i + 1]);
            args.addValue((V) lngLatMember[i + 2]);
        }
        return createCommand(GEOADD, new IntegerOutput<>(codec), args);
    }

    // GEOADD typed bulk form using GeoValue wrappers (x = longitude, y = latitude
    // per the coordinate accessors used below).
    Command<K, V, Long> geoadd(K key, GeoValue<V>[] values, GeoAddArgs geoArgs) {
        notNullKey(key);
        LettuceAssert.notNull(values, "Values " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(values, "Values " + MUST_NOT_BE_EMPTY);
        LettuceAssert.noNullElements(values, "Values " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        if (geoArgs != null) {
            geoArgs.build(args);
        }
        for (GeoValue<V> value : values) {
            args.add(value.getCoordinates().getX().doubleValue());
            args.add(value.getCoordinates().getY().doubleValue());
            args.addValue(value.getValue());
        }
        return createCommand(GEOADD, new IntegerOutput<>(codec), args);
    }
Command<K, V, Double> geodist(K key, V from, V to, GeoArgs.Unit unit) {
notNullKey(key);
LettuceAssert.notNull(from, "From " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(from, "To " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(from).addValue(to);
if (unit != null) {
args.add(unit.name());
}
return createCommand(GEODIST, new DoubleOutput<>(codec), args);
}
    // GEOHASH <key> <member...>: Geohash strings for the given members.
    Command<K, V, List<Value<String>>> geohash(K key, V... members) {
        notNullKey(key);
        LettuceAssert.notNull(members, "Members " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(members, "Members " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValues(members);
        return createCommand(GEOHASH, new StringValueListOutput<>(codec), args);
    }

    // GEOPOS <key> <member...>: coordinates per member as a plain list.
    Command<K, V, List<GeoCoordinates>> geopos(K key, V[] members) {
        notNullKey(key);
        LettuceAssert.notNull(members, "Members " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(members, "Members " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValues(members);
        return createCommand(GEOPOS, new GeoCoordinatesListOutput<>(codec), args);
    }

    // GEOPOS variant wrapping each coordinate in a Value (distinguishes absent
    // members via the Value container output).
    Command<K, V, List<Value<GeoCoordinates>>> geoposValues(K key, V[] members) {
        notNullKey(key);
        LettuceAssert.notNull(members, "Members " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(members, "Members " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValues(members);
        return createCommand(GEOPOS, new GeoCoordinatesValueListOutput<>(codec), args);
    }
    // GEORADIUS (or _RO, via commandType): members within radius, plain set form.
    Command<K, V, Set<V>> georadius(CommandType commandType, K key, double longitude, double latitude, double distance,
            String unit) {
        notNullKey(key);
        LettuceAssert.notNull(unit, "Unit " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(longitude).add(latitude).add(distance).add(unit);
        return createCommand(commandType, new ValueSetOutput<>(codec), args);
    }

    // GEORADIUS with GeoArgs (WITHDIST/WITHHASH/WITHCOORD etc.); the output is
    // configured from the same flags so parsing matches what was requested.
    Command<K, V, List<GeoWithin<V>>> georadius(CommandType commandType, K key, double longitude, double latitude,
            double distance, String unit, GeoArgs geoArgs) {
        notNullKey(key);
        LettuceAssert.notNull(unit, "Unit " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(unit, "Unit " + MUST_NOT_BE_EMPTY);
        LettuceAssert.notNull(geoArgs, "GeoArgs " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(longitude).add(latitude).add(distance).add(unit);
        geoArgs.build(args);
        return createCommand(commandType,
                new GeoWithinListOutput<>(codec, geoArgs.isWithDistance(), geoArgs.isWithHash(), geoArgs.isWithCoordinates()),
                args);
    }

    // GEORADIUS ... STORE/STOREDIST: store results into another key; at least
    // one destination key must be configured.
    Command<K, V, Long> georadius(K key, double longitude, double latitude, double distance, String unit,
            GeoRadiusStoreArgs<K> geoRadiusStoreArgs) {
        notNullKey(key);
        LettuceAssert.notNull(unit, "Unit " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(unit, "Unit " + MUST_NOT_BE_EMPTY);
        LettuceAssert.notNull(geoRadiusStoreArgs, "GeoRadiusStoreArgs " + MUST_NOT_BE_NULL);
        LettuceAssert.isTrue(geoRadiusStoreArgs.getStoreKey() != null || geoRadiusStoreArgs.getStoreDistKey() != null,
                "At least STORE key or STOREDIST key is required");
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(longitude).add(latitude).add(distance).add(unit);
        geoRadiusStoreArgs.build(args);
        return createCommand(GEORADIUS, new IntegerOutput<>(codec), args);
    }

    // GEORADIUSBYMEMBER (or _RO): like GEORADIUS but centered on an existing member.
    Command<K, V, Set<V>> georadiusbymember(CommandType commandType, K key, V member, double distance, String unit) {
        notNullKey(key);
        LettuceAssert.notNull(unit, "Unit " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(unit, "Unit " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(member).add(distance).add(unit);
        return createCommand(commandType, new ValueSetOutput<>(codec), args);
    }

    // GEORADIUSBYMEMBER with GeoArgs; output flags mirror the request flags.
    Command<K, V, List<GeoWithin<V>>> georadiusbymember(CommandType commandType, K key, V member, double distance, String unit,
            GeoArgs geoArgs) {
        notNullKey(key);
        LettuceAssert.notNull(geoArgs, "GeoArgs " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(unit, "Unit " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(unit, "Unit " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(member).add(distance).add(unit);
        geoArgs.build(args);
        return createCommand(commandType,
                new GeoWithinListOutput<>(codec, geoArgs.isWithDistance(), geoArgs.isWithHash(), geoArgs.isWithCoordinates()),
                args);
    }

    // GEORADIUSBYMEMBER ... STORE/STOREDIST: store variant.
    Command<K, V, Long> georadiusbymember(K key, V member, double distance, String unit,
            GeoRadiusStoreArgs<K> geoRadiusStoreArgs) {
        notNullKey(key);
        LettuceAssert.notNull(geoRadiusStoreArgs, "GeoRadiusStoreArgs " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(unit, "Unit " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(unit, "Unit " + MUST_NOT_BE_EMPTY);
        LettuceAssert.isTrue(geoRadiusStoreArgs.getStoreKey() != null || geoRadiusStoreArgs.getStoreDistKey() != null,
                "At least STORE key or STOREDIST key is required");
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(member).add(distance).add(unit);
        geoRadiusStoreArgs.build(args);
        return createCommand(GEORADIUSBYMEMBER, new IntegerOutput<>(codec), args);
    }
    // GEOSEARCH <key> <FROMMEMBER|FROMLONLAT ...> <BYRADIUS|BYBOX ...>: the
    // reference (center) and predicate (shape) each append their own arguments.
    Command<K, V, Set<V>> geosearch(K key, GeoSearch.GeoRef<K> reference, GeoSearch.GeoPredicate predicate) {
        notNullKey(key);
        LettuceAssert.notNull(reference, "GeoRef " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(predicate, "GeoPredicate " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        reference.build(args);
        predicate.build(args);
        return createCommand(GEOSEARCH, new ValueSetOutput<>(codec), args);
    }

    // GEOSEARCH with GeoArgs; output flags mirror the requested WITH* flags.
    // NOTE(review): geoArgs is dereferenced without a null check here, unlike the
    // null-tolerant GeoAddArgs handling above — presumably callers always pass it.
    Command<K, V, List<GeoWithin<V>>> geosearch(K key, GeoSearch.GeoRef<K> reference, GeoSearch.GeoPredicate predicate,
            GeoArgs geoArgs) {
        notNullKey(key);
        LettuceAssert.notNull(reference, "GeoRef " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(predicate, "GeoPredicate " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        reference.build(args);
        predicate.build(args);
        geoArgs.build(args);
        return createCommand(GEOSEARCH,
                new GeoWithinListOutput<>(codec, geoArgs.isWithDistance(), geoArgs.isWithHash(), geoArgs.isWithCoordinates()),
                args);
    }

    // GEOSEARCHSTORE <dst> <src> ... [STOREDIST]: store matching members (or
    // their distances when storeDist is set) into the destination key.
    Command<K, V, Long> geosearchstore(K destination, K key, GeoSearch.GeoRef<K> reference, GeoSearch.GeoPredicate predicate,
            GeoArgs geoArgs, boolean storeDist) {
        notNullKey(key);
        LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(key, "Key " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(reference, "GeoRef " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(predicate, "GeoPredicate " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(destination).addKey(key);
        reference.build(args);
        predicate.build(args);
        geoArgs.build(args);
        if (storeDist) {
            args.add("STOREDIST");
        }
        return createCommand(GEOSEARCHSTORE, new IntegerOutput<>(codec), args);
    }
    // GET <key>: value of a string key.
    Command<K, V, V> get(K key) {
        notNullKey(key);
        return createCommand(GET, new ValueOutput<>(codec), key);
    }

    // GETBIT <key> <offset>: bit value at the given offset.
    Command<K, V, Long> getbit(K key, long offset) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(offset);
        return createCommand(GETBIT, new IntegerOutput<>(codec), args);
    }

    // GETDEL <key>: get the value and delete the key atomically.
    Command<K, V, V> getdel(K key) {
        notNullKey(key);
        return createCommand(GETDEL, new ValueOutput<>(codec), key);
    }

    // GETEX <key> [expiry args]: get the value and update its expiry.
    Command<K, V, V> getex(K key, GetExArgs getExArgs) {
        notNullKey(key);
        LettuceAssert.notNull(getExArgs, "GetExArgs " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        getExArgs.build(args);
        return createCommand(GETEX, new ValueOutput<>(codec), args);
    }

    // GETRANGE <key> <start> <end>: substring of the value (inclusive offsets).
    Command<K, V, V> getrange(K key, long start, long end) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(start).add(end);
        return createCommand(GETRANGE, new ValueOutput<>(codec), args);
    }

    // GETSET <key> <value>: set a new value, returning the old one.
    Command<K, V, V> getset(K key, V value) {
        notNullKey(key);
        return createCommand(GETSET, new ValueOutput<>(codec), key, value);
    }
    // HDEL <key> <field...>: delete hash fields; returns the number removed.
    Command<K, V, Long> hdel(K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKeys(fields);
        return createCommand(HDEL, new IntegerOutput<>(codec), args);
    }

    // HELLO <protover> [AUTH user pass] [SETNAME name]: protocol handshake.
    // Uses ASCII codec since handshake arguments are plain text; AUTH is only
    // sent when both user and password are present.
    Command<String, String, Map<String, Object>> hello(int protocolVersion, String user, char[] password, String name) {
        CommandArgs<String, String> args = new CommandArgs<>(StringCodec.ASCII).add(protocolVersion);
        if (user != null && password != null) {
            args.add(AUTH).add(user).add(password);
        }
        if (name != null) {
            args.add(SETNAME).add(name);
        }
        return new Command<>(HELLO, new GenericMapOutput<>(StringCodec.ASCII), args);
    }

    // HEXISTS <key> <field>: whether a hash field exists.
    Command<K, V, Boolean> hexists(K key, K field) {
        notNullKey(key);
        LettuceAssert.notNull(field, "Field " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(field);
        return createCommand(HEXISTS, new BooleanOutput<>(codec), args);
    }

    // HGET <key> <field>: value of a single hash field.
    Command<K, V, V> hget(K key, K field) {
        notNullKey(key);
        LettuceAssert.notNull(field, "Field " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(field);
        return createCommand(HGET, new ValueOutput<>(codec), args);
    }

    // HGETALL <key>: all fields/values as a Map.
    Command<K, V, Map<K, V>> hgetall(K key) {
        notNullKey(key);
        return createCommand(HGETALL, new MapOutput<>(codec), key);
    }

    // HGETALL variant producing an ordered List of KeyValue pairs.
    Command<K, V, List<KeyValue<K, V>>> hgetallKeyValue(K key) {
        notNullKey(key);
        return createCommand(HGETALL, new KeyValueListOutput<>(codec), key);
    }

    // HGETALL streaming variant: pairs are pushed to the channel; the Long
    // result is produced by the streaming output.
    Command<K, V, Long> hgetall(KeyValueStreamingChannel<K, V> channel, K key) {
        notNullKey(key);
        notNull(channel);
        return createCommand(HGETALL, new KeyValueStreamingOutput<>(codec, channel), key);
    }
    // HINCRBY <key> <field> <amount>: integer increment of a hash field.
    Command<K, V, Long> hincrby(K key, K field, long amount) {
        notNullKey(key);
        LettuceAssert.notNull(field, "Field " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(field).add(amount);
        return createCommand(HINCRBY, new IntegerOutput<>(codec), args);
    }

    // HINCRBYFLOAT <key> <field> <amount>: floating-point increment.
    Command<K, V, Double> hincrbyfloat(K key, K field, double amount) {
        notNullKey(key);
        LettuceAssert.notNull(field, "Field " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(field).add(amount);
        return createCommand(HINCRBYFLOAT, new DoubleOutput<>(codec), args);
    }

    // HKEYS <key>: all field names of a hash.
    Command<K, V, List<K>> hkeys(K key) {
        notNullKey(key);
        return createCommand(HKEYS, new KeyListOutput<>(codec), key);
    }

    // HKEYS streaming variant: field names are pushed to the channel.
    Command<K, V, Long> hkeys(KeyStreamingChannel<K> channel, K key) {
        notNullKey(key);
        notNull(channel);
        return createCommand(HKEYS, new KeyStreamingOutput<>(codec, channel), key);
    }

    // HLEN <key>: number of fields in a hash.
    Command<K, V, Long> hlen(K key) {
        notNullKey(key);
        return createCommand(HLEN, new IntegerOutput<>(codec), key);
    }

    // HMGET <key> <field...>: values for multiple fields, in request order.
    Command<K, V, List<V>> hmget(K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKeys(fields);
        return createCommand(HMGET, new ValueListOutput<>(codec), args);
    }

    // HMGET streaming variant: values only.
    Command<K, V, Long> hmget(ValueStreamingChannel<V> channel, K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKeys(fields);
        return createCommand(HMGET, new ValueStreamingOutput<>(codec, channel), args);
    }

    // HMGET streaming variant: field/value pairs (field names supplied to the
    // output since the HMGET reply contains values only).
    Command<K, V, Long> hmget(KeyValueStreamingChannel<K, V> channel, K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKeys(fields);
        return createCommand(HMGET, new KeyValueStreamingOutput<>(codec, channel, Arrays.asList(fields)), args);
    }

    // HMGET variant returning KeyValue pairs.
    Command<K, V, List<KeyValue<K, V>>> hmgetKeyValue(K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKeys(fields);
        return createCommand(HMGET, new KeyValueListOutput<>(codec, Arrays.asList(fields)), args);
    }

    // HMSET <key> <field value ...>: set multiple fields from a map.
    Command<K, V, String> hmset(K key, Map<K, V> map) {
        notNullKey(key);
        LettuceAssert.notNull(map, "Map " + MUST_NOT_BE_NULL);
        LettuceAssert.isTrue(!map.isEmpty(), "Map " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(map);
        return createCommand(HMSET, new StatusOutput<>(codec), args);
    }
    // HRANDFIELD <key>: a single random field name.
    Command<K, V, K> hrandfield(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        return createCommand(HRANDFIELD, new KeyOutput<>(codec), args);
    }

    // HRANDFIELD <key> <count>: multiple random field names.
    Command<K, V, List<K>> hrandfield(K key, long count) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(count);
        return createCommand(HRANDFIELD, new KeyListOutput<>(codec), args);
    }

    // HRANDFIELD <key> 1 WITHVALUES: single random field with its value (the
    // explicit count of 1 is required when WITHVALUES is used).
    Command<K, V, KeyValue<K, V>> hrandfieldWithvalues(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(1).add(WITHVALUES);
        return createCommand(HRANDFIELD, new KeyValueOutput<>(codec), args);
    }

    // HRANDFIELD <key> <count> WITHVALUES: multiple random fields with values.
    Command<K, V, List<KeyValue<K, V>>> hrandfieldWithvalues(K key, long count) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(count).add(WITHVALUES);
        return createCommand(HRANDFIELD, new KeyValueListOutput<>(codec), args);
    }
    // HSCAN convenience overloads: all delegate to the full (key, cursor, args)
    // forms below; INITIAL cursor / null args fill the missing parameters.
    Command<K, V, MapScanCursor<K, V>> hscan(K key) {
        notNullKey(key);
        return hscan(key, ScanCursor.INITIAL, null);
    }

    Command<K, V, KeyScanCursor<K>> hscanNovalues(K key) {
        notNullKey(key);
        return hscanNovalues(key, ScanCursor.INITIAL, null);
    }

    Command<K, V, MapScanCursor<K, V>> hscan(K key, ScanCursor scanCursor) {
        notNullKey(key);
        return hscan(key, scanCursor, null);
    }

    Command<K, V, KeyScanCursor<K>> hscanNovalues(K key, ScanCursor scanCursor) {
        notNullKey(key);
        return hscanNovalues(key, scanCursor, null);
    }

    Command<K, V, MapScanCursor<K, V>> hscan(K key, ScanArgs scanArgs) {
        notNullKey(key);
        return hscan(key, ScanCursor.INITIAL, scanArgs);
    }

    Command<K, V, KeyScanCursor<K>> hscanNovalues(K key, ScanArgs scanArgs) {
        notNullKey(key);
        return hscanNovalues(key, ScanCursor.INITIAL, scanArgs);
    }

    // HSCAN <key> <cursor> [MATCH ...] [COUNT ...]: incremental hash iteration;
    // cursor/args are serialized by the shared scanArgs(...) helper.
    Command<K, V, MapScanCursor<K, V>> hscan(K key, ScanCursor scanCursor, ScanArgs scanArgs) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.addKey(key);
        scanArgs(scanCursor, scanArgs, args);
        MapScanOutput<K, V> output = new MapScanOutput<>(codec);
        return createCommand(HSCAN, output, args);
    }

    // HSCAN ... NOVALUES: iterate field names only (NOVALUES modifier appended
    // after cursor/match/count).
    Command<K, V, KeyScanCursor<K>> hscanNovalues(K key, ScanCursor scanCursor, ScanArgs scanArgs) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.addKey(key);
        scanArgs(scanCursor, scanArgs, args);
        args.add(NOVALUES);
        KeyScanOutput<K, V> output = new KeyScanOutput<>(codec);
        return createCommand(HSCAN, output, args);
    }

    // Streaming HSCAN overloads: same delegation pattern, pushing results to the
    // given channel instead of materializing them.
    Command<K, V, StreamScanCursor> hscanStreaming(KeyValueStreamingChannel<K, V> channel, K key) {
        notNullKey(key);
        notNull(channel);
        return hscanStreaming(channel, key, ScanCursor.INITIAL, null);
    }

    Command<K, V, StreamScanCursor> hscanNoValuesStreaming(KeyStreamingChannel<K> channel, K key) {
        notNullKey(key);
        notNull(channel);
        return hscanNoValuesStreaming(channel, key, ScanCursor.INITIAL, null);
    }

    Command<K, V, StreamScanCursor> hscanStreaming(KeyValueStreamingChannel<K, V> channel, K key, ScanCursor scanCursor) {
        notNullKey(key);
        notNull(channel);
        return hscanStreaming(channel, key, scanCursor, null);
    }

    Command<K, V, StreamScanCursor> hscanNoValuesStreaming(KeyStreamingChannel<K> channel, K key, ScanCursor scanCursor) {
        notNullKey(key);
        notNull(channel);
        return hscanNoValuesStreaming(channel, key, scanCursor, null);
    }

    Command<K, V, StreamScanCursor> hscanStreaming(KeyValueStreamingChannel<K, V> channel, K key, ScanArgs scanArgs) {
        notNullKey(key);
        notNull(channel);
        return hscanStreaming(channel, key, ScanCursor.INITIAL, scanArgs);
    }

    Command<K, V, StreamScanCursor> hscanNoValuesStreaming(KeyStreamingChannel<K> channel, K key, ScanArgs scanArgs) {
        notNullKey(key);
        notNull(channel);
        return hscanNoValuesStreaming(channel, key, ScanCursor.INITIAL, scanArgs);
    }

    // Full streaming HSCAN: field/value pairs pushed to the channel.
    Command<K, V, StreamScanCursor> hscanStreaming(KeyValueStreamingChannel<K, V> channel, K key, ScanCursor scanCursor,
            ScanArgs scanArgs) {
        notNullKey(key);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.addKey(key);
        scanArgs(scanCursor, scanArgs, args);
        KeyValueScanStreamingOutput<K, V> output = new KeyValueScanStreamingOutput<>(codec, channel);
        return createCommand(HSCAN, output, args);
    }

    // Full streaming HSCAN ... NOVALUES: field names pushed to the channel.
    Command<K, V, StreamScanCursor> hscanNoValuesStreaming(KeyStreamingChannel<K> channel, K key, ScanCursor scanCursor,
            ScanArgs scanArgs) {
        notNullKey(key);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.addKey(key);
        scanArgs(scanCursor, scanArgs, args);
        args.add(NOVALUES);
        KeyScanStreamingOutput<K, V> output = new KeyScanStreamingOutput<>(codec, channel);
        return createCommand(HSCAN, output, args);
    }
    // HSET key field value — returns true if the field was newly created.
    Command<K, V, Boolean> hset(K key, K field, V value) {
        notNullKey(key);
        LettuceAssert.notNull(field, "Field " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(field).addValue(value);
        return createCommand(HSET, new BooleanOutput<>(codec), args);
    }

    // HSET key f1 v1 [f2 v2 ...] — returns the number of fields that were added.
    Command<K, V, Long> hset(K key, Map<K, V> map) {
        notNullKey(key);
        LettuceAssert.notNull(map, "Map " + MUST_NOT_BE_NULL);
        LettuceAssert.isTrue(!map.isEmpty(), "Map " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(map);
        return createCommand(HSET, new IntegerOutput<>(codec), args);
    }

    // HSETEX key FIELDS numfields f1 v1 ... — set hash fields without expiration options.
    Command<K, V, Long> hsetex(K key, Map<K, V> map) {
        notNullKey(key);
        LettuceAssert.notNull(map, "Map " + MUST_NOT_BE_NULL);
        LettuceAssert.isTrue(!map.isEmpty(), "Map " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        // FIELDS <count> precedes the field/value pairs on the wire
        args.add(FIELDS).add(map.size()).add(map);
        return createCommand(HSETEX, new IntegerOutput<>(codec), args);
    }

    // HSETEX key [expiration args] FIELDS numfields f1 v1 ... — set hash fields with expiration options.
    Command<K, V, Long> hsetex(K key, HSetExArgs hSetExArgs, Map<K, V> map) {
        notNullKey(key);
        LettuceAssert.notNull(hSetExArgs, "HSetExArgs " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(map, "Map " + MUST_NOT_BE_NULL);
        LettuceAssert.isTrue(!map.isEmpty(), "Map " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        // expiration modifiers (EX/PX/...) are emitted before the FIELDS clause
        hSetExArgs.build(args);
        args.add(FIELDS).add(map.size()).add(map);
        return createCommand(HSETEX, new IntegerOutput<>(codec), args);
    }
    // HGETEX key FIELDS numfields f1 ... — get fields without touching their expiration.
    Command<K, V, List<KeyValue<K, V>>> hgetex(K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        args.add(FIELDS).add(fields.length).addKeys(fields);
        // the field list is passed to the output so results are zipped with their fields
        return createCommand(HGETEX, new KeyValueListOutput<>(codec, Arrays.asList(fields)), args);
    }

    // HGETEX key [expiration args] FIELDS numfields f1 ... — get fields and adjust their expiration.
    Command<K, V, List<KeyValue<K, V>>> hgetex(K key, HGetExArgs hGetExArgs, K... fields) {
        keyAndFieldsProvided(key, fields);
        LettuceAssert.notNull(hGetExArgs, "HGetExArgs " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        hGetExArgs.build(args);
        args.add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HGETEX, new KeyValueListOutput<>(codec, Arrays.asList(fields)), args);
    }

    // Streaming variant of HGETEX — emits key/value pairs to the channel, returns the count.
    Command<K, V, Long> hgetex(KeyValueStreamingChannel<K, V> channel, K key, HGetExArgs hGetExArgs, K... fields) {
        keyAndFieldsProvided(key, fields);
        LettuceAssert.notNull(hGetExArgs, "HGetExArgs " + MUST_NOT_BE_NULL);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        hGetExArgs.build(args);
        args.add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HGETEX, new KeyValueStreamingOutput<>(codec, channel, Arrays.asList(fields)), args);
    }

    // HGETDEL key FIELDS numfields f1 ... — get and delete the given hash fields.
    Command<K, V, List<KeyValue<K, V>>> hgetdel(K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HGETDEL, new KeyValueListOutput<>(codec, Arrays.asList(fields)), args);
    }

    // Streaming variant of HGETDEL — emits key/value pairs to the channel, returns the count.
    Command<K, V, Long> hgetdel(KeyValueStreamingChannel<K, V> channel, K key, K... fields) {
        keyAndFieldsProvided(key, fields);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(FIELDS).add(fields.length).addKeys(fields);
        return createCommand(HGETDEL, new KeyValueStreamingOutput<>(codec, channel, Arrays.asList(fields)), args);
    }
    // HSETNX key field value — set only if the field does not yet exist.
    Command<K, V, Boolean> hsetnx(K key, K field, V value) {
        notNullKey(key);
        LettuceAssert.notNull(field, "Field " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(field).addValue(value);
        return createCommand(HSETNX, new BooleanOutput<>(codec), args);
    }

    // HSTRLEN key field — length of the value stored at the hash field.
    Command<K, V, Long> hstrlen(K key, K field) {
        notNullKey(key);
        LettuceAssert.notNull(field, "Field " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(field);
        return createCommand(HSTRLEN, new IntegerOutput<>(codec), args);
    }

    // HVALS key — all values of the hash.
    Command<K, V, List<V>> hvals(K key) {
        notNullKey(key);
        return createCommand(HVALS, new ValueListOutput<>(codec), key);
    }

    // Streaming variant of HVALS — emits values to the channel, returns the count.
    Command<K, V, Long> hvals(ValueStreamingChannel<V> channel, K key) {
        notNullKey(key);
        notNull(channel);
        return createCommand(HVALS, new ValueStreamingOutput<>(codec, channel), key);
    }
    // INCR key — increment the integer value by one.
    Command<K, V, Long> incr(K key) {
        notNullKey(key);
        return createCommand(INCR, new IntegerOutput<>(codec), key);
    }

    // INCRBY key amount — increment the integer value by the given amount.
    Command<K, V, Long> incrby(K key, long amount) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(amount);
        return createCommand(INCRBY, new IntegerOutput<>(codec), args);
    }

    // INCRBYFLOAT key amount — increment the float value by the given amount.
    Command<K, V, Double> incrbyfloat(K key, double amount) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(amount);
        return createCommand(INCRBYFLOAT, new DoubleOutput<>(codec), args);
    }

    // INFO — full server information and statistics.
    Command<K, V, String> info() {
        return createCommand(CommandType.INFO, new StatusOutput<>(codec));
    }

    // INFO section — server information for one section only.
    Command<K, V, String> info(String section) {
        LettuceAssert.notNull(section, "Section " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(section);
        return createCommand(CommandType.INFO, new StatusOutput<>(codec), args);
    }

    // KEYS pattern — all keys matching the glob-style pattern.
    Command<K, V, List<K>> keys(String pattern) {
        LettuceAssert.notNull(pattern, "Pattern " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(pattern);
        return createCommand(KEYS, new KeyListOutput<>(codec), args);
    }
    /**
     * Find all keys matching the given pattern (legacy overload that encodes the pattern with the key codec).
     *
     * @param pattern the pattern, encoded via the key codec.
     * @return {@code List<K>} array-reply list of keys matching {@code pattern}.
     * @deprecated Use {@link #keys(String)} instead. This legacy overload will be removed in a later version.
     */
    @Deprecated
    Command<K, V, List<K>> keysLegacy(K pattern) {
        LettuceAssert.notNull(pattern, "Pattern " + MUST_NOT_BE_NULL);
        return createCommand(KEYS, new KeyListOutput<>(codec), pattern);
    }

    // Streaming variant of KEYS — emits matching keys to the channel, returns the count.
    Command<K, V, Long> keys(KeyStreamingChannel<K> channel, String pattern) {
        LettuceAssert.notNull(pattern, "Pattern " + MUST_NOT_BE_NULL);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(pattern);
        return createCommand(KEYS, new KeyStreamingOutput<>(codec, channel), args);
    }

    /**
     * Find all keys matching the given pattern (legacy streaming overload that encodes the pattern with the key codec).
     *
     * @param channel the channel receiving matching keys.
     * @param pattern the pattern, encoded via the key codec.
     * @return {@code Long} count of keys matching {@code pattern}.
     * @deprecated Use {@link #keys(KeyStreamingChannel, String)} instead. This legacy overload will be removed in a later
     *             version.
     */
    @Deprecated
    Command<K, V, Long> keysLegacy(KeyStreamingChannel<K> channel, K pattern) {
        LettuceAssert.notNull(pattern, "Pattern " + MUST_NOT_BE_NULL);
        notNull(channel);
        return createCommand(KEYS, new KeyStreamingOutput<>(codec, channel), pattern);
    }
    // LASTSAVE — timestamp of the last successful save to disk.
    Command<K, V, Date> lastsave() {
        return createCommand(LASTSAVE, new DateOutput<>(codec));
    }

    // LINDEX key index — element at the given list index (may be negative).
    Command<K, V, V> lindex(K key, long index) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(index);
        return createCommand(LINDEX, new ValueOutput<>(codec), args);
    }

    // LINSERT key BEFORE|AFTER pivot value — insert relative to the pivot element.
    Command<K, V, Long> linsert(K key, boolean before, V pivot, V value) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.addKey(key).add(before ? BEFORE : AFTER).addValue(pivot).addValue(value);
        return createCommand(LINSERT, new IntegerOutput<>(codec), args);
    }

    // LLEN key — length of the list.
    Command<K, V, Long> llen(K key) {
        notNullKey(key);
        return createCommand(LLEN, new IntegerOutput<>(codec), key);
    }

    // LMOVE source destination LEFT|RIGHT LEFT|RIGHT — atomically move an element between lists.
    Command<K, V, V> lmove(K source, K destination, LMoveArgs lMoveArgs) {
        LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(lMoveArgs, "LMoveArgs " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.addKey(source).addKey(destination);
        lMoveArgs.build(args);
        return createCommand(LMOVE, new ValueOutput<>(codec), args);
    }

    // LMPOP numkeys key [key ...] LEFT|RIGHT [COUNT n] — pop from the first non-empty list.
    Command<K, V, KeyValue<K, List<V>>> lmpop(LMPopArgs lmPopArgs, K... keys) {
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(lmPopArgs, "LMPopArgs " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(keys.length).addKeys(keys);
        lmPopArgs.build(args);
        return createCommand(LMPOP, new KeyValueValueListOutput<>(codec), args);
    }

    // LPOP key — remove and return the first element.
    Command<K, V, V> lpop(K key) {
        notNullKey(key);
        return createCommand(LPOP, new ValueOutput<>(codec), key);
    }

    // LPOP key count — remove and return up to count elements from the head.
    Command<K, V, List<V>> lpop(K key, long count) {
        notNullKey(key);
        return createCommand(LPOP, new ValueListOutput<>(codec), new CommandArgs<>(codec).addKey(key).add(count));
    }
    // LPOS key value [RANK ...] [MAXLEN ...] — index of the first matching element.
    Command<K, V, Long> lpos(K key, V value, LPosArgs lposArgs) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(value);
        // lposArgs is optional; null means no RANK/MAXLEN modifiers
        if (lposArgs != null) {
            lposArgs.build(args);
        }
        return createCommand(LPOS, new IntegerOutput<>(codec), args);
    }

    // LPOS key value COUNT n [...] — indexes of up to count matching elements.
    Command<K, V, List<Long>> lpos(K key, V value, long count, LPosArgs lposArgs) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(value).add(COUNT).add(count);
        if (lposArgs != null) {
            lposArgs.build(args);
        }
        return createCommand(LPOS, new IntegerListOutput<>(codec), args);
    }

    // LPUSH key value [value ...] — prepend values, returns new list length.
    Command<K, V, Long> lpush(K key, V... values) {
        notNullKey(key);
        notEmptyValues(values);
        return createCommand(LPUSH, new IntegerOutput<>(codec), key, values);
    }

    // LPUSHX key value [value ...] — prepend only if the list already exists.
    Command<K, V, Long> lpushx(K key, V... values) {
        notNullKey(key);
        notEmptyValues(values);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValues(values);
        return createCommand(LPUSHX, new IntegerOutput<>(codec), args);
    }

    // LRANGE key start stop — elements in the given inclusive range.
    Command<K, V, List<V>> lrange(K key, long start, long stop) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(start).add(stop);
        return createCommand(LRANGE, new ValueListOutput<>(codec), args);
    }

    // Streaming variant of LRANGE — emits elements to the channel, returns the count.
    Command<K, V, Long> lrange(ValueStreamingChannel<V> channel, K key, long start, long stop) {
        notNullKey(key);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(start).add(stop);
        return createCommand(LRANGE, new ValueStreamingOutput<>(codec, channel), args);
    }

    // LREM key count value — remove occurrences of value (sign of count selects direction).
    Command<K, V, Long> lrem(K key, long count, V value) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(count).addValue(value);
        return createCommand(LREM, new IntegerOutput<>(codec), args);
    }

    // LSET key index value — overwrite the element at the given index.
    Command<K, V, String> lset(K key, long index, V value) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(index).addValue(value);
        return createCommand(LSET, new StatusOutput<>(codec), args);
    }

    // LTRIM key start stop — trim the list to the given inclusive range.
    Command<K, V, String> ltrim(K key, long start, long stop) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(start).add(stop);
        return createCommand(LTRIM, new StatusOutput<>(codec), args);
    }
    // MEMORY USAGE key — bytes used to store the key's value.
    Command<K, V, Long> memoryUsage(K key) {
        return createCommand(MEMORY, new IntegerOutput<>(codec), new CommandArgs<>(codec).add(USAGE).addKey(key));
    }

    // MGET key [key ...] — values for multiple keys (missing keys yield null entries).
    Command<K, V, List<V>> mget(K... keys) {
        notEmpty(keys);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(MGET, new ValueListOutput<>(codec), args);
    }

    // MGET over an Iterable of keys.
    Command<K, V, List<V>> mget(Iterable<K> keys) {
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(MGET, new ValueListOutput<>(codec), args);
    }

    // Streaming MGET — emits values only to the channel, returns the count.
    Command<K, V, Long> mget(ValueStreamingChannel<V> channel, K... keys) {
        notEmpty(keys);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(MGET, new ValueStreamingOutput<>(codec, channel), args);
    }

    // Streaming MGET — emits key/value pairs to the channel, returns the count.
    Command<K, V, Long> mget(KeyValueStreamingChannel<K, V> channel, K... keys) {
        notEmpty(keys);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        // the key list is handed to the output so each value can be paired with its key
        return createCommand(MGET, new KeyValueStreamingOutput<>(codec, channel, Arrays.asList(keys)), args);
    }

    // Streaming MGET over an Iterable — values only.
    Command<K, V, Long> mget(ValueStreamingChannel<V> channel, Iterable<K> keys) {
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(MGET, new ValueStreamingOutput<>(codec, channel), args);
    }

    // Streaming MGET over an Iterable — key/value pairs.
    Command<K, V, Long> mget(KeyValueStreamingChannel<K, V> channel, Iterable<K> keys) {
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        notNull(channel);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(MGET, new KeyValueStreamingOutput<>(codec, channel, keys), args);
    }

    // MGET returning KeyValue pairs instead of bare values.
    Command<K, V, List<KeyValue<K, V>>> mgetKeyValue(K... keys) {
        notEmpty(keys);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(MGET, new KeyValueListOutput<>(codec, Arrays.asList(keys)), args);
    }

    // MGET over an Iterable returning KeyValue pairs.
    Command<K, V, List<KeyValue<K, V>>> mgetKeyValue(Iterable<K> keys) {
        LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(MGET, new KeyValueListOutput<>(codec, keys), args);
    }
    // MIGRATE host port key db timeout — transfer a single key to another Redis instance.
    Command<K, V, String> migrate(String host, int port, K key, int db, long timeout) {
        LettuceAssert.notNull(host, "Host " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(host, "Host " + MUST_NOT_BE_EMPTY);
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.add(host).add(port).addKey(key).add(db).add(timeout);
        return createCommand(MIGRATE, new StatusOutput<>(codec), args);
    }

    // MIGRATE with MigrateArgs — supports COPY/REPLACE/AUTH and multi-key KEYS form.
    Command<K, V, String> migrate(String host, int port, int db, long timeout, MigrateArgs<K> migrateArgs) {
        LettuceAssert.notNull(host, "Host " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(host, "Host " + MUST_NOT_BE_EMPTY);
        LettuceAssert.notNull(migrateArgs, "migrateArgs " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        args.add(host).add(port);
        // single key goes in the key slot; multi-key form uses an empty key plus KEYS in migrateArgs
        if (migrateArgs.keys.size() == 1) {
            args.addKey(migrateArgs.keys.get(0));
        } else {
            args.add("");
        }
        args.add(db).add(timeout);
        migrateArgs.build(args);
        return createCommand(MIGRATE, new StatusOutput<>(codec), args);
    }

    // MOVE key db — move the key to another logical database.
    Command<K, V, Boolean> move(K key, int db) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(db);
        return createCommand(MOVE, new BooleanOutput<>(codec), args);
    }
    // MSET k1 v1 [k2 v2 ...] — set multiple keys atomically.
    Command<K, V, String> mset(Map<K, V> map) {
        LettuceAssert.notNull(map, "Map " + MUST_NOT_BE_NULL);
        LettuceAssert.isTrue(!map.isEmpty(), "Map " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(map);
        return createCommand(MSET, new StatusOutput<>(codec), args);
    }

    // MSETNX — set multiple keys only if none of them exist.
    Command<K, V, Boolean> msetnx(Map<K, V> map) {
        LettuceAssert.notNull(map, "Map " + MUST_NOT_BE_NULL);
        LettuceAssert.isTrue(!map.isEmpty(), "Map " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(map);
        return createCommand(MSETNX, new BooleanOutput<>(codec), args);
    }

    // MSETEX numkeys k1 v1 ... [expiration args] — multi-set with optional expiration modifiers.
    Command<K, V, Boolean> msetex(Map<K, V> map, MSetExArgs setArgs) {
        LettuceAssert.notNull(map, "Map " + MUST_NOT_BE_NULL);
        LettuceAssert.isTrue(!map.isEmpty(), "Map " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(map.size()).add(map);
        // setArgs is optional; null means no expiration modifiers
        if (setArgs != null) {
            setArgs.build(args);
        }
        return createCommand(MSETEX, new BooleanOutput<>(codec), args);
    }

    // MULTI — start a transaction block.
    Command<K, V, String> multi() {
        return createCommand(MULTI, new StatusOutput<>(codec));
    }
    // OBJECT ENCODING key — internal encoding of the value.
    Command<K, V, String> objectEncoding(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(ENCODING).addKey(key);
        return createCommand(OBJECT, new StatusOutput<>(codec), args);
    }

    // OBJECT FREQ key — access frequency counter (LFU eviction policy only).
    Command<K, V, Long> objectFreq(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(FREQ).addKey(key);
        return createCommand(OBJECT, new IntegerOutput<>(codec), args);
    }

    // OBJECT IDLETIME key — seconds since the key was last accessed.
    Command<K, V, Long> objectIdletime(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(IDLETIME).addKey(key);
        return createCommand(OBJECT, new IntegerOutput<>(codec), args);
    }

    // OBJECT REFCOUNT key — reference count of the value.
    Command<K, V, Long> objectRefcount(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(REFCOUNT).addKey(key);
        return createCommand(OBJECT, new IntegerOutput<>(codec), args);
    }

    // PERSIST key — remove the key's expiration.
    Command<K, V, Boolean> persist(K key) {
        notNullKey(key);
        return createCommand(PERSIST, new BooleanOutput<>(codec), key);
    }
Command<K, V, List<Long>> hpersist(K key, K... fields) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(FIELDS).add(fields.length).addKeys(fields);
return createCommand(HPERSIST, new IntegerListOutput<>(codec), args);
}
    // PEXPIRE key milliseconds [NX|XX|GT|LT] — set a relative millisecond expiration.
    Command<K, V, Boolean> pexpire(K key, long milliseconds, ExpireArgs expireArgs) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(milliseconds);
        // expireArgs is optional; null means no condition modifier
        if (expireArgs != null) {
            expireArgs.build(args);
        }
        return createCommand(PEXPIRE, new BooleanOutput<>(codec), args);
    }

    // PEXPIREAT key timestamp [NX|XX|GT|LT] — set an absolute millisecond-precision expiration.
    Command<K, V, Boolean> pexpireat(K key, long timestamp, ExpireArgs expireArgs) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(timestamp);
        if (expireArgs != null) {
            expireArgs.build(args);
        }
        return createCommand(PEXPIREAT, new BooleanOutput<>(codec), args);
    }

    // PEXPIRETIME key — absolute expiration timestamp in milliseconds.
    Command<K, V, Long> pexpiretime(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        return createCommand(PEXPIRETIME, new IntegerOutput<>(codec), args);
    }
    // PFADD key value [value ...] — add elements to a HyperLogLog (explicit first value).
    Command<K, V, Long> pfadd(K key, V value, V... moreValues) {
        notNullKey(key);
        LettuceAssert.notNull(value, "Value " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(moreValues, "MoreValues " + MUST_NOT_BE_NULL);
        LettuceAssert.noNullElements(moreValues, "MoreValues " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(value).addValues(moreValues);
        return createCommand(PFADD, new IntegerOutput<>(codec), args);
    }

    // PFADD key value [value ...] — varargs-only overload.
    Command<K, V, Long> pfadd(K key, V... values) {
        notNullKey(key);
        notEmptyValues(values);
        LettuceAssert.noNullElements(values, "Values " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValues(values);
        return createCommand(PFADD, new IntegerOutput<>(codec), args);
    }

    // PFCOUNT key [key ...] — approximate cardinality (explicit first key).
    Command<K, V, Long> pfcount(K key, K... moreKeys) {
        notNullKey(key);
        LettuceAssert.notNull(moreKeys, "MoreKeys " + MUST_NOT_BE_NULL);
        LettuceAssert.noNullElements(moreKeys, "MoreKeys " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKeys(moreKeys);
        return createCommand(PFCOUNT, new IntegerOutput<>(codec), args);
    }

    // PFCOUNT key [key ...] — varargs-only overload.
    Command<K, V, Long> pfcount(K... keys) {
        notEmpty(keys);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
        return createCommand(PFCOUNT, new IntegerOutput<>(codec), args);
    }

    // PFMERGE destkey sourcekey [sourcekey ...] — merge HyperLogLogs into destkey.
    @SuppressWarnings("unchecked")
    Command<K, V, String> pfmerge(K destkey, K sourcekey, K... moreSourceKeys) {
        LettuceAssert.notNull(destkey, "Destkey " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(sourcekey, "Sourcekey " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(moreSourceKeys, "MoreSourceKeys " + MUST_NOT_BE_NULL);
        LettuceAssert.noNullElements(moreSourceKeys, "MoreSourceKeys " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(destkey).addKey(sourcekey).addKeys(moreSourceKeys);
        return createCommand(PFMERGE, new StatusOutput<>(codec), args);
    }

    // PFMERGE destkey sourcekey [sourcekey ...] — varargs source-keys overload.
    @SuppressWarnings("unchecked")
    Command<K, V, String> pfmerge(K destkey, K... sourcekeys) {
        LettuceAssert.notNull(destkey, "Destkey " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(sourcekeys, "Sourcekeys " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(sourcekeys, "Sourcekeys " + MUST_NOT_BE_EMPTY);
        LettuceAssert.noNullElements(sourcekeys, "Sourcekeys " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(destkey).addKeys(sourcekeys);
        return createCommand(PFMERGE, new StatusOutput<>(codec), args);
    }
    // PING — liveness check.
    Command<K, V, String> ping() {
        return createCommand(PING, new StatusOutput<>(codec));
    }

    // PSETEX key milliseconds value — set value with a millisecond TTL.
    Command<K, V, String> psetex(K key, long milliseconds, V value) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(milliseconds).addValue(value);
        return createCommand(PSETEX, new StatusOutput<>(codec), args);
    }

    // PTTL key — remaining TTL in milliseconds.
    Command<K, V, Long> pttl(K key) {
        notNullKey(key);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        return createCommand(PTTL, new IntegerOutput<>(codec), args);
    }

    // PUBLISH channel message — returns the number of receiving subscribers.
    Command<K, V, Long> publish(K channel, V message) {
        LettuceAssert.notNull(channel, "Channel " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(channel).addValue(message);
        return createCommand(PUBLISH, new IntegerOutput<>(codec), args);
    }

    // PUBSUB CHANNELS — all active channels.
    Command<K, V, List<K>> pubsubChannels() {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(CHANNELS);
        return createCommand(PUBSUB, new KeyListOutput<>(codec), args);
    }

    // PUBSUB CHANNELS pattern — active channels matching the pattern.
    Command<K, V, List<K>> pubsubChannels(K pattern) {
        LettuceAssert.notNull(pattern, "Pattern " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(CHANNELS).addKey(pattern);
        return createCommand(PUBSUB, new KeyListOutput<>(codec), args);
    }

    // PUBSUB NUMPAT — number of active pattern subscriptions.
    Command<K, V, Long> pubsubNumpat() {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(NUMPAT);
        return createCommand(PUBSUB, new IntegerOutput<>(codec), args);
    }

    // PUBSUB NUMSUB channel [channel ...] — subscriber counts per channel.
    // Raw casts bridge MapOutput<K, Long> to the K, V-typed createCommand signature.
    @SuppressWarnings({ "unchecked", "rawtypes" })
    Command<K, V, Map<K, Long>> pubsubNumsub(K... channels) {
        LettuceAssert.notNull(channels, "Channels " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(channels, "Channels " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(NUMSUB).addKeys(channels);
        return createCommand(PUBSUB, (MapOutput) new MapOutput<K, Long>((RedisCodec) codec), args);
    }

    // PUBSUB SHARDCHANNELS — all active shard channels.
    Command<K, V, List<K>> pubsubShardChannels() {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(SHARDCHANNELS);
        return createCommand(PUBSUB, new KeyListOutput<>(codec), args);
    }

    // PUBSUB SHARDCHANNELS pattern — active shard channels matching the pattern.
    Command<K, V, List<K>> pubsubShardChannels(K pattern) {
        LettuceAssert.notNull(pattern, "Pattern " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(SHARDCHANNELS).addKey(pattern);
        return createCommand(PUBSUB, new KeyListOutput<>(codec), args);
    }
Command<K, V, Map<K, Long>> pubsubShardNumsub(K... shardChannels) {
LettuceAssert.notNull(shardChannels, "ShardChannels " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(shardChannels, "ShardChannels " + MUST_NOT_BE_EMPTY);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(SHARDNUMSUB).addKeys(shardChannels);
return createCommand(PUBSUB, (MapOutput) new MapOutput<K, Long>((RedisCodec) codec), args);
}
    // QUIT — close the connection.
    Command<K, V, String> quit() {
        return createCommand(QUIT, new StatusOutput<>(codec));
    }

    // RANDOMKEY — a random key from the current database.
    Command<K, V, K> randomkey() {
        return createCommand(RANDOMKEY, new KeyOutput<>(codec));
    }

    // READONLY — enable reads from replica nodes (cluster).
    Command<K, V, String> readOnly() {
        return createCommand(READONLY, new StatusOutput<>(codec));
    }

    // READWRITE — disable read-from-replica mode (cluster).
    Command<K, V, String> readWrite() {
        return createCommand(READWRITE, new StatusOutput<>(codec));
    }

    // RENAME key newKey — rename, overwriting any existing newKey.
    Command<K, V, String> rename(K key, K newKey) {
        notNullKey(key);
        LettuceAssert.notNull(newKey, "NewKey " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(newKey);
        return createCommand(RENAME, new StatusOutput<>(codec), args);
    }

    // RENAMENX key newKey — rename only if newKey does not exist.
    Command<K, V, Boolean> renamenx(K key, K newKey) {
        notNullKey(key);
        LettuceAssert.notNull(newKey, "NewKey " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(newKey);
        return createCommand(RENAMENX, new BooleanOutput<>(codec), args);
    }

    // REPLICAOF host port — make this node a replica of the given master.
    Command<K, V, String> replicaof(String host, int port) {
        LettuceAssert.notNull(host, "Host " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(host, "Host " + MUST_NOT_BE_EMPTY);
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(host).add(port);
        return createCommand(REPLICAOF, new StatusOutput<>(codec), args);
    }

    // REPLICAOF NO ONE — promote this node to master.
    Command<K, V, String> replicaofNoOne() {
        CommandArgs<K, V> args = new CommandArgs<>(codec).add(NO).add(ONE);
        return createCommand(REPLICAOF, new StatusOutput<>(codec), args);
    }
    // RESTORE key ttl serialized-value [...] — recreate a key from a DUMP payload.
    Command<K, V, String> restore(K key, byte[] value, RestoreArgs restoreArgs) {
        notNullKey(key);
        LettuceAssert.notNull(value, "Value " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(restoreArgs, "RestoreArgs " + MUST_NOT_BE_NULL);
        // the TTL comes from restoreArgs and precedes the serialized payload
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(restoreArgs.ttl).add(value);
        restoreArgs.build(args);
        return createCommand(RESTORE, new StatusOutput<>(codec), args);
    }

    // ROLE — replication role of this instance.
    Command<K, V, List<Object>> role() {
        return createCommand(ROLE, new ArrayOutput<>(codec));
    }

    // RPOP key — remove and return the last element.
    Command<K, V, V> rpop(K key) {
        notNullKey(key);
        return createCommand(RPOP, new ValueOutput<>(codec), key);
    }

    // RPOP key count — remove and return up to count elements from the tail.
    Command<K, V, List<V>> rpop(K key, long count) {
        notNullKey(key);
        return createCommand(RPOP, new ValueListOutput<>(codec), new CommandArgs<>(codec).addKey(key).add(count));
    }

    // RPOPLPUSH source destination — atomically rotate an element between lists.
    Command<K, V, V> rpoplpush(K source, K destination) {
        LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
        LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(source).addKey(destination);
        return createCommand(RPOPLPUSH, new ValueOutput<>(codec), args);
    }

    // RPUSH key value [value ...] — append values, returns new list length.
    Command<K, V, Long> rpush(K key, V... values) {
        notNullKey(key);
        notEmptyValues(values);
        return createCommand(RPUSH, new IntegerOutput<>(codec), key, values);
    }

    // RPUSHX key value [value ...] — append only if the list already exists.
    Command<K, V, Long> rpushx(K key, V... values) {
        notNullKey(key);
        notEmptyValues(values);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValues(values);
        return createCommand(RPUSHX, new IntegerOutput<>(codec), args);
    }

    // SADD key member [member ...] — add members to a set.
    Command<K, V, Long> sadd(K key, V... members) {
        notNullKey(key);
        LettuceAssert.notNull(members, "Members " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(members, "Members " + MUST_NOT_BE_EMPTY);
        return createCommand(SADD, new IntegerOutput<>(codec), key, members);
    }

    // SAVE — synchronous save of the dataset to disk.
    Command<K, V, String> save() {
        return createCommand(SAVE, new StatusOutput<>(codec));
    }
    // SCAN from the initial cursor without modifiers.
    Command<K, V, KeyScanCursor<K>> scan() {
        return scan(ScanCursor.INITIAL, null);
    }

    // SCAN continuing from a previous cursor.
    Command<K, V, KeyScanCursor<K>> scan(ScanCursor scanCursor) {
        return scan(scanCursor, null);
    }

    // SCAN from the initial cursor with MATCH/COUNT modifiers.
    Command<K, V, KeyScanCursor<K>> scan(ScanArgs scanArgs) {
        return scan(ScanCursor.INITIAL, scanArgs);
    }

    // SCAN cursor [MATCH ...] [COUNT ...] — one iteration of the keyspace scan.
    Command<K, V, KeyScanCursor<K>> scan(ScanCursor scanCursor, ScanArgs scanArgs) {
        CommandArgs<K, V> args = new CommandArgs<>(codec);
        scanArgs(scanCursor, scanArgs, args);
        KeyScanOutput<K, V> output = new KeyScanOutput<>(codec);
        return createCommand(SCAN, output, args);
    }

    /**
     * Append the cursor and optional {@link ScanArgs} modifiers to {@code args}.
     * Rejects {@code null} and already-finished cursors; {@code scanArgs} may be {@code null}.
     */
    protected void scanArgs(ScanCursor scanCursor, ScanArgs scanArgs, CommandArgs<K, V> args) {
        LettuceAssert.notNull(scanCursor, "ScanCursor " + MUST_NOT_BE_NULL);
        LettuceAssert.isTrue(!scanCursor.isFinished(), "ScanCursor must not be finished");
        args.add(scanCursor.getCursor());
        if (scanArgs != null) {
            scanArgs.build(args);
        }
    }
/**
 * SCAN streaming variant - emit scanned keys to {@code channel}, starting from the initial cursor.
 *
 * @param channel sink receiving each key; must not be {@code null}.
 */
Command<K, V, StreamScanCursor> scanStreaming(KeyStreamingChannel<K> channel) {
// Single descriptive null-check; the generic notNull(channel) duplicated this assertion.
LettuceAssert.notNull(channel, "KeyStreamingChannel " + MUST_NOT_BE_NULL);
return scanStreaming(channel, ScanCursor.INITIAL, null);
}
/**
 * SCAN streaming variant - continue the iteration from {@code scanCursor}, emitting keys to {@code channel}.
 *
 * @param channel sink receiving each key; must not be {@code null}.
 * @param scanCursor cursor from a previous SCAN reply.
 */
Command<K, V, StreamScanCursor> scanStreaming(KeyStreamingChannel<K> channel, ScanCursor scanCursor) {
// Single descriptive null-check; the generic notNull(channel) duplicated this assertion.
LettuceAssert.notNull(channel, "KeyStreamingChannel " + MUST_NOT_BE_NULL);
return scanStreaming(channel, scanCursor, null);
}
/**
 * SCAN streaming variant - start from the initial cursor with {@code scanArgs}, emitting keys to {@code channel}.
 *
 * @param channel sink receiving each key; must not be {@code null}.
 * @param scanArgs MATCH/COUNT/TYPE options, may be {@code null} downstream but is passed through as-is here.
 */
Command<K, V, StreamScanCursor> scanStreaming(KeyStreamingChannel<K> channel, ScanArgs scanArgs) {
// Single descriptive null-check; the generic notNull(channel) duplicated this assertion.
LettuceAssert.notNull(channel, "KeyStreamingChannel " + MUST_NOT_BE_NULL);
return scanStreaming(channel, ScanCursor.INITIAL, scanArgs);
}
/**
 * SCAN streaming variant - full form: iterate from {@code scanCursor} with optional {@code scanArgs},
 * emitting each key to {@code channel} instead of collecting them into a cursor result.
 *
 * @param channel sink receiving each key; must not be {@code null}.
 * @param scanCursor cursor to resume from; validated by {@link #scanArgs}.
 * @param scanArgs optional MATCH/COUNT/TYPE options, may be {@code null}.
 */
Command<K, V, StreamScanCursor> scanStreaming(KeyStreamingChannel<K> channel, ScanCursor scanCursor, ScanArgs scanArgs) {
// Single descriptive null-check; the generic notNull(channel) duplicated this assertion.
LettuceAssert.notNull(channel, "KeyStreamingChannel " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
scanArgs(scanCursor, scanArgs, args);
KeyScanStreamingOutput<K, V> output = new KeyScanStreamingOutput<>(codec, channel);
return createCommand(SCAN, output, args);
}
/** SCARD - return the cardinality (member count) of the set at {@code key}. */
Command<K, V, Long> scard(K key) {
notNullKey(key);
return createCommand(SCARD, new IntegerOutput<>(codec), key);
}
/** SCRIPT EXISTS - check which of the given SHA1 {@code digests} are cached on the server. */
Command<K, V, List<Boolean>> scriptExists(String... digests) {
LettuceAssert.notNull(digests, "Digests " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(digests, "Digests " + MUST_NOT_BE_EMPTY);
LettuceAssert.noNullElements(digests, "Digests " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(EXISTS);
for (String sha : digests) {
args.add(sha);
}
return createCommand(SCRIPT, new BooleanListOutput<>(codec), args);
}
/** SCRIPT FLUSH - remove all cached Lua scripts. */
Command<K, V, String> scriptFlush() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(FLUSH);
return createCommand(SCRIPT, new StatusOutput<>(codec), args);
}
/** SCRIPT FLUSH with explicit ASYNC/SYNC {@code flushMode}. */
Command<K, V, String> scriptFlush(FlushMode flushMode) {
LettuceAssert.notNull(flushMode, "FlushMode " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(FLUSH).add(flushMode.name());
return createCommand(SCRIPT, new StatusOutput<>(codec), args);
}
/** SCRIPT KILL - terminate the currently running Lua script. */
Command<K, V, String> scriptKill() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(KILL);
return createCommand(SCRIPT, new StatusOutput<>(codec), args);
}
/** SCRIPT LOAD - cache {@code script} on the server; the reply is its SHA1 digest. */
Command<K, V, String> scriptLoad(byte[] script) {
LettuceAssert.notNull(script, "Script " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(LOAD).add(script);
return createCommand(SCRIPT, new StatusOutput<>(codec), args);
}
/** SDIFF - members of the first set not present in any of the following sets. */
Command<K, V, Set<V>> sdiff(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(SDIFF, new ValueSetOutput<>(codec), args);
}
/** SDIFF streaming variant - emit the difference members to {@code channel}; reply is the count. */
Command<K, V, Long> sdiff(ValueStreamingChannel<V> channel, K... keys) {
notEmpty(keys);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(SDIFF, new ValueStreamingOutput<>(codec, channel), args);
}
/** SDIFFSTORE - store the set difference of {@code keys} into {@code destination}. */
Command<K, V, Long> sdiffstore(K destination, K... keys) {
notEmpty(keys);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(destination).addKeys(keys);
return createCommand(SDIFFSTORE, new IntegerOutput<>(codec), args);
}
/** SELECT - switch the connection to logical database {@code db}. */
Command<K, V, String> select(int db) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(db);
return createCommand(SELECT, new StatusOutput<>(codec), args);
}
/** SET - store {@code value} at {@code key}. */
Command<K, V, String> set(K key, V value) {
notNullKey(key);
return createCommand(SET, new StatusOutput<>(codec), key, value);
}
/** SET with options (EX/PX/NX/XX/KEEPTTL...) contributed by {@code setArgs}. */
Command<K, V, String> set(K key, V value, SetArgs setArgs) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(value);
setArgs.build(args);
return createCommand(SET, new StatusOutput<>(codec), args);
}
// NOTE(review): DIGEST is not a core Redis command - presumably a server-module/extension
// returning a digest of the value at key; confirm server support before relying on it.
Command<K, V, String> digestKey(K key) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
return createCommand(DIGEST, new StatusOutput<>(codec), args);
}
/** SET ... GET - set {@code value} and return the previous value, with default options. */
Command<K, V, V> setGet(K key, V value) {
return setGet(key, value, new SetArgs());
}
/** SET ... GET - set {@code value} with {@code setArgs} and return the previous value. */
Command<K, V, V> setGet(K key, V value, SetArgs setArgs) {
notNullKey(key);
LettuceAssert.notNull(setArgs, "SetArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(value);
setArgs.build(args);
// GET modifier must follow the option flags built by setArgs.
args.add(GET);
return createCommand(SET, new ValueOutput<>(codec), args);
}
/** SETBIT - set the bit at {@code offset} to {@code value}; returns the previous bit. */
Command<K, V, Long> setbit(K key, long offset, int value) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(offset).add(value);
return createCommand(SETBIT, new IntegerOutput<>(codec), args);
}
/** SETEX - set {@code value} with an expiry of {@code seconds}. */
Command<K, V, String> setex(K key, long seconds, V value) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(seconds).addValue(value);
return createCommand(SETEX, new StatusOutput<>(codec), args);
}
/** SETNX - set {@code value} only if {@code key} does not exist; reply is whether it was set. */
Command<K, V, Boolean> setnx(K key, V value) {
notNullKey(key);
return createCommand(SETNX, new BooleanOutput<>(codec), key, value);
}
/** SETRANGE - overwrite the string at {@code key} starting at {@code offset}; returns new length. */
Command<K, V, Long> setrange(K key, long offset, V value) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(offset).addValue(value);
return createCommand(SETRANGE, new IntegerOutput<>(codec), args);
}
/** SHUTDOWN - stop the server, persisting (SAVE) or not (NOSAVE) depending on {@code save}. */
Command<K, V, String> shutdown(boolean save) {
CommandArgs<K, V> args = new CommandArgs<>(codec);
return createCommand(SHUTDOWN, new StatusOutput<>(codec), save ? args.add(SAVE) : args.add(NOSAVE));
}
/** SHUTDOWN with explicit options contributed by {@code shutdownArgs}. */
Command<K, V, String> shutdown(ShutdownArgs shutdownArgs) {
LettuceAssert.notNull(shutdownArgs, "shutdownArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
shutdownArgs.build(args);
return createCommand(SHUTDOWN, new StatusOutput<>(codec), args);
}
/** SINTER - intersection of the sets at {@code keys}. */
Command<K, V, Set<V>> sinter(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(SINTER, new ValueSetOutput<>(codec), args);
}
/** SINTER streaming variant - emit intersection members to {@code channel}; reply is the count. */
Command<K, V, Long> sinter(ValueStreamingChannel<V> channel, K... keys) {
notEmpty(keys);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(SINTER, new ValueStreamingOutput<>(codec, channel), args);
}
/** SINTERCARD - cardinality of the intersection; numkeys is sent before the keys. */
Command<K, V, Long> sintercard(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(keys.length).addKeys(keys);
return createCommand(SINTERCARD, new IntegerOutput<>(codec), args);
}
/** SINTERCARD with LIMIT - stop counting once {@code limit} is reached. */
Command<K, V, Long> sintercard(long limit, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(keys.length).addKeys(keys).add(LIMIT).add(limit);
return createCommand(SINTERCARD, new IntegerOutput<>(codec), args);
}
/** SINTERSTORE - store the intersection of {@code keys} into {@code destination}. */
Command<K, V, Long> sinterstore(K destination, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(destination).addKeys(keys);
return createCommand(SINTERSTORE, new IntegerOutput<>(codec), args);
}
/** SISMEMBER - test whether {@code member} belongs to the set at {@code key}. */
Command<K, V, Boolean> sismember(K key, V member) {
notNullKey(key);
return createCommand(SISMEMBER, new BooleanOutput<>(codec), key, member);
}
/** SLAVEOF - make this server a replica of {@code host}:{@code port}. */
Command<K, V, String> slaveof(String host, int port) {
LettuceAssert.notNull(host, "Host " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(host, "Host " + MUST_NOT_BE_EMPTY);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(host).add(port);
return createCommand(SLAVEOF, new StatusOutput<>(codec), args);
}
/** SLAVEOF NO ONE - promote this replica back to a standalone master. */
Command<K, V, String> slaveofNoOne() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(NO).add(ONE);
return createCommand(SLAVEOF, new StatusOutput<>(codec), args);
}
/** SLOWLOG GET - fetch the recorded slow-log entries. */
Command<K, V, List<Object>> slowlogGet() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(GET);
return createCommand(SLOWLOG, new NestedMultiOutput<>(codec), args);
}
/** SLOWLOG GET with a maximum of {@code count} entries. */
Command<K, V, List<Object>> slowlogGet(int count) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(GET).add(count);
return createCommand(SLOWLOG, new NestedMultiOutput<>(codec), args);
}
/** SLOWLOG LEN - number of entries currently in the slow log. */
Command<K, V, Long> slowlogLen() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(LEN);
return createCommand(SLOWLOG, new IntegerOutput<>(codec), args);
}
/** SLOWLOG RESET - clear the slow log. */
Command<K, V, String> slowlogReset() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(RESET);
return createCommand(SLOWLOG, new StatusOutput<>(codec), args);
}
/** SMEMBERS - all members of the set at {@code key}. */
Command<K, V, Set<V>> smembers(K key) {
notNullKey(key);
return createCommand(SMEMBERS, new ValueSetOutput<>(codec), key);
}
/** SMEMBERS streaming variant - emit members to {@code channel}; reply is the count. */
Command<K, V, Long> smembers(ValueStreamingChannel<V> channel, K key) {
notNullKey(key);
notNull(channel);
return createCommand(SMEMBERS, new ValueStreamingOutput<>(codec, channel), key);
}
/** SMISMEMBER - per-member membership test; one boolean per supplied member, in order. */
Command<K, V, List<Boolean>> smismember(K key, V... members) {
notNullKey(key);
LettuceAssert.notNull(members, "Members " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(members, "Members " + MUST_NOT_BE_EMPTY);
return createCommand(SMISMEMBER, new BooleanListOutput<>(codec), key, members);
}
/** SMOVE - atomically move {@code member} from the set at {@code source} to {@code destination}. */
Command<K, V, Boolean> smove(K source, K destination, V member) {
LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(source).addKey(destination).addValue(member);
return createCommand(SMOVE, new BooleanOutput<>(codec), args);
}
/** SORT - sort the list/set/zset at {@code key} with default options. */
Command<K, V, List<V>> sort(K key) {
notNullKey(key);
return createCommand(SORT, new ValueListOutput<>(codec), key);
}
/** SORT with options (BY/LIMIT/GET/ASC|DESC/ALPHA) from {@code sortArgs}; null second arg = no STORE. */
Command<K, V, List<V>> sort(K key, SortArgs sortArgs) {
notNullKey(key);
LettuceAssert.notNull(sortArgs, "SortArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
sortArgs.build(args, null);
return createCommand(SORT, new ValueListOutput<>(codec), args);
}
/** SORT streaming variant - emit sorted values to {@code channel}; reply is the count. */
Command<K, V, Long> sort(ValueStreamingChannel<V> channel, K key) {
notNullKey(key);
notNull(channel);
return createCommand(SORT, new ValueStreamingOutput<>(codec, channel), key);
}
/** SORT streaming variant with options. */
Command<K, V, Long> sort(ValueStreamingChannel<V> channel, K key, SortArgs sortArgs) {
notNullKey(key);
notNull(channel);
LettuceAssert.notNull(sortArgs, "SortArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
sortArgs.build(args, null);
return createCommand(SORT, new ValueStreamingOutput<>(codec, channel), args);
}
/** SORT_RO - read-only SORT, usable on replicas. */
Command<K, V, List<V>> sortReadOnly(K key) {
notNullKey(key);
return createCommand(SORT_RO, new ValueListOutput<>(codec), key);
}
/** SORT_RO with options. */
Command<K, V, List<V>> sortReadOnly(K key, SortArgs sortArgs) {
notNullKey(key);
LettuceAssert.notNull(sortArgs, "SortArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
sortArgs.build(args, null);
return createCommand(SORT_RO, new ValueListOutput<>(codec), args);
}
/** SORT_RO streaming variant. */
Command<K, V, Long> sortReadOnly(ValueStreamingChannel<V> channel, K key) {
notNullKey(key);
notNull(channel);
return createCommand(SORT_RO, new ValueStreamingOutput<>(codec, channel), key);
}
/** SORT_RO streaming variant with options. */
Command<K, V, Long> sortReadOnly(ValueStreamingChannel<V> channel, K key, SortArgs sortArgs) {
notNullKey(key);
notNull(channel);
LettuceAssert.notNull(sortArgs, "SortArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
sortArgs.build(args, null);
return createCommand(SORT_RO, new ValueStreamingOutput<>(codec, channel), args);
}
/** SORT ... STORE - sort {@code key} and store the result into {@code destination}; reply is its length. */
Command<K, V, Long> sortStore(K key, SortArgs sortArgs, K destination) {
notNullKey(key);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(sortArgs, "SortArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
sortArgs.build(args, destination);
return createCommand(SORT, new IntegerOutput<>(codec), args);
}
/** SPOP - remove and return a random member of the set at {@code key}. */
Command<K, V, V> spop(K key) {
notNullKey(key);
return createCommand(SPOP, new ValueOutput<>(codec), key);
}
/** SPOP with COUNT - remove and return up to {@code count} random members. */
Command<K, V, Set<V>> spop(K key, long count) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(count);
return createCommand(SPOP, new ValueSetOutput<>(codec), args);
}
/** SPUBLISH - publish {@code message} to a sharded Pub/Sub channel (cluster sharded messaging). */
Command<K, V, Long> spublish(K shardChannel, V message) {
LettuceAssert.notNull(shardChannel, "ShardChannel " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(shardChannel).addValue(message);
return createCommand(SPUBLISH, new IntegerOutput<>(codec), args);
}
/** SRANDMEMBER - return a random member without removing it. */
Command<K, V, V> srandmember(K key) {
notNullKey(key);
return createCommand(SRANDMEMBER, new ValueOutput<>(codec), key);
}
/** SRANDMEMBER with COUNT - random members; negative count allows repeats per the Redis contract. */
Command<K, V, List<V>> srandmember(K key, long count) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(count);
return createCommand(SRANDMEMBER, new ValueListOutput<>(codec), args);
}
/**
 * SRANDMEMBER streaming variant - emit up to {@code count} random members to {@code channel};
 * the reply is the number of members streamed.
 *
 * @param channel sink receiving each member; must not be {@code null}.
 */
Command<K, V, Long> srandmember(ValueStreamingChannel<V> channel, K key, long count) {
notNullKey(key);
// Guard the channel like every other streaming overload; previously a null channel
// would only fail later inside the output, with a less helpful error.
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(count);
return createCommand(SRANDMEMBER, new ValueStreamingOutput<>(codec, channel), args);
}
/** SREM - remove {@code members} from the set at {@code key}; reply is the number removed. */
Command<K, V, Long> srem(K key, V... members) {
notNullKey(key);
LettuceAssert.notNull(members, "Members " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(members, "Members " + MUST_NOT_BE_EMPTY);
return createCommand(SREM, new IntegerOutput<>(codec), key, members);
}
/** SSCAN - start iterating the set at {@code key} from the initial cursor. */
Command<K, V, ValueScanCursor<V>> sscan(K key) {
notNullKey(key);
return sscan(key, ScanCursor.INITIAL, null);
}
/** SSCAN - continue iterating from {@code scanCursor}. */
Command<K, V, ValueScanCursor<V>> sscan(K key, ScanCursor scanCursor) {
notNullKey(key);
return sscan(key, scanCursor, null);
}
/** SSCAN - start iterating with {@code scanArgs} (MATCH/COUNT). */
Command<K, V, ValueScanCursor<V>> sscan(K key, ScanArgs scanArgs) {
notNullKey(key);
return sscan(key, ScanCursor.INITIAL, scanArgs);
}
/** SSCAN - full form: key first, then cursor and options via the shared scanArgs helper. */
Command<K, V, ValueScanCursor<V>> sscan(K key, ScanCursor scanCursor, ScanArgs scanArgs) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key);
scanArgs(scanCursor, scanArgs, args);
ValueScanOutput<K, V> output = new ValueScanOutput<>(codec);
return createCommand(SSCAN, output, args);
}
/** SSCAN streaming variant - initial cursor, no options. */
Command<K, V, StreamScanCursor> sscanStreaming(ValueStreamingChannel<V> channel, K key) {
notNullKey(key);
notNull(channel);
return sscanStreaming(channel, key, ScanCursor.INITIAL, null);
}
/** SSCAN streaming variant - resume from {@code scanCursor}. */
Command<K, V, StreamScanCursor> sscanStreaming(ValueStreamingChannel<V> channel, K key, ScanCursor scanCursor) {
notNullKey(key);
notNull(channel);
return sscanStreaming(channel, key, scanCursor, null);
}
/** SSCAN streaming variant - initial cursor with {@code scanArgs}. */
Command<K, V, StreamScanCursor> sscanStreaming(ValueStreamingChannel<V> channel, K key, ScanArgs scanArgs) {
notNullKey(key);
notNull(channel);
return sscanStreaming(channel, key, ScanCursor.INITIAL, scanArgs);
}
/** SSCAN streaming variant - full form, emitting members to {@code channel}. */
Command<K, V, StreamScanCursor> sscanStreaming(ValueStreamingChannel<V> channel, K key, ScanCursor scanCursor,
ScanArgs scanArgs) {
notNullKey(key);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key);
scanArgs(scanCursor, scanArgs, args);
ValueScanStreamingOutput<K, V> output = new ValueScanStreamingOutput<>(codec, channel);
return createCommand(SSCAN, output, args);
}
/** STRLEN - length of the string value at {@code key} (0 if the key does not exist). */
Command<K, V, Long> strlen(K key) {
notNullKey(key);
return createCommand(STRLEN, new IntegerOutput<>(codec), key);
}
/** STRALGO LCS - longest common subsequence (deprecated in Redis 7 in favor of LCS). */
Command<K, V, StringMatchResult> stralgoLcs(StrAlgoArgs strAlgoArgs) {
LettuceAssert.notNull(strAlgoArgs, "StrAlgoArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
strAlgoArgs.build(args);
return createCommand(STRALGO, new StringMatchResultOutput<>(codec), args);
}
/**
 * LCS - longest common subsequence of two keys (Redis 7+ replacement for STRALGO LCS).
 *
 * @param lcsArgs keys and options (LEN/IDX/MINMATCHLEN/WITHMATCHLEN); must not be {@code null}.
 */
Command<K, V, StringMatchResult> lcs(LcsArgs lcsArgs) {
// Message previously read "lcsArgsmust not be null" (missing separator before the shared suffix).
LettuceAssert.notNull(lcsArgs, "LcsArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
lcsArgs.build(args);
return createCommand(LCS, new StringMatchResultOutput<>(codec), args);
}
/** SUNION - union of the sets at {@code keys}. */
Command<K, V, Set<V>> sunion(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(SUNION, new ValueSetOutput<>(codec), args);
}
/** SUNION streaming variant - emit union members to {@code channel}; reply is the count. */
Command<K, V, Long> sunion(ValueStreamingChannel<V> channel, K... keys) {
notEmpty(keys);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(SUNION, new ValueStreamingOutput<>(codec, channel), args);
}
/** SUNIONSTORE - store the union of {@code keys} into {@code destination}. */
Command<K, V, Long> sunionstore(K destination, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(destination).addKeys(keys);
return createCommand(SUNIONSTORE, new IntegerOutput<>(codec), args);
}
/** SWAPDB - swap the contents of logical databases {@code db1} and {@code db2}. */
Command<K, V, String> swapdb(int db1, int db2) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(db1).add(db2);
return createCommand(SWAPDB, new StatusOutput<>(codec), args);
}
/** SYNC - legacy replication handshake command. */
Command<K, V, String> sync() {
return createCommand(SYNC, new StatusOutput<>(codec));
}
/** TIME - server time as [unix seconds, microseconds]. */
Command<K, V, List<V>> time() {
CommandArgs<K, V> args = new CommandArgs<>(codec);
return createCommand(TIME, new ValueListOutput<>(codec), args);
}
/** TOUCH - update last-access time of {@code keys}; reply is how many existed. */
Command<K, V, Long> touch(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(TOUCH, new IntegerOutput<>(codec), args);
}
/** TOUCH - iterable overload. */
Command<K, V, Long> touch(Iterable<K> keys) {
LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(TOUCH, new IntegerOutput<>(codec), args);
}
/** TTL - remaining time to live of {@code key} in seconds (-1 no expiry, -2 missing key). */
Command<K, V, Long> ttl(K key) {
notNullKey(key);
return createCommand(TTL, new IntegerOutput<>(codec), key);
}
/** TYPE - data type name of the value at {@code key}; fully qualified to avoid clashing with java.lang. */
Command<K, V, String> type(K key) {
notNullKey(key);
return createCommand(CommandType.TYPE, new StatusOutput<>(codec), key);
}
/** UNLINK - asynchronous, non-blocking delete of {@code keys}. */
Command<K, V, Long> unlink(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(UNLINK, new IntegerOutput<>(codec), args);
}
/** UNLINK - iterable overload. */
Command<K, V, Long> unlink(Iterable<K> keys) {
LettuceAssert.notNull(keys, "Keys " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(UNLINK, new IntegerOutput<>(codec), args);
}
/** COPY - copy the value at {@code source} to {@code destination}; reply is whether the copy happened. */
Command<K, V, Boolean> copy(K source, K destination) {
LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(source).addKey(destination);
return createCommand(COPY, new BooleanOutput<>(codec), args);
}
/** COPY with options (DB/REPLACE) contributed by {@code copyArgs}. */
Command<K, V, Boolean> copy(K source, K destination, CopyArgs copyArgs) {
LettuceAssert.notNull(source, "Source " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(source).addKey(destination);
copyArgs.build(args);
return createCommand(COPY, new BooleanOutput<>(codec), args);
}
/** UNWATCH - clear all keys watched for the current transaction. */
Command<K, V, String> unwatch() {
return createCommand(UNWATCH, new StatusOutput<>(codec));
}
/** WAIT - block until {@code replicas} acknowledge prior writes or {@code timeout} ms elapse. */
Command<K, V, Long> wait(int replicas, long timeout) {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(replicas).add(timeout);
return createCommand(WAIT, new IntegerOutput<>(codec), args);
}
/** WATCH - watch {@code keys} for modification before a MULTI/EXEC transaction. */
Command<K, V, String> watch(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys);
return createCommand(WATCH, new StatusOutput<>(codec), args);
}
/** XACK - acknowledge {@code messageIds} for consumer {@code group}; reply is the count acknowledged. */
public Command<K, V, Long> xack(K key, K group, String[] messageIds) {
notNullKey(key);
LettuceAssert.notNull(group, "Group " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(messageIds, "MessageIds " + MUST_NOT_BE_EMPTY);
LettuceAssert.noNullElements(messageIds, "MessageIds " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(group);
for (String messageId : messageIds) {
args.add(messageId);
}
return createCommand(XACK, new IntegerOutput<>(codec), args);
}
/** XACKDEL with the server-default deletion policy. */
public Command<K, V, List<StreamEntryDeletionResult>> xackdel(K key, K group, String[] messageIds) {
return xackdel(key, group, null, messageIds);
}
/**
 * XACKDEL - acknowledge and delete stream entries in one step.
 * The optional {@code policy} precedes the IDS count block; null means server default.
 */
public Command<K, V, List<StreamEntryDeletionResult>> xackdel(K key, K group, StreamDeletionPolicy policy,
String[] messageIds) {
notNullKey(key);
LettuceAssert.notNull(group, "Group " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(messageIds, "MessageIds " + MUST_NOT_BE_EMPTY);
LettuceAssert.noNullElements(messageIds, "MessageIds " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(group);
if (policy != null) {
args.add(policy);
}
args.add(CommandKeyword.IDS).add(messageIds.length);
for (String messageId : messageIds) {
args.add(messageId);
}
return createCommand(XACKDEL, new StreamEntryDeletionResultListOutput<>(codec), args);
}
/** XAUTOCLAIM - transfer ownership of idle pending entries; JUSTID mode alters the reply shape. */
public Command<K, V, ClaimedMessages<K, V>> xautoclaim(K key, XAutoClaimArgs<K> xAutoClaimArgs) {
notNullKey(key);
LettuceAssert.notNull(xAutoClaimArgs, "XAutoClaimArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
xAutoClaimArgs.build(args);
return createCommand(XAUTOCLAIM, new ClaimedMessagesOutput<>(codec, key, xAutoClaimArgs.isJustid()), args);
}
/**
 * XCLAIM - claim specific pending {@code messageIds} for {@code consumer}.
 * min-idle-time and message ids must precede the option flags built by xClaimArgs.
 */
public Command<K, V, List<StreamMessage<K, V>>> xclaim(K key, Consumer<K> consumer, XClaimArgs xClaimArgs,
String[] messageIds) {
notNullKey(key);
LettuceAssert.notNull(consumer, "Consumer " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(messageIds, "MessageIds " + MUST_NOT_BE_EMPTY);
LettuceAssert.noNullElements(messageIds, "MessageIds " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
LettuceAssert.notNull(xClaimArgs, "XClaimArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(consumer.group).addKey(consumer.name)
.add(xClaimArgs.minIdleTime);
for (String messageId : messageIds) {
args.add(messageId);
}
xClaimArgs.build(args);
return createCommand(XCLAIM, new StreamMessageListOutput<>(codec, key), args);
}
/** XADD - append a map-shaped entry; without args the auto-generated id "*" is used. */
public Command<K, V, String> xadd(K key, XAddArgs xAddArgs, Map<K, V> map) {
notNullKey(key);
LettuceAssert.notNull(map, "Message body " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (xAddArgs != null) {
xAddArgs.build(args);
} else {
args.add("*");
}
args.add(map);
return createCommand(XADD, new StatusOutput<>(codec), args);
}
/** XADD - flat field1,value1,... body; length must be even so pairs line up. */
public Command<K, V, String> xadd(K key, XAddArgs xAddArgs, Object[] body) {
notNullKey(key);
LettuceAssert.notNull(body, "Message body " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(body, "Message body " + MUST_NOT_BE_EMPTY);
LettuceAssert.noNullElements(body, "Message body " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
LettuceAssert.isTrue(body.length % 2 == 0, "Message body.length must be a multiple of 2 and contain a "
+ "sequence of field1, value1, field2, value2, fieldN, valueN");
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (xAddArgs != null) {
xAddArgs.build(args);
} else {
args.add("*");
}
// Even slots are encoded as keys (field names), odd slots as values.
for (int i = 0; i < body.length; i += 2) {
args.addKey((K) body[i]);
args.addValue((V) body[i + 1]);
}
return createCommand(XADD, new StatusOutput<>(codec), args);
}
/** XDEL - delete {@code messageIds} from the stream; reply is the number deleted. */
public Command<K, V, Long> xdel(K key, String[] messageIds) {
notNullKey(key);
LettuceAssert.notEmpty(messageIds, "MessageIds " + MUST_NOT_BE_EMPTY);
LettuceAssert.noNullElements(messageIds, "MessageIds " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
for (String messageId : messageIds) {
args.add(messageId);
}
return createCommand(XDEL, new IntegerOutput<>(codec), args);
}
/** XDELEX with the server-default deletion policy. */
public Command<K, V, List<StreamEntryDeletionResult>> xdelex(K key, String[] messageIds) {
return xdelex(key, null, messageIds);
}
/** XDELEX - delete entries with a per-entry result; optional {@code policy} precedes the IDS block. */
public Command<K, V, List<StreamEntryDeletionResult>> xdelex(K key, StreamDeletionPolicy policy, String[] messageIds) {
notNullKey(key);
LettuceAssert.notEmpty(messageIds, "MessageIds " + MUST_NOT_BE_EMPTY);
LettuceAssert.noNullElements(messageIds, "MessageIds " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (policy != null) {
args.add(policy);
}
args.add(CommandKeyword.IDS).add(messageIds.length);
for (String messageId : messageIds) {
args.add(messageId);
}
return createCommand(XDELEX, new StreamEntryDeletionResultListOutput<>(codec), args);
}
/** XGROUP CREATE - create a consumer group at {@code offset}; optional args add MKSTREAM etc. */
public Command<K, V, String> xgroupCreate(StreamOffset<K> offset, K group, XGroupCreateArgs commandArgs) {
LettuceAssert.notNull(offset, "StreamOffset " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(group, "Group " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(CREATE).addKey(offset.getName()).addKey(group)
.add(offset.getOffset());
if (commandArgs != null) {
commandArgs.build(args);
}
return createCommand(XGROUP, new StatusOutput<>(codec), args);
}
/** XGROUP CREATECONSUMER - register {@code consumer} in its group; reply is whether it was created. */
public Command<K, V, Boolean> xgroupCreateconsumer(K key, Consumer<K> consumer) {
notNullKey(key);
LettuceAssert.notNull(consumer, "Consumer " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add("CREATECONSUMER").addKey(key).addKey(consumer.getGroup())
.addKey(consumer.getName());
return createCommand(XGROUP, new BooleanOutput<>(codec), args);
}
/** XGROUP DELCONSUMER - remove {@code consumer}; reply is its number of pending entries. */
public Command<K, V, Long> xgroupDelconsumer(K key, Consumer<K> consumer) {
notNullKey(key);
LettuceAssert.notNull(consumer, "Consumer " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add("DELCONSUMER").addKey(key).addKey(consumer.getGroup())
.addKey(consumer.getName());
return createCommand(XGROUP, new IntegerOutput<>(codec), args);
}
/** XGROUP DESTROY - delete the consumer group {@code group} on stream {@code key}. */
public Command<K, V, Boolean> xgroupDestroy(K key, K group) {
notNullKey(key);
LettuceAssert.notNull(group, "Group " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add("DESTROY").addKey(key).addKey(group);
return createCommand(XGROUP, new BooleanOutput<>(codec), args);
}
/** XGROUP SETID - reset the group's last-delivered id to {@code offset}. */
public Command<K, V, String> xgroupSetid(StreamOffset<K> offset, K group) {
LettuceAssert.notNull(offset, "StreamOffset " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(group, "Group " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).add("SETID").addKey(offset.getName()).addKey(group)
.add(offset.getOffset());
return createCommand(XGROUP, new StatusOutput<>(codec), args);
}
/** XINFO STREAM - general information about the stream at {@code key}. */
public Command<K, V, List<Object>> xinfoStream(K key) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(STREAM).addKey(key);
return createCommand(XINFO, new ArrayOutput<>(codec), args);
}
/** XINFO GROUPS - consumer groups of the stream at {@code key}. */
public Command<K, V, List<Object>> xinfoGroups(K key) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(GROUPS).addKey(key);
return createCommand(XINFO, new ArrayOutput<>(codec), args);
}
/** XINFO CONSUMERS - consumers of {@code group} on the stream at {@code key}. */
public Command<K, V, List<Object>> xinfoConsumers(K key, K group) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(CONSUMERS).addKey(key).addKey(group);
return createCommand(XINFO, new ArrayOutput<>(codec), args);
}
/** XLEN - number of entries in the stream at {@code key}. */
public Command<K, V, Long> xlen(K key) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
return createCommand(XLEN, new IntegerOutput<>(codec), args);
}
/** XPENDING summary form - aggregate pending-entry info for {@code group}. */
public Command<K, V, PendingMessages> xpending(K key, K group) {
notNullKey(key);
LettuceAssert.notNull(group, "Group " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(group);
return createCommand(XPENDING, new PendingMessagesOutput<>(codec), args);
}
/** XPENDING extended form - pending entries of {@code group} within {@code range}; unlimited maps to Long.MAX_VALUE. */
public Command<K, V, List<PendingMessage>> xpending(K key, K group, Range<String> range, Limit limit) {
notNullKey(key);
LettuceAssert.notNull(group, "Group " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(range, "Range " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(limit, "Limit " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(group);
args.add(getLowerValue(range)).add(getUpperValue(range));
args.add(limit.isLimited() ? limit.getCount() : Long.MAX_VALUE);
return createCommand(XPENDING, new PendingMessageListOutput<>(codec), args);
}
/** XPENDING extended form filtered to a single {@code consumer} (consumer name appended last). */
public Command<K, V, List<PendingMessage>> xpending(K key, Consumer<K> consumer, Range<String> range, Limit limit) {
notNullKey(key);
LettuceAssert.notNull(consumer, "Consumer " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(range, "Range " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(limit, "Limit " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addKey(consumer.group);
args.add(getLowerValue(range)).add(getUpperValue(range));
args.add(limit.isLimited() ? limit.getCount() : Long.MAX_VALUE);
args.addKey(consumer.name);
return createCommand(XPENDING, new PendingMessageListOutput<>(codec), args);
}
/** XPENDING driven entirely by {@code commandArgs} (supports IDLE etc.). */
public Command<K, V, List<PendingMessage>> xpending(K key, XPendingArgs<K> commandArgs) {
notNullKey(key);
LettuceAssert.notNull(commandArgs, "XPendingArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
commandArgs.build(args);
return createCommand(XPENDING, new PendingMessageListOutput<>(codec), args);
}
/** XRANGE - entries in ascending id order within {@code range}, optionally capped by COUNT. */
public Command<K, V, List<StreamMessage<K, V>>> xrange(K key, Range<String> range, Limit limit) {
notNullKey(key);
LettuceAssert.notNull(range, "Range " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(limit, "Limit " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(getLowerValue(range)).add(getUpperValue(range));
if (limit.isLimited()) {
args.add(COUNT).add(limit.getCount());
}
return createCommand(XRANGE, new StreamMessageListOutput<>(codec, key), args);
}
/** XREVRANGE - descending id order, so the upper bound is sent before the lower. */
public Command<K, V, List<StreamMessage<K, V>>> xrevrange(K key, Range<String> range, Limit limit) {
notNullKey(key);
LettuceAssert.notNull(range, "Range " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(limit, "Limit " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
args.add(getUpperValue(range)).add(getLowerValue(range));
if (limit.isLimited()) {
args.add(COUNT).add(limit.getCount());
}
return createCommand(XREVRANGE, new StreamMessageListOutput<>(codec, key), args);
}
/** XTRIM MAXLEN - cap the stream length at {@code count}; "~" enables approximate trimming. */
public Command<K, V, Long> xtrim(K key, boolean approximateTrimming, long count) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(MAXLEN);
if (approximateTrimming) {
args.add("~");
}
args.add(count);
return createCommand(XTRIM, new IntegerOutput<>(codec), args);
}
/** XTRIM driven by {@code xTrimArgs} (MAXLEN/MINID and approximation flags). */
public Command<K, V, Long> xtrim(K key, XTrimArgs xTrimArgs) {
notNullKey(key);
LettuceAssert.notNull(xTrimArgs, "XTrimArgs " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
xTrimArgs.build(args);
return createCommand(XTRIM, new IntegerOutput<>(codec), args);
}
/** Wire form of the lower bound of a stream id range: "-" when unbounded, else the (possibly exclusive) id. */
private static String getLowerValue(Range<String> range) {
Boundary<String> lower = range.getLower();
if (lower.equals(Boundary.unbounded())) {
return "-";
}
return getRange(lower);
}
/** Wire form of the upper bound of a stream id range: "+" when unbounded, else the (possibly exclusive) id. */
private static String getUpperValue(Range<String> range) {
Boundary<String> upper = range.getUpper();
if (upper.equals(Boundary.unbounded())) {
return "+";
}
return getRange(upper);
}
/** Render a bounded stream id: exclusive bounds get the "(" prefix, inclusive ones are sent as-is. */
private static String getRange(Boundary<String> boundary) {
String value = boundary.getValue();
return boundary.isIncluding() ? value : "(" + value;
}
/** XREAD - read from multiple streams; STREAMS requires all names first, then all offsets, in order. */
public Command<K, V, List<StreamMessage<K, V>>> xread(XReadArgs xReadArgs, StreamOffset<K>[] streams) {
LettuceAssert.notNull(streams, "Streams " + MUST_NOT_BE_NULL);
LettuceAssert.isTrue(streams.length > 0, "Streams " + MUST_NOT_BE_EMPTY);
CommandArgs<K, V> args = new CommandArgs<>(codec);
if (xReadArgs != null) {
xReadArgs.build(args);
}
args.add("STREAMS");
for (StreamOffset<K> stream : streams) {
args.addKey(stream.name);
}
for (StreamOffset<K> stream : streams) {
args.add(stream.offset);
}
return createCommand(XREAD, new StreamReadOutput<>(codec), args);
}
/**
 * XREADGROUP - group read; GROUP group consumer comes first, then options, then the STREAMS block.
 * Group/consumer names are pre-encoded via the codec because they are sent as plain args, not keys.
 */
public Command<K, V, List<StreamMessage<K, V>>> xreadgroup(Consumer<K> consumer, XReadArgs xReadArgs,
StreamOffset<K>... streams) {
LettuceAssert.notNull(streams, "Streams " + MUST_NOT_BE_NULL);
LettuceAssert.isTrue(streams.length > 0, "Streams " + MUST_NOT_BE_EMPTY);
LettuceAssert.notNull(consumer, "Consumer " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add("GROUP").add(encode(consumer.group)).add(encode(consumer.name));
if (xReadArgs != null) {
xReadArgs.build(args);
}
args.add("STREAMS");
for (StreamOffset<K> stream : streams) {
args.addKey(stream.name);
}
for (XReadArgs.StreamOffset<K> stream : streams) {
args.add(stream.offset);
}
return createCommand(XREADGROUP, new StreamReadOutput<>(codec), args);
}
/** Encode a key to raw bytes via the codec; copies out of the buffer so the result is standalone. */
private byte[] encode(K k) {
ByteBuffer byteBuffer = codec.encodeKey(k);
byte[] result = new byte[byteBuffer.remaining()];
byteBuffer.get(result);
return result;
}
Command<K, V, KeyValue<K, ScoredValue<V>>> bzmpop(long timeout, ZPopArgs popArgs, K[] keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(timeout).add(keys.length).addKeys(keys);
popArgs.build(args);
return createCommand(BZMPOP, new KeyValueOfScoredValueOutput<>(codec, keys[0]), args);
}
Command<K, V, KeyValue<K, ScoredValue<V>>> bzmpop(double timeout, ZPopArgs popArgs, K[] keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(timeout).add(keys.length).addKeys(keys);
popArgs.build(args);
return createCommand(BZMPOP, new KeyValueOfScoredValueOutput<>(codec, keys[0]), args);
}
Command<K, V, KeyValue<K, List<ScoredValue<V>>>> bzmpop(long timeout, long count, ZPopArgs popArgs, K[] keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(timeout).add(keys.length).addKeys(keys);
popArgs.build(args);
args.add(COUNT).add(count);
return createCommand(BZMPOP, new KeyValueListScoredValueOutput<>(codec, keys[0]), args);
}
Command<K, V, KeyValue<K, List<ScoredValue<V>>>> bzmpop(double timeout, long count, ZPopArgs popArgs, K[] keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(timeout).add(keys.length).addKeys(keys);
popArgs.build(args);
args.add(COUNT).add(count);
return createCommand(BZMPOP, new KeyValueListScoredValueOutput<>(codec, keys[0]), args);
}
Command<K, V, KeyValue<K, ScoredValue<V>>> bzpopmin(long timeout, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys).add(timeout);
return createCommand(BZPOPMIN, new KeyValueScoredValueOutput<>(codec), args);
}
Command<K, V, KeyValue<K, ScoredValue<V>>> bzpopmin(double timeout, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys).add(timeout);
return createCommand(BZPOPMIN, new KeyValueScoredValueOutput<>(codec), args);
}
Command<K, V, KeyValue<K, ScoredValue<V>>> bzpopmax(long timeout, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys).add(timeout);
return createCommand(BZPOPMAX, new KeyValueScoredValueOutput<>(codec), args);
}
Command<K, V, KeyValue<K, ScoredValue<V>>> bzpopmax(double timeout, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(keys).add(timeout);
return createCommand(BZPOPMAX, new KeyValueScoredValueOutput<>(codec), args);
}
Command<K, V, Long> zadd(K key, ZAddArgs zAddArgs, double score, V member) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (zAddArgs != null) {
zAddArgs.build(args);
}
args.add(score).addValue(member);
return createCommand(ZADD, new IntegerOutput<>(codec), args);
}
    /**
     * Build a {@code ZADD} command adding one or more scored members to a sorted set.
     * <p>
     * {@code scoresAndValues} is either a sequence of {@code ScoredValue} instances or an
     * alternating flat sequence {@code score1, value1, score2, value2, ...}.
     *
     * @param key the sorted set key, must not be {@code null}.
     * @param zAddArgs optional add options; may be {@code null}.
     * @param scoresAndValues members with their scores, must not be {@code null}, empty or contain {@code null} elements.
     * @return the command emitting the number of added elements.
     */
    @SuppressWarnings("unchecked")
    Command<K, V, Long> zadd(K key, ZAddArgs zAddArgs, Object... scoresAndValues) {
        notNullKey(key);
        LettuceAssert.notNull(scoresAndValues, "ScoresAndValues " + MUST_NOT_BE_NULL);
        LettuceAssert.notEmpty(scoresAndValues, "ScoresAndValues " + MUST_NOT_BE_EMPTY);
        LettuceAssert.noNullElements(scoresAndValues, "ScoresAndValues " + MUST_NOT_CONTAIN_NULL_ELEMENTS);
        CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
        if (zAddArgs != null) {
            zAddArgs.build(args);
        }
        // Homogeneous ScoredValue input: unwrap each element into a score/value pair.
        if (allElementsInstanceOf(scoresAndValues, ScoredValue.class)) {
            for (Object o : scoresAndValues) {
                ScoredValue<V> scoredValue = (ScoredValue<V>) o;
                args.add(scoredValue.getScore());
                args.addValue(scoredValue.getValue());
            }
        } else {
            // Flat varargs input: expects alternating Double score and V value entries.
            LettuceAssert.isTrue(scoresAndValues.length % 2 == 0,
                    "ScoresAndValues.length must be a multiple of 2 and contain a "
                            + "sequence of score1, value1, score2, value2, scoreN, valueN");
            for (int i = 0; i < scoresAndValues.length; i += 2) {
                args.add((Double) scoresAndValues[i]);
                args.addValue((V) scoresAndValues[i + 1]);
            }
        }
        return createCommand(ZADD, new IntegerOutput<>(codec), args);
    }
Command<K, V, Double> zaddincr(K key, ZAddArgs zAddArgs, double score, V member) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key);
if (zAddArgs != null) {
zAddArgs.build(args);
}
args.add(INCR);
args.add(score).addValue(member);
return createCommand(ZADD, new DoubleOutput<>(codec), args);
}
Command<K, V, Long> zcard(K key) {
notNullKey(key);
return createCommand(ZCARD, new IntegerOutput<>(codec), key);
}
Command<K, V, Long> zcount(K key, double min, double max) {
return zcount(key, string(min), string(max));
}
Command<K, V, Long> zcount(K key, String min, String max) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(min).add(max);
return createCommand(ZCOUNT, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> zcount(K key, Range<? extends Number> range) {
notNullKey(key);
notNullRange(range);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(min(range)).add(max(range));
return createCommand(ZCOUNT, new IntegerOutput<>(codec), args);
}
Command<K, V, List<V>> zdiff(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(keys.length).addKeys(keys);
return createCommand(ZDIFF, new ValueListOutput<>(codec), args);
}
Command<K, V, Long> zdiffstore(K destKey, K... srcKeys) {
notNullKey(destKey);
notEmpty(srcKeys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(destKey).add(srcKeys.length).addKeys(srcKeys);
return createCommand(ZDIFFSTORE, new IntegerOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zdiffWithScores(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(keys.length).addKeys(keys).add(WITHSCORES);
return createCommand(ZDIFF, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, Double> zincrby(K key, double amount, V member) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(amount).addValue(member);
return createCommand(ZINCRBY, new DoubleOutput<>(codec), args);
}
Command<K, V, List<V>> zinter(K... keys) {
notEmpty(keys);
return zinter(new ZAggregateArgs(), keys);
}
Command<K, V, List<V>> zinter(ZAggregateArgs aggregateArgs, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(keys.length).addKeys(keys);
aggregateArgs.build(args);
return createCommand(ZINTER, new ValueListOutput<>(codec), args);
}
Command<K, V, Long> zintercard(K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(keys.length).addKeys(keys);
return createCommand(ZINTERCARD, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> zintercard(long limit, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(keys.length).addKeys(keys).add(LIMIT).add(limit);
return createCommand(ZINTERCARD, new IntegerOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zinterWithScores(K... keys) {
notEmpty(keys);
return zinterWithScores(new ZAggregateArgs(), keys);
}
Command<K, V, List<ScoredValue<V>>> zinterWithScores(ZAggregateArgs aggregateArgs, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(keys.length).addKeys(keys).add(WITHSCORES);
aggregateArgs.build(args);
return createCommand(ZINTER, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, Long> zinterstore(K destination, K... keys) {
notEmpty(keys);
return zinterstore(destination, new ZAggregateArgs(), keys);
}
Command<K, V, Long> zinterstore(K destination, ZAggregateArgs aggregateArgs, K... keys) {
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
LettuceAssert.notNull(aggregateArgs, "ZStoreArgs " + MUST_NOT_BE_NULL);
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(destination).add(keys.length).addKeys(keys);
aggregateArgs.build(args);
return createCommand(ZINTERSTORE, new IntegerOutput<>(codec), args);
}
RedisCommand<K, V, Long> zlexcount(K key, String min, String max) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(min).add(max);
return createCommand(ZLEXCOUNT, new IntegerOutput<>(codec), args);
}
RedisCommand<K, V, Long> zlexcount(K key, Range<? extends V> range) {
notNullKey(key);
notNullRange(range);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(minValue(range)).add(maxValue(range));
return createCommand(ZLEXCOUNT, new IntegerOutput<>(codec), args);
}
Command<K, V, List<Double>> zmscore(K key, V... members) {
notNullKey(key);
notEmpty(members);
return createCommand(ZMSCORE, new DoubleListOutput<>(codec), key, members);
}
Command<K, V, KeyValue<K, ScoredValue<V>>> zmpop(ZPopArgs popArgs, K[] keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(keys.length).addKeys(keys);
popArgs.build(args);
return createCommand(ZMPOP, new KeyValueOfScoredValueOutput<>(codec, keys[0]), args);
}
Command<K, V, KeyValue<K, List<ScoredValue<V>>>> zmpop(long count, ZPopArgs popArgs, K[] keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec).add(keys.length).addKeys(keys);
popArgs.build(args);
args.add(COUNT).add(count);
return createCommand(ZMPOP, new KeyValueListScoredValueOutput<>(codec, keys[0]), args);
}
Command<K, V, ScoredValue<V>> zpopmin(K key) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(key);
return createCommand(ZPOPMIN, new ScoredValueOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zpopmin(K key, long count) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(key).add(count);
return createCommand(ZPOPMIN, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, ScoredValue<V>> zpopmax(K key) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(key);
return createCommand(ZPOPMAX, new ScoredValueOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zpopmax(K key, long count) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(key).add(count);
return createCommand(ZPOPMAX, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, V> zrandmember(K key) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(key);
return createCommand(ZRANDMEMBER, new ValueOutput<>(codec), args);
}
Command<K, V, List<V>> zrandmember(K key, long count) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(key).add(count);
return createCommand(ZRANDMEMBER, new ValueListOutput<>(codec), args);
}
Command<K, V, ScoredValue<V>> zrandmemberWithScores(K key) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(key).add(1).add(WITHSCORES);
return createCommand(ZRANDMEMBER, new ScoredValueOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zrandmemberWithScores(K key, long count) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKeys(key).add(count).add(WITHSCORES);
return createCommand(ZRANDMEMBER, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, List<V>> zrange(K key, long start, long stop) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(start).add(stop);
return createCommand(ZRANGE, new ValueListOutput<>(codec), args);
}
Command<K, V, Long> zrange(ValueStreamingChannel<V> channel, K key, long start, long stop) {
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(start).add(stop);
return createCommand(ZRANGE, new ValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, List<ScoredValue<V>>> zrangeWithScores(K key, long start, long stop) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(start).add(stop).add(WITHSCORES);
return createCommand(ZRANGE, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, Long> zrangeWithScores(ScoredValueStreamingChannel<V> channel, K key, long start, long stop) {
notNullKey(key);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(start).add(stop).add(WITHSCORES);
return createCommand(ZRANGE, new ScoredValueStreamingOutput<>(codec, channel), args);
}
RedisCommand<K, V, List<V>> zrangebylex(K key, String min, String max) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(min).add(max);
return createCommand(ZRANGEBYLEX, new ValueListOutput<>(codec), args);
}
RedisCommand<K, V, List<V>> zrangebylex(K key, String min, String max, long offset, long count) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(min).add(max), Limit.create(offset, count));
return createCommand(ZRANGEBYLEX, new ValueListOutput<>(codec), args);
}
RedisCommand<K, V, List<V>> zrangebylex(K key, Range<? extends V> range, Limit limit) {
notNullKey(key);
notNullRange(range);
notNullLimit(limit);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(minValue(range)).add(maxValue(range)), limit);
return createCommand(ZRANGEBYLEX, new ValueListOutput<>(codec), args);
}
Command<K, V, List<V>> zrangebyscore(K key, double min, double max) {
return zrangebyscore(key, string(min), string(max));
}
Command<K, V, List<V>> zrangebyscore(K key, String min, String max) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(min).add(max);
return createCommand(ZRANGEBYSCORE, new ValueListOutput<>(codec), args);
}
Command<K, V, List<V>> zrangebyscore(K key, double min, double max, long offset, long count) {
return zrangebyscore(key, string(min), string(max), offset, count);
}
Command<K, V, List<V>> zrangebyscore(K key, String min, String max, long offset, long count) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(min).add(max).add(LIMIT).add(offset).add(count);
return createCommand(ZRANGEBYSCORE, new ValueListOutput<>(codec), args);
}
Command<K, V, List<V>> zrangebyscore(K key, Range<? extends Number> range, Limit limit) {
notNullKey(key);
notNullRange(range);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(min(range)).add(max(range));
if (limit.isLimited()) {
args.add(LIMIT).add(limit.getOffset()).add(limit.getCount());
}
return createCommand(ZRANGEBYSCORE, new ValueListOutput<>(codec), args);
}
Command<K, V, Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, double min, double max) {
return zrangebyscore(channel, key, string(min), string(max));
}
Command<K, V, Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, String min, String max) {
notNullKey(key);
notNullMinMax(min, max);
LettuceAssert.notNull(channel, "ScoredValueStreamingChannel " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(min).add(max);
return createCommand(ZRANGEBYSCORE, new ValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, double min, double max, long offset,
long count) {
return zrangebyscore(channel, key, string(min), string(max), offset, count);
}
Command<K, V, Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, String min, String max, long offset,
long count) {
notNullKey(key);
notNullMinMax(min, max);
LettuceAssert.notNull(channel, "ScoredValueStreamingChannel " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(min).add(max), Limit.create(offset, count));
return createCommand(ZRANGEBYSCORE, new ValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrangebyscore(ValueStreamingChannel<V> channel, K key, Range<? extends Number> range, Limit limit) {
notNullKey(key);
notNullRange(range);
notNullLimit(limit);
LettuceAssert.notNull(channel, "ScoredValueStreamingChannel " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(min(range)).add(max(range)), limit);
return createCommand(ZRANGEBYSCORE, new ValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, List<ScoredValue<V>>> zrangebyscoreWithScores(K key, double min, double max) {
return zrangebyscoreWithScores(key, string(min), string(max));
}
Command<K, V, List<ScoredValue<V>>> zrangebyscoreWithScores(K key, String min, String max) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(min).add(max).add(WITHSCORES);
return createCommand(ZRANGEBYSCORE, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zrangebyscoreWithScores(K key, double min, double max, long offset, long count) {
return zrangebyscoreWithScores(key, string(min), string(max), offset, count);
}
Command<K, V, List<ScoredValue<V>>> zrangebyscoreWithScores(K key, String min, String max, long offset, long count) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(min).add(max).add(WITHSCORES), Limit.create(offset, count));
return createCommand(ZRANGEBYSCORE, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zrangebyscoreWithScores(K key, Range<? extends Number> range, Limit limit) {
notNullKey(key);
notNullRange(range);
notNullLimit(limit);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(min(range)).add(max(range)).add(WITHSCORES), limit);
return createCommand(ZRANGEBYSCORE, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, double min, double max) {
return zrangebyscoreWithScores(channel, key, string(min), string(max));
}
Command<K, V, Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, String min, String max) {
notNullKey(key);
notNullMinMax(min, max);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(min).add(max).add(WITHSCORES);
return createCommand(ZRANGEBYSCORE, new ScoredValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, double min, double max,
long offset, long count) {
return zrangebyscoreWithScores(channel, key, string(min), string(max), offset, count);
}
Command<K, V, Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, String min, String max,
long offset, long count) {
notNullKey(key);
notNullMinMax(min, max);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(min).add(max).add(WITHSCORES), Limit.create(offset, count));
return createCommand(ZRANGEBYSCORE, new ScoredValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, Range<? extends Number> range,
Limit limit) {
notNullKey(key);
notNullRange(range);
notNullLimit(limit);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(min(range)).add(max(range)).add(WITHSCORES), limit);
return createCommand(ZRANGEBYSCORE, new ScoredValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrangestore(K dstKey, K srcKey, Range<Long> range, boolean rev) {
notNullKey(srcKey);
notNullKey(dstKey);
notNullRange(range);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKeys(dstKey, srcKey);
args.add(min(range)).add(max(range));
if (rev) {
args.add(REV);
}
return createCommand(ZRANGESTORE, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> zrangestorebylex(K dstKey, K srcKey, Range<? extends V> range, Limit limit, boolean rev) {
notNullKey(srcKey);
notNullKey(dstKey);
notNullRange(range);
notNullLimit(limit);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKeys(dstKey, srcKey);
if (rev) {
args.add(maxValue(range)).add(minValue(range));
} else {
args.add(minValue(range)).add(maxValue(range));
}
args.add(BYLEX);
if (rev) {
args.add(REV);
}
addLimit(args, limit);
return createCommand(ZRANGESTORE, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> zrangestorebyscore(K dstKey, K srcKey, Range<? extends Number> range, Limit limit, boolean rev) {
notNullKey(srcKey);
notNullKey(dstKey);
notNullRange(range);
notNullLimit(limit);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKeys(dstKey, srcKey);
if (rev) {
args.add(max(range)).add(min(range));
} else {
args.add(min(range)).add(max(range));
}
args.add(BYSCORE);
if (rev) {
args.add(REV);
}
addLimit(args, limit);
return createCommand(ZRANGESTORE, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> zrank(K key, V member) {
notNullKey(key);
return createCommand(ZRANK, new IntegerOutput<>(codec), key, member);
}
Command<K, V, ScoredValue<Long>> zrankWithScore(K key, V member) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(member).add(WITHSCORE);
return createCommand(ZRANK, (ScoredValueOutput) new ScoredValueOutput<>(LongCodec.INSTANCE), args);
}
Command<K, V, Long> zrem(K key, V... members) {
notNullKey(key);
LettuceAssert.notNull(members, "Members " + MUST_NOT_BE_NULL);
LettuceAssert.notEmpty(members, "Members " + MUST_NOT_BE_EMPTY);
return createCommand(ZREM, new IntegerOutput<>(codec), key, members);
}
RedisCommand<K, V, Long> zremrangebylex(K key, String min, String max) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(min).add(max);
return createCommand(ZREMRANGEBYLEX, new IntegerOutput<>(codec), args);
}
RedisCommand<K, V, Long> zremrangebylex(K key, Range<? extends V> range) {
notNullKey(key);
notNullRange(range);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(minValue(range)).add(maxValue(range));
return createCommand(ZREMRANGEBYLEX, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> zremrangebyrank(K key, long start, long stop) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(start).add(stop);
return createCommand(ZREMRANGEBYRANK, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> zremrangebyscore(K key, double min, double max) {
return zremrangebyscore(key, string(min), string(max));
}
Command<K, V, Long> zremrangebyscore(K key, String min, String max) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(min).add(max);
return createCommand(ZREMRANGEBYSCORE, new IntegerOutput<>(codec), args);
}
Command<K, V, Long> zremrangebyscore(K key, Range<? extends Number> range) {
notNullKey(key);
notNullRange(range);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(min(range)).add(max(range));
return createCommand(ZREMRANGEBYSCORE, new IntegerOutput<>(codec), args);
}
Command<K, V, List<V>> zrevrange(K key, long start, long stop) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(start).add(stop);
return createCommand(ZREVRANGE, new ValueListOutput<>(codec), args);
}
Command<K, V, Long> zrevrange(ValueStreamingChannel<V> channel, K key, long start, long stop) {
notNullKey(key);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(start).add(stop);
return createCommand(ZREVRANGE, new ValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, List<ScoredValue<V>>> zrevrangeWithScores(K key, long start, long stop) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(start).add(stop).add(WITHSCORES);
return createCommand(ZREVRANGE, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, Long> zrevrangeWithScores(ScoredValueStreamingChannel<V> channel, K key, long start, long stop) {
notNullKey(key);
LettuceAssert.notNull(channel, "ValueStreamingChannel " + MUST_NOT_BE_NULL);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(start).add(stop).add(WITHSCORES);
return createCommand(ZREVRANGE, new ScoredValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, List<V>> zrevrangebylex(K key, Range<? extends V> range, Limit limit) {
notNullKey(key);
notNullRange(range);
notNullLimit(limit);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(maxValue(range)).add(minValue(range)), limit);
return createCommand(ZREVRANGEBYLEX, new ValueListOutput<>(codec), args);
}
Command<K, V, List<V>> zrevrangebyscore(K key, double max, double min) {
return zrevrangebyscore(key, string(max), string(min));
}
Command<K, V, List<V>> zrevrangebyscore(K key, String max, String min) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(max).add(min);
return createCommand(ZREVRANGEBYSCORE, new ValueListOutput<>(codec), args);
}
Command<K, V, List<V>> zrevrangebyscore(K key, double max, double min, long offset, long count) {
return zrevrangebyscore(key, string(max), string(min), offset, count);
}
Command<K, V, List<V>> zrevrangebyscore(K key, String max, String min, long offset, long count) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(max).add(min), Limit.create(offset, count));
return createCommand(ZREVRANGEBYSCORE, new ValueListOutput<>(codec), args);
}
Command<K, V, List<V>> zrevrangebyscore(K key, Range<? extends Number> range, Limit limit) {
notNullKey(key);
notNullRange(range);
notNullLimit(limit);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(max(range)).add(min(range)), limit);
return createCommand(ZREVRANGEBYSCORE, new ValueListOutput<>(codec), args);
}
Command<K, V, Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, double max, double min) {
return zrevrangebyscore(channel, key, string(max), string(min));
}
Command<K, V, Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, String max, String min) {
notNullKey(key);
notNullMinMax(min, max);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).add(max).add(min);
return createCommand(ZREVRANGEBYSCORE, new ValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, double max, double min, long offset,
long count) {
return zrevrangebyscore(channel, key, string(max), string(min), offset, count);
}
Command<K, V, Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, String max, String min, long offset,
long count) {
notNullKey(key);
notNullMinMax(min, max);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(max).add(min), Limit.create(offset, count));
return createCommand(ZREVRANGEBYSCORE, new ValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrevrangebyscore(ValueStreamingChannel<V> channel, K key, Range<? extends Number> range, Limit limit) {
notNullKey(key);
notNullRange(range);
notNullLimit(limit);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(max(range)).add(min(range)), limit);
return createCommand(ZREVRANGEBYSCORE, new ValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, double max, double min) {
return zrevrangebyscoreWithScores(key, string(max), string(min));
}
Command<K, V, List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, String max, String min) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(max).add(min).add(WITHSCORES);
return createCommand(ZREVRANGEBYSCORE, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, double max, double min, long offset, long count) {
return zrevrangebyscoreWithScores(key, string(max), string(min), offset, count);
}
Command<K, V, List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, String max, String min, long offset, long count) {
notNullKey(key);
notNullMinMax(min, max);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(max).add(min).add(WITHSCORES), Limit.create(offset, count));
return createCommand(ZREVRANGEBYSCORE, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zrevrangebyscoreWithScores(K key, Range<? extends Number> range, Limit limit) {
notNullKey(key);
notNullRange(range);
notNullLimit(limit);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(max(range)).add(min(range)).add(WITHSCORES), limit);
return createCommand(ZREVRANGEBYSCORE, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, double max, double min) {
return zrevrangebyscoreWithScores(channel, key, string(max), string(min));
}
Command<K, V, Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, String max, String min) {
notNullKey(key);
notNullMinMax(min, max);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key).add(max).add(min).add(WITHSCORES);
return createCommand(ZREVRANGEBYSCORE, new ScoredValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, double max, double min,
long offset, long count) {
notNullKey(key);
notNull(channel);
return zrevrangebyscoreWithScores(channel, key, string(max), string(min), offset, count);
}
Command<K, V, Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, String max, String min,
long offset, long count) {
notNullKey(key);
notNullMinMax(min, max);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(max).add(min).add(WITHSCORES), Limit.create(offset, count));
return createCommand(ZREVRANGEBYSCORE, new ScoredValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrevrangebyscoreWithScores(ScoredValueStreamingChannel<V> channel, K key, Range<? extends Number> range,
Limit limit) {
notNullKey(key);
notNullRange(range);
notNullLimit(limit);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
addLimit(args.addKey(key).add(max(range)).add(min(range)).add(WITHSCORES), limit);
return createCommand(ZREVRANGEBYSCORE, new ScoredValueStreamingOutput<>(codec, channel), args);
}
Command<K, V, Long> zrevrank(K key, V member) {
notNullKey(key);
return createCommand(ZREVRANK, new IntegerOutput<>(codec), key, member);
}
Command<K, V, ScoredValue<Long>> zrevrankWithScore(K key, V member) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec).addKey(key).addValue(member).add(WITHSCORE);
return createCommand(ZREVRANK, (ScoredValueOutput) new ScoredValueOutput<>(LongCodec.INSTANCE), args);
}
Command<K, V, ScoredValueScanCursor<V>> zscan(K key) {
notNullKey(key);
return zscan(key, ScanCursor.INITIAL, null);
}
Command<K, V, ScoredValueScanCursor<V>> zscan(K key, ScanCursor scanCursor) {
notNullKey(key);
return zscan(key, scanCursor, null);
}
Command<K, V, ScoredValueScanCursor<V>> zscan(K key, ScanArgs scanArgs) {
notNullKey(key);
return zscan(key, ScanCursor.INITIAL, scanArgs);
}
Command<K, V, ScoredValueScanCursor<V>> zscan(K key, ScanCursor scanCursor, ScanArgs scanArgs) {
notNullKey(key);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key);
scanArgs(scanCursor, scanArgs, args);
ScoredValueScanOutput<K, V> output = new ScoredValueScanOutput<>(codec);
return createCommand(ZSCAN, output, args);
}
Command<K, V, StreamScanCursor> zscanStreaming(ScoredValueStreamingChannel<V> channel, K key) {
notNullKey(key);
notNull(channel);
return zscanStreaming(channel, key, ScanCursor.INITIAL, null);
}
Command<K, V, StreamScanCursor> zscanStreaming(ScoredValueStreamingChannel<V> channel, K key, ScanCursor scanCursor) {
notNullKey(key);
notNull(channel);
return zscanStreaming(channel, key, scanCursor, null);
}
Command<K, V, StreamScanCursor> zscanStreaming(ScoredValueStreamingChannel<V> channel, K key, ScanArgs scanArgs) {
notNullKey(key);
notNull(channel);
return zscanStreaming(channel, key, ScanCursor.INITIAL, scanArgs);
}
Command<K, V, StreamScanCursor> zscanStreaming(ScoredValueStreamingChannel<V> channel, K key, ScanCursor scanCursor,
ScanArgs scanArgs) {
notNullKey(key);
notNull(channel);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(key);
scanArgs(scanCursor, scanArgs, args);
ScoredValueScanStreamingOutput<K, V> output = new ScoredValueScanStreamingOutput<>(codec, channel);
return createCommand(ZSCAN, output, args);
}
Command<K, V, Double> zscore(K key, V member) {
notNullKey(key);
return createCommand(ZSCORE, new DoubleOutput<>(codec), key, member);
}
Command<K, V, List<V>> zunion(K... keys) {
notEmpty(keys);
return zunion(new ZAggregateArgs(), keys);
}
Command<K, V, List<V>> zunion(ZAggregateArgs aggregateArgs, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(keys.length).addKeys(keys);
aggregateArgs.build(args);
return createCommand(ZUNION, new ValueListOutput<>(codec), args);
}
Command<K, V, List<ScoredValue<V>>> zunionWithScores(K... keys) {
notEmpty(keys);
return zunionWithScores(new ZAggregateArgs(), keys);
}
Command<K, V, List<ScoredValue<V>>> zunionWithScores(ZAggregateArgs aggregateArgs, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.add(keys.length).addKeys(keys).add(WITHSCORES);
aggregateArgs.build(args);
return createCommand(ZUNION, new ScoredValueListOutput<>(codec), args);
}
Command<K, V, Long> zunionstore(K destination, K... keys) {
notEmpty(keys);
LettuceAssert.notNull(destination, "Destination " + MUST_NOT_BE_NULL);
return zunionstore(destination, new ZAggregateArgs(), keys);
}
Command<K, V, Long> zunionstore(K destination, ZAggregateArgs aggregateArgs, K... keys) {
notEmpty(keys);
CommandArgs<K, V> args = new CommandArgs<>(codec);
args.addKey(destination).add(keys.length).addKeys(keys);
aggregateArgs.build(args);
return createCommand(ZUNIONSTORE, new IntegerOutput<>(codec), args);
}
Command<K, V, List<Map<String, Object>>> clusterLinks() {
CommandArgs<K, V> args = new CommandArgs<>(codec).add(LINKS);
return createCommand(CLUSTER, (CommandOutput) new ObjectOutput<>(StringCodec.UTF8), args);
}
|
RedisCommandBuilder
|
java
|
google__dagger
|
hilt-compiler/main/java/dagger/hilt/processor/internal/root/TestRootMetadata.java
|
{
"start": 1268,
"end": 1345
}
|
class ____ {@code InternalTestRoot} annotated classes. */
@AutoValue
abstract
|
for
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogOp.java
|
{
"start": 9011,
"end": 9140
}
|
class ____ {
public final FSEditLogOpCodes opCode;
long txid;
byte[] rpcClientId;
int rpcCallId;
public static
|
FSEditLogOp
|
java
|
spring-projects__spring-boot
|
module/spring-boot-security/src/test/java/org/springframework/boot/security/autoconfigure/SecurityAutoConfigurationTests.java
|
{
"start": 3996,
"end": 4318
}
|
class ____ implements AuthenticationEventPublisher {
@Override
public void publishAuthenticationSuccess(Authentication authentication) {
}
@Override
public void publishAuthenticationFailure(AuthenticationException exception, Authentication authentication) {
}
}
}
}
|
TestAuthenticationEventPublisher
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/mysql/insert/MySqlInsertTest_4.java
|
{
"start": 1049,
"end": 2201
}
|
class ____ extends MysqlTest {
public void test_0() throws Exception {
String sql = "INSERT INTO tbl_name (a,b,c) VALUES(1,2,3),(4,5,6),(7,8,9);";
MySqlStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
MySqlInsertStatement insertStmt = (MySqlInsertStatement) stmt;
assertEquals(3, insertStmt.getColumns().size());
assertEquals(3, insertStmt.getValuesList().size());
assertEquals(3, insertStmt.getValuesList().get(0).getValues().size());
assertEquals(3, insertStmt.getValuesList().get(1).getValues().size());
assertEquals(3, insertStmt.getValuesList().get(2).getValues().size());
assertEquals(1, statementList.size());
MySqlSchemaStatVisitor visitor = new MySqlSchemaStatVisitor();
stmt.accept(visitor);
assertEquals("INSERT INTO tbl_name (a, b, c)" +
"\nVALUES (1, 2, 3)," +
"\n\t(4, 5, 6)," +
"\n\t(7, 8, 9);", SQLUtils.toMySqlString(insertStmt));
}
}
|
MySqlInsertTest_4
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/preprocessing/customwordembedding/RelevantScriptFeatureExtractorTests.java
|
{
"start": 480,
"end": 4409
}
|
class ____ extends ESTestCase {
private final RelevantScriptFeatureExtractor extractor = new RelevantScriptFeatureExtractor();
private static final double eps = 0.000001;
public void testNonMixedScripts() {
FeatureValue[] results;
results = extractor.extractFeatures("just some plain text");
assertThat(results.length, equalTo(1));
assertThat(results[0].getRow(), equalTo(ScriptDetector.Script.kScriptOtherUtf8OneByte.toInt()));
assertThat(results[0].getWeight(), closeTo(1.0, eps));
results = extractor.extractFeatures("ヸヂ゠ヂ");
assertThat(results.length, equalTo(1));
assertThat(results[0].getRow(), equalTo(ScriptDetector.Script.kScriptKatakana.toInt()));
assertThat(results[0].getWeight(), closeTo(1.0, eps));
// One UTF8 character by itself.
results = extractor.extractFeatures("ゟ");
assertThat(results.length, equalTo(1));
assertThat(results[0].getRow(), equalTo(ScriptDetector.Script.kScriptHiragana.toInt()));
assertThat(results[0].getWeight(), closeTo(1.0, eps));
results = extractor.extractFeatures("ה");
assertThat(results.length, equalTo(1));
assertThat(results[0].getRow(), equalTo(ScriptDetector.Script.kScriptHebrew.toInt()));
assertThat(results[0].getWeight(), closeTo(1.0, eps));
}
public void testMixedScripts() {
FeatureValue[] results;
results = extractor.extractFeatures("ヸtヂe゠xtヂ");
assertThat(results.length, equalTo(2));
assertThat(results[0].getRow(), equalTo(ScriptDetector.Script.kScriptOtherUtf8OneByte.toInt()));
assertThat(results[0].getWeight(), closeTo(0.5, eps));
assertThat(results[1].getRow(), equalTo(ScriptDetector.Script.kScriptKatakana.toInt()));
assertThat(results[1].getWeight(), closeTo(0.5, eps));
results = extractor.extractFeatures("just some 121212%^^( ヸヂ゠ヂ text");
assertThat(results.length, equalTo(2));
assertThat(results[0].getRow(), equalTo(ScriptDetector.Script.kScriptOtherUtf8OneByte.toInt()));
assertThat(results[0].getWeight(), closeTo(0.75, eps));
assertThat(results[1].getRow(), equalTo(ScriptDetector.Script.kScriptKatakana.toInt()));
assertThat(results[1].getWeight(), closeTo(0.25, eps));
}
public void testRelevantScriptFeatureCornerCases() {
FeatureValue[] results;
// Empty string.
results = extractor.extractFeatures("");
assertEquals(0, results.length);
// Only whitespaces.
results = extractor.extractFeatures(" ");
assertEquals(0, results.length);
// Only numbers and punctuation.
results = extractor.extractFeatures("12----)(");
assertEquals(0, results.length);
// Only numbers, punctuation, and spaces.
results = extractor.extractFeatures("12--- - ) ( ");
assertEquals(0, results.length);
// One UTF8 character with some numbers / punctuation / spaces: character at
// one extremity or in the middle.
results = extractor.extractFeatures("1234ゟ");
assertThat(results.length, equalTo(1));
assertThat(results[0].getRow(), equalTo(ScriptDetector.Script.kScriptHiragana.toInt()));
assertThat(results[0].getWeight(), closeTo(1.0, eps));
results = extractor.extractFeatures("ゟ12-(");
assertThat(results.length, equalTo(1));
assertThat(results[0].getRow(), equalTo(ScriptDetector.Script.kScriptHiragana.toInt()));
assertThat(results[0].getWeight(), closeTo(1.0, eps));
results = extractor.extractFeatures("8*1ゟ12----");
assertThat(results.length, equalTo(1));
assertThat(results[0].getRow(), equalTo(ScriptDetector.Script.kScriptHiragana.toInt()));
assertThat(results[0].getWeight(), closeTo(1.0, eps));
;
}
}
|
RelevantScriptFeatureExtractorTests
|
java
|
apache__camel
|
components/camel-dns/src/main/java/org/apache/camel/component/dns/DnsType.java
|
{
"start": 851,
"end": 913
}
|
enum ____ {
dig,
ip,
lookup,
wikipedia
}
|
DnsType
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/function/json/DB2JsonTableFunction.java
|
{
"start": 23752,
"end": 24361
}
|
class ____ implements QueryTransformer {
private final int maxSeriesSize;
public SeriesQueryTransformer(int maxSeriesSize) {
this.maxSeriesSize = maxSeriesSize;
}
@Override
public QuerySpec transform(CteContainer cteContainer, QuerySpec querySpec, SqmToSqlAstConverter converter) {
if ( cteContainer.getCteStatement( CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer.NAME ) == null ) {
cteContainer.addCteStatement( CteGenerateSeriesFunction.CteGenerateSeriesQueryTransformer.createSeriesCte( maxSeriesSize, converter ) );
}
return querySpec;
}
}
}
|
SeriesQueryTransformer
|
java
|
netty__netty
|
example/src/main/java/io/netty/example/proxy/HexDumpProxyFrontendHandler.java
|
{
"start": 1006,
"end": 3834
}
|
class ____ extends ChannelInboundHandlerAdapter {
private final String remoteHost;
private final int remotePort;
// As we use inboundChannel.eventLoop() when building the Bootstrap this does not need to be volatile as
// the outboundChannel will use the same EventLoop (and therefore Thread) as the inboundChannel.
private Channel outboundChannel;
public HexDumpProxyFrontendHandler(String remoteHost, int remotePort) {
this.remoteHost = remoteHost;
this.remotePort = remotePort;
}
@Override
public void channelActive(ChannelHandlerContext ctx) {
final Channel inboundChannel = ctx.channel();
// Start the connection attempt.
Bootstrap b = new Bootstrap();
b.group(inboundChannel.eventLoop())
.channel(ctx.channel().getClass())
.handler(new HexDumpProxyBackendHandler(inboundChannel))
.option(ChannelOption.AUTO_READ, false);
ChannelFuture f = b.connect(remoteHost, remotePort);
outboundChannel = f.channel();
f.addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) {
if (future.isSuccess()) {
// connection complete start to read first data
inboundChannel.read();
} else {
// Close the connection if the connection attempt has failed.
inboundChannel.close();
}
}
});
}
@Override
public void channelRead(final ChannelHandlerContext ctx, Object msg) {
if (outboundChannel.isActive()) {
outboundChannel.writeAndFlush(msg).addListener(new ChannelFutureListener() {
@Override
public void operationComplete(ChannelFuture future) {
if (future.isSuccess()) {
// was able to flush out data, start to read the next chunk
ctx.channel().read();
} else {
future.channel().close();
}
}
});
}
}
@Override
public void channelInactive(ChannelHandlerContext ctx) {
if (outboundChannel != null) {
closeOnFlush(outboundChannel);
}
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
cause.printStackTrace();
closeOnFlush(ctx.channel());
}
/**
* Closes the specified channel after all queued write requests are flushed.
*/
static void closeOnFlush(Channel ch) {
if (ch.isActive()) {
ch.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
}
}
}
|
HexDumpProxyFrontendHandler
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-csi/src/main/java/org/apache/hadoop/yarn/csi/adaptor/CsiAdaptorProtocolService.java
|
{
"start": 2152,
"end": 4294
}
|
class ____ extends AbstractService
implements CsiAdaptorProtocol {
private static final Logger LOG =
LoggerFactory.getLogger(CsiAdaptorProtocolService.class);
private Server server;
private InetSocketAddress adaptorServiceAddress;
private CsiAdaptorPlugin serverImpl;
public CsiAdaptorProtocolService(CsiAdaptorPlugin adaptorImpl) {
super(CsiAdaptorProtocolService.class.getName());
this.serverImpl = adaptorImpl;
}
@Override
protected void serviceInit(Configuration conf) throws Exception {
adaptorServiceAddress = CsiConfigUtils
.getCsiAdaptorAddressForDriver(serverImpl.getDriverName(), conf);
super.serviceInit(conf);
}
@Override
protected void serviceStart() throws Exception {
Configuration conf = getConfig();
YarnRPC rpc = YarnRPC.create(conf);
this.server = rpc.getServer(
CsiAdaptorProtocol.class,
serverImpl, adaptorServiceAddress, conf, null, 1);
this.server.start();
LOG.info("{} started, listening on address: {}",
CsiAdaptorProtocolService.class.getName(),
adaptorServiceAddress.toString());
super.serviceStart();
}
@Override
protected void serviceStop() throws Exception {
if (this.server != null) {
this.server.stop();
}
super.serviceStop();
}
@Override
public GetPluginInfoResponse getPluginInfo(
GetPluginInfoRequest request) throws YarnException, IOException {
return serverImpl.getPluginInfo(request);
}
@Override
public ValidateVolumeCapabilitiesResponse validateVolumeCapacity(
ValidateVolumeCapabilitiesRequest request) throws YarnException,
IOException {
return serverImpl.validateVolumeCapacity(request);
}
@Override
public NodePublishVolumeResponse nodePublishVolume(
NodePublishVolumeRequest request) throws YarnException, IOException {
return serverImpl.nodePublishVolume(request);
}
@Override
public NodeUnpublishVolumeResponse nodeUnpublishVolume(
NodeUnpublishVolumeRequest request) throws YarnException, IOException {
return serverImpl.nodeUnpublishVolume(request);
}
}
|
CsiAdaptorProtocolService
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/generator/command/pipe/DissectGenerator.java
|
{
"start": 753,
"end": 2956
}
|
class ____ implements CommandGenerator {
public static final String DISSECT = "dissect";
public static final CommandGenerator INSTANCE = new DissectGenerator();
@Override
public CommandDescription generate(
List<CommandDescription> previousCommands,
List<Column> previousOutput,
QuerySchema schema,
QueryExecutor executor
) {
String field = EsqlQueryGenerator.randomStringField(previousOutput);
if (field == null) {
return EMPTY_DESCRIPTION;// no strings to dissect, just skip
}
StringBuilder result = new StringBuilder(" | dissect ");
result.append(field);
result.append(" \"");
for (int i = 0; i < randomIntBetween(1, 3); i++) {
if (i > 0) {
result.append(" ");
}
result.append("%{");
String fieldName;
if (randomBoolean()) {
fieldName = EsqlQueryGenerator.randomIdentifier();
} else {
fieldName = EsqlQueryGenerator.randomRawName(previousOutput);
if (fieldName == null) {
fieldName = EsqlQueryGenerator.randomIdentifier();
}
}
result.append(fieldName);
result.append("}");
}
result.append("\"");
String cmdString = result.toString();
return new CommandDescription(DISSECT, this, cmdString, Map.of());
}
@Override
public ValidationResult validateOutput(
List<CommandDescription> previousCommands,
CommandDescription commandDescription,
List<Column> previousColumns,
List<List<Object>> previousOutput,
List<Column> columns,
List<List<Object>> output
) {
if (commandDescription == EMPTY_DESCRIPTION) {
return VALIDATION_OK;
}
if (previousColumns.size() > columns.size()) {
return new ValidationResult(false, "Expecting at least [" + previousColumns.size() + "] columns, got [" + columns.size() + "]");
}
return CommandGenerator.expectSameRowCount(previousCommands, previousOutput, output);
}
}
|
DissectGenerator
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/threadpool/Scheduler.java
|
{
"start": 7814,
"end": 11453
}
|
class ____ extends AbstractRunnable implements Cancellable {
private final Runnable runnable;
private final TimeValue interval;
private final Executor executor;
private final Scheduler scheduler;
private final Consumer<Exception> rejectionConsumer;
private final Consumer<Exception> failureConsumer;
private volatile boolean run = true;
/**
* Creates a new rescheduling runnable
*
* @param runnable the {@link Runnable} that should be executed periodically
* @param interval the time interval between executions
* @param executor the executor where this runnable should be scheduled to run
* @param scheduler the {@link Scheduler} instance to use for scheduling
*/
ReschedulingRunnable(
Runnable runnable,
TimeValue interval,
Executor executor,
Scheduler scheduler,
Consumer<Exception> rejectionConsumer,
Consumer<Exception> failureConsumer
) {
this.runnable = runnable;
this.interval = interval;
this.executor = executor;
this.scheduler = scheduler;
this.rejectionConsumer = rejectionConsumer;
this.failureConsumer = failureConsumer;
}
/**
* Schedules the first execution of this runnable
*/
void start() {
scheduler.schedule(this, interval, executor);
}
@Override
public boolean cancel() {
final boolean result = run;
run = false;
return result;
}
@Override
public boolean isCancelled() {
return run == false;
}
@Override
public void doRun() {
// always check run here since this may have been cancelled since the last execution and we do not want to run
if (run) {
runnable.run();
}
}
@Override
public void onFailure(Exception e) {
try {
if (runnable instanceof AbstractRunnable abstractRunnable) {
abstractRunnable.onFailure(e);
}
} finally {
failureConsumer.accept(e);
}
}
@Override
public void onRejection(Exception e) {
run = false;
try {
if (runnable instanceof AbstractRunnable abstractRunnable) {
abstractRunnable.onRejection(e);
}
} finally {
rejectionConsumer.accept(e);
}
}
@Override
public void onAfter() {
// if this has not been cancelled reschedule it to run again
if (run) {
try {
scheduler.schedule(this, interval, executor);
} catch (final EsRejectedExecutionException e) {
onRejection(e);
}
}
}
@Override
public boolean isForceExecution() {
return runnable instanceof AbstractRunnable abstractRunnable && abstractRunnable.isForceExecution();
}
@Override
public String toString() {
return "ReschedulingRunnable{" + "runnable=" + runnable + ", interval=" + interval + '}';
}
}
/**
* This subclass ensures to properly bubble up Throwable instances of both type Error and Exception thrown in submitted/scheduled
* tasks to the uncaught exception handler
*/
|
ReschedulingRunnable
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/seq/SequenceWriterTest.java
|
{
"start": 1321,
"end": 1432
}
|
class ____ extends PolyBase {
public int b;
public ImplB(int v) { b = v; }
}
static
|
ImplB
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/throughput/BufferDebloatConfiguration.java
|
{
"start": 1464,
"end": 4613
}
|
class ____ {
private final Duration targetTotalTime;
private final int startingBufferSize;
private final int maxBufferSize;
private final int minBufferSize;
private final int bufferDebloatThresholdPercentages;
private final int numberOfSamples;
private final boolean enabled;
private BufferDebloatConfiguration(
boolean enabled,
Duration targetTotalTime,
int startingBufferSize,
int maxBufferSize,
int minBufferSize,
int bufferDebloatThresholdPercentages,
int numberOfSamples) {
// Right now the buffer size can not be grater than integer max value according to
// MemorySegment and buffer implementation.
checkArgument(maxBufferSize > 0);
checkArgument(minBufferSize > 0);
checkArgument(numberOfSamples > 0);
checkArgument(maxBufferSize >= minBufferSize);
checkArgument(targetTotalTime.toMillis() > 0.0);
checkArgument(maxBufferSize >= startingBufferSize);
checkArgument(minBufferSize <= startingBufferSize);
this.targetTotalTime = checkNotNull(targetTotalTime);
this.startingBufferSize = startingBufferSize;
this.maxBufferSize = maxBufferSize;
this.minBufferSize = minBufferSize;
this.bufferDebloatThresholdPercentages = bufferDebloatThresholdPercentages;
this.numberOfSamples = numberOfSamples;
this.enabled = enabled;
}
public boolean isEnabled() {
return enabled;
}
public Duration getTargetTotalTime() {
return targetTotalTime;
}
public int getStartingBufferSize() {
return startingBufferSize;
}
public int getMaxBufferSize() {
return maxBufferSize;
}
public int getMinBufferSize() {
return minBufferSize;
}
public int getBufferDebloatThresholdPercentages() {
return bufferDebloatThresholdPercentages;
}
public int getNumberOfSamples() {
return numberOfSamples;
}
public static BufferDebloatConfiguration fromConfiguration(ReadableConfig config) {
Duration targetTotalTime = config.get(BUFFER_DEBLOAT_TARGET);
int maxBufferSize =
Math.toIntExact(config.get(TaskManagerOptions.MEMORY_SEGMENT_SIZE).getBytes());
int minBufferSize =
Math.toIntExact(config.get(TaskManagerOptions.MIN_MEMORY_SEGMENT_SIZE).getBytes());
int startingBufferSize =
Math.toIntExact(
config.get(TaskManagerOptions.STARTING_MEMORY_SEGMENT_SIZE).getBytes());
int bufferDebloatThresholdPercentages = config.get(BUFFER_DEBLOAT_THRESHOLD_PERCENTAGES);
final int numberOfSamples = config.get(BUFFER_DEBLOAT_SAMPLES);
return new BufferDebloatConfiguration(
config.get(TaskManagerOptions.BUFFER_DEBLOAT_ENABLED),
targetTotalTime,
startingBufferSize,
maxBufferSize,
minBufferSize,
bufferDebloatThresholdPercentages,
numberOfSamples);
}
}
|
BufferDebloatConfiguration
|
java
|
spring-projects__spring-framework
|
spring-jms/src/main/java/org/springframework/jms/annotation/JmsListener.java
|
{
"start": 4481,
"end": 6493
}
|
interface ____ {
/**
* The unique identifier of the container managing this endpoint.
* <p>If none is specified, an auto-generated one is provided.
* @see org.springframework.jms.config.JmsListenerEndpointRegistry#getListenerContainer(String)
*/
String id() default "";
/**
* The bean name of the {@link org.springframework.jms.config.JmsListenerContainerFactory}
* to use to create the message listener container responsible for serving this endpoint.
* <p>If not specified, the default container factory is used, if any.
*/
String containerFactory() default "";
/**
* The destination name for this listener, resolved through the container-wide
* {@link org.springframework.jms.support.destination.DestinationResolver} strategy.
*/
String destination();
/**
* The name for the durable subscription, if any.
* <p>If an explicit subscription name is not specified, a default subscription
* name will be generated based on the fully qualified name of the annotated
* listener method — for example,
* {@code "org.example.jms.ProductListener.processRequest"} for a
* {@code processRequest(...)} listener method in the
* {@code org.example.jms.ProductListener} class.
*/
String subscription() default "";
/**
* The JMS message selector expression, if any.
* <p>See the JMS specification for a detailed definition of selector expressions.
*/
String selector() default "";
/**
* The concurrency limits for the listener, if any. Overrides the value defined
* by the container factory used to create the listener container.
* <p>The concurrency limits can be a "lower-upper" String — for example,
* "5-10" — or a simple upper limit String — for example, "10", in
* which case the lower limit will be 1.
* <p>Note that the underlying container may or may not support all features.
* For instance, it may not be able to scale, in which case only the upper limit
* is used.
*/
String concurrency() default "";
}
|
JmsListener
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/resume/ResumeAware.java
|
{
"start": 966,
"end": 1851
}
|
interface ____<T extends ResumeStrategy> {
/**
* Injects the {@link ResumeStrategy} instance into the object
*
* @param resumeStrategy the resume strategy
*/
void setResumeStrategy(T resumeStrategy);
/**
* Gets the {@link ResumeStrategy} instance
*
* @return the resume strategy
*/
T getResumeStrategy();
/**
* Allows the implementation to provide custom adapter factories. It binds the service name provided in the
* {@link org.apache.camel.spi.annotations.JdkService} annotation in the adapter with the resume aware class. This
* allows the adapter to be resolved automatically in runtime while also allowing fallback to reusable adapters when
* available.
*
* @return
*/
default String adapterFactoryService() {
return ResumeAdapter.RESUME_ADAPTER_FACTORY;
}
}
|
ResumeAware
|
java
|
quarkusio__quarkus
|
independent-projects/qute/core/src/test/java/io/quarkus/qute/EvalTest.java
|
{
"start": 204,
"end": 2640
}
|
class ____ {
@Test
public void testEval() {
Engine engine = Engine.builder().addDefaults().build();
assertEquals("Hello Foo!",
engine.parse("{#eval 'Hello Foo!' /}").render());
assertEquals("Hello Foo!",
engine.parse("{#eval 'Hello Foo!'}ignored!{/eval}").render());
assertEquals("Hello Lu!",
engine.parse("{#eval foo /}").data("foo", "Hello {bar}!", "bar", "Lu").render());
assertEquals("Hello Lu!",
engine.parse("{#eval foo /}").data("foo", "Hello {#eval bar /}!", "bar", "Lu").render());
assertEquals("Hello Foo and true!",
engine.parse("{#eval name='Foo' template='Hello {name} and {bar}!' /}").data("bar", true).render());
assertEquals("Hello Foo and true!",
engine.parse("{#eval template name='Foo' /}").data("template", "Hello {name} and {bar}!", "bar", true)
.render());
}
@Test
public void testTemplateParamNotSet() {
assertThatExceptionOfType(TemplateException.class)
.isThrownBy(() -> Engine.builder().addDefaults().build().parse("{#eval name='Foo' /}"))
.withMessageContainingAll("Parser error", "mandatory section parameters not declared");
}
@Test
public void testInvalidTemplateContents() {
assertThatExceptionOfType(TemplateException.class)
.isThrownBy(() -> Engine.builder().addDefaults().build().parse("{#eval invalid /}").data("invalid", "{foo")
.render())
.withMessageContainingAll("Parser error in the evaluated template", "unterminated expression");
}
@Test
public void testStrEvalNamespace() {
Engine engine = Engine.builder()
.addDefaults()
.addResultMapper(new HtmlEscaper(ImmutableList.of("text/html")))
.addNamespaceResolver(new StrEvalNamespaceResolver())
.build();
assertEquals("Hello world!",
engine.parse("{str:eval('Hello {name}!')}").data("name", "world").render());
assertEquals("Hello world!",
engine.parse("{str:eval(t1)}").data("t1", "Hello {name}!", "name", "world").render());
assertEquals("<p>",
engine.parse("{str:eval('{foo}')}", Variant.forContentType(Variant.TEXT_HTML)).data("foo", "<p>").render());
}
}
|
EvalTest
|
java
|
apache__flink
|
flink-table/flink-table-code-splitter/src/test/resources/declaration/code/TestLocalVariableWithSameName.java
|
{
"start": 7,
"end": 316
}
|
class ____ {
public void myFun1() {
int local1;
String local2 = "AAAAA";
final long local3;
local3 = 100;
}
public void myFun2() {
int local1;
local1 = 5;
long local2;
final String local3 = "BBBBB";
}
}
|
TestLocalVariableWithSameName
|
java
|
google__error-prone
|
test_helpers/src/test/java/com/google/errorprone/CompilationTestHelperTest.java
|
{
"start": 3997,
"end": 4396
}
|
class ____ {
public boolean doIt() {
// BUG: Diagnostic contains: Method may return normally
return true;
}
}
""")
.doTest();
}
@Test
public void fileWithBugMatcherAndMatchingErrorSucceeds() {
compilationHelper
.addSourceLines(
"Test.java",
"""
public
|
Test
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/index/engine/Engine.java
|
{
"start": 82039,
"end": 84207
}
|
class ____ extends Operation {
private final String id;
private final long ifSeqNo;
private final long ifPrimaryTerm;
public Delete(
String id,
BytesRef uid,
long seqNo,
long primaryTerm,
long version,
VersionType versionType,
Origin origin,
long startTime,
long ifSeqNo,
long ifPrimaryTerm
) {
super(uid, seqNo, primaryTerm, version, versionType, origin, startTime);
assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin;
assert ifPrimaryTerm >= 0 : "ifPrimaryTerm [" + ifPrimaryTerm + "] must be non negative";
assert ifSeqNo == UNASSIGNED_SEQ_NO || ifSeqNo >= 0 : "ifSeqNo [" + ifSeqNo + "] must be non negative or unset";
assert (origin == Origin.PRIMARY) || (ifSeqNo == UNASSIGNED_SEQ_NO && ifPrimaryTerm == UNASSIGNED_PRIMARY_TERM)
: "cas operations are only allowed if origin is primary. get [" + origin + "]";
this.id = Objects.requireNonNull(id);
this.ifSeqNo = ifSeqNo;
this.ifPrimaryTerm = ifPrimaryTerm;
}
public Delete(String id, BytesRef uid, long primaryTerm) {
this(
id,
uid,
UNASSIGNED_SEQ_NO,
primaryTerm,
Versions.MATCH_ANY,
VersionType.INTERNAL,
Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
);
}
@Override
public String id() {
return this.id;
}
@Override
public TYPE operationType() {
return TYPE.DELETE;
}
@Override
public int estimatedSizeInBytes() {
return uid().length * 2 + 20;
}
public long getIfSeqNo() {
return ifSeqNo;
}
public long getIfPrimaryTerm() {
return ifPrimaryTerm;
}
}
public static
|
Delete
|
java
|
apache__camel
|
components/camel-bindy/src/test/java/org/apache/camel/dataformat/bindy/fix/BindySimpleKeyValuePairWithoutSectionMarshallDslTest.java
|
{
"start": 3516,
"end": 4110
}
|
class ____ extends RouteBuilder {
BindyKeyValuePairDataFormat orderBindyDataFormat
= new BindyKeyValuePairDataFormat(org.apache.camel.dataformat.bindy.model.fix.withoutsection.Order.class);
@Override
public void configure() {
// default should errors go to mock:error
errorHandler(deadLetterChannel(URI_MOCK_ERROR));
onException(IllegalArgumentException.class).maximumRedeliveries(0).handled(true);
from(URI_DIRECT_START).marshal(orderBindyDataFormat).to(URI_MOCK_RESULT);
}
}
}
|
ContextConfig
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkAfterWriteFsyncFailureIT.java
|
{
"start": 4527,
"end": 5487
}
|
class ____ extends FilterFileSystemProvider {
private final AtomicBoolean failFSyncs = new AtomicBoolean();
public FSyncFailureFileSystemProvider(FileSystem delegate) {
super("fsyncfailure://", delegate);
}
public void failFSyncOnce(boolean shouldFail) {
failFSyncs.set(shouldFail);
}
@Override
public FileChannel newFileChannel(Path path, Set<? extends OpenOption> options, FileAttribute<?>... attrs) throws IOException {
return new FilterFileChannel(super.newFileChannel(path, options, attrs)) {
@Override
public void force(boolean metaData) throws IOException {
if (failFSyncs.compareAndSet(true, false)) {
throw new IOException("simulated");
}
super.force(metaData);
}
};
}
}
}
|
FSyncFailureFileSystemProvider
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/tools/picocli/CommandLine.java
|
{
"start": 160501,
"end": 160812
}
|
class ____ implements ITypeConverter<CharSequence> {
@Override
public String convert(final String value) {
return value;
}
}
/** Converts text to a {@code Byte} by delegating to {@link Byte#valueOf(String)}.*/
static
|
CharSequenceConverter
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/erroneous/attributereference/ErroneousMappingsTest.java
|
{
"start": 758,
"end": 3141
}
|
class ____ {
@ProcessorTest
@IssueKey("11")
@WithClasses( { ErroneousMapper.class, AnotherTarget.class } )
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousMapper.class,
kind = Kind.ERROR,
line = 20,
message = "Target property \"foo\" must not be mapped more than once."),
@Diagnostic(type = ErroneousMapper.class,
kind = Kind.ERROR,
line = 16,
message = "No property named \"bar\" exists in source parameter(s). " +
"Did you mean \"foo\"?"),
@Diagnostic(type = ErroneousMapper.class,
kind = Kind.ERROR,
line = 18,
message = "Unknown property \"bar\" in result type Target. Did you mean \"foo\"?"),
@Diagnostic(type = ErroneousMapper.class,
kind = Kind.ERROR,
line = 23,
message = "No property named \"source1.foo\" exists in source parameter(s). " +
"Did you mean \"foo\"?"),
@Diagnostic(type = ErroneousMapper.class,
kind = Kind.WARNING,
line = 26,
message = "Unmapped target property: \"bar\".")
}
)
public void shouldFailToGenerateMappings() {
}
@ProcessorTest
@WithClasses( { ErroneousMapper1.class, DummySource.class } )
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousMapper1.class,
kind = Kind.ERROR,
line = 16,
message = "The type of parameter \"source\" has no property named \"foobar\".")
}
)
public void shouldFailToGenerateMappingsErrorOnMandatoryParameterName() {
}
@ProcessorTest
@WithClasses( { ErroneousMapper2.class } )
@ExpectedCompilationOutcome(
value = CompilationResult.FAILED,
diagnostics = {
@Diagnostic(type = ErroneousMapper2.class,
kind = Kind.ERROR,
line = 19,
message = "Target property \"foo\" must not be mapped more than once." )
}
)
public void shouldFailToGenerateMappingsErrorOnDuplicateTarget() {
}
}
|
ErroneousMappingsTest
|
java
|
elastic__elasticsearch
|
libs/native/src/main/java/org/elasticsearch/nativeaccess/MacNativeAccess.java
|
{
"start": 934,
"end": 5667
}
|
class ____ extends PosixNativeAccess {
private static final int F_PREALLOCATE = 42;
private static final int F_ALLOCATECONTIG = 0x2; // allocate contiguous space
private static final int F_ALLOCATEALL = 0x4; // allocate all the requested space or no space at all
private static final int F_PEOFPOSMODE = 3; // allocate from the physical end of the file
/** The only supported flag... */
static final int SANDBOX_NAMED = 1;
/** Allow everything except process fork and execution */
static final String SANDBOX_RULES = "(version 1) (allow default) (deny process-fork) (deny process-exec)";
private final MacCLibrary macLibc;
MacNativeAccess(NativeLibraryProvider libraryProvider) {
super("MacOS", libraryProvider, new PosixConstants(9223372036854775807L, 5, 1, 6, 512, 144, 96, 104));
this.macLibc = libraryProvider.getLibrary(MacCLibrary.class);
}
@Override
protected long getMaxThreads() {
return ProcessLimits.UNKNOWN;
}
@Override
protected void logMemoryLimitInstructions() {
// we don't have instructions for macos
}
@Override
protected boolean nativePreallocate(int fd, long currentSize, long newSize) {
var fst = libc.newFStore();
fst.set_flags(F_ALLOCATECONTIG);
fst.set_posmode(F_PEOFPOSMODE);
fst.set_offset(0);
fst.set_length(newSize);
// first, try allocating contiguously
if (libc.fcntl(fd, F_PREALLOCATE, fst) != 0) {
// TODO: log warning?
// that failed, so let us try allocating non-contiguously
fst.set_flags(F_ALLOCATEALL);
if (libc.fcntl(fd, F_PREALLOCATE, fst) != 0) {
// i'm afraid captain dale had to bail
logger.warn("Could not allocate non-contiguous size: " + libc.strerror(libc.errno()));
return false;
}
}
if (libc.ftruncate(fd, newSize) != 0) {
logger.warn("Could not truncate file: " + libc.strerror(libc.errno()));
return false;
}
return true;
}
/**
* Installs exec system call filtering on MacOS.
* <p>
* Two different methods of filtering are used. Since MacOS is BSD based, process creation
* is first restricted with {@code setrlimit(RLIMIT_NPROC)}.
* <p>
* Additionally, on Mac OS X Leopard or above, a custom {@code sandbox(7)} ("Seatbelt") profile is installed that
* denies the following rules:
* <ul>
* <li>{@code process-fork}</li>
* <li>{@code process-exec}</li>
* </ul>
* @see <a href="https://reverse.put.as/wp-content/uploads/2011/06/The-Apple-Sandbox-BHDC2011-Paper.pdf">
* * https://reverse.put.as/wp-content/uploads/2011/06/The-Apple-Sandbox-BHDC2011-Paper.pdf</a>
*/
@Override
public void tryInstallExecSandbox() {
initBsdSandbox();
initMacSandbox();
execSandboxState = ExecSandboxState.ALL_THREADS;
}
@SuppressForbidden(reason = "Java tmp dir is ok")
private static Path createTempRulesFile() throws IOException {
return Files.createTempFile("es", "sb");
}
private void initMacSandbox() {
// write rules to a temporary file, which will be passed to sandbox_init()
Path rules;
try {
rules = createTempRulesFile();
Files.write(rules, Collections.singleton(SANDBOX_RULES));
} catch (IOException e) {
throw new UncheckedIOException(e);
}
try {
var errorRef = macLibc.newErrorReference();
int ret = macLibc.sandbox_init(rules.toAbsolutePath().toString(), SANDBOX_NAMED, errorRef);
// if sandbox_init() fails, add the message from the OS (e.g. syntax error) and free the buffer
if (ret != 0) {
RuntimeException e = new UnsupportedOperationException("sandbox_init(): " + errorRef.toString());
macLibc.sandbox_free_error(errorRef);
throw e;
}
logger.debug("OS X seatbelt initialization successful");
} finally {
IOUtils.deleteFilesIgnoringExceptions(rules);
}
}
private void initBsdSandbox() {
RLimit limit = libc.newRLimit();
limit.rlim_cur(0);
limit.rlim_max(0);
// not a standard limit, means something different on linux, etc!
final int RLIMIT_NPROC = 7;
if (libc.setrlimit(RLIMIT_NPROC, limit) != 0) {
throw new UnsupportedOperationException("RLIMIT_NPROC unavailable: " + libc.strerror(libc.errno()));
}
logger.debug("BSD RLIMIT_NPROC initialization successful");
}
}
|
MacNativeAccess
|
java
|
micronaut-projects__micronaut-core
|
router/src/main/java/io/micronaut/web/router/RouteBuilder.java
|
{
"start": 3918,
"end": 3994
}
|
class ____ a singular REST endpoint.</p>
*
* <p>For example given a
|
as
|
java
|
google__guava
|
android/guava/src/com/google/common/collect/AbstractMapBasedMultimap.java
|
{
"start": 19847,
"end": 21492
}
|
class ____ extends WrappedCollection implements SortedSet<V> {
WrappedSortedSet(
@ParametricNullness K key, SortedSet<V> delegate, @Nullable WrappedCollection ancestor) {
super(key, delegate, ancestor);
}
SortedSet<V> getSortedSetDelegate() {
return (SortedSet<V>) getDelegate();
}
@Override
public @Nullable Comparator<? super V> comparator() {
return getSortedSetDelegate().comparator();
}
@Override
@ParametricNullness
public V first() {
refreshIfEmpty();
return getSortedSetDelegate().first();
}
@Override
@ParametricNullness
public V last() {
refreshIfEmpty();
return getSortedSetDelegate().last();
}
@Override
public SortedSet<V> headSet(@ParametricNullness V toElement) {
refreshIfEmpty();
return new WrappedSortedSet(
getKey(),
getSortedSetDelegate().headSet(toElement),
(getAncestor() == null) ? this : getAncestor());
}
@Override
public SortedSet<V> subSet(@ParametricNullness V fromElement, @ParametricNullness V toElement) {
refreshIfEmpty();
return new WrappedSortedSet(
getKey(),
getSortedSetDelegate().subSet(fromElement, toElement),
(getAncestor() == null) ? this : getAncestor());
}
@Override
public SortedSet<V> tailSet(@ParametricNullness V fromElement) {
refreshIfEmpty();
return new WrappedSortedSet(
getKey(),
getSortedSetDelegate().tailSet(fromElement),
(getAncestor() == null) ? this : getAncestor());
}
}
@WeakOuter
final
|
WrappedSortedSet
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-core/src/test/java/org/springframework/security/oauth2/core/endpoint/OAuth2AuthorizationResponseTypeTests.java
|
{
"start": 876,
"end": 1076
}
|
class ____ {
@Test
public void getValueWhenResponseTypeCodeThenReturnCode() {
assertThat(OAuth2AuthorizationResponseType.CODE.getValue()).isEqualTo("code");
}
}
|
OAuth2AuthorizationResponseTypeTests
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/suite/engine/testsuites/LifecycleMethodsSuites.java
|
{
"start": 2246,
"end": 2667
}
|
class ____ {
@BeforeSuite
static void setUp() {
StatefulTestCase.callSequence.add("beforeSuiteMethod");
throw new RuntimeException("Exception thrown by @BeforeSuite method");
}
@AfterSuite
static void tearDown() {
StatefulTestCase.callSequence.add("afterSuiteMethod");
throw new RuntimeException("Exception thrown by @AfterSuite method");
}
}
@TestSuite
public static
|
FailingBeforeAndAfterSuite
|
java
|
junit-team__junit5
|
jupiter-tests/src/test/java/org/junit/jupiter/api/AssertFalseAssertionsTests.java
|
{
"start": 775,
"end": 2894
}
|
class ____ {
@Test
void assertFalseWithBooleanFalse() {
assertFalse(false);
assertFalse(false, "test");
assertFalse(false, () -> "test");
}
@Test
void assertFalseWithBooleanSupplierFalse() {
assertFalse(() -> false);
assertFalse(() -> false, "test");
assertFalse(() -> false, () -> "test");
}
@Test
void assertFalseWithBooleanFalseAndMessageSupplier() {
assertFalse(false, () -> "test");
}
@Test
void assertFalseWithBooleanSupplierFalseAndMessageSupplier() {
assertFalse(() -> false, () -> "test");
}
@Test
void assertFalseWithBooleanTrueAndDefaultMessageWithExpectedAndActualValues() {
try {
assertFalse(true);
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "expected: <false> but was: <true>");
assertExpectedAndActualValues(ex, false, true);
}
}
@Test
void assertFalseWithBooleanTrueAndString() {
try {
assertFalse(true, "test");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "test ==> expected: <false> but was: <true>");
assertExpectedAndActualValues(ex, false, true);
}
}
@Test
void assertFalseWithBooleanSupplierTrueAndString() {
try {
assertFalse(() -> true, "test");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "test ==> expected: <false> but was: <true>");
assertExpectedAndActualValues(ex, false, true);
}
}
@Test
void assertFalseWithBooleanTrueAndMessageSupplier() {
try {
assertFalse(true, () -> "test");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "test ==> expected: <false> but was: <true>");
assertExpectedAndActualValues(ex, false, true);
}
}
@Test
void assertFalseWithBooleanSupplierTrueAndMessageSupplier() {
try {
assertFalse(() -> true, () -> "test");
expectAssertionFailedError();
}
catch (AssertionFailedError ex) {
assertMessageEquals(ex, "test ==> expected: <false> but was: <true>");
assertExpectedAndActualValues(ex, false, true);
}
}
}
|
AssertFalseAssertionsTests
|
java
|
quarkusio__quarkus
|
extensions/arc/runtime-dev/src/main/java/io/quarkus/arc/runtime/dev/console/Invocation.java
|
{
"start": 386,
"end": 2487
}
|
class ____ {
private final InjectableBean<?> interceptedBean;
/**
* Start time in ms
*/
private final long start;
/**
* Duration in ns
*/
private final long duration;
private final Method method;
private final Kind kind;
private final String message;
private final List<Invocation> children;
Invocation(InjectableBean<?> interceptedBean, long start, long duration,
Method method, Kind kind, String message, List<Invocation> children) {
this.interceptedBean = interceptedBean;
this.start = start;
this.duration = duration;
this.method = method;
this.children = children;
this.kind = kind;
this.message = message;
}
public InjectableBean<?> getInterceptedBean() {
return interceptedBean;
}
public long getStart() {
return start;
}
public String getStartFormatted() {
return LocalDateTime.ofInstant(Instant.ofEpochMilli(start), ZoneId.systemDefault()).toString();
}
public long getDuration() {
return duration;
}
public long getDurationMillis() {
return TimeUnit.NANOSECONDS.toMillis(duration);
}
public Method getMethod() {
return method;
}
public String getDeclaringClassName() {
return method.getDeclaringClass().getName();
}
public List<Invocation> getChildren() {
return children;
}
public Kind getKind() {
return kind;
}
public String getMessage() {
return message;
}
@Override
public String toString() {
return kind + " invocation of " + method;
}
public String getPackageName(String name) {
int lastDot = name.lastIndexOf('.');
if (lastDot != -1) {
return name.substring(0, lastDot);
}
return "";
}
public boolean isQuarkusBean() {
if (interceptedBean == null) {
return false;
}
return interceptedBean.getBeanClass().getName().startsWith("io.quarkus");
}
public
|
Invocation
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/inject/QualifierWithTypeUseTest.java
|
{
"start": 1787,
"end": 1976
}
|
interface ____ {}
@Qualifier
// BUG: Diagnostic contains: remove
@Target({ElementType.TYPE_USE, ElementType.TYPE_PARAMETER})
@
|
Qualifier1
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java
|
{
"start": 1484,
"end": 4191
}
|
class ____ extends ValuesSourceAggregationBuilder.LeafOnly<GeoCentroidAggregationBuilder> {
public static final String NAME = "geo_centroid";
public static final ValuesSourceRegistry.RegistryKey<MetricAggregatorSupplier> REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(
NAME,
MetricAggregatorSupplier.class
);
public static final ObjectParser<GeoCentroidAggregationBuilder, String> PARSER = ObjectParser.fromBuilder(
NAME,
GeoCentroidAggregationBuilder::new
);
static {
ValuesSourceAggregationBuilder.declareFields(PARSER, true, false, false);
}
public static void registerAggregators(ValuesSourceRegistry.Builder builder) {
GeoCentroidAggregatorFactory.registerAggregators(builder);
}
public GeoCentroidAggregationBuilder(String name) {
super(name);
}
protected GeoCentroidAggregationBuilder(
GeoCentroidAggregationBuilder clone,
AggregatorFactories.Builder factoriesBuilder,
Map<String, Object> metadata
) {
super(clone, factoriesBuilder, metadata);
}
@Override
protected ValuesSourceType defaultValueSourceType() {
return CoreValuesSourceType.GEOPOINT;
}
@Override
protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metadata) {
return new GeoCentroidAggregationBuilder(this, factoriesBuilder, metadata);
}
/**
* Read from a stream.
*/
public GeoCentroidAggregationBuilder(StreamInput in) throws IOException {
super(in);
}
@Override
public boolean supportsSampling() {
return true;
}
@Override
protected void innerWriteTo(StreamOutput out) {
// Do nothing, no extra state to write to stream
}
@Override
protected GeoCentroidAggregatorFactory innerBuild(
AggregationContext context,
ValuesSourceConfig config,
AggregatorFactory parent,
AggregatorFactories.Builder subFactoriesBuilder
) throws IOException {
MetricAggregatorSupplier aggregatorSupplier = context.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config);
return new GeoCentroidAggregatorFactory(name, config, context, parent, subFactoriesBuilder, metadata, aggregatorSupplier);
}
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
return builder;
}
@Override
public String getType() {
return NAME;
}
@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersion.zero();
}
}
|
GeoCentroidAggregationBuilder
|
java
|
quarkusio__quarkus
|
independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/inheritance/ScopeInheritanceTest.java
|
{
"start": 2093,
"end": 2283
}
|
class ____ extends SubBean {
}
// should inherit scope
@JustSomeBeanDefiningAnnotation // just to add bean defining annotation to have it recognized
static
|
RequestScopedSubBean
|
java
|
spring-projects__spring-security
|
oauth2/oauth2-authorization-server/src/test/java/org/springframework/security/oauth2/server/authorization/settings/ClientSettingsTests.java
|
{
"start": 942,
"end": 3133
}
|
class ____ {
@Test
public void buildWhenDefaultThenDefaultsAreSet() {
ClientSettings clientSettings = ClientSettings.builder().build();
assertThat(clientSettings.getSettings()).hasSize(2);
assertThat(clientSettings.isRequireProofKey()).isTrue();
assertThat(clientSettings.isRequireAuthorizationConsent()).isFalse();
}
@Test
public void requireProofKeyWhenTrueThenSet() {
ClientSettings clientSettings = ClientSettings.builder().requireProofKey(true).build();
assertThat(clientSettings.isRequireProofKey()).isTrue();
}
@Test
public void requireAuthorizationConsentWhenTrueThenSet() {
ClientSettings clientSettings = ClientSettings.builder().requireAuthorizationConsent(true).build();
assertThat(clientSettings.isRequireAuthorizationConsent()).isTrue();
}
@Test
public void tokenEndpointAuthenticationSigningAlgorithmWhenHS256ThenSet() {
ClientSettings clientSettings = ClientSettings.builder()
.tokenEndpointAuthenticationSigningAlgorithm(MacAlgorithm.HS256)
.build();
assertThat(clientSettings.getTokenEndpointAuthenticationSigningAlgorithm()).isEqualTo(MacAlgorithm.HS256);
}
@Test
public void jwkSetUrlWhenProvidedThenSet() {
ClientSettings clientSettings = ClientSettings.builder().jwkSetUrl("https://client.example.com/jwks").build();
assertThat(clientSettings.getJwkSetUrl()).isEqualTo("https://client.example.com/jwks");
}
@Test
public void x509CertificateSubjectDNWhenProvidedThenSet() {
ClientSettings clientSettings = ClientSettings.builder()
.x509CertificateSubjectDN("CN=demo-client-sample, OU=Spring Samples, O=Spring, C=US")
.build();
assertThat(clientSettings.getX509CertificateSubjectDN())
.isEqualTo("CN=demo-client-sample, OU=Spring Samples, O=Spring, C=US");
}
@Test
public void settingWhenCustomThenSet() {
ClientSettings clientSettings = ClientSettings.builder()
.setting("name1", "value1")
.settings((settings) -> settings.put("name2", "value2"))
.build();
assertThat(clientSettings.getSettings()).hasSize(4);
assertThat(clientSettings.<String>getSetting("name1")).isEqualTo("value1");
assertThat(clientSettings.<String>getSetting("name2")).isEqualTo("value2");
}
}
|
ClientSettingsTests
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/service/FileSystemAccess.java
|
{
"start": 1095,
"end": 1560
}
|
interface ____<T> {
public T execute(FileSystem fs) throws IOException;
}
public <T> T execute(String user, Configuration conf, FileSystemExecutor<T> executor) throws
FileSystemAccessException;
public FileSystem createFileSystem(String user, Configuration conf) throws IOException, FileSystemAccessException;
public void releaseFileSystem(FileSystem fs) throws IOException;
public Configuration getFileSystemConfiguration();
}
|
FileSystemExecutor
|
java
|
elastic__elasticsearch
|
x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/topn/UnsupportedTypesTopNEncoder.java
|
{
"start": 569,
"end": 935
}
|
class ____ needed to build the TopNOperator value and key extractors infrastructure, encoding/decoding is needed
* when actually sorting on a field (which shouldn't be possible for unsupported data types) using key extractors, or when encoding/decoding
* unsupported data types fields values (which should always be "null" by convention) using value extractors.
*/
|
is
|
java
|
assertj__assertj-core
|
assertj-tests/assertj-integration-tests/assertj-core-tests/src/test/java/org/assertj/tests/core/api/class_/ClassAssert_hasRecordComponents_Test.java
|
{
"start": 1331,
"end": 3006
}
|
class ____ {
@Test
void should_fail_if_actual_is_null() {
// GIVEN
Class<?> actual = null;
// WHEN
var error = expectAssertionError(() -> assertThat(actual).hasRecordComponents("component"));
// THEN
then(error).hasMessage(shouldNotBeNull().create());
}
@ParameterizedTest
@ValueSource(classes = {
String.class,
ArrayList.class,
ValueSource.class
})
void should_fail_if_actual_is_not_a_record(Class<?> actual) {
// WHEN
var error = expectAssertionError(() -> assertThat(actual).hasRecordComponents("component"));
// THEN
then(error).hasMessage(shouldBeRecord(actual).create());
}
@Test
void should_pass_if_record_has_expected_component() {
// WHEN/THEN
assertThat(MyRecord.class).hasRecordComponents("componentOne");
}
@Test
void should_pass_if_record_has_expected_components() {
// WHEN/THEN
assertThat(MyRecord.class).hasRecordComponents("componentOne", "componentTwo");
}
@Test
void should_fail_if_record_components_are_missing() {
// WHEN
var assertionError = expectAssertionError(() -> assertThat(MyRecord.class).hasRecordComponents("componentOne",
"missing"));
// THEN
then(assertionError).hasMessage(shouldHaveRecordComponents(MyRecord.class,
set("componentOne", "missing"),
set("missing")).create());
}
private record MyRecord(String componentOne, String componentTwo) {
}
}
|
ClassAssert_hasRecordComponents_Test
|
java
|
spring-projects__spring-boot
|
smoke-test/spring-boot-smoke-test-activemq-embedded/src/test/java/smoketest/activemq/embedded/SampleActiveMQApplicationTests.java
|
{
"start": 1226,
"end": 1505
}
|
class ____ {
@Autowired
private Producer producer;
@Test
void sendSimpleMessage(CapturedOutput output) throws InterruptedException {
this.producer.send("Test message");
Thread.sleep(1000L);
assertThat(output).contains("Test message");
}
}
|
SampleActiveMQApplicationTests
|
java
|
elastic__elasticsearch
|
libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/vectorization/MSDibitToInt4ESNextOSQVectorsScorer.java
|
{
"start": 1300,
"end": 20968
}
|
class ____ extends MemorySegmentESNextOSQVectorsScorer.MemorySegmentScorer {
private static final float TWO_BIT_SCALE = 1f / ((1 << 2) - 1);
MSDibitToInt4ESNextOSQVectorsScorer(IndexInput in, int dimensions, int dataLength, MemorySegment memorySegment) {
super(in, dimensions, dataLength, memorySegment);
}
@Override
public long quantizeScore(byte[] q) throws IOException {
assert q.length == length * 2;
// 128 / 8 == 16
if (length >= 16 && PanamaESVectorUtilSupport.HAS_FAST_INTEGER_VECTORS) {
if (PanamaESVectorUtilSupport.VECTOR_BITSIZE >= 256) {
return quantizeScore256DibitToInt4(q);
} else if (PanamaESVectorUtilSupport.VECTOR_BITSIZE == 128) {
return quantizeScore128DibitToInt4(q);
}
}
return Long.MIN_VALUE;
}
private long quantizeScore256DibitToInt4(byte[] q) throws IOException {
int lower = (int) quantizeScore256(q);
int upper = (int) quantizeScore256(q);
return lower + ((long) upper << 1);
}
private long quantizeScore128DibitToInt4(byte[] q) throws IOException {
int lower = (int) quantizeScore128(q);
int upper = (int) quantizeScore128(q);
return lower + ((long) upper << 1);
}
private long quantizeScore256(byte[] q) throws IOException {
long subRet0 = 0;
long subRet1 = 0;
long subRet2 = 0;
long subRet3 = 0;
int i = 0;
long offset = in.getFilePointer();
int size = length / 2;
if (size >= ByteVector.SPECIES_256.vectorByteSize() * 2) {
int limit = ByteVector.SPECIES_256.loopBound(size);
var sum0 = LongVector.zero(LONG_SPECIES_256);
var sum1 = LongVector.zero(LONG_SPECIES_256);
var sum2 = LongVector.zero(LONG_SPECIES_256);
var sum3 = LongVector.zero(LONG_SPECIES_256);
for (; i < limit; i += ByteVector.SPECIES_256.length(), offset += LONG_SPECIES_256.vectorByteSize()) {
var vq0 = ByteVector.fromArray(BYTE_SPECIES_256, q, i).reinterpretAsLongs();
var vq1 = ByteVector.fromArray(BYTE_SPECIES_256, q, i + size).reinterpretAsLongs();
var vq2 = ByteVector.fromArray(BYTE_SPECIES_256, q, i + size * 2).reinterpretAsLongs();
var vq3 = ByteVector.fromArray(BYTE_SPECIES_256, q, i + size * 3).reinterpretAsLongs();
var vd = LongVector.fromMemorySegment(LONG_SPECIES_256, memorySegment, offset, ByteOrder.LITTLE_ENDIAN);
sum0 = sum0.add(vq0.and(vd).lanewise(VectorOperators.BIT_COUNT));
sum1 = sum1.add(vq1.and(vd).lanewise(VectorOperators.BIT_COUNT));
sum2 = sum2.add(vq2.and(vd).lanewise(VectorOperators.BIT_COUNT));
sum3 = sum3.add(vq3.and(vd).lanewise(VectorOperators.BIT_COUNT));
}
subRet0 += sum0.reduceLanes(VectorOperators.ADD);
subRet1 += sum1.reduceLanes(VectorOperators.ADD);
subRet2 += sum2.reduceLanes(VectorOperators.ADD);
subRet3 += sum3.reduceLanes(VectorOperators.ADD);
}
if (size - i >= ByteVector.SPECIES_128.vectorByteSize()) {
var sum0 = LongVector.zero(LONG_SPECIES_128);
var sum1 = LongVector.zero(LONG_SPECIES_128);
var sum2 = LongVector.zero(LONG_SPECIES_128);
var sum3 = LongVector.zero(LONG_SPECIES_128);
int limit = ByteVector.SPECIES_128.loopBound(size);
for (; i < limit; i += ByteVector.SPECIES_128.length(), offset += LONG_SPECIES_128.vectorByteSize()) {
var vq0 = ByteVector.fromArray(BYTE_SPECIES_128, q, i).reinterpretAsLongs();
var vq1 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + size).reinterpretAsLongs();
var vq2 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + size * 2).reinterpretAsLongs();
var vq3 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + size * 3).reinterpretAsLongs();
var vd = LongVector.fromMemorySegment(LONG_SPECIES_128, memorySegment, offset, ByteOrder.LITTLE_ENDIAN);
sum0 = sum0.add(vq0.and(vd).lanewise(VectorOperators.BIT_COUNT));
sum1 = sum1.add(vq1.and(vd).lanewise(VectorOperators.BIT_COUNT));
sum2 = sum2.add(vq2.and(vd).lanewise(VectorOperators.BIT_COUNT));
sum3 = sum3.add(vq3.and(vd).lanewise(VectorOperators.BIT_COUNT));
}
subRet0 += sum0.reduceLanes(VectorOperators.ADD);
subRet1 += sum1.reduceLanes(VectorOperators.ADD);
subRet2 += sum2.reduceLanes(VectorOperators.ADD);
subRet3 += sum3.reduceLanes(VectorOperators.ADD);
}
// process scalar tail
in.seek(offset);
for (final int upperBound = size & -Long.BYTES; i < upperBound; i += Long.BYTES) {
final long value = in.readLong();
subRet0 += Long.bitCount((long) BitUtil.VH_LE_LONG.get(q, i) & value);
subRet1 += Long.bitCount((long) BitUtil.VH_LE_LONG.get(q, i + size) & value);
subRet2 += Long.bitCount((long) BitUtil.VH_LE_LONG.get(q, i + 2 * size) & value);
subRet3 += Long.bitCount((long) BitUtil.VH_LE_LONG.get(q, i + 3 * size) & value);
}
for (final int upperBound = size & -Integer.BYTES; i < upperBound; i += Integer.BYTES) {
final int value = in.readInt();
subRet0 += Integer.bitCount((int) BitUtil.VH_LE_INT.get(q, i) & value);
subRet1 += Integer.bitCount((int) BitUtil.VH_LE_INT.get(q, i + size) & value);
subRet2 += Integer.bitCount((int) BitUtil.VH_LE_INT.get(q, i + 2 * size) & value);
subRet3 += Integer.bitCount((int) BitUtil.VH_LE_INT.get(q, i + 3 * size) & value);
}
for (; i < size; i++) {
int dValue = in.readByte() & 0xFF;
subRet0 += Integer.bitCount((q[i] & dValue) & 0xFF);
subRet1 += Integer.bitCount((q[i + size] & dValue) & 0xFF);
subRet2 += Integer.bitCount((q[i + 2 * size] & dValue) & 0xFF);
subRet3 += Integer.bitCount((q[i + 3 * size] & dValue) & 0xFF);
}
return subRet0 + (subRet1 << 1) + (subRet2 << 2) + (subRet3 << 3);
}
private long quantizeScore128(byte[] q) throws IOException {
long subRet0 = 0;
long subRet1 = 0;
long subRet2 = 0;
long subRet3 = 0;
int i = 0;
long offset = in.getFilePointer();
var sum0 = IntVector.zero(INT_SPECIES_128);
var sum1 = IntVector.zero(INT_SPECIES_128);
var sum2 = IntVector.zero(INT_SPECIES_128);
var sum3 = IntVector.zero(INT_SPECIES_128);
int size = length / 2;
int limit = ByteVector.SPECIES_128.loopBound(size);
for (; i < limit; i += ByteVector.SPECIES_128.length(), offset += INT_SPECIES_128.vectorByteSize()) {
var vd = IntVector.fromMemorySegment(INT_SPECIES_128, memorySegment, offset, ByteOrder.LITTLE_ENDIAN);
var vq0 = ByteVector.fromArray(BYTE_SPECIES_128, q, i).reinterpretAsInts();
var vq1 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + size).reinterpretAsInts();
var vq2 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + size * 2).reinterpretAsInts();
var vq3 = ByteVector.fromArray(BYTE_SPECIES_128, q, i + size * 3).reinterpretAsInts();
sum0 = sum0.add(vd.and(vq0).lanewise(VectorOperators.BIT_COUNT));
sum1 = sum1.add(vd.and(vq1).lanewise(VectorOperators.BIT_COUNT));
sum2 = sum2.add(vd.and(vq2).lanewise(VectorOperators.BIT_COUNT));
sum3 = sum3.add(vd.and(vq3).lanewise(VectorOperators.BIT_COUNT));
}
subRet0 += sum0.reduceLanes(VectorOperators.ADD);
subRet1 += sum1.reduceLanes(VectorOperators.ADD);
subRet2 += sum2.reduceLanes(VectorOperators.ADD);
subRet3 += sum3.reduceLanes(VectorOperators.ADD);
// process scalar tail
in.seek(offset);
for (final int upperBound = size & -Long.BYTES; i < upperBound; i += Long.BYTES) {
final long value = in.readLong();
subRet0 += Long.bitCount((long) BitUtil.VH_LE_LONG.get(q, i) & value);
subRet1 += Long.bitCount((long) BitUtil.VH_LE_LONG.get(q, i + size) & value);
subRet2 += Long.bitCount((long) BitUtil.VH_LE_LONG.get(q, i + 2 * size) & value);
subRet3 += Long.bitCount((long) BitUtil.VH_LE_LONG.get(q, i + 3 * size) & value);
}
for (final int upperBound = size & -Integer.BYTES; i < upperBound; i += Integer.BYTES) {
final int value = in.readInt();
subRet0 += Integer.bitCount((int) BitUtil.VH_LE_INT.get(q, i) & value);
subRet1 += Integer.bitCount((int) BitUtil.VH_LE_INT.get(q, i + size) & value);
subRet2 += Integer.bitCount((int) BitUtil.VH_LE_INT.get(q, i + 2 * size) & value);
subRet3 += Integer.bitCount((int) BitUtil.VH_LE_INT.get(q, i + 3 * size) & value);
}
for (; i < size; i++) {
int dValue = in.readByte() & 0xFF;
subRet0 += Integer.bitCount((q[i] & dValue) & 0xFF);
subRet1 += Integer.bitCount((q[i + size] & dValue) & 0xFF);
subRet2 += Integer.bitCount((q[i + 2 * size] & dValue) & 0xFF);
subRet3 += Integer.bitCount((q[i + 3 * size] & dValue) & 0xFF);
}
return subRet0 + (subRet1 << 1) + (subRet2 << 2) + (subRet3 << 3);
}
@Override
public boolean quantizeScoreBulk(byte[] q, int count, float[] scores) throws IOException {
assert q.length == length * 2;
// 128 / 8 == 16
if (length >= 16 && PanamaESVectorUtilSupport.HAS_FAST_INTEGER_VECTORS) {
if (PanamaESVectorUtilSupport.VECTOR_BITSIZE >= 256) {
quantizeScore256Bulk(q, count, scores);
return true;
} else if (PanamaESVectorUtilSupport.VECTOR_BITSIZE == 128) {
quantizeScore128Bulk(q, count, scores);
return true;
}
}
return false;
}
private void quantizeScore128Bulk(byte[] q, int count, float[] scores) throws IOException {
for (int iter = 0; iter < count; iter++) {
scores[iter] = quantizeScore128DibitToInt4(q);
}
}
private void quantizeScore256Bulk(byte[] q, int count, float[] scores) throws IOException {
for (int iter = 0; iter < count; iter++) {
scores[iter] = quantizeScore256DibitToInt4(q);
}
}
@Override
public float scoreBulk(
byte[] q,
float queryLowerInterval,
float queryUpperInterval,
int queryComponentSum,
float queryAdditionalCorrection,
VectorSimilarityFunction similarityFunction,
float centroidDp,
float[] scores
) throws IOException {
assert q.length == length * 4;
// 128 / 8 == 16
if (length >= 16 && PanamaESVectorUtilSupport.HAS_FAST_INTEGER_VECTORS) {
if (PanamaESVectorUtilSupport.VECTOR_BITSIZE >= 256) {
return score256Bulk(
q,
queryLowerInterval,
queryUpperInterval,
queryComponentSum,
queryAdditionalCorrection,
similarityFunction,
centroidDp,
scores
);
} else if (PanamaESVectorUtilSupport.VECTOR_BITSIZE == 128) {
return score128Bulk(
q,
queryLowerInterval,
queryUpperInterval,
queryComponentSum,
queryAdditionalCorrection,
similarityFunction,
centroidDp,
scores
);
}
}
return Float.NEGATIVE_INFINITY;
}
private float score128Bulk(
byte[] q,
float queryLowerInterval,
float queryUpperInterval,
int queryComponentSum,
float queryAdditionalCorrection,
VectorSimilarityFunction similarityFunction,
float centroidDp,
float[] scores
) throws IOException {
quantizeScore128Bulk(q, BULK_SIZE, scores);
int limit = FLOAT_SPECIES_128.loopBound(BULK_SIZE);
int i = 0;
long offset = in.getFilePointer();
float ay = queryLowerInterval;
float ly = (queryUpperInterval - ay) * FOUR_BIT_SCALE;
float y1 = queryComponentSum;
float maxScore = Float.NEGATIVE_INFINITY;
for (; i < limit; i += FLOAT_SPECIES_128.length()) {
var ax = FloatVector.fromMemorySegment(FLOAT_SPECIES_128, memorySegment, offset + i * Float.BYTES, ByteOrder.LITTLE_ENDIAN);
var lx = FloatVector.fromMemorySegment(
FLOAT_SPECIES_128,
memorySegment,
offset + 4 * BULK_SIZE + i * Float.BYTES,
ByteOrder.LITTLE_ENDIAN
).sub(ax).mul(TWO_BIT_SCALE);
var targetComponentSums = ShortVector.fromMemorySegment(
SHORT_SPECIES_128,
memorySegment,
offset + 8 * BULK_SIZE + i * Short.BYTES,
ByteOrder.LITTLE_ENDIAN
).convert(VectorOperators.S2I, 0).reinterpretAsInts().and(0xffff).convert(VectorOperators.I2F, 0);
var additionalCorrections = FloatVector.fromMemorySegment(
FLOAT_SPECIES_128,
memorySegment,
offset + 10 * BULK_SIZE + i * Float.BYTES,
ByteOrder.LITTLE_ENDIAN
);
var qcDist = FloatVector.fromArray(FLOAT_SPECIES_128, scores, i);
// ax * ay * dimensions + ay * lx * (float) targetComponentSum + ax * ly * y1 + lx * ly *
// qcDist;
var res1 = ax.mul(ay).mul(dimensions);
var res2 = lx.mul(ay).mul(targetComponentSums);
var res3 = ax.mul(ly).mul(y1);
var res4 = lx.mul(ly).mul(qcDist);
var res = res1.add(res2).add(res3).add(res4);
// For euclidean, we need to invert the score and apply the additional correction, which is
// assumed to be the squared l2norm of the centroid centered vectors.
if (similarityFunction == EUCLIDEAN) {
res = res.mul(-2).add(additionalCorrections).add(queryAdditionalCorrection).add(1f);
res = FloatVector.broadcast(FLOAT_SPECIES_128, 1).div(res).max(0);
maxScore = Math.max(maxScore, res.reduceLanes(VectorOperators.MAX));
res.intoArray(scores, i);
} else {
// For cosine and max inner product, we need to apply the additional correction, which is
// assumed to be the non-centered dot-product between the vector and the centroid
res = res.add(queryAdditionalCorrection).add(additionalCorrections).sub(centroidDp);
if (similarityFunction == MAXIMUM_INNER_PRODUCT) {
res.intoArray(scores, i);
// not sure how to do it better
for (int j = 0; j < FLOAT_SPECIES_128.length(); j++) {
scores[i + j] = VectorUtil.scaleMaxInnerProductScore(scores[i + j]);
maxScore = Math.max(maxScore, scores[i + j]);
}
} else {
res = res.add(1f).mul(0.5f).max(0);
res.intoArray(scores, i);
maxScore = Math.max(maxScore, res.reduceLanes(VectorOperators.MAX));
}
}
}
in.seek(offset + 14L * BULK_SIZE);
return maxScore;
}
private float score256Bulk(
byte[] q,
float queryLowerInterval,
float queryUpperInterval,
int queryComponentSum,
float queryAdditionalCorrection,
VectorSimilarityFunction similarityFunction,
float centroidDp,
float[] scores
) throws IOException {
quantizeScore256Bulk(q, BULK_SIZE, scores);
int limit = FLOAT_SPECIES_256.loopBound(BULK_SIZE);
int i = 0;
long offset = in.getFilePointer();
float ay = queryLowerInterval;
float ly = (queryUpperInterval - ay) * FOUR_BIT_SCALE;
float y1 = queryComponentSum;
float maxScore = Float.NEGATIVE_INFINITY;
for (; i < limit; i += FLOAT_SPECIES_256.length()) {
var ax = FloatVector.fromMemorySegment(FLOAT_SPECIES_256, memorySegment, offset + i * Float.BYTES, ByteOrder.LITTLE_ENDIAN);
var lx = FloatVector.fromMemorySegment(
FLOAT_SPECIES_256,
memorySegment,
offset + 4 * BULK_SIZE + i * Float.BYTES,
ByteOrder.LITTLE_ENDIAN
).sub(ax).mul(TWO_BIT_SCALE);
var targetComponentSums = ShortVector.fromMemorySegment(
SHORT_SPECIES_256,
memorySegment,
offset + 8 * BULK_SIZE + i * Short.BYTES,
ByteOrder.LITTLE_ENDIAN
).convert(VectorOperators.S2I, 0).reinterpretAsInts().and(0xffff).convert(VectorOperators.I2F, 0);
var additionalCorrections = FloatVector.fromMemorySegment(
FLOAT_SPECIES_256,
memorySegment,
offset + 10 * BULK_SIZE + i * Float.BYTES,
ByteOrder.LITTLE_ENDIAN
);
var qcDist = FloatVector.fromArray(FLOAT_SPECIES_256, scores, i);
// ax * ay * dimensions + ay * lx * (float) targetComponentSum + ax * ly * y1 + lx * ly *
// qcDist;
var res1 = ax.mul(ay).mul(dimensions);
var res2 = lx.mul(ay).mul(targetComponentSums);
var res3 = ax.mul(ly).mul(y1);
var res4 = lx.mul(ly).mul(qcDist);
var res = res1.add(res2).add(res3).add(res4);
// For euclidean, we need to invert the score and apply the additional correction, which is
// assumed to be the squared l2norm of the centroid centered vectors.
if (similarityFunction == EUCLIDEAN) {
res = res.mul(-2).add(additionalCorrections).add(queryAdditionalCorrection).add(1f);
res = FloatVector.broadcast(FLOAT_SPECIES_256, 1).div(res).max(0);
maxScore = Math.max(maxScore, res.reduceLanes(VectorOperators.MAX));
res.intoArray(scores, i);
} else {
// For cosine and max inner product, we need to apply the additional correction, which is
// assumed to be the non-centered dot-product between the vector and the centroid
res = res.add(queryAdditionalCorrection).add(additionalCorrections).sub(centroidDp);
if (similarityFunction == MAXIMUM_INNER_PRODUCT) {
res.intoArray(scores, i);
// not sure how to do it better
for (int j = 0; j < FLOAT_SPECIES_256.length(); j++) {
scores[i + j] = VectorUtil.scaleMaxInnerProductScore(scores[i + j]);
maxScore = Math.max(maxScore, scores[i + j]);
}
} else {
res = res.add(1f).mul(0.5f).max(0);
maxScore = Math.max(maxScore, res.reduceLanes(VectorOperators.MAX));
res.intoArray(scores, i);
}
}
}
in.seek(offset + 14L * BULK_SIZE);
return maxScore;
}
}
|
MSDibitToInt4ESNextOSQVectorsScorer
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/action/DocWriteResponse.java
|
{
"start": 2481,
"end": 10327
}
|
enum ____ implements Writeable {
    CREATED(0),
    UPDATED(1),
    DELETED(2),
    NOT_FOUND(3),
    NOOP(4);

    /** Stable wire opcode for this result; must never change across versions. */
    private final byte op;
    /** Cached lowercase name, used when rendering the result as a field value. */
    private final String lowercase;

    Result(int op) {
        this.op = (byte) op;
        this.lowercase = this.name().toLowerCase(Locale.ROOT);
    }

    public byte getOp() {
        return op;
    }

    public String getLowercase() {
        return lowercase;
    }

    /**
     * Reads a result from the stream by its wire opcode.
     *
     * @throws IllegalArgumentException if the opcode is not a known result code
     */
    public static Result readFrom(StreamInput in) throws IOException {
        // Use the primitive directly; the previous code boxed the byte into a
        // Byte only for the switch to unbox it again.
        byte opcode = in.readByte();
        return switch (opcode) {
            case 0 -> CREATED;
            case 1 -> UPDATED;
            case 2 -> DELETED;
            case 3 -> NOT_FOUND;
            case 4 -> NOOP;
            default -> throw new IllegalArgumentException("Unknown result code: " + opcode);
        };
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeByte(op);
    }
}
// Shard that performed the write; also identifies the target index.
private final ShardId shardId;
// Id of the document the operation targeted.
private final String id;
// Document version after the operation completed.
private final long version;
// Sequence number assigned to the change (negative for no-ops; see getSeqNo).
private final long seqNo;
// Primary term under which the change was made.
private final long primaryTerm;
// Mutable: flipped via setForcedRefresh once the refresh policy was honoured.
private boolean forcedRefresh;
// Outcome of the write (created/updated/deleted/not_found/noop).
protected final Result result;
/**
 * Creates a response for a completed write.
 *
 * @param shardId shard the write executed on (must not be null)
 * @param id id of the document that was written (must not be null)
 * @param seqNo sequence number assigned to the change
 * @param primaryTerm primary term under which the change was made
 * @param version resulting document version
 * @param result outcome of the operation (must not be null)
 */
public DocWriteResponse(ShardId shardId, String id, long seqNo, long primaryTerm, long version, Result result) {
    this.shardId = Objects.requireNonNull(shardId);
    this.id = Objects.requireNonNull(id);
    this.seqNo = seqNo;
    this.primaryTerm = primaryTerm;
    this.version = version;
    this.result = Objects.requireNonNull(result);
}
// needed for deserialization
/**
 * "Thin" deserialization constructor: the shard id has already been read by
 * the caller, so only the remaining fields are consumed from the stream.
 * Field order must mirror {@code writeWithoutShardId}.
 */
protected DocWriteResponse(ShardId shardId, StreamInput in) throws IOException {
    super(in);
    this.shardId = shardId;
    id = in.readString();
    version = in.readZLong();
    seqNo = in.readZLong();
    primaryTerm = in.readVLong();
    forcedRefresh = in.readBoolean();
    result = Result.readFrom(in);
}
/**
 * Needed for deserialization of single item requests in {@link org.elasticsearch.action.index.TransportIndexAction} and BwC
 * deserialization path
 */
protected DocWriteResponse(StreamInput in) throws IOException {
    super(in);
    // Unlike the thin variant, this reads the shard id from the stream itself;
    // field order must mirror writeTo.
    shardId = new ShardId(in);
    id = in.readString();
    version = in.readZLong();
    seqNo = in.readZLong();
    primaryTerm = in.readVLong();
    forcedRefresh = in.readBoolean();
    result = Result.readFrom(in);
}
/**
 * The change that occurred to the document.
 */
public Result getResult() {
    return result;
}

/**
 * The index the document was changed in.
 */
public String getIndex() {
    return this.shardId.getIndexName();
}

/**
 * The exact shard the document was changed in.
 */
public ShardId getShardId() {
    return this.shardId;
}

/**
 * The id of the document changed.
 */
public String getId() {
    return this.id;
}

/**
 * Returns the current version of the doc.
 */
public long getVersion() {
    return this.version;
}

/**
 * Returns the sequence number assigned for this change. Returns {@link SequenceNumbers#UNASSIGNED_SEQ_NO} if the operation
 * wasn't performed (i.e., an update operation that resulted in a NOOP).
 */
public long getSeqNo() {
    return seqNo;
}

/**
 * The primary term for this change.
 *
 * @return the primary term
 */
public long getPrimaryTerm() {
    return primaryTerm;
}

/**
 * Did this request force a refresh? Requests that set {@link WriteRequest#setRefreshPolicy(RefreshPolicy)} to
 * {@link RefreshPolicy#IMMEDIATE} will always return true for this. Requests that set it to {@link RefreshPolicy#WAIT_UNTIL} will
 * only return true here if they run out of refresh listener slots (see {@link IndexSettings#MAX_REFRESH_LISTENERS_PER_SHARD}).
 */
public boolean forcedRefresh() {
    return forcedRefresh;
}

/** Records whether the refresh policy forced a refresh for this write. */
@Override
public void setForcedRefresh(boolean forcedRefresh) {
    this.forcedRefresh = forcedRefresh;
}

/** returns the rest status for this response (based on {@link ShardInfo#status()} */
public RestStatus status() {
    return getShardInfo().status();
}
/**
 * Return the relative URI for the location of the document suitable for use in the {@code Location} header. The use of relative URIs is
 * permitted as of HTTP/1.1 (cf. https://tools.ietf.org/html/rfc7231#section-7.1.2).
 *
 * @param routing custom routing or {@code null} if custom routing is not used
 * @return the relative URI for the location of the document
 */
public String getLocation(@Nullable String routing) {
    // Encode each path segment on its own so the '/' separators themselves
    // are not percent-encoded.
    String encodedIndex = URLEncoder.encode(getIndex(), StandardCharsets.UTF_8);
    String encodedType = URLEncoder.encode(MapperService.SINGLE_MAPPING_NAME, StandardCharsets.UTF_8);
    String encodedId = URLEncoder.encode(getId(), StandardCharsets.UTF_8);
    StringBuilder location = new StringBuilder();
    location.append('/').append(encodedIndex)
        .append('/').append(encodedType)
        .append('/').append(encodedId);
    if (routing != null) {
        location.append("?routing=").append(URLEncoder.encode(routing, StandardCharsets.UTF_8));
    }
    return location.toString();
}
/**
 * Failure-store status for this response; this base implementation reports
 * "not applicable or unknown" and subclasses may override.
 */
public IndexDocFailureStoreStatus getFailureStoreStatus() {
    return IndexDocFailureStoreStatus.NOT_APPLICABLE_OR_UNKNOWN;
}

/**
 * Serializes this response without the shard id, for callers that transmit
 * the shard id separately (pairs with the thin deserialization constructor).
 */
public void writeThin(StreamOutput out) throws IOException {
    super.writeTo(out);
    writeWithoutShardId(out);
}

@Override
public void writeTo(StreamOutput out) throws IOException {
    super.writeTo(out);
    shardId.writeTo(out);
    writeWithoutShardId(out);
}

// Shared tail of both write paths; field order must mirror the
// deserialization constructors.
private void writeWithoutShardId(StreamOutput out) throws IOException {
    out.writeString(id);
    out.writeZLong(version);
    out.writeZLong(seqNo);
    out.writeVLong(primaryTerm);
    out.writeBoolean(forcedRefresh);
    result.writeTo(out);
}
@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    builder.startObject();
    innerToXContent(builder, params);
    builder.endObject();
    return builder;
}

/**
 * Renders the response fields inside an already-open object; subclasses
 * extend this to append their own fields.
 */
public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException {
    ReplicationResponse.ShardInfo shardInfo = getShardInfo();
    builder.field(_INDEX, shardId.getIndexName());
    builder.field(_ID, id).field(_VERSION, version).field(RESULT, getResult().getLowercase());
    // Only emitted when a refresh actually happened, keeping the common case compact.
    if (forcedRefresh) {
        builder.field(FORCED_REFRESH, true);
    }
    builder.field(_SHARDS, shardInfo);
    // seq_no / primary_term are only meaningful when an operation was actually
    // performed (seqNo >= 0); no-ops omit them.
    if (getSeqNo() >= 0) {
        builder.field(_SEQ_NO, getSeqNo());
        builder.field(_PRIMARY_TERM, getPrimaryTerm());
    }
    return builder;
}
/**
* Base
|
Result
|
java
|
lettuce-io__lettuce-core
|
src/main/java/io/lettuce/core/support/ConnectionWrapping.java
|
{
"start": 7365,
"end": 7481
}
|
interface ____ indicate a wrapper.
*
* @param <T> Type of the wrapped object.
* @since 5.2
*/
|
to
|
java
|
apache__flink
|
flink-rpc/flink-rpc-akka/src/main/java/org/apache/flink/runtime/rpc/pekko/ControlMessages.java
|
{
"start": 914,
"end": 1104
}
|
/**
 * Internal control signals steering the RPC actor's message-processing
 * life cycle.
 */
enum ____ {
    START, // Start processing incoming messages
    STOP, // Stop processing messages and drop all newly incoming messages
    TERMINATE, // Terminate the RpcActor
}
|
ControlMessages
|
java
|
alibaba__druid
|
core/src/test/java/com/alibaba/druid/bvt/sql/oracle/create/OracleCreateViewTest12.java
|
{
"start": 1062,
"end": 18205
}
|
class ____ extends OracleTest {
public void test_types() throws Exception {
String sql = //
"CREATE OR REPLACE FORCE VIEW \"TCP_CPR\".\"DIFF_CON_CONFIG_ALL_V\" (\"OSG_TYPE_ID\", \"CONTRACT_HEADER_ID\", \"CONTRACT_NUMBER\", \"ORGANIZATION_ID\", \"CONTRACT_PRODUCT_ID\", \"PROD_ID\", \"PROD_DES\", \"MI\", \"CONTRACT_DEVICE_ID\", \"DEV_ID\", \"DEV_DES\", \"SITE_ID\", \"SITE_QUANTITY\", \"SITE_DES\", \"CONTRACT_MODULE_ID\", \"MOD_ID\", \"MOD_DES\", \"MODULE_QUANTITY\", \"CONTRACT_ITEM_ID\", \"ITEM_ID\", \"ITEM_DES\", \"ITEM_TYPE\", \"ITEM_QUANTITY\", \"HARD_PARAM\", \"SOFT_PARAM\", \"MAKE_PARAM\", \"RISK_PARAM\", \"SOFT_COST_PARAM\", \"PROD_MANAGER\", \"COST_PRICE04\", \"CONFIRM_ITEM_PARAM\", \"CONFIRM_FLAG04\", \"COST_PRICE\", \"COST_PRICE_PARAMETER\", \"OLD_COST\", \"LIST_PRICE\", \"ITEM_CODE\", \"CONFIRM_COST_PRICE04\", \"CUSTOMIZE_SITES_ID\", \"SPARE_FLAG\") AS \n" +
" SELECT I.osg_type_id, M.CONTRACT_HEADER_ID,M.CONTRACT_NUMBER,\n" +
" M.ORGANIZATION_ID,\n" +
" M.CONTRACT_PRODUCT_ID,\n" +
" M.prod_id,\n" +
" M.prod_des,M.MI,\n" +
" M.CONTRACT_DEVICE_ID,\n" +
" M.dev_id,\n" +
" m.dev_des,\n" +
" M.SITE_ID ,\n" +
" M.SITE_QUANTITY,\n" +
" M.site_des,\n" +
" M.CONTRACT_MODULE_ID,\n" +
" M.mod_id,\n" +
" M.mod_des,\n" +
" M.MODULE_QUANTITY,\n" +
" I.CONTRACT_ITEM_ID,\n" +
" I.ITEM_ID,\n" +
" I.ITEM_DES,\n" +
" I.ITEM_TYPE,\n" +
" I.item_quantity,\n" +
" M.hard_param,\n" +
" M.soft_param, M.make_param,M.risk_param, M.soft_cost_param,\n" +
" M.prod_manager, I.COST_PRICE04, I.CONFIRM_ITEM_PARAM, I.CONFIRM_FLAG04,\n" +
" I.cost_price,I.COST_PRICE_PARAMETER,I.OLD_COST,I.LIST_PRICE,I.ITEM_CODE,\n" +
" I.CONFIRM_COST_PRICE04\n" +
" --,I.PROD_ATTRIBUTE_ID,I.ITEM_CHIP\n" +
" ,M.customize_sites_id\n" +
" ,M.spare_flag\n" +
" FROM TCP_CPR.DIFF_CON_CONFIG_MODULE_V M\n" +
" LEFT JOIN\n" +
" (\n" +
" (\n" +
" select 0 osg_type_id,\n" +
" v.contract_module_id,\n" +
" v.item_id,\n" +
" v.contract_item_id,\n" +
" v.item_des,\n" +
" v.item_type,\n" +
" v.ITEM_QUANTITY,\n" +
" v.cost_price,\n" +
" v.COST_PRICE_PARAMETER,\n" +
" v.CONFIRM_FLAG,\n" +
" v.COST_PRICE04,\n" +
" v.CONFIRM_ITEM_PARAM,\n" +
" v.CONFIRM_FLAG04,\n" +
" v.OLD_COST,\n" +
" v.LIST_PRICE,\n" +
" v.ITEM_CODE,\n" +
" v.CONFIRM_COST_PRICE04\n" +
" from TCP_CPR.DIFF_CON_CONFIG_ITEM_V v\n" +
" )\n" +
" union all\n" +
" (\n" +
" SELECT header.product_id osg_type_id,\n" +
" HEADER.PARENT_ID CONTRACT_MODULE_ID,\n" +
" HEADER.SERIAL_ID item_id,\n" +
" HEADER.OSG_HEADER_ID CONTRACT_ITEM_ID,\n" +
" ser.product_serial item_name,\n" +
" 'OSG' item_type,\n" +
" HEADER.QUANTITY item_quantity,\n" +
" (LINE.REF_PRICE+ nvl(REPLY.MARKET_REFERENCE_PRICE,0)) COST_PRICE,\n" +
" 1 COST_PRICE_PARAMETER,\n" +
" 'Y' CONFIRM_FLAG,\n" +
" 0 COST_PRICE04,\n" +
" 1 CONFIRM_ITEM_PARAM,\n" +
" 'Y' CONFIRM_FLAG04,\n" +
" 1 OLD_COST,\n" +
" --LINE.PRICE+nvl(REPLY.LIST_PRICE,0) LIST_PRICE,\n" +
" HEADER.LIST_PRICE LIST_PRICE,\n" +
" '+Mn\u0016-�' ITEM_CODE,\n" +
" (LINE.COST+ nvl(REPLY.RMBPRICE_WITHTAX,0)) CONFIRM_COST_PRICE04\n" +
" --0 PROD_ATTRIBUTE_ID,0 ITEM_CHIP\n" +
" FROM TCP_CPR.DIFF_CON_OSG3_HEADERS HEADER,ERP_ZTE.ZTE_KX_OSG3_SERIALS ser, ERP_ZTE.zte_kx_osg3_reply_headers\n" +
" REPLY, (\n" +
" select LINE.OSG_HEADER_ID,SUM((LINE.QUANTITY-LINE.THEORETIC_QTY)*\n" +
" PART.rmbprice_withtax) COST, SUM((LINE.QUANTITY-\n" +
" LINE.THEORETIC_QTY)* PART.LIST_PRICE) PRICE, SUM((\n" +
" LINE.QUANTITY-LINE.THEORETIC_QTY)* PART.MARKET_REFERENCE_PRICE\n" +
" ) REF_PRICE\n" +
" from TCP_CPR.DIFF_CON_OSG3_LINES LINE,\n" +
" ERP_ZTE.ZTE_KX_OSG3_PART_DETAILS PART\n" +
" WHERE LINE.PART_DETAIL_ID = PART.PART_DETAIL_ID\n" +
" AND LINE.ENABLED_FLAG = 'Y'\n" +
" GROUP BY LINE.OSG_HEADER_ID) LINE\n" +
" where HEADER.ENABLED_FLAG = 'Y' AND ser.serial_id=HEADER.Serial_Id\n" +
" and header.REPLY_ID = reply.reply_head_id(+)\n" +
" and header.OSG_HEADER_ID = line.OSG_HEADER_ID\n" +
" )\n" +
" union all\n" +
" (\n" +
" SELECT item.osg_type_id osg_type_id,\n" +
" ITEM.PARENT_ID CONTRACT_MODULE_ID,\n" +
" item.osg_item_id item_id,\n" +
" ITEM.OSG_ITEM_ID CONTRACT_ITEM_ID,\n" +
" SYS_ITEM.DESCRIPTION item_name,\n" +
" 'SINGLEOSG' item_type,\n" +
" ITEM.QUANTITY item_quantity,\n" +
" SYS_ITEM.MARKET_REFERENCE_PRICE COST_PRICE,\n" +
" 1 COST_PRICE_PARAMETER,\n" +
" SYS_ITEM.ENABLED_FLAG CONFIRM_FLAG,\n" +
" 0 COST_PRICE04,\n" +
" 1 CONFIRM_ITEM_PARAM,\n" +
" 'Y' CONFIRM_FLAG04,\n" +
" 1 OLD_COST,\n" +
" --SYS_ITEM.LIST_PRICE LIST_PRICE,\n" +
" ITEM.LIST_PRICE LIST_PRICE,\n" +
" SYS_ITEM.INVENTORY_ID||'\n" +
"+Mn\u0016-�' ITEM_CODE,\n" +
" SYS_ITEM.PRICE CONFIRM_COST_PRICE04--, 0 PROD_ATTRIBUTE_ID--,0 ITEM_CHIP\n" +
" FROM TCP_CPR.DIFF_CON_OSG3A_HEADERS ITEM,ERP_ZTE.ZTE_KX_OSG3_ITEMS\n" +
" SYS_ITEM\n" +
" where ITEM.OSG_ITEM_ID = SYS_ITEM.OSG_ITEM_ID\n" +
" AND ITEM.ENABLED_FLAG = 'Y'\n" +
" )\n" +
" ) I\n" +
" ON M.CONTRACT_MODULE_ID = I.CONTRACT_MODULE_ID\n" +
" WHERE item_quantity>=0";
System.out.println(sql);
OracleStatementParser parser = new OracleStatementParser(sql);
List<SQLStatement> statementList = parser.parseStatementList();
SQLStatement stmt = statementList.get(0);
print(statementList);
assertEquals(1, statementList.size());
assertEquals("CREATE OR REPLACE VIEW \"TCP_CPR\".\"DIFF_CON_CONFIG_ALL_V\" (\n" +
"\t\"OSG_TYPE_ID\", \n" +
"\t\"CONTRACT_HEADER_ID\", \n" +
"\t\"CONTRACT_NUMBER\", \n" +
"\t\"ORGANIZATION_ID\", \n" +
"\t\"CONTRACT_PRODUCT_ID\", \n" +
"\t\"PROD_ID\", \n" +
"\t\"PROD_DES\", \n" +
"\t\"MI\", \n" +
"\t\"CONTRACT_DEVICE_ID\", \n" +
"\t\"DEV_ID\", \n" +
"\t\"DEV_DES\", \n" +
"\t\"SITE_ID\", \n" +
"\t\"SITE_QUANTITY\", \n" +
"\t\"SITE_DES\", \n" +
"\t\"CONTRACT_MODULE_ID\", \n" +
"\t\"MOD_ID\", \n" +
"\t\"MOD_DES\", \n" +
"\t\"MODULE_QUANTITY\", \n" +
"\t\"CONTRACT_ITEM_ID\", \n" +
"\t\"ITEM_ID\", \n" +
"\t\"ITEM_DES\", \n" +
"\t\"ITEM_TYPE\", \n" +
"\t\"ITEM_QUANTITY\", \n" +
"\t\"HARD_PARAM\", \n" +
"\t\"SOFT_PARAM\", \n" +
"\t\"MAKE_PARAM\", \n" +
"\t\"RISK_PARAM\", \n" +
"\t\"SOFT_COST_PARAM\", \n" +
"\t\"PROD_MANAGER\", \n" +
"\t\"COST_PRICE04\", \n" +
"\t\"CONFIRM_ITEM_PARAM\", \n" +
"\t\"CONFIRM_FLAG04\", \n" +
"\t\"COST_PRICE\", \n" +
"\t\"COST_PRICE_PARAMETER\", \n" +
"\t\"OLD_COST\", \n" +
"\t\"LIST_PRICE\", \n" +
"\t\"ITEM_CODE\", \n" +
"\t\"CONFIRM_COST_PRICE04\", \n" +
"\t\"CUSTOMIZE_SITES_ID\", \n" +
"\t\"SPARE_FLAG\"\n" +
")\n" +
"AS\n" +
"SELECT I.osg_type_id, M.CONTRACT_HEADER_ID, M.CONTRACT_NUMBER, M.ORGANIZATION_ID, M.CONTRACT_PRODUCT_ID\n" +
"\t, M.prod_id, M.prod_des, M.MI, M.CONTRACT_DEVICE_ID, M.dev_id\n" +
"\t, m.dev_des, M.SITE_ID, M.SITE_QUANTITY, M.site_des, M.CONTRACT_MODULE_ID\n" +
"\t, M.mod_id, M.mod_des, M.MODULE_QUANTITY, I.CONTRACT_ITEM_ID, I.ITEM_ID\n" +
"\t, I.ITEM_DES, I.ITEM_TYPE, I.item_quantity, M.hard_param, M.soft_param\n" +
"\t, M.make_param, M.risk_param, M.soft_cost_param, M.prod_manager, I.COST_PRICE04\n" +
"\t, I.CONFIRM_ITEM_PARAM, I.CONFIRM_FLAG04, I.cost_price, I.COST_PRICE_PARAMETER, I.OLD_COST\n" +
"\t, I.LIST_PRICE, I.ITEM_CODE, I.CONFIRM_COST_PRICE04 -- ,I.PROD_ATTRIBUTE_ID,I.ITEM_CHIP\n" +
"\t, M.customize_sites_id, M.spare_flag\n" +
"FROM TCP_CPR.DIFF_CON_CONFIG_MODULE_V M\n" +
"\tLEFT JOIN (\n" +
"\t\t(SELECT 0 AS osg_type_id, v.contract_module_id, v.item_id, v.contract_item_id, v.item_des\n" +
"\t\t\t, v.item_type, v.ITEM_QUANTITY, v.cost_price, v.COST_PRICE_PARAMETER, v.CONFIRM_FLAG\n" +
"\t\t\t, v.COST_PRICE04, v.CONFIRM_ITEM_PARAM, v.CONFIRM_FLAG04, v.OLD_COST, v.LIST_PRICE\n" +
"\t\t\t, v.ITEM_CODE, v.CONFIRM_COST_PRICE04\n" +
"\t\tFROM TCP_CPR.DIFF_CON_CONFIG_ITEM_V v)\n" +
"\t\tUNION ALL\n" +
"\t\t(SELECT header.product_id AS osg_type_id, HEADER.PARENT_ID AS CONTRACT_MODULE_ID, HEADER.SERIAL_ID AS item_id, HEADER.OSG_HEADER_ID AS CONTRACT_ITEM_ID, ser.product_serial AS item_name\n" +
"\t\t\t, 'OSG' AS item_type, HEADER.QUANTITY AS item_quantity\n" +
"\t\t\t, (LINE.REF_PRICE + nvl(REPLY.MARKET_REFERENCE_PRICE, 0)) AS COST_PRICE\n" +
"\t\t\t, 1 AS COST_PRICE_PARAMETER, 'Y' AS CONFIRM_FLAG, 0 AS COST_PRICE04, 1 AS CONFIRM_ITEM_PARAM, 'Y' AS CONFIRM_FLAG04\n" +
"\t\t\t, 1 AS OLD_COST -- LINE.PRICE+nvl(REPLY.LIST_PRICE,0) LIST_PRICE,\n" +
"\t\t\t, HEADER.LIST_PRICE AS LIST_PRICE, '+Mn\u0016-�' AS ITEM_CODE\n" +
"\t\t\t, (LINE.COST + nvl(REPLY.RMBPRICE_WITHTAX, 0)) AS CONFIRM_COST_PRICE04 -- 0 PROD_ATTRIBUTE_ID,0 ITEM_CHIP\n" +
"\t\tFROM TCP_CPR.DIFF_CON_OSG3_HEADERS HEADER, ERP_ZTE.ZTE_KX_OSG3_SERIALS ser, ERP_ZTE.zte_kx_osg3_reply_headers REPLY, (\n" +
"\t\t\tSELECT LINE.OSG_HEADER_ID, SUM((LINE.QUANTITY - LINE.THEORETIC_QTY) * PART.rmbprice_withtax) AS COST\n" +
"\t\t\t\t, SUM((LINE.QUANTITY - LINE.THEORETIC_QTY) * PART.LIST_PRICE) AS PRICE\n" +
"\t\t\t\t, SUM((LINE.QUANTITY - LINE.THEORETIC_QTY) * PART.MARKET_REFERENCE_PRICE) AS REF_PRICE\n" +
"\t\t\tFROM TCP_CPR.DIFF_CON_OSG3_LINES LINE, ERP_ZTE.ZTE_KX_OSG3_PART_DETAILS PART\n" +
"\t\t\tWHERE LINE.PART_DETAIL_ID = PART.PART_DETAIL_ID\n" +
"\t\t\t\tAND LINE.ENABLED_FLAG = 'Y'\n" +
"\t\t\tGROUP BY LINE.OSG_HEADER_ID\n" +
"\t\t) LINE\n" +
"\t\tWHERE HEADER.ENABLED_FLAG = 'Y'\n" +
"\t\t\tAND ser.serial_id = HEADER.Serial_Id\n" +
"\t\t\tAND header.REPLY_ID = reply.reply_head_id(+)\n" +
"\t\t\tAND header.OSG_HEADER_ID = line.OSG_HEADER_ID)\n" +
"\t\tUNION ALL\n" +
"\t\t(SELECT item.osg_type_id AS osg_type_id, ITEM.PARENT_ID AS CONTRACT_MODULE_ID, item.osg_item_id AS item_id, ITEM.OSG_ITEM_ID AS CONTRACT_ITEM_ID, SYS_ITEM.DESCRIPTION AS item_name\n" +
"\t\t\t, 'SINGLEOSG' AS item_type, ITEM.QUANTITY AS item_quantity, SYS_ITEM.MARKET_REFERENCE_PRICE AS COST_PRICE, 1 AS COST_PRICE_PARAMETER, SYS_ITEM.ENABLED_FLAG AS CONFIRM_FLAG\n" +
"\t\t\t, 0 AS COST_PRICE04, 1 AS CONFIRM_ITEM_PARAM, 'Y' AS CONFIRM_FLAG04, 1 AS OLD_COST -- SYS_ITEM.LIST_PRICE LIST_PRICE,\n" +
"\t\t\t, ITEM.LIST_PRICE AS LIST_PRICE, SYS_ITEM.INVENTORY_ID || '\n" +
"+Mn\u0016-�' AS ITEM_CODE, SYS_ITEM.PRICE AS CONFIRM_COST_PRICE04 -- , 0 PROD_ATTRIBUTE_ID--,0 ITEM_CHIP\n" +
"\t\tFROM TCP_CPR.DIFF_CON_OSG3A_HEADERS ITEM, ERP_ZTE.ZTE_KX_OSG3_ITEMS SYS_ITEM\n" +
"\t\tWHERE ITEM.OSG_ITEM_ID = SYS_ITEM.OSG_ITEM_ID\n" +
"\t\t\tAND ITEM.ENABLED_FLAG = 'Y')\n" +
"\t) I ON M.CONTRACT_MODULE_ID = I.CONTRACT_MODULE_ID \n" +
"WHERE item_quantity >= 0",
SQLUtils.toSQLString(stmt, JdbcConstants.ORACLE));
OracleSchemaStatVisitor visitor = new OracleSchemaStatVisitor();
stmt.accept(visitor);
System.out.println("Tables : " + visitor.getTables());
System.out.println("fields : " + visitor.getColumns());
System.out.println("coditions : " + visitor.getConditions());
System.out.println("relationships : " + visitor.getRelationships());
System.out.println("orderBy : " + visitor.getOrderByColumns());
assertEquals(9, visitor.getTables().size());
assertEquals(75, visitor.getColumns().size());
assertTrue(visitor.getColumns().contains(new TableStat.Column("TCP_CPR.DIFF_CON_CONFIG_ITEM_V", "contract_module_id")));
}
}
|
OracleCreateViewTest12
|
java
|
apache__hadoop
|
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/converter/TestConvertedConfigValidator.java
|
{
"start": 1311,
"end": 2198
}
|
class ____ {

    /** Directory holding a converted configuration expected to validate. */
    private static final String CONFIG_DIR_PASSES =
        new File("src/test/resources/cs-validation-pass").getAbsolutePath();

    /** Directory holding a converted configuration expected to be rejected. */
    private static final String CONFIG_DIR_FAIL =
        new File("src/test/resources/cs-validation-fail").getAbsolutePath();

    private ConvertedConfigValidator validator;

    @BeforeEach
    public void setup() {
        // Reset the global queue metrics so state from earlier tests cannot
        // leak into the validation run.
        QueueMetrics.clearQueueMetrics();
        validator = new ConvertedConfigValidator();
    }

    @AfterEach
    public void after() {
        QueueMetrics.clearQueueMetrics();
    }

    @Test
    public void testValidationPassed() throws Exception {
        // A valid converted config must not raise; reaching the end of the
        // method is the assertion.
        validator.validateConvertedConfig(CONFIG_DIR_PASSES);
    }

    @Test
    public void testValidationFails() throws Exception {
        assertThrows(VerificationException.class,
            () -> validator.validateConvertedConfig(CONFIG_DIR_FAIL));
    }
}
|
TestConvertedConfigValidator
|
java
|
apache__camel
|
components/camel-vertx/camel-vertx-http/src/test/java/org/apache/camel/component/vertx/http/VertxHttpCustomWebClientTest.java
|
{
"start": 1257,
"end": 3086
}
|
class ____ {

    @Test
    public void testCustomWebClientOptions() throws Exception {
        // WebClientOptions set on the component should be stored by reference
        // and returned unchanged.
        WebClientOptions opts = new WebClientOptions();
        opts.setSsl(true);
        opts.setConnectTimeout(Integer.MAX_VALUE);
        try (CamelContext context = new DefaultCamelContext()) {
            VertxHttpComponent component = new VertxHttpComponent();
            component.setCamelContext(context);
            component.setWebClientOptions(opts);
            context.start();
            assertSame(opts, component.getWebClientOptions());
            assertTrue(component.getWebClientOptions().isSsl());
            assertEquals(Integer.MAX_VALUE, component.getWebClientOptions().getConnectTimeout());
        }
    }

    @Test
    public void testCustomWebClientOptionsWithRoute() throws Exception {
        // Options configured on the component must propagate to endpoints
        // created from routes that use the component.
        WebClientOptions opts = new WebClientOptions();
        opts.setSsl(true);
        opts.setConnectTimeout(Integer.MAX_VALUE);
        try (CamelContext context = new DefaultCamelContext()) {
            VertxHttpComponent component = new VertxHttpComponent();
            component.setWebClientOptions(opts);
            context.addComponent("vertx-http", component);
            context.addRoutes(new RouteBuilder() {
                @Override
                public void configure() {
                    from("direct:start")
                            .to("vertx-http:http://localhost:8080");
                }
            });
            context.start();
            // Resolve the endpoint and verify it picked up the component-level options.
            VertxHttpEndpoint ve = context.getEndpoint("vertx-http:http://localhost:8080", VertxHttpEndpoint.class);
            assertTrue(ve.getConfiguration().getWebClientOptions().isSsl());
            assertEquals(Integer.MAX_VALUE, ve.getConfiguration().getWebClientOptions().getConnectTimeout());
        }
    }
}
|
VertxHttpCustomWebClientTest
|
java
|
alibaba__nacos
|
naming/src/main/java/com/alibaba/nacos/naming/model/form/UpdateSwitchForm.java
|
{
"start": 1002,
"end": 3009
}
|
class ____ implements Serializable {

    private static final long serialVersionUID = -1580959130954136990L;

    /** Whether the switch update runs in debug mode. */
    private boolean debug;

    /** Name of the switch entry to update. */
    private String entry;

    /** New value for the switch entry. */
    private String value;

    public UpdateSwitchForm() {
    }

    /**
     * check param.
     *
     * @throws NacosApiException NacosApiException
     */
    public void validate() throws NacosApiException {
        requireNonBlank(entry, "entry");
        requireNonBlank(value, "value");
    }

    // Raises the same PARAMETER_MISSING error the former inline checks produced.
    private static void requireNonBlank(String param, String name) throws NacosApiException {
        if (StringUtils.isBlank(param)) {
            throw new NacosApiException(HttpStatus.BAD_REQUEST.value(), ErrorCode.PARAMETER_MISSING,
                    "Required parameter '" + name + "' type String is not present");
        }
    }

    public boolean getDebug() {
        return debug;
    }

    public void setDebug(boolean debug) {
        this.debug = debug;
    }

    public String getEntry() {
        return entry;
    }

    public void setEntry(String entry) {
        this.entry = entry;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        UpdateSwitchForm other = (UpdateSwitchForm) o;
        // Primitive comparison is equivalent to Objects.equals on autoboxed booleans.
        return debug == other.debug
                && Objects.equals(entry, other.entry)
                && Objects.equals(value, other.value);
    }

    @Override
    public int hashCode() {
        return Objects.hash(debug, entry, value);
    }

    @Override
    public String toString() {
        return "UpdateSwitchForm{" + "debug=" + debug + ", entry='" + entry + '\'' + ", value='" + value + '\'' + '}';
    }
}
|
UpdateSwitchForm
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/aot/hint/support/SpringFactoriesLoaderRuntimeHints.java
|
{
"start": 1351,
"end": 3968
}
|
class ____ implements RuntimeHintsRegistrar {
// Resource locations scanned for factory declarations (spring.factories).
private static final List<String> RESOURCE_LOCATIONS =
        List.of(SpringFactoriesLoader.FACTORIES_RESOURCE_LOCATION);

private static final Log logger = LogFactory.getLog(SpringFactoriesLoaderRuntimeHints.class);

@Override
public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) {
    // Fall back to the default class loader when none was supplied; if even
    // that is unavailable there is nothing to introspect, so do nothing.
    ClassLoader classLoaderToUse = (classLoader != null ? classLoader : ClassUtils.getDefaultClassLoader());
    if (classLoaderToUse != null) {
        for (String resourceLocation : RESOURCE_LOCATIONS) {
            registerHints(hints, classLoaderToUse, resourceLocation);
        }
    }
}

// Registers the resource pattern itself plus reflection hints for every
// factory type (and its implementations) listed at the given location.
private void registerHints(RuntimeHints hints, ClassLoader classLoader, String resourceLocation) {
    hints.resources().registerPattern(resourceLocation);
    Map<String, List<String>> factories =
            ExtendedSpringFactoriesLoader.accessLoadFactoriesResource(classLoader, resourceLocation);
    factories.forEach((factoryClassName, implementationClassNames) ->
            registerHints(hints, classLoader, factoryClassName, implementationClassNames));
}
// Registers invocation hints for one factory interface and all of its
// declared implementations; entries whose classes cannot be resolved on this
// class loader are skipped (and traced) rather than failing the build.
private void registerHints(RuntimeHints hints, ClassLoader classLoader,
        String factoryClassName, List<String> implementationClassNames) {
    Class<?> factoryClass = resolveClassName(classLoader, factoryClassName);
    if (factoryClass == null) {
        if (logger.isTraceEnabled()) {
            logger.trace(LogMessage.format("Skipping factories for [%s]", factoryClassName));
        }
        return;
    }
    if (logger.isTraceEnabled()) {
        logger.trace(LogMessage.format("Processing factories for [%s]", factoryClassName));
    }
    hints.reflection().registerType(factoryClass, MemberCategory.INVOKE_DECLARED_CONSTRUCTORS);
    for (String implementationClassName : implementationClassNames) {
        Class<?> implementationType = resolveClassName(classLoader, implementationClassName);
        if (logger.isTraceEnabled()) {
            logger.trace(LogMessage.format("%s factory type [%s] and implementation [%s]",
                    (implementationType != null ? "Processing" : "Skipping"), factoryClassName,
                    implementationClassName));
        }
        if (implementationType != null) {
            hints.reflection().registerType(implementationType, MemberCategory.INVOKE_DECLARED_CONSTRUCTORS);
        }
    }
}

// Resolves a class by name, eagerly resolving its constructors so they are
// cached; returns null (instead of throwing) when resolution fails.
private @Nullable Class<?> resolveClassName(ClassLoader classLoader, String factoryClassName) {
    try {
        Class<?> clazz = ClassUtils.resolveClassName(factoryClassName, classLoader);
        // Force resolution of all constructors to cache
        clazz.getDeclaredConstructors();
        return clazz;
    }
    catch (Throwable ex) {
        return null;
    }
}
private static
|
SpringFactoriesLoaderRuntimeHints
|
java
|
apache__camel
|
dsl/camel-jbang/camel-jbang-core/src/test/java/org/apache/camel/dsl/jbang/core/commands/bind/BindObjectReferenceTest.java
|
{
"start": 1173,
"end": 7209
}
|
class ____ extends CamelCommandBaseTest {
@Test
public void shouldBindToObjectReference() throws Exception {
Bind command = createCommand("timer", "foo");
command.sink = "sandbox.camel.apache.org/v1:Foo:bar";
command.doCall();
String output = printer.getOutput();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Pipe
metadata:
name: timer-to-foo
spec:
source:
ref:
kind: Kamelet
apiVersion: camel.apache.org/v1
name: timer-source
properties:
message: "hello world"
sink:
ref:
kind: Foo
apiVersion: sandbox.camel.apache.org/v1
name: bar
#properties:
#key: "value"
""".trim(), output);
}
@Test
public void shouldBindToNamespacedObjectReference() throws Exception {
Bind command = createCommand("timer", "foo");
command.sink = "sandbox.camel.apache.org/v1alpha1:Foo:my-namespace/bar";
command.doCall();
String output = printer.getOutput();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Pipe
metadata:
name: timer-to-foo
spec:
source:
ref:
kind: Kamelet
apiVersion: camel.apache.org/v1
name: timer-source
properties:
message: "hello world"
sink:
ref:
kind: Foo
apiVersion: sandbox.camel.apache.org/v1alpha1
name: bar
namespace: my-namespace
#properties:
#key: "value"
""".trim(), output);
}
@Test
public void shouldBindToObjectReferenceWithProperties() throws Exception {
Bind command = createCommand("timer", "foo");
command.sink = "sandbox.camel.apache.org/v1:Foo:bar";
command.properties = new String[] {
"source.message=Hello",
"sink.foo=bar",
"sink.bar=baz",
};
command.doCall();
String output = printer.getOutput();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Pipe
metadata:
name: timer-to-foo
spec:
source:
ref:
kind: Kamelet
apiVersion: camel.apache.org/v1
name: timer-source
properties:
message: Hello
sink:
ref:
kind: Foo
apiVersion: sandbox.camel.apache.org/v1
name: bar
properties:
bar: baz
foo: bar
""".trim(), output);
}
@Test
public void shouldBindToObjectReferenceWithUriProperties() throws Exception {
Bind command = createCommand("timer", "foo");
command.sink = "sandbox.camel.apache.org/v1:Foo:bar?bar=baz&foo=bar";
command.properties = new String[] {
"source.message=Hello",
};
command.doCall();
String output = printer.getOutput();
Assertions.assertEquals("""
apiVersion: camel.apache.org/v1
kind: Pipe
metadata:
name: timer-to-foo
spec:
source:
ref:
kind: Kamelet
apiVersion: camel.apache.org/v1
name: timer-source
properties:
message: Hello
sink:
ref:
kind: Foo
apiVersion: sandbox.camel.apache.org/v1
name: bar
properties:
bar: baz
foo: bar
""".trim(), output);
}
@Test
public void shouldHandleInvalidObjectReference() throws Exception {
Bind command = createCommand("timer", "foo");
command.sink = "sandbox.camel.apache.org:Foo:bar"; // missing api version
command.doCall();
String output = printer.getOutput();
assertThat(output).isEqualToIgnoringNewLines(
"""
ERROR: Failed to resolve endpoint URI expression sandbox.camel.apache.org:Foo:bar - no matching binding provider found
ERROR: Failed to construct Pipe resource
"""
.trim());
}
private Bind createCommand(String source, String sink) {
Bind command = new Bind(new CamelJBangMain().withPrinter(printer));
String sourceName;
String sourceUri;
if (source.contains(":")) {
sourceName = StringHelper.before(source, ":");
sourceUri = source;
} else {
sourceName = source;
sourceUri = source + "-source";
}
String sinkName;
String sinkUri;
if (sink.contains(":")) {
sinkName = StringHelper.before(sink, ":");
sinkUri = sink;
} else {
sinkName = sink;
sinkUri = sink + "-sink";
}
command.file = sourceName + "-to-" + sinkName + ".yaml";
command.source = sourceUri;
command.sink = sinkUri;
command.output = "yaml";
return command;
}
}
|
BindObjectReferenceTest
|
java
|
spring-projects__spring-data-jpa
|
spring-data-jpa/src/main/java/org/springframework/data/jpa/repository/aot/JpaCodeBlocks.java
|
{
"start": 22490,
"end": 33866
}
|
class ____ {
private final AotQueryMethodGenerationContext context;
private final JpaQueryMethod queryMethod;
// Local-variable name used for the JPA Query instance in generated code.
private final String queryVariableName;
private @Nullable AotQuery aotQuery;
// Name of the pageable local variable, or null when the method has no
// Pageable parameter.
private @Nullable String pageable;
private MergedAnnotation<Modifying> modifying = MergedAnnotation.missing();

private QueryExecutionBlockBuilder(AotQueryMethodGenerationContext context, JpaQueryMethod queryMethod) {
    this.context = context;
    this.queryMethod = queryMethod;
    this.queryVariableName = context.localVariable("query");
    this.pageable = context.getPageableParameterName() != null ? context.localVariable("pageable") : null;
}

// Sets the AOT query to render; returns this builder for chaining.
public QueryExecutionBlockBuilder query(AotQuery aotQuery) {
    this.aotQuery = aotQuery;
    return this;
}

// Overload that overrides the pageable variable name (note: the parameter is
// a variable name, not a query string).
public QueryExecutionBlockBuilder query(String pageable) {
    this.pageable = pageable;
    return this;
}

// Records the @Modifying annotation (if present) for later code generation.
public QueryExecutionBlockBuilder modifying(MergedAnnotation<Modifying> modifying) {
    this.modifying = modifying;
    return this;
}
public CodeBlock build() {
Builder builder = CodeBlock.builder();
MethodReturn methodReturn = context.getMethodReturn();
boolean isProjecting = methodReturn.isProjecting()
|| !ObjectUtils.nullSafeEquals(context.getDomainType(), methodReturn.getActualReturnClass())
|| StringUtils.hasText(context.getDynamicProjectionParameterName());
TypeName typeToRead = isProjecting ? methodReturn.getActualTypeName() : TypeName.get(context.getDomainType());
builder.add("\n");
if (modifying.isPresent() && !aotQuery.isDerived()) {
if (modifying.getBoolean("flushAutomatically")) {
builder.addStatement("this.$L.flush()", context.fieldNameOf(EntityManager.class));
}
Class<?> returnType = methodReturn.toClass();
if (returnsModifying(returnType)) {
builder.addStatement("int $L = $L.executeUpdate()", context.localVariable("result"), queryVariableName);
} else {
builder.addStatement("$L.executeUpdate()", queryVariableName);
}
if (modifying.getBoolean("clearAutomatically")) {
builder.addStatement("this.$L.clear()", context.fieldNameOf(EntityManager.class));
}
if (returnType == int.class || returnType == long.class || returnType == Integer.class) {
builder.addStatement("return $L", context.localVariable("result"));
}
if (returnType == Long.class) {
builder.addStatement("return (long) $L", context.localVariable("result"));
}
return builder.build();
}
if (aotQuery != null && aotQuery.isDelete()) {
builder.addStatement("$T $L = $L.getResultList()", List.class, context.localVariable("resultList"),
queryVariableName);
boolean returnCount = ClassUtils.isAssignable(Number.class, methodReturn.toClass());
boolean simpleBatch = returnCount || methodReturn.isVoid();
boolean collectionQuery = queryMethod.isCollectionQuery();
if (!simpleBatch && !collectionQuery) {
builder.beginControlFlow("if ($L.size() > 1)", context.localVariable("resultList"));
builder.addStatement("throw new $1T($2S + $3L.size(), 1, $3L.size())",
IncorrectResultSizeDataAccessException.class,
"Delete query returned more than one element: expected 1, actual ", context.localVariable("resultList"));
builder.endControlFlow();
}
builder.addStatement("$L.forEach($L::remove)", context.localVariable("resultList"),
context.fieldNameOf(EntityManager.class));
if (collectionQuery) {
if (isStreamable(methodReturn)) {
builder.addStatement("return ($1T) $1T.of($2L)", Streamable.class, context.localVariable("resultList"));
} else if (isStreamableWrapper(methodReturn) && canConvert(Streamable.class, methodReturn)) {
builder.addStatement(
"return ($1T) $2T.getSharedInstance().convert($3T.of($4L), $5T.valueOf($3T.class), $5T.valueOf($1T.class))",
methodReturn.toClass(), DefaultConversionService.class, Streamable.class,
context.localVariable("resultList"), TypeDescriptor.class);
} else if (isSet(methodReturn)) {
builder.addStatement("return ($T) convertOne($L, false, $T.class)", List.class,
context.localVariable("resultList"), methodReturn.toClass());
} else {
builder.addStatement("return ($T) $L", List.class, context.localVariable("resultList"));
}
} else if (returnCount) {
builder.addStatement("return $T.valueOf($L.size())",
ClassUtils.resolvePrimitiveIfNecessary(methodReturn.getActualReturnClass()),
context.localVariable("resultList"));
} else {
builder.addStatement(LordOfTheStrings.returning(methodReturn.toClass())
.optional("($1T) ($2L.isEmpty() ? null : $2L.iterator().next())", typeToRead,
context.localVariable("resultList")) //
.build());
}
} else if (aotQuery != null && aotQuery.isExists()) {
builder.addStatement("return !$L.getResultList().isEmpty()", queryVariableName);
} else if (aotQuery != null) {
if (isProjecting) {
TypeName returnType = TypeNames.typeNameOrWrapper(methodReturn.getActualType());
CodeBlock convertTo;
if (StringUtils.hasText(context.getDynamicProjectionParameterName())) {
convertTo = CodeBlock.of("$L", context.getDynamicProjectionParameterName());
} else {
if (methodReturn.isArray() && methodReturn.getActualType().toClass().equals(byte.class)) {
returnType = TypeName.get(byte[].class);
convertTo = CodeBlock.of("$T.class", returnType);
} else {
convertTo = CodeBlock.of("$T.class", TypeNames.classNameOrWrapper(methodReturn.getActualType()));
}
}
if (queryMethod.isCollectionQuery()) {
if (isStreamable(methodReturn)) {
builder.addStatement("return ($1T) $1T.of(($2T) convertMany($3L.getResultList(), $4L, $5L))",
Streamable.class, Iterable.class, queryVariableName, aotQuery.isNative(), convertTo);
} else if (isSet(methodReturn)) {
builder.addStatement("return ($T) convertOne(convertMany($L.getResultList(), $L, $L), false, $T.class)",
methodReturn.getTypeName(), queryVariableName, aotQuery.isNative(), convertTo,
methodReturn.toClass());
} else {
builder.addStatement("return ($T) convertMany($L.getResultList(), $L, $L)", methodReturn.getTypeName(),
queryVariableName, aotQuery.isNative(), convertTo);
}
} else if (queryMethod.isStreamQuery()) {
builder.addStatement("return ($T) convertMany($L.getResultStream(), $L, $L)", methodReturn.getTypeName(),
queryVariableName, aotQuery.isNative(), convertTo);
} else if (queryMethod.isPageQuery()) {
builder.addStatement("return $T.getPage(($T<$T>) convertMany($L.getResultList(), $L, $L), $L, $L)",
PageableExecutionUtils.class, List.class, TypeNames.typeNameOrWrapper(methodReturn.getActualType()),
queryVariableName, aotQuery.isNative(), convertTo, pageable, context.localVariable("countAll"));
} else if (queryMethod.isSliceQuery()) {
builder.addStatement("$T<$T> $L = ($T<$T>) convertMany($L.getResultList(), $L, $L)", List.class,
TypeNames.typeNameOrWrapper(methodReturn.getActualType()), context.localVariable("resultList"),
List.class, typeToRead, queryVariableName, aotQuery.isNative(), convertTo);
builder.addStatement("boolean $L = $L.isPaged() && $L.size() > $L.getPageSize()",
context.localVariable("hasNext"), pageable, context.localVariable("resultList"), pageable);
builder.addStatement("return new $T<>($L ? $L.subList(0, $L.getPageSize()) : $L, $L, $L)", SliceImpl.class,
context.localVariable("hasNext"), context.localVariable("resultList"), pageable,
context.localVariable("resultList"), pageable, context.localVariable("hasNext"));
} else {
builder.addStatement(LordOfTheStrings.returning(methodReturn.toClass())
.optional("($T) convertOne($L.getSingleResultOrNull(), $L, $L)", returnType, queryVariableName,
aotQuery.isNative(), convertTo) //
.build());
}
} else {
if (queryMethod.isCollectionQuery()) {
if (isStreamable(methodReturn)) {
builder.addStatement("return ($T) $T.of($L.getResultList())", methodReturn.getTypeName(),
Streamable.class, queryVariableName);
} else if (isStreamableWrapper(methodReturn) && canConvert(Streamable.class, methodReturn)) {
builder.addStatement(
"return ($1T) $2T.getSharedInstance().convert($3T.of($4L.getResultList()), $5T.valueOf($3T.class), $5T.valueOf($1T.class))",
methodReturn.toClass(), DefaultConversionService.class, Streamable.class, queryVariableName,
TypeDescriptor.class);
} else if (isSet(methodReturn)) {
builder.addStatement("return ($T) convertOne($L.getResultList(), false, $T.class)",
methodReturn.getTypeName(), queryVariableName, methodReturn.toClass());
} else {
builder.addStatement("return ($T) $L.getResultList()", methodReturn.getTypeName(), queryVariableName);
}
} else if (queryMethod.isStreamQuery()) {
builder.addStatement("return ($T) $L.getResultStream()", methodReturn.getTypeName(), queryVariableName);
} else if (queryMethod.isPageQuery()) {
builder.addStatement("return $T.getPage(($T<$T>) $L.getResultList(), $L, $L)", PageableExecutionUtils.class,
List.class, typeToRead, queryVariableName, pageable, context.localVariable("countAll"));
} else if (queryMethod.isSliceQuery()) {
builder.addStatement("$T<$T> $L = $L.getResultList()", List.class, typeToRead,
context.localVariable("resultList"), queryVariableName);
builder.addStatement("boolean $L = $L.isPaged() && $L.size() > $L.getPageSize()",
context.localVariable("hasNext"), pageable, context.localVariable("resultList"), pageable);
builder.addStatement("return new $T<>($L ? $L.subList(0, $L.getPageSize()) : $L, $L, $L)", SliceImpl.class,
context.localVariable("hasNext"), context.localVariable("resultList"), pageable,
context.localVariable("resultList"), pageable, context.localVariable("hasNext"));
} else {
builder.addStatement(LordOfTheStrings.returning(methodReturn.toClass())
.optional("($T) convertOne($L.getSingleResultOrNull(), $L, $T.class)",
TypeNames.typeNameOrWrapper(methodReturn.getActualType()), queryVariableName, aotQuery.isNative(),
TypeNames.classNameOrWrapper(methodReturn.getActualType())) //
.build());
}
}
}
return builder.build();
}
private boolean canConvert(Class<?> from, MethodReturn methodReturn) {
return DefaultConversionService.getSharedInstance().canConvert(from, methodReturn.toClass());
}
private static boolean isSet(MethodReturn methodReturn) {
return Set.class.isAssignableFrom(methodReturn.toClass());
}
private static boolean isStreamable(MethodReturn methodReturn) {
return methodReturn.toClass().equals(Streamable.class);
}
private static boolean isStreamableWrapper(MethodReturn methodReturn) {
return !isStreamable(methodReturn) && Streamable.class.isAssignableFrom(methodReturn.toClass());
}
public static boolean returnsModifying(Class<?> returnType) {
return returnType == int.class || returnType == long.class || returnType == Integer.class
|| returnType == Long.class;
}
}
}
|
QueryExecutionBlockBuilder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/type/descriptor/java/CharacterJavaType.java
|
{
"start": 438,
"end": 3203
}
|
class ____ extends AbstractClassJavaType<Character> implements
PrimitiveJavaType<Character> {
public static final CharacterJavaType INSTANCE = new CharacterJavaType();
public CharacterJavaType() {
super( Character.class );
}
@Override
public boolean useObjectEqualsHashCode() {
return true;
}
@Override
public String toString(Character value) {
return value.toString();
}
@Override
public Character fromString(CharSequence string) {
if ( string.length() != 1 ) {
throw new CoercionException( "value must contain exactly one character: '" + string + "'" );
}
return string.charAt( 0 );
}
@Override
public boolean isInstance(Object value) {
return value instanceof Character;
}
@SuppressWarnings("unchecked")
@Override
public <X> X unwrap(Character value, Class<X> type, WrapperOptions options) {
if ( value == null ) {
return null;
}
if ( Character.class.isAssignableFrom( type ) || type == Object.class ) {
return (X) value;
}
if ( String.class.isAssignableFrom( type ) ) {
return (X) value.toString();
}
if ( Number.class.isAssignableFrom( type ) ) {
return (X) Short.valueOf( (short) value.charValue() );
}
throw unknownUnwrap( type );
}
@Override
public <X> Character wrap(X value, WrapperOptions options) {
if ( value == null ) {
return null;
}
else if (value instanceof Character character) {
return character;
}
else if (value instanceof String string) {
switch ( string.length() ) {
case 1:
return string.charAt( 0 );
case 0:
if ( options.getDialect().stripsTrailingSpacesFromChar() ) {
// we previously stored char values in char(1) columns on MySQL
// but MySQL strips trailing spaces from the value when read
return ' ';
}
else {
throw new CoercionException( "value does not contain a character: '" + string + "'" );
}
default:
throw new CoercionException( "value contains more than one character: '" + string + "'" );
}
}
else if (value instanceof Number number) {
return (char) number.shortValue();
}
else {
throw unknownWrap( value.getClass() );
}
}
@Override
public Class<?> getPrimitiveClass() {
return char.class;
}
@Override
public Class<Character[]> getArrayClass() {
return Character[].class;
}
@Override
public Class<?> getPrimitiveArrayClass() {
return char[].class;
}
@Override
public Character getDefaultValue() {
return 0;
}
@Override
public long getDefaultSqlLength(Dialect dialect, JdbcType jdbcType) {
return 1;
}
@Override
public int getDefaultSqlPrecision(Dialect dialect, JdbcType jdbcType) {
return 3;
}
@Override
public int getDefaultSqlScale(Dialect dialect, JdbcType jdbcType) {
return 0;
}
}
|
CharacterJavaType
|
java
|
eclipse-vertx__vert.x
|
vertx-core/src/main/java/io/vertx/core/eventbus/impl/CodecManager.java
|
{
"start": 1041,
"end": 8631
}
|
class ____ {
// The standard message codecs
public static final MessageCodec<String, String> PING_MESSAGE_CODEC = new PingMessageCodec();
public static final MessageCodec<String, String> NULL_MESSAGE_CODEC = new NullMessageCodec();
public static final MessageCodec<String, String> STRING_MESSAGE_CODEC = new StringMessageCodec();
public static final MessageCodec<Buffer, Buffer> BUFFER_MESSAGE_CODEC = new BufferMessageCodec();
public static final MessageCodec<JsonObject, JsonObject> JSON_OBJECT_MESSAGE_CODEC = new JsonObjectMessageCodec();
public static final MessageCodec<JsonArray, JsonArray> JSON_ARRAY_MESSAGE_CODEC = new JsonArrayMessageCodec();
public static final MessageCodec<byte[], byte[]> BYTE_ARRAY_MESSAGE_CODEC = new ByteArrayMessageCodec();
public static final MessageCodec<Integer, Integer> INT_MESSAGE_CODEC = new IntMessageCodec();
public static final MessageCodec<Long, Long> LONG_MESSAGE_CODEC = new LongMessageCodec();
public static final MessageCodec<Float, Float> FLOAT_MESSAGE_CODEC = new FloatMessageCodec();
public static final MessageCodec<Double, Double> DOUBLE_MESSAGE_CODEC = new DoubleMessageCodec();
public static final MessageCodec<Boolean, Boolean> BOOLEAN_MESSAGE_CODEC = new BooleanMessageCodec();
public static final MessageCodec<Short, Short> SHORT_MESSAGE_CODEC = new ShortMessageCodec();
public static final MessageCodec<Character, Character> CHAR_MESSAGE_CODEC = new CharMessageCodec();
public static final MessageCodec<Byte, Byte> BYTE_MESSAGE_CODEC = new ByteMessageCodec();
public static final MessageCodec<ReplyException, ReplyException> REPLY_EXCEPTION_MESSAGE_CODEC = new ReplyExceptionMessageCodec();
private final MessageCodec[] systemCodecs;
private final ConcurrentMap<String, MessageCodec> userCodecMap = new ConcurrentHashMap<>();
private final ConcurrentMap<Class, MessageCodec> defaultCodecMap = new ConcurrentHashMap<>();
private final ClusterSerializableCodec clusterSerializableCodec = new ClusterSerializableCodec(this);
private final SerializableCodec serializableCodec = new SerializableCodec(this);
private volatile Function<String, Boolean> clusterSerializableCheck = s -> Boolean.FALSE;
private volatile Function<String, Boolean> serializableCheck = EventBus.DEFAULT_SERIALIZABLE_CHECKER;
private volatile Function<Object, String> codecSelector = o -> null;
public CodecManager() {
this.systemCodecs = codecs(NULL_MESSAGE_CODEC, PING_MESSAGE_CODEC, STRING_MESSAGE_CODEC, BUFFER_MESSAGE_CODEC, JSON_OBJECT_MESSAGE_CODEC, JSON_ARRAY_MESSAGE_CODEC,
BYTE_ARRAY_MESSAGE_CODEC, INT_MESSAGE_CODEC, LONG_MESSAGE_CODEC, FLOAT_MESSAGE_CODEC, DOUBLE_MESSAGE_CODEC,
BOOLEAN_MESSAGE_CODEC, SHORT_MESSAGE_CODEC, CHAR_MESSAGE_CODEC, BYTE_MESSAGE_CODEC, REPLY_EXCEPTION_MESSAGE_CODEC,
clusterSerializableCodec, serializableCodec);
}
public MessageCodec lookupCodec(Object body, String codecName, boolean local) {
MessageCodec codec;
if (codecName != null) {
codec = getCodec(codecName);
} else if (body == null) {
codec = NULL_MESSAGE_CODEC;
} else if (body instanceof String) {
codec = STRING_MESSAGE_CODEC;
} else if (body instanceof Buffer) {
codec = BUFFER_MESSAGE_CODEC;
} else if (body instanceof JsonObject) {
codec = JSON_OBJECT_MESSAGE_CODEC;
} else if (body instanceof JsonArray) {
codec = JSON_ARRAY_MESSAGE_CODEC;
} else if (body instanceof byte[]) {
codec = BYTE_ARRAY_MESSAGE_CODEC;
} else if (body instanceof Integer) {
codec = INT_MESSAGE_CODEC;
} else if (body instanceof Long) {
codec = LONG_MESSAGE_CODEC;
} else if (body instanceof Float) {
codec = FLOAT_MESSAGE_CODEC;
} else if (body instanceof Double) {
codec = DOUBLE_MESSAGE_CODEC;
} else if (body instanceof Boolean) {
codec = BOOLEAN_MESSAGE_CODEC;
} else if (body instanceof Short) {
codec = SHORT_MESSAGE_CODEC;
} else if (body instanceof Character) {
codec = CHAR_MESSAGE_CODEC;
} else if (body instanceof Byte) {
codec = BYTE_MESSAGE_CODEC;
} else if (body instanceof ReplyException) {
codec = defaultCodecMap.get(body.getClass());
if (codec == null) {
codec = REPLY_EXCEPTION_MESSAGE_CODEC;
}
} else {
codec = defaultCodecMap.get(body.getClass());
if (codec == null) {
if ((codecName = codecSelector.apply(body)) != null) {
codec = getCodec(codecName);
} else if (body instanceof ClusterSerializable && (local || acceptClusterSerializable(body.getClass().getName()))) {
codec = clusterSerializableCodec;
} else if (body instanceof Serializable && (local || acceptSerializable(body.getClass().getName()))) {
codec = serializableCodec;
}
}
}
if (codec == null) {
throw new IllegalArgumentException("No message codec for type: " + body.getClass());
}
return codec;
}
public MessageCodec getCodec(String codecName) {
return userCodecMap.get(codecName);
}
public void registerCodec(MessageCodec codec) {
Objects.requireNonNull(codec, "codec");
Objects.requireNonNull(codec.name(), "code.name()");
checkSystemCodec(codec);
if (userCodecMap.containsKey(codec.name())) {
throw new IllegalStateException("Already a codec registered with name " + codec.name());
}
userCodecMap.put(codec.name(), codec);
}
public void unregisterCodec(String name) {
Objects.requireNonNull(name);
userCodecMap.remove(name);
}
public <T> void registerDefaultCodec(Class<T> clazz, MessageCodec<T, ?> codec) {
Objects.requireNonNull(clazz);
Objects.requireNonNull(codec, "codec");
Objects.requireNonNull(codec.name(), "code.name()");
checkSystemCodec(codec);
if (defaultCodecMap.containsKey(clazz)) {
throw new IllegalStateException("Already a default codec registered for class " + clazz);
}
if (userCodecMap.containsKey(codec.name())) {
throw new IllegalStateException("Already a codec registered with name " + codec.name());
}
defaultCodecMap.put(clazz, codec);
userCodecMap.put(codec.name(), codec);
}
public void unregisterDefaultCodec(Class clazz) {
Objects.requireNonNull(clazz);
MessageCodec codec = defaultCodecMap.remove(clazz);
if (codec != null) {
userCodecMap.remove(codec.name());
}
}
public MessageCodec[] systemCodecs() {
return systemCodecs;
}
private void checkSystemCodec(MessageCodec codec) {
if (codec.systemCodecID() != -1) {
throw new IllegalArgumentException("Can't register a system codec");
}
}
private MessageCodec[] codecs(MessageCodec... codecs) {
MessageCodec[] arr = new MessageCodec[codecs.length];
for (MessageCodec codec : codecs) {
arr[codec.systemCodecID()] = codec;
}
return arr;
}
public void clusterSerializableCheck(Function<String, Boolean> classNamePredicate) {
this.clusterSerializableCheck = Objects.requireNonNull(classNamePredicate);
}
public boolean acceptClusterSerializable(String className) {
return clusterSerializableCheck.apply(className);
}
public void serializableCheck(Function<String, Boolean> classNamePredicate) {
this.serializableCheck = Objects.requireNonNull(classNamePredicate);
}
public boolean acceptSerializable(String className) {
return serializableCheck.apply(className);
}
public void codecSelector(Function<Object, String> selector) {
this.codecSelector = Objects.requireNonNull(selector);
}
}
|
CodecManager
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/model/PropertyMapping.java
|
{
"start": 6554,
"end": 39855
}
|
class ____ extends MappingBuilderBase<PropertyMappingBuilder> {
// initial properties
private String defaultValue;
private String defaultJavaExpression;
private String conditionJavaExpression;
private SourceReference sourceReference;
private SourceRHS rightHandSide;
private FormattingParameters formattingParameters;
private SelectionParameters selectionParameters;
private MappingControl mappingControl;
private MappingReferences forgeMethodWithMappingReferences;
private boolean forceUpdateMethod;
private boolean forgedNamedBased = true;
private NullValueCheckStrategyGem nvcs;
private NullValuePropertyMappingStrategyGem nvpms;
PropertyMappingBuilder() {
super( PropertyMappingBuilder.class );
}
public PropertyMappingBuilder sourceReference(SourceReference sourceReference) {
this.sourceReference = sourceReference;
return this;
}
public PropertyMappingBuilder selectionParameters(SelectionParameters selectionParameters) {
this.selectionParameters = selectionParameters;
return this;
}
public PropertyMappingBuilder formattingParameters(FormattingParameters formattingParameters) {
this.formattingParameters = formattingParameters;
return this;
}
public PropertyMappingBuilder defaultValue(String defaultValue) {
this.defaultValue = defaultValue;
return this;
}
public PropertyMappingBuilder defaultJavaExpression(String defaultJavaExpression) {
this.defaultJavaExpression = defaultJavaExpression;
return this;
}
public PropertyMappingBuilder conditionJavaExpression(String conditionJavaExpression) {
this.conditionJavaExpression = conditionJavaExpression;
return this;
}
public PropertyMappingBuilder forgeMethodWithMappingReferences(MappingReferences mappingReferences) {
this.forgeMethodWithMappingReferences = mappingReferences;
return this;
}
/**
* Force the created mapping to use update methods when forging a method.
*
* @param forceUpdateMethod whether the mapping should force update method for forged mappings
* @return the builder for chaining
*/
public PropertyMappingBuilder forceUpdateMethod(boolean forceUpdateMethod) {
this.forceUpdateMethod = forceUpdateMethod;
return this;
}
/**
* @param forgedNamedBased mapping is based on forging
*
* @return the builder for chaining
*/
public PropertyMappingBuilder forgedNamedBased(boolean forgedNamedBased) {
this.forgedNamedBased = forgedNamedBased;
return this;
}
public PropertyMappingBuilder options(DelegatingOptions options) {
this.mappingControl = options.getMappingControl( ctx.getElementUtils() );
this.nvcs = options.getNullValueCheckStrategy();
if ( method.isUpdateMethod() ) {
this.nvpms = options.getNullValuePropertyMappingStrategy();
}
return this;
}
public PropertyMapping build() {
// handle source
this.rightHandSide = getSourceRHS( sourceReference );
ctx.getMessager().note( 2, Message.PROPERTYMAPPING_MAPPING_NOTE, rightHandSide, targetWriteAccessor );
rightHandSide.setUseElementAsSourceTypeForMatching(
targetWriteAccessorType == AccessorType.ADDER );
// all the tricky cases will be excluded for the time being.
boolean preferUpdateMethods;
if ( targetWriteAccessorType == AccessorType.ADDER ) {
preferUpdateMethods = false;
}
else {
preferUpdateMethods = method.getMappingTargetParameter() != null;
}
SelectionCriteria criteria = SelectionCriteria.forMappingMethods(
selectionParameters,
mappingControl,
targetPropertyName,
preferUpdateMethods
);
// forge a method instead of resolving one when there are mapping options.
Assignment assignment = null;
if ( forgeMethodWithMappingReferences == null ) {
assignment = ctx.getMappingResolver().getTargetAssignment(
method,
getForgedMethodHistory( rightHandSide ),
targetType,
formattingParameters,
criteria,
rightHandSide,
positionHint,
this::forge
);
}
else {
assignment = forge();
}
Type sourceType = rightHandSide.getSourceType();
if ( assignment != null ) {
ctx.getMessager().note( 2, Message.PROPERTYMAPPING_SELECT_NOTE, assignment );
if ( targetType.isCollectionOrMapType() ) {
assignment = assignToCollection( targetType, targetWriteAccessorType, assignment );
}
else if ( targetType.isArrayType() && sourceType.isArrayType() && assignment.getType() == DIRECT ) {
assignment = assignToArray( targetType, assignment );
}
else {
assignment = assignToPlain( targetType, targetWriteAccessorType, assignment );
}
}
else {
reportCannotCreateMapping();
}
return new PropertyMapping(
sourcePropertyName,
targetPropertyName,
rightHandSide.getSourceParameterName(),
targetWriteAccessor.getSimpleName(),
targetReadAccessor,
targetType,
assignment,
dependsOn,
getDefaultValueAssignment( assignment ),
targetWriteAccessorType == AccessorType.PARAMETER
);
}
private Assignment forge( ) {
Assignment assignment;
Type sourceType = rightHandSide.getSourceType();
if ( ( sourceType.isCollectionType() || sourceType.isArrayType()) && targetType.isIterableType()
|| ( sourceType.isIterableType() && targetType.isCollectionType() ) ) {
assignment = forgeIterableMapping( sourceType, targetType, rightHandSide );
}
else if ( sourceType.isMapType() && targetType.isMapType() ) {
assignment = forgeMapMapping( sourceType, targetType, rightHandSide );
}
else if ( sourceType.isMapType() && !targetType.isMapType()) {
assignment = forgeMapping( sourceType, targetType.withoutBounds(), rightHandSide );
}
else if ( ( sourceType.isIterableType() && targetType.isStreamType() )
|| ( sourceType.isStreamType() && targetType.isStreamType() )
|| ( sourceType.isStreamType() && targetType.isIterableType() ) ) {
assignment = forgeStreamMapping( sourceType, targetType, rightHandSide );
}
else {
assignment = forgeMapping( rightHandSide );
}
if ( assignment != null ) {
ctx.getMessager().note( 2, Message.PROPERTYMAPPING_CREATE_NOTE, assignment );
}
return assignment;
}
/**
* Report that a mapping could not be created.
*/
private void reportCannotCreateMapping() {
if ( forgeMethodWithMappingReferences != null && ctx.isErroneous() ) {
// If we arrived here, there is an error it means that we couldn't forge a mapping method
// so skip the cannot create mapping
return;
}
if ( method instanceof ForgedMethod && ( (ForgedMethod) method ).getHistory() != null ) {
// The history that is part of the ForgedMethod misses the information from the current right hand
// side. Therefore we need to extract the most relevant history and use that in the error reporting.
ForgedMethodHistory history = getForgedMethodHistory( rightHandSide );
reportCannotCreateMapping(
method,
positionHint,
history.createSourcePropertyErrorMessage(),
history.getSourceType(),
history.getTargetType(),
history.createTargetPropertyName()
);
}
else {
reportCannotCreateMapping(
method,
positionHint,
rightHandSide.getSourceErrorMessagePart(),
rightHandSide.getSourceType(),
targetType,
targetPropertyName
);
}
}
private Assignment getDefaultValueAssignment( Assignment rhs ) {
if ( defaultValue != null
&& ( !rhs.getSourceType().isPrimitive() || rhs.getSourcePresenceCheckerReference() != null) ) {
// cannot check on null source if source is primitive unless it has a presence checker
PropertyMapping build = new ConstantMappingBuilder()
.constantExpression( defaultValue )
.formattingParameters( formattingParameters )
.selectionParameters( selectionParameters )
.dependsOn( dependsOn )
.existingVariableNames( existingVariableNames )
.mappingContext( ctx )
.sourceMethod( method )
.target( targetPropertyName, targetReadAccessor, targetWriteAccessor )
.build();
return build.getAssignment();
}
if ( defaultJavaExpression != null
&& ( !rhs.getSourceType().isPrimitive() || rhs.getSourcePresenceCheckerReference() != null) ) {
// cannot check on null source if source is primitive unless it has a presence checker
PropertyMapping build = new JavaExpressionMappingBuilder()
.javaExpression( defaultJavaExpression )
.dependsOn( dependsOn )
.existingVariableNames( existingVariableNames )
.mappingContext( ctx )
.sourceMethod( method )
.target( targetPropertyName, targetReadAccessor, targetWriteAccessor )
.build();
return build.getAssignment();
}
return null;
}
private Assignment assignToPlain(Type targetType, AccessorType targetAccessorType,
Assignment rightHandSide) {
Assignment result;
if ( targetAccessorType == AccessorType.SETTER || targetAccessorType.isFieldAssignment() ) {
result = assignToPlainViaSetter( targetType, rightHandSide );
}
else {
result = assignToPlainViaAdder( rightHandSide );
}
return result;
}
private Assignment assignToPlainViaSetter(Type targetType, Assignment rhs) {
if ( rhs.isCallingUpdateMethod() ) {
if ( targetReadAccessor == null ) {
ctx.getMessager().printMessage(
method.getExecutable(),
positionHint,
Message.PROPERTYMAPPING_NO_READ_ACCESSOR_FOR_TARGET_TYPE,
targetPropertyName
);
}
Assignment factory = ObjectFactoryMethodResolver
.getFactoryMethod( method, targetType, SelectionParameters.forSourceRHS( rightHandSide ), ctx );
if ( factory == null && targetBuilderType != null) {
// If there is no dedicated factory method and the target has a builder we will try to use that
MethodReference builderFactoryMethod = ObjectFactoryMethodResolver.getBuilderFactoryMethod(
targetType,
targetBuilderType
);
if ( builderFactoryMethod != null ) {
MethodReference finisherMethod = BuilderFinisherMethodResolver.getBuilderFinisherMethod(
method,
targetBuilderType,
ctx
);
if ( finisherMethod != null ) {
factory = MethodReference.forMethodChaining( builderFactoryMethod, finisherMethod );
}
}
}
return new UpdateWrapper(
rhs,
method.getThrownTypes(),
factory,
isFieldAssignment(),
targetType,
!rhs.isSourceReferenceParameter(),
nvpms == SET_TO_NULL && !targetType.isPrimitive(),
nvpms == SET_TO_DEFAULT
);
}
else {
// If the property mapping has a default value assignment then we have to do a null value check
boolean includeSourceNullCheck = setterWrapperNeedsSourceNullCheck( rhs, targetType );
if ( !includeSourceNullCheck ) {
// solution for #834 introduced a local var and null check for nested properties always.
// however, a local var is not needed if there's no need to check for null.
rhs.setSourceLocalVarName( null );
}
return new SetterWrapper(
rhs,
method.getThrownTypes(),
isFieldAssignment(),
includeSourceNullCheck,
includeSourceNullCheck && nvpms == SET_TO_NULL && !targetType.isPrimitive(),
nvpms == SET_TO_DEFAULT );
}
}
/**
* Checks whether the setter wrapper should include a null / presence check or not
*
* @param rhs the source right hand side
* @param targetType the target type
*
* @return whether to include a null / presence check or not
*/
private boolean setterWrapperNeedsSourceNullCheck(Assignment rhs, Type targetType) {
if ( rhs.getSourceType().isPrimitive() && rhs.getSourcePresenceCheckerReference() == null ) {
// If the source type is primitive or it doesn't have a presence checker then
// we shouldn't do a null check
return false;
}
if ( nvcs == ALWAYS ) {
// NullValueCheckStrategy is ALWAYS -> do a null check
return true;
}
if ( rhs.getSourcePresenceCheckerReference() != null ) {
// There is an explicit source presence check method -> do a null / presence check
return true;
}
if ( nvpms == SET_TO_DEFAULT || nvpms == IGNORE ) {
// NullValuePropertyMapping is SET_TO_DEFAULT or IGNORE -> do a null check
return true;
}
if ( rhs.getType().isConverted() ) {
// A type conversion is applied, so a null check is required
return true;
}
if ( rhs.getType().isDirect() && targetType.isPrimitive() ) {
// If the type is direct and the target type is primitive (i.e. we are unboxing) then check is needed
return true;
}
if ( hasDefaultValueOrDefaultExpression() ) {
// If there is default value defined then a check is needed
return true;
}
return false;
}
private boolean hasDefaultValueOrDefaultExpression() {
return defaultValue != null || defaultJavaExpression != null;
}
private Assignment assignToPlainViaAdder( Assignment rightHandSide) {
Assignment result = rightHandSide;
String adderIteratorName = sourcePropertyName == null ? targetPropertyName : sourcePropertyName;
if ( result.getSourceType().isIterableType() ) {
result = new AdderWrapper( result, method.getThrownTypes(), isFieldAssignment(), adderIteratorName );
}
else if ( result.getSourceType().isStreamType() ) {
result = new StreamAdderWrapper(
result, method.getThrownTypes(), isFieldAssignment(), adderIteratorName );
}
else {
// Possibly adding null to a target collection. So should be surrounded by an null check.
// TODO: what triggers this else branch? Should nvcs, nvpms be applied?
result = new SetterWrapper( result,
method.getThrownTypes(),
isFieldAssignment(),
true,
nvpms == SET_TO_NULL && !targetType.isPrimitive(),
nvpms == SET_TO_DEFAULT
);
}
return result;
}
private Assignment assignToCollection(Type targetType, AccessorType targetAccessorType,
Assignment rhs) {
return new CollectionAssignmentBuilder()
.mappingBuilderContext( ctx )
.method( method )
.targetReadAccessor( targetReadAccessor )
.targetType( targetType )
.targetPropertyName( targetPropertyName )
.targetAccessorType( targetAccessorType )
.rightHandSide( rightHandSide )
.assignment( rhs )
.nullValueCheckStrategy( hasDefaultValueOrDefaultExpression() ? ALWAYS : nvcs )
.nullValuePropertyMappingStrategy( nvpms )
.build();
}
private Assignment assignToArray(Type targetType, Assignment rightHandSide) {
Type arrayType = ctx.getTypeFactory().getType( Arrays.class );
//TODO init default value
return new ArrayCopyWrapper(
rightHandSide,
targetPropertyName,
arrayType,
targetType,
isFieldAssignment(),
nvpms == SET_TO_NULL && !targetType.isPrimitive(),
nvpms == SET_TO_DEFAULT );
}
private SourceRHS getSourceRHS( SourceReference sourceReference ) {
Parameter sourceParam = sourceReference.getParameter();
PropertyEntry propertyEntry = sourceReference.getDeepestProperty();
// parameter reference
if ( propertyEntry == null ) {
SourceRHS sourceRHS = new SourceRHS(
sourceParam.getName(),
sourceParam.getType(),
existingVariableNames,
sourceReference.toString()
);
sourceRHS.setSourcePresenceCheckerReference( getSourcePresenceCheckerRef(
sourceReference,
sourceRHS
) );
return sourceRHS;
}
// simple property
else if ( !sourceReference.isNested() ) {
String sourceRef = sourceParam.getName() + "." + propertyEntry.getReadAccessor().getReadValueSource();
SourceRHS sourceRHS = new SourceRHS(
sourceParam.getName(),
sourceRef,
null,
propertyEntry.getType(),
existingVariableNames,
sourceReference.toString()
);
sourceRHS.setSourcePresenceCheckerReference( getSourcePresenceCheckerRef(
sourceReference,
sourceRHS
) );
return sourceRHS;
}
// nested property given as dot path
else {
Type sourceType = propertyEntry.getType();
if ( sourceType.isPrimitive() && !targetType.isPrimitive() ) {
// Handle null's. If the forged method needs to be mapped to an object, the forged method must be
// able to return null. So in that case primitive types are mapped to their corresponding wrapped
// type. The source type becomes the wrapped type in that case.
sourceType = ctx.getTypeFactory().getWrappedType( sourceType );
}
// forge a method from the parameter type to the last entry type.
String forgedName = Strings.joinAndCamelize( sourceReference.getElementNames() );
forgedName = Strings.getSafeVariableName( forgedName, ctx.getReservedNames() );
Type sourceParameterType = sourceReference.getParameter().getType();
ForgedMethod methodRef = forParameterMapping( forgedName, sourceParameterType, sourceType, method );
NestedPropertyMappingMethod.Builder builder = new NestedPropertyMappingMethod.Builder();
NestedPropertyMappingMethod nestedPropertyMapping = builder
.method( methodRef )
.propertyEntries( sourceReference.getPropertyEntries() )
.mappingContext( ctx )
.build();
// add if not yet existing
if ( !ctx.getMappingsToGenerate().contains( nestedPropertyMapping ) ) {
ctx.getMappingsToGenerate().add( nestedPropertyMapping );
}
else {
forgedName = ctx.getExistingMappingMethod( nestedPropertyMapping ).getName();
}
String sourceRef = forgedName + "( " + sourceParam.getName() + " )";
SourceRHS sourceRhs = new SourceRHS( sourceParam.getName(),
sourceRef,
null,
sourceType,
existingVariableNames,
sourceReference.toString()
);
sourceRhs.setSourcePresenceCheckerReference( getSourcePresenceCheckerRef(
sourceReference,
sourceRhs
) );
// create a local variable to which forged method can be assigned.
String desiredName = propertyEntry.getName();
sourceRhs.setSourceLocalVarName( sourceRhs.createUniqueVarName( desiredName ) );
return sourceRhs;
}
}
private PresenceCheck getSourcePresenceCheckerRef(SourceReference sourceReference,
SourceRHS sourceRHS) {
if ( conditionJavaExpression != null ) {
return new JavaExpressionPresenceCheck( conditionJavaExpression );
}
SelectionParameters selectionParameters = this.selectionParameters != null ?
this.selectionParameters.withSourceRHS( sourceRHS ) :
SelectionParameters.forSourceRHS( sourceRHS );
PresenceCheck presenceCheck = PresenceCheckMethodResolver.getPresenceCheck(
method,
selectionParameters,
ctx
);
if ( presenceCheck != null ) {
return presenceCheck;
}
PresenceCheck sourcePresenceChecker = null;
if ( !sourceReference.getPropertyEntries().isEmpty() ) {
Parameter sourceParam = sourceReference.getParameter();
// TODO is first correct here?? shouldn't it be last since the remainer is checked
// in the forged method?
PropertyEntry propertyEntry = sourceReference.getShallowestProperty();
if ( propertyEntry.getPresenceChecker() != null ) {
List<PresenceCheck> presenceChecks = new ArrayList<>();
presenceChecks.add( new SuffixPresenceCheck(
sourceParam.getName(),
propertyEntry.getPresenceChecker().getPresenceCheckSuffix()
) );
String variableName = sourceParam.getName() + "."
+ propertyEntry.getReadAccessor().getReadValueSource();
for (int i = 1; i < sourceReference.getPropertyEntries().size(); i++) {
PropertyEntry entry = sourceReference.getPropertyEntries().get( i );
if (entry.getPresenceChecker() != null && entry.getReadAccessor() != null) {
presenceChecks.add( new NullPresenceCheck( variableName ) );
presenceChecks.add( new SuffixPresenceCheck(
variableName,
entry.getPresenceChecker().getPresenceCheckSuffix()
) );
variableName = variableName + "." + entry.getReadAccessor().getSimpleName() + "()";
}
else {
break;
}
}
if ( presenceChecks.size() == 1 ) {
sourcePresenceChecker = presenceChecks.get( 0 );
}
else {
sourcePresenceChecker = new AllPresenceChecksPresenceCheck( presenceChecks );
}
}
}
return sourcePresenceChecker;
}
private Assignment forgeStreamMapping(Type sourceType, Type targetType, SourceRHS source) {
StreamMappingMethod.Builder builder = new StreamMappingMethod.Builder();
return forgeWithElementMapping( sourceType, targetType, source, builder );
}
private Assignment forgeIterableMapping(Type sourceType, Type targetType, SourceRHS source) {
IterableMappingMethod.Builder builder = new IterableMappingMethod.Builder();
return forgeWithElementMapping( sourceType, targetType, source, builder );
}
private Assignment forgeWithElementMapping(Type sourceType, Type targetType, SourceRHS source,
ContainerMappingMethodBuilder<?, ? extends ContainerMappingMethod> builder) {
targetType = targetType.withoutBounds();
ForgedMethod methodRef = prepareForgedMethod( sourceType, targetType, source, "[]" );
Supplier<MappingMethod> mappingMethodCreator = () -> builder
.mappingContext( ctx )
.method( methodRef )
.selectionParameters( selectionParameters )
.callingContextTargetPropertyName( targetPropertyName )
.positionHint( positionHint )
.build();
return getOrCreateForgedAssignment( source, methodRef, mappingMethodCreator );
}
private ForgedMethod prepareForgedMethod(Type sourceType, Type targetType, SourceRHS source, String suffix) {
String name = getName( sourceType, targetType );
name = Strings.getSafeVariableName( name, ctx.getReservedNames() );
// copy mapper configuration from the source method, its the same mapper
ForgedMethodHistory forgedMethodHistory = getForgedMethodHistory( source, suffix );
return forElementMapping( name, sourceType, targetType, method, forgedMethodHistory, forgedNamedBased );
}
private Assignment forgeMapMapping(Type sourceType, Type targetType, SourceRHS source) {
targetType = targetType.withoutBounds();
ForgedMethod methodRef = prepareForgedMethod( sourceType, targetType, source, "{}" );
MapMappingMethod.Builder builder = new MapMappingMethod.Builder();
Supplier<MappingMethod> mapMappingMethodCreator = () -> builder
.mappingContext( ctx )
.method( methodRef )
.build();
return getOrCreateForgedAssignment( source, methodRef, mapMappingMethodCreator );
}
private Assignment forgeMapping(SourceRHS sourceRHS) {
Type sourceType;
if ( targetWriteAccessorType == AccessorType.ADDER ) {
sourceType = sourceRHS.getSourceTypeForMatching();
}
else {
sourceType = sourceRHS.getSourceType();
}
if ( forgedNamedBased && !canGenerateAutoSubMappingBetween( sourceType, targetType ) ) {
return null;
}
return forgeMapping( sourceType, targetType, sourceRHS );
}
private Assignment forgeMapping(Type sourceType, Type targetType, SourceRHS sourceRHS) {
//Fail fast. If we could not find the method by now, no need to try
if ( sourceType.isPrimitive() || targetType.isPrimitive() ) {
return null;
}
String name = getName( sourceType, targetType );
name = Strings.getSafeVariableName( name, ctx.getReservedNames() );
List<Parameter> parameters = new ArrayList<>( method.getContextParameters() );
Type returnType;
// there's only one case for forging a method with mapping options: nested target properties.
// They should forge an update method only if we set the forceUpdateMethod. This is set to true,
// because we are forging a Mapping for a method with multiple source parameters.
// If the target type is enum, then we can't create an update method
if ( !targetType.isEnumType() && ( method.isUpdateMethod() || forceUpdateMethod )
&& targetWriteAccessorType != AccessorType.ADDER) {
parameters.add( Parameter.forForgedMappingTarget( targetType ) );
returnType = ctx.getTypeFactory().createVoidType();
}
else {
returnType = targetType;
}
ForgedMethod forgedMethod = forPropertyMapping( name,
sourceType,
returnType,
parameters,
method,
getForgedMethodHistory( sourceRHS ),
forgeMethodWithMappingReferences,
forgedNamedBased
);
return createForgedAssignment( sourceRHS, targetBuilderType, forgedMethod );
}
private ForgedMethodHistory getForgedMethodHistory(SourceRHS sourceRHS) {
return getForgedMethodHistory( sourceRHS, "" );
}
private ForgedMethodHistory getForgedMethodHistory(SourceRHS sourceRHS, String suffix) {
ForgedMethodHistory history = null;
if ( method instanceof ForgedMethod ) {
ForgedMethod method = (ForgedMethod) this.method;
history = method.getHistory();
}
return new ForgedMethodHistory( history, getSourceElementName() + suffix,
targetPropertyName + suffix, sourceRHS.getSourceType(), targetType, true, "property"
);
}
private String getName(Type sourceType, Type targetType) {
String fromName = getName( sourceType );
String toName = getName( targetType );
return Strings.decapitalize( fromName + "To" + toName );
}
private String getName(Type type) {
StringBuilder builder = new StringBuilder();
for ( Type typeParam : type.getTypeParameters() ) {
builder.append( typeParam.getIdentification() );
}
builder.append( type.getIdentification() );
return builder.toString();
}
private String getSourceElementName() {
Parameter sourceParam = sourceReference.getParameter();
List<PropertyEntry> propertyEntries = sourceReference.getPropertyEntries();
if ( propertyEntries.isEmpty() ) {
return sourceParam.getName();
}
else if ( propertyEntries.size() == 1 ) {
PropertyEntry propertyEntry = propertyEntries.get( 0 );
return propertyEntry.getName();
}
else {
return Strings.join( sourceReference.getElementNames(), "." );
}
}
}
public static
|
PropertyMappingBuilder
|
java
|
elastic__elasticsearch
|
x-pack/plugin/apm-data/src/yamlRestTest/java/org/elasticsearch/xpack/apmdata/APMYamlTestSuiteIT.java
|
{
"start": 658,
"end": 1759
}
|
class ____ extends ESClientYamlSuiteTestCase {
@ClassRule
public static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.module("constant-keyword")
.module("counted-keyword")
.module("data-streams")
.module("ingest-common")
.module("ingest-geoip")
.module("ingest-user-agent")
.module("lang-mustache")
.module("mapper-extras")
.module("wildcard")
.module("x-pack-analytics")
.module("x-pack-apm-data")
.module("x-pack-aggregate-metric")
.module("x-pack-ilm")
.module("x-pack-stack")
.setting("ingest.geoip.downloader.enabled", "false")
.build();
public APMYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}
|
APMYamlTestSuiteIT
|
java
|
apache__dubbo
|
dubbo-remoting/dubbo-remoting-api/src/main/java/org/apache/dubbo/remoting/exchange/ExchangeHandler.java
|
{
"start": 1116,
"end": 1426
}
|
interface ____ extends ChannelHandler, TelnetHandler {
/**
* reply.
*
* @param channel
* @param request
* @return response
* @throws RemotingException
*/
CompletableFuture<Object> reply(ExchangeChannel channel, Object request) throws RemotingException;
}
|
ExchangeHandler
|
java
|
apache__flink
|
flink-core/src/test/java/org/apache/flink/api/java/typeutils/TypeExtractorTest.java
|
{
"start": 21776,
"end": 22059
}
|
class ____ {
public String myField1;
public int myField2;
public CustomType() {}
public CustomType(String myField1, int myField2) {
this.myField1 = myField1;
this.myField2 = myField2;
}
}
public static
|
CustomType
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/ConcatFunctionPipe.java
|
{
"start": 647,
"end": 1805
}
|
class ____ extends BinaryPipe {
public ConcatFunctionPipe(Source source, Expression expression, Pipe left, Pipe right) {
super(source, expression, left, right);
}
@Override
protected NodeInfo<ConcatFunctionPipe> info() {
return NodeInfo.create(this, ConcatFunctionPipe::new, expression(), left(), right());
}
@Override
protected BinaryPipe replaceChildren(Pipe left, Pipe right) {
return new ConcatFunctionPipe(source(), expression(), left, right);
}
@Override
public ConcatFunctionProcessor asProcessor() {
return new ConcatFunctionProcessor(left().asProcessor(), right().asProcessor());
}
@Override
public int hashCode() {
return Objects.hash(left(), right());
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
ConcatFunctionPipe other = (ConcatFunctionPipe) obj;
return Objects.equals(left(), other.left()) && Objects.equals(right(), other.right());
}
}
|
ConcatFunctionPipe
|
java
|
alibaba__nacos
|
client-basic/src/main/java/com/alibaba/nacos/client/address/AbstractServerListManager.java
|
{
"start": 1417,
"end": 4993
}
|
class ____ implements ServerListFactory, Closeable {
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractServerListManager.class);
protected ServerListProvider serverListProvider;
protected NacosClientProperties properties;
public AbstractServerListManager(NacosClientProperties properties) {
this(properties, null);
}
public AbstractServerListManager(NacosClientProperties properties, String namespace) {
// To avoid set operation affect the original properties.
NacosClientProperties tmpProperties = properties.derive();
if (StringUtils.isNotBlank(namespace)) {
tmpProperties.setProperty(PropertyKeyConst.NAMESPACE, namespace);
}
tmpProperties.setProperty(Constants.CLIENT_MODULE_TYPE, getModuleName());
this.properties = tmpProperties;
}
@Override
public List<String> getServerList() {
return serverListProvider.getServerList();
}
@Override
public void shutdown() throws NacosException {
String className = this.getClass().getName();
LOGGER.info("{} do shutdown begin", className);
if (null != serverListProvider) {
serverListProvider.shutdown();
}
serverListProvider = null;
LOGGER.info("{} do shutdown stop", className);
}
/**
* Start server list manager.
*
* @throws NacosException during start and initialize.
*/
public void start() throws NacosException {
Collection<ServerListProvider> serverListProviders = NacosServiceLoader.load(ServerListProvider.class);
Collection<ServerListProvider> sorted = serverListProviders.stream()
.sorted((a, b) -> b.getOrder() - a.getOrder()).collect(Collectors.toList());
for (ServerListProvider each : sorted) {
boolean matchResult = each.match(properties);
LOGGER.info("Load and match ServerListProvider {}, match result: {}", each.getClass().getCanonicalName(),
matchResult);
if (matchResult) {
this.serverListProvider = each;
LOGGER.info("Will use {} as ServerListProvider", this.serverListProvider.getClass().getCanonicalName());
break;
}
}
if (null == serverListProvider) {
LOGGER.error("No server list provider found, SPI load size: {}", sorted.size());
throw new NacosException(NacosException.CLIENT_INVALID_PARAM, "No server list provider found.");
}
this.serverListProvider.init(properties, getNacosRestTemplate());
}
public String getServerName() {
return getModuleName() + "-" + serverListProvider.getServerName();
}
public String getContextPath() {
return serverListProvider.getContextPath();
}
public String getNamespace() {
return serverListProvider.getNamespace();
}
public String getAddressSource() {
return serverListProvider.getAddressSource();
}
public boolean isFixed() {
return serverListProvider.isFixed();
}
/**
* get module name.
*
* @return module name
*/
protected abstract String getModuleName();
/**
* get nacos rest template.
*
* @return nacos rest template
*/
protected abstract NacosRestTemplate getNacosRestTemplate();
@JustForTest
NacosClientProperties getProperties() {
return properties;
}
}
|
AbstractServerListManager
|
java
|
quarkusio__quarkus
|
extensions/funqy/funqy-knative-events/runtime/src/main/java/io/quarkus/funqy/runtime/bindings/knative/events/KnativeEventsBindingRecorder.java
|
{
"start": 1845,
"end": 13625
}
|
class ____ {
private static final Logger log = Logger.getLogger(KnativeEventsBindingRecorder.class);
private static ObjectMapper objectMapper;
private static QueryObjectMapper queryMapper;
private static Map<String, Collection<FunctionInvoker>> typeTriggers;
private static Map<String, List<Predicate<CloudEvent>>> invokersFilters;
public static final String RESPONSE_TYPE = "response.cloud.event.type";
public static final String RESPONSE_SOURCE = "response.cloud.event.source";
public static final String INPUT_CE_DATA_TYPE = "io.quarkus.funqy.knative.events.INPUT_CE_DATA_TYPE";
public static final String OUTPUT_CE_DATA_TYPE = "io.quarkus.funqy.knative.events.OUTPUT_CE_DATA_TYPE";
public static final String DATA_OBJECT_READER = ObjectReader.class.getName() + "_DATA_OBJECT_READER";
public static final String DATA_OBJECT_WRITER = ObjectWriter.class.getName() + "_DATA_OBJECT_WRITER";
private final RuntimeValue<FunqyConfig> runtimeConfig;
private final RuntimeValue<FunqyKnativeEventsConfig> eventsRuntimeConfig;
public KnativeEventsBindingRecorder(
final RuntimeValue<FunqyConfig> runtimeConfig,
final RuntimeValue<FunqyKnativeEventsConfig> eventsRuntimeConfig) {
this.runtimeConfig = runtimeConfig;
this.eventsRuntimeConfig = eventsRuntimeConfig;
}
public void init() {
typeTriggers = new HashMap<>();
invokersFilters = new HashMap<>();
objectMapper = getObjectMapper()
.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
.configure(MapperFeature.ACCEPT_CASE_INSENSITIVE_PROPERTIES, true);
queryMapper = new QueryObjectMapper();
for (FunctionInvoker invoker : FunctionRecorder.registry.invokers()) {
Method method = invoker.getMethod();
String trigger;
CloudEventMapping annotation = method.getAnnotation(CloudEventMapping.class);
final List<Predicate<CloudEvent>> filter;
if (annotation != null) {
if (!annotation.trigger().isEmpty()) {
trigger = annotation.trigger();
} else {
trigger = invoker.getName();
}
filter = filter(invoker.getName(), annotation);
} else {
trigger = invoker.getName();
filter = Collections.emptyList();
}
invokersFilters.put(invoker.getName(), filter);
typeTriggers.compute(trigger, (k, v) -> {
if (v == null) {
v = new ArrayList<>();
}
// validate if there are no conflicts for the same type (trigger) and defined filters
// as resolution based on trigger (ce-type) and optional filters (on ce-attributes) can return only
// one function invoker
if (v.stream().anyMatch(i -> hasSameFilters(i.getName(), invokersFilters.get(i.getName()), filter))) {
throw new IllegalStateException("Function for trigger '" + trigger + "' has multiple matching invokers");
}
v.add(invoker);
return v;
});
if (invoker.hasInput()) {
Type inputType = invoker.getInputType();
if (CloudEvent.class.equals(Reflections.getRawType(inputType))) {
if (inputType instanceof ParameterizedType) {
Type[] params = ((ParameterizedType) inputType).getActualTypeArguments();
if (params.length == 1) {
inputType = params[0];
invoker.getBindingContext().put(INPUT_CE_DATA_TYPE, inputType);
}
} else {
throw new RuntimeException("When using CloudEvent<> generic parameter must be used.");
}
}
validateNotBoxedByteArray(inputType, invoker.getName(), "input");
JavaType javaInputType = objectMapper.constructType(inputType);
ObjectReader reader = objectMapper.readerFor(javaInputType);
invoker.getBindingContext().put(DATA_OBJECT_READER, reader);
QueryReader queryReader = queryMapper.readerFor(inputType);
invoker.getBindingContext().put(QueryReader.class.getName(), queryReader);
}
if (invoker.hasOutput()) {
Type outputType = invoker.getOutputType();
if (CloudEvent.class.equals(Reflections.getRawType(outputType))) {
if (outputType instanceof ParameterizedType) {
Type[] params = ((ParameterizedType) outputType).getActualTypeArguments();
if (params.length == 1) {
outputType = params[0];
invoker.getBindingContext().put(OUTPUT_CE_DATA_TYPE, outputType);
}
} else {
throw new RuntimeException("When using CloudEvent<> generic parameter must be used.");
}
}
validateNotBoxedByteArray(outputType, invoker.getName(), "output");
JavaType outputJavaType = objectMapper.constructType(outputType);
ObjectWriter writer = objectMapper.writerFor(outputJavaType);
invoker.getBindingContext().put(DATA_OBJECT_WRITER, writer);
String functionName = invoker.getName();
if (annotation != null && !annotation.responseType().isEmpty()) {
invoker.getBindingContext().put(RESPONSE_TYPE, annotation.responseType());
} else {
invoker.getBindingContext().put(RESPONSE_TYPE, functionName + ".output");
}
if (annotation != null && !annotation.responseSource().isEmpty()) {
invoker.getBindingContext().put(RESPONSE_SOURCE, annotation.responseSource());
} else {
invoker.getBindingContext().put(RESPONSE_SOURCE, functionName);
}
}
}
}
private ObjectMapper getObjectMapper() {
InstanceHandle<ObjectMapper> instance = Arc.container().instance(ObjectMapper.class);
if (instance.isAvailable()) {
return instance.get().copy();
}
return new ObjectMapper();
}
public Handler<RoutingContext> start(
String rootPath,
Supplier<Vertx> vertx,
ShutdownContext shutdown,
BeanContainer beanContainer,
Executor executor) {
shutdown.addShutdownTask(new Runnable() {
@Override
public void run() {
FunctionConstructor.CONTAINER = null;
objectMapper = null;
typeTriggers = null;
}
});
FunctionConstructor.CONTAINER = beanContainer;
// This needs to happen in start at RUNTIME so that
// mappings can be overridden by environment variables
FunctionInvoker defaultInvoker = null;
if (runtimeConfig.getValue().export().isPresent()) {
defaultInvoker = FunctionRecorder.registry.matchInvoker(runtimeConfig.getValue().export().get());
if (defaultInvoker == null) {
throw new RuntimeException(
"quarkus.funqy.export value does not map a function: " + runtimeConfig.getValue().export().get());
}
}
if (eventsRuntimeConfig.getValue().mapping() != null) {
for (Map.Entry<String, FunqyKnativeEventsConfig.FunctionMapping> entry : eventsRuntimeConfig.getValue().mapping()
.entrySet()) {
String functionName = entry.getKey();
FunctionInvoker invoker = FunctionRecorder.registry.matchInvoker(functionName);
if (invoker == null) {
throw new RuntimeException("knative-events.function-mapping does not map to a function: " + functionName);
}
FunqyKnativeEventsConfig.FunctionMapping mapping = entry.getValue();
if (mapping.trigger().isPresent()) {
typeTriggers.compute(mapping.trigger().get(), (k, v) -> {
if (v == null) {
v = new ArrayList<>();
}
v.add(invoker);
return v;
});
}
if (invoker.hasOutput()) {
if (mapping.responseSource().isPresent()) {
invoker.getBindingContext().put(RESPONSE_SOURCE, mapping.responseSource().get());
}
if (mapping.responseType().isPresent()) {
invoker.getBindingContext().put(RESPONSE_TYPE, mapping.responseType().get());
}
}
}
}
Handler<RoutingContext> handler = new VertxRequestHandler(vertx.get(), rootPath, beanContainer, objectMapper,
eventsRuntimeConfig.getValue(),
defaultInvoker, typeTriggers, invokersFilters, executor);
return handler;
}
private List<Predicate<CloudEvent>> filter(String functionName, CloudEventMapping mapping) {
if (mapping.attributes() == null || mapping.attributes().length == 0) {
return Collections.emptyList();
}
List<Predicate<CloudEvent>> filters = new ArrayList<>();
for (EventAttribute attribute : mapping.attributes()) {
Objects.requireNonNull(attribute.name(),
"Attribute name of the EventAttribute on function " + functionName + " is required");
Objects.requireNonNull(attribute.value(),
"Attribute name of the EventAttribute on function " + functionName + " is required");
filters.add(new CEAttributeLiteralEqualsFilter(attribute.name(), attribute.value()));
}
return filters;
}
private boolean hasSameFilters(String name, List<Predicate<CloudEvent>> one, List<Predicate<CloudEvent>> two) {
final List<Predicate<CloudEvent>> first = one != null ? one : Collections.emptyList();
final List<Predicate<CloudEvent>> second = two != null ? two : Collections.emptyList();
// empty set is sub-set of any set
if (first.size() <= 0 || second.size() <= 0) {
log.warn("Invoker " + name + " has multiple matching filters " + one + " " + two);
return true;
}
boolean result = first.size() <= second.size() ? second.containsAll(first) : first.containsAll(second);
if (result) {
log.warn("Invoker " + name + " has multiple matching filters " + one + " " + two);
}
return result;
}
private void validateNotBoxedByteArray(Type type, String functionName, String paramType) {
Class<?> rawType = Reflections.getRawType(type);
if (rawType != null && rawType.isArray()) {
Class<?> componentType = rawType.getComponentType();
if (Byte.class.equals(componentType)) {
throw new IllegalStateException(
"Function '" + functionName + "' has " + paramType + " type 'Byte[]' (boxed byte array). " +
"Use 'byte[]' (primitive byte array) instead. " +
"Byte[] is not supported for binary data handling in Funqy Knative Events.");
}
}
}
}
|
KnativeEventsBindingRecorder
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/core/ResolvableType.java
|
{
"start": 58059,
"end": 58127
}
|
interface ____ to resolve {@link TypeVariable TypeVariables}.
*/
|
used
|
java
|
apache__camel
|
core/camel-api/src/main/java/org/apache/camel/vault/HashicorpVaultConfiguration.java
|
{
"start": 946,
"end": 3567
}
|
class ____ extends VaultConfiguration {
@Metadata(secret = true)
private String token;
@Metadata
private String host;
@Metadata
private String port;
@Metadata
private String scheme;
@Metadata
private boolean cloud;
@Metadata
private String namespace;
@Metadata
private boolean refreshEnabled;
@Metadata(defaultValue = "60000")
private long refreshPeriod = 60000;
@Metadata
private String secrets;
public String getToken() {
return token;
}
/**
* Token to access hashicorp vault
*/
public void setToken(String token) {
this.token = token;
}
public String getHost() {
return host;
}
/**
* Host to access hashicorp vault
*/
public void setHost(String host) {
this.host = host;
}
public String getPort() {
return port;
}
/**
* Port to access hashicorp vault
*/
public void setPort(String port) {
this.port = port;
}
public String getScheme() {
return scheme;
}
/**
* Scheme to access hashicorp vault
*/
public void setScheme(String scheme) {
this.scheme = scheme;
}
public boolean isCloud() {
return cloud;
}
/**
* Determine if the Hashicorp Vault is deployed on Hashicorp Cloud or not
*/
public void setCloud(boolean cloud) {
this.cloud = cloud;
}
public String getNamespace() {
return namespace;
}
/**
* If the Hashicorp Vault instance is deployed on Hashicorp Cloud, this field will determine the namespace
*/
public void setNamespace(String namespace) {
this.namespace = namespace;
}
public boolean isRefreshEnabled() {
return refreshEnabled;
}
/**
* Whether to automatically reload Camel upon secrets being updated in Hashicorp Vault.
*/
public void setRefreshEnabled(boolean refreshEnabled) {
this.refreshEnabled = refreshEnabled;
}
public long getRefreshPeriod() {
return refreshPeriod;
}
/**
* The period (millis) between checking Hashicorp Vault for updated secrets.
*/
public void setRefreshPeriod(long refreshPeriod) {
this.refreshPeriod = refreshPeriod;
}
public String getSecrets() {
return secrets;
}
/**
* Specify the secret names (or pattern) to check for updates. Multiple secrets can be separated by comma.
*/
public void setSecrets(String secrets) {
this.secrets = secrets;
}
}
|
HashicorpVaultConfiguration
|
java
|
apache__dubbo
|
dubbo-common/src/main/java/org/apache/dubbo/config/context/ConfigManager.java
|
{
"start": 2236,
"end": 13025
}
|
class ____ extends AbstractConfigManager implements ApplicationExt {
private static final Logger logger = LoggerFactory.getLogger(ConfigManager.class);
public static final String NAME = "config";
public static final String BEAN_NAME = "dubboConfigManager";
public static final String DUBBO_CONFIG_MODE = ConfigKeys.DUBBO_CONFIG_MODE;
public ConfigManager(ApplicationModel applicationModel) {
super(
applicationModel,
Arrays.asList(
ApplicationConfig.class,
MonitorConfig.class,
MetricsConfig.class,
SslConfig.class,
ProtocolConfig.class,
RegistryConfig.class,
ConfigCenterConfig.class,
MetadataReportConfig.class,
TracingConfig.class));
}
public static ProtocolConfig getProtocolOrDefault(URL url) {
return getProtocolOrDefault(url.getOrDefaultApplicationModel(), url.getProtocol());
}
public static ProtocolConfig getProtocolOrDefault(String idOrName) {
return getProtocolOrDefault(ApplicationModel.defaultModel(), idOrName);
}
private static ProtocolConfig getProtocolOrDefault(ApplicationModel applicationModel, String idOrName) {
return applicationModel.getApplicationConfigManager().getOrAddProtocol(idOrName);
}
// ApplicationConfig correlative methods
/**
* Set application config
*/
@DisableInject
public void setApplication(ApplicationConfig application) {
addConfig(application);
}
public Optional<ApplicationConfig> getApplication() {
return ofNullable(getSingleConfig(getTagName(ApplicationConfig.class)));
}
public ApplicationConfig getApplicationOrElseThrow() {
return getApplication().orElseThrow(() -> new IllegalStateException("There's no ApplicationConfig specified."));
}
// MonitorConfig correlative methods
@DisableInject
public void setMonitor(MonitorConfig monitor) {
addConfig(monitor);
}
public Optional<MonitorConfig> getMonitor() {
return ofNullable(getSingleConfig(getTagName(MonitorConfig.class)));
}
@DisableInject
public void setMetrics(MetricsConfig metrics) {
addConfig(metrics);
}
public Optional<MetricsConfig> getMetrics() {
return ofNullable(getSingleConfig(getTagName(MetricsConfig.class)));
}
@DisableInject
public void setTracing(TracingConfig tracing) {
addConfig(tracing);
}
public Optional<TracingConfig> getTracing() {
return ofNullable(getSingleConfig(getTagName(TracingConfig.class)));
}
@DisableInject
public void setSsl(SslConfig sslConfig) {
addConfig(sslConfig);
}
public Optional<SslConfig> getSsl() {
return ofNullable(getSingleConfig(getTagName(SslConfig.class)));
}
// ConfigCenterConfig correlative methods
public void addConfigCenter(ConfigCenterConfig configCenter) {
addConfig(configCenter);
}
public void addConfigCenters(Iterable<ConfigCenterConfig> configCenters) {
configCenters.forEach(this::addConfigCenter);
}
public Optional<Collection<ConfigCenterConfig>> getDefaultConfigCenter() {
Collection<ConfigCenterConfig> defaults =
getDefaultConfigs(getConfigsMap(getTagName(ConfigCenterConfig.class)));
if (CollectionUtils.isEmpty(defaults)) {
defaults = getConfigCenters();
}
return ofNullable(defaults);
}
public Optional<ConfigCenterConfig> getConfigCenter(String id) {
return getConfig(ConfigCenterConfig.class, id);
}
public Collection<ConfigCenterConfig> getConfigCenters() {
return getConfigs(getTagName(ConfigCenterConfig.class));
}
// MetadataReportConfig correlative methods
public void addMetadataReport(MetadataReportConfig metadataReportConfig) {
addConfig(metadataReportConfig);
}
public void addMetadataReports(Iterable<MetadataReportConfig> metadataReportConfigs) {
metadataReportConfigs.forEach(this::addMetadataReport);
}
public Collection<MetadataReportConfig> getMetadataConfigs() {
return getConfigs(getTagName(MetadataReportConfig.class));
}
public Collection<MetadataReportConfig> getDefaultMetadataConfigs() {
Collection<MetadataReportConfig> defaults =
getDefaultConfigs(getConfigsMap(getTagName(MetadataReportConfig.class)));
if (CollectionUtils.isEmpty(defaults)) {
return getMetadataConfigs();
}
return defaults;
}
// ProtocolConfig correlative methods
public void addProtocol(ProtocolConfig protocolConfig) {
addConfig(protocolConfig);
}
public void addProtocols(Iterable<ProtocolConfig> protocolConfigs) {
if (protocolConfigs != null) {
protocolConfigs.forEach(this::addProtocol);
}
}
public Optional<ProtocolConfig> getProtocol(String idOrName) {
return getConfig(ProtocolConfig.class, idOrName);
}
public ProtocolConfig getOrAddProtocol(String idOrName) {
Optional<ProtocolConfig> protocol = getProtocol(idOrName);
if (protocol.isPresent()) {
return protocol.get();
}
// Avoiding default protocol configuration overriding custom protocol configuration
// due to `getOrAddProtocol` being called when they are not loaded
idOrName = idOrName + ".default";
protocol = getProtocol(idOrName);
if (protocol.isPresent()) {
return protocol.get();
}
ProtocolConfig protocolConfig = addConfig(new ProtocolConfig(idOrName));
// addProtocol triggers refresh when other protocols exist in the ConfigManager.
// so refresh is only done when ProtocolConfig is not refreshed.
if (!protocolConfig.isRefreshed()) {
protocolConfig.refresh();
}
return protocolConfig;
}
public List<ProtocolConfig> getDefaultProtocols() {
return getDefaultConfigs(ProtocolConfig.class);
}
@Override
@SuppressWarnings("RedundantMethodOverride")
public <C extends AbstractConfig> List<C> getDefaultConfigs(Class<C> cls) {
return getDefaultConfigs(getConfigsMap(getTagName(cls)));
}
public Collection<ProtocolConfig> getProtocols() {
return getConfigs(getTagName(ProtocolConfig.class));
}
// RegistryConfig correlative methods
public void addRegistry(RegistryConfig registryConfig) {
addConfig(registryConfig);
}
public void addRegistries(Iterable<RegistryConfig> registryConfigs) {
if (registryConfigs != null) {
registryConfigs.forEach(this::addRegistry);
}
}
public Optional<RegistryConfig> getRegistry(String id) {
return getConfig(RegistryConfig.class, id);
}
public List<RegistryConfig> getDefaultRegistries() {
return getDefaultConfigs(getConfigsMap(getTagName(RegistryConfig.class)));
}
public Collection<RegistryConfig> getRegistries() {
return getConfigs(getTagName(RegistryConfig.class));
}
@Override
public void refreshAll() {
// refresh all configs here
getApplication().ifPresent(ApplicationConfig::refresh);
getMonitor().ifPresent(MonitorConfig::refresh);
getMetrics().ifPresent(MetricsConfig::refresh);
getTracing().ifPresent(TracingConfig::refresh);
getSsl().ifPresent(SslConfig::refresh);
getProtocols().forEach(ProtocolConfig::refresh);
getRegistries().forEach(RegistryConfig::refresh);
getConfigCenters().forEach(ConfigCenterConfig::refresh);
getMetadataConfigs().forEach(MetadataReportConfig::refresh);
}
@Override
public void loadConfigs() {
// application config has load before starting config center
// load dubbo.applications.xxx
loadConfigsOfTypeFromProps(ApplicationConfig.class);
// load dubbo.monitors.xxx
loadConfigsOfTypeFromProps(MonitorConfig.class);
// load dubbo.metrics.xxx
loadConfigsOfTypeFromProps(MetricsConfig.class);
// load dubbo.tracing.xxx
loadConfigsOfTypeFromProps(TracingConfig.class);
// load multiple config types:
// load dubbo.protocols.xxx
loadConfigsOfTypeFromProps(ProtocolConfig.class);
// load dubbo.registries.xxx
loadConfigsOfTypeFromProps(RegistryConfig.class);
// load dubbo.metadata-report.xxx
loadConfigsOfTypeFromProps(MetadataReportConfig.class);
// config centers has been loaded before starting config center
// loadConfigsOfTypeFromProps(ConfigCenterConfig.class);
refreshAll();
checkConfigs();
// set model name
if (StringUtils.isBlank(applicationModel.getModelName())) {
applicationModel.setModelName(applicationModel.getApplicationName());
}
}
private void checkConfigs() {
// check config types (ignore metadata-center)
List<Class<? extends AbstractConfig>> multipleConfigTypes = Arrays.asList(
ApplicationConfig.class,
ProtocolConfig.class,
RegistryConfig.class,
MonitorConfig.class,
MetricsConfig.class,
TracingConfig.class,
SslConfig.class);
for (Class<? extends AbstractConfig> configType : multipleConfigTypes) {
checkDefaultAndValidateConfigs(configType);
}
// check port conflicts
Map<Integer, ProtocolConfig> protocolPortMap = new LinkedHashMap<>();
for (ProtocolConfig protocol : getProtocols()) {
Integer port = protocol.getPort();
if (port == null || port == -1) {
continue;
}
ProtocolConfig prevProtocol = protocolPortMap.get(port);
if (prevProtocol != null) {
throw new IllegalStateException("Duplicated port used by protocol configs, port: " + port
+ ", configs: " + Arrays.asList(prevProtocol, protocol));
}
protocolPortMap.put(port, protocol);
}
// Log the current configurations.
logger.info("The current configurations or effective configurations are as follows:");
for (Class<? extends AbstractConfig> configType : multipleConfigTypes) {
getConfigs(configType).forEach((config) -> logger.info(config.toString()));
}
}
public ConfigMode getConfigMode() {
return configMode;
}
}
|
ConfigManager
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/embeddable/XmlEmbeddableTest.java
|
{
"start": 12854,
"end": 13573
}
|
class ____ {
@Id
private Long id;
@JdbcTypeCode(SqlTypes.SQLXML)
private EmbeddableAggregate aggregate;
//end::embeddable-xml-type-mapping-example[]
//Getters and setters are omitted for brevity
public XmlHolder() {
}
public XmlHolder(Long id, EmbeddableAggregate aggregate) {
this.id = id;
this.aggregate = aggregate;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public EmbeddableAggregate getAggregate() {
return aggregate;
}
public void setAggregate(EmbeddableAggregate aggregate) {
this.aggregate = aggregate;
}
//tag::embeddable-xml-type-mapping-example[]
}
//end::embeddable-xml-type-mapping-example[]
}
|
XmlHolder
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/jpa/compliance/tck2_2/mapkeycolumn/MapKeyColumnOneToManyFKTest.java
|
{
"start": 3922,
"end": 4380
}
|
class ____ {
@Id
public Integer id;
public String name;
@JoinColumn
@MapKeyColumn( name = "a_type" )
@OneToMany( cascade = {CascadeType.PERSIST, CascadeType.REMOVE} )
public Map<String,Address2> addresses = new HashMap<>();
public AddressCapable2() {
}
public AddressCapable2(Integer id, String name) {
this.id = id;
this.name = name;
}
}
@Entity( name = "Address2" )
@Table( name = "addresses2" )
public static
|
AddressCapable2
|
java
|
elastic__elasticsearch
|
x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/Sample.java
|
{
"start": 743,
"end": 867
}
|
class ____ the instance of a sample. Used at runtime by the engine to track samples.
* Defined by its key.
* This
|
representing
|
java
|
junit-team__junit5
|
platform-tests/src/test/java/org/junit/platform/engine/support/descriptor/DemoClassTestDescriptor.java
|
{
"start": 779,
"end": 1436
}
|
class ____ extends AbstractTestDescriptor {
private final Class<?> testClass;
public DemoClassTestDescriptor(UniqueId uniqueId, Class<?> testClass) {
super(uniqueId, Preconditions.notNull(testClass, "Class must not be null").getSimpleName(),
ClassSource.from(testClass));
this.testClass = testClass;
}
@Override
public Set<TestTag> getTags() {
return findRepeatableAnnotations(this.testClass, Tag.class).stream() //
.map(Tag::value) //
.filter(TestTag::isValid) //
.map(TestTag::create) //
.collect(toCollection(LinkedHashSet::new));
}
@Override
public Type getType() {
return Type.CONTAINER;
}
}
|
DemoClassTestDescriptor
|
java
|
apache__camel
|
components/camel-ibm/camel-ibm-watson-text-to-speech/src/main/java/org/apache/camel/component/ibm/watson/tts/WatsonTextToSpeechConfiguration.java
|
{
"start": 1036,
"end": 3548
}
|
class ____ implements Cloneable {
@UriParam(label = "security", secret = true)
@Metadata(required = true)
private String apiKey;
@UriParam(label = "common")
private String serviceUrl;
@UriParam(label = "producer")
private WatsonTextToSpeechOperations operation;
@UriParam(label = "producer", defaultValue = "en-US_MichaelV3Voice")
private String voice = "en-US_MichaelV3Voice";
@UriParam(label = "producer", defaultValue = "audio/wav")
private String accept = "audio/wav";
@UriParam(label = "producer")
private String customizationId;
public String getApiKey() {
return apiKey;
}
/**
* The IBM Cloud API key for authentication
*/
public void setApiKey(String apiKey) {
this.apiKey = apiKey;
}
public String getServiceUrl() {
return serviceUrl;
}
/**
* The service endpoint URL. If not specified, the default URL will be used.
*/
public void setServiceUrl(String serviceUrl) {
this.serviceUrl = serviceUrl;
}
public WatsonTextToSpeechOperations getOperation() {
return operation;
}
/**
* The operation to perform
*/
public void setOperation(WatsonTextToSpeechOperations operation) {
this.operation = operation;
}
public String getVoice() {
return voice;
}
/**
* The voice to use for synthesis. Default is en-US_MichaelV3Voice. Examples: en-US_AllisonV3Voice,
* en-GB_KateV3Voice, es-ES_EnriqueV3Voice, fr-FR_NicolasV3Voice
*/
public void setVoice(String voice) {
this.voice = voice;
}
public String getAccept() {
return accept;
}
/**
* The audio format for synthesized speech. Default is audio/wav. Supported formats: audio/wav, audio/mp3,
* audio/ogg, audio/flac, audio/webm
*/
public void setAccept(String accept) {
this.accept = accept;
}
public String getCustomizationId() {
return customizationId;
}
/**
* The customization ID (GUID) of a custom voice model to use for synthesis
*/
public void setCustomizationId(String customizationId) {
this.customizationId = customizationId;
}
public WatsonTextToSpeechConfiguration copy() {
try {
return (WatsonTextToSpeechConfiguration) clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeCamelException(e);
}
}
}
|
WatsonTextToSpeechConfiguration
|
java
|
elastic__elasticsearch
|
server/src/main/java/org/elasticsearch/search/SearchSortValuesAndFormats.java
|
{
"start": 817,
"end": 4119
}
|
class ____ implements Writeable {
private final Object[] rawSortValues;
private final Object[] formattedSortValues;
private final DocValueFormat[] sortValueFormats;
public SearchSortValuesAndFormats(Object[] rawSortValues, DocValueFormat[] sortValueFormats) {
assert rawSortValues.length == sortValueFormats.length;
this.rawSortValues = rawSortValues;
this.sortValueFormats = sortValueFormats;
this.formattedSortValues = Arrays.copyOf(rawSortValues, rawSortValues.length);
for (int i = 0; i < rawSortValues.length; ++i) {
Object sortValue = rawSortValues[i];
if (sortValue instanceof BytesRef) {
this.formattedSortValues[i] = sortValueFormats[i].format((BytesRef) sortValue);
} else if (sortValue instanceof Long) {
this.formattedSortValues[i] = sortValueFormats[i].format((long) sortValue);
} else if (sortValue instanceof Double) {
this.formattedSortValues[i] = sortValueFormats[i].format((double) sortValue);
} else if (sortValue instanceof Float || sortValue instanceof Integer) {
// sort by _score or _doc
this.formattedSortValues[i] = sortValue;
} else {
assert sortValue == null
: "Sort values must be a BytesRef, Long, Integer, Double or Float, but got " + sortValue.getClass() + ": " + sortValue;
this.formattedSortValues[i] = sortValue;
}
}
}
public SearchSortValuesAndFormats(StreamInput in) throws IOException {
this.rawSortValues = Lucene.readSortValues(in);
this.formattedSortValues = Lucene.readSortValues(in);
this.sortValueFormats = new DocValueFormat[formattedSortValues.length];
for (int i = 0; i < sortValueFormats.length; ++i) {
sortValueFormats[i] = in.readNamedWriteable(DocValueFormat.class);
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeArray(Lucene::writeSortValue, rawSortValues);
out.writeArray(Lucene::writeSortValue, formattedSortValues);
for (int i = 0; i < sortValueFormats.length; i++) {
out.writeNamedWriteable(sortValueFormats[i]);
}
}
public Object[] getRawSortValues() {
return rawSortValues;
}
public Object[] getFormattedSortValues() {
return formattedSortValues;
}
public DocValueFormat[] getSortValueFormats() {
return sortValueFormats;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
SearchSortValuesAndFormats that = (SearchSortValuesAndFormats) o;
return Arrays.equals(rawSortValues, that.rawSortValues)
&& Arrays.equals(formattedSortValues, that.formattedSortValues)
&& Arrays.equals(sortValueFormats, that.sortValueFormats);
}
@Override
public int hashCode() {
int result = Arrays.hashCode(rawSortValues);
result = 31 * result + Arrays.hashCode(formattedSortValues);
result = 31 * result + Arrays.hashCode(sortValueFormats);
return result;
}
}
|
SearchSortValuesAndFormats
|
java
|
spring-projects__spring-framework
|
spring-core/src/main/java/org/springframework/util/ReflectionUtils.java
|
{
"start": 12429,
"end": 12821
}
|
class ____ introspect
* @param mc the callback to invoke for each method
* @throws IllegalStateException if introspection fails
* @see #doWithMethods(Class, MethodCallback, MethodFilter)
*/
public static void doWithMethods(Class<?> clazz, MethodCallback mc) {
doWithMethods(clazz, mc, null);
}
/**
* Perform the given callback operation on all matching methods of the given
*
|
to
|
java
|
quarkusio__quarkus
|
extensions/redis-client/runtime/src/test/java/io/quarkus/redis/datasource/TransactionalSearchCommandsTest.java
|
{
"start": 935,
"end": 5304
}
|
class ____ extends DatasourceTestBase {
private RedisDataSource blocking;
private ReactiveRedisDataSource reactive;
@BeforeEach
void initialize() {
blocking = new BlockingRedisDataSourceImpl(vertx, redis, api, Duration.ofSeconds(60));
reactive = new ReactiveRedisDataSourceImpl(vertx, redis, api);
}
@AfterEach
public void clear() {
blocking.flushall();
}
void setup() {
var hash = blocking.hash(String.class);
hash.hset("movie:11002", Map.of("title", "Star Wars: Episode V - The Empire Strikes Back",
"plot", "After the Rebels are brutally overpowered by the Empire on the ice planet Hoth, ...",
"release_year", "1972",
"genre", "Action",
"rating", "8.7",
"votes", "1127635",
"imbd_id", "tt0080684"));
hash.hset("movie:11003", Map.of("title", "The Godfather",
"plot", "The aging patriarch of an organized crime dynasty transfers control of his ...",
"release_year", "1972",
"genre", "Drama",
"rating", "9.2",
"votes", "1563839",
"imbd_id", "tt0068646"));
hash.hset("movie:11004", Map.of("title", "Heat",
"plot", "A group of professional bank robbers start to feel the heat ...",
"release_year", "1995",
"genre", "Thriller",
"rating", "8.2",
"votes", "559490",
"imbd_id", "tt0113277"));
hash.hset("movie:11005", Map.of("title", "Star Wars: Episode VI - Return of the Jedi",
"plot", "The Rebels dispatch to Endor to destroy the second Empire's Death Star.",
"release_year", "1983",
"genre", "Action",
"rating", "8.3",
"votes", "906260",
"imbd_id", "tt0086190"));
}
@Test
public void transactionalSearchBlocking() {
setup();
TransactionResult result = blocking.withTransaction(tx -> {
TransactionalSearchCommands search = tx.search();
assertThat(search.getDataSource()).isEqualTo(tx);
search.ftCreate("idx:movie", new CreateArgs().onHash().prefixes("movie:")
.indexedField("title", FieldType.TEXT, new FieldOptions().sortable())
.indexedField("release_year", FieldType.NUMERIC, new FieldOptions().sortable())
.indexedField("rating", FieldType.NUMERIC, new FieldOptions().sortable())
.indexedField("genre", FieldType.TAG, new FieldOptions().sortable()));
search.ftSearch("idx:movie", "war");
search.ftAlter("idx:movie", IndexedField.from("plot", FieldType.TEXT, new FieldOptions().weight(0.5)));
search.ftSearch("idx:movie", "empire @genre:{Action}", new QueryArgs().returnAttribute("title"));
});
assertThat(result.size()).isEqualTo(4);
assertThat(result.discarded()).isFalse();
}
@Test
public void transactionalSearchReactive() {
setup();
TransactionResult result = reactive.withTransaction(tx -> {
ReactiveTransactionalSearchCommands search = tx.search();
assertThat(search.getDataSource()).isEqualTo(tx);
var u1 = search.ftCreate("idx:movie", new CreateArgs().onHash().prefixes("movie:")
.indexedField("title", FieldType.TEXT, new FieldOptions().sortable())
.indexedField("release_year", FieldType.NUMERIC, new FieldOptions().sortable())
.indexedField("rating", FieldType.NUMERIC, new FieldOptions().sortable())
.indexedField("genre", FieldType.TAG, new FieldOptions().sortable()));
var u2 = search.ftSearch("idx:movie", "war");
var u3 = search.ftAlter("idx:movie", IndexedField.from("plot", FieldType.TEXT, new FieldOptions().weight(0.5)));
var u4 = search.ftSearch("idx:movie", "empire @genre:{Action}", new QueryArgs().returnAttribute("title"));
return u1.chain(() -> u2).chain(() -> u3).chain(() -> u4);
}).await().indefinitely();
assertThat(result.size()).isEqualTo(4);
assertThat(result.discarded()).isFalse();
}
}
|
TransactionalSearchCommandsTest
|
java
|
elastic__elasticsearch
|
modules/repository-azure/src/yamlRestTest/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java
|
{
"start": 1317,
"end": 6427
}
|
class ____ extends ESClientYamlSuiteTestCase {
private static final boolean USE_FIXTURE = Booleans.parseBoolean(System.getProperty("test.azure.fixture", "true"));
private static final boolean USE_HTTPS_FIXTURE = USE_FIXTURE && ESTestCase.inFipsJvm() == false;
// TODO when https://github.com/elastic/elasticsearch/issues/111532 addressed, use a HTTPS fixture in FIPS mode too
private static final String AZURE_TEST_ACCOUNT = System.getProperty("test.azure.account");
private static final String AZURE_TEST_CONTAINER = System.getProperty("test.azure.container");
private static final String AZURE_TEST_KEY = System.getProperty("test.azure.key");
private static final String AZURE_TEST_SASTOKEN = System.getProperty("test.azure.sas_token");
private static final String AZURE_TEST_TENANT_ID = System.getProperty("test.azure.tenant_id");
private static final String AZURE_TEST_CLIENT_ID = System.getProperty("test.azure.client_id");
private static final AzureHttpFixture fixture = new AzureHttpFixture(
USE_HTTPS_FIXTURE ? AzureHttpFixture.Protocol.HTTPS : USE_FIXTURE ? AzureHttpFixture.Protocol.HTTP : AzureHttpFixture.Protocol.NONE,
AZURE_TEST_ACCOUNT,
AZURE_TEST_CONTAINER,
AZURE_TEST_TENANT_ID,
AZURE_TEST_CLIENT_ID,
decideAuthHeaderPredicate(),
MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE
);
private static Predicate<String> decideAuthHeaderPredicate() {
if (Strings.hasText(AZURE_TEST_KEY) || Strings.hasText(AZURE_TEST_SASTOKEN)) {
return AzureHttpFixture.sharedKeyForAccountPredicate(AZURE_TEST_ACCOUNT);
} else if (Strings.hasText(AZURE_TEST_TENANT_ID) && Strings.hasText(AZURE_TEST_CLIENT_ID)) {
return AzureHttpFixture.WORK_IDENTITY_BEARER_TOKEN_PREDICATE;
} else if (Strings.hasText(AZURE_TEST_TENANT_ID) || Strings.hasText(AZURE_TEST_CLIENT_ID)) {
fail(null, "Both [test.azure.tenant_id] and [test.azure.client_id] must be set if either is set");
}
return AzureHttpFixture.MANAGED_IDENTITY_BEARER_TOKEN_PREDICATE;
}
private static TestTrustStore trustStore = new TestTrustStore(
() -> AzureHttpFixture.class.getResourceAsStream("azure-http-fixture.pem")
);
private static ElasticsearchCluster cluster = ElasticsearchCluster.local()
.module("repository-azure")
.keystore("azure.client.integration_test.account", AZURE_TEST_ACCOUNT)
.keystore(
"azure.client.integration_test.key",
() -> AZURE_TEST_KEY,
s -> AZURE_TEST_KEY != null && AZURE_TEST_KEY.isEmpty() == false
)
.keystore(
"azure.client.integration_test.sas_token",
() -> AZURE_TEST_SASTOKEN,
s -> AZURE_TEST_SASTOKEN != null && AZURE_TEST_SASTOKEN.isEmpty() == false
)
.setting(
"azure.client.integration_test.endpoint_suffix",
() -> "ignored;DefaultEndpointsProtocol=https;BlobEndpoint=" + fixture.getAddress(),
s -> USE_FIXTURE
)
.systemProperty(
"tests.azure.credentials.disable_instance_discovery",
() -> "true",
s -> USE_HTTPS_FIXTURE && Strings.hasText(AZURE_TEST_CLIENT_ID) && Strings.hasText(AZURE_TEST_TENANT_ID)
)
.systemProperty("AZURE_POD_IDENTITY_AUTHORITY_HOST", fixture::getMetadataAddress, s -> USE_FIXTURE)
.systemProperty("AZURE_AUTHORITY_HOST", fixture::getOAuthTokenServiceAddress, s -> USE_HTTPS_FIXTURE)
.systemProperty("AZURE_CLIENT_ID", () -> AZURE_TEST_CLIENT_ID, s -> Strings.hasText(AZURE_TEST_CLIENT_ID))
.systemProperty("AZURE_TENANT_ID", () -> AZURE_TEST_TENANT_ID, s -> Strings.hasText(AZURE_TEST_TENANT_ID))
.configFile("storage-azure/azure-federated-token", Resource.fromString(fixture.getFederatedToken()))
.environment(
nodeSpec -> USE_HTTPS_FIXTURE && Strings.hasText(AZURE_TEST_CLIENT_ID) && Strings.hasText(AZURE_TEST_TENANT_ID)
? Map.of("AZURE_FEDERATED_TOKEN_FILE", "${ES_PATH_CONF}/storage-azure/azure-federated-token")
: Map.of()
)
.setting("thread_pool.repository_azure.max", () -> String.valueOf(randomIntBetween(1, 10)), s -> USE_FIXTURE)
.systemProperty("javax.net.ssl.trustStore", () -> trustStore.getTrustStorePath().toString(), s -> USE_HTTPS_FIXTURE)
.systemProperty("javax.net.ssl.trustStoreType", () -> "jks", s -> USE_HTTPS_FIXTURE)
.build();
@ClassRule(order = 1)
public static TestRule ruleChain = RuleChain.outerRule(fixture).around(trustStore).around(cluster);
public RepositoryAzureClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
@Override
protected String getTestRestCluster() {
return cluster.getHttpAddresses();
}
}
|
RepositoryAzureClientYamlTestSuiteIT
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/nodes/exec/processor/utils/TopologyGraph.java
|
{
"start": 7932,
"end": 8291
}
|
class ____ {
private final ExecNode<?> execNode;
private final Set<TopologyNode> inputs;
private final Set<TopologyNode> outputs;
private TopologyNode(ExecNode<?> execNode) {
this.execNode = execNode;
this.inputs = new HashSet<>();
this.outputs = new HashSet<>();
}
}
}
|
TopologyNode
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/mappedBy/OneToOneMappedByTypeTest.java
|
{
"start": 3563,
"end": 3750
}
|
class ____ {
@Id
private Long id;
@OneToOne
private EntityACorrect parent;
}
@Entity( name = "EntityA2Correct" )
@Table( name = "entity_a_correct" )
public static
|
EntityBCorrect
|
java
|
spring-projects__spring-framework
|
spring-context/src/main/java/org/springframework/format/datetime/DateFormatterRegistrar.java
|
{
"start": 3228,
"end": 3392
}
|
class ____ implements Converter<Date, Long> {
@Override
public Long convert(Date source) {
return source.getTime();
}
}
private static
|
DateToLongConverter
|
java
|
elastic__elasticsearch
|
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniNormalizationFilterFactory.java
|
{
"start": 967,
"end": 1384
}
|
class ____ extends AbstractTokenFilterFactory implements NormalizingTokenFilterFactory {
public SoraniNormalizationFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(name);
}
@Override
public TokenStream create(TokenStream tokenStream) {
return new SoraniNormalizationFilter(tokenStream);
}
}
|
SoraniNormalizationFilterFactory
|
java
|
quarkusio__quarkus
|
extensions/panache/hibernate-orm-panache/deployment/src/test/java/io/quarkus/hibernate/orm/panache/deployment/test/PanacheEntityMetaModelTest.java
|
{
"start": 200,
"end": 354
}
|
class ____ {
@Test
public void testMetaModelExistence() {
Assertions.assertEquals("id", PanacheEntity_.ID);
}
}
|
PanacheEntityMetaModelTest
|
java
|
apache__kafka
|
metadata/src/test/java/org/apache/kafka/image/loader/MetadataLoaderTest.java
|
{
"start": 6631,
"end": 16816
}
|
class ____ implements SnapshotReader<ApiMessageAndVersion> {
private final MetadataProvenance provenance;
private final Iterator<Batch<ApiMessageAndVersion>> iterator;
private MockTime time = null;
boolean closed = false;
static MockSnapshotReader fromRecordLists(
MetadataProvenance provenance,
List<List<ApiMessageAndVersion>> lists
) {
List<Batch<ApiMessageAndVersion>> batches = lists
.stream()
.map(records -> Batch.data(0, 0, 0, 0, records))
.toList();
return new MockSnapshotReader(provenance, batches);
}
MockSnapshotReader(
MetadataProvenance provenance,
List<Batch<ApiMessageAndVersion>> batches
) {
this.provenance = provenance;
this.iterator = batches.iterator();
}
MockSnapshotReader setTime(MockTime time) {
this.time = time;
return this;
}
@Override
public OffsetAndEpoch snapshotId() {
return provenance.snapshotId();
}
@Override
public long lastContainedLogOffset() {
return provenance.lastContainedOffset();
}
@Override
public int lastContainedLogEpoch() {
return provenance.lastContainedEpoch();
}
@Override
public long lastContainedLogTimestamp() {
return provenance.lastContainedLogTimeMs();
}
@Override
public void close() {
closed = true;
}
@Override
public boolean hasNext() {
if (time != null) time.sleep(1);
return iterator.hasNext();
}
@Override
public Batch<ApiMessageAndVersion> next() {
if (time != null) time.sleep(1);
return iterator.next();
}
}
/**
* Test that a publisher cannot be installed more than once.
*/
@ParameterizedTest
@CsvSource(value = {"false,false", "false,true", "true,false", "true,true"})
public void testPublisherCannotBeInstalledMoreThanOnce(
boolean loadSnapshot,
boolean sameObject
) throws Exception {
MockFaultHandler faultHandler = new MockFaultHandler("testPublisherCannotBeInstalledMoreThanOnce");
MockPublisher publisher = new MockPublisher();
try (MetadataLoader loader = new MetadataLoader.Builder().
setFaultHandler(faultHandler).
setHighWaterMarkAccessor(() -> OptionalLong.of(0L)).
build()) {
loader.installPublishers(List.of(publisher)).get();
if (loadSnapshot) {
MockSnapshotReader snapshotReader = new MockSnapshotReader(
new MetadataProvenance(200, 100, 4000, true),
List.of(
Batch.control(
200,
100,
4000,
10,
List.of(ControlRecord.of(new SnapshotHeaderRecord()))
),
Batch.data(0, 0, 0, 0,
List.of(new ApiMessageAndVersion(new FeatureLevelRecord().
setName(MetadataVersion.FEATURE_NAME).
setFeatureLevel(MINIMUM_VERSION.featureLevel()), (short) 0))
)
)
);
loader.handleLoadSnapshot(snapshotReader);
TestUtils.retryOnExceptionWithTimeout(30_000, () ->
assertEquals(1L, loader.metrics().handleLoadSnapshotCount())
);
} else {
TestUtils.retryOnExceptionWithTimeout(30_000, () ->
assertEquals(0L, loader.metrics().handleLoadSnapshotCount())
);
}
loader.waitForAllEventsToBeHandled();
if (sameObject) {
assertEquals("testPublisherCannotBeInstalledMoreThanOnce: Attempted to install " +
"publisher MockPublisher, which is already installed.",
assertThrows(ExecutionException.class,
() -> loader.installPublishers(List.of(publisher)).get()).
getCause().getMessage());
} else {
assertEquals("testPublisherCannotBeInstalledMoreThanOnce: Attempted to install " +
"a new publisher named MockPublisher, but there is already a publisher with that name.",
assertThrows(ExecutionException.class,
() -> loader.installPublishers(List.of(new MockPublisher())).get()).
getCause().getMessage());
}
}
}
/**
* Install 2 publishers and remove one.
*/
@Test
public void testRemovePublisher() throws Exception {
MockFaultHandler faultHandler = new MockFaultHandler("testRemovePublisher");
List<MockPublisher> publishers = List.of(new MockPublisher("a"),
new MockPublisher("b"),
new MockPublisher("c"));
try (MetadataLoader loader = new MetadataLoader.Builder().
setFaultHandler(faultHandler).
setHighWaterMarkAccessor(() -> OptionalLong.of(1L)).
build()) {
loader.installPublishers(publishers.subList(0, 2)).get();
loader.removeAndClosePublisher(publishers.get(1)).get();
MockSnapshotReader snapshotReader = MockSnapshotReader.fromRecordLists(
new MetadataProvenance(100, 50, 2000, true),
List.of(List.of(new ApiMessageAndVersion(
new FeatureLevelRecord().
setName(MetadataVersion.FEATURE_NAME).
setFeatureLevel(MINIMUM_VERSION.featureLevel()), (short) 0))));
assertFalse(snapshotReader.closed);
loader.handleLoadSnapshot(snapshotReader);
loader.waitForAllEventsToBeHandled();
assertTrue(snapshotReader.closed);
publishers.get(0).firstPublish.get(1, TimeUnit.MINUTES);
loader.removeAndClosePublisher(publishers.get(0)).get();
}
assertTrue(publishers.get(0).closed);
assertEquals(Optional.of(MINIMUM_VERSION),
publishers.get(0).latestImage.features().metadataVersion());
assertTrue(publishers.get(1).closed);
assertNull(publishers.get(1).latestImage);
assertFalse(publishers.get(2).closed);
assertNull(publishers.get(2).latestImage);
faultHandler.maybeRethrowFirstException();
}
/**
* Test loading a snapshot with 0 records.
*/
@Test
public void testLoadEmptySnapshot() throws Exception {
MockFaultHandler faultHandler = new MockFaultHandler("testLoadEmptySnapshot");
MockTime time = new MockTime();
List<MockPublisher> publishers = List.of(new MockPublisher());
try (MetadataLoader loader = new MetadataLoader.Builder().
setFaultHandler(faultHandler).
setTime(time).
setHighWaterMarkAccessor(() -> OptionalLong.of(0L)).
build()) {
loader.installPublishers(publishers).get();
loadEmptySnapshot(loader, 200);
loader.waitForAllEventsToBeHandled();
assertFalse(publishers.get(0).firstPublish.isDone());
loader.handleCommit(MockBatchReader.newSingleBatchReader(250, 50, List.of(
new ApiMessageAndVersion(new FeatureLevelRecord()
.setName(MetadataVersion.FEATURE_NAME)
.setFeatureLevel(MINIMUM_VERSION.featureLevel()), (short) 0)))
);
publishers.get(0).firstPublish.get(10, TimeUnit.SECONDS);
assertEquals(250L, loader.lastAppliedOffset());
loadEmptySnapshot(loader, 300);
assertEquals(300L, loader.lastAppliedOffset());
assertEquals(new SnapshotManifest(new MetadataProvenance(300, 100, 4000, true), 3000000L),
publishers.get(0).latestSnapshotManifest);
assertEquals(MINIMUM_VERSION,
loader.metrics().currentMetadataVersion());
assertEquals(MINIMUM_VERSION.featureLevel(),
loader.metrics().finalizedFeatureLevel(FEATURE_NAME));
}
assertTrue(publishers.get(0).closed);
assertEquals(Optional.of(MINIMUM_VERSION), publishers.get(0).latestImage.features().metadataVersion());
var latestImage = publishers.get(0).latestImage;
assertFalse(latestImage.isEmpty());
assertFalse(latestImage.features().isEmpty());
assertTrue(latestImage.features().finalizedVersions().isEmpty());
assertTrue(latestImage.cluster().isEmpty());
assertTrue(latestImage.topics().isEmpty());
assertTrue(latestImage.cluster().isEmpty());
assertTrue(latestImage.configs().isEmpty());
assertTrue(latestImage.producerIds().isEmpty());
assertTrue(latestImage.acls().isEmpty());
assertTrue(latestImage.scram().isEmpty());
assertTrue(latestImage.delegationTokens().isEmpty());
faultHandler.maybeRethrowFirstException();
}
private void loadEmptySnapshot(
MetadataLoader loader,
long offset
) throws Exception {
MockSnapshotReader snapshotReader = new MockSnapshotReader(
new MetadataProvenance(offset, 100, 4000, true),
List.of(
Batch.control(
200,
100,
4000,
10,
List.of(ControlRecord.of(new SnapshotHeaderRecord()))
)
)
);
if (loader.time() instanceof MockTime) {
snapshotReader.setTime((MockTime) loader.time());
}
loader.handleLoadSnapshot(snapshotReader);
loader.waitForAllEventsToBeHandled();
}
static
|
MockSnapshotReader
|
java
|
apache__logging-log4j2
|
log4j-core/src/main/java/org/apache/logging/log4j/core/util/Loader.java
|
{
"start": 10456,
"end": 11086
}
|
class ____.
* @return The Class.
* @throws ClassNotFoundException if the Class could not be found.
*/
public static Class<?> loadSystemClass(final String className) throws ClassNotFoundException {
    try {
        // First attempt: initialize the class through the system class loader.
        final ClassLoader systemLoader = ClassLoader.getSystemClassLoader();
        return Class.forName(className, true, systemLoader);
    } catch (final Throwable t) {
        // Catching Throwable on purpose: even linkage errors from the system
        // loader should not prevent the plain Class.forName fallback.
        LOGGER.trace("Couldn't use SystemClassLoader. Trying Class.forName({}).", className, t);
        return Class.forName(className);
    }
}
/**
* Loads and instantiates a Class using the default constructor.
*
* @param className The
|
name
|
java
|
apache__kafka
|
server-common/src/test/java/org/apache/kafka/server/common/FeatureTest.java
|
{
"start": 1454,
"end": 13963
}
|
/**
 * Tests the invariants of the {@code Feature} enum's version metadata:
 * bootstrap MetadataVersions, default levels, production readiness, and
 * dependency validation. The {@code UNIT_TEST_VERSION_*} constants are
 * deliberately-broken fixtures, so the parameterized tests over real
 * features exclude them and dedicated tests assert that validation
 * rejects them.
 */
class ____ {
    // Every real feature's earliest version must bootstrap at the minimum supported MV.
    @ParameterizedTest
    @EnumSource(value = Feature.class, names = {
        "UNIT_TEST_VERSION_0",
        "UNIT_TEST_VERSION_1",
        "UNIT_TEST_VERSION_2",
        "UNIT_TEST_VERSION_3",
        "UNIT_TEST_VERSION_4",
        "UNIT_TEST_VERSION_5",
        "UNIT_TEST_VERSION_6",
        "UNIT_TEST_VERSION_7"}, mode = EnumSource.Mode.EXCLUDE)
    public void testV0SupportedInEarliestMV(Feature feature) {
        assertTrue(feature.featureVersions().length >= 1);
        assertEquals(MetadataVersion.MINIMUM_VERSION,
            feature.featureVersions()[0].bootstrapMetadataVersion());
    }

    // fromFeatureLevel must resolve every declared level; levels above latest
    // production are only reachable when unstable versions are allowed.
    @ParameterizedTest
    @EnumSource(value = Feature.class, names = {
        "UNIT_TEST_VERSION_0",
        "UNIT_TEST_VERSION_1",
        "UNIT_TEST_VERSION_2",
        "UNIT_TEST_VERSION_3",
        "UNIT_TEST_VERSION_4",
        "UNIT_TEST_VERSION_5",
        "UNIT_TEST_VERSION_6",
        "UNIT_TEST_VERSION_7"}, mode = EnumSource.Mode.EXCLUDE)
    public void testFromFeatureLevelAllFeatures(Feature feature) {
        FeatureVersion[] featureImplementations = feature.featureVersions();
        int numFeatures = featureImplementations.length;
        short latestProductionLevel = feature.latestProduction();
        for (short i = 0; i < numFeatures; i++) {
            short level = i;
            if (latestProductionLevel < i) {
                assertEquals(featureImplementations[i], feature.fromFeatureLevel(level, true));
                assertThrows(IllegalArgumentException.class, () -> feature.fromFeatureLevel(level, false));
            } else {
                assertEquals(featureImplementations[i], feature.fromFeatureLevel(level, false));
            }
        }
    }

    // Every declared FeatureVersion must validate against its own dependencies
    // (plus the minimum MV when no MV dependency is declared).
    @ParameterizedTest
    @EnumSource(value = Feature.class, names = {
        "UNIT_TEST_VERSION_0",
        "UNIT_TEST_VERSION_1",
        "UNIT_TEST_VERSION_2",
        "UNIT_TEST_VERSION_3",
        "UNIT_TEST_VERSION_4",
        "UNIT_TEST_VERSION_5",
        "UNIT_TEST_VERSION_6",
        "UNIT_TEST_VERSION_7"}, mode = EnumSource.Mode.EXCLUDE)
    public void testValidateVersionAllFeatures(Feature feature) {
        for (FeatureVersion featureImpl : feature.featureVersions()) {
            // Ensure the minimum bootstrap metadata version is included if no metadata version dependency.
            Map<String, Short> deps = new HashMap<>();
            deps.putAll(featureImpl.dependencies());
            if (!deps.containsKey(MetadataVersion.FEATURE_NAME)) {
                deps.put(MetadataVersion.FEATURE_NAME, MetadataVersion.MINIMUM_VERSION.featureLevel());
            }
            // Ensure that the feature is valid given the typical metadataVersionMapping and the dependencies.
            // Note: Other metadata versions are valid, but this one should always be valid.
            Feature.validateVersion(featureImpl, deps);
        }
    }

    // validateVersion must reject missing MV, too-low MV, and unsatisfied dependencies.
    @Test
    public void testInvalidValidateVersion() {
        // No MetadataVersion is invalid
        assertThrows(IllegalArgumentException.class,
            () -> Feature.validateVersion(
                TestFeatureVersion.TEST_1,
                Map.of()
            )
        );
        // Using too low of a MetadataVersion is invalid
        assertThrows(IllegalArgumentException.class,
            () -> Feature.validateVersion(
                TestFeatureVersion.TEST_1,
                Map.of(MetadataVersion.FEATURE_NAME, MetadataVersionTestUtils.IBP_3_0_IV1_FEATURE_LEVEL)
            )
        );
        // Using a version that is lower than the dependency will fail.
        assertThrows(IllegalArgumentException.class,
            () -> Feature.validateVersion(
                TestFeatureVersion.TEST_2,
                Map.of(MetadataVersion.FEATURE_NAME, MetadataVersion.IBP_3_7_IV0.featureLevel())
            )
        );
    }

    // When multiple versions share a bootstrap MV, defaultLevel must pick the highest.
    @ParameterizedTest
    @EnumSource(value = Feature.class, names = {
        "UNIT_TEST_VERSION_0",
        "UNIT_TEST_VERSION_1",
        "UNIT_TEST_VERSION_2",
        "UNIT_TEST_VERSION_3",
        "UNIT_TEST_VERSION_4",
        "UNIT_TEST_VERSION_5",
        "UNIT_TEST_VERSION_6",
        "UNIT_TEST_VERSION_7"}, mode = EnumSource.Mode.EXCLUDE)
    public void testDefaultLevelAllFeatures(Feature feature) {
        for (FeatureVersion featureImpl : feature.featureVersions()) {
            // If features have the same bootstrapMetadataVersion, the highest level feature should be chosen.
            short defaultLevel = feature.defaultLevel(featureImpl.bootstrapMetadataVersion());
            if (defaultLevel != featureImpl.featureLevel()) {
                FeatureVersion otherFeature = feature.fromFeatureLevel(defaultLevel, true);
                assertEquals(featureImpl.bootstrapMetadataVersion(), otherFeature.bootstrapMetadataVersion());
                assertTrue(defaultLevel > featureImpl.featureLevel());
            }
        }
    }

    // The declared latest-production level must correspond to an actual version.
    @ParameterizedTest
    @EnumSource(value = Feature.class, names = {
        "UNIT_TEST_VERSION_0",
        "UNIT_TEST_VERSION_1",
        "UNIT_TEST_VERSION_2",
        "UNIT_TEST_VERSION_3",
        "UNIT_TEST_VERSION_4",
        "UNIT_TEST_VERSION_5",
        "UNIT_TEST_VERSION_6",
        "UNIT_TEST_VERSION_7"}, mode = EnumSource.Mode.EXCLUDE)
    public void testLatestProductionIsOneOfFeatureValues(Feature feature) {
        assertTrue(feature.hasFeatureVersion(feature.latestProduction));
    }

    // Latest production must be at least the default level for the latest production MV.
    @ParameterizedTest
    @EnumSource(value = Feature.class, names = {
        "UNIT_TEST_VERSION_0",
        "UNIT_TEST_VERSION_1",
        "UNIT_TEST_VERSION_2",
        "UNIT_TEST_VERSION_3",
        "UNIT_TEST_VERSION_4",
        "UNIT_TEST_VERSION_5",
        "UNIT_TEST_VERSION_6",
        "UNIT_TEST_VERSION_7"}, mode = EnumSource.Mode.EXCLUDE)
    public void testLatestProductionIsNotBehindLatestMetadataVersion(Feature feature) {
        assertTrue(feature.latestProduction() >= feature.defaultLevel(MetadataVersion.latestProduction()));
    }

    // Non-MV dependencies of the latest-production version must themselves be production ready.
    @ParameterizedTest
    @EnumSource(value = Feature.class, names = {
        "UNIT_TEST_VERSION_0",
        "UNIT_TEST_VERSION_1",
        "UNIT_TEST_VERSION_2",
        "UNIT_TEST_VERSION_3",
        "UNIT_TEST_VERSION_4",
        "UNIT_TEST_VERSION_5",
        "UNIT_TEST_VERSION_6",
        "UNIT_TEST_VERSION_7"}, mode = EnumSource.Mode.EXCLUDE)
    public void testLatestProductionDependencyIsProductionReady(Feature feature) {
        for (Map.Entry<String, Short> dependency: feature.latestProduction.dependencies().entrySet()) {
            String featureName = dependency.getKey();
            if (!featureName.equals(MetadataVersion.FEATURE_NAME)) {
                Feature dependencyFeature = Feature.featureFromName(featureName);
                assertTrue(dependencyFeature.isProductionReady(dependency.getValue()));
            }
        }
    }

    // Non-MV dependencies of the default version must not exceed their own default levels.
    @ParameterizedTest
    @EnumSource(value = Feature.class, names = {
        "UNIT_TEST_VERSION_0",
        "UNIT_TEST_VERSION_1",
        "UNIT_TEST_VERSION_2",
        "UNIT_TEST_VERSION_3",
        "UNIT_TEST_VERSION_4",
        "UNIT_TEST_VERSION_5",
        "UNIT_TEST_VERSION_6",
        "UNIT_TEST_VERSION_7"}, mode = EnumSource.Mode.EXCLUDE)
    public void testDefaultVersionDependencyIsDefaultReady(Feature feature) {
        for (Map.Entry<String, Short> dependency: feature.defaultVersion(MetadataVersion.LATEST_PRODUCTION).dependencies().entrySet()) {
            String featureName = dependency.getKey();
            if (!featureName.equals(MetadataVersion.FEATURE_NAME)) {
                Feature dependencyFeature = Feature.featureFromName(featureName);
                assertTrue(dependency.getValue() <= dependencyFeature.defaultLevel(MetadataVersion.LATEST_PRODUCTION));
            }
        }
    }

    // TEST_VERSION's default level steps up at IBP_3_7_IV0 and again at the latest testing MV.
    @ParameterizedTest
    @EnumSource(MetadataVersion.class)
    public void testDefaultTestVersion(MetadataVersion metadataVersion) {
        short expectedVersion;
        if (!metadataVersion.isLessThan(MetadataVersion.latestTesting())) {
            expectedVersion = 2;
        } else if (!metadataVersion.isLessThan(MetadataVersion.IBP_3_7_IV0)) {
            expectedVersion = 1;
        } else {
            expectedVersion = 0;
        }
        assertEquals(expectedVersion, Feature.TEST_VERSION.defaultLevel(metadataVersion));
    }

    // Unstable levels are rejected unless explicitly allowed.
    @Test
    public void testUnstableTestVersion() {
        // If the latest MetadataVersion is stable, we don't throw an error. In that case, we don't worry about unstable feature
        // versions since all feature versions are stable.
        if (MetadataVersion.latestProduction().isLessThan(MetadataVersion.latestTesting())) {
            assertThrows(IllegalArgumentException.class, () ->
                Feature.TEST_VERSION.fromFeatureLevel(Feature.TEST_VERSION.latestTesting(), false));
        }
        Feature.TEST_VERSION.fromFeatureLevel(Feature.TEST_VERSION.latestTesting(), true);
    }

    // Fixture: latest production level that is not a declared version must be rejected.
    @Test
    public void testValidateWithNonExistentLatestProduction() {
        assertThrows(IllegalArgumentException.class, () ->
            validateDefaultValueAndLatestProductionValue(Feature.UNIT_TEST_VERSION_0),
            "Feature UNIT_TEST_VERSION_0 has latest production version UT_FV0_1 " +
            "which is not one of its feature versions.");
    }

    // Fixture: latest production below the default version must be rejected.
    @Test
    public void testValidateWithLaggingLatestProduction() {
        assertThrows(IllegalArgumentException.class, () ->
            validateDefaultValueAndLatestProductionValue(Feature.UNIT_TEST_VERSION_1),
            "Feature UNIT_TEST_VERSION_1 has latest production value UT_FV1_0 " +
            "smaller than its default version UT_FV1_1 with latest production MV.");
    }

    // Fixture: a production version depending on a non-production-ready feature must be rejected.
    @Test
    public void testValidateWithDependencyNotProductionReady() {
        assertThrows(IllegalArgumentException.class, () ->
            validateDefaultValueAndLatestProductionValue(Feature.UNIT_TEST_VERSION_3),
            "Feature UNIT_TEST_VERSION_3 has latest production FeatureVersion UT_FV3_1 with dependency " +
            "UT_FV2_1 that is not production ready. (UNIT_TEST_VERSION_2 latest production: UT_FV2_0)");
    }

    // Fixture: a default version depending past its dependency's default level must be rejected.
    // Only meaningful while unstable MVs exist (latest production < latest testing).
    @Test
    public void testValidateWithDefaultValueDependencyAheadOfItsDefaultLevel() {
        if (MetadataVersion.latestProduction().isLessThan(MetadataVersion.latestTesting())) {
            assertThrows(IllegalArgumentException.class, () ->
                validateDefaultValueAndLatestProductionValue(Feature.UNIT_TEST_VERSION_5),
                "Feature UNIT_TEST_VERSION_5 has default FeatureVersion UT_FV5_1 when MV=3.7-IV0 with " +
                "dependency UT_FV4_1 that is behind its default version UT_FV4_0.");
        }
    }

    // Fixture: an MV dependency that is not production ready must be rejected (message checked exactly).
    @Test
    public void testValidateWithMVDependencyNotProductionReady() {
        if (MetadataVersion.latestProduction().isLessThan(MetadataVersion.latestTesting())) {
            IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () ->
                validateDefaultValueAndLatestProductionValue(Feature.UNIT_TEST_VERSION_6));
            assertEquals("Feature UNIT_TEST_VERSION_6 has latest production FeatureVersion UT_FV6_1 with MV dependency "
                + MetadataVersion.latestTesting() + " that is not production ready. (MV latest production: " + MetadataVersion.latestProduction() + ")",
                exception.getMessage());
        }
    }

    // Fixture: an MV dependency ahead of the version's bootstrap MV must be rejected (message checked exactly).
    @Test
    public void testValidateWithMVDependencyAheadOfBootstrapMV() {
        IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () ->
            validateDefaultValueAndLatestProductionValue(Feature.UNIT_TEST_VERSION_7));
        assertEquals("Feature UNIT_TEST_VERSION_7 has default FeatureVersion UT_FV7_0 when MV=" + MetadataVersion.MINIMUM_VERSION
            + " with MV dependency 3.7-IV0 that is behind its bootstrap MV " + MetadataVersion.MINIMUM_VERSION + ".", exception.getMessage());
    }

    // ELR v1 requires at least MV 4.0-IV1: below it fails, at it succeeds.
    @Test
    public void testValidateEligibleLeaderReplicasVersion() {
        assertThrows(IllegalArgumentException.class, () ->
            validateVersion(EligibleLeaderReplicasVersion.ELRV_1, Map.of(MetadataVersion.FEATURE_NAME, MetadataVersion.IBP_4_0_IV0.featureLevel())),
            "ELR requires MV to be at least 4.0IV1.");
        assertDoesNotThrow(() ->
            validateVersion(EligibleLeaderReplicasVersion.ELRV_1, Map.of(MetadataVersion.FEATURE_NAME, MetadataVersion.IBP_4_0_IV1.featureLevel())),
            "ELR requires MV to be at least 4.0IV1.");
    }
}
|
FeatureTest
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.