language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/cdi/general/mixed/Helper.java | {
"start": 296,
"end": 579
} | class ____ {
public static SeContainer createSeContainer() {
final SeContainerInitializer cdiInitializer = SeContainerInitializer.newInstance()
.disableDiscovery()
.addBeanClasses( HostedBean.class, InjectedHostedBean.class );
return cdiInitializer.initialize();
}
}
| Helper |
java | apache__camel | components/camel-sjms/src/test/java/org/apache/camel/component/sjms/tx/TransactedTopicConsumerTest.java | {
"start": 1092,
"end": 2078
} | class ____ extends TransactedConsumerSupport {
@RegisterExtension
protected static ArtemisService service = ArtemisServiceFactory.createVMService();
/**
* We want to verify that when consuming from a single destination with multiple routes that we are thread safe and
* behave accordingly.
*/
@Test
public void testRoute() throws Exception {
final String destinationName = "sjms:topic:one.consumer.one.route.tx.test.TransactedTopicConsumerTest";
int routeCount = 2;
int concurrentConsumers = 1;
int messageCount = 20;
int maxAttemptsCount = 10;
int totalRedeliverdFalse = 20;
int totalRedeliveredTrue = 1;
runTest(destinationName, routeCount, messageCount, totalRedeliverdFalse, totalRedeliveredTrue,
concurrentConsumers, maxAttemptsCount);
}
@Override
public String getBrokerUri() {
return service.serviceAddress();
}
}
| TransactedTopicConsumerTest |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/StreamJsonTest.java | {
"start": 1340,
"end": 5330
} | class ____ {
private static final long TICK_EVERY_MS = 200;
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar.addClasses(TestJacksonBasicMessageBodyReader.class));
@TestHTTPResource
URI uri;
@Test
void shouldReadStreamJsonStringAsMulti() throws InterruptedException {
var client = createClient(uri);
var collected = new CopyOnWriteArrayList<String>();
var completionLatch = new CountDownLatch(1);
client.readString().onCompletion().invoke(completionLatch::countDown)
.subscribe().with(collected::add);
if (!completionLatch.await(5, TimeUnit.SECONDS)) {
fail("Streaming did not complete in time");
}
assertThat(collected).hasSize(4)
.contains("\"one\"", "\"two\"", "\"3\"", "\"four\"");
}
@Test
void shouldReadNdjsonPojoAsMulti() throws InterruptedException {
var client = createClient(uri);
var collected = new CopyOnWriteArrayList<Message>();
var completionLatch = new CountDownLatch(1);
client.readPojo().onCompletion().invoke(completionLatch::countDown)
.subscribe().with(collected::add);
if (!completionLatch.await(5, TimeUnit.SECONDS)) {
fail("Streaming did not complete in time");
}
var expected = Arrays.asList(Message.of("one", "1"),
Message.of("two", "2"), Message.of("three", "3"),
Message.of("four", "4"));
assertThat(collected).hasSize(4).containsAll(expected);
}
@Test
void shouldReadNdjsonPojoFromReactiveRoutes() throws InterruptedException {
URI reactiveRoutesBaseUri = URI.create(uri.toString() + "/rr");
var client = createClient(reactiveRoutesBaseUri);
var collected = new CopyOnWriteArrayList<Message>();
var completionLatch = new CountDownLatch(1);
client.readPojo().onCompletion().invoke(completionLatch::countDown)
.subscribe().with(collected::add);
if (!completionLatch.await(5, TimeUnit.SECONDS)) {
fail("Streaming did not complete in time");
}
var expected = Arrays.asList(Message.of("superman", "1"),
Message.of("batman", "2"), Message.of("spiderman", "3"));
assertThat(collected).hasSize(3).containsAll(expected);
}
@Test
void shouldReadNdjsonFromSingleMessage() throws InterruptedException {
var client = createClient(uri);
var collected = new CopyOnWriteArrayList<Message>();
var completionLatch = new CountDownLatch(1);
client.readPojoSingle().onCompletion().invoke(completionLatch::countDown)
.subscribe().with(collected::add);
if (!completionLatch.await(5, TimeUnit.SECONDS)) {
fail("Streaming did not complete in time");
}
var expected = Arrays.asList(
Message.of("zero", "0"), Message.of("one", "1"),
Message.of("two", "2"), Message.of("three", "3"));
assertThat(collected).hasSize(4).containsAll(expected);
}
/**
* Reproduce <a href="https://github.com/quarkusio/quarkus/issues/30690">#30690</a>.
*/
@Test
public void shouldReadUpToThreeTicks() {
createClient(uri)
.ticks()
.onItem()
.invoke(Objects::nonNull)
.subscribe()
.withSubscriber(AssertSubscriber.create(3))
// wait for 3 ticks plus some half tick ms of extra time (this should not be necessary, but CI is slow)
.awaitItems(3, Duration.ofMillis((TICK_EVERY_MS * 3) + (TICK_EVERY_MS / 2)));
}
private Client createClient(URI uri) {
return RestClientBuilder.newBuilder().baseUri(uri).register(new TestJacksonBasicMessageBodyReader())
.build(Client.class);
}
@Path("/stream")
public | StreamJsonTest |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/console/ConsoleHelper.java | {
"start": 1130,
"end": 4538
} | class ____
//which is important for the test suite
QuarkusConsole.INSTANCE = new BasicConsole(colorEnabled, false, QuarkusConsole.ORIGINAL_OUT, System.console());
return;
}
try {
new TerminalConnection(new Consumer<Connection>() {
@Override
public void accept(Connection connection) {
if (connection.supportsAnsi() && !consoleConfig.basic()) {
QuarkusConsole.INSTANCE = new AeshConsole(connection);
} else {
LinkedBlockingDeque<Integer> queue = new LinkedBlockingDeque<>();
if (inputSupport) {
connection.openNonBlocking();
}
connection.setStdinHandler(new Consumer<int[]>() {
@Override
public void accept(int[] ints) {
QuarkusConsole.StateChangeInputStream redirectIn = QuarkusConsole.REDIRECT_IN;
for (int i : ints) {
if (redirectIn != null && !redirectIn.acceptInput(i)) {
queue.add(i);
}
}
}
});
connection.setSignalHandler(event -> {
switch (event) {
case INT:
//todo: why does async exit not work here
//Quarkus.asyncExit();
//end(conn);
new Thread(new Runnable() {
@Override
public void run() {
System.exit(0);
}
}).start();
break;
}
});
connection.setCloseHandler(new Consumer<Void>() {
@Override
public void accept(Void unused) {
queue.add(-1);
}
});
QuarkusConsole.INSTANCE = new BasicConsole(colorEnabled,
inputSupport,
connection::write, new Supplier<Integer>() {
@Override
public Integer get() {
try {
return queue.takeFirst();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
});
}
}
});
} catch (IOException e) {
QuarkusConsole.INSTANCE = new BasicConsole(colorEnabled, false, QuarkusConsole.ORIGINAL_OUT, System.console());
}
QuarkusConsole.installRedirects();
}
}
| loader |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/RackResolver.java | {
"start": 1718,
"end": 2540
} | class ____ {
private static DNSToSwitchMapping dnsToSwitchMapping;
private static boolean initCalled = false;
private static final Logger LOG = LoggerFactory.getLogger(RackResolver.class);
/**
* Hide the default constructor for utility class.
*/
private RackResolver() {
}
public synchronized static void init(Configuration conf) {
if (initCalled) {
return;
}
initCalled = true;
Class<? extends DNSToSwitchMapping> dnsToSwitchMappingClass =
conf.getClass(
CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
ScriptBasedMapping.class,
DNSToSwitchMapping.class);
try {
DNSToSwitchMapping newInstance = ReflectionUtils.newInstance(
dnsToSwitchMappingClass, conf);
// Wrap around the configured | RackResolver |
java | quarkusio__quarkus | extensions/vertx-http/deployment/src/main/java/io/quarkus/vertx/http/deployment/SecurityInformationBuildItem.java | {
"start": 210,
"end": 1566
} | class ____ extends MultiBuildItem {
private final SecurityModel securityModel;
private final Optional<OpenIDConnectInformation> openIDConnectInformation;
public static SecurityInformationBuildItem BASIC() {
return new SecurityInformationBuildItem(SecurityModel.basic, Optional.empty());
}
public static SecurityInformationBuildItem JWT() {
return new SecurityInformationBuildItem(SecurityModel.jwt, Optional.empty());
}
public static SecurityInformationBuildItem OAUTH2() {
return new SecurityInformationBuildItem(SecurityModel.oauth2, Optional.empty());
}
public static SecurityInformationBuildItem OPENIDCONNECT(String urlConfigKey) {
return new SecurityInformationBuildItem(SecurityModel.oidc,
Optional.of(new OpenIDConnectInformation(urlConfigKey)));
}
public SecurityInformationBuildItem(SecurityModel securityModel,
Optional<OpenIDConnectInformation> openIDConnectInformation) {
this.securityModel = securityModel;
this.openIDConnectInformation = openIDConnectInformation;
}
public SecurityModel getSecurityModel() {
return securityModel;
}
public Optional<OpenIDConnectInformation> getOpenIDConnectInformation() {
return openIDConnectInformation;
}
public | SecurityInformationBuildItem |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/DataTypeExtractorTest.java | {
"start": 47527,
"end": 47921
} | class ____ {
public Integer integer;
@DataTypeHint(value = "RAW", bridgedTo = PojoWithRawSelfReference.class)
public PojoWithRawSelfReference reference;
}
// --------------------------------------------------------------------------------------------
/** Accumulator with invalid default extraction for data view. */
public static | PojoWithRawSelfReference |
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/BedrockAgentRuntimeEndpointBuilderFactory.java | {
"start": 1442,
"end": 1597
} | interface ____ {
/**
* Builder for endpoint for the AWS Bedrock Agent Runtime component.
*/
public | BedrockAgentRuntimeEndpointBuilderFactory |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/struct/UnwrappedWithCreator1467Test.java | {
"start": 1390,
"end": 1868
} | class ____ {
private final String _unrelated;
private final Inner _inner;
public ImplicitWithName(@JsonProperty("unrelated") String unrelated, @JsonProperty("inner") @JsonUnwrapped Inner inner) {
_unrelated = unrelated;
_inner = inner;
}
public String getUnrelated() {
return _unrelated;
}
public Inner getInner() {
return _inner;
}
}
static | ImplicitWithName |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/cache/config/EnableCachingTests.java | {
"start": 8510,
"end": 8693
} | class ____ implements CachingConfigurer {
@Bean
public CacheManager cm() {
return new NoOpCacheManager();
}
}
@Configuration
@EnableCaching
static | EmptyConfigSupportConfig |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MBeanInfoBuilder.java | {
"start": 1198,
"end": 1248
} | class ____ build MBeanInfo from metrics records
*/
| to |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMContainerWebSocket.java | {
"start": 2301,
"end": 6517
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(
TestNMContainerWebSocket.class);
private static final File TESTROOTDIR = new File("target",
TestNMWebServer.class.getSimpleName());
private static File testLogDir = new File("target",
TestNMWebServer.class.getSimpleName() + "LogDir");
private WebServer server;
@BeforeEach
public void setup() {
TESTROOTDIR.mkdirs();
testLogDir.mkdir();
}
@AfterEach
public void tearDown() {
FileUtil.fullyDelete(TESTROOTDIR);
FileUtil.fullyDelete(testLogDir);
}
private int startNMWebAppServer(String webAddr) {
Configuration conf = new Configuration();
Context nmContext = new NodeManager.NMContext(null, null, null, null, null,
false, conf);
ResourceView resourceView = new ResourceView() {
@Override
public long getVmemAllocatedForContainers() {
return 0;
}
@Override
public long getPmemAllocatedForContainers() {
return 0;
}
@Override
public long getVCoresAllocatedForContainers() {
return 0;
}
@Override
public boolean isVmemCheckEnabled() {
return true;
}
@Override
public boolean isPmemCheckEnabled() {
return true;
}
};
conf.set(YarnConfiguration.NM_LOCAL_DIRS, TESTROOTDIR.getAbsolutePath());
conf.set(YarnConfiguration.NM_LOG_DIRS, testLogDir.getAbsolutePath());
NodeHealthCheckerService healthChecker = createNodeHealthCheckerService();
healthChecker.init(conf);
LocalDirsHandlerService dirsHandler = healthChecker.getDiskHandler();
conf.set(YarnConfiguration.NM_WEBAPP_ADDRESS, webAddr);
server = new WebServer(nmContext, resourceView,
new ApplicationACLsManager(conf), dirsHandler);
try {
server.init(conf);
server.start();
return server.getPort();
} finally {
}
}
private NodeHealthCheckerService createNodeHealthCheckerService() {
LocalDirsHandlerService dirsHandler = new LocalDirsHandlerService();
return new NodeHealthCheckerService(dirsHandler);
}
@Test
public void testWebServerWithServlet() {
int port = startNMWebAppServer("0.0.0.0");
LOG.info("bind to port: " + port);
StringBuilder sb = new StringBuilder();
sb.append("ws://localhost:").append(port).append("/container/abc/");
String dest = sb.toString();
WebSocketClient client = new WebSocketClient();
try {
ContainerShellClientSocketTest socket = new ContainerShellClientSocketTest();
client.start();
URI echoUri = new URI(dest);
Future<Session> future = client.connect(socket, echoUri);
Session session = future.get();
session.getRemote().sendString("hello world");
session.close();
client.stop();
} catch (Throwable t) {
LOG.error("Failed to connect WebSocket and send message to server", t);
} finally {
try {
client.stop();
server.close();
} catch (Exception e) {
LOG.error("Failed to close client", e);
}
}
}
@Test
public void testContainerShellWebSocket() {
Context nm = mock(Context.class);
Session session = mock(Session.class);
Container container = mock(Container.class);
UpgradeRequest request = mock(UpgradeRequest.class);
ApplicationACLsManager aclManager = mock(ApplicationACLsManager.class);
ContainerShellWebSocket.init(nm);
ContainerShellWebSocket ws = new ContainerShellWebSocket();
List<String> names = new ArrayList<>();
names.add("foobar");
Map<String, List<String>> mockParameters = new HashMap<>();
mockParameters.put("user.name", names);
when(session.getUpgradeRequest()).thenReturn(request);
when(request.getParameterMap()).thenReturn(mockParameters);
when(container.getUser()).thenReturn("foobar");
when(nm.getApplicationACLsManager()).thenReturn(aclManager);
when(aclManager.areACLsEnabled()).thenReturn(false);
try {
boolean authorized = ws.checkAuthorization(session, container);
assertTrue(authorized, "Not authorized");
} catch (IOException e) {
fail("Should not throw exception.");
}
}
}
| TestNMContainerWebSocket |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/TypeNameShadowingTest.java | {
"start": 5801,
"end": 6077
} | class ____<T1> {
void bar(T1 t) {}
}
""")
.doTest();
}
@Test
public void negativeNestedClass() {
compilationHelper
.addSourceLines(
"T.java",
"""
package foo.bar;
| Foo |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging-kafka/deployment/src/main/java/io/quarkus/smallrye/reactivemessaging/kafka/deployment/SmallRyeReactiveMessagingKafkaProcessor.java | {
"start": 52352,
"end": 60711
} | class ____ GenericRecord (serializer/deserializer provided by Confluent or Apicurio)
boolean isAvroGenerated = discovery.isAvroGenerated(typeName);
if (isAvroGenerated || DotNames.AVRO_GENERIC_RECORD.equals(typeName)) {
int avroLibraries = 0;
avroLibraries += discovery.hasConfluent() ? 1 : 0;
avroLibraries += discovery.hasApicurio1() ? 1 : 0;
avroLibraries += discovery.hasApicurio2Avro() ? 1 : 0;
if (avroLibraries > 1) {
LOGGER.debugf("Skipping Avro serde autodetection for %s, because multiple Avro serde libraries are present",
typeName);
return Result.nonexistent();
}
if (discovery.hasConfluent()) {
return serializer
? Result.of("io.confluent.kafka.serializers.KafkaAvroSerializer")
: Result.of("io.confluent.kafka.serializers.KafkaAvroDeserializer")
.with(isAvroGenerated, "specific.avro.reader", "true");
} else if (discovery.hasApicurio1()) {
return serializer
? Result.of("io.apicurio.registry.utils.serde.AvroKafkaSerializer")
: Result.of("io.apicurio.registry.utils.serde.AvroKafkaDeserializer")
.with(isAvroGenerated, "apicurio.registry.use-specific-avro-reader", "true");
} else if (discovery.hasApicurio2Avro()) {
return serializer
? Result.of("io.apicurio.registry.serde.avro.AvroKafkaSerializer")
: Result.of("io.apicurio.registry.serde.avro.AvroKafkaDeserializer")
.with(isAvroGenerated, "apicurio.registry.use-specific-avro-reader", "true");
} else {
// we know it is an Avro type, no point in serializing it as JSON
return Result.nonexistent();
}
}
//TODO autodiscovery of json serdes
// Jackson-based serializer/deserializer
// note that Jackson is always present with Kafka, so no need to check
{
ClassInfo subclass = discovery.getSubclassOfWithTypeArgument(
serializer ? DotNames.OBJECT_MAPPER_SERIALIZER : DotNames.OBJECT_MAPPER_DESERIALIZER, typeName);
if (subclass != null) {
return Result.of(subclass.name().toString());
}
}
// Jsonb-based serializer/deserializer
if (discovery.hasJsonb()) {
ClassInfo subclass = discovery.getSubclassOfWithTypeArgument(
serializer ? DotNames.JSONB_SERIALIZER : DotNames.JSONB_DESERIALIZER, typeName);
if (subclass != null) {
return Result.of(subclass.name().toString());
}
}
// unknown
return null;
}
// ---
@BuildStep
@Consume(RuntimeConfigSetupCompleteBuildItem.class)
public void reflectiveValueSerializerPayload(CombinedIndexBuildItem combinedIndex,
BuildProducer<ReflectiveClassBuildItem> reflectiveClass) {
IndexView index = combinedIndex.getIndex();
Config config = ConfigProvider.getConfig();
processOutgoingForReflectiveClassPayload(index, config,
(annotation, payloadType) -> produceReflectiveClass(reflectiveClass, payloadType));
processOutgoingChannelForReflectiveClassPayload(index, config,
(annotation, payloadType) -> produceReflectiveClass(reflectiveClass, payloadType));
processIncomingForReflectiveClassPayload(index, config,
(annotation, payloadType) -> produceReflectiveClass(reflectiveClass, payloadType));
processIncomingChannelForReflectiveClassPayload(index, config,
(annotation, payloadType) -> produceReflectiveClass(reflectiveClass, payloadType));
}
void produceReflectiveClass(BuildProducer<ReflectiveClassBuildItem> reflectiveClass, Type type) {
reflectiveClass.produce(
ReflectiveClassBuildItem.builder(type.name().toString())
.reason(getClass().getName())
.methods().fields().build());
}
// visible for testing
void processOutgoingForReflectiveClassPayload(IndexView index, Config config,
BiConsumer<AnnotationInstance, Type> annotationAcceptor) {
processAnnotationsForReflectiveClassPayload(index, config, DotNames.OUTGOING, true,
annotation -> getOutgoingTypeFromMethod(annotation.target().asMethod()), annotationAcceptor);
}
// visible for testing
void processOutgoingChannelForReflectiveClassPayload(IndexView index, Config config,
BiConsumer<AnnotationInstance, Type> annotationAcceptor) {
processAnnotationsForReflectiveClassPayload(index, config, DotNames.CHANNEL, true,
annotation -> getOutgoingTypeFromChannelInjectionPoint(getInjectionPointType(annotation)), annotationAcceptor);
}
// visible for testing
void processIncomingForReflectiveClassPayload(IndexView index, Config config,
BiConsumer<AnnotationInstance, Type> annotationAcceptor) {
processAnnotationsForReflectiveClassPayload(index, config, DotNames.INCOMING, false,
annotation -> getIncomingTypeFromMethod(annotation.target().asMethod()), annotationAcceptor);
}
// visible for testing
void processIncomingChannelForReflectiveClassPayload(IndexView index, Config config,
BiConsumer<AnnotationInstance, Type> annotationAcceptor) {
processAnnotationsForReflectiveClassPayload(index, config, DotNames.CHANNEL, false,
annotation -> getIncomingTypeFromChannelInjectionPoint(getInjectionPointType(annotation)),
annotationAcceptor);
}
private void processAnnotationsForReflectiveClassPayload(IndexView index, Config config, DotName annotationType,
boolean serializer, Function<AnnotationInstance, Type> typeExtractor,
BiConsumer<AnnotationInstance, Type> annotationAcceptor) {
for (AnnotationInstance annotation : index.getAnnotations(annotationType)) {
String channelName = annotation.value().asString();
Type type = typeExtractor.apply(annotation);
extractKeyValueType(type, (key, value, isBatch) -> {
if (key != null && isSerdeJson(index, config, channelName, serializer, true)) {
annotationAcceptor.accept(annotation, key);
}
if (value != null && isSerdeJson(index, config, channelName, serializer, false)) {
annotationAcceptor.accept(annotation, value);
}
});
}
}
private boolean isSerdeJson(IndexView index, Config config, String channelName, boolean serializer, boolean isKey) {
String configKey = getChannelPropertyName(channelName, (isKey ? "key" : "value") + "." +
(serializer ? "serializer" : "deserializer"), !serializer);
ConfigValue configValue = config.getConfigValue(configKey);
if (configValue.getValue() != null) {
DotName serdeName = DotName.createSimple(configValue.getValue());
return serializer ? isSubclassOfJsonSerializer(index, serdeName) : isSubclassOfJsonDeserializer(index, serdeName);
}
return false;
}
private boolean isSubclassOfJsonSerializer(IndexView index, DotName serializerName) {
return isSubclassOf(index, DotNames.OBJECT_MAPPER_SERIALIZER, serializerName) ||
isSubclassOf(index, DotNames.JSONB_SERIALIZER, serializerName);
}
private boolean isSubclassOfJsonDeserializer(IndexView index, DotName serializerName) {
return isSubclassOf(index, DotNames.OBJECT_MAPPER_DESERIALIZER, serializerName) ||
isSubclassOf(index, DotNames.JSONB_DESERIALIZER, serializerName);
}
private boolean isSubclassOf(IndexView index, DotName superclass, DotName expectedType) {
if (superclass.equals(expectedType)) {
return true;
}
return index.getKnownDirectSubclasses(superclass)
.stream()
.anyMatch(ci -> ci.name().equals(expectedType));
}
}
| or |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/index/translog/Translog.java | {
"start": 52649,
"end": 61455
} | class ____ extends Operation {
public static final int FORMAT_NO_PARENT = 9; // since 7.0
public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1;
public static final int FORMAT_NO_DOC_TYPE = FORMAT_NO_VERSION_TYPE + 1;
public static final int FORMAT_REORDERED = FORMAT_NO_DOC_TYPE + 1;
public static final int SERIALIZATION_FORMAT = FORMAT_REORDERED;
private final BytesRef uid;
private final long autoGeneratedIdTimestamp;
private final long version;
private final BytesReference source;
private final String routing;
private static Index readFrom(StreamInput in) throws IOException {
final int format = in.readVInt(); // SERIALIZATION_FORMAT
assert format >= FORMAT_NO_PARENT : "format was: " + format;
BytesRef uid;
BytesReference source;
String routing;
long version;
long autoGeneratedIdTimestamp;
long seqNo;
long primaryTerm;
if (format < FORMAT_REORDERED) {
uid = Uid.encodeId(in.readString());
if (format < FORMAT_NO_DOC_TYPE) {
in.readString();
// can't assert that this is _doc because pre-8.0 indexes can have any name for a type
}
source = in.readBytesReference();
routing = in.readOptionalString();
version = in.readLong();
if (format < FORMAT_NO_VERSION_TYPE) {
in.readByte(); // _version_type
}
autoGeneratedIdTimestamp = in.readLong();
seqNo = in.readLong();
primaryTerm = in.readLong();
} else {
version = in.readLong();
seqNo = in.readLong();
primaryTerm = in.readLong();
autoGeneratedIdTimestamp = in.readLong();
uid = in.readBytesRef();
routing = in.readOptionalString();
source = in.readBytesReference();
}
return new Index(uid, seqNo, primaryTerm, version, source, routing, autoGeneratedIdTimestamp);
}
public Index(Engine.Index index, Engine.IndexResult indexResult) {
this(
index.uid(),
indexResult.getSeqNo(),
index.primaryTerm(),
indexResult.getVersion(),
index.source(),
index.routing(),
index.getAutoGeneratedIdTimestamp()
);
}
public Index(
String id,
long seqNo,
long primaryTerm,
long version,
BytesReference source,
String routing,
long autoGeneratedIdTimestamp
) {
this(Uid.encodeId(id), seqNo, primaryTerm, version, source, routing, autoGeneratedIdTimestamp);
}
public Index(
BytesRef uid,
long seqNo,
long primaryTerm,
long version,
BytesReference source,
String routing,
long autoGeneratedIdTimestamp
) {
super(seqNo, primaryTerm);
this.uid = uid;
this.source = source;
this.version = version;
this.routing = routing;
this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp;
}
@Override
public Type opType() {
return Type.INDEX;
}
@Override
public long estimateSize() {
return uid.length + source.length() + (routing != null ? 2 * routing.length() : 0) + (4 * Long.BYTES); // timestamp,
// seq_no,
// primary_term,
// and version
}
public BytesRef uid() {
return uid;
}
public String routing() {
return this.routing;
}
public BytesReference source() {
return this.source;
}
public long version() {
return this.version;
}
@Override
protected void writeHeader(int format, StreamOutput out) throws IOException {
out.writeVInt(format);
out.writeLong(version);
out.writeLong(seqNo);
out.writeLong(primaryTerm);
out.writeLong(autoGeneratedIdTimestamp);
out.writeBytesRef(uid);
out.writeOptionalString(routing);
out.writeVInt(source == null ? 0 : source.length());
}
@Override
public void writeBody(final StreamOutput out) throws IOException {
final int format = out.getTransportVersion().onOrAfter(TransportVersions.V_8_0_0)
? out.getTransportVersion().supports(REORDERED_TRANSLOG_OPERATIONS) ? SERIALIZATION_FORMAT : FORMAT_NO_DOC_TYPE
: FORMAT_NO_VERSION_TYPE;
if (format < FORMAT_REORDERED) {
out.writeVInt(format);
out.writeString(Uid.decodeId(uid.bytes, uid.offset, uid.length));
if (format < FORMAT_NO_DOC_TYPE) {
out.writeString(MapperService.SINGLE_MAPPING_NAME);
}
out.writeBytesReference(source);
out.writeOptionalString(routing);
out.writeLong(version);
out.writeLong(autoGeneratedIdTimestamp);
out.writeLong(seqNo);
out.writeLong(primaryTerm);
} else {
writeHeader(format, out);
if (source != null) {
source.writeTo(out);
}
}
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Index other = (Index) o;
return autoGeneratedIdTimestamp == other.autoGeneratedIdTimestamp && equalsWithoutAutoGeneratedTimestamp(this, other, true);
}
@Override
public int hashCode() {
int result = uid.hashCode();
result = 31 * result + Long.hashCode(seqNo);
result = 31 * result + Long.hashCode(primaryTerm);
result = 31 * result + Long.hashCode(version);
result = 31 * result + source.hashCode();
result = 31 * result + (routing != null ? routing.hashCode() : 0);
result = 31 * result + Long.hashCode(autoGeneratedIdTimestamp);
return result;
}
@Override
public String toString() {
return "Index{"
+ "id='"
+ Uid.decodeId(uid.bytes, uid.offset, uid.length)
+ '\''
+ ", seqNo="
+ seqNo
+ ", primaryTerm="
+ primaryTerm
+ ", version="
+ version
+ ", autoGeneratedIdTimestamp="
+ autoGeneratedIdTimestamp
+ '}';
}
public long getAutoGeneratedIdTimestamp() {
return autoGeneratedIdTimestamp;
}
public static boolean equalsWithoutAutoGeneratedTimestamp(Translog.Index o1, Translog.Index o2, boolean checkSourceBytes) {
if (o1.version != o2.version
|| o1.seqNo != o2.seqNo
|| o1.primaryTerm != o2.primaryTerm
|| o1.uid.equals(o2.uid) == false
|| Objects.equals(o1.routing, o2.routing) == false) {
return false;
}
if (checkSourceBytes) {
return o1.source.equals(o2.source);
}
var s1 = Source.fromBytes(o1.source);
var s2 = Source.fromBytes(o2.source);
try (
var actualParser = XContentHelper.createParserNotCompressed(
XContentParserConfiguration.EMPTY,
s1.internalSourceRef(),
s1.sourceContentType()
)
) {
var actualMap = actualParser.map();
try (
var expectedParser = XContentHelper.createParserNotCompressed(
XContentParserConfiguration.EMPTY,
s2.internalSourceRef(),
s2.sourceContentType()
)
) {
var expectedMap = expectedParser.map();
return expectedMap.equals(actualMap);
}
} catch (IOException exc) {
return false;
}
}
}
public static final | Index |
java | apache__flink | flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/rules/logical/CalcSnapshotTransposeRule.java | {
"start": 1376,
"end": 2482
} | class ____
extends RelRule<CalcSnapshotTransposeRule.CalcSnapshotTransposeRuleConfig> {
public static final CalcSnapshotTransposeRule INSTANCE =
CalcSnapshotTransposeRule.CalcSnapshotTransposeRuleConfig.DEFAULT.toRule();
protected CalcSnapshotTransposeRule(CalcSnapshotTransposeRuleConfig config) {
super(config);
}
@Override
public boolean matches(RelOptRuleCall call) {
FlinkLogicalCalc calc = call.rel(0);
// Don't push a calc which contains windowed aggregates into a snapshot for now.
return !RexOver.containsOver(calc.getProgram());
}
@Override
public void onMatch(RelOptRuleCall call) {
FlinkLogicalCalc calc = call.rel(0);
FlinkLogicalSnapshot snapshot = call.rel(1);
Calc newClac = calc.copy(calc.getTraitSet(), snapshot.getInputs());
Snapshot newSnapshot = snapshot.copy(snapshot.getTraitSet(), newClac, snapshot.getPeriod());
call.transformTo(newSnapshot);
}
/** Rule configuration. */
@Value.Immutable(singleton = false)
public | CalcSnapshotTransposeRule |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/protocol/DatanodeProtocol.java | {
"start": 2139,
"end": 9699
} | class ____ ALSO related protocol buffer
* wire protocol definition in DatanodeProtocol.proto.
*
* For more details on protocol buffer wire protocol, please see
* .../org/apache/hadoop/hdfs/protocolPB/overview.html
*/
public static final long versionID = 28L;
// error code
final static int NOTIFY = 0;
final static int DISK_ERROR = 1; // there are still valid volumes on DN
final static int INVALID_BLOCK = 2;
final static int FATAL_DISK_ERROR = 3; // no valid volumes left on DN
/**
* Determines actions that data node should perform
* when receiving a datanode command.
*/
final static int DNA_UNKNOWN = 0; // unknown action
final static int DNA_TRANSFER = 1; // transfer blocks to another datanode
final static int DNA_INVALIDATE = 2; // invalidate blocks
final static int DNA_SHUTDOWN = 3; // shutdown node
final static int DNA_REGISTER = 4; // re-register
final static int DNA_FINALIZE = 5; // finalize previous upgrade
final static int DNA_RECOVERBLOCK = 6; // request a block recovery
final static int DNA_ACCESSKEYUPDATE = 7; // update access key
final static int DNA_BALANCERBANDWIDTHUPDATE = 8; // update balancer bandwidth
final static int DNA_CACHE = 9; // cache blocks
final static int DNA_UNCACHE = 10; // uncache blocks
final static int DNA_ERASURE_CODING_RECONSTRUCTION = 11; // erasure coding reconstruction command
int DNA_BLOCK_STORAGE_MOVEMENT = 12; // block storage movement command
int DNA_DROP_SPS_WORK_COMMAND = 13; // drop sps work command
/**
* Register Datanode.
*
* @see org.apache.hadoop.hdfs.server.namenode.FSNamesystem#registerDatanode(DatanodeRegistration)
* @param registration datanode registration information
* @return the given {@link org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration} with
* updated registration information
*/
@Idempotent
public DatanodeRegistration registerDatanode(DatanodeRegistration registration
) throws IOException;
/**
* sendHeartbeat() tells the NameNode that the DataNode is still
* alive and well. Includes some status info, too.
* It also gives the NameNode a chance to return
* an array of "DatanodeCommand" objects in HeartbeatResponse.
* A DatanodeCommand tells the DataNode to invalidate local block(s),
* or to copy them to other DataNodes, etc.
* @param registration datanode registration information.
* @param reports utilization report per storage.
* @param dnCacheCapacity the total cache capacity of the datanode (in bytes).
* @param dnCacheUsed the amount of cache used by the datanode (in bytes).
* @param xmitsInProgress number of transfers from this datanode to others.
* @param xceiverCount number of active transceiver threads.
* @param failedVolumes number of failed volumes.
* @param volumeFailureSummary info about volume failures.
* @param requestFullBlockReportLease whether to request a full block
* report lease.
* @param slowPeers Details of peer DataNodes that were detected as being
* slow to respond to packet writes. Empty report if no
* slow peers were detected by the DataNode.
* @param slowDisks Details of disks on DataNodes that were detected as
* being slow. Empty report if no slow disks were detected.
* @throws IOException on error.
*/
@Idempotent
public HeartbeatResponse sendHeartbeat(DatanodeRegistration registration,
StorageReport[] reports,
long dnCacheCapacity,
long dnCacheUsed,
int xmitsInProgress,
int xceiverCount,
int failedVolumes,
VolumeFailureSummary volumeFailureSummary,
boolean requestFullBlockReportLease,
@Nonnull SlowPeerReports slowPeers,
@Nonnull SlowDiskReports slowDisks)
throws IOException;
/**
* blockReport() tells the NameNode about all the locally-stored blocks.
* The NameNode returns an array of Blocks that have become obsolete
* and should be deleted. This function is meant to upload *all*
* the locally-stored blocks. It's invoked upon startup and then
* infrequently afterwards.
* @param registration datanode registration
* @param poolId the block pool ID for the blocks
* @param reports report of blocks per storage
* Each finalized block is represented as 3 longs. Each under-
* construction replica is represented as 4 longs.
* This is done instead of Block[] to reduce memory used by block reports.
* @param reports report of blocks per storage
* @param context Context information for this block report.
*
* @return - the next command for DN to process.
* @throws IOException
*/
@Idempotent
public DatanodeCommand blockReport(DatanodeRegistration registration,
String poolId, StorageBlockReport[] reports,
BlockReportContext context) throws IOException;
/**
* Communicates the complete list of locally cached blocks to the NameNode.
*
* This method is similar to
* {@link #blockReport(DatanodeRegistration, String, StorageBlockReport[], BlockReportContext)},
* which is used to communicated blocks stored on disk.
*
* @param registration The datanode registration.
* @param poolId The block pool ID for the blocks.
* @param blockIds A list of block IDs.
* @return The DatanodeCommand.
* @throws IOException
*/
@Idempotent
public DatanodeCommand cacheReport(DatanodeRegistration registration,
String poolId, List<Long> blockIds) throws IOException;
/**
* blockReceivedAndDeleted() allows the DataNode to tell the NameNode about
* recently-received and -deleted block data.
*
* For the case of received blocks, a hint for preferred replica to be
* deleted when there is any excessive blocks is provided.
* For example, whenever client code
* writes a new Block here, or another DataNode copies a Block to
* this DataNode, it will call blockReceived().
*/
@Idempotent
public void blockReceivedAndDeleted(DatanodeRegistration registration,
String poolId,
StorageReceivedDeletedBlocks[] rcvdAndDeletedBlocks)
throws IOException;
/**
* errorReport() tells the NameNode about something that has gone
* awry. Useful for debugging.
*/
@Idempotent
public void errorReport(DatanodeRegistration registration,
int errorCode,
String msg) throws IOException;
@Idempotent
public NamespaceInfo versionRequest() throws IOException;
/**
* same as {@link org.apache.hadoop.hdfs.protocol.ClientProtocol#reportBadBlocks(LocatedBlock[])}
* }
*/
@Idempotent
public void reportBadBlocks(LocatedBlock[] blocks) throws IOException;
/**
* Commit block synchronization in lease recovery
*/
@Idempotent
public void commitBlockSynchronization(ExtendedBlock block,
long newgenerationstamp, long newlength,
boolean closeFile, boolean deleteblock, DatanodeID[] newtargets,
String[] newtargetstorages) throws IOException;
}
| and |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-router/src/main/java/org/apache/hadoop/yarn/server/router/rmadmin/DefaultRMAdminRequestInterceptor.java | {
"start": 5346,
"end": 11761
} | class ____
extends AbstractRMAdminRequestInterceptor {
private static final Logger LOG =
LoggerFactory.getLogger(DefaultRMAdminRequestInterceptor.class);
private ResourceManagerAdministrationProtocol rmAdminProxy;
@Override
public void init(String userName) {
super.init(userName);
try {
final Configuration conf = this.getConf();
rmAdminProxy = user.doAs(
(PrivilegedExceptionAction<ResourceManagerAdministrationProtocol>) () ->
ClientRMProxy.createRMProxy(conf, ResourceManagerAdministrationProtocol.class));
} catch (Exception e) {
StringBuilder message = new StringBuilder();
message.append("Error while creating Router RMAdmin Service");
if (user != null) {
message.append(", user: " + user);
}
LOG.error(message.toString(), e);
throw new YarnRuntimeException(message.toString(), e);
}
}
@Override
public void setNextInterceptor(RMAdminRequestInterceptor next) {
throw new YarnRuntimeException("setNextInterceptor is being called on "
+ "DefaultRMAdminRequestInterceptor, which should be the last one "
+ "in the chain. Check if the interceptor pipeline configuration "
+ "is correct");
}
@VisibleForTesting
public void setRMAdmin(ResourceManagerAdministrationProtocol rmAdmin) {
this.rmAdminProxy = rmAdmin;
}
@Override
public RefreshQueuesResponse refreshQueues(RefreshQueuesRequest request)
throws StandbyException, YarnException, IOException {
return rmAdminProxy.refreshQueues(request);
}
@Override
public RefreshNodesResponse refreshNodes(RefreshNodesRequest request)
throws StandbyException, YarnException, IOException {
return rmAdminProxy.refreshNodes(request);
}
@Override
public RefreshSuperUserGroupsConfigurationResponse refreshSuperUserGroupsConfiguration(
RefreshSuperUserGroupsConfigurationRequest request)
throws StandbyException, YarnException, IOException {
return rmAdminProxy.refreshSuperUserGroupsConfiguration(request);
}
@Override
public RefreshUserToGroupsMappingsResponse refreshUserToGroupsMappings(
RefreshUserToGroupsMappingsRequest request)
throws StandbyException, YarnException, IOException {
return rmAdminProxy.refreshUserToGroupsMappings(request);
}
@Override
public RefreshAdminAclsResponse refreshAdminAcls(
RefreshAdminAclsRequest request) throws YarnException, IOException {
return rmAdminProxy.refreshAdminAcls(request);
}
@Override
public RefreshServiceAclsResponse refreshServiceAcls(
RefreshServiceAclsRequest request) throws YarnException, IOException {
return rmAdminProxy.refreshServiceAcls(request);
}
@Override
public UpdateNodeResourceResponse updateNodeResource(
UpdateNodeResourceRequest request) throws YarnException, IOException {
return rmAdminProxy.updateNodeResource(request);
}
@Override
public RefreshNodesResourcesResponse refreshNodesResources(
RefreshNodesResourcesRequest request) throws YarnException, IOException {
return rmAdminProxy.refreshNodesResources(request);
}
@Override
public AddToClusterNodeLabelsResponse addToClusterNodeLabels(
AddToClusterNodeLabelsRequest request) throws YarnException, IOException {
return rmAdminProxy.addToClusterNodeLabels(request);
}
@Override
public RemoveFromClusterNodeLabelsResponse removeFromClusterNodeLabels(
RemoveFromClusterNodeLabelsRequest request)
throws YarnException, IOException {
return rmAdminProxy.removeFromClusterNodeLabels(request);
}
@Override
public ReplaceLabelsOnNodeResponse replaceLabelsOnNode(
ReplaceLabelsOnNodeRequest request) throws YarnException, IOException {
return rmAdminProxy.replaceLabelsOnNode(request);
}
@Override
public CheckForDecommissioningNodesResponse checkForDecommissioningNodes(
CheckForDecommissioningNodesRequest checkForDecommissioningNodesRequest)
throws YarnException, IOException {
return rmAdminProxy
.checkForDecommissioningNodes(checkForDecommissioningNodesRequest);
}
@Override
public RefreshClusterMaxPriorityResponse refreshClusterMaxPriority(
RefreshClusterMaxPriorityRequest request)
throws YarnException, IOException {
return rmAdminProxy.refreshClusterMaxPriority(request);
}
@Override
public String[] getGroupsForUser(String userName) throws IOException {
return rmAdminProxy.getGroupsForUser(userName);
}
@Override
public NodesToAttributesMappingResponse mapAttributesToNodes(
NodesToAttributesMappingRequest request)
throws YarnException, IOException {
return rmAdminProxy.mapAttributesToNodes(request);
}
@Override
public DeregisterSubClusterResponse deregisterSubCluster(DeregisterSubClusterRequest request)
throws YarnException, IOException {
return rmAdminProxy.deregisterSubCluster(request);
}
@Override
public SaveFederationQueuePolicyResponse saveFederationQueuePolicy(
SaveFederationQueuePolicyRequest request) throws YarnException, IOException {
return rmAdminProxy.saveFederationQueuePolicy(request);
}
@Override
public BatchSaveFederationQueuePoliciesResponse batchSaveFederationQueuePolicies(
BatchSaveFederationQueuePoliciesRequest request) throws YarnException, IOException {
return rmAdminProxy.batchSaveFederationQueuePolicies(request);
}
@Override
public QueryFederationQueuePoliciesResponse listFederationQueuePolicies(
QueryFederationQueuePoliciesRequest request) throws YarnException, IOException {
return rmAdminProxy.listFederationQueuePolicies(request);
}
@Override
public DeleteFederationApplicationResponse deleteFederationApplication(
DeleteFederationApplicationRequest request)
throws YarnException, IOException {
return rmAdminProxy.deleteFederationApplication(request);
}
@Override
public GetSubClustersResponse getFederationSubClusters(
GetSubClustersRequest request) throws YarnException, IOException {
return rmAdminProxy.getFederationSubClusters(request);
}
@Override
public DeleteFederationQueuePoliciesResponse deleteFederationPoliciesByQueues(
DeleteFederationQueuePoliciesRequest request) throws YarnException, IOException {
return rmAdminProxy.deleteFederationPoliciesByQueues(request);
}
}
| DefaultRMAdminRequestInterceptor |
java | hibernate__hibernate-orm | tooling/metamodel-generator/src/test/java/org/hibernate/processor/test/embeddable/nested/field/Author.java | {
"start": 273,
"end": 388
} | class ____ {
@Id
String ssn;
@Basic(optional = false)
String name;
Address address;
Boolean deceased;
}
| Author |
java | quarkusio__quarkus | extensions/smallrye-reactive-messaging/runtime/src/main/java/io/quarkus/smallrye/reactivemessaging/runtime/DuplicatedContextConnectorFactoryInterceptor.java | {
"start": 785,
"end": 2032
} | class ____ {
@AroundInvoke
public Object intercept(InvocationContext ctx) throws Exception {
if (ctx.getMethod().getName().equals("getPublisherBuilder")) {
PublisherBuilder<Message<?>> result = (PublisherBuilder<Message<?>>) ctx.proceed();
return result.map(DuplicatedContextConnectorFactoryInterceptor::setMessageContextSafe);
}
if (ctx.getMethod().getName().equals("getPublisher")) {
Flow.Publisher<Message<?>> result = (Flow.Publisher<Message<?>>) ctx.proceed();
return Multi.createFrom().publisher(result)
.map(DuplicatedContextConnectorFactoryInterceptor::setMessageContextSafe);
}
return ctx.proceed();
}
private static Message<?> setMessageContextSafe(Message<?> message) {
Optional<LocalContextMetadata> metadata = message.getMetadata(LocalContextMetadata.class);
if (metadata.isPresent()) {
Context context = metadata.get().context();
if (context != null && VertxContext.isDuplicatedContext(context)) {
VertxContextSafetyToggle.setContextSafe(context, true);
}
}
return message;
}
}
| DuplicatedContextConnectorFactoryInterceptor |
java | micronaut-projects__micronaut-core | core-processor/src/main/java/io/micronaut/inject/writer/BeanDefinitionWriter.java | {
"start": 58890,
"end": 59054
} | interface
____(factoryClass, factoryMethod, parameters);
}
}
/**
* <p>In the case where the produced | visitBuildFactoryMethodDefinition |
java | apache__camel | components/camel-undertow/src/main/java/org/apache/camel/component/undertow/RestUndertowHttpBinding.java | {
"start": 1031,
"end": 2199
} | class ____ extends DefaultUndertowHttpBinding {
public RestUndertowHttpBinding() {
}
public RestUndertowHttpBinding(boolean useStreaming) {
super(useStreaming);
}
@Override
public void populateCamelHeaders(HttpServerExchange httpExchange, Map<String, Object> headersMap, Exchange exchange)
throws Exception {
super.populateCamelHeaders(httpExchange, headersMap, exchange);
String path = httpExchange.getRequestPath();
if (path == null) {
return;
}
// in the endpoint the user may have defined rest {} placeholders
// so we need to map those placeholders with data from the incoming request context path
UndertowEndpoint endpoint = (UndertowEndpoint) exchange.getFromEndpoint();
String consumerPath = endpoint.getHttpURI().getPath();
if (useRestMatching(consumerPath)) {
evalPlaceholders(headersMap, path, consumerPath);
}
}
private boolean useRestMatching(String path) {
// only need to do rest matching if using { } placeholders
return path.indexOf('{') > -1;
}
}
| RestUndertowHttpBinding |
java | google__guava | android/guava-tests/test/com/google/common/collect/EnumHashBiMapTest.java | {
"start": 2188,
"end": 8599
} | class ____ implements TestBiMapGenerator<Country, String> {
@SuppressWarnings("unchecked")
@Override
public BiMap<Country, String> create(Object... entries) {
BiMap<Country, String> result = EnumHashBiMap.create(Country.class);
for (Object o : entries) {
Entry<Country, String> entry = (Entry<Country, String>) o;
result.put(entry.getKey(), entry.getValue());
}
return result;
}
@Override
public SampleElements<Entry<Country, String>> samples() {
return new SampleElements<>(
immutableEntry(Country.CANADA, "DOLLAR"),
immutableEntry(Country.CHILE, "PESO"),
immutableEntry(Country.UK, "POUND"),
immutableEntry(Country.JAPAN, "YEN"),
immutableEntry(Country.SWITZERLAND, "FRANC"));
}
@SuppressWarnings("unchecked")
@Override
public Entry<Country, String>[] createArray(int length) {
return (Entry<Country, String>[]) new Entry<?, ?>[length];
}
@Override
public Iterable<Entry<Country, String>> order(List<Entry<Country, String>> insertionOrder) {
return insertionOrder;
}
@Override
public Country[] createKeyArray(int length) {
return new Country[length];
}
@Override
public String[] createValueArray(int length) {
return new String[length];
}
}
@J2ktIncompatible
@GwtIncompatible // suite
@AndroidIncompatible // test-suite builders
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTest(
BiMapTestSuiteBuilder.using(new EnumHashBiMapGenerator())
.named("EnumHashBiMap")
.withFeatures(
CollectionSize.ANY,
CollectionFeature.SERIALIZABLE,
CollectionFeature.SUPPORTS_ITERATOR_REMOVE,
MapFeature.ALLOWS_NULL_VALUES,
MapFeature.GENERAL_PURPOSE,
CollectionFeature.KNOWN_ORDER)
.createTestSuite());
suite.addTestSuite(EnumHashBiMapTest.class);
return suite;
}
public void testCreate() {
EnumHashBiMap<Currency, String> bimap = EnumHashBiMap.create(Currency.class);
assertTrue(bimap.isEmpty());
assertEquals("{}", bimap.toString());
assertEquals(HashBiMap.create(), bimap);
bimap.put(Currency.DOLLAR, "dollar");
assertEquals("dollar", bimap.get(Currency.DOLLAR));
assertEquals(Currency.DOLLAR, bimap.inverse().get("dollar"));
}
public void testCreateFromMap() {
/* Test with non-empty Map. */
Map<Currency, String> map =
ImmutableMap.of(
Currency.DOLLAR, "dollar",
Currency.PESO, "peso",
Currency.FRANC, "franc");
EnumHashBiMap<Currency, String> bimap = EnumHashBiMap.create(map);
assertEquals("dollar", bimap.get(Currency.DOLLAR));
assertEquals(Currency.DOLLAR, bimap.inverse().get("dollar"));
/* Map must have at least one entry if not an EnumHashBiMap. */
assertThrows(
IllegalArgumentException.class,
() -> EnumHashBiMap.create(Collections.<Currency, String>emptyMap()));
/* Map can be empty if it's an EnumHashBiMap. */
Map<Currency, String> emptyBimap = EnumHashBiMap.create(Currency.class);
bimap = EnumHashBiMap.create(emptyBimap);
assertTrue(bimap.isEmpty());
/* Map can be empty if it's an EnumBiMap. */
Map<Currency, Country> emptyBimap2 = EnumBiMap.create(Currency.class, Country.class);
EnumHashBiMap<Currency, Country> bimap2 = EnumHashBiMap.create(emptyBimap2);
assertTrue(bimap2.isEmpty());
}
public void testEnumHashBiMapConstructor() {
/* Test that it copies existing entries. */
EnumHashBiMap<Currency, String> bimap1 = EnumHashBiMap.create(Currency.class);
bimap1.put(Currency.DOLLAR, "dollar");
EnumHashBiMap<Currency, String> bimap2 = EnumHashBiMap.create(bimap1);
assertEquals("dollar", bimap2.get(Currency.DOLLAR));
assertEquals(bimap1, bimap2);
bimap2.inverse().put("franc", Currency.FRANC);
assertEquals("franc", bimap2.get(Currency.FRANC));
assertThat(bimap1.get(Currency.FRANC)).isNull();
assertFalse(bimap2.equals(bimap1));
/* Test that it can be empty. */
EnumHashBiMap<Currency, String> emptyBimap = EnumHashBiMap.create(Currency.class);
EnumHashBiMap<Currency, String> bimap3 = EnumHashBiMap.create(emptyBimap);
assertEquals(bimap3, emptyBimap);
}
public void testEnumBiMapConstructor() {
/* Test that it copies existing entries. */
EnumBiMap<Currency, Country> bimap1 = EnumBiMap.create(Currency.class, Country.class);
bimap1.put(Currency.DOLLAR, Country.SWITZERLAND);
EnumHashBiMap<Currency, Object> bimap2 = // use supertype
EnumHashBiMap.<Currency, Object>create(bimap1);
assertEquals(Country.SWITZERLAND, bimap2.get(Currency.DOLLAR));
assertEquals(bimap1, bimap2);
bimap2.inverse().put("franc", Currency.FRANC);
assertEquals("franc", bimap2.get(Currency.FRANC));
assertThat(bimap1.get(Currency.FRANC)).isNull();
assertFalse(bimap2.equals(bimap1));
/* Test that it can be empty. */
EnumBiMap<Currency, Country> emptyBimap = EnumBiMap.create(Currency.class, Country.class);
EnumHashBiMap<Currency, Country> bimap3 = // use exact type
EnumHashBiMap.create(emptyBimap);
assertEquals(bimap3, emptyBimap);
}
@GwtIncompatible // keyType
public void testKeyType() {
EnumHashBiMap<Currency, String> bimap = EnumHashBiMap.create(Currency.class);
assertEquals(Currency.class, bimap.keyType());
}
public void testEntrySet() {
// Bug 3168290
Map<Currency, String> map =
ImmutableMap.of(
Currency.DOLLAR, "dollar",
Currency.PESO, "peso",
Currency.FRANC, "franc");
EnumHashBiMap<Currency, String> bimap = EnumHashBiMap.create(map);
Set<Object> uniqueEntries = Sets.newIdentityHashSet();
uniqueEntries.addAll(bimap.entrySet());
assertEquals(3, uniqueEntries.size());
}
@GwtIncompatible
@J2ktIncompatible
public void testSerializable() {
SerializableTester.reserializeAndAssert(EnumHashBiMap.create(Currency.class));
}
@J2ktIncompatible
@GwtIncompatible // reflection
public void testNulls() {
new NullPointerTester().testAllPublicStaticMethods(EnumHashBiMap.class);
new NullPointerTester().testAllPublicInstanceMethods(EnumHashBiMap.create(Currency.class));
}
}
| EnumHashBiMapGenerator |
java | apache__logging-log4j2 | log4j-1.2-api/src/main/java/org/apache/log4j/builders/appender/AppenderBuilder.java | {
"start": 1166,
"end": 1550
} | interface ____<T extends Appender> extends Builder<T> {
Appender parseAppender(Element element, XmlConfiguration configuration);
Appender parseAppender(
String name,
String appenderPrefix,
String layoutPrefix,
String filterPrefix,
Properties props,
PropertiesConfiguration configuration);
}
| AppenderBuilder |
java | quarkusio__quarkus | devtools/cli-common/src/main/java/io/quarkus/cli/common/HelpOption.java | {
"start": 68,
"end": 228
} | class ____ {
@CommandLine.Option(names = { "-h", "--help" }, usageHelp = true, description = "Display this help message.")
public boolean help;
}
| HelpOption |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/serializer/ErrorGetterTest.java | {
"start": 160,
"end": 485
} | class ____ extends TestCase {
public void test_0() throws Exception {
Model m = new Model();
Exception error = null;
try {
JSON.toJSONString(m);
} catch (JSONException ex) {
error = ex;
}
assertNotNull(error);
}
private static | ErrorGetterTest |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassPostProcessorTests.java | {
"start": 63802,
"end": 63872
} | class ____ {
@Configuration
@Order(1)
| ConfigWithOrderedInnerClasses |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/observable/ObservableMap.java | {
"start": 1357,
"end": 2536
} | class ____<T, U> extends BasicFuseableObserver<T, U> {
final Function<? super T, ? extends U> mapper;
MapObserver(Observer<? super U> actual, Function<? super T, ? extends U> mapper) {
super(actual);
this.mapper = mapper;
}
@Override
public void onNext(T t) {
if (done) {
return;
}
if (sourceMode != NONE) {
downstream.onNext(null);
return;
}
U v;
try {
v = Objects.requireNonNull(mapper.apply(t), "The mapper function returned a null value.");
} catch (Throwable ex) {
fail(ex);
return;
}
downstream.onNext(v);
}
@Override
public int requestFusion(int mode) {
return transitiveBoundaryFusion(mode);
}
@Nullable
@Override
public U poll() throws Throwable {
T t = qd.poll();
return t != null ? Objects.requireNonNull(mapper.apply(t), "The mapper function returned a null value.") : null;
}
}
}
| MapObserver |
java | google__dagger | dagger-compiler/main/java/dagger/internal/codegen/base/DiagnosticFormatting.java | {
"start": 828,
"end": 2784
} | class ____ {
/**
* A regular expression to match a small list of specific packages deemed to be unhelpful to
* display in fully qualified types in error messages.
*
* <p>Note: This should never be applied to messages themselves.
*/
private static final Pattern COMMON_PACKAGE_PATTERN =
Pattern.compile(
"(?:^|[^.a-z_])" // What we want to match on but not capture.
+ "((?:" // Start a group with a non-capturing or part
+ "java[.]lang"
+ "|java[.]util"
+ "|javax[.]inject"
+ "|dagger"
+ "|dagger[.]multibindings"
+ "|com[.]google[.]common[.]base"
+ "|com[.]google[.]common[.]collect"
+ ")[.])" // Always end with a literal .
+ "[A-Z]"); // What we want to match on but not capture.
/**
* A method to strip out common packages and a few rare type prefixes from types' string
* representation before being used in error messages.
*
* <p>This type assumes a String value that is a valid fully qualified (and possibly
* parameterized) type, and should NOT be used with arbitrary text, especially prose error
* messages.
*
* <p>TODO(cgruber): Tighten these to take type representations (mirrors and elements) to avoid
* accidental mis-use by running errors through this method.
*/
public static String stripCommonTypePrefixes(String type) {
// Do regex magic to remove common packages we care to shorten.
Matcher matcher = COMMON_PACKAGE_PATTERN.matcher(type);
StringBuilder result = new StringBuilder();
int index = 0;
while (matcher.find()) {
result.append(type.subSequence(index, matcher.start(1)));
index = matcher.end(1); // Skip the matched pattern content.
}
result.append(type.subSequence(index, type.length()));
return result.toString();
}
private DiagnosticFormatting() {}
}
| DiagnosticFormatting |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/InterceptorInfo.java | {
"start": 1229,
"end": 4679
} | class ____ extends BeanInfo implements Comparable<InterceptorInfo> {
private static final Logger LOGGER = Logger.getLogger(InterceptorInfo.class);
private final Set<AnnotationInstance> bindings;
private final List<MethodInfo> aroundInvokes;
private final List<MethodInfo> aroundConstructs;
private final List<MethodInfo> postConstructs;
private final List<MethodInfo> preDestroys;
// These fields are only used for synthetic interceptors
private final InterceptionType interceptionType;
private final Class<? extends InterceptorCreator> creatorClass;
InterceptorInfo(Class<? extends InterceptorCreator> creatorClass, BeanDeployment beanDeployment,
Set<AnnotationInstance> bindings, List<Injection> injections, int priority, InterceptionType interceptionType,
Map<String, Object> params, String identifier) {
super(null, ClassType.create(InterceptFunction.class), null, beanDeployment, BuiltinScope.DEPENDENT.getInfo(),
Sets.singletonHashSet(Type.create(DotName.OBJECT_NAME, Kind.CLASS)), new HashSet<>(), injections, null,
null, false,
Collections.emptyList(), null, false, cg -> {
BlockCreator b0 = cg.createMethod();
Expr creator = b0.new_(ConstructorDesc.of(creatorClass));
LocalVar result = b0.localVar("result", b0.invokeInterface(MethodDesc.of(InterceptorCreator.class,
"create", InterceptFunction.class, SyntheticCreationalContext.class),
creator, cg.syntheticCreationalContext()));
b0.ifNull(result, b1 -> {
b1.throw_(IllegalStateException.class, creatorClass.getName() + ".create() must not return null");
});
b0.return_(result);
},
null, params, true, false, null, priority, creatorClass.getName() + (identifier != null ? identifier : ""),
null, null, null, null);
this.bindings = bindings;
this.interceptionType = interceptionType;
this.creatorClass = creatorClass;
this.aroundInvokes = List.of();
this.aroundConstructs = List.of();
this.postConstructs = List.of();
this.preDestroys = List.of();
}
InterceptorInfo(AnnotationTarget target, BeanDeployment beanDeployment, Set<AnnotationInstance> bindings,
List<Injection> injections, int priority) {
super(target, beanDeployment, BuiltinScope.DEPENDENT.getInfo(),
Sets.singletonHashSet(Type.create(target.asClass().name(), Kind.CLASS)), new HashSet<>(), injections,
null, null, false, Collections.emptyList(), null, false, null, priority, null, null);
this.bindings = bindings;
this.interceptionType = null;
this.creatorClass = null;
AnnotationStore store = beanDeployment.getAnnotationStore();
List<MethodInfo> aroundInvokes = new ArrayList<>();
List<MethodInfo> aroundConstructs = new ArrayList<>();
List<MethodInfo> postConstructs = new ArrayList<>();
List<MethodInfo> preDestroys = new ArrayList<>();
List<MethodInfo> allMethods = new ArrayList<>();
ClassInfo aClass = target.asClass();
while (aClass != null) {
// Only one interceptor method of a given type may be declared on a given | InterceptorInfo |
java | elastic__elasticsearch | modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSource.java | {
"start": 1494,
"end": 7094
} | class ____ extends IntervalsSource {
private final IntervalsSource in;
private final Query approximation;
private final IOFunction<LeafReaderContext, CheckedIntFunction<List<Object>, IOException>> valueFetcherProvider;
private final Analyzer indexAnalyzer;
public SourceIntervalsSource(
IntervalsSource in,
Query approximation,
IOFunction<LeafReaderContext, CheckedIntFunction<List<Object>, IOException>> valueFetcherProvider,
Analyzer indexAnalyzer
) {
this.in = Objects.requireNonNull(in);
this.approximation = Objects.requireNonNull(approximation);
this.valueFetcherProvider = Objects.requireNonNull(valueFetcherProvider);
this.indexAnalyzer = Objects.requireNonNull(indexAnalyzer);
}
public IntervalsSource getIntervalsSource() {
return in;
}
private LeafReaderContext createSingleDocLeafReaderContext(String field, List<Object> values) {
MemoryIndex index = new MemoryIndex();
for (Object value : values) {
if (value == null) {
continue;
}
index.addField(field, value.toString(), indexAnalyzer);
}
index.freeze();
return index.createSearcher().getIndexReader().leaves().get(0);
}
@Override
public IntervalIterator intervals(String field, LeafReaderContext ctx) throws IOException {
final IndexSearcher searcher = new IndexSearcher(ctx.reader());
final Weight weight = searcher.createWeight(searcher.rewrite(approximation), ScoreMode.COMPLETE_NO_SCORES, 1f);
final Scorer scorer = weight.scorer(ctx.reader().getContext());
if (scorer == null) {
return null;
}
final DocIdSetIterator approximationIter = scorer.iterator();
final CheckedIntFunction<List<Object>, IOException> valueFetcher = valueFetcherProvider.apply(ctx);
return new IntervalIterator() {
private IntervalIterator in;
@Override
public int docID() {
return approximationIter.docID();
}
@Override
public long cost() {
return approximationIter.cost();
}
@Override
public int nextDoc() throws IOException {
return doNext(approximationIter.nextDoc());
}
@Override
public int advance(int target) throws IOException {
return doNext(approximationIter.advance(target));
}
private int doNext(int doc) throws IOException {
while (doc != NO_MORE_DOCS && setIterator(doc) == false) {
doc = approximationIter.nextDoc();
}
return doc;
}
private boolean setIterator(int doc) throws IOException {
final List<Object> values = valueFetcher.apply(doc);
final LeafReaderContext singleDocContext = createSingleDocLeafReaderContext(field, values);
in = SourceIntervalsSource.this.in.intervals(field, singleDocContext);
final boolean isSet = in != null && in.nextDoc() != NO_MORE_DOCS;
assert isSet == false || in.docID() == 0;
return isSet;
}
@Override
public int start() {
return in.start();
}
@Override
public int end() {
return in.end();
}
@Override
public int gaps() {
return in.gaps();
}
@Override
public int nextInterval() throws IOException {
return in.nextInterval();
}
@Override
public float matchCost() {
// a high number since we need to parse the _source
return 10_000;
}
};
}
@Override
public IntervalMatchesIterator matches(String field, LeafReaderContext ctx, int doc) throws IOException {
final CheckedIntFunction<List<Object>, IOException> valueFetcher = valueFetcherProvider.apply(ctx);
final List<Object> values = valueFetcher.apply(doc);
final LeafReaderContext singleDocContext = createSingleDocLeafReaderContext(field, values);
return in.matches(field, singleDocContext, 0);
}
@Override
public void visit(String field, QueryVisitor visitor) {
in.visit(field, visitor);
}
@Override
public int minExtent() {
return in.minExtent();
}
@Override
public Collection<IntervalsSource> pullUpDisjunctions() {
return Collections.singleton(this);
}
@Override
public int hashCode() {
// Not using matchesProvider and valueFetcherProvider, which don't identify this source but are only used to avoid scanning linearly
// through all documents
return Objects.hash(in, indexAnalyzer);
}
@Override
public boolean equals(Object other) {
if (other == null || getClass() != other.getClass()) {
return false;
}
SourceIntervalsSource that = (SourceIntervalsSource) other;
// Not using matchesProvider and valueFetcherProvider, which don't identify this source but are only used to avoid scanning linearly
// through all documents
return in.equals(that.in) && indexAnalyzer.equals(that.indexAnalyzer);
}
@Override
public String toString() {
return in.toString();
}
}
| SourceIntervalsSource |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantNanosEvaluator.java | {
"start": 1152,
"end": 4301
} | class ____ implements EvalOperator.ExpressionEvaluator {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DateExtractConstantNanosEvaluator.class);
private final Source source;
private final EvalOperator.ExpressionEvaluator value;
private final ChronoField chronoField;
private final ZoneId zone;
private final DriverContext driverContext;
private Warnings warnings;
public DateExtractConstantNanosEvaluator(Source source, EvalOperator.ExpressionEvaluator value,
ChronoField chronoField, ZoneId zone, DriverContext driverContext) {
this.source = source;
this.value = value;
this.chronoField = chronoField;
this.zone = zone;
this.driverContext = driverContext;
}
@Override
public Block eval(Page page) {
try (LongBlock valueBlock = (LongBlock) value.eval(page)) {
LongVector valueVector = valueBlock.asVector();
if (valueVector == null) {
return eval(page.getPositionCount(), valueBlock);
}
return eval(page.getPositionCount(), valueVector).asBlock();
}
}
@Override
public long baseRamBytesUsed() {
long baseRamBytesUsed = BASE_RAM_BYTES_USED;
baseRamBytesUsed += value.baseRamBytesUsed();
return baseRamBytesUsed;
}
public LongBlock eval(int positionCount, LongBlock valueBlock) {
try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
switch (valueBlock.getValueCount(p)) {
case 0:
result.appendNull();
continue position;
case 1:
break;
default:
warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value"));
result.appendNull();
continue position;
}
long value = valueBlock.getLong(valueBlock.getFirstValueIndex(p));
result.appendLong(DateExtract.processNanos(value, this.chronoField, this.zone));
}
return result.build();
}
}
public LongVector eval(int positionCount, LongVector valueVector) {
try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) {
position: for (int p = 0; p < positionCount; p++) {
long value = valueVector.getLong(p);
result.appendLong(p, DateExtract.processNanos(value, this.chronoField, this.zone));
}
return result.build();
}
}
@Override
public String toString() {
return "DateExtractConstantNanosEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]";
}
@Override
public void close() {
Releasables.closeExpectNoException(value);
}
private Warnings warnings() {
if (warnings == null) {
this.warnings = Warnings.createWarnings(
driverContext.warningsMode(),
source.source().getLineNumber(),
source.source().getColumnNumber(),
source.text()
);
}
return warnings;
}
static | DateExtractConstantNanosEvaluator |
java | junit-team__junit5 | junit-jupiter-engine/src/main/java/org/junit/jupiter/engine/execution/ExtensionContextSupplier.java | {
"start": 1616,
"end": 2489
} | class ____ implements ExtensionContextSupplier {
private final ExtensionContext currentExtensionContext;
private final ExtensionContext legacyExtensionContext;
private ScopeBasedExtensionContextSupplier(ExtensionContext currentExtensionContext,
ExtensionContext legacyExtensionContext) {
this.currentExtensionContext = currentExtensionContext;
this.legacyExtensionContext = legacyExtensionContext;
}
@Override
public ExtensionContext get(TestInstantiationAwareExtension extension) {
return isTestScoped(extension) ? currentExtensionContext : legacyExtensionContext;
}
private boolean isTestScoped(TestInstantiationAwareExtension extension) {
ExtensionContext rootContext = legacyExtensionContext.getRoot();
return extension.getTestInstantiationExtensionContextScope(rootContext) == TEST_METHOD;
}
}
}
| ScopeBasedExtensionContextSupplier |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EditLogInputStream.java | {
"start": 1385,
"end": 6413
} | class ____ implements Closeable {
private FSEditLogOp cachedOp = null;
/**
* Returns the name of the currently active underlying stream. The default
* implementation returns the same value as getName unless overridden by the
* subclass.
*
* @return String name of the currently active underlying stream
*/
public String getCurrentStreamName() {
return getName();
}
/**
* @return the name of the EditLogInputStream
*/
public abstract String getName();
/**
* @return the first transaction which will be found in this stream
*/
public abstract long getFirstTxId();
/**
* @return the last transaction which will be found in this stream
*/
public abstract long getLastTxId();
/**
* Close the stream.
* @throws IOException if an error occurred while closing
*/
@Override
public abstract void close() throws IOException;
/**
* Read an operation from the stream
* @return an operation from the stream or null if at end of stream
* @throws IOException if there is an error reading from the stream
*/
public FSEditLogOp readOp() throws IOException {
FSEditLogOp ret;
if (cachedOp != null) {
ret = cachedOp;
cachedOp = null;
return ret;
}
return nextOp();
}
/**
* Position the stream so that a valid operation can be read from it with
* readOp().
*
* This method can be used to skip over corrupted sections of edit logs.
*/
public void resync() {
if (cachedOp != null) {
return;
}
cachedOp = nextValidOp();
}
/**
* Get the next operation from the stream storage.
*
* @return an operation from the stream or null if at end of stream
* @throws IOException if there is an error reading from the stream
*/
protected abstract FSEditLogOp nextOp() throws IOException;
/**
* Go through the next operation from the stream storage.
* @return the txid of the next operation.
*/
protected long scanNextOp() throws IOException {
FSEditLogOp next = readOp();
return next != null ? next.txid : HdfsServerConstants.INVALID_TXID;
}
/**
* Get the next valid operation from the stream storage.
*
* This is exactly like nextOp, except that we attempt to skip over damaged
* parts of the edit log
*
* @return an operation from the stream or null if at end of stream
*/
protected FSEditLogOp nextValidOp() {
// This is a trivial implementation which just assumes that any errors mean
// that there is nothing more of value in the log. Subclasses that support
// error recovery will want to override this.
try {
return nextOp();
} catch (Throwable e) {
return null;
}
}
/**
* Skip edit log operations up to a given transaction ID, or until the
* end of the edit log is reached.
*
* After this function returns, the next call to readOp will return either
* end-of-file (null) or a transaction with a txid equal to or higher than
* the one we asked for.
*
* @param txid The transaction ID to read up until.
* @return Returns true if we found a transaction ID greater than
* or equal to 'txid' in the log.
*/
public boolean skipUntil(long txid) throws IOException {
while (true) {
FSEditLogOp op = readOp();
if (op == null) {
return false;
}
if (op.getTransactionId() >= txid) {
cachedOp = op;
return true;
}
}
}
/**
* return the cachedOp, and reset it to null.
*/
FSEditLogOp getCachedOp() {
FSEditLogOp op = this.cachedOp;
cachedOp = null;
return op;
}
/**
* Get the layout version of the data in the stream.
* @return the layout version of the ops in the stream.
* @throws IOException if there is an error reading the version
*/
public abstract int getVersion(boolean verifyVersion) throws IOException;
/**
* Get the "position" of in the stream. This is useful for
* debugging and operational purposes.
*
* Different stream types can have a different meaning for
* what the position is. For file streams it means the byte offset
* from the start of the file.
*
* @return the position in the stream
*/
public abstract long getPosition();
/**
* Return the size of the current edits log or -1 if unknown.
*
* @return long size of the current edits log or -1 if unknown
*/
public abstract long length() throws IOException;
/**
* Return true if this stream is in progress, false if it is finalized.
*/
public abstract boolean isInProgress();
/**
* Set the maximum opcode size in bytes.
*/
public abstract void setMaxOpSize(int maxOpSize);
/**
* Returns true if we are currently reading the log from a local disk or an
* even faster data source (e.g. a byte buffer).
*/
public abstract boolean isLocalLog();
@Override
public String toString() {
return getName();
}
}
| EditLogInputStream |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/boot/model/relational/ColumnOrderingStrategyStandard.java | {
"start": 2495,
"end": 3459
} | class ____ implements Comparator<Column> {
private final Metadata metadata;
protected ColumnComparator(Metadata metadata) {
this.metadata = metadata;
}
@Override
public int compare(Column o1, Column o2) {
final var dialect = metadata.getDatabase().getDialect();
final int physicalSizeInBytes1 = physicalSizeInBytes(
o1.getSqlTypeCode( metadata ),
o1.getColumnSize( dialect, metadata ),
metadata
);
final int physicalSizeInBytes2 = physicalSizeInBytes(
o2.getSqlTypeCode( metadata ),
o2.getColumnSize( dialect, metadata ),
metadata
);
int cmp = Integer.compare( Integer.max( physicalSizeInBytes1, 4 ), Integer.max( physicalSizeInBytes2, 4 ) );
if ( cmp != 0 ) {
return cmp;
}
cmp = Boolean.compare( physicalSizeInBytes1 > 2048, physicalSizeInBytes2 > 2048 );
if ( cmp != 0 ) {
return cmp;
}
return o1.getName().compareTo( o2.getName() );
}
}
protected static | ColumnComparator |
java | alibaba__druid | core/src/test/java/com/alibaba/druid/bvt/pool/Case3.java | {
"start": 895,
"end": 2614
} | class ____ extends PoolTestCase {
// public void test_0() throws Exception {
// DruidDataSource dataSource = new DruidDataSource();
// dataSource.setUrl("jdbc:mock:xxx");
// dataSource.setPoolPreparedStatements(true);
// dataSource.close();
// }
//
// public void test_1() throws Exception {
// DruidDataSource dataSource = new DruidDataSource();
// dataSource.setUrl("jdbc:mock:xxx");
//
// Connection conn = dataSource.getConnection();
// Statement stmt = conn.createStatement();
// ResultSet rs = stmt.executeQuery("SELECT 1");
// rs.next();
//
// conn.close();
//
// assertEquals(true, stmt.isClosed());
// assertEquals(true, rs.isClosed());
//
// rs.close();
// stmt.close();
//
// dataSource.close();
// }
public void test_2() throws Exception {
DruidDataSource dataSource = new DruidDataSource();
dataSource.setUrl("jdbc:mock:xxx");
String sql = "SELECT 1";
Connection conn = dataSource.getConnection();
PreparedStatement stmt = conn.prepareStatement(sql);
ResultSet rs = stmt.executeQuery();
rs.next();
Statement mockStmt = stmt.unwrap(Statement.class);
assertEquals(false, mockStmt.isClosed());
conn.close();
assertEquals(true, mockStmt.isClosed());
assertEquals(true, stmt.isClosed());
assertEquals(true, rs.isClosed());
rs.close();
stmt.close();
SQLException error = null;
try {
stmt.execute("SELECT 1");
} catch (SQLException ex) {
error = ex;
}
assertNotNull(error);
dataSource.close();
}
}
| Case3 |
java | alibaba__nacos | istio/src/main/java/com/alibaba/nacos/istio/model/VirtualService.java | {
"start": 3139,
"end": 3489
} | class ____ {
private String uri;
public String getUri() {
return uri;
}
public void setUri(String uri) {
this.uri = uri;
}
}
public static | Rewrite |
java | apache__kafka | clients/src/test/java/org/apache/kafka/common/utils/TimerTest.java | {
"start": 1064,
"end": 4937
} | class ____ {
private final MockTime time = new MockTime();
@Test
public void testTimerUpdate() {
Timer timer = time.timer(500);
assertEquals(500, timer.timeoutMs());
assertEquals(500, timer.remainingMs());
assertEquals(0, timer.elapsedMs());
time.sleep(100);
timer.update();
assertEquals(500, timer.timeoutMs());
assertEquals(400, timer.remainingMs());
assertEquals(100, timer.elapsedMs());
time.sleep(400);
timer.update(time.milliseconds());
assertEquals(500, timer.timeoutMs());
assertEquals(0, timer.remainingMs());
assertEquals(500, timer.elapsedMs());
assertTrue(timer.isExpired());
// Going over the expiration is fine and the elapsed time can exceed
// the initial timeout. However, remaining time should be stuck at 0.
time.sleep(200);
timer.update(time.milliseconds());
assertTrue(timer.isExpired());
assertEquals(500, timer.timeoutMs());
assertEquals(0, timer.remainingMs());
assertEquals(700, timer.elapsedMs());
}
@Test
public void testTimerUpdateAndReset() {
Timer timer = time.timer(500);
timer.sleep(200);
assertEquals(500, timer.timeoutMs());
assertEquals(300, timer.remainingMs());
assertEquals(200, timer.elapsedMs());
timer.updateAndReset(400);
assertEquals(400, timer.timeoutMs());
assertEquals(400, timer.remainingMs());
assertEquals(0, timer.elapsedMs());
timer.sleep(400);
assertTrue(timer.isExpired());
timer.updateAndReset(200);
assertEquals(200, timer.timeoutMs());
assertEquals(200, timer.remainingMs());
assertEquals(0, timer.elapsedMs());
assertFalse(timer.isExpired());
}
@Test
public void testTimerResetUsesCurrentTime() {
Timer timer = time.timer(500);
timer.sleep(200);
assertEquals(300, timer.remainingMs());
assertEquals(200, timer.elapsedMs());
time.sleep(300);
timer.reset(500);
assertEquals(500, timer.remainingMs());
timer.update();
assertEquals(200, timer.remainingMs());
}
@Test
public void testTimerResetDeadlineUsesCurrentTime() {
Timer timer = time.timer(500);
timer.sleep(200);
assertEquals(300, timer.remainingMs());
assertEquals(200, timer.elapsedMs());
timer.sleep(100);
timer.resetDeadline(time.milliseconds() + 200);
assertEquals(200, timer.timeoutMs());
assertEquals(200, timer.remainingMs());
timer.sleep(100);
assertEquals(200, timer.timeoutMs());
assertEquals(100, timer.remainingMs());
}
@Test
public void testTimeoutOverflow() {
Timer timer = time.timer(Long.MAX_VALUE);
assertEquals(Long.MAX_VALUE - timer.currentTimeMs(), timer.remainingMs());
assertEquals(0, timer.elapsedMs());
}
@Test
public void testNonMonotonicUpdate() {
Timer timer = time.timer(100);
long currentTimeMs = timer.currentTimeMs();
timer.update(currentTimeMs - 1);
assertEquals(currentTimeMs, timer.currentTimeMs());
assertEquals(100, timer.remainingMs());
assertEquals(0, timer.elapsedMs());
}
@Test
public void testTimerSleep() {
Timer timer = time.timer(500);
long currentTimeMs = timer.currentTimeMs();
timer.sleep(200);
assertEquals(time.milliseconds(), timer.currentTimeMs());
assertEquals(currentTimeMs + 200, timer.currentTimeMs());
timer.sleep(1000);
assertEquals(time.milliseconds(), timer.currentTimeMs());
assertEquals(currentTimeMs + 500, timer.currentTimeMs());
assertTrue(timer.isExpired());
}
}
| TimerTest |
java | elastic__elasticsearch | libs/x-content/src/test/java/org/elasticsearch/xcontent/ConstructingObjectParserTests.java | {
"start": 14116,
"end": 27499
} | class ____ implements ToXContentObject {
@Nullable
final String animal;
@Nullable
final Integer vegetable;
int mineral;
int fruit;
String a;
String b;
String c;
boolean d;
HasCtorArguments(@Nullable String animal, @Nullable Integer vegetable) {
this.animal = animal;
this.vegetable = vegetable;
}
public void setMineral(int mineral) {
this.mineral = mineral;
}
public void setFruit(int fruit) {
this.fruit = fruit;
}
public void setA(String a) {
if (a != null && a.length() > 9) {
throw new IllegalArgumentException("[a] must be less than 10 characters in length but was [" + a + "]");
}
this.a = a;
}
public void setB(String b) {
this.b = b;
}
public void setC(String c) {
this.c = c;
}
public void setD(boolean d) {
this.d = d;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("animal", animal);
builder.field("vegetable", vegetable);
if (mineral != 0) { // We're just using 0 as the default because it is easy for testing
builder.field("mineral", mineral);
}
if (fruit != 0) {
builder.field("fruit", fruit);
}
if (a != null) {
builder.field("a", a);
}
if (b != null) {
builder.field("b", b);
}
if (c != null) {
builder.field("c", c);
}
if (d) {
builder.field("d", d);
}
builder.endObject();
return builder;
}
/*
* It is normal just to declare a single PARSER but we use a couple of different parsers for testing so we have all of these. Don't
* this this style is normal just because it is in the test.
*/
public static final ConstructingObjectParser<HasCtorArguments, Void> PARSER = buildParser(true, true);
public static final ConstructingObjectParser<HasCtorArguments, Void> PARSER_VEGETABLE_OPTIONAL = buildParser(true, false);
public static final ConstructingObjectParser<HasCtorArguments, Void> PARSER_ALL_OPTIONAL = buildParser(false, false);
public static final List<ConstructingObjectParser<HasCtorArguments, Void>> ALL_PARSERS = List.of(
PARSER,
PARSER_VEGETABLE_OPTIONAL,
PARSER_ALL_OPTIONAL
);
public static final ConstructingObjectParser<HasCtorArguments, Integer> PARSER_INT_CONTEXT = buildContextParser();
private static ConstructingObjectParser<HasCtorArguments, Void> buildParser(boolean animalRequired, boolean vegetableRequired) {
ConstructingObjectParser<HasCtorArguments, Void> parser = new ConstructingObjectParser<>(
"has_required_arguments",
args -> new HasCtorArguments((String) args[0], (Integer) args[1])
);
parser.declareString(animalRequired ? constructorArg() : optionalConstructorArg(), new ParseField("animal"));
parser.declareInt(vegetableRequired ? constructorArg() : optionalConstructorArg(), new ParseField("vegetable"));
declareSetters(parser);
return parser;
}
private static ConstructingObjectParser<HasCtorArguments, Integer> buildContextParser() {
ConstructingObjectParser<HasCtorArguments, Integer> parser = new ConstructingObjectParser<>(
"has_required_arguments",
false,
(args, ctx) -> new HasCtorArguments((String) args[0], ctx)
);
parser.declareString(constructorArg(), new ParseField("animal"));
declareSetters(parser);
return parser;
}
private static void declareSetters(ConstructingObjectParser<HasCtorArguments, ?> parser) {
parser.declareInt(HasCtorArguments::setMineral, new ParseField("mineral"));
parser.declareInt(HasCtorArguments::setFruit, new ParseField("fruit"));
parser.declareString(HasCtorArguments::setA, new ParseField("a"));
parser.declareString(HasCtorArguments::setB, new ParseField("b"));
parser.declareString(HasCtorArguments::setC, new ParseField("c"));
parser.declareBoolean(HasCtorArguments::setD, new ParseField("d"));
}
}
public void testParseNamedObject() throws IOException {
XContentParser parser = createParser(
JsonXContent.jsonXContent,
"{\"named\": { \"a\": {} }, \"named_in_constructor\": { \"b\": {} } }"
);
NamedObjectHolder h = NamedObjectHolder.PARSER.apply(parser, null);
assertThat(h.named, hasSize(1));
assertEquals("a", h.named.get(0).name);
assertThat(h.namedInConstructor, hasSize(1));
assertEquals("b", h.namedInConstructor.get(0).name);
assertFalse(h.namedSuppliedInOrder);
}
public void testParseNamedObjectInOrder() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ {"a": {}} ], "named_in_constructor": [ {"b": {}} ]}""");
NamedObjectHolder h = NamedObjectHolder.PARSER.apply(parser, null);
assertThat(h.named, hasSize(1));
assertEquals("a", h.named.get(0).name);
assertThat(h.namedInConstructor, hasSize(1));
assertEquals("b", h.namedInConstructor.get(0).name);
assertTrue(h.namedSuppliedInOrder);
}
public void testParseNamedObjectTwoFieldsInArray() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ {"a": {}, "b": {}}], "named_in_constructor": [ {"c": {}} ]}""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
assertThat(
e.getCause().getMessage(),
containsString(
"[named] can be a single object with any number of fields "
+ "or an array where each entry is an object with a single field"
)
);
}
public void testParseNamedObjectTwoFieldsInArrayConstructorArg() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ {"a": {}}], "named_in_constructor": [ {"c": {}, "d": {}} ]}""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
assertThat(
e.getCause().getMessage(),
containsString(
"[named_in_constructor] can be a single object with any number of fields "
+ "or an array where each entry is an object with a single field"
)
);
}
public void testParseNamedObjectNoFieldsInArray() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ {} ], \"named_in_constructor\": [ {\"a\": {}} ]}");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
assertThat(
e.getCause().getMessage(),
containsString(
"[named] can be a single object with any number of fields "
+ "or an array where each entry is an object with a single field"
)
);
}
public void testParseNamedObjectNoFieldsInArrayConstructorArg() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ {\"a\": {}} ], \"named_in_constructor\": [ {} ]}");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
assertThat(
e.getCause().getMessage(),
containsString(
"[named_in_constructor] can be a single object with any number of fields "
+ "or an array where each entry is an object with a single field"
)
);
}
public void testParseNamedObjectJunkInArray() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ "junk" ], "named_in_constructor": [ {"a": {}} ]}""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
assertThat(
e.getCause().getMessage(),
containsString(
"[named] can be a single object with any number of fields "
+ "or an array where each entry is an object with a single field"
)
);
}
public void testParseNamedObjectJunkInArrayConstructorArg() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{"named": [ {"a": {}} ], "named_in_constructor": [ "junk" ]}""");
XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
assertThat(
e.getCause().getMessage(),
containsString(
"[named_in_constructor] can be a single object with any number of fields "
+ "or an array where each entry is an object with a single field"
)
);
}
public void testParseNamedObjectInOrderNotSupported() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{
"named": [ { "a": {} } ],
"named_in_constructor": {
"b": {}
}
}""");
// Create our own parser for this test so we can disable support for the "ordered" mode specified by the array above
@SuppressWarnings("unchecked")
ConstructingObjectParser<NamedObjectHolder, Void> objectParser = new ConstructingObjectParser<>(
"named_object_holder",
a -> new NamedObjectHolder(((List<NamedObject>) a[0]))
);
objectParser.declareNamedObjects(
ConstructingObjectParser.constructorArg(),
NamedObject.PARSER,
new ParseField("named_in_constructor")
);
objectParser.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named"));
// Now firing the xml through it fails
XContentParseException e = expectThrows(XContentParseException.class, () -> objectParser.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]"));
assertEquals("[named] doesn't support arrays. Use a single object with multiple fields.", e.getCause().getMessage());
}
public void testParseNamedObjectInOrderNotSupportedConstructorArg() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent, """
{
"named": {
"a": {}
},
"named_in_constructor": [ { "b": {} } ]
}""");
// Create our own parser for this test so we can disable support for the "ordered" mode specified by the array above
@SuppressWarnings("unchecked")
ConstructingObjectParser<NamedObjectHolder, Void> objectParser = new ConstructingObjectParser<>(
"named_object_holder",
a -> new NamedObjectHolder(((List<NamedObject>) a[0]))
);
objectParser.declareNamedObjects(
ConstructingObjectParser.constructorArg(),
NamedObject.PARSER,
new ParseField("named_in_constructor")
);
objectParser.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named"));
// Now firing the xml through it fails
XContentParseException e = expectThrows(XContentParseException.class, () -> objectParser.apply(parser, null));
assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named_in_constructor]"));
assertThat(
e.getCause().getMessage(),
containsString("[named_in_constructor] doesn't support arrays. Use a single object with multiple fields.")
);
}
static | HasCtorArguments |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java | {
"start": 50386,
"end": 55141
} | enum ____ {
SINGLETON,
DATA_PARALLELISM,
TASK_LEVEL_PARALLELISM
}
}
/**
* Context object used while generating a local plan. Currently only collects the driver factories as well as
* maintains information how many driver instances should be created for a given driver.
*/
public record LocalExecutionPlannerContext(
String description,
List<DriverFactory> driverFactories,
Holder<DriverParallelism> driverParallelism,
QueryPragmas queryPragmas,
BigArrays bigArrays,
BlockFactory blockFactory,
FoldContext foldCtx,
PlannerSettings plannerSettings,
boolean timeSeries,
IndexedByShardId<? extends ShardContext> shardContexts
) {
void addDriverFactory(DriverFactory driverFactory) {
driverFactories.add(driverFactory);
}
void driverParallelism(DriverParallelism parallelism) {
driverParallelism.set(parallelism);
}
DataPartitioning.AutoStrategy autoPartitioningStrategy() {
return timeSeries ? DataPartitioning.AutoStrategy.DEFAULT_TIME_SERIES : DataPartitioning.AutoStrategy.DEFAULT;
}
int pageSize(PhysicalPlan node, Integer estimatedRowSize) {
if (estimatedRowSize == null) {
throw new IllegalStateException("estimated row size hasn't been set");
}
if (estimatedRowSize == 0) {
throw new IllegalStateException("estimated row size can't be 0");
}
if (queryPragmas.pageSize() != 0) {
return queryPragmas.pageSize();
}
if (timeSeries && node instanceof EsQueryExec) {
return TimeSeriesSourceOperator.pageSize(estimatedRowSize, plannerSettings.valuesLoadingJumboSize().getBytes());
} else {
return Math.max(SourceOperator.MIN_TARGET_PAGE_SIZE, SourceOperator.TARGET_PAGE_SIZE / estimatedRowSize);
}
}
}
record DriverSupplier(
String description,
String clusterName,
String nodeName,
BigArrays bigArrays,
BlockFactory blockFactory,
IndexedByShardId<? extends ShardContext> shardContexts,
PhysicalOperation physicalOperation,
TimeValue statusInterval,
Settings settings
) implements Function<String, Driver>, Describable {
@Override
public Driver apply(String sessionId) {
SourceOperator source = null;
List<Operator> operators = new ArrayList<>();
SinkOperator sink = null;
boolean success = false;
var localBreakerSettings = new LocalCircuitBreaker.SizeSettings(settings);
final var localBreaker = new LocalCircuitBreaker(
blockFactory.breaker(),
localBreakerSettings.overReservedBytes(),
localBreakerSettings.maxOverReservedBytes()
);
var driverContext = new DriverContext(bigArrays, blockFactory.newChildFactory(localBreaker), description);
try {
source = physicalOperation.source(driverContext);
physicalOperation.operators(operators, driverContext);
sink = physicalOperation.sink(driverContext);
success = true;
return new Driver(
sessionId,
description,
clusterName,
nodeName,
System.currentTimeMillis(),
System.nanoTime(),
driverContext,
physicalOperation::describe,
source,
operators,
sink,
statusInterval,
localBreaker
);
} finally {
if (false == success) {
Releasables.close(source, () -> Releasables.close(operators), sink, localBreaker);
}
}
}
@Override
public String describe() {
return physicalOperation.describe();
}
}
record DriverFactory(DriverSupplier driverSupplier, DriverParallelism driverParallelism) implements Describable {
@Override
public String describe() {
return "DriverFactory(instances = "
+ driverParallelism.instanceCount()
+ ", type = "
+ driverParallelism.type()
+ ")\n"
+ driverSupplier.describe();
}
}
/**
* Plan representation that is geared towards execution on a single node
*/
public static | Type |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/web/server/OAuth2ClientSpecTests.java | {
"start": 15395,
"end": 15642
} | class ____ {
@GetMapping("/")
String home(@RegisteredOAuth2AuthorizedClient("github") OAuth2AuthorizedClient authorizedClient) {
return "home";
}
}
@Configuration
@EnableWebFlux
@EnableWebFluxSecurity
static | AuthorizedClientController |
java | spring-projects__spring-boot | module/spring-boot-webmvc/src/main/java/org/springframework/boot/webmvc/autoconfigure/error/ErrorMvcAutoConfiguration.java | {
"start": 11460,
"end": 11542
} | class ____ ErrorController
* MVC beans are preserved when using AOP.
*/
static | of |
java | lettuce-io__lettuce-core | src/test/java/io/lettuce/core/resource/DefaultEventLoopGroupProviderUnitTests.java | {
"start": 427,
"end": 1318
} | class ____ {
@Test
void shutdownTerminatedEventLoopGroup() {
DefaultEventLoopGroupProvider sut = new DefaultEventLoopGroupProvider(1);
NioEventLoopGroup eventLoopGroup = sut.allocate(NioEventLoopGroup.class);
Future<Boolean> shutdown = sut.release(eventLoopGroup, 10, 10, TimeUnit.MILLISECONDS);
TestFutures.awaitOrTimeout(shutdown);
Future<Boolean> shutdown2 = sut.release(eventLoopGroup, 10, 10, TimeUnit.MILLISECONDS);
TestFutures.awaitOrTimeout(shutdown2);
}
@Test
void getAfterShutdown() {
DefaultEventLoopGroupProvider sut = new DefaultEventLoopGroupProvider(1);
TestFutures.awaitOrTimeout(sut.shutdown(10, 10, TimeUnit.MILLISECONDS));
assertThatThrownBy(() -> sut.allocate(NioEventLoopGroup.class)).isInstanceOf(IllegalStateException.class);
}
}
| DefaultEventLoopGroupProviderUnitTests |
java | quarkusio__quarkus | devtools/gradle/gradle-application-plugin/src/main/java/io/quarkus/devtools/project/buildfile/GradleGroovyProjectBuildFile.java | {
"start": 238,
"end": 1710
} | class ____ extends GradleProjectBuildFile {
static final String BUILD_GRADLE_PATH = "build.gradle";
static final String SETTINGS_GRADLE_PATH = "settings.gradle";
public GradleGroovyProjectBuildFile(Project project, ExtensionCatalog catalog) {
super(project, catalog);
}
@Override
String getSettingsGradlePath() {
return SETTINGS_GRADLE_PATH;
}
@Override
String getBuildGradlePath() {
return BUILD_GRADLE_PATH;
}
@Override
protected boolean importBom(ArtifactCoords coords) {
return importBomInModel(getModel(), toBomImportCoords(coords));
}
@Override
protected boolean addDependency(ArtifactCoords coords, boolean managed) {
return addDependencyInModel(getModel(), coords, managed);
}
@Override
public BuildTool getBuildTool() {
return BuildTool.GRADLE;
}
static boolean importBomInModel(Model model, ArtifactCoords coords) {
return addDependencyInModel(model,
String.format(" implementation %s%n",
createDependencyCoordinatesString(coords, false, containsProperty(coords) ? '\"' : '\'')));
}
static boolean addDependencyInModel(Model model, ArtifactCoords coords, boolean managed) {
return addDependencyInModel(model,
String.format(" implementation %s%n", createDependencyCoordinatesString(coords, managed, '\'')));
}
}
| GradleGroovyProjectBuildFile |
java | junit-team__junit5 | junit-platform-engine/src/main/java/org/junit/platform/engine/support/discovery/EngineDiscoveryRequestResolver.java | {
"start": 15590,
"end": 16345
} | class ____<T extends TestDescriptor> implements InitializationContext<T> {
private final EngineDiscoveryRequest request;
private final T engineDescriptor;
private final Predicate<String> classNameFilter;
private final Predicate<String> packageFilter;
private final DiscoveryIssueReporter issueReporter;
DefaultInitializationContext(EngineDiscoveryRequest request, T engineDescriptor,
DiscoveryIssueReporter issueReporter) {
this.request = request;
this.engineDescriptor = engineDescriptor;
this.classNameFilter = buildClassNamePredicate(request);
this.packageFilter = buildPackagePredicate(request);
this.issueReporter = issueReporter;
}
/**
* Build a {@link Predicate} for fully qualified | DefaultInitializationContext |
java | apache__rocketmq | remoting/src/main/java/org/apache/rocketmq/remoting/protocol/body/HARuntimeInfo.java | {
"start": 982,
"end": 2416
} | class ____ extends RemotingSerializable {
private boolean master;
private long masterCommitLogMaxOffset;
private int inSyncSlaveNums;
private List<HAConnectionRuntimeInfo> haConnectionInfo = new ArrayList<>();
private HAClientRuntimeInfo haClientRuntimeInfo = new HAClientRuntimeInfo();
public boolean isMaster() {
return this.master;
}
public void setMaster(boolean master) {
this.master = master;
}
public long getMasterCommitLogMaxOffset() {
return this.masterCommitLogMaxOffset;
}
public void setMasterCommitLogMaxOffset(long masterCommitLogMaxOffset) {
this.masterCommitLogMaxOffset = masterCommitLogMaxOffset;
}
public int getInSyncSlaveNums() {
return this.inSyncSlaveNums;
}
public void setInSyncSlaveNums(int inSyncSlaveNums) {
this.inSyncSlaveNums = inSyncSlaveNums;
}
public List<HAConnectionRuntimeInfo> getHaConnectionInfo() {
return this.haConnectionInfo;
}
public void setHaConnectionInfo(List<HAConnectionRuntimeInfo> haConnectionInfo) {
this.haConnectionInfo = haConnectionInfo;
}
public HAClientRuntimeInfo getHaClientRuntimeInfo() {
return this.haClientRuntimeInfo;
}
public void setHaClientRuntimeInfo(HAClientRuntimeInfo haClientRuntimeInfo) {
this.haClientRuntimeInfo = haClientRuntimeInfo;
}
public static | HARuntimeInfo |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/query/sql/ClassIdNativeQueryTest.java | {
"start": 2624,
"end": 3603
} | class ____ {
@Id
private String fileid;
@Id
private String versionid;
@Column(name = "description")
private String description;
@Column(name = "title")
private String title;
public Book() {
}
public Book(final String fileid, final String versionid) {
this.fileid = fileid;
this.versionid = versionid;
}
public String getFileId() {
return fileid;
}
public void setFileId(final String fileid) {
this.fileid = fileid;
}
public String getVersionid() {
return versionid;
}
public void setVersionid(final String versionid) {
this.versionid = versionid;
}
public String getDescription() {
return description;
}
public void setDescription(final String description) {
this.description = description;
}
public String getTitle() {
return title;
}
public void setTitle(final String title) {
this.title = title;
}
}
@Entity(name = "Publisher")
@Table(name = "PUBLISHER_T")
public static | Book |
java | spring-projects__spring-framework | spring-test/src/test/java/org/springframework/test/context/bean/override/mockito/MockitoBeanByNameLookupTestClassScopedExtensionContextIntegrationTests.java | {
"start": 4921,
"end": 5291
} | class ____ {
@Bean("field")
ExampleService bean1() {
return new RealExampleService("Hello Field");
}
@Bean("nestedField")
ExampleService bean2() {
return new RealExampleService("Hello Nested Field");
}
@Bean("prototypeScoped")
@Scope("prototype")
ExampleService bean3() {
return new RealExampleService("Hello Prototype Field");
}
}
}
| Config |
java | netty__netty | codec-http2/src/test/java/io/netty/handler/codec/http2/HpackDecoderTest.java | {
"start": 2638,
"end": 33220
} | class ____ {
private HpackDecoder hpackDecoder;
private Http2Headers mockHeaders;
private static String hex(String s) {
return StringUtil.toHexString(s.getBytes());
}
private void decode(String encoded) throws Http2Exception {
byte[] b = StringUtil.decodeHexDump(encoded);
ByteBuf in = Unpooled.wrappedBuffer(b);
try {
hpackDecoder.decode(0, in, mockHeaders, true);
} finally {
in.release();
}
}
@BeforeEach
public void setUp() {
hpackDecoder = new HpackDecoder(8192);
mockHeaders = mock(Http2Headers.class);
}
@Test
public void testDecodeULE128IntMax() throws Http2Exception {
byte[] input = {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x07};
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
assertEquals(MAX_VALUE, decodeULE128(in, 0));
} finally {
in.release();
}
}
@Test
public void testDecodeULE128IntOverflow1() throws Http2Exception {
byte[] input = {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x07};
final ByteBuf in = Unpooled.wrappedBuffer(input);
final int readerIndex = in.readerIndex();
try {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decodeULE128(in, 1);
}
});
} finally {
assertEquals(readerIndex, in.readerIndex());
in.release();
}
}
@Test
public void testDecodeULE128IntOverflow2() throws Http2Exception {
byte[] input = {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x08};
final ByteBuf in = Unpooled.wrappedBuffer(input);
final int readerIndex = in.readerIndex();
try {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decodeULE128(in, 0);
}
});
} finally {
assertEquals(readerIndex, in.readerIndex());
in.release();
}
}
@Test
public void testDecodeULE128LongMax() throws Http2Exception {
byte[] input = {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF,
(byte) 0xFF, (byte) 0x7F};
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
assertEquals(Long.MAX_VALUE, decodeULE128(in, 0L));
} finally {
in.release();
}
}
@Test
public void testDecodeULE128LongOverflow1() throws Http2Exception {
byte[] input = {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF,
(byte) 0xFF, (byte) 0xFF};
final ByteBuf in = Unpooled.wrappedBuffer(input);
final int readerIndex = in.readerIndex();
try {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decodeULE128(in, 0L);
}
});
} finally {
assertEquals(readerIndex, in.readerIndex());
in.release();
}
}
@Test
public void testDecodeULE128LongOverflow2() throws Http2Exception {
byte[] input = {(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF,
(byte) 0xFF, (byte) 0x7F};
final ByteBuf in = Unpooled.wrappedBuffer(input);
final int readerIndex = in.readerIndex();
try {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decodeULE128(in, 1L);
}
});
} finally {
assertEquals(readerIndex, in.readerIndex());
in.release();
}
}
@Test
public void testSetTableSizeWithMaxUnsigned32BitValueSucceeds() throws Http2Exception {
byte[] input = {(byte) 0x3F, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x0E};
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
final long expectedHeaderSize = 4026531870L; // based on the input above
hpackDecoder.setMaxHeaderTableSize(expectedHeaderSize);
hpackDecoder.decode(0, in, mockHeaders, true);
assertEquals(expectedHeaderSize, hpackDecoder.getMaxHeaderTableSize());
} finally {
in.release();
}
}
@Test
public void testSetTableSizeOverLimitFails() throws Http2Exception {
byte[] input = {(byte) 0x3F, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0x0E};
final ByteBuf in = Unpooled.wrappedBuffer(input);
try {
hpackDecoder.setMaxHeaderTableSize(4026531870L - 1); // based on the input above ... 1 less than is above.
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(0, in, mockHeaders, true);
}
});
} finally {
in.release();
}
}
@Test
public void testLiteralHuffmanEncodedWithEmptyNameAndValue() throws Http2Exception {
byte[] input = {0, (byte) 0x80, 0};
ByteBuf in = Unpooled.wrappedBuffer(input);
try {
hpackDecoder.decode(0, in, mockHeaders, true);
verify(mockHeaders, times(1)).add(EMPTY_STRING, EMPTY_STRING);
} finally {
in.release();
}
}
@Test
public void testLiteralHuffmanEncodedWithPaddingGreaterThan7Throws() throws Http2Exception {
byte[] input = {0, (byte) 0x81, -1};
final ByteBuf in = Unpooled.wrappedBuffer(input);
try {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(0, in, mockHeaders, true);
}
});
} finally {
in.release();
}
}
@Test
public void testLiteralHuffmanEncodedWithDecodingEOSThrows() throws Http2Exception {
byte[] input = {0, (byte) 0x84, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};
final ByteBuf in = Unpooled.wrappedBuffer(input);
try {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(0, in, mockHeaders, true);
}
});
} finally {
in.release();
}
}
@Test
public void testLiteralHuffmanEncodedWithPaddingNotCorrespondingToMSBThrows() throws Http2Exception {
byte[] input = {0, (byte) 0x81, 0};
final ByteBuf in = Unpooled.wrappedBuffer(input);
try {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(0, in, mockHeaders, true);
}
});
} finally {
in.release();
}
}
@Test
public void testIncompleteIndex() throws Http2Exception {
byte[] compressed = StringUtil.decodeHexDump("FFF0");
final ByteBuf in = Unpooled.wrappedBuffer(compressed);
try {
assertEquals(2, in.readableBytes());
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(0, in, mockHeaders, true);
}
});
} finally {
in.release();
}
}
@Test
public void testUnusedIndex() throws Http2Exception {
// Index 0 is not used
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode("80");
}
});
}
@Test
public void testIllegalIndex() throws Http2Exception {
// Index larger than the header table
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode("FF00");
}
});
}
@Test
public void testInsidiousIndex() throws Http2Exception {
// Insidious index so the last shift causes sign overflow
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode("FF8080808007");
}
});
}
@Test
public void testDynamicTableSizeUpdate() throws Http2Exception {
decode("20");
assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("3FE11F");
assertEquals(4096, hpackDecoder.getMaxHeaderTableSize());
}
@Test
public void testDynamicTableSizeUpdateRequired() throws Http2Exception {
hpackDecoder.setMaxHeaderTableSize(32);
decode("3F00");
assertEquals(31, hpackDecoder.getMaxHeaderTableSize());
}
@Test
public void testIllegalDynamicTableSizeUpdate() throws Http2Exception {
// max header table size = MAX_HEADER_TABLE_SIZE + 1
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode("3FE21F");
}
});
}
@Test
public void testInsidiousMaxDynamicTableSize() throws Http2Exception {
hpackDecoder.setMaxHeaderTableSize(MAX_VALUE);
// max header table size sign overflow
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode("3FE1FFFFFF07");
}
});
}
@Test
public void testMaxValidDynamicTableSize() throws Http2Exception {
hpackDecoder.setMaxHeaderTableSize(MAX_VALUE);
String baseValue = "3FE1FFFFFF0";
for (int i = 0; i < 7; ++i) {
decode(baseValue + i);
}
}
@Test
public void testReduceMaxDynamicTableSize() throws Http2Exception {
hpackDecoder.setMaxHeaderTableSize(0);
assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
decode("2081");
}
@Test
public void testTooLargeDynamicTableSizeUpdate() throws Http2Exception {
hpackDecoder.setMaxHeaderTableSize(0);
assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode("21"); // encoder max header table size not small enough
}
});
}
@Test
public void testMissingDynamicTableSizeUpdate() throws Http2Exception {
hpackDecoder.setMaxHeaderTableSize(0);
assertEquals(0, hpackDecoder.getMaxHeaderTableSize());
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode("81");
}
});
}
@Test
public void testDynamicTableSizeUpdateAfterTheBeginingOfTheBlock() throws Http2Exception {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode("8120");
}
});
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode("813FE11F");
}
});
}
@Test
public void testLiteralWithIncrementalIndexingWithEmptyName() throws Http2Exception {
decode("400005" + hex("value"));
verify(mockHeaders, times(1)).add(EMPTY_STRING, of("value"));
}
@Test
public void testLiteralWithIncrementalIndexingCompleteEviction() throws Http2Exception {
// Verify indexed host header
decode("4004" + hex("name") + "05" + hex("value"));
verify(mockHeaders).add(of("name"), of("value"));
verifyNoMoreInteractions(mockHeaders);
reset(mockHeaders);
StringBuilder sb = new StringBuilder();
for (int i = 0; i < 4096; i++) {
sb.append('a');
}
String value = sb.toString();
sb = new StringBuilder();
sb.append("417F811F");
for (int i = 0; i < 4096; i++) {
sb.append("61"); // 'a'
}
decode(sb.toString());
verify(mockHeaders).add(of(":authority"), of(value));
MockingDetails details = mockingDetails(mockHeaders);
for (Invocation invocation : details.getInvocations()) {
Method method = invocation.getMethod();
if ("authority".equals(method.getName())
&& invocation.getArguments().length == 0) {
invocation.markVerified();
} else if ("contains".equals(method.getName())
&& invocation.getArguments().length == 1
&& invocation.getArgument(0).equals(of(":authority"))) {
invocation.markVerified();
}
}
verifyNoMoreInteractions(mockHeaders);
reset(mockHeaders);
// Verify next header is inserted at index 62
decode("4004" + hex("name") + "05" + hex("value") + "BE");
verify(mockHeaders, times(2)).add(of("name"), of("value"));
verifyNoMoreInteractions(mockHeaders);
}
@Test
public void testLiteralWithIncrementalIndexingWithLargeValue() throws Http2Exception {
// Ignore header that exceeds max header size
final StringBuilder sb = new StringBuilder();
sb.append("4004");
sb.append(hex("name"));
sb.append("7F813F");
for (int i = 0; i < 8192; i++) {
sb.append("61"); // 'a'
}
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(sb.toString());
}
});
}
@Test
public void testLiteralWithoutIndexingWithEmptyName() throws Http2Exception {
decode("000005" + hex("value"));
verify(mockHeaders, times(1)).add(EMPTY_STRING, of("value"));
}
@Test
public void testLiteralWithoutIndexingWithLargeName() throws Http2Exception {
// Ignore header name that exceeds max header size
final StringBuilder sb = new StringBuilder();
sb.append("007F817F");
for (int i = 0; i < 16384; i++) {
sb.append("61"); // 'a'
}
sb.append("00");
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(sb.toString());
}
});
}
@Test
public void testLiteralWithoutIndexingWithLargeValue() throws Http2Exception {
// Ignore header that exceeds max header size
final StringBuilder sb = new StringBuilder();
sb.append("0004");
sb.append(hex("name"));
sb.append("7F813F");
for (int i = 0; i < 8192; i++) {
sb.append("61"); // 'a'
}
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(sb.toString());
}
});
}
@Test
public void testLiteralNeverIndexedWithEmptyName() throws Http2Exception {
decode("100005" + hex("value"));
verify(mockHeaders, times(1)).add(EMPTY_STRING, of("value"));
}
@Test
public void testLiteralNeverIndexedWithLargeName() throws Http2Exception {
// Ignore header name that exceeds max header size
final StringBuilder sb = new StringBuilder();
sb.append("107F817F");
for (int i = 0; i < 16384; i++) {
sb.append("61"); // 'a'
}
sb.append("00");
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(sb.toString());
}
});
}
@Test
public void testLiteralNeverIndexedWithLargeValue() throws Http2Exception {
// Ignore header that exceeds max header size
final StringBuilder sb = new StringBuilder();
sb.append("1004");
sb.append(hex("name"));
sb.append("7F813F");
for (int i = 0; i < 8192; i++) {
sb.append("61"); // 'a'
}
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
decode(sb.toString());
}
});
}
@Test
public void testDecodeLargerThanMaxHeaderListSizeUpdatesDynamicTable() throws Http2Exception {
final ByteBuf in = Unpooled.buffer(300);
try {
hpackDecoder.setMaxHeaderListSize(200);
HpackEncoder hpackEncoder = new HpackEncoder(true);
// encode headers that are slightly larger than maxHeaderListSize
Http2Headers toEncode = new DefaultHttp2Headers();
toEncode.add("test_1", "1");
toEncode.add("test_2", "2");
toEncode.add("long", String.format("%0100d", 0).replace('0', 'A'));
toEncode.add("test_3", "3");
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
// decode the headers, we should get an exception
final Http2Headers decoded = new DefaultHttp2Headers();
assertThrows(Http2Exception.HeaderListSizeException.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(1, in, decoded, true);
}
});
// but the dynamic table should have been updated, so that later blocks
// can refer to earlier headers
in.clear();
// 0x80, "indexed header field representation"
// index 62, the first (most recent) dynamic table entry
in.writeByte(0x80 | 62);
Http2Headers decoded2 = new DefaultHttp2Headers();
hpackDecoder.decode(1, in, decoded2, true);
Http2Headers golden = new DefaultHttp2Headers();
golden.add("test_3", "3");
assertEquals(golden, decoded2);
} finally {
in.release();
}
}
@Test
public void testDecodeCountsNamesOnlyOnce() throws Http2Exception {
ByteBuf in = Unpooled.buffer(200);
try {
hpackDecoder.setMaxHeaderListSize(3500);
HpackEncoder hpackEncoder = new HpackEncoder(true);
// encode headers that are slightly larger than maxHeaderListSize
Http2Headers toEncode = new DefaultHttp2Headers();
toEncode.add(String.format("%03000d", 0).replace('0', 'f'), "value");
toEncode.add("accept", "value");
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
Http2Headers decoded = new DefaultHttp2Headers();
hpackDecoder.decode(1, in, decoded, true);
assertEquals(2, decoded.size());
} finally {
in.release();
}
}
@Test
public void testAccountForHeaderOverhead() throws Exception {
final ByteBuf in = Unpooled.buffer(100);
try {
String headerName = "12345";
String headerValue = "56789";
long headerSize = headerName.length() + headerValue.length();
hpackDecoder.setMaxHeaderListSize(headerSize);
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new DefaultHttp2Headers();
toEncode.add(headerName, headerValue);
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
final Http2Headers decoded = new DefaultHttp2Headers();
// SETTINGS_MAX_HEADER_LIST_SIZE is big enough for the header to fit...
assertThat(hpackDecoder.getMaxHeaderListSize()).isGreaterThanOrEqualTo(headerSize);
// ... but decode should fail because we add some overhead for each header entry
assertThrows(Http2Exception.HeaderListSizeException.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(1, in, decoded, true);
}
});
} finally {
in.release();
}
}
@Test
public void testIncompleteHeaderFieldRepresentation() throws Http2Exception {
// Incomplete Literal Header Field with Incremental Indexing
byte[] input = {(byte) 0x40};
final ByteBuf in = Unpooled.wrappedBuffer(input);
try {
assertThrows(Http2Exception.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(0, in, mockHeaders, true);
}
});
} finally {
in.release();
}
}
@Test
public void unknownPseudoHeader() throws Exception {
final ByteBuf in = Unpooled.buffer(200);
try {
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new DefaultHttp2Headers(false);
toEncode.add(":test", "1");
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
final Http2Headers decoded = new DefaultHttp2Headers(true);
assertThrows(Http2Exception.StreamException.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(1, in, decoded, true);
}
});
} finally {
in.release();
}
}
@Test
public void disableHeaderValidation() throws Exception {
ByteBuf in = Unpooled.buffer(200);
try {
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new DefaultHttp2Headers(false);
toEncode.add(":test", "1");
toEncode.add(":status", "200");
toEncode.add(":method", "GET");
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
Http2Headers decoded = new DefaultHttp2Headers(false);
hpackDecoder.decode(1, in, decoded, false);
assertEquals("1", decoded.valueIterator(":test").next().toString());
assertEquals("200", decoded.status().toString());
assertEquals("GET", decoded.method().toString());
} finally {
in.release();
}
}
@Test
public void requestPseudoHeaderInResponse() throws Exception {
final ByteBuf in = Unpooled.buffer(200);
try {
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new DefaultHttp2Headers();
toEncode.add(":status", "200");
toEncode.add(":method", "GET");
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
final Http2Headers decoded = new DefaultHttp2Headers();
assertThrows(Http2Exception.StreamException.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(1, in, decoded, true);
}
});
} finally {
in.release();
}
}
@Test
public void responsePseudoHeaderInRequest() throws Exception {
final ByteBuf in = Unpooled.buffer(200);
try {
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new DefaultHttp2Headers();
toEncode.add(":method", "GET");
toEncode.add(":status", "200");
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
final Http2Headers decoded = new DefaultHttp2Headers();
assertThrows(Http2Exception.StreamException.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(1, in, decoded, true);
}
});
} finally {
in.release();
}
}
@Test
public void pseudoHeaderAfterRegularHeader() throws Exception {
final ByteBuf in = Unpooled.buffer(200);
try {
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new InOrderHttp2Headers();
toEncode.add("test", "1");
toEncode.add(":method", "GET");
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
final Http2Headers decoded = new DefaultHttp2Headers();
Http2Exception.StreamException e = assertThrows(Http2Exception.StreamException.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(3, in, decoded, true);
}
});
assertEquals(3, e.streamId());
assertEquals(PROTOCOL_ERROR, e.error());
} finally {
in.release();
}
}
@ParameterizedTest(name = "{displayName} [{index}] name={0} value={1}")
@CsvSource(value = {"upgrade,protocol1", "connection,close", "keep-alive,timeout=5", "proxy-connection,close",
"transfer-encoding,chunked", "te,something-else"})
public void receivedConnectionHeader(String name, String value) throws Exception {
final ByteBuf in = Unpooled.buffer(200);
try {
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new InOrderHttp2Headers();
toEncode.add(":method", "GET");
toEncode.add(name, value);
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
final Http2Headers decoded = new DefaultHttp2Headers();
Http2Exception.StreamException e = assertThrows(Http2Exception.StreamException.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(3, in, decoded, true);
}
});
assertEquals(3, e.streamId());
assertEquals(PROTOCOL_ERROR, e.error());
} finally {
in.release();
}
}
@Test
public void failedValidationDoesntCorruptHpack() throws Exception {
final ByteBuf in1 = Unpooled.buffer(200);
ByteBuf in2 = Unpooled.buffer(200);
try {
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new DefaultHttp2Headers();
toEncode.add(":method", "GET");
toEncode.add(":status", "200");
toEncode.add("foo", "bar");
hpackEncoder.encodeHeaders(1, in1, toEncode, NEVER_SENSITIVE);
final Http2Headers decoded = new DefaultHttp2Headers();
Http2Exception.StreamException expected =
assertThrows(Http2Exception.StreamException.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(1, in1, decoded, true);
}
});
assertEquals(1, expected.streamId());
// Do it again, this time without validation, to make sure the HPACK state is still sane.
decoded.clear();
hpackEncoder.encodeHeaders(1, in2, toEncode, NEVER_SENSITIVE);
hpackDecoder.decode(1, in2, decoded, false);
assertEquals(3, decoded.size());
assertEquals("GET", decoded.method().toString());
assertEquals("200", decoded.status().toString());
assertEquals("bar", decoded.get("foo").toString());
} finally {
in1.release();
in2.release();
}
}
@ParameterizedTest
@CsvSource(value = {":method,''", ":scheme,''", ":authority,''", ":path,''"})
public void testPseudoHeaderEmptyValidationEnabled(String name, String value) throws Exception {
final ByteBuf in = Unpooled.buffer(200);
try {
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new InOrderHttp2Headers();
toEncode.add(name, value);
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
final Http2Headers decoded = new DefaultHttp2Headers();
Http2Exception.StreamException e = assertThrows(Http2Exception.StreamException.class, new Executable() {
@Override
public void execute() throws Throwable {
hpackDecoder.decode(3, in, decoded, true);
}
});
assertEquals(3, e.streamId());
assertEquals(PROTOCOL_ERROR, e.error());
} finally {
in.release();
}
}
@ParameterizedTest
@CsvSource(value = {":method,''", ":scheme,''", ":authority,''", ":path,''"})
public void testPseudoHeaderEmptyValidationDisabled(String name, String value) throws Exception {
final ByteBuf in = Unpooled.buffer(200);
try {
HpackEncoder hpackEncoder = new HpackEncoder(true);
Http2Headers toEncode = new InOrderHttp2Headers();
toEncode.add(name, value);
hpackEncoder.encodeHeaders(1, in, toEncode, NEVER_SENSITIVE);
final Http2Headers decoded = new DefaultHttp2Headers(false);
hpackDecoder.decode(3, in, decoded, true);
assertSame(AsciiString.EMPTY_STRING, decoded.get(name));
} finally {
in.release();
}
}
}
| HpackDecoderTest |
java | quarkusio__quarkus | extensions/quartz/deployment/src/test/java/io/quarkus/quartz/test/OverdueExecutionTest.java | {
"start": 1581,
"end": 2063
} | class ____ {
@Scheduled(identity = "overdueJob", every = "0.1s", overdueGracePeriod = "0.1s")
void overdueJob() throws InterruptedException {
}
@Scheduled(identity = "tolerantJob", every = "0.1s", overdueGracePeriod = "2H")
void tolerantJob() throws InterruptedException {
}
@Scheduled(identity = "defaultGracePeriodJob", every = "0.1s")
void defaultGracePeriodJob() throws InterruptedException {
}
}
}
| Jobs |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/beanvalidation/ValidTitle.java | {
"start": 854,
"end": 999
} | interface ____ {
String message() default "foo";
Class<?>[] groups() default {};
Class<? extends Payload>[] payload() default {};
}
| ValidTitle |
java | google__guava | android/guava-tests/test/com/google/common/primitives/ImmutableDoubleArrayTest.java | {
"start": 19639,
"end": 20076
} | class ____
extends TestDoubleListGenerator {
@Override
protected List<Double> create(Double[] elements) {
Double[] prefix = {86.0, 99.0};
Double[] all = concat(prefix, elements);
return makeArray(all).subArray(2, elements.length + 2).asList();
}
}
@J2ktIncompatible
@GwtIncompatible // used only from suite
@AndroidIncompatible
public static final | ImmutableDoubleArrayTailSubListAsListGenerator |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/web/multipart/MultipartException.java | {
"start": 1052,
"end": 1503
} | class ____ extends NestedRuntimeException {
/**
* Constructor for MultipartException.
* @param msg the detail message
*/
public MultipartException(String msg) {
super(msg);
}
/**
* Constructor for MultipartException.
* @param msg the detail message
* @param cause the root cause from the multipart parsing API in use
*/
public MultipartException(String msg, @Nullable Throwable cause) {
super(msg, cause);
}
}
| MultipartException |
java | apache__kafka | server-common/src/main/java/org/apache/kafka/server/util/ShutdownableThread.java | {
"start": 1114,
"end": 4974
} | class ____ extends Thread {
public final String logPrefix;
protected final Logger log;
private final boolean isInterruptible;
private final CountDownLatch shutdownInitiated = new CountDownLatch(1);
private final CountDownLatch shutdownComplete = new CountDownLatch(1);
private volatile boolean isStarted = false;
public ShutdownableThread(String name) {
this(name, true);
}
public ShutdownableThread(String name, boolean isInterruptible) {
this(name, isInterruptible, "[" + name + "]: ");
}
@SuppressWarnings("this-escape")
public ShutdownableThread(String name, boolean isInterruptible, String logPrefix) {
super(name);
this.isInterruptible = isInterruptible;
this.logPrefix = logPrefix;
log = new LogContext(logPrefix).logger(this.getClass());
this.setDaemon(false);
}
public void shutdown() throws InterruptedException {
initiateShutdown();
awaitShutdown();
}
public boolean isShutdownInitiated() {
return shutdownInitiated.getCount() == 0;
}
public boolean isShutdownComplete() {
return shutdownComplete.getCount() == 0;
}
public boolean isStarted() {
return isStarted;
}
/**
* @return true if there has been an unexpected error and the thread shut down
*/
// mind that run() might set both when we're shutting down the broker
// but the return value of this function at that point wouldn't matter
public boolean isThreadFailed() {
return isShutdownComplete() && !isShutdownInitiated();
}
/**
* @return true if the thread hasn't initiated shutdown already
*/
public boolean initiateShutdown() {
synchronized (this) {
if (isRunning()) {
log.info("Shutting down");
shutdownInitiated.countDown();
if (isInterruptible)
interrupt();
return true;
} else
return false;
}
}
/**
* After calling initiateShutdown(), use this API to wait until the shutdown is complete.
*/
public void awaitShutdown() throws InterruptedException {
if (!isShutdownInitiated())
throw new IllegalStateException("initiateShutdown() was not called before awaitShutdown()");
else {
if (isStarted)
shutdownComplete.await();
log.info("Shutdown completed");
}
}
/**
* Causes the current thread to wait until the shutdown is initiated,
* or the specified waiting time elapses.
*
* @param timeout wait time in units.
* @param unit TimeUnit value for the wait time.
*/
public void pause(long timeout, TimeUnit unit) throws InterruptedException {
if (shutdownInitiated.await(timeout, unit))
log.trace("shutdownInitiated latch count reached zero. Shutdown called.");
}
/**
* This method is repeatedly invoked until the thread shuts down or this method throws an exception
*/
public abstract void doWork();
public void run() {
isStarted = true;
log.info("Starting");
try {
while (isRunning())
doWork();
} catch (FatalExitError e) {
shutdownInitiated.countDown();
shutdownComplete.countDown();
log.error("Stopped due to fatal error with exit code {}", e.statusCode(), e);
Exit.exit(e.statusCode());
} catch (Throwable e) {
if (isRunning())
log.error("Error due to", e);
} finally {
shutdownComplete.countDown();
}
log.info("Stopped");
}
public boolean isRunning() {
return !isShutdownInitiated();
}
}
| ShutdownableThread |
java | spring-projects__spring-boot | core/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/condition/ConditionalOnMissingFilterBean.java | {
"start": 1515,
"end": 1718
} | class ____ to the return type of
* the factory method or the type of the {@link Filter} if the bean is a
* {@link FilterRegistrationBean}:
*
* <pre class="code">
* @Configuration
* public | defaults |
java | redisson__redisson | redisson/src/main/java/org/redisson/reactive/RedissonSetMultimapCacheReactive.java | {
"start": 893,
"end": 1632
} | class ____<K, V> {
private final RSetMultimap<K, V> instance;
private final CommandReactiveExecutor commandExecutor;
private final RedissonReactiveClient redisson;
public RedissonSetMultimapCacheReactive(RSetMultimap<K, V> instance, CommandReactiveExecutor commandExecutor,
RedissonReactiveClient redisson) {
this.instance = instance;
this.redisson = redisson;
this.commandExecutor = commandExecutor;
}
public RSetReactive<V> get(K key) {
RSet<V> set = instance.get(key);
return ReactiveProxyBuilder.create(commandExecutor, set, new RedissonSetReactive<>(set, redisson), RSetReactive.class);
}
}
| RedissonSetMultimapCacheReactive |
java | apache__flink | flink-dstl/flink-dstl-dfs/src/main/java/org/apache/flink/changelog/fs/ChangelogStorageMetricGroup.java | {
"start": 1439,
"end": 5109
} | class ____ extends ProxyMetricGroup<MetricGroup> {
private static final int WINDOW_SIZE = 1000;
private final Counter uploadsCounter;
private final Counter uploadFailuresCounter;
private final Histogram uploadBatchSizes;
private final Histogram uploadSizes;
private final Histogram uploadLatenciesNanos;
private final Histogram attemptsPerUpload;
private final Histogram totalAttemptsPerUpload;
public ChangelogStorageMetricGroup(MetricGroup parent) {
super(parent);
this.uploadsCounter =
counter(CHANGELOG_STORAGE_NUM_UPLOAD_REQUESTS, new ThreadSafeSimpleCounter());
this.uploadBatchSizes =
histogram(
CHANGELOG_STORAGE_UPLOAD_BATCH_SIZES,
new DescriptiveStatisticsHistogram(WINDOW_SIZE));
this.attemptsPerUpload =
histogram(
CHANGELOG_STORAGE_ATTEMPTS_PER_UPLOAD,
new DescriptiveStatisticsHistogram(WINDOW_SIZE));
this.totalAttemptsPerUpload =
histogram(
CHANGELOG_STORAGE_TOTAL_ATTEMPTS_PER_UPLOAD,
new DescriptiveStatisticsHistogram(WINDOW_SIZE));
this.uploadSizes =
histogram(
CHANGELOG_STORAGE_UPLOAD_SIZES,
new DescriptiveStatisticsHistogram(WINDOW_SIZE));
this.uploadLatenciesNanos =
histogram(
CHANGELOG_STORAGE_UPLOAD_LATENCIES_NANOS,
new DescriptiveStatisticsHistogram(WINDOW_SIZE));
this.uploadFailuresCounter =
counter(CHANGELOG_STORAGE_NUM_UPLOAD_FAILURES, new ThreadSafeSimpleCounter());
}
public Counter getUploadsCounter() {
return uploadsCounter;
}
public Counter getUploadFailuresCounter() {
return uploadFailuresCounter;
}
public Histogram getAttemptsPerUpload() {
return attemptsPerUpload;
}
public Histogram getTotalAttemptsPerUpload() {
return totalAttemptsPerUpload;
}
/**
* The number of upload tasks (coming from one or more writers, i.e. backends/tasks) that were
* grouped together and form a single upload resulting in a single file.
*/
public Histogram getUploadBatchSizes() {
return uploadBatchSizes;
}
public Histogram getUploadSizes() {
return uploadSizes;
}
public Histogram getUploadLatenciesNanos() {
return uploadLatenciesNanos;
}
public void registerUploadQueueSizeGauge(Gauge<Integer> gauge) {
gauge(CHANGELOG_STORAGE_UPLOAD_QUEUE_SIZE, gauge);
}
private static final String PREFIX = "ChangelogStorage";
public static final String CHANGELOG_STORAGE_NUM_UPLOAD_REQUESTS =
PREFIX + ".numberOfUploadRequests";
public static final String CHANGELOG_STORAGE_NUM_UPLOAD_FAILURES =
PREFIX + ".numberOfUploadFailures";
public static final String CHANGELOG_STORAGE_UPLOAD_SIZES = PREFIX + ".uploadSizes";
public static final String CHANGELOG_STORAGE_UPLOAD_LATENCIES_NANOS =
PREFIX + ".uploadLatenciesNanos";
public static final String CHANGELOG_STORAGE_ATTEMPTS_PER_UPLOAD =
PREFIX + ".attemptsPerUpload";
public static final String CHANGELOG_STORAGE_TOTAL_ATTEMPTS_PER_UPLOAD =
PREFIX + ".totalAttemptsPerUpload";
public static final String CHANGELOG_STORAGE_UPLOAD_BATCH_SIZES = PREFIX + ".uploadBatchSizes";
public static final String CHANGELOG_STORAGE_UPLOAD_QUEUE_SIZE = PREFIX + ".uploadQueueSize";
}
| ChangelogStorageMetricGroup |
java | junit-team__junit5 | junit-jupiter-params/src/main/java/org/junit/jupiter/params/support/ParameterNameAndArgument.java | {
"start": 616,
"end": 866
} | class ____ {@link Named} for technical reasons, it
* serves a different purpose than {@link Named#of(String, Object)} and is only
* used for internal display name processing.
*
* @since 6.0
*/
@API(status = INTERNAL, since = "6.0")
public | implements |
java | alibaba__nacos | common/src/main/java/com/alibaba/nacos/common/utils/ClassUtils.java | {
"start": 6758,
"end": 6812
} | class ____.
*
* @param cls Instances of the | name |
java | ReactiveX__RxJava | src/main/java/io/reactivex/rxjava3/internal/operators/maybe/MaybeConcatIterable.java | {
"start": 2018,
"end": 6028
} | class ____<T>
extends AtomicInteger
implements MaybeObserver<T>, Subscription {
private static final long serialVersionUID = 3520831347801429610L;
final Subscriber<? super T> downstream;
final AtomicLong requested;
final AtomicReference<Object> current;
final SequentialDisposable disposables;
final Iterator<? extends MaybeSource<? extends T>> sources;
long produced;
ConcatMaybeObserver(Subscriber<? super T> actual, Iterator<? extends MaybeSource<? extends T>> sources) {
this.downstream = actual;
this.sources = sources;
this.requested = new AtomicLong();
this.disposables = new SequentialDisposable();
this.current = new AtomicReference<>(NotificationLite.COMPLETE); // as if a previous completed
}
@Override
public void request(long n) {
if (SubscriptionHelper.validate(n)) {
BackpressureHelper.add(requested, n);
drain();
}
}
@Override
public void cancel() {
disposables.dispose();
}
@Override
public void onSubscribe(Disposable d) {
disposables.replace(d);
}
@Override
public void onSuccess(T value) {
current.lazySet(value);
drain();
}
@Override
public void onError(Throwable e) {
downstream.onError(e);
}
@Override
public void onComplete() {
current.lazySet(NotificationLite.COMPLETE);
drain();
}
@SuppressWarnings("unchecked")
void drain() {
if (getAndIncrement() != 0) {
return;
}
AtomicReference<Object> c = current;
Subscriber<? super T> a = downstream;
Disposable cancelled = disposables;
for (;;) {
if (cancelled.isDisposed()) {
c.lazySet(null);
return;
}
Object o = c.get();
if (o != null) {
boolean goNextSource;
if (o != NotificationLite.COMPLETE) {
long p = produced;
if (p != requested.get()) {
produced = p + 1;
c.lazySet(null);
goNextSource = true;
a.onNext((T)o);
} else {
goNextSource = false;
}
} else {
goNextSource = true;
c.lazySet(null);
}
if (goNextSource && !cancelled.isDisposed()) {
boolean b;
try {
b = sources.hasNext();
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
if (b) {
MaybeSource<? extends T> source;
try {
source = Objects.requireNonNull(sources.next(), "The source Iterator returned a null MaybeSource");
} catch (Throwable ex) {
Exceptions.throwIfFatal(ex);
a.onError(ex);
return;
}
source.subscribe(this);
} else {
a.onComplete();
}
}
}
if (decrementAndGet() == 0) {
break;
}
}
}
}
}
| ConcatMaybeObserver |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/tenantid/State.java | {
"start": 315,
"end": 433
} | class ____ {
public boolean deleted;
public @TenantId String tenantId;
public @UpdateTimestamp Instant updated;
}
| State |
java | quarkusio__quarkus | independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/parameters/LocatableResourcePathParamExtractor.java | {
"start": 148,
"end": 515
} | class ____ implements ParameterExtractor {
private final String name;
public LocatableResourcePathParamExtractor(String name) {
this.name = name;
}
@Override
public Object extractParameter(ResteasyReactiveRequestContext context) {
return context.getResourceLocatorPathParam(name, false);
}
}
| LocatableResourcePathParamExtractor |
java | quarkusio__quarkus | integration-tests/main/src/test/java/io/quarkus/it/main/ValidatorTestCase.java | {
"start": 493,
"end": 2507
} | class ____ {
@RegisterExtension
static QuarkusTestExtension quarkusTestExtension = new QuarkusTestExtension();
@TestHTTPResource("validator/manual")
URL uri;
@Test
public void testManualValidationFailed() throws Exception {
URLConnection connection = uri.openConnection();
connection.setDoOutput(true);
connection.setRequestProperty("Content-Type", "application/json");
byte[] body = Json.createObjectBuilder()
.add("name", "Stuart")
.add("email", "aa")
.build().toString().getBytes(StandardCharsets.UTF_8);
try (OutputStream o = connection.getOutputStream()) {
o.write(body);
}
ByteArrayOutputStream out = new ByteArrayOutputStream();
try (InputStream in = connection.getInputStream()) {
byte[] buf = new byte[100];
int r;
while ((r = in.read(buf)) > 0) {
out.write(buf, 0, r);
}
}
Assertions.assertEquals("failed:email", new String(out.toByteArray(), "UTF-8"));
}
@Test
public void testManualValidationPassed() throws Exception {
URLConnection connection = uri.openConnection();
connection.setDoOutput(true);
connection.setRequestProperty("Content-Type", "application/json");
byte[] body = Json.createObjectBuilder()
.add("name", "Stuart")
.add("email", "test@test.com")
.build().toString().getBytes(StandardCharsets.UTF_8);
try (OutputStream o = connection.getOutputStream()) {
o.write(body);
}
InputStream in = connection.getInputStream();
byte[] buf = new byte[100];
int r;
ByteArrayOutputStream out = new ByteArrayOutputStream();
while ((r = in.read(buf)) > 0) {
out.write(buf, 0, r);
}
Assertions.assertEquals("passed", new String(out.toByteArray(), "UTF-8"));
}
}
| ValidatorTestCase |
java | elastic__elasticsearch | x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialGridFunctionTestCase.java | {
"start": 1640,
"end": 1755
} | class ____ extends AbstractScalarFunctionTestCase {
@FunctionalInterface
protected | SpatialGridFunctionTestCase |
java | elastic__elasticsearch | x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/SpatialDocValuesExtraction.java | {
"start": 5411,
"end": 12396
} | class ____ extends PhysicalOptimizerRules.ParameterizedOptimizerRule<
AggregateExec,
LocalPhysicalOptimizerContext> {
@Override
protected PhysicalPlan rule(AggregateExec aggregate, LocalPhysicalOptimizerContext ctx) {
var foundAttributes = new HashSet<FieldAttribute>();
PhysicalPlan plan = aggregate.transformDown(UnaryExec.class, exec -> {
if (exec instanceof AggregateExec agg) {
var orderedAggregates = new ArrayList<NamedExpression>();
var changedAggregates = false;
for (NamedExpression aggExpr : agg.aggregates()) {
if (aggExpr instanceof Alias as && as.child() instanceof SpatialAggregateFunction af) {
if (af.field() instanceof FieldAttribute fieldAttribute
&& allowedForDocValues(fieldAttribute, ctx.searchStats(), agg, foundAttributes)) {
// We need to both mark the field to load differently, and change the spatial function to know to use it
foundAttributes.add(fieldAttribute);
changedAggregates = true;
orderedAggregates.add(
as.replaceChild(af.withFieldExtractPreference(MappedFieldType.FieldExtractPreference.DOC_VALUES))
);
} else {
orderedAggregates.add(aggExpr);
}
} else {
orderedAggregates.add(aggExpr);
}
}
if (changedAggregates) {
exec = agg.withAggregates(orderedAggregates);
}
}
if (exec instanceof EvalExec evalExec) {
List<Alias> fields = evalExec.fields();
List<Alias> changed = fields.stream()
.map(
f -> (Alias) f.transformDown(BinarySpatialFunction.class, s -> withDocValues(s, foundAttributes))
.transformDown(SpatialGridFunction.class, s -> withDocValues(s, foundAttributes))
)
.toList();
if (changed.equals(fields) == false) {
exec = new EvalExec(exec.source(), exec.child(), changed);
}
}
if (exec instanceof FilterExec filterExec) {
// Note that ST_CENTROID does not support shapes, but SpatialRelatesFunction does, so when we extend the centroid
// to support shapes, we need to consider loading shape doc-values for both centroid and relates (ST_INTERSECTS)
var condition = filterExec.condition()
.transformDown(BinarySpatialFunction.class, s -> withDocValues(s, foundAttributes))
.transformDown(SpatialGridFunction.class, s -> withDocValues(s, foundAttributes));
if (filterExec.condition().equals(condition) == false) {
exec = new FilterExec(filterExec.source(), filterExec.child(), condition);
}
}
if (exec instanceof FieldExtractExec fieldExtractExec) {
// Tell the field extractor that it should extract the field from doc-values instead of source values
var attributesToExtract = fieldExtractExec.attributesToExtract();
Set<Attribute> docValuesAttributes = new HashSet<>();
for (Attribute found : foundAttributes) {
if (attributesToExtract.contains(found)) {
docValuesAttributes.add(found);
}
}
if (docValuesAttributes.isEmpty() == false) {
exec = fieldExtractExec.withDocValuesAttributes(docValuesAttributes);
}
}
return exec;
});
return plan;
}
private BinarySpatialFunction withDocValues(BinarySpatialFunction spatial, Set<FieldAttribute> foundAttributes) {
// Only update the docValues flags if the field is found in the attributes
boolean foundLeft = foundField(spatial.left(), foundAttributes);
boolean foundRight = foundField(spatial.right(), foundAttributes);
return foundLeft || foundRight ? spatial.withDocValues(foundLeft, foundRight) : spatial;
}
private SpatialGridFunction withDocValues(SpatialGridFunction spatial, Set<FieldAttribute> foundAttributes) {
// Only update the docValues flags if the field is found in the attributes
boolean found = foundField(spatial.spatialField(), foundAttributes);
return found ? spatial.withDocValues(found) : spatial;
}
private boolean hasFieldAttribute(BinarySpatialFunction spatial, Set<FieldAttribute> foundAttributes) {
return foundField(spatial.left(), foundAttributes) || foundField(spatial.right(), foundAttributes);
}
private boolean foundField(Expression expression, Set<FieldAttribute> foundAttributes) {
return expression instanceof FieldAttribute field && foundAttributes.contains(field);
}
/**
* This function disallows the use of more than one field for doc-values extraction in the same spatial relation function.
* This is because comparing two doc-values fields is not supported in the current implementation.
* This also rejects fields that do not have doc-values in the field mapping, as well as rejecting geo_shape and cartesian_shape
* because we do not yet support full doc-values extraction for non-point geometries. We do have aggregations that support
* shapes, and to prevent them triggering this rule on non-point geometries we have to explicitly disallow them here.
*/
private boolean allowedForDocValues(
FieldAttribute fieldAttribute,
SearchStats stats,
AggregateExec agg,
Set<FieldAttribute> foundAttributes
) {
if (stats.hasDocValues(fieldAttribute.fieldName()) == false) {
return false;
}
if (fieldAttribute.dataType() == DataType.GEO_SHAPE || fieldAttribute.dataType() == DataType.CARTESIAN_SHAPE) {
return false;
}
var candidateDocValuesAttributes = new HashSet<>(foundAttributes);
candidateDocValuesAttributes.add(fieldAttribute);
var spatialRelatesAttributes = new HashSet<FieldAttribute>();
agg.forEachExpressionDown(SpatialRelatesFunction.class, relatesFunction -> {
candidateDocValuesAttributes.forEach(candidate -> {
if (hasFieldAttribute(relatesFunction, Set.of(candidate))) {
spatialRelatesAttributes.add(candidate);
}
});
});
// Disallow more than one spatial field to be extracted using doc-values (for now)
return spatialRelatesAttributes.size() < 2;
}
}
| SpatialDocValuesExtraction |
java | junit-team__junit5 | jupiter-tests/src/test/java/org/junit/jupiter/api/extension/ExtensionComposabilityTests.java | {
"start": 1737,
"end": 5343
} | class ____ {
@Test
void ensureJupiterExtensionApisAreComposable() {
// 1) Find all existing top-level Extension APIs
List<Class<?>> extensionApis = findExtensionApis();
// 2) Determine which methods we expect the kitchen sink to implement...
// @formatter:off
List<Method> expectedMethods = extensionApis.stream()
.map(Class::getDeclaredMethods)
.flatMap(Arrays::stream)
.filter(not(Method::isSynthetic))
.filter(not(where(Method::getModifiers, Modifier::isStatic)))
.toList();
List<String> expectedMethodSignatures = expectedMethods.stream()
.map(this::methodSignature)
.sorted()
.toList();
List<String> expectedMethodNames = expectedMethods.stream()
.map(Method::getName)
.distinct()
.sorted()
.toList();
// @formatter:on
// 3) Dynamically implement all Extension APIs
Object dynamicKitchenSinkExtension = Proxy.newProxyInstance(getClass().getClassLoader(),
extensionApis.toArray(Class[]::new), (proxy, method, args) -> null);
// 4) Determine what ended up in the kitchen sink...
// @formatter:off
List<Method> actualMethods = Arrays.stream(dynamicKitchenSinkExtension.getClass().getDeclaredMethods())
.filter(ModifierSupport::isNotStatic)
.toList();
List<String> actualMethodSignatures = actualMethods.stream()
.map(this::methodSignature)
.distinct()
.sorted()
.collect(toCollection(ArrayList::new));
List<String> actualMethodNames = actualMethods.stream()
.map(Method::getName)
.distinct()
.sorted()
.collect(toCollection(ArrayList::new));
// @formatter:on
// 5) Remove methods from java.lang.Object
actualMethodSignatures.remove("equals(Object)");
actualMethodSignatures.remove("hashCode()");
actualMethodSignatures.remove("toString()");
actualMethodNames.remove("equals");
actualMethodNames.remove("hashCode");
actualMethodNames.remove("toString");
// 6) Verify our expectations
// @formatter:off
assertAll(
() -> assertThat(actualMethodSignatures).isEqualTo(expectedMethodSignatures),
() -> assertThat(actualMethodNames).isEqualTo(expectedMethodNames)
);
// @formatter:on
}
@TestFactory
Stream<DynamicContainer> kitchenSinkExtensionImplementsAllExtensionApis() {
var declaredMethods = List.of(KitchenSinkExtension.class.getDeclaredMethods());
return findExtensionApis().stream() //
.map(c -> dynamicContainer( //
c.getSimpleName(), //
Stream.concat( //
Stream.of(
dynamicTest("implements interface", () -> c.isAssignableFrom(KitchenSinkExtension.class))), //
Arrays.stream(c.getMethods()) //
.filter(ModifierSupport::isNotStatic).map(m -> dynamicTest( //
"overrides " + m.getName(), //
() -> assertTrue( //
declaredMethods.stream().anyMatch(it -> //
it.getName().equals(m.getName()) //
&& it.getReturnType().equals(m.getReturnType()) //
&& Arrays.equals(it.getParameterTypes(), m.getParameterTypes()) //
))) //
) //
) //
));
}
private List<Class<?>> findExtensionApis() {
return ReflectionSupport.findAllClassesInPackage(Extension.class.getPackage().getName(), this::isExtensionApi,
name -> true);
}
private boolean isExtensionApi(Class<?> candidate) {
return candidate.isInterface() && (candidate != Extension.class) && Extension.class.isAssignableFrom(candidate);
}
private String methodSignature(Method method) {
return "%s(%s)".formatted(method.getName(),
ClassUtils.nullSafeToString(Class::getSimpleName, method.getParameterTypes()));
}
}
| ExtensionComposabilityTests |
java | apache__dubbo | dubbo-remoting/dubbo-remoting-api/src/test/java/org/apache/dubbo/remoting/transport/AbstractCodecTest.java | {
"start": 1475,
"end": 4770
} | class ____ {
@Test
void testCheckPayloadDefault8M() throws Exception {
Channel channel = mock(Channel.class);
given(channel.getUrl()).willReturn(URL.valueOf("dubbo://1.1.1.1"));
AbstractCodec.checkPayload(channel, 1 * 1024 * 1024);
try {
AbstractCodec.checkPayload(channel, 15 * 1024 * 1024);
} catch (IOException expected) {
assertThat(
expected.getMessage(),
allOf(
containsString("Data length too large: "),
containsString("max payload: " + 8 * 1024 * 1024)));
}
verify(channel, VerificationModeFactory.atLeastOnce()).getUrl();
}
@Test
void testCheckProviderPayload() throws Exception {
Channel channel = mock(Channel.class);
given(channel.getUrl()).willReturn(URL.valueOf("dubbo://1.1.1.1"));
AbstractCodec.checkPayload(channel, 1024 * 1024 + 1, 1024 * 1024);
try {
AbstractCodec.checkPayload(channel, 1024 * 1024, 1024 * 1024);
} catch (IOException expected) {
assertThat(
expected.getMessage(),
allOf(containsString("Data length too large: "), containsString("max payload: " + 1024 * 1024)));
}
try {
AbstractCodec.checkPayload(channel, 0, 15 * 1024 * 1024);
} catch (IOException expected) {
assertThat(
expected.getMessage(),
allOf(
containsString("Data length too large: "),
containsString("max payload: " + 8 * 1024 * 1024)));
}
verify(channel, VerificationModeFactory.atLeastOnce()).getUrl();
}
@Test
void tesCheckPayloadMinusPayloadNoLimit() throws Exception {
Channel channel = mock(Channel.class);
given(channel.getUrl()).willReturn(URL.valueOf("dubbo://1.1.1.1?payload=-1"));
AbstractCodec.checkPayload(channel, 15 * 1024 * 1024);
verify(channel, VerificationModeFactory.atLeastOnce()).getUrl();
}
@Test
void testIsClientSide() {
AbstractCodec codec = getAbstractCodec();
Channel channel = mock(Channel.class);
given(channel.getRemoteAddress()).willReturn(new InetSocketAddress("172.24.157.13", 9103));
given(channel.getUrl()).willReturn(URL.valueOf("dubbo://172.24.157.13:9103"));
assertThat(codec.isClientSide(channel), is(true));
assertThat(codec.isServerSide(channel), is(false));
given(channel.getRemoteAddress()).willReturn(new InetSocketAddress("172.24.157.14", 9103));
given(channel.getUrl()).willReturn(URL.valueOf("dubbo://172.24.157.13:9103"));
assertThat(codec.isClientSide(channel), is(false));
assertThat(codec.isServerSide(channel), is(true));
}
private AbstractCodec getAbstractCodec() {
AbstractCodec codec = new AbstractCodec() {
@Override
public void encode(Channel channel, ChannelBuffer buffer, Object message) {}
@Override
public Object decode(Channel channel, ChannelBuffer buffer) {
return null;
}
};
return codec;
}
}
| AbstractCodecTest |
java | spring-projects__spring-framework | spring-expression/src/test/java/org/springframework/expression/spel/ast/InlineCollectionTests.java | {
"start": 1498,
"end": 3573
} | class ____ {
@Test
void listIsCached() {
InlineList list = parseList("{1, -2, 3, 4}");
assertThat(list.isConstant()).isTrue();
assertThat(list.getConstantValue()).isEqualTo(List.of(1, -2, 3, 4));
}
@Test
void dynamicListIsNotCached() {
InlineList list = parseList("{1, (5 - 3), 3, 4}");
assertThat(list.isConstant()).isFalse();
assertThat(list.getValue(null)).isEqualTo(List.of(1, 2, 3, 4));
}
@Test
void listWithVariableIsNotCached() {
StandardEvaluationContext evaluationContext = new StandardEvaluationContext();
ExpressionState expressionState = new ExpressionState(evaluationContext);
InlineList list = parseList("{1, -#num, 3, 4}");
assertThat(list.isConstant()).isFalse();
evaluationContext.setVariable("num", 2);
assertThat(list.getValue(expressionState)).isEqualTo(List.of(1, -2, 3, 4));
}
@Test
void listWithPropertyAccessIsNotCached() {
StandardEvaluationContext evaluationContext = new StandardEvaluationContext(new NumberHolder());
ExpressionState expressionState = new ExpressionState(evaluationContext);
InlineList list = parseList("{1, -num, 3, 4}");
assertThat(list.isConstant()).isFalse();
assertThat(list.getValue(expressionState)).isEqualTo(List.of(1, -99, 3, 4));
parser.parseExpression("num = 2").getValue(evaluationContext);
assertThat(list.getValue(expressionState)).isEqualTo(List.of(1, -2, 3, 4));
}
@Test
void listCanBeCompiled() {
SpelExpression listExpression = parseExpression("{1, -2, 3, 4}");
assertThat(listExpression.getAST().isCompilable()).isTrue();
assertThat(SpelCompiler.compile(listExpression)).isTrue();
}
@Test
void dynamicListCannotBeCompiled() {
SpelExpression listExpression = parseExpression("{1, (5 - 3), 3, 4}");
assertThat(listExpression.getAST().isCompilable()).isFalse();
assertThat(SpelCompiler.compile(listExpression)).isFalse();
}
private InlineList parseList(String s) {
SpelExpression expression = parseExpression(s);
return (InlineList) expression.getAST();
}
}
@Nested
| InlineListTests |
java | apache__flink | flink-test-utils-parent/flink-test-utils/src/main/java/org/apache/flink/test/util/JobSubmission.java | {
"start": 2742,
"end": 4668
} | class ____ the job
* @return the modified builder
*/
public JobSubmissionBuilder setMainClass(final String mainClass) {
this.mainClass = mainClass;
return this;
}
/**
* Sets the parallelism for the job.
*
* @param parallelism parallelism for the job
* @return the modified builder
*/
public JobSubmissionBuilder setParallelism(final int parallelism) {
this.parallelism = parallelism;
return this;
}
/**
* Sets whether the job should be submitted in a detached manner.
*
* @param detached whether to submit the job in a detached manner
* @return the modified builder
*/
public JobSubmissionBuilder setDetached(final boolean detached) {
this.detached = detached;
return this;
}
/**
* Adds a program argument.
*
* @param argument argument argument
* @return the modified builder
*/
public JobSubmissionBuilder addArgument(final String argument) {
Preconditions.checkNotNull(argument);
this.arguments.add(argument);
return this;
}
/**
* Convenience method for providing key-value program arguments. Invoking this method is
* equivalent to invoking {@link #addArgument(String)} twice.
*
* @param key argument key
* @param value argument value
* @return the modified builder
*/
public JobSubmissionBuilder addArgument(final String key, final String value) {
addArgument(key);
addArgument(value);
return this;
}
public JobSubmission build() {
return new JobSubmission(jar, mainClass, parallelism, detached, arguments);
}
}
}
| for |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/formatstring/AnnotateFormatMethodTest.java | {
"start": 3350,
"end": 3808
} | class ____ {
@FormatMethod
String formatMe(@FormatString String formatString, Object... args) {
return String.format(formatString, args);
}
}
""")
.doTest();
}
@Test
public void notTerminalArguments() {
compilationHelper
.addSourceLines(
"AnnotateFormatMethodNegativeCases.java",
"""
| AnnotateFormatMethodNegativeCases |
java | reactor__reactor-core | reactor-core/src/main/java/reactor/util/Logger.java | {
"start": 1045,
"end": 12177
} | interface ____ {
/**
* Provide two possible versions of a message {@link String}, depending on the
* level of detail desired.
*
* @param isVerbose {@code true} for higher level of detail, {@code false} for lower level of detail
* @return the message {@link String} according to the passed level of detail
*/
String get(boolean isVerbose);
}
/**
* Return the name of this <code>Logger</code> instance.
* @return name of this logger instance
*/
String getName();
/**
* Is the logger instance enabled for the TRACE level?
*
* @return True if this Logger is enabled for the TRACE level,
* false otherwise.
*/
boolean isTraceEnabled();
/**
* Log a message at the TRACE level.
*
* @param msg the message string to be logged
*/
void trace(String msg);
/**
* Log a message at the TRACE level according to the specified format
* and arguments.
* <p/>
* <p>This form avoids superfluous string concatenation when the logger
* is disabled for the TRACE level. However, this variant incurs the hidden
* (and relatively small) cost of creating an <code>Object[]</code> before invoking the method,
* even if this logger is disabled for TRACE.</p>
*
* @param format the format string
* @param arguments a list of 3 or more arguments
*/
void trace(String format, @Nullable Object @Nullable... arguments);
/**
* Log an exception (throwable) at the TRACE level with an
* accompanying message.
*
* @param msg the message accompanying the exception
* @param t the exception (throwable) to log
*/
void trace(String msg, Throwable t);
/**
* Is the logger instance enabled for the DEBUG level?
*
* @return True if this Logger is enabled for the DEBUG level,
* false otherwise.
*/
boolean isDebugEnabled();
/**
* Log a message at the DEBUG level.
*
* @param msg the message string to be logged
*/
void debug(String msg);
/**
* Log a message at the DEBUG level according to the specified format
* and arguments.
* <p/>
* <p>This form avoids superfluous string concatenation when the logger
* is disabled for the DEBUG level. However, this variant incurs the hidden
* (and relatively small) cost of creating an <code>Object[]</code> before invoking the method,
* even if this logger is disabled for DEBUG. </p>
*
* @param format the format string
* @param arguments a list of 3 or more arguments
*/
void debug(String format, @Nullable Object @Nullable... arguments);
/**
* Log an exception (throwable) at the DEBUG level with an
* accompanying message.
*
* @param msg the message accompanying the exception
* @param t the exception (throwable) to log
*/
void debug(String msg, Throwable t);
/**
* Is the logger instance enabled for the INFO level?
*
* @return True if this Logger is enabled for the INFO level,
* false otherwise.
*/
boolean isInfoEnabled();
/**
* Log a message at the INFO level.
*
* @param msg the message string to be logged
*/
void info(String msg);
/**
* Log a message at the INFO level according to the specified format
* and arguments.
* <p/>
* <p>This form avoids superfluous string concatenation when the logger
* is disabled for the INFO level. However, this variant incurs the hidden
* (and relatively small) cost of creating an <code>Object[]</code> before invoking the method,
* even if this logger is disabled for INFO. </p>
*
* @param format the format string
* @param arguments a list of 3 or more arguments
*/
void info(String format, @Nullable Object @Nullable... arguments);
/**
* Log an exception (throwable) at the INFO level with an
* accompanying message.
*
* @param msg the message accompanying the exception
* @param t the exception (throwable) to log
*/
void info(String msg, Throwable t);
/**
* Convenience method to log a message that is different according to the log level.
* In priority, DEBUG level is used if {@link #isDebugEnabled()}.
* Otherwise, INFO level is used (unless {@link #isInfoEnabled()} is false).
* <p>
* This can be used to log different level of details according to the active
* log level.
*
* @param messageSupplier the {@link ChoiceOfMessageSupplier} invoked in priority
* with {@code true} for the DEBUG level message, or {@code false} for INFO level
* @see #info(String)
*/
default void infoOrDebug(ChoiceOfMessageSupplier messageSupplier) {
if (isDebugEnabled()) {
debug(messageSupplier.get(true));
}
else if (isInfoEnabled()) {
info(messageSupplier.get(false));
}
}
/**
* Convenience method to log an exception (throwable), with an accompanying
* message that is different according to the log level.
* In priority, DEBUG level is used if {@link #isDebugEnabled()}.
* Otherwise, INFO level is used (unless {@link #isInfoEnabled()} is false).
* <p>
* This can be used to log different level of details according to the active
* log level.
*
* @param messageSupplier the {@link ChoiceOfMessageSupplier} invoked in priority
* with {@code true} for the DEBUG level message, or {@code false} for INFO level
* @param cause the {@link Throwable} the original exception to be logged
* @see #info(String, Throwable)
*/
default void infoOrDebug(ChoiceOfMessageSupplier messageSupplier, Throwable cause) {
if (isDebugEnabled()) {
debug(messageSupplier.get(true), cause);
}
else if (isInfoEnabled()) {
info(messageSupplier.get(false), cause);
}
}
/**
* Is the logger instance enabled for the WARN level?
*
* @return True if this Logger is enabled for the WARN level,
* false otherwise.
*/
boolean isWarnEnabled();
/**
* Log a message at the WARN level.
*
* @param msg the message string to be logged
*/
void warn(String msg);
/**
* Log a message at the WARN level according to the specified format
* and arguments.
* <p/>
* <p>This form avoids superfluous string concatenation when the logger
* is disabled for the WARN level. However, this variant incurs the hidden
* (and relatively small) cost of creating an <code>Object[]</code> before invoking the method,
* even if this logger is disabled for WARN. </p>
*
* @param format the format string
* @param arguments a list of 3 or more arguments
*/
void warn(String format, @Nullable Object @Nullable... arguments);
/**
* Log an exception (throwable) at the WARN level with an
* accompanying message.
*
* @param msg the message accompanying the exception
* @param t the exception (throwable) to log
*/
void warn(String msg, Throwable t);
/**
* Convenience method to log a message that is different according to the log level.
* In priority, DEBUG level is used if {@link #isDebugEnabled()}.
* Otherwise, WARN level is used (unless {@link #isWarnEnabled()} is false).
* <p>
* This can be used to log different level of details according to the active
* log level.
*
* @param messageSupplier the {@link ChoiceOfMessageSupplier} invoked in priority
* with {@code true} for the DEBUG level message, or {@code false} for WARN level
* @see #warn(String)
*/
default void warnOrDebug(ChoiceOfMessageSupplier messageSupplier) {
if (isDebugEnabled()) {
debug(messageSupplier.get(true));
}
else if (isWarnEnabled()) {
warn(messageSupplier.get(false));
}
}
/**
* Convenience method to log an exception (throwable), with an accompanying
* message that is different according to the log level.
* In priority, DEBUG level is used if {@link #isDebugEnabled()}.
* Otherwise, WARN level is used (unless {@link #isWarnEnabled()} is false).
* <p>
* This can be used to log different level of details according to the active
* log level.
*
* @param messageSupplier the {@link ChoiceOfMessageSupplier} invoked in priority
* with {@code true} for the DEBUG level message, or {@code false} for WARN level
* @param cause the {@link Throwable} the original exception to be logged
* @see #warn(String, Throwable)
*/
default void warnOrDebug(ChoiceOfMessageSupplier messageSupplier, Throwable cause) {
if (isDebugEnabled()) {
debug(messageSupplier.get(true), cause);
}
else if (isWarnEnabled()) {
warn(messageSupplier.get(false), cause);
}
}
/**
* Is the logger instance enabled for the ERROR level?
*
* @return True if this Logger is enabled for the ERROR level,
* false otherwise.
*/
boolean isErrorEnabled();
/**
* Log a message at the ERROR level.
*
* @param msg the message string to be logged
*/
void error(String msg);
/**
* Log a message at the ERROR level according to the specified format
* and arguments.
* <p/>
* <p>This form avoids superfluous string concatenation when the logger
* is disabled for the ERROR level. However, this variant incurs the hidden
* (and relatively small) cost of creating an <code>Object[]</code> before invoking the method,
* even if this logger is disabled for ERROR. </p>
*
* @param format the format string
* @param arguments a list of 3 or more arguments
*/
void error(String format, @Nullable Object @Nullable... arguments);
/**
* Log an exception (throwable) at the ERROR level with an
* accompanying message.
*
* @param msg the message accompanying the exception
* @param t the exception (throwable) to log
*/
void error(String msg, Throwable t);
/**
* Convenience method to log a message that is different according to the log level.
* In priority, DEBUG level is used if {@link #isDebugEnabled()}.
* Otherwise, ERROR level is used (unless {@link #isErrorEnabled()} is false).
* <p>
* This can be used to log different level of details according to the active
* log level.
*
* @param messageSupplier the {@link ChoiceOfMessageSupplier} invoked in priority
* with {@code true} for the DEBUG level message, or {@code false} for ERROR level
* @see #error(String)
*/
default void errorOrDebug(ChoiceOfMessageSupplier messageSupplier) {
if (isDebugEnabled()) {
debug(messageSupplier.get(true));
}
else if (isErrorEnabled()) {
error(messageSupplier.get(false));
}
}
/**
* Convenience method to log an exception (throwable), with an accompanying
* message that is different according to the log level.
* In priority, DEBUG level is used if {@link #isDebugEnabled()}.
* Otherwise, ERROR level is used (unless {@link #isErrorEnabled()} is false).
* <p>
* This can be used to log different level of details according to the active
* log level.
*
* @param messageSupplier the {@link ChoiceOfMessageSupplier} invoked in priority
* with {@code true} for the DEBUG level message, or {@code false} for ERROR level
* @param cause the {@link Throwable} the original exception to be logged
* @see #error(String, Throwable)
*/
default void errorOrDebug(ChoiceOfMessageSupplier messageSupplier, Throwable cause) {
if (isDebugEnabled()) {
debug(messageSupplier.get(true), cause);
}
else if (isErrorEnabled()) {
error(messageSupplier.get(false), cause);
}
}
}
| ChoiceOfMessageSupplier |
java | apache__hadoop | hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/lib/servlet/ServerWebApp.java | {
"start": 1517,
"end": 8049
} | class ____
extends Server
implements ServletContextListener {
private static final String HOME_DIR = ".home.dir";
private static final String CONFIG_DIR = ".config.dir";
private static final String LOG_DIR = ".log.dir";
private static final String TEMP_DIR = ".temp.dir";
private static final String HTTP_HOSTNAME = ".http.hostname";
private static final String HTTP_PORT = ".http.port";
public static final String SSL_ENABLED = ".ssl.enabled";
private static final ThreadLocal<String> HOME_DIR_TL =
new ThreadLocal<String>();
private InetSocketAddress authority;
/**
* Method for testing purposes.
*/
public static void setHomeDirForCurrentThread(String homeDir) {
HOME_DIR_TL.set(homeDir);
}
/**
* Constructor for testing purposes.
*/
protected ServerWebApp(String name, String homeDir, String configDir,
String logDir, String tempDir, Configuration config) {
super(name, homeDir, configDir, logDir, tempDir, config);
}
/**
* Constructor for testing purposes.
*/
protected ServerWebApp(String name, String homeDir, Configuration config) {
super(name, homeDir, config);
}
/**
* Constructor. Subclasses must have a default constructor specifying
* the server name.
* <p>
* The server name is used to resolve the Java System properties that define
* the server home, config, log and temp directories.
* <p>
* The home directory is looked in the Java System property
* <code>#SERVER_NAME#.home.dir</code>.
* <p>
* The config directory is looked in the Java System property
* <code>#SERVER_NAME#.config.dir</code>, if not defined it resolves to
* the <code>#SERVER_HOME_DIR#/conf</code> directory.
* <p>
* The log directory is looked in the Java System property
* <code>#SERVER_NAME#.log.dir</code>, if not defined it resolves to
* the <code>#SERVER_HOME_DIR#/log</code> directory.
* <p>
* The temp directory is looked in the Java System property
* <code>#SERVER_NAME#.temp.dir</code>, if not defined it resolves to
* the <code>#SERVER_HOME_DIR#/temp</code> directory.
*
* @param name server name.
*/
public ServerWebApp(String name) {
super(name, getHomeDir(name),
getDir(name, CONFIG_DIR, getHomeDir(name) + "/conf"),
getDir(name, LOG_DIR, getHomeDir(name) + "/log"),
getDir(name, TEMP_DIR, getHomeDir(name) + "/temp"), null);
}
/**
* Returns the server home directory.
* <p>
* It is looked up in the Java System property
* <code>#SERVER_NAME#.home.dir</code>.
*
* @param name the server home directory.
*
* @return the server home directory.
*/
static String getHomeDir(String name) {
String homeDir = HOME_DIR_TL.get();
if (homeDir == null) {
String sysProp = name + HOME_DIR;
homeDir = System.getProperty(sysProp);
if (homeDir == null) {
throw new IllegalArgumentException(MessageFormat.format(
"System property [{0}] not defined", sysProp));
}
}
return homeDir;
}
/**
* Convenience method that looks for Java System property defining a
* diretory and if not present defaults to the specified directory.
*
* @param name server name, used as prefix of the Java System property.
* @param dirType dir type, use as postfix of the Java System property.
* @param defaultDir the default directory to return if the Java System
* property <code>name + dirType</code> is not defined.
*
* @return the directory defined in the Java System property or the
* the default directory if the Java System property is not defined.
*/
static String getDir(String name, String dirType, String defaultDir) {
String sysProp = name + dirType;
return System.getProperty(sysProp, defaultDir);
}
/**
* Initializes the <code>ServletContextListener</code> which initializes
* the Server.
*
* @param event servelt context event.
*/
@Override
public void contextInitialized(ServletContextEvent event) {
try {
init();
} catch (ServerException ex) {
event.getServletContext().log("ERROR: " + ex.getMessage());
throw new RuntimeException(ex);
}
}
/**
* Resolves the host and port InetSocketAddress the
* web server is listening to.
* <p>
* This implementation looks for the following 2 properties:
* <ul>
* <li>#SERVER_NAME#.http.hostname</li>
* <li>#SERVER_NAME#.http.port</li>
* </ul>
*
* @return the host and port InetSocketAddress the
* web server is listening to.
* @throws ServerException thrown
* if any of the above 2 properties is not defined.
*/
protected InetSocketAddress resolveAuthority() throws ServerException {
String hostnameKey = getName() + HTTP_HOSTNAME;
String portKey = getName() + HTTP_PORT;
String host = System.getProperty(hostnameKey);
String port = System.getProperty(portKey);
if (host == null) {
throw new ServerException(ServerException.ERROR.S13, hostnameKey);
}
if (port == null) {
throw new ServerException(ServerException.ERROR.S13, portKey);
}
try {
InetAddress add = InetAddress.getByName(host);
int portNum = Integer.parseInt(port);
return new InetSocketAddress(add, portNum);
} catch (UnknownHostException ex) {
throw new ServerException(ServerException.ERROR.S14, ex.toString(), ex);
}
}
/**
* Destroys the <code>ServletContextListener</code> which destroys
* the Server.
*
* @param event servelt context event.
*/
@Override
public void contextDestroyed(ServletContextEvent event) {
destroy();
}
/**
* Returns the hostname:port InetSocketAddress the webserver is listening to.
*
* @return the hostname:port InetSocketAddress the webserver is listening to.
*/
public InetSocketAddress getAuthority() throws ServerException {
synchronized (this) {
if (authority == null) {
authority = resolveAuthority();
}
}
return authority;
}
/**
* Sets an alternate hostname:port InetSocketAddress to use.
* <p>
* For testing purposes.
*
* @param authority alterante authority.
*/
@VisibleForTesting
public void setAuthority(InetSocketAddress authority) {
this.authority = authority;
}
/**
*
*/
public boolean isSslEnabled() {
return Boolean.parseBoolean(
System.getProperty(getName() + SSL_ENABLED, "false"));
}
}
| ServerWebApp |
java | apache__maven | impl/maven-core/src/main/java/org/apache/maven/plugin/internal/MavenPluginValidator.java | {
"start": 1018,
"end": 1154
} | interface ____ {
void validate(Artifact pluginArtifact, PluginDescriptor pluginDescriptor, List<String> errors);
}
| MavenPluginValidator |
java | apache__kafka | shell/src/test/java/org/apache/kafka/shell/MetadataShellIntegrationTest.java | {
"start": 1605,
"end": 1753
} | class ____ {
private static final Logger LOG = LoggerFactory.getLogger(MetadataShellIntegrationTest.class);
static | MetadataShellIntegrationTest |
java | quarkusio__quarkus | core/processor/src/test/java/io/quarkus/annotation/processor/documentation/config/util/ConfigNamingUtilTest.java | {
"start": 2579,
"end": 2821
} | class ____
String simpleClassName = "RootName";
String actual = ConfigNamingUtil.deriveConfigRootName(simpleClassName, "", ConfigPhase.RUN_TIME);
assertEquals("quarkus.root-name", actual);
// should hyphenate | name |
java | quarkusio__quarkus | extensions/jaxb/deployment/src/test/java/io/quarkus/jaxb/deployment/one/Model.java | {
"start": 116,
"end": 294
} | class ____ {
private String name1;
public String getName1() {
return name1;
}
public void setName1(String name1) {
this.name1 = name1;
}
}
| Model |
java | mapstruct__mapstruct | processor/src/test/java/org/mapstruct/ap/test/selection/qualifier/defaults/DirectoryNode.java | {
"start": 240,
"end": 433
} | class ____ {
private String parent;
public void setParent(String parent) {
this.parent = parent;
}
public String getParent() {
return parent;
}
}
| DirectoryNode |
java | quarkusio__quarkus | test-framework/common/src/test/java/io/quarkus/test/common/TestResourceManagerTest.java | {
"start": 2254,
"end": 2403
} | class ____ {
}
@QuarkusTestResource(FirstLifecycleManager.class)
@QuarkusTestResource(SecondLifecycleManager.class)
public static | MyTest |
java | apache__hadoop | hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/ITestAzureBlobFileSystemCheckAccess.java | {
"start": 6320,
"end": 16444
} | enum
____.access(new Path("test.txt"), null);
});
}
@Test
public void testCheckAccessForNonExistentFile() throws Exception {
checkPrerequisites();
assertThrows(FileNotFoundException.class, () -> {
Path nonExistentFile = setupTestDirectoryAndUserAccess(
"/nonExistentFile1.txt", FsAction.ALL);
superUserFs.delete(nonExistentFile, true);
testUserFs.access(nonExistentFile, FsAction.READ);
});
}
@Test
public void testWhenCheckAccessConfigIsOff() throws Exception {
assumeThat(isHNSEnabled).as(FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT + " is false").isTrue();
Configuration conf = getRawConfiguration();
conf.setBoolean(FS_AZURE_ENABLE_CHECK_ACCESS, false);
FileSystem fs = FileSystem.newInstance(conf);
Path testFilePath = setupTestDirectoryAndUserAccess("/test1.txt",
FsAction.NONE);
fs.access(testFilePath, FsAction.EXECUTE);
fs.access(testFilePath, FsAction.READ);
fs.access(testFilePath, FsAction.WRITE);
fs.access(testFilePath, FsAction.READ_EXECUTE);
fs.access(testFilePath, FsAction.WRITE_EXECUTE);
fs.access(testFilePath, FsAction.READ_WRITE);
fs.access(testFilePath, FsAction.ALL);
testFilePath = setupTestDirectoryAndUserAccess("/test1.txt", FsAction.ALL);
fs.access(testFilePath, FsAction.EXECUTE);
fs.access(testFilePath, FsAction.READ);
fs.access(testFilePath, FsAction.WRITE);
fs.access(testFilePath, FsAction.READ_EXECUTE);
fs.access(testFilePath, FsAction.WRITE_EXECUTE);
fs.access(testFilePath, FsAction.READ_WRITE);
fs.access(testFilePath, FsAction.ALL);
fs.access(testFilePath, null);
Path nonExistentFile = setupTestDirectoryAndUserAccess(
"/nonExistentFile2" + ".txt", FsAction.NONE);
superUserFs.delete(nonExistentFile, true);
fs.access(nonExistentFile, FsAction.READ);
}
@Test
public void testCheckAccessForAccountWithoutNS() throws Exception {
assumeThat(getConfiguration().getBoolean(FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT,
true))
.as(FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT + " is true")
.isFalse();
assumeThat(isCheckAccessEnabled)
.as(FS_AZURE_ENABLE_CHECK_ACCESS + " is false")
.isTrue();
checkIfConfigIsSet(FS_AZURE_BLOB_FS_CHECKACCESS_TEST_CLIENT_ID);
checkIfConfigIsSet(FS_AZURE_BLOB_FS_CHECKACCESS_TEST_CLIENT_SECRET);
checkIfConfigIsSet(FS_AZURE_BLOB_FS_CHECKACCESS_TEST_USER_GUID);
setTestUserFs();
// When the driver does not know if the account is HNS enabled or not it
// makes a server call and fails
intercept(AccessControlException.class,
"\"This request is not authorized to perform this operation using "
+ "this permission.\", 403",
() -> testUserFs.access(new Path("/"), FsAction.READ));
// When the driver has already determined if the account is HNS enabled
// or not, and as the account is non HNS the AzureBlobFileSystem#access
// acts as noop
AzureBlobFileSystemStore mockAbfsStore =
Mockito.mock(AzureBlobFileSystemStore.class);
Mockito.when(mockAbfsStore
.getIsNamespaceEnabled(getTestTracingContext(getFileSystem(), false)))
.thenReturn(true);
Field abfsStoreField = AzureBlobFileSystem.class.getDeclaredField(
"abfsStore");
abfsStoreField.setAccessible(true);
abfsStoreField.set(testUserFs, mockAbfsStore);
testUserFs.access(new Path("/"), FsAction.READ);
superUserFs.access(new Path("/"), FsAction.READ);
}
@Test
public void testFsActionNONE() throws Exception {
checkPrerequisites();
Path testFilePath = setupTestDirectoryAndUserAccess("/test2.txt",
FsAction.NONE);
assertInaccessible(testFilePath, FsAction.EXECUTE);
assertInaccessible(testFilePath, FsAction.READ);
assertInaccessible(testFilePath, FsAction.WRITE);
assertInaccessible(testFilePath, FsAction.READ_EXECUTE);
assertInaccessible(testFilePath, FsAction.WRITE_EXECUTE);
assertInaccessible(testFilePath, FsAction.READ_WRITE);
assertInaccessible(testFilePath, FsAction.ALL);
}
@Test
public void testFsActionEXECUTE() throws Exception {
checkPrerequisites();
Path testFilePath = setupTestDirectoryAndUserAccess("/test3.txt",
FsAction.EXECUTE);
assertAccessible(testFilePath, FsAction.EXECUTE);
assertInaccessible(testFilePath, FsAction.READ);
assertInaccessible(testFilePath, FsAction.WRITE);
assertInaccessible(testFilePath, FsAction.READ_EXECUTE);
assertInaccessible(testFilePath, FsAction.WRITE_EXECUTE);
assertInaccessible(testFilePath, FsAction.READ_WRITE);
assertInaccessible(testFilePath, FsAction.ALL);
}
@Test
public void testFsActionREAD() throws Exception {
checkPrerequisites();
Path testFilePath = setupTestDirectoryAndUserAccess("/test4.txt",
FsAction.READ);
assertAccessible(testFilePath, FsAction.READ);
assertInaccessible(testFilePath, FsAction.EXECUTE);
assertInaccessible(testFilePath, FsAction.WRITE);
assertInaccessible(testFilePath, FsAction.READ_EXECUTE);
assertInaccessible(testFilePath, FsAction.WRITE_EXECUTE);
assertInaccessible(testFilePath, FsAction.READ_WRITE);
assertInaccessible(testFilePath, FsAction.ALL);
}
@Test
public void testFsActionWRITE() throws Exception {
checkPrerequisites();
Path testFilePath = setupTestDirectoryAndUserAccess("/test5.txt",
FsAction.WRITE);
assertAccessible(testFilePath, FsAction.WRITE);
assertInaccessible(testFilePath, FsAction.EXECUTE);
assertInaccessible(testFilePath, FsAction.READ);
assertInaccessible(testFilePath, FsAction.READ_EXECUTE);
assertInaccessible(testFilePath, FsAction.WRITE_EXECUTE);
assertInaccessible(testFilePath, FsAction.READ_WRITE);
assertInaccessible(testFilePath, FsAction.ALL);
}
@Test
public void testFsActionREADEXECUTE() throws Exception {
checkPrerequisites();
Path testFilePath = setupTestDirectoryAndUserAccess("/test6.txt",
FsAction.READ_EXECUTE);
assertAccessible(testFilePath, FsAction.EXECUTE);
assertAccessible(testFilePath, FsAction.READ);
assertAccessible(testFilePath, FsAction.READ_EXECUTE);
assertInaccessible(testFilePath, FsAction.WRITE);
assertInaccessible(testFilePath, FsAction.WRITE_EXECUTE);
assertInaccessible(testFilePath, FsAction.READ_WRITE);
assertInaccessible(testFilePath, FsAction.ALL);
}
@Test
public void testFsActionWRITEEXECUTE() throws Exception {
checkPrerequisites();
Path testFilePath = setupTestDirectoryAndUserAccess("/test7.txt",
FsAction.WRITE_EXECUTE);
assertAccessible(testFilePath, FsAction.EXECUTE);
assertAccessible(testFilePath, FsAction.WRITE);
assertAccessible(testFilePath, FsAction.WRITE_EXECUTE);
assertInaccessible(testFilePath, FsAction.READ);
assertInaccessible(testFilePath, FsAction.READ_EXECUTE);
assertInaccessible(testFilePath, FsAction.READ_WRITE);
assertInaccessible(testFilePath, FsAction.ALL);
}
@Test
public void testFsActionALL() throws Exception {
checkPrerequisites();
Path testFilePath = setupTestDirectoryAndUserAccess("/test8.txt",
FsAction.ALL);
assertAccessible(testFilePath, FsAction.EXECUTE);
assertAccessible(testFilePath, FsAction.WRITE);
assertAccessible(testFilePath, FsAction.WRITE_EXECUTE);
assertAccessible(testFilePath, FsAction.READ);
assertAccessible(testFilePath, FsAction.READ_EXECUTE);
assertAccessible(testFilePath, FsAction.READ_WRITE);
assertAccessible(testFilePath, FsAction.ALL);
}
private void checkPrerequisites() throws Exception {
assumeThat(isHNSEnabled).as(FS_AZURE_TEST_NAMESPACE_ENABLED_ACCOUNT + " is false").isTrue();
assumeThat(isCheckAccessEnabled).as(FS_AZURE_ENABLE_CHECK_ACCESS + " is false").isTrue();
setTestUserFs();
checkIfConfigIsSet(FS_AZURE_BLOB_FS_CHECKACCESS_TEST_CLIENT_ID);
checkIfConfigIsSet(FS_AZURE_BLOB_FS_CHECKACCESS_TEST_CLIENT_SECRET);
checkIfConfigIsSet(FS_AZURE_BLOB_FS_CHECKACCESS_TEST_USER_GUID);
}
private void checkIfConfigIsSet(String configKey){
AbfsConfiguration conf = getConfiguration();
String value = conf.get(configKey);
assumeThat(value)
.as(configKey + " config is mandatory for the test to run")
.isNotNull()
.matches(v -> v.trim().length() > 1, "trimmed length > 1");
}
private void assertAccessible(Path testFilePath, FsAction fsAction)
throws IOException {
assertTrue(isAccessible(testUserFs, testFilePath, fsAction),
"Should have been given access " + fsAction + " on " + testFilePath);
}
private void assertInaccessible(Path testFilePath, FsAction fsAction)
throws IOException {
assertFalse(isAccessible(testUserFs, testFilePath, fsAction),
"Should have been denied access " + fsAction + " on " + testFilePath);
}
private void setExecuteAccessForParentDirs(Path dir) throws IOException {
dir = dir.getParent();
while (dir != null) {
modifyAcl(dir, testUserGuid, FsAction.EXECUTE);
dir = dir.getParent();
}
}
private void modifyAcl(Path file, String uid, FsAction fsAction)
throws IOException {
List<AclEntry> aclSpec = Lists.newArrayList(AclTestHelpers
.aclEntry(AclEntryScope.ACCESS, AclEntryType.USER, uid, fsAction));
this.superUserFs.modifyAclEntries(file, aclSpec);
}
private Path setupTestDirectoryAndUserAccess(String testFileName,
FsAction fsAction) throws Exception {
Path testPath = path(TEST_FOLDER_PATH);
Path file = new Path(testPath + testFileName);
file = this.superUserFs.makeQualified(file);
this.superUserFs.delete(file, true);
this.superUserFs.create(file);
modifyAcl(file, testUserGuid, fsAction);
setExecuteAccessForParentDirs(file);
return file;
}
private boolean isAccessible(FileSystem fs, Path path, FsAction fsAction)
throws IOException {
try {
fs.access(path, fsAction);
} catch (AccessControlException ace) {
return false;
}
return true;
}
}
| superUserFs |
java | mapstruct__mapstruct | core/src/main/java/org/mapstruct/InheritInverseConfiguration.java | {
"start": 2255,
"end": 2840
} | interface ____ {
*
* @Mapping( target = "seatCount", source = "numberOfSeats")
* @Mapping( target = "enginePower", source = "engineClass", ignore=true) // NOTE: source specified as well
* CarDto carToDto(Car car);
*
* @InheritInverseConfiguration
* @Mapping(target = "numberOfSeats", ignore = true)
* // no need to specify a mapping with ignore for "engineClass": specifying source above will assume
* Car carDtoToCar(CarDto carDto);
* }
* </code></pre>
* @author Sjaak Derksen
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.CLASS)
public @ | CarMapper |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-prometheus/src/test/java/smoketest/prometheus/SamplePrometheusApplicationTests.java | {
"start": 1643,
"end": 2448
} | class ____ {
@Autowired
private TestRestTemplate restTemplate;
@Test
void shouldExportExemplars() {
for (int i = 0; i < 10; i++) {
ResponseEntity<String> response = this.restTemplate.getForEntity("/actuator", String.class);
assertThat(response.getStatusCode()).isEqualTo(HttpStatus.OK);
}
HttpHeaders headers = new HttpHeaders();
headers.add(HttpHeaders.ACCEPT, "application/openmetrics-text; version=1.0.0; charset=utf-8");
ResponseEntity<String> metrics = this.restTemplate.exchange("/actuator/prometheus", HttpMethod.GET,
new HttpEntity<>(headers), String.class);
assertThat(metrics.getStatusCode()).isEqualTo(HttpStatus.OK);
assertThat(metrics.getBody()).containsSubsequence("http_client_requests_seconds_count", "span_id", "trace_id");
}
}
| SamplePrometheusApplicationTests |
java | spring-cloud__spring-cloud-gateway | spring-cloud-gateway-server-webflux/src/test/java/org/springframework/cloud/gateway/filter/factory/SaveSessionGatewayFilterFactoryTests.java | {
"start": 2002,
"end": 3289
} | class ____ extends BaseWebClientTests {
static WebSession mockWebSession = mock(WebSession.class);
private final Map<String, WebSession> fakeSessionStore = new ConcurrentHashMap<>();
private static final String SESSION_ID = "RANDOM_SESSION_ID";
@Test
public void webCallShouldTriggerWebSessionSaveAction() {
when(mockWebSession.getAttributes()).thenReturn(new HashMap<>());
Mono<Void> doSaveSession = Mono.fromRunnable(() -> {
// Do save session. Need to make sure the Mono is subscribed
fakeSessionStore.put(SESSION_ID, mockWebSession);
});
when(mockWebSession.save()).thenReturn(doSaveSession);
Mono<Map> result = webClient.get().uri("/get").retrieve().bodyToMono(Map.class);
StepVerifier.create(result).consumeNextWith(response -> {
// Don't care about data, just need to catch signal
}).expectComplete().verify(Duration.ofMinutes(10));
verify(mockWebSession).save();
assertThat(fakeSessionStore.get(SESSION_ID)).isEqualTo(mockWebSession);
}
@Test
public void toStringFormat() {
GatewayFilter filter = new SaveSessionGatewayFilterFactory().apply("");
assertThat(filter.toString()).contains("SaveSession");
}
@EnableAutoConfiguration
@SpringBootConfiguration
@Import(DefaultTestConfig.class)
static | SaveSessionGatewayFilterFactoryTests |
java | spring-projects__spring-framework | spring-context/src/test/java/example/scannable_implicitbasepackage/ComponentScanAnnotatedConfigWithImplicitBasePackage.java | {
"start": 1010,
"end": 1120
} | class
____ ConfigurableComponent configurableComponent() {
return new ConfigurableComponent(true);
}
}
| public |
java | elastic__elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java | {
"start": 3468,
"end": 15579
} | class ____ {
public static final MlConfigVersion V_0_000_001 = new MlConfigVersion(1);
public static final MlConfigVersion V_0_000_002 = new MlConfigVersion(2);
public static final MlConfigVersion V_0_000_003 = new MlConfigVersion(2);
}
public void testStaticMlConfigVersionChecks() {
assertThat(
MlConfigVersion.getAllVersionIds(CorrectFakeVersion.class),
equalTo(
Map.of(
199,
CorrectFakeVersion.V_0_00_01,
2,
CorrectFakeVersion.V_0_000_002,
3,
CorrectFakeVersion.V_0_000_003,
4,
CorrectFakeVersion.V_0_000_004
)
)
);
AssertionError e = expectThrows(AssertionError.class, () -> MlConfigVersion.getAllVersionIds(DuplicatedIdFakeVersion.class));
assertThat(e.getMessage(), containsString("have the same version number"));
}
private static final Set<DiscoveryNodeRole> ROLES_WITH_ML = Set.of(
DiscoveryNodeRole.MASTER_ROLE,
DiscoveryNodeRole.ML_ROLE,
DiscoveryNodeRole.DATA_ROLE
);
public void testGetMinMaxMlConfigVersion() {
Map<String, String> nodeAttr1 = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_7_1_0.toString());
Map<String, String> nodeAttr2 = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_8_2_0.toString());
Map<String, String> nodeAttr3 = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_10.toString());
DiscoveryNodes nodes = DiscoveryNodes.builder()
.add(
DiscoveryNodeUtils.builder("_node_id1")
.name("_node_name1")
.address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300))
.attributes(nodeAttr1)
.roles(ROLES_WITH_ML)
.version(VersionInformation.inferVersions(Version.fromString("7.2.0")))
.build()
)
.add(
DiscoveryNodeUtils.builder("_node_id2")
.name("_node_name2")
.address(new TransportAddress(InetAddress.getLoopbackAddress(), 9301))
.attributes(nodeAttr2)
.roles(ROLES_WITH_ML)
.version(VersionInformation.inferVersions(Version.fromString("7.1.0")))
.build()
)
.add(
DiscoveryNodeUtils.builder("_node_id3")
.name("_node_name3")
.address(new TransportAddress(InetAddress.getLoopbackAddress(), 9302))
.attributes(nodeAttr3)
.roles(ROLES_WITH_ML)
.version(VersionInformation.inferVersions(Version.fromString("7.0.0")))
.build()
)
.build();
assertEquals(MlConfigVersion.V_7_1_0, MlConfigVersion.getMinMlConfigVersion(nodes));
assertEquals(MlConfigVersion.V_10, MlConfigVersion.getMaxMlConfigVersion(nodes));
}
public void testGetMinMaxMlConfigVersionWhenMlConfigVersionAttrIsMissing() {
Map<String, String> nodeAttr1 = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_7_1_0.toString());
Map<String, String> nodeAttr2 = Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_8_2_0.toString());
Map<String, String> nodeAttr3 = Map.of();
DiscoveryNodes nodes = DiscoveryNodes.builder()
.add(
DiscoveryNodeUtils.builder("_node_id1")
.name("_node_name1")
.address(new TransportAddress(InetAddress.getLoopbackAddress(), 9300))
.attributes(nodeAttr1)
.roles(ROLES_WITH_ML)
.version(VersionInformation.inferVersions(Version.fromString("7.2.0")))
.build()
)
.add(
DiscoveryNodeUtils.builder("_node_id2")
.name("_node_name2")
.address(new TransportAddress(InetAddress.getLoopbackAddress(), 9301))
.attributes(nodeAttr2)
.roles(ROLES_WITH_ML)
.version(VersionInformation.inferVersions(Version.fromString("7.1.0")))
.build()
)
.add(
DiscoveryNodeUtils.builder("_node_id3")
.name("_node_name3")
.address(new TransportAddress(InetAddress.getLoopbackAddress(), 9302))
.attributes(nodeAttr3)
.roles(ROLES_WITH_ML)
.version(
new VersionInformation(
Version.V_8_11_0,
IndexVersion.getMinimumCompatibleIndexVersion(Version.V_8_11_0.id),
IndexVersion.fromId(Version.V_8_11_0.id)
)
)
.build()
)
.build();
assertEquals(MlConfigVersion.V_7_1_0, MlConfigVersion.getMinMlConfigVersion(nodes));
// _node_name3 is ignored
assertEquals(MlConfigVersion.V_8_2_0, MlConfigVersion.getMaxMlConfigVersion(nodes));
}
public void testGetMlConfigVersionForNode() {
DiscoveryNode node1 = DiscoveryNodeUtils.builder("_node_id5")
.name("_node_name5")
.address(new TransportAddress(InetAddress.getLoopbackAddress(), 9304))
.attributes(Map.of(MlConfigVersion.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.V_8_5_0.toString()))
.roles(ROLES_WITH_ML)
.version(VersionInformation.inferVersions(Version.fromString("8.7.0")))
.build();
MlConfigVersion mlConfigVersion1 = MlConfigVersion.getMlConfigVersionForNode(node1);
assertEquals(MlConfigVersion.V_8_5_0, mlConfigVersion1);
}
public void testDefinedConstants() throws IllegalAccessException {
Pattern historicalVersion = Pattern.compile("^V_(\\d{1,2})_(\\d{1,2})_(\\d{1,2})$");
Pattern MlConfigVersion = Pattern.compile("^V_(\\d+)$");
Set<String> ignore = Set.of("ZERO", "CURRENT", "MINIMUM_COMPATIBLE");
for (java.lang.reflect.Field field : MlConfigVersion.class.getFields()) {
String fieldName = field.getName();
if (fieldName.equals("V_8_10_0") == false) {
continue;
}
if (field.getType() == MlConfigVersion.class && ignore.contains(field.getName()) == false) {
// check the field modifiers
assertEquals(
"Field " + field.getName() + " should be public static final",
Modifier.PUBLIC | Modifier.STATIC | Modifier.FINAL,
field.getModifiers()
);
Matcher matcher = historicalVersion.matcher(field.getName());
if (matcher.matches()) {
// old-style version constant
String idString = matcher.group(1) + "." + matcher.group(2) + "." + matcher.group(3);
String fieldStr = field.get(null).toString();
assertEquals(
"Field " + field.getName() + " does not have expected id " + idString,
idString,
field.get(null).toString()
);
} else if ((matcher = MlConfigVersion.matcher(field.getName())).matches()) {
String idString = matcher.group(1);
assertEquals(
"Field " + field.getName() + " does not have expected id " + idString,
idString,
field.get(null).toString()
);
} else {
fail("Field " + field.getName() + " does not have expected format");
}
}
}
}
public void testMin() {
assertEquals(
MlConfigVersionUtils.getPreviousVersion(),
MlConfigVersion.min(MlConfigVersion.CURRENT, MlConfigVersionUtils.getPreviousVersion())
);
assertEquals(
MlConfigVersion.fromId(MlConfigVersion.FIRST_ML_VERSION.id()),
MlConfigVersion.min(MlConfigVersion.fromId(MlConfigVersion.FIRST_ML_VERSION.id()), MlConfigVersion.CURRENT)
);
}
public void testMax() {
assertEquals(MlConfigVersion.CURRENT, MlConfigVersion.max(MlConfigVersion.CURRENT, MlConfigVersionUtils.getPreviousVersion()));
assertEquals(
MlConfigVersion.CURRENT,
MlConfigVersion.max(MlConfigVersion.fromId(MlConfigVersion.FIRST_ML_VERSION.id()), MlConfigVersion.CURRENT)
);
}
public void testVersionConstantPresent() {
Set<MlConfigVersion> ignore = Set.of(MlConfigVersion.ZERO, MlConfigVersion.CURRENT, MlConfigVersion.FIRST_ML_VERSION);
assertThat(MlConfigVersion.CURRENT, sameInstance(MlConfigVersion.fromId(MlConfigVersion.CURRENT.id())));
final int iters = scaledRandomIntBetween(20, 100);
for (int i = 0; i < iters; i++) {
MlConfigVersion version = MlConfigVersionUtils.randomVersion(ignore);
assertThat(version, sameInstance(MlConfigVersion.fromId(version.id())));
}
}
public void testCurrentIsLatest() {
assertThat(Collections.max(MlConfigVersion.getAllVersions()), Matchers.is(MlConfigVersion.CURRENT));
}
public void testToString() {
MlConfigVersion mlVersion = MlConfigVersion.fromId(5_00_00_99);
String mlVersionStr = mlVersion.toString();
Version version = Version.fromId(5_00_00_99);
String versionStr = version.toString();
assertEquals("5.0.0", MlConfigVersion.fromId(5_00_00_99).toString());
assertEquals("2.3.0", MlConfigVersion.fromId(2_03_00_99).toString());
assertEquals("1.0.0", MlConfigVersion.fromId(1_00_00_99).toString());
assertEquals("2.0.0", MlConfigVersion.fromId(2_00_00_99).toString());
assertEquals("5.0.0", MlConfigVersion.fromId(5_00_00_99).toString());
String str = MlConfigVersion.fromId(10_00_00_10).toString();
assertEquals("10.0.0", MlConfigVersion.fromId(10_00_00_10).toString());
assertEquals("7.3.0", MlConfigVersion.V_7_3_0.toString());
assertEquals("8.6.1", MlConfigVersion.V_8_6_1.toString());
assertEquals("8.0.0", MlConfigVersion.V_8_0_0.toString());
assertEquals("7.0.1", MlConfigVersion.V_7_0_1.toString());
assertEquals("7.15.1", MlConfigVersion.V_7_15_1.toString());
assertEquals("10.0.0", MlConfigVersion.V_10.toString());
}
public void testFromString() {
assertEquals(MlConfigVersion.V_7_3_0, MlConfigVersion.fromString("7.3.0"));
assertEquals(MlConfigVersion.V_8_6_1, MlConfigVersion.fromString("8.6.1"));
assertEquals(MlConfigVersion.V_8_0_0, MlConfigVersion.fromString("8.0.0"));
assertEquals(MlConfigVersion.V_10, MlConfigVersion.fromString("8.10.0"));
assertEquals(MlConfigVersion.V_10, MlConfigVersion.fromString("10.0.0"));
MlConfigVersion V_8_0_1 = MlConfigVersion.fromString("8.0.1");
assertEquals(false, KnownMlConfigVersions.ALL_VERSIONS.contains(V_8_0_1));
assertEquals(8000199, V_8_0_1.id());
MlConfigVersion unknownVersion = MlConfigVersion.fromId(MlConfigVersion.CURRENT.id() + 1);
assertEquals(false, KnownMlConfigVersions.ALL_VERSIONS.contains(unknownVersion));
assertEquals(MlConfigVersion.CURRENT.id() + 1, unknownVersion.id());
for (String version : new String[] { "10.2", "7.17.2.99", "9" }) {
Exception e = expectThrows(IllegalArgumentException.class, () -> MlConfigVersion.fromString(version));
assertEquals("ML config version [" + version + "] not valid", e.getMessage());
}
}
}
| DuplicatedIdFakeVersion |
java | google__guava | android/guava/src/com/google/common/collect/Maps.java | {
"start": 117622,
"end": 118574
} | class ____<K extends @Nullable Object, V extends @Nullable Object>
extends AbstractFilteredMap<K, V> {
final Predicate<? super K> keyPredicate;
FilteredKeyMap(
Map<K, V> unfiltered,
Predicate<? super K> keyPredicate,
Predicate<? super Entry<K, V>> entryPredicate) {
super(unfiltered, entryPredicate);
this.keyPredicate = keyPredicate;
}
@Override
protected Set<Entry<K, V>> createEntrySet() {
return Sets.filter(unfiltered.entrySet(), predicate);
}
@Override
Set<K> createKeySet() {
return Sets.filter(unfiltered.keySet(), keyPredicate);
}
// The cast is called only when the key is in the unfiltered map, implying
// that key is a K.
@Override
@SuppressWarnings("unchecked")
public boolean containsKey(@Nullable Object key) {
return unfiltered.containsKey(key) && keyPredicate.apply((K) key);
}
}
private static | FilteredKeyMap |
java | alibaba__fastjson | src/test/java/com/alibaba/json/bvt/parser/autoType/AutoTypeTest0.java | {
"start": 1352,
"end": 1438
} | class ____ {
public int id;
public ModelNested nested;
}
}
| ModelNested |
java | quarkusio__quarkus | extensions/spring-web/core/deployment/src/main/java/io/quarkus/spring/web/deployment/ResponseBuilder.java | {
"start": 260,
"end": 1522
} | class ____ {
private final MethodCreator methodCreator;
private final ResultHandle delegate;
ResponseBuilder(MethodCreator methodCreator, int status) {
this.methodCreator = methodCreator;
this.delegate = withStatus(status);
}
public ResultHandle build() {
return methodCreator.invokeVirtualMethod(
ofMethod(Response.ResponseBuilder.class, "build", Response.class), delegate);
}
public ResponseBuilder withType(ResultHandle type) {
methodCreator.invokeVirtualMethod(
ofMethod(Response.ResponseBuilder.class, "type", Response.ResponseBuilder.class, MediaType.class),
delegate, type);
return this;
}
public ResponseBuilder withEntity(ResultHandle entity) {
methodCreator.invokeVirtualMethod(
ofMethod(Response.ResponseBuilder.class, "entity", Response.ResponseBuilder.class, Object.class),
delegate, entity);
return this;
}
private ResultHandle withStatus(int status) {
return methodCreator.invokeStaticMethod(
ofMethod(Response.class, "status", Response.ResponseBuilder.class, int.class),
methodCreator.load(status));
}
}
| ResponseBuilder |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/bulk/ObjectFactory.java | {
"start": 1613,
"end": 6890
} | class ____ {
private static final QName JOB_INFO_QNAME = new QName("http://www.force.com/2009/06/asyncapi/dataload", "jobInfo");
private static final QName BATCH_INFO_QNAME = new QName("http://www.force.com/2009/06/asyncapi/dataload", "batchInfo");
private static final QName ERROR_QNAME = new QName("http://www.force.com/2009/06/asyncapi/dataload", "error");
private static final QName RESULTS_QNAME = new QName("http://www.force.com/2009/06/asyncapi/dataload", "results");
private static final QName RESULT_LIST_QNAME = new QName("http://www.force.com/2009/06/asyncapi/dataload", "result-list");
private static final QName BATCH_INFO_LIST_QNAME
= new QName("http://www.force.com/2009/06/asyncapi/dataload", "batchInfoList");
private static final QName QUERY_RESULT_QNAME = new QName("http://www.force.com/2009/06/asyncapi/dataload", "queryResult");
/**
* Create a new ObjectFactory that can be used to create new instances of schema derived classes for package:
* org.apache.camel.component.salesforce.api.dto.bulk
*/
public ObjectFactory() {
}
/**
* Create an instance of {@link SObject }
*/
public SObject createSObject() {
return new SObject();
}
/**
* Create an instance of {@link ResultError }
*/
public ResultError createResultError() {
return new ResultError();
}
/**
* Create an instance of {@link BatchInfo }
*/
public BatchInfo createBatchInfo() {
return new BatchInfo();
}
/**
* Create an instance of {@link BatchResult }
*/
public BatchResult createBatchResult() {
return new BatchResult();
}
/**
* Create an instance of {@link QueryResultList }
*/
public QueryResultList createQueryResultList() {
return new QueryResultList();
}
/**
* Create an instance of {@link Error }
*/
public Error createError() {
return new Error();
}
/**
* Create an instance of {@link BatchInfoList }
*/
public BatchInfoList createBatchInfoList() {
return new BatchInfoList();
}
/**
* Create an instance of {@link Result }
*/
public Result createResult() {
return new Result();
}
/**
* Create an instance of {@link JobInfo }
*/
public JobInfo createJobInfo() {
return new JobInfo();
}
/**
* Create an instance of {@link QueryResult }
*/
public QueryResult createQueryResult() {
return new QueryResult();
}
/**
* Create an instance of {@link jakarta.xml.bind.JAXBElement }{@code <}{@link JobInfo }{@code >}}
*/
@XmlElementDecl(namespace = "http://www.force.com/2009/06/asyncapi/dataload", name = "jobInfo")
public JAXBElement<JobInfo> createJobInfo(JobInfo value) {
return new JAXBElement<>(JOB_INFO_QNAME, JobInfo.class, null, value);
}
/**
* Create an instance of {@link jakarta.xml.bind.JAXBElement }{@code <}{@link BatchInfo }{@code >}}
*/
@XmlElementDecl(namespace = "http://www.force.com/2009/06/asyncapi/dataload", name = "batchInfo")
public JAXBElement<BatchInfo> createBatchInfo(BatchInfo value) {
return new JAXBElement<>(BATCH_INFO_QNAME, BatchInfo.class, null, value);
}
/**
* Create an instance of {@link jakarta.xml.bind.JAXBElement }{@code <}{@link Error }{@code >}}
*/
@XmlElementDecl(namespace = "http://www.force.com/2009/06/asyncapi/dataload", name = "error")
public JAXBElement<Error> createError(Error value) {
return new JAXBElement<>(ERROR_QNAME, Error.class, null, value);
}
/**
* Create an instance of {@link jakarta.xml.bind.JAXBElement }{@code <}{@link BatchResult }{@code >}}
*/
@XmlElementDecl(namespace = "http://www.force.com/2009/06/asyncapi/dataload", name = "results")
public JAXBElement<BatchResult> createResults(BatchResult value) {
return new JAXBElement<>(RESULTS_QNAME, BatchResult.class, null, value);
}
/**
* Create an instance of {@link jakarta.xml.bind.JAXBElement }{@code <}{@link QueryResultList }{@code >}}
*/
@XmlElementDecl(namespace = "http://www.force.com/2009/06/asyncapi/dataload", name = "result-list")
public JAXBElement<QueryResultList> createResultList(QueryResultList value) {
return new JAXBElement<>(RESULT_LIST_QNAME, QueryResultList.class, null, value);
}
/**
* Create an instance of {@link jakarta.xml.bind.JAXBElement }{@code <}{@link BatchInfoList }{@code >}}
*/
@XmlElementDecl(namespace = "http://www.force.com/2009/06/asyncapi/dataload", name = "batchInfoList")
public JAXBElement<BatchInfoList> createBatchInfoList(BatchInfoList value) {
return new JAXBElement<>(BATCH_INFO_LIST_QNAME, BatchInfoList.class, null, value);
}
/**
* Create an instance of {@link jakarta.xml.bind.JAXBElement }{@code <}{@link QueryResult }{@code >}}
*/
@XmlElementDecl(namespace = "http://www.force.com/2009/06/asyncapi/dataload", name = "queryResult")
public JAXBElement<QueryResult> createQueryResult(QueryResult value) {
return new JAXBElement<>(QUERY_RESULT_QNAME, QueryResult.class, null, value);
}
}
| ObjectFactory |
java | mybatis__mybatis-3 | src/main/java/org/apache/ibatis/scripting/xmltags/WhereSqlNode.java | {
"start": 834,
"end": 1160
} | class ____ extends TrimSqlNode {
private static final List<String> prefixList = Arrays.asList("AND ", "OR ", "AND\n", "OR\n", "AND\r", "OR\r", "AND\t",
"OR\t");
public WhereSqlNode(Configuration configuration, SqlNode contents) {
super(configuration, contents, "WHERE", prefixList, null, null);
}
}
| WhereSqlNode |
java | spring-projects__spring-boot | smoke-test/spring-boot-smoke-test-integration/src/main/java/smoketest/integration/SampleIntegrationApplication.java | {
"start": 1367,
"end": 2750
} | class ____ {
private final ServiceProperties serviceProperties;
public SampleIntegrationApplication(ServiceProperties serviceProperties) {
this.serviceProperties = serviceProperties;
}
@Bean
public FileReadingMessageSource fileReader() {
FileReadingMessageSource reader = new FileReadingMessageSource();
File inputDir = this.serviceProperties.getInputDir();
Assert.state(inputDir != null, "'inputDir' must not be null");
reader.setDirectory(inputDir);
return reader;
}
@Bean
public DirectChannel inputChannel() {
return new DirectChannel();
}
@Bean
public DirectChannel outputChannel() {
return new DirectChannel();
}
@Bean
public FileWritingMessageHandler fileWriter() {
File outputDir = this.serviceProperties.getOutputDir();
Assert.state(outputDir != null, "'outputDir' must not be null");
FileWritingMessageHandler writer = new FileWritingMessageHandler(outputDir);
writer.setExpectReply(false);
return writer;
}
@Bean
public IntegrationFlow integrationFlow(SampleEndpoint endpoint) {
return IntegrationFlow.from(fileReader(), new FixedRatePoller())
.channel(inputChannel())
.handle(endpoint)
.channel(outputChannel())
.handle(fileWriter())
.get();
}
public static void main(String[] args) {
SpringApplication.run(SampleIntegrationApplication.class, args);
}
private static final | SampleIntegrationApplication |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleCondition.java | {
"start": 767,
"end": 4102
} | class ____ implements ToXContentObject, Writeable {
public static final ParseField RULE_CONDITION_FIELD = new ParseField("rule_condition");
public static final ParseField APPLIES_TO_FIELD = new ParseField("applies_to");
public static final ParseField VALUE_FIELD = new ParseField("value");
// These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly
public static final ConstructingObjectParser<RuleCondition, Void> LENIENT_PARSER = createParser(true);
public static final ConstructingObjectParser<RuleCondition, Void> STRICT_PARSER = createParser(false);
private static ConstructingObjectParser<RuleCondition, Void> createParser(boolean ignoreUnknownFields) {
ConstructingObjectParser<RuleCondition, Void> parser = new ConstructingObjectParser<>(
RULE_CONDITION_FIELD.getPreferredName(),
ignoreUnknownFields,
a -> new RuleCondition((AppliesTo) a[0], (Operator) a[1], (double) a[2])
);
parser.declareString(ConstructingObjectParser.constructorArg(), AppliesTo::fromString, APPLIES_TO_FIELD);
parser.declareString(ConstructingObjectParser.constructorArg(), Operator::fromString, Operator.OPERATOR_FIELD);
parser.declareDouble(ConstructingObjectParser.constructorArg(), VALUE_FIELD);
return parser;
}
private final AppliesTo appliesTo;
private final Operator operator;
private final double value;
public RuleCondition(StreamInput in) throws IOException {
appliesTo = AppliesTo.readFromStream(in);
operator = Operator.readFromStream(in);
value = in.readDouble();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
appliesTo.writeTo(out);
operator.writeTo(out);
out.writeDouble(value);
}
public RuleCondition(AppliesTo appliesTo, Operator operator, double value) {
this.appliesTo = appliesTo;
this.operator = operator;
this.value = value;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(APPLIES_TO_FIELD.getPreferredName(), appliesTo);
builder.field(Operator.OPERATOR_FIELD.getPreferredName(), operator);
builder.field(VALUE_FIELD.getPreferredName(), value);
builder.endObject();
return builder;
}
public AppliesTo getAppliesTo() {
return appliesTo;
}
public Operator getOperator() {
return operator;
}
public double getValue() {
return value;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof RuleCondition == false) {
return false;
}
RuleCondition other = (RuleCondition) obj;
return appliesTo == other.appliesTo && operator == other.operator && value == other.value;
}
@Override
public int hashCode() {
return Objects.hash(appliesTo, operator, value);
}
public static RuleCondition createTime(Operator operator, long epochSeconds) {
return new RuleCondition(AppliesTo.TIME, operator, epochSeconds);
}
public | RuleCondition |
java | apache__flink | flink-core-api/src/main/java/org/apache/flink/util/function/TriFunction.java | {
"start": 1159,
"end": 1470
} | interface ____<S, T, U, R> {
/**
* Applies this function to the given arguments.
*
* @param s the first function argument
* @param t the second function argument
* @param u the third function argument
* @return the function result
*/
R apply(S s, T t, U u);
}
| TriFunction |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java | {
"start": 1561,
"end": 1853
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(OpensslCipher.class.getName());
public static final int ENCRYPT_MODE = 1;
public static final int DECRYPT_MODE = 0;
/** Currently only support AES/CTR/NoPadding and SM4/CTR/NoPadding. */
private | OpensslCipher |
java | redisson__redisson | redisson/src/main/java/org/redisson/RedissonReactive.java | {
"start": 1409,
"end": 62403
} | class ____ implements RedissonReactiveClient {
private final WriteBehindService writeBehindService;
private final EvictionScheduler evictionScheduler;
private final CommandReactiveExecutor commandExecutor;
private final ConnectionManager connectionManager;
RedissonReactive(ConnectionManager connectionManager, EvictionScheduler evictionScheduler,
WriteBehindService writeBehindService) {
this.connectionManager = connectionManager;
RedissonObjectBuilder objectBuilder = null;
if (connectionManager.getServiceManager().getCfg().isReferenceEnabled()) {
objectBuilder = new RedissonObjectBuilder(this);
}
commandExecutor = CommandReactiveExecutor.create(connectionManager, objectBuilder);
this.evictionScheduler = evictionScheduler;
this.writeBehindService = writeBehindService;
}
public CommandReactiveExecutor getCommandExecutor() {
return commandExecutor;
}
@Override
public <K, V> RStreamReactive<K, V> getStream(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonStream<K, V>(commandExecutor, name), RStreamReactive.class);
}
@Override
public <K, V> RStreamReactive<K, V> getStream(String name, Codec codec) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonStream<K, V>(codec, commandExecutor, name), RStreamReactive.class);
}
@Override
public <K, V> RStreamReactive<K, V> getStream(PlainOptions options) {
PlainParams params = (PlainParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor,
new RedissonStream<K, V>(params.getCodec(), ca, params.getName()), RStreamReactive.class);
}
@Override
public RSearchReactive getSearch() {
return getSearch((Codec) null);
}
@Override
public RSearchReactive getSearch(Codec codec) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonSearch(codec, commandExecutor), RSearchReactive.class);
}
@Override
public RSearchReactive getSearch(OptionalOptions options) {
OptionalParams params = (OptionalParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor, new RedissonSearch(params.getCodec(), ca), RSearchReactive.class);
}
@Override
public <V> RGeoReactive<V> getGeo(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonGeo<V>(commandExecutor, name, null),
new RedissonScoredSortedSetReactive<V>(commandExecutor, name), RGeoReactive.class);
}
@Override
public <V> RGeoReactive<V> getGeo(String name, Codec codec) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonGeo<V>(codec, commandExecutor, name, null),
new RedissonScoredSortedSetReactive<V>(codec, commandExecutor, name), RGeoReactive.class);
}
@Override
public <V> RGeoReactive<V> getGeo(PlainOptions options) {
PlainParams params = (PlainParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor,
new RedissonGeo<V>(params.getCodec(), ca, params.getName(), null),
new RedissonScoredSortedSetReactive<V>(params.getCodec(), ca, params.getName()), RGeoReactive.class);
}
@Override
public RLockReactive getFairLock(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonFairLock(commandExecutor, name), RLockReactive.class);
}
@Override
public RLockReactive getFairLock(CommonOptions options) {
CommonParams params = (CommonParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor, new RedissonFairLock(ca, params.getName()), RLockReactive.class);
}
@Override
public RRateLimiterReactive getRateLimiter(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonRateLimiter(commandExecutor, name), RRateLimiterReactive.class);
}
@Override
public RRateLimiterReactive getRateLimiter(CommonOptions options) {
CommonParams params = (CommonParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor, new RedissonRateLimiter(ca, params.getName()), RRateLimiterReactive.class);
}
@Override
public RBinaryStreamReactive getBinaryStream(String name) {
RedissonBinaryStream stream = new RedissonBinaryStream(commandExecutor, name);
return ReactiveProxyBuilder.create(commandExecutor, stream,
new RedissonBinaryStreamReactive(commandExecutor, stream), RBinaryStreamReactive.class);
}
@Override
public RBinaryStreamReactive getBinaryStream(CommonOptions options) {
CommonParams params = (CommonParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
RedissonBinaryStream stream = new RedissonBinaryStream(ca, params.getName());
return ReactiveProxyBuilder.create(commandExecutor, stream,
new RedissonBinaryStreamReactive(ca, stream), RBinaryStreamReactive.class);
}
@Override
public RSemaphoreReactive getSemaphore(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonSemaphore(commandExecutor, name), RSemaphoreReactive.class);
}
@Override
public RSemaphoreReactive getSemaphore(CommonOptions options) {
CommonParams params = (CommonParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor, new RedissonSemaphore(ca, params.getName()), RSemaphoreReactive.class);
}
@Override
public RPermitExpirableSemaphoreReactive getPermitExpirableSemaphore(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonPermitExpirableSemaphore(commandExecutor, name), RPermitExpirableSemaphoreReactive.class);
}
@Override
public RPermitExpirableSemaphoreReactive getPermitExpirableSemaphore(CommonOptions options) {
CommonParams params = (CommonParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor,
new RedissonPermitExpirableSemaphore(ca, params.getName()), RPermitExpirableSemaphoreReactive.class);
}
@Override
public RReadWriteLockReactive getReadWriteLock(String name) {
return new RedissonReadWriteLockReactive(commandExecutor, name);
}
@Override
public RReadWriteLockReactive getReadWriteLock(CommonOptions options) {
CommonParams params = (CommonParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return new RedissonReadWriteLockReactive(ca, params.getName());
}
@Override
public RLockReactive getLock(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonLock(commandExecutor, name), RLockReactive.class);
}
@Override
public RLockReactive getLock(CommonOptions options) {
CommonParams params = (CommonParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor, new RedissonLock(ca, params.getName()), RLockReactive.class);
}
@Override
public RLockReactive getSpinLock(String name) {
return getSpinLock(name, LockOptions.defaults());
}
@Override
public RLockReactive getSpinLock(String name, LockOptions.BackOff backOff) {
RedissonSpinLock spinLock = new RedissonSpinLock(commandExecutor, name, backOff);
return ReactiveProxyBuilder.create(commandExecutor, spinLock, RLockReactive.class);
}
@Override
public RFencedLockReactive getFencedLock(String name) {
RedissonFencedLock lock = new RedissonFencedLock(commandExecutor, name);
return ReactiveProxyBuilder.create(commandExecutor, lock, RFencedLockReactive.class);
}
@Override
public RFencedLockReactive getFencedLock(CommonOptions options) {
CommonParams params = (CommonParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
RedissonFencedLock lock = new RedissonFencedLock(ca, params.getName());
return ReactiveProxyBuilder.create(commandExecutor, lock, RFencedLockReactive.class);
}
@Override
public RLockReactive getMultiLock(RLockReactive... locks) {
RLock[] ls = Arrays.stream(locks)
.map(l -> new RedissonLock(commandExecutor, l.getName()))
.toArray(RLock[]::new);
return ReactiveProxyBuilder.create(commandExecutor, new RedissonMultiLock(ls), RLockReactive.class);
}
@Override
public RLockReactive getMultiLock(String group, Collection<Object> values) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonFasterMultiLock(commandExecutor, group, values), RLockReactive.class);
}
@Override
public RLockReactive getMultiLock(RLock... locks) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonMultiLock(locks), RLockReactive.class);
}
@Override
public RLockReactive getRedLock(RLock... locks) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonRedLock(locks), RLockReactive.class);
}
@Override
public RCountDownLatchReactive getCountDownLatch(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonCountDownLatch(commandExecutor, name), RCountDownLatchReactive.class);
}
@Override
public RCountDownLatchReactive getCountDownLatch(CommonOptions options) {
CommonParams params = (CommonParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor, new RedissonCountDownLatch(ca, params.getName()), RCountDownLatchReactive.class);
}
@Override
public <K, V> RMapCacheReactive<K, V> getMapCache(String name, Codec codec) {
RMapCache<K, V> map = new RedissonMapCache<K, V>(codec, evictionScheduler, commandExecutor, name, null, null, writeBehindService);
return ReactiveProxyBuilder.create(commandExecutor, map,
new RedissonMapCacheReactive<>(map, commandExecutor), RMapCacheReactive.class);
}
@Override
public <K, V> RMapCacheReactive<K, V> getMapCache(String name) {
RMapCache<K, V> map = new RedissonMapCache<K, V>(evictionScheduler, commandExecutor, name, null, null, writeBehindService);
return ReactiveProxyBuilder.create(commandExecutor, map,
new RedissonMapCacheReactive<>(map, commandExecutor), RMapCacheReactive.class);
}
@Override
public <K, V> RMapCacheReactive<K, V> getMapCache(org.redisson.api.options.MapCacheOptions<K, V> options) {
MapCacheParams<K, V> params = (MapCacheParams) options;
MapCacheOptions<K, V> ops = createOptions(params);
CommandReactiveExecutor ca = commandExecutor.copy(params);
RMapCache<K, V> map = new RedissonMapCache<>(params.getCodec(), evictionScheduler, ca,
params.getName(), null, ops, writeBehindService);
return ReactiveProxyBuilder.create(commandExecutor, map,
new RedissonMapCacheReactive<>(map, ca), RMapCacheReactive.class);
}
@Override
public <K, V> RMapCacheNativeReactive<K, V> getMapCacheNative(String name) {
RMapCacheNative<K, V> map = new RedissonMapCacheNative<>(commandExecutor, name, null, null, writeBehindService);
return ReactiveProxyBuilder.create(commandExecutor, map,
new RedissonMapCacheReactive<>(map, commandExecutor), RMapCacheNativeReactive.class);
}
@Override
public <K, V> RMapCacheNativeReactive<K, V> getMapCacheNative(String name, Codec codec) {
RMapCacheNative<K, V> map = new RedissonMapCacheNative<>(codec, commandExecutor, name, null, null, writeBehindService);
return ReactiveProxyBuilder.create(commandExecutor, map,
new RedissonMapCacheReactive<>(map, commandExecutor), RMapCacheNativeReactive.class);
}
@Override
public <K, V> RMapCacheNativeReactive<K, V> getMapCacheNative(org.redisson.api.options.MapOptions<K, V> options) {
MapParams<K, V> params = (MapParams<K, V>) options;
MapOptions<K, V> ops = createOptions(params);
CommandReactiveExecutor ca = commandExecutor.copy(params);
RMapCacheNative<K, V> map = new RedissonMapCacheNative<>(params.getCodec(), ca,
params.getName(), null, ops, writeBehindService);
return ReactiveProxyBuilder.create(commandExecutor, map,
new RedissonMapCacheReactive<>(map, ca), RMapCacheNativeReactive.class);
}
private static <K, V> MapOptions<K, V> createOptions(MapParams<K, V> params) {
MapOptions<K, V> ops = MapOptions.<K, V>defaults()
.loader(params.getLoader())
.loaderAsync(params.getLoaderAsync())
.writer(params.getWriter())
.writerAsync(params.getWriterAsync())
.writeBehindDelay(params.getWriteBehindDelay())
.writeBehindBatchSize(params.getWriteBehindBatchSize())
.writerRetryInterval(Duration.ofMillis(params.getWriteRetryInterval()));
if (params.getWriteMode() != null) {
ops.writeMode(MapOptions.WriteMode.valueOf(params.getWriteMode().toString()));
}
if (params.getWriteRetryAttempts() > 0) {
ops.writerRetryAttempts(params.getWriteRetryAttempts());
}
return ops;
}
private static <K, V> MapCacheOptions<K, V> createOptions(MapCacheParams<K, V> params) {
MapCacheOptions<K, V> ops = MapCacheOptions.<K, V>defaults()
.loader(params.getLoader())
.loaderAsync(params.getLoaderAsync())
.writer(params.getWriter())
.writerAsync(params.getWriterAsync())
.writeBehindDelay(params.getWriteBehindDelay())
.writeBehindBatchSize(params.getWriteBehindBatchSize())
.writerRetryInterval(Duration.ofMillis(params.getWriteRetryInterval()));
if (params.getWriteMode() != null) {
ops.writeMode(MapOptions.WriteMode.valueOf(params.getWriteMode().toString()));
}
if (params.getWriteRetryAttempts() > 0) {
ops.writerRetryAttempts(params.getWriteRetryAttempts());
}
if (params.isRemoveEmptyEvictionTask()) {
ops.removeEmptyEvictionTask();
}
return ops;
}
@Override
public <V> RBucketReactive<V> getBucket(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonBucket<V>(commandExecutor, name), RBucketReactive.class);
}
@Override
public <V> RBucketReactive<V> getBucket(String name, Codec codec) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonBucket<V>(codec, commandExecutor, name), RBucketReactive.class);
}
@Override
public <V> RBucketReactive<V> getBucket(PlainOptions options) {
PlainParams params = (PlainParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor,
new RedissonBucket<V>(params.getCodec(), ca, params.getName()), RBucketReactive.class);
}
@Override
public RBucketsReactive getBuckets() {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonBuckets(commandExecutor), RBucketsReactive.class);
}
@Override
public RBucketsReactive getBuckets(Codec codec) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonBuckets(codec, commandExecutor), RBucketsReactive.class);
}
@Override
public RBucketsReactive getBuckets(OptionalOptions options) {
OptionalParams params = (OptionalParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor, new RedissonBuckets(params.getCodec(), ca), RBucketsReactive.class);
}
@Override
public <V> List<RBucketReactive<V>> findBuckets(String pattern) {
RKeys redissonKeys = new RedissonKeys(commandExecutor);
Iterable<String> keys = redissonKeys.getKeysByPattern(pattern);
List<RBucketReactive<V>> buckets = new ArrayList<RBucketReactive<V>>();
for (Object key : keys) {
if (key != null) {
buckets.add(this.<V>getBucket(key.toString()));
}
}
return buckets;
}
@Override
public <V> RJsonBucketReactive<V> getJsonBucket(String name, JsonCodec codec) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonJsonBucket<V>(codec, commandExecutor, name), RJsonBucketReactive.class);
}
@Override
public <V> RJsonBucketReactive<V> getJsonBucket(JsonBucketOptions<V> options) {
JsonBucketParams<V> params = (JsonBucketParams<V>) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
return ReactiveProxyBuilder.create(commandExecutor,
new RedissonJsonBucket<V>(params.getCodec(), ca, params.getName()), RJsonBucketReactive.class);
}
@Override
public RJsonBucketsReactive getJsonBuckets(JsonCodec codec) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonJsonBuckets(codec, commandExecutor), RJsonBucketsReactive.class);
}
@Override
public <V> RHyperLogLogReactive<V> getHyperLogLog(String name) {
return ReactiveProxyBuilder.create(commandExecutor, new RedissonHyperLogLog<V>(commandExecutor, name), RHyperLogLogReactive.class);
}
// Codec variant of getHyperLogLog(String).
@Override
public <V> RHyperLogLogReactive<V> getHyperLogLog(String name, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonHyperLogLog<V>(codec, commandExecutor, name), RHyperLogLogReactive.class);
}
// Options variant: copies executor parameters before constructing the imperative object.
@Override
public <V> RHyperLogLogReactive<V> getHyperLogLog(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor,
            new RedissonHyperLogLog<V>(params.getCodec(), ca, params.getName()), RHyperLogLogReactive.class);
}
// Reactive id generator bound to the given key.
@Override
public RIdGeneratorReactive getIdGenerator(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonIdGenerator(commandExecutor, name), RIdGeneratorReactive.class);
}
// Options variant of getIdGenerator(String).
@Override
public RIdGeneratorReactive getIdGenerator(CommonOptions options) {
    CommonParams params = (CommonParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonIdGenerator(ca, params.getName()), RIdGeneratorReactive.class);
}
// Reactive list: proxy combines the imperative RedissonList with a dedicated reactive wrapper.
@Override
public <V> RListReactive<V> getList(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonList<V>(commandExecutor, name, null),
            new RedissonListReactive<V>(commandExecutor, name), RListReactive.class);
}
// Codec variant of getList(String).
@Override
public <V> RListReactive<V> getList(String name, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonList<V>(codec, commandExecutor, name, null),
            new RedissonListReactive<V>(codec, commandExecutor, name), RListReactive.class);
}
// Options variant: both the imperative list and the reactive wrapper use the copied executor.
@Override
public <V> RListReactive<V> getList(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonList<V>(params.getCodec(), ca, params.getName(), null),
            new RedissonListReactive<V>(params.getCodec(), ca, params.getName()), RListReactive.class);
}
// Reactive list-backed multimap, default codec.
@Override
public <K, V> RListMultimapReactive<K, V> getListMultimap(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonListMultimap<K, V>(commandExecutor, name),
            new RedissonListMultimapReactive<K, V>(commandExecutor, name), RListMultimapReactive.class);
}
// Codec variant of getListMultimap(String).
@Override
public <K, V> RListMultimapReactive<K, V> getListMultimap(String name, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonListMultimap<K, V>(codec, commandExecutor, name),
            new RedissonListMultimapReactive<K, V>(codec, commandExecutor, name), RListMultimapReactive.class);
}
// Options variant of getListMultimap(String).
@Override
public <K, V> RListMultimapReactive<K, V> getListMultimap(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonListMultimap<K, V>(params.getCodec(), ca, params.getName()),
            new RedissonListMultimapReactive<K, V>(params.getCodec(), ca, params.getName()), RListMultimapReactive.class);
}
// Reactive set-backed multimap; the wrapper also receives this client instance.
@Override
public <K, V> RSetMultimapReactive<K, V> getSetMultimap(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonSetMultimap<K, V>(commandExecutor, name),
            new RedissonSetMultimapReactive<K, V>(commandExecutor, name, this), RSetMultimapReactive.class);
}
// Codec variant of getSetMultimap(String).
@Override
public <K, V> RSetMultimapReactive<K, V> getSetMultimap(String name, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonSetMultimap<K, V>(codec, commandExecutor, name),
            new RedissonSetMultimapReactive<K, V>(codec, commandExecutor, name, this), RSetMultimapReactive.class);
}
// Options variant of getSetMultimap(String).
@Override
public <K, V> RSetMultimapReactive<K, V> getSetMultimap(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonSetMultimap<K, V>(params.getCodec(), ca, params.getName()),
            new RedissonSetMultimapReactive<K, V>(params.getCodec(), ca, params.getName(), this), RSetMultimapReactive.class);
}
// Reactive list multimap with per-entry eviction; the imperative object is shared with the wrapper.
@Override
public <K, V> RListMultimapCacheReactive<K, V> getListMultimapCache(String name) {
    RedissonListMultimapCache<K, V> listMultimap = new RedissonListMultimapCache<>(evictionScheduler, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, listMultimap,
            new RedissonListMultimapCacheReactive<K, V>(listMultimap, commandExecutor), RListMultimapCacheReactive.class);
}
// Codec variant of getListMultimapCache(String).
@Override
public <K, V> RListMultimapCacheReactive<K, V> getListMultimapCache(String name, Codec codec) {
    RedissonListMultimapCache<K, V> listMultimap = new RedissonListMultimapCache<>(evictionScheduler, codec, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, listMultimap,
            new RedissonListMultimapCacheReactive<>(listMultimap, commandExecutor), RListMultimapCacheReactive.class);
}
// Options variant of getListMultimapCache(String).
@Override
public <K, V> RListMultimapCacheReactive<K, V> getListMultimapCache(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonListMultimapCache<K, V> listMultimap = new RedissonListMultimapCache<>(evictionScheduler, params.getCodec(), ca, params.getName());
    return ReactiveProxyBuilder.create(commandExecutor, listMultimap,
            new RedissonListMultimapCacheReactive<>(listMultimap, ca), RListMultimapCacheReactive.class);
}
// "Native" variant relies on server-side expiration, so no eviction scheduler is needed here.
@Override
public <K, V> RListMultimapCacheNativeReactive<K, V> getListMultimapCacheNative(String name) {
    RedissonListMultimapCacheNative<K, V> listMultimap = new RedissonListMultimapCacheNative<>(commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, listMultimap,
            new RedissonListMultimapCacheReactive<>(listMultimap, commandExecutor), RListMultimapCacheNativeReactive.class);
}
// Codec variant of getListMultimapCacheNative(String).
@Override
public <K, V> RListMultimapCacheNativeReactive<K, V> getListMultimapCacheNative(String name, Codec codec) {
    RedissonListMultimapCacheNative<K, V> listMultimap = new RedissonListMultimapCacheNative<>(codec, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, listMultimap,
            new RedissonListMultimapCacheReactive<>(listMultimap, commandExecutor), RListMultimapCacheNativeReactive.class);
}
// Options variant of getListMultimapCacheNative(String).
@Override
public <K, V> RListMultimapCacheNativeReactive<K, V> getListMultimapCacheNative(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonListMultimapCacheNative<K, V> listMultimap = new RedissonListMultimapCacheNative<>(params.getCodec(), ca, params.getName());
    return ReactiveProxyBuilder.create(commandExecutor, listMultimap,
            new RedissonListMultimapCacheReactive<>(listMultimap, ca), RListMultimapCacheNativeReactive.class);
}
// Reactive set multimap with per-entry eviction driven by the shared eviction scheduler.
@Override
public <K, V> RSetMultimapCacheReactive<K, V> getSetMultimapCache(String name) {
    RedissonSetMultimapCache<K, V> setMultimap = new RedissonSetMultimapCache<>(evictionScheduler, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, setMultimap,
            new RedissonSetMultimapCacheReactive<K, V>(setMultimap, commandExecutor, this), RSetMultimapCacheReactive.class);
}
// Codec variant of getSetMultimapCache(String).
@Override
public <K, V> RSetMultimapCacheReactive<K, V> getSetMultimapCache(String name, Codec codec) {
    RedissonSetMultimapCache<K, V> setMultimap = new RedissonSetMultimapCache<>(evictionScheduler, codec, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, setMultimap,
            new RedissonSetMultimapCacheReactive<K, V>(setMultimap, commandExecutor, this), RSetMultimapCacheReactive.class);
}
// Options variant of getSetMultimapCache(String).
@Override
public <K, V> RSetMultimapCacheReactive<K, V> getSetMultimapCache(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonSetMultimapCache<K, V> setMultimap = new RedissonSetMultimapCache<>(evictionScheduler, params.getCodec(), ca, params.getName());
    return ReactiveProxyBuilder.create(commandExecutor, setMultimap,
            new RedissonSetMultimapCacheReactive<K, V>(setMultimap, ca, this), RSetMultimapCacheReactive.class);
}
// "Native" variant relies on server-side expiration; no eviction scheduler involved.
@Override
public <K, V> RSetMultimapCacheNativeReactive<K, V> getSetMultimapCacheNative(String name) {
    RedissonSetMultimapCacheNative<K, V> setMultimap = new RedissonSetMultimapCacheNative<>(commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, setMultimap,
            new RedissonSetMultimapCacheReactive<K, V>(setMultimap, commandExecutor, this), RSetMultimapCacheNativeReactive.class);
}
// Codec variant of getSetMultimapCacheNative(String).
@Override
public <K, V> RSetMultimapCacheNativeReactive<K, V> getSetMultimapCacheNative(String name, Codec codec) {
    RedissonSetMultimapCacheNative<K, V> setMultimap = new RedissonSetMultimapCacheNative<>(codec, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, setMultimap,
            new RedissonSetMultimapCacheReactive<K, V>(setMultimap, commandExecutor, this), RSetMultimapCacheNativeReactive.class);
}
// Options variant of getSetMultimapCacheNative(String).
@Override
public <K, V> RSetMultimapCacheNativeReactive<K, V> getSetMultimapCacheNative(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonSetMultimapCacheNative<K, V> setMultimap = new RedissonSetMultimapCacheNative<>(params.getCodec(), ca, params.getName());
    return ReactiveProxyBuilder.create(commandExecutor, setMultimap,
            new RedissonSetMultimapCacheReactive<K, V>(setMultimap, ca, this), RSetMultimapCacheNativeReactive.class);
}
// Reactive map without per-call options; write-behind support comes from the shared service.
@Override
public <K, V> RMapReactive<K, V> getMap(String name) {
    RedissonMap<K, V> map = new RedissonMap<K, V>(commandExecutor, name, null, null, writeBehindService);
    return ReactiveProxyBuilder.create(commandExecutor, map,
            new RedissonMapReactive<K, V>(map, commandExecutor), RMapReactive.class);
}
// Codec variant of getMap(String).
@Override
public <K, V> RMapReactive<K, V> getMap(String name, Codec codec) {
    RedissonMap<K, V> map = new RedissonMap<K, V>(codec, commandExecutor, name, null, null, writeBehindService);
    return ReactiveProxyBuilder.create(commandExecutor, map,
            new RedissonMapReactive<K, V>(map, commandExecutor), RMapReactive.class);
}
// Options variant: converts the public map options into internal MapOptions before construction.
@Override
public <K, V> RMapReactive<K, V> getMap(org.redisson.api.options.MapOptions<K, V> options) {
    MapParams<K, V> params = (MapParams<K, V>) options;
    MapOptions<K, V> ops = createOptions(params);
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonMap<K, V> map = new RedissonMap<>(params.getCodec(), ca, params.getName(), null, ops, writeBehindService);
    return ReactiveProxyBuilder.create(commandExecutor, map,
            new RedissonMapReactive<K, V>(map, ca), RMapReactive.class);
}
// Reactive set bound to the given key, default codec.
@Override
public <V> RSetReactive<V> getSet(String name) {
    RedissonSet<V> set = new RedissonSet<V>(commandExecutor, name, null);
    return ReactiveProxyBuilder.create(commandExecutor, set,
            new RedissonSetReactive<V>(set, this), RSetReactive.class);
}
// Codec variant of getSet(String).
@Override
public <V> RSetReactive<V> getSet(String name, Codec codec) {
    RedissonSet<V> set = new RedissonSet<V>(codec, commandExecutor, name, null);
    return ReactiveProxyBuilder.create(commandExecutor, set,
            new RedissonSetReactive<V>(set, this), RSetReactive.class);
}
// Options variant of getSet(String).
@Override
public <V> RSetReactive<V> getSet(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonSet<V> set = new RedissonSet<V>(params.getCodec(), ca, params.getName(), null);
    return ReactiveProxyBuilder.create(commandExecutor, set,
            new RedissonSetReactive<V>(set, this), RSetReactive.class);
}
// Reactive scored sorted set, default codec.
@Override
public <V> RScoredSortedSetReactive<V> getScoredSortedSet(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonScoredSortedSet<V>(commandExecutor, name, null),
            new RedissonScoredSortedSetReactive<V>(commandExecutor, name), RScoredSortedSetReactive.class);
}
// Codec variant of getScoredSortedSet(String).
@Override
public <V> RScoredSortedSetReactive<V> getScoredSortedSet(String name, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonScoredSortedSet<V>(codec, commandExecutor, name, null),
            new RedissonScoredSortedSetReactive<V>(codec, commandExecutor, name), RScoredSortedSetReactive.class);
}
// Options variant of getScoredSortedSet(String).
@Override
public <V> RScoredSortedSetReactive<V> getScoredSortedSet(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonScoredSortedSet<V>(params.getCodec(), ca, params.getName(), null),
            new RedissonScoredSortedSetReactive<V>(params.getCodec(), ca, params.getName()), RScoredSortedSetReactive.class);
}
// Reactive lexicographically-ordered sorted set; wrapper is built from the imperative instance.
@Override
public RLexSortedSetReactive getLexSortedSet(String name) {
    RedissonLexSortedSet set = new RedissonLexSortedSet(commandExecutor, name, null);
    return ReactiveProxyBuilder.create(commandExecutor, set,
            new RedissonLexSortedSetReactive(set),
            RLexSortedSetReactive.class);
}
// Options variant of getLexSortedSet(String).
@Override
public RLexSortedSetReactive getLexSortedSet(CommonOptions options) {
    CommonParams params = (CommonParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonLexSortedSet set = new RedissonLexSortedSet(ca, params.getName(), null);
    return ReactiveProxyBuilder.create(commandExecutor, set,
            new RedissonLexSortedSetReactive(set),
            RLexSortedSetReactive.class);
}
// Reactive sharded (cluster-aware) topic, default codec.
@Override
public RShardedTopicReactive getShardedTopic(String name) {
    RedissonShardedTopic topic = new RedissonShardedTopic(commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, topic,
            new RedissonTopicReactive(topic), RShardedTopicReactive.class);
}
// Codec variant of getShardedTopic(String).
@Override
public RShardedTopicReactive getShardedTopic(String name, Codec codec) {
    RedissonShardedTopic topic = new RedissonShardedTopic(codec, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, topic,
            new RedissonTopicReactive(topic), RShardedTopicReactive.class);
}
// Options variant of getShardedTopic(String).
@Override
public RShardedTopicReactive getShardedTopic(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonShardedTopic topic = new RedissonShardedTopic(params.getCodec(), ca, params.getName());
    return ReactiveProxyBuilder.create(commandExecutor, topic,
            new RedissonTopicReactive(topic), RShardedTopicReactive.class);
}
// Reactive pub/sub topic, default codec.
@Override
public RTopicReactive getTopic(String name) {
    RedissonTopic topic = new RedissonTopic(commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, topic,
            new RedissonTopicReactive(topic), RTopicReactive.class);
}
// Codec variant of getTopic(String).
@Override
public RTopicReactive getTopic(String name, Codec codec) {
    RedissonTopic topic = new RedissonTopic(codec, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, topic,
            new RedissonTopicReactive(topic), RTopicReactive.class);
}
// Options variant of getTopic(String).
@Override
public RTopicReactive getTopic(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonTopic topic = new RedissonTopic(params.getCodec(), ca, params.getName());
    return ReactiveProxyBuilder.create(commandExecutor, topic,
            new RedissonTopicReactive(topic), RTopicReactive.class);
}
// Reactive reliable topic (stream-backed delivery), default codec.
@Override
public RReliableTopicReactive getReliableTopic(String name) {
    RedissonReliableTopic topic = new RedissonReliableTopic(commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, topic,
            new RedissonReliableTopicReactive(topic), RReliableTopicReactive.class);
}
// Codec variant of getReliableTopic(String).
@Override
public RReliableTopicReactive getReliableTopic(String name, Codec codec) {
    RedissonReliableTopic topic = new RedissonReliableTopic(codec, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, topic,
            new RedissonReliableTopicReactive(topic), RReliableTopicReactive.class);
}
// Options variant of getReliableTopic(String).
@Override
public RReliableTopicReactive getReliableTopic(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonReliableTopic topic = new RedissonReliableTopic(params.getCodec(), ca, params.getName());
    return ReactiveProxyBuilder.create(commandExecutor, topic,
            new RedissonReliableTopicReactive(topic), RReliableTopicReactive.class);
}
// Reactive pattern-subscription topic, default codec.
@Override
public RPatternTopicReactive getPatternTopic(String pattern) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonPatternTopic(commandExecutor, pattern), RPatternTopicReactive.class);
}
// Codec variant of getPatternTopic(String).
@Override
public RPatternTopicReactive getPatternTopic(String pattern, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonPatternTopic(codec, commandExecutor, pattern), RPatternTopicReactive.class);
}
// Options variant of getPatternTopic(String).
@Override
public RPatternTopicReactive getPatternTopic(PatternTopicOptions options) {
    PatternTopicParams params = (PatternTopicParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor,
            new RedissonPatternTopic(params.getCodec(), ca, params.getPattern()), RPatternTopicReactive.class);
}
// Reactive queue; element iteration is delegated to the list-based reactive wrapper.
@Override
public <V> RQueueReactive<V> getQueue(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonQueue<V>(commandExecutor, name, null),
            new RedissonListReactive<V>(commandExecutor, name), RQueueReactive.class);
}
// Codec variant of getQueue(String).
@Override
public <V> RQueueReactive<V> getQueue(String name, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonQueue<V>(codec, commandExecutor, name, null),
            new RedissonListReactive<V>(codec, commandExecutor, name), RQueueReactive.class);
}
// Options variant of getQueue(String).
@Override
public <V> RQueueReactive<V> getQueue(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonQueue<V>(params.getCodec(), ca, params.getName(), null),
            new RedissonListReactive<V>(params.getCodec(), ca, params.getName()), RQueueReactive.class);
}
// Reliable queue is a PRO-only feature in this edition; always fails fast.
@Override
public <V> RReliableQueueReactive<V> getReliableQueue(String name) {
    throw new UnsupportedOperationException("This feature is implemented in the Redisson PRO version. Please refer to https://redisson.pro/feature-comparison.html");
}
// PRO-only feature; always fails fast.
@Override
public <V> RReliableQueueReactive<V> getReliableQueue(String name, Codec codec) {
    throw new UnsupportedOperationException("This feature is implemented in the Redisson PRO version. Please refer to https://redisson.pro/feature-comparison.html");
}
// PRO-only feature; always fails fast.
@Override
public <V> RReliableQueueReactive<V> getReliableQueue(PlainOptions options) {
    throw new UnsupportedOperationException("This feature is implemented in the Redisson PRO version. Please refer to https://redisson.pro/feature-comparison.html");
}
// Reactive bounded ring buffer, default codec.
@Override
public <V> RRingBufferReactive<V> getRingBuffer(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonRingBuffer<V>(commandExecutor, name, null), RRingBufferReactive.class);
}
// Codec variant of getRingBuffer(String).
@Override
public <V> RRingBufferReactive<V> getRingBuffer(String name, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonRingBuffer<V>(codec, commandExecutor, name, null), RRingBufferReactive.class);
}
// Options variant of getRingBuffer(String).
@Override
public <V> RRingBufferReactive<V> getRingBuffer(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor,
            new RedissonRingBuffer<V>(params.getCodec(), ca, params.getName(), null), RRingBufferReactive.class);
}
// Reactive blocking queue; blocking ops are adapted by the dedicated reactive wrapper.
@Override
public <V> RBlockingQueueReactive<V> getBlockingQueue(String name) {
    RedissonBlockingQueue<V> queue = new RedissonBlockingQueue<V>(commandExecutor, name, null);
    return ReactiveProxyBuilder.create(commandExecutor, queue,
            new RedissonBlockingQueueReactive<V>(queue), RBlockingQueueReactive.class);
}
// Codec variant of getBlockingQueue(String).
@Override
public <V> RBlockingQueueReactive<V> getBlockingQueue(String name, Codec codec) {
    RedissonBlockingQueue<V> queue = new RedissonBlockingQueue<V>(codec, commandExecutor, name, null);
    return ReactiveProxyBuilder.create(commandExecutor, queue,
            new RedissonBlockingQueueReactive<V>(queue), RBlockingQueueReactive.class);
}
// Options variant of getBlockingQueue(String).
@Override
public <V> RBlockingQueueReactive<V> getBlockingQueue(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RedissonBlockingQueue<V> queue = new RedissonBlockingQueue<V>(params.getCodec(), ca, params.getName(), null);
    return ReactiveProxyBuilder.create(commandExecutor, queue,
            new RedissonBlockingQueueReactive<V>(queue), RBlockingQueueReactive.class);
}
// Reactive deque; element iteration is delegated to the list-based reactive wrapper.
@Override
public <V> RDequeReactive<V> getDeque(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonDeque<V>(commandExecutor, name, null),
            new RedissonListReactive<V>(commandExecutor, name), RDequeReactive.class);
}
// Codec variant of getDeque(String).
@Override
public <V> RDequeReactive<V> getDeque(String name, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonDeque<V>(codec, commandExecutor, name, null),
            new RedissonListReactive<V>(codec, commandExecutor, name), RDequeReactive.class);
}
// Options variant of getDeque(String).
@Override
public <V> RDequeReactive<V> getDeque(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonDeque<V>(params.getCodec(), ca, params.getName(), null),
            new RedissonListReactive<V>(params.getCodec(), ca, params.getName()), RDequeReactive.class);
}
// Reactive time series with eviction via the shared scheduler, default codec.
@Override
public <V, L> RTimeSeriesReactive<V, L> getTimeSeries(String name) {
    RTimeSeries<V, L> timeSeries = new RedissonTimeSeries<V, L>(evictionScheduler, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, timeSeries,
            new RedissonTimeSeriesReactive<V, L>(timeSeries, this), RTimeSeriesReactive.class);
}
// Codec variant of getTimeSeries(String).
@Override
public <V, L> RTimeSeriesReactive<V, L> getTimeSeries(String name, Codec codec) {
    RTimeSeries<V, L> timeSeries = new RedissonTimeSeries<V, L>(codec, evictionScheduler, commandExecutor, name);
    return ReactiveProxyBuilder.create(commandExecutor, timeSeries,
            new RedissonTimeSeriesReactive<V, L>(timeSeries, this), RTimeSeriesReactive.class);
}
// Options variant of getTimeSeries(String).
@Override
public <V, L> RTimeSeriesReactive<V, L> getTimeSeries(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RTimeSeries<V, L> timeSeries = new RedissonTimeSeries<>(params.getCodec(), evictionScheduler, ca, params.getName());
    return ReactiveProxyBuilder.create(commandExecutor, timeSeries,
            new RedissonTimeSeriesReactive<V, L>(timeSeries, this), RTimeSeriesReactive.class);
}
// Reactive set with per-entry TTL support, default codec.
@Override
public <V> RSetCacheReactive<V> getSetCache(String name) {
    RSetCache<V> set = new RedissonSetCache<V>(evictionScheduler, commandExecutor, name, null);
    return ReactiveProxyBuilder.create(commandExecutor, set,
            new RedissonSetCacheReactive<V>(set, this), RSetCacheReactive.class);
}
// Codec variant of getSetCache(String).
@Override
public <V> RSetCacheReactive<V> getSetCache(String name, Codec codec) {
    RSetCache<V> set = new RedissonSetCache<V>(codec, evictionScheduler, commandExecutor, name, null);
    return ReactiveProxyBuilder.create(commandExecutor, set,
            new RedissonSetCacheReactive<V>(set, this), RSetCacheReactive.class);
}
// Options variant of getSetCache(String).
@Override
public <V> RSetCacheReactive<V> getSetCache(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    RSetCache<V> set = new RedissonSetCache<V>(params.getCodec(), evictionScheduler, ca, params.getName(), null);
    return ReactiveProxyBuilder.create(commandExecutor, set,
            new RedissonSetCacheReactive<V>(set, this), RSetCacheReactive.class);
}
// Reactive atomic long counter bound to the given key.
@Override
public RAtomicLongReactive getAtomicLong(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonAtomicLong(commandExecutor, name), RAtomicLongReactive.class);
}
// Options variant of getAtomicLong(String).
@Override
public RAtomicLongReactive getAtomicLong(CommonOptions options) {
    CommonParams params = (CommonParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor,
            new RedissonAtomicLong(ca, params.getName()), RAtomicLongReactive.class);
}
// Reactive atomic double counter bound to the given key.
@Override
public RAtomicDoubleReactive getAtomicDouble(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonAtomicDouble(commandExecutor, name), RAtomicDoubleReactive.class);
}
// Options variant of getAtomicDouble(String).
@Override
public RAtomicDoubleReactive getAtomicDouble(CommonOptions options) {
    CommonParams params = (CommonParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonAtomicDouble(ca, params.getName()), RAtomicDoubleReactive.class);
}
// Default remote service: fixed registry name "redisson_rs" and the configured codec.
@Override
public RRemoteService getRemoteService() {
    return getRemoteService("redisson_rs", connectionManager.getServiceManager().getCfg().getCodec());
}
// Named remote service with the configured codec.
@Override
public RRemoteService getRemoteService(String name) {
    return getRemoteService(name, connectionManager.getServiceManager().getCfg().getCodec());
}
// Default-named remote service ("redisson_rs") with a custom codec.
@Override
public RRemoteService getRemoteService(Codec codec) {
    return getRemoteService("redisson_rs", codec);
}
// Builds a remote service; when a non-default codec is supplied (reference comparison,
// matching the original behavior), the service name is appended to the executor id to
// keep responses for differently-encoded services separate.
@Override
public RRemoteService getRemoteService(String name, Codec codec) {
    String executorId = connectionManager.getServiceManager().getId();
    Codec configuredCodec = connectionManager.getServiceManager().getCfg().getCodec();
    if (codec != configuredCodec) {
        executorId += ":" + name;
    }
    return new RedissonRemoteService(codec, name, commandExecutor, executorId);
}
// Options variant: null codec means "use default", so the id suffix is only added for an
// explicit non-default codec.
@Override
public RRemoteService getRemoteService(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    String executorId = connectionManager.getServiceManager().getId();
    if (params.getCodec() != null && params.getCodec() != connectionManager.getServiceManager().getCfg().getCodec()) {
        executorId = executorId + ":" + params.getName();
    }
    return new RedissonRemoteService(params.getCodec(), params.getName(), ca, executorId);
}
// Reactive bit set bound to the given key.
@Override
public RBitSetReactive getBitSet(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonBitSet(commandExecutor, name), RBitSetReactive.class);
}
// Options variant of getBitSet(String).
@Override
public RBitSetReactive getBitSet(CommonOptions options) {
    CommonParams params = (CommonParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonBitSet(ca, params.getName()), RBitSetReactive.class);
}
// Reactive Bloom filter bound to the given key, default codec.
@Override
public <V> RBloomFilterReactive<V> getBloomFilter(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonBloomFilter<>(commandExecutor, name), RBloomFilterReactive.class);
}
// Codec variant of getBloomFilter(String).
@Override
public <V> RBloomFilterReactive<V> getBloomFilter(String name, Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonBloomFilter<>(codec, commandExecutor, name), RBloomFilterReactive.class);
}
// Options variant of getBloomFilter(String).
@Override
public <V> RBloomFilterReactive<V> getBloomFilter(PlainOptions options) {
    PlainParams params = (PlainParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor,
            new RedissonBloomFilter<V>(params.getCodec(), ca, params.getName()), RBloomFilterReactive.class);
}
// Reactive interface to server-side functions, default codec.
@Override
public RFunctionReactive getFunction() {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonFuction(commandExecutor), RFunctionReactive.class);
}
// Codec variant of getFunction().
@Override
public RFunctionReactive getFunction(Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonFuction(commandExecutor, codec), RFunctionReactive.class);
}
// Options variant of getFunction().
@Override
public RFunctionReactive getFunction(OptionalOptions options) {
    OptionalParams params = (OptionalParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonFuction(ca, params.getCodec()), RFunctionReactive.class);
}
// Reactive interface for Lua script evaluation, default codec.
@Override
public RScriptReactive getScript() {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonScript(commandExecutor), RScriptReactive.class);
}
// Codec variant of getScript().
@Override
public RScriptReactive getScript(Codec codec) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonScript(commandExecutor, codec), RScriptReactive.class);
}
// Options variant of getScript().
@Override
public RScriptReactive getScript(OptionalOptions options) {
    OptionalParams params = (OptionalParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonScript(ca, params.getCodec()), RScriptReactive.class);
}
// Reactive vector set bound to the given key.
@Override
public RVectorSetReactive getVectorSet(String name) {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonVectorSet(commandExecutor, name), RVectorSetReactive.class);
}
// Options variant of getVectorSet(String).
@Override
public RVectorSetReactive getVectorSet(CommonOptions options) {
    CommonParams params = (CommonParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonVectorSet(ca, params.getName()), RVectorSetReactive.class);
}
// Creates a reactive command batch configured by the supplied options.
@Override
public RBatchReactive createBatch(BatchOptions options) {
    return new RedissonBatchReactive(evictionScheduler, connectionManager, commandExecutor, options);
}
// Convenience overload with default batch options.
@Override
public RBatchReactive createBatch() {
    return createBatch(BatchOptions.defaults());
}
// Reactive key-space operations (scan, delete, rename, ...).
@Override
public RKeysReactive getKeys() {
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonKeys(commandExecutor), new RedissonKeysReactive(commandExecutor), RKeysReactive.class);
}
// Options variant of getKeys().
@Override
public RKeysReactive getKeys(KeysOptions options) {
    KeysParams params = (KeysParams) options;
    CommandReactiveExecutor ca = commandExecutor.copy(params);
    return ReactiveProxyBuilder.create(commandExecutor, new RedissonKeys(ca), new RedissonKeysReactive(ca), RKeysReactive.class);
}
// Exposes the client configuration held by the connection manager's service manager.
@Override
public Config getConfig() {
    return connectionManager.getServiceManager().getCfg();
}
// Node-management view over the current connection topology.
@Override
public NodesGroup<Node> getNodesGroup() {
    return new RedisNodes<>(connectionManager, connectionManager.getServiceManager(), commandExecutor);
}
// Cluster-node view; only valid when the client was configured for cluster mode.
@Override
public NodesGroup<ClusterNode> getClusterNodesGroup() {
    if (!getConfig().isClusterConfig()) {
        throw new IllegalStateException("Redisson not in cluster mode!");
    }
    return new RedisNodes<>(connectionManager, connectionManager.getServiceManager(), commandExecutor);
}
// Stops the write-behind service first, then tears down all connections.
@Override
public void shutdown() {
    writeBehindService.stop();
    connectionManager.shutdown();
}
// True once shutdown has completed (delegates to the service manager).
@Override
public boolean isShutdown() {
    return connectionManager.getServiceManager().isShutdown();
}
// True while shutdown is in progress (delegates to the service manager).
@Override
public boolean isShuttingDown() {
    return connectionManager.getServiceManager().isShuttingDown();
}
// Reactive map with per-entry TTL, custom codec and legacy MapCacheOptions.
@Override
public <K, V> RMapCacheReactive<K, V> getMapCache(String name, Codec codec, MapCacheOptions<K, V> options) {
    RMapCache<K, V> map = new RedissonMapCache<>(codec, evictionScheduler, commandExecutor, name, null, options, writeBehindService);
    return ReactiveProxyBuilder.create(commandExecutor, map,
            new RedissonMapCacheReactive<>(map, commandExecutor), RMapCacheReactive.class);
}
// Default-codec variant of getMapCache(String, Codec, MapCacheOptions).
@Override
public <K, V> RMapCacheReactive<K, V> getMapCache(String name, MapCacheOptions<K, V> options) {
    RMapCache<K, V> map = new RedissonMapCache<K, V>(evictionScheduler, commandExecutor, name, null, options, writeBehindService);
    return ReactiveProxyBuilder.create(commandExecutor, map,
            new RedissonMapCacheReactive<>(map, commandExecutor), RMapCacheReactive.class);
}
// Legacy-options variant of getMap(String) using the default codec.
@Override
public <K, V> RMapReactive<K, V> getMap(String name, MapOptions<K, V> options) {
    RMap<K, V> map = new RedissonMap<K, V>(commandExecutor, name, null, options, writeBehindService);
    return ReactiveProxyBuilder.create(commandExecutor, map,
            new RedissonMapReactive<K, V>(map, commandExecutor), RMapReactive.class);
}
// Legacy-options variant of getMap(String, Codec).
@Override
public <K, V> RMapReactive<K, V> getMap(String name, Codec codec, MapOptions<K, V> options) {
    RMap<K, V> map = new RedissonMap<>(codec, commandExecutor, name, null, options, writeBehindService);
    return ReactiveProxyBuilder.create(commandExecutor, map,
            new RedissonMapReactive<>(map, commandExecutor), RMapReactive.class);
}
// Delegates to the codec-aware overload with a null codec (meaning "use default").
@Override
public <K, V> RLocalCachedMapReactive<K, V> getLocalCachedMap(String name, LocalCachedMapOptions<K, V> options) {
    return getLocalCachedMap(name, null, options);
}
// Reactive near-cache map: local in-process cache layered over the remote map.
@Override
public <K, V> RLocalCachedMapReactive<K, V> getLocalCachedMap(String name, Codec codec, LocalCachedMapOptions<K, V> options) {
    RMap<K, V> map = new RedissonLocalCachedMap<>(codec, commandExecutor, name,
            options, evictionScheduler, null, writeBehindService);
    return ReactiveProxyBuilder.create(commandExecutor, map,
            new RedissonMapReactive<>(map, commandExecutor), RLocalCachedMapReactive.class);
}
@Override
public <K, V> RLocalCachedMapReactive<K, V> getLocalCachedMap(org.redisson.api.options.LocalCachedMapOptions<K, V> options) {
LocalCachedMapParams<K, V> params = (LocalCachedMapParams) options;
LocalCachedMapOptions<K, V> ops = LocalCachedMapOptions.<K, V>defaults()
.cacheProvider(LocalCachedMapOptions.CacheProvider.valueOf(params.getCacheProvider().toString()))
.cacheSize(params.getCacheSize())
.storeMode(LocalCachedMapOptions.StoreMode.valueOf(params.getStoreMode().toString()))
.evictionPolicy(LocalCachedMapOptions.EvictionPolicy.valueOf(params.getEvictionPolicy().toString()))
.maxIdle(params.getMaxIdleInMillis())
.loader(params.getLoader())
.loaderAsync(params.getLoaderAsync())
.reconnectionStrategy(LocalCachedMapOptions.ReconnectionStrategy.valueOf(params.getReconnectionStrategy().toString()))
.storeCacheMiss(params.isStoreCacheMiss())
.timeToLive(params.getTimeToLiveInMillis())
.syncStrategy(LocalCachedMapOptions.SyncStrategy.valueOf(params.getSyncStrategy().toString()))
.useObjectAsCacheKey(params.isUseObjectAsCacheKey())
.useTopicPattern(params.isUseTopicPattern())
.expirationEventPolicy(LocalCachedMapOptions.ExpirationEventPolicy.valueOf(params.getExpirationEventPolicy().toString()))
.writer(params.getWriter())
.writerAsync(params.getWriterAsync())
.writeBehindDelay(params.getWriteBehindDelay())
.writeBehindBatchSize(params.getWriteBehindBatchSize())
.writerRetryInterval(Duration.ofMillis(params.getWriteRetryInterval()));
if (params.getWriteMode() != null) {
ops.writeMode(MapOptions.WriteMode.valueOf(params.getWriteMode().toString()));
}
if (params.getWriteRetryAttempts() > 0) {
ops.writerRetryAttempts(params.getWriteRetryAttempts());
}
CommandReactiveExecutor ca = commandExecutor.copy(params);
RMap<K, V> map = new RedissonLocalCachedMap<>(params.getCodec(), ca, params.getName(),
ops, evictionScheduler, null, writeBehindService);
return ReactiveProxyBuilder.create(commandExecutor, map,
new RedissonMapReactive<>(map, ca), RLocalCachedMapReactive.class);
}
@Override
public <K, V> RLocalCachedMapCacheReactive<K, V> getLocalCachedMapCache(String name, LocalCachedMapCacheOptions<K, V> options) {
throw new UnsupportedOperationException("This feature is implemented in the Redisson PRO version. Visit https://redisson.pro");
}
@Override
public <K, V> RLocalCachedMapCacheReactive<K, V> getLocalCachedMapCache(String name, Codec codec, LocalCachedMapCacheOptions<K, V> options) {
throw new UnsupportedOperationException("This feature is implemented in the Redisson PRO version. Visit https://redisson.pro");
}
@Override
public RTransactionReactive createTransaction(TransactionOptions options) {
return new RedissonTransactionReactive(commandExecutor, options);
}
@Override
public <V> RBlockingDequeReactive<V> getBlockingDeque(String name) {
RedissonBlockingDeque<V> deque = new RedissonBlockingDeque<V>(commandExecutor, name, null);
return ReactiveProxyBuilder.create(commandExecutor, deque,
new RedissonBlockingDequeReactive<V>(deque), RBlockingDequeReactive.class);
}
@Override
public <V> RBlockingDequeReactive<V> getBlockingDeque(String name, Codec codec) {
RedissonBlockingDeque<V> deque = new RedissonBlockingDeque<V>(codec, commandExecutor, name, null);
return ReactiveProxyBuilder.create(commandExecutor, deque,
new RedissonBlockingDequeReactive<V>(deque), RBlockingDequeReactive.class);
}
@Override
public <V> RBlockingDequeReactive<V> getBlockingDeque(PlainOptions options) {
PlainParams params = (PlainParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
RedissonBlockingDeque<V> deque = new RedissonBlockingDeque<V>(params.getCodec(), ca, params.getName(), null);
return ReactiveProxyBuilder.create(commandExecutor, deque,
new RedissonBlockingDequeReactive<V>(deque), RBlockingDequeReactive.class);
}
@Override
public <V> RTransferQueueReactive<V> getTransferQueue(String name) {
String remoteName = RedissonObject.suffixName(name, "remoteService");
RRemoteService service = getRemoteService(remoteName);
RedissonTransferQueue<V> queue = new RedissonTransferQueue<V>(commandExecutor, name, service);
return ReactiveProxyBuilder.create(commandExecutor, queue,
new RedissonTransferQueueReactive<V>(queue), RTransferQueueReactive.class);
}
@Override
public <V> RTransferQueueReactive<V> getTransferQueue(String name, Codec codec) {
String remoteName = RedissonObject.suffixName(name, "remoteService");
RRemoteService service = getRemoteService(remoteName);
RedissonTransferQueue<V> queue = new RedissonTransferQueue<V>(codec, commandExecutor, name, service);
return ReactiveProxyBuilder.create(commandExecutor, queue,
new RedissonTransferQueueReactive<V>(queue), RTransferQueueReactive.class);
}
@Override
public <V> RTransferQueueReactive<V> getTransferQueue(PlainOptions options) {
PlainParams params = (PlainParams) options;
CommandReactiveExecutor ca = commandExecutor.copy(params);
String remoteName = RedissonObject.suffixName(params.getName(), "remoteService");
RRemoteService service = getRemoteService(remoteName);
RedissonTransferQueue<V> queue = new RedissonTransferQueue<V>(params.getCodec(), ca, params.getName(), service);
return ReactiveProxyBuilder.create(commandExecutor, queue,
new RedissonTransferQueueReactive<V>(queue), RTransferQueueReactive.class);
}
@Override
public String getId() {
return commandExecutor.getServiceManager().getId();
}
}
| RedissonReactive |
java | netty__netty | transport-native-epoll/src/test/java/io/netty/channel/epoll/EpollDatagramMulticastTest.java | {
"start": 862,
"end": 1136
} | class ____ extends DatagramMulticastTest {
@Override
protected List<TestsuitePermutation.BootstrapComboFactory<Bootstrap, Bootstrap>> newFactories() {
return EpollSocketTestPermutation.INSTANCE.datagram(socketProtocolFamily());
}
}
| EpollDatagramMulticastTest |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/streaming/runtime/operators/sink/committables/CommittableCollectorSerializerTest.java | {
"start": 1808,
"end": 10647
} | class ____ {
private static final SimpleVersionedSerializer<Integer> COMMITTABLE_SERIALIZER =
new IntegerSerializer();
private static final int SUBTASK_ID = 1;
private static final int NUMBER_OF_SUBTASKS = 1;
private static final SinkCommitterMetricGroup METRIC_GROUP =
MetricsGroupTestUtils.mockCommitterMetricGroup();
private static final CommittableCollectorSerializer<Integer> SERIALIZER =
new CommittableCollectorSerializer<>(
COMMITTABLE_SERIALIZER, SUBTASK_ID, NUMBER_OF_SUBTASKS, METRIC_GROUP);
@Test
void testCommittableCollectorV1SerDe() throws IOException {
final List<Integer> legacyState = Arrays.asList(1, 2, 3);
final DataOutputSerializer out = new DataOutputSerializer(256);
out.writeInt(SinkV1CommittableDeserializer.MAGIC_NUMBER);
SimpleVersionedSerialization.writeVersionAndSerializeList(
COMMITTABLE_SERIALIZER, legacyState, out);
final byte[] serialized = out.getCopyOfBuffer();
final CommittableCollector<Integer> committableCollector =
SERIALIZER.deserialize(1, serialized);
assertThat(committableCollector.getCheckpointCommittables())
.singleElement()
.extracting(
checkpointCommittable ->
checkpointCommittable
.getSubtaskCommittableManager(0)
.getPendingRequests()
.map(CommitRequestImpl::getCommittable),
InstanceOfAssertFactories.stream(Integer.class))
.containsExactly(1, 2, 3);
}
@Test
void testCommittableCollectorV2SerDe() throws IOException {
int subtaskId = 2;
int numberOfSubtasks = 3;
final CommittableCollectorSerializer<Integer> ccSerializer =
new CommittableCollectorSerializer<>(
COMMITTABLE_SERIALIZER, subtaskId, numberOfSubtasks, METRIC_GROUP);
final CommittableCollector<Integer> committableCollector =
new CommittableCollector<>(METRIC_GROUP);
committableCollector.addMessage(
new CommittableSummary<>(subtaskId, numberOfSubtasks, 1L, 1, 0));
committableCollector.addMessage(
new CommittableSummary<>(subtaskId, numberOfSubtasks, 2L, 1, 0));
committableCollector.addMessage(new CommittableWithLineage<>(1, 1L, subtaskId));
committableCollector.addMessage(new CommittableWithLineage<>(2, 2L, subtaskId));
final CommittableCollector<Integer> copy =
ccSerializer.deserialize(2, SERIALIZER.serialize(committableCollector));
// assert original CommittableCollector
assertCommittableCollector(
"Original CommittableCollector",
subtaskId,
numberOfSubtasks,
committableCollector,
Arrays.asList(Collections.singletonList(1), Collections.singletonList(2)));
// assert deserialized CommittableCollector
assertCommittableCollector(
"Deserialized CommittableCollector",
subtaskId,
numberOfSubtasks,
copy,
Arrays.asList(Collections.singletonList(1), Collections.singletonList(2)));
}
@Test
void testCommittablesForSameSubtaskIdV2SerDe() throws IOException {
int subtaskId = 1;
int numberOfSubtasks = 3;
final CommittableCollectorSerializer<Integer> ccSerializer =
new CommittableCollectorSerializer<>(
COMMITTABLE_SERIALIZER, subtaskId, numberOfSubtasks, METRIC_GROUP);
final CommittableCollector<Integer> committableCollector =
new CommittableCollector<>(METRIC_GROUP);
committableCollector.addMessage(
new CommittableSummary<>(subtaskId, numberOfSubtasks, 1L, 1, 1, 0));
committableCollector.addMessage(
new CommittableSummary<>(subtaskId + 1, numberOfSubtasks, 1L, 1, 1, 0));
committableCollector.addMessage(new CommittableWithLineage<>(1, 1L, subtaskId));
committableCollector.addMessage(new CommittableWithLineage<>(1, 1L, subtaskId + 1));
final CommittableCollector<Integer> copy =
ccSerializer.deserialize(2, SERIALIZER.serialize(committableCollector));
// assert original CommittableCollector
assertCommittableCollector(
"Original CommittableCollector",
subtaskId,
numberOfSubtasks,
committableCollector,
Collections.singletonList(Collections.singletonList(1)));
// assert deserialized CommittableCollector
assertThat(copy).isEqualTo(committableCollector);
}
@Test
void testAlignSubtaskCommittableManagerCheckpointWithCheckpointCommittableManagerCheckpointId()
throws IOException {
// Create CommittableCollector holding a higher checkpointId than
// WriterInitContext#INITIAL_CHECKPOINT_ID
long checkpointId = WriterInitContext.INITIAL_CHECKPOINT_ID + 1;
final CommittableCollector<Integer> committableCollector =
new CommittableCollector<>(METRIC_GROUP);
committableCollector.addMessage(
new CommittableSummary<>(SUBTASK_ID, NUMBER_OF_SUBTASKS, checkpointId, 1, 1, 0));
committableCollector.addMessage(new CommittableWithLineage<>(1, checkpointId, SUBTASK_ID));
final CommittableCollector<Integer> copy =
SERIALIZER.deserialize(2, SERIALIZER.serialize(committableCollector));
final Collection<CheckpointCommittableManagerImpl<Integer>> checkpointCommittables =
copy.getCheckpointCommittables();
assertThat(checkpointCommittables).hasSize(1);
final CheckpointCommittableManagerImpl<Integer> committableManager =
checkpointCommittables.iterator().next();
assertThat(committableManager.getSubtaskCommittableManager(SUBTASK_ID).getCheckpointId())
.isEqualTo(committableManager.getCheckpointId());
}
/**
* @param assertMessageHeading prefix used for assertion fail message.
* @param subtaskId subtaskId to get {@link SubtaskCommittableManager} from {@link
* CheckpointCommittableManagerImpl}
* @param numberOfSubtasks expected number of subtasks for {@link CommittableSummary}
* @param committableCollector collector to get {@link CheckpointCommittableManager}s from.
* @param committablesPerSubtaskPerCheckpoint every of the list element represents expected
* number of pending request per {@link SubtaskCommittableManager}.
*/
private void assertCommittableCollector(
String assertMessageHeading,
int subtaskId,
int numberOfSubtasks,
CommittableCollector<Integer> committableCollector,
List<List<Integer>> committablesPerSubtaskPerCheckpoint) {
assertThat(committableCollector.getCheckpointCommittables())
.describedAs(assertMessageHeading)
.zipSatisfy(
committablesPerSubtaskPerCheckpoint,
(checkpointCommittableManager, expectedPendingRequestCount) -> {
final SubtaskCommittableManager<Integer> subtaskCommittableManager =
checkpointCommittableManager.getSubtaskCommittableManager(
subtaskId);
assertThat(checkpointCommittableManager)
.returns(
numberOfSubtasks,
CheckpointCommittableManagerImpl::getNumberOfSubtasks);
assertPendingRequests(
subtaskCommittableManager, expectedPendingRequestCount);
assertThat(subtaskCommittableManager.getSubtaskId())
.isEqualTo(subtaskId);
});
}
private void assertPendingRequests(
SubtaskCommittableManager<Integer> subtaskCommittableManagerCheckpoint,
List<Integer> expectedPendingRequestCount) {
assertThat(
subtaskCommittableManagerCheckpoint
.getPendingRequests()
.map(CommitRequestImpl::getCommittable)
.collect(Collectors.toList()))
.containsExactlyElementsOf(expectedPendingRequestCount);
}
}
| CommittableCollectorSerializerTest |
java | playframework__playframework | core/play/src/main/java/play/inject/Bindings.java | {
"start": 221,
"end": 363
} | class ____ {
/**
* Create a binding key for the given class.
*
* @param <T> the type of the bound class
* @param clazz the | Bindings |
java | resilience4j__resilience4j | resilience4j-reactor/src/main/java/io/github/resilience4j/reactor/bulkhead/operator/MonoBulkhead.java | {
"start": 944,
"end": 1498
} | class ____<T> extends MonoOperator<T, T> {
private final Bulkhead bulkhead;
MonoBulkhead(Mono<? extends T> source, Bulkhead bulkhead) {
super(source);
this.bulkhead = bulkhead;
}
@Override
public void subscribe(CoreSubscriber<? super T> actual) {
if (bulkhead.tryAcquirePermission()) {
source.subscribe(new BulkheadSubscriber<>(bulkhead, actual, true));
} else {
Operators.error(actual, BulkheadFullException.createBulkheadFullException(bulkhead));
}
}
}
| MonoBulkhead |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest-client/deployment/src/test/java/io/quarkus/rest/client/reactive/deployment/RestClientAnnotationExpressionParserTest.java | {
"start": 996,
"end": 2457
} | class ____ {
@ParameterizedTest
@MethodSource
void test(String input, List<Node> expectedResult) {
List<Node> result = new RestClientAnnotationExpressionParser(input, null).parse();
assertThat(result).isEqualTo(expectedResult);
}
private static Stream<Arguments> test() {
return Stream.of(
Arguments.of("", Collections.emptyList()),
Arguments.of("only verbatim", List.of(new Verbatim("only verbatim"))),
Arguments.of("${only.config}", List.of(new ConfigName("only.config"))),
Arguments.of("{only.methodCall}", List.of(new ConfigName("only.methodCall"))),
Arguments.of(
"first use a ${config.name} then a {methodCall} then a {fieldAccess} then another ${config} and we're done",
List.of(
new Verbatim("first use a "),
new ConfigName("config.name"),
new Verbatim(" then a "),
new Accessible("methodCall"),
new Verbatim(" then a "),
new Accessible("fieldAccess"),
new Verbatim(" then another "),
new ConfigName("config"),
new Verbatim(" and we're done"))));
}
}
| RestClientAnnotationExpressionParserTest |
java | apache__kafka | clients/src/main/java/org/apache/kafka/common/errors/OffsetMetadataTooLarge.java | {
"start": 975,
"end": 1401
} | class ____ extends ApiException {
private static final long serialVersionUID = 1L;
public OffsetMetadataTooLarge() {
}
public OffsetMetadataTooLarge(String message) {
super(message);
}
public OffsetMetadataTooLarge(Throwable cause) {
super(cause);
}
public OffsetMetadataTooLarge(String message, Throwable cause) {
super(message, cause);
}
}
| OffsetMetadataTooLarge |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.