language
stringclasses 1
value | repo
stringclasses 60
values | path
stringlengths 22
294
| class_span
dict | source
stringlengths 13
1.16M
| target
stringlengths 1
113
|
|---|---|---|---|---|---|
java
|
apache__camel
|
components/camel-aws/camel-aws2-lambda/src/main/java/org/apache/camel/component/aws2/lambda/Lambda2Constants.java
|
{
"start": 951,
"end": 9142
}
|
interface ____ {
@Metadata(label = "all", description = "The operation we want to perform. Override operation passed as query parameter",
javaType = "String", required = true)
String OPERATION = "CamelAwsLambdaOperation";
@Metadata(label = "createFunction", description = "Amazon S3 bucket name where the .zip file containing\n" +
"your deployment package is stored. This bucket must reside in the same AWS region where you are creating the Lambda function.",
javaType = "String")
String S3_BUCKET = "CamelAwsLambdaS3Bucket";
@Metadata(label = "createFunction", description = "The Amazon S3 object (the deployment package) key name\n" +
"you want to upload.",
javaType = "String")
String S3_KEY = "CamelAwsLambdaS3Key";
@Metadata(label = "createFunction", description = "The Amazon S3 object (the deployment package) version\n" +
"you want to upload.",
javaType = "String")
String S3_OBJECT_VERSION = "CamelAwsLambdaS3ObjectVersion";
@Metadata(label = "createFunction", description = "The local path of the zip file (the deployment package).\n" +
" Content of zip file can also be put in Message body.",
javaType = "String")
String ZIP_FILE = "CamelAwsLambdaZipFile";
@Metadata(label = "createFunction", description = "The user-provided description.", javaType = "String")
String DESCRIPTION = "CamelAwsLambdaDescription";
@Metadata(label = "createFunction", description = "The Amazon Resource Name (ARN) of the IAM role that Lambda assumes\n" +
" when it executes your function to access any other Amazon Web Services (AWS) resources.",
javaType = "String", required = true)
String ROLE = "CamelAwsLambdaRole";
@Metadata(label = "createFunction", description = "The runtime environment for the Lambda function you are uploading.\n" +
" (nodejs, nodejs4.3, nodejs6.10, java8, python2.7, python3.6, dotnetcore1.0, odejs4.3-edge)",
javaType = "String", required = true)
String RUNTIME = "CamelAwsLambdaRuntime";
@Metadata(label = "createFunction", description = "The function within your code that Lambda calls to begin execution.\n" +
" For Node.js, it is the module-name.export value in your function.\n" +
" For Java, it can be package.class-name::handler or package.class-name.",
javaType = "String", required = true)
String HANDLER = "CamelAwsLambdaHandler";
@Metadata(label = "createFunction",
description = "The parent object that contains the target ARN (Amazon Resource Name)\n" +
"of an Amazon SQS queue or Amazon SNS topic.",
javaType = "String")
String TARGET_ARN = "CamelAwsLambdaTargetArn";
@Metadata(label = "createFunction", description = "The memory size, in MB, you configured for the function.\n" +
"Must be a multiple of 64 MB.",
javaType = "Integer")
String MEMORY_SIZE = "CamelAwsLambdaMemorySize";
@Metadata(label = "createFunction",
description = "The Amazon Resource Name (ARN) of the KMS key used to encrypt your function's environment variables.\n"
+
"If not provided, AWS Lambda will use a default service key.",
javaType = "String")
String KMS_KEY_ARN = "CamelAwsLambdaKMSKeyArn";
@Metadata(label = "createFunction",
description = "The key-value pairs that represent your environment's configuration settings.",
javaType = "Map<String, String>")
String ENVIRONMENT_VARIABLES = "CamelAwsLambdaEnvironmentVariables";
@Metadata(label = "createFunction updateFunction",
description = "This boolean parameter can be used to request AWS Lambda\n" +
"to create the Lambda function and publish a version as an atomic operation.",
javaType = "Boolean")
String PUBLISH = "CamelAwsLambdaPublish";
@Metadata(label = "createFunction",
description = "The function execution time at which Lambda should terminate the function.\n" +
"The default is 3 seconds.",
javaType = "Integer")
String TIMEOUT = "CamelAwsLambdaTimeout";
@Metadata(label = "createFunction", description = "The list of tags (key-value pairs) assigned to the new function.",
javaType = "Map<String, String>")
String TAGS = "CamelAwsLambdaTags";
@Metadata(label = "createFunction", description = "Your function's tracing settings (Active or PassThrough).",
javaType = "String")
String TRACING_CONFIG = "CamelAwsLambdaTracingConfig";
@Metadata(label = "createFunction",
description = "If your Lambda function accesses resources in a VPC, a list of one or more security groups IDs in your VPC.",
javaType = "List<String>")
String SECURITY_GROUP_IDS = "CamelAwsLambdaSecurityGroupIds";
@Metadata(label = "createFunction",
description = "If your Lambda function accesses resources in a VPC, a list of one or more subnet IDs in your VPC.",
javaType = "List<String>")
String SUBNET_IDS = "CamelAwsLambdaSubnetIds";
@Metadata(label = "createEventSourceMapping", description = "The Amazon Resource Name (ARN) of the event source.",
javaType = "String")
String EVENT_SOURCE_ARN = "CamelAwsLambdaEventSourceArn";
@Metadata(label = "createEventSourceMapping",
description = "The maximum number of records in each batch that Lambda pulls from your stream or queue and sends to your function. ",
javaType = "Integer")
String EVENT_SOURCE_BATCH_SIZE = "CamelAwsLambdaEventSourceBatchSize";
@Metadata(label = "deleteEventSourceMapping", description = "The identifier of the event source mapping.",
javaType = "String")
String EVENT_SOURCE_UUID = "CamelAwsLambdaEventSourceUuid";
@Metadata(label = "listTags tagResource untagResource", description = "The function's Amazon Resource Name (ARN).",
javaType = "String")
String RESOURCE_ARN = "CamelAwsLambdaResourceArn";
@Metadata(label = "tagResource", description = "A list of tags to apply to the function.", javaType = "Map<String, String>")
String RESOURCE_TAGS = "CamelAwsLambdaResourceTags";
@Metadata(label = "untagResource", description = "A list of tag keys to remove from the function.",
javaType = "List<String>")
String RESOURCE_TAG_KEYS = "CamelAwsLambdaResourceTagKeys";
@Metadata(label = "publishVersion",
description = "A description for the version to override the description in the function configuration.",
javaType = "String")
String VERSION_DESCRIPTION = "CamelAwsLambdaVersionDescription";
@Metadata(label = "publishVersion",
description = "Only update the function if the revision ID matches the ID that's specified.", javaType = "String")
String VERSION_REVISION_ID = "CamelAwsLambdaVersionRevisionId";
@Metadata(label = "createAlias listAliases", description = "The function version to set in the alias", javaType = "String")
String FUNCTION_VERSION = "CamelAwsLambdaFunctionVersion";
@Metadata(label = "createAlias deleteAlias getAlias", description = "The function name of the alias", javaType = "String",
required = true)
String FUNCTION_ALIAS_NAME = "CamelAwsLambdaAliasFunctionName";
@Metadata(label = "createAlias", description = "The function description to set in the alias", javaType = "String")
String FUNCTION_ALIAS_DESCRIPTION = "CamelAwsLambdaAliasFunctionDescription";
}
|
Lambda2Constants
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/jdbc/Expectation.java
|
{
"start": 5429,
"end": 6003
}
|
class ____ implements Expectation {
public static final None INSTANCE = new None();
@Override
public void verifyOutcome(int rowCount, PreparedStatement statement, int batchPosition, String sql) {
// nothing to do
}
}
/**
* Row count checking. A row count is an integer value returned by
* {@link java.sql.PreparedStatement#executeUpdate()} or
* {@link java.sql.Statement#executeBatch()}. The row count is checked
* against an expected value. For example, the expected row count for
* an {@code INSERT} statement is always 1.
*
* @since 6.5
*/
|
None
|
java
|
grpc__grpc-java
|
core/src/test/java/io/grpc/internal/AbstractServerStreamTest.java
|
{
"start": 13183,
"end": 13800
}
|
class ____ extends AbstractServerStream {
private final Sink sink;
private final AbstractServerStream.TransportState state;
protected AbstractServerStreamBase(WritableBufferAllocator bufferAllocator, Sink sink,
AbstractServerStream.TransportState state) {
super(bufferAllocator, StatsTraceContext.NOOP);
this.sink = sink;
this.state = state;
}
@Override
protected Sink abstractServerStreamSink() {
return sink;
}
@Override
protected AbstractServerStream.TransportState transportState() {
return state;
}
static
|
AbstractServerStreamBase
|
java
|
elastic__elasticsearch
|
x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plan/physical/PivotExec.java
|
{
"start": 570,
"end": 1670
}
|
class ____ extends UnaryExec implements Unexecutable {
private final Pivot pivot;
public PivotExec(Source source, PhysicalPlan child, Pivot pivot) {
super(source, child);
this.pivot = pivot;
}
@Override
protected NodeInfo<PivotExec> info() {
return NodeInfo.create(this, PivotExec::new, child(), pivot);
}
@Override
protected PivotExec replaceChild(PhysicalPlan newChild) {
return new PivotExec(source(), newChild, pivot);
}
@Override
public List<Attribute> output() {
return pivot.output();
}
public Pivot pivot() {
return pivot;
}
@Override
public int hashCode() {
return Objects.hash(pivot, child());
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null || getClass() != obj.getClass()) {
return false;
}
PivotExec other = (PivotExec) obj;
return Objects.equals(pivot, other.pivot) && Objects.equals(child(), other.child());
}
}
|
PivotExec
|
java
|
spring-projects__spring-framework
|
spring-beans/src/test/java/org/springframework/beans/AbstractPropertyAccessorTests.java
|
{
"start": 68795,
"end": 68894
}
|
class ____ test naming of beans in an error message
}
@SuppressWarnings("unused")
private static
|
to
|
java
|
elastic__elasticsearch
|
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java
|
{
"start": 58923,
"end": 60476
}
|
class ____ implements SSLSession {
private final byte[] id;
private final Runnable invalidation;
private MockSSLSession(byte[] id, Runnable invalidation) {
this.id = id;
this.invalidation = invalidation;
}
@Override
public byte[] getId() {
return id;
}
@Override
public SSLSessionContext getSessionContext() {
return null;
}
@Override
public long getCreationTime() {
return 0;
}
@Override
public long getLastAccessedTime() {
return 0;
}
@Override
public void invalidate() {
invalidation.run();
}
@Override
public boolean isValid() {
return false;
}
@Override
public void putValue(String name, Object value) {
}
@Override
public Object getValue(String name) {
return null;
}
@Override
public void removeValue(String name) {
}
@Override
public String[] getValueNames() {
return new String[0];
}
@Override
public Certificate[] getPeerCertificates() throws SSLPeerUnverifiedException {
return new Certificate[0];
}
@Override
public Certificate[] getLocalCertificates() {
return new Certificate[0];
}
@SuppressForbidden(reason = "need to reference deprecated
|
MockSSLSession
|
java
|
apache__kafka
|
clients/src/test/java/org/apache/kafka/common/utils/JavaTest.java
|
{
"start": 1083,
"end": 2696
}
|
class ____ {
private String javaVendor;
private String javaRuntimeName;
@BeforeEach
public void before() {
javaVendor = System.getProperty("java.vendor");
javaRuntimeName = System.getProperty("java.runtime.name");
}
@AfterEach
public void after() {
System.setProperty("java.vendor", javaVendor);
System.setProperty("java.runtime.name", javaRuntimeName);
}
@Test
public void testIsIBMJdk() {
System.setProperty("java.vendor", "Oracle Corporation");
assertFalse(Java.isIbmJdk());
System.setProperty("java.vendor", "IBM Corporation");
assertTrue(Java.isIbmJdk());
}
@Test
public void testIsIBMJdkSemeru() {
System.setProperty("java.vendor", "Oracle Corporation");
assertFalse(Java.isIbmJdkSemeru());
System.setProperty("java.vendor", "IBM Corporation");
System.setProperty("java.runtime.name", "Java(TM) SE Runtime Environment");
assertFalse(Java.isIbmJdkSemeru());
System.setProperty("java.vendor", "IBM Corporation");
System.setProperty("java.runtime.name", "IBM Semeru Runtime Certified Edition");
assertTrue(Java.isIbmJdkSemeru());
}
@Test
public void testLoadKerberosLoginModule() throws ClassNotFoundException {
// IBM Semeru JDKs use the OpenJDK security providers
String clazz = Java.isIbmJdk() && !Java.isIbmJdkSemeru()
? "com.ibm.security.auth.module.Krb5LoginModule"
: "com.sun.security.auth.module.Krb5LoginModule";
Class.forName(clazz);
}
}
|
JavaTest
|
java
|
square__retrofit
|
retrofit/java-test/src/test/java/retrofit2/RetrofitTest.java
|
{
"start": 20931,
"end": 21639
}
|
class ____.lang.String.\n"
+ " Tried:\n"
+ " * retrofit2.BuiltInConverters\n"
+ " * retrofit2.OptionalConverterFactory");
}
}
@Test
public void missingConverterThrowsOnNonResponseBody() throws IOException {
Retrofit retrofit = new Retrofit.Builder().baseUrl(server.url("/")).build();
CallMethod example = retrofit.create(CallMethod.class);
server.enqueue(new MockResponse().setBody("Hi"));
try {
example.disallowed();
fail();
} catch (IllegalArgumentException e) {
assertThat(e)
.hasMessageThat()
.isEqualTo(
""
+ "Unable to create converter for
|
java
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/MappableBlock.java
|
{
"start": 1197,
"end": 1618
}
|
interface ____ extends Closeable {
/**
* Get the number of bytes that have been cached.
* @return the number of bytes that have been cached.
*/
long getLength();
/**
* Get cache address if applicable.
* Return -1 if not applicable.
*/
long getAddress();
/**
* Get cached block's ExtendedBlockId.
* @return cached block's ExtendedBlockId..
*/
ExtendedBlockId getKey();
}
|
MappableBlock
|
java
|
spring-projects__spring-framework
|
spring-web/src/main/java/org/springframework/http/converter/cbor/JacksonCborHttpMessageConverter.java
|
{
"start": 1866,
"end": 2850
}
|
class ____ extends AbstractJacksonHttpMessageConverter<CBORMapper> {
/**
* Construct a new instance with a {@link CBORMapper} customized with the
* {@link tools.jackson.databind.JacksonModule}s found by
* {@link MapperBuilder#findModules(ClassLoader)}.
*/
public JacksonCborHttpMessageConverter() {
super(CBORMapper.builder(), MediaType.APPLICATION_CBOR);
}
/**
* Construct a new instance with the provided {@link CBORMapper.Builder}
* customized with the {@link tools.jackson.databind.JacksonModule}s
* found by {@link MapperBuilder#findModules(ClassLoader)}.
* @see CBORMapper#builder()
*/
public JacksonCborHttpMessageConverter(CBORMapper.Builder builder) {
super(builder, MediaType.APPLICATION_CBOR);
}
/**
* Construct a new instance with the provided {@link CBORMapper}.
* @see CBORMapper#builder()
*/
public JacksonCborHttpMessageConverter(CBORMapper mapper) {
super(mapper, MediaType.APPLICATION_CBOR);
}
}
|
JacksonCborHttpMessageConverter
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/query/criteria/ExtendableCriterion.java
|
{
"start": 204,
"end": 290
}
|
interface ____ {
ExtendableCriterion add(AuditCriterion criterion);
}
|
ExtendableCriterion
|
java
|
quarkusio__quarkus
|
core/deployment/src/main/java/io/quarkus/deployment/util/ProcessUtil.java
|
{
"start": 430,
"end": 5302
}
|
class ____ {
private static final Logger logger = Logger.getLogger(ProcessUtil.class);
/**
* Launches and returns a {@link Process} built from the {@link ProcessBuilder builder}.
* Before launching the process, this method checks if inherit IO is disabled and if so,
* streams both the {@code STDOUT} and {@code STDERR} of the launched process using
* {@link #streamToSysOutSysErr(Process)}. Else, it launches the process with {@link ProcessBuilder#inheritIO()}
*
* @param builder The process builder
* @param shouldRedirectIO Whether {@link java.lang.ProcessBuilder.Redirect#INHERIT} can be used for
* launching the process
* @return Returns the newly launched process
* @throws IOException
*/
public static Process launchProcess(final ProcessBuilder builder,
final boolean shouldRedirectIO) throws IOException {
if (!shouldRedirectIO) {
return builder.inheritIO().start();
}
final Process process = builder.redirectOutput(ProcessBuilder.Redirect.PIPE)
.redirectError(ProcessBuilder.Redirect.PIPE)
.start();
// stream both stdout and stderr of the process
ProcessUtil.streamToSysOutSysErr(process);
return process;
}
/**
* Launches and returns a {@link Process} built from the {@link ProcessBuilder builder}.
* Before launching the process, this method checks if inheritIO is disabled
* and if so, streams (only) the {@code STDOUT} of the launched process using {@link #streamOutputToSysOut(Process)}
* (Process)}. Else, it launches the process with {@link ProcessBuilder#inheritIO()}
*
* @param builder The process builder
* @param shouldRedirectIO Whether {@link java.lang.ProcessBuilder.Redirect#INHERIT} can be used for
* launching the process
* @return Returns the newly launched process
* @throws IOException
*/
public static Process launchProcessStreamStdOut(final ProcessBuilder builder,
boolean shouldRedirectIO) throws IOException {
if (!shouldRedirectIO) {
return builder.inheritIO().start();
}
final Process process = builder.redirectOutput(ProcessBuilder.Redirect.PIPE)
.redirectError(ProcessBuilder.Redirect.PIPE)
.start();
// stream only stdout of the process
ProcessUtil.streamOutputToSysOut(process);
return process;
}
/**
* This is a convenience method which internally calls both the {@link #streamOutputToSysOut(Process)}
* and {@link #streamErrorToSysErr(Process)} methods
*
* @param process The process whose STDOUT and STDERR needs to be streamed.
*/
public static void streamToSysOutSysErr(final Process process) {
streamOutputToSysOut(process);
streamErrorToSysErr(process);
}
/**
* Streams the {@link Process process'} {@code STDOUT} to the current process'
* {@code System.out stream}. This creates and starts a thread to stream the contents.
* The {@link Process} is expected to have been started in {@link java.lang.ProcessBuilder.Redirect#PIPE}
* mode
*
* @param process The process whose STDOUT needs to be streamed.
*/
public static void streamOutputToSysOut(final Process process) {
final InputStream processStdOut = process.getInputStream();
final Thread t = new Thread(new Streamer(processStdOut, System.out));
t.setName("Process stdout streamer");
t.setDaemon(true);
t.start();
}
/**
* Streams the {@link Process process'} {@code STDERR} to the current process'
* {@code System.err stream}. This creates and starts a thread to stream the contents.
* The {@link Process} is expected to have been started in {@link java.lang.ProcessBuilder.Redirect#PIPE}
* mode
*
* @param process The process whose STDERR needs to be streamed.
*/
public static void streamErrorToSysErr(final Process process) {
streamErrorTo(System.err, process);
}
/**
* Streams the {@link Process process'} {@code STDERR} to the given
* {@code printStream}. This creates and starts a thread to stream the contents.
* The {@link Process} is expected to have been started in {@link java.lang.ProcessBuilder.Redirect#PIPE}
* mode
*
* @param process The process whose STDERR needs to be streamed.
*/
public static void streamErrorTo(final PrintStream printStream, final Process process) {
final InputStream processStdErr = process.getErrorStream();
final Thread t = new Thread(new Streamer(processStdErr, printStream));
t.setName("Process stderr streamer");
t.setDaemon(true);
t.start();
}
private static final
|
ProcessUtil
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/main/java/org/hibernate/dialect/lock/internal/Helper.java
|
{
"start": 3413,
"end": 3511
}
|
interface ____ {
Timeout extractFrom(ResultSet resultSet) throws SQLException;
}
}
|
TimeoutExtractor
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test-autoconfigure/src/main/java/org/springframework/boot/test/autoconfigure/json/JsonTesterFactoryBean.java
|
{
"start": 1104,
"end": 2302
}
|
class ____<T, M> implements FactoryBean<T> {
private final Class<?> objectType;
private final @Nullable M marshaller;
public JsonTesterFactoryBean(Class<?> objectType, @Nullable M marshaller) {
this.objectType = objectType;
this.marshaller = marshaller;
}
@Override
public boolean isSingleton() {
return false;
}
@Override
@SuppressWarnings("unchecked")
public T getObject() throws Exception {
if (this.marshaller == null) {
Constructor<?> constructor = this.objectType.getDeclaredConstructor();
ReflectionUtils.makeAccessible(constructor);
return (T) BeanUtils.instantiateClass(constructor);
}
Constructor<?>[] constructors = this.objectType.getDeclaredConstructors();
for (Constructor<?> constructor : constructors) {
if (constructor.getParameterCount() == 1
&& constructor.getParameterTypes()[0].isInstance(this.marshaller)) {
ReflectionUtils.makeAccessible(constructor);
return (T) BeanUtils.instantiateClass(constructor, this.marshaller);
}
}
throw new IllegalStateException(this.objectType + " does not have a usable constructor");
}
@Override
public Class<?> getObjectType() {
return this.objectType;
}
}
|
JsonTesterFactoryBean
|
java
|
google__guava
|
guava-testlib/test/com/google/common/testing/ClassSanityTesterTest.java
|
{
"start": 35937,
"end": 36232
}
|
class ____ {
private FactoryMethodParameterNotInstantiable() {}
static FactoryMethodParameterNotInstantiable create(
@SuppressWarnings("unused") NotInstantiable x) {
return new FactoryMethodParameterNotInstantiable();
}
}
static
|
FactoryMethodParameterNotInstantiable
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/mixins/TestMixinDeserForClass.java
|
{
"start": 965,
"end": 1020
}
|
interface ____ { }
// [databind#1990]
public
|
MixIn
|
java
|
assertj__assertj-core
|
assertj-core/src/main/java/org/assertj/core/error/ShouldBeLowerCase.java
|
{
"start": 800,
"end": 1270
}
|
class ____ extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldBeLowerCase}</code>.
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static ErrorMessageFactory shouldBeLowerCase(Object actual) {
return new ShouldBeLowerCase(actual);
}
private ShouldBeLowerCase(Object actual) {
super("%nExpecting %s to be a lowercase", actual);
}
}
|
ShouldBeLowerCase
|
java
|
apache__flink
|
flink-connectors/flink-connector-datagen-test/src/test/java/org/apache/flink/connector/datagen/source/DataGeneratorSourceTest.java
|
{
"start": 9162,
"end": 10200
}
|
class ____<E> implements ReaderOutput<E> {
private final ArrayList<E> emittedRecords = new ArrayList<>();
@Override
public void collect(E record) {
emittedRecords.add(record);
}
@Override
public void collect(E record, long timestamp) {
collect(record);
}
@Override
public void emitWatermark(Watermark watermark) {
throw new UnsupportedOperationException();
}
@Override
public void markIdle() {
throw new UnsupportedOperationException();
}
@Override
public void markActive() {
throw new UnsupportedOperationException();
}
@Override
public SourceOutput<E> createOutputForSplit(String splitId) {
return this;
}
@Override
public void releaseOutputForSplit(String splitId) {}
public ArrayList<E> getEmittedRecords() {
return emittedRecords;
}
}
}
|
TestingReaderOutput
|
java
|
apache__camel
|
components/camel-kubernetes/src/main/java/org/apache/camel/component/kubernetes/cronjob/KubernetesCronJobEndpoint.java
|
{
"start": 1671,
"end": 2230
}
|
class ____ extends AbstractKubernetesEndpoint {
public KubernetesCronJobEndpoint(String uri, KubernetesCronJobComponent component, KubernetesConfiguration config) {
super(uri, component, config);
}
@Override
public Producer createProducer() throws Exception {
return new KubernetesCronJobProducer(this);
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
throw new IllegalArgumentException("The kubernetes-cronjob doesn't support consumer");
}
}
|
KubernetesCronJobEndpoint
|
java
|
google__gson
|
test-jpms/src/test/java/com/google/gson/jpms_test/ExportedPackagesTest.java
|
{
"start": 1295,
"end": 1622
}
|
class ____ {
/** Tests package {@code com.google.gson} */
@Test
public void testMainPackage() {
Gson gson = new Gson();
assertThat(gson.toJson(1)).isEqualTo("1");
}
/** Tests package {@code com.google.gson.annotations} */
@Test
public void testAnnotationsPackage() throws Exception {
|
ExportedPackagesTest
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/state/KeyGroupsSavepointStateHandle.java
|
{
"start": 943,
"end": 2628
}
|
class ____ extends KeyGroupsStateHandle
implements SavepointKeyedStateHandle {
private static final long serialVersionUID = 1L;
/**
* @param groupRangeOffsets range of key-group ids that in the state of this handle
* @param streamStateHandle handle to the actual state of the key-groups
*/
public KeyGroupsSavepointStateHandle(
KeyGroupRangeOffsets groupRangeOffsets, StreamStateHandle streamStateHandle) {
super(groupRangeOffsets, streamStateHandle);
}
/**
* @param keyGroupRange a key group range to intersect.
* @return key-group state over a range that is the intersection between this handle's key-group
* range and the provided key-group range.
*/
@Override
public KeyGroupsStateHandle getIntersection(KeyGroupRange keyGroupRange) {
KeyGroupRangeOffsets offsets = getGroupRangeOffsets().getIntersection(keyGroupRange);
if (offsets.getKeyGroupRange().getNumberOfKeyGroups() <= 0) {
return null;
}
return new KeyGroupsSavepointStateHandle(offsets, getDelegateStateHandle());
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (!(o instanceof KeyGroupsSavepointStateHandle)) {
return false;
}
return super.equals(o);
}
@Override
public String toString() {
return "KeyGroupsSavepointStateHandle{"
+ "groupRangeOffsets="
+ getGroupRangeOffsets()
+ ", stateHandle="
+ getDelegateStateHandle()
+ '}';
}
}
|
KeyGroupsSavepointStateHandle
|
java
|
apache__flink
|
flink-runtime/src/test/java/org/apache/flink/runtime/rest/MultipartUploadExtension.java
|
{
"start": 12581,
"end": 13979
}
|
class ____
extends AbstractRestHandler<
RestfulGateway, TestRequestBody, EmptyResponseBody, EmptyMessageParameters> {
volatile TestRequestBody lastReceivedRequest = null;
MultipartJsonHandler(GatewayRetriever<RestfulGateway> leaderRetriever) {
super(
leaderRetriever,
RpcUtils.INF_TIMEOUT,
Collections.emptyMap(),
MultipartJsonHeaders.INSTANCE);
}
@Override
protected CompletableFuture<EmptyResponseBody> handleRequest(
@Nonnull HandlerRequest<TestRequestBody> request, @Nonnull RestfulGateway gateway)
throws RestHandlerException {
Collection<Path> uploadedFiles =
request.getUploadedFiles().stream()
.map(File::toPath)
.collect(Collectors.toList());
if (!uploadedFiles.isEmpty()) {
throw new RestHandlerException(
"This handler should not have received file uploads.",
HttpResponseStatus.INTERNAL_SERVER_ERROR);
}
this.lastReceivedRequest = request.getRequestBody();
return CompletableFuture.completedFuture(EmptyResponseBody.getInstance());
}
private static final
|
MultipartJsonHandler
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/callbacks/ongeneratedmethods/EmployeeDto.java
|
{
"start": 252,
"end": 466
}
|
class ____ {
private AddressDto address;
public AddressDto getAddress() {
return address;
}
public void setAddress( AddressDto address ) {
this.address = address;
}
}
|
EmployeeDto
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/HashtableContainsTest.java
|
{
"start": 1727,
"end": 2195
}
|
class ____ {
void f(Hashtable<String, Integer> m, Integer v) {
// BUG: Diagnostic contains: containsValue(v)
m.contains(v);
}
}
""")
.doTest();
}
@Test
public void positive_wildcardUpperBound() {
compilationHelper
.addSourceLines(
"test/Test.java",
"""
package test;
import java.util.Hashtable;
|
Test
|
java
|
quarkusio__quarkus
|
extensions/grpc/deployment/src/main/java/io/quarkus/grpc/deployment/GrpcServerProcessor.java
|
{
"start": 12908,
"end": 13353
}
|
class ____
* {@code io.grpc.BindableService} (for example via the protobuf generated {@code *ImplBase}) as the last one.
* Implementing {@code BindableService} is not mandatory.
*/
private static List<ClassInfo> classHierarchy(ClassInfo service, IndexView index) {
List<ClassInfo> collected = new ArrayList<>();
while (service != null) {
collected.add(service);
// Stop at the
|
implementing
|
java
|
apache__avro
|
lang/java/perf/src/main/java/org/apache/avro/perf/test/basic/IntTest.java
|
{
"start": 3185,
"end": 4377
}
|
class ____ extends BasicState {
private byte[] testData;
private Decoder decoder;
public TestStateDecode() {
super();
}
/**
* Avro uses Zig-Zag variable length encoding for numeric values. Ensure there
* are some numeric values of each possible size.
*
* @throws IOException Could not setup test data
*/
@Setup(Level.Trial)
public void doSetupTrial() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
Encoder encoder = super.newEncoder(true, baos);
for (int i = 0; i < getBatchSize(); i += 4) {
// fits in 1 byte
encoder.writeInt(super.getRandom().nextInt(50));
// fits in 2 bytes
encoder.writeInt(super.getRandom().nextInt(5000));
// fits in 3 bytes
encoder.writeInt(super.getRandom().nextInt(500000));
// most in 4 bytes, some in 5 bytes
encoder.writeInt(super.getRandom().nextInt(150000000));
}
this.testData = baos.toByteArray();
}
@Setup(Level.Invocation)
public void doSetupInvocation() throws Exception {
this.decoder = super.newDecoder(this.testData);
}
}
}
|
TestStateDecode
|
java
|
apache__camel
|
components/camel-printer/src/main/java/org/apache/camel/component/printer/PrinterOperations.java
|
{
"start": 1582,
"end": 5634
}
|
class ____ implements PrinterOperationsInterface {
private static final Logger LOG = LoggerFactory.getLogger(PrinterOperations.class);
private PrintService printService;
private DocFlavor flavor;
private PrintRequestAttributeSet printRequestAttributeSet;
private Doc doc;
public PrinterOperations() throws PrintException {
printService = PrintServiceLookup.lookupDefaultPrintService();
if (printService == null) {
throw new PrintException("Printer lookup failure. No default printer set up for this host");
}
flavor = DocFlavor.BYTE_ARRAY.AUTOSENSE;
printRequestAttributeSet = new HashPrintRequestAttributeSet();
printRequestAttributeSet.add(new Copies(1));
printRequestAttributeSet.add(MediaSizeName.NA_LETTER);
printRequestAttributeSet.add(Sides.ONE_SIDED);
}
public PrinterOperations(PrintService printService, DocFlavor flavor,
PrintRequestAttributeSet printRequestAttributeSet) {
this.setPrintService(printService);
this.setFlavor(flavor);
this.setPrintRequestAttributeSet(printRequestAttributeSet);
}
public void print(Doc doc, boolean sendToPrinter, String mimeType, String jobName) throws PrintException {
LOG.trace("Print Service: {}", this.printService.getName());
if (!sendToPrinter) {
LOG.debug(
"Print flag is set to false. This job will not be printed as long as this setting remains in effect. Please set the flag to true or remove the setting.");
File file;
if (mimeType.equalsIgnoreCase("GIF") || mimeType.equalsIgnoreCase("RENDERABLE_IMAGE")) {
file = new File("./target/PrintOutput_" + UUID.randomUUID() + ".gif");
} else if (mimeType.equalsIgnoreCase("JPEG")) {
file = new File("./target/PrintOutput_" + UUID.randomUUID() + ".jpeg");
} else if (mimeType.equalsIgnoreCase("PDF")) {
file = new File("./target/PrintOutput_" + UUID.randomUUID() + ".pdf");
} else {
file = new File("./target/PrintOutput_" + UUID.randomUUID() + ".txt");
}
LOG.debug("Writing print job to file: {}", file.getAbsolutePath());
try {
InputStream in = doc.getStreamForBytes();
FileOutputStream fos = new FileOutputStream(file);
IOHelper.copyAndCloseInput(in, fos);
IOHelper.close(fos);
} catch (Exception e) {
throw new PrintException("Error writing Document to the target file " + file.getAbsolutePath());
}
} else {
if (LOG.isDebugEnabled()) {
LOG.debug("Issuing Job to Printer: {}", this.printService.getName());
}
print(doc, jobName);
}
}
/**
 * Submits the document to the configured print service under the given job name.
 * A fresh DocPrintJob is created for every submission; the job attributes are a
 * per-job copy of the configured defaults plus the JobName.
 */
@Override
public void print(Doc doc, String jobName) throws PrintException {
    // copy the defaults so the shared attribute set is never mutated per job
    PrintRequestAttributeSet jobAttributes = new HashPrintRequestAttributeSet(printRequestAttributeSet);
    jobAttributes.add(new JobName(jobName, Locale.getDefault()));
    DocPrintJob printJob = getPrintService().createPrintJob();
    printJob.print(doc, jobAttributes);
}
// Plain bean accessors; the mutators allow the collaborators to be swapped
// after construction.

public PrintService getPrintService() {
    return printService;
}

public void setPrintService(PrintService printService) {
    this.printService = printService;
}

public DocFlavor getFlavor() {
    return flavor;
}

public void setFlavor(DocFlavor flavor) {
    this.flavor = flavor;
}

public PrintRequestAttributeSet getPrintRequestAttributeSet() {
    return printRequestAttributeSet;
}

public void setPrintRequestAttributeSet(PrintRequestAttributeSet printRequestAttributeSet) {
    this.printRequestAttributeSet = printRequestAttributeSet;
}

// NOTE(review): the doc field is not read by the print methods above —
// presumably held for callers; confirm it is still needed
public Doc getDoc() {
    return doc;
}

public void setDoc(Doc doc) {
    this.doc = doc;
}
}
|
PrinterOperations
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/factories/assignment/Bar5Factory.java
|
{
"start": 275,
"end": 456
}
|
/**
 * Object factory producing Bar5 instances from the two source beans; propA is
 * upper-cased before being handed to the Bar5 constructor.
 */
class ____ {
    @ObjectFactory
    public Bar5 createBar5(Foo5A foo5A, Foo5B foo5B) {
        final String shoutedPropA = foo5A.getPropA().toUpperCase();
        final String propB = foo5B.getPropB();
        return new Bar5(shoutedPropA, propB);
    }
}
|
Bar5Factory
|
java
|
apache__thrift
|
lib/java/src/main/java/org/apache/thrift/transport/THttpClient.java
|
{
"start": 3183,
"end": 10541
}
|
/**
 * Transport factory that wraps an incoming transport in a THttpClient aimed at
 * a fixed URL, optionally routing requests through a preconfigured HttpClient.
 */
class ____ extends TTransportFactory {
    private final String url;
    private final HttpClient client;

    /** Creates a factory that uses the HttpURLConnection-based client. */
    public Factory(String url) {
        this.url = url;
        this.client = null;
    }

    /** Creates a factory whose clients send requests through the given HttpClient. */
    public Factory(String url, HttpClient client) {
        this.url = url;
        this.client = client;
    }

    @Override
    public TTransport getTransport(TTransport trans) {
        try {
            return (client != null)
                    ? new THttpClient(trans.getConfiguration(), url, client)
                    : new THttpClient(trans.getConfiguration(), url);
        } catch (TTransportException tte) {
            // the factory contract offers no way to propagate the failure
            return null;
        }
    }
}
/**
 * Creates a client that flushes via HttpURLConnection (no HttpClient configured).
 *
 * @throws TTransportException if the URL is malformed
 */
public THttpClient(TConfiguration config, String url) throws TTransportException {
    super(config);
    try {
        url_ = new URL(url);
        this.client = null; // null client: flush() uses the HttpURLConnection path
        this.host = null;
    } catch (IOException iox) {
        throw new TTransportException(iox);
    }
}

/** Same as above but with a default TConfiguration. */
public THttpClient(String url) throws TTransportException {
    super(new TConfiguration());
    try {
        url_ = new URL(url);
        this.client = null;
        this.host = null;
    } catch (IOException iox) {
        throw new TTransportException(iox);
    }
}

/**
 * Creates a client that flushes through the given HttpClient; the target host is
 * derived from the URL, falling back to the protocol's default port when none is set.
 *
 * @throws TTransportException if the URL is malformed
 */
public THttpClient(TConfiguration config, String url, HttpClient client)
    throws TTransportException {
    super(config);
    try {
        url_ = new URL(url);
        this.client = client;
        this.host =
            new HttpHost(
                url_.getProtocol(),
                url_.getHost(),
                -1 == url_.getPort() ? url_.getDefaultPort() : url_.getPort());
    } catch (IOException iox) {
        throw new TTransportException(iox);
    }
}

/** Same as above but with a default TConfiguration. */
public THttpClient(String url, HttpClient client) throws TTransportException {
    super(new TConfiguration());
    try {
        url_ = new URL(url);
        this.client = client;
        this.host =
            new HttpHost(
                url_.getProtocol(),
                url_.getHost(),
                -1 == url_.getPort() ? url_.getDefaultPort() : url_.getPort());
    } catch (IOException iox) {
        throw new TTransportException(iox);
    }
}
/**
 * Sets the connect timeout in milliseconds; applied to HttpURLConnection directly
 * and, on the HttpClient path, as the RequestConfig connection-request timeout.
 */
public void setConnectTimeout(int timeout) {
    connectTimeout_ = timeout;
}

/**
 * Use instead {@link
 * org.apache.hc.client5.http.impl.io.BasicHttpClientConnectionManager#setConnectionConfig} or
 * {@link
 * org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManager#setDefaultConnectionConfig}
 */
@Deprecated
public void setReadTimeout(int timeout) {
    readTimeout_ = timeout;
}

/** Replaces all custom headers with a defensive copy of the given map. */
public void setCustomHeaders(Map<String, String> headers) {
    customHeaders_ = new HashMap<>(headers);
}

/** Adds (or overwrites) a single custom header, creating the map lazily. */
public void setCustomHeader(String key, String value) {
    if (customHeaders_ == null) {
        customHeaders_ = new HashMap<>();
    }
    customHeaders_.put(key, value);
}
// No-op: the HTTP connection is established on flush(), not on open().
@Override
public void open() {}

/** Best-effort close of the last response stream; close failures are ignored. */
@Override
public void close() {
    if (null != inputStream_) {
        try {
            inputStream_.close();
        } catch (IOException ioe) {
            // deliberately ignored: nothing useful can be done about a failed close
        }
        inputStream_ = null;
    }
}

// Always open: each flush() creates its own connection, so there is no
// persistent state to report on.
@Override
public boolean isOpen() {
    return true;
}
/**
 * Reads response bytes from the stream produced by the last flush().
 *
 * @throws TTransportException if no request has been flushed yet, the stream is
 *                             exhausted, or the underlying read fails
 */
@Override
public int read(byte[] buf, int off, int len) throws TTransportException {
    if (inputStream_ == null) {
        throw new TTransportException("Response buffer is empty, no request.");
    }
    checkReadBytesAvailable(len);
    try {
        int ret = inputStream_.read(buf, off, len);
        if (ret == -1) {
            throw new TTransportException("No more data available.");
        }
        countConsumedMessageBytes(ret);
        return ret;
    } catch (IOException iox) {
        throw new TTransportException(iox);
    }
}

/** Buffers outgoing bytes; they are sent to the server on flush(). */
@Override
public void write(byte[] buf, int off, int len) {
    requestBuffer_.write(buf, off, len);
}
/**
 * Builds the per-request config: the default, or a copy with the configured
 * connect timeout applied as the connection-request timeout.
 */
private RequestConfig getRequestConfig() {
    if (connectTimeout_ <= 0) {
        // no explicit timeout configured: stick with the library default
        return RequestConfig.DEFAULT;
    }
    return RequestConfig.copy(RequestConfig.DEFAULT)
            .setConnectionRequestTimeout(Timeout.ofMilliseconds(connectTimeout_))
            .build();
}
/**
 * Builds the connection config: the default, or a copy with the configured read
 * timeout applied as the socket timeout.
 */
private ConnectionConfig getConnectionConfig() {
    if (readTimeout_ <= 0) {
        // no explicit timeout configured: stick with the library default
        return ConnectionConfig.DEFAULT;
    }
    return ConnectionConfig.copy(ConnectionConfig.DEFAULT)
            .setSocketTimeout(Timeout.ofMilliseconds(readTimeout_))
            .build();
}
/** Returns a fresh mutable map holding the headers sent on every HttpClient request. */
private static Map<String, String> getDefaultHeaders() {
    Map<String, String> defaults = new HashMap<>();
    defaults.put("Content-Type", "application/x-thrift");
    defaults.put("Accept", "application/x-thrift");
    defaults.put("User-Agent", "Java/THttpClient/HC");
    return defaults;
}
/**
 * Sends the buffered request as a POST through the configured HttpClient and
 * stores the response stream for subsequent read() calls.
 *
 * @throws TTransportException if no HttpClient is configured or the request fails
 */
private void flushUsingHttpClient() throws TTransportException {
    if (null == this.client) {
        throw new TTransportException("Null HttpClient, aborting.");
    }
    // Extract request and reset buffer
    byte[] data = requestBuffer_.toByteArray();
    requestBuffer_.reset();
    HttpPost post = new HttpPost(this.url_.getFile());
    try {
        // Set request to path + query string
        post.setConfig(getRequestConfig());
        // default headers first, then custom headers layered on top of the request
        DEFAULT_HEADERS.forEach(post::addHeader);
        if (null != customHeaders_) {
            customHeaders_.forEach(post::addHeader);
        }
        post.setEntity(new ByteArrayEntity(data, null));
        inputStream_ = client.execute(this.host, post, new THttpClientResponseHandler());
    } catch (IOException ioe) {
        // Abort method so the connection gets released back to the connection manager
        post.abort();
        throw new TTransportException(ioe);
    } finally {
        resetConsumedMessageSize(-1);
    }
}
/**
 * Sends the buffered request. Delegates to the HttpClient path when one is
 * configured; otherwise performs a one-shot POST over HttpURLConnection and
 * keeps the response stream for read().
 *
 * @throws TTransportException on I/O failure or a non-200 HTTP response
 */
public void flush() throws TTransportException {
    if (null != this.client) {
        flushUsingHttpClient();
        return;
    }
    // Extract request and reset buffer
    byte[] data = requestBuffer_.toByteArray();
    requestBuffer_.reset();
    try {
        // Create connection object
        HttpURLConnection connection = (HttpURLConnection) url_.openConnection();
        // Timeouts, only if explicitly set
        if (connectTimeout_ > 0) {
            connection.setConnectTimeout(connectTimeout_);
        }
        if (readTimeout_ > 0) {
            connection.setReadTimeout(readTimeout_);
        }
        // Make the request
        connection.setRequestMethod("POST");
        connection.setRequestProperty("Content-Type", "application/x-thrift");
        connection.setRequestProperty("Accept", "application/x-thrift");
        connection.setRequestProperty("User-Agent", "Java/THttpClient");
        if (customHeaders_ != null) {
            for (Map.Entry<String, String> header : customHeaders_.entrySet()) {
                connection.setRequestProperty(header.getKey(), header.getValue());
            }
        }
        connection.setDoOutput(true);
        connection.connect();
        // NOTE(review): the output stream is never explicitly closed;
        // getResponseCode() below forces the exchange — confirm this is intentional
        connection.getOutputStream().write(data);
        int responseCode = connection.getResponseCode();
        if (responseCode != HttpURLConnection.HTTP_OK) {
            throw new TTransportException("HTTP Response code: " + responseCode);
        }
        // Read the responses
        inputStream_ = connection.getInputStream();
    } catch (IOException iox) {
        throw new TTransportException(iox);
    } finally {
        resetConsumedMessageSize(-1);
    }
}
}
|
Factory
|
java
|
apache__flink
|
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/logical/TimeAttributeWindowingStrategy.java
|
{
"start": 1371,
"end": 2452
}
|
/**
 * Windowing strategy keyed on a time-attribute column, identified by its index
 * in the input row. JSON (de)serialization is driven by the Jackson annotations.
 */
class ____ extends WindowingStrategy {
    public static final String FIELD_NAME_TIME_ATTRIBUTE_INDEX = "timeAttributeIndex";

    // index of the time-attribute field within the input row
    @JsonProperty(FIELD_NAME_TIME_ATTRIBUTE_INDEX)
    private final int timeAttributeIndex;

    @JsonCreator
    public TimeAttributeWindowingStrategy(
            @JsonProperty(FIELD_NAME_WINDOW) WindowSpec window,
            @JsonProperty(value = FIELD_NAME_TIME_ATTRIBUTE_TYPE) LogicalType timeAttributeType,
            @JsonProperty(FIELD_NAME_TIME_ATTRIBUTE_INDEX) int timeAttributeIndex) {
        super(window, timeAttributeType);
        this.timeAttributeIndex = timeAttributeIndex;
    }

    /**
     * Renders {@code time_col=[<field>]} plus the window's own summary; the
     * index must lie within the given field-name array.
     */
    @Override
    public String toSummaryString(String[] inputFieldNames) {
        checkArgument(timeAttributeIndex >= 0 && timeAttributeIndex < inputFieldNames.length);
        String windowing = String.format("time_col=[%s]", inputFieldNames[timeAttributeIndex]);
        return window.toSummaryString(windowing, inputFieldNames);
    }

    public int getTimeAttributeIndex() {
        return timeAttributeIndex;
    }
}
|
TimeAttributeWindowingStrategy
|
java
|
apache__camel
|
components/camel-ai/camel-djl/src/generated/java/org/apache/camel/component/djl/DJLConverterLoader.java
|
{
"start": 879,
"end": 6333
}
|
/**
 * Registers the DJL type converters with Camel's TypeConverterRegistry.
 *
 * NOTE(review): the repetitive registration bodies — including the
 * constant-false {@code if (false && answer == null)} guard, dead here because
 * every converter is registered with allowNull=false — look like Camel's
 * generated converter-loader boilerplate; presumably this file is generated and
 * should not be hand-edited. TODO confirm against the generator.
 */
class ____ implements TypeConverterLoader, CamelContextAware {
    private CamelContext camelContext;

    public DJLConverterLoader() {
    }

    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }

    @Override
    public void load(TypeConverterRegistry registry) throws TypeConverterLoaderException {
        registerConverters(registry);
    }

    // One registration per (target type, source type) pair, each delegating to a
    // static DJLConverter method.
    private void registerConverters(TypeConverterRegistry registry) {
        addTypeConverter(registry, ai.djl.modality.audio.Audio.class, byte[].class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toAudio((byte[]) value);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, ai.djl.modality.audio.Audio.class, java.io.File.class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toAudio((java.io.File) value);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, ai.djl.modality.audio.Audio.class, java.io.InputStream.class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toAudio((java.io.InputStream) value);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, ai.djl.modality.audio.Audio.class, java.nio.file.Path.class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toAudio((java.nio.file.Path) value);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, ai.djl.modality.cv.Image.class, ai.djl.modality.cv.output.DetectedObjects.DetectedObject.class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toImage((ai.djl.modality.cv.output.DetectedObjects.DetectedObject) value, exchange);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, ai.djl.modality.cv.Image.class, byte[].class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toImage((byte[]) value);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, ai.djl.modality.cv.Image.class, java.io.File.class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toImage((java.io.File) value);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, ai.djl.modality.cv.Image.class, java.io.InputStream.class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toImage((java.io.InputStream) value);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, ai.djl.modality.cv.Image.class, java.nio.file.Path.class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toImage((java.nio.file.Path) value);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, ai.djl.modality.cv.Image[].class, ai.djl.modality.cv.output.DetectedObjects.class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toImages((ai.djl.modality.cv.output.DetectedObjects) value, exchange);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
        addTypeConverter(registry, byte[].class, ai.djl.modality.cv.Image.class, false,
                (type, exchange, value) -> {
                    Object answer = org.apache.camel.component.djl.DJLConverter.toBytes((ai.djl.modality.cv.Image) value, exchange);
                    if (false && answer == null) {
                        answer = Void.class;
                    }
                    return answer;
                });
    }

    // Helper wrapping the conversion lambda into a SimpleTypeConverter before registration.
    private static void addTypeConverter(TypeConverterRegistry registry, Class<?> toType, Class<?> fromType, boolean allowNull, SimpleTypeConverter.ConversionMethod method) {
        registry.addTypeConverter(toType, fromType, new SimpleTypeConverter(allowNull, method));
    }
}
|
DJLConverterLoader
|
java
|
elastic__elasticsearch
|
libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentBuilderExtension.java
|
{
"start": 913,
"end": 1362
}
|
interface ____ {
/**
* Used for plugging in a generic writer for a class, for example, an example implementation:
*
* <pre>
* {@code
* Map<Class<?>, XContentBuilder.Writer> addlWriters = new HashMap<>();
* addlWriters.put(BytesRef.class, (builder, value) -> b.value(((BytesRef) value).utf8String()));
* return addlWriters;
* }
* </pre>
*
* @return a map of
|
XContentBuilderExtension
|
java
|
apache__spark
|
common/kvstore/src/test/java/org/apache/spark/util/kvstore/InMemoryStoreSuite.java
|
{
"start": 999,
"end": 7106
}
|
/**
 * Unit tests for the in-memory KVStore implementation: write/read/delete,
 * metadata, updates, array-typed keys, bulk removal by index, and iteration.
 */
class ____ {
    // single object: read before write fails, write/read round-trips, delete empties the store
    @Test
    public void testObjectWriteReadDelete() throws Exception {
        KVStore store = new InMemoryStore();
        CustomType1 t = new CustomType1();
        t.key = "key";
        t.id = "id";
        t.name = "name";
        assertThrows(NoSuchElementException.class, () -> store.read(CustomType1.class, t.key));
        store.write(t);
        assertEquals(t, store.read(t.getClass(), t.key));
        assertEquals(1L, store.count(t.getClass()));
        store.delete(t.getClass(), t.key);
        assertThrows(NoSuchElementException.class, () -> store.read(t.getClass(), t.key));
    }

    // two objects of the same type are stored and deleted independently
    @Test
    public void testMultipleObjectWriteReadDelete() throws Exception {
        KVStore store = new InMemoryStore();
        CustomType1 t1 = new CustomType1();
        t1.key = "key1";
        t1.id = "id";
        t1.name = "name1";
        CustomType1 t2 = new CustomType1();
        t2.key = "key2";
        t2.id = "id";
        t2.name = "name2";
        store.write(t1);
        store.write(t2);
        assertEquals(t1, store.read(t1.getClass(), t1.key));
        assertEquals(t2, store.read(t2.getClass(), t2.key));
        assertEquals(2L, store.count(t1.getClass()));
        store.delete(t1.getClass(), t1.key);
        assertEquals(t2, store.read(t2.getClass(), t2.key));
        store.delete(t2.getClass(), t2.key);
        assertThrows(NoSuchElementException.class, () -> store.read(t2.getClass(), t2.key));
    }

    // per-class metadata: absent by default, settable, clearable with null
    @Test
    public void testMetadata() throws Exception {
        KVStore store = new InMemoryStore();
        assertNull(store.getMetadata(CustomType1.class));
        CustomType1 t = new CustomType1();
        t.id = "id";
        t.name = "name";
        store.setMetadata(t);
        assertEquals(t, store.getMetadata(CustomType1.class));
        store.setMetadata(null);
        assertNull(store.getMetadata(CustomType1.class));
    }

    // re-writing the same key overwrites in place (count stays 1, same instance returned)
    @Test
    public void testUpdate() throws Exception {
        KVStore store = new InMemoryStore();
        CustomType1 t = new CustomType1();
        t.key = "key";
        t.id = "id";
        t.name = "name";
        store.write(t);
        t.name = "anotherName";
        store.write(t);
        assertEquals(1, store.count(t.getClass()));
        assertSame(t, store.read(t.getClass(), t.key));
    }

    // array-valued natural keys and secondary indices are supported
    @Test
    public void testArrayIndices() throws Exception {
        KVStore store = new InMemoryStore();
        ArrayKeyIndexType o = new ArrayKeyIndexType();
        o.key = new int[] { 1, 2 };
        o.id = new String[] { "3", "4" };
        store.write(o);
        assertEquals(o, store.read(ArrayKeyIndexType.class, o.key));
        assertEquals(o, store.view(ArrayKeyIndexType.class).index("id").first(o.id).iterator().next());
    }

    // bulk removal by natural-index and secondary-index values, including misses
    @Test
    public void testRemoveAll() throws Exception {
        KVStore store = new InMemoryStore();
        for (int i = 0; i < 2; i++) {
            for (int j = 0; j < 2; j++) {
                ArrayKeyIndexType o = new ArrayKeyIndexType();
                o.key = new int[] { i, j, 0 };
                o.id = new String[] { "things" };
                store.write(o);
                o = new ArrayKeyIndexType();
                o.key = new int[] { i, j, 1 };
                o.id = new String[] { "more things" };
                store.write(o);
            }
        }
        ArrayKeyIndexType o = new ArrayKeyIndexType();
        o.key = new int[] { 2, 2, 2 };
        o.id = new String[] { "things" };
        store.write(o);
        assertEquals(9, store.count(ArrayKeyIndexType.class));
        // Try removing non-existing keys
        assertFalse(store.removeAllByIndexValues(
            ArrayKeyIndexType.class,
            KVIndex.NATURAL_INDEX_NAME,
            Set.of(new int[] {10, 10, 10}, new int[] { 3, 3, 3 })));
        assertEquals(9, store.count(ArrayKeyIndexType.class));
        assertTrue(store.removeAllByIndexValues(
            ArrayKeyIndexType.class,
            KVIndex.NATURAL_INDEX_NAME,
            Set.of(new int[] {0, 0, 0}, new int[] { 2, 2, 2 })));
        assertEquals(7, store.count(ArrayKeyIndexType.class));
        assertTrue(store.removeAllByIndexValues(
            ArrayKeyIndexType.class,
            "id",
            Set.<String[]>of(new String [] { "things" })));
        assertEquals(4, store.count(ArrayKeyIndexType.class));
        assertTrue(store.removeAllByIndexValues(
            ArrayKeyIndexType.class,
            "id",
            Set.<String[]>of(new String [] { "more things" })));
        assertEquals(0, store.count(ArrayKeyIndexType.class));
    }

    // view() iteration: ordering, skip, max, and first() positioning
    @Test
    public void testBasicIteration() throws Exception {
        KVStore store = new InMemoryStore();
        CustomType1 t1 = new CustomType1();
        t1.key = "1";
        t1.id = "id1";
        t1.name = "name1";
        store.write(t1);
        CustomType1 t2 = new CustomType1();
        t2.key = "2";
        t2.id = "id2";
        t2.name = "name2";
        store.write(t2);
        assertEquals(t1.id, store.view(t1.getClass()).iterator().next().id);
        assertEquals(t2.id, store.view(t1.getClass()).skip(1).iterator().next().id);
        assertEquals(t2.id, store.view(t1.getClass()).skip(1).max(1).iterator().next().id);
        assertEquals(t1.id,
            store.view(t1.getClass()).first(t1.key).max(1).iterator().next().id);
        assertEquals(t2.id,
            store.view(t1.getClass()).first(t2.key).max(1).iterator().next().id);
        assertFalse(store.view(t1.getClass()).first(t2.id).skip(1).iterator().hasNext());
    }

    // deleting entries keeps the parent-index bookkeeping consistent
    @Test
    public void testDeleteParentIndex() throws Exception {
        KVStore store = new InMemoryStore();
        CustomType2 t1 = new CustomType2();
        t1.key = "key1";
        t1.id = "id1";
        t1.parentId = "parentId1";
        store.write(t1);
        CustomType2 t2 = new CustomType2();
        t2.key = "key2";
        t2.id = "id2";
        t2.parentId = "parentId1";
        store.write(t2);
        CustomType2 t3 = new CustomType2();
        t3.key = "key3";
        t3.id = "id1";
        t3.parentId = "parentId2";
        store.write(t3);
        CustomType2 t4 = new CustomType2();
        t4.key = "key4";
        t4.id = "id2";
        t4.parentId = "parentId2";
        store.write(t4);
        assertEquals(4, store.count(CustomType2.class));
        store.delete(t1.getClass(), t1.key);
        assertEquals(3, store.count(CustomType2.class));
        store.delete(t2.getClass(), t2.key);
        assertEquals(2, store.count(CustomType2.class));
        store.delete(t3.getClass(), t3.key);
        assertEquals(1, store.count(CustomType2.class));
        store.delete(t4.getClass(), t4.key);
        assertEquals(0, store.count(CustomType2.class));
    }
}
|
InMemoryStoreSuite
|
java
|
spring-projects__spring-boot
|
module/spring-boot-webclient-test/src/test/java/org/springframework/boot/webclient/test/autoconfigure/WebClientTestWithoutJacksonIntegrationTests.java
|
{
"start": 1339,
"end": 1992
}
|
/**
 * Verifies the WebClient test slice still works when neither Jackson 2 nor
 * Jackson 3 is present on the classpath.
 */
class ____ {

    @Autowired
    private MockWebServer server;

    @Autowired
    private ExampleWebClientService client;

    @Test
    void webClientTestCanBeUsedWhenJacksonIsNotOnTheClassPath() throws InterruptedException {
        // precondition: the test is only meaningful with both Jackson variants absent
        ClassLoader loader = getClass().getClassLoader();
        assertThat(ClassUtils.isPresent("com.fasterxml.jackson.databind.Module", loader)).isFalse();
        assertThat(ClassUtils.isPresent("tools.jackson.databind.JacksonModule", loader)).isFalse();
        // queue a canned response, exercise the client, then consume the recorded request
        this.server.enqueue(new MockResponse().setBody("hello"));
        assertThat(this.client.test()).isEqualTo("hello");
        this.server.takeRequest();
    }
}
|
WebClientTestWithoutJacksonIntegrationTests
|
java
|
quarkusio__quarkus
|
integration-tests/main/src/main/java/io/quarkus/it/rest/GouvFrGeoApiClientImpl.java
|
{
"start": 219,
"end": 775
}
|
/**
 * Stub geo API endpoint: always answers with Paris and its 20 arrondissement
 * postal codes, regardless of the requested postal code.
 */
class ____ {
    @GET
    @Path("/communes")
    public Set<Commune> getCommunes(
            @QueryParam("codePostal") String postalCode) {
        // generate 75001..75020 instead of listing them by hand
        Set<String> cp = new HashSet<>();
        for (int arrondissement = 1; arrondissement <= 20; arrondissement++) {
            cp.add(String.format("750%02d", arrondissement));
        }
        Set<Commune> communes = new HashSet<>();
        communes.add(new Commune("Paris", "75056", "75", "11", cp, 2190327));
        return communes;
    }
}
|
GouvFrGeoApiClientImpl
|
java
|
apache__flink
|
flink-test-utils-parent/flink-connector-test-utils/src/main/java/org/apache/flink/connector/testframe/utils/CollectIteratorAssert.java
|
{
"start": 10155,
"end": 10668
}
|
/**
 * Cursor over the records collected from one split: current() peeks at the
 * record under the cursor, forward() advances it, hasNext() reports whether any
 * record remains.
 */
class ____<T> {
    private final List<T> records;
    // cursor position; only ever moves forward
    private int offset = 0;

    public RecordsFromSplit(List<T> records) {
        this.records = records;
    }

    /** Returns the record under the cursor, or null when the split is exhausted. */
    public T current() {
        return hasNext() ? records.get(offset) : null;
    }

    public void forward() {
        offset++;
    }

    public boolean hasNext() {
        return offset < records.size();
    }
}
}
|
RecordsFromSplit
|
java
|
elastic__elasticsearch
|
test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterFactory.java
|
{
"start": 517,
"end": 617
}
|
interface ____<S extends ClusterSpec, H extends ClusterHandle> {
    /**
     * Creates a cluster handle for the given specification.
     *
     * @param spec the cluster specification to realize
     * @return the handle for the created cluster
     */
    H create(S spec);
}
|
ClusterFactory
|
java
|
spring-projects__spring-security
|
test/src/test/java/org/springframework/security/test/context/support/ReactorContextTestExecutionListenerTests.java
|
{
"start": 1715,
"end": 7283
}
|
class ____ {
@Mock
private TestContext testContext;

// listener under test: bridges TestSecurityContextHolder into the Reactor context
private ReactorContextTestExecutionListener listener = new ReactorContextTestExecutionListener();

@AfterEach
public void cleanup() {
    // reset the thread-local security context and any onLastOperator hook
    TestSecurityContextHolder.clearContext();
    Hooks.resetOnLastOperator();
}

// no security context set up -> the reactive context resolves to empty
@Test
public void beforeTestMethodWhenSecurityContextEmptyThenReactorContextNull() throws Exception {
    this.listener.beforeTestMethod(this.testContext);
    Mono<?> result = ReactiveSecurityContextHolder.getContext();
    StepVerifier.create(result).verifyComplete();
}
// a context holding a null authentication still yields an empty reactive context
@Test
public void beforeTestMethodWhenNullAuthenticationThenReactorContextNull() throws Exception {
    TestSecurityContextHolder.setContext(new SecurityContextImpl());
    this.listener.beforeTestMethod(this.testContext);
    Mono<?> result = ReactiveSecurityContextHolder.getContext();
    StepVerifier.create(result).verifyComplete();
}

// an authentication in the test holder becomes visible through the reactive context
@Test
public void beforeTestMethodWhenAuthenticationThenReactorContextHasAuthentication() throws Exception {
    TestingAuthenticationToken expectedAuthentication = new TestingAuthenticationToken("user", "password",
            "ROLE_USER");
    TestSecurityContextHolder.setAuthentication(expectedAuthentication);
    this.listener.beforeTestMethod(this.testContext);
    assertAuthentication(expectedAuthentication);
}

// a custom SecurityContext implementation is propagated as-is
@Test
public void beforeTestMethodWhenCustomContext() throws Exception {
    TestingAuthenticationToken expectedAuthentication = new TestingAuthenticationToken("user", "password",
            "ROLE_USER");
    SecurityContext context = new CustomContext(expectedAuthentication);
    TestSecurityContextHolder.setContext(context);
    this.listener.beforeTestMethod(this.testContext);
    assertSecurityContext(context);
}

// an explicit contextWrite inside the chain wins over the listener-provided context
@Test
public void beforeTestMethodWhenExistingAuthenticationThenReactorContextHasOriginalAuthentication()
        throws Exception {
    TestingAuthenticationToken expectedAuthentication = new TestingAuthenticationToken("user", "password",
            "ROLE_USER");
    TestingAuthenticationToken contextHolder = new TestingAuthenticationToken("contextHolder", "password",
            "ROLE_USER");
    TestSecurityContextHolder.setAuthentication(contextHolder);
    this.listener.beforeTestMethod(this.testContext);
    Mono<Authentication> authentication = Mono.just("any")
        .flatMap((s) -> ReactiveSecurityContextHolder.getContext().map(SecurityContext::getAuthentication))
        .contextWrite(ReactiveSecurityContextHolder.withAuthentication(expectedAuthentication));
    StepVerifier.create(authentication).expectNext(expectedAuthentication).verifyComplete();
}
// clearing the reactive context inside the chain is not overridden by the listener
@Test
public void beforeTestMethodWhenClearThenReactorContextDoesNotOverride() throws Exception {
    TestingAuthenticationToken expectedAuthentication = new TestingAuthenticationToken("user", "password",
            "ROLE_USER");
    TestingAuthenticationToken contextHolder = new TestingAuthenticationToken("contextHolder", "password",
            "ROLE_USER");
    TestSecurityContextHolder.setAuthentication(contextHolder);
    this.listener.beforeTestMethod(this.testContext);
    Mono<Authentication> authentication = Mono.just("any")
        .flatMap((s) -> ReactiveSecurityContextHolder.getContext().map(SecurityContext::getAuthentication))
        .contextWrite(ReactiveSecurityContextHolder.clearContext());
    StepVerifier.create(authentication).verifyComplete();
}

// tearing down without any prior context must not raise
@Test
public void afterTestMethodWhenSecurityContextEmptyThenNoError() throws Exception {
    this.listener.beforeTestMethod(this.testContext);
    this.listener.afterTestMethod(this.testContext);
}

// after teardown, the reactive context is empty again
@Test
public void afterTestMethodWhenSetupThenReactorContextNull() throws Exception {
    beforeTestMethodWhenAuthenticationThenReactorContextHasAuthentication();
    this.listener.afterTestMethod(this.testContext);
    assertThat(Mono.deferContextual(Mono::just).block().isEmpty()).isTrue();
}

// teardown leaves hooks registered by others (different key) in place
@Test
public void afterTestMethodWhenDifferentHookIsRegistered() throws Exception {
    Object obj = new Object();
    Hooks.onLastOperator("CUSTOM_HOOK", (p) -> Mono.just(obj));
    this.listener.afterTestMethod(this.testContext);
    Object result = Mono.deferContextual(Mono::just).block();
    assertThat(result).isEqualTo(obj);
}
// the reactor listener must run after WithSecurityContextTestExecutionListener
@Test
public void orderWhenComparedToWithSecurityContextTestExecutionListenerIsAfter() {
    OrderComparator comparator = new OrderComparator();
    WithSecurityContextTestExecutionListener withSecurity = new WithSecurityContextTestExecutionListener();
    ReactorContextTestExecutionListener reactorContext = new ReactorContextTestExecutionListener();
    assertThat(comparator.compare(withSecurity, reactorContext)).isLessThan(0);
}

// the propagated context must also resolve when subscribed on another thread
@Test
public void checkSecurityContextResolutionWhenSubscribedContextCalledOnTheDifferentThreadThanWithSecurityContextTestExecutionListener()
        throws Exception {
    TestingAuthenticationToken contextHolder = new TestingAuthenticationToken("contextHolder", "password",
            "ROLE_USER");
    TestSecurityContextHolder.setAuthentication(contextHolder);
    this.listener.beforeTestMethod(this.testContext);
    ForkJoinPool.commonPool().submit(() -> assertAuthentication(contextHolder)).join();
}

// helper: the reactive context must resolve to exactly the expected authentication
public void assertAuthentication(Authentication expected) {
    Mono<Authentication> authentication = ReactiveSecurityContextHolder.getContext()
        .map(SecurityContext::getAuthentication);
    StepVerifier.create(authentication).expectNext(expected).verifyComplete();
}

// helper: the reactive context must resolve to exactly the expected SecurityContext
private void assertSecurityContext(SecurityContext expected) {
    Mono<SecurityContext> securityContext = ReactiveSecurityContextHolder.getContext();
    StepVerifier.create(securityContext).expectNext(expected).verifyComplete();
}
static
|
ReactorContextTestExecutionListenerTests
|
java
|
apache__maven
|
api/maven-api-core/src/main/java/org/apache/maven/api/feature/Features.java
|
{
"start": 979,
"end": 1131
}
|
class ____ Maven Core feature information.
* Features configured are supposed to be final in a given maven session.
*
* @since 4.0.0
*/
public final
|
for
|
java
|
apache__camel
|
components/camel-sjms/src/main/java/org/apache/camel/component/sjms/SjmsMessage.java
|
{
"start": 1661,
"end": 10430
}
|
class ____ extends DefaultMessage {
private static final Logger LOG = LoggerFactory.getLogger(SjmsMessage.class);
private Message jmsMessage;
private Session jmsSession;
private JmsBinding binding;
/**
 * Creates a Camel message wrapping the given JMS message/session, using the
 * binding to translate between the two worlds.
 */
public SjmsMessage(Exchange exchange, Message jmsMessage, Session jmsSession, JmsBinding binding) {
    super(exchange);
    setJmsMessage(jmsMessage);
    setJmsSession(jmsSession);
    setBinding(binding);
    setPayloadForTrait(MessageTrait.REDELIVERY, JmsMessageHelper.evalRedeliveryMessageTrait(jmsMessage));
}

/** Re-initializes this instance for reuse (pooled exchanges). */
public void init(Exchange exchange, Message jmsMessage, Session jmsSession, JmsBinding binding) {
    setExchange(exchange);
    setJmsMessage(jmsMessage);
    setJmsSession(jmsSession);
    setBinding(binding);
    // need to populate initial headers when we use pooled exchanges
    populateInitialHeaders(getHeaders());
    setPayloadForTrait(MessageTrait.REDELIVERY, JmsMessageHelper.evalRedeliveryMessageTrait(jmsMessage));
}
/** Clears all state so the instance can be returned to a pool. */
@Override
public void reset() {
    super.reset();
    setExchange(null);
    jmsMessage = null;
    jmsSession = null;
    binding = null;
}

@Override
public String toString() {
    // do not print jmsMessage as there could be sensitive details
    if (jmsMessage != null) {
        try {
            return "SjmsMessage[JmsMessageID: " + jmsMessage.getJMSMessageID() + "]";
        } catch (Exception e) {
            // ignore: fall through to the identity-based representation
        }
    }
    return "SjmsMessage@" + ObjectHelper.getIdentityHashCode(this);
}
/**
 * Copies message id, exchange, body and headers from the given message; shares
 * the underlying JMS message when copying from another SjmsMessage.
 */
@Override
public void copyFrom(org.apache.camel.Message that) {
    if (that == this) {
        // the same instance so do not need to copy
        return;
    }
    // must initialize headers before we set the JmsMessage to avoid Camel
    // populating it before we do the copy
    getHeaders().clear();
    boolean copyMessageId = true;
    if (that instanceof SjmsMessage) {
        SjmsMessage thatMessage = (SjmsMessage) that;
        this.jmsMessage = thatMessage.jmsMessage;
        if (this.jmsMessage != null) {
            // for performance lets not copy the messageID if we are a JMS message
            copyMessageId = false;
        }
    }
    if (copyMessageId) {
        setMessageId(that.getMessageId());
    }
    // cover over exchange if none has been assigned
    if (getExchange() == null) {
        setExchange(that.getExchange());
    }
    // copy body and fault flag
    copyBody(that, this);
    // we have already cleared the headers
    if (that.hasHeaders()) {
        getHeaders().putAll(that.getHeaders());
    }
}
/** Returns the binding, resolving it lazily from the exchange when unset. */
public JmsBinding getBinding() {
    if (binding == null) {
        binding = ExchangeHelper.getBinding(getExchange(), JmsBinding.class);
    }
    return binding;
}

public void setBinding(JmsBinding binding) {
    this.binding = binding;
}

/**
 * Returns the underlying JMS message
 */
public Message getJmsMessage() {
    return jmsMessage;
}

/** Sets the JMS message and adopts its JMSMessageID as the Camel message id. */
public void setJmsMessage(Message jmsMessage) {
    if (jmsMessage != null) {
        try {
            setMessageId(jmsMessage.getJMSMessageID());
        } catch (JMSException e) {
            LOG.warn("Unable to retrieve JMSMessageID from JMS Message", e);
        }
    }
    this.jmsMessage = jmsMessage;
}

/**
 * Returns the underlying JMS session.
 * <p/>
 * This may be <tt>null</tt>.
 */
public Session getJmsSession() {
    return jmsSession;
}

public void setJmsSession(Session jmsSession) {
    this.jmsSession = jmsSession;
}
/** Sets the body; a null body detaches the JMS message but keeps the headers. */
@Override
public void setBody(Object body) {
    super.setBody(body);
    if (body == null) {
        // preserver headers even if we set body to null
        ensureInitialHeaders();
        // remove underlying jmsMessage since we mutated body to null
        jmsMessage = null;
    }
}

/**
 * Looks up a header, reading straight from the JMS message while the header map
 * has not yet been populated, except for JMS-prefixed names.
 */
@Override
public Object getHeader(String name) {
    Object answer = null;
    // we will exclude using JMS-prefixed headers here to avoid strangeness with some JMS providers
    // e.g. ActiveMQ returns the String not the Destination type for "JMSReplyTo"!
    // only look in jms message directly if we have not populated headers
    if (jmsMessage != null && !hasPopulatedHeaders() && !name.startsWith("JMS")) {
        try {
            // use binding to do the lookup as it has to consider using encoded keys
            answer = getBinding().getObjectProperty(jmsMessage, name);
        } catch (JMSException e) {
            throw new RuntimeExchangeException("Unable to retrieve header from JMS Message: " + name, getExchange(), e);
        }
    }
    // only look if we have populated headers otherwise there are no headers at all
    // if we do lookup a header starting with JMS then force a lookup
    if (answer == null && (hasPopulatedHeaders() || name.startsWith("JMS"))) {
        answer = super.getHeader(name);
    }
    return answer;
}
// Each header mutator forces the lazy header map to be materialized from the
// JMS message first, so no JMS-provided headers are lost.

@Override
public Map<String, Object> getHeaders() {
    ensureInitialHeaders();
    return super.getHeaders();
}

@Override
public Object removeHeader(String name) {
    ensureInitialHeaders();
    return super.removeHeader(name);
}

@Override
public void setHeaders(Map<String, Object> headers) {
    ensureInitialHeaders();
    super.setHeaders(headers);
}

@Override
public void setHeader(String name, Object value) {
    ensureInitialHeaders();
    super.setHeader(name, value);
}

/** Creates an empty message sharing this message's binding and camel context. */
@Override
public SjmsMessage newInstance() {
    SjmsMessage answer = new SjmsMessage(null, null, null, binding);
    answer.setCamelContext(getCamelContext());
    return answer;
}
/**
 * Returns true if a new JMS message instance should be created to send to the next component
 */
public boolean shouldCreateNewMessage() {
    // once headers have been populated/mutated, the original JMS message no longer reflects them
    return super.hasPopulatedHeaders();
}
/**
 * Ensure that the headers have been populated from the underlying JMS message before we start mutating the headers
 */
protected void ensureInitialHeaders() {
    if (jmsMessage != null && !hasPopulatedHeaders()) {
        // we have not populated headers so force this by creating
        // new headers and set it on super
        super.setHeaders(createHeaders());
    }
}
@Override
protected Object createBody() {
    // lazily extract the body from the JMS message via the binding; null when no JMS message is set
    if (jmsMessage != null) {
        return getBinding().extractBodyFromJms(getExchange(), jmsMessage);
    }
    return null;
}
@Override
protected void populateInitialHeaders(Map<String, Object> map) {
    if (jmsMessage != null && map != null) {
        map.putAll(getBinding().extractHeadersFromJms(jmsMessage, getExchange()));
        try {
            map.put(Exchange.MESSAGE_TIMESTAMP, jmsMessage.getJMSTimestamp());
        } catch (JMSException e) {
            // ignore: the timestamp header is best-effort and not critical
        }
    }
}
@Override
protected String createMessageId() {
    if (jmsMessage == null) {
        LOG.trace("No jakarta.jms.Message set so generating a new message id");
        return super.createMessageId();
    }
    try {
        // prefix with the destination (topic/queue name) so ids stay unique across destinations
        String id = getDestinationAsString(jmsMessage.getJMSDestination());
        if (id != null) {
            id += jmsMessage.getJMSMessageID();
        } else {
            id = jmsMessage.getJMSMessageID();
        }
        return getSanitizedString(id);
    } catch (JMSException e) {
        throw new RuntimeExchangeException("Unable to retrieve JMSMessageID from JMS Message", getExchange(), e);
    }
}
/**
 * Renders the destination as a path-like prefix ("topic/<name>/" or "queue/<name>/"),
 * or {@code null} for destination types that are neither topic nor queue.
 */
private String getDestinationAsString(Destination destination) throws JMSException {
    if (destination == null) {
        return "null destination!" + File.separator;
    }
    if (destination instanceof Topic) {
        return "topic" + File.separator + ((Topic) destination).getTopicName() + File.separator;
    }
    if (destination instanceof Queue) {
        return "queue" + File.separator + ((Queue) destination).getQueueName() + File.separator;
    }
    return null;
}
/**
 * Replaces every character outside [a-zA-Z0-9._-] with an underscore; returns "" for null.
 */
private String getSanitizedString(Object value) {
    if (value == null) {
        return "";
    }
    return value.toString().replaceAll("[^a-zA-Z0-9\\.\\_\\-]", "_");
}
}
|
SjmsMessage
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/bean/ExpressionAnnotationToDisambiguateMethodsTest.java
|
{
"start": 3004,
"end": 3449
}
|
class ____ {
    // captures the value bound by foo() so the test can assert on it
    public String bar;

    // must never be invoked: the @Handler annotation below should win method selection
    public void bar(String body) {
        fail("bar() called with: " + body);
    }

    @Handler
    public void foo(@Simple("${header.foo}") String bar) {
        // the @Simple expression binds the "foo" header value, not the message body
        this.bar = bar;
        LOG.info("foo() method called with: {}", bar);
    }

    // must never be invoked either
    public void wrongMethod(String body) {
        fail("wrongMethod() called with: " + body);
    }
}
}
|
MyOtherBean
|
java
|
junit-team__junit5
|
documentation/src/test/java/example/ParameterizedTestDemo.java
|
{
"start": 11381,
"end": 11945
}
|
class ____ implements ArgumentsProvider {
    @Override
    public Stream<? extends Arguments> provideArguments(ParameterDeclarations parameters,
            ExtensionContext context) {
        // supply each fruit name as a single-argument test invocation
        return Stream.of("apple", "banana").map(Arguments::of);
    }
}
// end::ArgumentsProvider_example[]
@ParameterizedTest
@ArgumentsSource(MyArgumentsProviderWithConstructorInjection.class)
void testWithArgumentsSourceWithConstructorInjection(String argument) {
assertNotNull(argument);
}
static
// tag::ArgumentsProviderWithConstructorInjection_example[]
public
|
MyArgumentsProvider
|
java
|
quarkusio__quarkus
|
extensions/resteasy-reactive/rest-jackson/deployment/src/test/java/io/quarkus/resteasy/reactive/jackson/deployment/test/NonAbsentSerializationTest.java
|
{
"start": 352,
"end": 947
}
|
class ____ extends AbstractNonAbsentSerializationTest {
    // Quarkus test harness: deploys the resource, the model class and the ObjectMapper
    // customizer together with an empty application.properties resource
    @RegisterExtension
    static QuarkusUnitTest test = new QuarkusUnitTest()
            .setArchiveProducer(new Supplier<>() {
                @Override
                public JavaArchive get() {
                    return ShrinkWrap.create(JavaArchive.class)
                            .addClasses(JsonIncludeTestResource.class, MyObject.class, NonAbsentObjectMapperCustomizer.class)
                            .addAsResource(new StringAsset(""), "application.properties");
                }
            });
}
|
NonAbsentSerializationTest
|
java
|
netty__netty
|
codec-http3/src/main/java/io/netty/handler/codec/http3/Http3ServerConnectionHandler.java
|
{
"start": 1123,
"end": 5066
}
|
class ____ extends Http3ConnectionHandler {

    // handler installed on every new bidirectional (request) stream
    private final ChannelHandler requestStreamHandler;

    /**
     * Create a new instance.
     *
     * @param requestStreamHandler the {@link ChannelHandler} that is used for each new request stream.
     *                             This handler will receive {@link Http3HeadersFrame} and {@link Http3DataFrame}s.
     */
    public Http3ServerConnectionHandler(ChannelHandler requestStreamHandler) {
        this(requestStreamHandler, null, null, null, true);
    }

    /**
     * Create a new instance.
     * @param requestStreamHandler the {@link ChannelHandler} that is used for each new request stream.
     *                             This handler will receive {@link Http3HeadersFrame} and
     *                             {@link Http3DataFrame}s.
     * @param inboundControlStreamHandler the {@link ChannelHandler} which will be notified about
     *                                    {@link Http3RequestStreamFrame}s or {@code null} if the user is not
     *                                    interested in these.
     * @param unknownInboundStreamHandlerFactory the {@link LongFunction} that will provide a custom
     *                                           {@link ChannelHandler} for unknown inbound stream types or
     *                                           {@code null} if no special handling should be done.
     * @param localSettings the local {@link Http3SettingsFrame} that should be sent to the
     *                      remote peer or {@code null} if the default settings should be used.
     * @param disableQpackDynamicTable If QPACK dynamic table should be disabled.
     */
    public Http3ServerConnectionHandler(ChannelHandler requestStreamHandler,
            @Nullable ChannelHandler inboundControlStreamHandler,
            @Nullable LongFunction<ChannelHandler> unknownInboundStreamHandlerFactory,
            @Nullable Http3SettingsFrame localSettings, boolean disableQpackDynamicTable) {
        super(true, inboundControlStreamHandler, unknownInboundStreamHandlerFactory, localSettings,
                disableQpackDynamicTable);
        this.requestStreamHandler = ObjectUtil.checkNotNull(requestStreamHandler, "requestStreamHandler");
    }

    @Override
    void initBidirectionalStream(ChannelHandlerContext ctx, QuicStreamChannel streamChannel) {
        ChannelPipeline pipeline = streamChannel.pipeline();
        Http3RequestStreamEncodeStateValidator encodeStateValidator = new Http3RequestStreamEncodeStateValidator();
        Http3RequestStreamDecodeStateValidator decodeStateValidator = new Http3RequestStreamDecodeStateValidator();
        // Add the encoder and decoder in the pipeline so we can handle Http3Frames
        pipeline.addLast(newCodec(encodeStateValidator, decodeStateValidator));
        // state validators must sit between the codec and the user handler
        pipeline.addLast(encodeStateValidator);
        pipeline.addLast(decodeStateValidator);
        pipeline.addLast(newRequestStreamValidationHandler(streamChannel, encodeStateValidator, decodeStateValidator));
        pipeline.addLast(requestStreamHandler);
    }

    @Override
    void initUnidirectionalStream(ChannelHandlerContext ctx, QuicStreamChannel streamChannel) {
        // dispatches control / QPACK encoder / QPACK decoder / unknown streams to their handlers
        final long maxTableCapacity = maxTableCapacity();
        streamChannel.pipeline().addLast(
                new Http3UnidirectionalStreamInboundServerHandler(codecFactory,
                        localControlStreamHandler, remoteControlStreamHandler,
                        unknownInboundStreamHandlerFactory,
                        () -> new QpackEncoderHandler(maxTableCapacity, qpackDecoder),
                        () -> new QpackDecoderHandler(qpackEncoder)));
    }
}
|
Http3ServerConnectionHandler
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/mapping/enumeratedvalue/NamedEnumEnumerateValueTests.java
|
{
"start": 4864,
"end": 5187
}
|
enum ____ {
    MALE( 'M' ),
    FEMALE( 'F' ),
    OTHER( 'U' );

    // single-character database code; mapped to the column via @EnumeratedValue
    @EnumeratedValue
    private final char code;

    Gender(char code) {
        this.code = code;
    }

    public char getCode() {
        return code;
    }
}
@SuppressWarnings({ "FieldCanBeLocal", "unused" })
@Entity(name="Person")
@Table(name="persons")
public static
|
Gender
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/annotations/index/jpa/Dealer.java
|
{
"start": 267,
"end": 558
}
|
/**
 * Serializable value object holding a dealer's name and rate.
 */
class ____ implements Serializable {

    private String name;
    private long rate;

    /** Sets the dealer name. */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the dealer name */
    public String getName() {
        return name;
    }

    /** Sets the dealer rate. */
    public void setRate(long rate) {
        this.rate = rate;
    }

    /** @return the dealer rate */
    public long getRate() {
        return rate;
    }
}
|
Dealer
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/collection/iterabletononiterable/Target.java
|
{
"start": 220,
"end": 473
}
|
/**
 * Mapping target exposing a "names" string both as a public field (publicNames)
 * and through a conventional getter/setter pair (names).
 */
class ____ {
    //CHECKSTYLE:OFF
    public String publicNames;
    //CHECKSTYLE:ON

    private String names;

    /** Stores the names value. */
    public void setNames(String names) {
        this.names = names;
    }

    /** @return the stored names value */
    public String getNames() {
        return names;
    }
}
|
Target
|
java
|
apache__flink
|
flink-core/src/main/java/org/apache/flink/configuration/description/TextElement.java
|
{
"start": 3384,
"end": 3422
}
|
// styles applicable to a text element; CODE renders the text as inline code
enum ____ {
    CODE
}
}
|
TextStyle
|
java
|
apache__spark
|
sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/RowLevelOperationInfo.java
|
{
"start": 1175,
"end": 1450
}
|
interface ____ {
    /**
     * Returns options that the user specified when performing the row-level operation.
     *
     * @return a case-insensitive map of user-specified options
     */
    CaseInsensitiveStringMap options();

    /**
     * Returns the row-level SQL command (e.g. DELETE, UPDATE, MERGE).
     */
    Command command();
}
|
RowLevelOperationInfo
|
java
|
quarkusio__quarkus
|
independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/core/startup/RuntimeResourceDeployment.java
|
{
"start": 5914,
"end": 28451
}
|
class ____ {
private static final ServerRestHandler[] EMPTY_REST_HANDLER_ARRAY = new ServerRestHandler[0];
@SuppressWarnings("rawtypes")
private static final MessageBodyWriter[] EMPTY_MESSAGE_BODY_WRITERS = new MessageBodyWriter[0];
// initial capacity for the per-method handler chain list
private static final int HANDLERS_CAPACITY = 10;
private static final Logger log = Logger.getLogger(RuntimeResourceDeployment.class);
private final DeploymentInfo info;
private final ServerSerialisers serialisers;
private final ResteasyReactiveConfig resteasyReactiveConfig;
// worker-thread executor for blocking endpoints
private final Supplier<Executor> executorSupplier;
// virtual-thread executor for @RunOnVirtualThread endpoints
private final Supplier<Executor> virtualExecutorSupplier;
private final RuntimeInterceptorDeployment runtimeInterceptorDeployment;
private final DynamicEntityWriter dynamicEntityWriter;
private final ResourceLocatorHandler resourceLocatorHandler;
/**
 * If the runtime will always default to blocking (e.g. Servlet)
 */
private final boolean defaultBlocking;
private final BlockingHandler blockingHandler;
private final BlockingHandler blockingHandlerVirtualThread;
private final ResponseWriterHandler responseWriterHandler;

/**
 * Creates the deployment helper that builds the per-method handler chains.
 */
public RuntimeResourceDeployment(DeploymentInfo info, Supplier<Executor> executorSupplier,
        Supplier<Executor> virtualExecutorSupplier,
        RuntimeInterceptorDeployment runtimeInterceptorDeployment, DynamicEntityWriter dynamicEntityWriter,
        ResourceLocatorHandler resourceLocatorHandler, boolean defaultBlocking) {
    this.info = info;
    this.serialisers = info.getSerialisers();
    this.resteasyReactiveConfig = info.getResteasyReactiveConfig();
    this.executorSupplier = executorSupplier;
    this.virtualExecutorSupplier = virtualExecutorSupplier;
    this.runtimeInterceptorDeployment = runtimeInterceptorDeployment;
    this.dynamicEntityWriter = dynamicEntityWriter;
    this.resourceLocatorHandler = resourceLocatorHandler;
    this.defaultBlocking = defaultBlocking;
    // pre-build the two blocking dispatch handlers so they can be shared by all methods
    this.blockingHandler = new BlockingHandler(executorSupplier);
    this.blockingHandlerVirtualThread = new BlockingHandler(virtualExecutorSupplier);
    this.responseWriterHandler = new ResponseWriterHandler(dynamicEntityWriter);
}
public RuntimeResource buildResourceMethod(ResourceClass clazz,
ServerResourceMethod method, boolean locatableResource, URITemplate classPathTemplate, DeploymentInfo info) {
URITemplate methodPathTemplate = new URITemplate(method.getPath(), method.isResourceLocator());
MultivaluedMap<ScoreSystem.Category, ScoreSystem.Diagnostic> score = new QuarkusMultivaluedHashMap<>();
Map<String, Integer> pathParameterIndexes = buildParamIndexMap(classPathTemplate, methodPathTemplate);
MediaType streamElementType = null;
if (method.getStreamElementType() != null) {
streamElementType = MediaTypeHelper.valueOf(method.getStreamElementType());
}
List<MediaType> consumesMediaTypes;
if (method.getConsumes() == null) {
consumesMediaTypes = Collections.emptyList();
} else {
consumesMediaTypes = new ArrayList<>(method.getConsumes().length);
for (String s : method.getConsumes()) {
consumesMediaTypes.add(MediaTypeHelper.valueOf(s));
}
}
Class<Object> resourceClass = loadClass(clazz.getClassName());
Class<?>[] parameterDeclaredTypes = new Class[method.getParameters().length];
Class<?>[] parameterDeclaredUnresolvedTypes = new Class[method.getParameters().length];
for (int i = 0; i < method.getParameters().length; ++i) {
MethodParameter parameter = method.getParameters()[i];
String declaredType = parameter.declaredType;
String declaredUnresolvedType = parameter.declaredUnresolvedType;
parameterDeclaredTypes[i] = loadClass(declaredType);
parameterDeclaredUnresolvedTypes[i] = parameterDeclaredTypes[i];
if (!declaredType.equals(declaredUnresolvedType)) {
parameterDeclaredUnresolvedTypes[i] = loadClass(declaredUnresolvedType);
}
}
ResteasyReactiveResourceInfo lazyMethod = new ResteasyReactiveResourceInfo(method.getName(), resourceClass,
parameterDeclaredUnresolvedTypes,
!defaultBlocking && !method.isBlocking(), method.getActualDeclaringClassName());
RuntimeInterceptorDeployment.MethodInterceptorContext interceptorDeployment = runtimeInterceptorDeployment
.forMethod(method, lazyMethod);
//setup reader and writer interceptors first
ServerRestHandler interceptorHandler = interceptorDeployment.setupInterceptorHandler();
//we want interceptors in the abort handler chain
List<ServerRestHandler> abortHandlingChain = new ArrayList<>(
3 + (interceptorHandler != null ? 1 : 0) + (info.getPreExceptionMapperHandler() != null ? 1 : 0));
List<ServerRestHandler> handlers = new ArrayList<>(HANDLERS_CAPACITY);
// we add null as the first item to make sure that subsequent items are added in the proper positions
// and that the items don't need to shifted when at the end of the method we set the
// first item
handlers.add(null);
addHandlers(handlers, clazz, method, info, HandlerChainCustomizer.Phase.AFTER_MATCH);
if (interceptorHandler != null) {
handlers.add(interceptorHandler);
}
// when a method is blocking, we also want all the request filters to run on the worker thread
// because they can potentially set thread local variables
// we don't need to run this for Servlet and other runtimes that default to blocking
Optional<Integer> blockingHandlerIndex = Optional.empty();
if (!defaultBlocking) {
if (method.isBlocking()) {
if (method.isRunOnVirtualThread()) {
handlers.add(blockingHandlerVirtualThread);
score.add(ScoreSystem.Category.Execution, ScoreSystem.Diagnostic.ExecutionVirtualThread);
} else {
handlers.add(blockingHandler);
score.add(ScoreSystem.Category.Execution, ScoreSystem.Diagnostic.ExecutionBlocking);
}
blockingHandlerIndex = Optional.of(handlers.size() - 1);
} else {
if (method.isRunOnVirtualThread()) {
//should not happen
log.error("a method was both non-blocking and @RunOnVirtualThread, it is now considered " +
"@RunOnVirtual and blocking");
handlers.add(blockingHandlerVirtualThread);
score.add(ScoreSystem.Category.Execution, ScoreSystem.Diagnostic.ExecutionVirtualThread);
} else {
handlers.add(NonBlockingHandler.INSTANCE);
score.add(ScoreSystem.Category.Execution, ScoreSystem.Diagnostic.ExecutionNonBlocking);
}
}
}
// special case for AsyncFile which can't do async IO and handle interceptors
if (method.getReturnType().equals("Lio/vertx/core/file/AsyncFile;")
&& interceptorDeployment.hasWriterInterceptors()) {
throw new RuntimeException(
"Endpoints that return an AsyncFile cannot have any WriterInterceptor set");
}
//spec doesn't seem to test this, but RESTEasy does not run request filters for both root and sub resources (which makes sense)
//so only run request filters for methods that are leaf resources - i.e. have a HTTP method annotation so we ensure only one will run
boolean hasWithFormReadRequestFilters = false;
if (method.getHttpMethod() != null) {
List<ResourceRequestFilterHandler> containerRequestFilterHandlers = interceptorDeployment
.setupRequestFilterHandler();
if (blockingHandlerIndex.isPresent()) {
int initialIndex = blockingHandlerIndex.get();
for (int i = 0; i < containerRequestFilterHandlers.size(); i++) {
ResourceRequestFilterHandler handler = containerRequestFilterHandlers.get(i);
if (handler.isNonBlockingRequired()) {
// the non-blocking handlers are added in the order we have already determined, but they need to
// be added before the blocking handler
handlers.add(initialIndex + i, handler);
} else {
handlers.add(handler);
}
}
} else {
handlers.addAll(containerRequestFilterHandlers);
}
for (ResourceRequestFilterHandler handler : containerRequestFilterHandlers) {
if (handler.isWithFormRead()) {
hasWithFormReadRequestFilters = true;
break;
}
}
}
// some parameters need the body to be read
MethodParameter[] parameters = method.getParameters();
// body can only be in a parameter
MethodParameter bodyParameter = null;
int bodyParameterIndex = -1;
for (int i = 0; i < parameters.length; i++) {
MethodParameter param = parameters[i];
if (param.parameterType == ParameterType.BODY) {
bodyParameter = param;
bodyParameterIndex = i;
break;
}
}
// form params can be everywhere (field, beanparam, param)
boolean checkWithFormReadRequestFilters = false;
boolean inputHandlerEngaged = false;
if (method.isFormParamRequired() || hasWithFormReadRequestFilters) {
// read the body as multipart in one go
handlers.add(new FormBodyHandler(bodyParameter != null, executorSupplier, method.getFileFormNames()));
checkWithFormReadRequestFilters = true;
}
if (bodyParameter != null) {
if (!defaultBlocking) {
if (!method.isBlocking()) {
// allow the body to be read by chunks
handlers.add(new InputHandler(resteasyReactiveConfig.getInputBufferSize(), executorSupplier));
checkWithFormReadRequestFilters = true;
inputHandlerEngaged = true;
}
}
}
if (checkWithFormReadRequestFilters && hasWithFormReadRequestFilters) {
// we need to remove the corresponding filters from the handlers list and add them to its end in the same order
List<ServerRestHandler> readBodyRequestFilters = new ArrayList<>(1);
for (int i = handlers.size() - 2; i >= 0; i--) {
var serverRestHandler = handlers.get(i);
if (serverRestHandler instanceof ResourceRequestFilterHandler) {
ResourceRequestFilterHandler resourceRequestFilterHandler = (ResourceRequestFilterHandler) serverRestHandler;
if (resourceRequestFilterHandler.isWithFormRead()) {
readBodyRequestFilters.add(handlers.remove(i));
}
}
}
handlers.addAll(readBodyRequestFilters);
}
// if we need the body, let's deserialize it
if (bodyParameter != null) {
Class<Object> typeClass = loadClass(bodyParameter.declaredType);
Type genericType = typeClass;
if (!bodyParameter.type.equals(bodyParameter.declaredType)) {
// we only need to parse the signature and create generic type when the declared type differs from the type
genericType = TypeSignatureParser.parse(bodyParameter.signature);
}
handlers.add(new RequestDeserializeHandler(typeClass, genericType, consumesMediaTypes, serialisers,
bodyParameterIndex));
if (inputHandlerEngaged) {
handlers.add(NonBlockingHandler.INSTANCE);
}
}
// given that we may inject form params in the endpoint we need to make sure we read the body before
// we create/inject our endpoint
ServerRestHandler instanceHandler = null;
if (!locatableResource) {
if (clazz.isPerRequestResource()) {
instanceHandler = new PerRequestInstanceHandler(clazz.getFactory(), info.getClientProxyUnwrapper());
score.add(ScoreSystem.Category.Resource, ScoreSystem.Diagnostic.ResourcePerRequest);
} else {
instanceHandler = new InstanceHandler(clazz.getFactory());
score.add(ScoreSystem.Category.Resource, ScoreSystem.Diagnostic.ResourceSingleton);
}
handlers.add(instanceHandler);
}
addHandlers(handlers, clazz, method, info, HandlerChainCustomizer.Phase.RESOLVE_METHOD_PARAMETERS);
for (int i = 0; i < parameters.length; i++) {
ServerMethodParameter param = (ServerMethodParameter) parameters[i];
if (param.parameterType.equals(ParameterType.SKIPPED))
continue;
ParameterExtractor extractor = parameterExtractor(pathParameterIndexes, locatableResource, param);
ParameterConverter converter = null;
ParamConverterProviders paramConverterProviders = info.getParamConverterProviders();
boolean userProviderConvertersExist = !paramConverterProviders.getParamConverterProviders().isEmpty();
if (param.converter != null) {
converter = param.converter.get();
if (userProviderConvertersExist) {
Method javaMethod = lazyMethod.getMethod();
// Workaround our lack of support for generic params by not doing this init if there are not runtime
// param converter providers
Class<?>[] parameterTypes = javaMethod.getParameterTypes();
Type[] genericParameterTypes = javaMethod.getGenericParameterTypes();
Annotation[][] parameterAnnotations = javaMethod.getParameterAnnotations();
smartInitParameterConverter(i, converter, paramConverterProviders, parameterTypes, genericParameterTypes,
parameterAnnotations);
// make sure we give the user provided resolvers the chance to convert
converter = new RuntimeResolvedConverter(converter);
converter.init(paramConverterProviders, parameterTypes[i], genericParameterTypes[i],
parameterAnnotations[i]);
}
}
handlers.add(new ParameterHandler(i, param.getDefaultValue(), extractor,
converter, param.parameterType,
param.isObtainedAsCollection(), param.isOptional()));
}
addHandlers(handlers, clazz, method, info, HandlerChainCustomizer.Phase.BEFORE_METHOD_INVOKE);
EndpointInvoker invoker = method.getInvoker().get();
ServerRestHandler alternate = alternateInvoker(method, invoker);
if (alternate != null) {
handlers.add(alternate);
} else {
handlers.add(new InvocationHandler(invoker));
}
boolean afterMethodInvokeHandlersAdded = addHandlers(handlers, clazz, method, info,
HandlerChainCustomizer.Phase.AFTER_METHOD_INVOKE);
boolean afterMethodInvokeHandlersSecondRoundAdded = addHandlers(handlers, clazz, method, info,
HandlerChainCustomizer.Phase.AFTER_METHOD_INVOKE_SECOND_ROUND);
if (afterMethodInvokeHandlersAdded || afterMethodInvokeHandlersSecondRoundAdded) {
addStreamingResponseCustomizers(method, handlers);
}
Type returnType = TypeSignatureParser.parse(method.getReturnType());
Type effectiveReturnType = getEffectiveReturnType(returnType);
Class<?> rawEffectiveReturnType = getRawType(effectiveReturnType);
ServerMediaType serverMediaType = null;
if (method.getProduces() != null && method.getProduces().length > 0) {
// when negotiating a media type, we want to use the proper subtype to locate a ResourceWriter,
// hence the 'true' for 'useSuffix'
serverMediaType = new ServerMediaType(ServerMediaType.mediaTypesFromArray(method.getProduces()),
StandardCharsets.UTF_8.name(), false);
}
if (method.getHttpMethod() == null) {
//this is a resource locator method
handlers.add(resourceLocatorHandler);
} else if (!Response.class.isAssignableFrom(rawEffectiveReturnType)) {
//try and statically determine the media type and response writer
//we can't do this for all cases, but we can do it for the most common ones
//in practice this should work for the majority of endpoints
if (method.getProduces() != null && method.getProduces().length > 0) {
//the method can only produce a single content type, which is the most common case
if (method.getProduces().length == 1) {
MediaType mediaType = MediaTypeHelper.valueOf(method.getProduces()[0]);
//its a wildcard type, makes it hard to determine statically
if (mediaType.isWildcardType() || mediaType.isWildcardSubtype()) {
handlers.add(new VariableProducesHandler(serverMediaType, serialisers));
score.add(ScoreSystem.Category.Writer, ScoreSystem.Diagnostic.WriterRunTime);
} else if (isNotVoid(rawEffectiveReturnType)) {
List<MessageBodyWriter<?>> buildTimeWriters = serialisers.findBuildTimeWriters(rawEffectiveReturnType,
RuntimeType.SERVER, MediaTypeHelper.toListOfMediaType(method.getProduces()));
if (buildTimeWriters == null) {
//if this is null this means that the type cannot be resolved at build time
//this happens when the method returns a generic type (e.g. Object), so there
//are more specific mappers that could be invoked depending on the actual return value
handlers.add(new FixedProducesHandler(mediaType, dynamicEntityWriter));
score.add(ScoreSystem.Category.Writer, ScoreSystem.Diagnostic.WriterRunTime);
} else if (buildTimeWriters.isEmpty()) {
//we could not find any writers that can write a response to this endpoint
log.warn("Cannot find any combination of response writers for the method " + clazz.getClassName()
+ "#" + method.getName() + "(" + Arrays.toString(method.getParameters()) + ")");
handlers.add(new VariableProducesHandler(serverMediaType, serialisers));
score.add(ScoreSystem.Category.Writer, ScoreSystem.Diagnostic.WriterRunTime);
} else if (isSingleEffectiveWriter(buildTimeWriters)) {
MessageBodyWriter<?> writer = buildTimeWriters.get(0);
handlers.add(new FixedProducesHandler(mediaType, new FixedEntityWriter(
writer, serialisers)));
if (writer instanceof ServerMessageBodyWriter)
score.add(ScoreSystem.Category.Writer,
ScoreSystem.Diagnostic.WriterBuildTimeDirect(writer));
else
score.add(ScoreSystem.Category.Writer,
ScoreSystem.Diagnostic.WriterBuildTime(writer));
} else {
//multiple writers, we try them in the proper order which had already been created
handlers.add(new FixedProducesHandler(mediaType,
new FixedEntityWriterArray(buildTimeWriters.toArray(EMPTY_MESSAGE_BODY_WRITERS),
serialisers)));
score.add(ScoreSystem.Category.Writer,
ScoreSystem.Diagnostic.WriterBuildTimeMultiple(buildTimeWriters));
}
} else {
score.add(ScoreSystem.Category.Writer, ScoreSystem.Diagnostic.WriterNotRequired);
}
} else {
//there are multiple possibilities
//we could optimise this more in future
handlers.add(new VariableProducesHandler(serverMediaType, serialisers));
score.add(ScoreSystem.Category.Writer, ScoreSystem.Diagnostic.WriterRunTime);
}
} else {
score.add(ScoreSystem.Category.Writer, isNotVoid(rawEffectiveReturnType) ? ScoreSystem.Diagnostic.WriterRunTime
: ScoreSystem.Diagnostic.WriterNotRequired);
}
} else {
score.add(ScoreSystem.Category.Writer, ScoreSystem.Diagnostic.WriterRunTime);
}
//the response filter handlers, they need to be added to both the abort and
//normal chains. At the moment this only has one handler added to it but
//in future there will be one per filter
List<ServerRestHandler> responseFilterHandlers;
if (method.isSse()) {
handlers.add(SseResponseWriterHandler.INSTANCE);
responseFilterHandlers = Collections.emptyList();
} else {
addResponseHandler(method, handlers);
addHandlers(handlers, clazz, method, info, HandlerChainCustomizer.Phase.AFTER_RESPONSE_CREATED);
responseFilterHandlers = new ArrayList<>(interceptorDeployment.setupResponseFilterHandler());
handlers.addAll(responseFilterHandlers);
handlers.add(responseWriterHandler);
}
if (!clazz.resourceExceptionMapper().isEmpty() && (instanceHandler != null)) {
// when
|
RuntimeResourceDeployment
|
java
|
apache__camel
|
dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/ClickUpEndpointBuilderFactory.java
|
{
"start": 11031,
"end": 11353
}
|
// concrete builder bound to the given endpoint path; combines the basic and advanced DSLs
class ____ extends AbstractEndpointBuilder implements ClickUpEndpointBuilder, AdvancedClickUpEndpointBuilder {
    public ClickUpEndpointBuilderImpl(String path) {
        super(componentName, path);
    }
}
return new ClickUpEndpointBuilderImpl(path);
}
}
|
ClickUpEndpointBuilderImpl
|
java
|
elastic__elasticsearch
|
server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
|
{
"start": 29922,
"end": 31115
}
|
/**
 * Minimal bytes-reference stub whose identity is a single int value, used to exercise
 * cache-key equality and hashing without real byte content.
 */
class ____ extends AbstractBytesReference {

    int dummyValue;

    TestBytesReference(int dummyValue) {
        // zero length: the content is irrelevant for these tests
        super(0);
        this.dummyValue = dummyValue;
    }

    @Override
    public boolean equals(Object other) {
        return other instanceof TestBytesReference && this.dummyValue == ((TestBytesReference) other).dummyValue;
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + dummyValue;
        return result;
    }

    @Override
    public byte get(int index) {
        return 0;
    }

    @Override
    public BytesReference slice(int from, int length) {
        return null;
    }

    @Override
    public BytesRef toBytesRef() {
        return null;
    }

    @Override
    public BytesRefIterator iterator() {
        return BytesRefIterator.EMPTY;
    }

    @Override
    public long ramBytesUsed() {
        return 0;
    }

    @Override
    public boolean isFragment() {
        return false;
    }
}
private static
|
TestBytesReference
|
java
|
apache__hadoop
|
hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azurebfs/extensions/MockEncryptionContextProvider.java
|
{
"start": 1209,
"end": 2691
}
|
/**
 * Mock {@code EncryptionContextProvider} that generates a random encryption context and key
 * per path, remembering both so tests can verify the context/key round-trip.
 */
class ____ implements EncryptionContextProvider {
    // reuse one RNG instead of allocating a new Random on every getEncryptionContext call
    private final Random random = new Random();
    // path -> encryption context string handed out for that path
    private final HashMap<String, String> pathToContextMap = new HashMap<>();
    // encryption context string -> raw key bytes
    private final HashMap<String, byte[]> contextToKeyByteMap = new HashMap<>();

    @Override
    public void initialize(Configuration configuration, String accountName,
        String fileSystem) throws IOException {
        // no-op: the mock needs no configuration
    }

    @Override
    public ABFSKey getEncryptionContext(String path)
        throws IOException {
        String newContext = UUID.randomUUID().toString();
        pathToContextMap.put(path, newContext);
        byte[] newKey = new byte[ENCRYPTION_KEY_LEN];
        random.nextBytes(newKey);
        ABFSKey key = new ABFSKey(newKey);
        contextToKeyByteMap.put(newContext, key.getEncoded());
        // the "context" handed to the caller is the UUID string encoded as UTF-8 bytes
        return new ABFSKey(newContext.getBytes(StandardCharsets.UTF_8));
    }

    @Override
    public ABFSKey getEncryptionKey(String path, ABFSKey encryptionContext) throws IOException {
        String encryptionContextString =
            new String(encryptionContext.getEncoded(), StandardCharsets.UTF_8);
        // reject a context that was not issued for this exact path
        if (!encryptionContextString.equals(pathToContextMap.get(path))) {
            throw new IOException("encryption context does not match path");
        }
        return new ABFSKey(contextToKeyByteMap.get(encryptionContextString));
    }

    /** Test hook: raw key bytes for a previously issued context, or null if unknown. */
    public byte[] getEncryptionKeyForTest(String encryptionContext) {
        return contextToKeyByteMap.get(encryptionContext);
    }

    /** Test hook: the context issued for a path, or null if none was issued. */
    public String getEncryptionContextForTest(String path) {
        return pathToContextMap.get(path);
    }
}
|
MockEncryptionContextProvider
|
java
|
ReactiveX__RxJava
|
src/main/java/io/reactivex/rxjava3/internal/operators/flowable/FlowableCache.java
|
{
"start": 11226,
"end": 12672
}
|
class ____<T> extends AtomicInteger
implements Subscription {

    private static final long serialVersionUID = 6770240836423125754L;

    final Subscriber<? super T> downstream;

    final FlowableCache<T> parent;

    // requested amount; Long.MIN_VALUE is used as the cancelled marker
    final AtomicLong requested;

    // current node of the cached linked list being replayed to this subscriber
    Node<T> node;

    // offset into the current node's item array
    int offset;

    // total number of items emitted to this subscriber so far
    long index;

    /**
     * Constructs a new instance with the actual downstream consumer and
     * the parent cache object.
     * @param downstream the actual consumer
     * @param parent the parent that holds onto the cached items
     */
    CacheSubscription(Subscriber<? super T> downstream, FlowableCache<T> parent) {
        this.downstream = downstream;
        this.parent = parent;
        this.node = parent.head;
        this.requested = new AtomicLong();
    }

    @Override
    public void request(long n) {
        if (SubscriptionHelper.validate(n)) {
            // addCancel leaves the cancelled marker (Long.MIN_VALUE) untouched
            BackpressureHelper.addCancel(requested, n);
            parent.replay(this);
        }
    }

    @Override
    public void cancel() {
        // only remove from the parent on the first transition to cancelled
        if (requested.getAndSet(Long.MIN_VALUE) != Long.MIN_VALUE) {
            parent.remove(this);
        }
    }
}
/**
* Represents a segment of the cached item list as
* part of a linked-node-list structure.
* @param <T> the element type
*/
static final
|
CacheSubscription
|
java
|
apache__kafka
|
metadata/src/test/java/org/apache/kafka/image/publisher/SnapshotGeneratorTest.java
|
{
"start": 1840,
"end": 11062
}
|
/**
 * Test emitter that records every image passed to {@link #maybeEmit}. Emission blocks
 * until {@link #setReady()} is called, and can be forced to fail via {@link #setProblem}.
 */
class ____ implements SnapshotGenerator.Emitter {
    private final CountDownLatch latch = new CountDownLatch(1);
    private final List<MetadataImage> images = new CopyOnWriteArrayList<>();
    private volatile RuntimeException problem = null;

    /** Releases any thread blocked in {@link #maybeEmit}. */
    MockEmitter setReady() {
        latch.countDown();
        return this;
    }

    /** Makes the next {@link #maybeEmit} call throw the given exception. */
    synchronized MockEmitter setProblem(RuntimeException problem) {
        this.problem = problem;
        return this;
    }

    @Override
    public synchronized void maybeEmit(MetadataImage image) {
        RuntimeException currentProblem = problem;
        if (currentProblem != null) {
            throw currentProblem;
        }
        try {
            // Fail loudly on timeout: the original ignored the boolean result of await,
            // so a 30s timeout would silently record the image as if setReady() had been called.
            if (!latch.await(30, TimeUnit.SECONDS)) {
                throw new RuntimeException("Timed out waiting for the emitter to be made ready");
            }
        } catch (InterruptedException e) {
            // restore the interrupt flag before surfacing the failure
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
        images.add(image);
    }

    /** @return a snapshot copy of the images recorded so far */
    List<MetadataImage> images() {
        return new ArrayList<>(images);
    }
}
static LogDeltaManifest.Builder logDeltaManifestBuilder() {
return LogDeltaManifest.newBuilder()
.provenance(new MetadataProvenance(-1L, -1, -1L, true))
.leaderAndEpoch(LeaderAndEpoch.UNKNOWN)
.numBatches(1)
.elapsedNs(100)
.numBytes(100);
}
static LogDeltaManifest.Builder notBatchAlignedLogDeltaManifestBuilder() {
return LogDeltaManifest.newBuilder()
.provenance(MetadataProvenance.EMPTY)
.leaderAndEpoch(LeaderAndEpoch.UNKNOWN)
.numBatches(1)
.elapsedNs(100)
.numBytes(100);
}
private static final MetadataDelta TEST_DELTA;
static {
TEST_DELTA = new MetadataDelta.Builder().
setImage(MetadataImage.EMPTY).
build();
TEST_DELTA.replay(RecordTestUtils.testRecord(0).message());
}
private static final MetadataImage TEST_IMAGE = TEST_DELTA.apply(MetadataProvenance.EMPTY);
@Test
public void testCreateSnapshot() throws Exception {
MockFaultHandler faultHandler = new MockFaultHandler("SnapshotGenerator");
MockEmitter emitter = new MockEmitter();
try (SnapshotGenerator generator = new SnapshotGenerator.Builder(emitter).
setFaultHandler(faultHandler).
setMaxBytesSinceLastSnapshot(200).
setMaxTimeSinceLastSnapshotNs(TimeUnit.DAYS.toNanos(10)).
build()) {
// Publish a log delta batch. This one will not trigger a snapshot yet.
generator.publishLogDelta(TEST_IMAGE, logDeltaManifestBuilder().build());
// Publish a log delta batch. This will trigger a snapshot.
generator.publishLogDelta(TEST_IMAGE, logDeltaManifestBuilder().build());
// Publish a log delta batch. This one will be ignored because there are other images
// queued for writing.
generator.publishLogDelta(TEST_IMAGE, logDeltaManifestBuilder().numBytes(2000).build());
assertEquals(List.of(), emitter.images());
emitter.setReady();
}
assertEquals(List.of(TEST_IMAGE), emitter.images());
faultHandler.maybeRethrowFirstException();
}
@Test
public void testNoSnapshotCreatedWhenLastOffsetIsNotBatchAligned() throws Exception {
MockFaultHandler faultHandler = new MockFaultHandler("SnapshotGenerator");
MockEmitter emitter = new MockEmitter();
try (SnapshotGenerator generator = new SnapshotGenerator.Builder(emitter).
setFaultHandler(faultHandler).
setMaxBytesSinceLastSnapshot(200).
setMaxTimeSinceLastSnapshotNs(TimeUnit.DAYS.toNanos(10)).
build()) {
// None of these log delta batches should trigger a snapshot since their offset is not batch aligned.
generator.publishLogDelta(TEST_IMAGE, notBatchAlignedLogDeltaManifestBuilder().build());
generator.publishLogDelta(TEST_IMAGE, notBatchAlignedLogDeltaManifestBuilder().build());
generator.publishLogDelta(TEST_IMAGE, notBatchAlignedLogDeltaManifestBuilder().build());
assertEquals(List.of(), emitter.images());
emitter.setReady();
}
assertEquals(List.of(), emitter.images());
faultHandler.maybeRethrowFirstException();
}
@Test
public void testSnapshotsCreatedAgainWhenLastOffsetIsAligned() throws Exception {
MockFaultHandler faultHandler = new MockFaultHandler("SnapshotGenerator");
MockEmitter emitter = new MockEmitter();
MetadataImage batchAlignedImage = TEST_DELTA.apply(
new MetadataProvenance(-1L, -1, -1L, true));
try (SnapshotGenerator generator = new SnapshotGenerator.Builder(emitter).
setFaultHandler(faultHandler).
setMaxBytesSinceLastSnapshot(100).
setMaxTimeSinceLastSnapshotNs(TimeUnit.DAYS.toNanos(10)).
build()) {
// These should not be published despite meeting the max bytes threshold since they are not batch aligned.
generator.publishLogDelta(TEST_IMAGE, notBatchAlignedLogDeltaManifestBuilder().build());
generator.publishLogDelta(TEST_IMAGE, notBatchAlignedLogDeltaManifestBuilder().build());
// This snapshot should get published since it is batch aligned.
generator.publishLogDelta(batchAlignedImage, logDeltaManifestBuilder().build());
assertEquals(List.of(), emitter.images());
emitter.setReady();
}
assertEquals(List.of(batchAlignedImage), emitter.images());
faultHandler.maybeRethrowFirstException();
}
@Test
public void testSnapshotsDisabled() throws Exception {
MockFaultHandler faultHandler = new MockFaultHandler("SnapshotGenerator");
MockEmitter emitter = new MockEmitter().setReady();
AtomicReference<String> disabledReason = new AtomicReference<>();
try (SnapshotGenerator generator = new SnapshotGenerator.Builder(emitter).
setFaultHandler(faultHandler).
setMaxBytesSinceLastSnapshot(1).
setMaxTimeSinceLastSnapshotNs(0).
setDisabledReason(disabledReason).
build()) {
disabledReason.compareAndSet(null, "we are testing disable()");
// No snapshots are generated because snapshots are disabled.
generator.publishLogDelta(TEST_IMAGE, logDeltaManifestBuilder().build());
}
assertEquals(List.of(), emitter.images());
faultHandler.maybeRethrowFirstException();
}
@Test
public void testTimeBasedSnapshots() throws Exception {
MockFaultHandler faultHandler = new MockFaultHandler("SnapshotGenerator");
MockEmitter emitter = new MockEmitter().setReady();
MockTime mockTime = new MockTime();
try (SnapshotGenerator generator = new SnapshotGenerator.Builder(emitter).
setTime(mockTime).
setFaultHandler(faultHandler).
setMaxBytesSinceLastSnapshot(200).
setMaxTimeSinceLastSnapshotNs(TimeUnit.MINUTES.toNanos(30)).
build()) {
// This image isn't published yet.
generator.publishLogDelta(TEST_IMAGE, logDeltaManifestBuilder().numBytes(50).build());
assertEquals(List.of(), emitter.images());
mockTime.sleep(TimeUnit.MINUTES.toNanos(40));
// Next image is published because of the time delay.
generator.publishLogDelta(TEST_IMAGE, logDeltaManifestBuilder().numBytes(50).build());
TestUtils.waitForCondition(() -> emitter.images().size() == 1, "images.size == 1");
// bytesSinceLastSnapshot was reset to 0 by the previous snapshot,
// so this does not trigger a new snapshot.
generator.publishLogDelta(TEST_IMAGE, logDeltaManifestBuilder().numBytes(150).build());
}
assertEquals(List.of(TEST_IMAGE), emitter.images());
faultHandler.maybeRethrowFirstException();
}
@Test
public void testEmitterProblem() throws Exception {
MockFaultHandler faultHandler = new MockFaultHandler("SnapshotGenerator");
MockEmitter emitter = new MockEmitter().setProblem(new RuntimeException("oops"));
try (SnapshotGenerator generator = new SnapshotGenerator.Builder(emitter).
setFaultHandler(faultHandler).
setMaxBytesSinceLastSnapshot(200).
build()) {
for (int i = 0; i < 2; i++) {
generator.publishLogDelta(TEST_IMAGE,
logDeltaManifestBuilder().elapsedNs(10000).numBytes(50000).build());
}
}
assertEquals(List.of(), emitter.images());
assertNotNull(faultHandler.firstException());
assertEquals(FaultHandlerException.class, faultHandler.firstException().getClass());
assertEquals("SnapshotGenerator: KRaft snapshot file generation error: oops",
faultHandler.firstException().getMessage());
}
}
|
MockEmitter
|
java
|
mybatis__mybatis-3
|
src/test/java/org/apache/ibatis/submitted/blobtest/BlobTest.java
|
{
"start": 1175,
"end": 3704
}
|
class ____ {
private static SqlSessionFactory sqlSessionFactory;
@BeforeAll
static void initDatabase() throws Exception {
try (Reader reader = Resources.getResourceAsReader("org/apache/ibatis/submitted/blobtest/MapperConfig.xml")) {
sqlSessionFactory = new SqlSessionFactoryBuilder().build(reader);
}
BaseDataTest.runScript(sqlSessionFactory.getConfiguration().getEnvironment().getDataSource(),
"org/apache/ibatis/submitted/blobtest/CreateDB.sql");
}
@Test
/*
* This test demonstrates the use of type aliases for primitive types in constructor based result maps
*/
void insertBlobThenSelectAll() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
BlobMapper blobMapper = sqlSession.getMapper(BlobMapper.class);
byte[] myblob = { 1, 2, 3, 4, 5 };
BlobRecord blobRecord = new BlobRecord(1, myblob);
int rows = blobMapper.insert(blobRecord);
assertEquals(1, rows);
// NPE here due to unresolved type handler
List<BlobRecord> results = blobMapper.selectAll();
assertEquals(1, results.size());
BlobRecord result = results.get(0);
assertEquals(blobRecord.getId(), result.getId());
assertTrue(blobsAreEqual(blobRecord.getBlob(), result.getBlob()));
}
}
@Test
/*
* This test demonstrates the use of type aliases for primitive types in constructor based result maps
*/
void insertBlobObjectsThenSelectAll() {
try (SqlSession sqlSession = sqlSessionFactory.openSession()) {
BlobMapper blobMapper = sqlSession.getMapper(BlobMapper.class);
Byte[] myblob = { 1, 2, 3, 4, 5 };
BlobRecord blobRecord = new BlobRecord(1, myblob);
int rows = blobMapper.insert(blobRecord);
assertEquals(1, rows);
// NPE here due to unresolved type handler
List<BlobRecord> results = blobMapper.selectAllWithBlobObjects();
assertEquals(1, results.size());
BlobRecord result = results.get(0);
assertEquals(blobRecord.getId(), result.getId());
assertTrue(blobsAreEqual(blobRecord.getBlob(), result.getBlob()));
}
}
static boolean blobsAreEqual(byte[] blob1, byte[] blob2) {
if (blob1 == null) {
return blob2 == null;
}
if (blob2 == null) {
return blob1 == null;
}
boolean rc = blob1.length == blob2.length;
if (rc) {
for (int i = 0; i < blob1.length; i++) {
if (blob1[i] != blob2[i]) {
rc = false;
break;
}
}
}
return rc;
}
}
|
BlobTest
|
java
|
apache__flink
|
flink-end-to-end-tests/flink-stream-state-ttl-test/src/main/java/org/apache/flink/streaming/tests/verify/TtlListStateVerifier.java
|
{
"start": 1351,
"end": 2878
}
|
class ____
extends AbstractTtlStateVerifier<
ListStateDescriptor<String>,
ListState<String>,
List<String>,
String,
List<String>> {
TtlListStateVerifier() {
super(
new ListStateDescriptor<>(
TtlListStateVerifier.class.getSimpleName(), StringSerializer.INSTANCE));
}
@Override
@Nonnull
State createState(@Nonnull FunctionInitializationContext context) {
return context.getKeyedStateStore().getListState(stateDesc);
}
@Override
@Nonnull
public TypeSerializer<String> getUpdateSerializer() {
return StringSerializer.INSTANCE;
}
@Override
@Nonnull
public String generateRandomUpdate() {
return randomString();
}
@Override
@Nonnull
List<String> getInternal(@Nonnull ListState<String> state) throws Exception {
return StreamSupport.stream(state.get().spliterator(), false).collect(Collectors.toList());
}
@Override
void updateInternal(@Nonnull ListState<String> state, String update) throws Exception {
state.add(update);
}
@Override
@Nonnull
List<String> expected(@Nonnull List<ValueWithTs<String>> updates, long currentTimestamp) {
return updates.stream()
.filter(u -> !expired(u.getTimestamp(), currentTimestamp))
.map(ValueWithTs::getValue)
.collect(Collectors.toList());
}
}
|
TtlListStateVerifier
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/resourcemanager/slotmanager/ResourceEventListener.java
|
{
"start": 1090,
"end": 1394
}
|
interface ____ {
/**
* @param jobId job for which not enough resources are available
* @param acquiredResources the resources that have been acquired for the job
*/
void notEnoughResourceAvailable(JobID jobId, Collection<ResourceRequirement> acquiredResources);
}
|
ResourceEventListener
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/threadsafety/GuardedByCheckerTest.java
|
{
"start": 26436,
"end": 26576
}
|
class ____ {
final Object lock = new Object();
@GuardedBy("lock")
int x;
|
Outer
|
java
|
quarkusio__quarkus
|
test-framework/junit5-component/src/test/java/io/quarkus/test/component/lifecycle/PerClassLifecycleTest.java
|
{
"start": 1552,
"end": 1863
}
|
class ____ {
static final AtomicInteger COUNTER = new AtomicInteger();
@Inject
Charlie charlie;
@PostConstruct
void init() {
COUNTER.incrementAndGet();
}
public String ping() {
return charlie.ping();
}
}
}
|
MySingleton
|
java
|
elastic__elasticsearch
|
libs/dissect/src/main/java/org/elasticsearch/dissect/DissectParser.java
|
{
"start": 3966,
"end": 9328
}
|
class ____ {
private static final Pattern LEADING_DELIMITER_PATTERN = Pattern.compile("^(.*?)%\\{");
private static final Pattern KEY_DELIMITER_FIELD_PATTERN = Pattern.compile("%\\{([^}]*?)}(.+?(?=%\\{)|.*$)", Pattern.DOTALL);
private static final EnumSet<DissectKey.Modifier> ASSOCIATE_MODIFIERS = EnumSet.of(
DissectKey.Modifier.FIELD_NAME,
DissectKey.Modifier.FIELD_VALUE
);
private static final EnumSet<DissectKey.Modifier> APPEND_MODIFIERS = EnumSet.of(
DissectKey.Modifier.APPEND,
DissectKey.Modifier.APPEND_WITH_ORDER
);
private static final Function<DissectPair, String> KEY_NAME = val -> val.key().getName();
private final List<DissectPair> matchPairs;
private final String pattern;
private String leadingDelimiter = "";
private final int maxMatches;
private final int maxResults;
private final int appendCount;
private final int referenceCount;
private final String appendSeparator;
public DissectParser(String pattern, String appendSeparator) {
this.pattern = pattern;
this.appendSeparator = appendSeparator == null ? "" : appendSeparator;
Matcher matcher = LEADING_DELIMITER_PATTERN.matcher(pattern);
while (matcher.find()) {
leadingDelimiter = matcher.group(1);
}
List<DissectPair> dissectPairs = new ArrayList<>();
matcher = KEY_DELIMITER_FIELD_PATTERN.matcher(pattern.substring(leadingDelimiter.length()));
while (matcher.find()) {
DissectKey key = new DissectKey(matcher.group(1));
String delimiter = matcher.group(2);
dissectPairs.add(new DissectPair(key, delimiter));
}
this.maxMatches = dissectPairs.size();
this.maxResults = Long.valueOf(
dissectPairs.stream().filter(dissectPair -> dissectPair.key().skip() == false).map(KEY_NAME).distinct().count()
).intValue();
if (this.maxMatches == 0 || maxResults == 0) {
throw new DissectException.PatternParse(pattern, "Unable to find any keys or delimiters.");
}
// append validation - look through all of the keys to see if there are any keys that need to participate in an append operation
// but don't have the '+' defined
Set<String> appendKeyNames = dissectPairs.stream()
.filter(dissectPair -> APPEND_MODIFIERS.contains(dissectPair.key().getModifier()))
.map(KEY_NAME)
.collect(Collectors.toSet());
if (appendKeyNames.size() > 0) {
List<DissectPair> modifiedMatchPairs = new ArrayList<>(dissectPairs.size());
for (DissectPair p : dissectPairs) {
if (p.key().getModifier().equals(DissectKey.Modifier.NONE) && appendKeyNames.contains(p.key().getName())) {
modifiedMatchPairs.add(new DissectPair(new DissectKey(p.key(), DissectKey.Modifier.APPEND), p.delimiter()));
} else {
modifiedMatchPairs.add(p);
}
}
dissectPairs = modifiedMatchPairs;
}
appendCount = appendKeyNames.size();
// reference validation - ensure that '*' and '&' come in pairs
Map<String, List<DissectPair>> referenceGroupings = dissectPairs.stream()
.filter(dissectPair -> ASSOCIATE_MODIFIERS.contains(dissectPair.key().getModifier()))
.collect(Collectors.groupingBy(KEY_NAME));
for (Map.Entry<String, List<DissectPair>> entry : referenceGroupings.entrySet()) {
if (entry.getValue().size() != 2) {
throw new DissectException.PatternParse(
pattern,
"Found invalid key/reference associations: '"
+ entry.getValue().stream().map(KEY_NAME).collect(Collectors.joining(","))
+ "' Please ensure each '*<key>' is matched with a matching '&<key>"
);
}
}
referenceCount = referenceGroupings.size() * 2;
this.matchPairs = List.copyOf(dissectPairs);
}
/**
* Entry point to dissect a string into its parts.
*
* @param inputString The string to dissect
* @return the key/value Map of the results
* @throws DissectException if unable to dissect a pair into its parts.
*/
public Map<String, String> parse(String inputString) {
/**
*
* This implements a naive string matching algorithm. The string is walked left to right, comparing each byte against
* another string's bytes looking for matches. If the bytes match, then a second cursor looks ahead to see if all the bytes
* of the other string matches. If they all match, record it and advances the primary cursor to the match point. If it can not match
* all of the bytes then progress the main cursor. Repeat till the end of the input string. Since the string being searching for
* (the delimiter) is generally small and rare the naive approach is efficient.
*
* In this case the string that is walked is the input string, and the string being searched for is the current delimiter.
* For example for a dissect pattern of {@code %{a},%{b}:%{c}} the delimiters (comma then colon) are searched for in the
* input string. At
|
DissectParser
|
java
|
elastic__elasticsearch
|
server/src/internalClusterTest/java/org/elasticsearch/search/rank/MockedRequestActionBasedRerankerIT.java
|
{
"start": 4339,
"end": 4629
}
|
class ____ extends Plugin implements ActionPlugin {
@Override
public Collection<ActionHandler> getActions() {
return List.of(new ActionHandler(TEST_RERANKING_ACTION_TYPE, TestRerankingTransportAction.class));
}
}
public static
|
RerankerServicePlugin
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/spring/support/RedissonNamespaceDefaultDecorator.java
|
{
"start": 888,
"end": 1175
}
|
class ____ implements RedissonNamespaceDecorator {
@Override
public void decorate(Element element, ParserContext parserContext, BeanDefinitionBuilder builder, RedissonNamespaceParserSupport helper) {
//default is no decoration;
}
}
|
RedissonNamespaceDefaultDecorator
|
java
|
alibaba__nacos
|
common/src/main/java/com/alibaba/nacos/common/task/NacosTaskProcessor.java
|
{
"start": 711,
"end": 895
}
|
interface ____ {
/**
* Process task.
*
* @param task task.
* @return process task result.
*/
boolean process(NacosTask task);
}
|
NacosTaskProcessor
|
java
|
FasterXML__jackson-databind
|
src/test/java/tools/jackson/databind/deser/FloatDeserTest.java
|
{
"start": 297,
"end": 3696
}
|
class ____
{
/*
/**********************************************************
/* Tests
/**********************************************************
*/
private final ObjectMapper MAPPER = newJsonMapper();
@Test
public void testFloatPrimitive() throws Exception
{
assertEquals(7.038531e-26f, MAPPER.readValue("\"7.038531e-26\"", float.class));
assertEquals(1.1999999f, MAPPER.readValue("\"1.199999988079071\"", float.class));
assertEquals(3.4028235e38f, MAPPER.readValue("\"3.4028235677973366e38\"", float.class));
//this assertion fails unless toString is used
assertEquals("1.4E-45", MAPPER.readValue("\"7.006492321624086e-46\"", float.class).toString());
}
@Test
public void testFloatClass() throws Exception
{
assertEquals(Float.valueOf(7.038531e-26f), MAPPER.readValue("\"7.038531e-26\"", Float.class));
assertEquals(Float.valueOf(1.1999999f), MAPPER.readValue("\"1.199999988079071\"", Float.class));
assertEquals(Float.valueOf(3.4028235e38f), MAPPER.readValue("\"3.4028235677973366e38\"", Float.class));
//this assertion fails unless toString is used
assertEquals("1.4E-45", MAPPER.readValue("\"7.006492321624086e-46\"", Float.class).toString());
}
@Test
public void testArrayOfFloatPrimitives() throws Exception
{
StringBuilder sb = new StringBuilder();
sb.append('[')
.append("\"7.038531e-26\",")
.append("\"1.199999988079071\",")
.append("\"3.4028235677973366e38\",")
.append("\"7.006492321624086e-46\"")
.append(']');
float[] floats = MAPPER.readValue(sb.toString(), float[].class);
assertEquals(4, floats.length);
assertEquals(7.038531e-26f, floats[0]);
assertEquals(1.1999999f, floats[1]);
assertEquals(3.4028235e38f, floats[2]);
assertEquals("1.4E-45", Float.toString(floats[3])); //this assertion fails unless toString is used
}
// for [jackson-core#757]
@Test
public void testBigArrayOfFloatPrimitives() throws Exception {
try (InputStream stream = FloatDeserTest.class.getResourceAsStream("/data/float-array-755.txt")) {
float[] floats = MAPPER.readValue(stream, float[].class);
assertEquals(1004, floats.length);
assertEquals(7.038531e-26f, floats[0]);
assertEquals(1.1999999f, floats[1]);
assertEquals(3.4028235e38f, floats[2]);
assertEquals(7.006492321624086e-46f, floats[3]); //this assertion fails unless toString is used
}
}
@Test
public void testArrayOfFloats() throws Exception
{
StringBuilder sb = new StringBuilder();
sb.append('[')
.append("\"7.038531e-26\",")
.append("\"1.199999988079071\",")
.append("\"3.4028235677973366e38\",")
.append("\"7.006492321624086e-46\"")
.append(']');
Float[] floats = MAPPER.readValue(sb.toString(), Float[].class);
assertEquals(4, floats.length);
assertEquals(Float.valueOf(7.038531e-26f), floats[0]);
assertEquals(Float.valueOf(1.1999999f), floats[1]);
assertEquals(Float.valueOf(3.4028235e38f), floats[2]);
assertEquals(Float.valueOf("1.4E-45"), floats[3]);
}
}
|
FloatDeserTest
|
java
|
google__error-prone
|
core/src/test/java/com/google/errorprone/bugpatterns/LockOnNonEnclosingClassLiteralTest.java
|
{
"start": 2474,
"end": 2919
}
|
class ____ {
static {
synchronized (LockOnNonEnclosingClassLiteralNegativeCases.class) {
}
}
private void methodContainsSynchronizedBlock() {
synchronized (LockOnNonEnclosingClassLiteralNegativeCases.class) {
}
synchronized (this) {
}
}
|
LockOnNonEnclosingClassLiteralNegativeCases
|
java
|
processing__processing4
|
app/src/processing/app/contrib/UpdateContributionTab.java
|
{
"start": 891,
"end": 2856
}
|
class ____ extends ContributionTab {
public UpdateContributionTab(ManagerFrame dialog) {
super(dialog);
// Filter to show only the contributions with updates available,
// or are section headers (which are fake contributions).
filter = contrib -> {
if (contrib instanceof ListPanel.SectionHeaderContribution) {
return true;
}
if (contrib instanceof LocalContribution) {
return ContributionListing.getInstance().hasUpdates(contrib);
}
return false;
};
listPanel = new UpdateListPanel(this, filter);
statusPanel = new UpdateStatusPanel(this);
initLayout();
ContributionListing.getInstance().addListPanel(listPanel);
}
@Override
protected void initLayout() {
/*
if (loaderLabel == null) {
// if (progressBar == null) {
// progressBar = new JProgressBar();
// progressBar.setVisible(false);
buildErrorPanel();
loaderLabel = new JLabel(Toolkit.getLibIcon("manager/loader.gif"));
loaderLabel.setOpaque(false);
}
*/
GroupLayout layout = new GroupLayout(this);
setLayout(layout);
layout.setHorizontalGroup(layout
.createParallelGroup(GroupLayout.Alignment.CENTER)
//.addComponent(loaderLabel)
.addComponent(listPanel)
//.addComponent(errorPanel)
.addComponent(statusPanel));
layout.setVerticalGroup(layout
.createSequentialGroup()
.addGroup(layout.createParallelGroup(GroupLayout.Alignment.CENTER)
//.addComponent(loaderLabel)
.addComponent(listPanel))
//.addComponent(errorPanel)
.addComponent(statusPanel,
GroupLayout.PREFERRED_SIZE,
GroupLayout.DEFAULT_SIZE,
GroupLayout.PREFERRED_SIZE));
layout.setHonorsVisibility(listPanel, false);
//setBackground(Color.WHITE);
}
}
|
UpdateContributionTab
|
java
|
quarkusio__quarkus
|
extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/ApplyServiceAccountNameToRevisionSpecDecorator.java
|
{
"start": 377,
"end": 1475
}
|
class ____ extends NamedResourceDecorator<RevisionSpecFluent<?>> {
private static final String NONE = null;
private final String serviceAccountName;
public ApplyServiceAccountNameToRevisionSpecDecorator() {
this(ANY, NONE);
}
public ApplyServiceAccountNameToRevisionSpecDecorator(String serviceAccountName) {
super(ANY);
this.serviceAccountName = serviceAccountName;
}
public ApplyServiceAccountNameToRevisionSpecDecorator(String resourceName, String serviceAccountName) {
super(resourceName);
this.serviceAccountName = serviceAccountName;
}
public void andThenVisit(RevisionSpecFluent<?> spec, ObjectMeta resourceMeta) {
if (Strings.isNotNullOrEmpty(this.serviceAccountName)) {
spec.withServiceAccountName(this.serviceAccountName);
} else {
spec.withServiceAccountName(resourceMeta.getName());
}
}
public Class<? extends Decorator>[] after() {
return new Class[] { ResourceProvidingDecorator.class };
}
}
|
ApplyServiceAccountNameToRevisionSpecDecorator
|
java
|
apache__kafka
|
streams/src/test/java/org/apache/kafka/streams/state/internals/TimeOrderedWindowStoreTest.java
|
{
"start": 4749,
"end": 63030
}
|
class ____ {
private static final int MAX_CACHE_SIZE_BYTES = 300;
private static final long DEFAULT_TIMESTAMP = 10L;
private static final Long WINDOW_SIZE = 10L;
private static final long SEGMENT_INTERVAL = 100L;
private static final String TOPIC = "topic";
private static final String CACHE_NAMESPACE = "0_0-store-name";
private InternalMockProcessorContext<?, ?> context;
private RocksDBTimeOrderedWindowSegmentedBytesStore bytesStore;
private RocksDBTimeOrderedWindowStore underlyingStore;
private TimeOrderedCachingWindowStore cachingStore;
private CacheFlushListenerStub<Windowed<String>, String> cacheListener;
private ThreadCache cache;
private TimeFirstWindowKeySchema baseKeySchema;
public void setUp(final boolean hasIndex) {
baseKeySchema = new TimeFirstWindowKeySchema();
bytesStore = new RocksDBTimeOrderedWindowSegmentedBytesStore("test", "metrics-scope", 100, SEGMENT_INTERVAL, hasIndex);
underlyingStore = new RocksDBTimeOrderedWindowStore(bytesStore, false, WINDOW_SIZE);
final TimeWindowedDeserializer<String> keyDeserializer = new TimeWindowedDeserializer<>(new StringDeserializer(), WINDOW_SIZE);
keyDeserializer.setIsChangelogTopic(true);
cacheListener = new CacheFlushListenerStub<>(keyDeserializer, new StringDeserializer());
cachingStore = new TimeOrderedCachingWindowStore(underlyingStore, WINDOW_SIZE, SEGMENT_INTERVAL);
cachingStore.setFlushListener(cacheListener, false);
cache = new ThreadCache(new LogContext("testCache "), MAX_CACHE_SIZE_BYTES, new MockStreamsMetrics(new Metrics()));
context = new InternalMockProcessorContext<>(TestUtils.tempDirectory(), null, null, null, cache);
context.setRecordContext(new ProcessorRecordContext(DEFAULT_TIMESTAMP, 0, 0, TOPIC, new RecordHeaders()));
cachingStore.init(context, cachingStore);
}
@AfterEach
public void closeStore() {
cachingStore.close();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldDelegateInit(final boolean hasIndex) {
setUp(hasIndex);
final RocksDBTimeOrderedWindowStore inner = mock(RocksDBTimeOrderedWindowStore.class);
when(inner.hasIndex()).thenReturn(hasIndex);
final TimeOrderedCachingWindowStore outer = new TimeOrderedCachingWindowStore(inner, WINDOW_SIZE, SEGMENT_INTERVAL);
reset(inner);
when(inner.name()).thenReturn("store");
outer.init(context, outer);
verify(inner).init(context, outer);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowIfWrongStore(final boolean hasIndex) {
setUp(hasIndex);
final RocksDBTimestampedWindowStore innerWrong = mock(RocksDBTimestampedWindowStore.class);
final Exception e = assertThrows(IllegalArgumentException.class,
() -> new TimeOrderedCachingWindowStore(innerWrong, WINDOW_SIZE, SEGMENT_INTERVAL));
assertThat(e.getMessage(),
containsString("TimeOrderedCachingWindowStore only supports RocksDBTimeOrderedWindowStore backed store"));
final RocksDBTimeOrderedWindowStore inner = mock(RocksDBTimeOrderedWindowStore.class);
// Nothing happens
new TimeOrderedCachingWindowStore(inner, WINDOW_SIZE, SEGMENT_INTERVAL);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldNotReturnDuplicatesInRanges(final boolean hasIndex) {
setUp(hasIndex);
final StreamsBuilder builder = new StreamsBuilder();
final StoreBuilder<TimestampedWindowStore<String, String>> storeBuilder = Stores.timestampedWindowStoreBuilder(
RocksDbIndexedTimeOrderedWindowBytesStoreSupplier.create(
"store-name",
ofHours(1L),
ofMinutes(1),
false,
hasIndex
), Serdes.String(), Serdes.String())
.withCachingEnabled();
builder.addStateStore(storeBuilder);
builder.stream(TOPIC,
Consumed.with(Serdes.String(), Serdes.String()))
.process(() -> new Processor<String, String, String, String>() {
private WindowStore<String, ValueAndTimestamp<String>> store;
private int numRecordsProcessed;
private org.apache.kafka.streams.processor.api.ProcessorContext<String, String> context;
@Override
public void init(final org.apache.kafka.streams.processor.api.ProcessorContext<String, String> processorContext) {
this.context = processorContext;
this.store = processorContext.getStateStore("store-name");
int count = 0;
try (final KeyValueIterator<Windowed<String>, ValueAndTimestamp<String>> all = store.all()) {
while (all.hasNext()) {
count++;
all.next();
}
}
assertThat(count, equalTo(0));
}
@Override
public void process(final Record<String, String> record) {
int count = 0;
try (final KeyValueIterator<Windowed<String>, ValueAndTimestamp<String>> all = store.all()) {
while (all.hasNext()) {
count++;
all.next();
}
}
assertThat(count, equalTo(numRecordsProcessed));
store.put(record.value(), ValueAndTimestamp.make(record.value(), record.timestamp()), record.timestamp());
numRecordsProcessed++;
context.forward(record);
}
}, "store-name");
final Properties streamsConfiguration = new Properties();
streamsConfiguration.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
streamsConfiguration.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
streamsConfiguration.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.StringSerde.class.getName());
streamsConfiguration.put(StreamsConfig.STATE_DIR_CONFIG, TestUtils.tempDirectory().getPath());
streamsConfiguration.put(StreamsConfig.COMMIT_INTERVAL_MS_CONFIG, 10 * 1000L);
final Instant initialWallClockTime = Instant.ofEpochMilli(0L);
final TopologyTestDriver driver = new TopologyTestDriver(builder.build(), streamsConfiguration, initialWallClockTime);
final TestInputTopic<String, String> inputTopic = driver.createInputTopic(TOPIC,
new StringSerializer(),
new StringSerializer(),
initialWallClockTime,
Duration.ZERO);
for (int i = 0; i < 5; i++) {
inputTopic.pipeInput(UUID.randomUUID().toString(), UUID.randomUUID().toString());
}
driver.advanceWallClockTime(Duration.ofSeconds(10));
inputTopic.advanceTime(Duration.ofSeconds(10));
for (int i = 0; i < 5; i++) {
inputTopic.pipeInput(UUID.randomUUID().toString(), UUID.randomUUID().toString());
}
driver.advanceWallClockTime(Duration.ofSeconds(10));
inputTopic.advanceTime(Duration.ofSeconds(10));
for (int i = 0; i < 5; i++) {
inputTopic.pipeInput(UUID.randomUUID().toString(), UUID.randomUUID().toString());
}
driver.advanceWallClockTime(Duration.ofSeconds(10));
inputTopic.advanceTime(Duration.ofSeconds(10));
for (int i = 0; i < 5; i++) {
inputTopic.pipeInput(UUID.randomUUID().toString(), UUID.randomUUID().toString());
}
driver.close();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldPutFetchFromCache(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
assertThat(cachingStore.fetch(bytesKey("a"), 10), equalTo(bytesValue("a")));
assertThat(cachingStore.fetch(bytesKey("b"), 10), equalTo(bytesValue("b")));
assertThat(cachingStore.fetch(bytesKey("c"), 10), equalTo(null));
assertThat(cachingStore.fetch(bytesKey("a"), 0), equalTo(null));
try (final WindowStoreIterator<byte[]> a = cachingStore.fetch(bytesKey("a"), ofEpochMilli(10), ofEpochMilli(10));
final WindowStoreIterator<byte[]> b = cachingStore.fetch(bytesKey("b"), ofEpochMilli(10), ofEpochMilli(10))) {
verifyKeyValue(a.next(), DEFAULT_TIMESTAMP, "a");
verifyKeyValue(b.next(), DEFAULT_TIMESTAMP, "b");
assertFalse(a.hasNext());
assertFalse(b.hasNext());
final int expectedSize = hasIndex ? 4 : 2;
assertEquals(expectedSize, cache.size());
}
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldMatchPositionAfterPutWithFlushListener(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.setFlushListener(record -> { }, false);
shouldMatchPositionAfterPut();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldMatchPositionAfterPutWithoutFlushListener(final boolean hasIndex) {
setUp(hasIndex);
cachingStore.setFlushListener(null, false);
shouldMatchPositionAfterPut();
}
// Shared body for the two position tests above. Writes two records under distinct record
// contexts (offsets 1 and 2), then switches to a third context (offset 3) WITHOUT writing,
// to prove the tracked Position reflects the last written record rather than the current
// processing context — and that nothing is tracked at all until flush.
private void shouldMatchPositionAfterPut() {
    context.setRecordContext(new ProcessorRecordContext(0, 1, 0, "", new RecordHeaders()));
    cachingStore.put(bytesKey("key1"), bytesValue("value1"), DEFAULT_TIMESTAMP);
    context.setRecordContext(new ProcessorRecordContext(0, 2, 0, "", new RecordHeaders()));
    cachingStore.put(bytesKey("key2"), bytesValue("value2"), DEFAULT_TIMESTAMP);
    // Position should correspond to the last record's context, not the current context.
    context.setRecordContext(
        new ProcessorRecordContext(0, 3, 0, "", new RecordHeaders())
    );
    // the caching window store doesn't maintain a separate
    // position because it never serves queries from the cache
    assertEquals(Position.emptyPosition(), cachingStore.getPosition());
    assertEquals(Position.emptyPosition(), underlyingStore.getPosition());
    cachingStore.flush();
    // After flush both layers must report topic "" / partition 0 at offset 2 — the
    // offset of the last put, not the offset-3 context set afterwards.
    assertEquals(
        Position.fromMap(mkMap(mkEntry("", mkMap(mkEntry(0, 2L))))),
        cachingStore.getPosition()
    );
    assertEquals(
        Position.fromMap(mkMap(mkEntry("", mkMap(mkEntry(0, 2L))))),
        underlyingStore.getPosition()
    );
}
// Asserts that a single window-store result carries the expected window timestamp
// (the iterator key) and the expected payload (serialized via bytesValue).
private void verifyKeyValue(final KeyValue<Long, byte[]> next,
                            final long expectedKey,
                            final String expectedValue) {
    final Long actualTimestamp = next.key;
    final byte[] actualPayload = next.value;
    assertThat(actualTimestamp, equalTo(expectedKey));
    assertThat(actualPayload, equalTo(bytesValue(expectedValue)));
}
/**
 * Serializes a test value to raw bytes.
 *
 * <p>Uses an explicit UTF-8 charset: the no-arg {@code String.getBytes()} used the
 * platform-default charset before Java 18, making the encoded bytes environment-dependent.
 * The rest of this file already encodes with {@code StandardCharsets.UTF_8} (see the
 * eviction test), so this keeps the helpers consistent.
 */
private static byte[] bytesValue(final String value) {
    return value.getBytes(StandardCharsets.UTF_8);
}
/**
 * Wraps a test key string as a {@link Bytes} store key.
 *
 * <p>Encodes with an explicit UTF-8 charset instead of the platform-default
 * {@code String.getBytes()} (environment-dependent before Java 18), matching the
 * {@code StandardCharsets.UTF_8} usage elsewhere in this test class.
 */
private static Bytes bytesKey(final String key) {
    return Bytes.wrap(key.getBytes(StandardCharsets.UTF_8));
}
// Decodes raw bytes back into a String for comparison assertions.
// A throwaway StringDeserializer is created per call; the "resource" warning is
// suppressed deliberately since it is never closed here.
@SuppressWarnings("resource")
private String stringFrom(final byte[] from) {
    return new StringDeserializer().deserialize("", from);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldPutFetchRangeFromCache(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
    // Key-range fetch over the single instant [DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP]
    // should return both cached entries, keyed by their windows, in key order.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
        cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP))) {
        final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
            new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE))
        );
        final List<String> expectedValues = Arrays.asList("a", "b");
        verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
        // With an index, each put stores a base entry plus an index entry in the cache.
        final int expectedSize = hasIndex ? 4 : 2;
        assertEquals(expectedSize, cache.size());
    }
}
// The next three tests share one fixture — keys a..e at staggered timestamps — and verify
// that a null keyFrom / keyTo / both act as open-ended range bounds on fetch(); results
// are also filtered by the [from, to] time range, so "e" is excluded when keyTo = "d".
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldPutFetchRangeFromCacheForNullKeyFrom(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
    cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
    cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
    // null keyFrom: range starts at the smallest key, so a..d are returned.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
        cachingStore.fetch(null, bytesKey("d"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
        final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
            new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE))
        );
        final List<String> expectedValues = Arrays.asList("a", "b", "c", "d");
        verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldPutFetchRangeFromCacheForNullKeyTo(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
    cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
    cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
    // null keyTo: range extends to the largest key, so b..e are returned.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
        cachingStore.fetch(bytesKey("b"), null, ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
        final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
            new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("e"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE))
        );
        final List<String> expectedValues = Arrays.asList("b", "c", "d", "e");
        verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldPutFetchRangeFromCacheForNullKeyFromKeyTo(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
    cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
    cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
    // Both bounds null: a full scan within the time range returns all five entries.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
        cachingStore.fetch(null, null, ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
        final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
            new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("e"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE))
        );
        final List<String> expectedValues = Arrays.asList("a", "b", "c", "d", "e");
        verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
    }
}
// Backward-fetch counterparts of the null-bound range tests above: same a..e fixture,
// but results arrive in reverse order (by timestamp, then key — as encoded by this
// time-ordered store's key schema).
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldPutBackwardFetchRangeFromCacheForNullKeyFrom(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
    cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
    cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
    // null keyFrom with keyTo = "c": keys a..c qualify; returned newest-first.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
        cachingStore.backwardFetch(null, bytesKey("c"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
        final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
            new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE))
        );
        final List<String> expectedValues = Arrays.asList("c", "b", "a");
        verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldPutBackwardFetchRangeFromCacheForNullKeyTo(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
    cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
    cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
    // keyFrom = "c" with null keyTo: keys c..e qualify; returned newest-first.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
        cachingStore.backwardFetch(bytesKey("c"), null, ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
        final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
            new Windowed<>(bytesKey("e"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE))
        );
        final List<String> expectedValues = Arrays.asList("e", "d", "c");
        verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldPutBackwardFetchRangeFromCacheForNullKeyFromKeyTo(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP + 10L);
    cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP + 20L);
    cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP + 20L);
    // Both bounds null: full backward scan yields all five entries, newest-first.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
        cachingStore.backwardFetch(null, null, ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + 20L))) {
        final List<Windowed<Bytes>> expectedKeys = Arrays.asList(
            new Windowed<>(bytesKey("e"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("d"), new TimeWindow(DEFAULT_TIMESTAMP + 20L, DEFAULT_TIMESTAMP + 20L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("c"), new TimeWindow(DEFAULT_TIMESTAMP + 10L, DEFAULT_TIMESTAMP + 10L + WINDOW_SIZE)),
            new Windowed<>(bytesKey("b"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            new Windowed<>(bytesKey("a"), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE))
        );
        final List<String> expectedValues = Arrays.asList("e", "d", "c", "b", "a");
        verifyAllWindowedKeyValues(iterator, expectedKeys, expectedValues);
    }
}
// all() / backwardAll(): eight keys at the same timestamp must come back in
// ascending (resp. descending) key order, each wrapped in its time window.
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldGetAllFromCache(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("f"), bytesValue("f"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("g"), bytesValue("g"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("h"), bytesValue("h"), DEFAULT_TIMESTAMP);
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator = cachingStore.all()) {
        final String[] array = {"a", "b", "c", "d", "e", "f", "g", "h"};
        for (final String s : array) {
            verifyWindowedKeyValue(
                iterator.next(),
                new Windowed<>(bytesKey(s), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                s);
        }
        assertFalse(iterator.hasNext());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldGetAllBackwardFromCache(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("b"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("c"), bytesValue("c"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("d"), bytesValue("d"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("e"), bytesValue("e"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("f"), bytesValue("f"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("g"), bytesValue("g"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("h"), bytesValue("h"), DEFAULT_TIMESTAMP);
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator = cachingStore.backwardAll()) {
        // Same fixture, reversed expectation.
        final String[] array = {"h", "g", "f", "e", "d", "c", "b", "a"};
        for (final String s : array) {
            verifyWindowedKeyValue(
                iterator.next(),
                new Windowed<>(bytesKey(s), new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
                s);
        }
        assertFalse(iterator.hasNext());
    }
}
// fetchAll / backwardFetchAll: keys a..h are written at timestamps 0..7 (index == timestamp),
// then queried with full and partial inclusive time ranges, forward and backward.
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldFetchAllWithinTimestampRange(final boolean hasIndex) {
    setUp(hasIndex);
    final String[] array = {"a", "b", "c", "d", "e", "f", "g", "h"};
    for (int i = 0; i < array.length; i++) {
        cachingStore.put(bytesKey(array[i]), bytesValue(array[i]), i);
    }
    // Full range [0, 7]: every entry, oldest first.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
        cachingStore.fetchAll(ofEpochMilli(0), ofEpochMilli(7))) {
        for (int i = 0; i < array.length; i++) {
            final String str = array[i];
            verifyWindowedKeyValue(
                iterator.next(),
                new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
                str);
        }
        assertFalse(iterator.hasNext());
    }
    // Interior range [2, 4]: both bounds inclusive.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator1 =
        cachingStore.fetchAll(ofEpochMilli(2), ofEpochMilli(4))) {
        for (int i = 2; i <= 4; i++) {
            final String str = array[i];
            verifyWindowedKeyValue(
                iterator1.next(),
                new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
                str);
        }
        assertFalse(iterator1.hasNext());
    }
    // Tail range [5, 7].
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator2 =
        cachingStore.fetchAll(ofEpochMilli(5), ofEpochMilli(7))) {
        for (int i = 5; i <= 7; i++) {
            final String str = array[i];
            verifyWindowedKeyValue(
                iterator2.next(),
                new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
                str);
        }
        assertFalse(iterator2.hasNext());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldFetchAllBackwardWithinTimestampRange(final boolean hasIndex) {
    setUp(hasIndex);
    final String[] array = {"a", "b", "c", "d", "e", "f", "g", "h"};
    for (int i = 0; i < array.length; i++) {
        cachingStore.put(bytesKey(array[i]), bytesValue(array[i]), i);
    }
    // Full range, newest first.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
        cachingStore.backwardFetchAll(ofEpochMilli(0), ofEpochMilli(7))) {
        for (int i = array.length - 1; i >= 0; i--) {
            final String str = array[i];
            verifyWindowedKeyValue(
                iterator.next(),
                new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
                str);
        }
        assertFalse(iterator.hasNext());
    }
    // Interior range [2, 4], newest first.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator1 =
        cachingStore.backwardFetchAll(ofEpochMilli(2), ofEpochMilli(4))) {
        for (int i = 4; i >= 2; i--) {
            final String str = array[i];
            verifyWindowedKeyValue(
                iterator1.next(),
                new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
                str);
        }
        assertFalse(iterator1.hasNext());
    }
    // Tail range [5, 7], newest first.
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> iterator2 =
        cachingStore.backwardFetchAll(ofEpochMilli(5), ofEpochMilli(7))) {
        for (int i = 7; i >= 5; i--) {
            final String str = array[i];
            verifyWindowedKeyValue(
                iterator2.next(),
                new Windowed<>(bytesKey(str), new TimeWindow(i, i + WINDOW_SIZE)),
                str);
        }
        assertFalse(iterator2.hasNext());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldFlushEvictedItemsIntoUnderlyingStore(final boolean hasIndex) {
    setUp(hasIndex);
    // addItemsToCache() fills the cache until it evicts; evicted dirty entries must
    // land in the underlying bytes store.
    final int added = addItemsToCache();
    // all dirty entries should have been flushed
    try (final KeyValueIterator<Bytes, byte[]> iter = bytesStore.fetch(
        Bytes.wrap("0".getBytes(StandardCharsets.UTF_8)),
        DEFAULT_TIMESTAMP,
        DEFAULT_TIMESTAMP)) {
        final KeyValue<Bytes, byte[]> next = iter.next();
        assertEquals(DEFAULT_TIMESTAMP, baseKeySchema.segmentTimestamp(next.key));
        assertArrayEquals("0".getBytes(), next.value);
        assertFalse(iter.hasNext());
        // One fewer entry remains cached than was added — presumably exactly one
        // eviction occurred; confirm against addItemsToCache()'s sizing logic.
        assertEquals(added - 1, cache.size());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldForwardDirtyItemsWhenFlushCalled(final boolean hasIndex) {
    setUp(hasIndex);
    // A single dirty cache entry should be forwarded on flush as a new value
    // with no prior (old) value.
    final Windowed<String> expectedKey =
        new Windowed<>("1", new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE));
    cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.flush();
    assertEquals("a", cacheListener.forwarded.get(expectedKey).newValue);
    assertNull(cacheListener.forwarded.get(expectedKey).oldValue);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldSetFlushListener(final boolean hasIndex) {
    setUp(hasIndex);
    // The caching layer supports flush listeners, so setFlushListener must report
    // success (true) for either sendOldValues setting.
    assertTrue(cachingStore.setFlushListener(null, true));
    assertTrue(cachingStore.setFlushListener(null, false));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldForwardOldValuesWhenEnabled(final boolean hasIndex) {
    setUp(hasIndex);
    // Re-register the listener with sendOldValues = true.
    cachingStore.setFlushListener(cacheListener, true);
    final Windowed<String> windowedKey =
        new Windowed<>("1", new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE));
    // Two puts before the first flush collapse in the cache: only the latest ("b")
    // is forwarded, and since nothing was flushed before, there is no old value.
    cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.flush();
    assertEquals("b", cacheListener.forwarded.get(windowedKey).newValue);
    assertNull(cacheListener.forwarded.get(windowedKey).oldValue);
    cacheListener.forwarded.clear();
    // A subsequent overwrite now carries the previously-flushed value as oldValue.
    cachingStore.put(bytesKey("1"), bytesValue("c"), DEFAULT_TIMESTAMP);
    cachingStore.flush();
    assertEquals("c", cacheListener.forwarded.get(windowedKey).newValue);
    assertEquals("b", cacheListener.forwarded.get(windowedKey).oldValue);
    // Deleting (null put) forwards null newValue with the last value as oldValue.
    cachingStore.put(bytesKey("1"), null, DEFAULT_TIMESTAMP);
    cachingStore.flush();
    assertNull(cacheListener.forwarded.get(windowedKey).newValue);
    assertEquals("c", cacheListener.forwarded.get(windowedKey).oldValue);
    cacheListener.forwarded.clear();
    // put/put/delete within a single flush interval cancels out: nothing is forwarded.
    cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("1"), null, DEFAULT_TIMESTAMP);
    cachingStore.flush();
    assertNull(cacheListener.forwarded.get(windowedKey));
    cacheListener.forwarded.clear();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldForwardOldValuesWhenDisabled(final boolean hasIndex) {
    setUp(hasIndex);
    // No setFlushListener call here — relies on the listener registered during setUp,
    // presumably with sendOldValues = false (confirm in setUp); hence oldValue stays null.
    final Windowed<String> windowedKey =
        new Windowed<>("1", new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE));
    cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.flush();
    assertEquals("b", cacheListener.forwarded.get(windowedKey).newValue);
    assertNull(cacheListener.forwarded.get(windowedKey).oldValue);
    // Even an overwrite of a flushed value must not expose the old value when disabled.
    cachingStore.put(bytesKey("1"), bytesValue("c"), DEFAULT_TIMESTAMP);
    cachingStore.flush();
    assertEquals("c", cacheListener.forwarded.get(windowedKey).newValue);
    assertNull(cacheListener.forwarded.get(windowedKey).oldValue);
    cachingStore.put(bytesKey("1"), null, DEFAULT_TIMESTAMP);
    cachingStore.flush();
    assertNull(cacheListener.forwarded.get(windowedKey).newValue);
    assertNull(cacheListener.forwarded.get(windowedKey).oldValue);
    cacheListener.forwarded.clear();
    // put/put/delete within one flush interval still cancels out entirely.
    cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("1"), null, DEFAULT_TIMESTAMP);
    cachingStore.flush();
    assertNull(cacheListener.forwarded.get(windowedKey));
    cacheListener.forwarded.clear();
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldForwardDirtyItemToListenerWhenEvicted(final boolean hasIndex) {
    setUp(hasIndex);
    // Every record evicted from the cache must be forwarded to the flush listener —
    // one forwarded entry per record added by addItemsToCache().
    final int numRecords = addItemsToCache();
    assertEquals(numRecords, cacheListener.forwarded.size());
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldTakeValueFromCacheIfSameTimestampFlushedToRocks(final boolean hasIndex) {
    setUp(hasIndex);
    // Write "a", flush it down to the underlying store, then overwrite with "b"
    // at the exact same timestamp so the new value lives only in the cache.
    cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.flush();
    cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP);
    // The cached value must shadow the flushed one, and only one entry may come back.
    try (final WindowStoreIterator<byte[]> results =
             cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP))) {
        verifyKeyValue(results.next(), DEFAULT_TIMESTAMP, "b");
        assertFalse(results.hasNext());
    }
}
// Same key written into two adjacent windows (timestamps one WINDOW_SIZE apart):
// fetch returns oldest-first, backwardFetch newest-first.
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldIterateAcrossWindows(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
    try (final WindowStoreIterator<byte[]> fetch =
        cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
        verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a");
        verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b");
        assertFalse(fetch.hasNext());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldIterateBackwardAcrossWindows(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("1"), bytesValue("a"), DEFAULT_TIMESTAMP);
    cachingStore.put(bytesKey("1"), bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
    try (final WindowStoreIterator<byte[]> fetch =
        cachingStore.backwardFetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
        verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b");
        verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a");
        assertFalse(fetch.hasNext());
    }
}
// One record written straight into the underlying bytes store and one into the cache:
// a fetch must merge both layers into a single ordered iteration (and the reverse
// order for backwardFetch).
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldIterateCacheAndStore(final boolean hasIndex) {
    setUp(hasIndex);
    final Bytes key = Bytes.wrap("1".getBytes());
    // "a" bypasses the cache entirely — stored via the raw store-key binary format.
    bytesStore.put(TimeFirstWindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes());
    cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
    try (final WindowStoreIterator<byte[]> fetch =
        cachingStore.fetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
        verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a");
        verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b");
        assertFalse(fetch.hasNext());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldIterateBackwardCacheAndStore(final boolean hasIndex) {
    setUp(hasIndex);
    final Bytes key = Bytes.wrap("1".getBytes());
    bytesStore.put(TimeFirstWindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes());
    cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
    try (final WindowStoreIterator<byte[]> fetch =
        cachingStore.backwardFetch(bytesKey("1"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
        // Newest-first: cached "b" precedes the store-resident "a".
        verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP + WINDOW_SIZE, "b");
        verifyKeyValue(fetch.next(), DEFAULT_TIMESTAMP, "a");
        assertFalse(fetch.hasNext());
    }
}
// Key-range variants of the cache+store merge tests above; the returned entries are
// windowed keys, so window boundaries are asserted explicitly.
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldIterateCacheAndStoreKeyRange(final boolean hasIndex) {
    setUp(hasIndex);
    final Bytes key = Bytes.wrap("1".getBytes());
    bytesStore.put(TimeFirstWindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes());
    cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE)
;
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> fetchRange =
        cachingStore.fetch(key, bytesKey("2"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
        verifyWindowedKeyValue(
            fetchRange.next(),
            new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            "a");
        verifyWindowedKeyValue(
            fetchRange.next(),
            new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP + WINDOW_SIZE, DEFAULT_TIMESTAMP + WINDOW_SIZE + WINDOW_SIZE)),
            "b");
        assertFalse(fetchRange.hasNext());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldIterateBackwardCacheAndStoreKeyRange(final boolean hasIndex) {
    setUp(hasIndex);
    final Bytes key = Bytes.wrap("1".getBytes());
    bytesStore.put(TimeFirstWindowKeySchema.toStoreKeyBinary(key, DEFAULT_TIMESTAMP, 0), "a".getBytes());
    cachingStore.put(key, bytesValue("b"), DEFAULT_TIMESTAMP + WINDOW_SIZE);
    try (final KeyValueIterator<Windowed<Bytes>, byte[]> fetchRange =
        cachingStore.backwardFetch(key, bytesKey("2"), ofEpochMilli(DEFAULT_TIMESTAMP), ofEpochMilli(DEFAULT_TIMESTAMP + WINDOW_SIZE))) {
        // Newest window first when iterating backward.
        verifyWindowedKeyValue(
            fetchRange.next(),
            new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP + WINDOW_SIZE, DEFAULT_TIMESTAMP + WINDOW_SIZE + WINDOW_SIZE)),
            "b");
        verifyWindowedKeyValue(
            fetchRange.next(),
            new Windowed<>(key, new TimeWindow(DEFAULT_TIMESTAMP, DEFAULT_TIMESTAMP + WINDOW_SIZE)),
            "a");
        assertFalse(fetchRange.hasNext());
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldClearNamespaceCacheOnClose(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("a"), 0L);
    // A put occupies one cache slot for the base entry, plus an index entry
    // when the index is enabled.
    final int expectedEntries;
    if (hasIndex) {
        expectedEntries = 2;
    } else {
        expectedEntries = 1;
    }
    assertEquals(expectedEntries, cache.size());
    // Closing the store must drop all of its entries from the shared cache.
    cachingStore.close();
    assertEquals(0, cache.size());
}
// Once closed, every read and write on the caching store must fail fast with
// InvalidStateStoreException. The "resource" suppression covers the iterator that
// fetch() would return — the call throws before one is ever produced.
@SuppressWarnings("resource")
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowIfTryingToFetchFromClosedCachingStore(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.close();
    assertThrows(InvalidStateStoreException.class, () -> cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofEpochMilli(10)));
}
@SuppressWarnings("resource")
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowIfTryingToFetchRangeFromClosedCachingStore(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.close();
    assertThrows(InvalidStateStoreException.class, () -> cachingStore.fetch(bytesKey("a"), bytesKey("b"), ofEpochMilli(0), ofEpochMilli(10)));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowIfTryingToWriteToClosedCachingStore(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.close();
    assertThrows(InvalidStateStoreException.class, () -> cachingStore.put(bytesKey("a"), bytesValue("a"), 0L));
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldSkipNonExistBaseKeyInCache(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
    // Manufacture a dangling INDEX entry for key "a" directly in the cache — i.e. an
    // index record whose corresponding base record is absent from the cache — while
    // the real value for "a" lives only in the underlying store.
    final SegmentedCacheFunction indexCacheFunction = new SegmentedCacheFunction(new KeyFirstWindowKeySchema(), SEGMENT_INTERVAL);
    final Bytes key = bytesKey("a");
    final byte[] value = bytesValue("0001");
    final Bytes cacheIndexKey = indexCacheFunction.cacheKey(KeyFirstWindowKeySchema.toStoreKeyBinary(key, 1, 0));
    final String cacheName = context.taskId() + "-test";
    // Only put index to store
    cache.put(cacheName,
        cacheIndexKey,
        new LRUCacheEntry(
            new byte[0],
            new RecordHeaders(),
            true,
            context.recordContext().offset(),
            context.recordContext().timestamp(),
            context.recordContext().partition(),
            "",
            context.recordContext().sourceRawKey(),
            context.recordContext().sourceRawValue()
        )
    );
    underlyingStore.put(key, value, 1);
    // The fetch must skip the dangling index entry rather than surface a phantom
    // record; result ordering differs per schema — key-major with the index,
    // timestamp-major without it.
    if (hasIndex) {
        verifyKeyValueList(
            asList(
                windowedPair("a", "0001", 1),
                windowedPair("aa", "0002", 0)
            ),
            toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
                ofEpochMilli(Long.MAX_VALUE)))
        );
    } else {
        verifyKeyValueList(
            asList(
                windowedPair("aa", "0002", 0),
                windowedPair("a", "0001", 1)
            ),
            toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
                ofEpochMilli(Long.MAX_VALUE)))
        );
    }
}
// Single-key fetch with lookalike keys present ("aa" must not leak into results for "a"),
// including a record in a later segment (timestamp SEGMENT_INTERVAL); forward then backward.
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldFetchAndIterateOverExactKeys(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
    cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
    cachingStore.put(bytesKey("a"), bytesValue("0003"), 1);
    cachingStore.put(bytesKey("aa"), bytesValue("0004"), 1);
    cachingStore.put(bytesKey("a"), bytesValue("0005"), SEGMENT_INTERVAL);
    final List<KeyValue<Long, byte[]>> expected = asList(
        KeyValue.pair(0L, bytesValue("0001")),
        KeyValue.pair(1L, bytesValue("0003")),
        KeyValue.pair(SEGMENT_INTERVAL, bytesValue("0005"))
    );
    final List<KeyValue<Long, byte[]>> actual =
        toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)));
    verifyKeyValueList(expected, actual);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldBackwardFetchAndIterateOverExactKeys(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
    cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
    cachingStore.put(bytesKey("a"), bytesValue("0003"), 1);
    cachingStore.put(bytesKey("aa"), bytesValue("0004"), 1);
    cachingStore.put(bytesKey("a"), bytesValue("0005"), SEGMENT_INTERVAL);
    // Same entries for "a", newest first.
    final List<KeyValue<Long, byte[]>> expected = asList(
        KeyValue.pair(SEGMENT_INTERVAL, bytesValue("0005")),
        KeyValue.pair(1L, bytesValue("0003")),
        KeyValue.pair(0L, bytesValue("0001"))
    );
    final List<KeyValue<Long, byte[]>> actual =
        toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)));
    verifyKeyValueList(expected, actual);
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldFetchAndIterateOverKeyRange(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
    cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
    cachingStore.put(bytesKey("a"), bytesValue("0003"), 1);
    cachingStore.put(bytesKey("aa"), bytesValue("0004"), 1);
    cachingStore.put(bytesKey("a"), bytesValue("0005"), SEGMENT_INTERVAL);
    // Degenerate range [a, a]: only "a" entries, in timestamp order.
    verifyKeyValueList(
        asList(
            windowedPair("a", "0001", 0),
            windowedPair("a", "0003", 1),
            windowedPair("a", "0005", SEGMENT_INTERVAL)
        ),
        toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)))
    );
    // Degenerate range [aa, aa].
    verifyKeyValueList(
        asList(
            windowedPair("aa", "0002", 0),
            windowedPair("aa", "0004", 1)),
        toListAndCloseIterator(cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)))
    );
    // Range [a, aa]: ordering depends on the schema — key-major within a segment when
    // the index exists, timestamp-major otherwise; "0005" is last either way because
    // it sits in a later segment.
    if (hasIndex) {
        verifyKeyValueList(
            asList(
                windowedPair("a", "0001", 0),
                windowedPair("a", "0003", 1),
                windowedPair("aa", "0002", 0),
                windowedPair("aa", "0004", 1),
                windowedPair("a", "0005", SEGMENT_INTERVAL)
            ),
            toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
                ofEpochMilli(Long.MAX_VALUE)))
        );
    } else {
        verifyKeyValueList(
            asList(
                windowedPair("a", "0001", 0),
                windowedPair("aa", "0002", 0),
                windowedPair("a", "0003", 1),
                windowedPair("aa", "0004", 1),
                windowedPair("a", "0005", SEGMENT_INTERVAL)
            ),
            toListAndCloseIterator(cachingStore.fetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
                ofEpochMilli(Long.MAX_VALUE)))
        );
    }
}
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldFetchAndIterateOverKeyBackwardRange(final boolean hasIndex) {
    setUp(hasIndex);
    cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
    cachingStore.put(bytesKey("aa"), bytesValue("0002"), 0);
    cachingStore.put(bytesKey("a"), bytesValue("0003"), 1);
    cachingStore.put(bytesKey("aa"), bytesValue("0004"), 1);
    cachingStore.put(bytesKey("a"), bytesValue("0005"), SEGMENT_INTERVAL);
    // Degenerate range [a, a], newest first.
    verifyKeyValueList(
        asList(
            windowedPair("a", "0005", SEGMENT_INTERVAL),
            windowedPair("a", "0003", 1),
            windowedPair("a", "0001", 0)
        ),
        toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("a"), bytesKey("a"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)))
    );
    // Degenerate range [aa, aa], newest first.
    verifyKeyValueList(
        asList(
            windowedPair("aa", "0004", 1),
            windowedPair("aa", "0002", 0)),
        toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("aa"), bytesKey("aa"), ofEpochMilli(0), ofEpochMilli(Long.MAX_VALUE)))
    );
    // Range [a, aa] backward: pure reverse-timestamp order without the index,
    // reverse key-major within each segment with it.
    if (!hasIndex) {
        verifyKeyValueList(
            // Ordered by timestamp if has no index
            asList(
                windowedPair("a", "0005", SEGMENT_INTERVAL),
                windowedPair("aa", "0004", 1),
                windowedPair("a", "0003", 1),
                windowedPair("aa", "0002", 0),
                windowedPair("a", "0001", 0)
            ),
            toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
                ofEpochMilli(Long.MAX_VALUE)))
        );
    } else {
        verifyKeyValueList(
            asList(
                // First because in larger segments
                windowedPair("a", "0005", SEGMENT_INTERVAL),
                windowedPair("aa", "0004", 1),
                windowedPair("aa", "0002", 0),
                windowedPair("a", "0003", 1),
                windowedPair("a", "0001", 0)
            ),
            toListAndCloseIterator(cachingStore.backwardFetch(bytesKey("a"), bytesKey("aa"), ofEpochMilli(0),
                ofEpochMilli(Long.MAX_VALUE)))
        );
    }
}
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldReturnSameResultsForSingleKeyFetchAndEqualKeyRangeFetch(final boolean hasIndex) {
        setUp(hasIndex);
        // "a" and "aaa" bracket the queried key "aa" lexicographically, so the test
        // also proves that the equal-bounds range fetch filters out neighbors.
        cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
        cachingStore.put(bytesKey("aa"), bytesValue("0002"), 1);
        cachingStore.put(bytesKey("aa"), bytesValue("0003"), 2);
        cachingStore.put(bytesKey("aaa"), bytesValue("0004"), 3);
        // fetch(key, from, to) and fetch(key, key, from, to) must yield identical
        // values pairwise, and both must be exhausted after the two "aa" entries.
        try (final WindowStoreIterator<byte[]> singleKeyIterator = cachingStore.fetch(bytesKey("aa"), 0L, 5L);
            final KeyValueIterator<Windowed<Bytes>, byte[]> keyRangeIterator = cachingStore.fetch(bytesKey("aa"), bytesKey("aa"), 0L, 5L)) {
            assertEquals(stringFrom(singleKeyIterator.next().value), stringFrom(keyRangeIterator.next().value));
            assertEquals(stringFrom(singleKeyIterator.next().value), stringFrom(keyRangeIterator.next().value));
            assertFalse(singleKeyIterator.hasNext());
            assertFalse(keyRangeIterator.hasNext());
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldReturnSameResultsForSingleKeyFetchAndEqualKeyRangeBackwardFetch(final boolean hasIndex) {
        setUp(hasIndex);
        // Same fixture as the forward-fetch test: neighbors "a" and "aaa" must be
        // excluded by the equal-bounds backward range fetch.
        cachingStore.put(bytesKey("a"), bytesValue("0001"), 0);
        cachingStore.put(bytesKey("aa"), bytesValue("0002"), 1);
        cachingStore.put(bytesKey("aa"), bytesValue("0003"), 2);
        cachingStore.put(bytesKey("aaa"), bytesValue("0004"), 3);
        // Single-key and equal-bounds-range backwardFetch must agree pairwise and
        // both be exhausted after the two "aa" entries.
        try (final WindowStoreIterator<byte[]> singleKeyIterator =
                 cachingStore.backwardFetch(bytesKey("aa"), Instant.ofEpochMilli(0L), Instant.ofEpochMilli(5L));
             final KeyValueIterator<Windowed<Bytes>, byte[]> keyRangeIterator =
                 cachingStore.backwardFetch(bytesKey("aa"), bytesKey("aa"), Instant.ofEpochMilli(0L), Instant.ofEpochMilli(5L))) {
            assertEquals(stringFrom(singleKeyIterator.next().value), stringFrom(keyRangeIterator.next().value));
            assertEquals(stringFrom(singleKeyIterator.next().value), stringFrom(keyRangeIterator.next().value));
            assertFalse(singleKeyIterator.hasNext());
            assertFalse(keyRangeIterator.hasNext());
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldThrowNullPointerExceptionOnPutNullKey(final boolean hasIndex) {
        setUp(hasIndex);
        // Keys must be non-null: put() is expected to fail fast with an NPE.
        assertThrows(NullPointerException.class, () -> cachingStore.put(null, bytesValue("anyValue"), 0L));
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldNotThrowNullPointerExceptionOnPutNullValue(final boolean hasIndex) {
        setUp(hasIndex);
        // A null value must be accepted; the test passes if no exception is thrown.
        // NOTE(review): in Kafka stores a null value typically denotes a delete — confirm.
        cachingStore.put(bytesKey("a"), null, 0L);
    }
@SuppressWarnings("resource")
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void shouldThrowNullPointerExceptionOnFetchNullKey(final boolean hasIndex) {
setUp(hasIndex);
assertThrows(NullPointerException.class, () -> cachingStore.fetch(null, ofEpochMilli(1L), ofEpochMilli(2L)));
}
    @SuppressWarnings("resource")
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldNotThrowInvalidRangeExceptionWithNegativeFromKey(final boolean hasIndex) {
        setUp(hasIndex);
        // IntegerSerializer bytes for -1 compare lexicographically greater than the
        // bytes for 1, so this is effectively a from > to range (per the logged
        // message below). The store must return an empty iterator and warn, not throw.
        final Bytes keyFrom = Bytes.wrap(new IntegerSerializer().serialize("", -1));
        final Bytes keyTo = Bytes.wrap(new IntegerSerializer().serialize("", 1));
        try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(TimeOrderedCachingWindowStore.class);
             final KeyValueIterator<Windowed<Bytes>, byte[]> iterator = cachingStore.fetch(keyFrom, keyTo, 0L, 10L)) {
            assertFalse(iterator.hasNext());
            final List<String> messages = appender.getMessages();
            // The warning text must match the store's log line exactly.
            assertThat(
                messages,
                hasItem("Returning empty iterator for fetch with invalid key range: from > to." +
                    " This may be due to range arguments set in the wrong order, " +
                    "or serdes that don't preserve ordering when lexicographically comparing the serialized bytes." +
                    " Note that the built-in numerical serdes do not follow this for negative numbers")
            );
        }
    }
    @SuppressWarnings("resource")
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldNotThrowInvalidBackwardRangeExceptionWithNegativeFromKey(final boolean hasIndex) {
        setUp(hasIndex);
        // Backward counterpart of the forward negative-range test: empty iterator
        // plus a warning. Note the backward message omits the "wrong order" hint
        // present in the forward variant.
        final Bytes keyFrom = Bytes.wrap(new IntegerSerializer().serialize("", -1));
        final Bytes keyTo = Bytes.wrap(new IntegerSerializer().serialize("", 1));
        try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(TimeOrderedCachingWindowStore.class);
             final KeyValueIterator<Windowed<Bytes>, byte[]> iterator =
                 cachingStore.backwardFetch(keyFrom, keyTo, Instant.ofEpochMilli(0L), Instant.ofEpochMilli(10L))) {
            assertFalse(iterator.hasNext());
            final List<String> messages = appender.getMessages();
            assertThat(
                messages,
                hasItem("Returning empty iterator for fetch with invalid key range: from > to." +
                    " This may be due to serdes that don't preserve ordering when lexicographically comparing the serialized bytes." +
                    " Note that the built-in numerical serdes do not follow this for negative numbers")
            );
        }
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldCloseCacheAndWrappedStoreAfterErrorDuringCacheFlush(final boolean hasIndex) {
        setUp(hasIndex);
        setUpCloseTests();
        // Even when the cache flush fails, close() must still rethrow AND release
        // the cache namespace and the wrapped store (verified in tear-down).
        doThrow(new RuntimeException(
            "Simulating an error on flush"))
            .when(cache).flush(CACHE_NAMESPACE);
        assertThrows(RuntimeException.class, cachingStore::close);
        verifyAndTearDownCloseTests();
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldCloseWrappedStoreAfterErrorDuringCacheClose(final boolean hasIndex) {
        setUp(hasIndex);
        setUpCloseTests();
        // A failure while closing the cache namespace must not prevent the wrapped
        // store from being closed (verified in tear-down).
        doThrow(new RuntimeException("Simulating an error on close"))
            .when(cache).close(CACHE_NAMESPACE);
        assertThrows(RuntimeException.class, cachingStore::close);
        verifyAndTearDownCloseTests();
    }
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void shouldCloseCacheAfterErrorDuringStateStoreClose(final boolean hasIndex) {
        setUp(hasIndex);
        setUpCloseTests();
        // A failure while closing the underlying store must not prevent the cache
        // from being flushed and closed (verified in tear-down).
        doThrow(new RuntimeException("Simulating an error on close"))
            .when(underlyingStore).close();
        assertThrows(RuntimeException.class, cachingStore::close);
        verifyAndTearDownCloseTests();
    }
    /**
     * Replaces the store and cache built by {@code setUp(boolean)} with Mockito
     * mocks so the close-path tests above can inject failures and verify that
     * close() releases every resource.
     */
    private void setUpCloseTests() {
        underlyingStore = mock(RocksDBTimeOrderedWindowStore.class);
        // Stub name()/isOpen() before init(), which relies on both.
        when(underlyingStore.name()).thenReturn("store-name");
        when(underlyingStore.isOpen()).thenReturn(true);
        cachingStore = new TimeOrderedCachingWindowStore(underlyingStore, WINDOW_SIZE, SEGMENT_INTERVAL);
        cache = mock(ThreadCache.class);
        context = new InternalMockProcessorContext<>(TestUtils.tempDirectory(), null, null, null, cache);
        context.setRecordContext(new ProcessorRecordContext(10, 0, 0, TOPIC, new RecordHeaders()));
        cachingStore.init(context, cachingStore);
    }
private static KeyValue<Windowed<Bytes>, byte[]> windowedPair(final String key, final String value, final long timestamp) {
return KeyValue.pair(
new Windowed<>(bytesKey(key), new TimeWindow(timestamp, timestamp + WINDOW_SIZE)),
bytesValue(value));
}
private int addItemsToCache() {
long cachedSize = 0;
int i = 0;
while (cachedSize < MAX_CACHE_SIZE_BYTES) {
final String kv = String.valueOf(i++);
cachingStore.put(bytesKey(kv), bytesValue(kv), DEFAULT_TIMESTAMP);
cachedSize += memoryCacheEntrySize(kv.getBytes(), kv.getBytes(), TOPIC) +
8 + // timestamp
4; // sequenceNumber
}
return i;
}
    /**
     * Asserts that close() released everything — wrapped store closed, cache
     * namespace flushed and closed — then resets the mocks for the next test.
     */
    private void verifyAndTearDownCloseTests() {
        verify(underlyingStore).close();
        verify(cache).flush(CACHE_NAMESPACE);
        verify(cache).close(CACHE_NAMESPACE);
        // Resets the mocks created in #setUpCloseTests() so per-test teardown
        // works correctly. NOTE(review): the original comment said "@After" (JUnit 4);
        // this class uses JUnit 5, so this presumably refers to @AfterEach — confirm.
        reset(cache);
        reset(underlyingStore);
    }
}
|
TimeOrderedWindowStoreTest
|
java
|
quarkusio__quarkus
|
extensions/devui/deployment/src/main/java/io/quarkus/devui/deployment/ExtensionsBuildItem.java
|
{
"start": 180,
"end": 1835
}
|
class ____ extends SimpleBuildItem {
private final List<Extension> activeExtensions;
private final List<Extension> inactiveExtensions;
private final List<Extension> sectionMenuExtensions;
private final List<Extension> footerTabsExtensions;
private final List<Extension> settingTabsExtensions;
private final List<Extension> unlistedExtensions;
public ExtensionsBuildItem(List<Extension> activeExtensions,
List<Extension> inactiveExtensions,
List<Extension> sectionMenuExtensions,
List<Extension> footerTabsExtensions,
List<Extension> settingTabsExtensions,
List<Extension> unlistedExtensions) {
this.activeExtensions = activeExtensions;
this.inactiveExtensions = inactiveExtensions;
this.sectionMenuExtensions = sectionMenuExtensions;
this.footerTabsExtensions = footerTabsExtensions;
this.settingTabsExtensions = settingTabsExtensions;
this.unlistedExtensions = unlistedExtensions;
}
public List<Extension> getActiveExtensions() {
return this.activeExtensions;
}
public List<Extension> getInactiveExtensions() {
return this.inactiveExtensions;
}
public List<Extension> getSectionMenuExtensions() {
return this.sectionMenuExtensions;
}
public List<Extension> getFooterTabsExtensions() {
return this.footerTabsExtensions;
}
public List<Extension> getSettingTabsExtensions() {
return this.settingTabsExtensions;
}
public List<Extension> getUnlistedExtensions() {
return this.unlistedExtensions;
}
}
|
ExtensionsBuildItem
|
java
|
apache__kafka
|
clients/src/main/java/org/apache/kafka/common/security/token/delegation/DelegationToken.java
|
{
"start": 981,
"end": 1034
}
|
class ____ a delegation token.
*
*/
public
|
representing
|
java
|
mapstruct__mapstruct
|
processor/src/main/java/org/mapstruct/ap/internal/model/SupportingMappingMethod.java
|
{
"start": 571,
"end": 1061
}
|
interface ____ is added as
* private method to map a certain source/target type combination. Based on a {@link BuiltInMethod}.
*
* Specific templates all point to this class, for instance:
* {@link org.mapstruct.ap.internal.model.source.builtin.XmlGregorianCalendarToCalendar},
* but also used fields and constructor elements, e.g.
* {@link org.mapstruct.ap.internal.model.common.FinalField} and
* {@link NewDatatypeFactoryConstructorFragment}
*
* @author Gunnar Morling
*/
public
|
but
|
java
|
assertj__assertj-core
|
assertj-core/src/test/java/org/assertj/core/api/short_/ShortAssert_isPositive_Test.java
|
{
"start": 882,
"end": 1190
}
|
class ____ extends ShortAssertBaseTest {
@Override
protected ShortAssert invoke_api_method() {
return assertions.isPositive();
}
@Override
protected void verify_internal_effects() {
verify(shorts).assertIsPositive(getInfo(assertions), getActual(assertions));
}
}
|
ShortAssert_isPositive_Test
|
java
|
google__guava
|
android/guava-tests/test/com/google/common/eventbus/outside/BaseSubscriberFinderTest.java
|
{
"start": 970,
"end": 1584
}
|
class ____ {
final List<Object> nonSubscriberEvents = new ArrayList<>();
final List<Object> subscriberEvents = new ArrayList<>();
public void notASubscriber(Object o) {
nonSubscriberEvents.add(o);
}
@Subscribe
public void subscriber(Object o) {
subscriberEvents.add(o);
}
}
public void testNonSubscriber() {
assertThat(getSubscriber().nonSubscriberEvents).isEmpty();
}
public void testSubscriber() {
assertThat(getSubscriber().subscriberEvents).contains(EVENT);
}
@Override
Subscriber createSubscriber() {
return new Subscriber();
}
}
|
Subscriber
|
java
|
apache__flink
|
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/AlterModelChangeOperation.java
|
{
"start": 1417,
"end": 4485
}
|
class ____ implements AlterOperation {
private final ObjectIdentifier modelIdentifier;
private final List<ModelChange> modelChanges;
private final CatalogModel catalogModel;
private final boolean ignoreIfNotExists;
public ObjectIdentifier getModelIdentifier() {
return modelIdentifier;
}
public List<ModelChange> getModelChanges() {
return modelChanges;
}
@Nullable
public CatalogModel getCatalogModel() {
return catalogModel;
}
public boolean ignoreIfNotExists() {
return ignoreIfNotExists;
}
/**
* Creates an ALTER MODEL CHANGE statement.
*
* @param modelIdentifier The identifier of the model to be altered.
* @param modelChanges The list of changes to be applied to the model.
* @param catalogModel The resolved model after applying the changes. If null, existing model
* doesn't exist and ignoreIfNotExists is true.
* @param ignoreIfNotExists Flag to specify behavior when the model doesn't exist.
*/
public AlterModelChangeOperation(
ObjectIdentifier modelIdentifier,
List<ModelChange> modelChanges,
@Nullable CatalogModel catalogModel,
boolean ignoreIfNotExists) {
this.modelIdentifier = modelIdentifier;
this.modelChanges = modelChanges;
this.catalogModel = catalogModel;
this.ignoreIfNotExists = ignoreIfNotExists;
}
@Override
public String asSummaryString() {
String changes =
modelChanges.stream()
.map(AlterModelChangeOperation::toString)
.collect(Collectors.joining(",\n"));
return String.format(
"ALTER MODEL %s%s\n%s",
ignoreIfNotExists ? "IF EXISTS " : "", modelIdentifier.asSummaryString(), changes);
}
@Override
public TableResultInternal execute(Context ctx) {
if (getCatalogModel() == null && ignoreIfNotExists()) {
return TableResultImpl.TABLE_RESULT_OK;
}
ctx.getCatalogManager()
.alterModel(
getCatalogModel(), modelChanges, getModelIdentifier(), ignoreIfNotExists());
return TableResultImpl.TABLE_RESULT_OK;
}
private static String toString(ModelChange modelChange) {
if (modelChange instanceof ModelChange.SetOption) {
ModelChange.SetOption setOption = (ModelChange.SetOption) modelChange;
return String.format(
" SET (%s)",
OperationUtils.formatParameter(setOption.getKey(), setOption.getValue()));
} else if (modelChange instanceof ModelChange.ResetOption) {
ModelChange.ResetOption resetOption = (ModelChange.ResetOption) modelChange;
return String.format(" RESET (%s)", resetOption.getKey());
} else {
throw new UnsupportedOperationException(
String.format("Unknown model change: %s", modelChange));
}
}
}
|
AlterModelChangeOperation
|
java
|
spring-projects__spring-boot
|
core/spring-boot-test/src/main/java/org/springframework/boot/test/context/filter/annotation/StandardAnnotationCustomizableTypeExcludeFilter.java
|
{
"start": 1503,
"end": 3579
}
|
class ____<A extends Annotation>
extends AnnotationCustomizableTypeExcludeFilter {
private static final Filter[] NO_FILTERS = {};
private static final String[] FILTER_TYPE_ATTRIBUTES;
static {
FilterType[] filterValues = FilterType.values();
FILTER_TYPE_ATTRIBUTES = new String[filterValues.length];
for (int i = 0; i < filterValues.length; i++) {
FILTER_TYPE_ATTRIBUTES[i] = filterValues[i].name().toLowerCase(Locale.ROOT) + "Filters";
}
}
private final MergedAnnotation<A> annotation;
protected StandardAnnotationCustomizableTypeExcludeFilter(Class<?> testClass) {
this.annotation = MergedAnnotations.from(testClass, SearchStrategy.INHERITED_ANNOTATIONS)
.get(getAnnotationType());
}
protected final MergedAnnotation<A> getAnnotation() {
return this.annotation;
}
@Override
protected boolean hasAnnotation() {
return this.annotation.isPresent();
}
@Override
protected Filter[] getFilters(FilterType type) {
return this.annotation.getValue(FILTER_TYPE_ATTRIBUTES[type.ordinal()], Filter[].class).orElse(NO_FILTERS);
}
@Override
protected boolean isUseDefaultFilters() {
return this.annotation.getValue("useDefaultFilters", Boolean.class).orElse(false);
}
@Override
protected final Set<Class<?>> getDefaultIncludes() {
Set<Class<?>> defaultIncludes = new HashSet<>();
defaultIncludes.addAll(getKnownIncludes());
defaultIncludes.addAll(TypeIncludes.load(this.annotation.getType(), getClass().getClassLoader()).getIncludes());
return defaultIncludes;
}
protected Set<Class<?>> getKnownIncludes() {
return Collections.emptySet();
}
@Override
protected Set<Class<?>> getComponentIncludes() {
return Collections.emptySet();
}
@SuppressWarnings("unchecked")
protected Class<A> getAnnotationType() {
ResolvableType type = ResolvableType.forClass(StandardAnnotationCustomizableTypeExcludeFilter.class,
getClass());
Class<A> generic = (Class<A>) type.resolveGeneric();
Assert.state(generic != null, "'generic' must not be null");
return generic;
}
}
|
StandardAnnotationCustomizableTypeExcludeFilter
|
java
|
apache__camel
|
core/camel-core/src/test/java/org/apache/camel/component/bean/BeanProxyNoBindingTest.java
|
{
"start": 1322,
"end": 8469
}
|
class ____ extends ContextTestSupport {
@Test
public void testBeanProxyStringReturnString() throws Exception {
// START SNIPPET: e2
Endpoint endpoint = context.getEndpoint("direct:start");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
String reply = service.submitOrderStringReturnString("<order type=\"book\">Camel in action</order>");
assertEquals("<order id=\"123\">OK</order>", reply);
// END SNIPPET: e2
}
@Test
public void testBeanProxyStringReturnDocument() throws Exception {
Endpoint endpoint = context.getEndpoint("direct:start");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
Document reply = service.submitOrderStringReturnDocument("<order type=\"book\">Camel in action</order>");
assertNotNull(reply);
String s = context.getTypeConverter().convertTo(String.class, reply);
assertEquals("<order id=\"123\">OK</order>", s);
}
@Test
public void testBeanProxyDocumentReturnString() throws Exception {
Endpoint endpoint = context.getEndpoint("direct:start");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
Document doc = context.getTypeConverter().convertTo(Document.class, "<order type=\"book\">Camel in action</order>");
String reply = service.submitOrderDocumentReturnString(doc);
assertEquals("<order id=\"123\">OK</order>", reply);
}
@Test
public void testBeanProxyDocumentReturnDocument() throws Exception {
// START SNIPPET: e3
Endpoint endpoint = context.getEndpoint("direct:start");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
Document doc = context.getTypeConverter().convertTo(Document.class, "<order type=\"book\">Camel in action</order>");
Document reply = service.submitOrderDocumentReturnDocument(doc);
assertNotNull(reply);
String s = context.getTypeConverter().convertTo(String.class, reply);
assertEquals("<order id=\"123\">OK</order>", s);
// END SNIPPET: e3
}
@Test
public void testBeanProxyFailure() throws Exception {
Endpoint endpoint = context.getEndpoint("direct:start");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
String reply = service.submitOrderStringReturnString("<order type=\"beer\">Carlsberg</order>");
assertEquals("<order>FAIL</order>", reply);
}
@Test
public void testBeanProxyFailureNotXMLBody() throws Exception {
Endpoint endpoint = context.getEndpoint("direct:start");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
assertThrows(Exception.class,
() -> service.submitOrderStringReturnString("Hello World"),
"Should have thrown exception");
}
@Test
public void testBeanProxyVoidReturnType() throws Exception {
Endpoint endpoint = context.getEndpoint("direct:start");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
service.doNothing("<order>ping</order>");
}
@Test
public void testBeanProxyFailureInvalidReturnType() throws Exception {
Endpoint endpoint = context.getEndpoint("direct:start");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
Exception e = assertThrows(Exception.class,
() -> service.invalidReturnType("<order type=\"beer\">Carlsberg</order>"),
"Should have thrown exception");
InvalidPayloadException cause = assertIsInstanceOf(InvalidPayloadException.class, e.getCause());
assertEquals(Integer.class, cause.getType());
}
@Test
public void testBeanProxyCallAnotherBean() throws Exception {
Endpoint endpoint = context.getEndpoint("direct:bean");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
String reply = service.submitOrderStringReturnString("World");
assertEquals("Hello World", reply);
}
// START SNIPPET: e4
@Test
public void testProxyBuilderProxyCallAnotherBean() throws Exception {
// use ProxyBuilder to easily create the proxy
OrderService service = new ProxyBuilder(context).endpoint("direct:bean").build(OrderService.class);
String reply = service.submitOrderStringReturnString("World");
assertEquals("Hello World", reply);
}
// END SNIPPET: e4
@Test
public void testBeanProxyCallAnotherBeanWithNoArgs() throws Exception {
Endpoint endpoint = context.getEndpoint("direct:bean");
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
String reply = service.doAbsolutelyNothing();
assertEquals("Hi nobody", reply);
}
@Test
public void testProxyBuilderProxyCallAnotherBeanWithNoArgs() throws Exception {
Endpoint endpoint = context.getEndpoint("direct:bean");
OrderService service = new ProxyBuilder(context).endpoint(endpoint).build(OrderService.class);
String reply = service.doAbsolutelyNothing();
assertEquals("Hi nobody", reply);
}
@Test
public void testBeanProxyVoidAsInOut() throws Exception {
Endpoint endpoint = context.getEndpoint("seda:delay");
// will by default let all exchanges be InOut
OrderService service = ProxyHelper.createProxy(endpoint, false, OrderService.class);
getMockEndpoint("mock:delay").expectedBodiesReceived("Hello World", "Bye World");
service.doNothing("Hello World");
template.sendBody("mock:delay", "Bye World");
assertMockEndpointsSatisfied();
}
@Test
public void testProxyBuilderVoidAsInOut() throws Exception {
// will by default let all exchanges be InOut
OrderService service = new ProxyBuilder(context).endpoint("seda:delay").build(OrderService.class);
getMockEndpoint("mock:delay").expectedBodiesReceived("Hello World", "Bye World");
service.doNothing("Hello World");
template.sendBody("mock:delay", "Bye World");
assertMockEndpointsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
// START SNIPPET: e1
from("direct:start").choice().when(xpath("/order/@type = 'book'")).to("direct:book").otherwise()
.to("direct:other").end();
from("direct:book").transform(constant("<order id=\"123\">OK</order>"));
from("direct:other").transform(constant("<order>FAIL</order>"));
// END SNIPPET: e1
from("direct:bean").bean(MyFooBean.class, "hello");
from("seda:delay").delay(1000).to("mock:delay");
}
};
}
public static
|
BeanProxyNoBindingTest
|
java
|
redisson__redisson
|
redisson/src/main/java/org/redisson/RedissonLongAdder.java
|
{
"start": 927,
"end": 2037
}
|
class ____ extends RedissonBaseAdder<Long> implements RLongAdder {
private final RedissonClient redisson;
private final LongAdder counter = new LongAdder();
public RedissonLongAdder(CommandAsyncExecutor connectionManager, String name, RedissonClient redisson) {
super(connectionManager, name, redisson);
this.redisson = redisson;
}
@Override
protected void doReset() {
counter.reset();
}
@Override
protected RFuture<Long> addAndGetAsync(String id) {
return redisson.getAtomicLong(getCounterName(id)).getAndAddAsync(counter.sum());
}
@Override
protected RFuture<Long> getAndDeleteAsync(String id) {
return redisson.getAtomicLong(getCounterName(id)).getAndDeleteAsync();
}
@Override
public void add(long x) {
counter.add(x);
}
@Override
public void increment() {
add(1L);
}
@Override
public void decrement() {
add(-1L);
}
@Override
public long sum() {
return get(sumAsync(60, TimeUnit.SECONDS));
}
}
|
RedissonLongAdder
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MRBench.java
|
{
"start": 1389,
"end": 1913
}
|
class ____ extends Configured implements Tool{
private static final Logger LOG = LoggerFactory.getLogger(MRBench.class);
private static final String DEFAULT_INPUT_SUB = "mr_input";
private static final String DEFAULT_OUTPUT_SUB = "mr_output";
private static Path BASE_DIR =
new Path(System.getProperty("test.build.data","/benchmarks/MRBench"));
private static Path INPUT_DIR = new Path(BASE_DIR, DEFAULT_INPUT_SUB);
private static Path OUTPUT_DIR = new Path(BASE_DIR, DEFAULT_OUTPUT_SUB);
public
|
MRBench
|
java
|
spring-projects__spring-boot
|
module/spring-boot-security/src/test/java/org/springframework/boot/security/autoconfigure/web/servlet/SecurityFilterAutoConfigurationEarlyInitializationTests.java
|
{
"start": 3217,
"end": 4878
}
|
class ____ {
private static final Pattern PASSWORD_PATTERN = Pattern.compile("^Using generated security password: (.*)$",
Pattern.MULTILINE);
@Test
@ClassPathExclusions({ "spring-security-oauth2-client-*.jar", "spring-security-oauth2-resource-server-*.jar",
"spring-security-saml2-service-provider-*.jar" })
void testSecurityFilterDoesNotCauseEarlyInitialization(CapturedOutput output) {
try (AnnotationConfigServletWebServerApplicationContext context = new AnnotationConfigServletWebServerApplicationContext()) {
TestPropertyValues.of("server.port:0").applyTo(context);
context.register(Config.class);
context.refresh();
WebServer webServer = context.getWebServer();
assertThat(webServer).isNotNull();
int port = webServer.getPort();
Matcher password = PASSWORD_PATTERN.matcher(output);
assertThat(password.find()).isTrue();
new TestRestTemplate("user", password.group(1)).getForEntity("http://localhost:" + port, Object.class);
// If early initialization occurred a ConverterNotFoundException is thrown
}
}
@Configuration(proxyBeanMethods = false)
@Import({ DeserializerBean.class, JacksonModuleBean.class, ExampleController.class, ConverterBean.class })
@ImportAutoConfiguration({ WebMvcAutoConfiguration.class, JacksonAutoConfiguration.class,
HttpMessageConvertersAutoConfiguration.class, DispatcherServletAutoConfiguration.class,
SecurityAutoConfiguration.class, ServletWebSecurityAutoConfiguration.class,
UserDetailsServiceAutoConfiguration.class, SecurityFilterAutoConfiguration.class,
PropertyPlaceholderAutoConfiguration.class })
static
|
SecurityFilterAutoConfigurationEarlyInitializationTests
|
java
|
alibaba__fastjson
|
src/test/java/com/alibaba/fastjson/deserializer/issues3796/bean/ObjectG.java
|
{
"start": 119,
"end": 525
}
|
class ____ {
public static final String tesdt = "tesdt";
@JSONField(name = "a")
private long a;
private long b;
private ObjectF c;
public long getA() {
return a;
}
public void setA(long a) {
this.a = a;
}
public long getB() {
return b;
}
public void setB(long b) {
this.b = b;
}
public ObjectF getC() {
return c;
}
public void setC(ObjectF c) {
this.c = c;
}
}
|
ObjectG
|
java
|
elastic__elasticsearch
|
x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/GetConnectorActionResponseBWCSerializingTests.java
|
{
"start": 676,
"end": 1658
}
|
class ____ extends AbstractBWCWireSerializationTestCase<GetConnectorAction.Response> {
@Override
protected Writeable.Reader<GetConnectorAction.Response> instanceReader() {
return GetConnectorAction.Response::new;
}
@Override
protected GetConnectorAction.Response createTestInstance() {
return new GetConnectorAction.Response(ConnectorTestUtils.getRandomConnectorSearchResult());
}
@Override
protected GetConnectorAction.Response mutateInstance(GetConnectorAction.Response instance) throws IOException {
ConnectorSearchResult connector = instance.getConnector();
return new GetConnectorAction.Response(randomValueOtherThan(connector, ConnectorTestUtils::getRandomConnectorSearchResult));
}
@Override
protected GetConnectorAction.Response mutateInstanceForVersion(GetConnectorAction.Response instance, TransportVersion version) {
return instance;
}
}
|
GetConnectorActionResponseBWCSerializingTests
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/collection/set/Parent.java
|
{
"start": 259,
"end": 667
}
|
class ____ {
private String name;
private Set<Child> children = new HashSet<>();
public Parent() {
}
public Parent(String name) {
this.name = name;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Set<Child> getChildren() {
return children;
}
public void setChildren(Set<Child> children) {
this.children = children;
}
}
|
Parent
|
java
|
spring-projects__spring-boot
|
module/spring-boot-pulsar/src/main/java/org/springframework/boot/pulsar/autoconfigure/PulsarPropertiesMapper.java
|
{
"start": 11668,
"end": 11820
}
|
interface ____ {
void accept(String authPluginClassName, String authParamString) throws UnsupportedAuthenticationException;
}
}
|
AuthenticationConsumer
|
java
|
quarkusio__quarkus
|
extensions/jdbc/jdbc-mssql/runtime/src/main/java/io/quarkus/jdbc/mssql/runtime/graal/com/microsoft/sqlserver/jdbc/SQLServerJDBCSubstitutions.java
|
{
"start": 2543,
"end": 3049
}
|
class ____ {
}
/**
* This will make sure the ANTLR4 Lexer included in the driver is not reachable; this was mostly
* prevented by not allowing to explicitly set the useFmtOnly connection property, but this code
* path would also get activated on very old SQL Server versions being detected on a connection.
* Since that's not a constant that the compiler can rely on, we need one more substitution.
*/
@TargetClass(className = "com.microsoft.sqlserver.jdbc.SQLServerFMTQuery")
final
|
SQLServerLexerRemove
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/proxy/ProxyReferenceEqualityTest.java
|
{
"start": 1001,
"end": 1637
}
|
class ____ {
@AfterEach
public void tearDown(SessionFactoryScope scope){
scope.getSessionFactory().getSchemaManager().truncate();
}
@Test
public void testProxyFromQuery(SessionFactoryScope scope) {
scope.inTransaction( s -> {
A a = new A();
a.id = 1L;
a.b = new B();
a.b.id = 1L;
s.persist( a );
} );
scope.inTransaction( s -> {
A a = s.find( A.class, 1L );
List<B> result = s.createQuery( "FROM " + B.class.getName() + " b", B.class ).getResultList();
assertEquals( 1, result.size() );
assertTrue( a.b == result.get( 0 ) );
} );
}
@Entity(name = "A")
public static
|
ProxyReferenceEqualityTest
|
java
|
apache__flink
|
flink-filesystems/flink-hadoop-fs/src/main/java/org/apache/flink/runtime/fs/hdfs/HadoopFsFactory.java
|
{
"start": 1867,
"end": 3673
}
|
class ____ implements FileSystemFactory {
private static final Logger LOG = LoggerFactory.getLogger(HadoopFsFactory.class);
/** Flink's configuration object. */
private Configuration flinkConfig;
/** Hadoop's configuration for the file systems. */
private org.apache.hadoop.conf.Configuration hadoopConfig;
@Override
public String getScheme() {
// the hadoop factory creates various schemes
return "*";
}
@Override
public void configure(Configuration config) {
flinkConfig = config;
hadoopConfig = null; // reset the Hadoop Config
}
@Override
public FileSystem create(URI fsUri) throws IOException {
checkNotNull(fsUri, "fsUri");
final String scheme = fsUri.getScheme();
checkArgument(scheme != null, "file system has null scheme");
// from here on, we need to handle errors due to missing optional
// dependency classes
try {
// -- (1) get the loaded Hadoop config (or fall back to one loaded from the classpath)
final org.apache.hadoop.conf.Configuration hadoopConfig;
if (this.hadoopConfig != null) {
hadoopConfig = this.hadoopConfig;
} else if (flinkConfig != null) {
hadoopConfig = HadoopUtils.getHadoopConfiguration(flinkConfig);
this.hadoopConfig = hadoopConfig;
} else {
LOG.warn(
"Hadoop configuration has not been explicitly initialized prior to loading a Hadoop file system."
+ " Using configuration from the classpath.");
hadoopConfig = new org.apache.hadoop.conf.Configuration();
}
// -- (2) get the Hadoop file system
|
HadoopFsFactory
|
java
|
apache__hadoop
|
hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/XAttrCommands.java
|
{
"start": 1376,
"end": 1803
}
|
class ____ extends FsCommand {
private static final String GET_FATTR = "getfattr";
private static final String SET_FATTR = "setfattr";
public static void registerCommands(CommandFactory factory) {
factory.addClass(GetfattrCommand.class, "-" + GET_FATTR);
factory.addClass(SetfattrCommand.class, "-" + SET_FATTR);
}
/**
* Implements the '-getfattr' command for the FsShell.
*/
public static
|
XAttrCommands
|
java
|
apache__hadoop
|
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/OpenFilesIterator.java
|
{
"start": 1692,
"end": 2951
}
|
enum ____ {
ALL_OPEN_FILES((short) 0x01),
BLOCKING_DECOMMISSION((short) 0x02);
private final short mode;
OpenFilesType(short mode) {
this.mode = mode;
}
public short getMode() {
return mode;
}
public static OpenFilesType valueOf(short num) {
for (OpenFilesType type : OpenFilesType.values()) {
if (type.getMode() == num) {
return type;
}
}
return null;
}
}
private final ClientProtocol namenode;
private final Tracer tracer;
private final EnumSet<OpenFilesType> types;
/** List files filtered by given path. */
private String path;
public OpenFilesIterator(ClientProtocol namenode, Tracer tracer,
EnumSet<OpenFilesType> types, String path) {
super(HdfsConstants.GRANDFATHER_INODE_ID);
this.namenode = namenode;
this.tracer = tracer;
this.types = types;
this.path = path;
}
@Override
public BatchedEntries<OpenFileEntry> makeRequest(Long prevId)
throws IOException {
try (TraceScope ignored = tracer.newScope("listOpenFiles")) {
return namenode.listOpenFiles(prevId, types, path);
}
}
@Override
public Long elementToPrevKey(OpenFileEntry entry) {
return entry.getId();
}
}
|
OpenFilesType
|
java
|
apache__kafka
|
storage/src/main/java/org/apache/kafka/storage/internals/checkpoint/PartitionMetadataReadBuffer.java
|
{
"start": 993,
"end": 3265
}
|
class ____ {
private static final Pattern WHITE_SPACES_PATTERN = Pattern.compile(":\\s+");
private final String location;
private final BufferedReader reader;
public PartitionMetadataReadBuffer(
String location,
BufferedReader reader
) {
this.location = location;
this.reader = reader;
}
PartitionMetadata read() throws IOException {
String line = null;
Uuid metadataTopicId;
try {
line = reader.readLine();
String[] versionArr = WHITE_SPACES_PATTERN.split(line);
if (versionArr.length == 2) {
int version = Integer.parseInt(versionArr[1]);
// To ensure downgrade compatibility, check if version is at least 0
if (version >= PartitionMetadataFile.CURRENT_VERSION) {
line = reader.readLine();
String[] topicIdArr = WHITE_SPACES_PATTERN.split(line);
if (topicIdArr.length == 2) {
metadataTopicId = Uuid.fromString(topicIdArr[1]);
if (metadataTopicId.equals(Uuid.ZERO_UUID)) {
throw new IOException("Invalid topic ID in partition metadata file (" + location + ")");
}
return new PartitionMetadata(version, metadataTopicId);
} else {
throw malformedLineException(line);
}
} else {
throw new IOException("Unrecognized version of partition metadata file (" + location + "): " + version);
}
} else {
throw malformedLineException(line);
}
} catch (NumberFormatException e) {
throw malformedLineException(line, e);
}
}
private IOException malformedLineException(String line) {
return new IOException(String.format("Malformed line in partition metadata file [%s]: %s", location, line));
}
private IOException malformedLineException(String line, Exception e) {
return new IOException(String.format("Malformed line in partition metadata file [%s]: %s", location, line), e);
}
}
|
PartitionMetadataReadBuffer
|
java
|
hibernate__hibernate-orm
|
hibernate-core/src/test/java/org/hibernate/orm/test/schemavalidation/EnumValidationTest.java
|
{
"start": 2272,
"end": 4685
}
|
class ____ implements ServiceRegistryProducer {
static List<JdbcMetadataAccessStrategy> extractorStrategies() {
return List.of(
JdbcMetadataAccessStrategy.GROUPED,
JdbcMetadataAccessStrategy.INDIVIDUALLY
);
}
private final JdbcMetadataAccessStrategy jdbcMetadataExtractorStrategy;
public EnumValidationTest(JdbcMetadataAccessStrategy jdbcMetadataExtractorStrategy) {
this.jdbcMetadataExtractorStrategy = jdbcMetadataExtractorStrategy;
}
@Override
public StandardServiceRegistry produceServiceRegistry(StandardServiceRegistryBuilder builder) {
return builder
.applySetting( HBM2DDL_JDBC_METADATA_EXTRACTOR_STRATEGY, jdbcMetadataExtractorStrategy )
.build();
}
@BeforeEach
void setUp(DomainModelScope modelScope) {
final var model = modelScope.getDomainModel();
model.orderColumns( false );
model.validate();
dropSchema( model );
createSchema( model );
}
private void createSchema(MetadataImplementor model) {
new SchemaExport().create( EnumSet.of( TargetType.DATABASE ), model );
}
@AfterEach
void tearDown(DomainModelScope modelScope) {
final var model = modelScope.getDomainModel();
model.orderColumns( false );
model.validate();
dropSchema( model );
}
private void dropSchema(MetadataImplementor model) {
new SchemaExport().drop( EnumSet.of( TargetType.DATABASE ), model );
}
@Test
public void testValidation(ServiceRegistryScope registryScope) {
final var newModel = (MetadataImplementor) new MetadataSources( registryScope.getRegistry() )
.addAnnotatedClasses( TestEntity.class )
.buildMetadata();
newModel.orderColumns( false );
newModel.validate();
final var tool = registryScope.getRegistry().requireService( SchemaManagementTool.class );
final var execOptions = new ExecutionOptions() {
final Map<String, Object> settings = registryScope.getRegistry().requireService( ConfigurationService.class ).getSettings();
@Override
public Map<String, Object> getConfigurationValues() {
return settings;
}
@Override
public boolean shouldManageNamespaces() {
return false;
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
};
tool.getSchemaValidator( null ).doValidation( newModel, execOptions, ContributableMatcher.ALL );
}
@SuppressWarnings("unused")
@Entity(name = "TestEntity")
public static
|
EnumValidationTest
|
java
|
micronaut-projects__micronaut-core
|
http-client-tck/src/main/java/io/micronaut/http/client/tck/tests/StatusTest.java
|
{
"start": 4551,
"end": 4704
}
|
class ____ extends RuntimeException {
}
@Produces
@Singleton
@Requires(property = "spec.name", value = SPEC_NAME)
static
|
TeapotException
|
java
|
mapstruct__mapstruct
|
processor/src/test/java/org/mapstruct/ap/test/decorator/jsr330/PersonMapperDecorator.java
|
{
"start": 367,
"end": 777
}
|
class ____ implements PersonMapper {
@Inject
@Named("org.mapstruct.ap.test.decorator.jsr330.PersonMapperImpl_")
private PersonMapper delegate;
@Override
public PersonDto personToPersonDto(Person person) {
PersonDto dto = delegate.personToPersonDto( person );
dto.setName( person.getFirstName() + " " + person.getLastName() );
return dto;
}
}
|
PersonMapperDecorator
|
java
|
netty__netty
|
handler/src/main/java/io/netty/handler/ssl/CipherSuiteConverter.java
|
{
"start": 1336,
"end": 3434
}
|
class ____ {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(CipherSuiteConverter.class);
/**
* A_B_WITH_C_D, where:
*
* A - TLS or SSL (protocol)
* B - handshake algorithm (key exchange and authentication algorithms to be precise)
* C - bulk cipher
* D - HMAC algorithm
*
* This regular expression assumes that:
*
* 1) A is always TLS or SSL, and
* 2) D is always a single word.
*/
private static final Pattern JAVA_CIPHERSUITE_PATTERN =
Pattern.compile("^(?:TLS|SSL)_((?:(?!_WITH_).)+)_WITH_(.*)_(.*)$");
/**
* A-B-C, where:
*
* A - handshake algorithm (key exchange and authentication algorithms to be precise)
* B - bulk cipher
* C - HMAC algorithm
*
* This regular expression assumes that:
*
* 1) A has some deterministic pattern as shown below, and
* 2) C is always a single word
*/
private static final Pattern OPENSSL_CIPHERSUITE_PATTERN =
// Be very careful not to break the indentation while editing.
Pattern.compile(
"^(?:(" + // BEGIN handshake algorithm
"(?:(?:EXP-)?" +
"(?:" +
"(?:DHE|EDH|ECDH|ECDHE|SRP|RSA)-(?:DSS|RSA|ECDSA|PSK)|" +
"(?:ADH|AECDH|KRB5|PSK|SRP)" +
')' +
")|" +
"EXP" +
")-)?" + // END handshake algorithm
"(.*)-(.*)$");
private static final Pattern JAVA_AES_CBC_PATTERN = Pattern.compile("^(AES)_([0-9]+)_CBC$");
private static final Pattern JAVA_AES_PATTERN = Pattern.compile("^(AES)_([0-9]+)_(.*)$");
private static final Pattern OPENSSL_AES_CBC_PATTERN = Pattern.compile("^(AES)([0-9]+)$");
private static final Pattern OPENSSL_AES_PATTERN = Pattern.compile("^(AES)([0-9]+)-(.*)$");
/**
* Used to store nullable values in a CHM
*/
private static final
|
CipherSuiteConverter
|
java
|
grpc__grpc-java
|
api/src/main/java/io/grpc/ClientStreamTracer.java
|
{
"start": 923,
"end": 3520
}
|
class ____ extends StreamTracer {
/**
* Indicates how long the call was delayed, in nanoseconds, due to waiting for name resolution
* result. If the call option is not set, the call did not experience name resolution delay.
*/
public static final CallOptions.Key<Long> NAME_RESOLUTION_DELAYED =
CallOptions.Key.create("io.grpc.ClientStreamTracer.NAME_RESOLUTION_DELAYED");
/**
* The stream is being created on a ready transport.
*
* @param headers the mutable initial metadata. Modifications to it will be sent to the socket but
* not be seen by client interceptors and the application.
*
* @since 1.40.0
*/
public void streamCreated(@Grpc.TransportAttr Attributes transportAttrs, Metadata headers) {
}
/**
* Name resolution is completed and the connection starts getting established. This method is only
* invoked on the streams that encounter such delay.
*
* </p>gRPC buffers the client call if the remote address and configurations, e.g. timeouts and
* retry policy, are not ready. Asynchronously gRPC internally does the name resolution to get
* this information. The streams that are processed immediately on ready transports by the time
* the RPC comes do not go through the pending process, thus this callback will not be invoked.
*/
public void createPendingStream() {
}
/**
* Headers has been sent to the socket.
*/
public void outboundHeaders() {
}
/**
* Headers has been received from the server.
*/
public void inboundHeaders() {
}
/**
* Headers has been received from the server. This method does not pass ownership to {@code
* headers}, so implementations must not access the metadata after returning. Modifications to the
* metadata within this method will be seen by interceptors and the application.
*
* @param headers the received header metadata
*/
public void inboundHeaders(Metadata headers) {
inboundHeaders();
}
/**
* Trailing metadata has been received from the server. This method does not pass ownership to
* {@code trailers}, so implementations must not access the metadata after returning.
* Modifications to the metadata within this method will be seen by interceptors and the
* application.
*
* @param trailers the received trailing metadata
* @since 1.17.0
*/
public void inboundTrailers(Metadata trailers) {
}
/**
* Information providing context to the call became available.
*/
@Internal
public void addOptionalLabel(String key, String value) {
}
/**
* Factory
|
ClientStreamTracer
|
java
|
google__dagger
|
hilt-core/main/java/dagger/hilt/internal/GeneratedComponentManager.java
|
{
"start": 642,
"end": 806
}
|
interface ____ provides a managed generated component. */
// TODO(bcorso): Consider either removing type parameter or using actual component type in usages.
public
|
that
|
java
|
apache__hadoop
|
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeDataTarget.java
|
{
"start": 1093,
"end": 1456
}
|
interface ____ {
/**
* Sends a signal to indicate that the data has been stored in output buffer
*/
public void sendData() throws IOException;
/**
* Sends a signal that there is no more data
*/
public void finishSendData() throws IOException;
/**
* Gets the output buffer.
*/
public OutputBuffer getOutputBuffer();
}
|
NativeDataTarget
|
java
|
apache__flink
|
flink-runtime/src/main/java/org/apache/flink/runtime/scheduler/adaptivebatch/JobGraphUpdateListener.java
|
{
"start": 1122,
"end": 1644
}
|
interface ____ {
/**
* Invoked when new {@link JobVertex} instances are added to the JobGraph of a specific job.
* This allows interested components to react to the addition of new vertices to the job
* topology.
*
* @param newVertices A list of newly added JobVertex instances.
* @param pendingOperatorsCount The number of pending operators.
*/
void onNewJobVerticesAdded(List<JobVertex> newVertices, int pendingOperatorsCount)
throws Exception;
}
|
JobGraphUpdateListener
|
java
|
junit-team__junit5
|
junit-jupiter-api/src/main/java/org/junit/jupiter/api/condition/DisabledInNativeImage.java
|
{
"start": 891,
"end": 1437
}
|
class ____
* be disabled within a native image.
*
* <p>This annotation is not {@link java.lang.annotation.Inherited @Inherited}.
* Consequently, if you wish to apply the same semantics to a subclass, this
* annotation must be redeclared on the subclass.
*
* <p>If a test method is disabled via this annotation, that prevents execution
* of the test method and method-level lifecycle callbacks such as
* {@code @BeforeEach} methods, {@code @AfterEach} methods, and corresponding
* extension APIs. However, that does not prevent the test
|
will
|
java
|
hibernate__hibernate-orm
|
hibernate-envers/src/main/java/org/hibernate/envers/internal/tools/MappingTools.java
|
{
"start": 406,
"end": 1870
}
|
class ____ {
/**
* @param componentName Name of the component, that is, name of the property in the entity that references the component.
*
* @return A prefix for properties in the given component.
*/
public static String createComponentPrefix(String componentName) {
return componentName + "_";
}
/**
* @param referencePropertyName The name of the property that holds the relation to the entity.
*
* @return A prefix which should be used to prefix an id mapper for the related entity.
*/
public static String createToOneRelationPrefix(String referencePropertyName) {
return referencePropertyName + "_";
}
public static String getReferencedEntityName(Value value) {
if ( value instanceof ToOne ) {
return ( (ToOne) value ).getReferencedEntityName();
}
else if ( value instanceof OneToMany ) {
return ( (OneToMany) value ).getReferencedEntityName();
}
else if ( value instanceof Collection ) {
return getReferencedEntityName( ( (Collection) value ).getElement() );
}
return null;
}
/**
* @param value Persistent property.
* @return {@code false} if lack of associated entity shall raise an exception, {@code true} otherwise.
*/
public static boolean ignoreNotFound(Value value) {
if ( value instanceof ManyToOne ) {
return ( (ManyToOne) value ).isIgnoreNotFound();
}
else if ( value instanceof OneToMany ) {
return ( (OneToMany) value ).isIgnoreNotFound();
}
return false;
}
}
|
MappingTools
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.