language stringclasses 1
value | repo stringclasses 60
values | path stringlengths 22 294 | class_span dict | source stringlengths 13 1.16M | target stringlengths 1 113 |
|---|---|---|---|---|---|
java | apache__camel | dsl/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/HazelcastAtomicnumberEndpointBuilderFactory.java | {
"start": 1624,
"end": 5795
} | interface ____
extends
EndpointProducerBuilder {
default AdvancedHazelcastAtomicnumberEndpointBuilder advanced() {
return (AdvancedHazelcastAtomicnumberEndpointBuilder) this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option is a:
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: producer
*
* @param defaultOperation the value to set
* @return the dsl builder
*/
default HazelcastAtomicnumberEndpointBuilder defaultOperation(org.apache.camel.component.hazelcast.HazelcastOperation defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option will be converted to a
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: producer
*
* @param defaultOperation the value to set
* @return the dsl builder
*/
default HazelcastAtomicnumberEndpointBuilder defaultOperation(String defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* Hazelcast configuration file.
*
* This option can also be loaded from an existing file, by prefixing
* with file: or classpath: followed by the location of the file.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param hazelcastConfigUri the value to set
* @return the dsl builder
*/
default HazelcastAtomicnumberEndpointBuilder hazelcastConfigUri(String hazelcastConfigUri) {
doSetProperty("hazelcastConfigUri", hazelcastConfigUri);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option is a: <code>com.hazelcast.core.HazelcastInstance</code>
* type.
*
* Group: producer
*
* @param hazelcastInstance the value to set
* @return the dsl builder
*/
default HazelcastAtomicnumberEndpointBuilder hazelcastInstance(com.hazelcast.core.HazelcastInstance hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option will be converted to a
* <code>com.hazelcast.core.HazelcastInstance</code> type.
*
* Group: producer
*
* @param hazelcastInstance the value to set
* @return the dsl builder
*/
default HazelcastAtomicnumberEndpointBuilder hazelcastInstance(String hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference name which can be used for hazelcast
* endpoint. If you don't specify the instance reference, camel use the
* default hazelcast instance from the camel-hazelcast instance.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: producer
*
* @param hazelcastInstanceName the value to set
* @return the dsl builder
*/
default HazelcastAtomicnumberEndpointBuilder hazelcastInstanceName(String hazelcastInstanceName) {
doSetProperty("hazelcastInstanceName", hazelcastInstanceName);
return this;
}
}
/**
* Advanced builder for endpoint for the Hazelcast Atomic Number component.
*/
public | HazelcastAtomicnumberEndpointBuilder |
java | quarkusio__quarkus | independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/contexts/request/propagation/SuperController.java | {
"start": 314,
"end": 734
} | class ____ {
static final AtomicBoolean DESTROYED = new AtomicBoolean();
private String id;
@Inject
SuperButton button;
@PostConstruct
void init() {
id = UUID.randomUUID().toString();
}
@PreDestroy
void destroy() {
DESTROYED.set(true);
}
String getId() {
return id;
}
SuperButton getButton() {
return button;
}
}
| SuperController |
java | apache__camel | test-infra/camel-test-infra-azure-common/src/main/java/org/apache/camel/test/infra/azure/common/services/AzureStorageInfraService.java | {
"start": 1276,
"end": 3193
} | class ____ implements AzureInfraService, ContainerService<AzuriteContainer> {
private static final Logger LOG = LoggerFactory.getLogger(AzureStorageInfraService.class);
private final AzuriteContainer container;
public AzureStorageInfraService() {
this(LocalPropertyResolver.getProperty(AzureStorageInfraService.class, AzureProperties.AZURE_CONTAINER));
}
public AzureStorageInfraService(String imageName) {
this.container = initContainer(imageName);
String name = ContainerEnvironmentUtil.containerName(this.getClass());
if (name != null) {
container.withCreateContainerCmdModifier(cmd -> cmd.withName(name));
}
}
public AzureStorageInfraService(AzuriteContainer container) {
this.container = container;
}
protected AzuriteContainer initContainer(String imageName) {
return new AzuriteContainer(imageName, ContainerEnvironmentUtil.isFixedPort(this.getClass().getSuperclass()));
}
public AzuriteContainer getContainer() {
return container;
}
public void registerProperties() {
System.setProperty(AzureConfigs.ACCOUNT_NAME, container.azureCredentials().accountName());
System.setProperty(AzureConfigs.ACCOUNT_KEY, container.azureCredentials().accountKey());
System.setProperty(AzureConfigs.HOST, container.getHost());
}
@Override
public void initialize() {
container.start();
LOG.info("Azurite local blob service running at address {}:{}", container.getHost(),
container.getMappedPort(AzureServices.BLOB_SERVICE));
LOG.info("Azurite local queue service running at address {}:{}", container.getHost(),
container.getMappedPort(AzureServices.QUEUE_SERVICE));
registerProperties();
}
@Override
public void shutdown() {
container.stop();
}
}
| AzureStorageInfraService |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/tests/KeyTester.java | {
"start": 3086,
"end": 3721
} | class ____ {
private final MessageDigest digest;
Sha256() {
try {
digest = MessageDigest.getInstance("SHA-256");
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
private byte[] getDigest(Key key) {
try {
key.updateDiskCacheKey(digest);
return digest.digest();
} finally {
digest.reset();
}
}
String getStringDigest(Key key) {
return com.bumptech.glide.util.Util.sha256BytesToHex(getDigest(key));
}
}
/** Tests equals, hashcode and digest methods of {@link Key}s. */
private static final | Sha256 |
java | apache__camel | core/camel-api/src/main/java/org/apache/camel/spi/PackageScanClassResolver.java | {
"start": 2979,
"end": 4381
} | interface ____ find subclasses or implementations of
* @param packageNames one or more package names to scan (including subpackages) for classes
* @return the classes found, returns an empty set if none found
*/
Set<Class<?>> findImplementations(Class<?> parent, String... packageNames);
/**
* Attempts to discover classes filter by the provided filter
*
* @param filter filter to filter desired classes.
* @param packageNames one or more package names to scan (including subpackages) for classes
* @return the classes found, returns an empty set if none found
*/
Set<Class<?>> findByFilter(PackageScanFilter filter, String... packageNames);
/**
* Add a filter that will be applied to all scan operations
*
* @param filter filter to filter desired classes in all scan operations
*/
void addFilter(PackageScanFilter filter);
/**
* Removes the filter
*
* @param filter filter to filter desired classes in all scan operations
*/
void removeFilter(PackageScanFilter filter);
/**
* To specify a set of accepted schemas to use for loading resources as URL connections (besides http and https
* schemas)
*/
void setAcceptableSchemes(String schemes);
/**
* Clears and frees the internal cache.
*/
void clearCache();
}
| to |
java | google__gson | gson/src/test/java/com/google/gson/internal/ConstructorConstructorTest.java | {
"start": 7993,
"end": 8203
} | class ____<K, V> extends TreeMap<K, V> {
// Removes default no-args constructor
@SuppressWarnings("unused")
CustomSortedMap(Void v) {}
}
@SuppressWarnings("serial")
private static | CustomSortedMap |
java | spring-projects__spring-framework | spring-beans/src/main/java/org/springframework/beans/DirectFieldAccessor.java | {
"start": 1854,
"end": 3643
} | class ____ extends AbstractNestablePropertyAccessor {
private final Map<String, FieldPropertyHandler> fieldMap = new HashMap<>();
/**
* Create a new DirectFieldAccessor for the given object.
* @param object the object wrapped by this DirectFieldAccessor
*/
public DirectFieldAccessor(Object object) {
super(object);
}
/**
* Create a new DirectFieldAccessor for the given object,
* registering a nested path that the object is in.
* @param object the object wrapped by this DirectFieldAccessor
* @param nestedPath the nested path of the object
* @param parent the containing DirectFieldAccessor (must not be {@code null})
*/
protected DirectFieldAccessor(Object object, String nestedPath, DirectFieldAccessor parent) {
super(object, nestedPath, parent);
}
@Override
protected @Nullable FieldPropertyHandler getLocalPropertyHandler(String propertyName) {
FieldPropertyHandler propertyHandler = this.fieldMap.get(propertyName);
if (propertyHandler == null) {
Field field = ReflectionUtils.findField(getWrappedClass(), propertyName);
if (field != null) {
propertyHandler = new FieldPropertyHandler(field);
this.fieldMap.put(propertyName, propertyHandler);
}
}
return propertyHandler;
}
@Override
protected DirectFieldAccessor newNestedPropertyAccessor(Object object, String nestedPath) {
return new DirectFieldAccessor(object, nestedPath, this);
}
@Override
protected NotWritablePropertyException createNotWritablePropertyException(String propertyName) {
PropertyMatches matches = PropertyMatches.forField(propertyName, getRootClass());
throw new NotWritablePropertyException(getRootClass(), getNestedPath() + propertyName,
matches.buildErrorMessage(), matches.getPossibleMatches());
}
private | DirectFieldAccessor |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/xml/MixedCollectionBean.java | {
"start": 870,
"end": 1066
} | class ____ {
private Collection<?> jumble;
public void setJumble(Collection<?> jumble) {
this.jumble = jumble;
}
public Collection<?> getJumble() {
return jumble;
}
}
| MixedCollectionBean |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/taskexecutor/ThreadInfoSampleService.java | {
"start": 1595,
"end": 5871
} | class ____ implements Closeable {
private final ScheduledExecutorService scheduledExecutor;
ThreadInfoSampleService(final ScheduledExecutorService scheduledExecutor) {
this.scheduledExecutor =
checkNotNull(scheduledExecutor, "scheduledExecutor must not be null");
}
/**
* Returns a future that completes with a given number of thread info samples for a set of task
* threads.
*
* @param threads the map key is thread id, the map value is the ExecutionAttemptID.
* @param requestParams Parameters of the sampling request.
* @return A future containing the stack trace samples.
*/
public CompletableFuture<Map<ExecutionAttemptID, Collection<ThreadInfoSample>>>
requestThreadInfoSamples(
Map<Long, ExecutionAttemptID> threads,
final ThreadInfoSamplesRequest requestParams) {
checkNotNull(threads, "threads must not be null");
checkNotNull(requestParams, "requestParams must not be null");
CompletableFuture<Map<ExecutionAttemptID, Collection<ThreadInfoSample>>> resultFuture =
new CompletableFuture<>();
scheduledExecutor.execute(
() ->
requestThreadInfoSamples(
threads,
requestParams.getNumSamples(),
requestParams.getDelayBetweenSamples(),
requestParams.getMaxStackTraceDepth(),
CollectionUtil.newHashMapWithExpectedSize(threads.size()),
resultFuture));
return resultFuture;
}
private void requestThreadInfoSamples(
Map<Long, ExecutionAttemptID> threads,
final int numSamples,
final Duration delayBetweenSamples,
final int maxStackTraceDepth,
final Map<ExecutionAttemptID, Collection<ThreadInfoSample>> currentTraces,
final CompletableFuture<Map<ExecutionAttemptID, Collection<ThreadInfoSample>>>
resultFuture) {
final Map<Long, ThreadInfoSample> threadInfoSample =
JvmUtils.createThreadInfoSample(threads.keySet(), maxStackTraceDepth);
if (!threadInfoSample.isEmpty()) {
for (Map.Entry<Long, ThreadInfoSample> entry : threadInfoSample.entrySet()) {
ExecutionAttemptID executionAttemptID = threads.get(entry.getKey());
Collection<ThreadInfoSample> threadInfoSamples =
currentTraces.computeIfAbsent(executionAttemptID, key -> new ArrayList<>());
threadInfoSamples.add(entry.getValue());
}
if (numSamples > 1) {
scheduledExecutor.schedule(
() ->
requestThreadInfoSamples(
threads,
numSamples - 1,
delayBetweenSamples,
maxStackTraceDepth,
currentTraces,
resultFuture),
delayBetweenSamples.toMillis(),
TimeUnit.MILLISECONDS);
} else {
resultFuture.complete(currentTraces);
}
} else if (!currentTraces.isEmpty()) {
// Requested tasks are not running anymore, completing with whatever was collected by
// now.
resultFuture.complete(currentTraces);
} else {
final String ids =
threads.values().stream()
.map(e -> e == null ? "unknown" : e.toString())
.collect(Collectors.joining(", ", "[", "]"));
resultFuture.completeExceptionally(
new IllegalStateException(
String.format(
"Cannot sample tasks %s. The tasks are not running.", ids)));
}
}
@Override
public void close() throws IOException {
scheduledExecutor.shutdownNow();
}
}
| ThreadInfoSampleService |
java | quarkusio__quarkus | extensions/arc/deployment/src/test/java/io/quarkus/arc/test/unused/UnusedExclusionTest.java | {
"start": 5345,
"end": 5464
} | class ____ {
@Produces
public Gama ping() {
return new Gama();
}
}
}
| GamaProducer |
java | google__error-prone | core/src/main/java/com/google/errorprone/bugpatterns/ModifySourceCollectionInStream.java | {
"start": 1943,
"end": 6711
} | class ____ extends BugChecker
implements MemberReferenceTreeMatcher, MethodInvocationTreeMatcher {
private static final ImmutableList<String> STATE_MUTATION_METHOD_NAMES =
ImmutableList.of("add", "addAll", "clear", "remove", "removeAll", "retainAll");
private static final ImmutableList<String> STREAM_CREATION_METHOD_NAMES =
ImmutableList.of("stream", "parallelStream");
private static final Matcher<ExpressionTree> COLLECTION_TO_STREAM_MATCHER =
instanceMethod()
.onDescendantOf("java.util.Collection")
.namedAnyOf(STREAM_CREATION_METHOD_NAMES);
/** Covers common stream structures, including Stream, IntStream, LongStream, DoubleStream. */
private static final Matcher<ExpressionTree> STREAM_API_INVOCATION_MATCHER =
instanceMethod().onDescendantOfAny("java.util.stream.BaseStream");
private static final Matcher<ExpressionTree> MUTATION_METHOD_MATCHER =
instanceMethod()
.onDescendantOf("java.util.Collection")
.namedAnyOf(STATE_MUTATION_METHOD_NAMES);
@Override
public Description matchMemberReference(MemberReferenceTree tree, VisitorState state) {
if (!isSubtypeOf("java.util.Collection").matches(tree.getQualifierExpression(), state)
|| STATE_MUTATION_METHOD_NAMES.stream()
.noneMatch(methodName -> methodName.contentEquals(tree.getName()))) {
return Description.NO_MATCH;
}
MethodInvocationTree methodInvocationTree = state.findEnclosing(MethodInvocationTree.class);
return isStreamApiInvocationOnStreamSource(
methodInvocationTree, ASTHelpers.getReceiver(tree), state)
? describeMatch(tree)
: Description.NO_MATCH;
}
@Override
public Description matchMethodInvocation(MethodInvocationTree tree, VisitorState state) {
if (!MUTATION_METHOD_MATCHER.matches(tree, state)) {
return Description.NO_MATCH;
}
// The enclosing method invocation of the method reference doesn't dereferenced an expression.
// e.g. calling other methods defined in the same class.
ExpressionTree mutatedReceiver = ASTHelpers.getReceiver(tree);
if (mutatedReceiver == null) {
return Description.NO_MATCH;
}
TreePath pathToLambdaExpression = state.findPathToEnclosing(LambdaExpressionTree.class);
// Case for a method reference not enclosed in a lambda expression,
// e.g. BiConsumer<ArrayList, Integer> biConsumer = ArrayList::add;
if (pathToLambdaExpression == null) {
return Description.NO_MATCH;
}
// Starting from the immediate enclosing method invocation of the lambda expression.
Tree parentNode = pathToLambdaExpression.getParentPath().getLeaf();
if (!(parentNode instanceof ExpressionTree expressionTree)) {
return Description.NO_MATCH;
}
return isStreamApiInvocationOnStreamSource(expressionTree, mutatedReceiver, state)
? describeMatch(tree)
: Description.NO_MATCH;
}
/**
* Returns true if and only if the given MethodInvocationTree
*
* <p>1) is a Stream API invocation, .e.g. map, filter, collect 2) the source of the stream has
* the same expression representation as streamSourceExpression.
*/
private static boolean isStreamApiInvocationOnStreamSource(
@Nullable ExpressionTree rootTree,
ExpressionTree streamSourceExpression,
VisitorState visitorState) {
ExpressionTree expressionTree = rootTree;
while (STREAM_API_INVOCATION_MATCHER.matches(expressionTree, visitorState)) {
expressionTree = ASTHelpers.getReceiver(expressionTree);
}
if (!COLLECTION_TO_STREAM_MATCHER.matches(expressionTree, visitorState)) {
return false;
}
return isSameExpression(ASTHelpers.getReceiver(expressionTree), streamSourceExpression);
}
// TODO(b/125767228): Consider a rigorous implementation to check tree structure equivalence.
@SuppressWarnings("TreeToString") // Indented to ignore whitespace, comments, and source position.
private static boolean isSameExpression(ExpressionTree leftTree, ExpressionTree rightTree) {
// The left tree and right tree must have the same symbol resolution.
// This ensures the symbol kind on field, parameter or local var.
if (ASTHelpers.getSymbol(leftTree) != ASTHelpers.getSymbol(rightTree)) {
return false;
}
String leftTreeTextRepr = stripPrefixIfPresent(leftTree.toString(), "this.");
String rightTreeTextRepr = stripPrefixIfPresent(rightTree.toString(), "this.");
return leftTreeTextRepr.contentEquals(rightTreeTextRepr);
}
private static String stripPrefixIfPresent(String originalText, String prefix) {
return originalText.startsWith(prefix) ? originalText.substring(prefix.length()) : originalText;
}
}
| ModifySourceCollectionInStream |
java | elastic__elasticsearch | build-tools/src/main/java/org/elasticsearch/gradle/plugin/StablePluginBuildPlugin.java | {
"start": 988,
"end": 2911
} | class ____ implements Plugin<Project> {
@Override
public void apply(Project project) {
project.getPluginManager().apply(BasePluginBuildPlugin.class);
project.getTasks().withType(GeneratePluginPropertiesTask.class).named("pluginProperties").configure(task -> {
task.getIsStable().set(true);
Provider<RegularFile> file = project.getLayout()
.getBuildDirectory()
.file("generated-descriptor/" + GeneratePluginPropertiesTask.STABLE_PROPERTIES_FILENAME);
task.getOutputFile().set(file);
});
final var pluginNamedComponents = project.getTasks().register("pluginNamedComponents", GenerateNamedComponentsTask.class, t -> {
SourceSet mainSourceSet = GradleUtils.getJavaSourceSets(project).findByName(SourceSet.MAIN_SOURCE_SET_NAME);
FileCollection dependencyJars = project.getConfigurations().getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME);
FileCollection compiledPluginClasses = mainSourceSet.getOutput().getClassesDirs();
FileCollection classPath = dependencyJars.plus(compiledPluginClasses);
t.setClasspath(classPath);
});
Configuration pluginScannerConfig = project.getConfigurations().create("pluginScannerConfig");
DependencyHandler dependencyHandler = project.getDependencies();
pluginScannerConfig.defaultDependencies(
deps -> deps.add(
dependencyHandler.create("org.elasticsearch:elasticsearch-plugin-scanner:" + VersionProperties.getElasticsearch())
)
);
pluginNamedComponents.configure(t -> { t.setPluginScannerClasspath(pluginScannerConfig); });
final var pluginExtension = project.getExtensions().getByType(PluginPropertiesExtension.class);
pluginExtension.getBundleSpec().from(pluginNamedComponents);
}
}
| StablePluginBuildPlugin |
java | apache__hadoop | hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Task20LineHistoryEventEmitter.java | {
"start": 2039,
"end": 2928
} | class ____ extends SingleEventEmitter {
HistoryEvent maybeEmitEvent(ParsedLine line, String taskIDName,
HistoryEventEmitter thatg) {
if (taskIDName == null) {
return null;
}
TaskID taskID = TaskID.forName(taskIDName);
String taskType = line.get("TASK_TYPE");
String startTime = line.get("START_TIME");
String splits = line.get("SPLITS");
if (startTime != null && taskType != null) {
Task20LineHistoryEventEmitter that =
(Task20LineHistoryEventEmitter) thatg;
that.originalStartTime = Long.parseLong(startTime);
that.originalTaskType =
Version20LogInterfaceUtils.get20TaskType(taskType);
return new TaskStartedEvent(taskID, that.originalStartTime,
that.originalTaskType, splits);
}
return null;
}
}
static private | TaskStartedEventEmitter |
java | spring-projects__spring-framework | spring-context/src/test/java/org/springframework/context/annotation/ImportAwareTests.java | {
"start": 7608,
"end": 8024
} | class ____ implements ImportAware {
AnnotationMetadata importMetadata;
@Override
public void setImportMetadata(AnnotationMetadata importMetadata) {
this.importMetadata = importMetadata;
}
@Bean
public BPP importedConfigBean() {
return new BPP();
}
@Bean
public AsyncAnnotationBeanPostProcessor asyncBPP() {
return new AsyncAnnotationBeanPostProcessor();
}
}
static | ImportedConfigLite |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/search/suggest/SortBy.java | {
"start": 822,
"end": 1507
} | enum ____ implements Writeable {
/** Sort should first be based on score, then document frequency and then the term itself. */
SCORE,
/** Sort should first be based on document frequency, then score and then the term itself. */
FREQUENCY;
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeEnum(this);
}
public static SortBy readFromStream(final StreamInput in) throws IOException {
return in.readEnum(SortBy.class);
}
public static SortBy resolve(final String str) {
Objects.requireNonNull(str, "Input string is null");
return valueOf(str.toUpperCase(Locale.ROOT));
}
}
| SortBy |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/injection/guice/multibindings/RealElement.java | {
"start": 802,
"end": 1549
} | class ____ implements Element {
private static final AtomicInteger nextUniqueId = new AtomicInteger(1);
private final int uniqueId;
RealElement() {
uniqueId = nextUniqueId.getAndIncrement();
}
@Override
public int uniqueId() {
return uniqueId;
}
@Override
public Class<? extends Annotation> annotationType() {
return Element.class;
}
@Override
public String toString() {
return "@" + Element.class.getName() + "(uniqueId=" + uniqueId + ")";
}
@Override
public boolean equals(Object o) {
return ((Element) o).uniqueId() == uniqueId();
}
@Override
public int hashCode() {
return Integer.hashCode(uniqueId);
}
}
| RealElement |
java | spring-projects__spring-boot | module/spring-boot-validation/src/test/java/org/springframework/boot/validation/autoconfigure/ValidatorAdapterTests.java | {
"start": 6504,
"end": 6781
} | class ____ {
private final LocalValidatorFactoryBean validator = mock(LocalValidatorFactoryBean.class);
@Bean
ValidatorAdapter wrapper() {
return new ValidatorAdapter(this.validator, false);
}
}
@Configuration(proxyBeanMethods = false)
static | NonManagedBeanConfig |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/GeoUnit.java | {
"start": 686,
"end": 1155
} | enum ____ {
METERS {
@Override
public String toString() {
return "m";
}
},
KILOMETERS {
@Override
public String toString() {
return "km";
}
},
MILES {
@Override
public String toString() {
return "mi";
}
},
FEET {
@Override
public String toString() {
return "ft";
}
}
}
| GeoUnit |
java | hibernate__hibernate-orm | hibernate-core/src/main/java/org/hibernate/context/internal/ThreadLocalSessionContext.java | {
"start": 2746,
"end": 8231
} | class ____ extends AbstractCurrentSessionContext {
private static final Class<?>[] SESSION_PROXY_INTERFACES = new Class<?>[] {
Session.class,
SessionImplementor.class,
EventSource.class,
LobCreationContext.class
};
/**
* A ThreadLocal maintaining current sessions for the given execution thread.
* The actual ThreadLocal variable is a java.util.Map to account for
* the possibility for multiple SessionFactory instances being used during execution
* of the given thread.
*/
private static final ThreadLocal<Map<SessionFactory,Session>> CONTEXT_TL = ThreadLocal.withInitial( HashMap::new );
/**
* Constructs a ThreadLocal
*
* @param factory The factory this context will service
*/
public ThreadLocalSessionContext(SessionFactoryImplementor factory) {
super( factory );
}
@Override
public final Session currentSession() throws HibernateException {
Session current = existingSession( factory() );
if ( current == null ) {
current = buildOrObtainSession();
// register a cleanup sync
current.getTransaction().registerSynchronization( buildCleanupSynch() );
// wrap the session in the transaction-protection proxy
if ( needsWrapping( current ) ) {
current = wrap( current );
}
// then bind it
doBind( current, factory() );
}
else {
validateExistingSession( current );
}
return current;
}
private boolean needsWrapping(Session session) {
// try to make sure we don't wrap an already wrapped session
return !Proxy.isProxyClass( session.getClass() )
|| !( Proxy.getInvocationHandler( session ) instanceof TransactionProtectionWrapper );
}
/**
* Getter for property 'factory'.
*
* @return Value for property 'factory'.
*/
protected SessionFactoryImplementor getFactory() {
return factory();
}
/**
* Strictly provided for sub-classing purposes; specifically to allow long-session
* support.
* <p>
* This implementation always just opens a new session.
*
* @return the built or (re)obtained session.
*/
protected Session buildOrObtainSession() {
return baseSessionBuilder()
.autoClose( isAutoCloseEnabled() )
.connectionHandlingMode( getConnectionHandlingMode() )
.flushMode( isAutoFlushEnabled() ? FlushMode.AUTO : FlushMode.MANUAL )
.openSession();
}
protected CleanupSync buildCleanupSynch() {
return new CleanupSync( factory() );
}
/**
* Mainly for subclass usage. This impl always returns true.
*
* @return Whether the session should be closed by transaction completion.
*/
protected boolean isAutoCloseEnabled() {
return true;
}
/**
* Mainly for subclass usage. This impl always returns true.
*
* @return Whether the session should be flushed prior to transaction completion.
*/
protected boolean isAutoFlushEnabled() {
return true;
}
/**
* Mainly for subclass usage. This impl always returns after_transaction.
*
* @return The connection release mode for any built sessions.
*/
protected PhysicalConnectionHandlingMode getConnectionHandlingMode() {
return factory().getSessionFactoryOptions().getPhysicalConnectionHandlingMode();
}
protected Session wrap(Session session) {
final var wrapper = new TransactionProtectionWrapper( session );
final var wrapped = (Session) Proxy.newProxyInstance(
Session.class.getClassLoader(),
SESSION_PROXY_INTERFACES,
wrapper
);
// yuck! need this for proper serialization/deserialization handling...
wrapper.setWrapped( wrapped );
return wrapped;
}
/**
* Associates the given session with the current thread of execution.
*
* @param session The session to bind.
*/
public static void bind(Session session) {
doBind( session, session.getSessionFactory() );
}
private static void terminateOrphanedSession(Session orphan) {
if ( orphan != null ) {
CURRENT_SESSION_LOGGER.alreadySessionBound();
try ( orphan ) {
final var orphanTransaction = orphan.getTransaction();
if ( orphanTransaction != null && orphanTransaction.getStatus() == TransactionStatus.ACTIVE ) {
try {
orphanTransaction.rollback();
}
catch (Throwable t) {
CURRENT_SESSION_LOGGER.unableToRollbackTransactionForOrphanedSession( t );
}
}
}
catch (Throwable t) {
CURRENT_SESSION_LOGGER.unableToCloseOrphanedSession( t );
}
}
}
/**
* Disassociates a previously bound session from the current thread of execution.
*
* @param factory The factory for which the session should be unbound.
* @return The session which was unbound.
*/
public static Session unbind(SessionFactory factory) {
return doUnbind( factory);
}
private static Session existingSession(SessionFactory factory) {
return sessionMap().get( factory );
}
protected static Map<SessionFactory,Session> sessionMap() {
return CONTEXT_TL.get();
}
private static void doBind(Session session, SessionFactory factory) {
final var orphanedPreviousSession = sessionMap().put( factory, session );
terminateOrphanedSession( orphanedPreviousSession );
}
private static Session doUnbind(SessionFactory factory) {
final var sessionMap = sessionMap();
final var session = sessionMap.remove( factory );
if ( sessionMap.isEmpty() ) {
//Do not use set(null) as it would prevent the initialValue to be invoked again in case of need.
CONTEXT_TL.remove();
}
return session;
}
/**
* Transaction sync used for cleanup of the internal session map.
*/
protected static | ThreadLocalSessionContext |
java | alibaba__fastjson | src/test/java/com/alibaba/json/test/benchmark/decode/StringArray1000Decode.java | {
"start": 153,
"end": 748
} | class ____ extends BenchmarkCase {
private String text;
public StringArray1000Decode(){
super("StringArray1000Decode");
StringBuilder buf = new StringBuilder();
buf.append('[');
for (int i = 0; i < 1000; ++i) {
if (i != 0) {
buf.append(",");
}
buf.append('"' + Integer.toHexString(i * 100) + '"');
}
buf.append(']');
this.text = buf.toString();
}
@Override
public void execute(Codec codec) throws Exception {
codec.decode(text);
}
}
| StringArray1000Decode |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java | {
"start": 1593,
"end": 6158
} | class ____ implements WritableComparable<UTF8> {
private static final Logger LOG= LoggerFactory.getLogger(UTF8.class);
private static final DataInputBuffer IBUF = new DataInputBuffer();
private static final ThreadLocal<DataOutputBuffer> OBUF_FACTORY =
new ThreadLocal<DataOutputBuffer>(){
@Override
protected DataOutputBuffer initialValue() {
return new DataOutputBuffer();
}
};
private static final byte[] EMPTY_BYTES = new byte[0];
private byte[] bytes = EMPTY_BYTES;
private int length;
public UTF8() {
//set("");
}
/**
* Construct from a given string.
* @param string input string.
*/
public UTF8(String string) {
set(string);
}
/**
* Construct from a given string.
* @param utf8 input utf8.
*/
public UTF8(UTF8 utf8) {
set(utf8);
}
/** @return The raw bytes. */
public byte[] getBytes() {
return bytes;
}
/** @return The number of bytes in the encoded string. */
public int getLength() {
return length;
}
/**
* Set to contain the contents of a string.
* @param string input string.
*/
public void set(String string) {
if (string.length() > 0xffff/3) { // maybe too long
LOG.warn("truncating long string: " + string.length()
+ " chars, starting with " + string.substring(0, 20));
string = string.substring(0, 0xffff/3);
}
length = utf8Length(string); // compute length
if (length > 0xffff) // double-check length
throw new RuntimeException("string too long!");
if (bytes == null || length > bytes.length) // grow buffer
bytes = new byte[length];
try { // avoid sync'd allocations
DataOutputBuffer obuf = OBUF_FACTORY.get();
obuf.reset();
writeChars(obuf, string, 0, string.length());
System.arraycopy(obuf.getData(), 0, bytes, 0, length);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Set to contain the contents of a string.
* @param other input other.
*/
public void set(UTF8 other) {
length = other.length;
if (bytes == null || length > bytes.length) // grow buffer
bytes = new byte[length];
System.arraycopy(other.bytes, 0, bytes, 0, length);
}
@Override
public void readFields(DataInput in) throws IOException {
length = in.readUnsignedShort();
if (bytes == null || bytes.length < length)
bytes = new byte[length];
in.readFully(bytes, 0, length);
}
/**
* Skips over one UTF8 in the input.
* @param in datainput.
* @throws IOException raised on errors performing I/O.
*/
public static void skip(DataInput in) throws IOException {
int length = in.readUnsignedShort();
WritableUtils.skipFully(in, length);
}
@Override
public void write(DataOutput out) throws IOException {
out.writeShort(length);
out.write(bytes, 0, length);
}
/** Compare two UTF8s. */
@Override
public int compareTo(UTF8 o) {
return WritableComparator.compareBytes(bytes, 0, length,
o.bytes, 0, o.length);
}
/** Convert to a String. */
@Override
public String toString() {
StringBuilder buffer = new StringBuilder(length);
try {
synchronized (IBUF) {
IBUF.reset(bytes, length);
readChars(IBUF, buffer, length);
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return buffer.toString();
}
/**
* Convert to a string, checking for valid UTF8.
* @return the converted string
* @throws UTFDataFormatException if the underlying bytes contain invalid
* UTF8 data.
*/
public String toStringChecked() throws IOException {
StringBuilder buffer = new StringBuilder(length);
synchronized (IBUF) {
IBUF.reset(bytes, length);
readChars(IBUF, buffer, length);
}
return buffer.toString();
}
/** Returns true iff <code>o</code> is a UTF8 with the same contents. */
@Override
public boolean equals(Object o) {
if (!(o instanceof UTF8))
return false;
UTF8 that = (UTF8)o;
if (this.length != that.length)
return false;
else
return WritableComparator.compareBytes(bytes, 0, length,
that.bytes, 0, that.length) == 0;
}
@Override
public int hashCode() {
return WritableComparator.hashBytes(bytes, length);
}
/** A WritableComparator optimized for UTF8 keys. */
public static | UTF8 |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/operators/Ordering.java | {
"start": 1063,
"end": 1205
} | class ____ an ordering on a set of fields. It specifies the fields and order direction
* (ascending, descending).
*/
@Internal
public | represents |
java | spring-projects__spring-framework | spring-messaging/src/main/java/org/springframework/messaging/simp/stomp/SplittingStompEncoder.java | {
"start": 1106,
"end": 2401
} | class ____ {
private final StompEncoder encoder;
private final int bufferSizeLimit;
/**
* Create a new {@code SplittingStompEncoder}.
* @param encoder the {@link StompEncoder} to use
* @param bufferSizeLimit the buffer size limit
*/
public SplittingStompEncoder(StompEncoder encoder, int bufferSizeLimit) {
Assert.notNull(encoder, "StompEncoder is required");
Assert.isTrue(bufferSizeLimit > 0, "Buffer size limit must be greater than 0");
this.encoder = encoder;
this.bufferSizeLimit = bufferSizeLimit;
}
/**
* Encode the given payload and headers to a STOMP frame, and split it into a
* list of parts based on the configured buffer size limit.
* @param headers the STOMP message headers
* @param payload the STOMP message payload
* @return the parts of the encoded STOMP message
*/
public List<byte[]> encode(Map<String, Object> headers, byte[] payload) {
byte[] result = this.encoder.encode(headers, payload);
int length = result.length;
if (length <= this.bufferSizeLimit) {
return List.of(result);
}
List<byte[]> frames = new ArrayList<>();
for (int i = 0; i < length; i += this.bufferSizeLimit) {
frames.add(Arrays.copyOfRange(result, i, Math.min(i + this.bufferSizeLimit, length)));
}
return frames;
}
}
| SplittingStompEncoder |
java | google__gson | gson/src/test/java/com/google/gson/functional/JsonAdapterAnnotationOnClassesTest.java | {
"start": 12854,
"end": 13905
} | class ____ {
@SuppressWarnings("unused")
final String value = "a";
}
/**
* Verifies that {@link TypeAdapterFactory} specified by {@code @JsonAdapter} can call {@link
* Gson#getDelegateAdapter} without any issues, despite the factory not being directly registered
* on Gson.
*/
@Test
public void testDelegatingAdapterFactory() {
@SuppressWarnings("unchecked")
WithDelegatingFactory<String> deserialized =
new Gson().fromJson("{\"custom\":{\"f\":\"de\"}}", WithDelegatingFactory.class);
assertThat(deserialized.f).isEqualTo("de");
deserialized =
new Gson()
.fromJson(
"{\"custom\":{\"f\":\"de\"}}", new TypeToken<WithDelegatingFactory<String>>() {});
assertThat(deserialized.f).isEqualTo("de");
WithDelegatingFactory<String> serialized = new WithDelegatingFactory<>("se");
assertThat(new Gson().toJson(serialized)).isEqualTo("{\"custom\":{\"f\":\"se\"}}");
}
@JsonAdapter(WithDelegatingFactory.Factory.class)
private static | WithInvalidAdapterClass |
java | google__guava | guava-gwt/src-super/com/google/common/primitives/super/com/google/common/primitives/DoublesMethodsForWeb.java | {
"start": 787,
"end": 1046
} | class ____ {
@JsMethod(name = "Math.min", namespace = JsPackage.GLOBAL)
public static native double min(double... array);
@JsMethod(name = "Math.max", namespace = JsPackage.GLOBAL)
public static native double max(double... array);
}
| DoublesMethodsForWeb |
java | apache__camel | components/camel-debezium/camel-debezium-postgres/src/main/java/org/apache/camel/component/debezium/postgres/DebeziumPostgresEndpoint.java | {
"start": 1619,
"end": 2956
} | class ____ extends DebeziumEndpoint<PostgresConnectorEmbeddedDebeziumConfiguration>
implements EndpointServiceLocation {
@UriParam
private PostgresConnectorEmbeddedDebeziumConfiguration configuration;
public DebeziumPostgresEndpoint(final String uri, final DebeziumPostgresComponent component,
final PostgresConnectorEmbeddedDebeziumConfiguration configuration) {
super(uri, component);
this.configuration = configuration;
}
public DebeziumPostgresEndpoint() {
}
@Override
public String getServiceUrl() {
return configuration.getDatabaseHostname() + ":" + configuration.getDatabasePort();
}
@Override
public String getServiceProtocol() {
return "jdbc";
}
@Override
public Map<String, String> getServiceMetadata() {
if (configuration.getDatabaseUser() != null) {
return Map.of("username", configuration.getDatabaseUser());
}
return null;
}
@Override
public PostgresConnectorEmbeddedDebeziumConfiguration getConfiguration() {
return configuration;
}
@Override
public void setConfiguration(final PostgresConnectorEmbeddedDebeziumConfiguration configuration) {
this.configuration = configuration;
}
}
| DebeziumPostgresEndpoint |
java | elastic__elasticsearch | x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/extractor/GeoShapeFieldTests.java | {
"start": 632,
"end": 3419
} | class ____ extends ESTestCase {
public void testObjectFormat() {
double lat = 38.897676;
double lon = -77.03653;
String[] expected = new String[] { lat + "," + lon };
SearchHit hit = new SearchHitBuilder(42).setSource("{\"geo\":{\"type\":\"point\", \"coordinates\": [" + lon + ", " + lat + "]}}")
.build();
ExtractedField geo = new GeoShapeField("geo");
assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(expected));
assertThat(geo.getName(), equalTo("geo"));
assertThat(geo.getSearchField(), equalTo("geo"));
assertThat(geo.getTypes(), contains("geo_shape"));
assertThat(geo.getMethod(), equalTo(ExtractedField.Method.SOURCE));
assertThat(geo.supportsFromSource(), is(true));
assertThat(geo.newFromSource(), sameInstance(geo));
expectThrows(UnsupportedOperationException.class, () -> geo.getDocValueFormat());
assertThat(geo.isMultiField(), is(false));
expectThrows(UnsupportedOperationException.class, () -> geo.getParentField());
}
public void testWKTFormat() {
double lat = 38.897676;
double lon = -77.03653;
String[] expected = new String[] { lat + "," + lon };
SearchHit hit = new SearchHitBuilder(42).setSource("{\"geo\":\"POINT (" + lon + " " + lat + ")\"}").build();
ExtractedField geo = new GeoShapeField("geo");
assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(expected));
assertThat(geo.getName(), equalTo("geo"));
assertThat(geo.getSearchField(), equalTo("geo"));
assertThat(geo.getTypes(), contains("geo_shape"));
assertThat(geo.getMethod(), equalTo(ExtractedField.Method.SOURCE));
assertThat(geo.supportsFromSource(), is(true));
assertThat(geo.newFromSource(), sameInstance(geo));
expectThrows(UnsupportedOperationException.class, () -> geo.getDocValueFormat());
assertThat(geo.isMultiField(), is(false));
expectThrows(UnsupportedOperationException.class, () -> geo.getParentField());
}
public void testMissing() {
SearchHit hit = new SearchHitBuilder(42).addField("a_keyword", "bar").build();
ExtractedField geo = new GeoShapeField("missing");
assertThat(geo.value(hit, new SourceSupplier(hit)), equalTo(new Object[0]));
}
public void testArray() {
SearchHit hit = new SearchHitBuilder(42).setSource("{\"geo\":[1,2]}").build();
ExtractedField geo = new GeoShapeField("geo");
IllegalStateException e = expectThrows(IllegalStateException.class, () -> geo.value(hit, new SourceSupplier(hit)));
assertThat(e.getMessage(), equalTo("Unexpected values for a geo_shape field: [1, 2]"));
}
}
| GeoShapeFieldTests |
java | apache__camel | components/camel-jcr/src/main/java/org/apache/camel/component/jcr/EndpointEventListener.java | {
"start": 1385,
"end": 3209
} | class ____ implements EventListener {
private static final Logger LOG = LoggerFactory.getLogger(EndpointEventListener.class);
private final JcrConsumer consumer;
private final JcrEndpoint endpoint;
private final Processor processor;
public EndpointEventListener(JcrConsumer consumer, JcrEndpoint endpoint, Processor processor) {
this.consumer = consumer;
this.endpoint = endpoint;
this.processor = processor;
}
@Override
public void onEvent(EventIterator events) {
LOG.trace("onEvent START");
LOG.debug("{} consumer received JCR events: {}", endpoint, events);
RuntimeCamelException rce;
final Exchange exchange = createExchange(events);
try {
try {
LOG.debug("Processor, {}, is processing exchange, {}", processor, exchange);
processor.process(exchange);
} catch (Exception e) {
exchange.setException(e);
}
rce = exchange.getException(RuntimeCamelException.class);
} catch (Exception e) {
rce = wrapRuntimeCamelException(e);
} finally {
consumer.releaseExchange(exchange, false);
}
if (rce != null) {
LOG.trace("onEvent END throwing exception: {}", rce.toString());
throw rce;
}
LOG.trace("onEvent END");
}
private Exchange createExchange(EventIterator events) {
Exchange exchange = consumer.createExchange(false);
List<Event> eventList = new LinkedList<>();
if (events != null) {
while (events.hasNext()) {
eventList.add(events.nextEvent());
}
}
exchange.getIn().setBody(eventList);
return exchange;
}
}
| EndpointEventListener |
java | spring-projects__spring-framework | spring-beans/src/test/java/org/springframework/beans/factory/support/RegisteredBeanTests.java | {
"start": 1272,
"end": 7483
} | class ____ {
private DefaultListableBeanFactory beanFactory;
@BeforeEach
void setup() {
this.beanFactory = new DefaultListableBeanFactory();
this.beanFactory.registerBeanDefinition("bd",
new RootBeanDefinition(TestBean.class));
this.beanFactory.registerSingleton("sb", new TestBean());
}
@Test
void ofWhenBeanFactoryIsNullThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> RegisteredBean.of(null, "bd"))
.withMessage("'beanFactory' must not be null");
}
@Test
void ofWhenBeanNameIsEmptyThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> RegisteredBean.of(this.beanFactory, null))
.withMessage("'beanName' must not be empty");
}
@Test
void ofInnerBeanWhenInnerBeanIsNullThrowsException() {
RegisteredBean parent = RegisteredBean.of(this.beanFactory, "bd");
assertThatIllegalArgumentException().isThrownBy(
() -> RegisteredBean.ofInnerBean(parent, (BeanDefinitionHolder) null))
.withMessage("'innerBean' must not be null");
}
@Test
void ofInnerBeanWhenParentIsNullThrowsException() {
assertThatIllegalArgumentException()
.isThrownBy(() -> RegisteredBean.ofInnerBean(null,
new RootBeanDefinition(TestInnerBean.class)))
.withMessage("'parent' must not be null");
}
@Test
void ofInnerBeanWhenInnerBeanDefinitionIsNullThrowsException() {
RegisteredBean parent = RegisteredBean.of(this.beanFactory, "bd");
assertThatIllegalArgumentException()
.isThrownBy(() -> RegisteredBean.ofInnerBean(parent, "ib", null))
.withMessage("'innerBeanDefinition' must not be null");
}
@Test
void getBeanNameReturnsBeanName() {
RegisteredBean registeredBean = RegisteredBean.of(this.beanFactory, "bd");
assertThat(registeredBean.getBeanName()).isEqualTo("bd");
}
@Test
void getBeanNameWhenNamedInnerBeanReturnsBeanName() {
RegisteredBean parent = RegisteredBean.of(this.beanFactory, "bd");
RegisteredBean registeredBean = RegisteredBean.ofInnerBean(parent, "ib",
new RootBeanDefinition(TestInnerBean.class));
assertThat(registeredBean.getBeanName()).isEqualTo("ib");
}
@Test
void getBeanNameWhenUnnamedInnerBeanReturnsBeanName() {
RegisteredBean parent = RegisteredBean.of(this.beanFactory, "bd");
RegisteredBean registeredBean = RegisteredBean.ofInnerBean(parent,
new RootBeanDefinition(TestInnerBean.class));
assertThat(registeredBean.getBeanName()).startsWith("(inner bean)#");
}
@Test
void getBeanClassReturnsBeanClass() {
RegisteredBean registeredBean = RegisteredBean.of(this.beanFactory, "bd");
assertThat(registeredBean.getBeanClass()).isEqualTo(TestBean.class);
}
@Test
void getBeanTypeReturnsBeanType() {
RegisteredBean registeredBean = RegisteredBean.of(this.beanFactory, "bd");
assertThat(registeredBean.getBeanType().toClass()).isEqualTo(TestBean.class);
}
@Test
void getBeanTypeWhenHasInstanceBackedByLambdaDoesNotReturnLambdaType() {
this.beanFactory.registerBeanDefinition("bfpp", new RootBeanDefinition(
BeanFactoryPostProcessor.class, RegisteredBeanTests::getBeanFactoryPostProcessorLambda));
this.beanFactory.getBean("bfpp");
RegisteredBean registeredBean = RegisteredBean.of(this.beanFactory, "bfpp");
assertThat(registeredBean.getBeanType().toClass()).isEqualTo(BeanFactoryPostProcessor.class);
}
static BeanFactoryPostProcessor getBeanFactoryPostProcessorLambda() {
return bf -> {};
}
@Test
void getMergedBeanDefinitionReturnsMergedBeanDefinition() {
RegisteredBean registeredBean = RegisteredBean.of(this.beanFactory, "bd");
assertThat(registeredBean.getMergedBeanDefinition().getBeanClass())
.isEqualTo(TestBean.class);
}
@Test
void getMergedBeanDefinitionWhenSingletonThrowsException() {
RegisteredBean registeredBean = RegisteredBean.of(this.beanFactory, "sb");
assertThatExceptionOfType(NoSuchBeanDefinitionException.class)
.isThrownBy(() -> registeredBean.getMergedBeanDefinition());
}
@Test
void getMergedBeanDefinitionWhenInnerBeanReturnsMergedBeanDefinition() {
RegisteredBean parent = RegisteredBean.of(this.beanFactory, "bd");
RegisteredBean registeredBean = RegisteredBean.ofInnerBean(parent,
new RootBeanDefinition(TestInnerBean.class));
assertThat(registeredBean.getMergedBeanDefinition().getBeanClass())
.isEqualTo(TestInnerBean.class);
}
@Test
void isInnerBeanWhenInnerBeanReturnsTrue() {
RegisteredBean parent = RegisteredBean.of(this.beanFactory, "bd");
RegisteredBean registeredBean = RegisteredBean.ofInnerBean(parent,
new RootBeanDefinition(TestInnerBean.class));
assertThat(registeredBean.isInnerBean()).isTrue();
}
@Test
void isInnerBeanWhenNotInnerBeanReturnsTrue() {
RegisteredBean registeredBean = RegisteredBean.of(this.beanFactory, "bd");
assertThat(registeredBean.isInnerBean()).isFalse();
}
@Test
void getParentWhenInnerBeanReturnsParent() {
RegisteredBean parent = RegisteredBean.of(this.beanFactory, "bd");
RegisteredBean registeredBean = RegisteredBean.ofInnerBean(parent,
new RootBeanDefinition(TestInnerBean.class));
assertThat(registeredBean.getParent()).isSameAs(parent);
}
@Test
void getParentWhenNotInnerBeanReturnsNull() {
RegisteredBean registeredBean = RegisteredBean.of(this.beanFactory, "bd");
assertThat(registeredBean.getParent()).isNull();
}
@Test
void isGeneratedBeanNameWhenInnerBeanWithoutNameReturnsTrue() {
RegisteredBean parent = RegisteredBean.of(this.beanFactory, "bd");
RegisteredBean registeredBean = RegisteredBean.ofInnerBean(parent,
new RootBeanDefinition(TestInnerBean.class));
assertThat(registeredBean.isGeneratedBeanName()).isTrue();
}
@Test
void isGeneratedBeanNameWhenInnerBeanWithNameReturnsFalse() {
RegisteredBean parent = RegisteredBean.of(this.beanFactory, "bd");
RegisteredBean registeredBean = RegisteredBean.ofInnerBean(parent,
new BeanDefinitionHolder(new RootBeanDefinition(TestInnerBean.class),
"test"));
assertThat(registeredBean.isGeneratedBeanName()).isFalse();
}
@Test
void isGeneratedBeanNameWhenNotInnerBeanReturnsFalse() {
RegisteredBean registeredBean = RegisteredBean.of(this.beanFactory, "bd");
assertThat(registeredBean.isGeneratedBeanName()).isFalse();
}
static | RegisteredBeanTests |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NonCanonicalTypeTest.java | {
"start": 874,
"end": 1234
} | class ____ {
private final CompilationTestHelper compilationHelper =
CompilationTestHelper.newInstance(NonCanonicalType.class, getClass());
@Test
public void positive() {
compilationHelper
.addSourceLines(
"Test.java",
"""
import com.google.common.collect.ImmutableMap;
| NonCanonicalTypeTest |
java | apache__flink | flink-core/src/main/java/org/apache/flink/api/common/typeutils/base/StringValueComparator.java | {
"start": 1337,
"end": 4757
} | class ____ extends TypeComparator<StringValue> {
private static final long serialVersionUID = 1L;
private final boolean ascendingComparison;
private final StringValue reference = new StringValue();
private final StringValue tempReference = new StringValue();
private final TypeComparator<?>[] comparators = new TypeComparator[] {this};
public StringValueComparator(boolean ascending) {
this.ascendingComparison = ascending;
}
@Override
public int hash(StringValue record) {
return record.hashCode();
}
@Override
public void setReference(StringValue toCompare) {
toCompare.copyTo(reference);
}
@Override
public boolean equalToReference(StringValue candidate) {
return candidate.equals(this.reference);
}
@Override
public int compareToReference(TypeComparator<StringValue> referencedComparator) {
StringValue otherRef = ((StringValueComparator) referencedComparator).reference;
int comp = otherRef.compareTo(reference);
return ascendingComparison ? comp : -comp;
}
@Override
public int compare(StringValue first, StringValue second) {
int comp = first.compareTo(second);
return ascendingComparison ? comp : -comp;
}
@Override
public int compareSerialized(DataInputView firstSource, DataInputView secondSource)
throws IOException {
reference.read(firstSource);
tempReference.read(secondSource);
int comp = reference.compareTo(tempReference);
return ascendingComparison ? comp : -comp;
}
@Override
public boolean supportsNormalizedKey() {
return NormalizableKey.class.isAssignableFrom(StringValue.class);
}
@Override
public int getNormalizeKeyLen() {
return reference.getMaxNormalizedKeyLen();
}
@Override
public boolean isNormalizedKeyPrefixOnly(int keyBytes) {
return keyBytes < getNormalizeKeyLen();
}
@Override
public void putNormalizedKey(
StringValue record, MemorySegment target, int offset, int numBytes) {
record.copyNormalizedKey(target, offset, numBytes);
}
@Override
public boolean invertNormalizedKey() {
return !ascendingComparison;
}
@Override
public TypeComparator<StringValue> duplicate() {
return new StringValueComparator(ascendingComparison);
}
@Override
public int extractKeys(Object record, Object[] target, int index) {
target[index] = record;
return 1;
}
@Override
public TypeComparator<?>[] getFlatComparators() {
return comparators;
}
// --------------------------------------------------------------------------------------------
// unsupported normalization
// --------------------------------------------------------------------------------------------
@Override
public boolean supportsSerializationWithKeyNormalization() {
return false;
}
@Override
public void writeWithKeyNormalization(StringValue record, DataOutputView target)
throws IOException {
throw new UnsupportedOperationException();
}
@Override
public StringValue readWithKeyDenormalization(StringValue reuse, DataInputView source)
throws IOException {
throw new UnsupportedOperationException();
}
}
| StringValueComparator |
java | elastic__elasticsearch | x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/rest/RestGetGlobalCheckpointsAction.java | {
"start": 832,
"end": 2262
} | class ____ extends BaseRestHandler {
@Override
public String getName() {
return "fleet_get_global_checkpoints";
}
@Override
public List<Route> routes() {
return List.of(new Route(GET, "/{index}/_fleet/global_checkpoints"));
}
@Override
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
final String index = request.param("index");
final boolean waitForAdvance = request.paramAsBoolean("wait_for_advance", false);
final boolean waitForIndex = request.paramAsBoolean("wait_for_index", false);
final String[] stringCheckpoints = request.paramAsStringArray("checkpoints", new String[0]);
final long[] checkpoints = new long[stringCheckpoints.length];
for (int i = 0; i < stringCheckpoints.length; ++i) {
checkpoints[i] = Long.parseLong(stringCheckpoints[i]);
}
final TimeValue pollTimeout = request.paramAsTime("timeout", TimeValue.timeValueSeconds(30));
GetGlobalCheckpointsAction.Request getCheckpointsRequest = new GetGlobalCheckpointsAction.Request(
index,
waitForAdvance,
waitForIndex,
checkpoints,
pollTimeout
);
return channel -> client.execute(GetGlobalCheckpointsAction.INSTANCE, getCheckpointsRequest, new RestToXContentListener<>(channel));
}
}
| RestGetGlobalCheckpointsAction |
java | apache__logging-log4j2 | log4j-core-test/src/test/java/org/apache/logging/log4j/core/config/plugins/validation/ValidatingPluginWithTypedBuilder.java | {
"start": 1992,
"end": 2521
} | class ____<T>
implements org.apache.logging.log4j.core.util.Builder<ValidatingPluginWithTypedBuilder> {
@PluginBuilderAttribute
@Required(message = "The name given by the builder is null")
private String name;
public Builder<T> setName(final String name) {
this.name = name;
return this;
}
@Override
public ValidatingPluginWithTypedBuilder build() {
return new ValidatingPluginWithTypedBuilder(name);
}
}
}
| Builder |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java | {
"start": 7378,
"end": 7561
} | class ____ cache. */
private static native void initNativePosix(boolean doThreadsafeWorkaround);
/**
* JNI wrapper of persist memory operations.
*/
public static | ID |
java | elastic__elasticsearch | qa/smoke-test-http/src/internalClusterTest/java/org/elasticsearch/http/ClusterHealthRestCancellationIT.java | {
"start": 1308,
"end": 3447
} | class ____ extends HttpSmokeTestCase {
public void testClusterHealthRestCancellation() throws Exception {
final var barrier = new CyclicBarrier(2);
internalCluster().getCurrentMasterNodeInstance(ClusterService.class)
.submitUnbatchedStateUpdateTask("blocking", new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) {
safeAwait(barrier);
safeAwait(barrier);
return currentState;
}
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
});
final Request clusterHealthRequest = new Request(HttpGet.METHOD_NAME, "/_cluster/health");
clusterHealthRequest.addParameter("wait_for_events", Priority.LANGUID.toString());
final PlainActionFuture<Response> future = new PlainActionFuture<>();
logger.info("--> sending cluster health request");
final Cancellable cancellable = getRestClient().performRequestAsync(clusterHealthRequest, wrapAsRestResponseListener(future));
safeAwait(barrier);
// wait until the health request is waiting on the (blocked) master service
assertBusy(
() -> assertTrue(
internalCluster().getCurrentMasterNodeInstance(ClusterService.class)
.getMasterService()
.pendingTasks()
.stream()
.anyMatch(
pendingClusterTask -> pendingClusterTask.source().string().equals("cluster_health (wait_for_events [LANGUID])")
)
)
);
logger.info("--> cancelling cluster health request");
cancellable.cancel();
expectThrows(CancellationException.class, future::actionGet);
logger.info("--> checking cluster health task cancelled");
assertAllCancellableTasksAreCancelled(TransportClusterHealthAction.NAME);
safeAwait(barrier);
}
}
| ClusterHealthRestCancellationIT |
java | quarkusio__quarkus | integration-tests/maven/src/test/resources-filtered/projects/path-filter-not-applied-test-classes/test-manipulator/deployment/src/main/java/org/acme/test/manipulator/deployment/TestManipulatorProcessor.java | {
"start": 431,
"end": 942
} | class ____ {
@BuildStep
FeatureBuildItem feature() {
return new FeatureBuildItem("test-manipulator");
}
@BuildStep
BytecodeTransformerBuildItem interceptTestMethods() {
return new BytecodeTransformerBuildItem("org.acme.GreetingResourceTest", (cls, clsVisitor) -> {
var transformer = new ClassTransformer(cls);
transformer.addField("useless", String.class);
return transformer.applyTo(clsVisitor);
});
}
}
| TestManipulatorProcessor |
java | apache__logging-log4j2 | log4j-api/src/main/java/org/apache/logging/log4j/util/IndexedReadOnlyStringMap.java | {
"start": 1367,
"end": 3120
} | interface ____ extends ReadOnlyStringMap {
/**
* Viewing all key-value pairs as a sequence sorted by key, this method returns the key at the specified index,
* or {@code null} if the specified index is less than zero or greater or equal to the size of this collection.
*
* @param index the index of the key to return
* @return the key at the specified index or {@code null}
*/
String getKeyAt(final int index);
/**
* Viewing all key-value pairs as a sequence sorted by key, this method returns the value at the specified index,
* or {@code null} if the specified index is less than zero or greater or equal to the size of this collection.
*
* @param index the index of the value to return
* @return the value at the specified index or {@code null}
*/
<V> V getValueAt(final int index);
/**
* Viewing all key-value pairs as a sequence sorted by key, this method returns the index of the specified key in
* that sequence. If the specified key is not found, this method returns {@code (-(insertion point) - 1)}.
*
* @param key the key whose index in the ordered sequence of keys to return
* @return the index of the specified key or {@code (-(insertion point) - 1)} if the key is not found.
* The insertion point is defined as the point at which the key would be inserted into the array:
* the index of the first element in the range greater than the key, or {@code size()} if all elements
* are less than the specified key. Note that this guarantees that the return value will be >= 0
* if and only if the key is found.
*/
int indexOfKey(final String key);
}
| IndexedReadOnlyStringMap |
java | spring-projects__spring-boot | loader/spring-boot-loader/src/main/java/org/springframework/boot/loader/net/protocol/nested/NestedLocation.java | {
"start": 4219,
"end": 4625
} | class
____ (locationPath.length() > 2 && locationPath.charAt(2) == ':') {
return locationPath.substring(1);
}
// Deal with Jetty's org.eclipse.jetty.util.URIUtil#correctURI(URI)
if (locationPath.startsWith("///") && locationPath.charAt(4) == ':') {
return locationPath.substring(3);
}
return locationPath;
}
static void clearCache() {
locationCache.clear();
pathCache.clear();
}
}
| if |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FutureTransformAsyncTest.java | {
"start": 17267,
"end": 18470
} | interface ____ {
ListenableFuture<Void> apply(String value);
}
void foo(TestInterface unused) {
return;
}
ListenableFuture<Void> test() {
ListenableFuture<Void> future =
Futures.transform(
Futures.immediateFuture("x"),
unused -> {
foo(
x -> {
return Futures.immediateVoidFuture();
});
return (Void) null;
},
executor);
return future;
}
}
""")
.doTest();
}
@Test
public void transformAsync_fluentFuture() {
refactoringHelper
.addInputLines(
"in/Test.java",
"""
import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.concurrent.Executor;
| TestInterface |
java | apache__camel | core/camel-core/src/test/java/org/apache/camel/component/file/FileConsumerSuspendTest.java | {
"start": 1298,
"end": 2545
} | class ____ extends ContextTestSupport {
@Test
public void testConsumeSuspendFile() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
template.sendBodyAndHeader(fileUri(), "Bye World", Exchange.FILE_NAME, "bye.txt");
template.sendBodyAndHeader(fileUri(), "Hello World", Exchange.FILE_NAME, "hello.txt");
assertMockEndpointsSatisfied();
oneExchangeDone.matchesWaitTime();
// the route is suspended by the policy so we should only receive one
try (Stream<Path> list = Files.list(testDirectory())) {
long files = list.count();
assertEquals(1, files, "The file should exists");
}
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
MyPolicy myPolicy = new MyPolicy();
from(fileUri("?maxMessagesPerPoll=1&delete=true&initialDelay=0&delay=10"))
.routePolicy(myPolicy).id("myRoute").convertBodyTo(String.class)
.to("mock:result");
}
};
}
private static | FileConsumerSuspendTest |
java | spring-projects__spring-framework | spring-web/src/main/java/org/springframework/http/server/reactive/ReactorServerHttpRequest.java | {
"start": 1858,
"end": 4672
} | class ____ extends AbstractServerHttpRequest {
private static final Log logger = HttpLogging.forLogName(ReactorServerHttpRequest.class);
private static final AtomicLong logPrefixIndex = new AtomicLong();
private final HttpServerRequest request;
private final NettyDataBufferFactory bufferFactory;
public ReactorServerHttpRequest(HttpServerRequest request, NettyDataBufferFactory bufferFactory)
throws URISyntaxException {
super(HttpMethod.valueOf(request.method().name()),
ReactorUriHelper.createUri(request), request.forwardedPrefix(),
new HttpHeaders(new Netty4HeadersAdapter(request.requestHeaders())));
Assert.notNull(bufferFactory, "DataBufferFactory must not be null");
this.request = request;
this.bufferFactory = bufferFactory;
}
@Override
protected MultiValueMap<String, HttpCookie> initCookies() {
MultiValueMap<String, HttpCookie> cookies = new LinkedMultiValueMap<>();
for (CharSequence name : this.request.allCookies().keySet()) {
for (Cookie cookie : this.request.allCookies().get(name)) {
HttpCookie httpCookie = new HttpCookie(name.toString(), cookie.value());
cookies.add(name.toString(), httpCookie);
}
}
return cookies;
}
@Override
public @Nullable InetSocketAddress getLocalAddress() {
return this.request.hostAddress();
}
@Override
public @Nullable InetSocketAddress getRemoteAddress() {
return this.request.remoteAddress();
}
@Override
protected @Nullable SslInfo initSslInfo() {
Channel channel = ((Connection) this.request).channel();
SslHandler sslHandler = channel.pipeline().get(SslHandler.class);
if (sslHandler == null && channel.parent() != null) { // HTTP/2
sslHandler = channel.parent().pipeline().get(SslHandler.class);
}
if (sslHandler != null) {
SSLSession session = sslHandler.engine().getSession();
return new DefaultSslInfo(session);
}
return null;
}
@Override
public Flux<DataBuffer> getBody() {
return this.request.receive().retain().map(this.bufferFactory::wrap);
}
@SuppressWarnings("unchecked")
@Override
public <T> T getNativeRequest() {
return (T) this.request;
}
@Override
protected @Nullable String initId() {
if (this.request instanceof Connection connection) {
return connection.channel().id().asShortText() +
"-" + logPrefixIndex.incrementAndGet();
}
return null;
}
@Override
protected String initLogPrefix() {
String id = null;
if (this.request instanceof ChannelOperationsId operationsId) {
id = (logger.isDebugEnabled() ? operationsId.asLongText() : operationsId.asShortText());
}
if (id != null) {
return id;
}
if (this.request instanceof Connection connection) {
return connection.channel().id().asShortText() +
"-" + logPrefixIndex.incrementAndGet();
}
return getId();
}
}
| ReactorServerHttpRequest |
java | apache__flink | flink-table/flink-table-code-splitter/src/main/java/org/apache/flink/table/codesplit/MemberFieldRewriter.java | {
"start": 3829,
"end": 9743
} | class ____ extends JavaParserBaseVisitor<Void> {
private final Stack<StackElement> classStack;
private final Map<String, String> replaceMap;
private final Set<String> varNames;
private int fieldCount = 0;
MemberFieldVisitor() {
classStack = new Stack<>();
replaceMap = new HashMap<>();
varNames = new HashSet<>();
}
@Override
public Void visitClassDeclaration(JavaParser.ClassDeclarationContext ctx) {
classStack.push(new StackElement());
Void ret = visitChildren(ctx);
rewriteClassDeclaration(ctx);
classStack.pop();
return ret;
}
@Override
public Void visitMemberDeclaration(JavaParser.MemberDeclarationContext ctx) {
if (ctx.fieldDeclaration() == null) {
return null;
}
checkMemberDeclaration(ctx);
for (JavaParser.ModifierContext modifier :
((JavaParser.ClassBodyDeclarationContext) ctx.getParent()).modifier()) {
if ("static".equals(modifier.getText())) {
// we will not modify static fields
return null;
}
}
String fieldName =
ctx.fieldDeclaration()
.variableDeclarators()
.variableDeclarator(0)
.variableDeclaratorId()
.getText();
if ("references".equals(fieldName)) {
// this is a special field name used by all code generator
return null;
}
String type = ctx.fieldDeclaration().typeType().getText();
String init =
CodeSplitUtil.getContextString(
ctx.fieldDeclaration()
.variableDeclarators()
.variableDeclarator(0)
.variableInitializer());
StackElement classInfo = classStack.peek();
Integer typeCount = classInfo.typeCounts.get(type);
int id = typeCount == null ? 0 : typeCount;
classInfo.typeCounts.put(type, id + 1);
classInfo.fields.add(new MemberField(fieldName, type, id, init));
rewriter.delete(ctx.getParent().start, ctx.getParent().stop);
fieldCount++;
return null;
}
private void rewriteClassDeclaration(JavaParser.ClassDeclarationContext ctx) {
Map<String, String> typeFieldNames = new HashMap<>();
StringBuilder newDeclaration = new StringBuilder("\n");
for (Map.Entry<String, Integer> typeCount : classStack.peek().typeCounts.entrySet()) {
String type = typeCount.getKey();
String typeWithoutArgs =
type.indexOf('<') == -1
? type
: type.substring(0, type.indexOf('<'))
+ type.substring(type.lastIndexOf('>') + 1);
int count = typeCount.getValue();
String fieldName = CodeSplitUtil.newName("rewrite");
typeFieldNames.put(type, fieldName);
StringBuilder newField = new StringBuilder();
newField.append(type).append("[] ").append(fieldName).append(" = new ");
int pos = typeWithoutArgs.indexOf("[");
if (pos == -1) {
newField.append(typeWithoutArgs).append("[").append(count).append("]");
} else {
newField.append(typeWithoutArgs, 0, pos)
.append("[")
.append(count)
.append("]")
.append(typeWithoutArgs, pos, typeWithoutArgs.length());
}
newField.append(";\n");
newDeclaration.append(newField);
}
boolean hasInit = false;
for (MemberField field : classStack.peek().fields) {
String newName = typeFieldNames.get(field.type) + "[" + field.id + "]";
replaceMap.put(field.oldName, newName);
if (field.init.length() == 0) {
continue;
}
if (!hasInit) {
newDeclaration.append("\n{\n");
hasInit = true;
}
newDeclaration.append(newName).append(" = ").append(field.init).append(";\n");
}
if (hasInit) {
newDeclaration.append("}\n");
}
rewriter.insertAfter(ctx.classBody().start, newDeclaration.toString());
}
private void checkMemberDeclaration(JavaParser.MemberDeclarationContext ctx) {
if (ctx.fieldDeclaration() == null) {
return;
}
Preconditions.checkArgument(
ctx.fieldDeclaration().variableDeclarators().variableDeclarator().size() == 1,
"%s\nCodegen rewrite failed. You can only declare one field in one statement.",
code);
for (JavaParser.VariableDeclaratorContext v :
ctx.fieldDeclaration().variableDeclarators().variableDeclarator()) {
String identifier = v.variableDeclaratorId().getText();
Preconditions.checkArgument(
!varNames.contains(identifier),
"%s\nCodegen rewrite failed. Field names should not be the same. Name: %s",
code,
identifier);
varNames.add(identifier);
}
}
}
private | MemberFieldVisitor |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/api/shortarray/ShortArrayAssert_startsWith_Test.java | {
"start": 969,
"end": 1327
} | class ____ extends ShortArrayAssertBaseTest {
@Override
protected ShortArrayAssert invoke_api_method() {
return assertions.startsWith((short) 6, (short) 8);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertStartsWith(getInfo(assertions), getActual(assertions), arrayOf(6, 8));
}
}
| ShortArrayAssert_startsWith_Test |
java | elastic__elasticsearch | server/src/main/java/org/elasticsearch/common/hash/MurmurHash3.java | {
"start": 2045,
"end": 7106
} | class ____ {
int offset;
long h1;
long h2;
IntermediateResult(int offset, long h1, long h2) {
this.offset = offset;
this.h1 = h1;
this.h2 = h2;
}
}
private static long C1 = 0x87c37b91114253d5L;
private static long C2 = 0x4cf5ad432745937fL;
public static long fmix(long k) {
k ^= k >>> 33;
k *= 0xff51afd7ed558ccdL;
k ^= k >>> 33;
k *= 0xc4ceb9fe1a85ec53L;
k ^= k >>> 33;
return k;
}
/**
* Compute the hash of the MurmurHash3_x64_128 hashing function.
*
* Note, this hashing function might be used to persist hashes, so if the way hashes are computed
* changes for some reason, it needs to be addressed (like in BloomFilter and MurmurHashField).
*/
@SuppressWarnings("fallthrough") // Intentionally uses fallthrough to implement a well known hashing algorithm
public static Hash128 hash128(byte[] key, int offset, int length, long seed, Hash128 hash) {
long h1 = seed;
long h2 = seed;
if (length >= 16) {
IntermediateResult result = intermediateHash(key, offset, length, h1, h2);
h1 = result.h1;
h2 = result.h2;
offset = result.offset;
}
return finalizeHash(hash, key, offset, length, h1, h2);
}
static IntermediateResult intermediateHash(byte[] key, int offset, int length, long h1, long h2) {
final int len16 = length & 0xFFFFFFF0; // higher multiple of 16 that is lower than or equal to length
final int end = offset + len16;
for (int i = offset; i < end; i += 16) {
long k1 = ByteUtils.readLongLE(key, i);
long k2 = ByteUtils.readLongLE(key, i + 8);
k1 *= C1;
k1 = Long.rotateLeft(k1, 31);
k1 *= C2;
h1 ^= k1;
h1 = Long.rotateLeft(h1, 27);
h1 += h2;
h1 = h1 * 5 + 0x52dce729;
k2 *= C2;
k2 = Long.rotateLeft(k2, 33);
k2 *= C1;
h2 ^= k2;
h2 = Long.rotateLeft(h2, 31);
h2 += h1;
h2 = h2 * 5 + 0x38495ab5;
}
// Advance offset to the unprocessed tail of the data.
offset = end;
return new IntermediateResult(offset, h1, h2);
}
@SuppressWarnings("fallthrough") // Intentionally uses fallthrough to implement a well known hashing algorithm
static Hash128 finalizeHash(Hash128 hash, byte[] remainder, int offset, int length, long h1, long h2) {
long k1 = 0;
long k2 = 0;
switch (length & 15) {
case 15:
k2 ^= (remainder[offset + 14] & 0xFFL) << 48;
case 14:
k2 ^= (remainder[offset + 13] & 0xFFL) << 40;
case 13:
k2 ^= (remainder[offset + 12] & 0xFFL) << 32;
case 12:
k2 ^= (remainder[offset + 11] & 0xFFL) << 24;
case 11:
k2 ^= (remainder[offset + 10] & 0xFFL) << 16;
case 10:
k2 ^= (remainder[offset + 9] & 0xFFL) << 8;
case 9:
k2 ^= (remainder[offset + 8] & 0xFFL) << 0;
k2 *= C2;
k2 = Long.rotateLeft(k2, 33);
k2 *= C1;
h2 ^= k2;
case 8:
k1 ^= (remainder[offset + 7] & 0xFFL) << 56;
case 7:
k1 ^= (remainder[offset + 6] & 0xFFL) << 48;
case 6:
k1 ^= (remainder[offset + 5] & 0xFFL) << 40;
case 5:
k1 ^= (remainder[offset + 4] & 0xFFL) << 32;
case 4:
k1 ^= (remainder[offset + 3] & 0xFFL) << 24;
case 3:
k1 ^= (remainder[offset + 2] & 0xFFL) << 16;
case 2:
k1 ^= (remainder[offset + 1] & 0xFFL) << 8;
case 1:
k1 ^= (remainder[offset] & 0xFFL);
k1 *= C1;
k1 = Long.rotateLeft(k1, 31);
k1 *= C2;
h1 ^= k1;
}
h1 ^= length;
h2 ^= length;
h1 += h2;
h2 += h1;
h1 = fmix(h1);
h2 = fmix(h2);
h1 += h2;
h2 += h1;
hash.h1 = h1;
hash.h2 = h2;
return hash;
}
/**
* A 64-bit variant which accepts a long to hash, and returns the 64bit long hash.
* This is useful if the input is already in long (or smaller) format and you don't
* need the full 128b width and flexibility of
* {@link MurmurHash3#hash128(byte[], int, int, long, Hash128)}
*
* Given the limited nature of this variant, it should be faster than the 128b version
* when you only need 128b (many fewer instructions)
*/
public static long murmur64(long h) {
h ^= h >>> 33;
h *= 0xff51afd7ed558ccdL;
h ^= h >>> 33;
h *= 0xc4ceb9fe1a85ec53L;
h ^= h >>> 33;
return h;
}
}
| IntermediateResult |
java | apache__camel | components/camel-spring-parent/camel-spring-xml/src/test/java/org/apache/camel/component/bean/MyBeanBindingConsumer.java | {
"start": 1149,
"end": 2253
} | class ____ {
private ProducerTemplate template;
@Consume("direct:startBeanExpression")
public void doSomethingBeanExpression(String payload, @Bean(ref = "myCounter") int count) {
template.sendBodyAndHeader("mock:result", "Bye " + payload, "count", count);
}
@Consume("direct:startConstantExpression")
public void doSomethingConstantExpression(String payload, @Simple("5") int count) {
template.sendBodyAndHeader("mock:result", "Bye " + payload, "count", count);
}
@Consume("direct:startHeaderExpression")
public void doSomethingHeaderExpression(String payload, @Header("number") int count) {
template.sendBodyAndHeader("mock:result", "Bye " + payload, "count", count);
}
@Consume("direct:startMany")
public void doSomethingManyExpression(String payload, @Simple("5") int count, @Header("number") int number) {
template.sendBodyAndHeader("mock:result", "Bye " + payload, "count", count * number);
}
public void setTemplate(ProducerTemplate template) {
this.template = template;
}
}
| MyBeanBindingConsumer |
java | apache__camel | components/camel-ai/camel-qdrant/src/test/java/org/apache/camel/component/qdrant/it/QdrantDeletePointsIT.java | {
"start": 1688,
"end": 5584
} | class ____ extends QdrantTestSupport {
@Test
@Order(1)
public void createCollection() {
Exchange result = fluentTemplate.to("qdrant:testDelete")
.withHeader(QdrantHeaders.ACTION, QdrantAction.CREATE_COLLECTION)
.withBody(
Collections.VectorParams.newBuilder()
.setSize(2)
.setDistance(Collections.Distance.Cosine).build())
.request(Exchange.class);
assertThat(result).isNotNull();
assertThat(result.getException()).isNull();
}
@Test
@Order(2)
public void upsert() {
Exchange result1 = fluentTemplate.to("qdrant:testDelete")
.withHeader(QdrantHeaders.ACTION, QdrantAction.UPSERT)
.withBody(
Points.PointStruct.newBuilder()
.setId(PointIdFactory.id(8))
.setVectors(VectorsFactory.vectors(List.of(3.5f, 4.5f)))
.putAllPayload(Map.of("foo", ValueFactory.value("hello1")))
.build())
.request(Exchange.class);
assertThat(result1).isNotNull();
assertThat(result1.getException()).isNull();
Exchange result2 = fluentTemplate.to("qdrant:testDelete")
.withHeader(QdrantHeaders.ACTION, QdrantAction.UPSERT)
.withBody(
Points.PointStruct.newBuilder()
.setId(PointIdFactory.id(9))
.setVectors(VectorsFactory.vectors(List.of(3.5f, 4.5f)))
.putAllPayload(Map.of("bar", ValueFactory.value("hello2")))
.build())
.request(Exchange.class);
assertThat(result2).isNotNull();
assertThat(result2.getException()).isNull();
}
@Test
@Order(3)
public void deleteWithCondition() {
Exchange deleteResult = fluentTemplate.to("qdrant:testDelete")
.withHeader(QdrantHeaders.ACTION, QdrantAction.DELETE)
.withBody(ConditionFactory.matchKeyword("foo", "hello1"))
.request(Exchange.class);
assertThat(deleteResult).isNotNull();
assertThat(deleteResult.getException()).isNull();
Exchange result = fluentTemplate.to("qdrant:testDelete")
.withHeader(QdrantHeaders.ACTION, QdrantAction.RETRIEVE)
.withBody(PointIdFactory.id(8))
.request(Exchange.class);
assertThat(result).isNotNull();
assertThat(result.getException()).isNull();
assertThat(result.getIn().getBody()).isInstanceOfSatisfying(Collection.class, c -> {
assertThat(c).hasSize(0);
});
}
@Test
@Order(4)
public void deleteWithFilter() {
Exchange deleteResult = fluentTemplate.to("qdrant:testDelete")
.withHeader(QdrantHeaders.ACTION, QdrantAction.DELETE)
.withBody(
Common.Filter.newBuilder()
.addMust(ConditionFactory.matchKeyword("bar", "hello2"))
.build())
.request(Exchange.class);
assertThat(deleteResult).isNotNull();
assertThat(deleteResult.getException()).isNull();
Exchange result = fluentTemplate.to("qdrant:testDelete")
.withHeader(QdrantHeaders.ACTION, QdrantAction.RETRIEVE)
.withBody(PointIdFactory.id(9))
.request(Exchange.class);
assertThat(result).isNotNull();
assertThat(result.getException()).isNull();
assertThat(result.getIn().getBody()).isInstanceOfSatisfying(Collection.class, c -> {
assertThat(c).hasSize(0);
});
}
}
| QdrantDeletePointsIT |
java | apache__camel | components/camel-salesforce/camel-salesforce-component/src/test/java/org/apache/camel/component/salesforce/internal/pubsub/AuthErrorPubSubServer.java | {
"start": 1371,
"end": 2633
} | class ____ extends PubSubGrpc.PubSubImplBase {
public int subscribeCalls = 0;
@Override
public StreamObserver<FetchRequest> subscribe(StreamObserver<FetchResponse> client) {
subscribeCalls = subscribeCalls + 1;
return new StreamObserver<>() {
@Override
public void onNext(FetchRequest request) {
if (subscribeCalls == 1) {
TimerTask task = new TimerTask() {
public void run() {
StatusRuntimeException e = new StatusRuntimeException(Status.UNAUTHENTICATED, new Metadata());
e.getTrailers().put(Metadata.Key.of("error-code", Metadata.ASCII_STRING_MARSHALLER),
PUBSUB_ERROR_AUTH_ERROR);
client.onError(e);
}
};
Timer timer = new Timer("Timer");
long delay = 1000L;
timer.schedule(task, delay);
}
}
@Override
public void onError(Throwable t) {
}
@Override
public void onCompleted() {
}
};
}
}
| AuthErrorPubSubServer |
java | bumptech__glide | library/test/src/test/java/com/bumptech/glide/load/resource/bytes/BytesResourceTest.java | {
"start": 215,
"end": 752
} | class ____ {
@Test
public void testReturnsGivenBytes() {
byte[] bytes = new byte[0];
BytesResource resource = new BytesResource(bytes);
assertEquals(bytes, resource.get());
}
@Test
public void testReturnsSizeOfGivenBytes() {
byte[] bytes = new byte[123];
BytesResource resource = new BytesResource(bytes);
assertEquals(bytes.length, resource.getSize());
}
@Test(expected = NullPointerException.class)
public void testThrowsIfGivenNullBytes() {
new BytesResource(null);
}
}
| BytesResourceTest |
java | netty__netty | codec-compression/src/main/java/io/netty/handler/codec/compression/SnappyFrameDecoder.java | {
"start": 1517,
"end": 10681
} | enum ____ {
STREAM_IDENTIFIER,
COMPRESSED_DATA,
UNCOMPRESSED_DATA,
RESERVED_UNSKIPPABLE,
RESERVED_SKIPPABLE
}
private static final int SNAPPY_IDENTIFIER_LEN = 6;
// See https://github.com/google/snappy/blob/1.1.9/framing_format.txt#L95
private static final int MAX_UNCOMPRESSED_DATA_SIZE = 65536 + 4;
// See https://github.com/google/snappy/blob/1.1.9/framing_format.txt#L82
private static final int MAX_DECOMPRESSED_DATA_SIZE = 65536;
// See https://github.com/google/snappy/blob/1.1.9/framing_format.txt#L82
private static final int MAX_COMPRESSED_CHUNK_SIZE = 16777216 - 1;
private final Snappy snappy = new Snappy();
private final boolean validateChecksums;
private boolean started;
private boolean corrupted;
private int numBytesToSkip;
/**
* Creates a new snappy-framed decoder with validation of checksums
* turned OFF. To turn checksum validation on, please use the alternate
* {@link #SnappyFrameDecoder(boolean)} constructor.
*/
public SnappyFrameDecoder() {
this(false);
}
/**
* Creates a new snappy-framed decoder with validation of checksums
* as specified.
*
* @param validateChecksums
* If true, the checksum field will be validated against the actual
* uncompressed data, and if the checksums do not match, a suitable
* {@link DecompressionException} will be thrown
*/
public SnappyFrameDecoder(boolean validateChecksums) {
this.validateChecksums = validateChecksums;
}
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
if (corrupted) {
in.skipBytes(in.readableBytes());
return;
}
if (numBytesToSkip != 0) {
// The last chunkType we detected was RESERVED_SKIPPABLE and we still have some bytes to skip.
int skipBytes = Math.min(numBytesToSkip, in.readableBytes());
in.skipBytes(skipBytes);
numBytesToSkip -= skipBytes;
// Let's return and try again.
return;
}
try {
int idx = in.readerIndex();
final int inSize = in.readableBytes();
if (inSize < 4) {
// We need to be at least able to read the chunk type identifier (one byte),
// and the length of the chunk (3 bytes) in order to proceed
return;
}
final int chunkTypeVal = in.getUnsignedByte(idx);
final ChunkType chunkType = mapChunkType((byte) chunkTypeVal);
final int chunkLength = in.getUnsignedMediumLE(idx + 1);
switch (chunkType) {
case STREAM_IDENTIFIER:
if (chunkLength != SNAPPY_IDENTIFIER_LEN) {
throw new DecompressionException("Unexpected length of stream identifier: " + chunkLength);
}
if (inSize < 4 + SNAPPY_IDENTIFIER_LEN) {
break;
}
in.skipBytes(4);
int offset = in.readerIndex();
in.skipBytes(SNAPPY_IDENTIFIER_LEN);
checkByte(in.getByte(offset++), (byte) 's');
checkByte(in.getByte(offset++), (byte) 'N');
checkByte(in.getByte(offset++), (byte) 'a');
checkByte(in.getByte(offset++), (byte) 'P');
checkByte(in.getByte(offset++), (byte) 'p');
checkByte(in.getByte(offset), (byte) 'Y');
started = true;
break;
case RESERVED_SKIPPABLE:
if (!started) {
throw new DecompressionException("Received RESERVED_SKIPPABLE tag before STREAM_IDENTIFIER");
}
in.skipBytes(4);
int skipBytes = Math.min(chunkLength, in.readableBytes());
in.skipBytes(skipBytes);
if (skipBytes != chunkLength) {
// We could skip all bytes, let's store the remaining so we can do so once we receive more
// data.
numBytesToSkip = chunkLength - skipBytes;
}
break;
case RESERVED_UNSKIPPABLE:
// The spec mandates that reserved unskippable chunks must immediately
// return an error, as we must assume that we cannot decode the stream
// correctly
throw new DecompressionException(
"Found reserved unskippable chunk type: 0x" + Integer.toHexString(chunkTypeVal));
case UNCOMPRESSED_DATA:
if (!started) {
throw new DecompressionException("Received UNCOMPRESSED_DATA tag before STREAM_IDENTIFIER");
}
if (chunkLength > MAX_UNCOMPRESSED_DATA_SIZE) {
throw new DecompressionException("Received UNCOMPRESSED_DATA larger than " +
MAX_UNCOMPRESSED_DATA_SIZE + " bytes");
}
if (inSize < 4 + chunkLength) {
return;
}
in.skipBytes(4);
if (validateChecksums) {
int checksum = in.readIntLE();
validateChecksum(checksum, in, in.readerIndex(), chunkLength - 4);
} else {
in.skipBytes(4);
}
out.add(in.readRetainedSlice(chunkLength - 4));
break;
case COMPRESSED_DATA:
if (!started) {
throw new DecompressionException("Received COMPRESSED_DATA tag before STREAM_IDENTIFIER");
}
if (chunkLength > MAX_COMPRESSED_CHUNK_SIZE) {
throw new DecompressionException("Received COMPRESSED_DATA that contains" +
" chunk that exceeds " + MAX_COMPRESSED_CHUNK_SIZE + " bytes");
}
if (inSize < 4 + chunkLength) {
return;
}
in.skipBytes(4);
int checksum = in.readIntLE();
int uncompressedSize = snappy.getPreamble(in);
if (uncompressedSize > MAX_DECOMPRESSED_DATA_SIZE) {
throw new DecompressionException("Received COMPRESSED_DATA that contains" +
" uncompressed data that exceeds " + MAX_DECOMPRESSED_DATA_SIZE + " bytes");
}
ByteBuf uncompressed = ctx.alloc().buffer(uncompressedSize, MAX_DECOMPRESSED_DATA_SIZE);
try {
if (validateChecksums) {
int oldWriterIndex = in.writerIndex();
try {
in.writerIndex(in.readerIndex() + chunkLength - 4);
snappy.decode(in, uncompressed);
} finally {
in.writerIndex(oldWriterIndex);
}
validateChecksum(checksum, uncompressed, 0, uncompressed.writerIndex());
} else {
snappy.decode(in.readSlice(chunkLength - 4), uncompressed);
}
out.add(uncompressed);
uncompressed = null;
} finally {
if (uncompressed != null) {
uncompressed.release();
}
}
snappy.reset();
break;
}
} catch (Exception e) {
corrupted = true;
throw e;
}
}
private static void checkByte(byte actual, byte expect) {
if (actual != expect) {
throw new DecompressionException("Unexpected stream identifier contents. Mismatched snappy " +
"protocol version?");
}
}
/**
* Decodes the chunk type from the type tag byte.
*
* @param type The tag byte extracted from the stream
* @return The appropriate {@link ChunkType}, defaulting to {@link ChunkType#RESERVED_UNSKIPPABLE}
*/
private static ChunkType mapChunkType(byte type) {
if (type == 0) {
return ChunkType.COMPRESSED_DATA;
} else if (type == 1) {
return ChunkType.UNCOMPRESSED_DATA;
} else if (type == (byte) 0xff) {
return ChunkType.STREAM_IDENTIFIER;
} else if ((type & 0x80) == 0x80) {
return ChunkType.RESERVED_SKIPPABLE;
} else {
return ChunkType.RESERVED_UNSKIPPABLE;
}
}
}
| ChunkType |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/FloatingPointAssertionWithinEpsilonTest.java | {
"start": 4279,
"end": 6267
} | class ____ {
private static final float TOLERANCE = 1e-5f;
private static final double TOLERANCE2 = 1e-10f;
private static final float VALUE = 1;
public void testFloat() {
String test = Boolean.TRUE.toString();
assertThat(1.0f).isWithin(1e-5f).of(1.0f);
assertThat(1f).isWithin(TOLERANCE).of(VALUE);
assertThat(1f).isWithin(1).of(1);
assertThat(1f).isNotWithin(0).of(2f);
assertThat(1f).isNotWithin(.5f).of(2f);
assertEquals(1f, 1f, TOLERANCE);
}
public void testDouble() {
String test = Boolean.TRUE.toString();
assertThat(1.0).isWithin(1e-10).of(1.0);
assertThat(1.0).isWithin(TOLERANCE2).of(1f);
assertThat(1.0).isWithin(TOLERANCE2).of(1);
assertEquals(1.0, 1.0, TOLERANCE);
}
public void testZeroCases() {
assertThat(1.0).isWithin(0.0).of(1.0);
assertThat(1f).isWithin(0f).of(1f);
assertThat(1f).isWithin(0).of(1f);
assertEquals(1f, 1f, 0f);
}
}\
""")
.doTest();
}
@Test
public void fixes() {
BugCheckerRefactoringTestHelper.newInstance(
FloatingPointAssertionWithinEpsilon.class, getClass())
.addInputLines(
"FloatingPointAssertionWithinEpsilonPositiveCases.java",
"""
package com.google.errorprone.bugpatterns.testdata;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
/**
* Positive test cases for FloatingPointAssertionWithinEpsilon check.
*
* @author ghm@google.com (Graeme Morgan)
*/
final | FloatingPointAssertionWithinEpsilonNegativeCases |
java | quarkusio__quarkus | extensions/resteasy-reactive/rest/deployment/src/test/java/io/quarkus/resteasy/reactive/server/test/security/AbstractCustomExceptionMapperTest.java | {
"start": 1355,
"end": 3104
} | class ____ {
@Test
public void testNoExceptions() {
RestAssured.given()
.auth().preemptive().basic("gaston", "gaston-password")
.get("/hello")
.then()
.statusCode(200)
.body(Matchers.is("Hello Gaston"));
RestAssured.given()
.get("/hello")
.then()
.statusCode(401);
}
@Test
public void testUnhandledRuntimeException() {
RestAssured.given()
.auth().preemptive().basic("gaston", "gaston-password")
.header("fail-unhandled", "true")
.get("/hello")
.then()
.statusCode(500)
.body(Matchers.containsString(UnhandledRuntimeException.class.getName()))
.body(Matchers.containsString("Expected unhandled failure"));
}
@Test
public void testCustomExceptionInIdentityProvider() {
RestAssured.given()
.auth().preemptive().basic("gaston", "gaston-password")
.header("fail-authentication", "true")
.get("/hello")
.then()
.statusCode(500)
.body(Matchers.is("Expected authentication failure"));
}
@Test
public void testCustomExceptionInIdentityAugmentor() {
RestAssured.given()
.auth().preemptive().basic("gaston", "gaston-password")
.header("fail-augmentation", "true")
.get("/hello")
.then()
.statusCode(500)
.body(Matchers.is("Expected identity augmentation failure"));
}
@Path("/hello")
public static | AbstractCustomExceptionMapperTest |
java | elastic__elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/Authenticator.java | {
"start": 3505,
"end": 4264
} | class ____ a container to encapsulate the current request and other necessary information (mostly configuration related)
* required for authentication.
* It is instantiated for every incoming request and passed around to {@link AuthenticatorChain} and subsequently all
* {@link Authenticator}.
* {@link Authenticator}s are consulted in order (see {@link AuthenticatorChain}),
* where each is given the chance to first extract some token, and then to verify it.
* If token verification fails in some particular way (i.e. {@code AuthenticationResult.Status.CONTINUE}),
* the next {@link Authenticator} is tried.
* The extracted tokens are all appended with {@link #addAuthenticationToken(AuthenticationToken)}.
*/
| is |
java | elastic__elasticsearch | benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockReadBenchmark.java | {
"start": 939,
"end": 12520
} | class ____ extends BlockBenchmark {
static {
if (false == "true".equals(System.getProperty("skipSelfTest"))) {
// Smoke test all the expected values and force loading subclasses more like prod
selfTest();
}
}
static void selfTest() {
// Smoke test all the expected values and force loading subclasses more like prod
int totalPositions = 10;
long[] actualCheckSums = new long[NUM_BLOCKS_PER_ITERATION];
for (String paramString : RELEVANT_TYPE_BLOCK_COMBINATIONS) {
String[] params = paramString.split("/");
String dataType = params[0];
String blockKind = params[1];
BenchmarkBlocks data = buildBenchmarkBlocks(dataType, blockKind, totalPositions);
int[][] traversalOrders = createTraversalOrders(data.blocks(), false);
run(dataType, data, traversalOrders, actualCheckSums);
assertCheckSums(data, actualCheckSums);
}
}
private static int[][] createTraversalOrders(Block[] blocks, boolean randomized) {
int[][] orders = new int[blocks.length][];
for (int i = 0; i < blocks.length; i++) {
IntStream positionsStream = IntStream.range(0, blocks[i].getPositionCount());
if (randomized) {
List<Integer> positions = new ArrayList<>(positionsStream.boxed().toList());
Collections.shuffle(positions, random);
orders[i] = positions.stream().mapToInt(x -> x).toArray();
} else {
orders[i] = positionsStream.toArray();
}
}
return orders;
}
record BenchmarkBlocks(Block[] blocks, long[] checkSums) {};
static BenchmarkBlocks buildBenchmarkBlocks(String dataType, String blockKind, int totalPositions) {
Block[] blocks = BlockBenchmark.buildBlocks(dataType, blockKind, totalPositions);
long[] checkSums = new long[NUM_BLOCKS_PER_ITERATION];
switch (dataType) {
case "boolean" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
BooleanBlock block = (BooleanBlock) blocks[blockIndex];
checkSums[blockIndex] = computeBooleanCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray());
}
}
case "BytesRef" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
BytesRefBlock block = (BytesRefBlock) blocks[blockIndex];
checkSums[blockIndex] = computeBytesRefCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray());
}
}
case "double" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
DoubleBlock block = (DoubleBlock) blocks[blockIndex];
checkSums[blockIndex] = computeDoubleCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray());
}
}
case "int" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
IntBlock block = (IntBlock) blocks[blockIndex];
checkSums[blockIndex] = computeIntCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray());
}
}
case "long" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
LongBlock block = (LongBlock) blocks[blockIndex];
checkSums[blockIndex] = computeLongCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray());
}
}
// TODO float
default -> throw new IllegalStateException("illegal data type [" + dataType + "]");
}
return new BenchmarkBlocks(blocks, checkSums);
}
private static void run(String dataType, BenchmarkBlocks data, int[][] traversalOrders, long[] resultCheckSums) {
switch (dataType) {
case "boolean" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
BooleanBlock block = (BooleanBlock) data.blocks[blockIndex];
resultCheckSums[blockIndex] = computeBooleanCheckSum(block, traversalOrders[blockIndex]);
}
}
case "BytesRef" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
BytesRefBlock block = (BytesRefBlock) data.blocks[blockIndex];
resultCheckSums[blockIndex] = computeBytesRefCheckSum(block, traversalOrders[blockIndex]);
}
}
case "double" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
DoubleBlock block = (DoubleBlock) data.blocks[blockIndex];
resultCheckSums[blockIndex] = computeDoubleCheckSum(block, traversalOrders[blockIndex]);
}
}
case "int" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
IntBlock block = (IntBlock) data.blocks[blockIndex];
resultCheckSums[blockIndex] = computeIntCheckSum(block, traversalOrders[blockIndex]);
}
}
case "long" -> {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
LongBlock block = (LongBlock) data.blocks[blockIndex];
resultCheckSums[blockIndex] = computeLongCheckSum(block, traversalOrders[blockIndex]);
}
}
default -> {
throw new IllegalStateException();
}
}
}
private static void assertCheckSums(BenchmarkBlocks data, long[] actualCheckSums) {
for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) {
if (actualCheckSums[blockIndex] != data.checkSums[blockIndex]) {
throw new AssertionError("checksums do not match for block [" + blockIndex + "]");
}
}
}
private static long computeBooleanCheckSum(BooleanBlock block, int[] traversalOrder) {
long sum = 0;
for (int position : traversalOrder) {
if (block.isNull(position)) {
continue;
}
int start = block.getFirstValueIndex(position);
int end = start + block.getValueCount(position);
for (int i = start; i < end; i++) {
sum += block.getBoolean(i) ? 1 : 0;
}
}
return sum;
}
private static long computeBytesRefCheckSum(BytesRefBlock block, int[] traversalOrder) {
long sum = 0;
BytesRef scratch = new BytesRef();
for (int position : traversalOrder) {
if (block.isNull(position)) {
continue;
}
int start = block.getFirstValueIndex(position);
int end = start + block.getValueCount(position);
for (int i = start; i < end; i++) {
BytesRef v = block.getBytesRef(i, scratch);
sum += v.length > 0 ? v.bytes[v.offset] : 0;
}
}
return sum;
}
private static long computeDoubleCheckSum(DoubleBlock block, int[] traversalOrder) {
long sum = 0;
for (int position : traversalOrder) {
if (block.isNull(position)) {
continue;
}
int start = block.getFirstValueIndex(position);
int end = start + block.getValueCount(position);
for (int i = start; i < end; i++) {
// Use an operation that is not affected by rounding errors. Otherwise, the result may depend on the traversalOrder.
sum += (long) block.getDouble(i);
}
}
return sum;
}
private static long computeIntCheckSum(IntBlock block, int[] traversalOrder) {
int sum = 0;
for (int position : traversalOrder) {
if (block.isNull(position)) {
continue;
}
int start = block.getFirstValueIndex(position);
int end = start + block.getValueCount(position);
for (int i = start; i < end; i++) {
sum += block.getInt(i);
}
}
return sum;
}
private static long computeLongCheckSum(LongBlock block, int[] traversalOrder) {
long sum = 0;
for (int position : traversalOrder) {
if (block.isNull(position)) {
continue;
}
int start = block.getFirstValueIndex(position);
int end = start + block.getValueCount(position);
for (int i = start; i < end; i++) {
sum += block.getLong(i);
}
}
return sum;
}
private static boolean isRandom(String accessType) {
return accessType.equalsIgnoreCase("random");
}
/**
* Must be a subset of {@link BlockBenchmark#RELEVANT_TYPE_BLOCK_COMBINATIONS}
*/
@Param(
{
"boolean/array",
"boolean/array-multivalue-null",
"boolean/big-array",
"boolean/big-array-multivalue-null",
"boolean/vector",
"boolean/vector-big-array",
"boolean/vector-const",
"BytesRef/array",
"BytesRef/array-multivalue-null",
"BytesRef/vector",
"BytesRef/vector-const",
"double/array",
"double/array-multivalue-null",
"double/big-array",
"double/big-array-multivalue-null",
"double/vector",
"double/vector-big-array",
"double/vector-const",
"int/array",
"int/array-multivalue-null",
"int/big-array",
"int/big-array-multivalue-null",
"int/vector",
"int/vector-big-array",
"int/vector-const",
"long/array",
"long/array-multivalue-null",
"long/big-array",
"long/big-array-multivalue-null",
"long/vector",
"long/vector-big-array",
"long/vector-const" }
)
public String dataTypeAndBlockKind;
@Param({ "sequential", "random" })
public String accessType;
private BenchmarkBlocks data;
private int[][] traversalOrders;
private final long[] actualCheckSums = new long[NUM_BLOCKS_PER_ITERATION];
@Setup
public void setup() {
String[] params = dataTypeAndBlockKind.split("/");
String dataType = params[0];
String blockKind = params[1];
data = buildBenchmarkBlocks(dataType, blockKind, BLOCK_TOTAL_POSITIONS);
traversalOrders = createTraversalOrders(data.blocks(), isRandom(accessType));
}
@Benchmark
@OperationsPerInvocation(NUM_BLOCKS_PER_ITERATION * BLOCK_TOTAL_POSITIONS)
public void run() {
String[] params = dataTypeAndBlockKind.split("/");
String dataType = params[0];
run(dataType, data, traversalOrders, actualCheckSums);
}
@TearDown(Level.Iteration)
public void assertCheckSums() {
assertCheckSums(data, actualCheckSums);
}
}
| BlockReadBenchmark |
java | spring-projects__spring-framework | spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/method/annotation/ServletAnnotationControllerHandlerMethodTests.java | {
"start": 107764,
"end": 108736
} | class ____ {
@RequestMapping
public void myHandle(HttpServletRequest request, HttpServletResponse response) throws IOException {
response.getWriter().write("test");
}
@RequestMapping("/myPath2.do")
public void myHandle(@RequestParam("param1") String p1, int param2, HttpServletResponse response,
@RequestHeader("header1") String h1, @CookieValue("cookie1") String c1) throws IOException {
response.getWriter().write("test-" + p1 + "-" + param2 + "-" + h1 + "-" + c1);
}
@RequestMapping("/myPath3")
public void myHandle(TestBean tb, HttpServletResponse response) throws IOException {
response.getWriter().write("test-" + tb.getName() + "-" + tb.getAge());
}
@RequestMapping("/myPath4.*")
public void myHandle(TestBean tb, Errors errors, HttpServletResponse response) throws IOException {
response.getWriter().write("test-" + tb.getName() + "-" + errors.getFieldError("age").getCode());
}
}
@Controller
static | MyAdaptedController2 |
java | google__guava | android/guava-tests/test/com/google/common/util/concurrent/FuturesTest.java | {
"start": 11653,
"end": 12329
} | class ____ implements Function<Object, Object> {
@SuppressWarnings("nullness:initialization.field.uninitialized")
ListenableFuture<Object> output;
@Override
public Object apply(Object input) {
output.cancel(false);
throw new SomeError();
}
}
Transformer transformer = new Transformer();
SettableFuture<Object> input = SettableFuture.create();
ListenableFuture<Object> output = transform(input, transformer, directExecutor());
transformer.output = output;
input.set("foo");
assertTrue(output.isCancelled());
}
public void testTransform_exceptionAfterCancellation() throws Exception {
| Transformer |
java | apache__flink | flink-runtime/src/main/java/org/apache/flink/runtime/execution/librarycache/BlobLibraryCacheManager.java | {
"start": 5119,
"end": 8078
} | class ____ implements ClassLoaderFactory {
/** The resolve order to use when creating a {@link ClassLoader}. */
private final FlinkUserCodeClassLoaders.ResolveOrder classLoaderResolveOrder;
/**
* List of patterns for classes that should always be resolved from the parent ClassLoader,
* if possible.
*/
private final String[] alwaysParentFirstPatterns;
/** Class loading exception handler. */
private final Consumer<Throwable> classLoadingExceptionHandler;
/** Test if classloader is used outside of job. */
private final boolean checkClassLoaderLeak;
private DefaultClassLoaderFactory(
FlinkUserCodeClassLoaders.ResolveOrder classLoaderResolveOrder,
String[] alwaysParentFirstPatterns,
Consumer<Throwable> classLoadingExceptionHandler,
boolean checkClassLoaderLeak) {
this.classLoaderResolveOrder = classLoaderResolveOrder;
this.alwaysParentFirstPatterns = alwaysParentFirstPatterns;
this.classLoadingExceptionHandler = classLoadingExceptionHandler;
this.checkClassLoaderLeak = checkClassLoaderLeak;
}
@Override
public URLClassLoader createClassLoader(URL[] libraryURLs) {
return FlinkUserCodeClassLoaders.create(
classLoaderResolveOrder,
libraryURLs,
FlinkUserCodeClassLoaders.class.getClassLoader(),
alwaysParentFirstPatterns,
classLoadingExceptionHandler,
checkClassLoaderLeak);
}
}
public static ClassLoaderFactory defaultClassLoaderFactory(
FlinkUserCodeClassLoaders.ResolveOrder classLoaderResolveOrder,
String[] alwaysParentFirstPatterns,
@Nullable FatalErrorHandler fatalErrorHandlerJvmMetaspaceOomError,
boolean checkClassLoaderLeak) {
return new DefaultClassLoaderFactory(
classLoaderResolveOrder,
alwaysParentFirstPatterns,
createClassLoadingExceptionHandler(fatalErrorHandlerJvmMetaspaceOomError),
checkClassLoaderLeak);
}
private static Consumer<Throwable> createClassLoadingExceptionHandler(
@Nullable FatalErrorHandler fatalErrorHandlerJvmMetaspaceOomError) {
return fatalErrorHandlerJvmMetaspaceOomError != null
? classLoadingException -> {
if (ExceptionUtils.isMetaspaceOutOfMemoryError(classLoadingException)) {
fatalErrorHandlerJvmMetaspaceOomError.onFatalError(classLoadingException);
}
}
: FlinkUserCodeClassLoader.NOOP_EXCEPTION_HANDLER;
}
// --------------------------------------------------------------------------------------------
private final | DefaultClassLoaderFactory |
java | bumptech__glide | library/src/main/java/com/bumptech/glide/load/engine/cache/SafeKeyGenerator.java | {
"start": 684,
"end": 2047
} | class ____ {
private final LruCache<Key, String> loadIdToSafeHash = new LruCache<>(1000);
private final Pools.Pool<PoolableDigestContainer> digestPool =
FactoryPools.threadSafe(
10,
new FactoryPools.Factory<PoolableDigestContainer>() {
@Override
public PoolableDigestContainer create() {
try {
return new PoolableDigestContainer(MessageDigest.getInstance("SHA-256"));
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
});
public String getSafeKey(Key key) {
String safeKey;
synchronized (loadIdToSafeHash) {
safeKey = loadIdToSafeHash.get(key);
}
if (safeKey == null) {
safeKey = calculateHexStringDigest(key);
}
synchronized (loadIdToSafeHash) {
loadIdToSafeHash.put(key, safeKey);
}
return safeKey;
}
private String calculateHexStringDigest(Key key) {
PoolableDigestContainer container = Preconditions.checkNotNull(digestPool.acquire());
try {
key.updateDiskCacheKey(container.messageDigest);
// calling digest() will automatically reset()
return Util.sha256BytesToHex(container.messageDigest.digest());
} finally {
digestPool.release(container);
}
}
private static final | SafeKeyGenerator |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/NamedLikeContextualKeywordTest.java | {
"start": 6308,
"end": 6597
} | class ____ {
@SuppressWarnings("NamedLikeContextualKeyword")
void yield() {}
}
""")
.doTest();
}
@Test
public void positive() {
helper
.addSourceLines(
"Test.java", //
" | RegrettablyNamedClass |
java | assertj__assertj-core | assertj-core/src/main/java/org/assertj/core/util/introspection/ClassUtils.java | {
"start": 5247,
"end": 6274
} | class ____ check or null.
* @return true the given {@code type} belongs to the java.lang package itself or one of its subpackage, false otherwise.
* @since 3.25.0
*/
public static boolean isInJavaLangPackage(final Class<?> type) {
return type != null && type.getName().startsWith("java.lang");
}
/**
* Returns whether the given objects types have the same name but are located in different packages
*
* @param object1 first object to compare
* @param object2 the object to compare to
* @return true if the given {@code object1} types have the same name as {@code object2} but is
* in a different package
*/
public static boolean haveSameClassNameInDifferentPackages(Object object1, Object object2) {
if (object1 != null && object2 != null) {
Class<?> type1 = object1.getClass();
Class<?> type2 = object2.getClass();
return type1.getSimpleName().equals(type2.getSimpleName()) && !type1.getPackageName().equals(type2.getPackageName());
}
return false;
}
}
| to |
java | redisson__redisson | redisson/src/main/java/org/redisson/api/RClientSideCaching.java | {
"start": 726,
"end": 7727
} | interface ____ extends RDestroyable {
/**
* Returns object holder instance by name.
*
* @param <V> type of value
* @param name name of object
* @return Bucket object
*/
<V> RBucket<V> getBucket(String name);
/**
* Returns object holder instance by name
* using provided codec for object.
*
* @param <V> type of value
* @param name name of object
* @param codec codec for values
* @return Bucket object
*/
<V> RBucket<V> getBucket(String name, Codec codec);
/**
* Returns stream instance by <code>name</code>
* <p>
* Requires <b>Redis 5.0.0 and higher.</b>
*
* @param <K> type of key
* @param <V> type of value
* @param name of stream
* @return RStream object
*/
<K, V> RStream<K, V> getStream(String name);
/**
* Returns stream instance by <code>name</code>
* using provided <code>codec</code> for entries.
* <p>
* Requires <b>Redis 5.0.0 and higher.</b>
*
* @param <K> type of key
* @param <V> type of value
* @param name name of stream
* @param codec codec for entry
* @return RStream object
*/
<K, V> RStream<K, V> getStream(String name, Codec codec);
/**
* Returns set instance by name.
*
* @param <V> type of value
* @param name name of object
* @return Set object
*/
<V> RSet<V> getSet(String name);
/**
* Returns set instance by name
* using provided codec for set objects.
*
* @param <V> type of value
* @param name name of object
* @param codec codec for values
* @return Set object
*/
<V> RSet<V> getSet(String name, Codec codec);
/**
* Returns map instance by name.
* <p>
* <strong>
* NOTE: client side caching feature invalidates whole Map per entry change which is ineffective.
* Use local cached <a href="https://redisson.org/docs/data-and-services/collections/#eviction-local-cache-and-data-partitioning">Map</a>, <a href="https://redisson.org/docs/data-and-services/collections/#local-cache">JSON Store</a> instead.
* </strong>
*
* @param <K> type of key
* @param <V> type of value
* @param name name of object
* @return Map object
*/
<K, V> RMap<K, V> getMap(String name);
/**
* Returns map instance by name
* using provided codec for both map keys and values.
* <p>
* <strong>
* NOTE: client side caching feature invalidates whole Map per entry change which is ineffective.
* Use local cached <a href="https://redisson.org/docs/data-and-services/collections/#eviction-local-cache-and-data-partitioning">Map</a>, <a href="https://redisson.org/docs/data-and-services/collections/#local-cache">JSON Store</a> instead.
* </strong>
*
* @param <K> type of key
* @param <V> type of value
* @param name name of object
* @param codec codec for keys and values
* @return Map object
*/
<K, V> RMap<K, V> getMap(String name, Codec codec);
/**
* Returns Redis Sorted Set instance by name.
* This sorted set sorts objects by object score.
*
* @param <V> type of value
* @param name name of object
* @return ScoredSortedSet object
*/
<V> RScoredSortedSet<V> getScoredSortedSet(String name);
/**
* Returns Redis Sorted Set instance by name
* using provided codec for sorted set objects.
* This sorted set sorts objects by object score.
*
* @param <V> type of value
* @param name name of object
* @param codec codec for values
* @return ScoredSortedSet object
*/
<V> RScoredSortedSet<V> getScoredSortedSet(String name, Codec codec);
/**
* Returns list instance by name.
*
* @param <V> type of value
* @param name name of object
* @return List object
*/
<V> RList<V> getList(String name);
/**
* Returns list instance by name
* using provided codec for list objects.
*
* @param <V> type of value
* @param name name of object
* @param codec codec for values
* @return List object
*/
<V> RList<V> getList(String name, Codec codec);
/**
* Returns unbounded queue instance by name.
*
* @param <V> type of value
* @param name of object
* @return queue object
*/
<V> RQueue<V> getQueue(String name);
/**
* Returns unbounded queue instance by name
* using provided codec for queue objects.
*
* @param <V> type of value
* @param name name of object
* @param codec codec for message
* @return Queue object
*/
<V> RQueue<V> getQueue(String name, Codec codec);
/**
* Returns unbounded deque instance by name.
*
* @param <V> type of value
* @param name name of object
* @return Deque object
*/
<V> RDeque<V> getDeque(String name);
/**
* Returns unbounded deque instance by name
* using provided codec for deque objects.
*
* @param <V> type of value
* @param name name of object
* @param codec codec for values
* @return Deque object
*/
<V> RDeque<V> getDeque(String name, Codec codec);
/**
* Returns unbounded blocking queue instance by name.
*
* @param <V> type of value
* @param name name of object
* @return BlockingQueue object
*/
<V> RBlockingQueue<V> getBlockingQueue(String name);
/**
* Returns unbounded blocking queue instance by name
* using provided codec for queue objects.
*
* @param <V> type of value
* @param name name of queue
* @param codec queue objects codec
* @return BlockingQueue object
*/
<V> RBlockingQueue<V> getBlockingQueue(String name, Codec codec);
/**
* Returns unbounded blocking deque instance by name.
*
* @param <V> type of value
* @param name name of object
* @return BlockingDeque object
*/
<V> RBlockingDeque<V> getBlockingDeque(String name);
/**
* Returns unbounded blocking deque instance by name
* using provided codec for deque objects.
*
* @param <V> type of value
* @param name name of object
* @param codec deque objects codec
* @return BlockingDeque object
*/
<V> RBlockingDeque<V> getBlockingDeque(String name, Codec codec);
/**
* Returns geospatial items holder instance by <code>name</code>.
*
* @param <V> type of value
* @param name name of object
* @return Geo object
*/
<V> RGeo<V> getGeo(String name);
/**
* Returns geospatial items holder instance by <code>name</code>
* using provided codec for geospatial members.
*
* @param <V> type of value
* @param name name of object
* @param codec codec for value
* @return Geo object
*/
<V> RGeo<V> getGeo(String name, Codec codec);
}
| RClientSideCaching |
java | apache__camel | test-infra/camel-test-infra-ollama/src/main/java/org/apache/camel/test/infra/ollama/services/OllamaLocalHostInfraService.java | {
"start": 1525,
"end": 3390
} | class ____ implements OllamaServiceConfiguration {
@Override
public String modelName() {
return LocalPropertyResolver.getProperty(OllamaLocalContainerInfraService.class, OllamaProperties.MODEL);
}
@Override
public String apiKey() {
return LocalPropertyResolver.getProperty(OllamaLocalContainerInfraService.class, OllamaProperties.API_KEY);
}
}
private final OllamaServiceConfiguration configuration;
private final String hostUrl;
public OllamaLocalHostInfraService() {
this(new DefaultServiceConfiguration());
}
public OllamaLocalHostInfraService(OllamaServiceConfiguration serviceConfiguration) {
this.configuration = serviceConfiguration;
this.hostUrl = System.getProperty(OllamaProperties.OLLAMA_HOST_URL, DEFAULT_OLLAMA_HOST_URL);
}
@Override
public void registerProperties() {
System.setProperty(OllamaProperties.ENDPOINT, hostUrl);
LOG.info("Registered Ollama endpoint property: {}", hostUrl);
}
@Override
public void initialize() {
LOG.info("Using local Ollama instance at {}", hostUrl);
registerProperties();
}
@Override
public void shutdown() {
// NO-OP - we don't manage the lifecycle of the local Ollama instance
}
@Override
public String getEndpoint() {
return baseUrl();
}
@Override
public String getModel() {
return modelName();
}
@Override
public String modelName() {
return configuration.modelName();
}
@Override
public String baseUrl() {
return hostUrl;
}
@Override
public String baseUrlV1() {
return hostUrl + "/v1";
}
@Override
public String apiKey() {
return configuration.apiKey();
}
}
| DefaultServiceConfiguration |
java | apache__flink | flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/TypeInferenceExtractorTest.java | {
"start": 127558,
"end": 127857
} | class ____
extends ProcessTableFunction<Integer> {
public void eval(
@ArgumentHint(ArgumentTrait.ROW_SEMANTIC_TABLE) Row r,
@DataTypeHint(inputGroup = InputGroup.ANY) Object o) {}
}
private static | MixingStaticAndInputGroupProcessTableFunction |
java | elastic__elasticsearch | x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java | {
"start": 2220,
"end": 5098
} | class ____ extends BaseNodeResponse implements Writeable {
private final ByteSizeValue jvmInferenceMax;
private final ByteSizeValue jvmInference;
public CacheInfo(DiscoveryNode node, ByteSizeValue jvmInferenceMax, ByteSizeValue jvmInference) {
super(node);
this.jvmInferenceMax = Objects.requireNonNull(jvmInferenceMax);
this.jvmInference = Objects.requireNonNull(jvmInference);
}
public CacheInfo(StreamInput in) throws IOException {
super(in);
jvmInferenceMax = ByteSizeValue.readFrom(in);
jvmInference = ByteSizeValue.readFrom(in);
}
public ByteSizeValue getJvmInferenceMax() {
return jvmInferenceMax;
}
public ByteSizeValue getJvmInference() {
return jvmInference;
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
jvmInferenceMax.writeTo(out);
jvmInference.writeTo(out);
}
@Override
public int hashCode() {
return Objects.hash(getNode(), jvmInferenceMax, jvmInference);
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CacheInfo cacheInfo = (CacheInfo) o;
return Objects.equals(getNode(), cacheInfo.getNode())
&& Objects.equals(jvmInferenceMax, cacheInfo.jvmInferenceMax)
&& Objects.equals(jvmInference, cacheInfo.jvmInference);
}
}
public Response(StreamInput in) throws IOException {
super(in);
}
public Response(ClusterName clusterName, List<CacheInfo> nodes, List<FailedNodeException> failures) {
super(clusterName, nodes, failures);
}
@Override
protected List<CacheInfo> readNodesFrom(StreamInput in) throws IOException {
return in.readCollectionAsList(CacheInfo::new);
}
@Override
protected void writeNodesTo(StreamOutput out, List<CacheInfo> nodes) throws IOException {
out.writeCollection(nodes);
}
@Override
public int hashCode() {
return Objects.hash(getNodes());
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
Response other = (Response) obj;
return Objects.equals(getNodes(), other.getNodes());
}
}
}
| CacheInfo |
java | apache__flink | flink-streaming-java/src/test/java/org/apache/flink/streaming/runtime/operators/windowing/WindowOperatorTest.java | {
"start": 5905,
"end": 147246
} | interface ____<IN, OUT> {
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
create();
}
private static final TypeInformation<Tuple2<String, Integer>> STRING_INT_TUPLE =
TypeInformation.of(new TypeHint<Tuple2<String, Integer>>() {});
// For counting if close() is called the correct number of times on the SumReducer
private static AtomicInteger closeCalled = new AtomicInteger(0);
// late arriving event OutputTag<StreamRecord<IN>>
private static final OutputTag<Tuple2<String, Integer>> lateOutputTag =
new OutputTag<Tuple2<String, Integer>>("late-output") {};
private void testSlidingEventTimeWindows(
HarnessProvider<Tuple2<String, Integer>, Tuple2<String, Integer>> harnessProvider)
throws Exception {
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
testHarness = harnessProvider.create();
testHarness.setup();
testHarness.open();
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 999));
expectedOutput.add(new Watermark(999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(1999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 1999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 1999));
expectedOutput.add(new Watermark(1999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(2999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 2999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
expectedOutput.add(new Watermark(2999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
// do a snapshot, close and restore again
OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
testHarness.close();
expectedOutput.clear();
testHarness = harnessProvider.create();
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
testHarness.processWatermark(new Watermark(3999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 5), 3999));
expectedOutput.add(new Watermark(3999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(4999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 4999));
expectedOutput.add(new Watermark(4999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(5999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 5999));
expectedOutput.add(new Watermark(5999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
// those don't have any effect...
testHarness.processWatermark(new Watermark(6999));
testHarness.processWatermark(new Watermark(7999));
expectedOutput.add(new Watermark(6999));
expectedOutput.add(new Watermark(7999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.close();
}
@SuppressWarnings("unchecked")
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testSlidingEventTimeWindowsReduce(boolean enableAsyncState) throws Exception {
closeCalled.set(0);
final int windowSize = 3;
final int windowSlide = 1;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
new ReducingStateDescriptor<>(
"window-contents",
new SumReducer(),
STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
WindowOperatorFactory<
String,
Tuple2<String, Integer>,
Tuple2<String, Integer>,
Tuple2<String, Integer>,
TimeWindow>
operator =
new WindowOperatorFactory<>(
SlidingEventTimeWindows.of(
Duration.ofSeconds(windowSize),
Duration.ofSeconds(windowSlide)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new SerializerConfigImpl()),
stateDesc,
new InternalSingleValueWindowFunction<>(
new PassThroughWindowFunction<
String, TimeWindow, Tuple2<String, Integer>>()),
EventTimeTrigger.create(),
0,
null /* late data output tag */);
WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> builder =
new WindowOperatorBuilder<>(
SlidingEventTimeWindows.of(
Duration.ofSeconds(windowSize), Duration.ofSeconds(windowSlide)),
EventTimeTrigger.create(),
new ExecutionConfig(),
STRING_INT_TUPLE,
new TupleKeySelector(),
TypeInformation.of(String.class));
testSlidingEventTimeWindows(
enableAsyncState
? () ->
createAsyncTestHarness(
builder.asyncReduce(
new SumReducer(),
new PassThroughWindowFunction<>()))
: () -> createTestHarness(operator));
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testSlidingEventTimeWindowsApply(boolean enableAsyncState) throws Exception {
closeCalled.set(0);
final int windowSize = 3;
final int windowSlide = 1;
ListStateDescriptor<Tuple2<String, Integer>> stateDesc =
new ListStateDescriptor<>(
"window-contents",
STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
WindowOperatorFactory<
String,
Tuple2<String, Integer>,
Iterable<Tuple2<String, Integer>>,
Tuple2<String, Integer>,
TimeWindow>
operator =
new WindowOperatorFactory<>(
SlidingEventTimeWindows.of(
Duration.ofSeconds(windowSize),
Duration.ofSeconds(windowSlide)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new SerializerConfigImpl()),
stateDesc,
new InternalIterableWindowFunction<>(
new RichSumReducer<TimeWindow>()),
EventTimeTrigger.create(),
0,
null /* late data output tag */);
WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> builder =
new WindowOperatorBuilder<>(
SlidingEventTimeWindows.of(
Duration.ofSeconds(windowSize), Duration.ofSeconds(windowSlide)),
EventTimeTrigger.create(),
new ExecutionConfig(),
STRING_INT_TUPLE,
new TupleKeySelector(),
TypeInformation.of(String.class));
testSlidingEventTimeWindows(
enableAsyncState
? () -> createAsyncTestHarness(builder.asyncApply(new RichSumReducer<>()))
: () -> createTestHarness(operator));
// we close once in the rest...
assertThat(closeCalled).as("Close was not called.").hasValue(2);
}
private void testTumblingEventTimeWindows(
HarnessProvider<Tuple2<String, Integer>, Tuple2<String, Integer>> harnessProvider)
throws Exception {
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
testHarness = harnessProvider.create();
ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
testHarness.open();
// add elements out-of-order
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
testHarness.processWatermark(new Watermark(999));
expectedOutput.add(new Watermark(999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(1999));
expectedOutput.add(new Watermark(1999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
// do a snapshot, close and restore again
OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.close();
testHarness = harnessProvider.create();
expectedOutput.clear();
testHarness.setup();
testHarness.initializeState(snapshot);
testHarness.open();
testHarness.processWatermark(new Watermark(2999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 2999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
expectedOutput.add(new Watermark(2999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(3999));
expectedOutput.add(new Watermark(3999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(4999));
expectedOutput.add(new Watermark(4999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.processWatermark(new Watermark(5999));
expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 5999));
expectedOutput.add(new Watermark(5999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
// those don't have any effect...
testHarness.processWatermark(new Watermark(6999));
testHarness.processWatermark(new Watermark(7999));
expectedOutput.add(new Watermark(6999));
expectedOutput.add(new Watermark(7999));
TestHarnessUtil.assertOutputEqualsSorted(
"Output was not correct.",
expectedOutput,
testHarness.getOutput(),
new Tuple2ResultSortComparator());
testHarness.close();
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
@SuppressWarnings("unchecked")
void testTumblingEventTimeWindowsReduce(boolean enableAsyncState) throws Exception {
closeCalled.set(0);
final int windowSize = 3;
ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
new ReducingStateDescriptor<>(
"window-contents",
new SumReducer(),
STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
WindowOperatorFactory<
String,
Tuple2<String, Integer>,
Tuple2<String, Integer>,
Tuple2<String, Integer>,
TimeWindow>
operator =
new WindowOperatorFactory<>(
TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
new TimeWindow.Serializer(),
new TupleKeySelector(),
BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
new SerializerConfigImpl()),
stateDesc,
new InternalSingleValueWindowFunction<>(
new PassThroughWindowFunction<
String, TimeWindow, Tuple2<String, Integer>>()),
EventTimeTrigger.create(),
0,
null /* late data output tag */);
WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> builder =
new WindowOperatorBuilder<>(
TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
EventTimeTrigger.create(),
new ExecutionConfig(),
STRING_INT_TUPLE,
new TupleKeySelector(),
TypeInformation.of(String.class));
testTumblingEventTimeWindows(
enableAsyncState
? () ->
createAsyncTestHarness(
builder.asyncReduce(
new SumReducer(),
new PassThroughWindowFunction<>()))
: () -> createTestHarness(operator));
}
    /**
     * Tumbling event-time windows with a full (non-incremental) window function: window contents
     * are buffered in list state and aggregated at fire time by {@link RichSumReducer}. Also
     * verifies that the rich function's {@code close()} is invoked the expected number of times.
     */
    @ParameterizedTest(name = "Enable async state = {0}")
    @ValueSource(booleans = {false, true})
    @SuppressWarnings("unchecked")
    void testTumblingEventTimeWindowsApply(boolean enableAsyncState) throws Exception {
        closeCalled.set(0);
        final int windowSize = 3;
        // List state: elements are buffered per window and only folded when the window fires.
        ListStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ListStateDescriptor<>(
                        "window-contents",
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Iterable<Tuple2<String, Integer>>,
                        Tuple2<String, Integer>,
                        TimeWindow>
                operator =
                        new WindowOperatorFactory<>(
                                TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
                                new TimeWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalIterableWindowFunction<>(
                                        new RichSumReducer<TimeWindow>()),
                                EventTimeTrigger.create(),
                                0,
                                null /* late data output tag */);
        WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> builder =
                new WindowOperatorBuilder<>(
                        TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
                        EventTimeTrigger.create(),
                        new ExecutionConfig(),
                        STRING_INT_TUPLE,
                        new TupleKeySelector(),
                        TypeInformation.of(String.class));
        testTumblingEventTimeWindows(
                enableAsyncState
                        ? () -> createAsyncTestHarness(builder.asyncApply(new RichSumReducer<>()))
                        : () -> createTestHarness(operator));
        // NOTE(review): the shared scenario appears to open and close the operator twice
        // (once before and once after the snapshot/restore), hence close() runs twice —
        // confirm against testTumblingEventTimeWindows.
        assertThat(closeCalled).as("Close was not called.").hasValue(2);
    }
    /**
     * Event-time session windows (3s gap) with a full {@code WindowFunction}: feeds elements
     * out of order, snapshots mid-stream, restores into a fresh harness, and checks that the
     * merged sessions fire with the expected (key-sum, window-start, window-end) tuples.
     */
    @Test
    @SuppressWarnings("unchecked")
    void testSessionWindows() throws Exception {
        closeCalled.set(0);
        final int sessionSize = 3;
        ListStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ListStateDescriptor<>(
                        "window-contents",
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Iterable<Tuple2<String, Integer>>,
                        Tuple3<String, Long, Long>,
                        TimeWindow>
                operator =
                        new WindowOperatorFactory<>(
                                EventTimeSessionWindows.withGap(Duration.ofSeconds(sessionSize)),
                                new TimeWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalIterableWindowFunction<>(new SessionWindowFunction()),
                                EventTimeTrigger.create(),
                                0,
                                null /* late data output tag */);
        OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
                testHarness = createTestHarness(operator);
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        testHarness.open();
        // add elements out-of-order
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
        // do a snapshot, close and restore again
        OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
        // no watermark yet, so nothing must have fired before the snapshot
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        testHarness.close();
        testHarness = createTestHarness(operator);
        testHarness.setup();
        testHarness.initializeState(snapshot);
        testHarness.open();
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
        // deliberate duplicate record: both copies count towards the session sum
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));
        // watermark past all session ends -> the pre-snapshot sessions fire
        testHarness.processWatermark(new Watermark(12000));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
        expectedOutput.add(new Watermark(12000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));
        testHarness.processWatermark(new Watermark(17999));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
        expectedOutput.add(new Watermark(17999));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        testHarness.close();
    }
    /**
     * Same scenario as {@code testSessionWindows}, but wiring the session logic through a
     * {@code ProcessWindowFunction} ({@link SessionProcessWindowFunction}) instead of a plain
     * {@code WindowFunction}; expected output must be identical.
     */
    @Test
    @SuppressWarnings("unchecked")
    void testSessionWindowsWithProcessFunction() throws Exception {
        closeCalled.set(0);
        final int sessionSize = 3;
        ListStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ListStateDescriptor<>(
                        "window-contents",
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Iterable<Tuple2<String, Integer>>,
                        Tuple3<String, Long, Long>,
                        TimeWindow>
                operator =
                        new WindowOperatorFactory<>(
                                EventTimeSessionWindows.withGap(Duration.ofSeconds(sessionSize)),
                                new TimeWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalIterableProcessWindowFunction<>(
                                        new SessionProcessWindowFunction()),
                                EventTimeTrigger.create(),
                                0,
                                null /* late data output tag */);
        OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
                testHarness = createTestHarness(operator);
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        testHarness.open();
        // add elements out-of-order
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
        // do a snapshot, close and restore again
        OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
        // no watermark yet, so nothing must have fired before the snapshot
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        testHarness.close();
        testHarness = createTestHarness(operator);
        testHarness.setup();
        testHarness.initializeState(snapshot);
        testHarness.open();
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
        // deliberate duplicate record: both copies count towards the session sum
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));
        testHarness.processWatermark(new Watermark(12000));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
        expectedOutput.add(new Watermark(12000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));
        testHarness.processWatermark(new Watermark(17999));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
        expectedOutput.add(new Watermark(17999));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        testHarness.close();
    }
    /**
     * Event-time session windows with incremental aggregation: contents are reduced eagerly via
     * {@link SumReducer} in reducing state, and {@link ReducedSessionWindowFunction} emits the
     * single pre-aggregated value per session. Includes a snapshot/restore in the middle.
     */
    @Test
    @SuppressWarnings("unchecked")
    void testReduceSessionWindows() throws Exception {
        closeCalled.set(0);
        final int sessionSize = 3;
        ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ReducingStateDescriptor<>(
                        "window-contents",
                        new SumReducer(),
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Tuple2<String, Integer>,
                        Tuple3<String, Long, Long>,
                        TimeWindow>
                operator =
                        new WindowOperatorFactory<>(
                                EventTimeSessionWindows.withGap(Duration.ofSeconds(sessionSize)),
                                new TimeWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalSingleValueWindowFunction<>(
                                        new ReducedSessionWindowFunction()),
                                EventTimeTrigger.create(),
                                0,
                                null /* late data output tag */);
        OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
                testHarness = createTestHarness(operator);
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        testHarness.open();
        // add elements out-of-order
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
        // do a snapshot, close and restore again
        OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
        testHarness.close();
        testHarness = createTestHarness(operator);
        testHarness.setup();
        testHarness.initializeState(snapshot);
        testHarness.open();
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
        // deliberate duplicate record: both copies count towards the session sum
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));
        // watermark past all session ends -> sessions (including restored state) fire
        testHarness.processWatermark(new Watermark(12000));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
        expectedOutput.add(new Watermark(12000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));
        testHarness.processWatermark(new Watermark(17999));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
        expectedOutput.add(new Watermark(17999));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        testHarness.close();
    }
    /**
     * Same scenario as {@code testReduceSessionWindows}, but the pre-aggregated session value is
     * emitted through a {@code ProcessWindowFunction}
     * ({@link ReducedProcessSessionWindowFunction}); expected output must be identical.
     */
    @Test
    @SuppressWarnings("unchecked")
    void testReduceSessionWindowsWithProcessFunction() throws Exception {
        closeCalled.set(0);
        final int sessionSize = 3;
        ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ReducingStateDescriptor<>(
                        "window-contents",
                        new SumReducer(),
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Tuple2<String, Integer>,
                        Tuple3<String, Long, Long>,
                        TimeWindow>
                operator =
                        new WindowOperatorFactory<>(
                                EventTimeSessionWindows.withGap(Duration.ofSeconds(sessionSize)),
                                new TimeWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalSingleValueProcessWindowFunction<>(
                                        new ReducedProcessSessionWindowFunction()),
                                EventTimeTrigger.create(),
                                0,
                                null /* late data output tag */);
        OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
                testHarness = createTestHarness(operator);
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        testHarness.open();
        // add elements out-of-order
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
        // do a snapshot, close and restore again
        OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
        testHarness.close();
        testHarness = createTestHarness(operator);
        testHarness.setup();
        testHarness.initializeState(snapshot);
        testHarness.open();
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5501));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
        // deliberate duplicate record: both copies count towards the session sum
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 6), 6050));
        testHarness.processWatermark(new Watermark(12000));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-6", 10L, 5500L), 5499));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-20", 5501L, 9050L), 9049));
        expectedOutput.add(new Watermark(12000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 15000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 20), 15000));
        testHarness.processWatermark(new Watermark(17999));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-30", 15000L, 18000L), 17999));
        expectedOutput.add(new Watermark(17999));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        testHarness.close();
    }
    /**
     * Tests that session-window merging works correctly with a count-based trigger: sessions for
     * the same key that individually stay below the count threshold must fire once a bridging
     * element merges them and the combined count reaches the threshold (here: 4, with a purging
     * trigger so fired windows are cleared).
     */
    @Test
    void testSessionWindowsWithCountTrigger() throws Exception {
        closeCalled.set(0);
        final int sessionSize = 3;
        ListStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ListStateDescriptor<>(
                        "window-contents",
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Iterable<Tuple2<String, Integer>>,
                        Tuple3<String, Long, Long>,
                        TimeWindow>
                operator =
                        new WindowOperatorFactory<>(
                                EventTimeSessionWindows.withGap(Duration.ofSeconds(sessionSize)),
                                new TimeWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalIterableWindowFunction<>(new SessionWindowFunction()),
                                PurgingTrigger.of(CountTrigger.of(4)),
                                0,
                                null /* late data output tag */);
        OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
                testHarness = createTestHarness(operator);
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        testHarness.open();
        // add elements out-of-order
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 3500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
        // do a snapshot, close and restore again
        OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
        testHarness.close();
        // key2 reached the count of 4 before the snapshot, so its session already fired
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-10", 0L, 6500L), 6499));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        expectedOutput.clear();
        testHarness = createTestHarness(operator);
        testHarness.setup();
        testHarness.initializeState(snapshot);
        testHarness.open();
        // two separate key1 sessions, each below the count threshold -> nothing fires yet
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 6000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 6500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 7000));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        // add an element that merges the two "key1" sessions, they should now have count 6, and
        // therefore fire
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 10), 4500));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-22", 10L, 10000L), 9999L));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        testHarness.close();
    }
    /**
     * Tests that session-window merging works correctly with {@code ContinuousEventTimeTrigger}:
     * the trigger fires sessions repeatedly at 2-second event-time intervals, so the same
     * (growing) session is emitted multiple times with updated sums and bounds, across a
     * snapshot/restore boundary.
     */
    @Test
    void testSessionWindowsWithContinuousEventTimeTrigger() throws Exception {
        closeCalled.set(0);
        final int sessionSize = 3;
        ListStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ListStateDescriptor<>(
                        "window-contents",
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Iterable<Tuple2<String, Integer>>,
                        Tuple3<String, Long, Long>,
                        TimeWindow>
                operator =
                        new WindowOperatorFactory<>(
                                EventTimeSessionWindows.withGap(Duration.ofSeconds(sessionSize)),
                                new TimeWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalIterableWindowFunction<>(new SessionWindowFunction()),
                                ContinuousEventTimeTrigger.of(Duration.ofSeconds(2)),
                                0,
                                null /* late data output tag */);
        OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
                testHarness = createTestHarness(operator);
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        testHarness.open();
        // add elements out-of-order and first trigger time is 2000
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 1500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 3), 2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 1000));
        // triggers emit and next trigger time is 4000
        testHarness.processWatermark(new Watermark(2500));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-1", 1500L, 4500L), 4499));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-6", 0L, 5500L), 5499));
        expectedOutput.add(new Watermark(2500));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 4000));
        // watermark below the next trigger time (4000) -> no further firing
        testHarness.processWatermark(new Watermark(3000));
        expectedOutput.add(new Watermark(3000));
        // do a snapshot, close and restore again
        OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        testHarness.close();
        expectedOutput.clear();
        testHarness = createTestHarness(operator);
        testHarness.setup();
        testHarness.initializeState(snapshot);
        testHarness.open();
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 4000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 3500));
        // triggers emit and next trigger time is 6000
        testHarness.processWatermark(new Watermark(4000));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-3", 1500L, 7000L), 6999));
        expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-15", 0L, 7000L), 6999));
        expectedOutput.add(new Watermark(4000));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple3ResultSortComparator());
        testHarness.close();
    }
    /**
     * This tests a custom Session window assigner that assigns some elements to "point windows",
     * windows that have the same timestamp for start and end.
     *
     * <p>In this test, elements that have 33 as the second tuple field will be put into a point
     * window. Uses try-with-resources so each harness is closed even if an assertion fails.
     */
    @Test
    @SuppressWarnings("unchecked")
    void testPointSessions() throws Exception {
        closeCalled.set(0);
        ListStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ListStateDescriptor<>(
                        "window-contents",
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Iterable<Tuple2<String, Integer>>,
                        Tuple3<String, Long, Long>,
                        TimeWindow>
                operator =
                        new WindowOperatorFactory<>(
                                new PointSessionWindows(3000),
                                new TimeWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalIterableWindowFunction<>(new SessionWindowFunction()),
                                EventTimeTrigger.create(),
                                0,
                                null /* late data output tag */);
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        OperatorSubtaskState snapshot;
        try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
                testHarness = createTestHarness(operator)) {
            testHarness.open();
            // add elements out-of-order; value 33 is routed to a point window by the assigner
            testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 0));
            testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 33), 1000));
            // do a snapshot, close and restore again
            snapshot = testHarness.snapshot(0L, 0L);
        }
        try (OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
                testHarness = createTestHarness(operator)) {
            testHarness.setup();
            testHarness.initializeState(snapshot);
            testHarness.open();
            testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 33), 2500));
            testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10));
            testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 2), 1000));
            testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 33), 2500));
            // watermark past all windows -> merged sessions (including point windows) fire
            testHarness.processWatermark(new Watermark(12000));
            expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-36", 10L, 4000L), 3999));
            expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-67", 0L, 3000L), 2999));
            expectedOutput.add(new Watermark(12000));
            TestHarnessUtil.assertOutputEqualsSorted(
                    "Output was not correct.",
                    expectedOutput,
                    testHarness.getOutput(),
                    new Tuple3ResultSortComparator());
        }
    }
private static <OUT>
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, OUT> createTestHarness(
OneInputStreamOperatorFactory<Tuple2<String, Integer>, OUT> operator) {
try {
return new KeyedOneInputStreamOperatorTestHarness<>(
operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
private static <OUT>
OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, OUT> createAsyncTestHarness(
OneInputStreamOperator<Tuple2<String, Integer>, OUT> operator) {
try {
return AsyncKeyedOneInputStreamOperatorTestHarness.create(
operator, new TupleKeySelector(), BasicTypeInfo.STRING_TYPE_INFO);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
    /**
     * Global windows with {@code ContinuousEventTimeTrigger} (3s interval): verifies that sums
     * per key are emitted exactly at each trigger-time watermark and that watermarks beyond the
     * data cause no further element output. Runs against either the sync operator or the
     * async-state builder variant.
     */
    @ParameterizedTest(name = "Enable async state = {0}")
    @ValueSource(booleans = {false, true})
    @SuppressWarnings("unchecked")
    void testContinuousWatermarkTrigger(boolean enableAsyncState) throws Exception {
        closeCalled.set(0);
        final int windowSize = 3;
        ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ReducingStateDescriptor<>(
                        "window-contents",
                        new SumReducer(),
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Tuple2<String, Integer>,
                        Tuple2<String, Integer>,
                        GlobalWindow>
                operator =
                        new WindowOperatorFactory<>(
                                GlobalWindows.create(),
                                new GlobalWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalSingleValueWindowFunction<>(
                                        new PassThroughWindowFunction<
                                                String, GlobalWindow, Tuple2<String, Integer>>()),
                                ContinuousEventTimeTrigger.of(Duration.ofSeconds(windowSize)),
                                0,
                                null /* late data output tag */);
        // Async variant must use the dedicated async trigger; the builder requires it to be set
        // explicitly (the trigger slot in the constructor is null on purpose).
        WindowOperatorBuilder<Tuple2<String, Integer>, String, GlobalWindow> builder =
                new WindowOperatorBuilder<>(
                        GlobalWindows.create(),
                        null, /*Required async trigger*/
                        new ExecutionConfig(),
                        STRING_INT_TUPLE,
                        new TupleKeySelector(),
                        TypeInformation.of(String.class))
                        .asyncTrigger(
                                AsyncContinuousEventTimeTrigger.of(Duration.ofSeconds(windowSize)));
        OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
                testHarness =
                        enableAsyncState
                                ? createAsyncTestHarness(
                                        builder.asyncReduce(
                                                new SumReducer(),
                                                new PassThroughWindowFunction<>()))
                                : createTestHarness(operator);
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        testHarness.open();
        // The global window actually ignores these timestamps...
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
        // add elements out-of-order
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
        // watermarks below the first trigger time only pass through
        testHarness.processWatermark(new Watermark(1000));
        expectedOutput.add(new Watermark(1000));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple2ResultSortComparator());
        testHarness.processWatermark(new Watermark(2000));
        expectedOutput.add(new Watermark(2000));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple2ResultSortComparator());
        // first trigger time (3000): key1 fires with its running sum; GlobalWindow results
        // carry Long.MAX_VALUE as the record timestamp
        testHarness.processWatermark(new Watermark(3000));
        expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), Long.MAX_VALUE));
        expectedOutput.add(new Watermark(3000));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple2ResultSortComparator());
        testHarness.processWatermark(new Watermark(4000));
        expectedOutput.add(new Watermark(4000));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple2ResultSortComparator());
        testHarness.processWatermark(new Watermark(5000));
        expectedOutput.add(new Watermark(5000));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple2ResultSortComparator());
        // second trigger time (6000): both keys fire with their full sums
        testHarness.processWatermark(new Watermark(6000));
        expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), Long.MAX_VALUE));
        expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 5), Long.MAX_VALUE));
        expectedOutput.add(new Watermark(6000));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple2ResultSortComparator());
        // those don't have any effect...
        testHarness.processWatermark(new Watermark(7000));
        testHarness.processWatermark(new Watermark(8000));
        expectedOutput.add(new Watermark(7000));
        expectedOutput.add(new Watermark(8000));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                testHarness.getOutput(),
                new Tuple2ResultSortComparator());
        testHarness.close();
    }
    /**
     * Global windows with a purging count trigger (fires every {@code windowSize} = 4 elements):
     * verifies that per-key counts survive a snapshot/restore, i.e. elements buffered before the
     * snapshot combine with post-restore elements to reach the count. Output emitted before the
     * first close is captured and concatenated with the restored harness's output for
     * comparison.
     */
    @ParameterizedTest(name = "Enable async state = {0}")
    @ValueSource(booleans = {false, true})
    @SuppressWarnings("unchecked")
    void testCountTrigger(boolean enableAsyncState) throws Exception {
        closeCalled.set(0);
        final int windowSize = 4;
        ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
                new ReducingStateDescriptor<>(
                        "window-contents",
                        new SumReducer(),
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        WindowOperatorFactory<
                        String,
                        Tuple2<String, Integer>,
                        Tuple2<String, Integer>,
                        Tuple2<String, Integer>,
                        GlobalWindow>
                operator =
                        new WindowOperatorFactory<>(
                                GlobalWindows.create(),
                                new GlobalWindow.Serializer(),
                                new TupleKeySelector(),
                                BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                        new SerializerConfigImpl()),
                                stateDesc,
                                new InternalSingleValueWindowFunction<>(
                                        new PassThroughWindowFunction<
                                                String, GlobalWindow, Tuple2<String, Integer>>()),
                                PurgingTrigger.of(CountTrigger.of(windowSize)),
                                0,
                                null /* late data output tag */);
        // Async variant: the builder's trigger slot is deliberately null and the async
        // count/purging trigger is supplied via asyncTrigger().
        WindowOperatorBuilder<Tuple2<String, Integer>, String, GlobalWindow> builder =
                new WindowOperatorBuilder<>(
                        GlobalWindows.create(),
                        null, /*Required async trigger*/
                        new ExecutionConfig(),
                        STRING_INT_TUPLE,
                        new TupleKeySelector(),
                        TypeInformation.of(String.class))
                        .asyncTrigger(AsyncPurgingTrigger.of(AsyncCountTrigger.of(windowSize)));
        OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
                testHarness =
                        enableAsyncState
                                ? createAsyncTestHarness(
                                        builder.asyncReduce(
                                                new SumReducer(),
                                                new PassThroughWindowFunction<>()))
                                : createTestHarness(operator);
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
        testHarness.open();
        // The global window actually ignores these timestamps...
        // add elements out-of-order; 3 elements per key -> below the count of 4, nothing fires
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
        // do a snapshot, close and restore again
        OperatorSubtaskState snapshot = testHarness.snapshot(0L, 0L);
        testHarness.close();
        // keep whatever the first harness emitted; it is concatenated with the restored
        // harness's output in the assertions below
        ConcurrentLinkedQueue<Object> outputBeforeClose = testHarness.getOutput();
        // rebuild descriptor and operator from scratch (always the sync variant) and restore
        stateDesc =
                new ReducingStateDescriptor<>(
                        "window-contents",
                        new SumReducer(),
                        STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
        operator =
                new WindowOperatorFactory<>(
                        GlobalWindows.create(),
                        new GlobalWindow.Serializer(),
                        new TupleKeySelector(),
                        BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new SerializerConfigImpl()),
                        stateDesc,
                        new InternalSingleValueWindowFunction<>(
                                new PassThroughWindowFunction<
                                        String, GlobalWindow, Tuple2<String, Integer>>()),
                        PurgingTrigger.of(CountTrigger.of(windowSize)),
                        0,
                        null /* late data output tag */);
        testHarness = createTestHarness(operator);
        testHarness.setup();
        testHarness.initializeState(snapshot);
        testHarness.open();
        // 4th key2 element: count restored from snapshot reaches 4 -> key2 fires
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
        expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                Iterables.concat(outputBeforeClose, testHarness.getOutput()),
                new Tuple2ResultSortComparator());
        // next rounds: key1 reaches 4, and key2 reaches 4 again after the purge
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 10999));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
        testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
        expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 4), Long.MAX_VALUE));
        expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 4), Long.MAX_VALUE));
        TestHarnessUtil.assertOutputEqualsSorted(
                "Output was not correct.",
                expectedOutput,
                Iterables.concat(outputBeforeClose, testHarness.getOutput()),
                new Tuple2ResultSortComparator());
        testHarness.close();
    }
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testEndOfStreamTrigger(boolean enableAsyncState) throws Exception {
    // A GlobalWindow with the end-of-stream trigger must buffer all input and
    // fire exactly once per key, only when Watermark.MAX_WATERMARK arrives.
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(
                            new ExecutionConfig().getSerializerConfig()));
    // Synchronous-state operator under test.
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    GlobalWindow>
            operator =
                    new WindowOperatorFactory<>(
                            GlobalWindows.createWithEndOfStreamTrigger(),
                            new GlobalWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new ExecutionConfig().getSerializerConfig()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new PassThroughWindowFunction<
                                            String, GlobalWindow, Tuple2<String, Integer>>()),
                            GlobalWindows.createWithEndOfStreamTrigger().getDefaultTrigger(),
                            0,
                            null /* late data output tag */);
    // Builder used only for the async-state variant of the operator.
    WindowOperatorBuilder<Tuple2<String, Integer>, String, GlobalWindow> builder =
            new WindowOperatorBuilder<>(
                    GlobalWindows.createWithEndOfStreamTrigger(),
                    GlobalWindows.createWithEndOfStreamTrigger().getDefaultTrigger(),
                    new ExecutionConfig(),
                    STRING_INT_TUPLE,
                    new TupleKeySelector(),
                    TypeInformation.of(String.class));
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
            testHarness =
                    enableAsyncState
                            ? createAsyncTestHarness(
                                    builder.asyncReduce(
                                            new SumReducer(),
                                            new PassThroughWindowFunction<>()))
                            : createTestHarness(operator);
    testHarness.open();
    // add elements out-of-order
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 20));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 0));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    // Nothing may be emitted before the end-of-stream watermark.
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            // FIX: Collections.emptyList() instead of the raw-typed EMPTY_LIST
            // constant (avoids an unchecked-assignment warning, same behavior).
            Collections.emptyList(),
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    testHarness.processWatermark(Watermark.MAX_WATERMARK);
    // One summed result per key, stamped with the GlobalWindow max timestamp.
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), Long.MAX_VALUE));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 5), Long.MAX_VALUE));
    expectedOutput.add(Watermark.MAX_WATERMARK);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedOutput,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    testHarness.close();
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testProcessingTimeTumblingWindows(boolean enableAsyncState) throws Throwable {
    // Tumbling processing-time windows of 3 seconds: a window fires as soon as
    // the harness' processing-time clock moves past its end; element timestamps
    // play no role in assignment.
    final int windowSize = 3;
    ReducingStateDescriptor<Tuple2<String, Integer>> sumStateDescriptor =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    // Synchronous-state operator under test.
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    TimeWindow>
            syncFactory =
                    new WindowOperatorFactory<>(
                            TumblingProcessingTimeWindows.of(Duration.ofSeconds(windowSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            sumStateDescriptor,
                            new InternalSingleValueWindowFunction<>(
                                    new PassThroughWindowFunction<
                                            String, TimeWindow, Tuple2<String, Integer>>()),
                            ProcessingTimeTrigger.create(),
                            0,
                            null /* late data output tag */);
    // Builder used for the async-state variant.
    WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> asyncBuilder =
            new WindowOperatorBuilder<>(
                    TumblingProcessingTimeWindows.of(Duration.ofSeconds(windowSize)),
                    ProcessingTimeTrigger.create(),
                    new ExecutionConfig(),
                    STRING_INT_TUPLE,
                    new TupleKeySelector(),
                    TypeInformation.of(String.class));
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
            harness =
                    enableAsyncState
                            ? createAsyncTestHarness(
                                    asyncBuilder.asyncReduce(
                                            new SumReducer(),
                                            new PassThroughWindowFunction<>()))
                            : createTestHarness(syncFactory);
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    harness.open();
    harness.setProcessingTime(3);
    // The element timestamps below are ignored; only setProcessingTime matters.
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), Long.MAX_VALUE));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));
    // Advancing past 3000 fires the first window (result timestamp 2999).
    harness.setProcessingTime(5000);
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
    expected.add(new StreamRecord<>(new Tuple2<>("key1", 2), 2999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expected,
            harness.getOutput(),
            new Tuple2ResultSortComparator());
    harness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 7000));
    // Advancing past 6000 fires the second window (result timestamp 5999).
    harness.setProcessingTime(7000);
    expected.add(new StreamRecord<>(new Tuple2<>("key1", 3), 5999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expected,
            harness.getOutput(),
            new Tuple2ResultSortComparator());
    harness.close();
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testProcessingTimeSlidingWindows(boolean enableAsyncState) throws Throwable {
    // Sliding processing-time windows (size 3s, slide 1s): each element can land
    // in several overlapping windows, and a window fires once the harness'
    // processing-time clock passes its end (result timestamp = end - 1).
    final int windowSize = 3;
    final int windowSlide = 1;
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    // Synchronous-state operator under test.
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            SlidingProcessingTimeWindows.of(
                                    Duration.ofSeconds(windowSize),
                                    Duration.ofSeconds(windowSlide)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new PassThroughWindowFunction<
                                            String, TimeWindow, Tuple2<String, Integer>>()),
                            ProcessingTimeTrigger.create(),
                            0,
                            null /* late data output tag */);
    // Builder used only for the async-state variant.
    WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> builder =
            new WindowOperatorBuilder<>(
                    SlidingProcessingTimeWindows.of(
                            Duration.ofSeconds(windowSize), Duration.ofSeconds(windowSlide)),
                    ProcessingTimeTrigger.create(),
                    new ExecutionConfig(),
                    STRING_INT_TUPLE,
                    new TupleKeySelector(),
                    TypeInformation.of(String.class));
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
            testHarness =
                    enableAsyncState
                            ? createAsyncTestHarness(
                                    builder.asyncReduce(
                                            new SumReducer(),
                                            new PassThroughWindowFunction<>()))
                            : createTestHarness(operator);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // timestamp is ignored in processing time
    testHarness.setProcessingTime(3);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), Long.MAX_VALUE));
    // Advance to 1000: the first window containing the element fires.
    testHarness.setProcessingTime(1000);
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 1), 999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedOutput,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), Long.MAX_VALUE));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), Long.MAX_VALUE));
    // All three key2 elements overlap in the window ending at 2000.
    testHarness.setProcessingTime(2000);
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 1999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedOutput,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
    testHarness.setProcessingTime(3000);
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 3), 2999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 2), 2999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedOutput,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), Long.MAX_VALUE));
    // Jumping to 7000 fires all remaining overlapping windows at once.
    testHarness.setProcessingTime(7000);
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 3999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 5), 3999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 5), 4999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 5999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedOutput,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    testHarness.close();
}
@Test
// FIX: suppress the unchecked StreamRecord cast in the output-scan loop below,
// matching the sibling dynamic-session tests that carry the same annotation.
@SuppressWarnings("unchecked")
void testProcessingTimeSessionWindows() throws Throwable {
    // Processing-time session windows with a 3s gap: a session fires once the
    // processing-time clock advances past (last element's arrival time + gap).
    final int windowGap = 3;
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            ProcessingTimeSessionWindows.withGap(Duration.ofSeconds(windowGap)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new PassThroughWindowFunction<
                                            String, TimeWindow, Tuple2<String, Integer>>()),
                            ProcessingTimeTrigger.create(),
                            0,
                            null /* late data output tag */);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
            testHarness = createTestHarness(operator);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // timestamp is ignored in processing time
    testHarness.setProcessingTime(3);
    testHarness.processElement(
            new StreamRecord<>(new Tuple2<>("key2", 1), 1)); // Long.MAX_VALUE));
    testHarness.setProcessingTime(1000);
    testHarness.processElement(
            new StreamRecord<>(new Tuple2<>("key2", 1), 1002)); // Long.MAX_VALUE));
    // Session extends to 1000 + 3000 gap; advancing to 5000 fires it.
    testHarness.setProcessingTime(5000);
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 3999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedOutput,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 5000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 5000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 5000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 5000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 5000));
    // Second session (elements at processing time 5000) fires on the jump to 10000.
    testHarness.setProcessingTime(10000);
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key2", 2), 7999));
    expectedOutput.add(new StreamRecord<>(new Tuple2<>("key1", 3), 7999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedOutput,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    // Sanity pass over the raw output: every record must appear in the
    // expectation set and no extra records may be present.
    assertThat(testHarness.getOutput()).hasSameSizeAs(expectedOutput);
    for (Object elem : testHarness.getOutput()) {
        if (elem instanceof StreamRecord) {
            StreamRecord<Tuple2<String, Integer>> el =
                    (StreamRecord<Tuple2<String, Integer>>) elem;
            assertThat(expectedOutput).contains(el);
        }
    }
    testHarness.close();
}
@Test
@SuppressWarnings("unchecked")
void testDynamicEventTimeSessionWindows() throws Exception {
    // Event-time session windows whose gap is computed per element by a mocked
    // SessionWindowTimeGapExtractor: key1 -> 3000ms, key2 with value 10 -> 1000ms,
    // other key2 values -> 2000ms, everything else -> 0.
    closeCalled.set(0);
    SessionWindowTimeGapExtractor<Tuple2<String, Integer>> extractor =
            mock(SessionWindowTimeGapExtractor.class);
    when(extractor.extract(any(Tuple2.class)))
            .thenAnswer(
                    invocation -> {
                        Tuple2<String, Integer> element =
                                (Tuple2<String, Integer>) invocation.getArguments()[0];
                        switch (element.f0) {
                            case "key1":
                                return 3000L;
                            case "key2":
                                switch (element.f1) {
                                    case 10:
                                        return 1000L;
                                    default:
                                        return 2000L;
                                }
                            default:
                                return 0L;
                        }
                    });
    ListStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ListStateDescriptor<>(
                    "window-contents",
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Iterable<Tuple2<String, Integer>>,
                    Tuple3<String, Long, Long>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            DynamicEventTimeSessionWindows.withDynamicGap(extractor),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalIterableWindowFunction<>(new SessionWindowFunction()),
                            EventTimeTrigger.create(),
                            0,
                            null /* late data output tag */);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
            testHarness = createTestHarness(operator);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // test different gaps for different keys
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 10));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.processWatermark(new Watermark(8999));
    // SessionWindowFunction emits ("key-sum", windowStart, windowEnd) at end - 1.
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-3", 10L, 3010L), 3009));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-9", 5000L, 8000L), 7999));
    expectedOutput.add(new Watermark(8999));
    // test gap when it produces an end time before current timeout
    // the furthest timeout is respected
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 9000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 10000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 10500));
    testHarness.processWatermark(new Watermark(12999));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-13", 9000L, 12000L), 11999));
    expectedOutput.add(new Watermark(12999));
    // test gap when it produces an end time after current timeout
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 13000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 13500));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14000));
    testHarness.processWatermark(new Watermark(16999));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-21", 13000L, 16000L), 15999));
    expectedOutput.add(new Watermark(16999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedOutput,
            testHarness.getOutput(),
            new Tuple3ResultSortComparator());
    testHarness.close();
}
@Test
@SuppressWarnings("unchecked")
void testDynamicProcessingTimeSessionWindows() throws Exception {
    // Processing-time twin of testDynamicEventTimeSessionWindows: the same mocked
    // per-element gap extractor (key1 -> 3000ms, key2/value 10 -> 1000ms,
    // other key2 -> 2000ms), but sessions are closed by setProcessingTime.
    closeCalled.set(0);
    SessionWindowTimeGapExtractor<Tuple2<String, Integer>> extractor =
            mock(SessionWindowTimeGapExtractor.class);
    when(extractor.extract(any(Tuple2.class)))
            .thenAnswer(
                    invocation -> {
                        Tuple2<String, Integer> element =
                                (Tuple2<String, Integer>) invocation.getArguments()[0];
                        switch (element.f0) {
                            case "key1":
                                return 3000L;
                            case "key2":
                                switch (element.f1) {
                                    case 10:
                                        return 1000L;
                                    default:
                                        return 2000L;
                                }
                            default:
                                return 0L;
                        }
                    });
    ListStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ListStateDescriptor<>(
                    "window-contents",
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Iterable<Tuple2<String, Integer>>,
                    Tuple3<String, Long, Long>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            DynamicProcessingTimeSessionWindows.withDynamicGap(extractor),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalIterableWindowFunction<>(new SessionWindowFunction()),
                            ProcessingTimeTrigger.create(),
                            0,
                            null /* late data output tag */);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
            testHarness = createTestHarness(operator);
    ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();
    testHarness.open();
    // test different gaps for different keys
    testHarness.setProcessingTime(10);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 3), 10));
    testHarness.setProcessingTime(5000);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 4), 5000));
    testHarness.setProcessingTime(6000);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 5), 6000));
    testHarness.setProcessingTime(8999);
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key1-3", 10L, 3010L), 3009));
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-9", 5000L, 8000L), 7999));
    // test gap when it produces an end time before current timeout
    // the furthest timeout is respected
    testHarness.setProcessingTime(9000);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 9000));
    testHarness.setProcessingTime(10000);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 2), 10000));
    testHarness.setProcessingTime(10500);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 10500));
    // NOTE(review): processing time is set to 10500 twice in a row here; the
    // session ending at 12000 only fires at setProcessingTime(13000) below, and
    // the single final assertion still passes. Possibly this was meant to
    // advance further (cf. watermark 12999 in the event-time twin) -- confirm.
    testHarness.setProcessingTime(10500);
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-13", 9000L, 12000L), 11999));
    // test gap when it produces an end time after current timeout
    testHarness.setProcessingTime(13000);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 13000));
    testHarness.setProcessingTime(13500);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 10), 13500));
    testHarness.setProcessingTime(14000);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14000));
    testHarness.setProcessingTime(16999);
    expectedOutput.add(new StreamRecord<>(new Tuple3<>("key2-21", 13000L, 16000L), 15999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedOutput,
            testHarness.getOutput(),
            new Tuple3ResultSortComparator());
    testHarness.close();
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testLateness(boolean enableAsyncState) throws Exception {
    // With allowed lateness 500ms and a purging event-time trigger: elements
    // arriving after the window end but within the lateness bound re-fire the
    // window; elements beyond the bound go to the late-data side output.
    final int windowSize = 2;
    final long lateness = 500;
    ReducingStateDescriptor<Tuple2<String, Integer>> reducingState =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    // Synchronous-state operator under test.
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    TimeWindow>
            factory =
                    new WindowOperatorFactory<>(
                            TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            reducingState,
                            new InternalSingleValueWindowFunction<>(
                                    new PassThroughWindowFunction<
                                            String, TimeWindow, Tuple2<String, Integer>>()),
                            PurgingTrigger.of(EventTimeTrigger.create()),
                            lateness,
                            lateOutputTag);
    // Builder for the async-state variant; mirrors the factory configuration.
    WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> windowBuilder =
            new WindowOperatorBuilder<>(
                    TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
                    PurgingTrigger.of(EventTimeTrigger.create()),
                    new ExecutionConfig(),
                    STRING_INT_TUPLE,
                    new TupleKeySelector(),
                    TypeInformation.of(String.class));
    windowBuilder.allowedLateness(Duration.ofMillis(lateness));
    windowBuilder.sideOutputLateData(lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
            harness =
                    enableAsyncState
                            ? createAsyncTestHarness(
                                    windowBuilder.asyncReduce(
                                            new SumReducer(),
                                            new PassThroughWindowFunction<>()))
                            : createTestHarness(factory);
    harness.open();
    ConcurrentLinkedQueue<Object> expectedMain = new ConcurrentLinkedQueue<>();
    ConcurrentLinkedQueue<Object> expectedLate = new ConcurrentLinkedQueue<>();
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 500));
    harness.processWatermark(new Watermark(1500));
    expectedMain.add(new Watermark(1500));
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1300));
    harness.processWatermark(new Watermark(2300));
    expectedMain.add(new StreamRecord<>(new Tuple2<>("key2", 2), 1999));
    expectedMain.add(new Watermark(2300));
    // Not side output: window.maxTimestamp() + allowedLateness > currentWatermark,
    // so the purged window re-fires with just this element.
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1997));
    harness.processWatermark(new Watermark(6000));
    // Count is 1 (not 3) because the purging trigger cleared earlier contents.
    expectedMain.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
    expectedMain.add(new Watermark(6000));
    // Side output: window.maxTimestamp() + allowedLateness < currentWatermark.
    harness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
    harness.processWatermark(new Watermark(7000));
    expectedLate.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
    expectedMain.add(new Watermark(7000));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expectedMain,
            harness.getOutput(),
            new Tuple2ResultSortComparator());
    TestHarnessUtil.assertOutputEqualsSorted(
            "SideOutput was not correct.",
            expectedLate,
            (Iterable) harness.getSideOutput(lateOutputTag),
            new Tuple2ResultSortComparator());
    harness.close();
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testCleanupTimeOverflow(boolean enableAsyncState) throws Exception {
    // Regression test: a window whose (maxTimestamp + allowedLateness) overflows
    // Long must not register a wrapped-around garbage-collection timer that
    // would prematurely clean the window state.
    final int windowSize = 1000;
    final long lateness = 2000;
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    TumblingEventTimeWindows windowAssigner =
            TumblingEventTimeWindows.of(Duration.ofMillis(windowSize));
    final WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            windowAssigner,
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new PassThroughWindowFunction<
                                            String, TimeWindow, Tuple2<String, Integer>>()),
                            EventTimeTrigger.create(),
                            lateness,
                            null /* late data output tag */);
    WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> builder =
            new WindowOperatorBuilder<>(
                    windowAssigner,
                    EventTimeTrigger.create(),
                    new ExecutionConfig(),
                    STRING_INT_TUPLE,
                    new TupleKeySelector(),
                    TypeInformation.of(String.class));
    builder.allowedLateness(Duration.ofMillis(lateness));
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
            testHarness =
                    enableAsyncState
                            ? createAsyncTestHarness(
                                    builder.asyncReduce(
                                            new SumReducer(),
                                            new PassThroughWindowFunction<>()))
                            : createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    // Timestamp chosen so that maxTimestamp + lateness wraps past Long.MAX_VALUE.
    long timestamp = Long.MAX_VALUE - 1750;
    Collection<TimeWindow> windows =
            windowAssigner.assignWindows(
                    new Tuple2<>("key2", 1),
                    timestamp,
                    new WindowAssigner.WindowAssignerContext() {
                        // Delegate to the context of whichever operator variant
                        // (async or sync) the harness is actually running.
                        @Override
                        public long getCurrentProcessingTime() {
                            return enableAsyncState
                                    ? ((AsyncWindowOperator<?, ?, ?, ?, ?>)
                                                    testHarness.getOperator())
                                            .getWindowAssignerContext()
                                            .getCurrentProcessingTime()
                                    : ((WindowOperator<?, ?, ?, ?, ?>)
                                                    testHarness.getOperator())
                                            .windowAssignerContext.getCurrentProcessingTime();
                        }
                    });
    TimeWindow window = Iterables.getOnlyElement(windows);
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), timestamp));
    // the garbage collection timer would wrap-around
    assertThat(window.maxTimestamp() + lateness).isLessThan(window.maxTimestamp());
    // and it would prematurely fire with watermark (Long.MAX_VALUE - 1500)
    assertThat(window.maxTimestamp() + lateness).isLessThan(Long.MAX_VALUE - 1500);
    // if we don't correctly prevent wrap-around in the garbage collection
    // timers this watermark will clean our window state for the just-added
    // element/window
    testHarness.processWatermark(new Watermark(Long.MAX_VALUE - 1500));
    // this watermark is before the end timestamp of our only window
    assertThat(window.maxTimestamp()).isStrictlyBetween(Long.MAX_VALUE - 1500, Long.MAX_VALUE);
    // push in a watermark that will trigger computation of our window
    testHarness.processWatermark(new Watermark(window.maxTimestamp()));
    expected.add(new Watermark(Long.MAX_VALUE - 1500));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), window.maxTimestamp()));
    expected.add(new Watermark(window.maxTimestamp()));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expected,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    testHarness.close();
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testSideOutputDueToLatenessTumbling(boolean enableAsyncState) throws Exception {
    // With allowed lateness 0 on tumbling event-time windows, any element whose
    // window has already closed (watermark past window.maxTimestamp()) must be
    // routed to the late-data side output, not the main output.
    final int windowSize = 2;
    final long lateness = 0;
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    // Synchronous-state operator under test.
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new PassThroughWindowFunction<
                                            String, TimeWindow, Tuple2<String, Integer>>()),
                            EventTimeTrigger.create(),
                            lateness,
                            lateOutputTag);
    // Builder used only for the async-state variant.
    WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> builder =
            new WindowOperatorBuilder<>(
                    TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
                    EventTimeTrigger.create(),
                    new ExecutionConfig(),
                    STRING_INT_TUPLE,
                    new TupleKeySelector(),
                    TypeInformation.of(String.class));
    builder.allowedLateness(Duration.ofMillis(lateness));
    builder.sideOutputLateData(lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
            testHarness =
                    enableAsyncState
                            ? createAsyncTestHarness(
                                    builder.asyncReduce(
                                            new SumReducer(),
                                            new PassThroughWindowFunction<>()))
                            : createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    ConcurrentLinkedQueue<Object> sideExpected = new ConcurrentLinkedQueue<>();
    // normal element
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1985));
    expected.add(new Watermark(1985));
    // this will not be dropped because window.maxTimestamp() + allowedLateness >
    // currentWatermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1980));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 2), 1999));
    expected.add(new Watermark(1999));
    // sideoutput as late, will reuse previous timestamp since only input tuple is sideoutputed
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
    sideExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1998));
    // Element at 2001 belongs to the next window [2000, 4000) and is on time.
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2001));
    testHarness.processWatermark(new Watermark(2999));
    expected.add(new Watermark(2999));
    testHarness.processWatermark(new Watermark(3999));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 3999));
    expected.add(new Watermark(3999));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expected,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    TestHarnessUtil.assertOutputEqualsSorted(
            "SideOutput was not correct.",
            sideExpected,
            (Iterable) testHarness.getSideOutput(lateOutputTag),
            new Tuple2ResultSortComparator());
    testHarness.close();
}
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testSideOutputDueToLatenessSliding(boolean enableAsyncState) throws Exception {
    // Sliding event-time windows (size 3s, slide 1s) with allowed lateness 0:
    // an element is late for a given window once the watermark passed that
    // window's end, but it may still count toward later overlapping windows.
    // Only an element late for ALL of its windows goes to the side output.
    final int windowSize = 3;
    final int windowSlide = 1;
    final long lateness = 0;
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    // Synchronous-state operator under test.
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            SlidingEventTimeWindows.of(
                                    Duration.ofSeconds(windowSize),
                                    Duration.ofSeconds(windowSlide)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new PassThroughWindowFunction<
                                            String, TimeWindow, Tuple2<String, Integer>>()),
                            EventTimeTrigger.create(),
                            lateness,
                            lateOutputTag /* late data output tag */);
    // Builder used only for the async-state variant.
    WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> builder =
            new WindowOperatorBuilder<>(
                    SlidingEventTimeWindows.of(
                            Duration.ofSeconds(windowSize), Duration.ofSeconds(windowSlide)),
                    EventTimeTrigger.create(),
                    new ExecutionConfig(),
                    STRING_INT_TUPLE,
                    new TupleKeySelector(),
                    TypeInformation.of(String.class));
    builder.allowedLateness(Duration.ofMillis(lateness));
    builder.sideOutputLateData(lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple2<String, Integer>>
            testHarness =
                    enableAsyncState
                            ? createAsyncTestHarness(
                                    builder.asyncReduce(
                                            new SumReducer(),
                                            new PassThroughWindowFunction<>()))
                            : createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    ConcurrentLinkedQueue<Object> sideExpected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(3000));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 2), 2999));
    expected.add(new Watermark(3000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 3001));
    // lateness is set to 0 and window size = 3 sec and slide 1, the following 2 elements (2400)
    // are assigned to windows ending at 2999, 3999, 4999.
    // The 2999 is dropped because it is already late (WM = 2999) but the rest are kept.
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2400));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2400));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 3001));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 3900));
    testHarness.processWatermark(new Watermark(6000));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 5), 3999));
    expected.add(new StreamRecord<>(new Tuple2<>("key1", 2), 3999));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 4), 4999));
    expected.add(new StreamRecord<>(new Tuple2<>("key1", 2), 4999));
    expected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 5999));
    expected.add(new StreamRecord<>(new Tuple2<>("key1", 2), 5999));
    expected.add(new Watermark(6000));
    // sideoutput element due to lateness
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key1", 1), 3001));
    sideExpected.add(new StreamRecord<>(new Tuple2<>("key1", 1), 3001));
    testHarness.processWatermark(new Watermark(25000));
    expected.add(new Watermark(25000));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expected,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    TestHarnessUtil.assertOutputEqualsSorted(
            "SideOutput was not correct.",
            sideExpected,
            (Iterable) testHarness.getSideOutput(lateOutputTag),
            new Tuple2ResultSortComparator());
    testHarness.close();
}
// Session windows (3s gap), zero allowed lateness, purging trigger: once a session has
// fired and been purged, elements behind the watermark must be emitted on the late-data
// side output and must NOT be merged with each other or with existing sessions.
@Test
void testSideOutputDueToLatenessSessionZeroLatenessPurgingTrigger() throws Exception {
    final int gapSize = 3;
    final long lateness = 0;
    // Reducing state sums the Integer field per key within each session window.
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple3<String, Long, Long>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            EventTimeSessionWindows.withGap(Duration.ofSeconds(gapSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new ReducedSessionWindowFunction()),
                            PurgingTrigger.of(EventTimeTrigger.create()),
                            lateness,
                            lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
            testHarness = createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    ConcurrentLinkedQueue<Object> sideExpected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be dropped because the session we're adding to has maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    // this is side output as late
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    sideExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    // this is also side output as late (we test that they are not accidentally merged)
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10100));
    sideExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 10100));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 14500L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual =
            testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, actual, new Tuple2ResultSortComparator());
    TestHarnessUtil.assertOutputEqualsSorted(
            "SideOutput was not correct.",
            sideExpected,
            (Iterable) sideActual,
            new Tuple2ResultSortComparator());
    testHarness.close();
}
// Same scenario as the purging-trigger variant above, but with the default accumulating
// EventTimeTrigger. With zero allowed lateness, a late element still goes to the side
// output because the session it would join has already passed its cleanup time.
@Test
void testSideOutputDueToLatenessSessionZeroLateness() throws Exception {
    final int gapSize = 3;
    final long lateness = 0;
    // Reducing state sums the Integer field per key within each session window.
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple3<String, Long, Long>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            EventTimeSessionWindows.withGap(Duration.ofSeconds(gapSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new ReducedSessionWindowFunction()),
                            EventTimeTrigger.create(),
                            lateness,
                            lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
            testHarness = createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    ConcurrentLinkedQueue<Object> sideExpected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be dropped because the session we're adding to has maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    // this is sideoutput as late, reuse last timestamp
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    sideExpected.add(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 14500L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual =
            testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, actual, new Tuple2ResultSortComparator());
    TestHarnessUtil.assertOutputEqualsSorted(
            "SideOutput was not correct.",
            sideExpected,
            (Iterable) sideActual,
            new Tuple2ResultSortComparator());
    testHarness.close();
}
// Session windows with a small (10ms) allowed lateness and a purging trigger: the late
// element at t=10000 is still within lateness of the purged session, so it re-fires a
// (purged, hence count-1) session instead of being side-output.
@Test
void testDropDueToLatenessSessionWithLatenessPurgingTrigger() throws Exception {
    // this has the same output as testSideOutputDueToLatenessSessionZeroLateness() because
    // the allowed lateness is too small to make a difference
    final int gapSize = 3;
    final long lateness = 10;
    // Reducing state sums the Integer field per key within each session window.
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple3<String, Long, Long>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            EventTimeSessionWindows.withGap(Duration.ofSeconds(gapSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new ReducedSessionWindowFunction()),
                            PurgingTrigger.of(EventTimeTrigger.create()),
                            lateness,
                            lateOutputTag);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
            testHarness = createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be dropped because the session we're adding to has maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    // late but within allowed lateness: merged into the purged session and re-fired
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 10000L, 14600L), 14599));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 10000L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    testHarness.close();
}
// Session windows with a small (10ms) allowed lateness and an accumulating trigger:
// late elements within the lateness bound are merged into the still-live session state,
// so no side output is produced at all (asserted via a null side-output queue).
@Test
void testNotSideOutputDueToLatenessSessionWithLateness() throws Exception {
    // same as testSideOutputDueToLatenessSessionWithLateness() but with an accumulating
    // trigger, i.e.
    // one that does not return FIRE_AND_PURGE when firing but just FIRE. The expected
    // results are therefore slightly different.
    final int gapSize = 3;
    final long lateness = 10;
    // Reducing state sums the Integer field per key within each session window.
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple3<String, Long, Long>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            EventTimeSessionWindows.withGap(Duration.ofSeconds(gapSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new ReducedSessionWindowFunction()),
                            EventTimeTrigger.create(),
                            lateness,
                            lateOutputTag /* late data output tag */);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
            testHarness = createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be sideoutput because the session we're adding to has maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    // because of the small allowed lateness and because the trigger is accumulating
    // this will be merged into the session (11600-14600) and therefore will not
    // be sideoutput as late
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    // adding ("key2", 1) extended the session to (10000-14600) for which
    // maxTimestamp <= currentWatermark. Therefore, we immediately get a firing
    // with the current version of EventTimeTrigger/EventTimeTriggerAccum
    expected.add(new StreamRecord<>(new Tuple3<>("key2-2", 10000L, 14600L), 14599));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual =
            testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    // no element ever went to the late side output
    assertThat(sideActual).isNull();
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-3", 10000L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    actual = testHarness.getOutput();
    sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    assertThat(sideActual).isNull();
    testHarness.close();
}
// Session windows with a huge (10s) allowed lateness and a purging trigger: every "late"
// element still falls within the lateness bound, so nothing is ever side-output; late
// arrivals re-fire the merged session instead.
@Test
void testNotSideOutputDueToLatenessSessionWithHugeLatenessPurgingTrigger() throws Exception {
    final int gapSize = 3;
    final long lateness = 10000;
    // Reducing state sums the Integer field per key within each session window.
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple3<String, Long, Long>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            EventTimeSessionWindows.withGap(Duration.ofSeconds(gapSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new ReducedSessionWindowFunction()),
                            PurgingTrigger.of(EventTimeTrigger.create()),
                            lateness,
                            lateOutputTag /* late data output tag */);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
            testHarness = createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be sideoutput because the session we're adding to has maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    // within lateness: merges all sessions back together and fires immediately;
    // count is 1 because the purging trigger cleared the accumulated state on firing
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 1000L, 14600L), 14599));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual =
            testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    // nothing was late enough for the side output
    assertThat(sideActual).isNull();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 1000L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    actual = testHarness.getOutput();
    sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    assertThat(sideActual).isNull();
    testHarness.close();
}
// Session windows with a huge (10s) allowed lateness and an accumulating trigger: late
// elements merge into the retained session state, so counts keep growing (key2-7,
// key2-8) and the side output stays empty.
@Test
void testNotSideOutputDueToLatenessSessionWithHugeLateness() throws Exception {
    final int gapSize = 3;
    final long lateness = 10000;
    // Reducing state sums the Integer field per key within each session window.
    ReducingStateDescriptor<Tuple2<String, Integer>> stateDesc =
            new ReducingStateDescriptor<>(
                    "window-contents",
                    new SumReducer(),
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Tuple2<String, Integer>,
                    Tuple3<String, Long, Long>,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            EventTimeSessionWindows.withGap(Duration.ofSeconds(gapSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            stateDesc,
                            new InternalSingleValueWindowFunction<>(
                                    new ReducedSessionWindowFunction()),
                            EventTimeTrigger.create(),
                            lateness,
                            lateOutputTag /* late data output tag */);
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, Tuple3<String, Long, Long>>
            testHarness = createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1999));
    expected.add(new Watermark(1999));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 2000));
    testHarness.processWatermark(new Watermark(4998));
    expected.add(new Watermark(4998));
    // this will not be sideoutput because the session we're adding to has maxTimestamp
    // after the current watermark
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 4500));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 8500));
    testHarness.processWatermark(new Watermark(7400));
    expected.add(new Watermark(7400));
    // this will merge the two sessions into one
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 7000));
    testHarness.processWatermark(new Watermark(11501));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-5", 1000L, 11500L), 11499));
    expected.add(new Watermark(11501));
    // new session
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 11600));
    testHarness.processWatermark(new Watermark(14600));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-1", 11600L, 14600L), 14599));
    expected.add(new Watermark(14600));
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 10000));
    // the maxTimestamp of the merged session is already late,
    // so we get an immediate firing
    expected.add(new StreamRecord<>(new Tuple3<>("key2-7", 1000L, 14600L), 14599));
    ConcurrentLinkedQueue<Object> actual = testHarness.getOutput();
    ConcurrentLinkedQueue<StreamRecord<Tuple2<String, Integer>>> sideActual =
            testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    // nothing was late enough for the side output
    assertThat(sideActual).isNull();
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 14500));
    testHarness.processWatermark(new Watermark(20000));
    expected.add(new StreamRecord<>(new Tuple3<>("key2-8", 1000L, 17500L), 17499));
    expected.add(new Watermark(20000));
    testHarness.processWatermark(new Watermark(100000));
    expected.add(new Watermark(100000));
    actual = testHarness.getOutput();
    sideActual = testHarness.getSideOutput(lateOutputTag);
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.", expected, actual, new Tuple3ResultSortComparator());
    assertThat(sideActual).isNull();
    testHarness.close();
}
// Tumbling 2s windows with 100ms allowed lateness: verifies that the GC/cleanup timer
// (window end + lateness, here 2100) fires after the window result and does not emit a
// second result for the already-purged empty list state. Parameterized to also cover
// the async-state operator built via the WindowOperatorBuilder.
@ParameterizedTest(name = "Enable async state = {0}")
@ValueSource(booleans = {false, true})
void testCleanupTimerWithEmptyListStateForTumblingWindows2(boolean enableAsyncState)
        throws Exception {
    final int windowSize = 2;
    final long lateness = 100;
    // List state keeps the raw elements of each window for the pass-through function.
    ListStateDescriptor<Tuple2<String, Integer>> windowStateDesc =
            new ListStateDescriptor<>(
                    "window-contents",
                    STRING_INT_TUPLE.createSerializer(new SerializerConfigImpl()));
    WindowOperatorFactory<
                    String,
                    Tuple2<String, Integer>,
                    Iterable<Tuple2<String, Integer>>,
                    String,
                    TimeWindow>
            operator =
                    new WindowOperatorFactory<>(
                            TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
                            new TimeWindow.Serializer(),
                            new TupleKeySelector(),
                            BasicTypeInfo.STRING_TYPE_INFO.createSerializer(
                                    new SerializerConfigImpl()),
                            windowStateDesc,
                            new InternalIterableWindowFunction<>(new PassThroughFunction2()),
                            new EventTimeTriggerAccumGC(lateness),
                            lateness,
                            null /* late data output tag */);
    // Builder path used for the async-state variant; configured equivalently.
    WindowOperatorBuilder<Tuple2<String, Integer>, String, TimeWindow> builder =
            new WindowOperatorBuilder<>(
                    TumblingEventTimeWindows.of(Duration.ofSeconds(windowSize)),
                    new EventTimeTriggerAccumGC(lateness),
                    new ExecutionConfig(),
                    STRING_INT_TUPLE,
                    new TupleKeySelector(),
                    TypeInformation.of(String.class));
    builder.allowedLateness(Duration.ofMillis(lateness));
    OneInputStreamOperatorTestHarness<Tuple2<String, Integer>, String> testHarness =
            enableAsyncState
                    ? createAsyncTestHarness(builder.asyncApply(new PassThroughFunction2()))
                    : createTestHarness(operator);
    testHarness.open();
    ConcurrentLinkedQueue<Object> expected = new ConcurrentLinkedQueue<>();
    // normal element
    testHarness.processElement(new StreamRecord<>(new Tuple2<>("key2", 1), 1000));
    testHarness.processWatermark(new Watermark(1599));
    testHarness.processWatermark(new Watermark(1999));
    testHarness.processWatermark(new Watermark(2100));
    testHarness.processWatermark(new Watermark(5000));
    expected.add(new Watermark(1599));
    expected.add(new StreamRecord<>("GOT: (key2,1)", 1999));
    expected.add(new Watermark(1999)); // here it fires and purges
    expected.add(new Watermark(2100)); // here is the cleanup timer
    expected.add(new Watermark(5000));
    TestHarnessUtil.assertOutputEqualsSorted(
            "Output was not correct.",
            expected,
            testHarness.getOutput(),
            new Tuple2ResultSortComparator());
    testHarness.close();
}
private static | HarnessProvider |
java | elastic__elasticsearch | modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java | {
"start": 11488,
"end": 22084
} | class ____ will return script instances.
*/
/**
 * Generates and loads a factory class implementing {@code context.factoryClazz}. The
 * generated {@code newInstance}/{@code newFactory} method simply constructs an instance
 * of the compiled script class {@code classType}, forwarding its own arguments to the
 * script constructor. Additionally implements the factory's {@code needs*} probe
 * methods and {@code isResultDeterministic}.
 *
 * @param loader      class loader that receives the generated factory bytecode
 * @param context     script context whose {@code factoryClazz} is implemented
 * @param classType   ASM type of the compiled script class to instantiate
 * @param scriptScope scope of the compiled script (used variables, determinism)
 * @return an instance of the generated factory, cast to {@code T}
 * @throws IllegalStateException if the generated class cannot be instantiated or the
 *         factory interface declares neither {@code newInstance} nor {@code newFactory}
 */
private static <T> T generateFactory(Loader loader, ScriptContext<T> context, Type classType, ScriptScope scriptScope) {
    int classFrames = ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS;
    int classAccess = Opcodes.ACC_PUBLIC | Opcodes.ACC_SUPER | Opcodes.ACC_FINAL;
    String interfaceBase = Type.getType(context.factoryClazz).getInternalName();
    String className = interfaceBase + "$Factory";
    String[] classInterfaces = new String[] { interfaceBase };

    ClassWriter writer = new ClassWriter(classFrames);
    writer.visit(WriterConstants.CLASS_VERSION, classAccess, className, null, OBJECT_TYPE.getInternalName(), classInterfaces);

    // Default no-arg constructor that just delegates to Object.<init>.
    org.objectweb.asm.commons.Method init = new org.objectweb.asm.commons.Method(
        "<init>",
        MethodType.methodType(void.class).toMethodDescriptorString()
    );
    GeneratorAdapter constructor = new GeneratorAdapter(
        Opcodes.ASM5,
        init,
        writer.visitMethod(Opcodes.ACC_PUBLIC, init.getName(), init.getDescriptor(), null, null)
    );
    constructor.visitCode();
    constructor.loadThis();
    constructor.invokeConstructor(OBJECT_TYPE, init);
    constructor.returnValue();
    constructor.endMethod();

    // Find the factory method to implement; if the interface declares both, the one
    // encountered last in getMethods() order wins (same behavior as before).
    Method reflect = null;
    for (Method method : context.factoryClazz.getMethods()) {
        if ("newInstance".equals(method.getName())) {
            reflect = method;
        } else if ("newFactory".equals(method.getName())) {
            reflect = method;
        }
    }
    if (reflect == null) {
        // Previously this fell through to a NullPointerException below; fail with a
        // descriptive message instead since this indicates a malformed factory interface.
        throw new IllegalStateException(
            "factory class [" + context.factoryClazz.getName() + "] declares neither [newInstance] nor [newFactory]"
        );
    }

    // Implement the factory method: new <classType>(<args...>).
    org.objectweb.asm.commons.Method instance = new org.objectweb.asm.commons.Method(
        reflect.getName(),
        MethodType.methodType(reflect.getReturnType(), reflect.getParameterTypes()).toMethodDescriptorString()
    );
    org.objectweb.asm.commons.Method constru = new org.objectweb.asm.commons.Method(
        "<init>",
        MethodType.methodType(void.class, reflect.getParameterTypes()).toMethodDescriptorString()
    );
    GeneratorAdapter adapter = new GeneratorAdapter(
        Opcodes.ASM5,
        instance,
        writer.visitMethod(Opcodes.ACC_PUBLIC | Opcodes.ACC_FINAL, instance.getName(), instance.getDescriptor(), null, null)
    );
    adapter.visitCode();
    adapter.newInstance(classType);
    adapter.dup();
    adapter.loadArgs();
    adapter.invokeConstructor(classType, constru);
    adapter.returnValue();
    adapter.endMethod();

    writeNeedsMethods(context.factoryClazz, writer, scriptScope.getUsedVariables());

    // isResultDeterministic() returns a constant computed at compile time.
    String methodName = "isResultDeterministic";
    org.objectweb.asm.commons.Method isResultDeterministic = new org.objectweb.asm.commons.Method(
        methodName,
        MethodType.methodType(boolean.class).toMethodDescriptorString()
    );
    GeneratorAdapter deterAdapter = new GeneratorAdapter(
        Opcodes.ASM5,
        isResultDeterministic,
        writer.visitMethod(Opcodes.ACC_PUBLIC, methodName, isResultDeterministic.getDescriptor(), null, null)
    );
    deterAdapter.visitCode();
    deterAdapter.push(scriptScope.isDeterministic());
    deterAdapter.returnValue();
    deterAdapter.endMethod();

    writer.visitEnd();
    Class<?> factory = loader.defineFactory(className.replace('/', '.'), writer.toByteArray());
    try {
        return context.factoryClazz.cast(factory.getConstructor().newInstance());
    } catch (Exception exception) {
        // Catch everything to let the user know this is something caused internally.
        throw new IllegalStateException(
            "An internal error occurred attempting to define the factory class [" + className + "].",
            exception
        );
    }
}
/**
 * Implements every {@code needsXyz()} probe method declared on the factory interface.
 * Each generated method returns whether the variable {@code xyz} was actually used by
 * the compiled script, based on the extracted-variables set.
 */
private static void writeNeedsMethods(Class<?> clazz, ClassWriter writer, Set<String> extractedVariables) {
    for (Method candidate : clazz.getMethods()) {
        // Only zero-argument boolean methods named needs* are probe methods.
        boolean isNeedsProbe = candidate.getName().startsWith("needs")
            && candidate.getReturnType().equals(boolean.class)
            && candidate.getParameterTypes().length == 0;
        if (isNeedsProbe == false) {
            continue;
        }
        // needsFoo -> foo: drop the "needs" prefix and lower-case the first character.
        String variable = candidate.getName().substring(5);
        variable = Character.toLowerCase(variable.charAt(0)) + variable.substring(1);
        org.objectweb.asm.commons.Method needs = new org.objectweb.asm.commons.Method(
            candidate.getName(),
            MethodType.methodType(boolean.class).toMethodDescriptorString()
        );
        GeneratorAdapter methodWriter = new GeneratorAdapter(
            Opcodes.ASM5,
            needs,
            writer.visitMethod(Opcodes.ACC_PUBLIC, needs.getName(), needs.getDescriptor(), null, null)
        );
        methodWriter.visitCode();
        // The result is a compile-time constant: true iff the script used the variable.
        methodWriter.push(extractedVariables.contains(variable));
        methodWriter.returnValue();
        methodWriter.endMethod();
    }
}
/**
 * Compiles a Painless script into a {@link ScriptScope}.
 *
 * @param compiler   the compiler to use
 * @param loader     class loader receiving the generated script classes
 * @param scriptName the script's name, or {@code null} to use the source text as name
 * @param source     the script source
 * @param params     compile-time parameters for {@link CompilerSettings}
 */
ScriptScope compile(Compiler compiler, Loader loader, String scriptName, String source, Map<String, String> params) {
    final CompilerSettings settings = buildCompilerSettings(params);
    // Unnamed scripts are identified by their source text.
    final String effectiveName = scriptName != null ? scriptName : source;
    try {
        return compiler.compile(loader, effectiveName, source, settings);
    } catch (OutOfMemoryError | StackOverflowError | LinkageError | Exception e) {
        // Painless is stateless, so catching these broad errors/exceptions here is safe;
        // they are surfaced uniformly as a ScriptException.
        throw convertToScriptException(source, e);
    }
}
/**
 * Builds the {@link CompilerSettings} for one compilation from the request parameters,
 * falling back to the node-wide defaults when none are given. Regex settings are
 * node-level only and cannot be overridden per request.
 *
 * @throws IllegalArgumentException if a regex setting is supplied, or if unknown
 *         parameters remain after consuming all recognized ones
 */
private CompilerSettings buildCompilerSettings(Map<String, String> params) {
    if (params.isEmpty()) {
        // No overrides requested; reuse the shared default settings.
        return defaultCompilerSettings;
    }
    CompilerSettings settings = new CompilerSettings();
    // Regex behavior is fixed at node startup and always copied from the defaults.
    settings.setRegexesEnabled(defaultCompilerSettings.areRegexesEnabled());
    settings.setRegexLimitFactor(defaultCompilerSettings.getAppliedRegexLimitFactor());
    // Consume recognized keys from a copy; whatever remains is unrecognized.
    Map<String, String> remaining = new HashMap<>(params);
    String setting = remaining.remove(CompilerSettings.MAX_LOOP_COUNTER);
    if (setting != null) {
        settings.setMaxLoopCounter(Integer.parseInt(setting));
    }
    setting = remaining.remove(CompilerSettings.PICKY);
    if (setting != null) {
        settings.setPicky(parseBoolean(setting));
    }
    setting = remaining.remove(CompilerSettings.INITIAL_CALL_SITE_DEPTH);
    if (setting != null) {
        settings.setInitialCallSiteDepth(Integer.parseInt(setting));
    }
    if (remaining.remove(CompilerSettings.REGEX_ENABLED.getKey()) != null) {
        throw new IllegalArgumentException("[painless.regex.enabled] can only be set on node startup.");
    }
    if (remaining.remove(CompilerSettings.REGEX_LIMIT_FACTOR.getKey()) != null) {
        throw new IllegalArgumentException("[painless.regex.limit-factor] can only be set on node startup.");
    }
    if (remaining.isEmpty() == false) {
        throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + remaining);
    }
    return settings;
}
// Lenient boolean parsing: anything other than "true" (ignoring case) yields false.
// Kept as a dedicated wrapper so the forbidden-API suppression is scoped to one place.
@SuppressForbidden(
    reason = "TODO Deprecate any lenient usage of Boolean#parseBoolean https://github.com/elastic/elasticsearch/issues/128993"
)
private static boolean parseBoolean(String value) {
    return Boolean.parseBoolean(value);
}
/**
 * Converts an arbitrary compile-time failure into a {@link ScriptException} whose script
 * stack contains a short source snippet around the failing position plus a caret line
 * pointing at it. Note: despite the declared return type, this method always throws.
 *
 * @param scriptSource the script's source text, used to build the snippet
 * @param t the original failure
 */
private static ScriptException convertToScriptException(String scriptSource, Throwable t) {
    // create a script stack: this is just the script portion
    List<String> scriptStack = new ArrayList<>();
    ScriptException.Position pos = null;
    for (StackTraceElement element : t.getStackTrace()) {
        if (WriterConstants.CLASS_NAME.equals(element.getClassName())) {
            // found the script portion
            int originalOffset = element.getLineNumber();
            if (originalOffset == -1) {
                // no line number information available for this frame
                scriptStack.add("<<< unknown portion of script >>>");
            } else {
                // NOTE(review): the pre-decrement mutates originalOffset, so the
                // Position below receives the 0-based value as well — assumed intentional.
                int offset = --originalOffset; // offset is 1 based, line numbers must be!
                int startOffset = getPreviousStatement(offset);
                int endOffset = getNextStatement(scriptSource, offset);
                StringBuilder snippet = new StringBuilder();
                if (startOffset > 0) {
                    snippet.append("... ");
                }
                snippet.append(scriptSource.substring(startOffset, endOffset));
                if (endOffset < scriptSource.length()) {
                    snippet.append(" ...");
                }
                scriptStack.add(snippet.toString());
                // Build the caret line aligned under the failing character; the leading
                // padding mirrors the "... " prefix added to the snippet above.
                StringBuilder pointer = new StringBuilder();
                if (startOffset > 0) {
                    pointer.append("    ");
                }
                for (int i = startOffset; i < offset; i++) {
                    pointer.append(' ');
                }
                pointer.append("^---- HERE");
                scriptStack.add(pointer.toString());
                pos = new ScriptException.Position(originalOffset, startOffset, endOffset);
            }
            // only the first script frame is reported
            break;
        }
    }
    Throwable cause = ErrorCauseWrapper.maybeWrap(t);
    throw new ScriptException("compile error", cause, scriptStack, scriptSource, PainlessScriptEngine.NAME, pos);
}
// very simple heuristic: +/- 25 chars. can be improved later.
private static int getPreviousStatement(int offset) {
return Math.max(0, offset - 25);
}
private static int getNextStatement(String scriptSource, int offset) {
return Math.min(scriptSource.length(), offset + 25);
}
}
| that |
java | spring-projects__spring-framework | spring-web/src/test/java/org/springframework/http/client/SimpleClientHttpResponseTests.java | {
"start": 4675,
"end": 5016
} | class ____ extends ByteArrayInputStream {
private boolean closed;
public TestByteArrayInputStream(byte[] buf) {
super(buf);
this.closed = false;
}
public boolean isClosed() {
return closed;
}
@Override
public void close() throws IOException {
super.close();
this.closed = true;
}
}
}
| TestByteArrayInputStream |
java | apache__flink | flink-runtime/src/test/java/org/apache/flink/runtime/operators/coordination/CoordinatorEventsExactlyOnceITCase.java | {
"start": 11501,
"end": 12677
} | class ____ implements CoordinationResponse {
final int value;
private IntegerResponse(int value) {
this.value = value;
}
}
// ------------------------------------------------------------------------
/**
* The coordinator that sends events and completes checkpoints.
*
* <p>All consistency guaranteed for the coordinator apply to order or method invocations (like
* {@link #executionAttemptFailed(int, int, Throwable)}}, {@link #subtaskReset(int, long)} or
* {@link #checkpointCoordinator(long, CompletableFuture)}) and the order in which actions are
* done (sending events and completing checkpoints). Tho consistently evaluate this, but with
* concurrency against the scheduler thread that calls this coordinator implements a simple
* mailbox that moves the method handling into a separate thread, but keeps the order.
*
* <p>It would inject a failure at some point while sending out operator events. This behavior
* helps to trigger a fail-over of the Flink job and test the exactly-once of events delivery in
* this case.
*/
protected static | IntegerResponse |
java | FasterXML__jackson-databind | src/test/java/tools/jackson/databind/deser/jdk/StringArrayDeserializer5165Test.java | {
"start": 856,
"end": 2522
} | class ____ extends StdDeserializer<String> {
public EmptyStringToNullDeserializer() {
super(String.class);
}
@Override
public String deserialize(JsonParser p, DeserializationContext ctxt) {
String value = p.getValueAsString();
if (value != null && value.isEmpty()) {
return null;
}
return value;
}
}
private ObjectMapper createMapperWithCustomDeserializer() {
SimpleModule module = new SimpleModule()
.addDeserializer(String.class, new EmptyStringToNullDeserializer());
return JsonMapper.builder()
.addModule(module)
.changeDefaultNullHandling(n -> JsonSetter.Value.forContentNulls(Nulls.FAIL))
.build();
}
@Test
public void nullsFailTest() {
ObjectMapper mapper = createMapperWithCustomDeserializer();
assertThrows(
InvalidNullException.class,
() -> mapper.readValue("{\"array\":[\"\"]}", Dst.class)
);
}
@Test
public void nullsSkipTest() throws Exception {
SimpleModule module = new SimpleModule()
.addDeserializer(String.class, new EmptyStringToNullDeserializer());
ObjectMapper mapper = JsonMapper.builder()
.addModule(module)
.changeDefaultNullHandling(n -> JsonSetter.Value.forContentNulls(Nulls.SKIP))
.build();
Dst dst = mapper.readValue("{\"array\":[\"\"]}", Dst.class);
assertEquals(0, dst.array.length, "Null values should be skipped");
}
}
| EmptyStringToNullDeserializer |
java | apache__hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/XMLUtils.java | {
"start": 1524,
"end": 8898
} | class ____ {
private static final Logger LOG =
LoggerFactory.getLogger(XMLUtils.class);
public static final String DISALLOW_DOCTYPE_DECL =
"http://apache.org/xml/features/disallow-doctype-decl";
public static final String LOAD_EXTERNAL_DECL =
"http://apache.org/xml/features/nonvalidating/load-external-dtd";
public static final String EXTERNAL_GENERAL_ENTITIES =
"http://xml.org/sax/features/external-general-entities";
public static final String EXTERNAL_PARAMETER_ENTITIES =
"http://xml.org/sax/features/external-parameter-entities";
public static final String CREATE_ENTITY_REF_NODES =
"http://apache.org/xml/features/dom/create-entity-ref-nodes";
public static final String VALIDATION =
"http://xml.org/sax/features/validation";
private static final AtomicBoolean CAN_SET_TRANSFORMER_ACCESS_EXTERNAL_DTD =
new AtomicBoolean(true);
private static final AtomicBoolean CAN_SET_TRANSFORMER_ACCESS_EXTERNAL_STYLESHEET =
new AtomicBoolean(true);
/**
* Transform input xml given a stylesheet.
*
* @param styleSheet the style-sheet
* @param xml input xml data
* @param out output
* @throws TransformerConfigurationException synopsis signals a problem
* creating a transformer object.
* @throws TransformerException this is used for throwing processor
* exceptions before the processing has started.
*/
public static void transform(
InputStream styleSheet, InputStream xml, Writer out
)
throws TransformerConfigurationException, TransformerException {
// Instantiate a TransformerFactory
TransformerFactory tFactory = newSecureTransformerFactory();
// Use the TransformerFactory to process the
// stylesheet and generate a Transformer
Transformer transformer = tFactory.newTransformer(
new StreamSource(styleSheet)
);
// Use the Transformer to transform an XML Source
// and send the output to a Result object.
transformer.transform(new StreamSource(xml), new StreamResult(out));
}
/**
* This method should be used if you need a {@link DocumentBuilderFactory}. Use this method
* instead of {@link DocumentBuilderFactory#newInstance()}. The factory that is returned has
* secure configuration enabled.
*
* @return a {@link DocumentBuilderFactory} with secure configuration enabled
* @throws ParserConfigurationException if the {@code JAXP} parser does not support the
* secure configuration
*/
public static DocumentBuilderFactory newSecureDocumentBuilderFactory()
throws ParserConfigurationException {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
dbf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
dbf.setFeature(DISALLOW_DOCTYPE_DECL, true);
dbf.setFeature(LOAD_EXTERNAL_DECL, false);
dbf.setFeature(EXTERNAL_GENERAL_ENTITIES, false);
dbf.setFeature(EXTERNAL_PARAMETER_ENTITIES, false);
dbf.setFeature(CREATE_ENTITY_REF_NODES, false);
return dbf;
}
/**
* This method should be used if you need a {@link SAXParserFactory}. Use this method
* instead of {@link SAXParserFactory#newInstance()}. The factory that is returned has
* secure configuration enabled.
*
* @return a {@link SAXParserFactory} with secure configuration enabled
* @throws ParserConfigurationException if the {@code JAXP} parser does not support the
* secure configuration
* @throws SAXException if there are another issues when creating the factory
*/
public static SAXParserFactory newSecureSAXParserFactory()
throws SAXException, ParserConfigurationException {
SAXParserFactory spf = SAXParserFactory.newInstance();
spf.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
spf.setFeature(DISALLOW_DOCTYPE_DECL, true);
spf.setFeature(LOAD_EXTERNAL_DECL, false);
spf.setFeature(EXTERNAL_GENERAL_ENTITIES, false);
spf.setFeature(EXTERNAL_PARAMETER_ENTITIES, false);
return spf;
}
/**
* This method should be used if you need a {@link TransformerFactory}. Use this method
* instead of {@link TransformerFactory#newInstance()}. The factory that is returned has
* secure configuration enabled.
*
* @return a {@link TransformerFactory} with secure configuration enabled
* @throws TransformerConfigurationException if the {@code JAXP} transformer does not
* support the secure configuration
*/
public static TransformerFactory newSecureTransformerFactory()
throws TransformerConfigurationException {
TransformerFactory trfactory = TransformerFactory.newInstance();
trfactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
setOptionalSecureTransformerAttributes(trfactory);
return trfactory;
}
/**
* This method should be used if you need a {@link SAXTransformerFactory}. Use this method
* instead of {@link SAXTransformerFactory#newInstance()}. The factory that is returned has
* secure configuration enabled.
*
* @return a {@link SAXTransformerFactory} with secure configuration enabled
* @throws TransformerConfigurationException if the {@code JAXP} transformer does not
* support the secure configuration
*/
public static SAXTransformerFactory newSecureSAXTransformerFactory()
throws TransformerConfigurationException {
SAXTransformerFactory trfactory = (SAXTransformerFactory) SAXTransformerFactory.newInstance();
trfactory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, true);
setOptionalSecureTransformerAttributes(trfactory);
return trfactory;
}
/**
* These attributes are recommended for maximum security but some JAXP transformers do
* not support them. If at any stage, we fail to set these attributes, then we won't try again
* for subsequent transformers.
*
* @param transformerFactory to update
*/
private static void setOptionalSecureTransformerAttributes(
TransformerFactory transformerFactory) {
bestEffortSetAttribute(transformerFactory, CAN_SET_TRANSFORMER_ACCESS_EXTERNAL_DTD,
XMLConstants.ACCESS_EXTERNAL_DTD, "");
bestEffortSetAttribute(transformerFactory, CAN_SET_TRANSFORMER_ACCESS_EXTERNAL_STYLESHEET,
XMLConstants.ACCESS_EXTERNAL_STYLESHEET, "");
}
/**
* Set an attribute value on a {@link TransformerFactory}. If the TransformerFactory
* does not support the attribute, the method just returns <code>false</code> and
* logs the issue at debug level.
*
* @param transformerFactory to update
* @param flag that indicates whether to do the update and the flag can be set to
* <code>false</code> if an update fails
* @param name of the attribute to set
* @param value to set on the attribute
*/
static void bestEffortSetAttribute(TransformerFactory transformerFactory, AtomicBoolean flag,
String name, Object value) {
if (flag.get()) {
try {
transformerFactory.setAttribute(name, value);
} catch (Throwable t) {
flag.set(false);
LOG.debug("Issue setting TransformerFactory attribute {}: {}", name, t.toString());
}
}
}
}
| XMLUtils |
java | google__guice | core/test/com/google/inject/spi/InjectionPointTest.java | {
"start": 16864,
"end": 18460
} | class ____ improper generic types.
*/
public void testSyntheticBridgeMethodsInSubclasses() {
Set<InjectionPoint> points;
points = InjectionPoint.forInstanceMethodsAndFields(RestrictedSuper.class);
assertPointDependencies(points, new TypeLiteral<Provider<String>>() {});
assertEquals(points.toString(), 2, points.size());
assertPoints(points, RestrictedSuper.class, "jInject", "gInject");
points = InjectionPoint.forInstanceMethodsAndFields(ExposedSub.class);
assertPointDependencies(points, new TypeLiteral<Provider<String>>() {});
assertEquals(points.toString(), 2, points.size());
assertPoints(points, RestrictedSuper.class, "jInject", "gInject");
}
private void assertPoints(
Iterable<InjectionPoint> points, Class<?> clazz, String... methodNames) {
Set<String> methods = new HashSet<String>();
for (InjectionPoint point : points) {
if (point.getDeclaringType().getRawType() == clazz) {
methods.add(point.getMember().getName());
}
}
assertEquals(points.toString(), ImmutableSet.copyOf(methodNames), methods);
}
/** Asserts that each injection point has the specified dependencies, in the given order. */
private void assertPointDependencies(
Iterable<InjectionPoint> points, TypeLiteral<?>... literals) {
for (InjectionPoint point : points) {
assertEquals(literals.length, point.getDependencies().size());
for (Dependency<?> dep : point.getDependencies()) {
assertEquals(literals[dep.getParameterIndex()], dep.getKey().getTypeLiteral());
}
}
}
static | with |
java | micronaut-projects__micronaut-core | http-client-core/src/main/java/io/micronaut/http/client/bind/DefaultHttpClientBinderRegistry.java | {
"start": 2661,
"end": 11845
} | class ____ implements HttpClientBinderRegistry {
private static final Logger LOG = LoggerFactory.getLogger(HttpClientBinderRegistry.class);
private final Map<Class<? extends Annotation>, ClientArgumentRequestBinder<?>> byAnnotation = new LinkedHashMap<>();
private final Map<Integer, ClientArgumentRequestBinder<?>> byType = new LinkedHashMap<>();
private final Map<Class<? extends Annotation>, AnnotatedClientRequestBinder<?>> methodByAnnotation = new LinkedHashMap<>();
/**
* @param conversionService The conversion service
* @param binders The request binders
* @param beanContext The context to resolve beans
*/
protected DefaultHttpClientBinderRegistry(ConversionService conversionService,
List<ClientRequestBinder> binders,
BeanContext beanContext) {
byType.put(Argument.of(HttpHeaders.class).typeHashCode(), (ClientArgumentRequestBinder<HttpHeaders>) (context, uriContext, value, request) -> value.forEachValue(request::header));
byType.put(Argument.of(Cookies.class).typeHashCode(), (ClientArgumentRequestBinder<Cookies>) (context, uriContext, value, request) -> request.cookies(value.getAll()));
byType.put(Argument.of(Cookie.class).typeHashCode(), (ClientArgumentRequestBinder<Cookie>) (context, uriContext, value, request) -> request.cookie(value));
byType.put(Argument.of(BasicAuth.class).typeHashCode(), (ClientArgumentRequestBinder<BasicAuth>) (context, uriContext, value, request) -> request.basicAuth(value.getUsername(), value.getPassword()));
byType.put(Argument.of(Locale.class).typeHashCode(), (ClientArgumentRequestBinder<Locale>) (context, uriContext, value, request) -> request.header(HttpHeaders.ACCEPT_LANGUAGE, value.toLanguageTag()));
byAnnotation.put(QueryValue.class, new QueryValueClientArgumentRequestBinder(conversionService));
byAnnotation.put(PathVariable.class, (context, uriContext, value, request) -> {
String parameterName = context.getAnnotationMetadata().stringValue(PathVariable.class)
.filter (StringUtils::isNotEmpty)
.orElse(context.getArgument().getName());
conversionService.convert(value, ConversionContext.STRING.with(context.getAnnotationMetadata()))
.filter(StringUtils::isNotEmpty)
.ifPresent(param -> uriContext.getPathParameters().put(parameterName, param));
});
byAnnotation.put(CookieValue.class, (context, uriContext, value, request) -> {
String cookieName = context.getAnnotationMetadata().stringValue(CookieValue.class)
.filter(StringUtils::isNotEmpty)
.orElse(context.getArgument().getName());
conversionService.convert(value, String.class)
.ifPresent(o -> request.cookie(Cookie.of(cookieName, o)));
});
byAnnotation.put(Header.class, (context, uriContext, value, request) -> {
AnnotationMetadata annotationMetadata = context.getAnnotationMetadata();
String headerName = annotationMetadata
.stringValue(Header.class)
.filter(StringUtils::isNotEmpty)
.orElseGet(() -> annotationMetadata.stringValue(Header.class, "name").orElse(NameUtils.hyphenate(context.getArgument().getName())));
conversionService.convert(value, String.class)
.ifPresent(header -> request.getHeaders().set(headerName, header));
});
byAnnotation.put(RequestAttribute.class, (context, uriContext, value, request) -> {
AnnotationMetadata annotationMetadata = context.getAnnotationMetadata();
String name = context.getArgument().getName();
String attributeName = annotationMetadata
.stringValue(RequestAttribute.class)
.filter(StringUtils::isNotEmpty)
.orElse(NameUtils.hyphenate(name));
request.getAttributes().put(attributeName, value);
conversionService.convert(value, ConversionContext.STRING.with(context.getAnnotationMetadata()))
.filter(StringUtils::isNotEmpty)
.ifPresent(param -> {
if (uriContext.getUriTemplate().getVariableNames().contains(name)) {
uriContext.getPathParameters().put(name, param);
}
});
});
byAnnotation.put(Body.class, (context, uriContext, value, request) -> request.body(value));
byAnnotation.put(RequestBean.class, (context, uriContext, value, request) -> {
BeanIntrospection<Object> introspection = BeanIntrospection.getIntrospection(context.getArgument().getType());
for (BeanProperty<Object, Object> beanProperty : introspection.getBeanProperties()) {
findArgumentBinder(beanProperty.asArgument()).ifPresent(binder -> {
Object propertyValue = beanProperty.get(value);
if (propertyValue != null) {
((ClientArgumentRequestBinder<Object>) binder).bind(context.with(beanProperty.asArgument()), uriContext, propertyValue, request);
}
});
}
});
methodByAnnotation.put(Header.class, new HeaderClientRequestBinder());
methodByAnnotation.put(Version.class, new VersionClientRequestBinder(beanContext));
methodByAnnotation.put(RequestAttribute.class, new AttributeClientRequestBinder());
if (KOTLIN_COROUTINES_SUPPORTED) {
//Clients should do nothing with the continuation
byType.put(Argument.of(Continuation.class).typeHashCode(), (context, uriContext, value, request) -> { });
}
if (CollectionUtils.isNotEmpty(binders)) {
for (ClientRequestBinder binder: binders) {
addBinder(binder);
}
}
}
@Override
public <T> Optional<ClientArgumentRequestBinder<?>> findArgumentBinder(@NonNull Argument<T> argument) {
Optional<Class<? extends Annotation>> opt = argument.getAnnotationMetadata().getAnnotationTypeByStereotype(Bindable.class);
if (opt.isPresent()) {
Class<? extends Annotation> annotationType = opt.get();
ClientArgumentRequestBinder<?> binder = byAnnotation.get(annotationType);
return Optional.ofNullable(binder);
} else {
Optional<ClientArgumentRequestBinder<?>> typeBinder = findTypeBinder(argument);
if (typeBinder.isPresent()) {
return typeBinder;
}
if (argument.isOptional()) {
Argument<?> typeArgument = argument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT);
return findTypeBinder(typeArgument);
}
return Optional.empty();
}
}
@Override
public Optional<AnnotatedClientRequestBinder<?>> findAnnotatedBinder(@NonNull Class<?> annotationType) {
return Optional.ofNullable(methodByAnnotation.get(annotationType));
}
/**
* Adds a binder to the registry.
*
* @param binder The binder
* @param <T> The type
*/
public <T> void addBinder(ClientRequestBinder binder) {
if (binder instanceof AnnotatedClientRequestBinder annotatedBinder) {
methodByAnnotation.put(annotatedBinder.getAnnotationType(), annotatedBinder);
} else if (binder instanceof AnnotatedClientArgumentRequestBinder annotatedRequestArgumentBinder) {
Class<? extends Annotation> annotationType = annotatedRequestArgumentBinder.getAnnotationType();
byAnnotation.put(annotationType, annotatedRequestArgumentBinder);
} else if (binder instanceof TypedClientArgumentRequestBinder typedRequestArgumentBinder) {
byType.put(typedRequestArgumentBinder.argumentType().typeHashCode(), typedRequestArgumentBinder);
List<Class<?>> superTypes = typedRequestArgumentBinder.superTypes();
if (CollectionUtils.isNotEmpty(superTypes)) {
for (Class<?> superType : superTypes) {
byType.put(Argument.of(superType).typeHashCode(), typedRequestArgumentBinder);
}
}
} else {
if (LOG.isErrorEnabled()) {
LOG.error("The client request binder {} was rejected because it does not implement {}, {}, or {}", binder.getClass().getName(), TypedClientArgumentRequestBinder.class.getName(), AnnotatedClientArgumentRequestBinder.class.getName(), AnnotatedClientRequestBinder.class.getName());
}
}
}
private <T> Optional<ClientArgumentRequestBinder<?>> findTypeBinder(Argument<T> argument) {
ClientArgumentRequestBinder<?> binder = byType.get(argument.typeHashCode());
if (binder != null) {
return Optional.of(binder);
}
return Optional.ofNullable(byType.get(Argument.of(argument.getType()).typeHashCode()));
}
}
| DefaultHttpClientBinderRegistry |
java | apache__flink | flink-end-to-end-tests/flink-end-to-end-tests-common/src/main/java/org/apache/flink/tests/util/util/FactoryUtils.java | {
"start": 1067,
"end": 1227
} | enum ____ {
;
private static final Logger LOG = LoggerFactory.getLogger(FactoryUtils.class);
/**
* Loads all factories for the given | FactoryUtils |
java | grpc__grpc-java | xds/src/main/java/io/grpc/xds/XdsDependencyManager.java | {
"start": 20658,
"end": 21151
} | class ____ implements XdsConfig.Subscription {
private final String clusterName;
boolean closed; // Accessed from syncContext
public ClusterSubscription(String clusterName) {
this.clusterName = checkNotNull(clusterName, "clusterName");
}
String getClusterName() {
return clusterName;
}
@Override
public void close() {
releaseSubscription(this);
}
}
/** State for tracing garbage collector. */
private static final | ClusterSubscription |
java | spring-projects__spring-boot | build-plugin/spring-boot-maven-plugin/src/dockerTest/java/org/springframework/boot/maven/BuildImageTests.java | {
"start": 2108,
"end": 30692
} | class ____ extends AbstractArchiveIntegrationTests {
@TestTemplate
void whenBuildImageIsInvokedWithoutRepackageTheArchiveIsRepackagedOnTheFly(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
File jar = new File(project, "target/build-image-0.0.1.BUILD-SNAPSHOT.jar");
assertThat(jar).isFile();
File original = new File(project, "target/build-image-0.0.1.BUILD-SNAPSHOT.jar.original");
assertThat(original).doesNotExist();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image:0.0.1.BUILD-SNAPSHOT")
.contains("Running detector")
.contains("Running builder")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedOnTheCommandLineWithoutRepackageTheArchiveIsRepackagedOnTheFly(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-cmd-line")
.goals("spring-boot:build-image")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
File jar = new File(project, "target/build-image-cmd-line-0.0.1.BUILD-SNAPSHOT.jar");
assertThat(jar).isFile();
File original = new File(project, "target/build-image-cmd-line-0.0.1.BUILD-SNAPSHOT.jar.original");
assertThat(original).doesNotExist();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-cmd-line:0.0.1.BUILD-SNAPSHOT")
.contains("Running detector")
.contains("Running builder")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image-cmd-line", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenPackageIsInvokedWithClassifierTheOriginalArchiveIsFound(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-classifier")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
File jar = new File(project, "target/build-image-classifier-0.0.1.BUILD-SNAPSHOT.jar");
assertThat(jar).isFile();
File classifier = new File(project, "target/build-image-classifier-0.0.1.BUILD-SNAPSHOT-test.jar");
assertThat(classifier).doesNotExist();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-classifier:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image-classifier", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithClassifierAndRepackageTheOriginalArchiveIsFound(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-fork-classifier")
.goals("spring-boot:build-image")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
File jar = new File(project, "target/build-image-fork-classifier-0.0.1.BUILD-SNAPSHOT.jar");
assertThat(jar).isFile();
File classifier = new File(project, "target/build-image-fork-classifier-0.0.1.BUILD-SNAPSHOT-exec.jar");
assertThat(classifier).exists();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-fork-classifier:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image-fork-classifier", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithClassifierSourceWithoutRepackageTheArchiveIsRepackagedOnTheFly(
MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-classifier-source")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
File jar = new File(project, "target/build-image-classifier-source-0.0.1.BUILD-SNAPSHOT-test.jar");
assertThat(jar).isFile();
File original = new File(project,
"target/build-image-classifier-source-0.0.1.BUILD-SNAPSHOT-test.jar.original");
assertThat(original).doesNotExist();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-classifier-source:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image-classifier-source", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithRepackageTheExistingArchiveIsUsed(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-with-repackage")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
File jar = new File(project, "target/build-image-with-repackage-0.0.1.BUILD-SNAPSHOT.jar");
assertThat(jar).isFile();
File original = new File(project,
"target/build-image-with-repackage-0.0.1.BUILD-SNAPSHOT.jar.original");
assertThat(original).isFile();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-with-repackage:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image-with-repackage", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithClassifierAndRepackageTheExistingArchiveIsUsed(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-classifier-with-repackage")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
File jar = new File(project, "target/build-image-classifier-with-repackage-0.0.1.BUILD-SNAPSHOT.jar");
assertThat(jar).isFile();
File original = new File(project,
"target/build-image-classifier-with-repackage-0.0.1.BUILD-SNAPSHOT-test.jar");
assertThat(original).isFile();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-classifier-with-repackage:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image-classifier-with-repackage", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithClassifierSourceAndRepackageTheExistingArchiveIsUsed(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-classifier-source-with-repackage")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
File jar = new File(project,
"target/build-image-classifier-source-with-repackage-0.0.1.BUILD-SNAPSHOT-test.jar");
assertThat(jar).isFile();
File original = new File(project,
"target/build-image-classifier-source-with-repackage-0.0.1.BUILD-SNAPSHOT-test.jar.original");
assertThat(original).isFile();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-classifier-source-with-repackage:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image-classifier-source-with-repackage", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithWarPackaging(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-war-packaging")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
File war = new File(project, "target/build-image-war-packaging-0.0.1.BUILD-SNAPSHOT.war");
assertThat(war).isFile();
File original = new File(project, "target/build-image-war-packaging-0.0.1.BUILD-SNAPSHOT.war.original");
assertThat(original).doesNotExist();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-war-packaging:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image-war-packaging", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithCustomImageName(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-custom-name")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.systemProperty("spring-boot.build-image.imageName", "example.com/test/property-ignored:pom-preferred")
.execute((project) -> {
File jar = new File(project, "target/build-image-custom-name-0.0.1.BUILD-SNAPSHOT.jar");
assertThat(jar).isFile();
File original = new File(project, "target/build-image-custom-name-0.0.1.BUILD-SNAPSHOT.jar.original");
assertThat(original).doesNotExist();
assertThat(buildLog(project)).contains("Building image")
.contains("example.com/test/build-image:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("example.com/test/build-image", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithCommandLineParameters(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.systemProperty("spring-boot.build-image.imageName", "example.com/test/cmd-property-name:v1")
.systemProperty("spring-boot.build-image.builder", "ghcr.io/spring-io/spring-boot-cnb-test-builder:0.0.2")
.systemProperty("spring-boot.build-image.trustBuilder", "true")
.systemProperty("spring-boot.build-image.runImage", "paketobuildpacks/run-noble-tiny")
.systemProperty("spring-boot.build-image.createdDate", "2020-07-01T12:34:56Z")
.systemProperty("spring-boot.build-image.applicationDirectory", "/application")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("example.com/test/cmd-property-name:v1")
.contains("Running creator")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
Image image = new DockerApi().image()
.inspect(ImageReference.of("example.com/test/cmd-property-name:v1"));
assertThat(image.getCreated()).isEqualTo("2020-07-01T12:34:56Z");
removeImage("example.com/test/cmd-property-name", "v1");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithCustomBuilderImageAndRunImage(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-custom-builder")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-v2-builder:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("docker.io/library/build-image-v2-builder", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithTrustBuilder(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-trust-builder")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-v2-trust-builder:0.0.1.BUILD-SNAPSHOT")
.contains("Running creator")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("docker.io/library/build-image-v2-trust-builder", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithEmptyEnvEntry(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-empty-env-entry")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.prepare(this::writeLongNameResource)
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-empty-env-entry:0.0.1.BUILD-SNAPSHOT")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("build-image-empty-env-entry", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithZipPackaging(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-zip-packaging")
.goals("package")
.prepare(this::writeLongNameResource)
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
File jar = new File(project, "target/build-image-zip-packaging-0.0.1.BUILD-SNAPSHOT.jar");
assertThat(jar).isFile();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-zip-packaging:0.0.1.BUILD-SNAPSHOT")
.contains("Main-Class: org.springframework.boot.loader.launch.PropertiesLauncher")
.contains("Successfully built image");
removeImage("build-image-zip-packaging", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithBuildpacks(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-custom-buildpacks")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-custom-buildpacks:0.0.1.BUILD-SNAPSHOT")
.contains("Successfully built image");
removeImage("build-image-custom-buildpacks", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithBinding(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-bindings")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-bindings:0.0.1.BUILD-SNAPSHOT")
.contains("binding: ca-certificates/type=ca-certificates")
.contains("binding: ca-certificates/test.crt=---certificate one---")
.contains("Successfully built image");
removeImage("build-image-bindings", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithNetworkModeNone(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-network")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-network:0.0.1.BUILD-SNAPSHOT")
.contains("Network status: curl failed")
.contains("Successfully built image");
removeImage("build-image-network", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedOnMultiModuleProjectWithPackageGoal(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-multi-module")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-multi-module-app:0.0.1.BUILD-SNAPSHOT")
.contains("Successfully built image");
removeImage("build-image-multi-module-app", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithTags(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-tags")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-tags:0.0.1.BUILD-SNAPSHOT")
.contains("Successfully built image")
.contains("docker.io/library/build-image-tags:latest")
.contains("Successfully created image tag");
removeImage("build-image-tags", "0.0.1.BUILD-SNAPSHOT");
removeImage("build-image-tags", "latest");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithVolumeCaches(MavenBuild mavenBuild) {
String testBuildId = randomString();
mavenBuild.project("dockerTest", "build-image-volume-caches")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.systemProperty("test-build-id", testBuildId)
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-volume-caches:0.0.1.BUILD-SNAPSHOT")
.contains("Successfully built image");
removeImage("build-image-volume-caches", "0.0.1.BUILD-SNAPSHOT");
deleteVolumes("cache-" + testBuildId + ".build", "cache-" + testBuildId + ".launch");
});
}
@TestTemplate
@EnabledOnOs(value = OS.LINUX, disabledReason = "Works with Docker Engine on Linux but is not reliable with "
+ "Docker Desktop on other OSs")
void whenBuildImageIsInvokedWithBindCaches(MavenBuild mavenBuild) {
String testBuildId = randomString();
mavenBuild.project("dockerTest", "build-image-bind-caches")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.systemProperty("test-build-id", testBuildId)
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-bind-caches:0.0.1.BUILD-SNAPSHOT")
.contains("Successfully built image");
removeImage("build-image-bind-caches", "0.0.1.BUILD-SNAPSHOT");
String tempDir = System.getProperty("java.io.tmpdir");
Path buildCachePath = Paths.get(tempDir, "junit-image-cache-" + testBuildId + "-build");
Path launchCachePath = Paths.get(tempDir, "junit-image-cache-" + testBuildId + "-launch");
assertThat(buildCachePath).exists().isDirectory();
assertThat(launchCachePath).exists().isDirectory();
cleanupCache(buildCachePath);
cleanupCache(launchCachePath);
});
}
private static void cleanupCache(Path cachePath) {
try {
FileSystemUtils.deleteRecursively(cachePath);
}
catch (Exception ex) {
// ignore
}
}
@TestTemplate
void whenBuildImageIsInvokedWithCreatedDate(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-created-date")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-created-date:0.0.1.BUILD-SNAPSHOT")
.contains("Successfully built image");
Image image = new DockerApi().image()
.inspect(ImageReference.of("docker.io/library/build-image-created-date:0.0.1.BUILD-SNAPSHOT"));
assertThat(image.getCreated()).isEqualTo("2020-07-01T12:34:56Z");
removeImage("build-image-created-date", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithCurrentCreatedDate(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-current-created-date")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-current-created-date:0.0.1.BUILD-SNAPSHOT")
.contains("Successfully built image");
Image image = new DockerApi().image()
.inspect(ImageReference
.of("docker.io/library/build-image-current-created-date:0.0.1.BUILD-SNAPSHOT"));
OffsetDateTime createdDateTime = OffsetDateTime.parse(image.getCreated());
OffsetDateTime current = OffsetDateTime.now().withOffsetSameInstant(createdDateTime.getOffset());
assertThat(createdDateTime.getYear()).isEqualTo(current.getYear());
assertThat(createdDateTime.getMonth()).isEqualTo(current.getMonth());
assertThat(createdDateTime.getDayOfMonth()).isEqualTo(current.getDayOfMonth());
removeImage("build-image-current-created-date", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithApplicationDirectory(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-app-dir")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-app-dir:0.0.1.BUILD-SNAPSHOT")
.contains("Successfully built image");
removeImage("build-image-app-dir", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
void whenBuildImageIsInvokedWithEmptySecurityOptions(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-security-opts")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.execute((project) -> {
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-security-opts:0.0.1.BUILD-SNAPSHOT")
.contains("Successfully built image");
removeImage("build-image-security-opts", "0.0.1.BUILD-SNAPSHOT");
});
}
@TestTemplate
@EnabledOnOs(value = { OS.LINUX, OS.MAC }, architectures = "aarch64",
disabledReason = "Lifecycle will only run on ARM architecture")
void whenBuildImageIsInvokedOnLinuxArmWithImagePlatformLinuxArm(MavenBuild mavenBuild) throws IOException {
String builderImage = "ghcr.io/spring-io/spring-boot-cnb-test-builder:0.0.2";
String runImage = "docker.io/paketobuildpacks/run-noble-tiny:latest";
String buildpackImage = "ghcr.io/spring-io/spring-boot-test-info:0.0.2";
removeImages(builderImage, runImage, buildpackImage);
mavenBuild.project("dockerTest", "build-image-platform-linux-arm").goals("package").execute((project) -> {
File jar = new File(project, "target/build-image-platform-linux-arm-0.0.1.BUILD-SNAPSHOT.jar");
assertThat(jar).isFile();
assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-platform-linux-arm:0.0.1.BUILD-SNAPSHOT")
.contains("Pulling builder image '" + builderImage + "' for platform 'linux/arm64'")
.contains("Pulling run image '" + runImage + "' for platform 'linux/arm64'")
.contains("Pulling buildpack image '" + buildpackImage + "' for platform 'linux/arm64'")
.contains("---> Test Info buildpack building")
.contains("---> Test Info buildpack done")
.contains("Successfully built image");
removeImage("docker.io/library/build-image-platform-linux-arm", "0.0.1.BUILD-SNAPSHOT");
});
removeImages(builderImage, runImage, buildpackImage);
}
@TestTemplate
@EnabledOnOs(value = { OS.LINUX, OS.MAC }, architectures = "amd64",
disabledReason = "The expected failure condition will not fail on ARM architectures")
void failsWhenBuildImageIsInvokedOnLinuxAmdWithImagePlatformLinuxArm(MavenBuild mavenBuild) throws IOException {
String builderImage = "ghcr.io/spring-io/spring-boot-cnb-test-builder:0.0.2";
String runImage = "docker.io/paketobuildpacks/run-noble-tiny:latest";
String buildpackImage = "ghcr.io/spring-io/spring-boot-test-info:0.0.2";
removeImages(buildpackImage, runImage, buildpackImage);
mavenBuild.project("dockerTest", "build-image-platform-linux-arm")
.goals("package")
.executeAndFail((project) -> assertThat(buildLog(project)).contains("Building image")
.contains("docker.io/library/build-image-platform-linux-arm:0.0.1.BUILD-SNAPSHOT")
.contains("Pulling builder image '" + builderImage + "' for platform 'linux/arm64'")
.contains("Pulling run image '" + runImage + "' for platform 'linux/arm64'")
.contains("Pulling buildpack image '" + buildpackImage + "' for platform 'linux/arm64'")
.contains("exec format error"));
removeImages(builderImage, runImage, buildpackImage);
}
@TestTemplate
void failsWhenBuildImageIsInvokedOnMultiModuleProjectWithBuildImageGoal(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-multi-module")
.goals("spring-boot:build-image")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.executeAndFail((project) -> assertThat(buildLog(project)).contains("Error packaging archive for image"));
}
@TestTemplate
void failsWhenBuilderFails(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-builder-error")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.executeAndFail((project) -> assertThat(buildLog(project)).contains("Building image")
.contains("---> Test Info buildpack building")
.contains("Forced builder failure")
.containsPattern("Builder lifecycle '.*' failed with status code"));
}
@TestTemplate
void failsWithBuildpackNotInBuilder(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-bad-buildpack")
.goals("package")
.systemProperty("spring-boot.build-image.pullPolicy", "IF_NOT_PRESENT")
.executeAndFail((project) -> assertThat(buildLog(project))
.contains("'urn:cnb:builder:example/does-not-exist:0.0.1' not found in builder"));
}
@TestTemplate
void failsWhenFinalNameIsMisconfigured(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-final-name")
.goals("package")
.executeAndFail((project) -> assertThat(buildLog(project)).contains("final-name.jar.original")
.contains("is required for building an image"));
}
@TestTemplate
void failsWhenCachesAreConfiguredTwice(MavenBuild mavenBuild) {
mavenBuild.project("dockerTest", "build-image-caches-multiple")
.goals("package")
.executeAndFail((project) -> assertThat(buildLog(project))
.contains("Each image building cache can be configured only once"));
}
private void writeLongNameResource(File project) {
StringBuilder name = new StringBuilder();
new Random().ints('a', 'z' + 1).limit(128).forEach((i) -> name.append((char) i));
try {
Path path = project.toPath().resolve(Paths.get("src", "main", "resources", name.toString()));
Files.createDirectories(path.getParent());
Files.createFile(path);
}
catch (IOException ex) {
throw new RuntimeException(ex);
}
}
private void removeImages(String... names) throws IOException {
ImageApi imageApi = new DockerApi().image();
for (String name : names) {
try {
imageApi.remove(ImageReference.of(name), false);
}
catch (DockerEngineException ex) {
// ignore image remove failures
}
}
}
private void removeImage(String name, String version) {
ImageReference imageReference = ImageReference.of(ImageName.of(name), version);
try {
new DockerApi().image().remove(imageReference, false);
}
catch (IOException ex) {
throw new IllegalStateException("Failed to remove docker image " + imageReference, ex);
}
}
private void deleteVolumes(String... names) throws IOException {
VolumeApi volumeApi = new DockerApi().volume();
for (String name : names) {
volumeApi.delete(VolumeName.of(name), false);
}
}
private String randomString() {
IntStream chars = new Random().ints('a', 'z' + 1).limit(10);
return chars.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append).toString();
}
}
| BuildImageTests |
java | apache__flink | flink-runtime-web/src/main/java/org/apache/flink/runtime/webmonitor/handlers/JarListInfo.java | {
"start": 2694,
"end": 4490
} | class ____ {
public static final String JAR_FILE_FIELD_ID = "id";
public static final String JAR_FILE_FIELD_NAME = "name";
public static final String JAR_FILE_FIELD_UPLOADED = "uploaded";
public static final String JAR_FILE_FIELD_ENTRY = "entry";
@JsonProperty(JAR_FILE_FIELD_ID)
public String id;
@JsonProperty(JAR_FILE_FIELD_NAME)
public String name;
@JsonProperty(JAR_FILE_FIELD_UPLOADED)
private long uploaded;
@JsonProperty(JAR_FILE_FIELD_ENTRY)
private List<JarEntryInfo> jarEntryList;
@JsonCreator
public JarFileInfo(
@JsonProperty(JAR_FILE_FIELD_ID) String id,
@JsonProperty(JAR_FILE_FIELD_NAME) String name,
@JsonProperty(JAR_FILE_FIELD_UPLOADED) long uploaded,
@JsonProperty(JAR_FILE_FIELD_ENTRY) List<JarEntryInfo> jarEntryList) {
this.id = checkNotNull(id);
this.name = checkNotNull(name);
this.uploaded = uploaded;
this.jarEntryList = checkNotNull(jarEntryList);
}
@Override
public int hashCode() {
return Objects.hash(id, name, uploaded, jarEntryList);
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (null == o || this.getClass() != o.getClass()) {
return false;
}
JarFileInfo that = (JarFileInfo) o;
return Objects.equals(id, that.id)
&& Objects.equals(name, that.name)
&& uploaded == that.uploaded
&& Objects.equals(jarEntryList, that.jarEntryList);
}
}
/** Nested | JarFileInfo |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/diagnostics/analyzer/BeanCurrentlyInCreationFailureAnalyzerTests.java | {
"start": 10099,
"end": 10250
} | class ____ {
@Bean
SelfReferenceBean bean(SelfReferenceBean bean) {
return new SelfReferenceBean();
}
}
static | SelfReferenceBeanConfiguration |
java | quarkusio__quarkus | integration-tests/hibernate-orm-panache/src/main/java/io/quarkus/it/panache/defaultpu/Beer.java | {
"start": 191,
"end": 380
} | class ____ extends PanacheEntity {
public String name;
@Transactional
public static void deleteAllAndPersist(Beer beer) {
deleteAll();
persist(beer);
}
}
| Beer |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/expr/SQLSequenceExpr.java | {
"start": 807,
"end": 1836
} | class ____ extends SQLExprImpl implements SQLReplaceable {
private SQLName sequence;
private Function function;
public SQLSequenceExpr() {
}
public SQLSequenceExpr(SQLName sequence, Function function) {
this.sequence = sequence;
this.function = function;
}
public SQLSequenceExpr clone() {
SQLSequenceExpr x = new SQLSequenceExpr();
if (sequence != null) {
x.setSequence(sequence.clone());
}
x.function = function;
return x;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
if (this.sequence != null) {
this.sequence.accept(visitor);
}
}
visitor.endVisit(this);
}
@Override
public boolean replace(SQLExpr expr, SQLExpr target) {
if (this.sequence == expr) {
setSequence((SQLName) target);
return true;
}
return false;
}
public static | SQLSequenceExpr |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/android/FragmentInjectionTest.java | {
"start": 3298,
"end": 3877
} | class ____ extends PreferenceActivity {
static final boolean known = true;
// BUG: Diagnostic contains: isValidFragment unconditionally returns true
protected boolean isValidFragment(String fragment) {
return known;
}
}
""")
.doTest();
}
@Test
public void isValidFragmentReturnsFalse() {
compilationHelper
.addSourceLines(
"MyPrefActivity.java",
"""
import android.preference.PreferenceActivity;
| MyPrefActivity |
java | google__error-prone | core/src/test/java/com/google/errorprone/bugpatterns/MissingCasesInEnumSwitchTest.java | {
"start": 6542,
"end": 6970
} | enum ____ {
ONE,
TWO
}
void m(Case e) {
// BUG: Diagnostic contains: ONE, TWO
switch (e) {
}
}
}
""")
.doTest();
}
@Test
public void nonExhaustive_arrowStatement() {
compilationHelper
.addSourceLines(
"Test.java",
"""
| Case |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/filter/subclass/joined/Club.java | {
"start": 692,
"end": 1449
} | class ____ {
@Id
@GeneratedValue
@Column(name="CLUB_ID")
private int id;
private String name;
@OneToMany(mappedBy="club")
@Filter(name="iqMin", condition="{h}.HUMAN_IQ >= :min", aliases={@SqlFragmentAlias(alias="h", entity=Human.class)})
@Filter(name="pregnantMembers", condition="{m}.IS_PREGNANT=1", aliases={@SqlFragmentAlias(alias="m", table="ZOOLOGY_MAMMAL")})
private Set<Human> members = new HashSet<Human>();
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public Set<Human> getMembers() {
return members;
}
public void setMembers(Set<Human> members) {
this.members = members;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
| Club |
java | apache__dubbo | dubbo-metadata/dubbo-metadata-api/src/main/java/org/apache/dubbo/metadata/MetadataInfo.java | {
"start": 32768,
"end": 33041
} | class ____ implements Comparator<URL> {
public static final URLComparator INSTANCE = new URLComparator();
@Override
public int compare(URL o1, URL o2) {
return o1.toFullString().compareTo(o2.toFullString());
}
}
}
| URLComparator |
java | quarkusio__quarkus | extensions/hibernate-orm/deployment/src/test/java/io/quarkus/hibernate/orm/boot/AmbiguousPersistenceUnitExtensionTest.java | {
"start": 1529,
"end": 1642
} | class ____ implements Interceptor {
}
@PersistenceUnitExtension
public static | PersistenceUnitInterceptor |
java | spring-projects__spring-boot | module/spring-boot-devtools/src/test/java/org/springframework/boot/devtools/env/DevToolPropertiesIntegrationTests.java | {
"start": 5826,
"end": 5954
} | class ____ {
}
@Configuration(proxyBeanMethods = false)
@ConditionalOnProperty("com.example.enabled")
static | TestConfiguration |
java | apache__flink | flink-state-backends/flink-statebackend-forst/src/main/java/org/apache/flink/state/forst/restore/ForStRestoreOperation.java | {
"start": 1005,
"end": 1243
} | interface ____ extends RestoreOperation<ForStRestoreResult>, AutoCloseable {
/** Restores state that was previously snapshot-ed from the provided state handles. */
ForStRestoreResult restore() throws Exception;
}
| ForStRestoreOperation |
java | alibaba__druid | core/src/main/java/com/alibaba/druid/sql/ast/statement/SQLAlterTableAttachPartition.java | {
"start": 812,
"end": 1538
} | class ____ extends SQLObjectImpl implements SQLAlterTableItem {
protected SQLName partitionName;
protected boolean defaultFlag;
public SQLName getPartitionName() {
return partitionName;
}
public void setPartitionName(SQLName partitionName) {
this.partitionName = partitionName;
}
public boolean isDefaultFlag() {
return defaultFlag;
}
public void setDefaultFlag(boolean defaultFlag) {
this.defaultFlag = defaultFlag;
}
@Override
protected void accept0(SQLASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, this.partitionName);
}
visitor.endVisit(this);
}
}
| SQLAlterTableAttachPartition |
java | quarkusio__quarkus | independent-projects/arc/processor/src/main/java/io/quarkus/arc/processor/bcextensions/ClassInfoImpl.java | {
"start": 754,
"end": 4904
} | class ____ extends DeclarationInfoImpl<org.jboss.jandex.ClassInfo> implements ClassInfo {
ClassInfoImpl(org.jboss.jandex.IndexView jandexIndex, org.jboss.jandex.MutableAnnotationOverlay annotationOverlay,
org.jboss.jandex.ClassInfo jandexDeclaration) {
super(jandexIndex, annotationOverlay, jandexDeclaration);
}
@Override
public String name() {
return jandexDeclaration.name().toString();
}
@Override
public String simpleName() {
return jandexDeclaration.simpleName();
}
@Override
public PackageInfo packageInfo() {
String packageName = jandexDeclaration.name().packagePrefix();
org.jboss.jandex.ClassInfo packageClass = jandexIndex.getClassByName(
DotName.createSimple(packageName + ".package-info"));
return new PackageInfoImpl(jandexIndex, annotationOverlay, packageClass);
}
@Override
public List<TypeVariable> typeParameters() {
return jandexDeclaration.typeParameters()
.stream()
.map(it -> TypeImpl.fromJandexType(jandexIndex, annotationOverlay, it))
.filter(Type::isTypeVariable) // not necessary, just as a precaution
.map(Type::asTypeVariable) // not necessary, just as a precaution
.toList();
}
@Override
public Type superClass() {
org.jboss.jandex.Type jandexSuperType = jandexDeclaration.superClassType();
if (jandexSuperType == null) {
return null;
}
return TypeImpl.fromJandexType(jandexIndex, annotationOverlay, jandexSuperType);
}
@Override
public ClassInfo superClassDeclaration() {
DotName jandexSuperType = jandexDeclaration.superName();
if (jandexSuperType == null) {
return null;
}
return new ClassInfoImpl(jandexIndex, annotationOverlay, jandexIndex.getClassByName(jandexSuperType));
}
@Override
public List<Type> superInterfaces() {
return jandexDeclaration.interfaceTypes()
.stream()
.map(it -> TypeImpl.fromJandexType(jandexIndex, annotationOverlay, it))
.toList();
}
@Override
public List<ClassInfo> superInterfacesDeclarations() {
return jandexDeclaration.interfaceNames()
.stream()
.map(it -> (ClassInfo) new ClassInfoImpl(jandexIndex, annotationOverlay, jandexIndex.getClassByName(it)))
.toList();
}
@Override
public boolean isPlainClass() {
return !isInterface() && !isEnum() && !isAnnotation() && !isRecord();
}
@Override
public boolean isInterface() {
if (isAnnotation()) {
return false;
}
return Modifier.isInterface(jandexDeclaration.flags());
}
@Override
public boolean isEnum() {
return jandexDeclaration.isEnum();
}
@Override
public boolean isAnnotation() {
return jandexDeclaration.isAnnotation();
}
@Override
public boolean isRecord() {
return jandexDeclaration.isRecord();
}
@Override
public boolean isAbstract() {
return Modifier.isAbstract(jandexDeclaration.flags());
}
@Override
public boolean isFinal() {
return Modifier.isFinal(jandexDeclaration.flags());
}
@Override
public int modifiers() {
return jandexDeclaration.flags();
}
@Override
public Collection<MethodInfo> constructors() {
List<MethodInfo> result = new ArrayList<>();
for (org.jboss.jandex.MethodInfo jandexMethod : jandexDeclaration.methods()) {
if (jandexMethod.isSynthetic()) {
continue;
}
if (MethodPredicates.IS_CONSTRUCTOR_JANDEX.test(jandexMethod)) {
result.add(new MethodInfoImpl(jandexIndex, annotationOverlay, jandexMethod));
}
}
return Collections.unmodifiableList(result);
}
private List<org.jboss.jandex.ClassInfo> allSupertypes() {
List<org.jboss.jandex.ClassInfo> result = new ArrayList<>();
// an | ClassInfoImpl |
java | assertj__assertj-core | assertj-core/src/test/java/org/assertj/core/internal/floatarrays/FloatArrays_assertNullOrEmpty_Test.java | {
"start": 1356,
"end": 1993
} | class ____ extends FloatArraysBaseTest {
@Test
void should_fail_if_array_is_not_null_and_is_not_empty() {
AssertionInfo info = someInfo();
float[] actual = { 6f, 8f };
Throwable error = catchThrowable(() -> arrays.assertNullOrEmpty(info, actual));
assertThat(error).isInstanceOf(AssertionError.class);
verify(failures).failure(info, shouldBeNullOrEmpty(actual));
}
@Test
void should_pass_if_array_is_null() {
arrays.assertNullOrEmpty(someInfo(), null);
}
@Test
void should_pass_if_array_is_empty() {
arrays.assertNullOrEmpty(someInfo(), emptyArray());
}
}
| FloatArrays_assertNullOrEmpty_Test |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/jpa/compliance/JtaIllegalArgumentExceptionTest.java | {
"start": 1211,
"end": 1932
} | class ____ {
@Test
public void testNonExistingNativeQuery(EntityManagerFactoryScope scope) {
scope.inEntityManager(
entityManager ->
Assertions.assertThrows(
IllegalArgumentException.class,
() -> {
entityManager.createNamedQuery( "NonExisting_NativeQuery" );
}
)
);
}
@Test
public void testNonExistingNativeQuery2(EntityManagerFactoryScope scope) {
scope.inEntityManager(
entityManager ->
Assertions.assertThrows(
IllegalArgumentException.class,
() -> {
entityManager.createNamedQuery( "NonExisting_NativeQuery", Person.class );
}
)
);
}
@Entity(name = "Person")
public static | JtaIllegalArgumentExceptionTest |
java | spring-projects__spring-boot | core/spring-boot/src/test/java/org/springframework/boot/support/EnvironmentPostProcessorApplicationListenerTests.java | {
"start": 14484,
"end": 14817
} | class ____
implements ApplicationContextInitializer<ConfigurableApplicationContext> {
@Override
public void initialize(ConfigurableApplicationContext applicationContext) {
applicationContext.getBeanFactory().registerSingleton("test", "test");
}
}
}
}
| ExampleAotProcessedNoProfileApp__ApplicationContextInitializer |
java | quarkusio__quarkus | core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/NativeImageBuildLocalRunner.java | {
"start": 226,
"end": 1484
} | class ____ extends NativeImageBuildRunner {
private final String nativeImageExecutable;
public NativeImageBuildLocalRunner(String nativeImageExecutable) {
this.nativeImageExecutable = nativeImageExecutable;
}
@Override
public boolean isContainer() {
return false;
}
@Override
protected String[] getGraalVMVersionCommand(List<String> args) {
return buildCommand(args);
}
@Override
protected String[] getBuildCommand(Path outputDir, List<String> args) {
return buildCommand(args);
}
@Override
protected void objcopy(Path outputDir, String... args) {
final String[] command = new String[args.length + 1];
command[0] = "objcopy";
System.arraycopy(args, 0, command, 1, args.length);
runCommand(command, null, outputDir.toFile());
}
@Override
protected boolean objcopyExists() {
if (!SystemUtils.IS_OS_LINUX) {
return false;
}
return ProcessUtil.pathOfCommand(Path.of("objcopy")).isPresent();
}
private String[] buildCommand(List<String> args) {
return Stream.concat(Stream.of(nativeImageExecutable), args.stream()).toArray(String[]::new);
}
}
| NativeImageBuildLocalRunner |
java | quarkusio__quarkus | integration-tests/maven/src/test/java/io/quarkus/maven/it/ListExtensionsIT.java | {
"start": 859,
"end": 3480
} | class ____ extends QuarkusPlatformAwareMojoTestBase {
private static final String VERTX_ARTIFACT_ID = "quarkus-vertx";
private static final String PROJECT_SOURCE_DIR = "projects/classic";
private File testDir;
private Invoker invoker;
@Test
void testListExtensions() throws MavenInvocationException, IOException {
testDir = initProject(PROJECT_SOURCE_DIR, "projects/testListExtensions");
invoker = initInvoker(testDir);
List<String> outputLogLines = listExtensions();
assertThat(outputLogLines).anyMatch(line -> line.contains(VERTX_ARTIFACT_ID));
}
@Test
void testListExtensionsWithManagedDependencyWithoutScope() throws MavenInvocationException, IOException {
testDir = initProject(PROJECT_SOURCE_DIR, "projects/testListExtensionsWithManagedDependencyWithoutScope");
invoker = initInvoker(testDir);
// Edit the pom.xml.
File source = new File(testDir, "pom.xml");
filter(source, Collections.singletonMap("<!-- insert managed dependencies here -->",
" <dependency>\n" +
" <groupId>org.assertj</groupId>\n" +
" <artifactId>assertj-core</artifactId>\n" +
" <version>3.16.1</version>\n" +
" </dependency>"));
List<String> outputLogLines = listExtensions();
assertThat(outputLogLines).anyMatch(line -> line.contains(VERTX_ARTIFACT_ID));
}
private List<String> listExtensions()
throws MavenInvocationException, IOException {
InvocationRequest request = new DefaultInvocationRequest();
request.setBatchMode(true);
request.setGoals(Collections.singletonList(
getMavenPluginGroupId() + ":" + getMavenPluginArtifactId() + ":" + getMavenPluginVersion()
+ ":list-extensions"));
File outputLog = new File(testDir, "output.log");
InvocationOutputHandler outputHandler = new PrintStreamHandler(
new PrintStream(new TeeOutputStream(System.out, Files.newOutputStream(outputLog.toPath())), true, "UTF-8"),
true);
request.setOutputHandler(outputHandler);
File invokerLog = new File(testDir, "invoker.log");
PrintStreamLogger logger = new PrintStreamLogger(new PrintStream(new FileOutputStream(invokerLog), false, "UTF-8"),
InvokerLogger.DEBUG);
invoker.setLogger(logger);
invoker.execute(request);
return Files.readAllLines(outputLog.toPath());
}
}
| ListExtensionsIT |
java | apache__maven | impl/maven-core/src/test/java/org/apache/maven/internal/MultilineMessageHelperTest.java | {
"start": 994,
"end": 3537
} | class ____ {
@Test
void testBuilderCommon() {
List<String> msgs = new ArrayList<>();
msgs.add("*****************************************************************");
msgs.add("* Your build is requesting parallel execution, but project *");
msgs.add("* contains the following plugin(s) that have goals not marked *");
msgs.add("* as @threadSafe to support parallel building. *");
msgs.add("* While this /may/ work fine, please look for plugin updates *");
msgs.add("* and/or request plugins be made thread-safe. *");
msgs.add("* If reporting an issue, report it against the plugin in *");
msgs.add("* question, not against maven-core *");
msgs.add("*****************************************************************");
assertEquals(
msgs,
MultilineMessageHelper.format(
"Your build is requesting parallel execution, but project contains the following "
+ "plugin(s) that have goals not marked as @threadSafe to support parallel building.",
"While this /may/ work fine, please look for plugin updates and/or "
+ "request plugins be made thread-safe.",
"If reporting an issue, report it against the plugin in question, not against maven-core"));
}
@Test
void testMojoExecutor() {
List<String> msgs = new ArrayList<>();
msgs.add("*****************************************************************");
msgs.add("* An aggregator Mojo is already executing in parallel build, *");
msgs.add("* but aggregator Mojos require exclusive access to reactor to *");
msgs.add("* prevent race conditions. This mojo execution will be blocked *");
msgs.add("* until the aggregator work is done. *");
msgs.add("*****************************************************************");
assertEquals(
msgs,
MultilineMessageHelper.format(
"An aggregator Mojo is already executing in parallel build, but aggregator "
+ "Mojos require exclusive access to reactor to prevent race conditions. This "
+ "mojo execution will be blocked until the aggregator work is done."));
}
}
| MultilineMessageHelperTest |
java | quarkusio__quarkus | extensions/grpc/deployment/src/test/java/io/quarkus/grpc/server/tls/TlsWithHttpServerUsingP12AndTlsRegistryTest.java | {
"start": 1145,
"end": 2727
} | class ____ {
static String configuration = """
quarkus.grpc.server.use-separate-server=false
quarkus.tls.key-store.p12.path=target/certs/grpc-keystore.p12
quarkus.tls.key-store.p12.password=password
quarkus.http.insecure-requests=disabled
""";
@RegisterExtension
static final QuarkusUnitTest config = new QuarkusUnitTest().setArchiveProducer(
() -> ShrinkWrap.create(JavaArchive.class)
.addPackage(GreeterGrpc.class.getPackage())
.addClass(HelloService.class)
.add(new StringAsset(configuration), "application.properties"));
protected ManagedChannel channel;
@BeforeEach
public void init() throws Exception {
File certs = new File("target/certs/grpc-client-ca.crt");
SslContext sslcontext = GrpcSslContexts.forClient()
.trustManager(certs)
.build();
channel = NettyChannelBuilder.forAddress("localhost", 8444)
.sslContext(sslcontext)
.useTransportSecurity()
.build();
}
@AfterEach
public void shutdown() {
if (channel != null) {
channel.shutdownNow();
}
}
@Test
public void testInvokingGrpcServiceUsingTls() {
HelloReply reply = GreeterGrpc.newBlockingStub(channel)
.sayHello(HelloRequest.newBuilder().setName("neo").build());
assertThat(reply.getMessage()).isEqualTo("Hello neo");
}
}
| TlsWithHttpServerUsingP12AndTlsRegistryTest |
java | spring-projects__spring-security | config/src/test/java/org/springframework/security/config/annotation/web/configurers/saml2/Saml2LoginConfigurerTests.java | {
"start": 33578,
"end": 33747
} | class ____ {
@GetMapping("/")
String user(@AuthenticationPrincipal Saml2AuthenticatedPrincipal principal) {
return principal.getName();
}
}
}
| ResourceController |
java | apache__dubbo | dubbo-common/src/test/java/org/apache/dubbo/rpc/model/SerializablePerson.java | {
"start": 901,
"end": 2653
} | class ____ implements Serializable {
private static final long serialVersionUID = 1L;
byte oneByte = 123;
private String name = "name1";
private int age = 11;
private String[] value = {"value1", "value2"};
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public byte getOneByte() {
return oneByte;
}
public void setOneByte(byte b) {
this.oneByte = b;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public String[] getValue() {
return value;
}
public void setValue(String[] value) {
this.value = value;
}
@Override
public String toString() {
return String.format("Person name(%s) age(%d) byte(%s) [value=%s]", name, age, oneByte, Arrays.toString(value));
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + age;
result = prime * result + ((name == null) ? 0 : name.hashCode());
result = prime * result + Arrays.hashCode(value);
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (getClass() != obj.getClass()) return false;
SerializablePerson other = (SerializablePerson) obj;
if (age != other.age) return false;
if (name == null) {
if (other.name != null) return false;
} else if (!name.equals(other.name)) return false;
if (!Arrays.equals(value, other.value)) return false;
return true;
}
}
| SerializablePerson |
java | hibernate__hibernate-orm | hibernate-core/src/test/java/org/hibernate/orm/test/annotations/cid/CompositeIdWithOrderedUpdatesTest.java | {
"start": 2032,
"end": 2991
} | class ____ {
@Id
private String stringProperty;
@Id
private int integerProperty;
@ManyToOne
private ModelWithSelfChildren parent;
@OneToMany(mappedBy = "parent")
private List<ModelWithSelfChildren> children = new ArrayList<>();
public String getStringProperty() {
return stringProperty;
}
public void setStringProperty(String stringProperty) {
this.stringProperty = stringProperty;
}
public int getIntegerProperty() {
return integerProperty;
}
public void setIntegerProperty(int integerProperty) {
this.integerProperty = integerProperty;
}
public ModelWithSelfChildren getParent() {
return parent;
}
public void setParent(ModelWithSelfChildren parent) {
this.parent = parent;
}
public List<ModelWithSelfChildren> getChildren() {
return children;
}
public void setChildren(List<ModelWithSelfChildren> children) {
this.children = children;
}
}
public static | ModelWithSelfChildren |
java | apache__flink | flink-state-backends/flink-statebackend-rocksdb/src/main/java/org/apache/flink/state/rocksdb/snapshot/RocksDBSnapshotStrategyBase.java | {
"start": 13552,
"end": 15852
} | class ____
implements SnapshotResultSupplier<KeyedStateHandle> {
/** Id for the current checkpoint. */
protected final long checkpointId;
/** Stream factory that creates the output streams to DFS. */
@Nonnull protected final CheckpointStreamFactory checkpointStreamFactory;
/** The state meta data. */
@Nonnull protected final List<StateMetaInfoSnapshot> stateMetaInfoSnapshots;
/** Local directory for the RocksDB native backup. */
@Nonnull protected final SnapshotDirectory localBackupDirectory;
@Nonnull protected final CloseableRegistry tmpResourcesRegistry;
protected RocksDBSnapshotOperation(
long checkpointId,
@Nonnull CheckpointStreamFactory checkpointStreamFactory,
@Nonnull SnapshotDirectory localBackupDirectory,
@Nonnull List<StateMetaInfoSnapshot> stateMetaInfoSnapshots) {
this.checkpointId = checkpointId;
this.checkpointStreamFactory = checkpointStreamFactory;
this.stateMetaInfoSnapshots = stateMetaInfoSnapshots;
this.localBackupDirectory = localBackupDirectory;
this.tmpResourcesRegistry = new CloseableRegistry();
}
protected Optional<KeyedStateHandle> getLocalSnapshot(
@Nullable StreamStateHandle localStreamStateHandle,
List<HandleAndLocalPath> sharedState)
throws IOException {
final DirectoryStateHandle directoryStateHandle =
localBackupDirectory.completeSnapshotAndGetHandle();
if (directoryStateHandle != null && localStreamStateHandle != null) {
return Optional.of(
new IncrementalLocalKeyedStateHandle(
backendUID,
checkpointId,
directoryStateHandle,
keyGroupRange,
localStreamStateHandle,
sharedState));
} else {
return Optional.empty();
}
}
}
/** A {@link SnapshotResources} for native rocksdb snapshot. */
protected static | RocksDBSnapshotOperation |
java | apache__camel | core/camel-management/src/test/java/org/apache/camel/management/ManagedRoutePerformanceCounterTest.java | {
"start": 1563,
"end": 4456
} | class ____ extends ManagementTestSupport {
@Test
public void testPerformanceCounterStats() throws Exception {
// get the stats for the route
MBeanServer mbeanServer = getMBeanServer();
ObjectName on = getCamelObjectName(TYPE_ROUTE, context.getRoutes().get(0).getRouteId());
Long delta = (Long) mbeanServer.getAttribute(on, "DeltaProcessingTime");
assertEquals(0, delta.intValue());
getMockEndpoint("mock:result").expectedMessageCount(1);
template.asyncSendBody("direct:start", "Hello World");
// cater for slow boxes
await().atMost(5, TimeUnit.SECONDS).until(() -> {
Long num = (Long) mbeanServer.getAttribute(on, "ExchangesInflight");
return num == 1L;
});
assertMockEndpointsSatisfied();
await().atMost(5, TimeUnit.SECONDS).untilAsserted(() -> {
Long completed = (Long) mbeanServer.getAttribute(on, "ExchangesCompleted");
assertEquals(1, completed.longValue());
});
delta = (Long) mbeanServer.getAttribute(on, "DeltaProcessingTime");
Long last = (Long) mbeanServer.getAttribute(on, "LastProcessingTime");
Long total = (Long) mbeanServer.getAttribute(on, "TotalProcessingTime");
assertNotNull(delta);
assertTrue(last > 900, "Should take around 1 sec: was " + last);
assertTrue(total > 900, "Should take around 1 sec: was " + total);
// send in another message
template.sendBody("direct:start", "Bye World");
Long completed = (Long) mbeanServer.getAttribute(on, "ExchangesCompleted");
assertEquals(2, completed.longValue());
delta = (Long) mbeanServer.getAttribute(on, "DeltaProcessingTime");
last = (Long) mbeanServer.getAttribute(on, "LastProcessingTime");
total = (Long) mbeanServer.getAttribute(on, "TotalProcessingTime");
assertNotNull(delta);
assertTrue(last > 900, "Should take around 1 sec: was " + last);
assertTrue(total > 1900, "Should be around 2 sec now: was " + total);
Date reset = (Date) mbeanServer.getAttribute(on, "ResetTimestamp");
assertNotNull(reset);
Date lastFailed = (Date) mbeanServer.getAttribute(on, "LastExchangeFailureTimestamp");
Date firstFailed = (Date) mbeanServer.getAttribute(on, "FirstExchangeFailureTimestamp");
assertNull(lastFailed);
assertNull(firstFailed);
Long inFlight = (Long) mbeanServer.getAttribute(on, "ExchangesInflight");
assertEquals(0L, inFlight.longValue());
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
@Override
public void configure() {
from("direct:start").to("log:foo").delay(1000).to("mock:result");
}
};
}
}
| ManagedRoutePerformanceCounterTest |
java | apache__hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppsInfo.java | {
"start": 1127,
"end": 1456
} | class ____ {
private ArrayList<AppInfo> app = new ArrayList<>();
public AppsInfo() {
} // JAXB needs this
public void add(AppInfo appinfo) {
app.add(appinfo);
}
public ArrayList<AppInfo> getApps() {
return app;
}
public void addAll(ArrayList<AppInfo> appsInfo) {
app.addAll(appsInfo);
}
}
| AppsInfo |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.