index int64 0 0 | repo_id stringlengths 9 205 | file_path stringlengths 31 246 | content stringlengths 1 12.2M | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/flink-statefun/statefun-sdk-embedded/src/main/java/org/apache/flink/statefun/sdk | Create_ds/flink-statefun/statefun-sdk-embedded/src/main/java/org/apache/flink/statefun/sdk/annotations/Persisted.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.sdk.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a field of a stateful function as persisted state.
 *
 * <p>The annotation is retained at runtime ({@link RetentionPolicy#RUNTIME}) so that annotated
 * fields can be discovered reflectively, and may only be applied to fields ({@link
 * ElementType#FIELD}).
 *
 * <p>NOTE(review): the exact state-registration semantics are defined by the Stateful Functions
 * runtime, outside this file.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface Persisted {}
| 5,900 |
0 | Create_ds/flink-statefun/statefun-sdk-embedded/src/main/java/org/apache/flink/statefun/sdk | Create_ds/flink-statefun/statefun-sdk-embedded/src/main/java/org/apache/flink/statefun/sdk/spi/StatefulFunctionModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.sdk.spi;
import java.util.Map;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.FunctionTypeNamespaceMatcher;
import org.apache.flink.statefun.sdk.StatefulFunction;
import org.apache.flink.statefun.sdk.StatefulFunctionProvider;
import org.apache.flink.statefun.sdk.io.EgressSpec;
import org.apache.flink.statefun.sdk.io.IngressIdentifier;
import org.apache.flink.statefun.sdk.io.IngressSpec;
import org.apache.flink.statefun.sdk.io.Router;
/**
* A {@link StatefulFunctionModule} is the entry point for adding to a Stateful Functions
* application the core building block primitives, i.e. {@link IngressSpec}s, {@link EgressSpec}s,
* {@link Router}s, and {@link StatefulFunction}s.
*
* <h2>Extensibility of a Stateful Functions application</h2>
*
* <p>A Stateful Functions application is built up of ingresses, egresses, routers, and stateful
* functions that are added to the application by multiple different {@link
* StatefulFunctionModule}s. This allows different parts of the application to be contributed by
* different modules; for example, one module may provide ingresses and egresses, while other
* modules may individually contribute specific parts of the application as stateful functions.
*
* <p>The extensibility is achieved by leveraging the <a
* href="https://docs.oracle.com/javase/tutorial/ext/basics/spi.html#the-serviceloader-class">Java
* Service Loader</a>. In this context, each module is essentially a service provider.
*
* <h2>Registering a {@code StatefulFunctionModule}</h2>
*
* <p>In order for an application to discover a given module, likewise to how the Java Service
* Loader works, a UTF-8 encoded provider configuration file needs to be stored in the {@code
* META-INF/services} directory of the module's containing JAR file. The name of the file should be
* {@code org.apache.flink.statefun.sdk.spi.StatefulFunctionModule}, i.e. the fully qualified name
* of the {@link StatefulFunctionModule} class. Each line in the file should be the fully qualified
* class name of a module in that JAR that you want to register for the Stateful Functions
* application. The configuration file may also be automatically generated using Google's <a
* href="https://github.com/google/auto/tree/master/service">AutoService</a> tool.
*
* <p>Finally, to allow the Stateful Functions runtime to discover the registered modules, the JAR
* files containing the modules and provider configuration files should be added to a
* system-specific class path directory, {@code /opt/statefun/modules/}.
*
* <p>For a simple demonstration, you can consult the {@code statefun-greeter-example} example.
*/
public interface StatefulFunctionModule {

  /**
   * This method is the entry point for extending a Stateful Functions application by binding
   * ingresses, egresses, routers, and functions.
   *
   * @param globalConfiguration global configuration of the Stateful Functions application.
   * @param binder the binder to be used to bind ingresses, egresses, routers, and functions.
   */
  void configure(Map<String, String> globalConfiguration, Binder binder);

  /**
   * A {@link Binder} binds ingresses, egresses, routers, and functions to a Stateful Functions
   * application.
   *
   * <p>An instance of this interface is provided to {@link #configure(Map, Binder)}; modules use
   * it to register their building blocks with the application.
   */
  interface Binder {

    /**
     * Binds an {@link IngressSpec} to the Stateful Functions application.
     *
     * @param spec the {@link IngressSpec} to bind.
     * @param <T> the output type of the ingress.
     */
    <T> void bindIngress(IngressSpec<T> spec);

    /**
     * Binds an {@link EgressSpec} to the Stateful Functions application.
     *
     * @param spec the {@link EgressSpec} to bind.
     * @param <T> the type of inputs that the egress consumes.
     */
    <T> void bindEgress(EgressSpec<T> spec);

    /**
     * Binds a {@link StatefulFunctionProvider} to the Stateful Functions application for a specific
     * {@link FunctionType}.
     *
     * @param functionType the type of functions that the {@link StatefulFunctionProvider} provides.
     * @param provider the provider to bind.
     */
    void bindFunctionProvider(FunctionType functionType, StatefulFunctionProvider provider);

    /**
     * Binds a {@link StatefulFunctionProvider} to the Stateful Functions application for all
     * functions under the specified namespace. If a provider was bound for a specific function type
     * using {@link #bindFunctionProvider(FunctionType, StatefulFunctionProvider)}, that provider
     * would be used instead.
     *
     * <p>In other words, a type-specific binding always takes precedence over a namespace-wide
     * binding.
     *
     * @param namespaceMatcher matcher for the target namespace of functions that the {@link
     *     StatefulFunctionProvider} provides.
     * @param provider the provider to bind.
     */
    void bindFunctionProvider(
        FunctionTypeNamespaceMatcher namespaceMatcher, StatefulFunctionProvider provider);

    /**
     * Binds a {@link Router} for a given ingress to the Stateful Functions application.
     *
     * @param id the id of the ingress to bind the router to.
     * @param router the router to bind.
     * @param <T> the type of messages that is being routed.
     */
    <T> void bindIngressRouter(IngressIdentifier<T> id, Router<T> router);
  }
}
| 5,901 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-js/src/test/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-js/src/test/java/org/apache/flink/statefun/e2e/smoke/js/SmokeVerificationJsE2E.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.js;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.flink.statefun.e2e.common.StatefulFunctionsAppContainers;
import org.apache.flink.statefun.e2e.smoke.SmokeRunner;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.images.builder.ImageFromDockerfile;
/**
 * End-to-end smoke test that verifies a remote JavaScript function against a containerized
 * Stateful Functions cluster.
 *
 * <p>The test builds a Docker image for the remote JS function from the local JS SDK sources,
 * starts that container first, then starts a StateFun cluster (master + {@value #NUM_WORKERS}
 * workers) that depends on it, and finally drives the generic smoke workload through {@link
 * SmokeRunner}.
 */
public class SmokeVerificationJsE2E {

  private static final Logger LOG = LoggerFactory.getLogger(SmokeVerificationJsE2E.class);

  private static final int NUM_WORKERS = 2;

  // Workload knobs for the generic smoke test; see SmokeRunnerParameters for their semantics.
  private static final int NUM_FUNCTION_INSTANCES = 128;
  private static final int MESSAGE_COUNT = 100_000;
  private static final int MAX_FAILURES = 1;

  // Overall test timeout: 10 minutes.
  private static final long RUN_TIMEOUT_MS = 1_000 * 60 * 10;

  @Test(timeout = RUN_TIMEOUT_MS)
  public void runWith() throws Throwable {
    SmokeRunnerParameters parameters = new SmokeRunnerParameters();
    parameters.setNumberOfFunctionInstances(NUM_FUNCTION_INSTANCES);
    parameters.setMessageCount(MESSAGE_COUNT);
    parameters.setMaxFailures(MAX_FAILURES);

    GenericContainer<?> remoteFunction = configureRemoteFunction();

    StatefulFunctionsAppContainers.Builder builder =
        StatefulFunctionsAppContainers.builder("flink-statefun-cluster", NUM_WORKERS)
            .withBuildContextFileFromClasspath("remote-module", "/remote-module/")
            .dependsOn(remoteFunction);

    SmokeRunner.run(parameters, builder);
  }

  /**
   * Builds and configures the container running the remote JavaScript function. The image is
   * built from {@code Dockerfile.remote-function} on the classpath, with the local JS SDK sources
   * copied into the build context under {@code sdk}.
   */
  private GenericContainer<?> configureRemoteFunction() {
    ImageFromDockerfile remoteFunctionImage =
        new ImageFromDockerfile("remote-function-image")
            .withFileFromClasspath("Dockerfile", "Dockerfile.remote-function")
            .withFileFromPath("sdk", sdkPath())
            .withFileFromClasspath("remote-function/", "remote-function/");

    return new GenericContainer<>(remoteFunctionImage)
        .withNetworkAliases("remote-function-host")
        .withLogConsumer(new Slf4jLogConsumer(LOG));
  }

  /** Path to the JavaScript SDK sources, resolved relative to this module's working directory. */
  private static Path sdkPath() {
    // Equivalent to <user.dir>/../../statefun-sdk-js, but without hard-coding the separator.
    return Paths.get(System.getProperty("user.dir"), "..", "..", "statefun-sdk-js");
  }
}
| 5,902 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-e2e-tests-common/src/main/java/org/apache/flink/statefun/e2e | Create_ds/flink-statefun/statefun-e2e-tests/statefun-e2e-tests-common/src/main/java/org/apache/flink/statefun/e2e/common/StatefulFunctionsAppContainers.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.common;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Nullable;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.statefun.flink.core.StatefulFunctionsConfig;
import org.apache.flink.util.FileUtils;
import org.junit.rules.ExternalResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.BindMode;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.Network;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.images.builder.ImageFromDockerfile;
/**
* A JUnit {@link org.junit.rules.TestRule} that setups a containerized Stateful Functions
* application using <a href="https://www.testcontainers.org/">Testcontainers</a>. This allows
* composing end-to-end tests for Stateful Functions applications easier, by managing the
* containerized application as an external test resource whose lifecycle is integrated with the
* JUnit test framework.
*
* <h2>Example usage</h2>
*
* <pre>{@code
* public class MyE2E {
*
* {@code @Rule}
* public StatefulFunctionsAppContainers myApp =
* StatefulFunctionsAppContainers.builder("app-name", 3).build();
*
* {@code @Test}
* public void runTest() {
* // the containers for the app, including master and workers, will already be running
* // before the test is run; implement your test logic against the app
* }
* }
* }</pre>
*
* <p>In most cases you'd also need to start an additional system for the test, for example starting
* a container that runs Kafka from which the application depends on as an ingress or egress. The
* following demonstrates adding a Kafka container to the setup:
*
* <pre>{@code
* public class MyKafkaE2E {
*
* {@code @Rule}
* public KafkaContainer kafka = new KafkaContainer();
*
* {@code @Rule}
* public StatefulFunctionsAppContainers myApp =
* StatefulFunctionsAppContainers.builder("app-name", 3)
* .dependsOn(kafka)
* .build();
*
* ...
* }
* }</pre>
*
* <p>Application master and worker containers will always be started after containers that are
* added using {@link Builder#dependsOn(GenericContainer)} have started. Moreover, containers being
* depended on will also be setup such that they share the same network with the master and workers,
* so that they can freely communicate with each other.
*
* <h2>Prerequisites</h2>
*
* <p>Since Testcontainers uses Docker, it is required that you have Docker installed for this test
* rule to work.
*
* <p>When building the Docker image for the Stateful Functions application under test, the
* following files are added to the build context:
*
* <uL>
* <li>The {@code Dockerfile} found at path {@code /Dockerfile} in the classpath. This is required
* to be present. A simple way is to add the Dockerfile to the test resources directory. This
* will be added to the root of the Docker image build context.
* <li>The {@code flink-conf.yaml} found at path {@code /flink-conf.yaml} in the classpath, if
* any. You can also add this to the test resources directory. This will be added to the root
* of the Docker image build context.
* <li>All built artifacts under the generated {@code target} folder for the project module that
* the test resides in. This is required to be present, so this entails that the tests can
* only be ran after artifacts are built. The built artifacts are added to the root of the
* Docker image build context.
* </uL>
*/
public final class StatefulFunctionsAppContainers extends ExternalResource {

  private static final Logger LOG = LoggerFactory.getLogger(StatefulFunctionsAppContainers.class);

  private final GenericContainer<?> master;
  private final List<GenericContainer<?>> workers;

  // Temporary host directory bind-mounted into the master and all workers at /checkpoint-dir;
  // created in before() and deleted in after().
  private File checkpointDir;

  private StatefulFunctionsAppContainers(
      GenericContainer<?> masterContainer, List<GenericContainer<?>> workerContainers) {
    this.master = Objects.requireNonNull(masterContainer);
    this.workers = Objects.requireNonNull(workerContainers);
  }

  /**
   * Creates a builder for creating a {@link StatefulFunctionsAppContainers}.
   *
   * @param appName the name of the application.
   * @param numWorkers the number of workers to run the application.
   * @return a builder for creating a {@link StatefulFunctionsAppContainers}.
   */
  public static Builder builder(String appName, int numWorkers) {
    return new Builder(appName, numWorkers);
  }

  @Override
  protected void before() throws Throwable {
    // A single shared host checkpoint directory is mounted into every container so that
    // checkpoints remain accessible across individual container restarts (see restartWorker).
    checkpointDir = temporaryCheckpointDir();

    master.withFileSystemBind(
        checkpointDir.getAbsolutePath(), "/checkpoint-dir", BindMode.READ_WRITE);
    workers.forEach(
        worker ->
            worker.withFileSystemBind(
                checkpointDir.getAbsolutePath(), "/checkpoint-dir", BindMode.READ_WRITE));

    // Start the master first; workers connect to it via the MASTER_HOST network alias.
    master.start();
    workers.forEach(GenericContainer::start);
  }

  @Override
  protected void after() {
    master.stop();
    workers.forEach(GenericContainer::stop);

    FileUtils.deleteDirectoryQuietly(checkpointDir);
  }

  /** @return the exposed port on master for calling REST APIs. */
  public int getMasterRestPort() {
    return master.getMappedPort(8081);
  }

  /**
   * Restarts a single worker of this Stateful Functions application.
   *
   * @param workerIndex the index of the worker to restart.
   * @throws IndexOutOfBoundsException if {@code workerIndex} is not a valid worker index.
   */
  public void restartWorker(int workerIndex) {
    if (workerIndex >= workers.size()) {
      throw new IndexOutOfBoundsException(
          "Invalid worker index; valid values are 0 to " + (workers.size() - 1));
    }
    final GenericContainer<?> worker = workers.get(workerIndex);
    worker.stop();
    worker.start();
  }

  /** Creates the checkpoint directory under the current working directory. */
  private static File temporaryCheckpointDir() throws IOException {
    final Path currentWorkingDir = Paths.get(System.getProperty("user.dir"));
    return Files.createTempDirectory(currentWorkingDir, "statefun-app-checkpoints-").toFile();
  }

  /** Builder for a {@link StatefulFunctionsAppContainers}. */
  public static final class Builder {
    private static final String MASTER_HOST = "statefun-app-master";
    private static final String WORKER_HOST_PREFIX = "statefun-app-worker";

    private final String appName;
    private final int numWorkers;
    private final Network network;
    // Dynamic properties are merged on top of the base flink-conf.yaml found on the classpath.
    private final Configuration dynamicProperties = new Configuration();
    private final List<GenericContainer<?>> dependentContainers = new ArrayList<>();
    private final List<ClasspathBuildContextFile> classpathBuildContextFiles = new ArrayList<>();
    private Logger logger;

    private Builder(String appName, int numWorkers) {
      if (appName == null || appName.isEmpty()) {
        throw new IllegalArgumentException(
            "App name must be non-empty. This is used as the application image name.");
      }
      if (numWorkers < 1) {
        throw new IllegalArgumentException("Must have at least 1 worker.");
      }

      this.network = Network.newNetwork();
      this.appName = appName;
      this.numWorkers = numWorkers;
    }

    /**
     * Adds a container that the app depends on; it is started before the master and workers and
     * joined to the same network so the containers can communicate with each other.
     */
    public StatefulFunctionsAppContainers.Builder dependsOn(GenericContainer<?> container) {
      container.withNetwork(network);
      this.dependentContainers.add(container);
      return this;
    }

    /** Forwards master and worker container logs to the given logger. */
    public StatefulFunctionsAppContainers.Builder exposeLogs(Logger logger) {
      this.logger = logger;
      return this;
    }

    /** Sets a module global configuration entry (prefixed per {@link StatefulFunctionsConfig}). */
    public StatefulFunctionsAppContainers.Builder withModuleGlobalConfiguration(
        String key, String value) {
      this.dynamicProperties.setString(StatefulFunctionsConfig.MODULE_CONFIG_PREFIX + key, value);
      return this;
    }

    /** Sets a typed Flink configuration option as a dynamic property. */
    public <T> StatefulFunctionsAppContainers.Builder withConfiguration(
        ConfigOption<T> config, T value) {
      this.dynamicProperties.set(config, value);
      return this;
    }

    /** Sets a string-keyed Flink configuration entry as a dynamic property. */
    public StatefulFunctionsAppContainers.Builder withConfiguration(String key, String value) {
      this.dynamicProperties.setString(key, value);
      return this;
    }

    /** Adds a classpath resource to the Docker image build context at the given path. */
    public StatefulFunctionsAppContainers.Builder withBuildContextFileFromClasspath(
        String buildContextPath, String resourcePath) {
      this.classpathBuildContextFiles.add(
          new ClasspathBuildContextFile(buildContextPath, resourcePath));
      return this;
    }

    public StatefulFunctionsAppContainers build() {
      final ImageFromDockerfile appImage =
          appImage(appName, dynamicProperties, classpathBuildContextFiles);

      return new StatefulFunctionsAppContainers(
          masterContainer(appImage, network, dependentContainers, numWorkers, logger),
          workerContainers(appImage, numWorkers, network, logger));
    }

    /** Builds the application image: built artifacts, resolved flink-conf.yaml, extra files. */
    private static ImageFromDockerfile appImage(
        String appName,
        Configuration dynamicProperties,
        List<ClasspathBuildContextFile> classpathBuildContextFiles) {
      final Path targetDirPath = Paths.get(System.getProperty("user.dir") + "/target/");
      LOG.info("Building app image with built artifacts located at: {}", targetDirPath);

      final ImageFromDockerfile appImage =
          new ImageFromDockerfile(appName)
              .withFileFromClasspath("Dockerfile", "Dockerfile")
              .withFileFromPath(".", targetDirPath);

      Configuration flinkConf = resolveFlinkConf(dynamicProperties);
      String flinkConfString = flinkConfigAsString(flinkConf);
      // Log the exact YAML baked into the image, not Configuration#toString.
      LOG.info(
          "Resolved Flink configuration after merging dynamic properties with base flink-conf.yaml:\n\n{}",
          flinkConfString);
      appImage.withFileFromString("flink-conf.yaml", flinkConfString);

      for (ClasspathBuildContextFile classpathBuildContextFile : classpathBuildContextFiles) {
        appImage.withFileFromClasspath(
            classpathBuildContextFile.buildContextPath, classpathBuildContextFile.fromResourcePath);
      }

      return appImage;
    }

    /**
     * Merges set dynamic properties with configuration in the base flink-conf.yaml located in
     * resources.
     */
    private static Configuration resolveFlinkConf(Configuration dynamicProperties) {
      final InputStream baseFlinkConfResourceInputStream =
          StatefulFunctionsAppContainers.class.getResourceAsStream("/flink-conf.yaml");
      if (baseFlinkConfResourceInputStream == null) {
        throw new RuntimeException("Base flink-conf.yaml cannot be found.");
      }

      // GlobalConfiguration.loadConfiguration reads from a directory, so the classpath resource
      // is first copied to a temporary directory on disk.
      final File tempBaseFlinkConfFile = copyToTempFlinkConfFile(baseFlinkConfResourceInputStream);
      return GlobalConfiguration.loadConfiguration(
          tempBaseFlinkConfFile.getParentFile().getAbsolutePath(), dynamicProperties);
    }

    /** Renders the configuration as simple "key: value" YAML lines. */
    private static String flinkConfigAsString(Configuration configuration) {
      StringBuilder yaml = new StringBuilder();
      for (Map.Entry<String, String> entry : configuration.toMap().entrySet()) {
        yaml.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
      }
      return yaml.toString();
    }

    private static File copyToTempFlinkConfFile(InputStream inputStream) {
      try {
        final File tempFile =
            new File(
                Files.createTempDirectory("statefun-app-containers").toString(), "flink-conf.yaml");
        Files.copy(inputStream, tempFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
        return tempFile;
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }

    private static GenericContainer<?> masterContainer(
        ImageFromDockerfile appImage,
        Network network,
        List<GenericContainer<?>> dependents,
        int numWorkers,
        @Nullable Logger logger) {
      final GenericContainer<?> master =
          new GenericContainer<>(appImage)
              .withNetwork(network)
              .withNetworkAliases(MASTER_HOST)
              .withEnv("ROLE", "master")
              .withEnv("MASTER_HOST", MASTER_HOST)
              .withCommand("-p " + numWorkers)
              .withExposedPorts(8081);

      for (GenericContainer<?> dependent : dependents) {
        master.dependsOn(dependent);
      }

      if (logger != null) {
        master.withLogConsumer(new Slf4jLogConsumer(logger, true));
      }

      return master;
    }

    private static List<GenericContainer<?>> workerContainers(
        ImageFromDockerfile appImage, int numWorkers, Network network, @Nullable Logger logger) {
      final List<GenericContainer<?>> workers = new ArrayList<>(numWorkers);

      for (int i = 0; i < numWorkers; i++) {
        final GenericContainer<?> worker =
            new GenericContainer<>(appImage)
                .withNetwork(network)
                .withNetworkAliases(workerHostOf(i))
                .withEnv("ROLE", "worker")
                .withEnv("MASTER_HOST", MASTER_HOST);

        if (logger != null) {
          worker.withLogConsumer(new Slf4jLogConsumer(logger, true));
        }

        workers.add(worker);
      }

      return workers;
    }

    private static String workerHostOf(int workerIndex) {
      return WORKER_HOST_PREFIX + "-" + workerIndex;
    }

    /** A (build-context path, classpath resource path) pair added to the image build context. */
    private static class ClasspathBuildContextFile {
      private final String buildContextPath;
      private final String fromResourcePath;

      ClasspathBuildContextFile(String buildContextPath, String fromResourcePath) {
        this.buildContextPath = Objects.requireNonNull(buildContextPath);
        this.fromResourcePath = Objects.requireNonNull(fromResourcePath);
      }
    }
  }
}
| 5,903 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-e2e-tests-common/src/main/java/org/apache/flink/statefun/e2e/common | Create_ds/flink-statefun/statefun-e2e-tests/statefun-e2e-tests-common/src/main/java/org/apache/flink/statefun/e2e/common/kafka/KafkaIOVerifier.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.common.kafka;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
/**
* A utility to make test assertions by means of writing inputs to Kafka, and then matching on
* outputs read from Kafka.
*
* <p>Example usage:
*
* <pre>{@code
* KafkaProducer<String, Integer> producer = ...
* KafkaConsumer<String, Boolean> consumer = ...
*
* KafkaIOVerifier<String, Integer, String, Boolean> verifier =
* new KafkaIOVerifier(producer, consumer);
*
* assertThat(
* verifier.sending(
* new ProducerRecord<>("topic", "key-1", 1991),
* new ProducerRecord<>("topic", "key-2", 1108)
* ), verifier.resultsInOrder(
* true, false, true, true
* )
* );
* }</pre>
*
* @param <PK> key type of input records written to Kafka
* @param <PV> value type of input records written to Kafka
* @param <CK> key type of output records read from Kafka
* @param <CV> value type of output records read from Kafka
*/
public final class KafkaIOVerifier<PK, PV, CK, CV> {

  private final Producer<PK, PV> producer;
  private final Consumer<CK, CV> consumer;

  /**
   * Creates a verifier.
   *
   * @param producer producer to use to write input records to Kafka.
   * @param consumer consumer to use for reading output records from Kafka.
   */
  public KafkaIOVerifier(Producer<PK, PV> producer, Consumer<CK, CV> consumer) {
    this.producer = Objects.requireNonNull(producer);
    this.consumer = Objects.requireNonNull(consumer);
  }

  /**
   * Writes to Kafka multiple assertion input producer records, in the given order.
   *
   * <p>The results of calling this method should be asserted using {@link
   * #resultsInOrder(Matcher[])}. In the background, the provided Kafka consumer will be used to
   * continuously poll output records. For each assertion input provided via this method, you must
   * consequently use {@link #resultsInOrder(Matcher[])} to complete the assertion, which then stops
   * the consumer from polling Kafka.
   *
   * @param assertionInputs assertion input producer records to send to Kafka.
   * @return resulting outputs consumed from Kafka that can be asserted using {@link
   *     #resultsInOrder(Matcher[])}.
   */
  @SafeVarargs
  public final OutputsHandoff<CV> sending(ProducerRecord<PK, PV>... assertionInputs) {
    // Produce asynchronously so that consuming outputs below can begin immediately.
    CompletableFuture.runAsync(
        () -> {
          for (ProducerRecord<PK, PV> input : assertionInputs) {
            producer.send(input);
          }
          producer.flush();
        });

    final OutputsHandoff<CV> outputsHandoff = new OutputsHandoff<>();
    // Keep polling outputs in the background until a results matcher marks the handoff verified.
    CompletableFuture.runAsync(
        () -> {
          while (!outputsHandoff.isVerified()) {
            ConsumerRecords<CK, CV> consumerRecords = consumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<CK, CV> record : consumerRecords) {
              outputsHandoff.add(record.value());
            }
          }
        });

    return outputsHandoff;
  }

  /**
   * Matcher for verifying the outputs as a result of calling {@link #sending(ProducerRecord[])}.
   *
   * @param expectedResults matchers for the expected results.
   * @return a matcher for verifying the output of calling {@link #sending(ProducerRecord[])}.
   */
  @SafeVarargs
  public final Matcher<OutputsHandoff<CV>> resultsInOrder(Matcher<CV>... expectedResults) {
    return new TypeSafeMatcher<OutputsHandoff<CV>>() {
      @Override
      protected boolean matchesSafely(OutputsHandoff<CV> outputHandoff) {
        try {
          for (Matcher<CV> r : expectedResults) {
            // Blocks until the background consumer hands over the next output value.
            CV output = outputHandoff.take();
            if (!r.matches(output)) {
              return false;
            }
          }

          // any dangling unexpected output should count as a mismatch
          // TODO should we poll with timeout for a stronger verification?
          return outputHandoff.peek() == null;
        } catch (Exception e) {
          throw new RuntimeException(e);
        } finally {
          // Always stop the background polling loop, regardless of the match outcome.
          outputHandoff.verified();
        }
      }

      @Override
      public void describeTo(Description description) {
        description.appendText("outputs matching, in order: ");
        description.appendList("[", ", ", "]", Arrays.asList(expectedResults));
      }
    };
  }

  /**
   * Matcher for verifying the outputs, happening in any order, as a result of calling {@link
   * #sending(ProducerRecord[])}.
   *
   * @param expectedResults matchers for the expected results.
   * @return a matcher for verifying the output of calling {@link #sending(ProducerRecord[])}.
   */
  @SafeVarargs
  public final Matcher<OutputsHandoff<CV>> resultsInAnyOrder(Matcher<CV>... expectedResults) {
    return new TypeSafeMatcher<OutputsHandoff<CV>>() {
      @Override
      protected boolean matchesSafely(OutputsHandoff<CV> outputHandoff) {
        final List<Matcher<CV>> expectedResultsList =
            new ArrayList<>(Arrays.asList(expectedResults));
        try {
          while (!expectedResultsList.isEmpty()) {
            CV output = outputHandoff.take();
            if (!checkAndRemoveIfMatch(expectedResultsList, output)) {
              return false;
            }
          }

          // any dangling unexpected output should count as a mismatch
          // TODO should we poll with timeout for a stronger verification?
          return outputHandoff.peek() == null;
        } catch (Exception e) {
          throw new RuntimeException(e);
        } finally {
          outputHandoff.verified();
        }
      }

      @Override
      public void describeTo(Description description) {
        description.appendText("outputs matching, in any order: ");
        description.appendList("[", ", ", "]", Arrays.asList(expectedResults));
      }
    };
  }

  /**
   * A blocking queue of consumed output values, handed off from the background polling loop in
   * {@link #sending(ProducerRecord[])} to a results matcher. Calling {@link #verified()}
   * terminates the polling loop.
   */
  private static final class OutputsHandoff<T> extends LinkedBlockingQueue<T> {
    private static final long serialVersionUID = 1L;

    // Written by the matcher thread, read by the polling thread; volatile for visibility.
    private volatile boolean isVerified;

    boolean isVerified() {
      return isVerified;
    }

    void verified() {
      this.isVerified = true;
    }
  }

  /** Removes the first matcher in the list that matches {@code in}; returns whether one matched. */
  private static <CV> boolean checkAndRemoveIfMatch(List<Matcher<CV>> expectedResultsList, CV in) {
    final Iterator<Matcher<CV>> matchersIterator = expectedResultsList.iterator();
    while (matchersIterator.hasNext()) {
      if (matchersIterator.next().matches(in)) {
        matchersIterator.remove();
        return true;
      }
    }
    return false;
  }
}
| 5,904 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-e2e-tests-common/src/main/java/org/apache/flink/statefun/e2e/common | Create_ds/flink-statefun/statefun-e2e-tests/statefun-e2e-tests-common/src/main/java/org/apache/flink/statefun/e2e/common/kafka/KafkaProtobufSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.common.kafka;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import java.util.Map;
import java.util.Objects;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
/**
* A Kafka {@link Serializer} and {@link Deserializer} that uses Protobuf for serialization.
*
* @param <T> type of the Protobuf message.
*/
public final class KafkaProtobufSerializer<T extends Message>
    implements Serializer<T>, Deserializer<T> {

  private final Parser<T> parser;

  public KafkaProtobufSerializer(Parser<T> parser) {
    this.parser = Objects.requireNonNull(parser, "parser must not be null");
  }

  /** Serializes {@code command} to its Protobuf wire format; the topic name is unused. */
  @Override
  public byte[] serialize(String s, T command) {
    return command.toByteArray();
  }

  /**
   * Parses a Protobuf message of type {@code T} from {@code bytes}.
   *
   * <p>A {@code null} payload (a Kafka tombstone record) is passed through as {@code null}
   * instead of being handed to the parser, which would throw.
   *
   * @throws RuntimeException if the bytes are not a valid {@code T} message.
   */
  @Override
  public T deserialize(String s, byte[] bytes) {
    if (bytes == null) {
      return null;
    }
    try {
      return parser.parseFrom(bytes);
    } catch (InvalidProtocolBufferException e) {
      // Narrowed from catch (Exception) and enriched with the topic for debuggability.
      throw new RuntimeException(
          "Unable to parse a Protobuf message consumed from topic [" + s + "]", e);
    }
  }

  @Override
  public void close() {}

  @Override
  public void configure(Map<String, ?> map, boolean b) {}
}
| 5,905 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/test/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/test/java/org/apache/flink/statefun/e2e/smoke/driver/CommandGeneratorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.apache.flink.statefun.e2e.smoke.generated.SourceCommand;
import org.junit.Test;
/** Smoke test of {@link CommandGenerator}'s basic contract. */
public class CommandGeneratorTest {

  @Test
  public void usageExample() {
    // Enable async operations so the async-op command generator participates as well.
    SmokeRunnerParameters parameters = new SmokeRunnerParameters();
    parameters.setAsyncOpSupported(true);
    CommandGenerator generator = new CommandGenerator(new JDKRandomGenerator(), parameters);
    // Every generated command carries a target function instance and a command tree.
    SourceCommand command = generator.get();
    assertThat(command.getTarget(), notNullValue());
    assertThat(command.getCommands(), notNullValue());
  }
}
| 5,906 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/test/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/test/java/org/apache/flink/statefun/e2e/smoke/driver/FunctionStateTrackerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
import static org.apache.flink.statefun.e2e.smoke.testutils.Utils.aRelayedStateModificationCommand;
import static org.apache.flink.statefun.e2e.smoke.testutils.Utils.aStateModificationCommand;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import org.junit.Test;
/** Tests of {@link FunctionStateTracker}'s expected-state bookkeeping. */
public class FunctionStateTrackerTest {

  @Test
  public void exampleUsage() {
    FunctionStateTracker tracker = new FunctionStateTracker(1_000);
    // Three state increments addressed to function instance 5.
    tracker.apply(aStateModificationCommand(5));
    tracker.apply(aStateModificationCommand(5));
    tracker.apply(aStateModificationCommand(5));
    assertThat(tracker.stateOf(5), is(3L));
  }

  @Test
  public void testRelay() {
    FunctionStateTracker tracker = new FunctionStateTracker(1_000);
    // send a layered state increment message, first to function 5, and then
    // to function 6.
    tracker.apply(aRelayedStateModificationCommand(5, 6));
    // Only the final hop (function 6) performs the increment; the relay (5) is untouched.
    assertThat(tracker.stateOf(5), is(0L));
    assertThat(tracker.stateOf(6), is(1L));
  }
}
| 5,907 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/test/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/test/java/org/apache/flink/statefun/e2e/smoke/testutils/Utils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.testutils;
import org.apache.flink.statefun.e2e.smoke.generated.Command;
import org.apache.flink.statefun.e2e.smoke.generated.Commands;
import org.apache.flink.statefun.e2e.smoke.generated.SourceCommand;
/** Static factories for assembling protobuf {@link SourceCommand}s used in tests. */
public class Utils {

  /** Non-instantiable: this is a static utility class. */
  private Utils() {}

  /** A state-increment command addressed to an arbitrary function instance. */
  public static SourceCommand aStateModificationCommand() {
    return aStateModificationCommand(-1234); // the id doesn't matter
  }

  /** A command that increments the state of the given function instance. */
  public static SourceCommand aStateModificationCommand(int functionInstanceId) {
    return SourceCommand.newBuilder()
        .setTarget(functionInstanceId)
        .setCommands(Commands.newBuilder().addCommand(modify()))
        .build();
  }

  /**
   * A command addressed to {@code firstFunctionId} that relays a state increment to {@code
   * secondFunctionId}; only the second function's state ends up modified.
   */
  public static SourceCommand aRelayedStateModificationCommand(
      int firstFunctionId, int secondFunctionId) {
    return SourceCommand.newBuilder()
        .setTarget(firstFunctionId)
        .setCommands(Commands.newBuilder().addCommand(sendTo(secondFunctionId, modify())))
        .build();
  }

  /** Wraps {@code body} in a send command targeted at function instance {@code id}. */
  private static Command.Builder sendTo(int id, Command.Builder body) {
    return Command.newBuilder()
        .setSend(
            Command.Send.newBuilder()
                .setTarget(id)
                .setCommands(Commands.newBuilder().addCommand(body)));
  }

  /** A bare state-increment command. */
  private static Command.Builder modify() {
    return Command.newBuilder().setIncrement(Command.IncrementState.getDefaultInstance());
  }
}
| 5,908 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke/driver/CommandRouter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.unpackSourceCommand;
import java.util.Objects;
import org.apache.flink.statefun.e2e.smoke.generated.SourceCommand;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.io.Router;
import org.apache.flink.statefun.sdk.reqreply.generated.TypedValue;
/** Routes ingested command envelopes to the function instance named by their target id. */
public class CommandRouter implements Router<TypedValue> {
  // Precomputed integer-address -> string-id mapping.
  private final Ids ids;

  public CommandRouter(Ids ids) {
    this.ids = Objects.requireNonNull(ids);
  }

  @Override
  public void route(TypedValue command, Downstream<TypedValue> downstream) {
    // Unpack only to learn the target instance; the envelope must carry a SourceCommand.
    SourceCommand sourceCommand = unpackSourceCommand(command);
    FunctionType type = Constants.FN_TYPE;
    String id = ids.idOf(sourceCommand.getTarget());
    // Forward the original (still packed) envelope, not the unpacked command.
    downstream.forward(type, id, command);
  }
}
| 5,909 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke/driver/DriverModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
import static org.apache.flink.statefun.e2e.smoke.driver.Constants.IN;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.unpackVerificationResult;
import com.google.auto.service.AutoService;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;
import org.apache.flink.api.common.serialization.SerializationSchema;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.apache.flink.statefun.e2e.smoke.generated.VerificationResult;
import org.apache.flink.statefun.flink.io.datastream.SinkFunctionSpec;
import org.apache.flink.statefun.flink.io.datastream.SourceFunctionSpec;
import org.apache.flink.statefun.sdk.reqreply.generated.TypedValue;
import org.apache.flink.statefun.sdk.spi.StatefulFunctionModule;
import org.apache.flink.streaming.api.functions.sink.DiscardingSink;
import org.apache.flink.streaming.api.functions.sink.SocketClientSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Wires the smoke-test driver pipeline: a command-generating ingress, a router that addresses
 * commands to function instances, and egresses for discarded output and verification results.
 */
@AutoService(StatefulFunctionModule.class)
public class DriverModule implements StatefulFunctionModule {
  public static final Logger LOG = LoggerFactory.getLogger(DriverModule.class);

  @Override
  public void configure(Map<String, String> globalConfiguration, Binder binder) {
    SmokeRunnerParameters parameters = SmokeRunnerParameters.from(globalConfiguration);
    LOG.info(parameters.toString());
    Ids ids = new Ids(parameters.getNumberOfFunctionInstances());
    // Ingress: random commands produced by CommandFlinkSource, routed by target instance id.
    binder.bindIngress(new SourceFunctionSpec<>(IN, new CommandFlinkSource(parameters)));
    binder.bindIngressRouter(IN, new CommandRouter(ids));
    // The OUT egress is not inspected by the driver, so its records are simply discarded.
    binder.bindEgress(new SinkFunctionSpec<>(Constants.OUT, new DiscardingSink<>()));
    // Verification results are shipped over a raw socket to an external verification server.
    SocketClientSink<TypedValue> client =
        new SocketClientSink<>(
            parameters.getVerificationServerHost(),
            parameters.getVerificationServerPort(),
            new VerificationResultSerializer(),
            3,
            true);
    binder.bindEgress(new SinkFunctionSpec<>(Constants.VERIFICATION_RESULT, client));
  }

  /** Serializes a packed VerificationResult as a length-delimited protobuf byte stream. */
  private static final class VerificationResultSerializer
      implements SerializationSchema<TypedValue> {
    @Override
    public byte[] serialize(TypedValue element) {
      try {
        VerificationResult result = unpackVerificationResult(element);
        // +8 leaves headroom for the varint length prefix written by writeDelimitedTo.
        ByteArrayOutputStream out = new ByteArrayOutputStream(result.getSerializedSize() + 8);
        result.writeDelimitedTo(out);
        return out.toByteArray();
      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
  }
}
| 5,910 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke/driver/FunctionStateTracker.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
import org.apache.flink.statefun.e2e.smoke.generated.Command;
import org.apache.flink.statefun.e2e.smoke.generated.Commands;
import org.apache.flink.statefun.e2e.smoke.generated.FunctionTrackerSnapshot;
import org.apache.flink.statefun.e2e.smoke.generated.SourceCommand;
/**
 * Tracks the state that each function instance is expected to hold, mirroring the increments
 * applied by the generated command trees.
 *
 * <p>Per-instance state is a single counter; every {@link Command.IncrementState} found in a
 * command tree bumps the counter of the function instance the command is addressed to.
 */
final class FunctionStateTracker {
  // expectedStates[i] is the expected counter value of function instance i.
  private final long[] expectedStates;

  public FunctionStateTracker(int numberOfFunctionInstances) {
    this.expectedStates = new long[numberOfFunctionInstances];
  }

  /**
   * Find any state modification commands nested under @sourceCommand, and apply them in the
   * internal state representation.
   */
  public void apply(SourceCommand sourceCommand) {
    updateInternally(sourceCommand.getTarget(), sourceCommand.getCommands());
  }

  /** Apply all the state modification stored in the snapshot represented by the snapshotBytes. */
  public FunctionStateTracker apply(FunctionTrackerSnapshot snapshot) {
    // NOTE(review): assumes snapshot.getStateCount() <= expectedStates.length — confirm the
    // configured number of function instances never shrinks across a restore.
    for (int i = 0; i < snapshot.getStateCount(); i++) {
      expectedStates[i] += snapshot.getState(i);
    }
    return this;
  }

  /** Get the current expected state of a function instance. */
  public long stateOf(int id) {
    return expectedStates[id];
  }

  /** Capture the current expected states into a protobuf snapshot builder. */
  public FunctionTrackerSnapshot.Builder snapshot() {
    FunctionTrackerSnapshot.Builder snapshot = FunctionTrackerSnapshot.newBuilder();
    for (long state : expectedStates) {
      snapshot.addState(state);
    }
    return snapshot;
  }

  /**
   * Recursively traverse the commands tree and look for {@link Command.IncrementState} commands.
   * For each {@code ModifyState} command found update the corresponding expected state.
   */
  private void updateInternally(int currentAddress, Commands commands) {
    for (Command command : commands.getCommandList()) {
      if (command.hasIncrement()) {
        expectedStates[currentAddress]++;
      } else if (command.hasSend()) {
        // Immediate send: the nested commands execute at the send target.
        updateInternally(command.getSend().getTarget(), command.getSend().getCommands());
      } else if (command.hasSendAfter()) {
        // Delayed send: same accounting as a plain send.
        updateInternally(command.getSendAfter().getTarget(), command.getSendAfter().getCommands());
      } else if (command.hasAsyncOperation()) {
        // Async operation: resolved commands run at the same (current) address.
        updateInternally(currentAddress, command.getAsyncOperation().getResolvedCommands());
      }
    }
  }
}
| 5,911 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke/driver/Types.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
import org.apache.flink.statefun.e2e.smoke.generated.Commands;
import org.apache.flink.statefun.e2e.smoke.generated.SourceCommand;
import org.apache.flink.statefun.e2e.smoke.generated.VerificationResult;
import org.apache.flink.statefun.sdk.TypeName;
import org.apache.flink.statefun.sdk.reqreply.generated.TypedValue;
/**
 * Pack/unpack helpers that wrap the smoke-test protobuf messages in {@link TypedValue}
 * envelopes, tagging each with its canonical {@link TypeName}.
 */
public final class Types {
  private Types() {}

  public static final TypeName SOURCE_COMMANDS_TYPE =
      TypeName.parseFrom(Constants.NAMESPACE + "/source-command");
  public static final TypeName VERIFICATION_RESULT_TYPE =
      TypeName.parseFrom(Constants.NAMESPACE + "/verification-result");
  public static final TypeName COMMANDS_TYPE =
      TypeName.parseFrom(Constants.NAMESPACE + "/commands");

  /** Returns true iff {@code value} is tagged with the canonical typename of {@code type}. */
  public static boolean isTypeOf(TypedValue value, TypeName type) {
    return value.getTypename().equals(type.canonicalTypenameString());
  }

  /** Wraps a {@link SourceCommand} in a {@link TypedValue} envelope. */
  public static TypedValue packSourceCommand(SourceCommand sourceCommand) {
    return TypedValue.newBuilder()
        .setTypename(SOURCE_COMMANDS_TYPE.canonicalTypenameString())
        .setHasValue(true)
        .setValue(sourceCommand.toByteString())
        .build();
  }

  /**
   * Unwraps a {@link SourceCommand} from a {@link TypedValue} envelope.
   *
   * @throws IllegalStateException if the envelope carries a different typename.
   * @throws RuntimeException if the payload bytes cannot be parsed.
   */
  public static SourceCommand unpackSourceCommand(TypedValue typedValue) {
    if (!isTypeOf(typedValue, SOURCE_COMMANDS_TYPE)) {
      throw new IllegalStateException("Unexpected TypedValue: " + typedValue);
    }
    try {
      return SourceCommand.parseFrom(typedValue.getValue());
    } catch (Exception e) {
      throw new RuntimeException("Unable to parse SourceCommand from TypedValue.", e);
    }
  }

  /** Wraps a {@link Commands} batch in a {@link TypedValue} envelope. */
  public static TypedValue packCommands(Commands commands) {
    return TypedValue.newBuilder()
        .setTypename(COMMANDS_TYPE.canonicalTypenameString())
        .setHasValue(true)
        .setValue(commands.toByteString())
        .build();
  }

  /**
   * Unwraps a {@link Commands} batch from a {@link TypedValue} envelope.
   *
   * @throws IllegalStateException if the envelope carries a different typename.
   * @throws RuntimeException if the payload bytes cannot be parsed.
   */
  public static Commands unpackCommands(TypedValue typedValue) {
    if (!isTypeOf(typedValue, COMMANDS_TYPE)) {
      throw new IllegalStateException("Unexpected TypedValue: " + typedValue);
    }
    try {
      return Commands.parseFrom(typedValue.getValue());
    } catch (Exception e) {
      throw new RuntimeException("Unable to parse Commands from TypedValue.", e);
    }
  }

  /** Wraps a {@link VerificationResult} in a {@link TypedValue} envelope. */
  public static TypedValue packVerificationResult(VerificationResult verificationResult) {
    return TypedValue.newBuilder()
        .setTypename(VERIFICATION_RESULT_TYPE.canonicalTypenameString())
        .setHasValue(true)
        .setValue(verificationResult.toByteString())
        .build();
  }

  /**
   * Unwraps a {@link VerificationResult} from a {@link TypedValue} envelope.
   *
   * @throws IllegalStateException if the envelope carries a different typename.
   * @throws RuntimeException if the payload bytes cannot be parsed.
   */
  public static VerificationResult unpackVerificationResult(TypedValue typedValue) {
    if (!isTypeOf(typedValue, VERIFICATION_RESULT_TYPE)) {
      throw new IllegalStateException("Unexpected TypedValue: " + typedValue);
    }
    try {
      return VerificationResult.parseFrom(typedValue.getValue());
    } catch (Exception e) {
      // Fixed copy-paste: this message previously claimed to be parsing a SourceCommand.
      throw new RuntimeException("Unable to parse VerificationResult from TypedValue.", e);
    }
  }
}
| 5,912 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke/driver/CommandGenerator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
import static java.util.Arrays.asList;
import static org.apache.commons.math3.util.Pair.create;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.function.Supplier;
import org.apache.commons.math3.distribution.EnumeratedDistribution;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.commons.math3.util.Pair;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.apache.flink.statefun.e2e.smoke.generated.Command;
import org.apache.flink.statefun.e2e.smoke.generated.Commands;
import org.apache.flink.statefun.e2e.smoke.generated.SourceCommand;
/**
* Generates random commands to be interpreted by functions of type {@link Constants#FN_TYPE}.
*
* <p>see {src/main/protobuf/commands.proto}
*/
public final class CommandGenerator implements Supplier<SourceCommand> {
  private final RandomGenerator random;
  // Weighted distribution over the individual command generators below.
  private final EnumeratedDistribution<Gen> distribution;
  private final SmokeRunnerParameters parameters;

  public CommandGenerator(RandomGenerator random, SmokeRunnerParameters parameters) {
    this.random = Objects.requireNonNull(random);
    this.parameters = Objects.requireNonNull(parameters);
    this.distribution = new EnumeratedDistribution<>(random, randomCommandGenerators());
  }

  /** Produces one random command tree addressed to a random function instance. */
  @Override
  public SourceCommand get() {
    final int depth = random.nextInt(parameters.getCommandDepth());
    return SourceCommand.newBuilder().setTarget(address()).setCommands(commands(depth)).build();
  }

  /**
   * Builds a random batch of commands with nesting depth at most {@code depth}. Every batch is
   * guaranteed to contain at least one command.
   */
  private Commands.Builder commands(int depth) {
    Commands.Builder builder = Commands.newBuilder();
    if (depth <= 0) {
      // Recursion floor: emit a plain state increment and stop.
      StateModifyGen.instance().generate(builder, depth);
      return builder;
    }
    final int n = random.nextInt(parameters.getMaxCommandsPerDepth());
    for (int i = 0; i < n; i++) {
      Gen gen = distribution.sample();
      gen.generate(builder, depth);
    }
    if (builder.getCommandCount() == 0) {
      // n may be 0, or only Noop may have been sampled; never leave the batch empty.
      StateModifyGen.instance().generate(builder, depth);
    }
    return builder;
  }

  /** Picks a random function instance address. */
  private int address() {
    return random.nextInt(parameters.getNumberOfFunctionInstances());
  }

  /**
   * The weighted generator set; optional generators (async ops, delayed-send cancellation) join
   * only when the parameters say the target SDK supports them.
   */
  private List<Pair<Gen, Double>> randomCommandGenerators() {
    List<Pair<Gen, Double>> list =
        new ArrayList<>(
            asList(
                create(new StateModifyGen(), parameters.getStateModificationsPr()),
                create(new SendGen(), parameters.getSendPr()),
                create(new SendAfterGen(), parameters.getSendAfterPr()),
                create(new Noop(), parameters.getNoopPr()),
                create(new SendEgress(), parameters.getSendEgressPr())));
    if (parameters.isAsyncOpSupported()) {
      list.add(create(new SendAsyncOp(), parameters.getAsyncSendPr()));
    }
    if (parameters.isDelayCancellationOpSupported()) {
      list.add(create(new SendAfterCancellationGen(), parameters.getSendAfterWithCancellationPr()));
    }
    return list;
  }

  interface Gen {
    /** generates one or more commands with depth at most @depth. */
    void generate(Commands.Builder builder, int depth);
  }

  // ----------------------------------------------------------------------------------------------------
  // generators
  // ----------------------------------------------------------------------------------------------------

  /** Emits a send-to-egress command. */
  private static final class SendEgress implements Gen {
    @Override
    public void generate(Commands.Builder builder, int depth) {
      builder.addCommand(
          Command.newBuilder().setSendEgress(Command.SendEgress.getDefaultInstance()));
    }
  }

  /** Emits nothing; its sampling weight controls how often a slot stays empty. */
  private static final class Noop implements Gen {
    @Override
    public void generate(Commands.Builder builder, int depth) {}
  }

  /** Emits a state-increment command. */
  private static final class StateModifyGen implements Gen {
    static final Gen INSTANCE = new StateModifyGen();

    static Gen instance() {
      return INSTANCE;
    }

    @Override
    public void generate(Commands.Builder builder, int depth) {
      builder.addCommand(
          Command.newBuilder().setIncrement(Command.IncrementState.getDefaultInstance()));
    }
  }

  /** Emits a delayed send carrying a nested (depth - 1) command tree. */
  private final class SendAfterGen implements Gen {
    @Override
    public void generate(Commands.Builder builder, int depth) {
      builder.addCommand(Command.newBuilder().setSendAfter(sendAfter(depth)));
    }

    private Command.SendAfter.Builder sendAfter(int depth) {
      return Command.SendAfter.newBuilder().setTarget(address()).setCommands(commands(depth - 1));
    }
  }

  /** Emits a cancellable delayed send immediately followed by its matching cancellation. */
  private final class SendAfterCancellationGen implements Gen {
    @Override
    public void generate(Commands.Builder builder, int depth) {
      // A shared token pairs the delayed send with the cancellation that targets it.
      final String token = new UUID(random.nextLong(), random.nextLong()).toString();
      final int address = address();
      Command.SendAfter.Builder first =
          Command.SendAfter.newBuilder().setTarget(address).setCancellationToken(token);
      Command.CancelSendAfter.Builder second =
          Command.CancelSendAfter.newBuilder().setTarget(address).setCancellationToken(token);
      builder.addCommand(Command.newBuilder().setSendAfter(first));
      builder.addCommand(Command.newBuilder().setCancelSendAfter(second));
    }
  }

  /** Emits an immediate send carrying a nested (depth - 1) command tree. */
  private final class SendGen implements Gen {
    @Override
    public void generate(Commands.Builder builder, int depth) {
      builder.addCommand(Command.newBuilder().setSend(send(depth)));
    }

    private Command.Send.Builder send(int depth) {
      return Command.Send.newBuilder().setTarget(address()).setCommands(commands(depth - 1));
    }
  }

  /** Emits an async operation (randomly flagged as failing) with nested resolved commands. */
  private final class SendAsyncOp implements Gen {
    @Override
    public void generate(Commands.Builder builder, int depth) {
      builder.addCommand(Command.newBuilder().setAsyncOperation(asyncOp(depth)));
    }

    private Command.AsyncOperation.Builder asyncOp(int depth) {
      return Command.AsyncOperation.newBuilder()
          .setFailure(random.nextBoolean())
          .setResolvedCommands(commands(depth - 1));
    }
  }
}
| 5,913 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke/driver/Ids.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
/** A precomputed cache of string ids for integer addresses in {@code [0, maxIds)}. */
public final class Ids {

  private final String[] cache;

  /** Precomputes the string form of every address in {@code [0, maxIds)}. */
  public Ids(int maxIds) {
    String[] ids = new String[maxIds];
    for (int address = 0; address < maxIds; address++) {
      ids[address] = String.valueOf(address);
    }
    this.cache = ids;
  }

  /** Returns the cached string form of {@code address}. */
  public String idOf(int address) {
    return cache[address];
  }
}
| 5,914 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-driver/src/main/java/org/apache/flink/statefun/e2e/smoke/driver/CommandFlinkSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.packSourceCommand;
import static org.apache.flink.statefun.e2e.smoke.generated.Command.Verify;
import static org.apache.flink.statefun.e2e.smoke.generated.Command.newBuilder;
import java.util.Iterator;
import java.util.Objects;
import java.util.OptionalInt;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.Supplier;
import org.apache.commons.math3.random.JDKRandomGenerator;
import org.apache.commons.math3.random.RandomGenerator;
import org.apache.flink.api.common.state.CheckpointListener;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.OperatorStateStore;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.apache.flink.statefun.e2e.smoke.generated.Command;
import org.apache.flink.statefun.e2e.smoke.generated.Commands;
import org.apache.flink.statefun.e2e.smoke.generated.SourceCommand;
import org.apache.flink.statefun.e2e.smoke.generated.SourceSnapshot;
import org.apache.flink.statefun.sdk.reqreply.generated.TypedValue;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A Flink Source that Emits {@link SourceCommand}s.
*
* <p>This source is configured by {@link SmokeRunnerParameters} and would generate random commands,
* addressed to various functions. This source might also throw exceptions (kaboom) to simulate
* failures.
*
* <p>After generating {@link SmokeRunnerParameters#getMessageCount()} messages, this source will
* switch to {@code verification} step. At this step, it would keep sending (every 2 seconds) a
* {@link Verify} command to every function indefinitely.
*/
final class CommandFlinkSource extends RichSourceFunction<TypedValue>
implements CheckpointedFunction, CheckpointListener {
private static final Logger LOG = LoggerFactory.getLogger(CommandFlinkSource.class);
// ------------------------------------------------------------------------------------------------------------
// Configuration
// ------------------------------------------------------------------------------------------------------------
private final SmokeRunnerParameters parameters;
// ------------------------------------------------------------------------------------------------------------
// Runtime
// ------------------------------------------------------------------------------------------------------------
private transient ListState<SourceSnapshot> sourceSnapshotHandle;
private transient FunctionStateTracker functionStateTracker;
private transient int commandsSentSoFar;
private transient int failuresSoFar;
private transient boolean done;
private transient boolean atLeastOneCheckpointCompleted;
  // Only the configuration is kept across serialization; all transient runtime state is
  // rebuilt in initializeState()/open().
  public CommandFlinkSource(SmokeRunnerParameters parameters) {
    this.parameters = Objects.requireNonNull(parameters);
  }
  @Override
  public void initializeState(FunctionInitializationContext context) throws Exception {
    OperatorStateStore store = context.getOperatorStateStore();
    // Union list state: a restore hands each subtask all snapshots; open() then reduces this
    // to a single element (presumably the source runs with parallelism 1 — TODO confirm).
    sourceSnapshotHandle =
        store.getUnionListState(new ListStateDescriptor<>("snapshot", SourceSnapshot.class));
  }
  @Override
  public void open(Configuration parameters) throws Exception {
    super.open(parameters);
    // Either the single snapshot written by a previous run, or the protobuf default instance
    // on a fresh start (all counters zero).
    SourceSnapshot sourceSnapshot =
        getOnlyElement(sourceSnapshotHandle.get(), SourceSnapshot.getDefaultInstance());
    // Rebuild the expected-state tracker and resume the generation/failure counters.
    functionStateTracker =
        new FunctionStateTracker(this.parameters.getNumberOfFunctionInstances())
            .apply(sourceSnapshot.getTracker());
    commandsSentSoFar = sourceSnapshot.getCommandsSentSoFarHandle();
    failuresSoFar = sourceSnapshot.getFailuresGeneratedSoFar();
  }
@Override
public void snapshotState(FunctionSnapshotContext context) throws Exception {
sourceSnapshotHandle.clear();
sourceSnapshotHandle.add(
SourceSnapshot.newBuilder()
.setCommandsSentSoFarHandle(commandsSentSoFar)
.setTracker(functionStateTracker.snapshot())
.setFailuresGeneratedSoFar(failuresSoFar)
.build());
if (commandsSentSoFar < parameters.getMessageCount()) {
double perCent = 100.0d * (commandsSentSoFar) / parameters.getMessageCount();
LOG.info(
"Commands sent {} / {} ({} %)", commandsSentSoFar, parameters.getMessageCount(), perCent);
}
}
@Override
public void notifyCheckpointComplete(long checkpointId) {
atLeastOneCheckpointCompleted = true;
}
@Override
public void cancel() {
done = true;
}
// ------------------------------------------------------------------------------------------------------------
// Generation
// ------------------------------------------------------------------------------------------------------------
@Override
public void run(SourceContext<TypedValue> ctx) {
generate(ctx);
do {
verify(ctx);
snooze();
synchronized (ctx.getCheckpointLock()) {
if (done) {
return;
}
}
} while (true);
}
private void generate(SourceContext<TypedValue> ctx) {
final int startPosition = this.commandsSentSoFar;
final OptionalInt kaboomIndex =
computeFailureIndex(startPosition, failuresSoFar, parameters.getMaxFailures());
if (kaboomIndex.isPresent()) {
failuresSoFar++;
}
LOG.info(
"starting at {}, kaboom at {}, total messages {}, random command generator seed {}",
startPosition,
kaboomIndex,
parameters.getMessageCount(),
parameters.getRandomGeneratorSeed());
RandomGenerator random = new JDKRandomGenerator();
random.setSeed(parameters.getRandomGeneratorSeed());
Supplier<SourceCommand> generator = new CommandGenerator(random, parameters);
FunctionStateTracker functionStateTracker = this.functionStateTracker;
for (int i = startPosition; i < parameters.getMessageCount(); i++) {
if (atLeastOneCheckpointCompleted && kaboomIndex.isPresent() && i >= kaboomIndex.getAsInt()) {
throw new RuntimeException("KABOOM!!!");
}
SourceCommand command = generator.get();
synchronized (ctx.getCheckpointLock()) {
if (done) {
return;
}
functionStateTracker.apply(command);
ctx.collect(packSourceCommand(command));
this.commandsSentSoFar = i;
}
}
}
private void verify(SourceContext<TypedValue> ctx) {
FunctionStateTracker functionStateTracker = this.functionStateTracker;
for (int i = 0; i < parameters.getNumberOfFunctionInstances(); i++) {
final long expected = functionStateTracker.stateOf(i);
Command.Builder verify = newBuilder().setVerify(Verify.newBuilder().setExpected(expected));
SourceCommand command =
SourceCommand.newBuilder()
.setTarget(i)
.setCommands(Commands.newBuilder().addCommand(verify))
.build();
synchronized (ctx.getCheckpointLock()) {
ctx.collect(packSourceCommand(command));
}
}
}
// ---------------------------------------------------------------------------------------------------------------
// Utils
// ---------------------------------------------------------------------------------------------------------------
private OptionalInt computeFailureIndex(int startPosition, int failureSoFar, int maxFailures) {
if (failureSoFar >= maxFailures) {
return OptionalInt.empty();
}
if (startPosition >= parameters.getMessageCount()) {
return OptionalInt.empty();
}
int index = ThreadLocalRandom.current().nextInt(startPosition, parameters.getMessageCount());
return OptionalInt.of(index);
}
private static void snooze() {
try {
Thread.sleep(2_000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
private static <T> T getOnlyElement(Iterable<T> items, T def) {
Iterator<T> it = items.iterator();
if (!it.hasNext()) {
return def;
}
T item = it.next();
if (it.hasNext()) {
throw new IllegalStateException("Iterable has additional elements");
}
return item;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.driver;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.io.EgressIdentifier;
import org.apache.flink.statefun.sdk.io.IngressIdentifier;
import org.apache.flink.statefun.sdk.reqreply.generated.TypedValue;
/**
 * Shared identifiers (ingress, egresses, and the interpreter function type) used by the
 * smoke-test driver and by the embedded/remote function modules that bind to it.
 *
 * <p>Non-instantiable constants holder, consistent with the other smoke-test {@code Constants}
 * classes.
 */
public class Constants {

  /** Prevent instantiation: this class only holds constants. */
  private Constants() {}

  public static final String NAMESPACE = "statefun.smoke.e2e";
  public static final String INGRESS_NAME = "command-generator-source";
  public static final String EGRESS_NAME = "discard-sink";
  public static final String VERIFICATION_EGRESS_NAME = "verification-sink";
  public static final String FUNCTION_NAME = "command-interpreter-fn";
  /** Ingress through which generated {@code SourceCommand}s enter the application. */
  public static final IngressIdentifier<TypedValue> IN =
      new IngressIdentifier<>(TypedValue.class, NAMESPACE, INGRESS_NAME);
  /** Egress that discards messages produced by SendEgress commands. */
  public static final EgressIdentifier<TypedValue> OUT =
      new EgressIdentifier<>(NAMESPACE, EGRESS_NAME, TypedValue.class);
  /** Egress that receives verification results for the test harness to inspect. */
  public static final EgressIdentifier<TypedValue> VERIFICATION_RESULT =
      new EgressIdentifier<>(NAMESPACE, VERIFICATION_EGRESS_NAME, TypedValue.class);
  // For embedded/remote functions to bind with the smoke-e2e-common testing framework
  public static final FunctionType FN_TYPE = new FunctionType(NAMESPACE, FUNCTION_NAME);
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.golang;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.flink.statefun.e2e.common.StatefulFunctionsAppContainers;
import org.apache.flink.statefun.e2e.smoke.SmokeRunner;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.images.builder.ImageFromDockerfile;
/**
 * End-to-end smoke test that runs the Go remote function in a sidecar container next to a
 * 2-worker StateFun cluster, generates 100k random commands across 128 function instances with
 * one simulated failure, and awaits successful verification.
 */
public class SmokeVerificationGolangE2E {
  private static final Logger LOG = LoggerFactory.getLogger(SmokeVerificationGolangE2E.class);
  private static final int NUM_WORKERS = 2;

  @Test(timeout = 1_000 * 60 * 10)
  public void runWith() throws Throwable {
    SmokeRunnerParameters parameters = new SmokeRunnerParameters();
    parameters.setNumberOfFunctionInstances(128);
    parameters.setMessageCount(100_000);
    parameters.setMaxFailures(1);
    GenericContainer<?> remoteFunction = configureRemoteFunction();
    StatefulFunctionsAppContainers.Builder builder =
        StatefulFunctionsAppContainers.builder("flink-statefun-cluster", NUM_WORKERS)
            .withBuildContextFileFromClasspath("remote-module", "/remote-module/")
            .withBuildContextFileFromClasspath("ssl/", "ssl/")
            .dependsOn(remoteFunction);
    SmokeRunner.run(parameters, builder);
  }

  /**
   * Builds the remote-function container image from the Go SDK sources plus the test's own Go
   * sources, reachable from the cluster as {@code remote-function-host}.
   */
  private GenericContainer<?> configureRemoteFunction() {
    ImageFromDockerfile remoteFunctionImage =
        new ImageFromDockerfile("remote-function-image")
            .withFileFromClasspath("Dockerfile", "Dockerfile.remote-function")
            .withFileFromClasspath("ssl/", "ssl/")
            .withFileFromPath("source/", goSdkPath())
            .withFileFromPath("smoketest/", remoteFunctionGoSourcePath());
    return new GenericContainer<>(remoteFunctionImage)
        .withNetworkAliases("remote-function-host")
        .withLogConsumer(new Slf4jLogConsumer(LOG));
  }

  /** Path to the Go SDK sources, built portably instead of concatenating "/" by hand. */
  private static Path goSdkPath() {
    return Paths.get(System.getProperty("user.dir"), "..", "..", "statefun-sdk-go");
  }

  /** Path to this module's Go function sources. */
  private static Path remoteFunctionGoSourcePath() {
    return Paths.get(System.getProperty("user.dir"), "src", "main", "go");
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.multilang.harness;
import static org.apache.flink.statefun.e2e.smoke.SmokeRunner.awaitVerificationSuccess;
import org.apache.flink.statefun.e2e.smoke.SimpleVerificationServer;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.apache.flink.statefun.e2e.smoke.driver.DriverModule;
import org.apache.flink.statefun.flink.harness.Harness;
import org.apache.flink.statefun.sdk.spi.StatefulFunctionModule;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This harness test is used for locally running smoke E2E tests that run their polyglot functions
* in a remote process.
*
* <p>The harness test has exactly 2 {@link StatefulFunctionModule} in the classpath that will be
* picked up to be included in the application universe:
*
* <ul>
* <li>1. The driver module at {@link DriverModule} which contains the application components that
* drive the smoke test.
* <li>2. A remote module, as defined by the module specification defined at {@code
* /resources/module.yaml}. This module defines only a single function endpoint at {@code
* http://localhost:8000} for messages targeted at function {@code
* statefun.smoke.e2e/command-interpreter-fn}.
* </ul>
*
* <p>You can directly run this test within the IDE by:
*
* <ul>
* <li>1. Start the remote function process locally in the IDE. It should be reachable at {@code
* http://localhost:8000}.
* <li>2. Remove the {@code @Ignore} annotation from the test.
* <li>3. Run the {@code miniClusterTest()} JUnit test method.
* </ul>
*/
public final class MultiLangSmokeHarnessTest {
  private static final Logger LOG = LoggerFactory.getLogger(MultiLangSmokeHarnessTest.class);

  @Ignore
  @Test(timeout = 1_000 * 60 * 2)
  public void miniClusterTest() throws Exception {
    final Harness harness = flinkConfiguredHarness();

    // Start the server that collects verification results from the egress.
    final SimpleVerificationServer.StartedServer verificationServer =
        new SimpleVerificationServer().start();

    // Configure the smoke-test parameters and pass them to the harness as global configuration.
    final SmokeRunnerParameters parameters = new SmokeRunnerParameters();
    parameters.setMaxFailures(1);
    parameters.setMessageCount(100_000);
    parameters.setNumberOfFunctionInstances(128);
    parameters.setVerificationServerHost("localhost");
    parameters.setVerificationServerPort(verificationServer.port());
    parameters.asMap().forEach(harness::withGlobalConfiguration);

    // Run the harness in the background until every function instance verified successfully.
    try (AutoCloseable ignored = runInBackground(harness)) {
      awaitVerificationSuccess(
          verificationServer.results(), parameters.getNumberOfFunctionInstances());
    }
    LOG.info("All done.");
  }

  /** Creates a {@link Harness} carrying the Flink configuration used by this test. */
  private static Harness flinkConfiguredHarness() {
    final Harness harness = new Harness();
    harness.withConfiguration(
        "classloader.parent-first-patterns.additional",
        "org.apache.flink.statefun;org.apache.kafka;com.google.protobuf");
    harness.withConfiguration("restart-strategy", "fixed-delay");
    harness.withConfiguration("restart-strategy.fixed-delay.attempts", "2147483647");
    harness.withConfiguration("restart-strategy.fixed-delay.delay", "1sec");
    harness.withConfiguration("execution.checkpointing.interval", "2sec");
    harness.withConfiguration("execution.checkpointing.mode", "EXACTLY_ONCE");
    harness.withConfiguration("execution.checkpointing.max-concurrent-checkpoints", "3");
    harness.withConfiguration("parallelism.default", "2");
    harness.withConfiguration("state.checkpoints.dir", "file:///tmp/checkpoints");
    return harness;
  }

  /** Starts the harness on a daemon thread; closing the returned handle interrupts it. */
  private static AutoCloseable runInBackground(Harness harness) {
    final Runnable runner =
        () -> {
          try {
            harness.start();
          } catch (InterruptedException ignored) {
            LOG.info("Harness Thread was interrupted. Exiting...");
          } catch (Exception exception) {
            LOG.info("Something happened while trying to run the Harness.", exception);
          }
        };
    final Thread thread = new Thread(runner, "harness-runner");
    thread.setDaemon(true);
    thread.start();
    return thread::interrupt;
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.java;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.flink.statefun.e2e.common.StatefulFunctionsAppContainers;
import org.apache.flink.statefun.e2e.smoke.SmokeRunner;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.images.builder.ImageFromDockerfile;
/**
 * End-to-end smoke test that runs the Java remote function in a sidecar container next to a
 * 2-worker StateFun cluster, generates 100k random commands across 128 function instances with
 * one simulated failure, and awaits successful verification.
 */
public class SmokeVerificationJavaE2E {
  private static final Logger LOG = LoggerFactory.getLogger(SmokeVerificationJavaE2E.class);
  private static final int NUM_WORKERS = 2;

  @Test(timeout = 1_000 * 60 * 10)
  public void runWith() throws Throwable {
    SmokeRunnerParameters parameters = new SmokeRunnerParameters();
    parameters.setNumberOfFunctionInstances(128);
    parameters.setMessageCount(100_000);
    parameters.setMaxFailures(1);
    GenericContainer<?> remoteFunction = configureRemoteFunction();
    StatefulFunctionsAppContainers.Builder builder =
        StatefulFunctionsAppContainers.builder("flink-statefun-cluster", NUM_WORKERS)
            .withBuildContextFileFromClasspath("remote-module", "/remote-module/")
            .withBuildContextFileFromClasspath("certs", "/certs/")
            .dependsOn(remoteFunction);
    SmokeRunner.run(parameters, builder);
  }

  /**
   * Builds the remote-function image from this module's {@code target/} build output, reachable
   * from the cluster as {@code remote-function-host}.
   */
  private GenericContainer<?> configureRemoteFunction() {
    // Build the path portably instead of concatenating "/" by hand.
    Path targetDirPath = Paths.get(System.getProperty("user.dir"), "target");
    ImageFromDockerfile remoteFunctionImage =
        new ImageFromDockerfile("remote-function-image")
            .withFileFromClasspath("Dockerfile", "Dockerfile.remote-function")
            .withFileFromPath(".", targetDirPath);
    return new GenericContainer<>(remoteFunctionImage)
        .withNetworkAliases("remote-function-host")
        .withLogConsumer(new Slf4jLogConsumer(LOG));
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.java;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import org.apache.flink.statefun.sdk.java.Context;
import org.apache.flink.statefun.sdk.java.StatefulFunction;
import org.apache.flink.statefun.sdk.java.ValueSpec;
import org.apache.flink.statefun.sdk.java.message.Message;
/**
 * A {@link StatefulFunction} that delegates every incoming message to a {@link
 * CommandInterpreter}.
 *
 * <p>Holds a single {@code long} counter under {@link #STATE}, which the interpreter increments
 * (IncrementState commands) and checks (Verify commands).
 */
public class CommandInterpreterFn implements StatefulFunction {
  /** The per-address counter mutated and verified by the interpreted commands. */
  public static final ValueSpec<Long> STATE = ValueSpec.named("state").withLongType();

  private final CommandInterpreter interpreter;

  /**
   * @param interpreter the command interpreter to delegate to; must not be null.
   */
  public CommandInterpreterFn(CommandInterpreter interpreter) {
    // Fail fast on misconfiguration, consistent with the other constructors in this module.
    this.interpreter = Objects.requireNonNull(interpreter, "interpreter");
  }

  @Override
  public CompletableFuture<Void> apply(Context context, Message message) throws Throwable {
    interpreter.interpret(STATE, context, message);
    return context.done();
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.java;
import static org.apache.flink.statefun.e2e.smoke.java.Constants.CMD_INTERPRETER_FN;
import java.io.IOException;
import java.io.InputStream;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import org.apache.flink.shaded.netty4.io.netty.bootstrap.ServerBootstrap;
import org.apache.flink.shaded.netty4.io.netty.buffer.Unpooled;
import org.apache.flink.shaded.netty4.io.netty.channel.*;
import org.apache.flink.shaded.netty4.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.flink.shaded.netty4.io.netty.channel.socket.nio.NioServerSocketChannel;
import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.*;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.ClientAuth;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslContext;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslContextBuilder;
import org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslProvider;
import org.apache.flink.statefun.sdk.java.StatefulFunctionSpec;
import org.apache.flink.statefun.sdk.java.StatefulFunctions;
import org.apache.flink.statefun.sdk.java.slice.Slice;
import org.apache.flink.statefun.sdk.java.slice.Slices;
/**
 * A standalone Netty server that exposes the command-interpreter function over HTTPS with
 * mutual TLS on port {@value #PORT}.
 *
 * <p>Server certificate, key, and trusted CA are loaded from the {@code certs/} classpath
 * directory; clients must present a certificate trusted by {@code a_ca.pem}.
 */
public class CommandInterpreterAppServer {
  private static final int PORT = 8000;
  // Passphrase protecting the test server's private key (test fixture, not a real secret).
  private static final String A_SERVER_KEY_PASSWORD = "test";
  private static final CommandInterpreter commandInterpreter = new CommandInterpreter();
  private static final StatefulFunctionSpec FN_SPEC =
      StatefulFunctionSpec.builder(CMD_INTERPRETER_FN)
          .withSupplier(() -> new CommandInterpreterFn(commandInterpreter))
          .withValueSpec(CommandInterpreterFn.STATE)
          .build();

  public static void main(String[] args) throws IOException, InterruptedException {
    final InputStream trustCaCerts =
        Objects.requireNonNull(
                CommandInterpreter.class.getClassLoader().getResource("certs/a_ca.pem"))
            .openStream();
    final InputStream aServerCert =
        Objects.requireNonNull(
                CommandInterpreter.class.getClassLoader().getResource("certs/a_server.crt"))
            .openStream();
    final InputStream aServerKey =
        Objects.requireNonNull(
                CommandInterpreter.class.getClassLoader().getResource("certs/a_server.key.p8"))
            .openStream();
    ServerBootstrap httpsMutualTlsBootstrap =
        getServerBootstrap(getChannelInitializer(trustCaCerts, aServerCert, aServerKey));
    // Bind and wait for the bind to complete; Netty's non-daemon event-loop threads keep the
    // JVM alive afterwards.
    httpsMutualTlsBootstrap.bind(PORT).sync();
  }

  /** Creates a channel initializer whose SslContext is built from the given PEM/PKCS8 streams. */
  private static ChannelInitializer<Channel> getChannelInitializer(
      InputStream trustInputStream, InputStream certInputStream, InputStream keyInputStream) {
    return getTlsEnabledInitializer(
        SslContextBuilder.forServer(certInputStream, keyInputStream, A_SERVER_KEY_PASSWORD)
            .trustManager(trustInputStream));
  }

  /**
   * Returns an initializer installing a mutual-TLS SslHandler followed by the HTTP/statefun
   * pipeline. Note: the SslContext is built per channel from the shared builder.
   */
  private static ChannelInitializer<Channel> getTlsEnabledInitializer(
      SslContextBuilder sslContextBuilder) {
    return new ChannelInitializer<Channel>() {
      @Override
      protected void initChannel(Channel channel) throws IOException {
        ChannelPipeline pipeline = channel.pipeline();
        SslContext sslContext =
            sslContextBuilder.sslProvider(SslProvider.JDK).clientAuth(ClientAuth.REQUIRE).build();
        pipeline.addLast(sslContext.newHandler(channel.alloc()));
        addResponseHandlerToPipeline(pipeline);
      }
    };
  }

  /** Standard NIO server bootstrap with separate boss/worker event loops. */
  private static ServerBootstrap getServerBootstrap(ChannelInitializer<Channel> childHandler) {
    NioEventLoopGroup eventLoopGroup = new NioEventLoopGroup();
    NioEventLoopGroup workerGroup = new NioEventLoopGroup();
    return new ServerBootstrap()
        .group(eventLoopGroup, workerGroup)
        .channel(NioServerSocketChannel.class)
        .childHandler(childHandler)
        .option(ChannelOption.SO_BACKLOG, 128)
        .childOption(ChannelOption.SO_KEEPALIVE, true);
  }

  /** Adds HTTP codec + aggregation + the request-reply handler to the pipeline. */
  private static void addResponseHandlerToPipeline(ChannelPipeline pipeline) {
    pipeline.addLast(new HttpServerCodec());
    pipeline.addLast(new HttpObjectAggregator(Integer.MAX_VALUE));
    pipeline.addLast(getStatefunInboundHandler());
  }

  /**
   * Creates a handler that feeds the aggregated request body into the StateFun request-reply
   * handler and writes the serialized reply back as an HTTP 200; failures become an HTTP 500.
   */
  private static SimpleChannelInboundHandler<FullHttpRequest> getStatefunInboundHandler() {
    StatefulFunctions functions = new StatefulFunctions();
    functions.withStatefulFunction(FN_SPEC);
    return new SimpleChannelInboundHandler<FullHttpRequest>() {
      @Override
      protected void channelRead0(
          ChannelHandlerContext channelHandlerContext, FullHttpRequest fullHttpRequest) {
        CompletableFuture<Slice> res =
            functions
                .requestReplyHandler()
                .handle(Slices.wrap(fullHttpRequest.content().nioBuffer()));
        res.whenComplete(
            (r, e) -> {
              if (e != null) {
                // The handler completed exceptionally: r is null, so answer with a 500
                // instead of NPE-ing on r.toByteArray().
                channelHandlerContext.writeAndFlush(
                    new DefaultFullHttpResponse(
                        HttpVersion.HTTP_1_1, HttpResponseStatus.INTERNAL_SERVER_ERROR));
                return;
              }
              FullHttpResponse response =
                  new DefaultFullHttpResponse(
                      HttpVersion.HTTP_1_1,
                      HttpResponseStatus.OK,
                      Unpooled.copiedBuffer(r.toByteArray()));
              response.headers().set(HttpHeaderNames.CONTENT_TYPE, "application/octet-stream");
              response.headers().set(HttpHeaderNames.CONTENT_LENGTH, r.readableBytes());
              channelHandlerContext.writeAndFlush(response);
            });
      }
    };
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.java;
import static org.apache.flink.statefun.e2e.smoke.java.Constants.*;
import java.time.Duration;
import org.apache.flink.statefun.e2e.smoke.generated.Command;
import org.apache.flink.statefun.e2e.smoke.generated.Commands;
import org.apache.flink.statefun.e2e.smoke.generated.VerificationResult;
import org.apache.flink.statefun.sdk.java.*;
import org.apache.flink.statefun.sdk.java.message.EgressMessage;
import org.apache.flink.statefun.sdk.java.message.EgressMessageBuilder;
import org.apache.flink.statefun.sdk.java.message.Message;
import org.apache.flink.statefun.sdk.java.message.MessageBuilder;
/**
 * Interprets smoke-test {@link Command}s for a single function invocation: incrementing the
 * counter state, relaying commands to other instances (immediately or delayed), writing to the
 * discard egress, and reporting verification results.
 */
public final class CommandInterpreter {
  /** Delay applied to Command.SendAfter relays (constant-style name per convention). */
  private static final Duration SEND_AFTER_DELAY = Duration.ofMillis(1);

  /**
   * Entry point: unpacks either a SourceCommand (from the generator) or a Commands message
   * (relayed from another instance) and interprets the contained command list.
   *
   * @throws IllegalArgumentException if the message has an unrecognized type.
   */
  public void interpret(ValueSpec<Long> state, Context context, Message message) {
    if (message.is(SOURCE_COMMAND_TYPE)) {
      interpret(state, context, message.as(SOURCE_COMMAND_TYPE).getCommands());
    } else if (message.is(COMMANDS_TYPE)) {
      interpret(state, context, message.as(COMMANDS_TYPE));
    } else {
      throw new IllegalArgumentException("Unrecognized message type " + message.valueTypeName());
    }
  }

  /** Dispatches each command in order to its dedicated handler. */
  private void interpret(ValueSpec<Long> state, Context context, Commands cmds) {
    for (Command cmd : cmds.getCommandList()) {
      if (cmd.hasIncrement()) {
        modifyState(state, context, cmd.getIncrement());
      } else if (cmd.hasSend()) {
        send(state, context, cmd.getSend());
      } else if (cmd.hasSendAfter()) {
        sendAfter(state, context, cmd.getSendAfter());
      } else if (cmd.hasSendEgress()) {
        sendEgress(state, context, cmd.getSendEgress());
      } else if (cmd.hasVerify()) {
        verify(state, context, cmd.getVerify());
      }
    }
  }

  /**
   * Emits a {@link VerificationResult} comparing this instance's actual counter with the
   * expected value carried by the Verify command.
   *
   * <p>Note: the previous {@code @SuppressWarnings("unused")} on {@code context} was stale —
   * the parameter is used throughout.
   */
  private void verify(ValueSpec<Long> state, Context context, Command.Verify verify) {
    AddressScopedStorage storage = context.storage();
    int selfId = Integer.parseInt(context.self().id());
    long actual = storage.get(state).orElse(0L);
    long expected = verify.getExpected();
    VerificationResult verificationResult =
        VerificationResult.newBuilder()
            .setId(selfId)
            .setActual(actual)
            .setExpected(expected)
            .build();
    EgressMessage egressMessage =
        EgressMessageBuilder.forEgress(VERIFICATION_EGRESS)
            .withCustomType(VERIFICATION_RESULT_TYPE, verificationResult)
            .build();
    context.send(egressMessage);
  }

  /** Sends a throwaway message to the discard egress. */
  private void sendEgress(
      @SuppressWarnings("unused") ValueSpec<Long> state,
      Context context,
      @SuppressWarnings("unused") Command.SendEgress sendEgress) {
    EgressMessage egressMessage =
        EgressMessageBuilder.forEgress(DISCARD_EGRESS).withValue("discarded-message").build();
    context.send(egressMessage);
  }

  /** Relays the nested commands to the target instance after {@link #SEND_AFTER_DELAY}. */
  private void sendAfter(
      @SuppressWarnings("unused") ValueSpec<Long> state, Context context, Command.SendAfter send) {
    String id = Integer.toString(send.getTarget());
    Address targetAddress = new Address(CMD_INTERPRETER_FN, id);
    Message message =
        MessageBuilder.forAddress(targetAddress)
            .withCustomType(COMMANDS_TYPE, send.getCommands())
            .build();
    context.sendAfter(SEND_AFTER_DELAY, message);
  }

  /** Relays the nested commands to the target instance immediately. */
  private void send(
      @SuppressWarnings("unused") ValueSpec<Long> state, Context context, Command.Send send) {
    String id = Integer.toString(send.getTarget());
    Address targetAddress = new Address(CMD_INTERPRETER_FN, id);
    Message message =
        MessageBuilder.forAddress(targetAddress)
            .withCustomType(COMMANDS_TYPE, send.getCommands())
            .build();
    context.send(message);
  }

  /**
   * Increments the counter state by one. The {@code incrementState} payload carries no fields
   * that are read here; {@code context} IS used (the former "unused" annotation was stale).
   */
  private void modifyState(
      ValueSpec<Long> state,
      Context context,
      @SuppressWarnings("unused") Command.IncrementState incrementState) {
    AddressScopedStorage storage = context.storage();
    long n = storage.get(state).orElse(0L);
    storage.set(state, n + 1);
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.java;
import org.apache.flink.statefun.e2e.smoke.generated.Commands;
import org.apache.flink.statefun.e2e.smoke.generated.SourceCommand;
import org.apache.flink.statefun.e2e.smoke.generated.VerificationResult;
import org.apache.flink.statefun.sdk.java.TypeName;
import org.apache.flink.statefun.sdk.java.types.SimpleType;
import org.apache.flink.statefun.sdk.java.types.Type;
/** Type names, egresses, and custom message types shared by the Java smoke-test function. */
final class Constants {

  private Constants() {}

  /** Namespace common to every component of the smoke-test application. */
  private static final String APP_NAMESPACE = "statefun.smoke.e2e";

  /** Egress that swallows messages produced by SendEgress commands. */
  static final TypeName DISCARD_EGRESS = TypeName.typeNameOf(APP_NAMESPACE, "discard-sink");

  /** Egress that receives {@link VerificationResult} messages. */
  static final TypeName VERIFICATION_EGRESS =
      TypeName.typeNameOf(APP_NAMESPACE, "verification-sink");

  /** The command-interpreter function bound by this module. */
  static final TypeName CMD_INTERPRETER_FN =
      TypeName.typeNameOf(APP_NAMESPACE, "command-interpreter-fn");

  /** Protobuf-backed type for {@link Commands} relayed between function instances. */
  static final Type<Commands> COMMANDS_TYPE =
      SimpleType.simpleImmutableTypeFrom(
          TypeName.typeNameOf(APP_NAMESPACE, "commands"),
          Commands::toByteArray,
          Commands::parseFrom);

  /** Protobuf-backed type for {@link SourceCommand} messages arriving from the generator. */
  static final Type<SourceCommand> SOURCE_COMMAND_TYPE =
      SimpleType.simpleImmutableTypeFrom(
          TypeName.typeNameOf(APP_NAMESPACE, "source-command"),
          SourceCommand::toByteArray,
          SourceCommand::parseFrom);

  /** Protobuf-backed type for {@link VerificationResult} messages sent to the egress. */
  static final Type<VerificationResult> VERIFICATION_RESULT_TYPE =
      SimpleType.simpleImmutableTypeFrom(
          TypeName.typeNameOf(APP_NAMESPACE, "verification-result"),
          VerificationResult::toByteArray,
          VerificationResult::parseFrom);
}
| 5,923 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-common/src/test/java/org/apache/flink/statefun/e2e | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-common/src/test/java/org/apache/flink/statefun/e2e/smoke/SmokeRunnerParametersTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import java.util.Collections;
import java.util.Map;
import org.junit.Test;
/** Tests for converting {@link SmokeRunnerParameters} to and from a flat key-value map. */
public class SmokeRunnerParametersTest {

  @Test
  public void exampleUsage() {
    Map<String, String> configuration = Collections.singletonMap("messageCount", "1");

    SmokeRunnerParameters parsed = SmokeRunnerParameters.from(configuration);

    assertThat(parsed.getMessageCount(), is(1));
  }

  @Test
  public void roundTrip() {
    SmokeRunnerParameters in = new SmokeRunnerParameters();
    in.setCommandDepth(1234);

    // asMap() followed by from() must preserve the configured values.
    SmokeRunnerParameters out = SmokeRunnerParameters.from(in.asMap());

    assertThat(out.getCommandDepth(), is(1234));
  }
}
| 5,924 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-common/src/main/java/org/apache/flink/statefun/e2e | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-common/src/main/java/org/apache/flink/statefun/e2e/smoke/SmokeRunner.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke;
import java.util.HashSet;
import java.util.Set;
import java.util.function.Supplier;
import org.apache.flink.statefun.e2e.common.StatefulFunctionsAppContainers;
import org.apache.flink.statefun.e2e.smoke.generated.VerificationResult;
import org.apache.flink.util.function.ThrowingRunnable;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.Testcontainers;
/**
 * Drives a containerized smoke test: starts a local verification server, ships the test
 * parameters to the application as global configuration, boots the containers, and blocks
 * until every function instance has reported a successful verification.
 */
public final class SmokeRunner {
  private static final Logger LOG = LoggerFactory.getLogger(SmokeRunner.class);

  public static void run(
      SmokeRunnerParameters parameters, StatefulFunctionsAppContainers.Builder builder)
      throws Throwable {
    // Bring up the TCP endpoint that functions report verification results to.
    SimpleVerificationServer.StartedServer verificationServer =
        new SimpleVerificationServer().start();
    parameters.setVerificationServerHost("host.testcontainers.internal");
    parameters.setVerificationServerPort(verificationServer.port());
    Testcontainers.exposeHostPorts(verificationServer.port());

    // Ship the test parameters as module global configuration, so that the application
    // module can rebuild them inside the container at Module#configure().
    parameters.asMap().forEach(builder::withModuleGlobalConfiguration);
    builder.exposeLogs(LOG);

    StatefulFunctionsAppContainers app = builder.build();
    run(
        app,
        () ->
            awaitVerificationSuccess(
                verificationServer.results(), parameters.getNumberOfFunctionInstances()));
  }

  /** Evaluates {@code body} inside the lifecycle of the given app containers rule. */
  private static void run(StatefulFunctionsAppContainers app, ThrowingRunnable<Throwable> body)
      throws Throwable {
    Statement inner =
        new Statement() {
          @Override
          public void evaluate() throws Throwable {
            body.run();
          }
        };
    app.apply(inner, Description.EMPTY).evaluate();
  }

  /**
   * Blocks until {@code numberOfFunctionInstances} distinct function ids have reported a
   * result whose actual count equals the expected count.
   *
   * @throws AssertionError if any function reports an actual count above its expected count
   */
  public static void awaitVerificationSuccess(
      Supplier<VerificationResult> results, final int numberOfFunctionInstances) {
    Set<Integer> verified = new HashSet<>();
    while (verified.size() != numberOfFunctionInstances) {
      VerificationResult result = results.get();
      final long expected = result.getExpected();
      final long actual = result.getActual();
      if (actual == expected) {
        verified.add(result.getId());
      } else if (actual > expected) {
        throw new AssertionError(
            "Over counted. Expected: "
                + expected
                + ", actual: "
                + actual
                + ", function: "
                + result.getId());
      }
      // actual < expected: this function has not caught up yet; keep polling.
    }
  }
}
| 5,925 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-common/src/main/java/org/apache/flink/statefun/e2e | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-common/src/main/java/org/apache/flink/statefun/e2e/smoke/SmokeRunnerParameters.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke;
import java.io.Serializable;
import java.util.Map;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.type.TypeReference;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.DeserializationFeature;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
@SuppressWarnings("unused")
/**
 * Parameters controlling the smoke-test workload.
 *
 * <p>Instances are converted to and from a flat {@code String -> String} map (via Jackson's
 * {@code convertValue}) so they can travel to the application module as global configuration
 * and be rebuilt at {@code Module#configure()}.
 *
 * <p>Fix: {@link #toString()} previously omitted {@code sendAfterWithCancellationPr} and
 * {@code isDelayCancellationOpSupported}; all fields are now included. Getter/setter pairs
 * are also grouped consistently.
 */
@SuppressWarnings("unused")
public final class SmokeRunnerParameters implements Serializable {
  private static final long serialVersionUID = 1;

  // Workload sizing.
  private int numberOfFunctionInstances = 1_000;
  private int commandDepth = 10;
  private int messageCount = 100_000;
  private int maxCommandsPerDepth = 3;

  // Probabilities (0..1) for each generated command kind — TODO confirm exact
  // semantics against the command generator that consumes them.
  private double stateModificationsPr = 0.4;
  private double sendPr = 0.9;
  private double sendAfterPr = 0.1;
  private double sendAfterWithCancellationPr = 0.1;
  private double asyncSendPr = 0.1;
  private double noopPr = 0.2;
  private double sendEgressPr = 0.03;

  private int maxFailures = 1;

  // Where functions report their verification results.
  private String verificationServerHost = "localhost";
  private int verificationServerPort = 5050;

  // Capabilities of the SDK under test.
  private boolean isAsyncOpSupported = false;
  private boolean isDelayCancellationOpSupported = false;

  private long randomGeneratorSeed = System.nanoTime();

  /** Creates an instance of SmokeRunnerParameters from a key-value map. */
  public static SmokeRunnerParameters from(Map<String, String> globalConfiguration) {
    ObjectMapper mapper = new ObjectMapper();
    // Tolerate extra keys: the global configuration may carry unrelated entries.
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    return mapper.convertValue(globalConfiguration, SmokeRunnerParameters.class);
  }

  /** Serializes these parameters into a flat key-value map (inverse of {@link #from(Map)}). */
  public Map<String, String> asMap() {
    ObjectMapper mapper = new ObjectMapper();
    return mapper.convertValue(this, new TypeReference<Map<String, String>>() {});
  }

  public int getNumberOfFunctionInstances() {
    return numberOfFunctionInstances;
  }

  public void setNumberOfFunctionInstances(int numberOfFunctionInstances) {
    this.numberOfFunctionInstances = numberOfFunctionInstances;
  }

  public int getCommandDepth() {
    return commandDepth;
  }

  public void setCommandDepth(int commandDepth) {
    this.commandDepth = commandDepth;
  }

  public int getMessageCount() {
    return messageCount;
  }

  public void setMessageCount(int messageCount) {
    this.messageCount = messageCount;
  }

  public int getMaxCommandsPerDepth() {
    return maxCommandsPerDepth;
  }

  public void setMaxCommandsPerDepth(int maxCommandsPerDepth) {
    this.maxCommandsPerDepth = maxCommandsPerDepth;
  }

  public double getStateModificationsPr() {
    return stateModificationsPr;
  }

  public void setStateModificationsPr(double stateModificationsPr) {
    this.stateModificationsPr = stateModificationsPr;
  }

  public double getSendPr() {
    return sendPr;
  }

  public void setSendPr(double sendPr) {
    this.sendPr = sendPr;
  }

  public double getSendAfterPr() {
    return sendAfterPr;
  }

  public void setSendAfterPr(double sendAfterPr) {
    this.sendAfterPr = sendAfterPr;
  }

  public double getSendAfterWithCancellationPr() {
    return sendAfterWithCancellationPr;
  }

  public void setSendAfterWithCancellationPr(double sendAfterWithCancellationPr) {
    this.sendAfterWithCancellationPr = sendAfterWithCancellationPr;
  }

  public double getAsyncSendPr() {
    return asyncSendPr;
  }

  public void setAsyncSendPr(double asyncSendPr) {
    this.asyncSendPr = asyncSendPr;
  }

  public double getNoopPr() {
    return noopPr;
  }

  public void setNoopPr(double noopPr) {
    this.noopPr = noopPr;
  }

  public double getSendEgressPr() {
    return sendEgressPr;
  }

  public void setSendEgressPr(double sendEgressPr) {
    this.sendEgressPr = sendEgressPr;
  }

  public int getMaxFailures() {
    return maxFailures;
  }

  public void setMaxFailures(int maxFailures) {
    this.maxFailures = maxFailures;
  }

  public String getVerificationServerHost() {
    return verificationServerHost;
  }

  public void setVerificationServerHost(String verificationServerHost) {
    this.verificationServerHost = verificationServerHost;
  }

  public int getVerificationServerPort() {
    return verificationServerPort;
  }

  public void setVerificationServerPort(int verificationServerPort) {
    this.verificationServerPort = verificationServerPort;
  }

  public boolean isAsyncOpSupported() {
    return isAsyncOpSupported;
  }

  public void setAsyncOpSupported(boolean asyncOpSupported) {
    isAsyncOpSupported = asyncOpSupported;
  }

  public boolean isDelayCancellationOpSupported() {
    return isDelayCancellationOpSupported;
  }

  public void setDelayCancellationOpSupported(boolean delayCancellationOpSupported) {
    isDelayCancellationOpSupported = delayCancellationOpSupported;
  }

  public long getRandomGeneratorSeed() {
    return randomGeneratorSeed;
  }

  public void setRandomGeneratorSeed(long randomGeneratorSeed) {
    this.randomGeneratorSeed = randomGeneratorSeed;
  }

  @Override
  public String toString() {
    // Includes every field (sendAfterWithCancellationPr and isDelayCancellationOpSupported
    // were previously missing).
    return "SmokeRunnerParameters{"
        + "numberOfFunctionInstances="
        + numberOfFunctionInstances
        + ", commandDepth="
        + commandDepth
        + ", messageCount="
        + messageCount
        + ", maxCommandsPerDepth="
        + maxCommandsPerDepth
        + ", stateModificationsPr="
        + stateModificationsPr
        + ", sendPr="
        + sendPr
        + ", sendAfterPr="
        + sendAfterPr
        + ", sendAfterWithCancellationPr="
        + sendAfterWithCancellationPr
        + ", asyncSendPr="
        + asyncSendPr
        + ", noopPr="
        + noopPr
        + ", sendEgressPr="
        + sendEgressPr
        + ", maxFailures="
        + maxFailures
        + ", verificationServerHost='"
        + verificationServerHost
        + '\''
        + ", verificationServerPort="
        + verificationServerPort
        + ", isAsyncOpSupported="
        + isAsyncOpSupported
        + ", isDelayCancellationOpSupported="
        + isDelayCancellationOpSupported
        + ", randomGeneratorSeed="
        + randomGeneratorSeed
        + '}';
  }
}
| 5,926 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-common/src/main/java/org/apache/flink/statefun/e2e | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-common/src/main/java/org/apache/flink/statefun/e2e/smoke/SimpleVerificationServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke;
import java.io.IOException;
import java.io.InputStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;
import javax.annotation.concurrent.ThreadSafe;
import org.apache.flink.statefun.e2e.smoke.generated.VerificationResult;
import org.apache.flink.util.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A simple threaded TCP server that is able to receive {@link VerificationResult} messages. */
@ThreadSafe
/**
 * A simple threaded TCP server that is able to receive {@link VerificationResult} messages.
 *
 * <p>Fixes: double-start now throws {@link IllegalStateException} (a lifecycle error) instead
 * of {@link IllegalArgumentException}; the per-client pump now terminates on clean end-of-stream
 * instead of busy-spinning ({@code parseDelimitedFrom} returns {@code null} at EOF).
 */
@ThreadSafe
public final class SimpleVerificationServer {
  private static final Logger LOG = LoggerFactory.getLogger(SimpleVerificationServer.class);

  /** Results received from all connected clients, in arrival order. */
  private final LinkedBlockingDeque<VerificationResult> results = new LinkedBlockingDeque<>();

  private final ExecutorService executor;

  /** Flipped exactly once by {@link #start()} to guard against double-starting. */
  private final AtomicBoolean started = new AtomicBoolean(false);

  public SimpleVerificationServer() {
    this.executor = MoreExecutors.newCachedDaemonThreadPool();
  }

  /**
   * Binds a server socket on an ephemeral port and begins accepting clients in the background.
   *
   * @return the bound port together with a blocking supplier of received results
   * @throws IllegalStateException if already started, or the socket cannot be bound
   */
  public StartedServer start() {
    if (!started.compareAndSet(false, true)) {
      // Calling start() twice is a state error, not a bad argument, so
      // IllegalStateException is the appropriate type (was IllegalArgumentException).
      throw new IllegalStateException("Already started.");
    }
    try {
      // NOTE(review): the server socket is never closed; acceptable for a test-scoped
      // server whose daemon threads die with the JVM.
      ServerSocket serverSocket = new ServerSocket(0);
      serverSocket.setReuseAddress(true);
      LOG.info("Starting server at " + serverSocket.getLocalPort());
      executor.submit(() -> acceptClients(serverSocket));
      return new StartedServer(serverSocket.getLocalPort(), results());
    } catch (IOException e) {
      throw new IllegalStateException("Unable to bind the TCP server.", e);
    }
  }

  /** Returns a supplier that blocks until the next verification result arrives. */
  private Supplier<VerificationResult> results() {
    return () -> {
      try {
        return results.take();
      } catch (InterruptedException e) {
        throw new RuntimeException(e);
      }
    };
  }

  /** Accept loop: hands each connected client off to its own pump task. */
  @SuppressWarnings("InfiniteLoopStatement")
  private void acceptClients(ServerSocket serverSocket) {
    while (true) {
      try {
        Socket client = serverSocket.accept();
        InputStream input = client.getInputStream();
        executor.submit(() -> pumpVerificationResults(client, input));
      } catch (IOException e) {
        LOG.info("Exception while trying to accept a connection.", e);
      }
    }
  }

  /** Reads length-delimited {@link VerificationResult}s until the client disconnects. */
  private void pumpVerificationResults(Socket client, InputStream input) {
    while (true) {
      try {
        VerificationResult result = VerificationResult.parseDelimitedFrom(input);
        if (result == null) {
          // parseDelimitedFrom returns null at end-of-stream: the client disconnected
          // cleanly. Previously the loop kept spinning here forever.
          IOUtils.closeQuietly(client);
          return;
        }
        results.add(result);
      } catch (IOException e) {
        LOG.info(
            "Exception reading a verification result from "
                + client.getRemoteSocketAddress()
                + ", bye...",
            e);
        IOUtils.closeQuietly(client);
        return;
      }
    }
  }

  /** Handle returned by {@link #start()}: the bound port plus a blocking result supplier. */
  public static final class StartedServer {
    private final int port;
    private final Supplier<VerificationResult> results;

    public StartedServer(int port, Supplier<VerificationResult> results) {
      this.port = port;
      this.results = results;
    }

    public int port() {
      return port;
    }

    public Supplier<VerificationResult> results() {
      return results;
    }
  }

  /** Factory for a cached thread pool whose threads are daemons. */
  private static final class MoreExecutors {
    static ExecutorService newCachedDaemonThreadPool() {
      return Executors.newCachedThreadPool(
          r -> {
            Thread t = new Thread(r);
            t.setDaemon(true);
            return t;
          });
    }
  }
}
| 5,927 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/test/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/test/java/org/apache/flink/statefun/e2e/smoke/embedded/EmbeddedSmokeHarnessTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.embedded;
import static org.apache.flink.statefun.e2e.smoke.SmokeRunner.awaitVerificationSuccess;
import org.apache.flink.statefun.e2e.smoke.SimpleVerificationServer;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.apache.flink.statefun.flink.harness.Harness;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Runs the embedded smoke test on a local Flink MiniCluster via the Harness utility.
 * Ignored by default; intended for manual/debugging runs.
 */
public class EmbeddedSmokeHarnessTest {
  private static final Logger LOG = LoggerFactory.getLogger(EmbeddedSmokeHarnessTest.class);

  @Ignore
  @Test(timeout = 1_000 * 60 * 2)
  public void miniClusterTest() throws Exception {
    // Bring up the TCP server that receives verification results from the functions.
    SimpleVerificationServer.StartedServer verificationServer =
        new SimpleVerificationServer().start();

    // Test parameters, shipped to the module as global configuration.
    SmokeRunnerParameters params = new SmokeRunnerParameters();
    params.setMaxFailures(1);
    params.setMessageCount(100_000);
    params.setNumberOfFunctionInstances(128);
    params.setVerificationServerHost("localhost");
    params.setVerificationServerPort(verificationServer.port());
    params.setAsyncOpSupported(true);
    params.setDelayCancellationOpSupported(true);

    // Flink-related configuration.
    Harness harness = new Harness();
    harness.withConfiguration(
        "classloader.parent-first-patterns.additional",
        "org.apache.flink.statefun;org.apache.kafka;com.google.protobuf");
    harness.withConfiguration("restart-strategy", "fixed-delay");
    harness.withConfiguration("restart-strategy.fixed-delay.attempts", "2147483647");
    harness.withConfiguration("restart-strategy.fixed-delay.delay", "1sec");
    harness.withConfiguration("execution.checkpointing.interval", "2sec");
    harness.withConfiguration("execution.checkpointing.mode", "EXACTLY_ONCE");
    harness.withConfiguration("execution.checkpointing.max-concurrent-checkpoints", "3");
    harness.withConfiguration("parallelism.default", "2");
    harness.withConfiguration("state.checkpoints.dir", "file:///tmp/checkpoints");
    params.asMap().forEach(harness::withGlobalConfiguration);

    // Run the harness in the background and block until every function has verified.
    try (AutoCloseable ignored = startHarnessInTheBackground(harness)) {
      awaitVerificationSuccess(verificationServer.results(), params.getNumberOfFunctionInstances());
    }
    LOG.info("All done.");
  }

  /** Starts the harness on a daemon thread; closing the handle interrupts that thread. */
  private static AutoCloseable startHarnessInTheBackground(Harness harness) {
    Runnable body =
        () -> {
          try {
            harness.start();
          } catch (InterruptedException ignored) {
            LOG.info("Harness Thread was interrupted. Exiting...");
          } catch (Exception exception) {
            LOG.info("Something happened while trying to run the Harness.", exception);
          }
        };
    Thread runner = new Thread(body);
    runner.setName("harness-runner");
    runner.setDaemon(true);
    runner.start();
    return runner::interrupt;
  }
}
| 5,928 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/test/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/test/java/org/apache/flink/statefun/e2e/smoke/embedded/SmokeVerificationEmbeddedE2E.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.embedded;
import org.apache.flink.statefun.e2e.common.StatefulFunctionsAppContainers;
import org.apache.flink.statefun.e2e.smoke.SmokeRunner;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.junit.Test;
/** End-to-end smoke verification of the embedded SDK, running against app containers. */
public class SmokeVerificationEmbeddedE2E {
  private static final int NUM_WORKERS = 2;

  @Test(timeout = 1_000 * 60 * 10)
  public void run() throws Throwable {
    StatefulFunctionsAppContainers.Builder appBuilder =
        StatefulFunctionsAppContainers.builder("flink-statefun-cluster", NUM_WORKERS);

    // The embedded SDK supports both async operations and delayed-message cancellation.
    SmokeRunnerParameters params = new SmokeRunnerParameters();
    params.setMaxFailures(1);
    params.setMessageCount(100_000);
    params.setNumberOfFunctionInstances(128);
    params.setAsyncOpSupported(true);
    params.setDelayCancellationOpSupported(true);

    SmokeRunner.run(params, appBuilder);
  }
}
| 5,929 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke/embedded/EmbeddedFnModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.embedded;
import com.google.auto.service.AutoService;
import java.util.Map;
import org.apache.flink.statefun.e2e.smoke.SmokeRunnerParameters;
import org.apache.flink.statefun.e2e.smoke.driver.Constants;
import org.apache.flink.statefun.e2e.smoke.driver.Ids;
import org.apache.flink.statefun.sdk.spi.StatefulFunctionModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@AutoService(StatefulFunctionModule.class)
/** StateFun module that binds the embedded command-interpreter function provider. */
@AutoService(StatefulFunctionModule.class)
public class EmbeddedFnModule implements StatefulFunctionModule {
  public static final Logger LOG = LoggerFactory.getLogger(EmbeddedFnModule.class);

  @Override
  public void configure(Map<String, String> globalConfiguration, Binder binder) {
    // Rebuild the test parameters that the driver shipped as global configuration.
    final SmokeRunnerParameters parameters = SmokeRunnerParameters.from(globalConfiguration);
    LOG.info(parameters.toString());

    final Ids functionIds = new Ids(parameters.getNumberOfFunctionInstances());
    binder.bindFunctionProvider(Constants.FN_TYPE, new FunctionProvider(functionIds));
  }
}
| 5,930 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke/embedded/CommandInterpreterFn.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.embedded;
import java.util.Objects;
import org.apache.flink.statefun.sdk.Context;
import org.apache.flink.statefun.sdk.StatefulFunction;
import org.apache.flink.statefun.sdk.annotations.Persisted;
import org.apache.flink.statefun.sdk.state.PersistedValue;
/**
 * An embedded {@link StatefulFunction} that delegates every incoming message, together with
 * its single persisted {@code Long} state, to a {@link CommandInterpreter}.
 */
public class CommandInterpreterFn implements StatefulFunction {

  // Persisted counter state, discovered by the runtime via the @Persisted annotation.
  // Renamed from STATE: UPPER_SNAKE_CASE is reserved for constants, and this is a mutable
  // per-instance value. The logical state name ("state") is unchanged.
  @Persisted
  private final PersistedValue<Long> state = PersistedValue.of("state", Long.class);

  private final CommandInterpreter interpreter;

  public CommandInterpreterFn(CommandInterpreter interpreter) {
    this.interpreter = Objects.requireNonNull(interpreter, "interpreter");
  }

  @Override
  public void invoke(Context context, Object message) {
    interpreter.interpret(state, context, message);
  }
}
| 5,931 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke/embedded/AsyncCompleter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.embedded;
import java.time.Duration;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.LinkedBlockingDeque;
/**
* Creates {@link CompletableFuture}s that can be completed successfully or unsuccessfully, within 1
* millisecond delay.
*/
/**
 * Creates {@link CompletableFuture}s that are completed — successfully or exceptionally — by a
 * background daemon thread, no sooner than 1 millisecond after creation.
 *
 * <p>Fix: on {@link InterruptedException} the worker now restores the interrupt flag and exits
 * cleanly instead of throwing a {@link RuntimeException} off a daemon thread.
 */
final class AsyncCompleter {

  /** Shared, stack-trace-free exception used to fail futures cheaply. */
  private static final Throwable EXCEPTION;

  static {
    Throwable t = new RuntimeException();
    // Drop the stack trace: this exception is reused for every failed future, so a
    // captured trace would be misleading (and filling it in repeatedly is costly).
    t.setStackTrace(new StackTraceElement[0]);
    EXCEPTION = t;
  }

  /** A pending completion request: the future, its creation time, and the desired outcome. */
  private static final class Task {
    final long time; // System.nanoTime() at creation
    final CompletableFuture<Boolean> future;
    final boolean success;

    Task(boolean success) {
      this.time = System.nanoTime();
      this.future = new CompletableFuture<>();
      this.success = success;
    }
  }

  // Duration.ofMillis(1).getNano() == 1ms expressed in nanoseconds (1_000_000).
  private static final int ONE_MILLISECOND = Duration.ofMillis(1).getNano();

  private final LinkedBlockingDeque<Task> queue = new LinkedBlockingDeque<>();

  // NOTE(review): plain boolean, so start() is only safe when invoked from a single
  // thread; the current caller invokes it once from a constructor.
  private boolean started;

  /** Returns a future that completes successfully, no sooner than 1 millisecond from now. */
  CompletableFuture<Boolean> successfulFuture() {
    return future(true);
  }

  /** Returns a future that completes exceptionally, no sooner than 1 millisecond from now. */
  CompletableFuture<Boolean> failedFuture() {
    return future(false);
  }

  private CompletableFuture<Boolean> future(boolean success) {
    Task task = new Task(success);
    queue.add(task);
    return task.future;
  }

  /** Starts the background completer thread; subsequent calls are no-ops. */
  void start() {
    if (started) {
      return;
    }
    started = true;
    Thread t = new Thread(this::run);
    t.setDaemon(true);
    t.start();
  }

  @SuppressWarnings("BusyWait")
  void run() {
    while (true) {
      try {
        Task task = queue.take();
        final long elapsed = System.nanoTime() - task.time;
        if (elapsed < ONE_MILLISECOND) {
          Thread.sleep(1); // enforce the minimal 1ms completion delay
        }
        if (task.success) {
          task.future.complete(Boolean.TRUE);
        } else {
          task.future.completeExceptionally(EXCEPTION);
        }
      } catch (InterruptedException e) {
        // Restore the interrupt flag and stop the loop: throwing from a daemon
        // thread would only kill it with a noisy, unobserved stack trace.
        Thread.currentThread().interrupt();
        return;
      }
    }
  }
}
| 5,932 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke/embedded/CommandInterpreter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.embedded;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.COMMANDS_TYPE;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.SOURCE_COMMANDS_TYPE;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.isTypeOf;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.packCommands;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.packVerificationResult;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.unpackCommands;
import static org.apache.flink.statefun.e2e.smoke.driver.Types.unpackSourceCommand;
import java.time.Duration;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import org.apache.flink.statefun.e2e.smoke.driver.Constants;
import org.apache.flink.statefun.e2e.smoke.driver.Ids;
import org.apache.flink.statefun.e2e.smoke.generated.Command;
import org.apache.flink.statefun.e2e.smoke.generated.Commands;
import org.apache.flink.statefun.e2e.smoke.generated.SourceCommand;
import org.apache.flink.statefun.e2e.smoke.generated.VerificationResult;
import org.apache.flink.statefun.sdk.Address;
import org.apache.flink.statefun.sdk.AsyncOperationResult;
import org.apache.flink.statefun.sdk.Context;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.reqreply.generated.TypedValue;
import org.apache.flink.statefun.sdk.state.PersistedValue;
/**
 * Interprets smoke-test {@link Command} messages for a stateful function instance.
 *
 * <p>An incoming message is either a {@link TypedValue} carrying a {@link SourceCommand} or a
 * {@link Commands} batch, or an {@link AsyncOperationResult} whose metadata holds the commands to
 * run once a previously registered async operation completes. Commands in a batch are executed in
 * order and may mutate the persisted counter, message other functions (directly, delayed, or via
 * egress), register async operations, or emit a {@link VerificationResult}.
 */
public final class CommandInterpreter {
  private final AsyncCompleter asyncCompleter;
  private final Ids ids;

  /** Delay applied to every sendAfter command; kept tiny so the e2e test completes quickly. */
  private static final Duration SEND_AFTER_DELAY = Duration.ofMillis(1);

  public CommandInterpreter(Ids ids) {
    this.asyncCompleter = new AsyncCompleter();
    asyncCompleter.start();
    this.ids = Objects.requireNonNull(ids);
  }

  /**
   * Entry point: dispatches on the runtime type of {@code message}.
   *
   * @throws IllegalStateException if the message is neither an async result nor a TypedValue
   * @throws IllegalArgumentException if the TypedValue carries an unknown typename
   */
  public void interpret(PersistedValue<Long> state, Context context, Object message) {
    if (message instanceof AsyncOperationResult) {
      // The follow-up commands were attached as metadata when the async operation was registered;
      // interpret them now, regardless of whether the operation succeeded or failed.
      @SuppressWarnings("unchecked")
      AsyncOperationResult<Commands, ?> res = (AsyncOperationResult<Commands, ?>) message;
      interpret(state, context, res.metadata());
      return;
    }
    if (!(message instanceof TypedValue)) {
      throw new IllegalStateException("Unexpected message: " + message);
    }
    TypedValue typedValue = (TypedValue) message;
    if (isTypeOf(typedValue, SOURCE_COMMANDS_TYPE)) {
      SourceCommand sourceCommand = unpackSourceCommand(typedValue);
      interpret(state, context, sourceCommand.getCommands());
    } else if (isTypeOf(typedValue, COMMANDS_TYPE)) {
      Commands commands = unpackCommands(typedValue);
      interpret(state, context, commands);
    } else {
      throw new IllegalArgumentException("Unknown message type " + typedValue.getTypename());
    }
  }

  /** Executes every command of the batch, in order. Unknown/empty commands are ignored. */
  private void interpret(PersistedValue<Long> state, Context context, Commands command) {
    for (Command cmd : command.getCommandList()) {
      if (cmd.hasIncrement()) {
        modifyState(state, context, cmd.getIncrement());
      } else if (cmd.hasAsyncOperation()) {
        registerAsyncOps(state, context, cmd.getAsyncOperation());
      } else if (cmd.hasSend()) {
        send(state, context, cmd.getSend());
      } else if (cmd.hasSendAfter()) {
        sendAfter(state, context, cmd.getSendAfter());
      } else if (cmd.hasSendEgress()) {
        sendEgress(state, context, cmd.getSendEgress());
      } else if (cmd.hasVerify()) {
        verify(state, context, cmd.getVerify());
      } else if (cmd.hasCancelSendAfter()) {
        cancelSendAfter(state, context, cmd.getCancelSendAfter());
      }
    }
  }

  /** Reports the current and expected state value for this function id to the verification sink. */
  private void verify(PersistedValue<Long> state, Context context, Command.Verify verify) {
    int selfId = Integer.parseInt(context.self().id());
    long actual = state.getOrDefault(0L);
    long expected = verify.getExpected();
    VerificationResult verificationResult =
        VerificationResult.newBuilder()
            .setId(selfId)
            .setActual(actual)
            .setExpected(expected)
            .build();
    context.send(Constants.VERIFICATION_RESULT, packVerificationResult(verificationResult));
  }

  /** Emits a placeholder record to the test egress. */
  private void sendEgress(
      @SuppressWarnings("unused") PersistedValue<Long> state,
      Context context,
      @SuppressWarnings("unused") Command.SendEgress sendEgress) {
    context.send(Constants.OUT, TypedValue.getDefaultInstance());
  }

  /** Sends a delayed sub-command batch, optionally attaching a cancellation token. */
  private void sendAfter(
      @SuppressWarnings("unused") PersistedValue<Long> state,
      Context context,
      Command.SendAfter send) {
    FunctionType functionType = Constants.FN_TYPE;
    String id = ids.idOf(send.getTarget());
    TypedValue subCommands = packCommands(send.getCommands());
    if (send.getCancellationToken().isEmpty()) {
      context.sendAfter(SEND_AFTER_DELAY, functionType, id, subCommands);
    } else {
      context.sendAfter(
          SEND_AFTER_DELAY, new Address(functionType, id), subCommands, send.getCancellationToken());
    }
  }

  /** Cancels a delayed message previously scheduled with a cancellation token. */
  private void cancelSendAfter(
      @SuppressWarnings("unused") PersistedValue<Long> state,
      Context context,
      Command.CancelSendAfter cancelSendAfter) {
    String token = cancelSendAfter.getCancellationToken();
    context.cancelDelayedMessage(token);
  }

  /** Sends a sub-command batch directly to the target function. */
  private void send(
      @SuppressWarnings("unused") PersistedValue<Long> state, Context context, Command.Send send) {
    FunctionType functionType = Constants.FN_TYPE;
    String id = ids.idOf(send.getTarget());
    context.send(functionType, id, packCommands(send.getCommands()));
  }

  /**
   * Registers an async operation whose completion re-delivers the resolved commands to this
   * function via the {@link AsyncOperationResult} branch of {@link #interpret}.
   */
  private void registerAsyncOps(
      @SuppressWarnings("unused") PersistedValue<Long> state,
      Context context,
      Command.AsyncOperation asyncOperation) {
    // BUG FIX: the branches were previously swapped — a failure-flagged operation was given a
    // successful future (and vice versa), so failure handling was never exercised.
    CompletableFuture<Boolean> future =
        asyncOperation.getFailure()
            ? asyncCompleter.failedFuture()
            : asyncCompleter.successfulFuture();
    Commands next = asyncOperation.getResolvedCommands();
    context.registerAsyncOperation(next, future);
  }

  /** Increments the persisted counter, initializing it to 1 on first use. */
  private void modifyState(
      PersistedValue<Long> state,
      @SuppressWarnings("unused") Context context,
      @SuppressWarnings("unused") Command.IncrementState incrementState) {
    state.updateAndGet(n -> n == null ? 1 : n + 1);
  }
}
| 5,933 |
0 | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke | Create_ds/flink-statefun/statefun-e2e-tests/statefun-smoke-e2e-embedded/src/main/java/org/apache/flink/statefun/e2e/smoke/embedded/FunctionProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.e2e.smoke.embedded;
import java.util.Objects;
import org.apache.flink.statefun.e2e.smoke.driver.Ids;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.StatefulFunction;
import org.apache.flink.statefun.sdk.StatefulFunctionProvider;
/** Supplies a fresh {@link CommandInterpreterFn} for every requested function type. */
public class FunctionProvider implements StatefulFunctionProvider {

  private final Ids ids;

  public FunctionProvider(Ids ids) {
    this.ids = Objects.requireNonNull(ids);
  }

  @Override
  public StatefulFunction functionOfType(FunctionType functionType) {
    // Each function instance gets its own interpreter, backed by the shared id mapping.
    return new CommandInterpreterFn(new CommandInterpreter(ids));
  }
}
| 5,934 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-launcher/src/main/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-launcher/src/main/java/org/apache/flink/statefun/flink/launcher/StatefulFunctionsClusterConfiguration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.launcher;
import static java.util.Objects.requireNonNull;
import java.util.Properties;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.flink.api.common.JobID;
import org.apache.flink.runtime.entrypoint.EntrypointClusterConfiguration;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
/**
 * Configuration for the {@link StatefulFunctionsClusterEntryPoint}.
 *
 * <p>This class was copied from Apache Flink.
 */
final class StatefulFunctionsClusterConfiguration extends EntrypointClusterConfiguration {

  // How (and whether) to restore the job from a savepoint.
  @Nonnull private final SavepointRestoreSettings restoreSettings;

  // Explicit job id, or null when the entry point should derive one.
  @Nullable private final JobID jobId;

  // Requested job parallelism (may be ExecutionConfig.PARALLELISM_DEFAULT).
  private final int parallelism;

  StatefulFunctionsClusterConfiguration(
      @Nonnull String configDir,
      @Nonnull Properties dynamicProperties,
      @Nonnull String[] args,
      @Nullable String hostname,
      int restPort,
      @Nonnull SavepointRestoreSettings savepointRestoreSettings,
      @Nullable JobID jobId,
      int parallelism) {
    super(configDir, dynamicProperties, args, hostname, restPort);
    this.restoreSettings = requireNonNull(savepointRestoreSettings, "savepointRestoreSettings");
    this.jobId = jobId;
    this.parallelism = parallelism;
  }

  @Nonnull
  SavepointRestoreSettings getSavepointRestoreSettings() {
    return restoreSettings;
  }

  @Nullable
  JobID getJobId() {
    return jobId;
  }

  public int getParallelism() {
    return parallelism;
  }
}
| 5,935 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-launcher/src/main/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-launcher/src/main/java/org/apache/flink/statefun/flink/launcher/StatefulFunctionsClusterConfigurationParserFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.launcher;
import static org.apache.flink.client.cli.CliFrontendParser.PARALLELISM_OPTION;
import static org.apache.flink.runtime.entrypoint.parser.CommandLineOptions.CONFIG_DIR_OPTION;
import static org.apache.flink.runtime.entrypoint.parser.CommandLineOptions.DYNAMIC_PROPERTY_OPTION;
import static org.apache.flink.runtime.entrypoint.parser.CommandLineOptions.HOST_OPTION;
import static org.apache.flink.runtime.entrypoint.parser.CommandLineOptions.REST_PORT_OPTION;
import java.util.Properties;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobID;
import org.apache.flink.client.cli.CliFrontendParser;
import org.apache.flink.runtime.entrypoint.FlinkParseException;
import org.apache.flink.runtime.entrypoint.parser.ParserResultFactory;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
/**
 * Parser factory which generates a {@link StatefulFunctionsClusterConfiguration} from a given list
 * of command line arguments.
 *
 * <p>This class was copied from Apache Flink.
 */
public class StatefulFunctionsClusterConfigurationParserFactory
    implements ParserResultFactory<StatefulFunctionsClusterConfiguration> {

  /** Optional "-jid/--job-id" flag; when absent the entry point derives a job id itself. */
  private static final Option JOB_ID_OPTION =
      Option.builder("jid")
          .longOpt("job-id")
          .required(false)
          .hasArg(true)
          .argName("job id")
          .desc("Job ID of the job to run.")
          .build();

  /** @return the parsed job id, or {@code null} when the option was not supplied. */
  @Nullable
  private static JobID getJobId(CommandLine commandLine) throws FlinkParseException {
    String jobId = commandLine.getOptionValue(JOB_ID_OPTION.getOpt());
    if (jobId == null) {
      return null;
    }
    try {
      return JobID.fromHexString(jobId);
    } catch (IllegalArgumentException e) {
      throw createFlinkParseException(JOB_ID_OPTION, e);
    }
  }

  private static FlinkParseException createFlinkParseException(Option option, Exception cause) {
    return new FlinkParseException(
        String.format("Failed to parse '--%s' option", option.getLongOpt()), cause);
  }

  @Override
  public Options getOptions() {
    final Options options = new Options();
    options.addOption(CONFIG_DIR_OPTION);
    options.addOption(REST_PORT_OPTION);
    options.addOption(JOB_ID_OPTION);
    options.addOption(DYNAMIC_PROPERTY_OPTION);
    options.addOption(PARALLELISM_OPTION);
    options.addOption(CliFrontendParser.SAVEPOINT_PATH_OPTION);
    options.addOption(CliFrontendParser.SAVEPOINT_ALLOW_NON_RESTORED_OPTION);
    return options;
  }

  @Override
  public StatefulFunctionsClusterConfiguration createResult(@Nonnull CommandLine commandLine)
      throws FlinkParseException {
    final String configDir = commandLine.getOptionValue(CONFIG_DIR_OPTION.getOpt());
    final Properties dynamicProperties =
        commandLine.getOptionProperties(DYNAMIC_PROPERTY_OPTION.getOpt());
    final int restPort = getRestPort(commandLine);
    final String hostname = commandLine.getOptionValue(HOST_OPTION.getOpt());
    final int parallelism = getParallelism(commandLine);
    final SavepointRestoreSettings savepointRestoreSettings =
        CliFrontendParser.createSavepointRestoreSettings(commandLine);
    final JobID jobId = getJobId(commandLine);
    return new StatefulFunctionsClusterConfiguration(
        configDir,
        dynamicProperties,
        commandLine.getArgs(),
        hostname,
        restPort,
        savepointRestoreSettings,
        jobId,
        parallelism);
  }

  /** @return the rest port, or -1 when not configured. */
  private int getRestPort(CommandLine commandLine) throws FlinkParseException {
    final String restPortString = commandLine.getOptionValue(REST_PORT_OPTION.getOpt(), "-1");
    try {
      return Integer.parseInt(restPortString);
    } catch (NumberFormatException e) {
      throw createFlinkParseException(REST_PORT_OPTION, e);
    }
  }

  /**
   * Parses and validates the parallelism option.
   *
   * @throws FlinkParseException if the value is not a number or is not a positive integer
   *     (PARALLELISM_DEFAULT is accepted as "not configured").
   */
  private int getParallelism(CommandLine commandLine) throws FlinkParseException {
    final String parallelismString =
        commandLine.getOptionValue(
            PARALLELISM_OPTION.getOpt(), String.valueOf(ExecutionConfig.PARALLELISM_DEFAULT));
    final int parallelism;
    try {
      parallelism = Integer.parseInt(parallelismString);
    } catch (NumberFormatException e) {
      throw createFlinkParseException(PARALLELISM_OPTION, e);
    }
    // FIX: report invalid values as FlinkParseException, consistent with every other parse
    // failure in this class, instead of leaking an unchecked IllegalArgumentException out of a
    // method declared to throw FlinkParseException.
    if (parallelism <= 0 && parallelism != ExecutionConfig.PARALLELISM_DEFAULT) {
      throw new FlinkParseException("Parallelism must be at least 1. Provided: " + parallelism);
    }
    return parallelism;
  }
}
| 5,936 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-launcher/src/main/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-launcher/src/main/java/org/apache/flink/statefun/flink/launcher/StatefulFunctionsJobGraphRetriever.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.launcher;
import static java.util.Objects.requireNonNull;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobID;
import org.apache.flink.client.program.PackagedProgram;
import org.apache.flink.client.program.PackagedProgramUtils;
import org.apache.flink.client.program.ProgramInvocationException;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.CoreOptions;
import org.apache.flink.runtime.entrypoint.component.JobGraphRetriever;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
import org.apache.flink.statefun.flink.core.StatefulFunctionsJob;
import org.apache.flink.statefun.flink.core.spi.Constants;
import org.apache.flink.statefun.flink.core.spi.ModuleSpecs;
import org.apache.flink.util.FlinkException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * {@link JobGraphRetriever} which creates the {@link JobGraph} from a class on the class path.
 *
 * <p>This class is based on a class present in Apache Flink but it sets the correct class path for
 * the child first classloader.
 */
final class StatefulFunctionsJobGraphRetriever implements JobGraphRetriever {

  private static final Logger LOG =
      LoggerFactory.getLogger(StatefulFunctionsJobGraphRetriever.class);

  private final JobID jobId;
  private final SavepointRestoreSettings savepointRestoreSettings;
  private final int parallelism;
  private final String[] programArguments;

  StatefulFunctionsJobGraphRetriever(
      JobID jobId,
      SavepointRestoreSettings savepointRestoreSettings,
      int parallelism,
      String[] programArguments) {
    this.jobId = requireNonNull(jobId, "jobId");
    this.savepointRestoreSettings =
        requireNonNull(savepointRestoreSettings, "savepointRestoreSettings");
    this.parallelism = parallelism;
    this.programArguments = requireNonNull(programArguments, "programArguments");
  }

  @Override
  public JobGraph retrieveJobGraph(Configuration configuration) throws FlinkException {
    final PackagedProgram program = createPackagedProgram();
    final int effectiveParallelism = resolveParallelism(parallelism, configuration);
    LOG.info(
        "Creating JobGraph for job {}, with parallelism {} and savepoint restore settings {}.",
        jobId,
        effectiveParallelism,
        savepointRestoreSettings);
    try {
      final JobGraph graph =
          PackagedProgramUtils.createJobGraph(
              program, configuration, effectiveParallelism, jobId, false);
      graph.setSavepointRestoreSettings(savepointRestoreSettings);
      return graph;
    } catch (Exception e) {
      throw new FlinkException("Could not create the JobGraph from the provided user code jar.", e);
    }
  }

  /** Collects every module artifact from the module directory into a user class path. */
  private static List<URL> obtainModuleAdditionalClassPath() {
    try {
      final List<URL> additionalClassPath = new ArrayList<>();
      for (ModuleSpecs.ModuleSpec spec : ModuleSpecs.fromPath(Constants.MODULE_DIRECTORY)) {
        for (URI artifact : spec.artifactUris()) {
          additionalClassPath.add(artifact.toURL());
        }
      }
      return additionalClassPath;
    } catch (IOException e) {
      throw new RuntimeException(
          "Unable to load modules from path " + Constants.MODULE_DIRECTORY, e);
    }
  }

  /** Wraps the launcher jar plus the module class path into a {@link PackagedProgram}. */
  private PackagedProgram createPackagedProgram() {
    final File launcherJar = new File(Constants.FLINK_JOB_JAR_PATH);
    if (!launcherJar.exists()) {
      throw new IllegalStateException("Unable to locate the launcher jar");
    }
    try {
      return PackagedProgram.newBuilder()
          .setJarFile(launcherJar)
          .setUserClassPaths(obtainModuleAdditionalClassPath())
          .setEntryPointClassName(StatefulFunctionsJob.class.getName())
          .setArguments(programArguments)
          .build();
    } catch (ProgramInvocationException e) {
      throw new RuntimeException("Unable to construct a packaged program", e);
    }
  }

  /** Falls back to the configured default parallelism when none was given on the command line. */
  private static int resolveParallelism(int parallelism, Configuration configuration) {
    if (parallelism == ExecutionConfig.PARALLELISM_DEFAULT) {
      return configuration.getInteger(CoreOptions.DEFAULT_PARALLELISM);
    }
    return parallelism;
  }
}
| 5,937 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-launcher/src/main/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-launcher/src/main/java/org/apache/flink/statefun/flink/launcher/StatefulFunctionsClusterEntryPoint.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.launcher;
import static java.util.Objects.requireNonNull;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nonnull;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.JobID;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.CoreOptions;
import org.apache.flink.runtime.entrypoint.ClusterEntrypoint;
import org.apache.flink.runtime.entrypoint.JobClusterEntrypoint;
import org.apache.flink.runtime.entrypoint.component.DefaultDispatcherResourceManagerComponentFactory;
import org.apache.flink.runtime.entrypoint.component.DispatcherResourceManagerComponentFactory;
import org.apache.flink.runtime.entrypoint.parser.CommandLineParser;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;
import org.apache.flink.runtime.resourcemanager.StandaloneResourceManagerFactory;
import org.apache.flink.runtime.util.EnvironmentInformation;
import org.apache.flink.runtime.util.JvmShutdownSafeguard;
import org.apache.flink.runtime.util.SignalHandler;
import org.apache.flink.statefun.flink.core.spi.Constants;
/** {@link JobClusterEntrypoint} which is started with a job in a predefined location. */
public final class StatefulFunctionsClusterEntryPoint extends JobClusterEntrypoint {

  // Returned when high availability is activated — presumably so the job id stays stable across
  // failover; see createJobIdForCluster. TODO(review): confirm against HA job-recovery semantics.
  public static final JobID ZERO_JOB_ID = new JobID(0, 0);

  @Nonnull private final JobID jobId;
  @Nonnull private final SavepointRestoreSettings savepointRestoreSettings;
  private final int parallelism;
  @Nonnull private final String[] programArguments;

  private StatefulFunctionsClusterEntryPoint(
      Configuration configuration,
      @Nonnull JobID jobId,
      @Nonnull SavepointRestoreSettings savepointRestoreSettings,
      int parallelism,
      @Nonnull String[] programArguments) {
    super(configuration);
    this.jobId = requireNonNull(jobId, "jobId");
    this.savepointRestoreSettings =
        requireNonNull(savepointRestoreSettings, "savepointRestoreSettings");
    this.programArguments = requireNonNull(programArguments, "programArguments");
    this.parallelism = parallelism;
  }

  public static void main(String[] args) {
    EnvironmentInformation.logEnvironmentInfo(
        LOG, StatefulFunctionsClusterEntryPoint.class.getSimpleName(), args);
    SignalHandler.register(LOG);
    JvmShutdownSafeguard.installAsShutdownHook(LOG);
    final CommandLineParser<StatefulFunctionsClusterConfiguration> commandLineParser =
        new CommandLineParser<>(new StatefulFunctionsClusterConfigurationParserFactory());
    StatefulFunctionsClusterConfiguration clusterConfiguration = null;
    try {
      clusterConfiguration = commandLineParser.parse(args);
    } catch (Exception e) {
      LOG.error("Could not parse command line arguments {}.", args, e);
      commandLineParser.printHelp(StatefulFunctionsClusterEntryPoint.class.getSimpleName());
      System.exit(1);
    }
    Configuration configuration = loadConfiguration(clusterConfiguration);
    addStatefulFunctionsConfiguration(configuration);
    setDefaultExecutionModeIfNotConfigured(configuration);
    StatefulFunctionsClusterEntryPoint entrypoint =
        new StatefulFunctionsClusterEntryPoint(
            configuration,
            resolveJobIdForCluster(
                Optional.ofNullable(clusterConfiguration.getJobId()), configuration),
            clusterConfiguration.getSavepointRestoreSettings(),
            clusterConfiguration.getParallelism(),
            clusterConfiguration.getArgs());
    ClusterEntrypoint.runClusterEntrypoint(entrypoint);
  }

  /** Uses the explicitly supplied job id when present, otherwise derives one (see below). */
  @VisibleForTesting
  @Nonnull
  static JobID resolveJobIdForCluster(Optional<JobID> optionalJobID, Configuration configuration) {
    return optionalJobID.orElseGet(() -> createJobIdForCluster(configuration));
  }

  /** Fixed id under HA, random id otherwise. */
  @Nonnull
  private static JobID createJobIdForCluster(Configuration globalConfiguration) {
    if (HighAvailabilityMode.isHighAvailabilityModeActivated(globalConfiguration)) {
      return ZERO_JOB_ID;
    } else {
      return JobID.generate();
    }
  }

  @VisibleForTesting
  static void setDefaultExecutionModeIfNotConfigured(Configuration configuration) {
    if (isNoExecutionModeConfigured(configuration)) {
      // In contrast to other places, the default for standalone job clusters is
      // ExecutionMode.DETACHED
      configuration.setString(
          ClusterEntrypoint.INTERNAL_CLUSTER_EXECUTION_MODE, ExecutionMode.DETACHED.toString());
    }
  }

  private static boolean isNoExecutionModeConfigured(Configuration configuration) {
    return configuration.getString(ClusterEntrypoint.INTERNAL_CLUSTER_EXECUTION_MODE, null) == null;
  }

  /** Ensures the StateFun package is always loaded parent-first by the user-code classloader. */
  private static void addStatefulFunctionsConfiguration(Configuration configuration) {
    // FIX: copy before mutating. The list returned by Configuration#get may be unmodifiable
    // (e.g. the option's default value), in which case add() would throw
    // UnsupportedOperationException.
    List<String> patterns =
        new ArrayList<>(
            configuration.get(CoreOptions.ALWAYS_PARENT_FIRST_LOADER_PATTERNS_ADDITIONAL));
    if (!patterns.contains(Constants.STATEFUL_FUNCTIONS_PACKAGE)) {
      patterns.add(Constants.STATEFUL_FUNCTIONS_PACKAGE);
    }
    configuration.set(CoreOptions.ALWAYS_PARENT_FIRST_LOADER_PATTERNS_ADDITIONAL, patterns);
  }

  @Override
  protected DispatcherResourceManagerComponentFactory
      createDispatcherResourceManagerComponentFactory(Configuration configuration) {
    return DefaultDispatcherResourceManagerComponentFactory.createJobComponentFactory(
        StandaloneResourceManagerFactory.getInstance(),
        new StatefulFunctionsJobGraphRetriever(
            jobId, savepointRestoreSettings, parallelism, programArguments));
  }
}
| 5,938 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-distribution/src/main/resources/META-INF | Create_ds/flink-statefun/statefun-flink/statefun-flink-distribution/src/main/resources/META-INF/licenses/LICENSE.protobuf-java | Copyright 2008 Google Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Code generated by the Protocol Buffer compiler is owned by the owner
of the input file used when generating it. This code is not
standalone and requires a support library to be linked with it. This
support library is itself covered by the above license. | 5,939 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/test/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/test/java/org/apache/flink/statefun/flink/common/ResourceLocatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import com.google.common.jimfs.Configuration;
import com.google.common.jimfs.Jimfs;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class ResourceLocatorTest {

  /** Runs every test against unix, macOS, and windows path semantics (via jimfs). */
  @Parameterized.Parameters
  public static Collection<Configuration> filesystemTypes() {
    return Arrays.asList(Configuration.unix(), Configuration.osX(), Configuration.windows());
  }

  private final FileSystem fileSystem;

  public ResourceLocatorTest(Configuration filesystemConfiguration) {
    this.fileSystem = Jimfs.newFileSystem(filesystemConfiguration);
  }

  @Test
  public void classPathExample() throws IOException {
    final Path firstModuleDir = createDirectoryWithAFile("first", "module.yaml");
    final Path secondModuleDir = createDirectoryWithAFile("second", "module.yaml");
    ClassLoader urlClassLoader = urlClassLoader(firstModuleDir, secondModuleDir);
    try (SetContextClassLoader ignored = new SetContextClassLoader(urlClassLoader)) {
      Iterable<URL> foundUrls = ResourceLocator.findNamedResources("classpath:module.yaml");
      assertThat(
          foundUrls,
          contains(
              url(firstModuleDir.resolve("module.yaml")),
              url(secondModuleDir.resolve("module.yaml"))));
    }
  }

  @Test
  public void classPathSingleResourceExample() {
    URL url = ResourceLocator.findNamedResource("classpath:dummy-file.txt");
    assertThat(url, notNullValue());
  }

  @Test
  public void absolutePathExample() throws IOException {
    Path modulePath = createDirectoryWithAFile("some-module", "module.yaml").resolve("module.yaml");
    URL url = ResourceLocator.findNamedResource(modulePath.toUri().toString());
    assertThat(url, is(url(modulePath)));
  }

  // FIX: renamed from the misspelled "nonAbosultePath"; JUnit discovers tests via @Test,
  // so no caller depends on the method name.
  @Test
  public void nonAbsolutePath() throws MalformedURLException {
    URL url = ResourceLocator.findNamedResource("/tmp/a.txt");
    assertThat(url, is(url("file:/tmp/a.txt")));
  }

  private URL url(@SuppressWarnings("SameParameterValue") String url) throws MalformedURLException {
    return URI.create(url).toURL();
  }

  /** Creates {@code basedir/filename} on the in-memory filesystem with dummy content. */
  private Path createDirectoryWithAFile(
      String basedir, @SuppressWarnings("SameParameterValue") String filename) throws IOException {
    final Path dir = fileSystem.getPath(basedir);
    Files.createDirectories(dir);
    Path file = dir.resolve(filename);
    Files.write(file, "hello world".getBytes(StandardCharsets.UTF_8));
    return dir;
  }

  private static ClassLoader urlClassLoader(Path... urlPath) {
    URL[] urls = Arrays.stream(urlPath).map(ResourceLocatorTest::url).toArray(URL[]::new);
    return new URLClassLoader(urls);
  }

  private static URL url(Path path) {
    try {
      return path.toUri().toURL();
    } catch (MalformedURLException e) {
      throw new RuntimeException(e);
    }
  }
}
| 5,940 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/test/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/test/java/org/apache/flink/statefun/flink/common/json/SelectorsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.json;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.hasEntry;
import static org.junit.Assert.assertThat;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonPointer;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.IntNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Test;
/**
 * Tests for {@link Selectors}: typed accessors that read values out of a Jackson
 * JSON tree at a given {@link JsonPointer}.
 */
public class SelectorsTest {

  public static final JsonPointer FOO_FIELD = JsonPointer.valueOf("/foo");

  private final ObjectMapper mapper = new ObjectMapper();

  @Test
  public void textAt() {
    ObjectNode node = newObject();
    node.put("foo", "bar");
    String value = Selectors.textAt(node, FOO_FIELD);
    assertThat(value, is("bar"));
  }

  @Test
  public void emptyOptionalTextAt() {
    ObjectNode node = newObject();
    Optional<String> value = Selectors.optionalTextAt(node, FOO_FIELD);
    assertThat(value, is(Optional.empty()));
  }

  @Test
  public void nonEmptyOptionalTextAt() {
    ObjectNode node = newObject();
    node.put("foo", "bar");
    Optional<String> value = Selectors.optionalTextAt(node, FOO_FIELD);
    assertThat(value, is(Optional.of("bar")));
  }

  @Test
  public void intAt() {
    // use the shared newObject() helper, consistent with the other tests
    ObjectNode node = newObject();
    node.put("foo", 1);
    int value = Selectors.integerAt(node, FOO_FIELD);
    assertThat(value, is(1));
  }

  @Test
  public void durationAt() {
    ObjectNode node = newObject();
    node.put("foo", "30s");
    Duration value = Selectors.durationAt(node, FOO_FIELD);
    assertThat(value, is(Duration.ofSeconds(30)));
  }

  @Test
  public void optionalDurationAt() {
    ObjectNode node = newObject();
    node.put("foo", "30s");
    Optional<Duration> value = Selectors.optionalDurationAt(node, FOO_FIELD);
    assertThat(value, is(Optional.of(Duration.ofSeconds(30))));
  }

  @Test
  public void longAt() {
    ObjectNode node = newObject();
    node.put("foo", 100_000L);
    long value = Selectors.longAt(node, FOO_FIELD);
    assertThat(value, is(100_000L));
  }

  @Test
  public void optionalLongAt() {
    ObjectNode node = newObject();
    node.put("foo", 100_000L);
    OptionalLong value = Selectors.optionalLongAt(node, FOO_FIELD);
    assertThat(value, is(OptionalLong.of(100_000L)));
  }

  @Test
  public void listAt() {
    ObjectNode node = newObject();
    node.putArray("foo").add(1).add(2).add(3);
    Iterable<? extends JsonNode> value = Selectors.listAt(node, FOO_FIELD);
    assertThat(value, contains(new IntNode(1), new IntNode(2), new IntNode(3)));
  }

  @Test
  public void textListAt() {
    ObjectNode node = newObject();
    node.putArray("foo").add("hello").add("world");
    List<String> value = Selectors.textListAt(node, FOO_FIELD);
    assertThat(value, contains("hello", "world"));
  }

  @Test
  public void propertiesAt() {
    ObjectNode node = newObject();
    node.putArray("foo").add(newKvObject("k1", "v1")).add(newKvObject("k2", "v2"));
    Map<String, String> value = Selectors.propertiesAt(node, FOO_FIELD);
    assertThat(value, allOf(hasEntry("k1", "v1"), hasEntry("k2", "v2")));
  }

  @Test
  public void longPropertiesAt() {
    ObjectNode node = newObject();
    node.putArray("foo").add(newKvObject("k1", 91L)).add(newKvObject("k2", 1108L));
    Map<String, Long> value = Selectors.longPropertiesAt(node, FOO_FIELD);
    assertThat(value, allOf(hasEntry("k1", 91L), hasEntry("k2", 1108L)));
  }

  /** Creates an empty JSON object node. */
  private ObjectNode newObject() {
    return mapper.createObjectNode();
  }

  /** Creates a single-entry JSON object {key: value} with a string value. */
  private ObjectNode newKvObject(String key, String value) {
    return newObject().put(key, value);
  }

  /** Creates a single-entry JSON object {key: value} with a long value. */
  private ObjectNode newKvObject(String key, long value) {
    return newObject().put(key, value);
  }
}
| 5,941 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/test/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/test/java/org/apache/flink/statefun/flink/common/protobuf/ProtobufSerializerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.protobuf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import com.google.protobuf.Message;
import java.io.IOException;
import org.apache.flink.core.memory.DataInputDeserializer;
import org.apache.flink.core.memory.DataOutputSerializer;
import org.apache.flink.statefun.flink.common.generated.ProtobufSerializerSnapshot;
import org.apache.flink.statefun.flink.common.protobuf.generated.TestProtos.SimpleMessage;
import org.junit.Test;
public class ProtobufSerializerTest {
SimpleMessage originalMessage = SimpleMessage.newBuilder().setName("bob").build();
@Test
public void roundTrip() throws IOException {
SimpleMessage message = roundTrip(SimpleMessage.class, originalMessage);
assertThat(message, is(originalMessage));
}
@Test
public void deserializeCopiedMessage() throws IOException {
DataInputDeserializer in = serialize(SimpleMessage.class, originalMessage);
ProtobufSerializer<SimpleMessage> serializer =
ProtobufSerializer.forMessageGeneratedClass(SimpleMessage.class);
DataOutputSerializer out = new DataOutputSerializer(32);
serializer.copy(in, out);
SimpleMessage message = deserialize(SimpleMessage.class, out);
assertThat(message, is(originalMessage));
}
@Test
public void testSnapshot() {
ProtobufSerializer<SimpleMessage> serializer =
ProtobufSerializer.forMessageGeneratedClass(SimpleMessage.class);
ProtobufSerializerSnapshot snapshot = serializer.snapshot();
assertThat(snapshot.getGeneratedJavaName(), is(SimpleMessage.class.getName()));
assertThat(snapshot.getMessageName(), is(SimpleMessage.getDescriptor().getFullName()));
assertThat(snapshot.getDescriptorSet(), notNullValue());
}
@Test
public void duplicatedSerializerCanDeserialize() throws IOException {
ProtobufSerializer<SimpleMessage> serializer =
ProtobufSerializer.forMessageGeneratedClass(SimpleMessage.class);
DataOutputSerializer out = new DataOutputSerializer(512);
serializer.serialize(originalMessage, out);
DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
SimpleMessage foo = serializer.duplicate().deserialize(in);
assertThat(foo, is(originalMessage));
}
@SuppressWarnings("SameParameterValue")
private static <M extends Message> M roundTrip(Class<M> messageType, M original)
throws IOException {
DataInputDeserializer source = serialize(messageType, original);
return deserialize(messageType, source);
}
@SafeVarargs
private static <M extends Message> DataInputDeserializer serialize(Class<M> type, M... items)
throws IOException {
ProtobufSerializer<M> serializer = ProtobufSerializer.forMessageGeneratedClass(type);
DataOutputSerializer out = new DataOutputSerializer(512);
for (Object message : items) {
serializer.serialize(type.cast(message), out);
}
return new DataInputDeserializer(out.getCopyOfBuffer());
}
@SuppressWarnings("SameParameterValue")
private static <M extends Message> M deserialize(Class<M> type, DataOutputSerializer target)
throws IOException {
DataInputDeserializer source = new DataInputDeserializer(target.getCopyOfBuffer());
return deserialize(type, source);
}
private static <M extends Message> M deserialize(Class<M> type, DataInputDeserializer source)
throws IOException {
ProtobufSerializer<M> serializer = ProtobufSerializer.forMessageGeneratedClass(type);
return serializer.deserialize(source);
}
}
| 5,942 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/test/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/test/java/org/apache/flink/statefun/flink/common/protobuf/ProtobufTypeSerializerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.protobuf;
import org.apache.flink.api.common.typeutils.SerializerTestBase;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.statefun.flink.common.protobuf.generated.TestProtos;
import org.junit.Ignore;
import org.junit.Test;
/** Runs Flink's generic {@link SerializerTestBase} suite against {@link ProtobufTypeSerializer}. */
public class ProtobufTypeSerializerTest extends SerializerTestBase<TestProtos.SimpleMessage> {

  @Override
  protected TypeSerializer<TestProtos.SimpleMessage> createSerializer() {
    return new ProtobufTypeSerializer<>(TestProtos.SimpleMessage.class);
  }

  // The base class's instantiation test is skipped — presumably because generated
  // protobuf messages cannot be created via a no-arg instantiation path. TODO confirm.
  @Ignore
  @Test()
  @Override
  public void testInstantiate() {
    // do nothing.
  }

  @Override
  protected int getLength() {
    // -1 tells the test base that records are variable-length
    return -1;
  }

  @Override
  protected Class<TestProtos.SimpleMessage> getTypeClass() {
    return TestProtos.SimpleMessage.class;
  }

  // Test fixtures: a few distinct messages for round-trip/equality checks.
  @Override
  protected TestProtos.SimpleMessage[] getTestData() {
    return new TestProtos.SimpleMessage[] {
      TestProtos.SimpleMessage.newBuilder().setName("a").build(),
      TestProtos.SimpleMessage.newBuilder().setName("b").build(),
      TestProtos.SimpleMessage.newBuilder().setName("c").build(),
      TestProtos.SimpleMessage.newBuilder().setName("d").build()
    };
  }
}
| 5,943 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/UnimplementedTypeInfo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
/**
 * A placeholder {@link TypeInformation} for positions where type information is required
 * structurally but never actually queried. Type-class/key queries throw
 * {@link UnsupportedOperationException}; {@link #createSerializer} hands out an
 * {@link UnimplementedTypeSerializer} whose operations likewise throw.
 */
public final class UnimplementedTypeInfo<T> extends TypeInformation<T> {

  private static final long serialVersionUID = 1;

  @Override
  public boolean isBasicType() {
    return false;
  }

  @Override
  public boolean isTupleType() {
    return false;
  }

  @Override
  public int getArity() {
    return 0;
  }

  @Override
  public int getTotalFields() {
    return 0;
  }

  @Override
  public Class<T> getTypeClass() {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean isKeyType() {
    throw new UnsupportedOperationException();
  }

  @Override
  public TypeSerializer<T> createSerializer(ExecutionConfig executionConfig) {
    return new UnimplementedTypeSerializer<>();
  }

  @Override
  public String toString() {
    return "UnimplementedTypeInfo";
  }

  @Override
  public boolean equals(Object o) {
    // identity equality: two distinct instances never compare equal
    return o == this;
  }

  @Override
  public int hashCode() {
    // arbitrary constant; consistent with equals (equal objects are identical)
    return 1337;
  }

  @Override
  public boolean canEqual(Object o) {
    // NOTE(review): canEqual accepts any UnimplementedTypeInfo although equals() is
    // identity-based — confirm this asymmetry is intended.
    return o instanceof UnimplementedTypeInfo;
  }
}
| 5,944 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/ResourceLocator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import javax.annotation.Nullable;
/**
 * Resolves resource names to {@link URL}s.
 *
 * <p>Names with the {@code classpath:} scheme are looked up on the classpath (and may resolve
 * to several URLs); any other name is treated either as an absolute URI or as a plain file path.
 */
public final class ResourceLocator {

  /** Non-instantiable utility class. */
  private ResourceLocator() {}

  public static Iterable<URL> findResources(String namedResource) {
    if (!namedResource.startsWith("classpath:")) {
      // a single (possibly null) URL wrapped in a one-element list
      return Collections.singletonList(findNamedResource(namedResource));
    }
    return findNamedResources(namedResource);
  }

  /** Locates a resource with a given name in the classpath or url path. */
  public static Iterable<URL> findNamedResources(String name) {
    final URI resourceUri = URI.create(name);
    if (!isClasspath(resourceUri)) {
      throw new IllegalArgumentException(
          "unsupported or missing schema <"
              + resourceUri.getScheme()
              + "> classpath: schema is supported.");
    }
    return classpathResources(resourceUri);
  }

  /**
   * Resolves a single resource: the first classpath match for {@code classpath:} names
   * (or {@code null} when nothing matches), otherwise an absolute or {@code file:} URL.
   */
  public static URL findNamedResource(final String name) {
    final URI resourceUri = URI.create(name);
    if (isClasspath(resourceUri)) {
      for (URL match : classpathResources(resourceUri)) {
        return match;
      }
      return null;
    }
    try {
      // a URI without a scheme is assumed to name a file
      return resourceUri.isAbsolute() ? resourceUri.toURL() : new URL("file:" + name);
    } catch (MalformedURLException e) {
      throw new IllegalArgumentException(e);
    }
  }

  private static boolean isClasspath(URI uri) {
    final String scheme = uri.getScheme();
    return scheme != null && scheme.equalsIgnoreCase("classpath");
  }

  private static Iterable<URL> classpathResources(URI uri) {
    // prefer the thread's context class loader, fall back to this class's loader
    ClassLoader loader = Thread.currentThread().getContextClassLoader();
    if (loader == null) {
      loader = ResourceLocator.class.getClassLoader();
    }
    try {
      return asIterable(loader.getResources(uri.getSchemeSpecificPart()));
    } catch (IOException e) {
      throw new IllegalStateException(e);
    }
  }

  /** Adapts a one-shot {@link Enumeration} to the {@link Iterable} interface, lazily. */
  private static <T> Iterable<T> asIterable(Enumeration<T> enumeration) {
    return () ->
        new Iterator<T>() {
          @Override
          public boolean hasNext() {
            return enumeration.hasMoreElements();
          }

          @Override
          public T next() {
            return enumeration.nextElement();
          }
        };
  }
}
| 5,945 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/UnimplementedTypeSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
/**
 * The serializer counterpart of {@code UnimplementedTypeInfo}: a placeholder whose
 * (de)serialization operations all throw {@link UnsupportedOperationException}.
 */
final class UnimplementedTypeSerializer<T> extends TypeSerializer<T> {

  private static final long serialVersionUID = 1L;

  @Override
  public boolean isImmutableType() {
    return false;
  }

  @Override
  public TypeSerializer<T> duplicate() {
    throw new UnsupportedOperationException();
  }

  @Override
  public T createInstance() {
    throw new UnsupportedOperationException();
  }

  @Override
  public T copy(T t) {
    throw new UnsupportedOperationException();
  }

  @Override
  public T copy(T t, T t1) {
    throw new UnsupportedOperationException();
  }

  @Override
  public int getLength() {
    // NOTE(review): Flink's TypeSerializer contract uses -1 for variable-length records;
    // 0 here is likely harmless since this serializer is never used for real data — confirm.
    return 0;
  }

  @Override
  public void serialize(T t, DataOutputView dataOutputView) {
    throw new UnsupportedOperationException();
  }

  @Override
  public T deserialize(DataInputView dataInputView) {
    throw new UnsupportedOperationException();
  }

  @Override
  public T deserialize(T t, DataInputView dataInputView) {
    throw new UnsupportedOperationException();
  }

  @Override
  public void copy(DataInputView dataInputView, DataOutputView dataOutputView) {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean equals(Object o) {
    // identity equality; consistent with the constant hashCode below
    return o == this;
  }

  @Override
  public int hashCode() {
    return 7;
  }

  @Override
  public TypeSerializerSnapshot<T> snapshotConfiguration() {
    throw new UnsupportedOperationException();
  }
}
| 5,946 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/SetContextClassLoader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common;
import java.io.Closeable;
import javax.annotation.Nonnull;
/**
 * A scoped swap of the current thread's context class loader.
 *
 * <p>Construction installs the given loader; {@link #close()} restores whatever loader
 * was installed before, making this suitable for try-with-resources blocks.
 */
public final class SetContextClassLoader implements Closeable {

  /** The loader that was active on the current thread before the swap. */
  private final ClassLoader originalClassLoader;

  /** Installs the class loader that loaded {@code o}'s class. */
  public SetContextClassLoader(@Nonnull Object o) {
    this(o.getClass().getClassLoader());
  }

  /** Remembers the current context class loader, then installs {@code classLoader}. */
  public SetContextClassLoader(@Nonnull ClassLoader classLoader) {
    final Thread currentThread = Thread.currentThread();
    this.originalClassLoader = currentThread.getContextClassLoader();
    currentThread.setContextClassLoader(classLoader);
  }

  @Override
  public void close() {
    Thread.currentThread().setContextClassLoader(originalClassLoader);
  }
}
| 5,947 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/types/TypedValueUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.types;
import com.google.protobuf.Descriptors;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import org.apache.flink.statefun.sdk.reqreply.generated.TypedValue;
public final class TypedValueUtil {
private TypedValueUtil() {}
public static boolean isProtobufTypeOf(
TypedValue typedValue, Descriptors.Descriptor messageDescriptor) {
return typedValue.getTypename().equals(protobufTypeUrl(messageDescriptor));
}
public static TypedValue packProtobufMessage(Message protobufMessage) {
return TypedValue.newBuilder()
.setTypename(protobufTypeUrl(protobufMessage.getDescriptorForType()))
.setHasValue(true)
.setValue(protobufMessage.toByteString())
.build();
}
public static <PB extends Message> PB unpackProtobufMessage(
TypedValue typedValue, Parser<PB> protobufMessageParser) {
try {
return protobufMessageParser.parseFrom(typedValue.getValue());
} catch (InvalidProtocolBufferException e) {
throw new IllegalStateException(e);
}
}
private static String protobufTypeUrl(Descriptors.Descriptor messageDescriptor) {
return "type.googleapis.com/" + messageDescriptor.getFullName();
}
}
| 5,948 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/json/NamespaceNamePair.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.json;
import java.util.Objects;
/** An immutable (namespace, name) pair parsed from a {@code <namespace>/<name>} string. */
public final class NamespaceNamePair {
  private final String namespace;
  private final String name;

  /**
   * Parses {@code namespaceAndName} of the form {@code <namespace>/<name>}. The split is at
   * the LAST slash, so the namespace itself may contain slashes (e.g. "a/b/c" -> ("a/b", "c")).
   *
   * @param namespaceAndName the combined string to parse; must not be null
   * @return the parsed pair
   * @throws IllegalArgumentException if there is no slash, or namespace/name would be empty
   * @throws NullPointerException if {@code namespaceAndName} is null
   */
  public static NamespaceNamePair from(String namespaceAndName) {
    Objects.requireNonNull(namespaceAndName);
    // char overload avoids the single-character-String search
    final int pos = namespaceAndName.lastIndexOf('/');
    // pos <= 0 rejects "no slash" and an empty namespace; the second check rejects an empty name
    if (pos <= 0 || pos == namespaceAndName.length() - 1) {
      throw new IllegalArgumentException(
          namespaceAndName + " does not conform to the <namespace>/<name> format");
    }
    String namespace = namespaceAndName.substring(0, pos);
    String name = namespaceAndName.substring(pos + 1);
    return new NamespaceNamePair(namespace, name);
  }

  private NamespaceNamePair(String namespace, String name) {
    this.namespace = Objects.requireNonNull(namespace);
    this.name = Objects.requireNonNull(name);
  }

  public String namespace() {
    return namespace;
  }

  public String name() {
    return name;
  }

  @Override
  public String toString() {
    return namespace + "/" + name;
  }
}
| 5,949 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/json/Selectors.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.json;
import java.time.Duration;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonPointer;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.util.TimeUtils;
/**
 * Static helpers that read typed values out of a Jackson JSON tree at a given
 * {@link JsonPointer}.
 *
 * <p>Required accessors (e.g. {@link #textAt}) throw {@link MissingKeyException} when the
 * pointer does not resolve; optional accessors (e.g. {@link #optionalTextAt}) return an
 * empty Optional (or empty collection) instead. Both flavors throw
 * {@link WrongTypeException} when a present value has an unexpected JSON type.
 */
public final class Selectors {

  /** Non-instantiable utility class. */
  private Selectors() {}

  public static Optional<ObjectNode> optionalObjectAt(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      return Optional.empty();
    }
    if (!node.isObject()) {
      throw new WrongTypeException(pointer, "not an object");
    }
    return Optional.of((ObjectNode) node);
  }

  public static String textAt(JsonNode node, JsonPointer pointer) {
    node = dereference(node, pointer);
    if (!node.isTextual()) {
      throw new WrongTypeException(pointer, "not a string");
    }
    return node.asText();
  }

  public static Optional<String> optionalTextAt(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      return Optional.empty();
    }
    if (!node.isTextual()) {
      throw new WrongTypeException(pointer, "not a string");
    }
    return Optional.of(node.asText());
  }

  /** Reads a duration in Flink's duration-string notation, e.g. "30s". */
  public static Duration durationAt(JsonNode node, JsonPointer pointer) {
    node = dereference(node, pointer);
    if (!node.isTextual()) {
      throw new WrongTypeException(pointer, "not a duration");
    }
    try {
      return TimeUtils.parseDuration(node.asText());
    } catch (IllegalArgumentException ignore) {
      // unparsable duration strings are reported as a type error
      throw new WrongTypeException(pointer, "not a duration");
    }
  }

  public static Optional<Duration> optionalDurationAt(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      return Optional.empty();
    }
    if (!node.isTextual()) {
      throw new WrongTypeException(pointer, "not a duration");
    }
    try {
      Duration duration = TimeUtils.parseDuration(node.asText());
      return Optional.of(duration);
    } catch (IllegalArgumentException ignore) {
      throw new WrongTypeException(pointer, "not a duration");
    }
  }

  public static int integerAt(JsonNode node, JsonPointer pointer) {
    node = dereference(node, pointer);
    if (!node.isInt()) {
      throw new WrongTypeException(pointer, "not an integer");
    }
    return node.asInt();
  }

  /** Reads a long; integer-typed JSON numbers are accepted and widened. */
  public static long longAt(JsonNode node, JsonPointer pointer) {
    node = dereference(node, pointer);
    if (!node.isLong() && !node.isInt()) {
      throw new WrongTypeException(pointer, "not a long");
    }
    return node.asLong();
  }

  public static OptionalLong optionalLongAt(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      return OptionalLong.empty();
    }
    if (!node.isLong() && !node.isInt()) {
      throw new WrongTypeException(pointer, "not a long");
    }
    return OptionalLong.of(node.asLong());
  }

  public static OptionalInt optionalIntegerAt(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      return OptionalInt.empty();
    }
    if (!node.isInt()) {
      throw new WrongTypeException(pointer, "not an integer");
    }
    return OptionalInt.of(node.asInt());
  }

  /** Returns the array at {@code pointer}, or an empty list when the key is absent. */
  public static Iterable<? extends JsonNode> listAt(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      return Collections.emptyList();
    }
    if (!node.isArray()) {
      throw new WrongTypeException(pointer, "not a list");
    }
    return node;
  }

  /** Returns the textual elements of the array at {@code pointer}; non-text elements are skipped. */
  public static List<String> textListAt(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      return Collections.emptyList();
    }
    if (!node.isArray()) {
      throw new WrongTypeException(pointer, "not a list");
    }
    return StreamSupport.stream(node.spliterator(), false)
        .filter(JsonNode::isTextual)
        .map(JsonNode::asText)
        .collect(Collectors.toList());
  }

  /**
   * Reads a list of single-entry objects ([{k1: v1}, {k2: v2}, ...]) into an ordered map of
   * string values. Only the first field of each list element is read.
   */
  public static Map<String, String> propertiesAt(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      return Collections.emptyMap();
    }
    if (!node.isArray()) {
      throw new WrongTypeException(pointer, "not a key-value list");
    }
    Map<String, String> properties = new LinkedHashMap<>();
    for (JsonNode listElement : node) {
      Iterator<Map.Entry<String, JsonNode>> fields = listElement.fields();
      if (!fields.hasNext()) {
        throw new WrongTypeException(pointer, "not a key-value list");
      }
      Map.Entry<String, JsonNode> field = fields.next();
      properties.put(field.getKey(), field.getValue().asText());
    }
    return properties;
  }

  /**
   * Like {@link #propertiesAt}, but values must be JSON longs/ints; anything else is a
   * {@link WrongTypeException}.
   */
  public static Map<String, Long> longPropertiesAt(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      return Collections.emptyMap();
    }
    if (!node.isArray()) {
      throw new WrongTypeException(pointer, "not a key-value list");
    }
    Map<String, Long> longProperties = new LinkedHashMap<>();
    for (JsonNode listElement : node) {
      Iterator<Map.Entry<String, JsonNode>> fields = listElement.fields();
      if (!fields.hasNext()) {
        throw new WrongTypeException(pointer, "not a key-value list");
      }
      Map.Entry<String, JsonNode> field = fields.next();
      if (!field.getValue().isLong() && !field.getValue().isInt()) {
        throw new WrongTypeException(
            pointer,
            "value for key-value pair at "
                + field.getKey()
                + " is not a long: "
                + field.getValue());
      }
      longProperties.put(field.getKey(), field.getValue().asLong());
    }
    return longProperties;
  }

  /** Resolves {@code pointer}, throwing {@link MissingKeyException} when it does not exist. */
  private static JsonNode dereference(JsonNode node, JsonPointer pointer) {
    node = node.at(pointer);
    if (node.isMissingNode()) {
      throw new MissingKeyException(pointer);
    }
    return node;
  }
}
| 5,950 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/json/WrongTypeException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.json;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonPointer;
/**
 * Thrown when a JSON node exists at a given pointer but does not have the expected type
 * (e.g. a scalar where a key-value list was required).
 */
public class WrongTypeException extends RuntimeException {
  private static final long serialVersionUID = 1;

  public WrongTypeException(JsonPointer pointer, String message) {
    super(describe(pointer, message));
  }

  // Keeps the exact historical message format: "Wrong type for key <pointer> <detail>".
  private static String describe(JsonPointer pointer, String message) {
    return "Wrong type for key " + pointer + " " + message;
  }
}
| 5,951 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/json/MissingKeyException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.json;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonPointer;
/** Thrown when a required JSON key (addressed by a {@link JsonPointer}) is absent. */
public class MissingKeyException extends RuntimeException {
  private static final long serialVersionUID = 1;

  public MissingKeyException(JsonPointer pointer) {
    // pointer.toString() is called explicitly, preserving an NPE on a null pointer argument.
    super(describe(pointer));
  }

  private static String describe(JsonPointer pointer) {
    return "missing key " + pointer.toString();
  }
}
| 5,952 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/json/StateFunObjectMapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.json;
import java.io.IOException;
import java.time.Duration;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonGenerator;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonParser;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.*;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.module.SimpleModule;
import org.apache.flink.statefun.sdk.TypeName;
import org.apache.flink.util.TimeUtils;
/**
 * Factory for the Jackson {@link ObjectMapper} used throughout StateFun.
 *
 * <p>The returned mapper ignores unknown JSON properties and registers custom (de)serializers
 * for {@link Duration} (Flink's human-readable duration format, e.g. {@code "5 min"}) and
 * {@link TypeName} (parsed from its canonical {@code <namespace>/<name>} string form).
 */
public final class StateFunObjectMapper {

  /** Static utility; not instantiable. */
  private StateFunObjectMapper() {}

  /**
   * Creates a fully configured {@link ObjectMapper}.
   *
   * @return a new mapper instance; mappers are mutable, so callers should not share one across
   *     differently-configured call sites.
   */
  public static ObjectMapper create() {
    final ObjectMapper mapper =
        new ObjectMapper().configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    final SimpleModule module = new SimpleModule("statefun");
    module.addSerializer(Duration.class, new DurationJsonSerializer());
    module.addDeserializer(Duration.class, new DurationJsonDeserializer());
    module.addDeserializer(TypeName.class, new TypeNameJsonDeserializer());
    mapper.registerModule(module);
    return mapper;
  }

  /** Parses durations in Flink's textual format via {@link TimeUtils#parseDuration}. */
  private static final class DurationJsonDeserializer extends JsonDeserializer<Duration> {
    @Override
    public Duration deserialize(
        JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
      return TimeUtils.parseDuration(jsonParser.getText());
    }
  }

  /** Writes durations with the highest unit that represents them exactly, e.g. "2 h". */
  private static final class DurationJsonSerializer extends JsonSerializer<Duration> {
    @Override
    public void serialize(
        Duration duration, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
        throws IOException {
      jsonGenerator.writeString(TimeUtils.formatWithHighestUnit(duration));
    }
  }

  /** Parses a {@link TypeName} from its canonical string form. */
  private static final class TypeNameJsonDeserializer extends JsonDeserializer<TypeName> {
    @Override
    public TypeName deserialize(
        JsonParser jsonParser, DeserializationContext deserializationContext) throws IOException {
      return TypeName.parseFrom(jsonParser.getText());
    }
  }
}
| 5,953 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/protobuf/ProtobufReflectionUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.protobuf;
import com.google.protobuf.DescriptorProtos.FileDescriptorSet;
import com.google.protobuf.DescriptorProtos.FileDescriptorSet.Builder;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FileDescriptor;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;
/**
 * Reflection helpers over Protobuf generated message classes: extracting their {@link Parser},
 * their {@link Descriptor}, and the transitive {@link FileDescriptorSet} needed to reconstruct
 * the schema elsewhere.
 */
public final class ProtobufReflectionUtil {

  private ProtobufReflectionUtil() {}

  /**
   * Obtains the {@link Parser} of a generated message class by invoking its static
   * {@code parser()} method.
   *
   * @throws IllegalStateException if the method does not return a {@link Parser}.
   */
  @SuppressWarnings("unchecked")
  public static <M extends Message> Parser<M> protobufParser(Class<M> messageClass) {
    Object parser = getParserFromGeneratedMessage(messageClass);
    if (!(parser instanceof Parser)) {
      throw new IllegalStateException(
          "was expecting a protobuf parser to be returned from the static parser() method on the type "
              + messageClass
              + " but instead got "
              + parser);
    }
    return (Parser<M>) parser;
  }

  /**
   * Collects the message's file descriptor and all of its transitive dependencies into a single
   * {@link FileDescriptorSet}.
   */
  static FileDescriptorSet protoFileDescriptorSet(Descriptor descriptor) {
    Set<FileDescriptor> descriptors = new HashSet<>();
    descriptors.add(descriptor.getFile());
    addDependenciesRecursively(descriptors, descriptor.getFile());
    Builder fileDescriptorSet = FileDescriptorSet.newBuilder();
    for (FileDescriptor d : descriptors) {
      fileDescriptorSet.addFile(d.toProto());
    }
    return fileDescriptorSet.build();
  }

  /** extract the {@linkplain Descriptor} for the generated message type. */
  static <M extends Message> Descriptor protobufDescriptor(Class<M> type) {
    try {
      Method getDescriptor = type.getDeclaredMethod("getDescriptor");
      // getDescriptor() is static, so the receiver argument is null.
      return (Descriptor) getDescriptor.invoke(null);
    } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
      throw new IllegalStateException(
          "unable to obtain protobuf type fileDescriptorSet for " + type, e);
    }
  }

  /**
   * extracts the {@linkplain Parser} implementation for that type. see:
   * https://developers.google.com/protocol-buffers/docs/reference/java-generated
   */
  private static <M extends Message> Object getParserFromGeneratedMessage(Class<M> messageClass) {
    try {
      Method parserMethod = messageClass.getDeclaredMethod("parser");
      // parser() is static, so the receiver argument is null. (The previous code passed the
      // Method object itself, which only worked because receivers are ignored for statics.)
      return parserMethod.invoke(null);
    } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) {
      throw new RuntimeException(e);
    }
  }

  /** Depth-first walk over file-descriptor dependencies; {@code visited} prevents revisits. */
  private static void addDependenciesRecursively(
      Set<FileDescriptor> visited, FileDescriptor descriptor) {
    for (FileDescriptor dependency : descriptor.getDependencies()) {
      if (visited.add(dependency)) {
        addDependenciesRecursively(visited, dependency);
      }
    }
  }
}
| 5,954 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/protobuf/ProtobufTypeInformation.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.protobuf;
import com.google.protobuf.Message;
import java.util.Objects;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
/**
 * Flink {@link TypeInformation} for a Protobuf generated message class, backed by
 * {@link ProtobufTypeSerializer}.
 */
public class ProtobufTypeInformation<M extends Message> extends TypeInformation<M> {
  private static final long serialVersionUID = 1L;

  private final Class<M> messageTypeClass;

  public ProtobufTypeInformation(Class<M> messageTypeClass) {
    this.messageTypeClass = Objects.requireNonNull(messageTypeClass);
  }

  @Override
  public boolean isBasicType() {
    return false;
  }

  @Override
  public boolean isTupleType() {
    return false;
  }

  @Override
  public int getArity() {
    return 0;
  }

  @Override
  public int getTotalFields() {
    return 0;
  }

  @Override
  public Class<M> getTypeClass() {
    return messageTypeClass;
  }

  @Override
  public boolean isKeyType() {
    return false;
  }

  @Override
  public TypeSerializer<M> createSerializer(ExecutionConfig config) {
    return new ProtobufTypeSerializer<>(messageTypeClass);
  }

  @Override
  public String toString() {
    return "ProtobufTypeInformation(" + messageTypeClass + ")";
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    if (!(o instanceof ProtobufTypeInformation) || o.getClass() != getClass()) {
      return false;
    }
    ProtobufTypeInformation<?> other = (ProtobufTypeInformation<?>) o;
    return messageTypeClass.equals(other.messageTypeClass);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(messageTypeClass);
  }

  @Override
  public boolean canEqual(Object obj) {
    return obj instanceof ProtobufTypeInformation;
  }
}
| 5,955 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/protobuf/OutputStreamView.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.protobuf;
import java.io.IOException;
import java.io.OutputStream;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.NotThreadSafe;
import org.apache.flink.core.memory.DataOutputView;
/**
 * A reusable {@link OutputStream} facade over a Flink {@link DataOutputView}.
 *
 * <p>Callers install a target with {@link #set} before writing and detach it with {@link #done}
 * afterwards, so a single instance can be reused across serialization calls without allocation.
 * {@code flush()} and {@code close()} are deliberate no-ops: the lifecycle of the underlying
 * view is owned by the caller.
 */
@NotThreadSafe
final class OutputStreamView extends OutputStream {
  private DataOutputView target;

  void set(DataOutputView target) {
    this.target = target;
  }

  void done() {
    this.target = null;
  }

  @Override
  public void write(int b) throws IOException {
    target.write(b);
  }

  @Override
  public void write(@Nonnull byte[] b) throws IOException {
    target.write(b);
  }

  @Override
  public void write(@Nonnull byte[] b, int off, int len) throws IOException {
    target.write(b, off, len);
  }

  @Override
  public void flush() {}

  @Override
  public void close() {}
}
| 5,956 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/protobuf/ProtobufSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.protobuf;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.DescriptorProtos.FileDescriptorSet;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Message;
import com.google.protobuf.Parser;
import java.io.IOException;
import java.util.Objects;
import javax.annotation.concurrent.NotThreadSafe;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.statefun.flink.common.generated.ProtobufSerializerSnapshot;
/**
 * A reusable, non-thread-safe (de)serializer for a single Protobuf message type.
 *
 * <p>Wire format: a 4-byte length prefix (written via {@link DataOutputView#writeInt}) followed
 * by exactly that many message bytes. The stream-view adapters and coded streams are allocated
 * once per instance and rebound to each source/target, avoiding per-record allocation.
 */
@NotThreadSafe
public final class ProtobufSerializer<M extends Message> {
  // Reusable adapters bridging Flink views to the protobuf coded streams.
  private final OutputStreamView output;
  private final CodedOutputStream codedOutputStream;
  private final InputStreamView input;
  private final CodedInputStream codedInputStream;
  // Parser for M; parsers are stateless, so it is safely shared with duplicates.
  private final Parser<M> parser;
  // Schema snapshot (message name, generated class name, descriptor set) for state migration.
  private final ProtobufSerializerSnapshot snapshot;

  /** Creates a serializer for the given generated message class, via reflection. */
  public static <M extends Message> ProtobufSerializer<M> forMessageGeneratedClass(Class<M> type) {
    Objects.requireNonNull(type);
    Parser<M> parser = ProtobufReflectionUtil.protobufParser(type);
    ProtobufSerializerSnapshot snapshot = createSnapshot(type);
    return new ProtobufSerializer<>(parser, snapshot);
  }

  private ProtobufSerializer(Parser<M> parser, ProtobufSerializerSnapshot snapshot) {
    this.parser = Objects.requireNonNull(parser);
    this.snapshot = Objects.requireNonNull(snapshot);
    this.input = new InputStreamView();
    this.output = new OutputStreamView();
    this.codedInputStream = CodedInputStream.newInstance(input);
    this.codedOutputStream = CodedOutputStream.newInstance(output);
  }

  /** Writes {@code record} to {@code target} as [size][message bytes]. */
  public void serialize(M record, DataOutputView target) throws IOException {
    final int size = record.getSerializedSize();
    target.writeInt(size);
    output.set(target);
    try {
      record.writeTo(codedOutputStream);
      // flush the coded stream's internal buffer into the bound target view
      codedOutputStream.flush();
    } finally {
      // always unbind, even on failure, so the view does not leak into the next call
      output.done();
    }
  }

  /** Reads one length-prefixed message from {@code source}. */
  public M deserialize(DataInputView source) throws IOException {
    final int serializedSize = source.readInt();
    // cap the view at the length prefix so the parser cannot over-read into the next record
    input.set(source, serializedSize);
    // reset the accumulated-bytes counter so protobuf's size limit is not hit across records
    codedInputStream.resetSizeCounter();
    try {
      return parser.parseFrom(codedInputStream);
    } finally {
      input.done();
    }
  }

  /** Copies one length-prefixed record verbatim, without parsing it. */
  public void copy(DataInputView source, DataOutputView target) throws IOException {
    int serializedSize = source.readInt();
    target.writeInt(serializedSize);
    target.write(source, serializedSize);
  }

  ProtobufSerializerSnapshot snapshot() {
    return snapshot;
  }

  /** Returns a new instance with fresh stream state, sharing the stateless parser and snapshot. */
  public ProtobufSerializer<M> duplicate() {
    return new ProtobufSerializer<>(parser, snapshot);
  }

  // ---------------------------------------------------------------------------------------------------------------
  // Internal helpers
  // ---------------------------------------------------------------------------------------------------------------

  /** Builds the schema snapshot: full message name, generated class name, and descriptor set. */
  private static <M extends Message> ProtobufSerializerSnapshot createSnapshot(Class<M> type) {
    Descriptor messageDescriptor = ProtobufReflectionUtil.protobufDescriptor(type);
    FileDescriptorSet dependencies =
        ProtobufReflectionUtil.protoFileDescriptorSet(messageDescriptor);
    return ProtobufSerializerSnapshot.newBuilder()
        .setMessageName(messageDescriptor.getFullName())
        .setGeneratedJavaName(type.getName())
        .setDescriptorSet(dependencies)
        .build();
  }
}
| 5,957 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/protobuf/ProtobufTypeSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.protobuf;
import com.google.protobuf.Message;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.Objects;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
/**
 * Flink {@link TypeSerializer} for a Protobuf generated message class, delegating the wire
 * format to {@link ProtobufSerializer}.
 *
 * <p>The underlying serializer is transient (coded streams are not serializable) and is rebuilt
 * reflectively on deserialization of this serializer instance.
 */
public final class ProtobufTypeSerializer<M extends Message> extends TypeSerializer<M> {
  private static final long serialVersionUID = 1;

  private final Class<M> typeClass;
  private transient ProtobufSerializer<M> underlyingSerializer;

  /** this is a lazy computed snapshot */
  @SuppressWarnings("InstanceVariableMayNotBeInitializedByReadObject")
  private transient ProtobufTypeSerializerSnapshot<M> snapshot;

  // --------------------------------------------------------------------------------------------------
  // Constructors
  // --------------------------------------------------------------------------------------------------

  ProtobufTypeSerializer(Class<M> typeClass) {
    this(typeClass, ProtobufSerializer.forMessageGeneratedClass(typeClass));
  }

  private ProtobufTypeSerializer(Class<M> typeClass, ProtobufSerializer<M> protobufSerializer) {
    this.typeClass = Objects.requireNonNull(typeClass);
    this.underlyingSerializer = Objects.requireNonNull(protobufSerializer);
  }

  // Rebuilds the transient underlying serializer after Java deserialization.
  private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    this.underlyingSerializer = ProtobufSerializer.forMessageGeneratedClass(typeClass);
  }

  @Override
  public TypeSerializer<M> duplicate() {
    return new ProtobufTypeSerializer<>(typeClass, underlyingSerializer.duplicate());
  }

  @Override
  public boolean isImmutableType() {
    return true;
  }

  @Override
  public M createInstance() {
    // No sensible "empty" instance; Flink tolerates null for non-reuse code paths.
    return null;
  }

  @SuppressWarnings("unchecked")
  @Override
  public M copy(M from) {
    return (M) from.toBuilder().build();
  }

  @Override
  public M copy(M from, M reuse) {
    return copy(from);
  }

  @Override
  public int getLength() {
    // variable-length records
    return -1;
  }

  @Override
  public void serialize(M record, DataOutputView target) throws IOException {
    underlyingSerializer.serialize(record, target);
  }

  @Override
  public M deserialize(DataInputView source) throws IOException {
    return underlyingSerializer.deserialize(source);
  }

  @Override
  public M deserialize(M reuse, DataInputView source) throws IOException {
    return deserialize(source);
  }

  @Override
  public void copy(DataInputView source, DataOutputView target) throws IOException {
    underlyingSerializer.copy(source, target);
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    // Compare the message type as well: serializers for different message types are NOT
    // interchangeable. (Previously only the classes were compared, making all instances equal.)
    ProtobufTypeSerializer<?> that = (ProtobufTypeSerializer<?>) obj;
    return typeClass.equals(that.typeClass);
  }

  @Override
  public int hashCode() {
    return typeClass.hashCode();
  }

  @Override
  public TypeSerializerSnapshot<M> snapshotConfiguration() {
    ProtobufTypeSerializerSnapshot<M> snapshot = this.snapshot;
    if (snapshot == null) {
      snapshot = new ProtobufTypeSerializerSnapshot<>(typeClass, underlyingSerializer.snapshot());
      this.snapshot = snapshot;
    }
    return snapshot;
  }

  Class<M> getTypeClass() {
    return typeClass;
  }
}
| 5,958 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/protobuf/InputStreamView.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.protobuf;
import java.io.IOException;
import java.io.InputStream;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.NotThreadSafe;
import org.apache.flink.core.memory.DataInputView;
/**
 * A reusable, length-limited {@link InputStream} facade over a Flink {@link DataInputView}.
 *
 * <p>Callers bind a source and a byte limit with {@link #set}; reads never consume more than
 * {@code limit} bytes from the underlying view, and EOF (-1) is reported once the limit is
 * exhausted. {@link #done} unbinds the source so the instance can be reused.
 */
@NotThreadSafe
final class InputStreamView extends InputStream {
  private int limit;
  private DataInputView source;

  void set(DataInputView source, int serializedSize) {
    this.source = source;
    this.limit = serializedSize;
  }

  void done() {
    this.source = null;
    this.limit = 0;
  }

  @Override
  public int read() throws IOException {
    if (limit <= 0) {
      return -1;
    }
    --limit;
    // InputStream.read() must return an unsigned byte in [0, 255] (or -1 for EOF).
    // readByte() is signed; returning it directly sign-extends values >= 0x80 to negatives,
    // which callers would misinterpret as EOF.
    return source.readUnsignedByte();
  }

  @Override
  public int read(@Nonnull byte[] b, final int off, int len) throws IOException {
    if (len == 0) {
      // per the InputStream contract, a zero-length read returns 0, even at the limit
      return 0;
    }
    if (limit <= 0) {
      return -1;
    }
    len = Math.min(len, limit);
    final int result = source.read(b, off, len);
    if (result >= 0) {
      limit -= result;
    }
    return result;
  }

  @Override
  public long skip(final long n) throws IOException {
    if (n <= 0) {
      // negative/zero skip requests skip nothing, per the InputStream contract
      return 0;
    }
    final int min = (int) Math.min(n, limit);
    final long result = source.skipBytes(min);
    if (result >= 0) {
      limit -= result;
    }
    return result;
  }

  @Override
  public synchronized void mark(int unused) {
    throw new UnsupportedOperationException();
  }

  @Override
  public synchronized void reset() {
    throw new UnsupportedOperationException();
  }
}
| 5,959 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/org/apache/flink/statefun/flink/common/protobuf/ProtobufTypeSerializerSnapshot.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.common.protobuf;
import com.google.protobuf.Message;
import java.io.IOException;
import java.util.Objects;
import javax.annotation.Nullable;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.TypeSerializerSchemaCompatibility;
import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.statefun.flink.common.generated.ProtobufSerializerSnapshot;
/**
 * {@link TypeSerializerSnapshot} for {@link ProtobufTypeSerializer}.
 *
 * <p>Persisted as a length-prefixed, protobuf-encoded {@link ProtobufSerializerSnapshot} that
 * records the full message name, the generated Java class name, and the message's file
 * descriptor set.
 */
public final class ProtobufTypeSerializerSnapshot<M extends Message>
    implements TypeSerializerSnapshot<M> {
  private static final int VERSION = 1;

  // Both fields are null until either the 3-arg constructor runs or readSnapshot() restores them.
  @Nullable private ProtobufSerializerSnapshot snapshotData;
  @Nullable private Class<M> typeClass;

  @SuppressWarnings("unused")
  public ProtobufTypeSerializerSnapshot() {
    // used for reflective instantiation.
  }

  ProtobufTypeSerializerSnapshot(Class<M> messageType, ProtobufSerializerSnapshot snapshotData) {
    this.typeClass = Objects.requireNonNull(messageType);
    this.snapshotData = Objects.requireNonNull(snapshotData);
  }

  /**
   * Loads the generated message class recorded in the snapshot via the user-code classloader.
   *
   * @throws IllegalStateException if the class is no longer on the classpath.
   */
  @SuppressWarnings("unchecked")
  private static <M extends Message> Class<M> classForName(
      ClassLoader userCodeClassLoader, ProtobufSerializerSnapshot snapshotData) {
    try {
      // initialize=false: loading the class is enough; static init is deferred
      return (Class<M>)
          Class.forName(snapshotData.getGeneratedJavaName(), false, userCodeClassLoader);
    } catch (ClassNotFoundException e) {
      throw new IllegalStateException(
          "Unable to restore the protobuf serializer since the generated java class is not found. "
              + "previously the generated java class was at "
              + snapshotData.getGeneratedJavaName()
              + " with "
              + snapshotData.getMessageName(),
          e);
    }
  }

  @Override
  public int getCurrentVersion() {
    return VERSION;
  }

  /** Writes the snapshot as [4-byte size][protobuf-encoded ProtobufSerializerSnapshot]. */
  @Override
  public void writeSnapshot(DataOutputView out) throws IOException {
    if (snapshotData == null) {
      throw new IllegalStateException();
    }
    out.writeInt(snapshotData.getSerializedSize());
    out.write(snapshotData.toByteArray());
  }

  /** Reads the length-prefixed snapshot and resolves the generated message class. */
  @Override
  public void readSnapshot(int readVersion, DataInputView in, ClassLoader userCodeClassLoader)
      throws IOException {
    final int snapshotSize = in.readInt();
    final byte[] snapshotBytes = new byte[snapshotSize];
    in.readFully(snapshotBytes);
    this.snapshotData = ProtobufSerializerSnapshot.parseFrom(snapshotBytes);
    this.typeClass = classForName(userCodeClassLoader, snapshotData);
  }

  @Override
  public TypeSerializer<M> restoreSerializer() {
    Objects.requireNonNull(typeClass);
    return new ProtobufTypeSerializer<>(typeClass);
  }

  @Override
  public TypeSerializerSchemaCompatibility<M> resolveSchemaCompatibility(
      TypeSerializer<M> newSerializer) {
    if (!(newSerializer instanceof ProtobufTypeSerializer)) {
      return TypeSerializerSchemaCompatibility.incompatible();
    }
    ProtobufTypeSerializer<?> casted = (ProtobufTypeSerializer<?>) newSerializer;
    return resolveSchemaCompatibility(casted);
  }

  /**
   * Check schema compatibility with the new serializer.
   *
   * <p>This check is very simplistic, that just compares the two typeClasses, but the {@link
   * ProtobufSerializerSnapshot} has much more information to be used for compatibility resolution.
   * We make sure to store this information first, and implement a more robust schema resolution
   * logic in the future.
   */
  private TypeSerializerSchemaCompatibility<M> resolveSchemaCompatibility(
      ProtobufTypeSerializer<?> newSerializer) {
    // reference equality is intentional here: the classes must come from the same classloader
    Class<?> otherTypeClass = newSerializer.getTypeClass();
    if (otherTypeClass == typeClass) {
      return TypeSerializerSchemaCompatibility.compatibleAsIs();
    }
    return TypeSerializerSchemaCompatibility.incompatible();
  }
}
| 5,960 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/com/google | Create_ds/flink-statefun/statefun-flink/statefun-flink-common/src/main/java/com/google/protobuf/MoreByteStrings.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.protobuf;
import java.nio.ByteBuffer;
/**
 * Accessors for package-private {@link ByteString} factory methods (zero-copy {@code wrap} and
 * {@code concat}). Lives in the {@code com.google.protobuf} package solely to gain access to
 * them.
 *
 * <p>Note: the {@code wrap} overloads do NOT copy — the caller must not mutate the supplied
 * array/buffer afterwards, or the resulting {@link ByteString}'s immutability is violated.
 */
public class MoreByteStrings {

  /** Static utility; not instantiable. */
  private MoreByteStrings() {}

  public static ByteString wrap(byte[] bytes) {
    return ByteString.wrap(bytes);
  }

  public static ByteString wrap(byte[] bytes, int offset, int len) {
    return ByteString.wrap(bytes, offset, len);
  }

  public static ByteString wrap(ByteBuffer buffer) {
    return ByteString.wrap(buffer);
  }

  public static ByteString concat(ByteString first, ByteString second) {
    return first.concat(second);
  }
}
| 5,961 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-extensions/src/main/java/org/apache/flink/statefun | Create_ds/flink-statefun/statefun-flink/statefun-flink-extensions/src/main/java/org/apache/flink/statefun/extensions/ComponentJsonFormatException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.extensions;
/**
 * Signals that a component's JSON specification is malformed or does not match the format a
 * {@link ComponentBinder} expects.
 */
public final class ComponentJsonFormatException extends IllegalArgumentException {
  private static final long serialVersionUID = 1L;

  /** @param message a description of the format violation. */
  public ComponentJsonFormatException(String message) {
    super(message);
  }

  /**
   * @param message a description of the format violation.
   * @param cause the underlying parse failure.
   */
  public ComponentJsonFormatException(String message, Throwable cause) {
    super(message, cause);
  }
}
| 5,962 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-extensions/src/main/java/org/apache/flink/statefun | Create_ds/flink-statefun/statefun-flink/statefun-flink-extensions/src/main/java/org/apache/flink/statefun/extensions/ComponentBinder.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.extensions;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.statefun.sdk.spi.StatefulFunctionModule;
/**
 * A {@link ComponentBinder} binds {@link ComponentJsonObject}s to a remote module. It parses the
 * specifications of a given component, resolves them into application entities, such as function
 * providers, ingresses, or egresses, and then binds the entities to the module.
 *
 * <p>A component's {@code kind} field names the binder that recognizes it; see {@link
 * ComponentJsonObject} for the expected component document format.
 */
@PublicEvolving
public interface ComponentBinder {
  /**
   * Bind a {@link ComponentJsonObject} to an underlying remote module through the provided module
   * binder.
   *
   * @param component the component to parse and bind.
   * @param remoteModuleBinder the binder to use to bind application entities to the underlying
   *     remote module.
   */
  void bind(ComponentJsonObject component, StatefulFunctionModule.Binder remoteModuleBinder);
}
| 5,963 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-extensions/src/main/java/org/apache/flink/statefun | Create_ds/flink-statefun/statefun-flink/statefun-flink-extensions/src/main/java/org/apache/flink/statefun/extensions/ExtensionModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.extensions;
import java.util.Map;
import org.apache.flink.statefun.sdk.TypeName;
/**
 * A module that binds multiple extension objects to the Stateful Functions application. Each
 * extension is uniquely identified by a {@link TypeName}.
 */
public interface ExtensionModule {
  /**
   * This method binds multiple extension objects to the Stateful Functions application.
   *
   * @param globalConfigurations global configuration of the Stateful Functions application.
   * @param binder binder for binding extensions.
   */
  void configure(Map<String, String> globalConfigurations, Binder binder);
  /** Callback used by {@link #configure(Map, Binder)} to register extension objects. */
  interface Binder {
    /**
     * Binds a single extension object under the given typename.
     *
     * @param typeName the typename that uniquely identifies the extension.
     * @param extension the extension object to bind.
     * @param <T> the type of the extension object.
     */
    <T> void bindExtension(TypeName typeName, T extension);
  }
}
| 5,964 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-extensions/src/main/java/org/apache/flink/statefun | Create_ds/flink-statefun/statefun-flink/statefun-flink-extensions/src/main/java/org/apache/flink/statefun/extensions/ComponentJsonObject.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.extensions;
import java.util.Objects;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.flink.statefun.sdk.TypeName;
import org.apache.flink.statefun.sdk.spi.StatefulFunctionModule;
/**
 * A {@link ComponentJsonObject} consists of one or more application entities (i.e. function
 * providers, ingresses, routers, or egresses) that should be bound to a remote {@link
 * StatefulFunctionModule}.
 *
 * <p>Each component is represented in remote module YAML specification files as a single YAML
 * document of the following format:
 *
 * <pre>
 * kind: com.foo.bar.v5/some.component (typename)
 * spec:
 *   ... (specification document of the component)
 * </pre>
 *
 * <p>The {@code kind} is a {@link TypeName} that should be universally unique within the
 * application. It identifies which {@link ComponentBinder} recognizes this component and knows how
 * to parse it to resolve application entities to be bound to the module.
 *
 * @see ComponentBinder
 */
@PublicEvolving
public final class ComponentJsonObject {
  public static final String BINDER_KIND_FIELD = "kind";
  public static final String SPEC_FIELD = "spec";
  private final ObjectNode rawObjectNode;
  private final TypeName binderTypename;
  private final JsonNode specJsonNode;

  /**
   * Creates a {@link ComponentJsonObject} from the given raw JSON node.
   *
   * @param jsonNode the raw component JSON; must be a JSON object with the required fields {@code
   *     kind} and {@code spec}.
   * @throws ComponentJsonFormatException if the node is not an object, or a required field is
   *     missing or malformed.
   */
  public ComponentJsonObject(JsonNode jsonNode) {
    Objects.requireNonNull(jsonNode);
    if (!jsonNode.isObject()) {
      // components must always be JSON objects; anything else is a format error
      throw formatException();
    }
    this.rawObjectNode = (ObjectNode) jsonNode;
    this.binderTypename = parseBinderTypename(rawObjectNode);
    this.specJsonNode = extractSpecJsonNode(rawObjectNode);
  }

  /**
   * Returns the complete component JSON object.
   *
   * @return the complete component JSON object.
   */
  public ObjectNode get() {
    return rawObjectNode;
  }

  /**
   * Returns the {@link TypeName} of the binder for this component.
   *
   * @return the {@link TypeName} of the binder for this component.
   */
  public TypeName binderTypename() {
    return binderTypename;
  }

  /**
   * Returns the specification JSON node for this component.
   *
   * @return the specification JSON node for this component.
   */
  public JsonNode specJsonNode() {
    return specJsonNode;
  }

  @Override
  public String toString() {
    return rawObjectNode.toString();
  }

  /** Extracts and parses the {@code kind} field as the binder's {@link TypeName}. */
  private static TypeName parseBinderTypename(ObjectNode componentObject) {
    final JsonNode binderKindObject = componentObject.get(BINDER_KIND_FIELD);
    if (binderKindObject == null) {
      throw formatException();
    }
    try {
      return TypeName.parseFrom(binderKindObject.asText());
    } catch (Exception e) {
      throw new ComponentJsonFormatException("Invalid binder kind format.", e);
    }
  }

  /** Extracts the mandatory {@code spec} field. */
  private static JsonNode extractSpecJsonNode(ObjectNode componentObject) {
    final JsonNode specJsonNode = componentObject.get(SPEC_FIELD);
    if (specJsonNode == null) {
      throw formatException();
    }
    return specJsonNode;
  }

  /**
   * Returns (rather than throws) the format-hint exception so call sites can {@code throw} it
   * directly. This keeps null-flow analysis intact: the previous void always-throwing helper left
   * the compiler unaware that execution cannot continue after a failed null check.
   */
  private static ComponentJsonFormatException formatException() {
    return new ComponentJsonFormatException(
        "Invalid ComponentJsonObject; components should be a JSON object with the required fields [kind] and [spec].");
  }
}
| 5,965 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/test/java/org/apache/flink/statefun/flink/state | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/test/java/org/apache/flink/statefun/flink/state/processor/StatefulFunctionsSavepointCreatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.io.Router;
import org.junit.Test;
public class StatefulFunctionsSavepointCreatorTest {

  private static final FunctionType TEST_FUNCTION_TYPE = new FunctionType("ns", "test");

  @Test(expected = IllegalArgumentException.class)
  public void invalidMaxParallelism() {
    new StatefulFunctionsSavepointCreator(-1);
  }

  @Test(expected = IllegalArgumentException.class)
  public void duplicateStateBootstrapFunctionProvider() {
    final StatefulFunctionsSavepointCreator creator = newCreatorWithNoOpProvider();
    // registering a second provider for the same function type must be rejected
    creator.withStateBootstrapFunctionProvider(
        TEST_FUNCTION_TYPE, ignored -> new NoOpStateBootstrapFunction());
  }

  @Test(expected = IllegalStateException.class)
  public void noBootstrapDataOnWrite() {
    // a creator with providers but no bootstrap data cannot be written out
    newCreatorWithNoOpProvider().write("ignored");
  }

  @Test(expected = IllegalStateException.class)
  public void noStateBootstrapFunctionProvidersOnWrite() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final StatefulFunctionsSavepointCreator creator = new StatefulFunctionsSavepointCreator(1);
    creator.withBootstrapData(env.fromElements("foobar"), NoOpBootstrapDataRouter::new);
    // a creator with bootstrap data but no providers cannot be written out
    creator.write("ignored");
  }

  /** Creates a savepoint creator with a single no-op provider registered for the test type. */
  private static StatefulFunctionsSavepointCreator newCreatorWithNoOpProvider() {
    final StatefulFunctionsSavepointCreator creator = new StatefulFunctionsSavepointCreator(1);
    creator.withStateBootstrapFunctionProvider(
        TEST_FUNCTION_TYPE, ignored -> new NoOpStateBootstrapFunction());
    return creator;
  }

  /** Bootstrap function stub that writes no state. */
  private static class NoOpStateBootstrapFunction implements StateBootstrapFunction {
    @Override
    public void bootstrap(Context context, Object bootstrapData) {}
  }

  /** Router stub that drops all messages. */
  private static class NoOpBootstrapDataRouter<T> implements Router<T> {
    @Override
    public void route(T message, Downstream<T> downstream) {}
  }
}
| 5,966 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/test/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/test/java/org/apache/flink/statefun/flink/state/processor/union/TaggedBootstrapDataSerializerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.union;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.flink.api.common.typeutils.SerializerTestBase;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.base.BooleanSerializer;
import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.statefun.sdk.Address;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.testutils.DeeplyEqualsChecker;
import org.junit.Ignore;
public class TaggedBootstrapDataSerializerTest extends SerializerTestBase<TaggedBootstrapData> {

  /** Payload serializers in union-index order: index 0 -> int, index 1 -> boolean. */
  private static final List<TypeSerializer<?>> PAYLOAD_SERIALIZERS =
      Arrays.asList(IntSerializer.INSTANCE, BooleanSerializer.INSTANCE);

  /** Maps each payload class to its position in {@link #PAYLOAD_SERIALIZERS}. */
  private static final Map<Class<?>, Integer> UNION_INDEX_BY_TYPE = buildUnionIndexLookup();

  private static Map<Class<?>, Integer> buildUnionIndexLookup() {
    final Map<Class<?>, Integer> lookup = new HashMap<>(2);
    lookup.put(Integer.class, 0);
    lookup.put(Boolean.class, 1);
    return lookup;
  }

  public TaggedBootstrapDataSerializerTest() {
    super(
        new DeeplyEqualsChecker()
            .withCustomCheck(
                (o1, o2) -> o1 instanceof TaggedBootstrapData && o2 instanceof TaggedBootstrapData,
                (o1, o2, checker) ->
                    deeplyEquals((TaggedBootstrapData) o1, (TaggedBootstrapData) o2)));
  }

  /**
   * Compares two tagged bootstrap data elements field by field. Direct equality on the payload is
   * sound here because the payloads in this test are only Integers and Booleans.
   */
  private static boolean deeplyEquals(TaggedBootstrapData left, TaggedBootstrapData right) {
    return left.getTarget().equals(right.getTarget())
        && left.getUnionIndex() == right.getUnionIndex()
        && left.getPayload().equals(right.getPayload());
  }

  @Override
  protected TaggedBootstrapData[] getTestData() {
    return new TaggedBootstrapData[] {
      bootstrapData("test-namespace", "test-name", "test-id-1", 1991),
      bootstrapData("test-namespace", "test-name-2", "test-id-80", false),
      bootstrapData("test-namespace", "test-name", "test-id-56", 1108)
    };
  }

  /** Creates a tagged element whose union index is derived from the payload's runtime class. */
  private static TaggedBootstrapData bootstrapData(
      String functionNamespace, String functionName, String functionId, Object payload) {
    final Address target =
        new Address(new FunctionType(functionNamespace, functionName), functionId);
    return new TaggedBootstrapData(target, payload, UNION_INDEX_BY_TYPE.get(payload.getClass()));
  }

  @Override
  protected TypeSerializer<TaggedBootstrapData> createSerializer() {
    return new TaggedBootstrapDataSerializer(PAYLOAD_SERIALIZERS);
  }

  @Override
  protected Class<TaggedBootstrapData> getTypeClass() {
    return TaggedBootstrapData.class;
  }

  @Override
  protected int getLength() {
    return -1;
  }

  // -----------------------------------------------------------------------------
  // Ignored tests
  // -----------------------------------------------------------------------------

  @Override
  @Ignore
  public void testConfigSnapshotInstantiation() {
    // ignored; only relevant for serializers that are used for persistent data
  }

  @Override
  @Ignore
  public void testSnapshotConfigurationAndReconfigure() {
    // ignored; only relevant for serializers that are used for persistent data
  }
}
| 5,967 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/test/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/test/java/org/apache/flink/statefun/flink/state/processor/union/BootstrapDatasetUnionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.union;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.statefun.flink.state.processor.BootstrapDataRouterProvider;
import org.apache.flink.statefun.sdk.Address;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.io.Router;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Test;
/**
 * Tests for {@code BootstrapDatasetUnion#apply}, which merges multiple heterogeneous bootstrap
 * datasets into a single {@link DataSet} of {@link TaggedBootstrapData}.
 */
public class BootstrapDatasetUnionTest {
  @Test
  public void correctTypeInformation() {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final List<BootstrapDataset<?>> bootstrapDatasets = new ArrayList<>(2);
    bootstrapDatasets.add(
        createBootstrapDataset(env, Collections.singletonList(1), TestRouter::noOp));
    bootstrapDatasets.add(
        createBootstrapDataset(env, Collections.singletonList(true), TestRouter::noOp));
    final DataSet<TaggedBootstrapData> test = BootstrapDatasetUnion.apply(bootstrapDatasets);
    // the union's type info must list the payload type infos in dataset registration order
    assertThat(
        test.getType(),
        is(unionTypeInfoOf(BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.BOOLEAN_TYPE_INFO)));
  }
  @Test
  public void correctUnion() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    final List<BootstrapDataset<?>> bootstrapDatasets = new ArrayList<>(2);
    // first dataset: one Integer element, routed twice to ("ns", "name", "id-0")
    bootstrapDatasets.add(
        createBootstrapDataset(
            env,
            Collections.singletonList(911108),
            () -> new TestRouter<>(addressOf("ns", "name", "id-0"), 2)));
    // second dataset: two Boolean elements, each routed once to ("ns", "name-2", "id-99")
    bootstrapDatasets.add(
        createBootstrapDataset(
            env,
            Arrays.asList(true, false),
            () -> new TestRouter<>(addressOf("ns", "name-2", "id-99"), 1)));
    final List<TaggedBootstrapData> test = BootstrapDatasetUnion.apply(bootstrapDatasets).collect();
    // each routed element must carry the union index of the dataset it originated from
    assertThat(
        test,
        Matchers.containsInAnyOrder(
            taggedBootstrapData(addressOf("ns", "name", "id-0"), 911108, 0),
            taggedBootstrapData(addressOf("ns", "name", "id-0"), 911108, 0),
            taggedBootstrapData(addressOf("ns", "name-2", "id-99"), true, 1),
            taggedBootstrapData(addressOf("ns", "name-2", "id-99"), false, 1)));
  }
  /** Wraps a collection as a {@link BootstrapDataset} with the given router provider. */
  private static <T> BootstrapDataset<T> createBootstrapDataset(
      ExecutionEnvironment env,
      Collection<T> bootstrapDataset,
      BootstrapDataRouterProvider<T> routerProvider) {
    return new BootstrapDataset<>(env.fromCollection(bootstrapDataset), routerProvider);
  }
  /** Builds the expected union type information over the given payload type infos. */
  private static TaggedBootstrapDataTypeInfo unionTypeInfoOf(
      TypeInformation<?>... payloadTypeInfos) {
    return new TaggedBootstrapDataTypeInfo(Arrays.asList(payloadTypeInfos));
  }
  private static Address addressOf(
      String functionNamespace, String functionName, String functionId) {
    return new Address(new FunctionType(functionNamespace, functionName), functionId);
  }
  /** Matcher for a {@link TaggedBootstrapData} with the given target, payload, and union index. */
  private static Matcher<TaggedBootstrapData> taggedBootstrapData(
      Address expectedAddress, Object expectedPayload, Integer expectedUnionIndex) {
    return new TypeSafeMatcher<TaggedBootstrapData>() {
      @Override
      protected boolean matchesSafely(TaggedBootstrapData test) {
        return expectedUnionIndex == test.getUnionIndex()
            && expectedAddress.equals(test.getTarget())
            // equality checks on payload makes sense here since
            // the payloads are only booleans or integers in this test
            && expectedPayload.equals(test.getPayload());
      }
      @Override
      public void describeTo(Description description) {}
    };
  }
  /** Router stub that forwards every element {@code routeCount} times to a fixed address. */
  private static class TestRouter<T> implements Router<T> {
    /** Number of times to route the inputs. */
    private final int routeCount;
    /** Address to route the inputs to. */
    private final Address targetAddress;
    /** A router that drops everything; useful when only type information is under test. */
    static <T> TestRouter<T> noOp() {
      return new TestRouter<>(null, 0);
    }
    TestRouter(Address targetAddress, int routeCount) {
      this.targetAddress = targetAddress;
      this.routeCount = routeCount;
    }
    @Override
    public void route(T message, Downstream<T> downstream) {
      for (int i = 0; i < routeCount; i++) {
        downstream.forward(targetAddress, message);
      }
    }
  }
}
| 5,968 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/test/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/test/java/org/apache/flink/statefun/flink/state/processor/operator/StateBootstrapperTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.operator;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import java.util.HashMap;
import java.util.Map;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.statefun.flink.core.state.State;
import org.apache.flink.statefun.flink.state.processor.Context;
import org.apache.flink.statefun.flink.state.processor.StateBootstrapFunction;
import org.apache.flink.statefun.flink.state.processor.union.TaggedBootstrapData;
import org.apache.flink.statefun.sdk.Address;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.annotations.Persisted;
import org.apache.flink.statefun.sdk.state.Accessor;
import org.apache.flink.statefun.sdk.state.AppendingBufferAccessor;
import org.apache.flink.statefun.sdk.state.PersistedAppendingBuffer;
import org.apache.flink.statefun.sdk.state.PersistedTable;
import org.apache.flink.statefun.sdk.state.PersistedValue;
import org.apache.flink.statefun.sdk.state.RemotePersistedValue;
import org.apache.flink.statefun.sdk.state.TableAccessor;
import org.junit.Test;
public class StateBootstrapperTest {
@Test
public void bootstrapState() {
final TestState testState = new TestState();
final StateBootstrapper stateBootstrapper =
createBootstrapper(
testState,
Tuple2.of(IntegerStateBootstrapFunction.TYPE, new IntegerStateBootstrapFunction()),
Tuple2.of(FloatStateBootstrapFunction.TYPE, new FloatStateBootstrapFunction()));
stateBootstrapper.apply(taggedBootstrapData(IntegerStateBootstrapFunction.TYPE, "foo", 1991));
stateBootstrapper.apply(taggedBootstrapData(FloatStateBootstrapFunction.TYPE, "foo", 3.1415F));
stateBootstrapper.apply(taggedBootstrapData(IntegerStateBootstrapFunction.TYPE, "bar", 1108));
assertThat(
testState.getState(
IntegerStateBootstrapFunction.TYPE, IntegerStateBootstrapFunction.STATE_NAME, "foo"),
is(1991));
assertThat(
testState.getState(
IntegerStateBootstrapFunction.TYPE, IntegerStateBootstrapFunction.STATE_NAME, "bar"),
is(1108));
assertThat(
testState.getState(
FloatStateBootstrapFunction.TYPE, FloatStateBootstrapFunction.STATE_NAME, "foo"),
is(3.1415F));
}
@Test
public void bootstrapsWithCorrectContext() {
final Address expectedSelfAddress =
new Address(ContextVerifyingStateBootstrapFunction.TYPE, "test-id");
final StateBootstrapper stateBootstrapper =
createBootstrapper(
new TestState(),
Tuple2.of(
ContextVerifyingStateBootstrapFunction.TYPE,
new ContextVerifyingStateBootstrapFunction(expectedSelfAddress)));
stateBootstrapper.apply(
taggedBootstrapData(ContextVerifyingStateBootstrapFunction.TYPE, "test-id", "foobar"));
}
private static class IntegerStateBootstrapFunction implements StateBootstrapFunction {
static final FunctionType TYPE = new FunctionType("test", "int-state-function");
static final String STATE_NAME = "int-state";
@Persisted
private PersistedValue<Integer> intState = PersistedValue.of(STATE_NAME, Integer.class);
@Override
public void bootstrap(Context context, Object bootstrapData) {
intState.set((int) bootstrapData);
}
}
private static class FloatStateBootstrapFunction implements StateBootstrapFunction {
static final FunctionType TYPE = new FunctionType("test", "float-state-function");
static final String STATE_NAME = "float-state";
@Persisted
private PersistedValue<Float> floatState = PersistedValue.of(STATE_NAME, Float.class);
@Override
public void bootstrap(Context context, Object bootstrapData) {
floatState.set((float) bootstrapData);
}
}
private static class ContextVerifyingStateBootstrapFunction implements StateBootstrapFunction {
static final FunctionType TYPE = new FunctionType("test", "context-verifier");
private final Address expectedSelfAddress;
ContextVerifyingStateBootstrapFunction(Address expectedSelfAddress) {
this.expectedSelfAddress = expectedSelfAddress;
}
@Override
public void bootstrap(Context context, Object bootstrapData) {
assertEquals(expectedSelfAddress, context.self());
}
}
private static StateBootstrapper createBootstrapper(
State state, Tuple2<FunctionType, StateBootstrapFunction>... bootstrapFunctions) {
final StateBootstrapFunctionRegistry registry = new StateBootstrapFunctionRegistry();
for (Tuple2<FunctionType, StateBootstrapFunction> bootstrapFunction : bootstrapFunctions) {
registry.register(bootstrapFunction.f0, ignored -> bootstrapFunction.f1);
}
return new StateBootstrapper(registry, state);
}
private static TaggedBootstrapData taggedBootstrapData(
FunctionType functionType, String functionId, Object payload) {
return new TaggedBootstrapData(new Address(functionType, functionId), payload, 0);
}
private static class TestState implements State {
private Address currentKey;
/**
* Nested state table for function states. A single state value is addressable via (function
* type, state name, function id);
*/
private Map<FunctionType, Map<String, Map<Address, Object>>> functionStates = new HashMap<>();
@SuppressWarnings("unchecked")
@Override
public <T> Accessor<T> createFlinkStateAccessor(
FunctionType functionType, PersistedValue<T> persistedValue) {
return new Accessor<T>() {
@Override
public void set(T value) {
assertKeySet();
functionStates
.computeIfAbsent(functionType, ignored -> new HashMap<>())
.computeIfAbsent(persistedValue.name(), ignored -> new HashMap())
.put(currentKey, value);
}
@Override
public T get() {
assertKeySet();
return (T)
functionStates
.computeIfAbsent(functionType, ignored -> new HashMap<>())
.computeIfAbsent(persistedValue.name(), ignored -> new HashMap())
.get(currentKey);
}
@Override
public void clear() {
assertKeySet();
functionStates
.computeIfAbsent(functionType, ignored -> new HashMap<>())
.computeIfAbsent(persistedValue.name(), ignored -> new HashMap())
.remove(currentKey);
}
};
}
@Override
public <K, V> TableAccessor<K, V> createFlinkStateTableAccessor(
FunctionType functionType, PersistedTable<K, V> persistedTable) {
throw new UnsupportedOperationException();
}
@Override
public <E> AppendingBufferAccessor<E> createFlinkStateAppendingBufferAccessor(
FunctionType functionType, PersistedAppendingBuffer<E> persistedAppendingBuffer) {
throw new UnsupportedOperationException();
}
@Override
public Accessor<byte[]> createFlinkRemoteStateAccessor(
FunctionType functionType, RemotePersistedValue remotePersistedValue) {
throw new UnsupportedOperationException();
}
@Override
public void setCurrentKey(Address address) {
this.currentKey = address;
}
private void assertKeySet() {
assertNotNull("Key should have been set before accessing state.", currentKey);
}
Object getState(FunctionType functionType, String stateName, String functionId) {
return functionStates
.get(functionType)
.get(stateName)
.get(new Address(functionType, functionId));
}
}
}
| 5,969 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/StateBootstrapFunctionProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor;
import java.io.Serializable;
import org.apache.flink.statefun.sdk.FunctionType;
/** Provides instances of {@link StateBootstrapFunction}s for a given {@link FunctionType}. */
public interface StateBootstrapFunctionProvider extends Serializable {
  /**
   * Creates a {@link StateBootstrapFunction} instance for the given {@link FunctionType}.
   *
   * @param type the type of function to create a bootstrap function instance for.
   * @return an instance of a bootstrap function for the given type.
   */
  StateBootstrapFunction bootstrapFunctionOfType(FunctionType type);
}
| 5,970 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/Context.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor;
import org.apache.flink.statefun.sdk.Address;
/** Provides context for a single {@link StateBootstrapFunction} invocation. */
public interface Context {
  /**
   * Returns the {@link Address} of the function being bootstrapped, i.e. the function type plus
   * the id of the specific function instance whose state is being written.
   *
   * @return the address of the function being bootstrapped.
   */
  Address self();
}
| 5,971 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/BootstrapDataRouterProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor;
import java.io.Serializable;
import javax.annotation.Nonnull;
import org.apache.flink.statefun.sdk.io.Router;
/**
 * Provides instances of a {@link Router} to route bootstrap data to {@link
 * StateBootstrapFunction}s.
 *
 * <p>The provider is {@link Serializable} so that it can be shipped to the cluster tasks that
 * execute the bootstrap job; the {@link Router} itself is created on the task side.
 *
 * @param <T> data type of elements in the bootstrap dataset being routed.
 */
public interface BootstrapDataRouterProvider<T> extends Serializable {
  /**
   * Creates a {@link Router} instance.
   *
   * <p>Implementations must never return {@code null}.
   *
   * @return a router for bootstrap data
   */
  @Nonnull
  Router<T> provide();
}
| 5,972 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/StatefulFunctionsSavepointCreator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.state.StateBackend;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.state.api.BootstrapTransformation;
import org.apache.flink.state.api.NewSavepoint;
import org.apache.flink.state.api.OperatorTransformation;
import org.apache.flink.state.api.Savepoint;
import org.apache.flink.statefun.flink.core.StatefulFunctionsJobConstants;
import org.apache.flink.statefun.flink.state.processor.operator.FunctionsStateBootstrapOperator;
import org.apache.flink.statefun.flink.state.processor.operator.StateBootstrapFunctionRegistry;
import org.apache.flink.statefun.flink.state.processor.union.BootstrapDataset;
import org.apache.flink.statefun.flink.state.processor.union.BootstrapDatasetUnion;
import org.apache.flink.statefun.flink.state.processor.union.TaggedBootstrapData;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.io.Router;
import org.apache.flink.util.Preconditions;
/**
 * Entry point for generating a new savepoint for a Stateful Functions application.
 *
 * <p>Callers register one or more {@link StateBootstrapFunction}s, each defining how to bootstrap
 * the state of a given stateful function, together with Flink {@link DataSet}s whose elements
 * serve as input to those bootstrap functions. Invoking {@link #write(String)} then builds a Flink
 * batch job that writes out a savepoint containing the bootstrapped state, which may be used to
 * restore a Stateful Functions application.
 */
public class StatefulFunctionsSavepointCreator {

  /** Max parallelism of the application that will be restored from the generated savepoint. */
  private final int maxParallelism;

  /** State backend used to write the savepoint; defaults to RocksDB, see constructor. */
  private StateBackend stateBackend;

  private final StateBootstrapFunctionRegistry stateBootstrapFunctionRegistry =
      new StateBootstrapFunctionRegistry();

  private final List<BootstrapDataset<?>> bootstrapDatasets = new LinkedList<>();

  /**
   * Creates a {@link StatefulFunctionsSavepointCreator}.
   *
   * @param maxParallelism max parallelism of the Stateful Functions application to be restored
   *     using the generated savepoint; must be positive.
   */
  public StatefulFunctionsSavepointCreator(int maxParallelism) {
    Preconditions.checkArgument(maxParallelism > 0);
    this.maxParallelism = maxParallelism;
    try {
      // RocksDB requires a checkpoint path at construction time; judging by the literal,
      // the path is not expected to be used for the savepoint output itself.
      this.stateBackend = new RocksDBStateBackend("file:///tmp/ignored");
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Use Flink's {@link FsStateBackend} to generate the savepoint instead of the default {@link
   * RocksDBStateBackend}.
   *
   * <p>The backend determines the format of the generated savepoint, so it should match the
   * backend configured by the Stateful Functions application that will be restored from it.
   *
   * @return this savepoint creator, configured to use the {@link FsStateBackend}.
   */
  public StatefulFunctionsSavepointCreator withFsStateBackend() {
    this.stateBackend = new FsStateBackend("file:///tmp/ignored");
    return this;
  }

  /**
   * Registers a Flink {@link DataSet} whose elements are inputs to {@link StateBootstrapFunction}s
   * for bootstrapping state, together with a provider for the {@link Router} that addresses each
   * element to {@link StateBootstrapFunction} instances.
   *
   * <p>For every bootstrap function that may receive such an input, a {@link
   * StateBootstrapFunctionProvider} must also be registered via {@link
   * #withStateBootstrapFunctionProvider(FunctionType, StateBootstrapFunctionProvider)}.
   *
   * @param bootstrapDataset a Flink {@link DataSet} containing inputs for bootstrapping state.
   * @param routerProvider provider of a {@link Router} that addresses each element in the bootstrap
   *     dataset to {@link StateBootstrapFunction} instances.
   * @param <IN> data type of the input bootstrap dataset
   * @return this savepoint creator, configured to use the given bootstrap data
   */
  public <IN> StatefulFunctionsSavepointCreator withBootstrapData(
      DataSet<IN> bootstrapDataset, BootstrapDataRouterProvider<IN> routerProvider) {
    bootstrapDatasets.add(new BootstrapDataset<>(bootstrapDataset, routerProvider));
    return this;
  }

  /**
   * Registers a {@link StateBootstrapFunctionProvider} for a given function type.
   *
   * @param functionType the type of function that is being bootstrapped.
   * @param bootstrapFunctionProvider the bootstrap function provider to register.
   * @return this savepoint creator, configured to use the given {@link
   *     StateBootstrapFunctionProvider}.
   */
  public StatefulFunctionsSavepointCreator withStateBootstrapFunctionProvider(
      FunctionType functionType, StateBootstrapFunctionProvider bootstrapFunctionProvider) {
    stateBootstrapFunctionRegistry.register(functionType, bootstrapFunctionProvider);
    return this;
  }

  /**
   * Constructs the savepoint and writes it to the given path.
   *
   * @param path path to write the generated savepoint to.
   */
  public void write(String path) {
    Preconditions.checkState(
        !bootstrapDatasets.isEmpty(), "At least 1 bootstrap DataSet must be registered.");
    Preconditions.checkState(
        stateBootstrapFunctionRegistry.numRegistrations() > 0,
        "At least 1 StateBootstrapFunctionProvider must be registered.");

    // Union all registered datasets into a single tagged dataset, keyed by the id of the
    // target bootstrap function instance so that state lands in the correct key group.
    final DataSet<TaggedBootstrapData> taggedUnionDataset =
        BootstrapDatasetUnion.apply(bootstrapDatasets);
    final BootstrapTransformation<TaggedBootstrapData> transformation =
        OperatorTransformation.bootstrapWith(taggedUnionDataset)
            .keyBy(bootstrapData -> bootstrapData.getTarget().id())
            .transform(
                (timestamp, savepointPath) ->
                    new FunctionsStateBootstrapOperator(
                        stateBootstrapFunctionRegistry, timestamp, savepointPath));

    // The operator UID must match the functions operator of the live application so that the
    // bootstrapped state is picked up on restore.
    final NewSavepoint savepoint = Savepoint.create(stateBackend, maxParallelism);
    savepoint.withOperator(
        StatefulFunctionsJobConstants.FUNCTION_OPERATOR_UID, transformation);
    savepoint.write(path);
  }
}
| 5,973 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/StateBootstrapFunction.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor;
import org.apache.flink.statefun.sdk.Address;
import org.apache.flink.statefun.sdk.StatefulFunction;
/**
 * A {@link StateBootstrapFunction} defines how to bootstrap state for a {@link StatefulFunction}
 * instance with a given input.
 *
 * <p>Each {@code StateBootstrapFunction} instance directly corresponds to a {@code
 * StatefulFunction} instance. Likewise, each instance is uniquely identified by an {@link Address},
 * represented by the type and id of the function being bootstrapped. Any state that is persisted by
 * a {@code StateBootstrapFunction} instance will be available to the corresponding live {@code
 * StatefulFunction} instance having the same address.
 *
 * <p>For example, consider the following state bootstrap function:
 *
 * <pre>{@code
 * public class MyStateBootstrapFunction implements StateBootstrapFunction {
 *
 *     {@code @Persisted}
 *     private PersistedValue<MyState> state = PersistedValue.of("my-state", MyState.class);
 *
 *     {@code @Override}
 *     public void bootstrap(Context context, Object input) {
 *         state.set(extractStateFromInput(input));
 *     }
 * }
 * }</pre>
 *
 * <p>Assume that this bootstrap function was provided for function type {@literal MyFunctionType},
 * and the id of the bootstrap function instance was {@literal id-13}. The function writes persisted
 * state of name {@literal my-state} using the given bootstrap data. After restoring a Stateful
 * Functions application from the savepoint generated using this bootstrap function, the stateful
 * function instance with address {@literal (MyFunctionType, id-13)} will already have state values
 * available under state name {@literal my-state}.
 */
public interface StateBootstrapFunction {
  /**
   * Bootstraps state for this function with the given bootstrap data. As shown in the class-level
   * example, state is written through persisted state fields declared on the implementing class.
   *
   * @param context context for the current bootstrap invocation. The provided context instance
   *     should not be used outside the scope of the current invocation.
   * @param bootstrapData input to be used for bootstrapping state.
   */
  void bootstrap(Context context, Object bootstrapData);
}
| 5,974 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/union/TaggedBootstrapDataTypeInfo.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.union;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.util.Preconditions;
/**
 * Type information for {@link TaggedBootstrapData}.
 *
 * <p>Carries the {@link TypeInformation} of every payload type in the union of registered
 * bootstrap datasets, in union-index order, and produces a {@link TaggedBootstrapDataSerializer}
 * built from the corresponding payload serializers.
 */
public final class TaggedBootstrapDataTypeInfo extends TypeInformation<TaggedBootstrapData> {

  private static final long serialVersionUID = 1L;

  /** Payload type information, positioned by union index. */
  private final List<TypeInformation<?>> payloadTypeInfos;

  TaggedBootstrapDataTypeInfo(List<TypeInformation<?>> payloadTypeInfos) {
    Preconditions.checkNotNull(payloadTypeInfos);
    Preconditions.checkArgument(!payloadTypeInfos.isEmpty());
    this.payloadTypeInfos = payloadTypeInfos;
  }

  @Override
  public TypeSerializer<TaggedBootstrapData> createSerializer(ExecutionConfig executionConfig) {
    return new TaggedBootstrapDataSerializer(
        payloadTypeInfos.stream()
            .map(payloadTypeInfo -> payloadTypeInfo.createSerializer(executionConfig))
            .collect(Collectors.toList()));
  }

  @Override
  public int getTotalFields() {
    return 1;
  }

  @Override
  public int getArity() {
    return 1;
  }

  @Override
  public boolean isBasicType() {
    return false;
  }

  @Override
  public boolean isKeyType() {
    return false;
  }

  @Override
  public boolean isTupleType() {
    return false;
  }

  @Override
  public Class<TaggedBootstrapData> getTypeClass() {
    return TaggedBootstrapData.class;
  }

  @Override
  public String toString() {
    // e.g. "TaggedBootstrapDataTypeInfo {String, Long }" — matches the historical format.
    return payloadTypeInfos.stream()
        .map(TypeInformation::toString)
        .collect(Collectors.joining(", ", "TaggedBootstrapDataTypeInfo {", " }"));
  }

  @Override
  public boolean canEqual(Object o) {
    return o instanceof TaggedBootstrapDataTypeInfo;
  }

  @Override
  public int hashCode() {
    // Objects.hash (not List#hashCode) is kept for compatibility with previously produced values.
    return Objects.hash(payloadTypeInfos);
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    // The class is final, so an instanceof check is equivalent to comparing runtime classes.
    if (!(o instanceof TaggedBootstrapDataTypeInfo)) {
      return false;
    }
    final TaggedBootstrapDataTypeInfo other = (TaggedBootstrapDataTypeInfo) o;
    return Objects.equals(payloadTypeInfos, other.payloadTypeInfos);
  }
}
| 5,975 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/union/BootstrapDataset.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.union;
import java.util.Objects;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.statefun.flink.state.processor.BootstrapDataRouterProvider;
/**
 * Represents a single registered bootstrap dataset: the user-provided Flink {@link DataSet}
 * paired with the {@link BootstrapDataRouterProvider} used to route its elements to bootstrap
 * functions.
 */
public class BootstrapDataset<T> {

  private final DataSet<T> dataSet;
  private final BootstrapDataRouterProvider<T> routerProvider;

  public BootstrapDataset(DataSet<T> dataSet, BootstrapDataRouterProvider<T> routerProvider) {
    Objects.requireNonNull(dataSet);
    Objects.requireNonNull(routerProvider);
    this.dataSet = dataSet;
    this.routerProvider = routerProvider;
  }

  /** Returns the user-provided bootstrap dataset. */
  DataSet<T> getDataSet() {
    return dataSet;
  }

  /** Returns the provider of the router for this dataset's elements. */
  BootstrapDataRouterProvider<T> getRouterProvider() {
    return routerProvider;
  }
}
| 5,976 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/union/BootstrapDatasetUnion.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.union;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.statefun.flink.state.processor.BootstrapDataRouterProvider;
import org.apache.flink.statefun.sdk.Address;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.statefun.sdk.io.Router;
import org.apache.flink.statefun.sdk.metrics.Metrics;
import org.apache.flink.util.Collector;
import org.apache.flink.util.Preconditions;
/** Utility to union multiple {@link BootstrapDataset}s to a single, tagged Flink dataset. */
public final class BootstrapDatasetUnion {

  /**
   * Unions multiple {@link BootstrapDataset}s to a single, tagged Flink dataset.
   *
   * <p>Concretely, this:
   *
   * <ul>
   *   <li>Assigns each {@code BootstrapDataset} a union index following its position in the given
   *       list.
   *   <li>Flat-maps each user-registered dataset into a dataset of {@link TaggedBootstrapData},
   *       tagging every element with its union index and with the target state bootstrap function
   *       address as designated by the dataset's router.
   *   <li>Builds a union type information over the data types of all registered datasets; the
   *       resulting union serializer multiplexes the payload types by prefixing each written
   *       record with its union index.
   * </ul>
   *
   * @param bootstrapDatasets pre-tagged, user-registered bootstrap datasets to union; must be
   *     non-empty.
   * @return the union, tagged bootstrap dataset.
   */
  public static DataSet<TaggedBootstrapData> apply(List<BootstrapDataset<?>> bootstrapDatasets) {
    Objects.requireNonNull(bootstrapDatasets);
    Preconditions.checkArgument(!bootstrapDatasets.isEmpty());

    final TypeInformation<TaggedBootstrapData> unionTypeInfo =
        createUnionTypeInfo(bootstrapDatasets);

    final List<DataSet<TaggedBootstrapData>> taggedDatasets =
        new ArrayList<>(bootstrapDatasets.size());
    // The position in the registration list is the union index.
    for (int unionIndex = 0; unionIndex < bootstrapDatasets.size(); unionIndex++) {
      taggedDatasets.add(
          toTaggedFlinkDataSet(bootstrapDatasets.get(unionIndex), unionIndex, unionTypeInfo));
    }
    return unionTaggedBootstrapDataSets(taggedDatasets);
  }

  /** Builds the union type information over the element types of all registered datasets. */
  private static TypeInformation<TaggedBootstrapData> createUnionTypeInfo(
      List<BootstrapDataset<?>> bootstrapDatasets) {
    final List<TypeInformation<?>> payloadTypeInfos = new ArrayList<>(bootstrapDatasets.size());
    for (BootstrapDataset<?> bootstrapDataset : bootstrapDatasets) {
      payloadTypeInfos.add(bootstrapDataset.getDataSet().getType());
    }
    return new TaggedBootstrapDataTypeInfo(payloadTypeInfos);
  }

  /** Routes and tags every element of a single registered dataset. */
  private static <T> DataSet<TaggedBootstrapData> toTaggedFlinkDataSet(
      BootstrapDataset<T> bootstrapDataset,
      int unionIndex,
      TypeInformation<TaggedBootstrapData> unionTypeInfo) {
    return bootstrapDataset
        .getDataSet()
        .flatMap(new BootstrapRouterFlatMap<>(bootstrapDataset.getRouterProvider(), unionIndex))
        .returns(unionTypeInfo);
  }

  /** Folds the tagged datasets into a single union dataset. */
  private static DataSet<TaggedBootstrapData> unionTaggedBootstrapDataSets(
      List<DataSet<TaggedBootstrapData>> taggedBootstrapDatasets) {
    DataSet<TaggedBootstrapData> union = null;
    for (DataSet<TaggedBootstrapData> taggedDataset : taggedBootstrapDatasets) {
      union = (union == null) ? taggedDataset : union.union(taggedDataset);
    }
    return union;
  }

  /** Flat map that routes each input element through the user's router, tagging the outputs. */
  private static class BootstrapRouterFlatMap<T>
      extends RichFlatMapFunction<T, TaggedBootstrapData> {

    private static final long serialVersionUID = 1L;

    private final BootstrapDataRouterProvider<T> routerProvider;
    private final int unionIndex;

    // Created lazily in open(); routers themselves need not be serializable.
    private transient Router<T> router;

    BootstrapRouterFlatMap(BootstrapDataRouterProvider<T> routerProvider, int unionIndex) {
      this.routerProvider = Objects.requireNonNull(routerProvider);
      Preconditions.checkArgument(unionIndex >= 0);
      this.unionIndex = unionIndex;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
      super.open(parameters);
      this.router = routerProvider.provide();
    }

    @Override
    public void flatMap(T data, Collector<TaggedBootstrapData> collector) throws Exception {
      final TaggingBootstrapDataCollector<T> taggingCollector =
          new TaggingBootstrapDataCollector<>(collector, unionIndex);
      router.route(data, taggingCollector);
    }
  }

  /**
   * A collector which tags each collected element with their union index and designated address.
   */
  private static class TaggingBootstrapDataCollector<T> implements Router.Downstream<T> {

    private final Collector<TaggedBootstrapData> out;
    private final int unionIndex;

    TaggingBootstrapDataCollector(Collector<TaggedBootstrapData> out, int unionIndex) {
      this.out = Objects.requireNonNull(out);
      this.unionIndex = unionIndex;
    }

    @Override
    public void forward(Address to, T message) {
      out.collect(new TaggedBootstrapData(to, message, unionIndex));
    }

    @Override
    public void forward(FunctionType functionType, String id, T message) {
      forward(new Address(functionType, id), message);
    }

    @Override
    public Metrics metrics() {
      // Metrics are not available in the batch bootstrap setting.
      throw new UnsupportedOperationException();
    }
  }
}
| 5,977 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/union/TaggedBootstrapData.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.union;
import java.util.List;
import java.util.Objects;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.statefun.flink.state.processor.StateBootstrapFunction;
import org.apache.flink.statefun.sdk.Address;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.util.Preconditions;
/**
 * Represents a single state bootstrap data entry, tagged with the address of its target {@link
 * StateBootstrapFunction} as well as the index of its payload serializer within a union of multiple
 * {@link BootstrapDataset}s.
 *
 * @see BootstrapDatasetUnion#apply(List)
 */
@SuppressWarnings("WeakerAccess")
public class TaggedBootstrapData {

  private static final Address DEFAULT_ADDRESS =
      new Address(new FunctionType("apache", "DEFAULT"), "DEFAULT");
  private static final Object DEFAULT_PAYLOAD = "DEFAULT_PAYLOAD";
  private static final int DEFAULT_UNION_INDEX = 0;

  // Address of the bootstrap function instance this entry is destined for.
  private Address target;

  // The raw user-provided bootstrap datum.
  private Object payload;

  // Index of the payload serializer within the union serializer.
  // See TaggedBootstrapDataSerializer.
  private int unionIndex;

  /** Creates a placeholder instance, used as a reuse object by the serializer. */
  public static TaggedBootstrapData createDefaultInstance() {
    return new TaggedBootstrapData(DEFAULT_ADDRESS, DEFAULT_PAYLOAD, DEFAULT_UNION_INDEX);
  }

  public TaggedBootstrapData(Address target, Object payload, int unionIndex) {
    this.target = Objects.requireNonNull(target);
    this.payload = Objects.requireNonNull(payload);
    Preconditions.checkArgument(unionIndex >= 0);
    this.unionIndex = unionIndex;
  }

  public Address getTarget() {
    return target;
  }

  public void setTarget(Address target) {
    this.target = target;
  }

  public Object getPayload() {
    return payload;
  }

  public void setPayload(Object payload) {
    this.payload = payload;
  }

  public int getUnionIndex() {
    return unionIndex;
  }

  public void setUnionIndex(int unionIndex) {
    this.unionIndex = unionIndex;
  }

  /** Deep-copies this entry, copying the payload with the given serializer. */
  public TaggedBootstrapData copy(TypeSerializer<Object> payloadSerializer) {
    final Address targetCopy = new Address(target.type(), target.id());
    return new TaggedBootstrapData(targetCopy, payloadSerializer.copy(payload), unionIndex);
  }
}
| 5,978 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/union/TaggedBootstrapDataSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.union;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.statefun.sdk.Address;
import org.apache.flink.statefun.sdk.FunctionType;
import org.apache.flink.util.Preconditions;
/** Serializer for {@link TaggedBootstrapData}. */
public final class TaggedBootstrapDataSerializer extends TypeSerializer<TaggedBootstrapData> {
private static final long serialVersionUID = 1L;
private final TypeSerializer<Object>[] payloadSerializers;
private transient Object[] reusablePayloadObjects;
TaggedBootstrapDataSerializer(List<TypeSerializer<?>> payloadSerializers) {
Preconditions.checkNotNull(payloadSerializers);
Preconditions.checkArgument(!payloadSerializers.isEmpty());
this.payloadSerializers = toPayloadSerializerIndexArray(payloadSerializers);
this.reusablePayloadObjects = createReusablePayloadObjectsIndexArray(this.payloadSerializers);
}
@Override
public boolean isImmutableType() {
for (TypeSerializer<?> serializer : payloadSerializers) {
if (!serializer.isImmutableType()) {
return false;
}
}
return true;
}
@Override
public TypeSerializer<TaggedBootstrapData> duplicate() {
List<TypeSerializer<?>> duplicates = new ArrayList<>(payloadSerializers.length);
boolean stateful = false;
for (TypeSerializer<?> serializer : payloadSerializers) {
TypeSerializer<?> duplicate = serializer.duplicate();
if (duplicate != serializer) {
stateful = true;
}
duplicates.add(duplicate);
}
if (!stateful) {
return this;
}
return new TaggedBootstrapDataSerializer(duplicates);
}
@Override
public TaggedBootstrapData createInstance() {
return TaggedBootstrapData.createDefaultInstance();
}
@Override
public void serialize(TaggedBootstrapData bootstrapData, DataOutputView dataOutputView)
throws IOException {
final int unionIndex = bootstrapData.getUnionIndex();
final Address address = bootstrapData.getTarget();
dataOutputView.writeInt(unionIndex);
dataOutputView.writeUTF(address.type().namespace());
dataOutputView.writeUTF(address.type().name());
dataOutputView.writeUTF(address.id());
payloadSerializers[unionIndex].serialize(bootstrapData.getPayload(), dataOutputView);
}
@Override
public TaggedBootstrapData deserialize(DataInputView dataInputView) throws IOException {
final int unionIndex = dataInputView.readInt();
final String targetFunctionTypeNamespace = dataInputView.readUTF();
final String targetFunctionTypeName = dataInputView.readUTF();
final String targetFunctionId = dataInputView.readUTF();
final Object payload = payloadSerializers[unionIndex].deserialize(dataInputView);
return new TaggedBootstrapData(
new Address(
new FunctionType(targetFunctionTypeNamespace, targetFunctionTypeName),
targetFunctionId),
payload,
unionIndex);
}
@Override
public TaggedBootstrapData deserialize(TaggedBootstrapData reuse, DataInputView dataInputView)
throws IOException {
final int unionIndex = dataInputView.readInt();
final String targetFunctionTypeNamespace = dataInputView.readUTF();
final String targetFunctionTypeName = dataInputView.readUTF();
final String targetFunctionId = dataInputView.readUTF();
reuse.setUnionIndex(unionIndex);
reuse.setTarget(
new Address(
new FunctionType(targetFunctionTypeNamespace, targetFunctionTypeName),
targetFunctionId));
reuse.setPayload(
payloadSerializers[unionIndex].deserialize(
reusablePayloadObjects[unionIndex], dataInputView));
return reuse;
}
@Override
public TaggedBootstrapData copy(TaggedBootstrapData bootstrapData) {
final TypeSerializer<Object> payloadSerializer =
payloadSerializers[bootstrapData.getUnionIndex()];
return bootstrapData.copy(payloadSerializer);
}
@Override
public TaggedBootstrapData copy(TaggedBootstrapData bootstrapData, TaggedBootstrapData reuse) {
final int unionIndex = bootstrapData.getUnionIndex();
final TypeSerializer<Object> payloadSerializer = payloadSerializers[unionIndex];
final Object reusedPayloadCopy =
payloadSerializer.copy(bootstrapData.getPayload(), reusablePayloadObjects[unionIndex]);
final Address address = bootstrapData.getTarget();
reuse.setTarget(new Address(address.type(), address.id()));
reuse.setPayload(reusedPayloadCopy);
reuse.setUnionIndex(bootstrapData.getUnionIndex());
return reuse;
}
@Override
public void copy(DataInputView source, DataOutputView target) throws IOException {
final int unionIndex = source.readInt();
target.writeInt(unionIndex);
// -- function type namespace
target.writeUTF(source.readUTF());
// -- function type name
target.writeUTF(source.readUTF());
// -- function id
target.writeUTF(source.readUTF());
payloadSerializers[unionIndex].copy(source, target);
}
@Override
public int getLength() {
return -1;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TaggedBootstrapDataSerializer that = (TaggedBootstrapDataSerializer) o;
return Arrays.equals(payloadSerializers, that.payloadSerializers);
}
@Override
public int hashCode() {
return Arrays.hashCode(payloadSerializers);
}
@Override
public TypeSerializerSnapshot<TaggedBootstrapData> snapshotConfiguration() {
  // This serializer only ferries transient bootstrap data between operators; it must
  // never end up in persistent state, so snapshotting is deliberately unsupported.
  throw new UnsupportedOperationException(
      "This serializer should not have been used for any persistent data.");
}
// Java deserialization hook: the reusable payload instances are not part of the serialized
// form, so they are rebuilt from the restored payload serializers after defaultReadObject().
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
  in.defaultReadObject();
  this.reusablePayloadObjects = createReusablePayloadObjectsIndexArray(payloadSerializers);
}
// Materializes the serializer list as an array indexed by union tag.
// The unchecked cast is safe: entries are only ever used through the Object-typed API.
@SuppressWarnings("unchecked")
private static TypeSerializer<Object>[] toPayloadSerializerIndexArray(
    List<TypeSerializer<?>> payloadSerializers) {
  final TypeSerializer<Object>[] result = new TypeSerializer[payloadSerializers.size()];
  return payloadSerializers.toArray(result);
}
/**
 * Creates one reusable payload instance per union tag, index-aligned with the serializers.
 *
 * <p>Bug fix: the original loop incremented {@code index} twice per iteration (once in the
 * for-update and once in the body), so every other slot stayed {@code null} and would later
 * surface as NPEs when those slots were passed to {@code deserialize}/{@code copy} as reuse
 * objects.
 */
private static Object[] createReusablePayloadObjectsIndexArray(
    TypeSerializer<?>[] payloadSerializers) {
  final Object[] result = new Object[payloadSerializers.length];
  for (int index = 0; index < payloadSerializers.length; index++) {
    result[index] = payloadSerializers[index].createInstance();
  }
  return result;
}
}
| 5,979 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/operator/FunctionsStateBootstrapOperator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.operator;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.state.KeyedStateBackend;
import org.apache.flink.state.api.output.SnapshotUtils;
import org.apache.flink.state.api.output.TaggedOperatorSubtaskState;
import org.apache.flink.statefun.flink.core.functions.FunctionGroupOperator;
import org.apache.flink.statefun.flink.core.message.MessageFactoryKey;
import org.apache.flink.statefun.flink.core.message.MessageFactoryType;
import org.apache.flink.statefun.flink.core.state.FlinkState;
import org.apache.flink.statefun.flink.core.state.State;
import org.apache.flink.statefun.flink.core.types.DynamicallyRegisteredTypes;
import org.apache.flink.statefun.flink.core.types.StaticallyRegisteredTypes;
import org.apache.flink.statefun.flink.state.processor.union.TaggedBootstrapData;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.BoundedOneInput;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
/**
 * An operator used to bootstrap function state for the {@link FunctionGroupOperator}.
 *
 * <p>Every incoming {@link TaggedBootstrapData} record is routed to the user-registered
 * bootstrap function for its target address; once the bounded input ends, the accumulated
 * keyed state is snapshotted and emitted as a {@link TaggedOperatorSubtaskState}.
 */
public final class FunctionsStateBootstrapOperator
    extends AbstractStreamOperator<TaggedOperatorSubtaskState>
    implements OneInputStreamOperator<TaggedBootstrapData, TaggedOperatorSubtaskState>,
        BoundedOneInput {
  private static final long serialVersionUID = 1L;

  // User registrations of per-function-type bootstrap logic.
  private final StateBootstrapFunctionRegistry stateBootstrapFunctionRegistry;
  // Timestamp stamped on the snapshot taken in endInput().
  private final long snapshotTimestamp;
  // Target location of the written snapshot data.
  private final Path snapshotPath;

  // Built in open(); transient because it binds to runtime-only state backends.
  private transient StateBootstrapper stateBootstrapper;

  /**
   * @param stateBootstrapFunctionRegistry registry of per-function-type bootstrap functions.
   * @param snapshotTimestamp timestamp to use for the produced snapshot.
   * @param snapshotPath path the snapshot is written to.
   */
  public FunctionsStateBootstrapOperator(
      StateBootstrapFunctionRegistry stateBootstrapFunctionRegistry,
      long snapshotTimestamp,
      Path snapshotPath) {
    this.stateBootstrapFunctionRegistry = stateBootstrapFunctionRegistry;
    this.snapshotTimestamp = snapshotTimestamp;
    this.snapshotPath = snapshotPath;
  }

  @Override
  public void open() throws Exception {
    super.open();
    // Guard so a repeated open() call does not rebuild the bootstrapper (and re-run
    // registry initialization) over the same state backend.
    if (this.stateBootstrapper == null) {
      final State stateAccessor = createStateAccessor(getRuntimeContext(), getKeyedStateBackend());
      this.stateBootstrapper = new StateBootstrapper(stateBootstrapFunctionRegistry, stateAccessor);
    }
  }

  // Routes one tagged bootstrap record to the bootstrap function of its target function type.
  @Override
  public void processElement(StreamRecord<TaggedBootstrapData> streamRecord) throws Exception {
    stateBootstrapper.apply(streamRecord.getValue());
  }

  @Override
  public void endInput() throws Exception {
    // bootstrap dataset is now completely processed;
    // take a snapshot of the function states
    final TaggedOperatorSubtaskState state =
        SnapshotUtils.snapshot(
            this,
            getRuntimeContext().getIndexOfThisSubtask(),
            snapshotTimestamp,
            // NOTE(review): the two positional booleans below are opaque here — verify
            // their meaning against the SnapshotUtils.snapshot signature of this Flink version.
            true,
            false,
            getContainingTask().getEnvironment().getTaskManagerInfo().getConfiguration(),
            snapshotPath);
    output.collect(new StreamRecord<>(state));
  }

  // Builds a StateFun state accessor over Flink keyed state. The message factory key uses
  // raw payloads and is only needed for type registration here.
  private static State createStateAccessor(
      RuntimeContext runtimeContext, KeyedStateBackend<Object> keyedStateBackend) {
    return new FlinkState(
        runtimeContext,
        keyedStateBackend,
        new DynamicallyRegisteredTypes(
            new StaticallyRegisteredTypes(
                MessageFactoryKey.forType(MessageFactoryType.WITH_RAW_PAYLOADS, null))));
  }
}
| 5,980 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/operator/StateBootstrapFunctionRegistry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.operator;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Nullable;
import org.apache.flink.statefun.flink.common.SetContextClassLoader;
import org.apache.flink.statefun.flink.core.state.FlinkStateBinder;
import org.apache.flink.statefun.flink.core.state.PersistedStates;
import org.apache.flink.statefun.flink.core.state.State;
import org.apache.flink.statefun.flink.state.processor.StateBootstrapFunction;
import org.apache.flink.statefun.flink.state.processor.StateBootstrapFunctionProvider;
import org.apache.flink.statefun.sdk.FunctionType;
/**
 * A registry that handles {@link StateBootstrapFunctionProvider} registrations.
 *
 * <p>The registry is serialized with the job graph; at runtime {@link #initialize(State)}
 * instantiates the registered bootstrap functions and binds them to Flink state.
 */
public final class StateBootstrapFunctionRegistry implements Serializable {

  private static final long serialVersionUID = 1L;

  /** State bootstrap function providers registered by the user. */
  private final Map<SerializableFunctionType, StateBootstrapFunctionProvider>
      stateBootstrapFunctionProviders = new HashMap<>();

  /**
   * Registry of instantiated, state-bound bootstrap functions. This is created only after {@link
   * #initialize(State)} is invoked during runtime.
   */
  private transient Map<FunctionType, StateBootstrapFunction> registry;

  /**
   * Registers a {@link StateBootstrapFunctionProvider}.
   *
   * @param functionType the type of the function that is being bootstrapped.
   * @param stateBootstrapFunctionProvider provider of the bootstrap function.
   * @throws IllegalStateException if the registry has already been initialized.
   * @throws IllegalArgumentException if a provider for the type was already registered.
   */
  public void register(
      FunctionType functionType, StateBootstrapFunctionProvider stateBootstrapFunctionProvider) {
    if (isInitialized()) {
      throw new IllegalStateException(
          "Cannot register bootstrap function providers after the registry is initialized.");
    }
    Objects.requireNonNull(functionType);
    Objects.requireNonNull(stateBootstrapFunctionProvider);

    final SerializableFunctionType key =
        SerializableFunctionType.fromNonSerializable(functionType);
    // Fix: check for a duplicate BEFORE inserting. The previous implementation put first and
    // checked the returned value, so a rejected duplicate registration had already replaced
    // the earlier provider, leaving the registry in an inconsistent state when it threw.
    if (stateBootstrapFunctionProviders.containsKey(key)) {
      throw new IllegalArgumentException(
          String.format(
              "A StateBootstrapFunctionProvider for function type %s was previously defined.",
              functionType));
    }
    stateBootstrapFunctionProviders.put(key, stateBootstrapFunctionProvider);
  }

  /**
   * Returns number of registrations.
   *
   * @return number of registrations.
   */
  public int numRegistrations() {
    return stateBootstrapFunctionProviders.size();
  }

  /**
   * Initializes the registry. This instantiates all registered state bootstrap functions, and binds
   * them with Flink state.
   *
   * @param stateAccessor accessor for Flink state to bind bootstrap functions with.
   */
  void initialize(State stateAccessor) {
    this.registry = new HashMap<>(stateBootstrapFunctionProviders.size());
    for (Map.Entry<SerializableFunctionType, StateBootstrapFunctionProvider> entry :
        stateBootstrapFunctionProviders.entrySet()) {
      final FunctionType functionType = entry.getKey().toNonSerializable();
      final StateBootstrapFunction bootstrapFunction =
          entry.getValue().bootstrapFunctionOfType(functionType);
      final FlinkStateBinder stateBinder = new FlinkStateBinder(stateAccessor, functionType);
      registry.put(functionType, bindState(bootstrapFunction, stateBinder));
    }
  }

  /**
   * Retrieves the bootstrap function for a given function type, or {@code null} if none was
   * registered for it.
   */
  @Nullable
  StateBootstrapFunction getBootstrapFunction(FunctionType functionType) {
    if (!isInitialized()) {
      throw new IllegalStateException("The registry must be initialized first.");
    }
    return registry.get(functionType);
  }

  // Reflectively discovers the function's persisted-state fields and binds them to Flink
  // state, with the function's own class loader installed as the context class loader.
  private static StateBootstrapFunction bindState(
      StateBootstrapFunction bootstrapFunction, FlinkStateBinder stateBinder) {
    try (SetContextClassLoader ignored = new SetContextClassLoader(bootstrapFunction)) {
      PersistedStates.findReflectivelyAndBind(bootstrapFunction, stateBinder);
      return bootstrapFunction;
    }
  }

  private boolean isInitialized() {
    return registry != null;
  }

  /**
   * A serializable (namespace, name) pair mirroring {@link FunctionType}, used as the map key
   * so the registry itself can be serialized.
   */
  private static final class SerializableFunctionType implements Serializable {

    private static final long serialVersionUID = 1L;

    private final String namespace;
    private final String name;

    static SerializableFunctionType fromNonSerializable(FunctionType functionType) {
      return new SerializableFunctionType(functionType.namespace(), functionType.name());
    }

    private SerializableFunctionType(String namespace, String name) {
      this.namespace = Objects.requireNonNull(namespace);
      this.name = Objects.requireNonNull(name);
    }

    private FunctionType toNonSerializable() {
      return new FunctionType(namespace, name);
    }

    @Override
    public int hashCode() {
      return Objects.hash(namespace, name);
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      SerializableFunctionType that = (SerializableFunctionType) o;
      return namespace.equals(that.namespace) && name.equals(that.name);
    }
  }
}
| 5,981 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor | Create_ds/flink-statefun/statefun-flink/statefun-flink-state-processor/src/main/java/org/apache/flink/statefun/flink/state/processor/operator/StateBootstrapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.state.processor.operator;
import java.util.Objects;
import org.apache.flink.statefun.flink.core.state.State;
import org.apache.flink.statefun.flink.state.processor.Context;
import org.apache.flink.statefun.flink.state.processor.StateBootstrapFunction;
import org.apache.flink.statefun.flink.state.processor.union.TaggedBootstrapData;
import org.apache.flink.statefun.sdk.Address;
/**
 * Core logic for bootstrapping function state using user-provided state bootstrap functions.
 *
 * <p>For each record, the keyed state backend is scoped to the record's target address and the
 * bootstrap function registered for that address's function type is invoked with the payload.
 */
final class StateBootstrapper {

  private final StateBootstrapFunctionRegistry registry;
  private final State flinkState;
  private final ReusableContext reusableContext;

  StateBootstrapper(StateBootstrapFunctionRegistry bootstrapFunctionRegistry, State stateAccessor) {
    this.registry = Objects.requireNonNull(bootstrapFunctionRegistry);
    this.flinkState = Objects.requireNonNull(stateAccessor);
    this.reusableContext = new ReusableContext();
    bootstrapFunctionRegistry.initialize(stateAccessor);
  }

  /** Applies one bootstrap record: scopes state to its target and invokes the function. */
  void apply(TaggedBootstrapData bootstrapData) {
    final Address target = bootstrapData.getTarget();
    flinkState.setCurrentKey(target);
    reusableContext.setCurrentAddress(target);
    final StateBootstrapFunction bootstrapFunction =
        registry.getBootstrapFunction(target.type());
    if (bootstrapFunction == null) {
      throw new IllegalArgumentException(
          "A bootstrap input was targeted for function of type "
              + target.type()
              + ", but there was no StateBootstrapFunctionProvider registered for the type.");
    }
    bootstrapFunction.bootstrap(reusableContext, bootstrapData.getPayload());
  }

  /** Mutable context reused across records; only the current address changes per record. */
  private static class ReusableContext implements Context {

    private Address currentAddress;

    @Override
    public Address self() {
      if (currentAddress == null) {
        throw new IllegalStateException("Current address is not set.");
      }
      return currentAddress;
    }

    private void setCurrentAddress(Address address) {
      this.currentAddress = address;
    }
  }
}
| 5,982 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io/datastream/SourceSinkModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.io.datastream;
import com.google.auto.service.AutoService;
import java.util.Map;
import org.apache.flink.statefun.flink.io.spi.FlinkIoModule;
import org.apache.flink.statefun.flink.io.spi.SinkProvider;
import org.apache.flink.statefun.flink.io.spi.SourceProvider;
import org.apache.flink.statefun.sdk.io.EgressSpec;
import org.apache.flink.statefun.sdk.io.IngressSpec;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
/**
 * I/O module that translates {@link SourceFunctionSpec} and {@link SinkFunctionSpec} into
 * their wrapped Flink {@link SourceFunction} / {@link SinkFunction} delegates.
 */
@AutoService(FlinkIoModule.class)
public class SourceSinkModule implements FlinkIoModule {

  @Override
  public void configure(Map<String, String> globalConfiguration, Binder binder) {
    // A single provider instance handles both directions.
    SinkSourceProvider provider = new SinkSourceProvider();
    binder.bindSourceProvider(SourceFunctionSpec.TYPE, provider);
    binder.bindSinkProvider(SinkFunctionSpec.TYPE, provider);
  }

  private static final class SinkSourceProvider implements SourceProvider, SinkProvider {

    @Override
    public <T> SourceFunction<T> forSpec(IngressSpec<T> spec) {
      if (!(spec instanceof SourceFunctionSpec)) {
        throw new IllegalStateException("spec " + spec + " is not of type SourceFunctionSpec");
      }
      SourceFunctionSpec<T> casted = (SourceFunctionSpec<T>) spec;
      return casted.delegate();
    }

    @Override
    public <T> SinkFunction<T> forSpec(EgressSpec<T> spec) {
      if (!(spec instanceof SinkFunctionSpec)) {
        // Fixed copy/paste bug: the message previously named "SourceFunctionSpec",
        // which misreported the expected type for egress specs.
        throw new IllegalStateException("spec " + spec + " is not of type SinkFunctionSpec");
      }
      SinkFunctionSpec<T> casted = (SinkFunctionSpec<T>) spec;
      return casted.delegate();
    }
  }
}
| 5,983 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io/datastream/SourceFunctionSpec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.io.datastream;
import java.io.Serializable;
import java.util.Objects;
import org.apache.flink.statefun.sdk.IngressType;
import org.apache.flink.statefun.sdk.io.IngressIdentifier;
import org.apache.flink.statefun.sdk.io.IngressSpec;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
/**
 * An {@link IngressSpec} that can run any Apache Flink {@link SourceFunction}.
 *
 * <p>The spec is {@code Serializable} so it can be shipped with the job; the wrapped source
 * function must itself be serializable for that to succeed.
 *
 * @param <T> The input type consumed by the source.
 */
public final class SourceFunctionSpec<T> implements IngressSpec<T>, Serializable {
  private static final long serialVersionUID = 1;

  // Type tag matched by SourceSinkModule when translating this spec back into a Flink source.
  static final IngressType TYPE =
      new IngressType("org.apache.flink.statefun.flink.io", "source-function-spec");

  private final IngressIdentifier<T> id;
  private final SourceFunction<T> delegate;

  /**
   * @param id A unique ingress identifier.
   * @param delegate The underlying source function that this spec will delegate to at runtime.
   */
  public SourceFunctionSpec(IngressIdentifier<T> id, SourceFunction<T> delegate) {
    this.id = Objects.requireNonNull(id);
    this.delegate = Objects.requireNonNull(delegate);
  }

  @Override
  public final IngressIdentifier<T> id() {
    return id;
  }

  @Override
  public final IngressType type() {
    return TYPE;
  }

  // The wrapped Flink source function; package-private for the translating module.
  SourceFunction<T> delegate() {
    return delegate;
  }
}
| 5,984 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io/datastream/SinkFunctionSpec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.io.datastream;
import java.io.Serializable;
import java.util.Objects;
import org.apache.flink.statefun.sdk.EgressType;
import org.apache.flink.statefun.sdk.io.EgressIdentifier;
import org.apache.flink.statefun.sdk.io.EgressSpec;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
/**
 * An {@link EgressSpec} that can run any Apache Flink {@link SinkFunction}.
 *
 * <p>The spec is {@code Serializable} so it can be shipped with the job; the wrapped sink
 * function must itself be serializable for that to succeed.
 *
 * @param <T> The type of records accepted by the sink.
 */
public final class SinkFunctionSpec<T> implements EgressSpec<T>, Serializable {
  private static final long serialVersionUID = 1;

  // Type tag matched by SourceSinkModule when translating this spec back into a Flink sink.
  static final EgressType TYPE =
      new EgressType("org.apache.flink.statefun.flink.io", "sink-function-spec");

  private final EgressIdentifier<T> id;
  private final SinkFunction<T> delegate;

  /**
   * @param id A unique egress identifier.
   * @param delegate The underlying sink that the egress will delegate to at runtime.
   */
  public SinkFunctionSpec(EgressIdentifier<T> id, SinkFunction<T> delegate) {
    this.id = Objects.requireNonNull(id);
    this.delegate = Objects.requireNonNull(delegate);
  }

  @Override
  public final EgressIdentifier<T> id() {
    return id;
  }

  @Override
  public final EgressType type() {
    return TYPE;
  }

  // The wrapped Flink sink function; package-private for the translating module.
  SinkFunction<T> delegate() {
    return delegate;
  }
}
| 5,985 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io/spi/SinkProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.io.spi;
import org.apache.flink.statefun.sdk.io.EgressSpec;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
/** Translates an {@link EgressSpec} into the Flink {@link SinkFunction} that implements it. */
public interface SinkProvider {
  // Returns the sink function backing the given egress spec.
  <T> SinkFunction<T> forSpec(EgressSpec<T> spec);
}
| 5,986 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io/spi/SourceProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.io.spi;
import org.apache.flink.statefun.sdk.io.IngressSpec;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
/** Translates an {@link IngressSpec} into the Flink {@link SourceFunction} that implements it. */
public interface SourceProvider {
  // Returns the source function backing the given ingress spec.
  <T> SourceFunction<T> forSpec(IngressSpec<T> spec);
}
| 5,987 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io | Create_ds/flink-statefun/statefun-flink/statefun-flink-io/src/main/java/org/apache/flink/statefun/flink/io/spi/FlinkIoModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.io.spi;
import java.util.Map;
import org.apache.flink.statefun.sdk.EgressType;
import org.apache.flink.statefun.sdk.IngressType;
/**
 * A pluggable I/O module: implementations bind ingress/egress types to the providers that
 * translate specs of those types into Flink sources and sinks.
 */
public interface FlinkIoModule {
  /**
   * @param globalConfiguration global key/value configuration of the application.
   * @param binder callback used to register source and sink providers.
   */
  void configure(Map<String, String> globalConfiguration, Binder binder);

  /** Registration callback handed to {@link #configure}. */
  interface Binder {
    void bindSourceProvider(IngressType type, SourceProvider provider);
    void bindSinkProvider(EgressType type, SinkProvider provider);
  }
}
| 5,988 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness/Harness.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.harness;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.CoreOptions;
import org.apache.flink.runtime.jobgraph.SavepointConfigOptions;
import org.apache.flink.statefun.flink.core.StatefulFunctionsConfig;
import org.apache.flink.statefun.flink.core.StatefulFunctionsConfigValidator;
import org.apache.flink.statefun.flink.core.StatefulFunctionsJob;
import org.apache.flink.statefun.flink.core.StatefulFunctionsUniverse;
import org.apache.flink.statefun.flink.core.StatefulFunctionsUniverseProvider;
import org.apache.flink.statefun.flink.core.message.MessageFactoryType;
import org.apache.flink.statefun.flink.core.spi.Modules;
import org.apache.flink.statefun.flink.harness.io.ConsumingEgressSpec;
import org.apache.flink.statefun.flink.harness.io.SerializableConsumer;
import org.apache.flink.statefun.flink.harness.io.SerializableSupplier;
import org.apache.flink.statefun.flink.harness.io.SupplyingIngressSpec;
import org.apache.flink.statefun.flink.io.datastream.SourceFunctionSpec;
import org.apache.flink.statefun.sdk.io.EgressIdentifier;
import org.apache.flink.statefun.sdk.io.EgressSpec;
import org.apache.flink.statefun.sdk.io.IngressIdentifier;
import org.apache.flink.statefun.sdk.io.IngressSpec;
import org.apache.flink.streaming.api.environment.ExecutionCheckpointingOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
public class Harness {
private final Configuration flinkConfig;
private final Map<String, String> globalConfigurations = new HashMap<>();
private final Map<IngressIdentifier<?>, IngressSpec<?>> overrideIngress = new HashMap<>();
private final Map<EgressIdentifier<?>, EgressSpec<?>> overrideEgress = new HashMap<>();
public Harness() {
flinkConfig = new Configuration();
}
public <T> Harness withSupplyingIngress(
IngressIdentifier<T> identifier, SerializableSupplier<T> supplier) {
Objects.requireNonNull(identifier);
Objects.requireNonNull(supplier);
// TODO: consider closure cleaner
overrideIngress.put(identifier, new SupplyingIngressSpec<>(identifier, supplier, 0));
return this;
}
public <T> Harness withFlinkSourceFunction(
IngressIdentifier<T> identifier, SourceFunction<T> supplier) {
Objects.requireNonNull(identifier);
Objects.requireNonNull(supplier);
overrideIngress.put(identifier, new SourceFunctionSpec<>(identifier, supplier));
return this;
}
public <T> Harness withConsumingEgress(
EgressIdentifier<T> identifier, SerializableConsumer<T> consumer) {
Objects.requireNonNull(identifier);
Objects.requireNonNull(consumer);
// TODO: consider closure cleaner
overrideEgress.put(identifier, new ConsumingEgressSpec<>(identifier, consumer));
return this;
}
public <T> Harness withPrintingEgress(EgressIdentifier<T> identifier) {
return withConsumingEgress(identifier, new PrintingConsumer<>());
}
public Harness withKryoMessageSerializer() {
flinkConfig.set(
StatefulFunctionsConfig.USER_MESSAGE_SERIALIZER, MessageFactoryType.WITH_KRYO_PAYLOADS);
return this;
}
/** Set the name used in the Flink UI. */
public Harness withFlinkJobName(String flinkJobName) {
flinkConfig.set(StatefulFunctionsConfig.FLINK_JOB_NAME, flinkJobName);
return this;
}
/** Set a flink-conf configuration. */
public Harness withConfiguration(String key, String value) {
flinkConfig.setString(key, value);
return this;
}
/** Set the desired parallelism. */
public Harness withParallelism(int parallelism) {
flinkConfig.setInteger(CoreOptions.DEFAULT_PARALLELISM, parallelism);
return this;
}
/**
* Sets a global configuration available in the {@link
* org.apache.flink.statefun.sdk.spi.StatefulFunctionModule} on configure.
*/
public Harness withGlobalConfiguration(String key, String value) {
globalConfigurations.put(key, value);
return this;
}
/** Sets the path to the savepoint location to restore from, when this harness starts. */
public Harness withSavepointLocation(String savepointLocation) {
Objects.requireNonNull(savepointLocation);
flinkConfig.set(SavepointConfigOptions.SAVEPOINT_PATH, savepointLocation);
return this;
}
public void start() throws Exception {
configureStrictlyRequiredFlinkConfigs(flinkConfig);
final int parallelism = getParallelism(flinkConfig);
StreamExecutionEnvironment env =
StreamExecutionEnvironment.createLocalEnvironment(parallelism, flinkConfig);
// Configure will change the value of a setting only if a corresponding option was set in the
// underlying configuration. If a key is not present, the current value of a field will remain
// untouched.
env.configure(flinkConfig, Thread.currentThread().getContextClassLoader());
StatefulFunctionsConfig stateFunConfig =
StatefulFunctionsConfig.fromFlinkConfiguration(flinkConfig);
stateFunConfig.addAllGlobalConfigurations(globalConfigurations);
stateFunConfig.setProvider(new HarnessProvider(overrideIngress, overrideEgress));
StatefulFunctionsJob.main(env, stateFunConfig);
}
private static int getParallelism(Configuration config) {
final int parallelism;
if (config.contains(CoreOptions.DEFAULT_PARALLELISM)) {
parallelism = config.getInteger(CoreOptions.DEFAULT_PARALLELISM);
} else {
parallelism = Runtime.getRuntime().availableProcessors();
}
return parallelism;
}
  /**
   * A {@link StatefulFunctionsUniverseProvider} that loads the modules found on the classpath and
   * then overwrites selected ingresses/egresses with the harness-supplied replacements.
   */
  private static final class HarnessProvider implements StatefulFunctionsUniverseProvider {
    private static final long serialVersionUID = 1;
    // specs that replace, by identifier, whatever the classpath modules registered
    private final Map<IngressIdentifier<?>, IngressSpec<?>> ingressToReplace;
    private final Map<EgressIdentifier<?>, EgressSpec<?>> egressToReplace;
    HarnessProvider(
        Map<IngressIdentifier<?>, IngressSpec<?>> dummyIngress,
        Map<EgressIdentifier<?>, EgressSpec<?>> dummyEgress) {
      this.ingressToReplace = dummyIngress;
      this.egressToReplace = dummyEgress;
    }
    @Override
    public StatefulFunctionsUniverse get(
        ClassLoader classLoader, StatefulFunctionsConfig configuration) {
      Modules modules = Modules.loadFromClassPath(configuration);
      StatefulFunctionsUniverse universe = modules.createStatefulFunctionsUniverse();
      // overwrite any module-provided spec that shares an identifier with an override
      ingressToReplace.forEach((id, spec) -> universe.ingress().put(id, spec));
      egressToReplace.forEach((id, spec) -> universe.egress().put(id, spec));
      return universe;
    }
  }
  /** A serializable consumer that prints every consumed element to standard output. */
  private static final class PrintingConsumer<T> implements SerializableConsumer<T> {
    private static final long serialVersionUID = 1;
    @Override
    public void accept(T t) {
      System.out.println(t);
    }
  }
  /**
   * Applies the Flink settings that Stateful Functions strictly requires, overriding any values
   * previously present: parent-first classloading patterns for the SDK/runtime classes, and the
   * maximum number of concurrent checkpoints.
   */
  private static void configureStrictlyRequiredFlinkConfigs(Configuration flinkConfig) {
    flinkConfig.set(
        CoreOptions.ALWAYS_PARENT_FIRST_LOADER_PATTERNS_ADDITIONAL,
        StatefulFunctionsConfigValidator.PARENT_FIRST_CLASSLOADER_PATTERNS);
    flinkConfig.set(
        ExecutionCheckpointingOptions.MAX_CONCURRENT_CHECKPOINTS,
        StatefulFunctionsConfigValidator.MAX_CONCURRENT_CHECKPOINTS);
  }
}
| 5,989 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness/io/ConsumingSink.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.harness.io;
import java.util.Objects;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
/**
 * A Flink sink that forwards every record to a user-supplied {@link SerializableConsumer}.
 *
 * <p>Used by the harness to expose egress output (e.g. printing to stdout) without a real
 * external system.
 */
final class ConsumingSink<T> extends RichSinkFunction<T> {
  private static final long serialVersionUID = 1;
  private final SerializableConsumer<T> consumer;
  ConsumingSink(SerializableConsumer<T> consumer) {
    this.consumer = Objects.requireNonNull(consumer);
  }
  @Override
  public void invoke(T value, Context context) {
    consumer.accept(value);
  }
}
| 5,990 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness/io/SupplyingIngressSpec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.harness.io;
import java.io.Serializable;
import java.util.Objects;
import org.apache.flink.statefun.sdk.IngressType;
import org.apache.flink.statefun.sdk.io.IngressIdentifier;
import org.apache.flink.statefun.sdk.io.IngressSpec;
/**
 * An {@link IngressSpec} backed by a {@link SerializableSupplier}: the harness repeatedly invokes
 * the supplier to produce ingress records, optionally pausing between records.
 *
 * @param <T> the type of the produced records.
 */
public final class SupplyingIngressSpec<T> implements IngressSpec<T>, Serializable {
  private static final long serialVersionUID = 1;
  private final IngressIdentifier<T> id;
  private final SerializableSupplier<T> supplier;
  // pause between two produced records; a non-positive value disables the delay
  private final long delayInMilliseconds;
  /**
   * @param id the non-null identifier of this ingress.
   * @param supplier the non-null supplier that produces the records.
   * @param productionDelayInMilliseconds pause between two records; {@code <= 0} means no pause.
   */
  public SupplyingIngressSpec(
      IngressIdentifier<T> id,
      SerializableSupplier<T> supplier,
      long productionDelayInMilliseconds) {
    this.id = Objects.requireNonNull(id);
    this.supplier = Objects.requireNonNull(supplier);
    this.delayInMilliseconds = productionDelayInMilliseconds;
  }
  @Override
  public IngressIdentifier<T> id() {
    return id;
  }
  @Override
  public IngressType type() {
    return HarnessConstants.SUPPLYING_INGRESS_TYPE;
  }
  SerializableSupplier<T> supplier() {
    return supplier;
  }
  long delayInMilliseconds() {
    return delayInMilliseconds;
  }
}
| 5,991 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness/io/SupplyingSource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.harness.io;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
/**
 * A parallel Flink source that emits elements obtained from a {@link SerializableSupplier} until
 * cancelled, optionally sleeping between elements.
 */
final class SupplyingSource<T> extends RichParallelSourceFunction<T> {
  private static final long serialVersionUID = 1;
  private final SerializableSupplier<T> supplier;
  private final long delayInMilliseconds;
  // set by cancel() from a different thread; volatile so run() observes the change promptly
  private transient volatile boolean done;
  SupplyingSource(SerializableSupplier<T> supplier, long delayInMilliseconds) {
    this.supplier = supplier;
    this.delayInMilliseconds = delayInMilliseconds;
  }
  @Override
  public void run(SourceContext<T> sourceContext) throws Exception {
    while (!done) {
      final T nextElement = supplier.get();
      // emit under the checkpoint lock so record emission and checkpointing do not interleave
      synchronized (sourceContext.getCheckpointLock()) {
        sourceContext.collect(nextElement);
      }
      if (delayInMilliseconds > 0) {
        Thread.sleep(delayInMilliseconds);
      }
    }
  }
  @Override
  public void cancel() {
    done = true;
  }
}
| 5,992 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness/io/ConsumingEgressSpec.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.harness.io;
import java.io.Serializable;
import java.util.Objects;
import org.apache.flink.statefun.sdk.EgressType;
import org.apache.flink.statefun.sdk.io.EgressIdentifier;
import org.apache.flink.statefun.sdk.io.EgressSpec;
/**
 * An {@link EgressSpec} backed by a {@link SerializableConsumer}: every record routed to this
 * egress is handed to the consumer.
 *
 * @param <T> the type of the consumed records.
 */
public final class ConsumingEgressSpec<T> implements EgressSpec<T>, Serializable {
  private static final long serialVersionUID = 1;
  private final EgressIdentifier<T> id;
  private final SerializableConsumer<T> consumer;
  /**
   * @param id the non-null identifier of this egress.
   * @param consumer the non-null consumer that receives the records.
   */
  public ConsumingEgressSpec(EgressIdentifier<T> id, SerializableConsumer<T> consumer) {
    this.id = Objects.requireNonNull(id);
    this.consumer = Objects.requireNonNull(consumer);
  }
  @Override
  public EgressIdentifier<T> id() {
    return id;
  }
  @Override
  public EgressType type() {
    return HarnessConstants.CONSUMING_EGRESS_TYPE;
  }
  SerializableConsumer<T> consumer() {
    return consumer;
  }
}
| 5,993 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness/io/SerializableConsumer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.harness.io;
import java.io.Serializable;
import java.util.function.Consumer;
/** A {@link Consumer} that is also {@link Serializable}, so it can be shipped with a Flink job. */
public interface SerializableConsumer<T> extends Serializable, Consumer<T> {}
| 5,994 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness/io/HarnessIoModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.harness.io;
import com.google.auto.service.AutoService;
import java.util.Map;
import org.apache.flink.statefun.flink.io.spi.FlinkIoModule;
import org.apache.flink.statefun.sdk.io.EgressSpec;
import org.apache.flink.statefun.sdk.io.IngressSpec;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
@AutoService(FlinkIoModule.class)
public class HarnessIoModule implements FlinkIoModule {
  /**
   * Registers the source/sink providers backing the harness' supplying ingress type and consuming
   * egress type.
   */
  @Override
  public void configure(Map<String, String> globalConfiguration, Binder binder) {
    binder.bindSourceProvider(
        HarnessConstants.SUPPLYING_INGRESS_TYPE, HarnessIoModule::supplyingIngressSpec);
    binder.bindSinkProvider(
        HarnessConstants.CONSUMING_EGRESS_TYPE, HarnessIoModule::consumingEgressSpec);
  }
  /**
   * Creates a Flink source for a {@link SupplyingIngressSpec}.
   *
   * @throws IllegalArgumentException if {@code spec} is not a {@link SupplyingIngressSpec}.
   */
  private static <T> SourceFunction<T> supplyingIngressSpec(IngressSpec<T> spec) {
    // validate the spec type explicitly (mirrors consumingEgressSpec) instead of letting an
    // unchecked cast fail with a ClassCastException later
    if (!(spec instanceof SupplyingIngressSpec)) {
      throw new IllegalArgumentException("Unable to provide a source for " + spec);
    }
    @SuppressWarnings("unchecked")
    SupplyingIngressSpec<T> casted = (SupplyingIngressSpec<T>) spec;
    return new SupplyingSource<>(casted.supplier(), casted.delayInMilliseconds());
  }
  /**
   * Creates a Flink sink for a {@link ConsumingEgressSpec}.
   *
   * @throws IllegalArgumentException if {@code spec} is not a {@link ConsumingEgressSpec}.
   */
  private static <T> SinkFunction<T> consumingEgressSpec(EgressSpec<T> spec) {
    if (!(spec instanceof ConsumingEgressSpec)) {
      throw new IllegalArgumentException("Unable to provide a sink for " + spec);
    }
    ConsumingEgressSpec<T> casted = (ConsumingEgressSpec<T>) spec;
    return new ConsumingSink<>(casted.consumer());
  }
}
| 5,995 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness/io/SerializableSupplier.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.harness.io;
import java.io.Serializable;
import java.util.function.Supplier;
/** A {@link Supplier} that is also {@link Serializable}, so it can be shipped with a Flink job. */
public interface SerializableSupplier<T> extends Serializable, Supplier<T> {}
| 5,996 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness | Create_ds/flink-statefun/statefun-flink/statefun-flink-harness/src/main/java/org/apache/flink/statefun/flink/harness/io/HarnessConstants.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.harness.io;
import org.apache.flink.statefun.sdk.EgressType;
import org.apache.flink.statefun.sdk.IngressType;
@SuppressWarnings("WeakerAccess")
public class HarnessConstants {
  /** Ingress type handled by the harness: records produced by a supplier. */
  public static final IngressType SUPPLYING_INGRESS_TYPE =
      new IngressType("org.apache.flink.statefun.flink.harness", "supplier");
  /** Egress type handled by the harness: records forwarded to a consumer. */
  public static final EgressType CONSUMING_EGRESS_TYPE =
      new EgressType("org.apache.flink.statefun.flink.harness", "consuming-egress");
  /** Non-instantiable constants holder. */
  private HarnessConstants() {}
}
| 5,997 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-io-bundle/src/test/java/org/apache/flink/statefun/flink/io | Create_ds/flink-statefun/statefun-flink/statefun-flink-io-bundle/src/test/java/org/apache/flink/statefun/flink/io/kinesis/KinesisSinkProviderTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.io.kinesis;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertThat;
import org.apache.flink.statefun.sdk.io.EgressIdentifier;
import org.apache.flink.statefun.sdk.kinesis.auth.AwsCredentials;
import org.apache.flink.statefun.sdk.kinesis.egress.EgressRecord;
import org.apache.flink.statefun.sdk.kinesis.egress.KinesisEgressBuilder;
import org.apache.flink.statefun.sdk.kinesis.egress.KinesisEgressSerializer;
import org.apache.flink.statefun.sdk.kinesis.egress.KinesisEgressSpec;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.connectors.kinesis.FlinkKinesisProducer;
import org.junit.Test;
/** Tests for {@link KinesisSinkProvider}. */
public class KinesisSinkProviderTest {
  private static final EgressIdentifier<String> ID =
      new EgressIdentifier<>("namespace", "name", String.class);
  /** The provider should produce a {@link FlinkKinesisProducer} for a Kinesis egress spec. */
  @Test
  public void exampleUsage() {
    final KinesisEgressSpec<String> kinesisEgressSpec =
        KinesisEgressBuilder.forIdentifier(ID)
            .withAwsRegion("us-west-1")
            .withAwsCredentials(AwsCredentials.basic("access-key-id", "secret-access-key"))
            .withSerializer(TestSerializer.class)
            .build();
    final KinesisSinkProvider provider = new KinesisSinkProvider();
    final SinkFunction<String> sink = provider.forSpec(kinesisEgressSpec);
    assertThat(sink, instanceOf(FlinkKinesisProducer.class));
  }
  /** A stub serializer; only the spec-to-sink wiring is under test, never serialization. */
  private static final class TestSerializer implements KinesisEgressSerializer<String> {
    private static final long serialVersionUID = 1L;
    @Override
    public EgressRecord serialize(String value) {
      // never invoked by these tests
      return null;
    }
  }
}
| 5,998 |
0 | Create_ds/flink-statefun/statefun-flink/statefun-flink-io-bundle/src/test/java/org/apache/flink/statefun/flink/io | Create_ds/flink-statefun/statefun-flink/statefun-flink-io-bundle/src/test/java/org/apache/flink/statefun/flink/io/kinesis/AwsAuthConfigPropertiesTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.statefun.flink.io.kinesis;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
import static org.hamcrest.collection.IsMapContaining.hasEntry;
import java.io.Closeable;
import java.util.Properties;
import org.apache.flink.kinesis.shaded.com.amazonaws.SDKGlobalConfiguration;
import org.apache.flink.statefun.sdk.kinesis.auth.AwsCredentials;
import org.apache.flink.statefun.sdk.kinesis.auth.AwsRegion;
import org.apache.flink.streaming.connectors.kinesis.config.AWSConfigConstants;
import org.junit.Test;
/**
 * Tests that {@link AwsAuthConfigProperties} translates SDK region / credential settings into the
 * exact {@link Properties} expected by the Flink Kinesis consumer and producer.
 */
public class AwsAuthConfigPropertiesTest {
  @Test
  public void awsDefaultRegionConsumerProperties() {
    // TODO Flink doesn't support auto region detection from the AWS provider chain,
    // TODO so we always have to have the region settings available in the client side
    // TODO this should no longer be a restriction once we fix this in the Flink connector side
    try (final ScopedSystemProperty awsRegionSystemProps =
        new ScopedSystemProperty(SDKGlobalConfiguration.AWS_REGION_SYSTEM_PROPERTY, "us-west-1")) {
      final Properties properties =
          AwsAuthConfigProperties.forAwsRegionConsumerProps(AwsRegion.fromDefaultProviderChain());
      assertThat(properties.entrySet(), hasSize(1));
      assertThat(properties, hasEntry(AWSConfigConstants.AWS_REGION, "us-west-1"));
    }
  }
  @Test
  public void awsSpecificRegionConsumerProperties() {
    final Properties properties =
        AwsAuthConfigProperties.forAwsRegionConsumerProps(AwsRegion.ofId("us-east-2"));
    assertThat(properties.entrySet(), hasSize(1));
    assertThat(properties, hasEntry(AWSConfigConstants.AWS_REGION, "us-east-2"));
  }
  @Test
  public void awsCustomEndpointRegionConsumerProperties() {
    // the consumer keeps the endpoint as a single URL property
    final Properties properties =
        AwsAuthConfigProperties.forAwsRegionConsumerProps(
            AwsRegion.ofCustomEndpoint("https://foo.bar:6666", "us-east-1"));
    assertThat(properties.entrySet(), hasSize(2));
    assertThat(properties, hasEntry(AWSConfigConstants.AWS_ENDPOINT, "https://foo.bar:6666"));
    assertThat(properties, hasEntry(AWSConfigConstants.AWS_REGION, "us-east-1"));
  }
  @Test
  public void awsDefaultRegionProducerProperties() {
    // TODO Flink doesn't support auto region detection from the AWS provider chain,
    // TODO so we always have to have the region settings available in the client side
    // TODO this should no longer be a restriction once we fix this in the Flink connector side
    try (final ScopedSystemProperty awsRegionSystemProps =
        new ScopedSystemProperty(SDKGlobalConfiguration.AWS_REGION_SYSTEM_PROPERTY, "us-west-1")) {
      final Properties properties =
          AwsAuthConfigProperties.forAwsRegionProducerProps(AwsRegion.fromDefaultProviderChain());
      assertThat(properties.entrySet(), hasSize(1));
      assertThat(properties, hasEntry(AWSConfigConstants.AWS_REGION, "us-west-1"));
    }
  }
  @Test
  public void awsSpecificRegionProducerProperties() {
    final Properties properties =
        AwsAuthConfigProperties.forAwsRegionProducerProps(AwsRegion.ofId("us-east-2"));
    assertThat(properties.entrySet(), hasSize(1));
    assertThat(properties, hasEntry(AWSConfigConstants.AWS_REGION, "us-east-2"));
  }
  @Test
  public void awsCustomEndpointRegionProducerProperties() {
    // unlike the consumer, the producer (KPL) expects the endpoint split into host and port
    final Properties properties =
        AwsAuthConfigProperties.forAwsRegionProducerProps(
            AwsRegion.ofCustomEndpoint("https://foo.bar:6666", "us-east-1"));
    assertThat(properties.entrySet(), hasSize(3));
    assertThat(properties, hasEntry("KinesisEndpoint", "foo.bar"));
    assertThat(properties, hasEntry("KinesisPort", "6666"));
    assertThat(properties, hasEntry(AWSConfigConstants.AWS_REGION, "us-east-1"));
  }
  @Test
  public void awsDefaultCredentialsProperties() {
    final Properties properties =
        AwsAuthConfigProperties.forAwsCredentials(AwsCredentials.fromDefaultProviderChain());
    assertThat(properties.entrySet(), hasSize(1));
    assertThat(
        properties,
        hasEntry(
            AWSConfigConstants.AWS_CREDENTIALS_PROVIDER,
            AWSConfigConstants.CredentialProvider.AUTO.name()));
  }
  @Test
  public void awsBasicCredentialsProperties() {
    final Properties properties =
        AwsAuthConfigProperties.forAwsCredentials(
            AwsCredentials.basic("fake-access-key-id", "fake-secret-access-key"));
    assertThat(properties.entrySet(), hasSize(3));
    assertThat(
        properties,
        hasEntry(
            AWSConfigConstants.AWS_CREDENTIALS_PROVIDER,
            AWSConfigConstants.CredentialProvider.BASIC.name()));
    assertThat(
        properties,
        hasEntry(
            AWSConfigConstants.accessKeyId(AWSConfigConstants.AWS_CREDENTIALS_PROVIDER),
            "fake-access-key-id"));
    assertThat(
        properties,
        hasEntry(
            AWSConfigConstants.secretKey(AWSConfigConstants.AWS_CREDENTIALS_PROVIDER),
            "fake-secret-access-key"));
  }
  @Test
  public void awsProfileCredentialsProperties() {
    final Properties properties =
        AwsAuthConfigProperties.forAwsCredentials(
            AwsCredentials.profile("fake-profile", "/fake/profile/path"));
    assertThat(properties.entrySet(), hasSize(3));
    assertThat(
        properties,
        hasEntry(
            AWSConfigConstants.AWS_CREDENTIALS_PROVIDER,
            AWSConfigConstants.CredentialProvider.PROFILE.name()));
    assertThat(
        properties,
        hasEntry(
            AWSConfigConstants.profileName(AWSConfigConstants.AWS_CREDENTIALS_PROVIDER),
            "fake-profile"));
    assertThat(
        properties,
        hasEntry(
            AWSConfigConstants.profilePath(AWSConfigConstants.AWS_CREDENTIALS_PROVIDER),
            "/fake/profile/path"));
  }
  /**
   * Sets a system property for the duration of a try-with-resources scope, restoring the previous
   * value (or clearing the property if it was absent) on close.
   */
  private static class ScopedSystemProperty implements Closeable {
    private final String key;
    // the value present before this scope; null if the property was not set
    private final String previousValue;
    private ScopedSystemProperty(String key, String value) {
      this.key = key;
      this.previousValue = System.setProperty(key, value);
    }
    @Override
    public void close() {
      if (previousValue != null) {
        System.setProperty(key, previousValue);
      } else {
        System.clearProperty(key);
      }
    }
  }
}
| 5,999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.