| code | docstring | func_name | language | repo | path | url | license |
|---|---|---|---|---|---|---|---|
private static LinkedHashSet<Class<?>> getRegisteredSubclassesFromSerializerConfig(
Class<?> basePojoClass, SerializerConfig serializerConfig) {
LinkedHashSet<Class<?>> subclassesInRegistrationOrder =
CollectionUtil.newLinkedHashSetWithExpectedSize(
serializerConfig.getRegisteredPojoTypes().size());
for (Class<?> registeredClass : serializerConfig.getRegisteredPojoTypes()) {
if (registeredClass.equals(basePojoClass)) {
continue;
}
if (!basePojoClass.isAssignableFrom(registeredClass)) {
continue;
}
subclassesInRegistrationOrder.add(registeredClass);
}
return subclassesInRegistrationOrder;
}
|
Extracts the subclasses of the base POJO class registered in the serializer config.
|
getRegisteredSubclassesFromSerializerConfig
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializer.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializer.java
|
Apache-2.0
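
A minimal, self-contained sketch of the filtering logic above, using plain collections and hypothetical Animal/Dog/Cat classes in place of a real SerializerConfig:

import java.util.LinkedHashSet;
import java.util.List;

class SubclassFilterSketch {
    static class Animal {}
    static class Dog extends Animal {}
    static class Cat extends Animal {}

    public static void main(String[] args) {
        // stand-in for serializerConfig.getRegisteredPojoTypes()
        List<Class<?>> registered = List.of(Dog.class, String.class, Animal.class, Cat.class);
        LinkedHashSet<Class<?>> subclasses = new LinkedHashSet<>();
        for (Class<?> c : registered) {
            // skip the base class itself and types that are not its subclasses
            if (!c.equals(Animal.class) && Animal.class.isAssignableFrom(c)) {
                subclasses.add(c);
            }
        }
        System.out.println(subclasses); // [Dog, Cat] -- registration order preserved
    }
}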
|
private static LinkedHashMap<Class<?>, Integer> createRegisteredSubclassTags(
LinkedHashSet<Class<?>> registeredSubclasses) {
final LinkedHashMap<Class<?>, Integer> classToTag = new LinkedHashMap<>();
int id = 0;
for (Class<?> registeredClass : registeredSubclasses) {
classToTag.put(registeredClass, id);
id++;
}
return classToTag;
}
|
Builds a map of registered subclasses to their class tags. Class tags are integers starting
from 0, assigned incrementally in the order of the provided subclasses.
|
createRegisteredSubclassTags
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializer.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializer.java
|
Apache-2.0
|
private static <T> PojoSerializerSnapshot<T> buildSnapshot(
Class<T> pojoType,
LinkedHashMap<Class<?>, Integer> registeredSubclassesToTags,
TypeSerializer<?>[] registeredSubclassSerializers,
Field[] fields,
TypeSerializer<?>[] fieldSerializers,
Map<Class<?>, TypeSerializer<?>> nonRegisteredSubclassSerializerCache,
SerializerConfig serializerConfig) {
final LinkedHashMap<Class<?>, TypeSerializer<?>> subclassRegistry =
CollectionUtil.newLinkedHashMapWithExpectedSize(registeredSubclassesToTags.size());
for (Map.Entry<Class<?>, Integer> entry : registeredSubclassesToTags.entrySet()) {
subclassRegistry.put(entry.getKey(), registeredSubclassSerializers[entry.getValue()]);
}
return new PojoSerializerSnapshot<>(
pojoType,
fields,
fieldSerializers,
subclassRegistry,
nonRegisteredSubclassSerializerCache,
serializerConfig);
}
|
Build and return a snapshot of the serializer's parameters and currently cached serializers.
|
buildSnapshot
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializer.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializer.java
|
Apache-2.0
|
private static <K> LinkedHashMap<K, TypeSerializer<?>> restoreSerializers(
LinkedHashMap<K, TypeSerializerSnapshot<?>> snapshotsMap) {
final LinkedHashMap<K, TypeSerializer<?>> restoredSerializersMap =
CollectionUtil.newLinkedHashMapWithExpectedSize(snapshotsMap.size());
snapshotsMap.forEach(
(key, snapshot) -> restoredSerializersMap.put(key, snapshot.restoreSerializer()));
return restoredSerializersMap;
}
|
Transforms a {@link LinkedHashMap} with {@link TypeSerializerSnapshot}s as values into one
with {@link TypeSerializer}s as values, by restoring each snapshot.
|
restoreSerializers
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
Apache-2.0
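
Since restoreSerializers is private, here is a small sketch that inlines the same snapshot-to-serializer transformation, using Flink's built-in IntSerializer and StringSerializer:

import java.util.LinkedHashMap;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot;
import org.apache.flink.api.common.typeutils.base.IntSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;

class RestoreSerializersSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, TypeSerializerSnapshot<?>> snapshots = new LinkedHashMap<>();
        snapshots.put("count", IntSerializer.INSTANCE.snapshotConfiguration());
        snapshots.put("name", StringSerializer.INSTANCE.snapshotConfiguration());

        // the same transformation: each snapshot is restored to a serializer, order kept
        LinkedHashMap<String, TypeSerializer<?>> restored = new LinkedHashMap<>();
        snapshots.forEach((key, snapshot) -> restored.put(key, snapshot.restoreSerializer()));
        System.out.println(restored.keySet()); // [count, name]
    }
}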
|
@SuppressWarnings("unchecked")
private static Tuple2<LinkedHashMap<Class<?>, Integer>, TypeSerializer<Object>[]>
decomposeSubclassSerializerRegistry(
LinkedHashMap<Class<?>, TypeSerializer<?>> subclassSerializerRegistry) {
final LinkedHashMap<Class<?>, Integer> subclassIds =
CollectionUtil.newLinkedHashMapWithExpectedSize(subclassSerializerRegistry.size());
final TypeSerializer[] subclassSerializers =
new TypeSerializer[subclassSerializerRegistry.size()];
subclassSerializerRegistry.forEach(
(registeredSubclassClass, serializer) -> {
int id = subclassIds.size();
subclassIds.put(registeredSubclassClass, id);
subclassSerializers[id] = serializer;
});
return Tuple2.of(subclassIds, subclassSerializers);
}
|
Transforms the subclass serializer registry structure, {@code LinkedHashMap<Class<?>,
TypeSerializer<?>>}, into two separate structures: a map with the registered classes as keys
and their corresponding ids (their order in the original map) as values, plus a separate
array of the corresponding subclass serializers.
|
decomposeSubclassSerializerRegistry
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
Apache-2.0
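
A sketch of the decomposition using String stand-ins for the serializers (the classes and values here are arbitrary):

import java.util.LinkedHashMap;

class DecomposeSketch {
    public static void main(String[] args) {
        LinkedHashMap<Class<?>, String> registry = new LinkedHashMap<>();
        registry.put(Integer.class, "intSubclassSerializer");
        registry.put(Double.class, "doubleSubclassSerializer");

        LinkedHashMap<Class<?>, Integer> ids = new LinkedHashMap<>();
        String[] serializers = new String[registry.size()];
        registry.forEach((clazz, serializer) -> {
            int id = ids.size(); // the id is simply the insertion position
            ids.put(clazz, id);
            serializers[id] = serializer;
        });
        System.out.println(ids); // {class java.lang.Integer=0, class java.lang.Double=1}
    }
}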
|
private IntermediateCompatibilityResult<T> getCompatibilityOfPreExistingFields(
LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> oldFieldSerializerSnapshots) {
// the present entries dictate the preexisting fields, because removed fields would be
// represented as absent keys in the optional map.
final Set<LinkedOptionalMap.KeyValue<Field, TypeSerializerSnapshot<?>>>
presentFieldSnapshots = oldFieldSerializerSnapshots.getPresentEntries();
final ArrayList<TypeSerializerSnapshot<?>> associatedFieldSerializerSnapshots =
new ArrayList<>(presentFieldSnapshots.size());
final ArrayList<TypeSerializerSnapshot<?>> associatedNewFieldSerializerSnapshots =
new ArrayList<>(presentFieldSnapshots.size());
Map<Field, TypeSerializerSnapshot<?>> newFieldSerializerSnapshots =
snapshotData.getFieldSerializerSnapshots().unwrapOptionals();
for (LinkedOptionalMap.KeyValue<Field, TypeSerializerSnapshot<?>> presentFieldEntry :
presentFieldSnapshots) {
TypeSerializerSnapshot<?> associatedNewFieldSerializer =
newFieldSerializerSnapshots.get(presentFieldEntry.getKey());
checkState(
associatedNewFieldSerializer != null,
"a present field should have its associated new field serializer available.");
associatedFieldSerializerSnapshots.add(presentFieldEntry.getValue());
associatedNewFieldSerializerSnapshots.add(associatedNewFieldSerializer);
}
return CompositeTypeSerializerUtil.constructIntermediateCompatibilityResult(
associatedNewFieldSerializerSnapshots.toArray(
new TypeSerializerSnapshot<?>
[associatedNewFieldSerializerSnapshots.size()]),
associatedFieldSerializerSnapshots.toArray(
new TypeSerializerSnapshot<?>[associatedFieldSerializerSnapshots.size()]));
}
|
Finds which POJO fields exist both in the new {@link PojoSerializerSnapshot} and in the
previous one, and returns an {@link IntermediateCompatibilityResult} for the serializers of
those preexisting fields.
|
getCompatibilityOfPreExistingFields
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
Apache-2.0
|
private IntermediateCompatibilityResult<T> getCompatibilityOfPreExistingRegisteredSubclasses(
LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>>
registeredSubclassSerializerSnapshots) {
final LinkedHashMap<Class<?>, TypeSerializerSnapshot<?>> unwrappedSerializerSnapshots =
registeredSubclassSerializerSnapshots.unwrapOptionals();
final ArrayList<TypeSerializerSnapshot<?>> associatedSubclassSerializerSnapshots =
new ArrayList<>();
final ArrayList<TypeSerializerSnapshot<?>> associatedNewSubclassSerializerSnapshots =
new ArrayList<>();
final LinkedHashMap<Class<?>, TypeSerializerSnapshot<?>> newSubclassSerializerRegistry =
snapshotData.getRegisteredSubclassSerializerSnapshots().unwrapOptionals();
for (Map.Entry<Class<?>, TypeSerializerSnapshot<?>> entry :
unwrappedSerializerSnapshots.entrySet()) {
TypeSerializerSnapshot<?> newRegisteredSerializerSnapshot =
newSubclassSerializerRegistry.get(entry.getKey());
if (newRegisteredSerializerSnapshot != null) {
associatedSubclassSerializerSnapshots.add(entry.getValue());
associatedNewSubclassSerializerSnapshots.add(newRegisteredSerializerSnapshot);
}
}
return CompositeTypeSerializerUtil.constructIntermediateCompatibilityResult(
associatedNewSubclassSerializerSnapshots.toArray(
new TypeSerializerSnapshot<?>
[associatedNewSubclassSerializerSnapshots.size()]),
associatedSubclassSerializerSnapshots.toArray(
new TypeSerializerSnapshot<?>
[associatedSubclassSerializerSnapshots.size()]));
}
|
Finds which registered subclasses exist both in the new {@link PojoSerializerSnapshot} and in
the previous one, and returns an {@link IntermediateCompatibilityResult} for the serializers
of these preexisting registered subclasses.
|
getCompatibilityOfPreExistingRegisteredSubclasses
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
Apache-2.0
|
private boolean newPojoSerializerIsCompatibleAfterMigration(
IntermediateCompatibilityResult<T> fieldSerializerCompatibility,
IntermediateCompatibilityResult<T> preExistingRegistrationsCompatibility,
LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots) {
return newPojoHasNewOrRemovedFields(fieldSerializerSnapshots)
|| fieldSerializerCompatibility.isCompatibleAfterMigration()
|| preExistingRegistrationsCompatibility.isCompatibleAfterMigration();
}
|
Checks if the new {@link PojoSerializerSnapshot} is compatible after migration.
|
newPojoSerializerIsCompatibleAfterMigration
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
Apache-2.0
|
private boolean newPojoSerializerIsCompatibleWithReconfiguredSerializer(
IntermediateCompatibilityResult<T> fieldSerializerCompatibility,
IntermediateCompatibilityResult<T> preExistingRegistrationsCompatibility,
LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>>
registeredSubclassSerializerSnapshots,
LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>>
nonRegisteredSubclassSerializerSnapshots) {
return newPojoHasDifferentSubclassRegistrationOrder(registeredSubclassSerializerSnapshots)
|| previousSerializerHasNonRegisteredSubclasses(
nonRegisteredSubclassSerializerSnapshots)
|| fieldSerializerCompatibility.isCompatibleWithReconfiguredSerializer()
|| preExistingRegistrationsCompatibility.isCompatibleWithReconfiguredSerializer();
}
|
Checks if the new {@link PojoSerializerSnapshot} is compatible with a reconfigured instance.
|
newPojoSerializerIsCompatibleWithReconfiguredSerializer
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
Apache-2.0
|
private boolean newPojoHasNewOrRemovedFields(
LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots) {
int numRemovedFields = fieldSerializerSnapshots.absentKeysOrValues().size();
int numPreexistingFields = fieldSerializerSnapshots.size() - numRemovedFields;
boolean hasRemovedFields = numRemovedFields > 0;
boolean hasNewFields =
snapshotData.getFieldSerializerSnapshots().size() - numPreexistingFields > 0;
return hasRemovedFields || hasNewFields;
}
|
Checks whether the new {@link PojoSerializerSnapshot} has new or removed fields compared to
the previous one. For example, if the previous snapshot holds 5 field entries of which 2 keys
are absent (i.e. removed), 3 fields preexist; a new snapshot with 4 field serializers
therefore has 4 - 3 = 1 new field.
|
newPojoHasNewOrRemovedFields
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
Apache-2.0
|
private static boolean previousSerializerHasNonRegisteredSubclasses(
LinkedOptionalMap<Class<?>, TypeSerializerSnapshot<?>>
nonRegisteredSubclassSerializerSnapshots) {
return nonRegisteredSubclassSerializerSnapshots.size() > 0;
}
|
Checks whether the previous serializer, represented by this snapshot, has non-registered
subclasses.
|
previousSerializerHasNonRegisteredSubclasses
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshot.java
|
Apache-2.0
|
static <T> PojoSerializerSnapshotData<T> createFrom(
Class<T> pojoClass,
Field[] fields,
TypeSerializer<?>[] fieldSerializers,
LinkedHashMap<Class<?>, TypeSerializer<?>> registeredSubclassSerializers,
Map<Class<?>, TypeSerializer<?>> nonRegisteredSubclassSerializers) {
final LinkedOptionalMap<Field, TypeSerializerSnapshot<?>> fieldSerializerSnapshots =
new LinkedOptionalMap<>(fields.length);
for (int i = 0; i < fields.length; i++) {
Field field = fields[i];
String fieldName = (field == null) ? getDummyNameForMissingField(i) : field.getName();
fieldSerializerSnapshots.put(
fieldName, field, fieldSerializers[i].snapshotConfiguration());
}
LinkedHashMap<Class<?>, TypeSerializerSnapshot<?>> registeredSubclassSerializerSnapshots =
CollectionUtil.newLinkedHashMapWithExpectedSize(
registeredSubclassSerializers.size());
registeredSubclassSerializers.forEach(
(k, v) -> registeredSubclassSerializerSnapshots.put(k, v.snapshotConfiguration()));
Map<Class<?>, TypeSerializerSnapshot<?>> nonRegisteredSubclassSerializerSnapshots =
CollectionUtil.newHashMapWithExpectedSize(nonRegisteredSubclassSerializers.size());
nonRegisteredSubclassSerializers.forEach(
(k, v) ->
nonRegisteredSubclassSerializerSnapshots.put(k, v.snapshotConfiguration()));
return new PojoSerializerSnapshotData<>(
pojoClass,
fieldSerializerSnapshots,
optionalMapOf(registeredSubclassSerializerSnapshots, Class::getName),
optionalMapOf(nonRegisteredSubclassSerializerSnapshots, Class::getName));
}
|
Creates a {@link PojoSerializerSnapshotData} from the configuration of a {@link PojoSerializer}.
<p>This factory method is meant to be used in regular write paths, i.e. when taking a
snapshot of the {@link PojoSerializer}. All registered and non-registered subclass classes
are present. Some POJO fields may be absent if the originating {@link PojoSerializer} was a
restored one with already missing fields and was never replaced by a new {@link
PojoSerializer} (i.e. because the serialized old data was never accessed).
|
createFrom
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshotData.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshotData.java
|
Apache-2.0
|
static <T> PojoSerializerSnapshotData<T> createFrom(
DataInputView in, ClassLoader userCodeClassLoader) throws IOException {
return PojoSerializerSnapshotData.readSnapshotData(in, userCodeClassLoader);
}
|
Creates a {@link PojoSerializerSnapshotData} from a serialized data stream.
<p>This factory method is meant to be used in regular read paths, i.e. when reading back a
snapshot of the {@link PojoSerializer}. POJO fields, registered subclass classes, and
non-registered subclass classes may no longer be present.
|
createFrom
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshotData.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/PojoSerializerSnapshotData.java
|
Apache-2.0
|
@Nullable
private static ChillSerializerRegistrar loadFlinkChillPackageRegistrar() {
try {
return (ChillSerializerRegistrar)
Class.forName(
"org.apache.flink.streaming.util.serialize.FlinkChillPackageRegistrar")
.getDeclaredConstructor()
.newInstance();
} catch (Exception e) {
return null;
}
}
|
Attempts to load the {@code FlinkChillPackageRegistrar} via reflection, returning {@code
null} if it is not available on the classpath.
|
loadFlinkChillPackageRegistrar
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/KryoSerializer.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/KryoSerializer.java
|
Apache-2.0
|
private Kryo getKryoInstance() {
try {
// check if ScalaKryoInstantiator is in class path (coming from Twitter's Chill
// library).
// This will be true if Flink's Table Api Scala is used.
Class<?> chillInstantiatorClazz =
Class.forName(
"org.apache.flink.table.api.runtime.types.FlinkScalaKryoInstantiator");
Object chillInstantiator = chillInstantiatorClazz.newInstance();
// obtain a Kryo instance through Twitter Chill
Method m = chillInstantiatorClazz.getMethod("newKryo");
return (Kryo) m.invoke(chillInstantiator);
} catch (ClassNotFoundException
| InstantiationException
| NoSuchMethodException
| IllegalAccessException
| InvocationTargetException e) {
Optional<Kryo> kryoInstanceFromLegacyPackage = getKryoInstanceFromLegacyPackage();
if (kryoInstanceFromLegacyPackage.isPresent()) {
return kryoInstanceFromLegacyPackage.get();
}
if (LOG.isDebugEnabled()) {
LOG.info("Kryo serializer scala extensions are not available.", e);
} else {
LOG.info("Kryo serializer scala extensions are not available.");
}
DefaultInstantiatorStrategy initStrategy = new DefaultInstantiatorStrategy();
initStrategy.setFallbackInstantiatorStrategy(new StdInstantiatorStrategy());
Kryo kryo = new Kryo();
kryo.setInstantiatorStrategy(initStrategy);
if (flinkChillPackageRegistrar != null) {
flinkChillPackageRegistrar.registerSerializers(kryo);
}
return kryo;
}
}
|
Returns the Chill Kryo Serializer which is implicitly added to the classpath via
flink-runtime. Falls back to the default Kryo serializer if it can't be found.
@return The Kryo serializer instance.
|
getKryoInstance
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/KryoSerializer.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/KryoSerializer.java
|
Apache-2.0
|
@Override
public void log(int level, String category, String message, Throwable ex) {
final String logString = "[KRYO " + category + "] " + message;
switch (level) {
case Log.LEVEL_ERROR:
log.error(logString, ex);
break;
case Log.LEVEL_WARN:
log.warn(logString, ex);
break;
case Log.LEVEL_INFO:
log.info(logString, ex);
break;
case Log.LEVEL_DEBUG:
log.debug(logString, ex);
break;
case Log.LEVEL_TRACE:
log.trace(logString, ex);
break;
}
}
|
An implementation of the Minlog Logger that forwards to slf4j.
|
log
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/MinlogForwarder.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/MinlogForwarder.java
|
Apache-2.0
|
private static void getContainedGenericTypes(
CompositeType<?> typeInfo, List<GenericTypeInfo<?>> target) {
for (int i = 0; i < typeInfo.getArity(); i++) {
TypeInformation<?> type = typeInfo.getTypeAt(i);
if (type instanceof CompositeType) {
getContainedGenericTypes((CompositeType<?>) type, target);
} else if (type instanceof GenericTypeInfo) {
if (!target.contains(type)) {
target.add((GenericTypeInfo<?>) type);
}
}
}
}
|
Returns all GenericTypeInfos contained in a composite type.
@param typeInfo {@link CompositeType}
|
getContainedGenericTypes
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/Serializers.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/api/java/typeutils/runtime/kryo/Serializers.java
|
Apache-2.0
|
public ConfigOption<T> withFallbackKeys(String... fallbackKeys) {
final Stream<FallbackKey> newFallbackKeys =
Arrays.stream(fallbackKeys).map(FallbackKey::createFallbackKey);
final Stream<FallbackKey> currentAlternativeKeys = Arrays.stream(this.fallbackKeys);
// put fallback keys first so that they are prioritized
final FallbackKey[] mergedAlternativeKeys =
Stream.concat(newFallbackKeys, currentAlternativeKeys).toArray(FallbackKey[]::new);
return new ConfigOption<>(
key, clazz, description, defaultValue, isList, mergedAlternativeKeys);
}
|
Creates a new config option, using this option's key and default value, and adding the given
fallback keys.
<p>When obtaining a value from the configuration via {@link
Configuration#getValue(ConfigOption)}, the fallback keys will be checked in the order
provided to this method. The first key for which a value is found will be used - that value
will be returned.
@param fallbackKeys The fallback keys, in the order in which they should be checked.
@return A new config option, with the given fallback keys.
|
withFallbackKeys
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
Apache-2.0
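
A usage sketch; "rest.port" and "web.port" are illustrative keys, not claims about real Flink options:

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.Configuration;

class FallbackKeySketch {
    public static void main(String[] args) {
        ConfigOption<Integer> port =
                ConfigOptions.key("rest.port").intType().defaultValue(8081)
                        .withFallbackKeys("web.port");
        Configuration conf = new Configuration();
        conf.setString("web.port", "9091");
        // "rest.port" is absent, so the fallback key is consulted
        System.out.println(conf.get(port)); // 9091
    }
}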
|
public ConfigOption<T> withDeprecatedKeys(String... deprecatedKeys) {
final Stream<FallbackKey> newDeprecatedKeys =
Arrays.stream(deprecatedKeys).map(FallbackKey::createDeprecatedKey);
final Stream<FallbackKey> currentAlternativeKeys = Arrays.stream(this.fallbackKeys);
// put deprecated keys last so that they are de-prioritized
final FallbackKey[] mergedAlternativeKeys =
Stream.concat(currentAlternativeKeys, newDeprecatedKeys)
.toArray(FallbackKey[]::new);
return new ConfigOption<>(
key, clazz, description, defaultValue, isList, mergedAlternativeKeys);
}
|
Creates a new config option, using this option's key and default value, and adding the given
deprecated keys.
<p>When obtaining a value from the configuration via {@link
Configuration#getValue(ConfigOption)}, the deprecated keys will be checked in the order
provided to this method. The first key for which a value is found will be used - that value
will be returned.
@param deprecatedKeys The deprecated keys, in the order in which they should be checked.
@return A new config option, with the given deprecated keys.
|
withDeprecatedKeys
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
Apache-2.0
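
The same pattern for deprecated keys; note that, per the code above, deprecated keys are appended after existing fallback keys and are therefore checked last (keys here are illustrative):

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;

class DeprecatedKeySketch {
    static final ConfigOption<String> HOST =
            ConfigOptions.key("myapp.hostname").stringType().noDefaultValue()
                    .withDeprecatedKeys("myapp.host"); // read only if "myapp.hostname" is absent
}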
|
public String key() {
return key;
}
|
Gets the configuration key.
@return The configuration key
|
key
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
Apache-2.0
|
public boolean hasDefaultValue() {
return defaultValue != null;
}
|
Checks if this option has a default value.
@return True if it has a default value, false if not.
|
hasDefaultValue
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
Apache-2.0
|
public T defaultValue() {
return defaultValue;
}
|
Returns the default value, or null, if there is no default value.
@return The default value, or null.
|
defaultValue
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
Apache-2.0
|
public boolean hasFallbackKeys() {
return fallbackKeys != EMPTY;
}
|
Checks whether this option has fallback keys.
@return True if the option has fallback keys, false if not.
|
hasFallbackKeys
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
Apache-2.0
|
public Iterable<FallbackKey> fallbackKeys() {
return (fallbackKeys == EMPTY) ? Collections.emptyList() : Arrays.asList(fallbackKeys);
}
|
Gets the fallback keys, in the order to be checked.
@return The option's fallback keys.
|
fallbackKeys
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOption.java
|
Apache-2.0
|
public static OptionBuilder key(String key) {
checkNotNull(key);
return new OptionBuilder(key);
}
|
Starts building a new {@link ConfigOption}.
@param key The key for the config option.
@return The builder for the config option with the given key.
|
key
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public TypedConfigOptionBuilder<Boolean> booleanType() {
return new TypedConfigOptionBuilder<>(key, Boolean.class);
}
|
Defines that the value of the option should be of {@link Boolean} type.
|
booleanType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public TypedConfigOptionBuilder<Integer> intType() {
return new TypedConfigOptionBuilder<>(key, Integer.class);
}
|
Defines that the value of the option should be of {@link Integer} type.
|
intType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public TypedConfigOptionBuilder<Long> longType() {
return new TypedConfigOptionBuilder<>(key, Long.class);
}
|
Defines that the value of the option should be of {@link Long} type.
|
longType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public TypedConfigOptionBuilder<Float> floatType() {
return new TypedConfigOptionBuilder<>(key, Float.class);
}
|
Defines that the value of the option should be of {@link Float} type.
|
floatType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public TypedConfigOptionBuilder<Double> doubleType() {
return new TypedConfigOptionBuilder<>(key, Double.class);
}
|
Defines that the value of the option should be of {@link Double} type.
|
doubleType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public TypedConfigOptionBuilder<String> stringType() {
return new TypedConfigOptionBuilder<>(key, String.class);
}
|
Defines that the value of the option should be of {@link String} type.
|
stringType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public TypedConfigOptionBuilder<Duration> durationType() {
return new TypedConfigOptionBuilder<>(key, Duration.class);
}
|
Defines that the value of the option should be of {@link Duration} type.
|
durationType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public TypedConfigOptionBuilder<MemorySize> memoryType() {
return new TypedConfigOptionBuilder<>(key, MemorySize.class);
}
|
Defines that the value of the option should be of {@link MemorySize} type.
|
memoryType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public <T extends Enum<T>> TypedConfigOptionBuilder<T> enumType(Class<T> enumClass) {
return new TypedConfigOptionBuilder<>(key, enumClass);
}
|
Defines that the value of the option should be of {@link Enum} type.
@param enumClass Concrete type of the expected enum.
|
enumType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public TypedConfigOptionBuilder<Map<String, String>> mapType() {
return new TypedConfigOptionBuilder<>(key, PROPERTIES_MAP_CLASS);
}
|
Defines that the value of the option should be a set of properties, which can be
represented as {@code Map<String, String>}.
|
mapType
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public ListConfigOptionBuilder<T> asList() {
return new ListConfigOptionBuilder<>(key, clazz);
}
|
Defines that the option's type should be a list of the previously defined atomic type.
|
asList
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
public ConfigOption<T> defaultValue(T value) {
return new ConfigOption<>(key, clazz, ConfigOption.EMPTY_DESCRIPTION, value, false);
}
|
Creates a ConfigOption with the given default value.
@param value The default value for the config option
@return The config option with the default value.
|
defaultValue
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
|
@SafeVarargs
public final ConfigOption<List<E>> defaultValues(E... values) {
return new ConfigOption<>(
key, clazz, ConfigOption.EMPTY_DESCRIPTION, Arrays.asList(values), true);
}
|
Creates a ConfigOption with the given default values.
@param values The list of default values for the config option
@return The config option with the default values.
|
defaultValues
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigOptions.java
|
Apache-2.0
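
Putting the builder pieces together, a sketch defining one atomic option and one list option (the keys are hypothetical):

import java.util.List;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;

class BuilderChainSketch {
    static final ConfigOption<Integer> RETRIES =
            ConfigOptions.key("myapp.retries").intType().defaultValue(3);

    static final ConfigOption<List<String>> HOSTS =
            ConfigOptions.key("myapp.hosts").stringType().asList()
                    .defaultValues("host-a", "host-b");
}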
|
public static Configuration fromMap(Map<String, String> map) {
final Configuration configuration = new Configuration();
map.forEach(configuration::setString);
return configuration;
}
|
Creates a new configuration that is initialized with the options of the given map.
|
fromMap
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
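
A short usage sketch of fromMap ("parallelism.default" is just an example key):

import java.util.Map;
import org.apache.flink.configuration.Configuration;

class FromMapSketch {
    public static void main(String[] args) {
        Configuration conf = Configuration.fromMap(Map.of("parallelism.default", "4"));
        System.out.println(conf.getString("parallelism.default", "1")); // 4
    }
}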
|
public String getString(String key, String defaultValue) {
return getRawValue(key)
.map(o -> ConfigurationUtils.convertToString(o))
.orElse(defaultValue);
}
|
Returns the value associated with the given key as a string. We encourage users and
developers to always use ConfigOption for getting configurations if possible, for its rich
description, type, default value, and other support. The string-key-based getter should only
be used when ConfigOption is not applicable, e.g., when the key is generated programmatically
at runtime.
@param key the key pointing to the associated value
@param defaultValue the default value which is returned in case there is no value associated
with the given key
@return the (default) value associated with the given key
|
getString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
public void setString(String key, String value) {
setValueInternal(key, value);
}
|
Adds the given key/value pair to the configuration object. We encourage users and developers
to always use ConfigOption for setting configurations if possible, for its rich description,
type, default value, and other support. The string-key-based setter should only be used when
ConfigOption is not applicable, e.g., when the key is generated programmatically at runtime.
@param key the key of the key/value pair to be added
@param value the value of the key/value pair to be added
|
setString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
@Internal
public byte[] getBytes(String key, byte[] defaultValue) {
return getRawValue(key)
.map(
o -> {
if (o.getClass().equals(byte[].class)) {
return (byte[]) o;
} else {
throw new IllegalArgumentException(
String.format(
"Configuration cannot evaluate value %s as a byte[] value",
o));
}
})
.orElse(defaultValue);
}
|
Returns the value associated with the given key as a byte array.
@param key The key pointing to the associated value.
@param defaultValue The default value which is returned in case there is no value associated
with the given key.
@return the (default) value associated with the given key.
|
getBytes
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
@Internal
public void setBytes(String key, byte[] bytes) {
setValueInternal(key, bytes);
}
|
Adds the given byte array to the configuration object. If key is <code>null</code> then
nothing is added.
@param key The key under which the bytes are added.
@param bytes The bytes to be added.
|
setBytes
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
@PublicEvolving
public String getValue(ConfigOption<?> configOption) {
return Optional.ofNullable(
getRawValueFromOption(configOption).orElseGet(configOption::defaultValue))
.map(String::valueOf)
.orElse(null);
}
|
Returns the value associated with the given config option as a string.
@param configOption The configuration option
@return the (default) value associated with the given config option
|
getValue
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
@PublicEvolving
public <T extends Enum<T>> T getEnum(
final Class<T> enumClass, final ConfigOption<String> configOption) {
checkNotNull(enumClass, "enumClass must not be null");
checkNotNull(configOption, "configOption must not be null");
Object rawValue = getRawValueFromOption(configOption).orElseGet(configOption::defaultValue);
try {
return ConfigurationUtils.convertToEnum(rawValue, enumClass);
} catch (IllegalArgumentException ex) {
final String errorMessage =
String.format(
"Value for config option %s must be one of %s (was %s)",
configOption.key(),
Arrays.toString(enumClass.getEnumConstants()),
rawValue);
throw new IllegalArgumentException(errorMessage);
}
}
|
Returns the value associated with the given config option as an enum.
@param enumClass The return enum class
@param configOption The configuration option
@throws IllegalArgumentException If the string associated with the given config option cannot
be parsed as a value of the provided enum class.
|
getEnum
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
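
A sketch of getEnum with a hypothetical enum and option key:

import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.Configuration;

class GetEnumSketch {
    enum Mode { BATCH, STREAMING } // hypothetical enum

    public static void main(String[] args) {
        ConfigOption<String> mode =
                ConfigOptions.key("myapp.mode").stringType().defaultValue("BATCH");
        Configuration conf = new Configuration();
        conf.setString("myapp.mode", "STREAMING");
        System.out.println(conf.getEnum(Mode.class, mode)); // STREAMING
    }
}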
|
public Set<String> keySet() {
synchronized (this.confData) {
return new HashSet<>(this.confData.keySet());
}
}
|
Returns the keys of all key/value pairs stored inside this configuration object.
@return the keys of all key/value pairs stored inside this configuration object
|
keySet
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
public boolean containsKey(String key) {
synchronized (this.confData) {
return this.confData.containsKey(key);
}
}
|
Checks whether there is an entry with the specified key.
@param key key of entry
@return true if the key is stored, false otherwise
|
containsKey
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
@Override
public <T> T get(ConfigOption<T> option) {
return getOptional(option).orElseGet(option::defaultValue);
}
|
Please check the Javadoc of {@link #getRawValueFromOption(ConfigOption)}. If no keys are
found in the {@link Configuration}, the default value of the given option is returned. Please
make sure there is at least one value available; otherwise, an NPE will be thrown by Flink
when the value is used.
<p>NOTE: the current logic is not able to get the default value of the fallback key's
ConfigOption in case the given ConfigOption has no default value. If you want to use a
fallback key, please make sure its value can be found in the {@link Configuration} at runtime.
@param option metadata of the option to read
@return the value of the given option
|
get
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
@Internal
public Map<String, String> toFileWritableMap() {
synchronized (this.confData) {
Map<String, String> ret =
CollectionUtil.newHashMapWithExpectedSize(this.confData.size());
for (Map.Entry<String, Object> entry : confData.entrySet()) {
// Because some characters in standard YAML must be escaped with quotes, such
// as '*', we wrap the value using the YAML pattern here
ret.put(entry.getKey(), YamlParserUtils.toYAMLString(entry.getValue()));
}
return ret;
}
}
|
Convert Config into a {@code Map<String, String>} representation.
<p>NOTE: This method is extracted from the {@link Configuration#toMap} method and should be
called when Config needs to be written to a file.
<p>This method ensures the value is properly escaped when writing the key-value pair to a
standard YAML file.
|
toFileWritableMap
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
public <T> boolean removeConfig(ConfigOption<T> configOption) {
synchronized (this.confData) {
final BiFunction<String, Boolean, Optional<Boolean>> applier =
(key, canBePrefixMap) -> {
if (canBePrefixMap && removePrefixMap(this.confData, key)
|| this.confData.remove(key) != null) {
return Optional.of(true);
}
return Optional.empty();
};
return applyWithOption(configOption, applier).orElse(false);
}
}
|
Removes given config option from the configuration.
@param configOption config option to remove
@param <T> Type of the config option
@return true if the config option has been removed, false otherwise
|
removeConfig
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/Configuration.java
|
Apache-2.0
|
public static Configuration migrateLegacyToStandardYamlConfig(final String configDir) {
if (configDir == null) {
throw new IllegalArgumentException(
"Given configuration directory is null, cannot load configuration");
}
final File confDirFile = new File(configDir);
if (!(confDirFile.exists())) {
throw new IllegalConfigurationException(
"The given configuration directory name '"
+ configDir
+ "' ("
+ confDirFile.getAbsolutePath()
+ ") does not describe an existing directory.");
}
// get Flink yaml configuration file
Map<String, String> configuration;
File yamlConfigFile = new File(confDirFile, LEGACY_FLINK_CONF_FILENAME);
if (!yamlConfigFile.exists()) {
throw new IllegalConfigurationException(
"The Flink config file '"
+ yamlConfigFile
+ "' ("
+ yamlConfigFile.getAbsolutePath()
+ ") does not exist.");
} else {
LOG.info(
"Using legacy YAML parser to load flink configuration file from {}.",
yamlConfigFile.getAbsolutePath());
configuration = loadLegacyYAMLResource(yamlConfigFile);
}
Configuration standardYamlConfig = new Configuration();
configuration.forEach(standardYamlConfig::setString);
return standardYamlConfig;
}
|
Migrates the legacy Flink configuration from the specified directory to a standard YAML
format representation.
<p>This method loads the legacy configuration file named {@code flink-conf.yaml} from the
specified directory. If the file is found, it converts the legacy format into a standard
{@link Configuration} object in YAML format.
@param configDir the directory where the legacy configuration file is located
@return a {@link Configuration} object in standard YAML format
|
migrateLegacyToStandardYamlConfig
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigurationFileMigrationUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigurationFileMigrationUtils.java
|
Apache-2.0
|
@VisibleForTesting
public static Map<String, String> loadLegacyYAMLResource(File file) {
Map<String, String> config = new HashMap<>();
try (BufferedReader reader =
new BufferedReader(new InputStreamReader(new FileInputStream(file)))) {
String line;
int lineNo = 0;
while ((line = reader.readLine()) != null) {
lineNo++;
// 1. check for comments
String[] comments = line.split("#", 2);
String conf = comments[0].trim();
// 2. get key and value
if (conf.length() > 0) {
String[] kv = conf.split(": ", 2);
// skip line with no valid key-value pair
if (kv.length == 1) {
LOG.warn(
"Error while trying to split key and value in configuration file "
+ file
+ ":"
+ lineNo
+ ": Line is not a key-value pair (missing space after ':'?)");
continue;
}
String key = kv[0].trim();
String value = kv[1].trim();
// sanity check
if (key.length() == 0 || value.length() == 0) {
LOG.warn(
"Error after splitting key and value in configuration file "
+ file
+ ":"
+ lineNo
+ ": Key or value was empty");
continue;
}
config.put(key, value);
}
}
} catch (IOException e) {
throw new RuntimeException("Error parsing YAML configuration.", e);
}
return config;
}
|
Loads a YAML-file of key-value pairs.
<p>Colon and whitespace ": " separate key and value (one per line). The hash tag "#" starts a
single-line comment.
<p>Example:
<pre>
jobmanager.rpc.address: localhost # network address for communication with the job manager
jobmanager.rpc.port : 6123 # network port to connect to for communication with the job manager
taskmanager.rpc.port : 6122 # network port the task manager expects incoming IPC connections
</pre>
<p>This does not cover the whole YAML specification, but only the *syntax* of simple YAML
key-value pairs (see issue #113 on GitHub). If, at any point in time, there is a need to go
beyond simple key-value pair syntax, compatibility will allow the introduction of a YAML
parser library.
@param file the YAML file to read from
@see <a href="http://www.yaml.org/spec/1.2/spec.html">YAML 1.2 specification</a>
|
loadLegacyYAMLResource
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigurationFileMigrationUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigurationFileMigrationUtils.java
|
Apache-2.0
|
public static Optional<Duration> getSystemResourceMetricsProbingInterval(
Configuration configuration) {
if (!configuration.get(SYSTEM_RESOURCE_METRICS)) {
return Optional.empty();
} else {
return Optional.of(configuration.get(SYSTEM_RESOURCE_METRICS_PROBING_INTERVAL));
}
}
|
@return extracted {@link MetricOptions#SYSTEM_RESOURCE_METRICS_PROBING_INTERVAL} or {@code
Optional.empty()} if {@link MetricOptions#SYSTEM_RESOURCE_METRICS} are disabled.
|
getSystemResourceMetricsProbingInterval
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
Apache-2.0
|
@Nonnull
public static File getRandomTempDirectory(Configuration configuration) {
final String[] tmpDirectories = parseTempDirectories(configuration);
Preconditions.checkState(
tmpDirectories.length > 0,
String.format(
"No temporary directory has been specified for %s",
CoreOptions.TMP_DIRS.key()));
final int randomIndex = ThreadLocalRandom.current().nextInt(tmpDirectories.length);
return new File(tmpDirectories[randomIndex]);
}
|
Picks a temporary directory randomly from the given configuration.
@param configuration to extract the temp directory from
@return a randomly picked temporary directory
|
getRandomTempDirectory
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
Apache-2.0
|
@Nonnull
public static String[] parseLocalStateDirectories(Configuration configuration) {
String configValue =
configuration.get(
CheckpointingOptions.LOCAL_RECOVERY_TASK_MANAGER_STATE_ROOT_DIRS, "");
return splitPaths(configValue);
}
|
Extracts the local state directories as defined by {@link
CheckpointingOptions#LOCAL_RECOVERY_TASK_MANAGER_STATE_ROOT_DIRS}.
@param configuration configuration object
@return array of configured directories (in order)
|
parseLocalStateDirectories
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
Apache-2.0
|
@Nonnull
public static Configuration createConfiguration(Properties properties) {
final Configuration configuration = new Configuration();
final Set<String> propertyNames = properties.stringPropertyNames();
for (String propertyName : propertyNames) {
configuration.setString(propertyName, properties.getProperty(propertyName));
}
return configuration;
}
|
Creates a new {@link Configuration} from the given {@link Properties}.
@param properties to convert into a {@link Configuration}
@return {@link Configuration} which has been populated by the values of the given {@link
Properties}
|
createConfiguration
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
Apache-2.0
|
public static List<String> convertConfigToWritableLines(
Configuration configuration, boolean flattenYaml) {
if (!flattenYaml) {
return YamlParserUtils.convertAndDumpYamlFromFlatMap(
Collections.unmodifiableMap(configuration.confData));
} else {
Map<String, String> fileWritableMap = configuration.toFileWritableMap();
return fileWritableMap.entrySet().stream()
.map(entry -> entry.getKey() + ": " + entry.getValue())
.collect(Collectors.toList());
}
}
|
Converts the provided configuration data into a format suitable for writing to a file, based
on the {@code flattenYaml} flag and the {@code standardYaml} attribute of the configuration
object.
<p>Only when {@code flattenYaml} is set to {@code false} and the configuration object is
standard yaml, a nested YAML format is used. Otherwise, a flat key-value pair format is
output.
<p>Each entry in the returned list represents a single line that can be written directly to a
file.
<p>Example input (flat map configuration data):
<pre>{@code
{
"parent.child": "value1",
"parent.child2": "value2"
}
}</pre>
<p>Example output when {@code flattenYaml} is {@code false} and the configuration object is
standard yaml:
<pre>{@code
parent:
child: value1
child2: value2
}</pre>
<p>Otherwise, the Example output is:
<pre>{@code
parent.child: value1
parent.child2: value2
}</pre>
@param configuration The configuration to be converted.
@param flattenYaml A boolean flag indicating if the configuration data should be output in a
flattened format.
@return A list of strings, where each string represents a line of the file-writable data in
the chosen format.
|
convertConfigToWritableLines
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
Apache-2.0
|
public static Map<String, String> getPrefixedKeyValuePairs(
String prefix, Configuration configuration) {
Map<String, String> result = new HashMap<>();
for (Map.Entry<String, String> entry : configuration.toMap().entrySet()) {
if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) {
String key = entry.getKey().substring(prefix.length());
result.put(key, entry.getValue());
}
}
return result;
}
|
Extract and parse Flink configuration properties with a given name prefix and return the
result as a Map.
|
getPrefixedKeyValuePairs
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
Apache-2.0
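
A usage sketch; the "kafka." keys are illustrative:

import java.util.Map;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ConfigurationUtils;

class PrefixedPairsSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setString("kafka.bootstrap.servers", "localhost:9092");
        conf.setString("kafka.group.id", "my-group");
        conf.setString("unrelated.key", "x");
        // keys are returned with the "kafka." prefix stripped
        Map<String, String> kafka = ConfigurationUtils.getPrefixedKeyValuePairs("kafka.", conf);
        System.out.println(kafka); // contains group.id=my-group, bootstrap.servers=localhost:9092
    }
}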
|
public static boolean canBePrefixMap(ConfigOption<?> configOption) {
return configOption.getClazz() == Map.class && !configOption.isList();
}
|
Maps can be represented in two ways.
<p>With constant key space:
<pre>
avro-confluent.properties = schema: 1, other-prop: 2
</pre>
<p>Or with variable key space (i.e. prefix notation):
<pre>
avro-confluent.properties.schema = 1
avro-confluent.properties.other-prop = 2
</pre>
|
canBePrefixMap
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigurationUtils.java
|
Apache-2.0
|
public static <IN, OUT> void encodeArrayToConfig(
final WritableConfig configuration,
final ConfigOption<List<OUT>> key,
@Nullable final IN[] values,
final Function<IN, OUT> mapper) {
checkNotNull(configuration);
checkNotNull(key);
checkNotNull(mapper);
if (values == null) {
return;
}
encodeCollectionToConfig(configuration, key, Arrays.asList(values), mapper);
}
|
Puts an array of values of type {@code IN} in a {@link WritableConfig} as a {@link
ConfigOption} of type {@link List} of type {@code OUT}. If {@code values} is {@code null} or
empty, then nothing is put in the configuration.
@param configuration the configuration object to put the list in
@param key the {@link ConfigOption option} to serve as the key for the list in the
configuration
@param values the array of values to put as value for the {@code key}
@param mapper the transformation function from {@code IN} to {@code OUT}.
|
encodeArrayToConfig
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigUtils.java
|
Apache-2.0
|
public static <IN, OUT> void encodeCollectionToConfig(
final WritableConfig configuration,
final ConfigOption<List<OUT>> key,
@Nullable final Collection<IN> values,
final Function<IN, OUT> mapper) {
checkNotNull(configuration);
checkNotNull(key);
checkNotNull(mapper);
if (values == null) {
return;
}
final List<OUT> encodedOption =
values.stream()
.filter(Objects::nonNull)
.map(mapper)
.filter(Objects::nonNull)
.collect(Collectors.toCollection(ArrayList::new));
if (!encodedOption.isEmpty()) {
configuration.set(key, encodedOption);
}
}
|
Puts a {@link Collection} of values of type {@code IN} in a {@link WritableConfig} as a
{@link ConfigOption} of type {@link List} of type {@code OUT}. If {@code values} is {@code
null} or empty, then nothing is put in the configuration.
@param configuration the configuration object to put the list in
@param key the {@link ConfigOption option} to serve as the key for the list in the
configuration
@param values the collection of values to put as value for the {@code key}
@param mapper the transformation function from {@code IN} to {@code OUT}.
|
encodeCollectionToConfig
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ConfigUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ConfigUtils.java
|
Apache-2.0
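
A sketch mapping File values to String paths; JAR_PATHS is a hypothetical option:

import java.io.File;
import java.util.List;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ConfigOptions;
import org.apache.flink.configuration.ConfigUtils;
import org.apache.flink.configuration.Configuration;

class EncodeCollectionSketch {
    static final ConfigOption<List<String>> JAR_PATHS =
            ConfigOptions.key("myapp.jars").stringType().asList().noDefaultValue();

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // IN = File, OUT = String; null elements in the collection would be filtered out
        ConfigUtils.encodeCollectionToConfig(
                conf, JAR_PATHS, List.of(new File("a.jar"), new File("b.jar")),
                File::getAbsolutePath);
        System.out.println(conf.get(JAR_PATHS));
    }
}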
|
@Experimental
public static Configuration forReporter(Configuration configuration, String reporterName) {
return new DelegatingConfiguration(
configuration, ConfigConstants.EVENTS_REPORTER_PREFIX + reporterName + ".");
}
|
Creates a view of the given configuration, via {@link DelegatingConfiguration}, scoped to the
options of the given event reporter.
|
forReporter
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/EventOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/EventOptions.java
|
Apache-2.0
|
public static String genericKeyWithSuffix(String suffix) {
return keyWithResourceNameAndSuffix("<resource_name>", suffix);
}
|
The naming pattern of custom config options for the external resource specified by
{@code <resource_name>}. Only the configurations that follow this pattern will be passed into
the driver factory of that external resource.
<p>It is intentionally included in the user docs while unused in code.
|
genericKeyWithSuffix
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
Apache-2.0
|
private static String keyWithResourceNameAndSuffix(String resourceName, String suffix) {
return String.format(
"%s.%s.%s",
EXTERNAL_RESOURCE_PREFIX,
Preconditions.checkNotNull(resourceName),
Preconditions.checkNotNull(suffix));
}
|
Generate the config option key with resource_name and suffix.
|
keyWithResourceNameAndSuffix
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
Apache-2.0
|
public static String getAmountConfigOptionForResource(String resourceName) {
return keyWithResourceNameAndSuffix(resourceName, EXTERNAL_RESOURCE_AMOUNT_SUFFIX);
}
|
Generate the config option key for the amount of external resource with resource_name.
|
getAmountConfigOptionForResource
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
Apache-2.0
|
public static String getSystemConfigKeyConfigOptionForResource(
String resourceName, String suffix) {
return keyWithResourceNameAndSuffix(resourceName, suffix);
}
|
Generate the config option key for the configuration key of external resource in the
deploying system.
|
getSystemConfigKeyConfigOptionForResource
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
Apache-2.0
|
public static String getExternalResourceParamConfigPrefixForResource(String resourceName) {
return keyWithResourceNameAndSuffix(resourceName, EXTERNAL_RESOURCE_DRIVER_PARAM_SUFFIX);
}
|
Generate the suffix option key prefix for the user-defined params for external resources.
|
getExternalResourceParamConfigPrefixForResource
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/ExternalResourceOptions.java
|
Apache-2.0
|
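Taken together, the helpers above expand a resource name into concrete config keys. A sketch with the illustrative resource name "gpu"; the key literals assume the prefix and suffix constants of this class:

String amountKey = ExternalResourceOptions.getAmountConfigOptionForResource("gpu");
// e.g. "external-resource.gpu.amount"
String paramPrefix = ExternalResourceOptions.getExternalResourceParamConfigPrefixForResource("gpu");
// e.g. "external-resource.gpu.param" -- user-defined driver params hang off this prefix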
public static Configuration loadConfiguration() {
return loadConfiguration(new Configuration());
}
|
Loads the global configuration from the environment. Fails if an error occurs during loading.
Returns an empty configuration object if the environment variable is not set. In production
this variable is set, but tests and local execution/debugging typically don't have it set.
That's why we must not fail if it is not set.
@return the loaded {@link Configuration}
|
loadConfiguration
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/GlobalConfiguration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/GlobalConfiguration.java
|
Apache-2.0
|
public static Configuration loadConfiguration(Configuration dynamicProperties) {
final String configDir = System.getenv(ConfigConstants.ENV_FLINK_CONF_DIR);
if (configDir == null) {
return new Configuration(dynamicProperties);
}
return loadConfiguration(configDir, dynamicProperties);
}
|
Loads the global configuration and adds the given dynamic properties configuration.
@param dynamicProperties The given dynamic properties
@return Returns the loaded global configuration with dynamic properties
|
loadConfiguration
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/GlobalConfiguration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/GlobalConfiguration.java
|
Apache-2.0
|
public static Configuration loadConfiguration(final String configDir) {
return loadConfiguration(configDir, null);
}
|
Loads the configuration files from the specified directory.
<p>YAML files are supported as configuration files.
@param configDir the directory which contains the configuration files
|
loadConfiguration
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/GlobalConfiguration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/GlobalConfiguration.java
|
Apache-2.0
|
private static Configuration loadYAMLResource(File file) {
final Configuration config = new Configuration();
try {
Map<String, Object> configDocument = flatten(YamlParserUtils.loadYamlFile(file));
configDocument.forEach((k, v) -> config.setValueInternal(k, v, false));
return config;
} catch (Exception e) {
throw new RuntimeException("Error parsing YAML configuration.", e);
}
}
|
Loads a YAML-file of key-value pairs.
<p>Keys can be expressed either as nested keys or as {@literal KEY_SEPARATOR} seperated keys.
For example, the following configurations are equivalent:
<pre>
jobmanager.rpc.address: localhost # network address for communication with the job manager
jobmanager.rpc.port : 6123 # network port to connect to for communication with the job manager
taskmanager.rpc.port : 6122 # network port the task manager expects incoming IPC connections
</pre>
<pre>
jobmanager:
rpc:
address: localhost # network address for communication with the job manager
port: 6123 # network port to connect to for communication with the job manager
taskmanager:
rpc:
port: 6122 # network port the task manager expects incoming IPC connections
</pre>
@param file the YAML file to read from
@see <a href="http://www.yaml.org/spec/1.2/spec.html">YAML 1.2 specification</a>
|
loadYAMLResource
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/GlobalConfiguration.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/GlobalConfiguration.java
|
Apache-2.0
|
public boolean isOnlyConsumeFinishedPartition() {
return onlyConsumeFinishedPartition;
}
|
Constraints on how downstream tasks may consume data from upstream hybrid result partitions.
|
isOnlyConsumeFinishedPartition
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/JobManagerOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/JobManagerOptions.java
|
Apache-2.0
|
@Override
public InlineElement getDescription() {
return TextElement.text(description);
}
|
Enum describing the different kinds of job status metrics.
|
getDescription
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/MetricOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/MetricOptions.java
|
Apache-2.0
|
@Experimental
public static Configuration forProvider(Configuration configuration, String providerName) {
return new DelegatingConfiguration(
configuration, DelegationTokenProvider.CONFIG_PREFIX + "." + providerName + ".");
}
|
Returns a view over the given configuration via which options can be set/retrieved for the
given provider.
<pre>
Configuration config = ...
SecurityOptions.forProvider(config, "my_provider")
.set(SecurityOptions.DELEGATION_TOKEN_PROVIDER_ENABLED, false)
...
</pre>
@param configuration backing configuration
@param providerName provider name
@return view over configuration
|
forProvider
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/SecurityOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/SecurityOptions.java
|
Apache-2.0
|
public static boolean isInternalSSLEnabled(Configuration sslConfig) {
return sslConfig.get(SSL_INTERNAL_ENABLED);
}
|
Checks whether SSL for internal communication (rpc, data transport, blob server) is enabled.
|
isInternalSSLEnabled
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/SecurityOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/SecurityOptions.java
|
Apache-2.0
|
public static boolean isRestSSLEnabled(Configuration sslConfig) {
return sslConfig.get(SSL_REST_ENABLED);
}
|
Checks whether SSL for the external REST endpoint is enabled.
|
isRestSSLEnabled
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/SecurityOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/SecurityOptions.java
|
Apache-2.0
|
public static boolean isRestSSLAuthenticationEnabled(Configuration sslConfig) {
checkNotNull(sslConfig, "sslConfig");
return isRestSSLEnabled(sslConfig) && sslConfig.get(SSL_REST_AUTHENTICATION_ENABLED);
}
|
Checks whether mutual SSL authentication for the external REST endpoint is enabled.
|
isRestSSLAuthenticationEnabled
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/SecurityOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/SecurityOptions.java
|
Apache-2.0
|
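A sketch combining the SSL predicates above, e.g. when deciding how to set up the REST endpoint and internal connections:

Configuration config = GlobalConfiguration.loadConfiguration();
if (SecurityOptions.isRestSSLAuthenticationEnabled(config)) {
    // implies isRestSSLEnabled(config) as well, since the check requires both flags
}
if (SecurityOptions.isInternalSSLEnabled(config)) {
    // enable TLS for rpc / data transport / blob server connections
}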
@Experimental
public static Configuration forReporter(Configuration configuration, String reporterName) {
return new DelegatingConfiguration(
configuration, ConfigConstants.TRACES_REPORTER_PREFIX + reporterName + ".");
}
|
Configuration options for traces and trace reporters.
|
forReporter
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/TraceOptions.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/TraceOptions.java
|
Apache-2.0
|
public static synchronized @Nonnull Map<String, Object> loadYamlFile(File file)
throws Exception {
try (FileInputStream inputStream = new FileInputStream(file)) {
Map<String, Object> yamlResult =
(Map<String, Object>) loader.loadFromInputStream(inputStream);
return yamlResult == null ? new HashMap<>() : yamlResult;
} catch (FileNotFoundException e) {
LOG.error("Failed to find YAML file", e);
throw e;
} catch (IOException | YamlEngineException e) {
if (e instanceof MarkedYamlEngineException) {
YamlEngineException exception =
wrapExceptionToHiddenSensitiveData((MarkedYamlEngineException) e);
LOG.error("Failed to parse YAML configuration", exception);
throw exception;
} else {
throw e;
}
}
}
|
Loads the contents of the given YAML file into a map.
@param file the YAML file to load.
@return a non-null map representing the YAML content. If the file is empty or only contains
comments, an empty map is returned.
@throws FileNotFoundException if the YAML file is not found.
@throws YamlEngineException if the file cannot be parsed.
@throws IOException if an I/O error occurs while reading from the file stream.
|
loadYamlFile
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/YamlParserUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/YamlParserUtils.java
|
Apache-2.0
|
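A minimal sketch of loading a YAML file into the raw (possibly nested) map; the file path is illustrative:

File yamlFile = new File("/opt/flink/conf/config.yaml");   // illustrative path
Map<String, Object> raw = YamlParserUtils.loadYamlFile(yamlFile);
// An empty or comment-only file yields an empty map rather than null.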
public static synchronized String toYAMLString(Object value) {
try {
String output = flowDumper.dumpToString(value);
// remove the line break
String linebreak = flowDumperSettings.getBestLineBreak();
if (output.endsWith(linebreak)) {
output = output.substring(0, output.length() - linebreak.length());
}
return output;
} catch (MarkedYamlEngineException exception) {
throw wrapExceptionToHiddenSensitiveData(exception);
}
}
|
Converts the given value to a string representation in the YAML syntax. This method uses a
YAML parser to convert the object to YAML format.
<p>The YAML dumper appends a line break to the end of its output. This method strips that
trailing line break if present.
<p>Note: This method may perform escaping on certain characters in the value to ensure proper
YAML syntax.
@param value The value to be converted.
@return The string representation of the value in YAML syntax.
|
toYAMLString
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/YamlParserUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/YamlParserUtils.java
|
Apache-2.0
|
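A sketch of the flow-style dump; the exact quoting and escaping is up to the YAML engine:

String listAsYaml = YamlParserUtils.toYAMLString(Arrays.asList("a", "b", "c"));
// e.g. "[a, b, c]" -- the trailing line break emitted by the dumper has been stripped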
private static YamlEngineException wrapExceptionToHiddenSensitiveData(
MarkedYamlEngineException exception) {
StringBuilder lines = new StringBuilder();
String context = exception.getContext();
Optional<Mark> contextMark = exception.getContextMark();
Optional<Mark> problemMark = exception.getProblemMark();
String problem = exception.getProblem();
if (context != null) {
lines.append(context);
lines.append("\n");
}
if (contextMark.isPresent()
&& (problem == null
|| !problemMark.isPresent()
|| contextMark.get().getName().equals(problemMark.get().getName())
|| contextMark.get().getLine() != problemMark.get().getLine()
|| contextMark.get().getColumn() != problemMark.get().getColumn())) {
lines.append(hiddenSensitiveDataInMark(contextMark.get()));
lines.append("\n");
}
if (problem != null) {
lines.append(problem);
lines.append("\n");
}
if (problemMark.isPresent()) {
lines.append(hiddenSensitiveDataInMark(problemMark.get()));
lines.append("\n");
}
Throwable cause = exception.getCause();
if (cause instanceof MarkedYamlEngineException) {
cause = wrapExceptionToHiddenSensitiveData((MarkedYamlEngineException) cause);
}
YamlEngineException yamlException = new YamlEngineException(lines.toString(), cause);
yamlException.setStackTrace(exception.getStackTrace());
return yamlException;
}
|
This method wraps a {@link MarkedYamlEngineException} to hide sensitive data in its message.
Before using this method, an exception message might include sensitive information like:
<pre>{@code
while constructing a mapping
in 'reader', line 1, column 1:
key1: secret1
^
found duplicate key key1
in 'reader', line 2, column 1:
key1: secret2
^
}</pre>
<p>After using this method, the message will be sanitized to hide the sensitive details:
<pre>{@code
while constructing a mapping
in 'reader', line 1, column 1
found duplicate key key1
in 'reader', line 2, column 1
}</pre>
@param exception The MarkedYamlEngineException containing potentially sensitive data.
@return A YamlEngineException with a message that has sensitive data hidden.
|
wrapExceptionToHiddenSensitiveData
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/YamlParserUtils.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/YamlParserUtils.java
|
Apache-2.0
|
public DescriptionBuilder text(String format, InlineElement... elements) {
blocks.add(TextElement.text(format, elements));
return this;
}
|
Adds a block of text with placeholders ("%s") that will be replaced with the proper string
representation of the given {@link InlineElement}s. For example:
<p>{@code text("This is a text with a link %s", link("https://somepage", "to here"))}
@param format text with placeholders for elements
@param elements elements to be put in the text
@return description with added block of text
|
text
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/description/Description.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/description/Description.java
|
Apache-2.0
|
public DescriptionBuilder add(BlockElement block) {
blocks.add(block);
return this;
}
|
Adds a block of description.
@param block the block of description to add
@return this builder, for chaining
|
add
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/description/Description.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/description/Description.java
|
Apache-2.0
|
public DescriptionBuilder list(InlineElement... elements) {
blocks.add(ListElement.list(elements));
return this;
}
|
Adds a bulleted list to the description.
|
list
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/description/Description.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/description/Description.java
|
Apache-2.0
|
public static LinkElement link(String link, String text) {
return new LinkElement(link, text);
}
|
Creates a link with a given URL and description.
@param link address that this link should point to
@param text a description for that link, that should be used in text
@return link representation
|
link
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/description/LinkElement.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/description/LinkElement.java
|
Apache-2.0
|
public static TextElement text(String format, InlineElement... elements) {
return new TextElement(format, Arrays.asList(elements));
}
|
Creates a block of text with placeholders ("%s") that will be replaced with the proper string
representation of the given {@link InlineElement}s. For example:
<p>{@code text("This is a text with a link %s", link("https://somepage", "to here"))}
@param format text with placeholders for elements
@param elements elements to be put in the text
@return block of text
|
text
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/description/TextElement.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/description/TextElement.java
|
Apache-2.0
|
public static InlineElement wrap(InlineElement... elements) {
return text(Strings.repeat("%s", elements.length), elements);
}
|
Wraps a list of {@link InlineElement}s into a single {@link TextElement}.
|
wrap
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/configuration/description/TextElement.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/configuration/description/TextElement.java
|
Apache-2.0
|
@Override
@Internal
public InlineElement getDescription() {
return text(description);
}
|
Defines the ownership of state files when Flink restores from a given savepoint or retained checkpoint.
|
getDescription
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/execution/RecoveryClaimMode.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/execution/RecoveryClaimMode.java
|
Apache-2.0
|
@Override
@Internal
public InlineElement getDescription() {
return description;
}
|
A format specific to the chosen state backend, in its native binary format. Might be faster
to take and restore from than the canonical one.
|
getDescription
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/execution/SavepointFormatType.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/execution/SavepointFormatType.java
|
Apache-2.0
|
@Override
protected void doClose(List<AutoCloseable> toClose) throws Exception {
IOUtils.closeAll(reverse(toClose), Throwable.class);
}
|
This implementation assumes that any exception may be thrown during closing.
|
doClose
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/AutoCloseableRegistry.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/AutoCloseableRegistry.java
|
Apache-2.0
|
public void unregisterAndCloseAll(Closeable... toUnregisterAndClose) throws IOException {
IOException suppressed = null;
for (Closeable closeable : toUnregisterAndClose) {
if (unregisterCloseable(closeable)) {
try {
closeable.close();
} catch (IOException ex) {
suppressed = ExceptionUtils.firstOrSuppressed(ex, suppressed);
}
}
}
if (suppressed != null) {
throw suppressed;
}
}
|
Unregisters all given {@link Closeable} objects from this registry and closes all objects
that were actually registered. Suppresses (and collects) all exceptions that happen during
closing and throws only after all {@link Closeable} objects have been processed.
@param toUnregisterAndClose the closeables to unregister and close.
@throws IOException collects all exceptions encountered during closing of the given objects.
|
unregisterAndCloseAll
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/CloseableRegistry.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/CloseableRegistry.java
|
Apache-2.0
|
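A sketch of the register-then-close-eagerly pattern this method supports; the file system and path are illustrative:

CloseableRegistry registry = new CloseableRegistry();
FSDataInputStream in = fs.open(path);
registry.registerCloseable(in);
// ... use the stream, then close it early without waiting for the registry itself to close:
registry.unregisterAndCloseAll(in);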
public static Path addEntropy(FileSystem fs, Path path) throws IOException {
// check and possibly inject entropy into the path
final EntropyInjectingFileSystem efs = getEntropyFs(fs);
return efs == null ? path : resolveEntropy(path, efs, true);
}
|
Handles entropy injection across regular and entropy-aware file systems.
<p>If the given file system is entropy-aware (i.e., implements {@link
EntropyInjectingFileSystem}), then this method replaces the entropy marker in the path with
random characters. The entropy marker is defined by {@link
EntropyInjectingFileSystem#getEntropyInjectionKey()}.
<p>If the given file system does not implement {@code EntropyInjectingFileSystem}, then this
method returns the same path.
|
addEntropy
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/EntropyInjector.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/EntropyInjector.java
|
Apache-2.0
|
public static OutputStreamAndPath createEntropyAware(
FileSystem fs, Path path, WriteMode writeMode) throws IOException {
final Path processedPath = addEntropy(fs, path);
// create the stream on the original file system to let the safety net
// take its effect
final FSDataOutputStream out = fs.create(processedPath, writeMode);
return new OutputStreamAndPath(out, processedPath);
}
|
Handles entropy injection across regular and entropy-aware file systems.
<p>If the given file system is entropy-aware (i.e., implements {@link
EntropyInjectingFileSystem}), then this method replaces the entropy marker in the path with
random characters. The entropy marker is defined by {@link
EntropyInjectingFileSystem#getEntropyInjectionKey()}.
<p>If the given file system does not implement {@code EntropyInjectingFileSystem}, then this
method delegates to {@link FileSystem#create(Path, WriteMode)} and returns the same path in
the resulting {@code OutputStreamAndPath}.
|
createEntropyAware
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/EntropyInjector.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/EntropyInjector.java
|
Apache-2.0
|
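A sketch of what entropy resolution does, assuming an entropy-aware file system whose injection key is "_entropy_" (key and paths are illustrative):

Path target = new Path("s3://bucket/checkpoints/_entropy_/chk-42/metadata");
Path resolved = EntropyInjector.addEntropy(fs, target);
// e.g. s3://bucket/checkpoints/Xq1z8a/chk-42/metadata on an entropy-aware file system;
// the path comes back unchanged on a regular file system.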
public Path getPath() {
return file;
}
|
Returns the path of the file containing this split's data.
@return the path of the file containing this split's data.
|
getPath
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/FileInputSplit.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/FileInputSplit.java
|
Apache-2.0
|
public long getStart() {
return start;
}
|
Returns the position of the first byte in the file to process.
@return the position of the first byte in the file to process
|
getStart
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/FileInputSplit.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/FileInputSplit.java
|
Apache-2.0
|
public static void initialize(Configuration config, @Nullable PluginManager pluginManager)
throws IllegalConfigurationException {
LOCK.lock();
try {
// make sure file systems are re-instantiated after re-configuration
CACHE.clear();
FS_FACTORIES.clear();
Collection<Supplier<Iterator<FileSystemFactory>>> factorySuppliers = new ArrayList<>(2);
factorySuppliers.add(() -> ServiceLoader.load(FileSystemFactory.class).iterator());
if (pluginManager != null) {
factorySuppliers.add(
() ->
Iterators.transform(
pluginManager.load(FileSystemFactory.class),
PluginFileSystemFactory::of));
}
final List<FileSystemFactory> fileSystemFactories =
loadFileSystemFactories(factorySuppliers);
// configure all file system factories
for (FileSystemFactory factory : fileSystemFactories) {
factory.configure(config);
String scheme = factory.getScheme();
FileSystemFactory fsf =
ConnectionLimitingFactory.decorateIfLimited(factory, scheme, config);
FS_FACTORIES.put(scheme, fsf);
}
// configure the default (fallback) factory
FALLBACK_FACTORY.configure(config);
// also read the default file system scheme
final String stringifiedUri = config.get(CoreOptions.DEFAULT_FILESYSTEM_SCHEME, null);
if (stringifiedUri == null) {
defaultScheme = null;
} else {
try {
defaultScheme = new URI(stringifiedUri);
} catch (URISyntaxException e) {
throw new IllegalConfigurationException(
"The default file system scheme ('"
+ CoreOptions.DEFAULT_FILESYSTEM_SCHEME
+ "') is invalid: "
+ stringifiedUri,
e);
}
}
ALLOWED_FALLBACK_FILESYSTEMS.clear();
final Iterable<String> allowedFallbackFilesystems =
Splitter.on(';')
.omitEmptyStrings()
.trimResults()
.split(config.get(CoreOptions.ALLOWED_FALLBACK_FILESYSTEMS));
allowedFallbackFilesystems.forEach(ALLOWED_FALLBACK_FILESYSTEMS::add);
} finally {
LOCK.unlock();
}
}
|
Initializes the shared file system settings.
<p>The given configuration is passed to each file system factory to initialize the respective
file systems. Because file system configurations may change as a result of this call, this
method clears the file system instance cache.
<p>This method also reads the default file system URI from the configuration key {@link
CoreOptions#DEFAULT_FILESYSTEM_SCHEME}. All calls to {@link FileSystem#get(URI)} where the
URI has no scheme will be interpreted as relative to that URI. As an example, assume the
default file system URI is set to {@code 'hdfs://localhost:9000/'}. A file path of {@code
'/user/USERNAME/in.txt'} is interpreted as {@code
'hdfs://localhost:9000/user/USERNAME/in.txt'}.
@param config the configuration from where to fetch the parameter.
@param pluginManager optional plugin manager that is used to initialize file systems provided
    as plugins.
|
initialize
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
Apache-2.0
|
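A sketch of the typical bootstrap order, using the plugin manager so that plugin-provided file systems are registered as well (PluginUtils lives in flink-core):

Configuration config = GlobalConfiguration.loadConfiguration();
PluginManager pluginManager = PluginUtils.createPluginManagerFromRootFolder(config);
FileSystem.initialize(config, pluginManager);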
public static FileSystem getLocalFileSystem() {
return FileSystemSafetyNet.wrapWithSafetyNetWhenActivated(
LocalFileSystem.getSharedInstance());
}
|
Returns a reference to the {@link FileSystem} instance for accessing the local file system.
@return a reference to the {@link FileSystem} instance for accessing the local file system.
|
getLocalFileSystem
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
Apache-2.0
|
public static URI getDefaultFsUri() {
return defaultScheme != null ? defaultScheme : LocalFileSystem.getLocalFsURI();
}
|
Gets the default file system URI that is used for paths and file systems that do not specify
an explicit scheme.
<p>As an example, assume the default file system URI is set to {@code
'hdfs://someserver:9000/'}. A file path of {@code '/user/USERNAME/in.txt'} is interpreted as
{@code 'hdfs://someserver:9000/user/USERNAME/in.txt'}.
@return The default file system URI
|
getDefaultFsUri
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
Apache-2.0
|
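A sketch of scheme-less path resolution against the default file system URI, assuming 'hdfs://localhost:9000/' was configured as the default scheme:

FileSystem fs = new Path("/user/me/in.txt").getFileSystem();
// resolves against hdfs://localhost:9000/, not the local file system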
@Deprecated
public FSDataOutputStream create(
Path f, boolean overwrite, int bufferSize, short replication, long blockSize)
throws IOException {
return create(f, overwrite ? WriteMode.OVERWRITE : WriteMode.NO_OVERWRITE);
}
|
Opens an FSDataOutputStream at the indicated Path.
<p>This method is deprecated because most of its parameters are ignored by most file
systems. To control, for example, the replication factor and block size in the Hadoop
Distributed File System, make sure that the respective Hadoop configuration file is either
linked from the Flink configuration or on the classpath of either Flink or the user code.
@param f the file name to open
@param overwrite if a file with this name already exists, then if true, the file will be
overwritten, and if false an error will be thrown.
@param bufferSize the size of the buffer to be used.
@param replication required block replication for the file.
@param blockSize the size of the file blocks
@throws IOException Thrown if the stream could not be opened because of an I/O error, or
    because a file already exists at that path and the write mode indicates not to overwrite
    the file.
@deprecated Deprecated because not well supported across types of file systems. Control the
behavior of specific file systems via configurations instead.
|
create
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
Apache-2.0
|
@Deprecated
public FSDataOutputStream create(Path f, boolean overwrite) throws IOException {
return create(f, overwrite ? WriteMode.OVERWRITE : WriteMode.NO_OVERWRITE);
}
|
Opens an FSDataOutputStream at the indicated Path.
@param f the file name to open
@param overwrite if a file with this name already exists, then if true, the file will be
overwritten, and if false an error will be thrown.
@throws IOException Thrown if the stream could not be opened because of an I/O error, or
    because a file already exists at that path and the write mode indicates not to overwrite
    the file.
@deprecated Use {@link #create(Path, FileSystem.WriteMode)} instead.
|
create
|
java
|
apache/flink
|
flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
https://github.com/apache/flink/blob/master/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
|
Apache-2.0
|
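The non-deprecated replacement for both overloads above; a sketch writing a file with explicit overwrite semantics ('outputPath' and 'payload' are illustrative):

FileSystem fs = outputPath.getFileSystem();
try (FSDataOutputStream out = fs.create(outputPath, FileSystem.WriteMode.NO_OVERWRITE)) {
    out.write(payload);
}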