code
stringlengths
25
201k
docstring
stringlengths
19
96.2k
func_name
stringlengths
0
235
language
stringclasses
1 value
repo
stringlengths
8
51
path
stringlengths
11
314
url
stringlengths
62
377
license
stringclasses
7 values
/**
 * Returns all array elements stored under a given existing key.
 *
 * @param key property key under which the array is stored
 * @param keyMapper maps each element's full property key to an array element
 */
public <E> List<E> getArray(String key, Function<String, E> keyMapper) {
    // the key must exist; otherwise the exception from exceptionSupplier(key) is thrown
    return getOptionalArray(key, keyMapper)
            .orElseThrow(exceptionSupplier(key));
}
Returns all array elements under a given existing key.
getArray
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns whether the value stored under the given key is exactly equal to the expected value.
 *
 * <p>The key must exist; otherwise the exception supplied by {@code exceptionSupplier(key)} is
 * thrown.
 */
public boolean isValue(String key, String value) {
    final String stored = optionalGet(key).orElseThrow(exceptionSupplier(key));
    return stored.equals(value);
}
Returns if a value under key is exactly equal to the given value.
isValue
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns all properties whose key starts with {@code prefix + '.'}, with that prefix (including
 * the dot) stripped from the returned keys.
 *
 * <p>For example, for prefix "flink" and a property "flink.k" = "v", the result contains
 * "k" = "v".
 */
public Map<String, String> getPropertiesWithPrefix(String prefix) {
    final String prefixWithDot = prefix + '.';
    return properties.entrySet().stream()
            .filter(entry -> entry.getKey().startsWith(prefixWithDot))
            .collect(
                    Collectors.toMap(
                            // strip "prefix." from each matching key
                            entry -> entry.getKey().substring(prefixWithDot.length()),
                            Map.Entry::getValue));
}
Returns a map of properties whose key starts with the given prefix, and the prefix is removed upon return. <p>For example, for prefix "flink" and a map of a single property with key "flink.k" and value "v", this method will return it as key "k" and value "v" by identifying and removing the prefix "flink".
getPropertiesWithPrefix
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates that the property under the given key has exactly the given value.
 *
 * @param isOptional if true, a missing key is accepted; the equality check still applies when
 *     the key is present
 */
public void validateValue(String key, String value, boolean isOptional) {
    validateOptional(
            key,
            isOptional,
            (actual) -> {
                if (!actual.equals(value)) {
                    throw new ValidationException(
                            "Could not find required value '"
                                    + value
                                    + "' for property '"
                                    + key
                                    + "'.");
                }
            });
}
Validates that a certain value is present under the given key.
validateValue
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates that the value under the given key is a boolean literal ("true" or "false",
 * case-insensitive).
 *
 * @param isOptional if true, a missing key is accepted
 */
public void validateBoolean(String key, boolean isOptional) {
    validateOptional(
            key,
            isOptional,
            (value) -> {
                final boolean wellFormed =
                        value.equalsIgnoreCase("true") || value.equalsIgnoreCase("false");
                if (!wellFormed) {
                    throw new ValidationException(
                            "Property '"
                                    + key
                                    + "' must be a boolean value (true/false) but was: "
                                    + value);
                }
            });
}
Validates that a boolean value is present under the given key.
validateBoolean
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates a big decimal property. The boundaries are inclusive.
 */
public void validateBigDecimal(String key, boolean isOptional, BigDecimal min, BigDecimal max) {
    // delegate to the generic comparable validation; BigDecimal's String constructor parses
    validateComparable(key, isOptional, min, max, "decimal", BigDecimal::new);
}
Validates a big decimal property. The boundaries are inclusive.
validateBigDecimal
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
public void validateFixedIndexedProperties( String key, boolean allowEmpty, Map<String, Consumer<String>> subKeyValidation) { // determine max index final int maxIndex = extractMaxIndex(key, "\\.(.*)"); if (maxIndex < 0 && !allowEmpty) { throw new ValidationException("Property key '" + key + "' must not be empty."); } // validate for (int i = 0; i <= maxIndex; i++) { for (Map.Entry<String, Consumer<String>> subKey : subKeyValidation.entrySet()) { final String fullKey = key + '.' + i + '.' + subKey.getKey(); // run validation logic subKey.getValue().accept(fullKey); } } }
Validation for fixed indexed properties. <p>For example: <pre> schema.fields.0.data-type = INT, schema.fields.0.name = test schema.fields.1.data-type = BIGINT, schema.fields.1.name = test2 </pre> <p>The subKeyValidation map must define e.g. "data-type" and "name" and a validation logic for the given full key.
validateFixedIndexedProperties
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates a Flink {@link MemorySize} with a default range of [0, Long.MAX_VALUE].
 *
 * <p>The precision defines the allowed minimum unit in bytes (e.g. 1024 would only allow KB).
 */
public void validateMemorySize(String key, boolean isOptional, int precision) {
    validateMemorySize(key, isOptional, precision, 0L, Long.MAX_VALUE);
}
Validates a Flink {@link MemorySize}. <p>The precision defines the allowed minimum unit in bytes (e.g. 1024 would only allow KB).
validateMemorySize
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates a Java {@link Duration} with a default range of [0, Long.MAX_VALUE].
 *
 * <p>The precision defines the allowed minimum unit in milliseconds (e.g. 1000 would only allow
 * seconds).
 */
public void validateDuration(String key, boolean isOptional, int precision) {
    validateDuration(key, isOptional, precision, 0L, Long.MAX_VALUE);
}
Validates a Java {@link Duration}. <p>The precision defines the allowed minimum unit in milliseconds (e.g. 1000 would only allow seconds).
validateDuration
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
public void validateEnum( String key, boolean isOptional, Map<String, Consumer<String>> enumValidation) { validateOptional( key, isOptional, (value) -> { if (!enumValidation.containsKey(value)) { throw new ValidationException( "Unknown value for property '" + key + "'.\n" + "Supported values are " + enumValidation.keySet() + " but was: " + value); } else { // run validation logic enumValidation.get(value).accept(key); } }); }
Validates an enum property with a set of validation logic for each enum value.
validateEnum
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates an enum property against a set of allowed values (no per-value validation logic).
 */
public void validateEnumValues(String key, boolean isOptional, List<String> values) {
    // every allowed value maps to the no-op validation
    final Map<String, Consumer<String>> validation =
            values.stream().collect(Collectors.toMap(v -> v, v -> noValidation()));
    validateEnum(key, isOptional, validation);
}
Validates an enum property with a set of enum values.
validateEnumValues
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates that no property key starting with the given prefix exists.
 */
public void validatePrefixExclusion(String prefix) {
    for (String existingKey : properties.keySet()) {
        if (existingKey.startsWith(prefix)) {
            throw new ValidationException(
                    "Properties with prefix '"
                            + prefix
                            + "' are not allowed in this context. "
                            + "But property '"
                            + existingKey
                            + "' was found.");
        }
    }
}
Validates that the given prefix is not included in these properties.
validatePrefixExclusion
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates that the given key is not contained in these properties.
 */
public void validateExclusion(String key) {
    if (properties.containsKey(key)) {
        throw new ValidationException("Property '" + key + "' is not allowed in this context.");
    }
}
Validates that the given key is not included in these properties.
validateExclusion
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns whether the given key is contained in these properties.
 */
public boolean containsKey(String key) {
    return properties.containsKey(key);
}
Returns if the given key is contained.
containsKey
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns whether at least one property key starts with the given prefix.
 */
public boolean hasPrefix(String prefix) {
    for (String key : properties.keySet()) {
        if (key.startsWith(prefix)) {
            return true;
        }
    }
    return false;
}
Returns if a given prefix exists in the properties.
hasPrefix
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns the properties as an unmodifiable map copy.
 */
public Map<String, String> asMap() {
    // defensive copy so later mutations of this instance are not visible through the view
    return Collections.unmodifiableMap(new HashMap<>(properties));
}
Returns the properties as a map copy.
asMap
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns the properties as a map copy with the given prefix prepended to every key.
 */
public Map<String, String> asPrefixedMap(String prefix) {
    final Map<String, String> prefixed = new HashMap<>();
    properties.forEach((key, value) -> prefixed.put(prefix + key, value));
    return prefixed;
}
Returns the properties as a map copy with a prefix key.
asPrefixedMap
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates a property by first parsing the string value to a comparable object. The boundaries
 * are inclusive.
 *
 * <p>Bug fix: the range-violation {@link ValidationException} used to be thrown inside the same
 * {@code try} whose {@code catch (Exception e)} handled parse failures, so the range message was
 * swallowed and replaced by the misleading "must be a &lt;type&gt; value" message. The parse is
 * now isolated in its own try/catch (which also preserves the cause), and the range check runs
 * afterwards.
 *
 * @param key property key to validate
 * @param isOptional if true, a missing key is accepted
 * @param min inclusive lower bound
 * @param max inclusive upper bound
 * @param typeName human-readable type name used in error messages
 * @param parseFunction parses the raw string value; any exception counts as a parse failure
 */
private <T extends Comparable<T>> void validateComparable(
        String key,
        boolean isOptional,
        T min,
        T max,
        String typeName,
        Function<String, T> parseFunction) {
    if (!properties.containsKey(key)) {
        if (!isOptional) {
            throw new ValidationException("Could not find required property '" + key + "'.");
        }
        return;
    }
    final String value = properties.get(key);
    final T parsed;
    try {
        parsed = parseFunction.apply(value);
    } catch (Exception e) {
        // parse failures get the generic message; keep the cause for debugging
        throw new ValidationException(
                "Property '" + key + "' must be a " + typeName + " value but was: " + value, e);
    }
    // range check outside the try so its ValidationException is not swallowed above
    if (parsed.compareTo(min) < 0 || parsed.compareTo(max) > 0) {
        throw new ValidationException(
                "Property '"
                        + key
                        + "' must be a "
                        + typeName
                        + " value between "
                        + min
                        + " and "
                        + max
                        + " but was: "
                        + parsed);
    }
}
Validates a property by first parsing the string value to a comparable object. The boundaries are inclusive.
validateComparable
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Validates the options of the legacy filesystem connector.
 *
 * <p>Requires {@code CONNECTOR_TYPE} to equal the filesystem connector type and
 * {@code CONNECTOR_PATH} to be present with a minimum length of 1.
 */
@Override
public void validate(DescriptorProperties properties) {
    super.validate(properties);
    properties.validateValue(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE, false);
    properties.validateString(CONNECTOR_PATH, false, 1);
}
Validator for the options of the filesystem connector. @deprecated The legacy CSV connector has been replaced by {@code FileSource} / {@code FileSink}. It is kept only to support tests for the legacy connector stack.
validate
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/FileSystemValidator.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/FileSystemValidator.java
Apache-2.0
/**
 * Creates a {@link CallExpression} to a permanent function (persisted in a {@link Catalog} or
 * provided by a {@link Module}).
 */
public static CallExpression permanent(
        FunctionIdentifier functionIdentifier,
        FunctionDefinition functionDefinition,
        List<ResolvedExpression> args,
        DataType dataType) {
    final FunctionIdentifier identifier =
            Preconditions.checkNotNull(
                    functionIdentifier,
                    "Function identifier must not be null for permanent functions.");
    // the leading 'false' distinguishes permanent calls from temporary ones (see temporary())
    return new CallExpression(false, identifier, functionDefinition, args, dataType);
}
Creates a {@link CallExpression} to a permanent function (persisted in a {@link Catalog} or provided by a {@link Module}).
permanent
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java
Apache-2.0
/**
 * Creates a {@link CallExpression} to a resolved built-in function. It assumes that the
 * {@link BuiltInFunctionDefinition} instance is provided by the framework (usually the core
 * module).
 */
@Internal
public static CallExpression permanent(
        BuiltInFunctionDefinition builtInFunctionDefinition,
        List<ResolvedExpression> args,
        DataType dataType) {
    // built-in functions are identified by their declared name
    final FunctionIdentifier identifier =
            FunctionIdentifier.of(builtInFunctionDefinition.getName());
    return new CallExpression(false, identifier, builtInFunctionDefinition, args, dataType);
}
Creates a {@link CallExpression} to a resolved built-in function. It assumes that the {@link BuiltInFunctionDefinition} instance is provided by the framework (usually the core module).
permanent
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java
Apache-2.0
/**
 * Creates a {@link CallExpression} to a temporary function (potentially shadowing a
 * {@link Catalog} function or providing a system function).
 */
public static CallExpression temporary(
        FunctionIdentifier functionIdentifier,
        FunctionDefinition functionDefinition,
        List<ResolvedExpression> args,
        DataType dataType) {
    final FunctionIdentifier identifier =
            Preconditions.checkNotNull(
                    functionIdentifier,
                    "Function identifier must not be null for temporary functions.");
    // the leading 'true' marks the call as temporary (see permanent())
    return new CallExpression(true, identifier, functionDefinition, args, dataType);
}
Creates a {@link CallExpression} to a temporary function (potentially shadowing a {@link Catalog} function or providing a system function).
temporary
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java
Apache-2.0
/**
 * Rejects serialization of inline functions; this default implementation only supports
 * previously registered functions.
 */
@Override
public String serializeInlineFunction(FunctionDefinition functionDefinition) {
    throw new ValidationException(
            "Only functions that have been registered before are serializable.");
}
Default implementation of {@link SqlFactory} that throws an exception when trying to serialize an inline function.
serializeInlineFunction
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/DefaultSqlFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/DefaultSqlFactory.java
Apache-2.0
/**
 * Fallback for {@link NestedFieldReferenceExpression}; visitors that support nested field
 * references must override this method.
 */
default R visit(NestedFieldReferenceExpression nestedFieldReference) {
    throw new UnsupportedOperationException("NestedFieldReferenceExpression is not supported.");
}
The visitor definition of {@link Expression}. <p>An expression visitor transforms an expression to instances of {@code R}. <p>Please note that only {@link ResolvedExpression}s are listed here. Pure API expression are handled in {@link #visit(Expression)}.
visit
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/ExpressionVisitor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/ExpressionVisitor.java
Apache-2.0
/**
 * Returns true if the wrapped literal value is {@code null}.
 */
public boolean isNull() {
    return value == null;
}
Expression for constant literal values. <p>By design, this class can take any value described by a {@link DataType}. However, it is recommended to use instances with default conversion (see {@link DataType#getConversionClass()}. <p>Equals/hashCode support of this expression depends on the equals/hashCode support of the value. <p>The data type can be extracted automatically from non-null values using value-based extraction (see {@link ValueDataTypeConverter}). <p>Symbols (enums extending from {@link TableSymbol}) are considered as literal values.
isNull
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/ValueLiteralExpression.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/ValueLiteralExpression.java
Apache-2.0
/**
 * Supports (nested) arrays and makes string values more explicit.
 *
 * <p>Strings are wrapped in single quotes with embedded quotes doubled; object arrays are
 * rendered recursively as {@code [e1, e2, ...]}; everything else falls back to
 * {@code StringUtils.arrayAwareToString}.
 */
private static String stringifyValue(Object value) {
    // Note: a String[] is also an Object[] (array covariance), and the former dedicated
    // String[] branch performed the exact same recursion — it was redundant duplication.
    if (value instanceof Object[]) {
        final Object[] array = (Object[]) value;
        return Stream.of(array)
                .map(ValueLiteralExpression::stringifyValue)
                .collect(Collectors.joining(", ", "[", "]"));
    } else if (value instanceof String) {
        // escape embedded single quotes SQL-style
        return "'" + ((String) value).replace("'", "''") + "'";
    }
    return StringUtils.arrayAwareToString(value);
}
Supports (nested) arrays and makes string values more explicit.
stringifyValue
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/ValueLiteralExpression.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/ValueLiteralExpression.java
Apache-2.0
/**
 * Returns the set of {@link ConfigOption}s that are forwarded directly to the runtime
 * implementation without affecting the final execution topology. By default, no options are
 * forwarded.
 */
default Set<ConfigOption<?>> forwardOptions() {
    return Collections.emptySet();
}
Returns a set of {@link ConfigOption} that are directly forwarded to the runtime implementation but don't affect the final execution topology. <p>Options declared here can override options of the persisted plan during an enrichment phase. Since a restored topology is static, an implementer has to ensure that the declared options don't affect fundamental abilities such as {@link SupportsProjectionPushDown} or {@link SupportsFilterPushDown}. <p>For example, given a database connector, if an option defines the connection timeout, changing this value does not affect the pipeline topology and can be allowed. However, an option that defines whether the connector supports {@link SupportsReadingMetadata} or not is not allowed. The planner might not react to changed abilities anymore. @see DynamicTableFactory.Context#getEnrichmentOptions() @see TableFactoryHelper#getOptions() @see FormatFactory#forwardOptions()
forwardOptions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/DynamicTableFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/DynamicTableFactory.java
Apache-2.0
/**
 * Returns options that may enrich the options of the original {@link #getCatalogTable()} during
 * a plan restore. By default, no enrichment options are provided.
 */
default Map<String, String> getEnrichmentOptions() {
    return Collections.emptyMap();
}
Returns a map of options that can enrich the options of the original {@link #getCatalogTable()} during a plan restore. <p>If and only if {@code table.plan.restore.catalog-objects} is set to {@code ALL}, this method may return a non-empty {@link Map} of options retrieved from the {@link Catalog}. <p>Because only the {@link DynamicTableFactory} is able to decide which options are safe to be forwarded without affecting the original topology, enrichment options are exposed through this method. In general, it's highly recommended using the {@link FactoryUtil#createTableFactoryHelper(DynamicTableFactory, Context)} to merge the options and then get the result with {@link TableFactoryHelper#getOptions()}. The helper considers both {@link #forwardOptions()} and {@link FormatFactory#forwardOptions()}. <p>Since a restored topology is static, an implementer has to ensure that the declared options don't affect fundamental abilities. The planner might not react to changed abilities anymore. @see TableFactoryHelper
getEnrichmentOptions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/DynamicTableFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/DynamicTableFactory.java
Apache-2.0
/**
 * Returns the physical row data type of the table, i.e. only physical columns — no computed or
 * metadata columns.
 *
 * <p>Shortcut for {@code getCatalogTable().getResolvedSchema().toPhysicalRowDataType()}.
 */
default DataType getPhysicalRowDataType() {
    return getCatalogTable().getResolvedSchema().toPhysicalRowDataType();
}
Returns the physical schema to use for encoding and decoding records. The returned row data type contains only physical columns. It does not include computed or metadata columns. A factory can use the returned data type to configure the table connector, and can manipulate it using the {@link DataType} static methods: <pre>{@code // Project some fields into a new data type DataType projectedDataType = Projection.of(projectedIndexes) .project(context.getPhysicalRowDataType()); // Create key data type DataType keyDataType = Projection.of(context.getPrimaryKeyIndexes()) .project(context.getPhysicalRowDataType()); // Create a new data type filtering columns of the original data type DataType myOwnDataType = DataTypes.ROW( DataType.getFields(context.getPhysicalRowDataType()) .stream() .filter(myFieldFilterPredicate) .toArray(DataTypes.Field[]::new)) }</pre> <p>Shortcut for {@code getCatalogTable().getResolvedSchema().toPhysicalRowDataType()}. @see ResolvedSchema#toPhysicalRowDataType()
getPhysicalRowDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/DynamicTableFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/DynamicTableFactory.java
Apache-2.0
/**
 * Returns the primary key column indexes, or an empty array if the table declares no primary
 * key.
 *
 * <p>Shortcut for {@code getCatalogTable().getResolvedSchema().getPrimaryKeyIndexes()}.
 */
default int[] getPrimaryKeyIndexes() {
    return getCatalogTable().getResolvedSchema().getPrimaryKeyIndexes();
}
Returns the primary key indexes, if any, otherwise returns an empty array. A factory can use it to compute the schema projection of the key fields with {@code Projection.of(ctx.getPrimaryKeyIndexes()).project(dataType)}. <p>Shortcut for {@code getCatalogTable().getResolvedSchema().getPrimaryKeyIndexes()}. @see ResolvedSchema#getPrimaryKeyIndexes()
getPrimaryKeyIndexes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/DynamicTableFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/DynamicTableFactory.java
Apache-2.0
/**
 * Creates a {@link DynamicTableSource} from the given resolved catalog table.
 *
 * <p>If {@code preferredFactory} is non-null it is used directly; otherwise a matching
 * {@link DynamicTableSourceFactory} is discovered via {@code discoverTableFactory}.
 *
 * @throws ValidationException wrapping any failure, with the table's sorted options listed in
 *     the message
 */
public static DynamicTableSource createDynamicTableSource( @Nullable DynamicTableSourceFactory preferredFactory, ObjectIdentifier objectIdentifier, ResolvedCatalogTable catalogTable, Map<String, String> enrichmentOptions, ReadableConfig configuration, ClassLoader classLoader, boolean isTemporary) { final DefaultDynamicTableContext context = new DefaultDynamicTableContext( objectIdentifier, catalogTable, enrichmentOptions, configuration, classLoader, isTemporary); try { final DynamicTableSourceFactory factory = preferredFactory != null ? preferredFactory : discoverTableFactory(DynamicTableSourceFactory.class, context); return factory.createDynamicTableSource(context); } catch (Throwable t) { throw new ValidationException( String.format( "Unable to create a source for reading table '%s'.\n\n" + "Table options are:\n\n" + "%s", objectIdentifier.asSummaryString(), catalogTable.getOptions().entrySet().stream() .map(e -> stringifyOption(e.getKey(), e.getValue())) .sorted() .collect(Collectors.joining("\n"))), t); } }
Creates a {@link DynamicTableSource} from a {@link CatalogTable}. <p>If {@code preferredFactory} is passed, the table source is created from that factory. Otherwise, an attempt is made to discover a matching factory using Java SPI (see {@link Factory} for details).
createDynamicTableSource
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
/**
 * Creates a {@link DynamicTableSink} from the given resolved catalog table.
 *
 * <p>If {@code preferredFactory} is non-null it is used directly; otherwise a matching
 * {@link DynamicTableSinkFactory} is discovered via {@code discoverTableFactory}.
 *
 * @throws ValidationException wrapping any failure, with the table's sorted options listed in
 *     the message
 */
public static DynamicTableSink createDynamicTableSink( @Nullable DynamicTableSinkFactory preferredFactory, ObjectIdentifier objectIdentifier, ResolvedCatalogTable catalogTable, Map<String, String> enrichmentOptions, ReadableConfig configuration, ClassLoader classLoader, boolean isTemporary) { final DefaultDynamicTableContext context = new DefaultDynamicTableContext( objectIdentifier, catalogTable, enrichmentOptions, configuration, classLoader, isTemporary); try { final DynamicTableSinkFactory factory = preferredFactory != null ? preferredFactory : discoverTableFactory(DynamicTableSinkFactory.class, context); return factory.createDynamicTableSink(context); } catch (Throwable t) { throw new ValidationException( String.format( "Unable to create a sink for writing table '%s'.\n\n" + "Table options are:\n\n" + "%s", objectIdentifier.asSummaryString(), catalogTable.getOptions().entrySet().stream() .map(e -> stringifyOption(e.getKey(), e.getValue())) .sorted() .collect(Collectors.joining("\n"))), t); } }
Creates a {@link DynamicTableSink} from a {@link CatalogTable}. <p>If {@code preferredFactory} is passed, the table sink is created from that factory. Otherwise, an attempt is made to discover a matching factory using Java SPI (see {@link Factory} for details).
createDynamicTableSink
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
/**
 * Creates a {@link ModelProvider} from the given resolved catalog model.
 *
 * <p>If {@code preferredFactory} is non-null it is used directly; otherwise a matching
 * {@link ModelProviderFactory} is discovered via {@code discoverModelProviderFactory}.
 *
 * @throws ValidationException wrapping any failure, with the model's sorted options listed in
 *     the message
 */
public static ModelProvider createModelProvider( @Nullable ModelProviderFactory preferredFactory, ObjectIdentifier objectIdentifier, ResolvedCatalogModel catalogModel, ReadableConfig configuration, ClassLoader classLoader, boolean isTemporary) { final DefaultModelProviderContext context = new DefaultModelProviderContext( objectIdentifier, catalogModel, configuration, classLoader, isTemporary); try { final ModelProviderFactory factory = preferredFactory != null ? preferredFactory : discoverModelProviderFactory(context); return factory.createModelProvider(context); } catch (Throwable t) { throw new ValidationException( String.format( "Unable to create a model provider for model '%s'.\n\n" + "Model options are:\n\n" + "%s", objectIdentifier.asSummaryString(), catalogModel.getOptions().entrySet().stream() .map(e -> stringifyOption(e.getKey(), e.getValue())) .sorted() .collect(Collectors.joining("\n"))), t); } }
Creates a {@link ModelProvider} from a {@link ResolvedCatalogModel}. <p>If {@code preferredFactory} is passed, the model provider is created from that factory. Otherwise, an attempt is made to discover a matching factory using Java SPI (see {@link Factory} for details).
createModelProvider
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
/**
 * Creates a utility that helps validating options for a {@link CatalogFactory}.
 *
 * <p>Note: This utility checks for left-over options in the final step.
 */
public static CatalogFactoryHelper createCatalogFactoryHelper(
        CatalogFactory factory, CatalogFactory.Context context) {
    return new CatalogFactoryHelper(factory, context);
}
Creates a utility that helps validating options for a {@link CatalogFactory}. <p>Note: This utility checks for left-over options in the final step.
createCatalogFactoryHelper
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
/**
 * Creates a utility that helps validating options for a {@link CatalogStoreFactory}.
 *
 * <p>Note: This utility checks for left-over options in the final step.
 */
public static CatalogStoreFactoryHelper createCatalogStoreFactoryHelper(
        CatalogStoreFactory factory, CatalogStoreFactory.Context context) {
    return new CatalogStoreFactoryHelper(factory, context);
}
Creates a utility that helps validating options for a {@link CatalogStoreFactory}. <p>Note: This utility checks for left-over options in the final step.
createCatalogStoreFactoryHelper
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
/**
 * Creates a utility that helps validating options for a {@link ModuleFactory}.
 *
 * <p>Note: This utility checks for left-over options in the final step.
 */
public static ModuleFactoryHelper createModuleFactoryHelper(
        ModuleFactory factory, ModuleFactory.Context context) {
    return new ModuleFactoryHelper(factory, context);
}
Creates a utility that helps validating options for a {@link ModuleFactory}. <p>Note: This utility checks for left-over options in the final step.
createModuleFactoryHelper
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
/**
 * Creates a utility that helps in discovering formats, merging options with
 * {@link DynamicTableFactory.Context#getEnrichmentOptions()} and validating them all for a
 * {@link DynamicTableFactory}.
 *
 * <p>The format option parameter of the helper's {@code discoverEncodingFormat} /
 * {@code discoverDecodingFormat} methods must be {@link #FORMAT} or end with
 * {@link #FORMAT_SUFFIX}; the discovery logic derives the format prefix from the factory
 * identifier (e.g. option key 'format' and identifier 'json' yield prefix 'json.').
 *
 * <p>Note: When created, this utility merges enrichment options using
 * {@link DynamicTableFactory#forwardOptions()}, and {@code validate()} checks for left-over
 * options in the final step.
 */
public static TableFactoryHelper createTableFactoryHelper(
        DynamicTableFactory factory, DynamicTableFactory.Context context) {
    return new TableFactoryHelper(factory, context);
}
Creates a utility that helps in discovering formats, merging options with {@link DynamicTableFactory.Context#getEnrichmentOptions()} and validating them all for a {@link DynamicTableFactory}. <p>The following example sketches the usage: <pre>{@code // in createDynamicTableSource() helper = FactoryUtil.createTableFactoryHelper(this, context); keyFormat = helper.discoverDecodingFormat(DeserializationFormatFactory.class, KEY_FORMAT); valueFormat = helper.discoverDecodingFormat(DeserializationFormatFactory.class, VALUE_FORMAT); helper.validate(); ... // construct connector with discovered formats }</pre> <p>Note: The format option parameter of {@link TableFactoryHelper#discoverEncodingFormat(Class, ConfigOption)} and {@link TableFactoryHelper#discoverDecodingFormat(Class, ConfigOption)} must be {@link #FORMAT} or end with {@link #FORMAT_SUFFIX}. The discovery logic will replace 'format' with the factory identifier value as the format prefix. For example, assuming the identifier is 'json', if the format option key is 'format', then the format prefix is 'json.'. If the format option key is 'value.format', then the format prefix is 'value.json'. The format prefix is used to project the options for the format factory. <p>Note: When created, this utility merges the options from {@link DynamicTableFactory.Context#getEnrichmentOptions()} using {@link DynamicTableFactory#forwardOptions()}. When invoking {@link TableFactoryHelper#validate()}, this utility checks for left-over options in the final step.
createTableFactoryHelper
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
/**
 * Creates a utility that helps validating options for a {@link ModelProviderFactory}.
 *
 * <p>Note: This utility checks for left-over options in the final step.
 */
public static ModelProviderFactoryHelper createModelProviderFactoryHelper(
        ModelProviderFactory factory, ModelProviderFactory.Context context) {
    return new ModelProviderFactoryHelper(factory, context);
}
Creates a utility that helps validate options for a {@link ModelProviderFactory}. <p>Note: This utility checks for left-over options in the final step.
createModelProviderFactoryHelper
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public static Catalog createCatalog( String catalogName, Map<String, String> options, ReadableConfig configuration, ClassLoader classLoader) { final DefaultCatalogContext discoveryContext = new DefaultCatalogContext(catalogName, options, configuration, classLoader); try { final CatalogFactory factory = getCatalogFactory(discoveryContext); // The type option is only used for discovery, we don't actually want to forward it // to the catalog factory itself. final Map<String, String> factoryOptions = options.entrySet().stream() .filter( entry -> !CommonCatalogOptions.CATALOG_TYPE .key() .equals(entry.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); final DefaultCatalogContext context = new DefaultCatalogContext( catalogName, factoryOptions, configuration, classLoader); return factory.createCatalog(context); } catch (Throwable t) { throw new ValidationException( String.format( "Unable to create catalog '%s'.%n%nCatalog options are:%n%s", catalogName, options.entrySet().stream() .map( optionEntry -> stringifyOption( optionEntry.getKey(), optionEntry.getValue())) .sorted() .collect(Collectors.joining("\n"))), t); } }
Attempts to discover an appropriate catalog factory and creates an instance of the catalog. <p>This first uses the legacy {@link TableFactory} stack to discover a matching {@link CatalogFactory}. If none is found, it falls back to the new stack using {@link Factory} instead.
createCatalog
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public static Module createModule( String moduleName, Map<String, String> options, ReadableConfig configuration, ClassLoader classLoader) { if (options.containsKey(MODULE_TYPE.key())) { throw new ValidationException( String.format( "Option '%s' = '%s' is not supported since module name " + "is used to find module", MODULE_TYPE.key(), options.get(MODULE_TYPE.key()))); } final DefaultModuleContext discoveryContext = new DefaultModuleContext(options, configuration, classLoader); try { final ModuleFactory factory = discoverFactory( ((ModuleFactory.Context) discoveryContext).getClassLoader(), ModuleFactory.class, moduleName); final DefaultModuleContext context = new DefaultModuleContext(options, configuration, classLoader); return factory.createModule(context); } catch (Throwable t) { throw new ValidationException( String.format( "Unable to create module '%s'.%n%nModule options are:%n%s", moduleName, options.entrySet().stream() .map( optionEntry -> stringifyOption( optionEntry.getKey(), optionEntry.getValue())) .sorted() .collect(Collectors.joining("\n"))), t); } }
Discovers a matching module factory and creates an instance of it. <p>This first uses the legacy {@link TableFactory} stack to discover a matching {@link ModuleFactory}. If none is found, it falls back to the new stack using {@link Factory} instead.
createModule
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
@SuppressWarnings("unchecked") public static <T extends Factory> T discoverFactory( ClassLoader classLoader, Class<T> factoryClass, String factoryIdentifier) { final List<Factory> factories = discoverFactories(classLoader); final List<Factory> foundFactories = factories.stream() .filter(f -> factoryClass.isAssignableFrom(f.getClass())) .collect(Collectors.toList()); if (foundFactories.isEmpty()) { throw new ValidationException( String.format( "Could not find any factories that implement '%s' in the classpath.", factoryClass.getName())); } final List<Factory> matchingFactories = foundFactories.stream() .filter(f -> f.factoryIdentifier().equals(factoryIdentifier)) .collect(Collectors.toList()); if (matchingFactories.isEmpty()) { throw new ValidationException( String.format( "Could not find any factory for identifier '%s' that implements '%s' in the classpath.\n\n" + "Available factory identifiers are:\n\n" + "%s", factoryIdentifier, factoryClass.getName(), foundFactories.stream() .map(Factory::factoryIdentifier) .distinct() .sorted() .collect(Collectors.joining("\n")))); } if (matchingFactories.size() > 1) { throw new ValidationException( String.format( "Multiple factories for identifier '%s' that implement '%s' found in the classpath.\n\n" + "Ambiguous factory classes are:\n\n" + "%s", factoryIdentifier, factoryClass.getName(), matchingFactories.stream() .map(f -> f.getClass().getName()) .sorted() .collect(Collectors.joining("\n")))); } return (T) matchingFactories.get(0); }
Discovers a factory using the given factory base class and identifier. <p>This method is meant for cases where {@link #createTableFactoryHelper(DynamicTableFactory, DynamicTableFactory.Context)} {@link #createDynamicTableSource(DynamicTableSourceFactory, ObjectIdentifier, ResolvedCatalogTable, Map, ReadableConfig, ClassLoader, boolean)}, and {@link #createDynamicTableSink(DynamicTableSinkFactory, ObjectIdentifier, ResolvedCatalogTable, Map, ReadableConfig, ClassLoader, boolean)} are not applicable.
discoverFactory
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public static void validateFactoryOptions(Factory factory, ReadableConfig options) { validateFactoryOptions(factory.requiredOptions(), factory.optionalOptions(), options); }
Validates the required and optional {@link ConfigOption}s of a factory. <p>Note: It does not check for left-over options.
validateFactoryOptions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public static void validateFactoryOptions( Set<ConfigOption<?>> requiredOptions, Set<ConfigOption<?>> optionalOptions, ReadableConfig options) { // currently Flink's options have no validation feature which is why we access them eagerly // to provoke a parsing error final List<String> missingRequiredOptions = requiredOptions.stream() // Templated options will never appear with their template key, so we need // to ignore them as required properties here .filter( option -> allKeys(option) .noneMatch(k -> k.contains(PLACEHOLDER_SYMBOL))) .filter(option -> readOption(options, option) == null) .map(ConfigOption::key) .sorted() .collect(Collectors.toList()); if (!missingRequiredOptions.isEmpty()) { throw new ValidationException( String.format( "One or more required options are missing.\n\n" + "Missing required options are:\n\n" + "%s", String.join("\n", missingRequiredOptions))); } optionalOptions.forEach(option -> readOption(options, option)); }
Validates the required options and optional options. <p>Note: It does not check for left-over options.
validateFactoryOptions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public static String getFormatPrefix( ConfigOption<String> formatOption, String formatIdentifier) { final String formatOptionKey = formatOption.key(); if (formatOptionKey.equals(FORMAT.key())) { return formatIdentifier + "."; } else if (formatOptionKey.endsWith(FORMAT_SUFFIX)) { // extract the key prefix, e.g. extract 'key' from 'key.format' String keyPrefix = formatOptionKey.substring(0, formatOptionKey.length() - FORMAT_SUFFIX.length()); return keyPrefix + "." + formatIdentifier + "."; } else { throw new ValidationException( "Format identifier key should be 'format' or suffix with '.format', " + "don't support format identifier key '" + formatOptionKey + "'."); } }
Returns the required option prefix for options of the given format.
getFormatPrefix
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public void validate() { validateFactoryOptions(factory, allOptions); validateUnconsumedKeys( factory.factoryIdentifier(), allOptions.keySet(), consumedOptionKeys, deprecatedOptionKeys); validateWatermarkOptions(factory.factoryIdentifier(), allOptions); }
Validates the options of the factory. It checks for unconsumed option keys.
validate
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public void validateExcept(String... prefixesToSkip) { Preconditions.checkArgument( prefixesToSkip.length > 0, "Prefixes to skip can not be empty."); final List<String> prefixesList = Arrays.asList(prefixesToSkip); consumedOptionKeys.addAll( allOptions.keySet().stream() .filter(key -> prefixesList.stream().anyMatch(key::startsWith)) .collect(Collectors.toSet())); validate(); }
Validates the options of the factory. It checks for unconsumed option keys while ignoring the options with given prefixes. <p>The option keys that have given prefix {@code prefixToSkip} would just be skipped for validation. @param prefixesToSkip Set of option key prefixes to skip validation
validateExcept
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public ReadableConfig getOptions() { return allOptions; }
Returns all options currently being consumed by the factory.
getOptions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
@Override public ReadableConfig getOptions() { return super.getOptions(); }
Returns all options currently being consumed by the factory. This method returns the options already merged with {@link DynamicTableFactory.Context#getEnrichmentOptions()}, using {@link DynamicTableFactory#forwardOptions()} as reference of mergeable options.
getOptions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public <I, F extends DecodingFormatFactory<I>> DecodingFormat<I> discoverDecodingFormat( Class<F> formatFactoryClass, ConfigOption<String> formatOption) { return discoverOptionalDecodingFormat(formatFactoryClass, formatOption) .orElseThrow( () -> new ValidationException( String.format( "Could not find required scan format '%s'.", formatOption.key()))); }
Discovers a {@link DecodingFormat} of the given type using the given option as factory identifier.
discoverDecodingFormat
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public <I, F extends DecodingFormatFactory<I>> Optional<DecodingFormat<I>> discoverOptionalDecodingFormat( Class<F> formatFactoryClass, ConfigOption<String> formatOption) { return discoverOptionalFormatFactory(formatFactoryClass, formatOption) .map( formatFactory -> { String formatPrefix = formatPrefix(formatFactory, formatOption); try { return formatFactory.createDecodingFormat( context, createFormatOptions(formatPrefix, formatFactory)); } catch (Throwable t) { throw new ValidationException( String.format( "Error creating scan format '%s' in option space '%s'.", formatFactory.factoryIdentifier(), formatPrefix), t); } }); }
Discovers a {@link DecodingFormat} of the given type using the given option (if present) as factory identifier.
discoverOptionalDecodingFormat
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public <I, F extends EncodingFormatFactory<I>> EncodingFormat<I> discoverEncodingFormat( Class<F> formatFactoryClass, ConfigOption<String> formatOption) { return discoverOptionalEncodingFormat(formatFactoryClass, formatOption) .orElseThrow( () -> new ValidationException( String.format( "Could not find required sink format '%s'.", formatOption.key()))); }
Discovers a {@link EncodingFormat} of the given type using the given option as factory identifier.
discoverEncodingFormat
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public <I, F extends EncodingFormatFactory<I>> Optional<EncodingFormat<I>> discoverOptionalEncodingFormat( Class<F> formatFactoryClass, ConfigOption<String> formatOption) { return discoverOptionalFormatFactory(formatFactoryClass, formatOption) .map( formatFactory -> { String formatPrefix = formatPrefix(formatFactory, formatOption); try { return formatFactory.createEncodingFormat( context, createFormatOptions(formatPrefix, formatFactory)); } catch (Throwable t) { throw new ValidationException( String.format( "Error creating sink format '%s' in option space '%s'.", formatFactory.factoryIdentifier(), formatPrefix), t); } }); }
Discovers a {@link EncodingFormat} of the given type using the given option (if present) as factory identifier.
discoverOptionalEncodingFormat
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
private void checkFormatIdentifierMatchesWithEnrichingOptions( ConfigOption<String> formatOption, String identifierFromPlan) { Optional<String> identifierFromEnrichingOptions = enrichingOptions.getOptional(formatOption); if (!identifierFromEnrichingOptions.isPresent()) { return; } if (identifierFromPlan == null) { throw new ValidationException( String.format( "The persisted plan has no format option '%s' specified, while the catalog table has it with value '%s'. " + "This is invalid, as either only the persisted plan table defines the format, " + "or both the persisted plan table and the catalog table defines the same format.", formatOption, identifierFromEnrichingOptions.get())); } if (!Objects.equals(identifierFromPlan, identifierFromEnrichingOptions.get())) { throw new ValidationException( String.format( "Both persisted plan table and catalog table define the format option '%s', " + "but they mismatch: '%s' != '%s'.", formatOption, identifierFromPlan, identifierFromEnrichingOptions.get())); } }
This function assumes that the format config is used only and only if the original configuration contains the format config option. It will fail if there is a mismatch of the identifier between the format in the plan table map and the one in enriching table map.
checkFormatIdentifierMatchesWithEnrichingOptions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public static void validateWatermarkOptions(String factoryIdentifier, ReadableConfig conf) { Optional<String> errMsgOptional = checkWatermarkOptions(conf); if (errMsgOptional.isPresent()) { throw new ValidationException( String.format( "Error configuring watermark for '%s', %s", factoryIdentifier, errMsgOptional.get())); } }
Validate watermark options from table options. @param factoryIdentifier identifier of table @param conf table options
validateWatermarkOptions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public static Optional<String> checkWatermarkOptions(ReadableConfig conf) { // try to validate watermark options by parsing it watermarkOptionSet.forEach(option -> readOption(conf, option)); // check watermark alignment options Optional<String> groupOptional = conf.getOptional(WATERMARK_ALIGNMENT_GROUP); Optional<Duration> maxDriftOptional = conf.getOptional(WATERMARK_ALIGNMENT_MAX_DRIFT); Optional<Duration> updateIntervalOptional = conf.getOptional(WATERMARK_ALIGNMENT_UPDATE_INTERVAL); if ((groupOptional.isPresent() || maxDriftOptional.isPresent() || updateIntervalOptional.isPresent()) && (!groupOptional.isPresent() || !maxDriftOptional.isPresent())) { String errMsg = String.format( "'%s' and '%s' must be set when configuring watermark alignment", WATERMARK_ALIGNMENT_GROUP.key(), WATERMARK_ALIGNMENT_MAX_DRIFT.key()); return Optional.of(errMsg); } return Optional.empty(); }
Check watermark-related options and return error messages. @param conf table options @return Optional of error messages
checkWatermarkOptions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/FactoryUtil.java
Apache-2.0
public static <T extends TableFactory> T find(Class<T> factoryClass, Descriptor descriptor) { Preconditions.checkNotNull(descriptor); return findSingleInternal(factoryClass, descriptor.toProperties(), Optional.empty()); }
Finds a table factory of the given class and descriptor. @param factoryClass desired factory class @param descriptor descriptor describing the factory configuration @param <T> factory class type @return the matching factory
find
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
public static <T extends TableFactory> T find( Class<T> factoryClass, Descriptor descriptor, ClassLoader classLoader) { Preconditions.checkNotNull(descriptor); Preconditions.checkNotNull(classLoader); return findSingleInternal( factoryClass, descriptor.toProperties(), Optional.of(classLoader)); }
Finds a table factory of the given class, descriptor, and classloader. @param factoryClass desired factory class @param descriptor descriptor describing the factory configuration @param classLoader classloader for service loading @param <T> factory class type @return the matching factory
find
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
public static <T extends TableFactory> T find( Class<T> factoryClass, Map<String, String> propertyMap) { return findSingleInternal(factoryClass, propertyMap, Optional.empty()); }
Finds a table factory of the given class and property map. @param factoryClass desired factory class @param propertyMap properties that describe the factory configuration @param <T> factory class type @return the matching factory
find
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
public static <T extends TableFactory> T find( Class<T> factoryClass, Map<String, String> propertyMap, ClassLoader classLoader) { Preconditions.checkNotNull(classLoader); return findSingleInternal(factoryClass, propertyMap, Optional.of(classLoader)); }
Finds a table factory of the given class, property map, and classloader. @param factoryClass desired factory class @param propertyMap properties that describe the factory configuration @param classLoader classloader for service loading @param <T> factory class type @return the matching factory
find
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
public static <T extends TableFactory> List<T> findAll( Class<T> factoryClass, Map<String, String> propertyMap) { return findAllInternal(factoryClass, propertyMap, Optional.empty()); }
Finds all table factories of the given class and property map. @param factoryClass desired factory class @param propertyMap properties that describe the factory configuration @param <T> factory class type @return all the matching factories
findAll
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
private static <T extends TableFactory> List<T> filter( List<TableFactory> foundFactories, Class<T> factoryClass, Map<String, String> properties) { Preconditions.checkNotNull(factoryClass); Preconditions.checkNotNull(properties); List<T> classFactories = filterByFactoryClass(factoryClass, properties, foundFactories); List<T> contextFactories = filterByContext(factoryClass, properties, classFactories); return filterBySupportedProperties( factoryClass, properties, classFactories, contextFactories); }
Filters found factories by factory class and with matching context.
filter
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
private static List<TableFactory> discoverFactories(Optional<ClassLoader> classLoader) { try { List<TableFactory> result = new LinkedList<>(); ClassLoader cl = classLoader.orElse(Thread.currentThread().getContextClassLoader()); ServiceLoader.load(TableFactory.class, cl).iterator().forEachRemaining(result::add); return result; } catch (ServiceConfigurationError e) { LOG.error("Could not load service provider for table factories.", e); throw new TableException("Could not load service provider for table factories.", e); } }
Searches for factories using Java service providers. @return all factories in the classpath
discoverFactories
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
@SuppressWarnings("unchecked") private static <T> List<T> filterByFactoryClass( Class<T> factoryClass, Map<String, String> properties, List<TableFactory> foundFactories) { List<TableFactory> classFactories = foundFactories.stream() .filter(p -> factoryClass.isAssignableFrom(p.getClass())) .collect(Collectors.toList()); if (classFactories.isEmpty()) { throw new NoMatchingTableFactoryException( String.format("No factory implements '%s'.", factoryClass.getCanonicalName()), factoryClass, foundFactories, properties); } return (List<T>) classFactories; }
Filters factories with matching context by factory class.
filterByFactoryClass
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
private static <T extends TableFactory> List<T> filterByContext( Class<T> factoryClass, Map<String, String> properties, List<T> classFactories) { List<T> matchingFactories = new ArrayList<>(); ContextBestMatched<T> bestMatched = null; for (T factory : classFactories) { Map<String, String> requestedContext = normalizeContext(factory); Map<String, String> plainContext = new HashMap<>(requestedContext); // we remove the version for now until we have the first backwards compatibility case // with the version we can provide mappings in case the format changes plainContext.remove(CONNECTOR_PROPERTY_VERSION); plainContext.remove(FORMAT_PROPERTY_VERSION); plainContext.remove(FactoryUtil.PROPERTY_VERSION.key()); // check if required context is met Map<String, Tuple2<String, String>> mismatchedProperties = new HashMap<>(); Map<String, String> missingProperties = new HashMap<>(); for (Map.Entry<String, String> e : plainContext.entrySet()) { if (properties.containsKey(e.getKey())) { String fromProperties = properties.get(e.getKey()); if (!Objects.equals(fromProperties, e.getValue())) { mismatchedProperties.put( e.getKey(), new Tuple2<>(e.getValue(), fromProperties)); } } else { missingProperties.put(e.getKey(), e.getValue()); } } int matchedSize = plainContext.size() - mismatchedProperties.size() - missingProperties.size(); if (matchedSize == plainContext.size()) { matchingFactories.add(factory); } else { if (bestMatched == null || matchedSize > bestMatched.matchedSize) { bestMatched = new ContextBestMatched<>( factory, matchedSize, mismatchedProperties, missingProperties); } } } if (matchingFactories.isEmpty()) { String bestMatchedMessage = null; if (bestMatched != null && bestMatched.matchedSize > 0) { StringBuilder builder = new StringBuilder(); builder.append(bestMatched.factory.getClass().getName()); if (bestMatched.missingProperties.size() > 0) { builder.append("\nMissing properties:"); bestMatched.missingProperties.forEach( (k, v) -> 
builder.append("\n").append(k).append("=").append(v)); } if (bestMatched.mismatchedProperties.size() > 0) { builder.append("\nMismatched properties:"); bestMatched.mismatchedProperties.entrySet().stream() .filter(e -> e.getValue().f1 != null) .forEach( e -> builder.append( String.format( "\n'%s' expects '%s', but is '%s'", e.getKey(), e.getValue().f0, e.getValue().f1))); } bestMatchedMessage = builder.toString(); } //noinspection unchecked throw new NoMatchingTableFactoryException( "Required context properties mismatch.", bestMatchedMessage, factoryClass, (List<TableFactory>) classFactories, properties); } return matchingFactories; }
Filters for factories with matching context. @return all matching factories
filterByContext
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
private static Map<String, String> normalizeContext(TableFactory factory) { Map<String, String> requiredContext = factory.requiredContext(); if (requiredContext == null) { throw new TableException( String.format( "Required context of factory '%s' must not be null.", factory.getClass().getName())); } return requiredContext.keySet().stream() .collect(Collectors.toMap(String::toLowerCase, requiredContext::get)); }
Prepares the properties of a context to be used for match operations.
normalizeContext
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
private static <T extends TableFactory> List<T> filterBySupportedProperties( Class<T> factoryClass, Map<String, String> properties, List<T> classFactories, List<T> contextFactories) { final List<String> plainGivenKeys = new LinkedList<>(); properties .keySet() .forEach( k -> { // replace arrays with wildcard String key = k.replaceAll(".\\d+", ".#"); // ignore duplicates if (!plainGivenKeys.contains(key)) { plainGivenKeys.add(key); } }); List<T> supportedFactories = new LinkedList<>(); Tuple2<T, List<String>> bestMatched = null; for (T factory : contextFactories) { Set<String> requiredContextKeys = normalizeContext(factory).keySet(); Tuple2<List<String>, List<String>> tuple2 = normalizeSupportedProperties(factory); // ignore context keys List<String> givenContextFreeKeys = plainGivenKeys.stream() .filter(p -> !requiredContextKeys.contains(p)) .collect(Collectors.toList()); boolean allTrue = true; List<String> unsupportedKeys = new ArrayList<>(); for (String k : givenContextFreeKeys) { if (!(tuple2.f0.contains(k) || tuple2.f1.stream().anyMatch(k::startsWith))) { allTrue = false; unsupportedKeys.add(k); } } if (allTrue) { supportedFactories.add(factory); } else { if (bestMatched == null || unsupportedKeys.size() < bestMatched.f1.size()) { bestMatched = new Tuple2<>(factory, unsupportedKeys); } } } if (supportedFactories.isEmpty()) { String bestMatchedMessage = null; if (bestMatched != null) { bestMatchedMessage = String.format( "%s\nUnsupported property keys:\n%s", bestMatched.f0.getClass().getName(), String.join("\n", bestMatched.f1)); } //noinspection unchecked throw new NoMatchingTableFactoryException( "No factory supports all properties.", bestMatchedMessage, factoryClass, (List<TableFactory>) classFactories, properties); } return supportedFactories; }
Filters the matching class factories by supported properties.
filterBySupportedProperties
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
private static Tuple2<List<String>, List<String>> normalizeSupportedProperties( TableFactory factory) { List<String> supportedProperties = factory.supportedProperties(); if (supportedProperties == null) { throw new TableException( String.format( "Supported properties of factory '%s' must not be null.", factory.getClass().getName())); } List<String> supportedKeys = supportedProperties.stream().map(String::toLowerCase).collect(Collectors.toList()); // extract wildcard prefixes List<String> wildcards = extractWildcardPrefixes(supportedKeys); return Tuple2.of(supportedKeys, wildcards); }
Prepares the supported properties of a factory to be used for match operations.
normalizeSupportedProperties
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
/**
 * Extracts the prefixes of wildcard property keys (e.g. "format.*" yields "format.").
 *
 * @param propertyKeys lower-cased supported keys
 * @return prefixes of all keys that end with '*', with the '*' removed
 */
private static List<String> extractWildcardPrefixes(List<String> propertyKeys) {
    final List<String> prefixes = new ArrayList<>();
    for (String key : propertyKeys) {
        if (key.endsWith("*")) {
            prefixes.add(key.substring(0, key.length() - 1));
        }
    }
    return prefixes;
}
Converts the prefix of properties with wildcards (e.g., "format.*").
extractWildcardPrefixes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/TableFactoryService.java
Apache-2.0
/**
 * Creates a utility that helps to validate options for a {@link WorkflowSchedulerFactory}.
 *
 * <p>Note: This utility checks for left-over options in the final step.
 *
 * @param workflowSchedulerFactory the factory whose options should be validated
 * @param context context providing the scheduler options to validate against
 */
public static WorkflowSchedulerFactoryHelper createWorkflowSchedulerFactoryHelper(
        WorkflowSchedulerFactory workflowSchedulerFactory,
        WorkflowSchedulerFactory.Context context) {
    return new WorkflowSchedulerFactoryHelper(
            workflowSchedulerFactory, context.getWorkflowSchedulerOptions());
}
Creates a utility that helps to validate options for a {@link WorkflowSchedulerFactory}. <p>Note: This utility checks for left-over options in the final step.
createWorkflowSchedulerFactoryHelper
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/WorkflowSchedulerFactoryUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/factories/WorkflowSchedulerFactoryUtil.java
Apache-2.0
/**
 * Invokes {@link #asyncLookup} and chains the resulting future into the given one.
 *
 * @param future future to complete with the lookup result or an exception
 * @param keys lookup key values, wrapped into a row before delegation
 */
public final void eval(CompletableFuture<Collection<RowData>> future, Object... keys) {
    GenericRowData keyRow = GenericRowData.of(keys);
    asyncLookup(keyRow)
            .whenComplete(
                    (rows, error) -> {
                        if (error == null) {
                            future.complete(rows);
                        } else {
                            // surface the failing key row in the exception message
                            future.completeExceptionally(
                                    new TableException(
                                            String.format(
                                                    "Failed to asynchronously lookup entries with key '%s'",
                                                    keyRow),
                                            error));
                        }
                    });
}
Invokes {@link #asyncLookup} and chains futures.
eval
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AsyncLookupFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AsyncLookupFunction.java
Apache-2.0
/**
 * Invokes {@link #asyncPredict} and chains the resulting future into the given one.
 *
 * @param future future to complete with the prediction result or an exception
 * @param args model input values, wrapped into a row before delegation
 */
public void eval(CompletableFuture<Collection<RowData>> future, Object... args) {
    GenericRowData argsData = GenericRowData.of(args);
    asyncPredict(argsData)
            .whenComplete(
                    (rows, error) -> {
                        if (error == null) {
                            future.complete(rows);
                        } else {
                            // surface the failing input row in the exception message
                            future.completeExceptionally(
                                    new TableException(
                                            String.format(
                                                    "Failed to execute asynchronously prediction with input row %s.",
                                                    argsData),
                                            error));
                        }
                    });
}
Invokes {@link #asyncPredict} and chains futures.
eval
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AsyncPredictFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AsyncPredictFunction.java
Apache-2.0
/**
 * Specifies that this {@link BuiltInFunctionDefinition} is implemented during code
 * generation.
 *
 * @return this builder for fluent chaining
 */
public Builder runtimeProvided() {
    this.isRuntimeProvided = true;
    return this;
}
Specifies that this {@link BuiltInFunctionDefinition} is implemented during code generation.
runtimeProvided
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
Apache-2.0
/**
 * Specifies the runtime class implementing this {@link BuiltInFunctionDefinition}.
 *
 * @param runtimeClass fully qualified name of the runtime implementation class
 * @return this builder for fluent chaining
 */
public Builder runtimeClass(String runtimeClass) {
    this.runtimeClass = runtimeClass;
    return this;
}
Specifies the runtime class implementing this {@link BuiltInFunctionDefinition}.
runtimeClass
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
Apache-2.0
/**
 * Specifies that this {@link BuiltInFunctionDefinition} will be mapped to a Calcite
 * function.
 *
 * @return this builder for fluent chaining
 */
public Builder runtimeDeferred() {
    // This method is just a marker method for clarity. It is equivalent to calling
    // neither {@link #runtimeProvided} nor {@link #runtimeClass}.
    return this;
}
Specifies that this {@link BuiltInFunctionDefinition} will be mapped to a Calcite function.
runtimeDeferred
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
Apache-2.0
/**
 * Specifies that this {@link BuiltInFunctionDefinition} is meant for internal purposes only
 * and should not be exposed when listing functions.
 *
 * @return this builder for fluent chaining
 */
public Builder internal() {
    this.isInternal = true;
    return this;
}
Specifies that this {@link BuiltInFunctionDefinition} is meant for internal purposes only and should not be exposed when listing functions.
internal
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
Apache-2.0
/**
 * Overwrites the syntax used for unparsing a function into a SQL string. If not specified,
 * {@link SqlCallSyntax#FUNCTION} is used.
 *
 * @param syntax the call syntax to use during unparsing
 * @return this builder for fluent chaining
 */
public Builder callSyntax(SqlCallSyntax syntax) {
    this.sqlCallSyntax = syntax;
    return this;
}
Overwrites the syntax used for unparsing a function into a SQL string. If not specified, {@link SqlCallSyntax#FUNCTION} is used.
callSyntax
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
Apache-2.0
/**
 * Overwrites the syntax used for unparsing a function into a SQL string. If not specified,
 * {@link SqlCallSyntax#FUNCTION} is used. This method overwrites the SQL name as well.
 *
 * @param name SQL name passed to the {@link SqlCallSyntax} instead of {@link #name(String)}
 * @param syntax the call syntax to use during unparsing
 * @return this builder for fluent chaining
 */
public Builder callSyntax(String name, SqlCallSyntax syntax) {
    this.sqlName = name;
    this.sqlCallSyntax = syntax;
    return this;
}
Overwrites the syntax used for unparsing a function into a SQL string. If not specified, {@link SqlCallSyntax#FUNCTION} is used. This method overwrites the name as well. If the name is not provided {@link #name(String)} is passed to the {@link SqlCallSyntax}.
callSyntax
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinition.java
Apache-2.0
/**
 * Collects all {@link BuiltInFunctionDefinition}s declared as public fields of
 * {@link BuiltInFunctionDefinitions} via reflection.
 *
 * @return all built-in function definitions
 * @throws TableException if a definition field cannot be accessed
 */
@Internal
public static List<BuiltInFunctionDefinition> getDefinitions() {
    final List<BuiltInFunctionDefinition> definitions = new ArrayList<>();
    for (Field field : BuiltInFunctionDefinitions.class.getFields()) {
        // skip fields that are not function definitions (e.g. helper constants)
        if (!FunctionDefinition.class.isAssignableFrom(field.getType())) {
            continue;
        }
        try {
            final BuiltInFunctionDefinition definition =
                    (BuiltInFunctionDefinition) field.get(BuiltInFunctionDefinitions.class);
            definitions.add(Preconditions.checkNotNull(definition));
        } catch (IllegalAccessException e) {
            throw new TableException(
                    "The function definition for field "
                            + field.getName()
                            + " is not accessible.",
                    e);
        }
    }
    return definitions;
}
Returns all {@link BuiltInFunctionDefinition}s declared as public fields of {@link BuiltInFunctionDefinitions}, collected via reflection.
getDefinitions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/BuiltInFunctionDefinitions.java
Apache-2.0
/**
 * Get the {@link TaskInfo} for this parallel subtask.
 *
 * @return task info for this parallel subtask
 * @throws TableException if no runtime context is available at the current location
 */
public TaskInfo getTaskInfo() {
    if (context != null) {
        return context.getTaskInfo();
    }
    throw new TableException(
            "Calls to FunctionContext.getTaskInfo are not available "
                    + "at the current location.");
}
Get the {@link TaskInfo} for this parallel subtask. @return task info for this parallel subtask.
getTaskInfo
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
Apache-2.0
/**
 * Returns the metric group for this parallel subtask.
 *
 * <p>If no runtime context is available, a default (no-op) metric group is returned and a
 * warning is logged instead of failing.
 *
 * @return metric group for this parallel subtask
 */
public MetricGroup getMetricGroup() {
    if (context != null) {
        return context.getMetricGroup();
    }
    LOG.warn(
            "Calls to FunctionContext.getMetricGroup will have no effect "
                    + "at the current location.");
    return defaultMetricsGroup;
}
Returns the metric group for this parallel subtask. @return metric group for this parallel subtask.
getMetricGroup
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
Apache-2.0
/**
 * Gets the local temporary file copy of a distributed cache file.
 *
 * @param name distributed cache file name
 * @return local temporary file copy of the distributed cache file
 * @throws TableException if no runtime context is available at the current location
 */
public File getCachedFile(String name) {
    if (context != null) {
        return context.getDistributedCache().getFile(name);
    }
    throw new TableException(
            "Calls to FunctionContext.getCachedFile are not available "
                    + "at the current location.");
}
Gets the local temporary file copy of a distributed cache files. @param name distributed cache file name @return local temporary file copy of a distributed cache file.
getCachedFile
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
Apache-2.0
/**
 * Gets the global job parameter value associated with the given key as a string.
 *
 * @param key key pointing to the associated value
 * @param defaultValue default value returned when no value is associated with the key
 * @return (default) value associated with the given key
 * @throws TableException if neither a runtime context nor job parameters are available
 */
public String getJobParameter(String key, String defaultValue) {
    // prefer the live runtime context; fall back to the captured job parameters
    if (context != null) {
        return context.getGlobalJobParameters().getOrDefault(key, defaultValue);
    }
    if (jobParameters != null) {
        return jobParameters.getOrDefault(key, defaultValue);
    }
    throw new TableException(
            "Calls to FunctionContext.getJobParameter are not available "
                    + "at the current location.");
}
Gets the global job parameter value associated with the given key as a string. @param key key pointing to the associated value @param defaultValue default value which is returned in case global job parameter is null or there is no value associated with the given key @return (default) value associated with the given key
getJobParameter
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
Apache-2.0
/**
 * Gets the {@link ClassLoader} to load classes that are not in the system's classpath, but
 * are part of the JAR file of a user job.
 *
 * @return user code class loader
 * @throws TableException if neither a runtime context nor a class loader is available
 */
public ClassLoader getUserCodeClassLoader() {
    // prefer the live runtime context; fall back to the captured class loader
    if (context != null) {
        return context.getUserCodeClassLoader();
    }
    if (userClassLoader != null) {
        return userClassLoader;
    }
    throw new TableException(
            "Calls to FunctionContext.getUserCodeClassLoader are not available "
                    + "at the current location.");
}
Gets the {@link ClassLoader} to load classes that are not in system's classpath, but are part of the JAR file of a user job.
getUserCodeClassLoader
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionContext.java
Apache-2.0
/**
 * Returns the set of requirements this definition demands.
 *
 * @return requirements of this definition; empty by default
 */
default Set<FunctionRequirement> getRequirements() {
    return Collections.emptySet();
}
Returns the set of requirements this definition demands.
getRequirements
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionDefinition.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionDefinition.java
Apache-2.0
/**
 * Returns information about the determinism of the function's results.
 *
 * <p>Returns {@code true} if and only if a call to this function is guaranteed to always
 * return the same result given the same parameters. {@code true} is assumed by default.
 * Functions that are not purely functional (e.g. {@code random()}, {@code date()},
 * {@code now()}) must return {@code false}. Returning {@code false} also prevents the
 * planner from applying constant expression reduction to calls of this function.
 */
default boolean isDeterministic() {
    return true;
}
Returns information about the determinism of the function's results. <p>It returns <code>true</code> if and only if a call to this function is guaranteed to always return the same result given the same parameters. <code>true</code> is assumed by default. If the function is not purely functional like <code>random(), date(), now(), ... </code> this method must return <code>false</code>. <p>Furthermore, return <code>false</code> if the planner should always execute this function on the cluster side. In other words: the planner should not perform constant expression reduction during planning for constant calls to this function.
isDeterministic
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionDefinition.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionDefinition.java
Apache-2.0
/**
 * Returns whether constant folding should be applied to calls of this function during
 * planning. If {@code false}, the expression is left as-is and the call is made at runtime.
 */
default boolean supportsConstantFolding() {
    return true;
}
If the constant-folding should be run during planning time on calls to this function. If not, the expression will be left as-is and the call will be made during runtime.
supportsConstantFolding
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionDefinition.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionDefinition.java
Apache-2.0
/**
 * List of the component names of this function identifier.
 *
 * @return the object identifier's components, or the single function name
 * @throws IllegalStateException if both internal fields are null (should never happen)
 */
public List<String> toList() {
    if (objectIdentifier != null) {
        return objectIdentifier.toList();
    }
    if (functionName != null) {
        return Collections.singletonList(functionName);
    }
    throw new IllegalStateException(
            "functionName and objectIdentifier are both null which should never happen.");
}
List of the component names of this function identifier.
toList
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionIdentifier.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/FunctionIdentifier.java
Apache-2.0
/**
 * Returns the {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s result.
 *
 * @return the result type, or {@code null} if it should be automatically inferred
 * @deprecated This method uses the old type system. It is only called when using the
 *     deprecated {@code TableEnvironment.registerFunction(...)} method. Use
 *     {@link DataTypeHint}/{@link FunctionHint} or override
 *     {@code UserDefinedFunction#getTypeInference(DataTypeFactory)} instead.
 */
@Deprecated
public TypeInformation<T> getResultType() {
    return null;
}
Returns the {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s result. @return The {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s result or <code>null</code> if the result type should be automatically inferred. @deprecated This method uses the old type system and is based on the old reflective extraction logic. The method will be removed in future versions and is only called when using the deprecated {@code TableEnvironment.registerFunction(...)} method. The new reflective extraction logic (possibly enriched with {@link DataTypeHint} and {@link FunctionHint}) should be powerful enough to cover most use cases. For advanced users, it is possible to override {@link UserDefinedFunction#getTypeInference(DataTypeFactory)}.
getResultType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
Apache-2.0
/**
 * Returns the {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s
 * accumulator.
 *
 * @return the accumulator type, or {@code null} if it should be automatically inferred
 * @deprecated This method uses the old type system. It is only called when using the
 *     deprecated {@code TableEnvironment.registerFunction(...)} method. Use
 *     {@link DataTypeHint}/{@link FunctionHint} or override
 *     {@code UserDefinedFunction#getTypeInference(DataTypeFactory)} instead.
 */
@Deprecated
public TypeInformation<ACC> getAccumulatorType() {
    return null;
}
Returns the {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s accumulator. @return The {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s accumulator or <code>null</code> if the accumulator type should be automatically inferred. @deprecated This method uses the old type system and is based on the old reflective extraction logic. The method will be removed in future versions and is only called when using the deprecated {@code TableEnvironment.registerFunction(...)} method. The new reflective extraction logic (possibly enriched with {@link DataTypeHint} and {@link FunctionHint}) should be powerful enough to cover most use cases. For advanced users, it is possible to override {@link UserDefinedFunction#getTypeInference(DataTypeFactory)}.
getAccumulatorType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
Apache-2.0
/**
 * Invokes {@link #lookup} and handles exceptions by wrapping them with the failing key row.
 *
 * @param keys lookup key values, wrapped into a row before delegation
 * @throws RuntimeException if the lookup fails with an {@link IOException}
 */
public final void eval(Object... keys) {
    final GenericRowData keyRow = GenericRowData.of(keys);
    try {
        final Collection<RowData> results = lookup(keyRow);
        // a null result is treated like an empty result
        if (results != null) {
            for (RowData row : results) {
                collect(row);
            }
        }
    } catch (IOException e) {
        throw new RuntimeException(
                String.format("Failed to lookup values with given key row '%s'", keyRow), e);
    }
}
Invokes {@link #lookup} and handles exceptions.
eval
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/LookupFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/LookupFunction.java
Apache-2.0
/**
 * Invokes {@link #predict} and handles exceptions by wrapping them with the failing input
 * row.
 *
 * @param args model input values, wrapped into a row before delegation
 * @throws FlinkRuntimeException if prediction or result emission fails
 */
public final void eval(Object... args) {
    final GenericRowData argsData = GenericRowData.of(args);
    try {
        final Collection<RowData> results = predict(argsData);
        // a null result is treated like an empty result
        if (results != null) {
            for (RowData result : results) {
                collect(result);
            }
        }
    } catch (Exception e) {
        throw new FlinkRuntimeException(
                String.format("Failed to execute prediction with input row %s.", argsData), e);
    }
}
Invokes {@link #predict} and handles exceptions.
eval
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/PredictFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/PredictFunction.java
Apache-2.0
/**
 * Returns the result type of the evaluation method with a given signature.
 *
 * @param signature parameter classes of the evaluation method
 * @return the result type, or {@code null} if it should be automatically inferred
 * @deprecated This method uses the old type system. It is only called when using the
 *     deprecated {@code TableEnvironment.registerFunction(...)} method. Use
 *     {@link DataTypeHint}/{@link FunctionHint} or override
 *     {@code UserDefinedFunction#getTypeInference(DataTypeFactory)} instead.
 */
@Deprecated
public TypeInformation<?> getResultType(Class<?>[] signature) {
    return null;
}
Returns the result type of the evaluation method with a given signature. @deprecated This method uses the old type system and is based on the old reflective extraction logic. The method will be removed in future versions and is only called when using the deprecated {@code TableEnvironment.registerFunction(...)} method. The new reflective extraction logic (possibly enriched with {@link DataTypeHint} and {@link FunctionHint}) should be powerful enough to cover most use cases. For advanced users, it is possible to override {@link UserDefinedFunction#getTypeInference(DataTypeFactory)}.
getResultType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ScalarFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ScalarFunction.java
Apache-2.0
/**
 * Special case for aggregate functions, which can have a DISTINCT function applied. Called
 * only from the DISTINCT function.
 *
 * @throws UnsupportedOperationException by default; only the FUNCTION syntax supports the
 *     DISTINCT clause
 */
default String unparseDistinct(
        String sqlName, List<ResolvedExpression> operands, SqlFactory sqlFactory) {
    throw new UnsupportedOperationException(
            "Only the FUNCTION syntax supports the DISTINCT clause.");
}
Special case for aggregate functions, which can have a DISTINCT function applied. Called only from the DISTINCT function.
unparseDistinct
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/SqlCallSyntax.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/SqlCallSyntax.java
Apache-2.0
/**
 * Returns a unique, serialized representation for this function.
 *
 * <p>If the class name alone is a valid serialized form, it is used directly; otherwise an
 * MD5 hash of the serialized instance is appended to disambiguate stateful instances.
 */
public final String functionIdentifier() {
    final String className = getClass().getName();
    if (isClassNameSerializable(this)) {
        return className;
    }
    final String serialized = EncodingUtils.encodeObjectToString(this);
    return className + "$" + EncodingUtils.hex(EncodingUtils.md5(serialized));
}
Returns a unique, serialized representation for this function.
functionIdentifier
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunction.java
Apache-2.0
/**
 * Setup method for user-defined function. It can be used for initialization work. By
 * default, this method does nothing.
 *
 * @param context runtime context in which the function is executed
 * @throws Exception if the initialization fails
 */
public void open(FunctionContext context) throws Exception {
    // do nothing
}
Setup method for user-defined function. It can be used for initialization work. By default, this method does nothing.
open
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunction.java
Apache-2.0
/**
 * Tear-down method for user-defined function. It can be used for clean up work. By default,
 * this method does nothing.
 *
 * @throws Exception if the clean up fails
 */
public void close() throws Exception {
    // do nothing
}
Tear-down method for user-defined function. It can be used for clean up work. By default, this method does nothing.
close
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunction.java
Apache-2.0
/**
 * Tries to infer the {@link TypeInformation} of a {@link TableFunction}'s result type.
 *
 * @param tableFunction the TableFunction for which the result type is inferred
 * @return the inferred result type of the TableFunction
 */
public static <T> TypeInformation<T> getReturnTypeOfTableFunction(
        TableFunction<T> tableFunction) {
    return getReturnTypeOfTableFunction(tableFunction, null);
}
Tries to infer the TypeInformation of a TableFunction's result type. @param tableFunction The TableFunction for which the result type is inferred. @return The inferred result type of the TableFunction.
getReturnTypeOfTableFunction
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/**
 * Tries to infer the {@link TypeInformation} of a {@link TableFunction}'s result type.
 *
 * <p>Preference order: the type declared by {@code getResultType()}, then the implicitly
 * inferred Scala type, then reflective extraction.
 *
 * @param tableFunction the TableFunction for which the result type is inferred
 * @param scalaType the implicitly inferred type, or {@code null}
 * @return the inferred result type of the TableFunction
 */
public static <T> TypeInformation<T> getReturnTypeOfTableFunction(
        TableFunction<T> tableFunction, TypeInformation<T> scalaType) {
    final TypeInformation<T> userProvidedType = tableFunction.getResultType();
    if (userProvidedType != null) {
        return userProvidedType;
    }
    if (scalaType != null) {
        return scalaType;
    }
    return TypeExtractor.createTypeInfo(
            tableFunction, TableFunction.class, tableFunction.getClass(), 0);
}
Tries to infer the TypeInformation of a TableFunction's result type. @param tableFunction The TableFunction for which the result type is inferred. @param scalaType The implicitly inferred type of the result type. @return The inferred result type of the TableFunction.
getReturnTypeOfTableFunction
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/**
 * Instantiates a {@link UserDefinedFunction} from a {@link CatalogFunction}.
 *
 * <p>Requires access to {@link ReadableConfig} if Python functions should be supported.
 *
 * @param classLoader class loader used to resolve the function class
 * @param config configuration required for Python functions; may be {@code null} when only
 *     JVM functions are expected
 * @param name function name, used in error messages only
 * @param catalogFunction catalog metadata describing the function to instantiate
 * @throws ValidationException if the function cannot be instantiated
 */
public static UserDefinedFunction instantiateFunction(
        ClassLoader classLoader,
        @Nullable ReadableConfig config,
        String name,
        CatalogFunction catalogFunction) {
    try {
        switch (catalogFunction.getFunctionLanguage()) {
            case PYTHON:
                // Python functions need configuration for the Python environment
                if (config == null) {
                    throw new IllegalStateException(
                            "Python functions are not supported at this location.");
                }
                return (UserDefinedFunction)
                        PythonFunctionUtils.getPythonFunction(
                                catalogFunction.getClassName(), config, classLoader);
            case JAVA:
            case SCALA:
                final Class<?> functionClass =
                        classLoader.loadClass(catalogFunction.getClassName());
                return UserDefinedFunctionHelper.instantiateFunction(functionClass);
            default:
                throw new IllegalArgumentException(
                        "Unknown function language: "
                                + catalogFunction.getFunctionLanguage());
        }
    } catch (Exception e) {
        // wrap every failure uniformly so callers see the offending function name
        throw new ValidationException(
                String.format("Cannot instantiate user-defined function '%s'.", name), e);
    }
}
Instantiates a {@link UserDefinedFunction} from a {@link CatalogFunction}. <p>Requires access to {@link ReadableConfig} if Python functions should be supported.
instantiateFunction
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/**
 * Instantiates a {@link UserDefinedFunction} assuming a JVM function with a default
 * constructor.
 *
 * @param functionClass class to instantiate; must extend {@link UserDefinedFunction}
 * @return a new function instance
 * @throws ValidationException if the class does not extend {@link UserDefinedFunction} or
 *     cannot be instantiated
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public static UserDefinedFunction instantiateFunction(Class<?> functionClass) {
    if (!UserDefinedFunction.class.isAssignableFrom(functionClass)) {
        throw new ValidationException(
                String.format(
                        "Function '%s' does not extend from '%s'.",
                        functionClass.getName(), UserDefinedFunction.class.getName()));
    }
    validateClass((Class) functionClass, true);
    try {
        // Class.newInstance() is deprecated since Java 9 because it propagates checked
        // constructor exceptions unchecked; use the Constructor API instead (its checked
        // exceptions are still covered by the catch below)
        return (UserDefinedFunction) functionClass.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
        throw new ValidationException(
                String.format(
                        "Cannot instantiate user-defined function class '%s'.",
                        functionClass.getName()),
                e);
    }
}
Instantiates a {@link UserDefinedFunction} assuming a JVM function with default constructor.
instantiateFunction
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0