code
stringlengths
25
201k
docstring
stringlengths
19
96.2k
func_name
stringlengths
0
235
language
stringclasses
1 value
repo
stringlengths
8
51
path
stringlengths
11
314
url
stringlengths
62
377
license
stringclasses
7 values
/**
 * Prepares a {@link UserDefinedFunction} instance for usage in the API.
 *
 * <p>Validates the implementing class (a default constructor is not required for instances)
 * and cleans closures so the function is self-contained and serializable.
 */
public static void prepareInstance(ReadableConfig config, UserDefinedFunction function) {
    // Instances may be parameterized, so no default constructor is enforced here.
    validateClass(function.getClass(), false);
    cleanFunction(config, function);
}
Prepares a {@link UserDefinedFunction} instance for usage in the API.
prepareInstance
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
public static boolean isClassNameSerializable(UserDefinedFunction function) { final Class<?> functionClass = function.getClass(); if (!InstantiationUtil.hasPublicNullaryConstructor(functionClass)) { // function must be parameterized return false; } Class<?> currentClass = functionClass; while (!currentClass.equals(UserDefinedFunction.class)) { for (Field field : currentClass.getDeclaredFields()) { if (!Modifier.isTransient(field.getModifiers()) && !Modifier.isStatic(field.getModifiers())) { // function seems to be stateful return false; } } currentClass = currentClass.getSuperclass(); } return true; }
Returns whether a {@link UserDefinedFunction} can be easily serialized and identified by only a fully qualified class name. It must have a public default constructor and must not declare any non-static, non-transient fields. <p>Other properties (such as checks for abstract classes) are validated at the entry points of the API, see {@link #prepareInstance(ReadableConfig, UserDefinedFunction)}.
isClassNameSerializable
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/**
 * Validates a {@link UserDefinedFunction} class for usage in the API.
 *
 * <p>This is an early validation for common errors; the concrete signature validation happens
 * later during code generation. A public default constructor is required here.
 */
public static void validateClass(Class<? extends UserDefinedFunction> functionClass) {
    validateClass(functionClass, true);
}
Validates a {@link UserDefinedFunction} class for usage in the API. <p>Note: This is an initial validation to indicate common errors early. The concrete signature validation happens in the code generation when the actual {@link DataType}s for arguments and result are known.
validateClass
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
public static void validateClassForRuntime( Class<? extends UserDefinedFunction> functionClass, String methodName, Class<?>[] argumentClasses, Class<?> outputClass, String functionName) { final List<Method> methods = ExtractionUtils.collectMethods(functionClass, methodName); // verifies regular JVM calling semantics final boolean isMatching = methods.stream() .anyMatch( method -> // Strict autoboxing is disabled for backwards compatibility ExtractionUtils.isInvokable( Autoboxing.JVM, method, argumentClasses) && ExtractionUtils.isAssignable( outputClass, method.getReturnType(), Autoboxing.JVM)); if (!isMatching) { throw new ValidationException( String.format( "Could not find an implementation method '%s' in class '%s' for function '%s' that " + "matches the following signature:\n%s", methodName, functionClass.getName(), functionName, ExtractionUtils.createMethodSignatureString( methodName, argumentClasses, outputClass))); } }
Validates a {@link UserDefinedFunction} class for usage in the runtime. <p>Note: This is for the final validation when actual {@link DataType}s for arguments and result are known.
validateClassForRuntime
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/**
 * Validates a {@link UserDefinedFunction} class for usage in the API.
 *
 * @param requiresDefaultConstructor whether a public default constructor must be present
 */
private static void validateClass(
        Class<? extends UserDefinedFunction> functionClass, boolean requiresDefaultConstructor) {
    // Scala objects are rejected for table functions due to shared-collector concurrency issues.
    if (TableFunction.class.isAssignableFrom(functionClass)) {
        validateNotSingleton(functionClass);
    }
    validateInstantiation(functionClass, requiresDefaultConstructor);
    validateImplementationMethods(functionClass);
}
Validates a {@link UserDefinedFunction} class for usage in the API.
validateClass
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/**
 * Checks that the given class is not a Scala object (singleton). Scala objects can lead to
 * concurrency issues, e.g., due to a shared collector.
 *
 * @throws ValidationException if the class is a Scala object
 */
private static void validateNotSingleton(Class<?> clazz) {
    // Scala compiles objects with a public static MODULE$ field.
    for (java.lang.reflect.Field field : clazz.getFields()) {
        if ("MODULE$".equals(field.getName())) {
            throw new ValidationException(
                    String.format(
                            "Function implemented by class %s is a Scala object. This is forbidden because of concurrency"
                                    + " problems when using them.",
                            clazz.getName()));
        }
    }
}
Check whether this is a Scala object. Using Scala objects can lead to concurrency issues, e.g., due to a shared collector.
validateNotSingleton
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/**
 * Validates the implementation methods (such as {@code eval} or {@code accumulate}) depending
 * on the {@link UserDefinedFunction} subclass.
 *
 * <p>Must be kept in sync with the code generation requirements and each function's docs.
 */
private static void validateImplementationMethods(
        Class<? extends UserDefinedFunction> functionClass) {
    if (ScalarFunction.class.isAssignableFrom(functionClass)) {
        validateImplementationMethod(functionClass, false, false, SCALAR_EVAL);
    } else if (AsyncScalarFunction.class.isAssignableFrom(functionClass)) {
        validateImplementationMethod(functionClass, false, false, ASYNC_SCALAR_EVAL);
        // async variant additionally requires the future-based signature
        validateAsyncImplementationMethod(functionClass, ASYNC_SCALAR_EVAL);
    } else if (TableFunction.class.isAssignableFrom(functionClass)) {
        validateImplementationMethod(functionClass, true, false, TABLE_EVAL);
    } else if (AsyncTableFunction.class.isAssignableFrom(functionClass)) {
        validateImplementationMethod(functionClass, true, false, ASYNC_TABLE_EVAL);
    } else if (AggregateFunction.class.isAssignableFrom(functionClass)) {
        // accumulate is mandatory; retract/merge are optional
        validateImplementationMethod(functionClass, true, false, AGGREGATE_ACCUMULATE);
        validateImplementationMethod(functionClass, true, true, AGGREGATE_RETRACT);
        validateImplementationMethod(functionClass, true, true, AGGREGATE_MERGE);
    } else if (TableAggregateFunction.class.isAssignableFrom(functionClass)) {
        validateImplementationMethod(functionClass, true, false, TABLE_AGGREGATE_ACCUMULATE);
        validateImplementationMethod(functionClass, true, true, TABLE_AGGREGATE_RETRACT);
        validateImplementationMethod(functionClass, true, true, TABLE_AGGREGATE_MERGE);
        // at least one of emitValue/emitUpdateWithRetract must exist
        validateImplementationMethod(
                functionClass,
                true,
                false,
                TABLE_AGGREGATE_EMIT,
                TABLE_AGGREGATE_EMIT_RETRACT);
    }
}
Validates the implementation methods such as {@link #SCALAR_EVAL} or {@link #AGGREGATE_ACCUMULATE} depending on the {@link UserDefinedFunction} subclass. <p>This method must be kept in sync with the code generation requirements and the individual docs of each function.
validateImplementationMethods
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/**
 * Checks if a user-defined function can be easily instantiated.
 *
 * @param requiresDefaultConstructor whether a public default constructor must be present
 * @throws ValidationException if the class cannot be instantiated as required
 */
private static void validateInstantiation(Class<?> clazz, boolean requiresDefaultConstructor) {
    // Guard clauses; each condition is terminal, so independent ifs are equivalent.
    if (!InstantiationUtil.isPublic(clazz)) {
        throw new ValidationException(
                String.format("Function class '%s' is not public.", clazz.getName()));
    }
    if (!InstantiationUtil.isProperClass(clazz)) {
        throw new ValidationException(
                String.format(
                        "Function class '%s' is not a proper class. It is either abstract, an interface, or a primitive type.",
                        clazz.getName()));
    }
    if (requiresDefaultConstructor && !InstantiationUtil.hasPublicNullaryConstructor(clazz)) {
        throw new ValidationException(
                String.format(
                        "Function class '%s' must have a public default constructor.",
                        clazz.getName()));
    }
}
Checks if a user-defined function can be easily instantiated.
validateInstantiation
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/**
 * Modifies a function instance by removing any reference to outer classes. This enables
 * non-static inner function classes.
 *
 * @throws ValidationException if closure cleaning fails, i.e. the function is not serializable
 */
private static void cleanFunction(ReadableConfig config, UserDefinedFunction function) {
    final ClosureCleanerLevel level = config.get(PipelineOptions.CLOSURE_CLEANER_LEVEL);
    try {
        ClosureCleaner.clean(function, level, true);
    } catch (Throwable t) {
        // Surface a table-specific error with the original cause attached.
        throw new ValidationException(
                String.format(
                        "Function class '%s' is not serializable. Make sure that the class is self-contained "
                                + "(i.e. no references to outer classes) and all inner fields are serializable as well.",
                        function.getClass()),
                t);
    }
}
Modifies a function instance by removing any reference to outer classes. This enables non-static inner function classes.
cleanFunction
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
Apache-2.0
/** Returns the execution type of the Python worker, i.e. how the Python functions are run. */
public ExecType getExecType() {
    return this.execType;
}
The execution type of the Python worker, it defines how to execute the Python functions.
getExecType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/python/PythonEnv.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/python/PythonEnv.java
Apache-2.0
/** Returns the kind of the user-defined Python function; {@code GENERAL} unless overridden. */
default PythonFunctionKind getPythonFunctionKind() {
    return PythonFunctionKind.GENERAL;
}
Returns the kind of the user-defined python function.
getPythonFunctionKind
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/python/PythonFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/python/PythonFunction.java
Apache-2.0
/**
 * Returns whether the Python function takes a whole row as input instead of the individual
 * columns of a row; {@code false} unless overridden.
 */
default boolean takesRowAsInput() {
    return false;
}
Returns whether the Python function takes a whole row as input instead of the individual columns of a row.
takesRowAsInput
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/python/PythonFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/python/PythonFunction.java
Apache-2.0
/**
 * Creates a {@link PythonFunction} from the fully qualified name of a Python function.
 *
 * <p>The factory lives in a different module, so it is looked up reflectively through the
 * given class loader instead of being referenced directly.
 *
 * @param fullyQualifiedName fully qualified name of the Python function
 * @param config configuration forwarded to the factory
 * @param classLoader class loader used to locate the factory class
 * @return the instantiated Python function
 * @throws IllegalStateException if the factory cannot be loaded or the invocation fails
 */
public static PythonFunction getPythonFunction(
        String fullyQualifiedName, ReadableConfig config, ClassLoader classLoader) {
    try {
        // Fix: use a parameterized Class<?> instead of a raw type.
        final Class<?> pythonFunctionFactory =
                Class.forName(
                        "org.apache.flink.client.python.PythonFunctionFactory",
                        true,
                        classLoader);
        return (PythonFunction)
                pythonFunctionFactory
                        .getMethod(
                                "getPythonFunction",
                                String.class,
                                ReadableConfig.class,
                                ClassLoader.class)
                        .invoke(null, fullyQualifiedName, config, classLoader);
    } catch (Throwable t) {
        throw new IllegalStateException(
                String.format("Instantiating python function '%s' failed.", fullyQualifiedName),
                t);
    }
}
Utilities for creating PythonFunction from the fully qualified name of a Python function.
getPythonFunction
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/python/utils/PythonFunctionUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/python/utils/PythonFunctionUtils.java
Apache-2.0
/**
 * Creates a computed column that is computed from the given SQL expression.
 *
 * @throws NullPointerException if any argument is {@code null}
 */
public static ComputedColumn computed(String name, DataType type, String expression) {
    // Validate in declaration order so the first null argument is reported.
    Preconditions.checkNotNull(name, "Column name can not be null.");
    Preconditions.checkNotNull(type, "Column type can not be null.");
    Preconditions.checkNotNull(expression, "Column expression can not be null.");
    return new ComputedColumn(name, type, expression);
}
Creates a computed column that is computed from the given SQL expression.
computed
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
Apache-2.0
/**
 * Creates a metadata column from metadata of the given column name.
 *
 * <p>The column is neither aliased nor virtual.
 */
public static MetadataColumn metadata(String name, DataType type) {
    return metadata(name, type, null, false);
}
Creates a metadata column from metadata of the given column name. <p>The column is not virtual by default.
metadata
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
Apache-2.0
/**
 * Creates a metadata column from metadata of the given column name.
 *
 * @param isVirtual whether the column is virtual (i.e. not persisted)
 */
public static MetadataColumn metadata(String name, DataType type, boolean isVirtual) {
    return metadata(name, type, null, isVirtual);
}
Creates a metadata column from metadata of the given column name. <p>Allows to specify whether the column is virtual or not.
metadata
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
Apache-2.0
/**
 * Creates a non-virtual metadata column from metadata of the given alias.
 *
 * @throws NullPointerException if the alias is {@code null}
 */
public static MetadataColumn metadata(String name, DataType type, String metadataAlias) {
    Preconditions.checkNotNull(metadataAlias, "Metadata alias can not be null.");
    return metadata(name, type, metadataAlias, false);
}
Creates a metadata column from metadata of the given alias. <p>The column is not virtual by default.
metadata
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
Apache-2.0
/**
 * Creates a metadata column from metadata of the given column name, or from metadata of the
 * given alias if the alias is not {@code null}.
 *
 * @param metadataAlias optional alias for the metadata key
 * @param isVirtual whether the column is virtual (i.e. not persisted)
 */
public static MetadataColumn metadata(
        String name, DataType type, @Nullable String metadataAlias, boolean isVirtual) {
    Preconditions.checkNotNull(name, "Column name can not be null.");
    Preconditions.checkNotNull(type, "Column type can not be null.");
    return new MetadataColumn(name, type, metadataAlias, isVirtual);
}
Creates a metadata column from metadata of the given column name or from metadata of the given alias (if not null). <p>Allows to specify whether the column is virtual or not.
metadata
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
Apache-2.0
/** @deprecated Use {@link #physical(String, DataType)} instead. */
@Deprecated
public static TableColumn of(String name, DataType type) {
    return physical(name, type);
}
@deprecated Use {@link #physical(String, DataType)} instead.
of
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
Apache-2.0
/** @deprecated Use {@link #computed(String, DataType, String)} instead. */
@Deprecated
public static TableColumn of(String name, DataType type, String expression) {
    return computed(name, type, expression);
}
@deprecated Use {@link #computed(String, DataType, String)} instead.
of
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableColumn.java
Apache-2.0
/** Returns a copy of this table schema with freshly copied column and watermark lists. */
public TableSchema copy() {
    final List<TableColumn> copiedColumns = new ArrayList<>(columns);
    final List<WatermarkSpec> copiedWatermarkSpecs = new ArrayList<>(watermarkSpecs);
    return new TableSchema(copiedColumns, copiedWatermarkSpecs, primaryKey);
}
Returns a deep copy of the table schema.
copy
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/** Returns all field data types as an array, in column order. */
public DataType[] getFieldDataTypes() {
    final DataType[] types = new DataType[columns.size()];
    for (int i = 0; i < types.length; i++) {
        types[i] = columns.get(i).getType();
    }
    return types;
}
Returns all field data types as an array.
getFieldDataTypes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Returns all field types as legacy {@link TypeInformation}.
 *
 * @deprecated Uses the old type system; prefer {@link #getFieldDataTypes()}.
 */
@Deprecated
public TypeInformation<?>[] getFieldTypes() {
    return fromDataTypeToLegacyInfo(getFieldDataTypes());
}
@deprecated This method will be removed in future versions as it uses the old type system. It is recommended to use {@link #getFieldDataTypes()} instead which uses the new type system based on {@link DataTypes}. Please make sure to use either the old or the new type system consistently to avoid unintended behavior. See the website documentation for more information.
getFieldTypes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Returns the data type for the given field index, or empty if the index is out of bounds.
 *
 * @param fieldIndex the index of the field
 */
public Optional<DataType> getFieldDataType(int fieldIndex) {
    if (fieldIndex >= 0 && fieldIndex < columns.size()) {
        return Optional.of(columns.get(fieldIndex).getType());
    }
    return Optional.empty();
}
Returns the specified data type for the given field index. @param fieldIndex the index of the field
getFieldDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Returns the legacy {@link TypeInformation} for the given field index, if present.
 *
 * @deprecated Uses the old type system; prefer {@link #getFieldDataType(int)}.
 */
@Deprecated
public Optional<TypeInformation<?>> getFieldType(int fieldIndex) {
    return getFieldDataType(fieldIndex).map(TypeConversions::fromDataTypeToLegacyInfo);
}
@deprecated This method will be removed in future versions as it uses the old type system. It is recommended to use {@link #getFieldDataType(int)} instead which uses the new type system based on {@link DataTypes}. Please make sure to use either the old or the new type system consistently to avoid unintended behavior. See the website documentation for more information.
getFieldType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Returns the data type for the given field name, or empty if no such column exists.
 *
 * @param fieldName the name of the field
 */
public Optional<DataType> getFieldDataType(String fieldName) {
    for (TableColumn column : this.columns) {
        if (column.getName().equals(fieldName)) {
            return Optional.ofNullable(column.getType());
        }
    }
    return Optional.empty();
}
Returns the specified data type for the given field name. @param fieldName the name of the field
getFieldDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Returns the legacy {@link TypeInformation} for the given field name, if present.
 *
 * @deprecated Uses the old type system; prefer {@link #getFieldDataType(String)}.
 */
@Deprecated
public Optional<TypeInformation<?>> getFieldType(String fieldName) {
    return getFieldDataType(fieldName).map(TypeConversions::fromDataTypeToLegacyInfo);
}
@deprecated This method will be removed in future versions as it uses the old type system. It is recommended to use {@link #getFieldDataType(String)} instead which uses the new type system based on {@link DataTypes}. Please make sure to use either the old or the new type system consistently to avoid unintended behavior. See the website documentation for more information.
getFieldType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/** Returns all field names as an array, in column order. */
public String[] getFieldNames() {
    final String[] names = new String[this.columns.size()];
    for (int i = 0; i < names.length; i++) {
        names[i] = this.columns.get(i).getName();
    }
    return names;
}
Returns all field names as an array.
getFieldNames
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Returns the name for the given field index, or empty if the index is out of bounds.
 *
 * @param fieldIndex the index of the field
 */
public Optional<String> getFieldName(int fieldIndex) {
    if (fieldIndex >= 0 && fieldIndex < columns.size()) {
        return Optional.of(this.columns.get(fieldIndex).getName());
    }
    return Optional.empty();
}
Returns the specified name for the given field index. @param fieldIndex the index of the field
getFieldName
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Returns the {@link TableColumn} for the given field index, or empty if out of bounds.
 *
 * @param fieldIndex the index of the field
 */
public Optional<TableColumn> getTableColumn(int fieldIndex) {
    if (fieldIndex >= 0 && fieldIndex < columns.size()) {
        return Optional.of(this.columns.get(fieldIndex));
    }
    return Optional.empty();
}
Returns the {@link TableColumn} instance for the given field index. @param fieldIndex the index of the field
getTableColumn
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Returns the first {@link TableColumn} with the given field name, if any.
 *
 * @param fieldName the name of the field
 */
public Optional<TableColumn> getTableColumn(String fieldName) {
    for (TableColumn column : this.columns) {
        if (column.getName().equals(fieldName)) {
            return Optional.of(column);
        }
    }
    return Optional.empty();
}
Returns the {@link TableColumn} instance for the given field name. @param fieldName the name of the field
getTableColumn
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/** Returns all {@link TableColumn}s of this schema as a new, mutable list. */
public List<TableColumn> getTableColumns() {
    final List<TableColumn> result = new ArrayList<>(this.columns);
    return result;
}
Returns all the {@link TableColumn}s for this table schema.
getTableColumns
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
public DataType toRowDataType() { final Field[] fields = columns.stream() .map(column -> FIELD(column.getName(), column.getType())) .toArray(Field[]::new); // The row should be never null. return ROW(fields).notNull(); }
Converts all columns of this schema into a (possibly nested) row data type. <p>This method returns the <b>source-to-query schema</b>. <p>Note: The returned row data type contains physical, computed, and metadata columns. Be careful when using this method in a table source or table sink. In many cases, {@link #toPhysicalRowDataType()} might be more appropriate. @see DataTypes#ROW(Field...) @see #toPhysicalRowDataType() @see #toPersistedRowDataType()
toRowDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
public DataType toPhysicalRowDataType() { final Field[] fields = columns.stream() .filter(TableColumn::isPhysical) .map(column -> FIELD(column.getName(), column.getType())) .toArray(Field[]::new); // The row should be never null. return ROW(fields).notNull(); }
Converts all physical columns of this schema into a (possibly nested) row data type. <p>Note: The returned row data type contains only physical columns. It does not include computed or metadata columns. @see DataTypes#ROW(Field...) @see #toRowDataType() @see #toPersistedRowDataType()
toPhysicalRowDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
public DataType toPersistedRowDataType() { final Field[] fields = columns.stream() .filter(TableColumn::isPersisted) .map(column -> FIELD(column.getName(), column.getType())) .toArray(Field[]::new); // The row should be never null. return ROW(fields).notNull(); }
Converts all persisted columns of this schema into a (possibly nested) row data type. <p>This method returns the <b>query-to-sink schema</b>. <p>Note: Computed columns and virtual columns are excluded in the returned row data type. The data type contains the columns of {@link #toPhysicalRowDataType()} plus persisted metadata columns. @see DataTypes#ROW(Field...) @see #toRowDataType() @see #toPhysicalRowDataType()
toPersistedRowDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/** Helps to migrate to the new {@link Schema} class, without any comment remapping. */
public Schema toSchema() {
    return toSchema(Collections.emptyMap());
}
Helps to migrate to the new {@link Schema} class.
toSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
@Deprecated public static TableSchema fromTypeInfo(TypeInformation<?> typeInfo) { if (typeInfo instanceof CompositeType<?>) { final CompositeType<?> compositeType = (CompositeType<?>) typeInfo; // get field names and types from composite type final String[] fieldNames = compositeType.getFieldNames(); final TypeInformation<?>[] fieldTypes = new TypeInformation[fieldNames.length]; for (int i = 0; i < fieldTypes.length; i++) { fieldTypes[i] = compositeType.getTypeAt(i); } return new TableSchema(fieldNames, fieldTypes); } else { // create table schema with a single field named "f0" of the given type. return new TableSchema( new String[] {ATOMIC_TYPE_FIELD_NAME}, new TypeInformation<?>[] {typeInfo}); } }
Creates a table schema from a {@link TypeInformation} instance. If the type information is a {@link CompositeType}, the field names and types for the composite type are used to construct the {@link TableSchema} instance. Otherwise, a table schema with a single field is created. The field name is "f0" and the field type the provided type. @param typeInfo The {@link TypeInformation} from which the table schema is generated. @return The table schema that was generated from the given {@link TypeInformation}. @deprecated This method will be removed soon. Use {@link DataTypes} to declare types.
fromTypeInfo
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Validates that {@code fieldNames} and {@code fieldTypes} contain the same number of entries.
 *
 * @param fieldNames field names
 * @param fieldTypes field data types
 * @throws ValidationException if the counts differ
 */
private static void validateNameTypeNumberEqual(String[] fieldNames, DataType[] fieldTypes) {
    if (fieldNames.length != fieldTypes.length) {
        throw new ValidationException(
                String.format(
                        "Number of field names and field data types must be equal.\n"
                                + "Number of names is %d, number of data types is %d.\n"
                                + "List of field names: %s\n"
                                + "List of field data types: %s",
                        fieldNames.length,
                        fieldTypes.length,
                        Arrays.toString(fieldNames),
                        Arrays.toString(fieldTypes)));
    }
}
Validates that the field names {@code fieldNames} and the field data types {@code fieldTypes} are equal in number. @param fieldNames Field names @param fieldTypes Field data types
validateNameTypeNumberEqual
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Recursively registers a (possibly nested) field into the name-to-type mapping, validating
 * that field names are unique.
 *
 * <p>For example, a "f0" field of ROW type with nested fields "q1" and "q2" produces the keys
 * "f0", "f0.q1", and "f0.q2".
 *
 * @param fieldNameToType mapping to update
 * @param fieldName name of this field, e.g. "q1"
 * @param fieldType data type of this field
 * @param parentFieldName dotted name of the parent, or empty for top-level fields
 * @throws ValidationException if a duplicate field name is encountered
 */
private static void validateAndCreateNameToTypeMapping(
        Map<String, LogicalType> fieldNameToType,
        String fieldName,
        LogicalType fieldType,
        String parentFieldName) {
    final String fullFieldName =
            parentFieldName.isEmpty() ? fieldName : parentFieldName + "." + fieldName;
    if (fieldNameToType.put(fullFieldName, fieldType) != null) {
        throw new ValidationException(
                "Field names must be unique. Duplicate field: '" + fullFieldName + "'");
    }
    // Recurse into composite (non-legacy) types to register nested fields.
    if (isCompositeType(fieldType) && !(fieldType instanceof LegacyTypeInformationType)) {
        final List<String> childNames = LogicalTypeChecks.getFieldNames(fieldType);
        final List<LogicalType> childTypes = fieldType.getChildren();
        for (int i = 0; i < childNames.size(); i++) {
            validateAndCreateNameToTypeMapping(
                    fieldNameToType, childNames.get(i), childTypes.get(i), fullFieldName);
        }
    }
}
Creates a mapping from field name to data type, the field name can be a nested field. This is mainly used for validating whether the rowtime attribute (might be nested) exists in the schema. During creating, it also validates whether there is duplicate field names. <p>For example, a "f0" field of ROW type has two nested fields "q1" and "q2". Then the mapping will be ["f0" -> ROW, "f0.q1" -> INT, "f0.q2" -> STRING]. <pre>{@code f0 ROW<q1 INT, q2 STRING> }</pre> @param fieldNameToType Field name to type mapping that to update @param fieldName Name of this field, e.g. "q1" or "q2" in the above example @param fieldType Data type of this field @param parentFieldName Field name of parent type, e.g. "f0" in the above example
validateAndCreateNameToTypeMapping
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Adds a physical field with name and data type.
 *
 * <p>The call order of this method determines the order of fields in the schema.
 */
public Builder field(String name, DataType dataType) {
    Preconditions.checkNotNull(name);
    Preconditions.checkNotNull(dataType);
    columns.add(TableColumn.physical(name, dataType));
    return this;
}
Add a field with name and data type. <p>The call order of this method determines the order of fields in the schema.
field
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Adds a computed field generated by the given expression, with the given name and data type.
 *
 * <p>The expression should be a quoted and expanded SQL-style string (e.g.
 * "`my_catalog`.`my_database`.`my_udf`(`f0`) + 1") because it may be persisted to and
 * deserialized from a catalog; it is not validated here.
 *
 * <p>The call order of this method determines the order of fields in the schema.
 *
 * @param name field name
 * @param dataType field data type
 * @param expression computed column expression
 */
public Builder field(String name, DataType dataType, String expression) {
    Preconditions.checkNotNull(name);
    Preconditions.checkNotNull(dataType);
    Preconditions.checkNotNull(expression);
    columns.add(TableColumn.computed(name, dataType, expression));
    return this;
}
Add a computed field which is generated by the given expression. This also defines the field name and the data type. <p>The call order of this method determines the order of fields in the schema. <p>The returned expression should be a SQL-style expression whose identifiers should be all quoted and expanded. <p>It should be expanded because this expression may be persisted then deserialized from the catalog, an expanded identifier would avoid the ambiguity if there are same name UDF referenced from different paths. For example, if there is a UDF named "my_udf" from path "my_catalog.my_database", you could pass in an expression like "`my_catalog`.`my_database`.`my_udf`(`f0`) + 1"; <p>It should be quoted because user could use a reserved keyword as the identifier, and we have no idea if it is quoted when deserialize from the catalog, so we force to use quoted identifier here. But framework will not check whether it is qualified and quoted or not. @param name Field name @param dataType Field data type @param expression Computed column expression.
field
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Adds the given {@link TableColumn} to this builder.
 *
 * <p>The call order of this method determines the order of fields in the schema.
 */
public Builder add(TableColumn column) {
    columns.add(column);
    return this;
}
Adds a {@link TableColumn} to this builder. <p>The call order of this method determines the order of fields in the schema.
add
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Adds an array of physical fields with the given names and data types.
 *
 * <p>The call order of this method determines the order of fields in the schema.
 */
public Builder fields(String[] names, DataType[] dataTypes) {
    Preconditions.checkNotNull(names);
    Preconditions.checkNotNull(dataTypes);
    validateNameTypeNumberEqual(names, dataTypes);
    for (int i = 0; i < names.length; i++) {
        this.columns.add(TableColumn.physical(names[i], dataTypes[i]));
    }
    return this;
}
Add an array of fields with names and data types. <p>The call order of this method determines the order of fields in the schema.
fields
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Adds a field using the legacy type system.
 *
 * @deprecated Uses the old type system; use {@link #field(String, DataType)} instead.
 */
@Deprecated
public Builder field(String name, TypeInformation<?> typeInfo) {
    return field(name, fromLegacyInfoToDataType(typeInfo));
}
@deprecated This method will be removed in future versions as it uses the old type system. It is recommended to use {@link #field(String, DataType)} instead which uses the new type system based on {@link DataTypes}. Please make sure to use either the old or the new type system consistently to avoid unintended behavior. See the website documentation for more information.
field
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Declares the given field as an event-time (rowtime) attribute and specifies the watermark
 * strategy.
 *
 * @param rowtimeAttribute the rowtime attribute field name; can reference a nested field
 *     using a dot separator
 * @param watermarkExpressionString qualified SQL string of the watermark generation
 *     expression, e.g. "ts - INTERVAL '5' SECOND"; not validated here
 * @param watermarkExprOutputType result data type of the watermark expression; not validated
 *     against the expression here
 */
public Builder watermark(
        String rowtimeAttribute,
        String watermarkExpressionString,
        DataType watermarkExprOutputType) {
    Preconditions.checkNotNull(rowtimeAttribute);
    Preconditions.checkNotNull(watermarkExpressionString);
    Preconditions.checkNotNull(watermarkExprOutputType);
    // Only a single watermark definition is supported at the moment.
    if (!this.watermarkSpecs.isEmpty()) {
        throw new IllegalStateException(
                "Multiple watermark definition is not supported yet.");
    }
    this.watermarkSpecs.add(
            new WatermarkSpec(
                    rowtimeAttribute, watermarkExpressionString, watermarkExprOutputType));
    return this;
}
Specifies the previously defined field as an event-time attribute and specifies the watermark strategy. @param rowtimeAttribute the field name as a rowtime attribute, can be a nested field using dot separator. @param watermarkExpressionString the string representation of watermark generation expression, e.g. "ts - INTERVAL '5' SECOND". The string is a qualified SQL expression string (UDFs are expanded) but will not be validated by {@link TableSchema}. @param watermarkExprOutputType the data type of the computation result of watermark generation expression. Whether the data type equals to the output type of expression will also not be validated by {@link TableSchema}.
watermark
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Adds the given {@link WatermarkSpec} to this builder.
 *
 * @throws IllegalStateException if a watermark was already defined; only a single watermark
 *     definition is supported at the moment
 */
public Builder watermark(WatermarkSpec watermarkSpec) {
    if (!this.watermarkSpecs.isEmpty()) {
        throw new IllegalStateException(
                "Multiple watermark definition is not supported yet.");
    }
    this.watermarkSpecs.add(watermarkSpec);
    return this;
}
Adds the given {@link WatermarkSpec} to this builder.
watermark
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Creates a primary key constraint for the given columns. The primary key is informational
 * only and will not be enforced.
 *
 * <p>The constraint is assigned a random (UUID-based) name.
 *
 * @param columns columns that form the primary key
 */
public Builder primaryKey(String... columns) {
    return primaryKey(UUID.randomUUID().toString(), columns);
}
Creates a primary key constraint for a set of given columns. The primary key is informational only. It will not be enforced. It can be used for optimizations. It is the data owner's responsibility to ensure uniqueness of the data. <p>The primary key will be assigned a random name. @param columns array of columns that form a unique primary key
primaryKey
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/**
 * Creates a named primary key constraint for the given columns. The primary key is
 * informational only and will not be enforced.
 *
 * @param name name of the primary key; must not be null, empty, or whitespace-only
 * @param columns columns that form the primary key; must contain at least one column
 * @throws ValidationException if a primary key was already defined or the arguments are
 *     invalid
 */
public Builder primaryKey(String name, String[] columns) {
    if (this.primaryKey != null) {
        throw new ValidationException("Can not create multiple PRIMARY keys.");
    }
    if (StringUtils.isNullOrWhitespaceOnly(name)) {
        throw new ValidationException("PRIMARY KEY's name can not be null or empty.");
    }
    if (columns == null || columns.length == 0) {
        throw new ValidationException(
                "PRIMARY KEY constraint must be defined for at least a single column.");
    }
    this.primaryKey = UniqueConstraint.primaryKey(name, Arrays.asList(columns));
    return this;
}
Creates a primary key constraint for a set of given columns. The primary key is informational only. It will not be enforced. It can be used for optimizations. It is the data owner's responsibility to ensure uniqueness of the data. @param columns array of columns that form a unique primary key @param name name for the primary key, can be used to reference the constraint
primaryKey
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/TableSchema.java
Apache-2.0
/** Returns type information for a Table API string or SQL VARCHAR type. */
public static TypeInformation<String> STRING() {
    return org.apache.flink.api.common.typeinfo.Types.STRING;
}
Returns type information for a Table API string or SQL VARCHAR type.
STRING
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API boolean or SQL BOOLEAN type. */
public static TypeInformation<Boolean> BOOLEAN() {
    return org.apache.flink.api.common.typeinfo.Types.BOOLEAN;
}
Returns type information for a Table API boolean or SQL BOOLEAN type.
BOOLEAN
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API byte or SQL TINYINT type. */
public static TypeInformation<Byte> BYTE() {
    return org.apache.flink.api.common.typeinfo.Types.BYTE;
}
Returns type information for a Table API byte or SQL TINYINT type.
BYTE
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API short or SQL SMALLINT type. */
public static TypeInformation<Short> SHORT() {
    return org.apache.flink.api.common.typeinfo.Types.SHORT;
}
Returns type information for a Table API short or SQL SMALLINT type.
SHORT
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API integer or SQL INT/INTEGER type. */
public static TypeInformation<Integer> INT() {
    return org.apache.flink.api.common.typeinfo.Types.INT;
}
Returns type information for a Table API integer or SQL INT/INTEGER type.
INT
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API long or SQL BIGINT type. */
public static TypeInformation<Long> LONG() {
    return org.apache.flink.api.common.typeinfo.Types.LONG;
}
Returns type information for a Table API long or SQL BIGINT type.
LONG
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API float or SQL FLOAT/REAL type. */
public static TypeInformation<Float> FLOAT() {
    return org.apache.flink.api.common.typeinfo.Types.FLOAT;
}
Returns type information for a Table API float or SQL FLOAT/REAL type.
FLOAT
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API double or SQL DOUBLE type. */
public static TypeInformation<Double> DOUBLE() {
    return org.apache.flink.api.common.typeinfo.Types.DOUBLE;
}
Returns type information for a Table API double or SQL DOUBLE type.
DOUBLE
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API big decimal or SQL DECIMAL type. */
public static TypeInformation<BigDecimal> DECIMAL() {
    return org.apache.flink.api.common.typeinfo.Types.BIG_DEC;
}
Returns type information for a Table API big decimal or SQL DECIMAL type.
DECIMAL
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API SQL date or SQL DATE type. */
public static TypeInformation<java.sql.Date> SQL_DATE() {
    return org.apache.flink.api.common.typeinfo.Types.SQL_DATE;
}
Returns type information for a Table API SQL date or SQL DATE type.
SQL_DATE
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API SQL time or SQL TIME type. */
public static TypeInformation<java.sql.Time> SQL_TIME() {
    return org.apache.flink.api.common.typeinfo.Types.SQL_TIME;
}
Returns type information for a Table API SQL time or SQL TIME type.
SQL_TIME
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API SQL timestamp or SQL TIMESTAMP type. */
public static TypeInformation<java.sql.Timestamp> SQL_TIMESTAMP() {
    return org.apache.flink.api.common.typeinfo.Types.SQL_TIMESTAMP;
}
Returns type information for a Table API SQL timestamp or SQL TIMESTAMP type.
SQL_TIMESTAMP
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API {@link java.time.LocalDate} type. */
public static TypeInformation<java.time.LocalDate> LOCAL_DATE() {
    return org.apache.flink.api.common.typeinfo.Types.LOCAL_DATE;
}
Returns type information for a Table API LocalDate type.
LOCAL_DATE
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API {@link java.time.LocalTime} type. */
public static TypeInformation<java.time.LocalTime> LOCAL_TIME() {
    return org.apache.flink.api.common.typeinfo.Types.LOCAL_TIME;
}
Returns type information for a Table API LocalTime type.
LOCAL_TIME
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API {@link java.time.LocalDateTime} type. */
public static TypeInformation<java.time.LocalDateTime> LOCAL_DATE_TIME() {
    return org.apache.flink.api.common.typeinfo.Types.LOCAL_DATE_TIME;
}
Returns type information for a Table API LocalDateTime type.
LOCAL_DATE_TIME
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API interval of months. */
public static TypeInformation<Integer> INTERVAL_MONTHS() {
    return TimeIntervalTypeInfo.INTERVAL_MONTHS;
}
Returns type information for a Table API interval of months.
INTERVAL_MONTHS
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/** Returns type information for a Table API interval of milliseconds. */
public static TypeInformation<Long> INTERVAL_MILLIS() {
    return TimeIntervalTypeInfo.INTERVAL_MILLIS;
}
Returns type information for a Table API interval of milliseconds.
INTERVAL_MILLIS
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/**
 * Returns type information for {@link Row} with fields of the given types, using the default
 * field names (f0, f1, f2, ...).
 *
 * <p>A row is a variable-length, null-aware composite type; every field may be null
 * regardless of its type. Field types cannot be inferred automatically, so type information
 * must be provided wherever a row is used.
 *
 * @param types the types of the row fields, e.g. Types.STRING(), Types.INT()
 */
public static TypeInformation<Row> ROW(TypeInformation<?>... types) {
    return org.apache.flink.api.common.typeinfo.Types.ROW(types);
}
Returns type information for {@link Row} with fields of the given types. <p>A row is a variable-length, null-aware composite type for storing multiple values in a deterministic field order. Every field can be null regardless of the field's type. The type of row fields cannot be automatically inferred; therefore, it is required to provide type information whenever a row is used. <p>The schema of rows can have up to <code>Integer.MAX_VALUE</code> fields, however, all row instances must strictly adhere to the schema defined by the type info. <p>This method generates type information with fields of the given types; the fields have the default names (f0, f1, f2 ..). @param types The types of the row fields, e.g., Types.STRING(), Types.INT()
ROW
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/**
 * Returns type information for {@link Row} with fields of the given types and names.
 *
 * <p>A row is a variable-length, null-aware composite type; every field may be null
 * regardless of its type. Field types cannot be inferred automatically, so type information
 * must be provided wherever a row is used.
 *
 * @param fieldNames the array of field names
 * @param types the types of the row fields, e.g. Types.STRING(), Types.INT()
 */
public static TypeInformation<Row> ROW(String[] fieldNames, TypeInformation<?>[] types) {
    return org.apache.flink.api.common.typeinfo.Types.ROW_NAMED(fieldNames, types);
}
Returns type information for {@link Row} with fields of the given types and with given names. <p>A row is a variable-length, null-aware composite type for storing multiple values in a deterministic field order. Every field can be null independent of the field's type. The type of row fields cannot be automatically inferred; therefore, it is required to provide type information whenever a row is used. <p>The schema of rows can have up to <code>Integer.MAX_VALUE</code> fields, however, all row instances must strictly adhere to the schema defined by the type info. @param fieldNames The array of field names @param types The types of the row fields, e.g., Types.STRING(), Types.INT()
ROW
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/**
 * Generates type information for an array of Java object elements. Null elements are
 * supported.
 *
 * @param elementType type of the array elements, e.g. Types.INT()
 */
public static <E> TypeInformation<E[]> OBJECT_ARRAY(TypeInformation<E> elementType) {
    return ObjectArrayTypeInfo.getInfoFor(elementType);
}
Generates type information for an array consisting of Java object elements. Null values for elements are supported. @param elementType type of the array elements; e.g. Types.INT()
OBJECT_ARRAY
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/**
 * Generates type information for a multiset, backed by a Java HashMap mapping each element to
 * an integer count. Null keys are not supported.
 *
 * @param elementType type of the multiset elements, e.g. Types.STRING()
 */
public static <E> TypeInformation<Map<E, Integer>> MULTISET(TypeInformation<E> elementType) {
    return new MultisetTypeInfo<>(elementType);
}
Generates type information for a Multiset. A Multiset is backed by a Java HashMap and maps an arbitrary key to an integer value. Null values in keys are not supported. @param elementType type of the elements of the multiset e.g. Types.STRING()
MULTISET
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/Types.java
Apache-2.0
/**
 * Returns the string representation of the watermark generation expression, a qualified SQL
 * expression string (UDFs are expanded).
 */
public String getWatermarkExpr() {
    return watermarkExpressionString;
}
Returns the string representation of watermark generation expression. The string representation is a qualified SQL expression string (UDFs are expanded).
getWatermarkExpr
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/WatermarkSpec.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/api/WatermarkSpec.java
Apache-2.0
/**
 * Sets a built-in timestamp extractor that converts an existing {@code Long} or SQL timestamp
 * field into the rowtime attribute.
 *
 * @param fieldName the field to convert into a rowtime attribute
 */
public Rowtime timestampsFromField(String fieldName) {
    internalProperties.putString(
            ROWTIME_TIMESTAMPS_TYPE, ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD);
    internalProperties.putString(ROWTIME_TIMESTAMPS_FROM, fieldName);
    return this;
}
Sets a built-in timestamp extractor that converts an existing {@link Long} or {@link Types#SQL_TIMESTAMP} field into the rowtime attribute. @param fieldName The field to convert into a rowtime attribute.
timestampsFromField
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
Apache-2.0
/**
 * Sets a built-in timestamp extractor that preserves the record timestamps assigned by the
 * DataStream API source as the rowtime attribute.
 *
 * <p>Note: This extractor only works in streaming environments.
 */
public Rowtime timestampsFromSource() {
    internalProperties.putString(
            ROWTIME_TIMESTAMPS_TYPE, ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_SOURCE);
    return this;
}
Sets a built-in timestamp extractor that converts the assigned timestamps from a DataStream API record into the rowtime attribute and thus preserves the assigned timestamps from the source. <p>Note: This extractor only works in streaming environments.
timestampsFromSource
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
Apache-2.0
/**
 * Sets a custom timestamp extractor to be used for the rowtime attribute.
 *
 * @param extractor the {@link TimestampExtractor} that extracts the rowtime attribute from
 *     the physical type
 */
public Rowtime timestampsFromExtractor(TimestampExtractor extractor) {
    internalProperties.putProperties(extractor.toProperties());
    return this;
}
Sets a custom timestamp extractor to be used for the rowtime attribute. @param extractor The {@link TimestampExtractor} to extract the rowtime attribute from the physical type.
timestampsFromExtractor
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
Apache-2.0
/**
 * Sets a built-in watermark strategy that preserves the watermarks assigned by the underlying
 * DataStream API source.
 */
public Rowtime watermarksFromSource() {
    internalProperties.putString(
            ROWTIME_WATERMARKS_TYPE, ROWTIME_WATERMARKS_TYPE_VALUE_FROM_SOURCE);
    return this;
}
Sets a built-in watermark strategy which indicates the watermarks should be preserved from the underlying DataStream API and thus preserves the assigned watermarks from the source.
watermarksFromSource
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
Apache-2.0
/** Sets a custom watermark strategy to be used for the rowtime attribute. */
public Rowtime watermarksFromStrategy(WatermarkStrategy strategy) {
    internalProperties.putProperties(strategy.toProperties());
    return this;
}
Sets a custom watermark strategy to be used for the rowtime attribute.
watermarksFromStrategy
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Rowtime.java
Apache-2.0
/**
 * Sets the schema with field names and types from the given {@link TableSchema}, overwriting
 * any fields previously added with {@code field(...)}.
 *
 * @param schema the table schema to copy fields from
 */
public Schema schema(TableSchema schema) {
    tableSchema.clear();
    lastField = null;
    for (int i = 0; i < schema.getFieldCount(); i++) {
        // Optional.get() is safe here: i is always a valid index into the schema's fields.
        field(schema.getFieldName(i).get(), schema.getFieldDataType(i).get());
    }
    return this;
}
Sets the schema with field names and the types. Required. <p>This method overwrites existing fields added with {@link #field(String, DataType)}. @param schema the table schema
schema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
Apache-2.0
/**
 * Adds a field with the given name and data type. The call order of this method defines the
 * order of fields in the schema.
 *
 * @param fieldName the field name
 * @param fieldType the data type of the field
 */
public Schema field(String fieldName, DataType fieldType) {
    // Stored as the serializable string representation of the logical type.
    addField(fieldName, fieldType.getLogicalType().asSerializableString());
    return this;
}
Adds a field with the field name and the data type. Required. This method can be called multiple times. The call order of this method defines also the order of the fields in a row. @param fieldName the field name @param fieldType the type information of the field
field
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
Apache-2.0
/**
 * Adds a field with the given name and legacy type information. The call order of this method
 * defines the order of fields in the schema.
 *
 * @deprecated Uses the old type system; use {@link #field(String, DataType)} instead.
 */
@Deprecated
public Schema field(String fieldName, TypeInformation<?> fieldType) {
    field(fieldName, TypeConversions.fromLegacyInfoToDataType(fieldType));
    return this;
}
Adds a field with the field name and the type information. Required. This method can be called multiple times. The call order of this method defines also the order of the fields in a row. @param fieldName the field name @param fieldType the type information of the field @deprecated This method will be removed in future versions as it uses the old type system. Please use {@link #field(String, DataType)} instead.
field
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
Apache-2.0
public Schema from(String originFieldName) { if (lastField == null) { throw new ValidationException("No field previously defined. Use field() before."); } tableSchema.get(lastField).put(SCHEMA_FROM, originFieldName); lastField = null; return this; }
Specifies the origin of the previously defined field. The origin field is defined by a connector or format. <p>E.g. field("myString", Types.STRING).from("CSV_MY_STRING") <p>Note: Field names are matched by the exact name by default (case sensitive).
from
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
Apache-2.0
/**
 * Specifies the previously defined field as a processing-time attribute.
 *
 * <p>E.g. {@code field("proctime", Types.SQL_TIMESTAMP).proctime()}
 *
 * @throws ValidationException if no field was defined before this call
 */
public Schema proctime() {
    if (lastField == null) {
        throw new ValidationException("No field defined previously. Use field() before.");
    }
    tableSchema.get(lastField).put(SCHEMA_PROCTIME, "true");
    lastField = null;
    return this;
}
Specifies the previously defined field as a processing-time attribute. <p>E.g. field("proctime", Types.SQL_TIMESTAMP).proctime()
proctime
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
Apache-2.0
/**
 * Specifies the previously defined field as an event-time attribute.
 *
 * <p>E.g. {@code field("rowtime", Types.SQL_TIMESTAMP).rowtime(...)}
 *
 * @throws ValidationException if no field was defined before this call
 */
public Schema rowtime(Rowtime rowtime) {
    if (lastField == null) {
        throw new ValidationException("No field defined previously. Use field() before.");
    }
    tableSchema.get(lastField).putAll(rowtime.toProperties());
    lastField = null;
    return this;
}
Specifies the previously defined field as an event-time attribute. <p>E.g. field("rowtime", Types.SQL_TIMESTAMP).rowtime(...)
rowtime
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/descriptors/Schema.java
Apache-2.0
/**
 * Creates and configures a {@link TableSink} using the given normalized properties.
 *
 * @param properties normalized properties describing a table sink
 * @return the configured table sink
 * @deprecated Use {@link #createTableSink(Context)} instead; the context carries more
 *     information, including the table schema.
 */
@Deprecated
default TableSink<T> createTableSink(Map<String, String> properties) {
    return null;
}
Creates and configures a {@link TableSink} using the given properties. @param properties normalized properties describing a table sink. @return the configured table sink. @deprecated {@link Context} contains more information, and already contains table schema too. Please use {@link #createTableSink(Context)} instead.
createTableSink
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSinkFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSinkFactory.java
Apache-2.0
/**
 * Creates and configures a {@link TableSink} based on the given {@link CatalogTable}.
 *
 * @param tablePath path of the given {@link CatalogTable}
 * @param table {@link CatalogTable} instance
 * @return the configured table sink
 * @deprecated Use {@link #createTableSink(Context)} instead; the context carries more
 *     information, including the table schema.
 */
@Deprecated
default TableSink<T> createTableSink(ObjectPath tablePath, CatalogTable table) {
    // Delegates to the properties-based variant; the table must be a ResolvedCatalogTable.
    return createTableSink(
            ((ResolvedCatalogTable) table).toProperties(DefaultSqlFactory.INSTANCE));
}
Creates and configures a {@link TableSink} based on the given {@link CatalogTable} instance. @param tablePath path of the given {@link CatalogTable} @param table {@link CatalogTable} instance. @return the configured table sink. @deprecated {@link Context} contains more information, and already contains table schema too. Please use {@link #createTableSink(Context)} instead.
createTableSink
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSinkFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSinkFactory.java
Apache-2.0
/**
 * Creates and configures a {@link TableSink} based on the given {@link Context}.
 *
 * @param context context of this table sink
 * @return the configured table sink
 */
default TableSink<T> createTableSink(Context context) {
    return createTableSink(context.getObjectIdentifier().toObjectPath(), context.getTable());
}
Creates and configures a {@link TableSink} based on the given {@link Context}. @param context context of this table sink. @return the configured table sink.
createTableSink
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSinkFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSinkFactory.java
Apache-2.0
/**
 * Creates and configures a {@link TableSource} using the given normalized properties.
 *
 * @param properties normalized properties describing a table source
 * @return the configured table source
 * @deprecated Use {@link #createTableSource(Context)} instead; the context carries more
 *     information, including the table schema.
 */
@Deprecated
default TableSource<T> createTableSource(Map<String, String> properties) {
    return null;
}
Creates and configures a {@link TableSource} using the given properties. @param properties normalized properties describing a table source. @return the configured table source. @deprecated {@link Context} contains more information, and already contains table schema too. Please use {@link #createTableSource(Context)} instead.
createTableSource
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSourceFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSourceFactory.java
Apache-2.0
/**
 * Creates and configures a {@link TableSource} based on the given {@link CatalogTable}.
 *
 * @param tablePath path of the given {@link CatalogTable}
 * @param table {@link CatalogTable} instance
 * @return the configured table source
 * @deprecated Use {@link #createTableSource(Context)} instead; the context carries more
 *     information, including the table schema.
 */
@Deprecated
default TableSource<T> createTableSource(ObjectPath tablePath, CatalogTable table) {
    // Delegates to the properties-based variant; the table must be a ResolvedCatalogTable.
    return createTableSource(
            ((ResolvedCatalogTable) table).toProperties(DefaultSqlFactory.INSTANCE));
}
Creates and configures a {@link TableSource} based on the given {@link CatalogTable} instance. @param tablePath path of the given {@link CatalogTable} @param table {@link CatalogTable} instance. @return the configured table source. @deprecated {@link Context} contains more information, and already contains table schema too. Please use {@link #createTableSource(Context)} instead.
createTableSource
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSourceFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSourceFactory.java
Apache-2.0
/**
 * Creates and configures a {@link TableSource} based on the given {@link Context}.
 *
 * @param context context of this table source
 * @return the configured table source
 */
default TableSource<T> createTableSource(Context context) {
    return createTableSource(context.getObjectIdentifier().toObjectPath(), context.getTable());
}
Creates and configures a {@link TableSource} based on the given {@link Context}. @param context context of this table source. @return the configured table source.
createTableSource
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSourceFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/factories/TableSourceFactory.java
Apache-2.0
/**
 * Returns whether the sink requires input records to be grouped by partition fields before
 * consumption, i.e. all elements of one partition arrive before elements of the next. A sink
 * returning true should also adjust its own writing behavior accordingly (e.g. write one
 * partition at a time).
 *
 * @param supportsGrouping whether the execution mode supports grouping (e.g. batch mode does,
 *     streaming mode does not)
 * @return whether data must be grouped by partition; default is false. Must never return true
 *     when {@code supportsGrouping} is false.
 */
default boolean configurePartitionGrouping(boolean supportsGrouping) {
    return false;
}
If returns true, sink can trust all records will definitely be grouped by partition fields before consumed by the {@link TableSink}, i.e. the sink will receive all elements of one partition and then all elements of another partition, elements of different partitions will not be mixed. For some sinks, this can be used to reduce number of the partition writers to improve writing performance. <p>This method is used to configure the behavior of input whether to be grouped by partition, if true, at the same time the sink should also configure itself, i.e. set an internal field that changes the writing behavior (writing one partition at a time). @param supportsGrouping whether the execution mode supports grouping, e.g. grouping (usually use sort to implement) is only supported in batch mode, not supported in streaming mode. @return whether data need to be grouped by partition before consumed by the sink. Default is false. If {@code supportsGrouping} is false, it should never return true (requires grouping), otherwise it will fail.
configurePartitionGrouping
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/PartitionableTableSink.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/PartitionableTableSink.java
Apache-2.0
/**
 * Returns the data type consumed by this {@link TableSink}, derived from the legacy
 * {@link #getOutputType()} by default.
 *
 * @return the data type expected by this {@link TableSink}
 * @throws TableException if neither this method nor {@link #getOutputType()} is implemented
 */
default DataType getConsumedDataType() {
    final TypeInformation<T> legacyType = getOutputType();
    if (legacyType == null) {
        throw new TableException("Table sink does not implement a consumed data type.");
    }
    return fromLegacyInfoToDataType(legacyType);
}
Returns the data type consumed by this {@link TableSink}. @return The data type expected by this {@link TableSink}.
getConsumedDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
Apache-2.0
@Deprecated
default TypeInformation<T> getOutputType() {
    // Legacy type-system hook: implementations still based on TypeInformation
    // override this. A null return signals that getConsumedDataType() must be
    // implemented instead (it throws when both are missing).
    return null;
}
@deprecated This method will be removed in future versions as it uses the old type system. It is recommended to use {@link #getConsumedDataType()} instead which uses the new type system based on {@link DataTypes}. Please make sure to use either the old or the new type system consistently to avoid unintended behavior. See the website documentation for more information.
getOutputType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
Apache-2.0
/**
 * Returns the schema of the consumed table.
 *
 * @return the {@link TableSchema} of the consumed table
 * @throws TableException if the sink implements neither this method nor the legacy
 *     {@link #getFieldNames()} / {@link #getFieldTypes()} accessors
 */
default TableSchema getTableSchema() {
    final String[] fieldNames = getFieldNames();
    // Use the wildcard-parameterized type instead of the raw TypeInformation[]
    // to avoid unchecked warnings; this matches the getFieldTypes() signature.
    final TypeInformation<?>[] legacyFieldTypes = getFieldTypes();
    if (fieldNames == null || legacyFieldTypes == null) {
        throw new TableException("Table sink does not implement a table schema.");
    }
    return new TableSchema(fieldNames, legacyFieldTypes);
}
Returns the schema of the consumed table. @return The {@link TableSchema} of the consumed table.
getTableSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
Apache-2.0
@Deprecated
default String[] getFieldNames() {
    // null signals "not provided"; getTableSchema() throws in that case.
    return null;
}
@deprecated Use the field names of {@link #getTableSchema()} instead.
getFieldNames
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
Apache-2.0
@Deprecated
default TypeInformation<?>[] getFieldTypes() {
    // null signals "not provided"; getTableSchema() throws in that case.
    return null;
}
@deprecated Use the field types of {@link #getTableSchema()} instead.
getFieldTypes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sinks/TableSink.java
Apache-2.0
/** Returns the name of the rowtime attribute. */
public String getAttributeName() {
    return attributeName;
}
Returns the name of the rowtime attribute.
getAttributeName
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/RowtimeAttributeDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/RowtimeAttributeDescriptor.java
Apache-2.0
/** Returns the {@link TimestampExtractor} for the rowtime attribute. */
public TimestampExtractor getTimestampExtractor() {
    return timestampExtractor;
}
Returns the {@link TimestampExtractor} for the attribute.
getTimestampExtractor
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/RowtimeAttributeDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/RowtimeAttributeDescriptor.java
Apache-2.0
/**
 * Returns the {@link DataType} for the produced data of the {@link TableSource}.
 *
 * @return the data type of the returned {@code DataStream}
 * @throws TableException if neither this method nor the legacy {@link #getReturnType()}
 *     is implemented
 */
default DataType getProducedDataType() {
    final TypeInformation<T> returnType = getReturnType();
    if (returnType == null) {
        throw new TableException("Table source does not implement a produced data type.");
    }
    // Produced rows are never null as a whole, hence the NOT NULL constraint.
    return fromLegacyInfoToDataType(returnType).notNull();
}
Returns the {@link DataType} for the produced data of the {@link TableSource}. @return The data type of the returned {@code DataStream}.
getProducedDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/TableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/TableSource.java
Apache-2.0
@Deprecated
default TypeInformation<T> getReturnType() {
    // Legacy type-system hook; a null return signals that the new
    // getProducedDataType() path is used (it throws when both are missing).
    return null;
}
@deprecated This method will be removed in future versions as it uses the old type system. It is recommended to use {@link #getProducedDataType()} instead which uses the new type system based on {@link DataTypes}. Please make sure to use either the old or the new type system consistently to avoid unintended behavior. See the website documentation for more information.
getReturnType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/TableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/TableSource.java
Apache-2.0
/**
 * Describes the table source.
 *
 * @return a string explaining the {@link TableSource}
 */
default String explainSource() {
    final String[] fieldNames = getTableSchema().getFieldNames();
    return TableConnectorUtils.generateRuntimeName(getClass(), fieldNames);
}
Describes the table source. @return A String explaining the {@link TableSource}.
explainSource
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/TableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/TableSource.java
Apache-2.0
@Override
public TypeInformation<Long> getReturnType() {
    // A timestamp extractor always produces a Long value.
    return Types.LONG;
}
Provides an expression to extract the timestamp for a rowtime attribute. @deprecated This interface will not be supported in the new source design around {@link DynamicTableSource}. Use the concept of computed columns instead. See FLIP-95 for more information.
getReturnType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/tsextractors/TimestampExtractor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/legacy/sources/tsextractors/TimestampExtractor.java
Apache-2.0
/**
 * Lists the names of all functions in this module, excluding internal functions.
 *
 * @return a set of function names; empty by default for modules without functions
 */
default Set<String> listFunctions() {
    // Collections.emptySet() (not Set.of()) keeps contains(null) returning false
    // instead of throwing, and avoids raising the Java version floor.
    return Collections.emptySet();
}
List names of all functions in this module. It excludes internal functions. @return a set of function names
listFunctions
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/module/Module.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/module/Module.java
Apache-2.0