code
stringlengths
25
201k
docstring
stringlengths
19
96.2k
func_name
stringlengths
0
235
language
stringclasses
1 value
repo
stringlengths
8
51
path
stringlengths
11
314
url
stringlengths
62
377
license
stringclasses
7 values
/**
 * Checks whether a (possibly nested) logical type fulfills the given predicate.
 *
 * @param logicalType the type to inspect, including all of its children
 * @param predicate condition that the type itself or any nested child may satisfy
 * @return true if any (possibly nested) type matches the predicate
 */
public static boolean hasNested(LogicalType logicalType, Predicate<LogicalType> predicate) {
    // the searcher returns the first matching type; we only care whether one exists
    return logicalType.accept(new NestedTypeSearcher(predicate)).isPresent();
}
Checks whether a (possibly nested) logical type fulfills the given predicate.
hasNested
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
Apache-2.0
/**
 * Returns the precision of all types that define a precision implicitly or explicitly.
 *
 * @param logicalType type to extract the precision from
 * @return the precision as computed by the {@code PRECISION_EXTRACTOR} visitor
 */
public static int getPrecision(LogicalType logicalType) {
    return logicalType.accept(PRECISION_EXTRACTOR);
}
Returns the precision of all types that define a precision implicitly or explicitly.
getPrecision
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
Apache-2.0
/**
 * Checks the precision of a type that defines a precision implicitly or explicitly.
 *
 * @param logicalType type to check
 * @param precision expected precision
 * @return true if the type's precision equals the expected one
 */
public static boolean hasPrecision(LogicalType logicalType, int precision) {
    final int actualPrecision = getPrecision(logicalType);
    return actualPrecision == precision;
}
Checks the precision of a type that defines a precision implicitly or explicitly.
hasPrecision
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
Apache-2.0
/**
 * Returns the scale of all types that define a scale implicitly or explicitly.
 *
 * @param logicalType type to extract the scale from
 * @return the scale as computed by the {@code SCALE_EXTRACTOR} visitor
 */
public static int getScale(LogicalType logicalType) {
    return logicalType.accept(SCALE_EXTRACTOR);
}
Returns the scale of all types that define a scale implicitly or explicitly.
getScale
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
Apache-2.0
/**
 * Returns the field count of row and structured types. Other types return 1.
 *
 * @param logicalType type to count the fields of
 * @return number of fields as computed by the {@code FIELD_COUNT_EXTRACTOR} visitor
 */
public static int getFieldCount(LogicalType logicalType) {
    return logicalType.accept(FIELD_COUNT_EXTRACTOR);
}
Returns the field count of row and structured types. Other types return 1.
getFieldCount
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
Apache-2.0
/**
 * Returns the field names of row and structured types.
 *
 * @param logicalType type to extract the field names from
 * @return field names as computed by the {@code FIELD_NAMES_EXTRACTOR} visitor
 */
public static List<String> getFieldNames(LogicalType logicalType) {
    return logicalType.accept(FIELD_NAMES_EXTRACTOR);
}
Returns the field names of row and structured types.
getFieldNames
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
Apache-2.0
/**
 * Returns the field types of row and structured types.
 *
 * <p>Distinct types are transparently unwrapped to their source type first.
 *
 * @param logicalType type to extract the field types from
 * @return the children of the (unwrapped) type
 */
public static List<LogicalType> getFieldTypes(LogicalType logicalType) {
    // iterative unwrapping instead of recursion; distinct types may be nested
    LogicalType current = logicalType;
    while (current instanceof DistinctType) {
        current = ((DistinctType) current).getSourceType();
    }
    return current.getChildren();
}
Returns the field types of row and structured types.
getFieldTypes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
Apache-2.0
/**
 * Searches for a type (including children) satisfying the given predicate.
 *
 * <p>Performs a depth-first search: the type itself is tested first, then each
 * child in order; the first match wins.
 *
 * @param logicalType type (subtree root) to search
 * @return the first matching type, or empty if none matches
 */
@Override
protected Optional<LogicalType> defaultMethod(LogicalType logicalType) {
    if (predicate.test(logicalType)) {
        return Optional.of(logicalType);
    }
    // sequential stream short-circuits at the first present result,
    // exactly like the equivalent for-loop with early return
    return logicalType.getChildren().stream()
            .map(child -> child.accept(this))
            .filter(Optional::isPresent)
            .findFirst()
            .orElseGet(Optional::empty);
}
Searches for a type (including children) satisfying the given predicate.
defaultMethod
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
Apache-2.0
/**
 * Finds the result type of a decimal division operation.
 *
 * @param precision1 precision of the dividend
 * @param scale1 scale of the dividend
 * @param precision2 precision of the divisor
 * @param scale2 scale of the divisor
 * @return adjusted result decimal type
 */
public static DecimalType findDivisionDecimalType(
        int precision1, int scale1, int precision2, int scale2) {
    // result scale is at least 6 digits
    final int resultScale = Math.max(6, scale1 + precision2 + 1);
    final int resultPrecision = precision1 - scale1 + scale2 + resultScale;
    return adjustPrecisionScale(resultPrecision, resultScale);
}
Finds the result type of a decimal division operation.
findDivisionDecimalType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
Apache-2.0
/**
 * Finds the result type of a decimal modulo operation.
 *
 * @param precision1 precision of the dividend
 * @param scale1 scale of the dividend
 * @param precision2 precision of the divisor
 * @param scale2 scale of the divisor
 * @return adjusted result decimal type
 */
public static DecimalType findModuloDecimalType(
        int precision1, int scale1, int precision2, int scale2) {
    // the remainder can never have more fractional or integral digits
    // than the "larger" of the two operands
    final int resultScale = Math.max(scale1, scale2);
    final int resultPrecision =
            Math.min(precision1 - scale1, precision2 - scale2) + resultScale;
    return adjustPrecisionScale(resultPrecision, resultScale);
}
Finds the result type of a decimal modulo operation.
findModuloDecimalType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
Apache-2.0
/**
 * Finds the result type of a decimal multiplication operation.
 *
 * @param precision1 precision of the first factor
 * @param scale1 scale of the first factor
 * @param precision2 precision of the second factor
 * @param scale2 scale of the second factor
 * @return adjusted result decimal type
 */
public static DecimalType findMultiplicationDecimalType(
        int precision1, int scale1, int precision2, int scale2) {
    // scales add up when multiplying; +1 on precision for a possible carry digit
    final int resultScale = scale1 + scale2;
    final int resultPrecision = precision1 + precision2 + 1;
    return adjustPrecisionScale(resultPrecision, resultScale);
}
Finds the result type of a decimal multiplication operation.
findMultiplicationDecimalType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
Apache-2.0
/**
 * Finds the result type of a decimal addition operation.
 *
 * @param precision1 precision of the first summand
 * @param scale1 scale of the first summand
 * @param precision2 precision of the second summand
 * @param scale2 scale of the second summand
 * @return adjusted result decimal type
 */
public static DecimalType findAdditionDecimalType(
        int precision1, int scale1, int precision2, int scale2) {
    // keep the finer fractional resolution; +1 on precision for a possible carry
    final int resultScale = Math.max(scale1, scale2);
    final int resultPrecision =
            Math.max(precision1 - scale1, precision2 - scale2) + resultScale + 1;
    return adjustPrecisionScale(resultPrecision, resultScale);
}
Finds the result type of a decimal addition operation.
findAdditionDecimalType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
Apache-2.0
public static DecimalType findRoundDecimalType(int precision, int scale, int round) { if (round >= scale) { return new DecimalType(false, precision, scale); } if (round < 0) { return new DecimalType( false, Math.min(DecimalType.MAX_PRECISION, 1 + precision - scale), 0); } // 0 <= r < s // NOTE: rounding may increase the digits by 1, therefore we need +1 on precisions. return new DecimalType(false, 1 + precision - scale + round, round); }
Finds the result type of a decimal rounding operation.
findRoundDecimalType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
Apache-2.0
public static LogicalType findAvgAggType(LogicalType argType) { final LogicalType resultType; if (argType.is(DECIMAL)) { // a hack to make legacy types possible until we drop them if (argType instanceof LegacyTypeInformationType) { return argType; } // adopted from // https://docs.microsoft.com/en-us/sql/t-sql/functions/avg-transact-sql // however, we count by BIGINT, therefore divide by DECIMAL(20,0), // but the end result is actually the same, which is DECIMAL(38, MAX(6, s)). resultType = LogicalTypeMerging.findDivisionDecimalType(38, getScale(argType), 20, 0); } else { resultType = argType; } return resultType.copy(argType.isNullable()); }
Finds the result type of a decimal average aggregation.
findAvgAggType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
Apache-2.0
public static LogicalType findSumAggType(LogicalType argType) { // adopted from // https://docs.microsoft.com/en-us/sql/t-sql/functions/sum-transact-sql final LogicalType resultType; if (argType.is(DECIMAL)) { // a hack to make legacy types possible until we drop them if (argType instanceof LegacyTypeInformationType) { return argType; } resultType = new DecimalType(false, 38, getScale(argType)); } else { resultType = argType; } return resultType.copy(argType.isNullable()); }
Finds the result type of a decimal sum aggregation.
findSumAggType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeMerging.java
Apache-2.0
/**
 * Returns the conversion class for the given {@link LogicalType} that is used by the table
 * runtime as internal data structure.
 *
 * @param type logical type to map to its internal representation class
 * @return internal conversion class
 * @throws UnsupportedOperationException for TIMESTAMP_WITH_TIME_ZONE (no internal structure)
 * @throws IllegalArgumentException for SYMBOL, UNRESOLVED, and any unknown type root
 * @see RowData
 */
public static Class<?> toInternalConversionClass(LogicalType type) {
    // ordered by type root definition
    switch (type.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
            return StringData.class;
        case BOOLEAN:
            return Boolean.class;
        case BINARY:
        case VARBINARY:
            return byte[].class;
        case DECIMAL:
            return DecimalData.class;
        case TINYINT:
            return Byte.class;
        case SMALLINT:
            return Short.class;
        case INTEGER:
        case DATE:
        case TIME_WITHOUT_TIME_ZONE:
        case INTERVAL_YEAR_MONTH:
            // all four are internally represented as a 32-bit integer
            return Integer.class;
        case BIGINT:
        case INTERVAL_DAY_TIME:
            return Long.class;
        case FLOAT:
            return Float.class;
        case DOUBLE:
            return Double.class;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            return TimestampData.class;
        case TIMESTAMP_WITH_TIME_ZONE:
            throw new UnsupportedOperationException("Unsupported type: " + type);
        case ARRAY:
            return ArrayData.class;
        case MULTISET:
        case MAP:
            // multisets are represented as maps from element to count
            return MapData.class;
        case ROW:
        case STRUCTURED_TYPE:
            return RowData.class;
        case DISTINCT_TYPE:
            // unwrap to the source type's internal class
            return toInternalConversionClass(((DistinctType) type).getSourceType());
        case RAW:
            return RawValueData.class;
        case NULL:
            return Object.class;
        case DESCRIPTOR:
            return ColumnList.class;
        case SYMBOL:
        case UNRESOLVED:
        default:
            throw new IllegalArgumentException("Illegal type: " + type);
    }
}
Returns the conversion class for the given {@link LogicalType} that is used by the table runtime as internal data structure. @see RowData
toInternalConversionClass
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java
Apache-2.0
/**
 * Converts any logical type to a row type. Composite types are converted to a row type.
 * Atomic types are wrapped into a field.
 *
 * @param t type to convert
 * @return an equivalent row type
 */
public static RowType toRowType(LogicalType t) {
    switch (t.getTypeRoot()) {
        case ROW:
            // already a row, no conversion needed
            return (RowType) t;
        case STRUCTURED_TYPE:
            // map every attribute to a row field, preserving the optional description
            final StructuredType structuredType = (StructuredType) t;
            final List<RowField> rowFields =
                    structuredType.getAttributes().stream()
                            .map(
                                    a ->
                                            new RowField(
                                                    a.getName(),
                                                    a.getType(),
                                                    a.getDescription().orElse(null)))
                            .collect(Collectors.toList());
            return new RowType(structuredType.isNullable(), rowFields);
        case DISTINCT_TYPE:
            // unwrap and convert the source type
            return toRowType(((DistinctType) t).getSourceType());
        default:
            // atomic type becomes a single-field row
            return RowType.of(t);
    }
}
Converts any logical type to a row type. Composite types are converted to a row type. Atomic types are wrapped into a field.
toRowType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java
Apache-2.0
/**
 * Returns a unique name for an atomic type.
 *
 * <p>Starts with the default atomic field name and appends {@code _0}, {@code _1}, ...
 * until the candidate does not collide with any existing name.
 *
 * @param existingNames names already taken; may be null (treated as empty)
 * @return a name not contained in {@code existingNames}
 */
public static String getAtomicName(List<String> existingNames) {
    String candidate = ATOMIC_FIELD_NAME;
    if (null == existingNames) {
        return candidate;
    }
    int suffix = 0;
    while (existingNames.contains(candidate)) {
        candidate = ATOMIC_FIELD_NAME + "_" + suffix++;
    }
    return candidate;
}
Returns a unique name for an atomic type.
getAtomicName
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java
Apache-2.0
/**
 * Registers the default data type for the given class in {@code defaultDataTypes}.
 *
 * <p>Primitive classes can never hold null, so they are registered as NOT NULL;
 * all other classes are registered as nullable.
 *
 * @param clazz conversion class used as registry key and bridging class
 * @param rootType logical root data type to register for the class
 */
private static void addDefaultDataType(Class<?> clazz, DataType rootType) {
    final DataType dataType = clazz.isPrimitive() ? rootType.notNull() : rootType.nullable();
    defaultDataTypes.put(clazz.getName(), dataType.bridgedTo(clazz));
}
Class-based data type extractor that supports extraction of clearly identifiable data types for input and output conversion. <p>Note: In most of the cases, {@link DataTypeExtractor} is more useful as it also considers structured types and type variables possibly annotated with {@link DataTypeHint}.
addDefaultDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/ClassDataTypeConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/ClassDataTypeConverter.java
Apache-2.0
public static Optional<DataType> extractDataType(Class<?> clazz) { // prefer BYTES over ARRAY<TINYINT> for byte[] if (clazz == byte[].class) { return Optional.of(DataTypes.BYTES()); } if (clazz.isArray()) { return extractDataType(clazz.getComponentType()).map(DataTypes::ARRAY); } if (TableSymbol.class.isAssignableFrom(clazz)) { return Optional.of(new AtomicDataType(new SymbolType<>(), clazz)); } return Optional.ofNullable(defaultDataTypes.get(clazz.getName())); }
Returns the clearly identifiable data type if possible. For example, {@link Long} can be expressed as {@link DataTypes#BIGINT()}. However, for example, {@link Row} cannot be extracted as information about the fields is missing. Or {@link BigDecimal} needs to be mapped from a variable precision/scale to constant ones.
extractDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/ClassDataTypeConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/ClassDataTypeConverter.java
Apache-2.0
/**
 * Removes a string prefix from the fields of the given row data type.
 *
 * <p>Fields that do not start with the prefix keep their name unchanged.
 *
 * @param dataType row data type whose field names should be stripped
 * @param prefix prefix to remove from each field name
 * @return a new row data type with renamed fields, same conversion class and children
 * @throws IllegalArgumentException if the given type is not a row type
 */
public static DataType stripRowPrefix(DataType dataType, String prefix) {
    Preconditions.checkArgument(dataType.getLogicalType().is(ROW), "Row data type expected.");
    final RowType rowType = (RowType) dataType.getLogicalType();
    final List<String> strippedNames =
            rowType.getFieldNames().stream()
                    .map(name -> name.startsWith(prefix) ? name.substring(prefix.length()) : name)
                    .collect(Collectors.toList());
    final LogicalType renamedRowType = LogicalTypeUtils.renameRowFields(rowType, strippedNames);
    return new FieldsDataType(
            renamedRowType, dataType.getConversionClass(), dataType.getChildren());
}
Removes a string prefix from the fields of the given row data type.
stripRowPrefix
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Appends the given list of fields to an existing row data type.
 *
 * <p>The nullability of the input row is preserved. An empty field list returns the
 * input unchanged.
 *
 * @param dataType row data type to extend
 * @param fields fields to append (may be empty)
 * @return a new row data type containing the original fields followed by the new ones
 * @throws IllegalArgumentException if the given type is not a row type
 */
public static DataType appendRowFields(DataType dataType, List<DataTypes.Field> fields) {
    Preconditions.checkArgument(dataType.getLogicalType().is(ROW), "Row data type expected.");
    // idiomatic emptiness check instead of size() == 0; avoids rebuilding the row
    if (fields.isEmpty()) {
        return dataType;
    }
    DataType newRow =
            Stream.concat(DataType.getFields(dataType).stream(), fields.stream())
                    .collect(Collectors.collectingAndThen(Collectors.toList(), DataTypes::ROW));
    // DataTypes.ROW is nullable by default; restore NOT NULL if the input was NOT NULL
    if (!dataType.getLogicalType().isNullable()) {
        newRow = newRow.notNull();
    }
    return newRow;
}
Appends the given list of fields to an existing row data type.
appendRowFields
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Creates a {@link DataType} from the given {@link LogicalType} with internal data
 * structures.
 *
 * @param logicalType logical type to convert
 * @return data type bridged to the internal conversion classes
 */
public static DataType toInternalDataType(LogicalType logicalType) {
    // convert to the default external data type first, then switch to internal classes
    return toInternalDataType(TypeConversions.fromLogicalToDataType(logicalType));
}
Creates a {@link DataType} from the given {@link LogicalType} with internal data structures.
toInternalDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Creates a {@link DataType} from the given {@link DataType} with internal data structures.
 *
 * @param dataType data type to convert
 * @return the same logical type bridged to its internal conversion class
 */
public static DataType toInternalDataType(DataType dataType) {
    final Class<?> internalClass = toInternalConversionClass(dataType.getLogicalType());
    return dataType.bridgedTo(internalClass);
}
Creates a {@link DataType} from the given {@link DataType} with internal data structures.
toInternalDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Replaces the {@link LogicalType} of a {@link DataType}, i.e., it keeps the bridging class.
 *
 * @param dataType data type whose conversion class should be kept
 * @param replacement new logical type
 * @return data type for the replacement logical type, bridged to the original class
 */
public static DataType replaceLogicalType(DataType dataType, LogicalType replacement) {
    final DataType replacedDataType = LogicalTypeDataTypeConverter.toDataType(replacement);
    return replacedDataType.bridgedTo(dataType.getConversionClass());
}
Replaces the {@link LogicalType} of a {@link DataType}, i.e., it keeps the bridging class.
replaceLogicalType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Removes time attributes from the {@link DataType}. As everywhere else in the code base,
 * this method does not support nested time attributes for now.
 *
 * @param dataType data type that may carry a time attribute
 * @return data type without time attribute, or the input unchanged for non-timestamp types
 */
public static DataType removeTimeAttribute(DataType dataType) {
    final LogicalType logicalType = dataType.getLogicalType();
    // only timestamp-family types can carry a time attribute
    if (!logicalType.is(LogicalTypeFamily.TIMESTAMP)) {
        return dataType;
    }
    return replaceLogicalType(dataType, removeTimeAttributes(logicalType));
}
Removes time attributes from the {@link DataType}. As everywhere else in the code base, this method does not support nested time attributes for now.
removeTimeAttribute
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Transforms the given data type to a different data type using the given transformations.
 *
 * <p>Convenience overload without a {@link DataTypeFactory}; delegates with a null factory.
 *
 * @param typeToTransform data type to be transformed
 * @param transformations the transformations to apply in order (must not be empty)
 * @return the new data type
 * @see #transform(DataTypeFactory, DataType, TypeTransformation...)
 */
public static DataType transform(
        DataType typeToTransform, TypeTransformation... transformations) {
    return transform(null, typeToTransform, transformations);
}
Transforms the given data type to a different data type using the given transformations. @see #transform(DataTypeFactory, DataType, TypeTransformation...)
transform
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Transforms the given data type to a different data type using the given transformations.
 *
 * <p>The transformations are applied in the given order. In case of constructed or
 * composite types, a transformation is applied transitively to children first. Both the
 * logical type and the conversion class can be transformed.
 *
 * @param factory {@link DataTypeFactory} if available
 * @param typeToTransform data type to be transformed
 * @param transformations the transformations to apply (must not be empty)
 * @return the new data type
 * @throws IllegalArgumentException if no transformation is given
 */
public static DataType transform(
        @Nullable DataTypeFactory factory,
        DataType typeToTransform,
        TypeTransformation... transformations) {
    Preconditions.checkArgument(
            transformations.length > 0, "transformations should not be empty.");
    DataType result = typeToTransform;
    // apply each transformation to the output of the previous one
    for (TypeTransformation transformation : transformations) {
        final DataTypeTransformer transformer = new DataTypeTransformer(factory, transformation);
        result = result.accept(transformer);
    }
    return result;
}
Transforms the given data type to a different data type using the given transformations. <p>The transformations will be called in the given order. In case of constructed or composite types, a transformation will be applied transitively to children first. <p>Both the {@link DataType#getLogicalType()} and {@link DataType#getConversionClass()} can be transformed. @param factory {@link DataTypeFactory} if available @param typeToTransform data type to be transformed. @param transformations the transformations to transform data type to another type. @return the new data type
transform
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Expands a composite {@link DataType} to a corresponding {@link ResolvedSchema}. Useful for
 * flattening a column or mapping a physical to logical type of a table source.
 *
 * <p>Throws an exception for a non-composite type. Use
 * {@code LogicalTypeChecks#isCompositeType(LogicalType)} to check beforehand.
 *
 * <p>It does not expand an atomic type on purpose, because that operation depends on the
 * context.
 *
 * @param dataType data type to expand; must be a composite type
 * @return a corresponding resolved schema
 * @throws IllegalArgumentException if the type is not composite
 */
public static ResolvedSchema expandCompositeTypeToSchema(DataType dataType) {
    if (dataType instanceof FieldsDataType) {
        return expandCompositeType((FieldsDataType) dataType);
    }
    // legacy composite types are not FieldsDataType but still expandable
    final LogicalType logicalType = dataType.getLogicalType();
    if (logicalType instanceof LegacyTypeInformationType
            && logicalType.getTypeRoot() == STRUCTURED_TYPE) {
        return expandLegacyCompositeType(dataType);
    }
    throw new IllegalArgumentException("Expected a composite type");
}
Expands a composite {@link DataType} to a corresponding {@link ResolvedSchema}. Useful for flattening a column or mapping a physical to logical type of a table source <p>Throws an exception for a non composite type. You can use {@link LogicalTypeChecks#isCompositeType(LogicalType)} to check that. <p>It does not expand an atomic type on purpose, because that operation depends on the context. E.g. in case of a {@code FLATTEN} function such operation is not allowed, whereas when mapping a physical type to logical the field name should be derived from the logical schema. @param dataType Data type to expand. Must be a composite type. @return A corresponding table schema.
expandCompositeTypeToSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Retrieves a nested field from a composite type at the given position.
 *
 * @param compositeType data type to expand; must be a composite type
 * @param index index of the field to retrieve
 * @return the field's data type at the given position, or empty if out of range
 * @throws IllegalArgumentException if the type is not composite
 */
public static Optional<DataType> getField(DataType compositeType, int index) {
    return expandCompositeTypeToSchema(compositeType)
            .getColumn(index)
            .map(Column::getDataType);
}
Retrieves a nested field from a composite type at given position. <p>Throws an exception for a non composite type. You can use {@link LogicalTypeChecks#isCompositeType(LogicalType)} to check that. @param compositeType Data type to expand. Must be a composite type. @param index Index of the field to retrieve. @return The field at the given position.
getField
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Retrieves a nested field from a composite type with the given name.
 *
 * @param compositeType data type to expand; must be a composite type
 * @param name name of the field to retrieve
 * @return the field's data type with the given name, or empty if absent
 * @throws IllegalArgumentException if the type is not composite
 */
public static Optional<DataType> getField(DataType compositeType, String name) {
    return expandCompositeTypeToSchema(compositeType)
            .getColumn(name)
            .map(Column::getDataType);
}
Retrieves a nested field from a composite type with given name. <p>Throws an exception for a non composite type. You can use {@link LogicalTypeChecks#isCompositeType(LogicalType)} to check that. @param compositeType Data type to expand. Must be a composite type. @param name Name of the field to retrieve. @return The field with the given name.
getField
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Returns the data types of the flat representation in the first level of the given data
 * type.
 *
 * <p>Distinct types are unwrapped to their source type first; composite types return their
 * children; atomic types are wrapped in a singleton list.
 *
 * @param dataType data type to flatten
 * @return first-level data types of the flat representation
 */
public static List<DataType> flattenToDataTypes(DataType dataType) {
    final LogicalType logicalType = dataType.getLogicalType();
    if (logicalType.is(DISTINCT_TYPE)) {
        // a distinct type has exactly one child: its source type
        return flattenToDataTypes(dataType.getChildren().get(0));
    }
    if (isCompositeType(logicalType)) {
        return dataType.getChildren();
    }
    return Collections.singletonList(dataType);
}
Returns the data types of the flat representation in the first level of the given data type.
flattenToDataTypes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Returns the names of the flat representation of the given data type. In case of
 * {@link StructuredType}, the list also includes the super type fields.
 *
 * @param dataType data type to flatten
 * @return flattened field names
 */
public static List<String> flattenToNames(DataType dataType) {
    final List<String> noExistingNames = Collections.emptyList();
    return flattenToNames(dataType, noExistingNames);
}
Returns the names of the flat representation of the given data type. In case of {@link StructuredType}, the list also includes the super type fields.
flattenToNames
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
Apache-2.0
/**
 * Temporary solution to enable tests with type information and internal data structures
 * until we drop all legacy types.
 *
 * @param typeInfo type information to inspect
 * @return true if the type information is queryable and bridged to {@link RowData}
 */
private static boolean isRowData(TypeInformation<?> typeInfo) {
    if (typeInfo instanceof DataTypeQueryable) {
        final DataType dataType = ((DataTypeQueryable) typeInfo).getDataType();
        // identity check is intentional: bridging classes are canonical Class objects
        return dataType.getConversionClass() == RowData.class;
    }
    return false;
}
Temporary solution to enable tests with type information and internal data structures until we drop all legacy types.
isRowData
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/LegacyTypeInfoDataTypeConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/LegacyTypeInfoDataTypeConverter.java
Apache-2.0
/**
 * Returns the data type of a logical type without explicit conversions.
 *
 * @param logicalType logical type to convert
 * @return data type as produced by the {@code dataTypeCreator} visitor
 */
public static DataType toDataType(LogicalType logicalType) {
    return logicalType.accept(dataTypeCreator);
}
Returns the data type of a logical type without explicit conversions.
toDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/LogicalTypeDataTypeConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/LogicalTypeDataTypeConverter.java
Apache-2.0
/**
 * Converts the given {@link TypeInformation} into {@link DataType}.
 *
 * <p>Delegates to the three-argument overload with the third flag set to false
 * (NOTE(review): the flag's meaning is defined by that overload, which is not
 * visible here — confirm before relying on it).
 *
 * @param dataTypeFactory factory used during conversion
 * @param typeInfo type information to convert
 * @return the converted data type
 */
public static DataType toDataType(
        DataTypeFactory dataTypeFactory, TypeInformation<?> typeInfo) {
    return toDataType(dataTypeFactory, typeInfo, false);
}
Converts the given {@link TypeInformation} into {@link DataType}.
toDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/TypeInfoDataTypeConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/TypeInfoDataTypeConverter.java
Apache-2.0
public static java.sql.Date toSQLDate(int v) { // note that, in this case, can't handle Daylight Saving Time final long t = v * MILLIS_PER_DAY; return new java.sql.Date(t - LOCAL_TZ.getOffset(t)); }
Converts the internal representation of a SQL DATE (int) to the Java type used for UDF parameters ({@link java.sql.Date}).
toSQLDate
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public static java.sql.Time toSQLTime(int v) { // note that, in this case, can't handle Daylight Saving Time return new java.sql.Time(v - LOCAL_TZ.getOffset(v)); }
Converts the internal representation of a SQL TIME (int) to the Java type used for UDF parameters ({@link java.sql.Time}).
toSQLTime
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
/**
 * Converts the internal representation of a SQL TIMESTAMP (long, epoch millis) to the Java
 * type used for UDF parameters ({@link java.sql.Timestamp}).
 *
 * @param v milliseconds since epoch
 * @return corresponding {@link java.sql.Timestamp} in the local time zone
 */
public static java.sql.Timestamp toSQLTimestamp(long v) {
    // shift by the local zone offset at the given instant
    final long localMillis = v - LOCAL_TZ.getOffset(v);
    return new java.sql.Timestamp(localMillis);
}
Converts the internal representation of a SQL TIMESTAMP (long) to the Java type used for UDF parameters ({@link java.sql.Timestamp}).
toSQLTimestamp
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
/**
 * Converts the Java type used for UDF parameters of SQL DATE type ({@link java.sql.Date})
 * to the internal representation (int, days since epoch).
 *
 * <p>Converse of {@link #toSQLDate(int)}.
 *
 * @param date date in the local time zone
 * @return days since 1970-01-01
 */
public static int toInternal(java.sql.Date date) {
    // normalize from local time to UTC, then count whole days
    final long utcMillis = date.getTime() + LOCAL_TZ.getOffset(date.getTime());
    return (int) (utcMillis / MILLIS_PER_DAY);
}
Converts the Java type used for UDF parameters of SQL DATE type ({@link java.sql.Date}) to internal representation (int). <p>Converse of {@link #toSQLDate(int)}.
toInternal
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
/**
 * Converts the Java type used for UDF parameters of SQL TIME type ({@link java.sql.Time})
 * to the internal representation (int, millis of day).
 *
 * <p>Converse of {@link #toSQLTime(int)}.
 *
 * @param time time in the local time zone
 * @return milliseconds since midnight
 */
public static int toInternal(java.sql.Time time) {
    // normalize from local time to UTC, then keep only the millis-of-day part
    final long utcMillis = time.getTime() + LOCAL_TZ.getOffset(time.getTime());
    return (int) (utcMillis % MILLIS_PER_DAY);
}
Converts the Java type used for UDF parameters of SQL TIME type ({@link java.sql.Time}) to internal representation (int). <p>Converse of {@link #toSQLTime(int)}.
toInternal
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
/**
 * This is similar to {@link LocalDateTime#from(TemporalAccessor)}, but it's less strict and
 * introduces default values for unsupported fields.
 *
 * <p>Missing date fields default to 1970-01-01; missing time fields default to 00:00:00.0.
 * Nanoseconds are truncated to the requested precision (number of fractional-second
 * digits, 0..9).
 *
 * @param accessor temporal accessor that may support only a subset of date/time fields
 * @param precision number of fractional-second digits to keep
 * @return the assembled local date-time
 */
private static LocalDateTime fromTemporalAccessor(TemporalAccessor accessor, int precision) {
    // complement year with 1970
    int year = accessor.isSupported(YEAR) ? accessor.get(YEAR) : 1970;
    // complement month with 1
    int month = accessor.isSupported(MONTH_OF_YEAR) ? accessor.get(MONTH_OF_YEAR) : 1;
    // complement day with 1
    int day = accessor.isSupported(DAY_OF_MONTH) ? accessor.get(DAY_OF_MONTH) : 1;
    // complement hour with 0
    int hour = accessor.isSupported(HOUR_OF_DAY) ? accessor.get(HOUR_OF_DAY) : 0;
    // complement minute with 0
    int minute = accessor.isSupported(MINUTE_OF_HOUR) ? accessor.get(MINUTE_OF_HOUR) : 0;
    // complement second with 0
    int second = accessor.isSupported(SECOND_OF_MINUTE) ? accessor.get(SECOND_OF_MINUTE) : 0;
    // complement nano_of_second with 0
    int nanoOfSecond = accessor.isSupported(NANO_OF_SECOND) ? accessor.get(NANO_OF_SECOND) : 0;
    if (precision == 0) {
        // no fractional seconds requested at all
        nanoOfSecond = 0;
    } else if (precision != 9) {
        // truncate nanos to the requested number of fractional digits
        // (floor/powerX are sibling helpers in this file — presumably integer
        // floor-division and exponentiation; confirm against their definitions)
        nanoOfSecond = (int) floor(nanoOfSecond, powerX(10, 9 - precision));
    }
    return LocalDateTime.of(year, month, day, hour, minute, second, nanoOfSecond);
}
This is similar to {@link LocalDateTime#from(TemporalAccessor)}, but it's less strict and introduces default values.
fromTemporalAccessor
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
private static long parseTimestampMillis(String dateStr, String format, TimeZone tz) throws ParseException { SimpleDateFormat formatter = FORMATTER_CACHE.get(format); formatter.setTimeZone(tz); return formatter.parse(dateStr).getTime(); }
Parse date time string to timestamp based on the given time zone and format. Returns null if parsing failed. @param dateStr the date time string @param format date time string format @param tz the time zone
parseTimestampMillis
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
private static long parseTimestampTz(String dateStr, String tzStr) throws ParseException { TimeZone tz = TIMEZONE_CACHE.get(tzStr); return parseTimestampMillis(dateStr, DateTimeUtils.TIMESTAMP_FORMAT_STRING, tz); }
Parse date time string to timestamp based on the given time zone string and format. Returns null if parsing failed. @param dateStr the date time string @param tzStr the time zone id string
parseTimestampTz
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public static int timestampMillisToDate(long ts) { int days = (int) (ts / MILLIS_PER_DAY); if (days < 0) { days = days - 1; } return days; }
Get date from a timestamp. @param ts the timestamp in milliseconds. @return the date in days.
timestampMillisToDate
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public static String formatUnixTimestamp(long unixtime, TimeZone tz) { return formatUnixTimestamp(unixtime, TIMESTAMP_FORMAT_STRING, tz); }
Convert unix timestamp (seconds since '1970-01-01 00:00:00' UTC) to datetime string in the "yyyy-MM-dd HH:mm:ss" format.
formatUnixTimestamp
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public static String formatUnixTimestamp(long unixtime, String format, TimeZone tz) { SimpleDateFormat formatter = FORMATTER_CACHE.get(format); formatter.setTimeZone(tz); Date date = new Date(unixtime * 1000); try { return formatter.format(date); } catch (Exception e) { LOG.error("Exception when formatting.", e); return null; } }
Convert unix timestamp (seconds since '1970-01-01 00:00:00' UTC) to datetime string in the given format.
formatUnixTimestamp
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public static long unixTimestamp() { return System.currentTimeMillis() / 1000; }
Returns a Unix timestamp in seconds since '1970-01-01 00:00:00' UTC as an unsigned integer.
unixTimestamp
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public static long unixTimestamp(long ts) { return ts / 1000; }
Returns the value of the timestamp to seconds since '1970-01-01 00:00:00' UTC.
unixTimestamp
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
private static StringBuilder hms(StringBuilder b, int h, int m, int s) { int2(b, h); b.append(':'); int2(b, m); b.append(':'); int2(b, s); return b; }
Appends hour:minute:second to a buffer; assumes they are valid.
hms
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
private static StringBuilder ymdhms( StringBuilder b, int year, int month, int day, int h, int m, int s) { ymd(b, year, month, day); b.append(' '); hms(b, h, m, s); return b; }
Appends year-month-day and hour:minute:second to a buffer; assumes they are valid.
ymdhms
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public static long addMonths(long timestamp, int m) { final long millis = DateTimeUtils.floorMod(timestamp, DateTimeUtils.MILLIS_PER_DAY); timestamp -= millis; final long x = addMonths((int) (timestamp / DateTimeUtils.MILLIS_PER_DAY), m); return x * DateTimeUtils.MILLIS_PER_DAY + millis; }
Adds a given number of months to a timestamp, represented as the number of milliseconds since the epoch.
addMonths
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public static TimeUnit getValue(int ordinal) { return ordinal < 0 || ordinal >= CACHED_VALUES.length ? null : CACHED_VALUES[ordinal]; }
Returns the TimeUnit associated with an ordinal. The value returned is null if the ordinal is not a member of the TimeUnit enumeration.
getValue
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public boolean isValidValue(BigDecimal field) { return field.compareTo(BigDecimal.ZERO) >= 0 && (limit == null || field.compareTo(limit) < 0); }
Returns whether a given value is valid for a field of this time unit. @param field Field value @return Whether value
isValidValue
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public boolean monthly() { return ordinal() <= MONTH.ordinal(); }
Whether this is in the YEAR-TO-MONTH family of intervals.
monthly
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/DateTimeUtils.java
Apache-2.0
public static byte[] unhex(final byte[] bytes) { final byte[] out = new byte[(bytes.length + 1) >> 1]; int i = bytes.length - 2; int j = out.length - 1; while (i >= 0) { int l = Character.digit(bytes[i], 16); int r = Character.digit(bytes[i + 1], 16); if (l == -1 || r == -1) { return null; } i -= 2; out[j--] = (byte) (((l << 4) | r) & 0xFF); } // length is odd and first byte is invalid if (i == -1 && Character.digit(bytes[0], 16) == -1) { return null; } return out; }
Converts an array of characters representing hexadecimal values into an array of bytes of those same values. E.g. {@code unhex("12".getBytes())} returns {@code new byte[]{0x12}}. <p>The returned array will be half the length of the passed array, as it takes two characters to represent any given byte. If the input array has an odd length, the first byte is handled separately and set to 0. <p>Unlike {@link #decodeHex(String)}, this method does not throw an exception for odd-length inputs or invalid characters. Instead, it returns null if invalid characters are encountered. @param bytes An array of characters containing hexadecimal digits. @return A byte array containing the binary data decoded from the supplied char array, or null if the input contains invalid hexadecimal characters.
unhex
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/EncodingUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/EncodingUtils.java
Apache-2.0
public static String repeat(final String str, final int repeat) { // Performance tuned for 2.0 (JDK1.4) if (str == null) { return null; } if (repeat <= 0) { return EMPTY; } final int inputLength = str.length(); if (repeat == 1 || inputLength == 0) { return str; } if (inputLength == 1 && repeat <= PAD_LIMIT) { return repeat(str.charAt(0), repeat); } final int outputLength = inputLength * repeat; switch (inputLength) { case 1: return repeat(str.charAt(0), repeat); case 2: final char ch0 = str.charAt(0); final char ch1 = str.charAt(1); final char[] output2 = new char[outputLength]; for (int i = repeat * 2 - 2; i >= 0; i--, i--) { output2[i] = ch0; output2[i + 1] = ch1; } return new String(output2); default: final StringBuilder buf = new StringBuilder(outputLength); for (int i = 0; i < repeat; i++) { buf.append(str); } return buf.toString(); } }
Repeat a String {@code repeat} times to form a new String. <pre> StringUtils.repeat(null, 2) = null StringUtils.repeat("", 0) = "" StringUtils.repeat("", 2) = "" StringUtils.repeat("a", 3) = "aaa" StringUtils.repeat("ab", 2) = "abab" StringUtils.repeat("a", -2) = "" </pre> @param str the String to repeat, may be null @param repeat number of times to repeat str, negative treated as zero @return a new String consisting of the original String repeated, {@code null} if null String input
repeat
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/EncodingUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/EncodingUtils.java
Apache-2.0
public static String generatePartitionPath(LinkedHashMap<String, String> partitionSpec) { if (partitionSpec.isEmpty()) { return ""; } StringBuilder suffixBuf = new StringBuilder(); int i = 0; for (Map.Entry<String, String> e : partitionSpec.entrySet()) { if (i > 0) { suffixBuf.append(Path.SEPARATOR); } suffixBuf.append(escapePathName(e.getKey())); suffixBuf.append('='); suffixBuf.append(escapePathName(e.getValue())); i++; } suffixBuf.append(Path.SEPARATOR); return suffixBuf.toString(); }
Make partition path from partition spec. @param partitionSpec The partition spec. @return An escaped, valid partition name.
generatePartitionPath
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
Apache-2.0
public static List<String> extractPartitionValues(Path currPath) { return new ArrayList<>(extractPartitionSpecFromPath(currPath).values()); }
Make partition values from path. @param currPath partition file path. @return Sequential partition specs.
extractPartitionValues
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
Apache-2.0
public static FileStatus[] listStatusWithoutHidden(FileSystem fs, Path dir) throws IOException { FileStatus[] statuses = fs.listStatus(dir); if (statuses == null) { return null; } return Arrays.stream(statuses) .filter(fileStatus -> !isHiddenFile(fileStatus)) .toArray(FileStatus[]::new); }
List file status without hidden files.
listStatusWithoutHidden
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
Apache-2.0
public static List<Tuple2<LinkedHashMap<String, String>, Path>> searchPartSpecAndPaths( FileSystem fs, Path path, int partitionNumber) { FileStatus[] generatedParts = getFileStatusRecurse(path, partitionNumber, fs); List<Tuple2<LinkedHashMap<String, String>, Path>> ret = new ArrayList<>(); for (FileStatus part : generatedParts) { // ignore hidden file if (isHiddenFile(part)) { continue; } ret.add(new Tuple2<>(extractPartitionSpecFromPath(part.getPath()), part.getPath())); } return ret; }
Search all partitions in this path. @param path search path. @param partitionNumber partition number, it will affect path structure. @return all partition specs to its path.
searchPartSpecAndPaths
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
Apache-2.0
public static GenericRowData fillPartitionValueForRecord( String[] fieldNames, DataType[] fieldTypes, int[] selectFields, List<String> partitionKeys, Path path, String defaultPartValue) { GenericRowData record = new GenericRowData(selectFields.length); LinkedHashMap<String, String> partSpec = PartitionPathUtils.extractPartitionSpecFromPath(path); for (int i = 0; i < selectFields.length; i++) { int selectField = selectFields[i]; String name = fieldNames[selectField]; if (partitionKeys.contains(name)) { String value = partSpec.get(name); value = defaultPartValue.equals(value) ? null : value; record.setField( i, PartitionPathUtils.convertStringToInternalValue( value, fieldTypes[selectField])); } } return record; }
Extract partition value from path and fill to record. @param fieldNames record field names. @param fieldTypes record field types. @param selectFields the selected fields. @param partitionKeys the partition field names. @param path the file path that the partition located. @param defaultPartValue default value of partition field. @return the filled record.
fillPartitionValueForRecord
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/PartitionPathUtils.java
Apache-2.0
public static String generateRuntimeName(Class<?> clazz, String[] fields) { String className = clazz.getSimpleName(); if (null == fields) { return className + "(*)"; } else { return className + "(" + String.join(", ", fields) + ")"; } }
Returns the table connector name used for logging and web UI.
generateRuntimeName
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableConnectorUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableConnectorUtils.java
Apache-2.0
public static TableSchema getPhysicalSchema(TableSchema tableSchema) { return getTableSchema(tableSchema, TableColumn::isPhysical); }
Return {@link TableSchema} which consists of all physical columns. That means, the computed columns and metadata columns are filtered out. <p>Readers(or writers) such as {@link TableSource} and {@link TableSink} should use this physical schema to generate {@link TableSource#getProducedDataType()} and {@link TableSource#getTableSchema()} rather than using the raw TableSchema which may contains additional columns.
getPhysicalSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
public static TableSchema getPersistedSchema(TableSchema tableSchema) { return getTableSchema(tableSchema, TableColumn::isPersisted); }
Return {@link TableSchema} which consists of all persisted columns. That means, the virtual computed columns and metadata columns are filtered out. <p>Its difference from {@link TableSchemaUtils#getPhysicalSchema(TableSchema)} is that it includes of all physical columns and metadata columns without virtual keyword.
getPersistedSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
private static TableSchema getTableSchema( TableSchema tableSchema, Function<TableColumn, Boolean> columnFilter) { Preconditions.checkNotNull(tableSchema); TableSchema.Builder builder = new TableSchema.Builder(); tableSchema .getTableColumns() .forEach( tableColumn -> { if (columnFilter.apply(tableColumn)) { builder.field(tableColumn.getName(), tableColumn.getType()); } }); tableSchema .getPrimaryKey() .ifPresent( uniqueConstraint -> builder.primaryKey( uniqueConstraint.getName(), uniqueConstraint.getColumns().toArray(new String[0]))); return builder.build(); }
Build a {@link TableSchema} with columns filtered by a given columnFilter.
getTableSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
public static boolean containsPhysicalColumnsOnly(TableSchema schema) { Preconditions.checkNotNull(schema); return schema.getTableColumns().stream().allMatch(TableColumn::isPhysical); }
Returns true if there are only physical columns in the given {@link TableSchema}.
containsPhysicalColumnsOnly
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
public static TableSchema checkOnlyPhysicalColumns(TableSchema schema) { Preconditions.checkNotNull(schema); if (!containsPhysicalColumnsOnly(schema)) { throw new ValidationException( "The given schema contains non-physical columns, schema: \n" + schema.toString()); } return schema; }
Throws an exception if the given {@link TableSchema} contains any non-physical columns.
checkOnlyPhysicalColumns
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
public static int[] getPrimaryKeyIndices(TableSchema schema) { if (schema.getPrimaryKey().isPresent()) { List<String> fieldNames = DataTypeUtils.flattenToNames(schema.toPhysicalRowDataType()); return schema.getPrimaryKey().get().getColumns().stream() .mapToInt(fieldNames::indexOf) .toArray(); } else { return new int[0]; } }
Returns the field indices of primary key in the physical columns of this schema (not include computed columns or metadata columns).
getPrimaryKeyIndices
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
public static ResolvedSchema removeTimeAttributeFromResolvedSchema( ResolvedSchema resolvedSchema) { return new ResolvedSchema( resolvedSchema.getColumns().stream() .map(col -> col.copy(DataTypeUtils.removeTimeAttribute(col.getDataType()))) .collect(Collectors.toList()), resolvedSchema.getWatermarkSpecs(), resolvedSchema.getPrimaryKey().orElse(null)); }
Removes time attributes from the {@link ResolvedSchema}.
removeTimeAttributeFromResolvedSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
public static TableSchema.Builder builderWithGivenSchema(TableSchema oriSchema) { TableSchema.Builder builder = builderWithGivenColumns(oriSchema.getTableColumns()); // Copy watermark specification. for (WatermarkSpec wms : oriSchema.getWatermarkSpecs()) { builder.watermark( wms.getRowtimeAttribute(), wms.getWatermarkExpr(), wms.getWatermarkExprOutputType()); } // Copy primary key constraint. oriSchema .getPrimaryKey() .map( pk -> builder.primaryKey( pk.getName(), pk.getColumns().toArray(new String[0]))); return builder; }
Creates a builder with given table schema. @param oriSchema Original schema @return the builder with all the information from the given schema
builderWithGivenSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
public static TableSchema dropConstraint(TableSchema oriSchema, String constraintName) { // Validate the constraint name is valid. Optional<UniqueConstraint> uniqueConstraintOpt = oriSchema.getPrimaryKey(); if (!uniqueConstraintOpt.isPresent() || !uniqueConstraintOpt.get().getName().equals(constraintName)) { throw new ValidationException( String.format("Constraint %s to drop does not exist", constraintName)); } TableSchema.Builder builder = builderWithGivenColumns(oriSchema.getTableColumns()); // Copy watermark specification. for (WatermarkSpec wms : oriSchema.getWatermarkSpecs()) { builder.watermark( wms.getRowtimeAttribute(), wms.getWatermarkExpr(), wms.getWatermarkExprOutputType()); } return builder.build(); }
Creates a new schema but drop the constraint with given name.
dropConstraint
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
private static TableSchema.Builder builderWithGivenColumns(List<TableColumn> originalColumns) { final TableSchema.Builder builder = TableSchema.builder(); for (TableColumn column : originalColumns) { builder.add(column); } return builder; }
Returns the builder with copied columns info from the given table schema.
builderWithGivenColumns
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TableSchemaUtils.java
Apache-2.0
public static int[] computePhysicalIndices( List<TableColumn> logicalColumns, DataType physicalType, Function<String, String> nameRemapping) { Map<TableColumn, Integer> physicalIndexLookup = computePhysicalIndices(logicalColumns.stream(), physicalType, nameRemapping); return logicalColumns.stream().mapToInt(physicalIndexLookup::get).toArray(); }
Computes indices of physical fields corresponding to the selected logical fields of a {@link TableSchema}. @param logicalColumns Logical columns that describe the physical type. @param physicalType Physical type to retrieve indices from. @param nameRemapping Additional remapping of a logical to a physical field name. TimestampExtractor works with logical names, but accesses physical fields @return Physical indices of logical fields selected with {@code projectedLogicalFields} mask.
computePhysicalIndices
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TypeMappingUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TypeMappingUtils.java
Apache-2.0
public static int[] computePhysicalIndicesOrTimeAttributeMarkers( TableSource<?> tableSource, List<TableColumn> logicalColumns, boolean streamMarkers, Function<String, String> nameRemapping) { Optional<String> proctimeAttribute = getProctimeAttribute(tableSource); List<String> rowtimeAttributes = getRowtimeAttributes(tableSource); List<TableColumn> columnsWithoutTimeAttributes = logicalColumns.stream() .filter( col -> !rowtimeAttributes.contains(col.getName()) && proctimeAttribute .map(attr -> !attr.equals(col.getName())) .orElse(true)) .collect(Collectors.toList()); Map<TableColumn, Integer> columnsToPhysicalIndices = TypeMappingUtils.computePhysicalIndices( columnsWithoutTimeAttributes.stream(), tableSource.getProducedDataType(), nameRemapping); return logicalColumns.stream() .mapToInt( logicalColumn -> { if (proctimeAttribute .map(attr -> attr.equals(logicalColumn.getName())) .orElse(false)) { verifyTimeAttributeType(logicalColumn, "Proctime"); if (streamMarkers) { return TimeIndicatorTypeInfo.PROCTIME_STREAM_MARKER; } else { return TimeIndicatorTypeInfo.PROCTIME_BATCH_MARKER; } } else if (rowtimeAttributes.contains(logicalColumn.getName())) { verifyTimeAttributeType(logicalColumn, "Rowtime"); if (streamMarkers) { return TimeIndicatorTypeInfo.ROWTIME_STREAM_MARKER; } else { return TimeIndicatorTypeInfo.ROWTIME_BATCH_MARKER; } } else { return columnsToPhysicalIndices.get(logicalColumn); } }) .toArray(); }
Computes indices of physical fields corresponding to the selected logical fields of a {@link TableSchema}. <p>It puts markers (idx < 0) for time attributes extracted from {@link DefinedProctimeAttribute} and {@link DefinedRowtimeAttributes} <p>{@link TypeMappingUtils#computePhysicalIndices(List, DataType, Function)} should be preferred. The time attribute markers should not be used anymore. @param tableSource Used to extract {@link DefinedRowtimeAttributes}, {@link DefinedProctimeAttribute} and {@link TableSource#getProducedDataType()}. @param logicalColumns Logical columns that describe the physical type. @param streamMarkers If true puts stream markers otherwise puts batch markers. @param nameRemapping Additional remapping of a logical to a physical field name. TimestampExtractor works with logical names, but accesses physical fields @return Physical indices of logical fields selected with {@code projectedLogicalFields} mask.
computePhysicalIndicesOrTimeAttributeMarkers
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TypeMappingUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TypeMappingUtils.java
Apache-2.0
private static List<String> getRowtimeAttributes(TableSource<?> tableSource) { if (tableSource instanceof DefinedRowtimeAttributes) { return ((DefinedRowtimeAttributes) tableSource) .getRowtimeAttributeDescriptors().stream() .map(RowtimeAttributeDescriptor::getAttributeName) .collect(Collectors.toList()); } else { return Collections.emptyList(); } }
Returns a list with all rowtime attribute names of the [[TableSource]].
getRowtimeAttributes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TypeMappingUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TypeMappingUtils.java
Apache-2.0
private static Optional<String> getProctimeAttribute(TableSource<?> tableSource) { if (tableSource instanceof DefinedProctimeAttribute) { return Optional.ofNullable( ((DefinedProctimeAttribute) tableSource).getProctimeAttribute()); } else { return Optional.empty(); } }
Returns the proctime attribute of the [[TableSource]] if it is defined.
getProctimeAttribute
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TypeMappingUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/TypeMappingUtils.java
Apache-2.0
static TableauStyle tableauWithTypeInferredColumnWidths( ResolvedSchema schema, RowDataToStringConverter converter, int maxColumnWidth, boolean printNullAsEmpty, boolean printRowKind) { Preconditions.checkArgument(maxColumnWidth > 0, "maxColumnWidth should be greater than 0"); return new TableauStyle( schema, converter, TableauStyle.columnWidthsByType( schema.getColumns(), maxColumnWidth, printNullAsEmpty, printRowKind), maxColumnWidth, printNullAsEmpty, printRowKind); }
Create a new {@link TableauStyle} using column widths computed from the type. @param schema the schema of the data to print @param converter the converter to use to convert field values to string @param maxColumnWidth Max column width @param printNullAsEmpty A flag to indicate whether null should be printed as empty string more than {@code <NULL>} @param printRowKind A flag to indicate whether print row kind info.
tableauWithTypeInferredColumnWidths
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/print/PrintStyle.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/print/PrintStyle.java
Apache-2.0
static TableauStyle tableauWithDataInferredColumnWidths( ResolvedSchema schema, RowDataToStringConverter converter, int maxColumnWidth, boolean printNullAsEmpty, boolean printRowKind) { Preconditions.checkArgument(maxColumnWidth > 0, "maxColumnWidth should be greater than 0"); return new TableauStyle( schema, converter, null, maxColumnWidth, printNullAsEmpty, printRowKind); }
Like {@link #tableauWithTypeInferredColumnWidths(ResolvedSchema, RowDataToStringConverter, int, boolean, boolean)}, but uses the data to infer the column size. <p><b>NOTE:</b> please make sure the data to print is small enough to be stored in java heap memory.
tableauWithDataInferredColumnWidths
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/print/PrintStyle.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/print/PrintStyle.java
Apache-2.0
static TableauStyle tableauWithDataInferredColumnWidths( ResolvedSchema schema, RowDataToStringConverter converter) { return PrintStyle.tableauWithDataInferredColumnWidths( schema, converter, DEFAULT_MAX_COLUMN_WIDTH, false, false); }
Like {@link #tableauWithDataInferredColumnWidths(ResolvedSchema, RowDataToStringConverter, int, boolean, boolean)}, but using default values. <p><b>NOTE:</b> please make sure the data to print is small enough to be stored in java heap memory.
tableauWithDataInferredColumnWidths
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/print/PrintStyle.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/utils/print/PrintStyle.java
Apache-2.0
public T getRefreshHandler() { return refreshHandler; }
Return {@link RefreshHandler} from corresponding {@link WorkflowScheduler} which provides meta info to points to the refresh workflow in scheduler service.
getRefreshHandler
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/workflow/DeleteRefreshWorkflow.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/workflow/DeleteRefreshWorkflow.java
Apache-2.0
@Override public T getRefreshHandler() { return refreshHandler; }
{@link ModifyRefreshWorkflow} provides the related information to suspend refresh workflow of {@link CatalogMaterializedTable}.
getRefreshHandler
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/workflow/SuspendRefreshWorkflow.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/workflow/SuspendRefreshWorkflow.java
Apache-2.0
public List<String> getNames() { return names; }
Returns a list of column names. <p>For example, it returns [a, b, c] for function calls like {@code DESCRIPTOR(a INT, b STRING, c BOOLEAN)} or {@code DESCRIPTOR(a, b, c)}.
getNames
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/types/ColumnList.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/types/ColumnList.java
Apache-2.0
private static ResolvedSchema getResolvedSchema(CatalogBaseTable catalogBaseTable) { if (catalogBaseTable instanceof ResolvedCatalogBaseTable) { return ((ResolvedCatalogBaseTable<?>) catalogBaseTable).getResolvedSchema(); } else { return catalogBaseTable.getUnresolvedSchema().resolve(new TestSchemaResolver()); } }
We unify it to ResolvedSchema for comparing. @param catalogBaseTable The target catalog base table. @return The resolved schema.
getResolvedSchema
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/catalog/CatalogTestUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/catalog/CatalogTestUtil.java
Apache-2.0
@Override public String asSummaryString() { return "Test RefreshHandler"; }
Test refresh handler for discovery testing.
asSummaryString
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/factories/workflow/TestWorkflowSchedulerFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/factories/workflow/TestWorkflowSchedulerFactory.java
Apache-2.0
@Test void testEmptyProjection() { TableSource<?> source = createTableSource(TableSchema.builder().field("f0", DataTypes.INT()).build()); assumeThat(source).isInstanceOf(ProjectableTableSource.class); ProjectableTableSource<?> projectableTableSource = (ProjectableTableSource<?>) source; TableSource<?> newTableSource = projectableTableSource.projectFields(new int[0]); assertThat(newTableSource.explainSource()).isNotEqualTo(source.explainSource()); }
Checks that {@link ProjectableTableSource#projectFields(int[])} returns a table source with a different {@link TableSource#explainSource()} even when filtering out all fields. <p>Required by {@code PushProjectIntoTableSourceScanRule}.
testEmptyProjection
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/sources/TableSourceTestBase.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/sources/TableSourceTestBase.java
Apache-2.0
@Test void testProjectionReturnsDifferentSource() { TableSource<?> source = createTableSource( TableSchema.builder() .field("f0", DataTypes.INT()) .field("f1", DataTypes.STRING()) .field("f2", DataTypes.BIGINT()) .build()); assumeThat(source).isInstanceOf(ProjectableTableSource.class); ProjectableTableSource<?> projectableTableSource = (ProjectableTableSource<?>) source; TableSource<?> newTableSource = projectableTableSource.projectFields(new int[] {0, 2}); assertThat(newTableSource.explainSource()).isNotEqualTo(source.explainSource()); assertThat(newTableSource.getTableSchema()).isEqualTo(source.getTableSchema()); }
Checks that {@link ProjectableTableSource#projectFields(int[])} returns a table source with a different {@link TableSource#explainSource()}, but same schema. <p>Required by {@code PushProjectIntoTableSourceScanRule}.
testProjectionReturnsDifferentSource
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/sources/TableSourceTestBase.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/sources/TableSourceTestBase.java
Apache-2.0
public void eval() { // nothing to do }
Table function that uses a big tuple with constructor defined field order.
eval
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/DataTypeExtractorTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/DataTypeExtractorTest.java
Apache-2.0
@Override public void method( Long generic, CompletableFuture<Long> genericFuture, List<CompletableFuture<Long>> listOfGenericFuture, Long[] array) { // don't initialize the local variable String localVariable; if (generic == null) { localVariable = "null"; } else if (generic < 0) { localVariable = "negative"; } else if (generic > 0) { localVariable = "positive"; } else { localVariable = "zero"; } // use the local variable System.err.println("localVariable: " + localVariable); }
A test function that contains multi local variable blocks without initialization at first.
method
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/ExtractionUtilsTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/ExtractionUtilsTest.java
Apache-2.0
@Override public void method( Long generic, CompletableFuture<Long> genericFuture, List<CompletableFuture<Long>> listOfGenericFuture, Long[] array) { // initialize the local variable String localVariable = ""; if (generic == null) { localVariable = "null"; } else if (generic < 0) { localVariable = "negative"; } else if (generic > 0) { localVariable = "positive"; } else { localVariable = "zero"; } // use the local variable System.err.println("localVariable: " + localVariable); }
A test function that contains multi local variable blocks with initialization at first.
method
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/ExtractionUtilsTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/ExtractionUtilsTest.java
Apache-2.0
@SuppressWarnings("unused") public void method( Long generic, // this `result` has the same name as the class member variable in // `CompletableFuture` Object result, CompletableFuture<Long> genericFuture, List<CompletableFuture<Long>> listOfGenericFuture, Long[] array) {}
A test function where one function parameter has the same name as a class member variable within another complex function parameter.
method
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/ExtractionUtilsTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/extraction/ExtractionUtilsTest.java
Apache-2.0
@Override protected Stream<TestSpec> testData() { return Stream.of( // missing strategy with arbitrary argument TypeStrategiesTestBase.TestSpec.forStrategy(MISSING) .inputTypes(DataTypes.INT()) .expectErrorMessage( "Could not infer an output type for the given arguments."), // valid explicit TypeStrategiesTestBase.TestSpec.forStrategy(explicit(DataTypes.BIGINT())) .inputTypes() .expectDataType(DataTypes.BIGINT()), // infer from input TypeStrategiesTestBase.TestSpec.forStrategy(argument(0)) .inputTypes(DataTypes.INT(), DataTypes.STRING()) .expectDataType(DataTypes.INT()), // infer from not existing input TypeStrategiesTestBase.TestSpec.forStrategy(argument(0)) .inputTypes() .expectErrorMessage( "Could not infer an output type for the given arguments."), // invalid return type TypeStrategiesTestBase.TestSpec.forStrategy(explicit(DataTypes.NULL())) .inputTypes() .expectErrorMessage( "Could not infer an output type for the given arguments. Untyped NULL received."), TypeStrategiesTestBase.TestSpec.forStrategy( "First type strategy", TypeStrategies.first( (callContext) -> Optional.empty(), explicit(DataTypes.INT()))) .inputTypes() .expectDataType(DataTypes.INT()), TypeStrategiesTestBase.TestSpec.forStrategy( "Match root type strategy", TypeStrategies.matchFamily(0, LogicalTypeFamily.NUMERIC)) .inputTypes(DataTypes.INT()) .expectDataType(DataTypes.INT()), TypeStrategiesTestBase.TestSpec.forStrategy( "Invalid match root type strategy", TypeStrategies.matchFamily(0, LogicalTypeFamily.NUMERIC)) .inputTypes(DataTypes.BOOLEAN()) .expectErrorMessage( "Could not infer an output type for the given arguments."), TypeStrategiesTestBase.TestSpec.forStrategy( "Cascading to nullable type", nullableIfArgs(explicit(DataTypes.BOOLEAN().notNull()))) .inputTypes(DataTypes.BIGINT().notNull(), DataTypes.VARCHAR(2).nullable()) .expectDataType(DataTypes.BOOLEAN().nullable()), TypeStrategiesTestBase.TestSpec.forStrategy( "Cascading to not null type", 
nullableIfArgs(explicit(DataTypes.BOOLEAN().nullable()))) .inputTypes(DataTypes.BIGINT().notNull(), DataTypes.VARCHAR(2).notNull()) .expectDataType(DataTypes.BOOLEAN().notNull()), TypeStrategiesTestBase.TestSpec.forStrategy( "Cascading to not null type but only consider first argument", nullableIfArgs( ConstantArgumentCount.to(0), explicit(DataTypes.BOOLEAN().nullable()))) .inputTypes(DataTypes.BIGINT().notNull(), DataTypes.VARCHAR(2).nullable()) .expectDataType(DataTypes.BOOLEAN().notNull()), TypeStrategiesTestBase.TestSpec.forStrategy( "Cascading to null type but only consider first two argument", nullableIfArgs( ConstantArgumentCount.to(1), explicit(DataTypes.BOOLEAN().nullable()))) .inputTypes(DataTypes.BIGINT().notNull(), DataTypes.VARCHAR(2).nullable()) .expectDataType(DataTypes.BOOLEAN().nullable()), TypeStrategiesTestBase.TestSpec.forStrategy( "Cascading to not null type but only consider the second and third argument", nullableIfArgs( ConstantArgumentCount.between(1, 2), explicit(DataTypes.BOOLEAN().nullable()))) .inputTypes( DataTypes.BIGINT().nullable(), DataTypes.BIGINT().notNull(), DataTypes.VARCHAR(2).notNull()) .expectDataType(DataTypes.BOOLEAN().notNull()), TypeStrategiesTestBase.TestSpec.forStrategy( "Cascading to not null because one argument is not null", nullableIfAllArgs(TypeStrategies.COMMON)) .inputTypes(DataTypes.VARCHAR(2).notNull(), DataTypes.VARCHAR(2).nullable()) .expectDataType(DataTypes.VARCHAR(2).notNull()), TypeStrategiesTestBase.TestSpec.forStrategy( "Cascading to nullable because all args are nullable", nullableIfAllArgs(TypeStrategies.COMMON)) .inputTypes( DataTypes.VARCHAR(2).nullable(), DataTypes.VARCHAR(2).nullable()) .expectDataType(DataTypes.VARCHAR(2).nullable()), TypeStrategiesTestBase.TestSpec.forStrategy( "Find a common type", TypeStrategies.COMMON) .inputTypes( DataTypes.INT(), DataTypes.TINYINT().notNull(), DataTypes.DECIMAL(20, 10)) .expectDataType(DataTypes.DECIMAL(20, 10)), 
TypeStrategiesTestBase.TestSpec.forStrategy( "Find a common type of selected arguments", TypeStrategies.commonRange(ConstantArgumentCount.from(1))) .inputTypes(DataTypes.INT(), DataTypes.SMALLINT(), DataTypes.TINYINT()) .expectDataType(DataTypes.SMALLINT()), TypeStrategiesTestBase.TestSpec.forStrategy( "Find a common type of selected arguments", TypeStrategies.commonRange(ConstantArgumentCount.between(1, 2))) .inputTypes( DataTypes.VARCHAR(10), DataTypes.CHAR(3), DataTypes.VARCHAR(4), DataTypes.CHAR(7)) .expectDataType(DataTypes.VARCHAR(4)), TypeStrategiesTestBase.TestSpec.forStrategy( "Find a common type of selected arguments", TypeStrategies.commonRange(ConstantArgumentCount.to(1))) .inputTypes(DataTypes.TINYINT(), DataTypes.SMALLINT(), DataTypes.INT()) .expectDataType(DataTypes.SMALLINT()), TypeStrategiesTestBase.TestSpec.forStrategy( "Convert to varying string", varyingString(explicit(DataTypes.CHAR(12).notNull()))) .inputTypes(DataTypes.CHAR(12).notNull()) .expectDataType(DataTypes.VARCHAR(12).notNull()), TypeStrategiesTestBase.TestSpec.forStrategy( "Average with grouped aggregation", TypeStrategies.aggArg0(LogicalTypeMerging::findAvgAggType, true)) .inputTypes(DataTypes.INT().notNull()) .calledWithGroupedAggregation() .expectDataType(DataTypes.INT().notNull()), TypeStrategiesTestBase.TestSpec.forStrategy( "Average without grouped aggregation", TypeStrategies.aggArg0(LogicalTypeMerging::findAvgAggType, true)) .inputTypes(DataTypes.INT().notNull()) .expectDataType(DataTypes.INT()), // PercentileTypeStrategy TypeStrategiesTestBase.TestSpec.forStrategy(PERCENTILE) .inputTypes(DataTypes.INT(), DataTypes.DOUBLE()) .expectDataType(DataTypes.DOUBLE()), TypeStrategiesTestBase.TestSpec.forStrategy(PERCENTILE) .inputTypes(DataTypes.INT(), DataTypes.ARRAY(DataTypes.DECIMAL(5, 2))) .expectDataType(DataTypes.ARRAY(DataTypes.DOUBLE())), // LeadLagStrategy TypeStrategiesTestBase.TestSpec.forStrategy( "Expression not null", SpecificTypeStrategies.LEAD_LAG) 
.inputTypes(DataTypes.INT().notNull(), DataTypes.BIGINT()) .expectDataType(DataTypes.INT()), TypeStrategiesTestBase.TestSpec.forStrategy( "Default value not null", SpecificTypeStrategies.LEAD_LAG) .inputTypes( DataTypes.STRING(), DataTypes.BIGINT(), DataTypes.STRING().notNull()) .expectDataType(DataTypes.STRING().notNull()), TypeStrategiesTestBase.TestSpec.forStrategy( "Default value nullable", SpecificTypeStrategies.LEAD_LAG) .inputTypes(DataTypes.STRING(), DataTypes.BIGINT(), DataTypes.STRING()) .expectDataType(DataTypes.STRING())); }
Tests for built-in {@link TypeStrategies}.
testData
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/inference/TypeStrategiesTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/inference/TypeStrategiesTest.java
Apache-2.0
@Override protected Stream<TestSpec> testData() { return Stream.of( TestSpec.forStrategy("Find a decimal sum", SpecificTypeStrategies.DECIMAL_PLUS) .inputTypes(DataTypes.DECIMAL(5, 4), DataTypes.DECIMAL(3, 2)) .expectDataType(DataTypes.DECIMAL(6, 4).notNull()), TestSpec.forStrategy( "Find a decimal quotient", SpecificTypeStrategies.DECIMAL_DIVIDE) .inputTypes(DataTypes.DECIMAL(5, 4), DataTypes.DECIMAL(3, 2)) .expectDataType(DataTypes.DECIMAL(11, 8).notNull()), TestSpec.forStrategy("Find a decimal product", SpecificTypeStrategies.DECIMAL_TIMES) .inputTypes(DataTypes.DECIMAL(5, 4), DataTypes.DECIMAL(3, 2)) .expectDataType(DataTypes.DECIMAL(9, 6).notNull()), TestSpec.forStrategy("Find a decimal modulo", SpecificTypeStrategies.DECIMAL_MOD) .inputTypes(DataTypes.DECIMAL(5, 4), DataTypes.DECIMAL(3, 2)) .expectDataType(DataTypes.DECIMAL(5, 4).notNull())); }
Tests for decimal {@link TypeStrategy TypeStrategies}.
testData
java
apache/flink
flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/inference/strategies/DecimalTypeStrategyTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/inference/strategies/DecimalTypeStrategyTest.java
Apache-2.0
public static CalciteSchema asRootSchema(Schema root) { return new SimpleCalciteSchema(null, root, ""); }
Creates a {@link CalciteSchema} with a given {@link Schema} as the root. @param root schema to use as a root schema @return calcite schema with given schema as the root
asRootSchema
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/calcite/jdbc/CalciteSchemaBuilder.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/calcite/jdbc/CalciteSchemaBuilder.java
Apache-2.0
@Override public Snapshot copy(RelTraitSet traitSet, RelNode input, RexNode period) { return new LogicalSnapshot(getCluster(), traitSet, hints, input, period); }
Creates a LogicalSnapshot. <p>Use {@link #create} unless you know what you're doing. @param cluster Cluster that this relational expression belongs to @param traitSet The traits of this relational expression @param input Input relational expression @param period Timestamp expression which as the table was at the given time in the past
copy
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/logical/LogicalSnapshot.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/logical/LogicalSnapshot.java
Apache-2.0
public RelOptPredicateList getPredicates(Project project, RelMetadataQuery mq) { final RelNode input = project.getInput(); final RexBuilder rexBuilder = project.getCluster().getRexBuilder(); final RelOptPredicateList inputInfo = mq.getPulledUpPredicates(input); final List<RexNode> projectPullUpPredicates = new ArrayList<>(); ImmutableBitSet.Builder columnsMappedBuilder = ImmutableBitSet.builder(); Mapping m = Mappings.create( MappingType.PARTIAL_FUNCTION, input.getRowType().getFieldCount(), project.getRowType().getFieldCount()); for (Ord<RexNode> expr : Ord.zip(project.getProjects())) { if (expr.e instanceof RexInputRef) { int sIdx = ((RexInputRef) expr.e).getIndex(); m.set(sIdx, expr.i); columnsMappedBuilder.set(sIdx); // Project can also generate constants. We need to include them. } else if (RexLiteral.isNullLiteral(expr.e)) { projectPullUpPredicates.add( rexBuilder.makeCall( SqlStdOperatorTable.IS_NULL, rexBuilder.makeInputRef(project, expr.i))); } else if (RexUtil.isConstant(expr.e)) { final List<RexNode> args = ImmutableList.of(rexBuilder.makeInputRef(project, expr.i), expr.e); final SqlOperator op = args.get(0).getType().isNullable() || args.get(1).getType().isNullable() ? SqlStdOperatorTable.IS_NOT_DISTINCT_FROM : SqlStdOperatorTable.EQUALS; projectPullUpPredicates.add(rexBuilder.makeCall(op, args)); } } // Go over childPullUpPredicates. If a predicate only contains columns in // 'columnsMapped' construct a new predicate based on mapping. final ImmutableBitSet columnsMapped = columnsMappedBuilder.build(); for (RexNode r : inputInfo.pulledUpPredicates) { RexNode r2 = projectPredicate(rexBuilder, input, r, columnsMapped); if (!r2.isAlwaysTrue()) { r2 = r2.accept(new RexPermuteInputsShuttle(m, input)); projectPullUpPredicates.add(r2); } } return RelOptPredicateList.of(rexBuilder, projectPullUpPredicates); }
Infers predicates for a project. <ol> <li>create a mapping from input to projection. Map only positions that directly reference an input column. <li>Expressions that only contain above columns are retained in the Project's pullExpressions list. <li>For e.g. expression 'a + e = 9' below will not be pulled up because 'e' is not in the projection list. <blockquote> <pre> inputPullUpExprs: {a &gt; 7, b + c &lt; 10, a + e = 9} projectionExprs: {a, b, c, e / 2} projectionPullupExprs: {a &gt; 7, b + c &lt; 10} </pre> </blockquote> </ol>
getPredicates
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
Apache-2.0
private static RexNode projectPredicate( final RexBuilder rexBuilder, RelNode input, RexNode r, ImmutableBitSet columnsMapped) { ImmutableBitSet rCols = RelOptUtil.InputFinder.bits(r); if (columnsMapped.contains(rCols)) { // All required columns are present. No need to weaken. return r; } if (columnsMapped.intersects(rCols)) { final List<RexNode> list = new ArrayList<>(); for (int c : columnsMapped.intersect(rCols)) { if (input.getRowType().getFieldList().get(c).getType().isNullable() && Strong.isNull(r, ImmutableBitSet.of(c))) { list.add( rexBuilder.makeCall( SqlStdOperatorTable.IS_NOT_NULL, rexBuilder.makeInputRef(input, c))); } } if (!list.isEmpty()) { return RexUtil.composeDisjunction(rexBuilder, list); } } // Cannot weaken to anything non-trivial return rexBuilder.makeLiteral(true); }
Converts a predicate on a particular set of columns into a predicate on a subset of those columns, weakening if necessary. <p>If not possible to simplify, returns {@code true}, which is the weakest possible predicate. <p>Examples: <ol> <li>The predicate {@code $7 = $9} on columns [7] becomes {@code $7 is not null} <li>The predicate {@code $7 = $9 + $11} on columns [7, 9] becomes {@code $7 is not null or $9 is not null} <li>The predicate {@code $7 = $9 and $9 = 5} on columns [7] becomes {@code $7 = 5} <li>The predicate {@code $7 = $9 and ($9 = $1 or $9 = $2) and $1 > 3 and $2 > 10} on columns [7] becomes {@code $7 > 3} </ol> <p>We currently only handle examples 1 and 2. @param rexBuilder Rex builder @param input Input relational expression @param r Predicate expression @param columnsMapped Columns which the final predicate can reference @return Predicate expression narrowed to reference only certain columns
projectPredicate
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
Apache-2.0
public RelOptPredicateList getPredicates(Filter filter, RelMetadataQuery mq) { final RelNode input = filter.getInput(); final RexBuilder rexBuilder = filter.getCluster().getRexBuilder(); final RelOptPredicateList inputInfo = mq.getPulledUpPredicates(input); // Simplify condition using RexSimplify. final RexNode condition = filter.getCondition(); final RexExecutor executor = Util.first(filter.getCluster().getPlanner().getExecutor(), RexUtil.EXECUTOR); final RexSimplify simplify = new RexSimplify(rexBuilder, RelOptPredicateList.EMPTY, executor); final RexNode simplifiedCondition = simplify.simplify(condition); return Util.first(inputInfo, RelOptPredicateList.EMPTY) .union( rexBuilder, RelOptPredicateList.of( rexBuilder, RexUtil.retainDeterministic( RelOptUtil.conjunctions(simplifiedCondition)))); }
Add the Filter condition to the pulledPredicates list from the input.
getPredicates
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
Apache-2.0
boolean allGroupSetsOverlap(ImmutableBitSet predicateColumns, Aggregate aggregate) { // Consider this example: // select deptno, sal, count(*) // from emp where deptno = 10 // group by rollup(sal, deptno) // Because of the ROLLUP, we cannot assume // that deptno = 10 in the result: deptno may be NULL as well. for (ImmutableBitSet groupSet : aggregate.groupSets) { if (!groupSet.contains(predicateColumns)) { return false; } } return true; }
Check whether the fields specified by the predicateColumns appear in all the groupSets of the aggregate. @param predicateColumns A list of columns used in a pulled predicate. @param aggregate An aggregation operation. @return Whether all columns appear in all groupsets.
allGroupSetsOverlap
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
Apache-2.0
public RelOptPredicateList getPredicates(Aggregate agg, RelMetadataQuery mq) { final RelNode input = agg.getInput(); final RexBuilder rexBuilder = agg.getCluster().getRexBuilder(); final RelOptPredicateList inputInfo = mq.getPulledUpPredicates(input); final List<RexNode> aggPullUpPredicates = new ArrayList<>(); ImmutableBitSet groupKeys = agg.getGroupSet(); if (groupKeys.isEmpty()) { // "GROUP BY ()" can convert an empty relation to a non-empty relation, so // it is not valid to pull up predicates. In particular, consider the // predicate "false": it is valid on all input rows (trivially - there are // no rows!) but not on the output (there is one row). return RelOptPredicateList.EMPTY; } Mapping m = Mappings.create( MappingType.PARTIAL_FUNCTION, input.getRowType().getFieldCount(), agg.getRowType().getFieldCount()); int i = 0; for (int j : groupKeys) { m.set(j, i++); } for (RexNode r : inputInfo.pulledUpPredicates) { ImmutableBitSet rCols = RelOptUtil.InputFinder.bits(r); // FLINK MODIFICATION BEGIN if (groupKeys.contains(rCols) && this.allGroupSetsOverlap(rCols, agg)) { // FLINK MODIFICATION END r = r.accept(new RexPermuteInputsShuttle(m, input)); aggPullUpPredicates.add(r); } } return RelOptPredicateList.of(rexBuilder, aggPullUpPredicates); }
Infers predicates for an Aggregate. <p>Pulls up predicates that only contains references to columns in the GroupSet. For e.g. <blockquote> <pre> inputPullUpExprs : { a &gt; 7, b + c &lt; 10, a + e = 9} groupSet : { a, b} pulledUpExprs : { a &gt; 7} </pre> </blockquote>
getPredicates
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/calcite/rel/metadata/RelMdPredicates.java
Apache-2.0