Dataset schema: code (string, 25 to 201k chars); docstring (string, 19 to 96.2k chars); func_name (string, 0 to 235 chars); language (string, 1 distinct value); repo (string, 8 to 51 chars); path (string, 11 to 314 chars); url (string, 62 to 377 chars); license (string, 7 distinct values)
@Override public Optional<RexNode> convert(CallExpression call, ConvertContext context) { final FunctionDefinition definition = call.getFunctionDefinition(); // built-in functions without implementation are handled separately if (definition instanceof BuiltInFunctionDefinition) { final BuiltInFunctionDefinition builtInFunction = (BuiltInFunctionDefinition) definition; if (!builtInFunction.hasRuntimeImplementation()) { return Optional.empty(); } } final TypeInference typeInference = definition.getTypeInference(context.getDataTypeFactory()); if (typeInference.getOutputTypeStrategy() == TypeStrategies.MISSING) { return Optional.empty(); } switch (definition.getKind()) { case SCALAR: case ASYNC_SCALAR: case TABLE: final List<RexNode> args = call.getChildren().stream() .map(context::toRexNode) .collect(Collectors.toList()); final BridgingSqlFunction sqlFunction = BridgingSqlFunction.of( context.getDataTypeFactory(), context.getTypeFactory(), context.getRexFactory(), SqlKind.OTHER_FUNCTION, ContextResolvedFunction.fromCallExpression(call), typeInference); return Optional.of(context.getRelBuilder().call(sqlFunction, args)); default: return Optional.empty(); } }
A call expression converter rule that converts calls to user-defined functions.
convert
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/FunctionDefinitionConvertRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/FunctionDefinitionConvertRule.java
Apache-2.0
@Override public Optional<RexNode> convert(CallExpression call, ConvertContext context) { FunctionDefinition def = call.getFunctionDefinition(); if (def instanceof ScalarFunctionDefinition) { ScalarFunction scalaFunc = ((ScalarFunctionDefinition) def).getScalarFunction(); FunctionIdentifier identifier = call.getFunctionIdentifier() .orElse(FunctionIdentifier.of(generateInlineFunctionName(scalaFunc))); SqlFunction sqlFunction = UserDefinedFunctionUtils.createScalarSqlFunction( identifier, scalaFunc.toString(), scalaFunc, context.getTypeFactory()); return Optional.of( context.getRelBuilder() .call(sqlFunction, toRexNodes(context, call.getChildren()))); } return Optional.empty(); }
{@link CallExpressionConvertRule} to convert {@link ScalarFunctionDefinition}.
convert
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/LegacyScalarFunctionConvertRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/LegacyScalarFunctionConvertRule.java
Apache-2.0
public Optional<CustomizedConverter> getConverter(FunctionDefinition functionDefinition) { return Optional.ofNullable(CONVERTERS.get(functionDefinition)); }
Registry of customized converters used by {@link CustomizedConvertRule}.
getConverter
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/converters/CustomizedConverters.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/converters/CustomizedConverters.java
Apache-2.0
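The converter registry above is a plain map lookup keyed by function definition. A minimal self-contained sketch of the same pattern, using hypothetical Definition/Converter stand-ins rather than the Flink types:

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

final class ConverterRegistry {
    // Hypothetical stand-ins for FunctionDefinition and CustomizedConverter.
    interface Definition {}
    interface Converter {}

    private final Map<Definition, Converter> converters = new HashMap<>();

    void register(Definition definition, Converter converter) {
        converters.put(definition, converter);
    }

    // Mirrors getConverter above: an unregistered definition yields Optional.empty().
    Optional<Converter> getConverter(Definition definition) {
        return Optional.ofNullable(converters.get(definition));
    }
}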
@Override public RexNode convert(CallExpression call, CallExpressionConvertRule.ConvertContext context) { checkArgumentNumber(call, 2); final FlinkTypeFactory typeFactory = context.getTypeFactory(); final RexNode child = context.toRexNode(call.getChildren().get(0)); final TypeLiteralExpression targetType = (TypeLiteralExpression) call.getChildren().get(1); RelDataType targetRelDataType = typeFactory.createTypeWithNullability( typeFactory.createFieldTypeFromLogicalType( targetType.getOutputDataType().getLogicalType()), true); return context.getRelBuilder() .getRexBuilder() .makeCall( targetRelDataType, FlinkSqlOperatorTable.TRY_CAST, Collections.singletonList(child)); }
Conversion for {@link BuiltInFunctionDefinitions#TRY_CAST}. <p>We need this custom converter as {@link FunctionDefinitionConvertRule} doesn't support type literal arguments.
convert
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/converters/TryCastConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/expressions/converter/converters/TryCastConverter.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.INT(); }
Built-in Int Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.TINYINT(); }
Built-in Byte Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.SMALLINT(); }
Built-in Short Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.BIGINT(); }
Built-in Long Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.FLOAT(); }
Built-in Float Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.DOUBLE(); }
Built-in Double Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return resultType; }
Built-in Decimal Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.BOOLEAN(); }
Built-in Boolean Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.STRING(); }
Built-in String Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.DATE(); }
Built-in Date Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.TIME(TimeType.DEFAULT_PRECISION); }
Built-in Time Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.TIMESTAMP(type.getPrecision()); }
Built-in Timestamp Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.TIMESTAMP_LTZ(type.getPrecision()); }
Built-in TimestampLtz Max aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/MaxAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.DECIMAL(type.getPrecision(), type.getScale()); }
Built-in decimal single value aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/SingleValueAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/SingleValueAggFunction.java
Apache-2.0
@Override public DataType getResultType() { return DataTypes.CHAR(type.getLength()); }
Built-in char single value aggregate function.
getResultType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/SingleValueAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/SingleValueAggFunction.java
Apache-2.0
default LocalReferenceExpression windowSizeAttribute() { return localRef("window_size", DataTypes.INT()); }
The field for the window size.
windowSizeAttribute
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/SizeBasedWindowFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/aggfunctions/SizeBasedWindowFunction.java
Apache-2.0
public static BridgingSqlAggFunction of( DataTypeFactory dataTypeFactory, FlinkTypeFactory typeFactory, SqlKind kind, ContextResolvedFunction resolvedFunction, TypeInference typeInference) { final FunctionKind functionKind = resolvedFunction.getDefinition().getKind(); checkState( functionKind == FunctionKind.AGGREGATE || functionKind == FunctionKind.TABLE_AGGREGATE, "Aggregating function kind expected."); return new BridgingSqlAggFunction( dataTypeFactory, typeFactory, kind, resolvedFunction, typeInference); }
Creates an instance of an aggregating function (either a system or user-defined function). @param dataTypeFactory used for creating {@link DataType} @param typeFactory used for bridging to {@link RelDataType} @param kind commonly used SQL standard function; use {@link SqlKind#OTHER_FUNCTION} if this function cannot be mapped to a common function kind. @param resolvedFunction system or user-defined {@link FunctionDefinition} with context @param typeInference type inference logic
of
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/bridging/BridgingSqlAggFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/bridging/BridgingSqlAggFunction.java
Apache-2.0
public static BridgingSqlFunction of( DataTypeFactory dataTypeFactory, FlinkTypeFactory typeFactory, RexFactory rexFactory, SqlKind kind, ContextResolvedFunction resolvedFunction, TypeInference typeInference) { final FunctionKind functionKind = resolvedFunction.getDefinition().getKind(); checkState( functionKind == FunctionKind.SCALAR || functionKind == FunctionKind.ASYNC_SCALAR || functionKind == FunctionKind.TABLE || functionKind == FunctionKind.PROCESS_TABLE, "Scalar or table function kind expected."); final TypeInference systemTypeInference = SystemTypeInference.of(functionKind, typeInference); if (functionKind == FunctionKind.TABLE || functionKind == FunctionKind.PROCESS_TABLE) { return new BridgingSqlFunction.WithTableFunction( dataTypeFactory, typeFactory, rexFactory, kind, resolvedFunction, systemTypeInference); } return new BridgingSqlFunction( dataTypeFactory, typeFactory, rexFactory, kind, resolvedFunction, systemTypeInference); }
Creates an instance of a scalar or table function (either a system or user-defined function). @param dataTypeFactory used for creating {@link DataType} @param typeFactory used for bridging to {@link RelDataType} @param rexFactory used for {@link ExpressionEvaluatorFactory} @param kind commonly used SQL standard function; use {@link SqlKind#OTHER_FUNCTION} if this function cannot be mapped to a common function kind. @param resolvedFunction system or user-defined {@link FunctionDefinition} with context @param typeInference type inference logic
of
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/bridging/BridgingSqlFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/bridging/BridgingSqlFunction.java
Apache-2.0
public ContextResolvedProcedure getContextResolveProcedure() { return contextResolvedProcedure; }
Bridges {@link Procedure} to Calcite's representation of a function.
getContextResolveProcedure
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/bridging/BridgingSqlProcedure.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/bridging/BridgingSqlProcedure.java
Apache-2.0
public static BridgingSqlProcedure of( DataTypeFactory dataTypeFactory, ContextResolvedProcedure resolvedProcedure) { final Procedure procedure = resolvedProcedure.getProcedure(); final ProcedureDefinition procedureDefinition = new ProcedureDefinition(procedure); final TypeInference typeInference = TypeInferenceExtractor.forProcedure(dataTypeFactory, procedure.getClass()); return new BridgingSqlProcedure( createName(resolvedProcedure), createSqlIdentifier(resolvedProcedure), createSqlReturnTypeInference(dataTypeFactory, procedureDefinition, typeInference), createSqlOperandTypeInference(dataTypeFactory, procedureDefinition, typeInference), createSqlOperandTypeChecker(dataTypeFactory, procedureDefinition, typeInference), SqlFunctionCategory.USER_DEFINED_PROCEDURE, resolvedProcedure); }
Creates an instance of a procedure. @param dataTypeFactory used for creating {@link DataType} @param resolvedProcedure {@link Procedure} with context
of
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/bridging/BridgingSqlProcedure.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/bridging/BridgingSqlProcedure.java
Apache-2.0
@Override public CastRulePredicate getPredicateDefinition() { return predicate; }
Base class for all cast rules.
getPredicateDefinition
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { final String stringExpr = generateStringExpression(context, inputTerm, inputLogicalType, targetLogicalType); return CastRuleUtils.staticCall(BINARY_STRING_DATA_FROM_STRING(), stringExpr); }
Base class for cast rules converting to {@link LogicalTypeFamily#CHARACTER_STRING} with code generation.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractCharacterFamilyTargetRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractCharacterFamilyTargetRule.java
Apache-2.0
@Override public CastCodeBlock generateCodeBlock( CodeGeneratorCastRule.Context context, String inputTerm, String inputIsNullTerm, LogicalType inputType, LogicalType targetType) { final CastRuleUtils.CodeWriter writer = new CastRuleUtils.CodeWriter(); final boolean isResultNullable = inputType.isNullable() || isPrimitiveNullable(targetType); String nullTerm; if (isResultNullable) { nullTerm = context.declareVariable("boolean", "isNull"); writer.assignStmt(nullTerm, inputIsNullTerm); } else { nullTerm = "false"; } // Create the result value variable final String returnTerm = context.declareVariable(primitiveTypeTermForType(targetType), "result"); // Generate the code block final String castCodeBlock = this.generateCodeBlockInternal( context, inputTerm, returnTerm, inputType, targetType); if (isResultNullable) { writer.ifStmt( "!" + nullTerm, thenWriter -> { thenWriter.appendBlock(castCodeBlock); // If the result type is not primitive, // then perform another null check if (isPrimitiveNullable(targetType)) { thenWriter.assignStmt(nullTerm, returnTerm + " == null"); } }, elseWriter -> elseWriter.assignStmt(returnTerm, primitiveDefaultValue(targetType))); } else { writer.appendBlock(castCodeBlock); } return CastCodeBlock.withCode(writer.toString(), returnTerm, nullTerm); }
This method doesn't need to take care of null checks for input values. Implementations should write the cast result to {@code returnVariable}.
generateCodeBlock
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractNullAwareCodeGeneratorCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/AbstractNullAwareCodeGeneratorCastRule.java
Apache-2.0
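For intuition, here is a hand-written approximation of the null-aware wrapper this rule emits, specialized to a hypothetical String-to-int cast; the real rule produces this structure as generated source strings, not as a compiled method:

static int castOrDefault(String input, boolean inputIsNull, boolean[] resultIsNull) {
    boolean isNull = inputIsNull;         // nullTerm, seeded from the input's null flag
    int result;                           // returnTerm, typed with the target's primitive type
    if (!isNull) {
        result = Integer.parseInt(input); // stands in for the generateCodeBlockInternal body
    } else {
        result = 0;                       // primitiveDefaultValue(targetType)
    }
    resultIsNull[0] = isNull;
    return result;
}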
@Override protected String generateCodeBlockInternal( CodeGeneratorCastRule.Context context, String inputTerm, String returnVariable, LogicalType inputLogicalType, LogicalType targetLogicalType) { final LogicalType innerInputType = ((ArrayType) inputLogicalType).getElementType(); CodeGeneratorContext codeGeneratorContext = context.getCodeGeneratorContext(); final String builderTerm = newName(codeGeneratorContext, "builder"); context.declareClassField( className(StringBuilder.class), builderTerm, constructorCall(StringBuilder.class)); final String resultStringTerm = newName(codeGeneratorContext, "resultString"); final int length = LogicalTypeChecks.getLength(targetLogicalType); CastRuleUtils.CodeWriter writer = new CastRuleUtils.CodeWriter() .stmt(methodCall(builderTerm, "setLength", 0)) .stmt(methodCall(builderTerm, "append", strLiteral("["))) .forStmt( methodCall(inputTerm, "size"), (indexTerm, loopBodyWriter) -> { String elementTerm = newName(codeGeneratorContext, "element"); String elementIsNullTerm = newName(codeGeneratorContext, "elementIsNull"); CastCodeBlock codeBlock = // Null check is done at the array access level CastRuleProvider.generateAlwaysNonNullCodeBlock( context, elementTerm, innerInputType, STRING_TYPE); if (!context.legacyBehaviour() && couldTrim(length)) { // Break if the target length is already exceeded loopBodyWriter.ifStmt( stringExceedsLength(builderTerm, length), CastRuleUtils.CodeWriter::breakStmt); } loopBodyWriter // Write the comma .ifStmt( indexTerm + " != 0", thenBodyWriter -> thenBodyWriter.stmt( methodCall( builderTerm, "append", strLiteral(", ")))) // Extract element from array .declPrimitiveStmt(innerInputType, elementTerm) .declStmt( boolean.class, elementIsNullTerm, methodCall(inputTerm, "isNullAt", indexTerm)) .ifStmt( "!" + elementIsNullTerm, thenBodyWriter -> thenBodyWriter // If element not null, // extract it and // execute the cast .assignStmt( elementTerm, rowFieldReadAccess( indexTerm, inputTerm, innerInputType)) .append(codeBlock) .stmt( methodCall( builderTerm, "append", codeBlock .getReturnTerm())), elseBodyWriter -> // If element is null, just // write NULL elseBodyWriter.stmt( methodCall( builderTerm, "append", nullLiteral( context .legacyBehaviour())))); }, codeGeneratorContext) .stmt(methodCall(builderTerm, "append", strLiteral("]"))); return CharVarCharTrimPadCastRule.padAndTrimStringIfNeeded( writer, targetLogicalType, context.legacyBehaviour(), length, resultStringTerm, builderTerm, codeGeneratorContext) // Assign the result value .assignStmt( returnVariable, CastRuleUtils.staticCall( BINARY_STRING_DATA_FROM_STRING(), resultStringTerm)) .toString(); }
{@link LogicalTypeRoot#ARRAY} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateCodeBlockInternal
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/ArrayToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/ArrayToStringCastRule.java
Apache-2.0
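The generated loop is easier to follow as ordinary Java. An illustrative runtime equivalent for an int array with a separate null mask (assumed representation, not Flink's ArrayData; length trimming/padding for CHAR/VARCHAR targets is omitted):

static String arrayToString(int[] elements, boolean[] isNull) {
    StringBuilder builder = new StringBuilder();
    builder.append("[");
    for (int i = 0; i < elements.length; i++) {
        if (i != 0) {
            builder.append(", ");        // separator between elements, as in the rule
        }
        if (isNull[i]) {
            builder.append("NULL");      // null elements are written literally
        } else {
            builder.append(elements[i]); // element rendered in its string form
        }
    }
    builder.append("]");
    return builder.toString();
}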
@Override protected String generateCodeBlockInternal( CodeGeneratorCastRule.Context context, String inputTerm, String returnVariable, LogicalType inputLogicalType, LogicalType targetLogicalType) { final String resultStringTerm = newName(context.getCodeGeneratorContext(), "resultString"); final CastRuleUtils.CodeWriter writer = new CastRuleUtils.CodeWriter(); writer.declStmt(String.class, resultStringTerm); if (context.isPrinting()) { writer.assignStmt(resultStringTerm, "\"x'\"") .assignPlusStmt( resultStringTerm, staticCall(EncodingUtils.class, "hex", inputTerm)) .assignPlusStmt(resultStringTerm, "\"'\""); } else { writer.assignStmt( resultStringTerm, constructorCall( String.class, inputTerm, accessStaticField(StandardCharsets.class, "UTF_8"))); } if (!context.legacyBehaviour() && !context.isPrinting()) { final String resultPadOrTrim = newName(context.getCodeGeneratorContext(), "resultPadOrTrim"); final int length = LogicalTypeChecks.getLength(targetLogicalType); CharVarCharTrimPadCastRule.padAndTrimStringIfNeeded( writer, targetLogicalType, context.legacyBehaviour(), length, resultPadOrTrim, resultStringTerm, context.getCodeGeneratorContext()); writer.assignStmt(resultStringTerm, resultPadOrTrim); } return writer // Assign the result value .assignStmt( returnVariable, CastRuleUtils.staticCall( BINARY_STRING_DATA_FROM_STRING(), resultStringTerm)) .toString(); }
{@link LogicalTypeFamily#BINARY_STRING} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateCodeBlockInternal
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/BinaryToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/BinaryToStringCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { if (context.legacyBehaviour()) { return CastRuleUtils.staticCall( BINARY_STRING_DATA_FROM_STRING(), stringConcat(EMPTY_STR_LITERAL, inputTerm)); } return ternaryOperator( inputTerm, accessStaticField(BinaryStringDataUtil.class, "TRUE_STRING"), accessStaticField(BinaryStringDataUtil.class, "FALSE_STRING")); }
{@link LogicalTypeRoot#BOOLEAN} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/BooleanToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/BooleanToStringCastRule.java
Apache-2.0
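In non-legacy mode the rule reduces to a single conditional over two cached string constants. Its runtime effect is equivalent to this trivial sketch (plain String constants assumed; Flink uses BinaryStringData instances):

static String booleanToString(boolean value) {
    // Mirrors the generated ternary over TRUE_STRING/FALSE_STRING.
    return value ? "TRUE" : "FALSE";
}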
default boolean canFail(LogicalType inputLogicalType, LogicalType targetLogicalType) { return false; }
Returns true if the {@link CastExecutor} can fail at runtime.
canFail
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRule.java
Apache-2.0
public static @Nullable CastRule<?, ?> resolve(LogicalType inputType, LogicalType targetType) { return INSTANCE.internalResolve(inputType, targetType); }
Resolve a {@link CastRule} for the provided input type and target type. Returns {@code null} if no rule can be resolved.
resolve
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
Apache-2.0
public static boolean exists(LogicalType inputType, LogicalType targetType) { return resolve(inputType, targetType) != null; }
Returns {@code true} if and only if a {@link CastRule} can be resolved for the provided input type and target type.
exists
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
Apache-2.0
public static boolean canFail(LogicalType inputType, LogicalType targetType) { return Preconditions.checkNotNull( resolve(inputType, targetType), "Cast rule cannot be resolved") .canFail(inputType, targetType); }
Resolves the rule and returns the result of {@link CastRule#canFail(LogicalType, LogicalType)}. Fails with {@link NullPointerException} if the rule cannot be resolved.
canFail
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
Apache-2.0
public static @Nullable CastExecutor<?, ?> create( CastRule.Context context, LogicalType inputLogicalType, LogicalType targetLogicalType) { CastRule<?, ?> rule = INSTANCE.internalResolve(inputLogicalType, targetLogicalType); if (rule == null) { return null; } return rule.create(context, inputLogicalType, targetLogicalType); }
Create a {@link CastExecutor} for the provided input type and target type. Returns {@code null} if no rule can be resolved. @see CastRule#create(CastRule.Context, LogicalType, LogicalType)
create
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
Apache-2.0
@SuppressWarnings("rawtypes") public static @Nullable CastCodeBlock generateCodeBlock( CodeGeneratorCastRule.Context context, String inputTerm, String inputIsNullTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { CastRule<?, ?> rule = INSTANCE.internalResolve(inputLogicalType, targetLogicalType); if (!(rule instanceof CodeGeneratorCastRule)) { return null; } return ((CodeGeneratorCastRule) rule) .generateCodeBlock( context, inputTerm, inputIsNullTerm, inputLogicalType, targetLogicalType); }
Create a {@link CastCodeBlock} for the provided input type and target type. Returns {@code null} if no rule can be resolved or the resolved rule is not an instance of {@link CodeGeneratorCastRule}. @see CodeGeneratorCastRule#generateCodeBlock(CodeGeneratorCastRule.Context, String, String, LogicalType, LogicalType)
generateCodeBlock
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
Apache-2.0
@SuppressWarnings("unchecked") public static @Nullable Object cast( CastRule.Context context, LogicalType inputLogicalType, LogicalType targetLogicalType, Object value) { CastExecutor<Object, Object> castExecutor = (CastExecutor<Object, Object>) CastRuleProvider.create(context, inputLogicalType, targetLogicalType); if (castExecutor == null) { throw new NullPointerException( "Unsupported casting from " + inputLogicalType + " to " + targetLogicalType); } return castExecutor.cast(value); }
Create a {@link CastExecutor} and execute the cast on the provided {@code value}. Fails with {@link NullPointerException} if the rule cannot be resolved, or with an exception from the {@link CastExecutor} itself if the rule can fail.
cast
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CastRuleProvider.java
Apache-2.0
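Taken together, the CastRuleProvider methods above form a resolve, create, cast pipeline. A hedged, self-contained sketch of that flow with hypothetical Mini* types (not the Flink API):

interface MiniCastExecutor<I, O> { O cast(I value); }
interface MiniCastRule<I, O> { MiniCastExecutor<I, O> create(); }

final class MiniCastProvider {
    // A real provider consults a registry keyed by (input, target) type; one rule here.
    static MiniCastRule<String, Integer> resolve(String inputType, String targetType) {
        if ("STRING".equals(inputType) && "INT".equals(targetType)) {
            return () -> Integer::parseInt;
        }
        return null; // mirrors the @Nullable resolve() above
    }

    // Mirrors cast(): resolve, fail fast on a missing rule, then execute.
    static Object cast(String inputType, String targetType, String value) {
        MiniCastRule<String, Integer> rule = resolve(inputType, targetType);
        if (rule == null) {
            throw new NullPointerException(
                    "Unsupported casting from " + inputType + " to " + targetType);
        }
        return rule.create().cast(value);
    }
}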
@SuppressWarnings("unchecked") @Override public OUT cast(IN value) throws TableException { try { inputArray[0] = value; return (OUT) expressionEvaluator.evaluate(inputArray); } catch (InvocationTargetException e) { if (e.getCause() instanceof TableException) { // Expected exception created by the rule, so no need to wrap it throw (TableException) e.getCause(); } throw new TableException( "Cannot execute the compiled expression for an unknown cause. " + e.getCause(), e); } }
Cast executor that can be instantiated from generated expression code. @param <IN> Input internal type @param <OUT> Output internal type
cast
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CodeGeneratedExpressionCastExecutor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/CodeGeneratedExpressionCastExecutor.java
Apache-2.0
@Override default boolean canFail(LogicalType inputLogicalType, LogicalType targetLogicalType) { final List<LogicalType> inputFields = LogicalTypeChecks.getFieldTypes(inputLogicalType); final List<LogicalType> targetFields = LogicalTypeChecks.getFieldTypes(targetLogicalType); // This should have been already checked when the rule is matched assert inputFields.size() == targetFields.size(); return IntStream.range(0, inputFields.size()) .anyMatch(i -> CastRuleProvider.canFail(inputFields.get(i), targetFields.get(i))); }
This interface provides a default implementation for {@link #canFail(LogicalType, LogicalType)} for constructed type casts, e.g. ARRAY to ARRAY (but not ARRAY to STRING).
canFail
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/ConstructedToConstructedCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/ConstructedToConstructedCastRule.java
Apache-2.0
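The default method above reduces "can this composite cast fail?" to a pairwise any-match over the field types. The same idea, isolated with a hypothetical string-based field representation:

import java.util.List;
import java.util.function.BiPredicate;
import java.util.stream.IntStream;

static boolean compositeCastCanFail(
        List<String> inputFields,
        List<String> targetFields,
        BiPredicate<String, String> fieldCastCanFail) {
    // Equal arity is guaranteed earlier, when the rule is matched.
    assert inputFields.size() == targetFields.size();
    return IntStream.range(0, inputFields.size())
            .anyMatch(i -> fieldCastCanFail.test(inputFields.get(i), targetFields.get(i)));
}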
@Override public String generateStringExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { return staticCall(UNIX_DATE_TO_STRING(), inputTerm); }
{@link LogicalTypeRoot#DATE} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateStringExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/DateToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/DateToStringCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { if (targetLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)) { return staticCall( BuiltInMethods.TIMESTAMP_FROM_EPOCH_MILLIS(), operator(cast("long", inputTerm), "*", DateTimeUtils.MILLIS_PER_DAY)); } else if (targetLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) { return staticCall( BuiltInMethods.DATE_TO_TIMESTAMP_WITH_LOCAL_TIME_ZONE(), inputTerm, context.getSessionTimeZoneTerm()); } else { throw new IllegalArgumentException("This is a bug. Please file an issue."); } }
{@link LogicalTypeRoot#DATE} to {@link LogicalTypeRoot#TIMESTAMP_WITHOUT_TIME_ZONE}/{@link LogicalTypeRoot#TIMESTAMP_WITH_LOCAL_TIME_ZONE} cast rule.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/DateToTimestampCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/DateToTimestampCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { final DecimalType targetDecimalType = (DecimalType) targetLogicalType; return staticCall( BuiltInMethods.DECIMAL_TO_DECIMAL(), inputTerm, targetDecimalType.getPrecision(), targetDecimalType.getScale()); }
{@link LogicalTypeRoot#DECIMAL} to {@link LogicalTypeRoot#DECIMAL} cast rule.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/DecimalToDecimalCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/DecimalToDecimalCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { Method method = targetLogicalType.is(LogicalTypeFamily.INTEGER_NUMERIC) ? DECIMAL_TO_INTEGRAL() : DECIMAL_TO_DOUBLE(); return castToPrimitive(targetLogicalType, staticCall(method, inputTerm)); }
{@link LogicalTypeRoot#DECIMAL} to {@link LogicalTypeFamily#INTEGER_NUMERIC} and {@link LogicalTypeFamily#APPROXIMATE_NUMERIC} cast rule.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/DecimalToNumericPrimitiveCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/DecimalToNumericPrimitiveCastRule.java
Apache-2.0
@Override public String generateStringExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { final Method method = inputLogicalType.is(LogicalTypeRoot.INTERVAL_YEAR_MONTH) ? INTERVAL_YEAR_MONTH_TO_STRING() : INTERVAL_DAY_TIME_TO_STRING(); return staticCall(method, inputTerm); }
{@link LogicalTypeFamily#INTERVAL} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateStringExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/IntervalToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/IntervalToStringCastRule.java
Apache-2.0
@Override public CastCodeBlock generateCodeBlock( CodeGeneratorCastRule.Context context, String inputTerm, String inputIsNullTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { return CastCodeBlock.withoutCode( accessStaticField(BinaryStringDataUtil.class, "NULL_STRING"), "false"); }
{@link LogicalTypeRoot#NULL} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateCodeBlock
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NullToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NullToStringCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { final DecimalType targetDecimalType = (DecimalType) targetLogicalType; if (inputLogicalType.is(LogicalTypeFamily.INTEGER_NUMERIC)) { return staticCall( INTEGRAL_TO_DECIMAL(), cast("long", inputTerm), targetDecimalType.getPrecision(), targetDecimalType.getScale()); } return staticCall( DOUBLE_TO_DECIMAL(), cast("double", inputTerm), targetDecimalType.getPrecision(), targetDecimalType.getScale()); }
{@link LogicalTypeFamily#INTEGER_NUMERIC} and {@link LogicalTypeFamily#APPROXIMATE_NUMERIC} to {@link LogicalTypeRoot#DECIMAL} cast rule.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NumericPrimitiveToDecimalCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NumericPrimitiveToDecimalCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { // Should be removed when https://issues.apache.org/jira/browse/FLINK-24576 is fixed if (inputLogicalType.is(LogicalTypeRoot.DECIMAL)) { return staticCall(DECIMAL_TO_BOOLEAN(), inputTerm); } return inputTerm + " != 0"; }
{@link LogicalTypeFamily#NUMERIC} to {@link LogicalTypeRoot#BOOLEAN} conversions.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NumericToBooleanCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NumericToBooleanCastRule.java
Apache-2.0
@Override public String generateStringExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { return stringConcat(EMPTY_STR_LITERAL, inputTerm); }
{@link LogicalTypeFamily#NUMERIC} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateStringExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NumericToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NumericToStringCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { if (targetLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)) { throw new ValidationException( "The cast from NUMERIC type to TIMESTAMP type " + "is not allowed. It's recommended to use " + "TO_TIMESTAMP(FROM_UNIXTIME(numeric_col)) " + "instead, note the numeric is in seconds."); } else if (targetLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) { throw new ValidationException( "The cast from NUMERIC type" + " to TIMESTAMP_LTZ type is not allowed. It's recommended to use" + " TO_TIMESTAMP_LTZ(numeric_col, precision) instead."); } else { throw new IllegalArgumentException("This is a bug. Please file an issue."); } }
{@link LogicalTypeFamily#NUMERIC} to {@link LogicalTypeRoot#TIMESTAMP_WITHOUT_TIME_ZONE}/{@link LogicalTypeRoot#TIMESTAMP_WITH_LOCAL_TIME_ZONE} cast rule. Disables casting between NUMERIC and TIMESTAMP types and suggests using {@code TO_TIMESTAMP()}/{@code TO_TIMESTAMP_LTZ()} instead.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NumericToTimestampCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/NumericToTimestampCastRule.java
Apache-2.0
@Override protected String generateCodeBlockInternal( CodeGeneratorCastRule.Context context, String inputTerm, String returnVariable, LogicalType inputLogicalType, LogicalType targetLogicalType) { CodeGeneratorContext codeGeneratorContext = context.getCodeGeneratorContext(); final String typeSerializer = context.declareTypeSerializer(inputLogicalType); final String deserializedObjTerm = CodeGenUtils.newName(codeGeneratorContext, "deserializedObj"); final String resultStringTerm = CodeGenUtils.newName(codeGeneratorContext, "resultString"); final int length = LogicalTypeChecks.getLength(targetLogicalType); return new CastRuleUtils.CodeWriter() .declStmt( Object.class, deserializedObjTerm, methodCall(inputTerm, "toObject", typeSerializer)) .ifStmt( deserializedObjTerm + " != null", thenWriter -> CharVarCharTrimPadCastRule.padAndTrimStringIfNeeded( thenWriter, targetLogicalType, context.legacyBehaviour(), length, resultStringTerm, methodCall(deserializedObjTerm, "toString"), context.getCodeGeneratorContext()) .assignStmt( returnVariable, CastRuleUtils.staticCall( BINARY_STRING_DATA_FROM_STRING(), resultStringTerm)), elseWriter -> elseWriter.assignStmt(returnVariable, "null")) .toString(); }
{@link LogicalTypeRoot#RAW} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateCodeBlockInternal
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/RawToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/RawToStringCastRule.java
Apache-2.0
@Override protected String generateCodeBlockInternal( CodeGeneratorCastRule.Context context, String inputTerm, String returnVariable, LogicalType inputLogicalType, LogicalType targetLogicalType) { if (context.legacyBehaviour()) { return new CastRuleUtils.CodeWriter() .assignStmt(returnVariable, methodCall(inputTerm, "toBytes")) .toString(); } else { final int targetLength = LogicalTypeChecks.getLength(targetLogicalType); final String byteArrayTerm = CodeGenUtils.newName(context.getCodeGeneratorContext(), "byteArrayTerm"); return new CastRuleUtils.CodeWriter() .declStmt(byte[].class, byteArrayTerm, methodCall(inputTerm, "toBytes")) .ifStmt( arrayLength(byteArrayTerm) + " <= " + targetLength, thenWriter -> { if (couldPad(targetLogicalType, targetLength)) { trimOrPadByteArray( returnVariable, targetLength, byteArrayTerm, thenWriter); } else { thenWriter.assignStmt(returnVariable, byteArrayTerm); } }, elseWriter -> trimOrPadByteArray( returnVariable, targetLength, byteArrayTerm, elseWriter)) .toString(); } }
{@link LogicalTypeFamily#CHARACTER_STRING} to {@link LogicalTypeFamily#BINARY_STRING} cast rule.
generateCodeBlockInternal
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/StringToBinaryCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/StringToBinaryCastRule.java
Apache-2.0
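In non-legacy mode the string's bytes are truncated or zero-padded to the target length; Arrays.copyOf covers both directions for a fixed-length BINARY(n) target (for VARBINARY, shorter arrays pass through unchanged). An illustrative helper, not Flink's trimOrPadByteArray:

import java.util.Arrays;

static byte[] trimOrPad(byte[] bytes, int targetLength) {
    // copyOf truncates when the array is longer and zero-pads when it is shorter.
    return bytes.length == targetLength ? bytes : Arrays.copyOf(bytes, targetLength);
}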
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { if (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)) { return cast( "int", operator( methodCall(inputTerm, "getMillisecond"), "/", DateTimeUtils.MILLIS_PER_DAY)); } else if (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) { return staticCall( BuiltInMethods.TIMESTAMP_WITH_LOCAL_TIME_ZONE_TO_DATE(), inputTerm, context.getSessionTimeZoneTerm()); } else { throw new IllegalArgumentException("This is a bug. Please file an issue."); } }
{@link LogicalTypeRoot#TIMESTAMP_WITHOUT_TIME_ZONE}/{@link LogicalTypeRoot#TIMESTAMP_WITH_LOCAL_TIME_ZONE} to {@link LogicalTypeRoot#DATE}.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToDateCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToDateCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { if (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)) { throw new ValidationException( "The cast from TIMESTAMP type to NUMERIC type" + " is not allowed. It's recommended to use" + " UNIX_TIMESTAMP(CAST(timestamp_col AS STRING)) instead."); } else if (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) { throw new ValidationException( "The cast from" + " TIMESTAMP_LTZ type to NUMERIC type is not allowed."); } else { throw new IllegalArgumentException("This is a bug. Please file an issue."); } }
{@link LogicalTypeRoot#TIMESTAMP_WITHOUT_TIME_ZONE}/{@link LogicalTypeRoot#TIMESTAMP_WITH_LOCAL_TIME_ZONE} to {@link LogicalTypeFamily#NUMERIC} cast rule. Disables casting between TIMESTAMP and NUMERIC types and suggests using {@code UNIX_TIMESTAMP()} instead (for {@link LogicalTypeRoot#TIMESTAMP_WITHOUT_TIME_ZONE}).
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToNumericCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToNumericCastRule.java
Apache-2.0
@Override public String generateStringExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { final String zoneId = (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) ? context.getSessionTimeZoneTerm() : accessStaticField(DateTimeUtils.class, "UTC_ZONE"); final int precision = LogicalTypeChecks.getPrecision(inputLogicalType); return staticCall(TIMESTAMP_TO_STRING_TIME_ZONE(), inputTerm, zoneId, precision); }
{@link LogicalTypeFamily#TIMESTAMP} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateStringExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToStringCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { if (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)) { return cast( "int", operator( methodCall(inputTerm, "getMillisecond"), "%", DateTimeUtils.MILLIS_PER_DAY)); } else if (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) { return staticCall( BuiltInMethods.TIMESTAMP_WITH_LOCAL_TIME_ZONE_TO_TIME(), inputTerm, context.getSessionTimeZoneTerm()); } else { throw new IllegalArgumentException("This is a bug. Please file an issue."); } }
{@link LogicalTypeRoot#TIMESTAMP_WITHOUT_TIME_ZONE}/{@link LogicalTypeRoot#TIMESTAMP_WITH_LOCAL_TIME_ZONE} to {@link LogicalTypeRoot#TIME_WITHOUT_TIME_ZONE}.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToTimeCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToTimeCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { final int inputPrecision = LogicalTypeChecks.getPrecision(inputLogicalType); int targetPrecision = LogicalTypeChecks.getPrecision(targetLogicalType); if (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE) && targetLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)) { final TimestampKind inputTimestampKind = ((TimestampType) inputLogicalType).getKind(); final TimestampKind targetTimestampKind = ((TimestampType) targetLogicalType).getKind(); if (inputTimestampKind == TimestampKind.ROWTIME || inputTimestampKind == TimestampKind.PROCTIME || targetTimestampKind == TimestampKind.ROWTIME || targetTimestampKind == TimestampKind.PROCTIME) { targetPrecision = 3; } } final String operand; if (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE) && targetLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) { operand = staticCall( BuiltInMethods.TIMESTAMP_TO_TIMESTAMP_WITH_LOCAL_ZONE(), inputTerm, context.getSessionTimeZoneTerm()); } else if (inputLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE) && targetLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)) { operand = staticCall( BuiltInMethods.TIMESTAMP_WITH_LOCAL_ZONE_TO_TIMESTAMP(), inputTerm, context.getSessionTimeZoneTerm()); } else { operand = inputTerm; } if (inputPrecision <= targetPrecision) { return operand; } else { return staticCall(BuiltInMethods.TRUNCATE_SQL_TIMESTAMP(), operand, targetPrecision); } }
{@link LogicalTypeRoot#TIMESTAMP_WITHOUT_TIME_ZONE}/{@link LogicalTypeRoot#TIMESTAMP_WITH_LOCAL_TIME_ZONE} to {@link LogicalTypeRoot#TIMESTAMP_WITHOUT_TIME_ZONE}/{@link LogicalTypeRoot#TIMESTAMP_WITH_LOCAL_TIME_ZONE} cast rule. Checks whether the precision changes and truncates the value if needed.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToTimestampCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimestampToTimestampCastRule.java
Apache-2.0
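The rule truncates only when the input precision exceeds the target's. On epoch milliseconds, truncating to a precision p <= 3 amounts to zeroing the lower digits, as in this illustrative helper (the real TRUNCATE_SQL_TIMESTAMP operates on Flink's TimestampData, not a raw long):

static long truncateToPrecision(long epochMillis, int targetPrecision) {
    // Assumes 0 <= targetPrecision <= 3, i.e. millisecond-backed timestamps.
    long factor = 1L;
    for (int i = targetPrecision; i < 3; i++) {
        factor *= 10;
    }
    return epochMillis / factor * factor; // e.g. precision 0 zeroes the last three digits
}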
@Override public String generateStringExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { return CastRuleUtils.staticCall( UNIX_TIME_TO_STRING(), inputTerm, LogicalTypeChecks.getPrecision(inputLogicalType)); }
{@link LogicalTypeRoot#TIME_WITHOUT_TIME_ZONE} to {@link LogicalTypeFamily#CHARACTER_STRING} cast rule.
generateStringExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimeToStringCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimeToStringCastRule.java
Apache-2.0
@Override public String generateExpression( CodeGeneratorCastRule.Context context, String inputTerm, LogicalType inputLogicalType, LogicalType targetLogicalType) { if (targetLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)) { return staticCall(BuiltInMethods.TIMESTAMP_FROM_EPOCH_MILLIS(), inputTerm); } else if (targetLogicalType.is(LogicalTypeRoot.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) { return staticCall( BuiltInMethods.TIME_TO_TIMESTAMP_WITH_LOCAL_TIME_ZONE(), inputTerm, context.getSessionTimeZoneTerm()); } else { throw new IllegalArgumentException("This is a bug. Please file an issue."); } }
{@link LogicalTypeRoot#TIME_WITHOUT_TIME_ZONE} to {@link LogicalTypeRoot#TIMESTAMP_WITHOUT_TIME_ZONE}/{@link LogicalTypeRoot#TIMESTAMP_WITH_LOCAL_TIME_ZONE} cast rule.
generateExpression
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimeToTimestampCastRule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/casting/TimeToTimestampCastRule.java
Apache-2.0
@SuppressWarnings("unchecked") protected static <T> T getLiteralValueAs(LiteralValueAccessor accessor, Class<T> clazz) { Preconditions.checkArgument(!clazz.isPrimitive()); Object convertedValue = null; if (clazz == Duration.class) { final long longVal = accessor.getValueAs(Long.class); convertedValue = Duration.ofMillis(longVal); } else if (clazz == Period.class) { final long longVal = accessor.getValueAs(Long.class); if (longVal <= Integer.MAX_VALUE && longVal >= Integer.MIN_VALUE) { convertedValue = Period.ofMonths((int) longVal); } } else if (clazz == java.time.LocalDate.class) { final DateString dateString = accessor.getValueAs(DateString.class); convertedValue = java.time.LocalDate.parse(dateString.toString()); } else if (clazz == java.time.LocalTime.class) { final TimeString timeString = accessor.getValueAs(TimeString.class); convertedValue = java.time.LocalTime.parse(timeString.toString()); } else if (clazz == java.time.LocalDateTime.class) { final TimestampString timestampString = accessor.getValueAs(TimestampString.class); convertedValue = java.time.LocalDateTime.parse(timestampString.toString().replace(' ', 'T')); } else if (clazz == java.time.Instant.class) { // timestamp string is in UTC, convert back to an instant final TimestampString timestampString = accessor.getValueAs(TimestampString.class); convertedValue = java.time.LocalDateTime.parse(timestampString.toString().replace(' ', 'T')) .atOffset(ZoneOffset.UTC) .toInstant(); } if (convertedValue != null) { return (T) convertedValue; } return accessor.getValueAs(clazz); }
Bridges to {@link ValueLiteralExpression#getValueAs(Class)}.
getLiteralValueAs
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/inference/AbstractSqlCallContext.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/inference/AbstractSqlCallContext.java
Apache-2.0
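The Instant branch above relies on Calcite rendering timestamp literals in UTC with a space separator; swapping the space for 'T' yields an ISO-8601 string that java.time can parse. The same conversion as a standalone helper (hypothetical name):

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneOffset;

static Instant instantFromCalciteTimestampLiteral(String timestampString) {
    // "2024-01-02 03:04:05" -> "2024-01-02T03:04:05", then interpreted as UTC.
    return LocalDateTime.parse(timestampString.replace(' ', 'T'))
            .atOffset(ZoneOffset.UTC)
            .toInstant();
}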
@Override public RelDataType inferReturnType(SqlOperatorBinding opBinding) { RelDataType type = getComponentType(opBinding.getTypeFactory(), opBinding.collectOperandTypes()); if (null == type) { return null; } // explicit cast elements to component type if they are not same SqlValidatorUtils.adjustTypeForArrayConstructor(type, opBinding); return SqlTypeUtil.createArrayType(opBinding.getTypeFactory(), type, false); }
{@link SqlOperator} for <code>ARRAY</code>, which inserts an explicit cast when an element type does not equal the derived component type.
inferReturnType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlArrayConstructor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlArrayConstructor.java
Apache-2.0
@Override public boolean argumentMustBeScalar(int ordinal) { return ordinal != 0; }
{@inheritDoc} <p>Overridden because the first parameter of a table-valued windowing function is an explicit TABLE parameter, which is not scalar.
argumentMustBeScalar
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
Apache-2.0
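A hypothetical end-to-end usage showing why operand 0 is exempt: in a windowing table-valued function the first argument is a whole TABLE, while the remaining DESCRIPTOR and INTERVAL arguments are scalar. The table name and column are assumptions, and the table would need to be registered before the query runs.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class WindowTvfExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Operand 0: TABLE Orders (non-scalar); operands 1..n: DESCRIPTOR and INTERVAL.
        env.executeSql(
                "SELECT * FROM TABLE("
                        + " TUMBLE(TABLE Orders, DESCRIPTOR(order_time), INTERVAL '10' MINUTES))");
    }
}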
Optional<RuntimeException> checkTimeColumnDescriptorOperand( SqlCallBinding callBinding, int pos) { SqlValidator validator = callBinding.getValidator(); SqlNode operand0 = callBinding.operand(0); RelDataType type = validator.getValidatedNodeType(operand0); List<SqlNode> operands = ((SqlCall) callBinding.operand(pos)).getOperandList(); SqlIdentifier identifier = (SqlIdentifier) operands.get(0); String columnName = identifier.getSimple(); SqlNameMatcher matcher = validator.getCatalogReader().nameMatcher(); for (RelDataTypeField field : type.getFieldList()) { if (matcher.matches(field.getName(), columnName)) { RelDataType fieldType = field.getType(); if (FlinkTypeFactory.isTimeIndicatorType(fieldType)) { return Optional.empty(); } else { LogicalType timeAttributeType = FlinkTypeFactory.toLogicalType(fieldType); if (!canBeTimeAttributeType(timeAttributeType)) { ValidationException exception = new ValidationException( String.format( "The window function %s requires the timecol to be TIMESTAMP or TIMESTAMP_LTZ, but is %s.\n" + "Besides, the timecol must be a time attribute type in streaming mode.", callBinding .getOperator() .getAllowedSignatures(), field.getType())); return Optional.of(exception); } else { return Optional.empty(); } } } } IllegalArgumentException error = new IllegalArgumentException( String.format( "Can't find the time attribute field '%s' in the input schema %s.", columnName, type.getFullTypeString())); return Optional.of(error); }
Checks whether the type that the operand of the time column descriptor refers to is valid. @param callBinding The call binding @param pos The position of the descriptor among the operands of the call @return Optional.empty() if validation passes, otherwise an Optional wrapping the validation exception
checkTimeColumnDescriptorOperand
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
Apache-2.0
boolean checkIntervalOperands(SqlCallBinding callBinding, int startPos) { final SqlValidator validator = callBinding.getValidator(); for (int i = startPos; i < callBinding.getOperandCount(); i++) { final RelDataType type = validator.getValidatedNodeType(callBinding.operand(i)); if (!SqlTypeUtil.isInterval(type)) { return false; } } return true; }
Checks whether the operands starting from position {@code startPos} are all of type {@code INTERVAL}. @param callBinding The call binding @param startPos The start position to validate (0-based) @return true if validation passes
checkIntervalOperands
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/sql/SqlWindowTableFunction.java
Apache-2.0
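Detached from Calcite, the same trailing-operand pattern looks like the sketch below: validate that every element from {@code startPos} onward satisfies a predicate. The List of type-name strings standing in for validated operand types is an assumption.

import java.util.List;
import java.util.function.Predicate;

public class TrailingOperandCheck {
    // Returns true only if all elements at index >= startPos pass the predicate.
    static <T> boolean checkFrom(List<T> operands, int startPos, Predicate<T> isValid) {
        for (int i = startPos; i < operands.size(); i++) {
            if (!isValid.test(operands.get(i))) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        List<String> types = List.of("TABLE", "DESCRIPTOR", "INTERVAL", "INTERVAL");
        System.out.println(checkFrom(types, 2, "INTERVAL"::equals)); // true
    }
}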
private static void adjustTypeForMultisetConstructor( RelDataType evenType, RelDataType oddType, SqlCallBinding sqlCallBinding) { SqlCall call = sqlCallBinding.getCall(); List<RelDataType> operandTypes = sqlCallBinding.collectOperandTypes(); List<SqlNode> operands = call.getOperandList(); RelDataType elementType; for (int i = 0; i < operands.size(); i++) { if (i % 2 == 0) { elementType = evenType; } else { elementType = oddType; } if (operandTypes.get(i).equalsSansFieldNames(elementType)) { continue; } call.setOperand(i, castTo(operands.get(i), elementType)); } }
Adds an explicit cast when an element's type does not equal the derived component type. @param evenType derived type for elements at even indexes @param oddType derived type for elements at odd indexes @param sqlCallBinding description of the call
adjustTypeForMultisetConstructor
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/utils/SqlValidatorUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/functions/utils/SqlValidatorUtils.java
Apache-2.0
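The even/odd split corresponds to the MAP constructor, whose operands alternate key, value, key, value. A small stand-alone sketch of the index convention follows; the string operands and type names are assumptions.

import java.util.List;

public class EvenOddTypeAssignment {
    public static void main(String[] args) {
        List<String> operands = List.of("k1", "1", "k2", "2");
        String evenType = "VARCHAR"; // derived key type
        String oddType = "INT";      // derived value type
        for (int i = 0; i < operands.size(); i++) {
            // Even indexes are keys, odd indexes are values.
            String target = (i % 2 == 0) ? evenType : oddType;
            System.out.printf("operand %d (%s) -> cast to %s%n", i, operands.get(i), target);
        }
    }
}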
@Override protected RelNode doVisit(RelNode node) { Hintable hNode = (Hintable) node; AtomicBoolean changed = new AtomicBoolean(false); List<RelHint> hintsWithCapitalJoinHints = hNode.getHints().stream() .map( hint -> { String capitalHintName = hint.hintName.toUpperCase(Locale.ROOT); if (!FlinkHints.isQueryHint(capitalHintName) || hint.hintName.equals(capitalHintName)) { return hint; } changed.set(true); if (JoinStrategy.isJoinStrategy(capitalHintName)) { if (JoinStrategy.isLookupHint(hint.hintName)) { return RelHint.builder(capitalHintName) .hintOptions(hint.kvOptions) .inheritPath(hint.inheritPath) .build(); } return RelHint.builder(capitalHintName) .hintOptions(hint.listOptions) .inheritPath(hint.inheritPath) .build(); } else if (StateTtlHint.isStateTtlHint(hint.hintName)) { return RelHint.builder(capitalHintName) .hintOptions(hint.kvOptions) .inheritPath(hint.inheritPath) .build(); } throw new IllegalStateException( "Unknown hint: " + hint.hintName); }) .collect(Collectors.toList()); if (changed.get()) { return super.visit(hNode.withHints(hintsWithCapitalJoinHints)); } else { return super.visit(node); } }
A shuttle to capitalize all query hints on corresponding nodes.
doVisit
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/CapitalizeQueryHintsShuttle.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/CapitalizeQueryHintsShuttle.java
Apache-2.0
@Override
public RelNode visit(RelNode other) {
    if (FlinkRelOptUtil.containsSubQuery(other)) {
        other = resolveSubQuery(other, relNode -> relNode.accept(this));
    }

    if (other instanceof Hintable) {
        List<RelHint> originHints = ((Hintable) other).getHints();

        // 1. classify the hints and separate out the query hints
        List<RelHint> queryHints =
                originHints.stream()
                        .filter(h -> FlinkHints.isQueryHint(h.hintName))
                        .collect(Collectors.toList());
        List<RelHint> remainHints = new ArrayList<>(originHints);
        remainHints.removeAll(queryHints);

        // 2. use hintStrategyTable#apply to determine whether the query hint can be attached
        // to the current node
        // If it cannot be attached, it means that the query hint on the current node needs to
        // be removed.
        List<RelHint> hintsCanApply = hintStrategyTable.apply(queryHints, other);
        if (hintsCanApply.size() != queryHints.size()) {
            hintsCanApply.addAll(remainHints);
            // As a result, the remaining hints will be attached.
            other = ((Hintable) other).withHints(hintsCanApply);
        }
    }
    return super.visit(other);
}
Clears the invalid query hints on unmatched nodes. For example, a query hint may initially be attached to a Project node; after this shuttle is accepted, the query hint on that Project node will be cleared. <p>See more at {@link FlinkHintStrategies}. <p>Note that hints about views and aliases will not be cleared.
visit
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/ClearQueryHintsOnUnmatchedNodesShuttle.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/ClearQueryHintsOnUnmatchedNodesShuttle.java
Apache-2.0
public static Map<String, String> getHintedOptions(List<RelHint> tableHints) { return tableHints.stream() .filter(hint -> hint.hintName.equalsIgnoreCase(HINT_NAME_OPTIONS)) .findFirst() .map(hint -> hint.kvOptions) .orElse(Collections.emptyMap()); }
Returns the OPTIONS hint options from the given list of table hints {@code tableHints}, never null.
getHintedOptions
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
Apache-2.0
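A hypothetical query showing where such table hints originate; the kvOptions of the OPTIONS hint are what this method extracts. The table name is an assumption, and the option key is one understood by the Kafka connector.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class OptionsHintExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // The OPTIONS hint attaches {'scan.startup.mode' -> 'earliest-offset'} as kvOptions.
        env.executeSql(
                "SELECT * FROM kafka_source"
                        + " /*+ OPTIONS('scan.startup.mode'='earliest-offset') */");
    }
}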
public static RelNode resolveSubQuery(RelNode node, Function<RelNode, RelNode> resolver) { if (node instanceof LogicalProject) { LogicalProject project = (LogicalProject) node; List<RexNode> newProjects = project.getProjects().stream() .map(p -> resolveSubQuery(p, resolver)) .collect(Collectors.toList()); return project.copy( project.getTraitSet(), project.getInput(), newProjects, project.getRowType()); } else if (node instanceof LogicalFilter) { LogicalFilter filter = (LogicalFilter) node; RexNode newCondition = resolveSubQuery(filter.getCondition(), resolver); return filter.copy(filter.getTraitSet(), filter.getInput(), newCondition); } else if (node instanceof LogicalJoin) { LogicalJoin join = (LogicalJoin) node; RexNode newCondition = resolveSubQuery(join.getCondition(), resolver); return join.copy( join.getTraitSet(), newCondition, join.getLeft(), join.getRight(), join.getJoinType(), join.isSemiJoinDone()); } else { return node; } }
Resolves the RelNodes of the sub-queries contained in the given node's expressions and returns a new node.
resolveSubQuery
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
Apache-2.0
public static RelNode clearQueryHintsOnUnmatchedNodes(RelNode root) { return root.accept( new ClearQueryHintsOnUnmatchedNodesShuttle(root.getCluster().getHintStrategies())); }
Clear the query hints on some nodes where these hints should not be attached.
clearQueryHintsOnUnmatchedNodes
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
Apache-2.0
public static boolean isQueryHint(String hintName) { return JoinStrategy.isJoinStrategy(hintName) || StateTtlHint.isStateTtlHint(hintName); }
Check if the hint is a query hint.
isQueryHint
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
Apache-2.0
public static boolean isAliasHint(String hintName) { return FlinkHints.HINT_ALIAS.equalsIgnoreCase(hintName); }
Check if the hint is an alias hint.
isAliasHint
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/hint/FlinkHints.java
Apache-2.0
public Operation convertAlterSchema( SqlAlterTableSchema alterTableSchema, ResolvedCatalogTable oldTable) { SchemaConverter converter = createSchemaConverter(alterTableSchema, oldTable); converter.updateColumn(alterTableSchema.getColumnPositions().getList()); alterTableSchema.getWatermark().ifPresent(converter::updateWatermark); alterTableSchema.getDistribution().ifPresent(converter::updateDistribution); alterTableSchema.getFullConstraint().ifPresent(converter::updatePrimaryKey); return buildAlterTableChangeOperation( alterTableSchema, converter.changesCollector, converter.convert(), oldTable); }
Convert ALTER TABLE ADD | MODIFY (&lt;schema_component&gt; [, &lt;schema_component&gt;, ...]) to generate an updated Schema.
convertAlterSchema
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
Apache-2.0
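A hypothetical DDL that this converter handles; each schema component inside the parentheses becomes one table change. Table and column names are assumptions, and the statement presumes the table already exists.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterAddExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // Adds two columns and a watermark in a single ALTER TABLE ADD list.
        env.executeSql(
                "ALTER TABLE orders ADD ("
                        + " shipping_cost DECIMAL(10, 2),"
                        + " ts TIMESTAMP(3),"
                        + " WATERMARK FOR ts AS ts - INTERVAL '5' SECOND)");
    }
}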
public Operation convertAlterSchema(
        SqlAlterTableRenameColumn renameColumn, ResolvedCatalogTable oldTable) {
    String oldColumnName = getColumnName(renameColumn.getOldColumnIdentifier());
    String newColumnName = getColumnName(renameColumn.getNewColumnIdentifier());
    ReferencesManager.create(oldTable).checkReferences(oldColumnName);
    if (oldTable.getResolvedSchema().getColumn(newColumnName).isPresent()) {
        throw new ValidationException(
                String.format(
                        "%sThe column `%s` already existed in table schema.",
                        EX_MSG_PREFIX, newColumnName));
    }

    // generate new schema
    Schema.Builder schemaBuilder = Schema.newBuilder();
    buildUpdatedColumn(
            schemaBuilder,
            oldTable,
            (builder, column) -> {
                if (column.getName().equals(oldColumnName)) {
                    buildNewColumnFromOldColumn(builder, column, newColumnName);
                } else {
                    builder.fromColumns(Collections.singletonList(column));
                }
            });
    buildUpdatedPrimaryKey(
            schemaBuilder, oldTable, (pk) -> pk.equals(oldColumnName) ? newColumnName : pk);
    buildUpdatedWatermark(schemaBuilder, oldTable);

    return buildAlterTableChangeOperation(
            renameColumn,
            Collections.singletonList(
                    TableChange.modifyColumnName(
                            unwrap(oldTable.getResolvedSchema().getColumn(oldColumnName)),
                            newColumnName)),
            schemaBuilder.build(),
            oldTable);
}
Convert ALTER TABLE RENAME col_name TO new_col_name to generate an updated Schema.
convertAlterSchema
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
Apache-2.0
public Operation convertAlterSchema(
        SqlAlterTableDropColumn dropColumn, ResolvedCatalogTable oldTable) {
    Set<String> columnsToDrop = new HashSet<>();
    dropColumn
            .getColumnList()
            .forEach(
                    identifier -> {
                        String name = getColumnName((SqlIdentifier) identifier);
                        if (!columnsToDrop.add(name)) {
                            throw new ValidationException(
                                    String.format(
                                            "%sDuplicate column `%s`.", EX_MSG_PREFIX, name));
                        }
                    });

    ReferencesManager referencesManager = ReferencesManager.create(oldTable);
    // Sort by dependencies count from smallest to largest. For example, when dropping column a,
    // b(b as a+1), the order should be: [b, a] after sort.
    List<String> sortedColumnsToDrop =
            columnsToDrop.stream()
                    .sorted(
                            Comparator.comparingInt(
                                            col ->
                                                    referencesManager.getColumnDependencyCount(
                                                            (String) col))
                                    .reversed())
                    .collect(Collectors.toList());
    List<TableChange> tableChanges = new ArrayList<>(sortedColumnsToDrop.size());
    for (String columnToDrop : sortedColumnsToDrop) {
        referencesManager.dropColumn(columnToDrop);
        tableChanges.add(TableChange.dropColumn(columnToDrop));
    }

    Schema.Builder schemaBuilder = Schema.newBuilder();
    buildUpdatedColumn(
            schemaBuilder,
            oldTable,
            (builder, column) -> {
                if (!columnsToDrop.contains(column.getName())) {
                    builder.fromColumns(Collections.singletonList(column));
                }
            });
    buildUpdatedPrimaryKey(schemaBuilder, oldTable, Function.identity());
    buildUpdatedWatermark(schemaBuilder, oldTable);

    return buildAlterTableChangeOperation(
            dropColumn, tableChanges, schemaBuilder.build(), oldTable);
}
Convert ALTER TABLE DROP (col1 [, col2, ...]) to generate an updated Schema.
convertAlterSchema
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
Apache-2.0
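A sketch of why the drops are ordered by dependency count: with a computed column b AS a + 1, dropping (a, b) must remove b before a so that a is no longer referenced when it is dropped. Table and column names are assumptions.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class AlterDropExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        env.executeSql(
                "CREATE TABLE t (a INT, b AS a + 1) WITH ('connector' = 'datagen')");
        // The converter sorts the drop list so that b is dropped first, then a.
        env.executeSql("ALTER TABLE t DROP (a, b)");
    }
}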
public Operation convertAlterSchema( SqlAlterTableDropPrimaryKey dropPrimaryKey, ResolvedCatalogTable oldTable) { Optional<UniqueConstraint> pkConstraint = oldTable.getResolvedSchema().getPrimaryKey(); if (!pkConstraint.isPresent()) { throw new ValidationException( String.format( "%sThe base table does not define any primary key.", EX_MSG_PREFIX)); } Schema.Builder schemaBuilder = Schema.newBuilder(); buildUpdatedColumn( schemaBuilder, oldTable, (builder, column) -> builder.fromColumns(Collections.singletonList(column))); buildUpdatedWatermark(schemaBuilder, oldTable); return buildAlterTableChangeOperation( dropPrimaryKey, Collections.singletonList(TableChange.dropConstraint(pkConstraint.get().getName())), schemaBuilder.build(), oldTable); }
Convert ALTER TABLE DROP PRIMARY KEY to generate an updated Schema.
convertAlterSchema
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
Apache-2.0
public Operation convertAlterSchema( SqlAlterTableDropConstraint dropConstraint, ResolvedCatalogTable oldTable) { Optional<UniqueConstraint> pkConstraint = oldTable.getResolvedSchema().getPrimaryKey(); if (!pkConstraint.isPresent()) { throw new ValidationException( String.format( "%sThe base table does not define any primary key.", EX_MSG_PREFIX)); } SqlIdentifier constraintIdentifier = dropConstraint.getConstraintName(); String constraintName = pkConstraint.get().getName(); if (constraintIdentifier != null && !constraintIdentifier.getSimple().equals(constraintName)) { throw new ValidationException( String.format( "%sThe base table does not define a primary key constraint named '%s'. " + "Available constraint name: ['%s'].", EX_MSG_PREFIX, constraintIdentifier.getSimple(), constraintName)); } Schema.Builder schemaBuilder = Schema.newBuilder(); buildUpdatedColumn( schemaBuilder, oldTable, (builder, column) -> builder.fromColumns(Collections.singletonList(column))); buildUpdatedWatermark(schemaBuilder, oldTable); return buildAlterTableChangeOperation( dropConstraint, Collections.singletonList(TableChange.dropConstraint(constraintName)), schemaBuilder.build(), oldTable); }
Convert ALTER TABLE DROP CONSTRAINT constraint_name to generate an updated {@link Schema}.
convertAlterSchema
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/AlterSchemaConverter.java
Apache-2.0
public static Optional<DynamicTableSink> getDynamicTableSink(
        ContextResolvedTable contextResolvedTable, LogicalTableModify tableModify) {
    final FlinkContext context = ShortcutUtils.unwrapContext(tableModify.getCluster());

    CatalogBaseTable catalogBaseTable = contextResolvedTable.getTable();
    // only consider DynamicTableSink
    if (catalogBaseTable instanceof CatalogTable) {
        ResolvedCatalogTable resolvedTable = contextResolvedTable.getResolvedTable();
        Optional<Catalog> optionalCatalog = contextResolvedTable.getCatalog();
        ObjectIdentifier objectIdentifier = contextResolvedTable.getIdentifier();
        boolean isTemporary = contextResolvedTable.isTemporary();
        // only consider the CatalogTable that doesn't use legacy connector sink option
        if (!contextResolvedTable.isAnonymous()
                && !TableFactoryUtil.isLegacyConnectorOptions(
                        context.getTableConfig(),
                        !context.isBatchMode(),
                        objectIdentifier,
                        resolvedTable,
                        isTemporary)) {
            // create table dynamic table sink
            DynamicTableSink tableSink =
                    ExecutableOperationUtils.createDynamicTableSink(
                            optionalCatalog.orElse(null),
                            () ->
                                    context.getModuleManager()
                                            .getFactory((Module::getTableSinkFactory)),
                            objectIdentifier,
                            resolvedTable,
                            Collections.emptyMap(),
                            context.getTableConfig(),
                            context.getClassLoader(),
                            contextResolvedTable.isTemporary());
            return Optional.of(tableSink);
        }
    }
    return Optional.empty();
}
Gets the {@link DynamicTableSink} for the table to be modified, or Optional.empty() if the {@link DynamicTableSink} cannot be created.
getDynamicTableSink
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/DeletePushDownUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/DeletePushDownUtils.java
Apache-2.0
public static Optional<List<ResolvedExpression>> getResolvedFilterExpressions(
        LogicalTableModify tableModify) {
    FlinkContext context = ShortcutUtils.unwrapContext(tableModify.getCluster());
    RelNode input = tableModify.getInput().getInput(0);
    // no WHERE clause, return an empty list
    if (input instanceof LogicalTableScan) {
        return Optional.of(Collections.emptyList());
    }
    if (!(input instanceof LogicalFilter)) {
        return Optional.empty();
    }

    Filter filter = (Filter) input;
    if (RexUtil.SubQueryFinder.containsSubQuery(filter)) {
        return Optional.empty();
    }

    // optimize the filter
    filter = prepareFilter(filter);

    // resolve the filter to get resolved expression
    List<ResolvedExpression> resolveExpression = resolveFilter(context, filter);
    return Optional.ofNullable(resolveExpression);
}
Get the resolved filter expressions from the {@code WHERE} clause in a DELETE statement, returning Optional.empty() if the {@code WHERE} clause contains a sub-query.
getResolvedFilterExpressions
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/DeletePushDownUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/DeletePushDownUtils.java
Apache-2.0
private static Filter prepareFilter(Filter filter) {
    // we try to reduce and simplify the filter
    ReduceExpressionsRuleProxy reduceExpressionsRuleProxy = ReduceExpressionsRuleProxy.INSTANCE;
    SimplifyFilterConditionRule simplifyFilterConditionRule =
            SimplifyFilterConditionRule.INSTANCE();
    // max iteration num for reducing and simplifying filter,
    // we use 5 as the max iteration num which is same with the iteration num in Flink's plan
    // optimizing.
    int maxIteration = 5;

    boolean changed = true;
    int iteration = 1;
    // iterate until it reaches max iteration num or there's no changes in one iterate
    while (changed && iteration <= maxIteration) {
        changed = false;
        // first apply the rule to reduce condition in filter
        RexNode newCondition = filter.getCondition();
        List<RexNode> expList = new ArrayList<>();
        expList.add(newCondition);
        if (reduceExpressionsRuleProxy.reduce(filter, expList)) {
            // get the new condition
            newCondition = expList.get(0);
            changed = true;
        }
        // create a new filter
        filter = filter.copy(filter.getTraitSet(), filter.getInput(), newCondition);
        // then apply the rule to simplify filter
        Option<Filter> changedFilter =
                simplifyFilterConditionRule.simplify(filter, new boolean[] {false});
        if (changedFilter.isDefined()) {
            filter = changedFilter.get();
            changed = true;
        }
        iteration += 1;
    }
    return filter;
}
Prepares the filter by reducing and simplifying its condition.
prepareFilter
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/DeletePushDownUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/DeletePushDownUtils.java
Apache-2.0
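The loop above is a bounded fixed-point iteration. A generic sketch of the same pattern, detached from Calcite; the toy rewrite and the cap of 5 mirror the method, everything else is assumed.

import java.util.function.UnaryOperator;

public class FixpointSketch {
    // Apply a rewrite repeatedly until it stops changing the value or the cap is hit.
    static <T> T fixpoint(T value, UnaryOperator<T> rewrite, int maxIterations) {
        for (int i = 0; i < maxIterations; i++) {
            T next = rewrite.apply(value);
            if (next.equals(value)) {
                return next; // converged
            }
            value = next;
        }
        return value; // hit the iteration cap
    }

    public static void main(String[] args) {
        // Toy rewrite: strip redundant "TRUE AND " prefixes from a condition string.
        String condition = "TRUE AND TRUE AND a > 1";
        System.out.println(fixpoint(condition, c -> c.replaceFirst("^TRUE AND ", ""), 5));
        // prints: a > 1
    }
}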
private static List<ResolvedExpression> resolveFilter(FlinkContext context, Filter filter) {
    Tuple2<RexNode[], RexNode[]> extractedPredicates =
            FlinkRexUtil.extractPredicates(
                    filter.getInput().getRowType().getFieldNames().toArray(new String[0]),
                    filter.getCondition(),
                    filter,
                    filter.getCluster().getRexBuilder());
    RexNode[] convertiblePredicates = extractedPredicates._1;
    RexNode[] unconvertedPredicates = extractedPredicates._2;
    if (unconvertedPredicates.length != 0) {
        // if contain any unconverted condition, return null
        return null;
    }

    RexNodeToExpressionConverter converter =
            new RexNodeToExpressionConverter(
                    filter.getCluster().getRexBuilder(),
                    filter.getInput().getRowType().getFieldNames().toArray(new String[0]),
                    context.getFunctionCatalog(),
                    context.getCatalogManager());
    List<Expression> filters =
            Arrays.stream(convertiblePredicates)
                    .map(
                            p -> {
                                Option<ResolvedExpression> expr = p.accept(converter);
                                if (expr.isDefined()) {
                                    return expr.get();
                                } else {
                                    throw new TableException(
                                            String.format(
                                                    "%s can not be converted to Expression",
                                                    p));
                                }
                            })
                    .collect(Collectors.toList());

    ExpressionResolver resolver =
            ExpressionResolver.resolverFor(
                            context.getTableConfig(),
                            context.getClassLoader(),
                            name -> Optional.empty(),
                            context.getFunctionCatalog()
                                    .asLookup(
                                            str -> {
                                                throw new TableException(
                                                        "We should not need to lookup any expressions at this point");
                                            }),
                            context.getCatalogManager().getDataTypeFactory(),
                            (sqlExpression, inputRowType, outputType) -> {
                                throw new TableException(
                                        "SQL expression parsing is not supported at this location.");
                            })
                    .build();
    return resolver.resolve(filters);
}
Returns the resolved expressions for the given Filter's condition, or null if any predicate cannot be converted.
resolveFilter
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/DeletePushDownUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/DeletePushDownUtils.java
Apache-2.0
public PlannerQueryOperation maybeRewriteQuery(
        CatalogManager catalogManager,
        FlinkPlannerImpl flinkPlanner,
        PlannerQueryOperation origQueryOperation,
        SqlNode origQueryNode,
        ResolvedCatalogTable sinkTable) {
    FlinkCalciteSqlValidator sqlValidator = flinkPlanner.getOrCreateSqlValidator();
    SqlRewriterUtils rewriterUtils = new SqlRewriterUtils(sqlValidator);
    FlinkTypeFactory typeFactory = (FlinkTypeFactory) sqlValidator.getTypeFactory();

    // Only fields that may be persisted will be included in the select query
    RowType sinkRowType =
            ((RowType) sinkTable.getResolvedSchema().toSinkRowDataType().getLogicalType());

    Map<String, Integer> sourceFields =
            IntStream.range(0, origQueryOperation.getResolvedSchema().getColumnNames().size())
                    .boxed()
                    .collect(
                            Collectors.toMap(
                                    origQueryOperation.getResolvedSchema().getColumnNames()
                                            ::get,
                                    Function.identity()));

    // assignedFields contains the new sink fields that are not present in the source
    // and that will be included in the select query
    LinkedHashMap<Integer, SqlNode> assignedFields = new LinkedHashMap<>();

    // targetPositions contains the positions of the source fields that will be
    // included in the select query
    List<Object> targetPositions = new ArrayList<>();

    int pos = -1;
    for (RowType.RowField targetField : sinkRowType.getFields()) {
        pos++;
        if (!sourceFields.containsKey(targetField.getName())) {
            if (!targetField.getType().isNullable()) {
                throw new ValidationException(
                        "Column '"
                                + targetField.getName()
                                + "' has no default value and does not allow NULLs.");
            }
            assignedFields.put(
                    pos,
                    rewriterUtils.maybeCast(
                            SqlLiteral.createNull(SqlParserPos.ZERO),
                            typeFactory.createUnknownType(),
                            typeFactory.createFieldTypeFromLogicalType(targetField.getType()),
                            typeFactory));
        } else {
            targetPositions.add(sourceFields.get(targetField.getName()));
        }
    }

    // rewrite query
    SqlCall newSelect =
            rewriterUtils.rewriteCall(
                    rewriterUtils,
                    sqlValidator,
                    (SqlCall) origQueryNode,
                    typeFactory.buildRelNodeRowType(sinkRowType),
                    assignedFields,
                    targetPositions,
                    () -> "Unsupported node type " + origQueryNode.getKind());

    return (PlannerQueryOperation)
            SqlNodeToOperationConversion.convert(flinkPlanner, catalogManager, newSelect)
                    .orElseThrow(
                            () ->
                                    new TableException(
                                            "Unsupported node type "
                                                    + newSelect.getClass().getSimpleName()));
}
Rewrites the query operation to include only the fields that may be persisted in the sink.
maybeRewriteQuery
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableAsUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableAsUtil.java
Apache-2.0
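A hypothetical CTAS illustrating the rewrite: the declared sink column note is absent from the query, so the SELECT is padded with CAST(NULL AS STRING) for it, which is why the column must be nullable. Table names, connectors, and the exact padding expression are assumptions.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CtasRewriteExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        env.executeSql("CREATE TABLE src (id INT) WITH ('connector' = 'datagen')");
        // 'note' is not produced by the query; the rewrite fills it with a NULL cast.
        env.executeSql(
                "CREATE TABLE dst (id INT, note STRING)"
                        + " WITH ('connector' = 'print')"
                        + " AS SELECT id FROM src");
    }
}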
public Schema mergeSchemas(
        SqlNodeList sqlColumnList,
        @Nullable SqlWatermark sqlWatermark,
        List<SqlTableConstraint> sqlTableConstraints,
        ResolvedSchema sourceSchema) {
    SchemaBuilder schemaBuilder =
            new SchemaBuilder(
                    (FlinkTypeFactory) validator.getTypeFactory(),
                    dataTypeFactory,
                    validator,
                    escapeExpression);

    schemaBuilder.mergeColumns(
            sqlColumnList,
            Schema.newBuilder().fromResolvedSchema(sourceSchema).build().getColumns());

    if (sqlWatermark != null) {
        schemaBuilder.setWatermark(sqlWatermark);
    }

    // It is assumed only a primary key constraint may be defined in the table. The
    // SqlCreateTableAs has validations to ensure this before the object is created.
    Optional<SqlTableConstraint> primaryKey =
            sqlTableConstraints.stream().filter(SqlTableConstraint::isPrimaryKey).findAny();
    if (primaryKey.isPresent()) {
        schemaBuilder.setPrimaryKey(primaryKey.get());
    }

    return schemaBuilder.build();
}
Merges the specified columns, watermark, and constraints with the {@code sourceSchema}. <p>The resulting schema will contain the following elements: <ul> <li>columns <li>computed columns <li>metadata columns <li>watermarks <li>primary key </ul> <p>It is expected that the {@code sourceSchema} contains only physical/regular columns. <p>Columns of the {@code sourceSchema} are appended to the schema columns defined in the {@code sqlColumnList}. If a column in the {@code sqlColumnList} is already defined in the {@code sourceSchema}, then the column types are implicitly cast and must be compatible based on the implicit cast rules. If they are compatible, the column keeps the position it has in the appended {@code sourceSchema}.
mergeSchemas
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableAsUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableAsUtil.java
Apache-2.0
private void mergeColumns(List<SqlNode> sinkCols, List<UnresolvedColumn> sourceCols) {
    Map<String, UnresolvedColumn> sinkSchemaCols = new LinkedHashMap<>();
    Map<String, UnresolvedColumn> sourceSchemaCols = new LinkedHashMap<>();

    populateColumnsFromSource(sourceCols, sourceSchemaCols);

    int sinkColumnPos = -1;
    for (SqlNode sinkColumn : sinkCols) {
        String name = ((SqlTableColumn) sinkColumn).getName().getSimple();
        sinkColumnPos++;

        if (sinkSchemaCols.containsKey(name)) {
            throw new ValidationException(
                    String.format(
                            "A column named '%s' already exists in the schema. ", name));
        }

        UnresolvedColumn unresolvedSinkColumn;

        if (sinkColumn instanceof SqlRegularColumn) {
            unresolvedSinkColumn = toUnresolvedPhysicalColumn((SqlRegularColumn) sinkColumn);
            regularAndMetadataFieldNamesToTypes.put(
                    name, toRelDataType(((SqlRegularColumn) sinkColumn).getType()));
        } else if (sinkColumn instanceof SqlMetadataColumn) {
            unresolvedSinkColumn = toUnresolvedMetadataColumn((SqlMetadataColumn) sinkColumn);
            regularAndMetadataFieldNamesToTypes.put(
                    name, toRelDataType(((SqlMetadataColumn) sinkColumn).getType()));
        } else if (sinkColumn instanceof SqlComputedColumn) {
            final SqlNode validatedExpr =
                    sqlValidator.validateParameterizedExpression(
                            ((SqlComputedColumn) sinkColumn).getExpr(),
                            regularAndMetadataFieldNamesToTypes);
            unresolvedSinkColumn =
                    toUnresolvedComputedColumn(
                            (SqlComputedColumn) sinkColumn, validatedExpr);
            computeFieldNamesToTypes.put(
                    name, sqlValidator.getValidatedNodeType(validatedExpr));
        } else {
            throw new ValidationException("Unsupported column type: " + sinkColumn);
        }

        if (sourceSchemaCols.containsKey(name)) {
            // If the column is already defined in the source schema, then check if
            // the types are compatible.
            validateImplicitCastCompatibility(
                    name, sinkColumnPos, sourceSchemaCols.get(name), unresolvedSinkColumn);

            // Replace the source schema column with the new sink schema column, which
            // keeps the position of the source schema column but with the data type
            // of the sink column.
            sourceSchemaCols.put(name, unresolvedSinkColumn);
        } else {
            sinkSchemaCols.put(name, unresolvedSinkColumn);
        }
    }

    columns.clear();
    columns.putAll(sinkSchemaCols);
    columns.putAll(sourceSchemaCols);
}
Merges the sink columns with the source columns. The resulting schema will contain the columns of the sink schema first, followed by the columns of the source schema. <p>If a column in the sink schema is already defined in the source schema, then the sink column's type overrides the source column's type, while the column position in the schema stays the same as defined in the source schema. <p>Overridden column types follow the same implicit cast rules defined for INSERT INTO statements.
mergeColumns
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableAsUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableAsUtil.java
Apache-2.0
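A hypothetical CTAS illustrating the override rule: the sink declares id as BIGINT while the query produces an INT, so the sink type wins (INT implicitly casts to BIGINT) and id keeps its source position ahead of name. Names and connectors are assumptions.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CtasTypeOverrideExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        env.executeSql(
                "CREATE TABLE src (id INT, name STRING) WITH ('connector' = 'datagen')");
        // Merged schema: (id BIGINT, name STRING), with id keeping its source position.
        env.executeSql(
                "CREATE TABLE dst (id BIGINT) WITH ('connector' = 'print')"
                        + " AS SELECT id, name FROM src");
    }
}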
public Map<FeatureOption, MergingStrategy> computeMergingStrategies( List<SqlTableLikeOption> mergingOptions) { Map<FeatureOption, MergingStrategy> result = new HashMap<>(defaultMergingStrategies); Optional<SqlTableLikeOption> maybeAllOption = mergingOptions.stream() .filter(option -> option.getFeatureOption() == FeatureOption.ALL) .findFirst(); maybeAllOption.ifPresent( (allOption) -> { MergingStrategy strategy = allOption.getMergingStrategy(); for (FeatureOption featureOption : FeatureOption.values()) { if (featureOption != FeatureOption.ALL) { result.put(featureOption, strategy); } } }); for (SqlTableLikeOption mergingOption : mergingOptions) { result.put(mergingOption.getFeatureOption(), mergingOption.getMergingStrategy()); } return result; }
Calculates merging strategies for all options. It applies options given by a user to the {@link #defaultMergingStrategies}. The {@link MergingStrategy} specified for {@link FeatureOption#ALL} overwrites all the default options. Those can be further changed with a specific {@link FeatureOption}.
computeMergingStrategies
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableLikeUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableLikeUtil.java
Apache-2.0
public Schema mergeTables( Map<FeatureOption, MergingStrategy> mergingStrategies, Schema sourceSchema, List<SqlNode> derivedColumns, List<SqlWatermark> derivedWatermarkSpecs, SqlTableConstraint derivedPrimaryKey) { SchemaBuilder schemaBuilder = new SchemaBuilder( mergingStrategies, sourceSchema, (FlinkTypeFactory) validator.getTypeFactory(), dataTypeFactory, validator, escapeExpression); schemaBuilder.appendDerivedColumns(mergingStrategies, derivedColumns); schemaBuilder.appendDerivedWatermarks(mergingStrategies, derivedWatermarkSpecs); schemaBuilder.appendDerivedPrimaryKey(derivedPrimaryKey); return schemaBuilder.build(); }
Merges the schema part of a {@code CREATE TABLE} statement. It merges <ul> <li>columns <li>computed columns <li>watermarks <li>primary key </ul> <p>Additionally, it performs validation of the features of the derived table. This is no longer done in {@link SqlCreateTable#validate()} because the validation should be done on top of the merged properties. E.g. some of the columns used in computed columns of the derived table can be defined in the source table.
mergeTables
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableLikeUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableLikeUtil.java
Apache-2.0
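A hypothetical CREATE TABLE ... LIKE that this merge handles: the derived table adds a watermark on a column defined only in the source table, which is exactly why validation has to run on the merged schema rather than in SqlCreateTable#validate(). Names and the connector are assumptions.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class CreateTableLikeExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        env.executeSql(
                "CREATE TABLE base (id INT, ts TIMESTAMP(3)) WITH ('connector' = 'datagen')");
        // 'ts' is defined only in 'base'; the watermark validates against the merged schema.
        env.executeSql(
                "CREATE TABLE derived ("
                        + " WATERMARK FOR ts AS ts - INTERVAL '5' SECOND"
                        + ") LIKE base (EXCLUDING OPTIONS)");
    }
}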
public Optional<TableDistribution> mergeDistribution( MergingStrategy mergingStrategy, Optional<TableDistribution> sourceTableDistribution, Optional<TableDistribution> derivedTabledDistribution) { if (derivedTabledDistribution.isPresent() && sourceTableDistribution.isPresent() && mergingStrategy != MergingStrategy.EXCLUDING) { throw new ValidationException( "The base table already has a distribution defined. You might want to specify " + "EXCLUDING DISTRIBUTION."); } if (derivedTabledDistribution.isPresent()) { return derivedTabledDistribution; } return sourceTableDistribution; }
Merges the distribution part of {@code CREATE TABLE} statement. <p>Distribution is a single property of a Table, thus there can be at most a single instance of it. Therefore, it is not possible to use {@link MergingStrategy#INCLUDING} with a distribution defined in both source and derived table.
mergeDistribution
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableLikeUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableLikeUtil.java
Apache-2.0
public List<String> mergePartitions( MergingStrategy mergingStrategy, List<String> sourcePartitions, List<String> derivedPartitions) { if (!derivedPartitions.isEmpty() && !sourcePartitions.isEmpty() && mergingStrategy != MergingStrategy.EXCLUDING) { throw new ValidationException( "The base table already has partitions defined. You might want to specify " + "EXCLUDING PARTITIONS."); } if (!derivedPartitions.isEmpty()) { return derivedPartitions; } return sourcePartitions; }
Merges the partitions part of {@code CREATE TABLE} statement. <p>Partitioning is a single property of a Table, thus there can be at most a single instance of partitioning. Therefore, it is not possible to use {@link MergingStrategy#INCLUDING} with partitioning defined in both source and derived table.
mergePartitions
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableLikeUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/MergeTableLikeUtil.java
Apache-2.0
public Schema build() { Schema.Builder resultBuilder = Schema.newBuilder(); resultBuilder.fromColumns(new ArrayList<>(columns.values())); for (UnresolvedWatermarkSpec watermarkSpec : watermarkSpecs.values()) { resultBuilder.watermark( watermarkSpec.getColumnName(), watermarkSpec.getWatermarkExpression()); } if (primaryKey != null) { resultBuilder.primaryKeyNamed( primaryKey.getConstraintName(), primaryKey.getColumnNames().toArray(new String[0])); } return resultBuilder.build(); }
Builds and returns a {@link Schema} from the columns, watermark specs, and primary key specified in the builder.
build
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
Apache-2.0
static void verifyRowtimeAttribute( SqlWatermark sqlWatermark, Map<String, RelDataType> allFieldsTypes) { SqlIdentifier eventTimeColumnName = sqlWatermark.getEventTimeColumnName(); String fullRowtimeExpression = eventTimeColumnName.toString(); List<String> components = eventTimeColumnName.names; if (!allFieldsTypes.containsKey(components.get(0))) { throw new ValidationException( String.format( "The rowtime attribute field '%s' is not defined in the table schema, at %s\n" + "Available fields: [%s]", fullRowtimeExpression, eventTimeColumnName.getParserPosition(), allFieldsTypes.keySet().stream() .collect(Collectors.joining("', '", "'", "'")))); } if (components.size() > 1) { RelDataType componentType = allFieldsTypes.get(components.get(0)); for (int i = 1; i < components.size(); i++) { RelDataTypeField field = componentType.getField(components.get(i), true, false); if (field == null) { throw new ValidationException( String.format( "The rowtime attribute field '%s' is not defined in the table schema, at %s\n" + "Nested field '%s' was not found in a composite type: %s.", fullRowtimeExpression, eventTimeColumnName.getComponent(i).getParserPosition(), components.get(i), FlinkTypeFactory.toLogicalType( allFieldsTypes.get(components.get(0))))); } componentType = field.getType(); } } }
Verify the watermark rowtime attribute is part of the table schema specified in the {@code allFieldsTypes}. @param sqlWatermark The watermark with the rowtime attribute to verify. @param allFieldsTypes The table schema to verify the rowtime attribute against.
verifyRowtimeAttribute
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
Apache-2.0
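A hypothetical DDL exercising the nested-field branch of the check above: the watermark references payload.ts, so each path component must resolve inside the composite type. Names and the connector are assumptions.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class NestedRowtimeExample {
    public static void main(String[] args) {
        TableEnvironment env =
                TableEnvironment.create(EnvironmentSettings.inStreamingMode());
        // 'payload' must exist, and 'ts' must be a field of its ROW type.
        env.executeSql(
                "CREATE TABLE events ("
                        + " payload ROW<ts TIMESTAMP(3), v INT>,"
                        + " WATERMARK FOR payload.ts AS payload.ts - INTERVAL '1' SECOND"
                        + ") WITH ('connector' = 'datagen')");
    }
}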
protected UnresolvedPhysicalColumn toUnresolvedPhysicalColumn(SqlRegularColumn column) { final String name = column.getName().getSimple(); final Optional<String> comment = getComment(column); final LogicalType logicalType = toLogicalType(toRelDataType(column.getType())); return new UnresolvedPhysicalColumn( name, fromLogicalToDataType(logicalType), comment.orElse(null)); }
Converts a {@link SqlRegularColumn} to an {@link UnresolvedPhysicalColumn} object.
toUnresolvedPhysicalColumn
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
Apache-2.0
UnresolvedComputedColumn toUnresolvedComputedColumn( SqlComputedColumn column, SqlNode validatedExpression) { final String name = column.getName().getSimple(); final Optional<String> comment = getComment(column); return new UnresolvedComputedColumn( name, new SqlCallExpression(escapeExpressions.apply(validatedExpression)), comment.orElse(null)); }
Converts a {@link SqlComputedColumn} to an {@link UnresolvedComputedColumn} object.
toUnresolvedComputedColumn
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
Apache-2.0
UnresolvedMetadataColumn toUnresolvedMetadataColumn(SqlMetadataColumn column) { final String name = column.getName().getSimple(); final Optional<String> comment = getComment(column); final LogicalType logicalType = toLogicalType(toRelDataType(column.getType())); return new UnresolvedMetadataColumn( name, fromLogicalToDataType(logicalType), column.getMetadataAlias().orElse(null), column.isVirtual(), comment.orElse(null)); }
Converts a {@link SqlMetadataColumn} to an {@link UnresolvedMetadataColumn} object.
toUnresolvedMetadataColumn
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
Apache-2.0
UnresolvedWatermarkSpec toUnresolvedWatermarkSpec(
        SqlWatermark watermark, Map<String, RelDataType> accessibleFieldNamesToTypes) {
    // this will validate and expand function identifiers.
    SqlNode validated =
            sqlValidator.validateParameterizedExpression(
                    watermark.getWatermarkStrategy(), accessibleFieldNamesToTypes);
    return new UnresolvedWatermarkSpec(
            watermark.getEventTimeColumnName().toString(),
            new SqlCallExpression(escapeExpressions.apply(validated)));
}
Converts a {@link SqlWatermark} to an {@link UnresolvedWatermarkSpec} object.
toUnresolvedWatermarkSpec
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
Apache-2.0
public UnresolvedPrimaryKey toUnresolvedPrimaryKey(SqlTableConstraint primaryKey) { List<String> columnNames = primaryKey.getColumns().getList().stream() .map(n -> ((SqlIdentifier) n).getSimple()) .collect(Collectors.toList()); String constraintName = primaryKey .getConstraintName() .orElseGet( () -> columnNames.stream() .collect(Collectors.joining("_", "PK_", ""))); return new UnresolvedPrimaryKey(constraintName, columnNames); }
Converts a {@link SqlTableConstraint} to an {@link UnresolvedPrimaryKey} object.
toUnresolvedPrimaryKey
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
Apache-2.0
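A minimal sketch of the default-name rule in isolation: when no constraint name is given, the key columns are joined with '_' under a 'PK_' prefix, exactly as in the orElseGet branch above.

import java.util.List;
import java.util.stream.Collectors;

public class PrimaryKeyNameSketch {
    public static void main(String[] args) {
        List<String> columnNames = List.of("user_id", "order_id");
        // Collectors.joining(delimiter, prefix, suffix) builds the default name.
        String constraintName =
                columnNames.stream().collect(Collectors.joining("_", "PK_", ""));
        System.out.println(constraintName); // PK_user_id_order_id
    }
}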
LogicalType getLogicalType(UnresolvedPhysicalColumn column) { return dataTypeFactory.createDataType(column.getDataType()).getLogicalType(); }
Gets the column data type of an {@link UnresolvedPhysicalColumn} and converts it to a {@link LogicalType}.
getLogicalType
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SchemaBuilderUtil.java
Apache-2.0
public static Optional<Operation> convert(
        FlinkPlannerImpl flinkPlanner, CatalogManager catalogManager, SqlNode sqlNode) {
    // validate the query
    final SqlNode validated = flinkPlanner.validate(sqlNode);
    return convertValidatedSqlNode(flinkPlanner, catalogManager, validated);
}
This is the main entrance for executing all kinds of DDL/DML {@code SqlNode}s; each kind of SqlNode has its implementation in a #convert(type) method whose 'type' argument is a subclass of {@code SqlNode}. @param flinkPlanner FlinkPlannerImpl to convert the sql node to a rel node @param catalogManager CatalogManager to resolve full path for operations @param sqlNode SqlNode to execute on
convert
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlNodeToOperationConversion.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlNodeToOperationConversion.java
Apache-2.0
private static AlterTableChangeOperation convertAlterTableDropDistribution(
        SqlAlterTable sqlAlterTable,
        ResolvedCatalogTable resolvedCatalogTable,
        ObjectIdentifier tableIdentifier) {
    if (!resolvedCatalogTable.getDistribution().isPresent()) {
        throw new ValidationException(
                String.format(
                        "Table %s does not have a distribution to drop.", tableIdentifier));
    }
    List<TableChange> tableChanges = Collections.singletonList(TableChange.dropDistribution());
    CatalogTable.Builder builder =
            CatalogTable.newBuilder()
                    .comment(resolvedCatalogTable.getComment())
                    .options(resolvedCatalogTable.getOptions())
                    .schema(resolvedCatalogTable.getUnresolvedSchema())
                    .partitionKeys(resolvedCatalogTable.getPartitionKeys());
    resolvedCatalogTable.getSnapshot().ifPresent(builder::snapshot);
    CatalogTable newTable = builder.build();
    return new AlterTableChangeOperation(
            tableIdentifier, tableChanges, newTable, sqlAlterTable.ifTableExists());
}
Convert ALTER TABLE DROP DISTRIBUTION statement.
convertAlterTableDropDistribution
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlNodeToOperationConversion.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlNodeToOperationConversion.java
Apache-2.0
private FunctionLanguage parseLanguage(String languageString) { if (StringUtils.isNullOrWhitespaceOnly(languageString)) { return FunctionLanguage.JAVA; } FunctionLanguage language; try { language = FunctionLanguage.valueOf(languageString); } catch (IllegalArgumentException e) { throw new UnsupportedOperationException( String.format("Unrecognized function language string %s", languageString), e); } return language; }
Converts a language string to the FunctionLanguage. @param languageString the language string from the SQL parser @return the supported FunctionLanguage @throws UnsupportedOperationException if the languageString is not parsable or the language is not supported
parseLanguage
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlNodeToOperationConversion.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlNodeToOperationConversion.java
Apache-2.0
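A small sketch of the valueOf behavior the method relies on: Enum.valueOf is case-sensitive, so only exact enum names parse and anything else surfaces as the converter's UnsupportedOperationException. The enum below is a stand-in, not Flink's actual FunctionLanguage.

public class LanguageParseSketch {
    enum FunctionLanguage { JAVA, SCALA, PYTHON }

    public static void main(String[] args) {
        System.out.println(FunctionLanguage.valueOf("JAVA")); // parses fine
        try {
            FunctionLanguage.valueOf("java"); // exact match required
        } catch (IllegalArgumentException e) {
            System.out.println("unrecognized language: java");
        }
    }
}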
private Operation convertDescribeTable(SqlRichDescribeTable sqlRichDescribeTable) { UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(sqlRichDescribeTable.fullTableName()); ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier); return new DescribeTableOperation(identifier, sqlRichDescribeTable.isExtended()); }
Convert DESCRIBE [EXTENDED] [[catalogName.] databaseName.]sqlIdentifier.
convertDescribeTable
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlNodeToOperationConversion.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/SqlNodeToOperationConversion.java
Apache-2.0
private Schema getSchema(SqlNodeList nodeList, boolean isInput) { columns.clear(); String schemaType = isInput ? "input" : "output"; for (SqlNode column : nodeList) { if (column instanceof SqlRegularColumn) { SqlRegularColumn regularColumn = (SqlRegularColumn) column; String name = regularColumn.getName().getSimple(); if (columns.containsKey(name)) { throw new ValidationException( "Duplicate " + schemaType + " column name: '" + name + "'."); } columns.put(name, toUnresolvedPhysicalColumn(regularColumn)); } else { throw new ValidationException( "Column " + column + " can only be a physical column."); } } return build(); }
Builds an input or output {@link Schema} of a model from the given list of columns; only physical columns are allowed.
getSchema
java
apache/flink
flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlCreateModelConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/operations/converters/SqlCreateModelConverter.java
Apache-2.0