code
stringlengths
25
201k
docstring
stringlengths
19
96.2k
func_name
stringlengths
0
235
language
stringclasses
1 value
repo
stringlengths
8
51
path
stringlengths
11
314
url
stringlengths
62
377
license
stringclasses
7 values
/**
 * Translates a SQL LIKE pattern to a Java regex pattern, with an optional escape string.
 *
 * @param sqlPattern the SQL LIKE pattern to translate
 * @param escapeStr optional escape string; must be exactly one character when non-null
 * @return the equivalent Java regex pattern
 */
public static String sqlToRegexLike(String sqlPattern, CharSequence escapeStr) {
    // Backslash is the default escape character when none is supplied.
    char escapeChar = '\\';
    if (escapeStr != null) {
        // SQL requires the ESCAPE clause to contain exactly one character.
        if (escapeStr.length() != 1) {
            throw invalidEscapeCharacter(escapeStr.toString());
        }
        escapeChar = escapeStr.charAt(0);
    }
    return sqlToRegexLike(sqlPattern, escapeChar);
}
Translates a SQL LIKE pattern to Java regex pattern, with optional escape string.
sqlToRegexLike
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java
Apache-2.0
/**
 * Translates a SQL SIMILAR pattern to a Java regex pattern, with an optional escape string.
 *
 * @param sqlPattern the SQL SIMILAR pattern to translate
 * @param escapeStr optional escape string; must be exactly one character when non-null
 * @return the equivalent Java regex pattern
 */
public static String sqlToRegexSimilar(String sqlPattern, CharSequence escapeStr) {
    // A zero char signals "no escape character" to the char-based overload.
    char escapeChar = 0;
    if (escapeStr != null) {
        // SQL requires the ESCAPE clause to contain exactly one character.
        if (escapeStr.length() != 1) {
            throw invalidEscapeCharacter(escapeStr.toString());
        }
        escapeChar = escapeStr.charAt(0);
    }
    return sqlToRegexSimilar(sqlPattern, escapeChar);
}
Translates a SQL SIMILAR pattern to Java regex pattern, with optional escape string.
sqlToRegexSimilar
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java
Apache-2.0
/**
 * Translates a SQL SIMILAR pattern to a Java regex pattern.
 *
 * <p>Rewrites SQL wildcards ({@code _} -> {@code .}, {@code %} -> {@code (?s:.*)}), passes
 * character enumerations ({@code [...]}) through (delegating their interior to
 * {@code sqlSimilarRewriteCharEnumeration}), and escapes characters that are special in Java
 * regex but ordinary in SQL regex ({@code \} and {@code $}).
 *
 * @param sqlPattern the SQL SIMILAR pattern to translate
 * @param escapeChar the escape character; escape sequences are validated up front by
 *     {@code similarEscapeRuleChecking}
 * @return the equivalent Java regex pattern
 */
public static String sqlToRegexSimilar(String sqlPattern, char escapeChar) {
    // Rejects malformed escape sequences before translation begins.
    similarEscapeRuleChecking(sqlPattern, escapeChar);

    boolean insideCharacterEnumeration = false;
    // Worst case every char needs a backslash prefix, hence length * 2.
    final StringBuilder javaPattern = new StringBuilder(sqlPattern.length() * 2);
    final int len = sqlPattern.length();
    for (int i = 0; i < len; i++) {
        char c = sqlPattern.charAt(i);
        if (c == escapeChar) {
            if (i == (len - 1)) {
                // It should never reach here after the escape rule
                // checking.
                throw invalidEscapeSequence(sqlPattern, i);
            }
            char nextChar = sqlPattern.charAt(i + 1);
            if (SQL_SIMILAR_SPECIALS.indexOf(nextChar) >= 0) {
                // special character, use \ to replace the escape char.
                if (JAVA_REGEX_SPECIALS.indexOf(nextChar) >= 0) {
                    javaPattern.append('\\');
                }
                javaPattern.append(nextChar);
            } else if (nextChar == escapeChar) {
                // Doubled escape char stands for the escape char itself.
                javaPattern.append(nextChar);
            } else {
                // It should never reach here after the escape rule
                // checking.
                throw invalidEscapeSequence(sqlPattern, i);
            }
            i++; // we already process the next char.
        } else {
            switch (c) {
                case '_':
                    // SQL single-char wildcard -> regex "any single char".
                    javaPattern.append('.');
                    break;
                case '%':
                    // SQL multi-char wildcard; (?s:...) makes '.' match newlines too.
                    javaPattern.append("(?s:.*)");
                    break;
                case '[':
                    javaPattern.append('[');
                    insideCharacterEnumeration = true;
                    // Delegates the interior of the character class.
                    i = sqlSimilarRewriteCharEnumeration(sqlPattern, javaPattern, i, escapeChar);
                    break;
                case ']':
                    // A closing bracket without a matching '[' is malformed.
                    if (!insideCharacterEnumeration) {
                        throw invalidRegularExpression(sqlPattern, i);
                    }
                    insideCharacterEnumeration = false;
                    javaPattern.append(']');
                    break;
                case '\\':
                    // Literal backslash must be escaped for Java regex.
                    javaPattern.append("\\\\");
                    break;
                case '$':
                    // $ is special character in java regex, but regular in
                    // SQL regex.
                    javaPattern.append("\\$");
                    break;
                default:
                    javaPattern.append(c);
            }
        }
    }
    // Unterminated character enumeration at end of pattern.
    if (insideCharacterEnumeration) {
        throw invalidRegularExpression(sqlPattern, len);
    }
    return javaPattern.toString();
}
Translates SQL SIMILAR pattern to Java regex pattern.
sqlToRegexSimilar
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java
Apache-2.0
/**
 * Loads a module under a unique name. Modules are kept in loading order, and a newly loaded
 * module is marked as used by default.
 *
 * @param name name of the module; must be non-null and not blank
 * @param module the module instance; must be non-null
 * @throws ValidationException when a module with the same name already exists
 */
public void loadModule(String name, Module module) {
    checkArgument(
            !StringUtils.isNullOrWhitespaceOnly(name), "name cannot be null or empty string");
    checkNotNull(module, "module cannot be null");

    // Names must be unique across all loaded modules.
    if (loadedModules.containsKey(name)) {
        throw new ValidationException(
                String.format("A module with name '%s' already exists", name));
    }

    // New modules are enabled (used) immediately after loading.
    usedModules.add(name);
    loadedModules.put(name, module);
    LOG.info("Loaded module '{}' from class {}", name, module.getClass().getName());
}
Loads a module under a unique name. Modules are kept in their loading order, and a newly loaded module is marked as used by default. @param name name of the module @param module the module instance @throws ValidationException when a module with the same name already exists
loadModule
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
Apache-2.0
/**
 * Unloads the module with the given name, removing it from both the loaded and the used set.
 *
 * @param name name of the module
 * @throws ValidationException when no module with the given name exists
 */
public void unloadModule(String name) {
    if (!loadedModules.containsKey(name)) {
        throw new ValidationException(String.format("No module with name '%s' exists", name));
    }
    loadedModules.remove(name);
    // The module may or may not be in use; record which case we hit.
    boolean used = usedModules.remove(name);
    LOG.info("Unloaded an {} module '{}'", used ? "used" : "unused", name);
}
Unload a module with given name. @param name name of the module @throws ValidationException when there is no module with the given name
unloadModule
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
Apache-2.0
/**
 * Enables exactly the given modules, in the declared name order. Loaded modules that are not
 * listed become unused.
 *
 * @param names module names to be used; each must already be loaded and appear only once
 * @throws ValidationException when a name is not loaded or appears more than once
 */
public void useModules(String... names) {
    checkNotNull(names, "names cannot be null");
    // Validate every name before mutating any state, so failures leave the
    // manager unchanged.
    Set<String> seen = new HashSet<>();
    for (String name : names) {
        if (!loadedModules.containsKey(name)) {
            throw new ValidationException(
                    String.format("No module with name '%s' exists", name));
        }
        if (!seen.add(name)) {
            throw new ValidationException(
                    String.format("Module '%s' appears more than once", name));
        }
    }
    usedModules.clear();
    usedModules.addAll(Arrays.asList(names));
}
Enable modules in use with declared name order. Modules that have been loaded but not exist in names varargs will become unused. @param names module names to be used @throws ValidationException when module names contain an unloaded name
useModules
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
Apache-2.0
/**
 * Returns the names of all used modules in resolution order.
 *
 * @return a fresh, mutable list of used module names
 */
public List<String> listModules() {
    // Copy so callers cannot mutate the internal used-module list.
    List<String> names = new ArrayList<>();
    names.addAll(usedModules);
    return names;
}
Get names of all used modules in resolution order. @return a list of names of used modules
listModules
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
Apache-2.0
public List<ModuleEntry> listFullModules() { // keep the order for used modules List<ModuleEntry> moduleEntries = usedModules.stream() .map(name -> new ModuleEntry(name, true)) .collect(Collectors.toList()); loadedModules.keySet().stream() .filter(name -> !usedModules.contains(name)) .forEach(name -> moduleEntries.add(new ModuleEntry(name, false))); return moduleEntries; }
Get all loaded modules with use status. Modules in use status are returned in resolution order. @return a list of module entries with module name and use status
listFullModules
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
Apache-2.0
/**
 * Returns the names of all functions from the used modules, excluding hidden functions.
 *
 * @return a set of function names of used modules
 */
public Set<String> listFunctions() {
    Set<String> functions = new HashSet<>();
    // listFunctions(false) excludes hidden functions.
    for (String moduleName : usedModules) {
        functions.addAll(loadedModules.get(moduleName).listFunctions(false));
    }
    return functions;
}
Get names of all functions from used modules. It excludes hidden functions. @return a set of function names of used modules
listFunctions
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
Apache-2.0
/**
 * Looks up a {@link FunctionDefinition} by name across the used modules, in resolution order;
 * the first module whose function listing matches (case-insensitively) wins.
 *
 * <p>Hidden functions are included in the lookup ({@code listFunctions(true)}) even though they
 * are not reported by {@link #listFunctions()}.
 *
 * @param name name of the function (matched case-insensitively)
 * @return the function definition, or {@link Optional#empty()} if no used module provides it
 */
public Optional<FunctionDefinition> getFunctionDefinition(String name) {
    for (String moduleName : usedModules) {
        Module module = loadedModules.get(moduleName);
        // true = also search hidden functions.
        boolean providesFunction =
                module.listFunctions(true).stream().anyMatch(name::equalsIgnoreCase);
        if (providesFunction) {
            LOG.debug("Got FunctionDefinition '{}' from '{}' module.", name, moduleName);
            return module.getFunctionDefinition(name);
        }
    }
    LOG.debug("Cannot find FunctionDefinition '{}' from any loaded modules.", name);
    return Optional.empty();
}
Get an optional of {@link FunctionDefinition} by a given name. The function is resolved against the used modules in order, and the first match is returned. If no match is found in any module, an empty optional is returned. <p>It includes hidden functions even though they are not listed in {@link #listFunctions()}. @param name name of the function @return an optional of {@link FunctionDefinition}
getFunctionDefinition
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/module/ModuleManager.java
Apache-2.0
/** Returns the human-readable summary of this operation: a BEGIN STATEMENT SET statement. */
@Override
public String asSummaryString() {
    return "BEGIN STATEMENT SET";
}
Operation to describe a BEGIN STATEMENT SET statement.
asSummaryString
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/BeginStatementSetOperation.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/BeginStatementSetOperation.java
Apache-2.0
/** Returns the human-readable summary of this no-op operation. */
@Override
public String asSummaryString() {
    return "NOP";
}
An {@link Operation} to represent that nothing needs to be done.
asSummaryString
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/NopOperation.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/NopOperation.java
Apache-2.0
public static String indent(String item) { // '([^']|'')*': Matches the escape sequence "'...'" where the content between "'" // characters can contain anything except "'" unless its doubled (''). // // Then each match is checked. If it starts with "'", it's left unchanged // (escaped sequence). Otherwise, it replaces newlines within the match with indent. Pattern pattern = Pattern.compile("('([^']|'')*')|\\n"); Matcher matcher = pattern.matcher(item); StringBuffer output = new StringBuffer(); while (matcher.find()) { final String group = matcher.group(); if (group.startsWith("'")) { matcher.appendReplacement(output, Matcher.quoteReplacement(group)); } else { String replaced = group.replaceAll("\n", "\n" + OPERATION_INDENT); matcher.appendReplacement(output, Matcher.quoteReplacement(replaced)); } } matcher.appendTail(output); return "\n" + OPERATION_INDENT + output; }
Increases indentation for description of string of child {@link Operation}. The input can already contain indentation. This will increase all the indentations by one level. @param item result of {@link Operation#asSummaryString()} @return string with increased indentation
indent
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/OperationUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/OperationUtils.java
Apache-2.0
/**
 * Returns the {@link DynamicTableSink} kept by this operation so it can be reused when the
 * operation is translated again, instead of re-creating a new sink.
 */
public DynamicTableSink getDynamicTableSink() {
    return dynamicTableSink;
}
DML operation that describes writing to a sink which implements {@link SupportsStaging}. Currently, this operation is for the CTAS (CREATE TABLE AS SELECT) and RTAS ([CREATE OR] REPLACE TABLE AS SELECT) statements. <p>StagedSinkModifyOperation is an extension of SinkModifyOperation for the atomic CTAS/RTAS scenario. While checking whether the corresponding sink supports atomic CTAS/RTAS, we first need to get the DynamicTableSink, check whether it implements {@link SupportsStaging}, and then call {@link SupportsStaging#applyStaging}. The DynamicTableSink is kept in this operation so that it can be reused instead of creating a new DynamicTableSink while translating the operation again, which is error-prone.
getDynamicTableSink
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/StagedSinkModifyOperation.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/StagedSinkModifyOperation.java
Apache-2.0
/**
 * Slide of a {@link WindowType#SLIDE} window. Empty for other window types.
 *
 * @return slide of a sliding window, or {@link Optional#empty()} when the window has no slide
 */
public Optional<ValueLiteralExpression> getSlide() {
    // Optional.of would throw a NullPointerException when the slide field is
    // null (i.e. for non-SLIDE windows), contradicting the documented
    // "empty for other windows" contract — use ofNullable instead.
    return Optional.ofNullable(slide);
}
Slide of {@link WindowType#SLIDE} window. Empty for other windows. @return slide of a slide window
getSlide
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/WindowAggregateQueryOperation.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/WindowAggregateQueryOperation.java
Apache-2.0
/**
 * Size of a {@link WindowType#TUMBLE} or {@link WindowType#SLIDE} window. Empty for a
 * {@link WindowType#SESSION} window.
 *
 * @return size of the window, or {@link Optional#empty()} when the window has no size
 */
public Optional<ValueLiteralExpression> getSize() {
    // Optional.of would throw a NullPointerException when the size field is
    // null (i.e. for SESSION windows), contradicting the documented
    // "empty for SESSION window" contract — use ofNullable instead.
    return Optional.ofNullable(size);
}
Size of a {@link WindowType#TUMBLE} or {@link WindowType#SLIDE} window. Empty for {@link WindowType#SESSION} window. @return size of a window
getSize
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/WindowAggregateQueryOperation.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/WindowAggregateQueryOperation.java
Apache-2.0
/** Returns the partition spec that this ALTER TABLE operation applies to. */
public CatalogPartitionSpec getPartitionSpec() {
    return partitionSpec;
}
Abstract Operation to describe all ALTER TABLE statements that should be applied to partitions.
getPartitionSpec
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/AlterPartitionOperation.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/AlterPartitionOperation.java
Apache-2.0
/** Returns the identifier of the view that this ALTER VIEW operation targets. */
public ObjectIdentifier getViewIdentifier() {
    return viewIdentifier;
}
Abstract Operation to describe all ALTER VIEW statements.
getViewIdentifier
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/AlterViewOperation.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/AlterViewOperation.java
Apache-2.0
/** Returns the identifier of the table that this ANALYZE TABLE operation targets. */
public ObjectIdentifier getTableIdentifier() {
    return tableIdentifier;
}
Operation to describe an {@code ANALYZE TABLE} statement.
getTableIdentifier
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/AnalyzeTableOperation.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/ddl/AnalyzeTableOperation.java
Apache-2.0
/**
 * Creates a valid {@link AggregateQueryOperation}.
 *
 * @param groupings expressions describing the grouping key of the aggregation
 * @param aggregates expressions describing the aggregation functions
 * @param child relational operation on top of which to apply the aggregation
 * @return valid aggregate operation
 */
QueryOperation createAggregate(
        List<ResolvedExpression> groupings,
        List<ResolvedExpression> aggregates,
        QueryOperation child) {
    validateGroupings(groupings);
    validateAggregates(aggregates);

    // Output names: grouping names first, then (possibly flattened) aggregate names.
    String[] groupNames =
            groupings.stream()
                    .map(expr -> extractName(expr).orElseGet(expr::toString))
                    .toArray(String[]::new);
    List<String> groupNameList = Arrays.asList(groupNames);

    // Output types follow the same order as the names.
    DataType[] fieldTypes =
            Stream.concat(
                            groupings.stream().map(ResolvedExpression::getOutputDataType),
                            aggregates.stream().flatMap(this::extractAggregateResultDataTypes))
                    .toArray(DataType[]::new);

    String[] fieldNames =
            Stream.concat(
                            Stream.of(groupNames),
                            aggregates.stream()
                                    .flatMap(agg -> extractAggregateNames(agg, groupNameList)))
                    .toArray(String[]::new);

    return new AggregateQueryOperation(
            groupings, aggregates, child, ResolvedSchema.physical(fieldNames, fieldTypes));
}
Creates a valid {@link AggregateQueryOperation} operation. @param groupings expressions describing grouping key of aggregates @param aggregates expressions describing aggregation functions @param child relational operation on top of which to apply the aggregation @return valid aggregate operation
createAggregate
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
Apache-2.0
/**
 * Creates a valid {@link WindowAggregateQueryOperation}.
 *
 * @param groupings expressions describing the grouping key of the aggregation
 * @param aggregates expressions describing the aggregation functions
 * @param windowProperties expressions describing window properties
 * @param window grouping window of this aggregation
 * @param child relational operation on top of which to apply the aggregation
 * @return valid window aggregate operation
 */
QueryOperation createWindowAggregate(
        List<ResolvedExpression> groupings,
        List<ResolvedExpression> aggregates,
        List<ResolvedExpression> windowProperties,
        ResolvedGroupWindow window,
        QueryOperation child) {
    validateGroupings(groupings);
    validateAggregates(aggregates);
    validateWindowProperties(windowProperties, window);

    // Output names: grouping names, then aggregate names, then window-property names.
    String[] groupNames =
            groupings.stream()
                    .map(expr -> extractName(expr).orElseGet(expr::toString))
                    .toArray(String[]::new);
    List<String> groupNameList = Arrays.asList(groupNames);

    // Output types follow the same order as the names.
    DataType[] fieldTypes =
            concat(
                            groupings.stream().map(ResolvedExpression::getOutputDataType),
                            aggregates.stream().flatMap(this::extractAggregateResultDataTypes),
                            windowProperties.stream()
                                    .map(ResolvedExpression::getOutputDataType))
                    .toArray(DataType[]::new);

    String[] fieldNames =
            concat(
                            Stream.of(groupNames),
                            aggregates.stream()
                                    .flatMap(agg -> extractAggregateNames(agg, groupNameList)),
                            windowProperties.stream()
                                    .map(expr -> extractName(expr).orElseGet(expr::toString)))
                    .toArray(String[]::new);

    return new WindowAggregateQueryOperation(
            groupings,
            aggregates,
            windowProperties,
            window,
            child,
            ResolvedSchema.physical(fieldNames, fieldTypes));
}
Creates a valid {@link WindowAggregateQueryOperation} operation. @param groupings expressions describing grouping key of aggregates @param aggregates expressions describing aggregation functions @param windowProperties expressions describing window properties @param window grouping window of this aggregation @param child relational operation on top of which to apply the aggregation @return valid window aggregate operation
createWindowAggregate
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
Apache-2.0
private Stream<DataType> extractAggregateResultDataTypes(ResolvedExpression expression) { if (isFunctionOfKind(expression, TABLE_AGGREGATE)) { final DataType outputDataType = expression.getOutputDataType(); final LogicalType outputType = expression.getOutputDataType().getLogicalType(); // legacy if (outputType instanceof LegacyTypeInformationType) { final TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(expression.getOutputDataType()); return Stream.of(FieldInfoUtils.getFieldTypes(legacyInfo)) .map(TypeConversions::fromLegacyInfoToDataType); } return DataTypeUtils.flattenToDataTypes(outputDataType).stream(); } else { return Stream.of(expression.getOutputDataType()); } }
Extract result types for the aggregate or the table aggregate expression. For a table aggregate, it may return multi result types when the composite return type is flattened.
extractAggregateResultDataTypes
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
Apache-2.0
private Stream<String> extractAggregateNames( ResolvedExpression expression, List<String> groupNames) { if (isFunctionOfKind(expression, TABLE_AGGREGATE)) { final DataType outputDataType = expression.getOutputDataType(); final LogicalType outputType = expression.getOutputDataType().getLogicalType(); // legacy if (outputType instanceof LegacyTypeInformationType) { final TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(expression.getOutputDataType()); return Arrays.stream(FieldInfoUtils.getFieldNames(legacyInfo, groupNames)); } return DataTypeUtils.flattenToNames(outputDataType, groupNames).stream(); } else { return Stream.of(extractName(expression).orElseGet(expression::toString)); } }
Extract names for the aggregate or the table aggregate expression. For a table aggregate, it may return multi output names when the composite return type is flattened. If the result type is not a composite type, the result name should not conflict with the group names.
extractAggregateNames
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
Apache-2.0
/**
 * Creates a projection list that renames existing columns to new names.
 *
 * <p><b>NOTE:</b> The resulting expressions are still unresolved.
 *
 * @param inputFields names of the current columns
 * @param newAliases new aliases for the current columns
 * @return projection expressions
 */
static List<Expression> renameColumns(List<String> inputFields, List<Expression> newAliases) {
    // LinkedHashMap keeps the original column order; later puts replace the
    // plain reference with the aliasing expression for renamed columns.
    LinkedHashMap<String, Expression> projection = new LinkedHashMap<>();
    for (String field : inputFields) {
        projection.put(field, unresolvedRef(field));
    }
    for (Expression alias : newAliases) {
        String targetName = alias.accept(renameColumnExtractor);
        projection.put(targetName, alias);
    }
    return new ArrayList<>(projection.values());
}
Creates a projection list that renames existing columns to new names. <p><b>NOTE:</b> Resulting expression are still unresolved. @param inputFields names of current columns @param newAliases new aliases for current columns @return projection expressions
renameColumns
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/ColumnOperationUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/ColumnOperationUtils.java
Apache-2.0
/**
 * Creates a projection list that adds new columns, or replaces existing ones when a column
 * with the corresponding name already exists.
 *
 * <p><b>NOTE:</b> The resulting expressions are still unresolved.
 *
 * @param inputFields names of the current columns
 * @param newExpressions new columns to add or replace
 * @return projection expressions
 */
static List<Expression> addOrReplaceColumns(
        List<String> inputFields, List<Expression> newExpressions) {
    // LinkedHashMap keeps the original column order; a new expression with an
    // existing name replaces that column in place, otherwise it is appended.
    LinkedHashMap<String, Expression> projection = new LinkedHashMap<>();
    for (String field : inputFields) {
        projection.put(field, unresolvedRef(field));
    }
    for (Expression expression : newExpressions) {
        String targetName = extractName(expression).orElse(expression.toString());
        projection.put(targetName, expression);
    }
    return new ArrayList<>(projection.values());
}
Creates a projection list that adds new or replaces existing (if a column with corresponding name already exists) columns. <p><b>NOTE:</b> Resulting expression are still unresolved. @param inputFields names of current columns @param newExpressions new columns to add @return projection expressions
addOrReplaceColumns
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/ColumnOperationUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/ColumnOperationUtils.java
Apache-2.0
public static DynamicTableSink createDynamicTableSink( @Nullable Catalog catalog, Supplier<Optional<DynamicTableSinkFactory>> sinkFactorySupplier, ObjectIdentifier objectIdentifier, ResolvedCatalogTable catalogTable, Map<String, String> enrichmentOptions, ReadableConfig configuration, ClassLoader classLoader, boolean isTemporary) { DynamicTableSinkFactory dynamicTableSinkFactory = null; if (catalog != null && catalog.getFactory().isPresent() && catalog.getFactory().get() instanceof DynamicTableSinkFactory) { // try get from catalog dynamicTableSinkFactory = (DynamicTableSinkFactory) catalog.getFactory().get(); } if (dynamicTableSinkFactory == null) { dynamicTableSinkFactory = sinkFactorySupplier.get().orElse(null); } return FactoryUtil.createDynamicTableSink( dynamicTableSinkFactory, objectIdentifier, catalogTable, enrichmentOptions, configuration, classLoader, isTemporary); }
Creates a {@link DynamicTableSink} from a {@link CatalogTable}. <p>It first tries to obtain the table sink factory from the {@code catalog}, then from the {@code sinkFactorySupplier}. Otherwise, an attempt is made to discover a matching factory using Java SPI (see {@link Factory} for details).
createDynamicTableSink
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/ExecutableOperationUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/ExecutableOperationUtils.java
Apache-2.0
/**
 * Creates a valid {@link FunctionQueryOperation} from a resolved call to a process table
 * function.
 *
 * @param resolvedCall the resolved expression; must be a call to a process table function
 * @return the created function query operation
 * @throws ValidationException when the expression is not a process table function call
 */
QueryOperation create(ResolvedExpression resolvedCall) {
    if (!isFunctionOfKind(resolvedCall, FunctionKind.PROCESS_TABLE)) {
        throw new ValidationException(
                "Invalid expression. The expression must be a call to a process table function.");
    }
    final CallExpression call = (CallExpression) resolvedCall;
    // The call's output type determines the physical schema of the operation.
    final DataType outputType = call.getOutputDataType();
    final ResolvedSchema schema =
            ResolvedSchema.physical(
                    DataType.getFieldNames(outputType), DataType.getFieldDataTypes(outputType));
    return new FunctionQueryOperation(
            ContextResolvedFunction.fromCallExpression(call),
            call.getResolvedChildren(),
            schema);
}
Utility class for creating a valid {@link FunctionQueryOperation} operation.
create
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/FunctionTableFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/FunctionTableFactory.java
Apache-2.0
/**
 * Extracts names from the given expressions if they have one, preserving order. An entry is
 * empty when the corresponding expression carries no name.
 *
 * @param expressions list of expressions to extract names from
 * @return corresponding list of optional names
 */
public static List<Optional<String>> extractNames(List<ResolvedExpression> expressions) {
    final List<Optional<String>> names =
            expressions.stream()
                    .map(expression -> extractName(expression))
                    .collect(Collectors.toList());
    return names;
}
Extracts names from given expressions if they have one. Expressions that have names are: <ul> <li>{@link FieldReferenceExpression} <li>{@link TableReferenceExpression} <li>{@link LocalReferenceExpression} <li>{@link BuiltInFunctionDefinitions#AS} </ul> @param expressions list of expressions to extract names from @return corresponding list of optional names
extractNames
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/OperationExpressionsUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/OperationExpressionsUtils.java
Apache-2.0
/**
 * Adds an input alias to all {@link FieldReferenceExpression}s in the given expression.
 *
 * @param aliasName alias to scope field references with (applied to input index 0)
 * @param expression expression whose field references are rewritten
 * @return the rewritten expression
 */
public static ResolvedExpression scopeReferencesWithAlias(
        final String aliasName, final ResolvedExpression expression) {
    // Input index 0 is the only input being aliased here.
    final TableReferenceScopingVisitor visitor =
            new TableReferenceScopingVisitor(Collections.singletonMap(0, aliasName));
    return expression.accept(visitor);
}
Adds an input alias to all {@link FieldReferenceExpression} in the given {@code expression}.
scopeReferencesWithAlias
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/OperationExpressionsUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/OperationExpressionsUtils.java
Apache-2.0
/**
 * Renames a contiguous range of fields of the input {@link QueryOperation}, starting at the
 * given index, keeping all other field names unchanged.
 *
 * @param inputOperation the operation whose fields are renamed
 * @param alias new names for the fields; when empty the input is returned unchanged
 * @param aliasStartIndex index of the first field to rename
 * @return an aliased operation, or the input itself when no aliases are given
 */
private QueryOperation aliasBackwardFields(
        QueryOperation inputOperation, List<String> alias, int aliasStartIndex) {
    if (alias.isEmpty()) {
        return inputOperation;
    }
    List<String> renamed = new ArrayList<>(inputOperation.getResolvedSchema().getColumnNames());
    for (int i = 0; i < alias.size(); i++) {
        // Overwrite the column name at the aliased position.
        renamed.set(aliasStartIndex + i, alias.get(i));
    }
    return this.alias(
            renamed.stream()
                    .map(ApiExpressionUtils::unresolvedRef)
                    .collect(Collectors.toList()),
            inputOperation);
}
Rename fields in the input {@link QueryOperation}.
aliasBackwardFields
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/OperationTreeBuilder.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/OperationTreeBuilder.java
Apache-2.0
/**
 * Adds a default alias (TMP_0, TMP_1, ...) to every unaliased call in the given expressions,
 * e.g. {@code groupBy(a % 5)} becomes {@code groupBy(a % 5 as TMP_0)}, so every aggregate or
 * grouping expression is named. Already-aliased calls and non-call expressions pass through.
 *
 * @param inputFieldNames existing field names the generated names must not collide with
 * @param expressions expressions to alias
 * @return expressions with every call carrying a name
 */
private List<Expression> addAliasToTheCallInAggregate(
        List<String> inputFieldNames, List<Expression> expressions) {
    // Generated TMP_* names must not collide with any existing or generated name.
    Set<String> usedFieldNames = new HashSet<>(inputFieldNames);
    List<Expression> aliased = new ArrayList<>();
    int tmpNameCounter = 0;
    for (Expression expression : expressions) {
        boolean isUnnamedCall =
                expression instanceof UnresolvedCallExpression
                        && !ApiExpressionUtils.isFunction(
                                expression, BuiltInFunctionDefinitions.AS);
        if (isUnnamedCall) {
            String tempName = getUniqueName("TMP_" + tmpNameCounter, usedFieldNames);
            tmpNameCounter += 1;
            usedFieldNames.add(tempName);
            aliased.add(
                    unresolvedCall(
                            BuiltInFunctionDefinitions.AS,
                            expression,
                            valueLiteral(tempName)));
        } else {
            aliased.add(expression);
        }
    }
    return aliased;
}
Add a default name to the call in the grouping expressions, e.g., groupBy(a % 5) to groupBy(a % 5 as TMP_0) or make aggregate a named aggregate.
addAliasToTheCallInAggregate
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/OperationTreeBuilder.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/OperationTreeBuilder.java
Apache-2.0
/**
 * Creates a valid {@link SortQueryOperation} with the given offset, merging it into the
 * preceding sort operation of the child.
 *
 * @param offset number of rows to skip; must be non-negative
 * @param child relational expression on top of which to apply the sort operation
 * @param postResolverFactory factory for creating resolved expressions
 * @return valid sort operation with the applied offset
 * @throws ValidationException when the offset is negative or already defined
 */
QueryOperation createLimitWithOffset(
        int offset, QueryOperation child, PostResolverFactory postResolverFactory) {
    SortQueryOperation childSort = validateAndGetChildSort(child, postResolverFactory);
    if (offset < 0) {
        throw new ValidationException("Offset should be greater or equal 0");
    }
    // -1 marks an unset offset on the child sort.
    if (childSort.getOffset() != -1) {
        throw new ValidationException("OFFSET already defined");
    }
    // Re-create the sort with the offset applied; fetch stays unset (-1).
    return new SortQueryOperation(childSort.getOrder(), childSort.getChild(), offset, -1);
}
Creates a valid {@link SortQueryOperation} with offset (possibly merged into a preceding {@link SortQueryOperation}). @param offset offset to start from @param child relational expression on top of which to apply the sort operation @param postResolverFactory factory for creating resolved expressions @return valid sort operation with applied offset
createLimitWithOffset
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/SortOperationFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/SortOperationFactory.java
Apache-2.0
/**
 * Adds a URL to this component class loader's search path by delegating to the superclass
 * implementation; overridden so the method is accessible to callers.
 *
 * @param url the URL to add to the class path
 */
@Override
public void addURL(URL url) {
    super.addURL(url);
}
A class loader extending {@link ComponentClassLoader} which overrides the method {@link #addURL} so that URLs can be added to the component class loader.
addURL
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/planner/loader/PlannerModule.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/planner/loader/PlannerModule.java
Apache-2.0
/**
 * Registers the given jar resources. All resources are staged and validated first; only when
 * every jar is available are they registered, keeping the registration transactional.
 *
 * @param resourceUris jar resources to register
 * @throws IOException when staging a resource fails
 * @throws ValidationException when a staged file is not a valid jar
 */
public void registerJarResources(List<ResourceUri> resourceUris) throws IOException {
    // Stage and validate everything before touching the registered state.
    Map<ResourceUri, URL> stagedResources =
            prepareStagingResources(
                    resourceUris,
                    ResourceType.JAR,
                    true,
                    url -> {
                        try {
                            // Reject corrupt or non-jar files up front.
                            JarUtils.checkJarFile(url);
                        } catch (IOException e) {
                            throw new ValidationException(
                                    String.format("Failed to register jar resource [%s]", url),
                                    e);
                        }
                    },
                    false);
    registerResources(stagedResources, true);
}
Since any resource in the list may fail during registration, all resources are staged before the actual registration to guarantee a transactional process. If all the resources are available, they are registered into the {@link ResourceManager}.
registerJarResources
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
Apache-2.0
/**
 * Registers a single FILE resource and returns its absolute local file path (without the URI
 * scheme). A remote file is first staged into the local file system.
 *
 * @param resourceUri resource of type {@link ResourceType#FILE}; may be remote or relative
 * @return the absolute local file path of the registered resource
 * @throws IOException if staging the file fails
 */
public String registerFileResource(ResourceUri resourceUri) throws IOException {
    final Map<ResourceUri, URL> staged =
            prepareStagingResources(
                    Collections.singletonList(resourceUri),
                    ResourceType.FILE,
                    false,
                    url -> {},
                    false);
    registerResources(staged, false);
    // Exactly one resource was staged; look up its recorded local path.
    final ResourceUri stagedUri = staged.keySet().iterator().next();
    return resourceInfos.get(stagedUri).getPath();
}
Register a file resource into {@link ResourceManager} and return the absolute local file path without the scheme. <p>If the file is remote, it will be copied to a local file, with file name suffixed with a UUID. @param resourceUri resource with type as {@link ResourceType#FILE}, the resource uri might or might not contain the uri scheme, or it could be a relative path. @return the absolute local file path.
registerFileResource
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
Apache-2.0
/**
 * Declares JAR resources for a function and stages them (copying remote files locally and
 * validating them as JARs). Declared resources are not added to the registered resources or
 * the class loader unless the function is actually used in a job.
 *
 * @param resourceUris the JAR resource URIs declared by the function
 * @throws IOException if staging a resource fails
 */
public void declareFunctionResources(Set<ResourceUri> resourceUris) throws IOException {
    prepareStagingResources(
            resourceUris,
            ResourceType.JAR,
            true,
            url -> {
                try {
                    // Fail fast on corrupt or unreadable JARs.
                    JarUtils.checkJarFile(url);
                } catch (IOException e) {
                    throw new ValidationException(
                            String.format("Failed to register jar resource [%s]", url), e);
                }
            },
            // declareFunctionResource=true: record only, do not register yet.
            true);
}
Declare a resource for function and add it to the function resource infos. If the file is remote, it will be copied to a local file. The declared resource will not be added to resources and classloader if it is not used in the job. @param resourceUris the resource uri for function.
declareFunctionResources
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
Apache-2.0
/**
 * Returns the local URLs of all registered JAR resources. For remote jars this is the URL of
 * the locally downloaded copy; for local jars it is the registered URL.
 */
public Set<URL> getLocalJarResources() {
    return resourceInfos.keySet().stream()
            .filter(uri -> ResourceType.JAR.equals(uri.getResourceType()))
            .map(resourceInfos::get)
            .collect(Collectors.toSet());
}
Gets the URLs of the local jars. For a remote jar, returns the URL of the jar downloaded into the local file system; for a local jar, returns the registered URL.
getLocalJarResources
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
Apache-2.0
/**
 * Checks whether the given {@link Path} exists in its file system.
 *
 * @param filePath the path to check
 * @return true if the path exists
 * @throws IOException if the file system cannot be accessed
 */
public boolean exists(Path filePath) throws IOException {
    return filePath.getFileSystem().exists(filePath);
}
Check whether the {@link Path} exists.
exists
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
Apache-2.0
/**
 * Decrements the reference counter of this resource.
 *
 * @return true if the counter reached zero, i.e. the resource can be removed
 */
private boolean decreaseCounter() {
    counter -= 1;
    // The counter must never go below zero; that would indicate unbalanced release calls.
    checkState(
            counter >= 0,
            String.format("Invalid reference count[%d] which must >= 0", counter));
    return counter == 0;
}
Resource with reference counter, when the counter is 0, it means the resource can be removed.
decreaseCounter
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/resource/ResourceManager.java
Apache-2.0
/**
 * Decides whether the given field expressions reference the input type by position rather
 * than by name. By-position mode is only possible for indexed composite types (tuple, case
 * class, Row) and only if none of the referenced names collides with an input field name.
 *
 * @param inputType type of the input to map fields against
 * @param fields field expressions provided by the user
 * @return true if fields should be interpreted positionally
 */
private static boolean isReferenceByPosition(
        TypeInformation<?> inputType, Expression[] fields) {
    if (!isIndexedComposite(inputType)) {
        return false;
    }
    List<String> inputNames = Arrays.asList(getFieldNames(inputType));
    // Use the by-position mode if none of the fields exists in the input.
    // This prevents confusing cases like ('f2, 'f0, 'myName) for a Tuple3 where fields are
    // renamed
    // by position but the user might assume reordering instead of renaming.
    return Arrays.stream(fields)
            .allMatch(
                    f -> {
                        // An aliased reference ("f AS x") counts as a by-name reference.
                        if (f instanceof UnresolvedCallExpression
                                && ((UnresolvedCallExpression) f).getFunctionDefinition()
                                        == BuiltInFunctionDefinitions.AS
                                && f.getChildren().get(0)
                                        instanceof UnresolvedReferenceExpression) {
                            return false;
                        }
                        if (f instanceof UnresolvedReferenceExpression) {
                            return !inputNames.contains(
                                    ((UnresolvedReferenceExpression) f).getName());
                        }
                        return true;
                    });
}
Reference input fields by name: All fields in the schema definition are referenced by name (and possibly renamed using an alias (as)). In this mode, fields can be reordered and projected out. Moreover, we can define proctime and rowtime attributes at arbitrary positions using arbitrary names (except those that exist in the result schema). This mode can be used for any input type, including POJOs. <p>Reference input fields by position: In this mode, fields are simply renamed. Event-time attributes can replace the field on their position in the input data (if it is of the correct type) or be appended at the end. Proctime attributes must be appended at the end. This mode can only be used if the input type has a defined field order (tuple, case class, Row) and none of the fields references a field of the input type.
isReferenceByPosition
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
Apache-2.0
/**
 * Builds a {@link TypeInfoSchema} (field names, indices, and data type) for the given
 * {@link TypeInformation}.
 *
 * @param inputType the type information to extract the schema from
 * @param <A> type described by the type information
 * @return a schema description of the input
 * @throws ValidationException if the input is a generic {@code Row} type without field info
 */
public static <A> TypeInfoSchema getFieldsInfo(TypeInformation<A> inputType) {
    // A GenericTypeInfo<Row> carries no field information and cannot be mapped to a schema.
    final boolean isGenericRow =
            inputType instanceof GenericTypeInfo && inputType.getTypeClass() == Row.class;
    if (isGenericRow) {
        throw new ValidationException(
                "An input of GenericTypeInfo<Row> cannot be converted to Table. "
                        + "Please specify the type of the input with a RowTypeInfo.");
    }
    return new TypeInfoSchema(
            getFieldNames(inputType),
            getFieldIndices(inputType),
            fromLegacyInfoToDataType(getFieldTypes(inputType)),
            false);
}
Returns a {@link TypeInfoSchema} for a given {@link TypeInformation}. @param inputType The TypeInformation to extract the mapping from. @param <A> The type of the TypeInformation. @return A description of the input that enables creation of a {@link TableSchema}. @see TypeInfoSchema
getFieldsInfo
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
Apache-2.0
/**
 * Returns the field names for a given {@link TypeInformation}, with no pre-existing names to
 * avoid for atomic types.
 *
 * @param inputType the type information to extract the field names from
 * @param <A> type described by the type information
 * @return an array holding the field names
 */
public static <A> String[] getFieldNames(TypeInformation<A> inputType) {
    return getFieldNames(inputType, Collections.emptyList());
}
Returns field names for a given {@link TypeInformation}. @param inputType The TypeInformation extract the field names. @param <A> The type of the TypeInformation. @return An array holding the field names
getFieldNames
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
Apache-2.0
/**
 * Returns the field names for a given {@link TypeInformation}. For non-composite (atomic)
 * inputs a single generated name is returned that does not clash with {@code existingNames}.
 *
 * @param inputType the type information to extract the field names from
 * @param existingNames names that a generated atomic field name must not collide with
 * @param <A> type described by the type information
 * @return an array holding the field names
 * @throws TableException if a field is named {@code "*"}
 */
public static <A> String[] getFieldNames(
        TypeInformation<A> inputType, List<String> existingNames) {
    validateInputTypeInfo(inputType);

    List<String> fieldNames = null;
    // type originated from Table API: names come from the logical type, if composite
    if (inputType instanceof DataTypeQueryable) {
        final DataType dataType = ((DataTypeQueryable) inputType).getDataType();
        final LogicalType type = dataType.getLogicalType();
        if (isCompositeType(type)) {
            fieldNames = LogicalTypeChecks.getFieldNames(type);
        }
    }
    // type originated from other API: names come from the composite type info
    // (note: a non-composite DataTypeQueryable intentionally falls through to atomic handling)
    else if (inputType instanceof CompositeType) {
        fieldNames = Arrays.asList(((CompositeType<A>) inputType).getFieldNames());
    }
    // atomic in any case: generate a single unique name
    if (fieldNames == null) {
        fieldNames = Collections.singletonList(getAtomicName(existingNames));
    }

    if (fieldNames.contains("*")) {
        throw new TableException("Field name can not be '*'.");
    }

    return fieldNames.toArray(new String[0]);
}
Returns field names for a given {@link TypeInformation}. If the input {@link TypeInformation} is not a composite type, the result field name should not exist in the existingNames. @param inputType The TypeInformation extract the field names. @param existingNames The existing field names for non-composite types that can not be used. @param <A> The type of the TypeInformation. @return An array holding the field names
getFieldNames
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
Apache-2.0
/**
 * Validates that the class represented by the given type information is static and globally
 * accessible.
 *
 * @param typeInfo the type to check
 * @param <A> type described by the type information
 * @throws ValidationException if the class is a non-static inner class, not public, or has
 *     no canonical name (e.g. a local or anonymous class)
 */
public static <A> void validateInputTypeInfo(TypeInformation<A> typeInfo) {
    final Class<A> clazz = typeInfo.getTypeClass();
    final boolean isNonStaticInner =
            clazz.isMemberClass() && !Modifier.isStatic(clazz.getModifiers());
    final boolean isPublic = Modifier.isPublic(clazz.getModifiers());
    final boolean hasCanonicalName = clazz.getCanonicalName() != null;
    if (isNonStaticInner || !isPublic || !hasCanonicalName) {
        throw new ValidationException(
                format(
                        "Class '%s' described in type information '%s' must be "
                                + "static and globally accessible.",
                        clazz, typeInfo));
    }
}
Validate if class represented by the typeInfo is static and globally accessible. @param typeInfo type to check @throws ValidationException if type does not meet these criteria
validateInputTypeInfo
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
Apache-2.0
/**
 * Returns the field indices (0..n-1) for a given {@link TypeInformation}.
 *
 * @param inputType the type information to derive the field positions from
 * @return an array holding the field positions
 */
public static int[] getFieldIndices(TypeInformation<?> inputType) {
    final int fieldCount = getFieldNames(inputType).length;
    final int[] indices = new int[fieldCount];
    for (int i = 0; i < fieldCount; i++) {
        indices[i] = i;
    }
    return indices;
}
Returns field indexes for a given {@link TypeInformation}. @param inputType The TypeInformation extract the field positions from. @return An array holding the field positions
getFieldIndices
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
Apache-2.0
/** Identifies this test step as a statement-set step. */
@Override
public TestKind getKind() {
    return TestKind.STATEMENT_SET;
}
Test step for creating a statement set.
getKind
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/StatementSetTestStep.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/StatementSetTestStep.java
Apache-2.0
/** Returns the program's identifier, used e.g. as a display/file name in test runners. */
@Override
public String toString() {
    return id;
}
Steps to be executed for running the actual test.
toString
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Entry point for building a {@link TableTestProgram}, forcing an identifier and a
 * description. The identifier should globally identify the program (e.g. for file names in
 * plan tests); the description should give more context.
 *
 * @param id unique identifier of the test program
 * @param description human-readable summary of what the program tests
 * @return a builder for the test program
 */
public static Builder of(String id, String description) {
    return new Builder(id, description);
}
Entrypoint for a {@link TableTestProgram} that forces an identifier and description of the test program. <p>The identifier is necessary to (ideally globally) identify the test program in outputs. For example, a runner for plan tests can create directories and use the name as file names. The identifier must start with the name of the exec node under testing. <p>The description should give more context and should start with a verb and "s" suffix. <p>For example: <ul> <li>TableTestProgram.of("join-outer", "tests outer joins") <li>TableTestProgram.of("rank-x-enabled", "validates a rank with config flag 'x' set") <li>TableTestProgram.of("calc-with-projection", "verifies FLINK-12345 is fixed due to missing row projection") </ul>
of
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Convenience accessor that assumes the program runs exactly one SQL statement and returns
 * that step.
 *
 * @return the single SQL run step
 * @throws IllegalArgumentException if there is not exactly one SQL run step
 */
public SqlTestStep getRunSqlTestStep() {
    TestStep found = null;
    int matches = 0;
    for (TestStep step : runSteps) {
        if (step.getKind() == TestKind.SQL) {
            found = step;
            matches++;
        }
    }
    Preconditions.checkArgument(matches == 1, "Single SQL step expected.");
    return (SqlTestStep) found;
}
A helper method to avoid boilerplate code. It assumes that only a single SQL statement is tested.
getRunSqlTestStep
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Convenience accessor that assumes the program runs exactly one statement set and returns
 * that step.
 *
 * @return the single statement-set run step
 * @throws IllegalArgumentException if there is not exactly one statement-set run step
 */
public StatementSetTestStep getRunStatementSetTestStep() {
    TestStep found = null;
    int matches = 0;
    for (TestStep step : runSteps) {
        if (step.getKind() == TestKind.STATEMENT_SET) {
            found = step;
            matches++;
        }
    }
    Preconditions.checkArgument(matches == 1, "Single StatementSet step expected.");
    return (StatementSetTestStep) found;
}
A helper method to avoid boilerplate code. It assumes only one statement set is tested.
getRunStatementSetTestStep
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Adds a setup step that executes a generic SQL statement.
 *
 * <p>Note: not every runner supports arbitrary SQL; some runners need to enrich properties
 * (e.g. of a CREATE TABLE), so use this step with caution.
 *
 * @param sql the SQL statement to execute during setup
 * @return this builder
 */
public Builder setupSql(String sql) {
    this.setupSteps.add(new SqlTestStep(sql));
    return this;
}
Setup step for execution SQL. <p>Note: Not every runner supports generic SQL statements. Sometimes the runner would like to enrich properties e.g. of a CREATE TABLE. Use this step with caution.
setupSql
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Adds a setup step that sets a {@link ConfigOption} to the given value.
 *
 * @param option the config option to set
 * @param value the value to apply
 * @param <T> type of the option's value
 * @return this builder
 */
public <T> Builder setupConfig(ConfigOption<T> option, T value) {
    this.setupSteps.add(new ConfigOptionTestStep<>(option, value));
    return this;
}
Setup step for setting a {@link ConfigOption}.
setupConfig
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Adds a setup step that registers a temporary catalog function under the given name.
 *
 * @param name name to register the function under
 * @param function implementation class of the user-defined function
 * @return this builder
 */
public Builder setupTemporaryCatalogFunction(
        String name, Class<? extends UserDefinedFunction> function) {
    this.setupSteps.add(
            new FunctionTestStep(
                    FunctionPersistence.TEMPORARY, FunctionBehavior.CATALOG, name, function));
    return this;
}
Setup step for registering a temporary catalog function.
setupTemporaryCatalogFunction
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Adds a setup step that registers a persistent catalog function under the given name.
 *
 * @param name name to register the function under
 * @param function implementation class of the user-defined function
 * @return this builder
 */
public Builder setupCatalogFunction(
        String name, Class<? extends UserDefinedFunction> function) {
    this.setupSteps.add(
            new FunctionTestStep(
                    FunctionPersistence.PERSISTENT, FunctionBehavior.CATALOG, name, function));
    return this;
}
Setup step for registering a catalog function.
setupCatalogFunction
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Adds a setup step for a table source. Use {@link SourceTestStep.Builder} to construct the
 * step.
 *
 * @param sourceTestStep the source definition to set up
 * @return this builder
 */
public Builder setupTableSource(SourceTestStep sourceTestStep) {
    setupSteps.add(sourceTestStep);
    return this;
}
Setup step for a table source. <p>Use {@link SourceTestStep.Builder} to construct this step.
setupTableSource
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Adds a setup step for a table sink. Use {@link SinkTestStep.Builder} to construct the step.
 *
 * @param sinkTestStep the sink definition to set up
 * @return this builder
 */
public Builder setupTableSink(SinkTestStep sinkTestStep) {
    setupSteps.add(sinkTestStep);
    return this;
}
Setup step for a table sink. <p>Use {@link SinkTestStep.Builder} to construct this step.
setupTableSink
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Adds a setup step for a model. Use {@link ModelTestStep.Builder} to construct the step.
 *
 * @param modelTestStep the model definition to set up
 * @return this builder
 */
public Builder setupModel(ModelTestStep modelTestStep) {
    setupSteps.add(modelTestStep);
    return this;
}
Setup step for a model. <p>Use {@link ModelTestStep.Builder} to construct this step.
setupModel
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgram.java
Apache-2.0
/**
 * Returns the programs of this runner after pre-checks: verifies that program ids are unique
 * and that every setup/run step is of a kind the runner supports. Runners should call this
 * instead of {@link #programs()}.
 *
 * @return the validated list of test programs
 * @throws IllegalArgumentException if two programs share the same id
 * @throws UnsupportedOperationException if a program contains an unsupported step kind
 */
default List<TableTestProgram> supportedPrograms() {
    final List<TableTestProgram> programs = programs();

    // Reject duplicate ids early; ids are used e.g. as file names by plan-test runners.
    final List<String> ids = programs.stream().map(p -> p.id).collect(Collectors.toList());
    final List<String> duplicates =
            ids.stream()
                    .filter(id -> Collections.frequency(ids, id) > 1)
                    .distinct()
                    .collect(Collectors.toList());
    if (!duplicates.isEmpty()) {
        throw new IllegalArgumentException("Duplicate test program id found: " + duplicates);
    }

    final Set<TestStep.TestKind> setupSteps = supportedSetupSteps();
    final Set<TestStep.TestKind> runSteps = supportedRunSteps();
    programs.forEach(
            p -> {
                // Fail on the first setup step kind the runner cannot handle.
                p.setupSteps.stream()
                        .map(TestStep::getKind)
                        .filter(k -> !setupSteps.contains(k))
                        .findFirst()
                        .ifPresent(
                                k -> {
                                    throw new UnsupportedOperationException(
                                            "Test runner does not support setup step: " + k);
                                });
                // Same check for run steps.
                p.runSteps.stream()
                        .map(TestStep::getKind)
                        .filter(k -> !runSteps.contains(k))
                        .findFirst()
                        .ifPresent(
                                k -> {
                                    throw new UnsupportedOperationException(
                                            "Test runner does not support run step: " + k);
                                });
            });
    return programs;
}
Runners should call this method to get started. <p>Compared to {@link #programs()}, this method will perform some pre-checks.
supportedPrograms
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgramRunner.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/test/program/TableTestProgramRunner.java
Apache-2.0
/**
 * Adds the given offset to the input and returns the sum as a string.
 *
 * @param i the input value
 * @param offset the offset to add
 * @return the sum rendered as a decimal string
 */
public String eval(Integer i, Integer offset) {
    final int sum = i + offset;
    return Integer.toString(sum);
}
The scalar function for SQL Client test.
eval
java
apache/flink
flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/utils/UserDefinedFunctions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/test/java/org/apache/flink/table/utils/UserDefinedFunctions.java
Apache-2.0
/**
 * Validates the schema section of the given descriptor properties: every field must have a
 * name and a (data) type, at most one proctime attribute may be defined (and only in a
 * streaming environment), and proctime/rowtime declarations are mutually exclusive per field.
 *
 * @param properties the descriptor properties holding the schema to validate
 * @throws ValidationException if the schema is missing or violates any of the rules above
 */
@Override
public void validate(DescriptorProperties properties) {
    Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME);
    Map<String, String> legacyTypes = properties.getIndexedProperty(SCHEMA, SCHEMA_TYPE);
    Map<String, String> dataTypes = properties.getIndexedProperty(SCHEMA, SCHEMA_DATA_TYPE);

    // A schema must declare at least one field via name, legacy type, or data type.
    if (names.isEmpty() && legacyTypes.isEmpty() && dataTypes.isEmpty()) {
        throw new ValidationException(
                format("Could not find the required schema in property '%s'.", SCHEMA));
    }

    boolean proctimeFound = false;

    for (int i = 0; i < Math.max(names.size(), legacyTypes.size()); i++) {
        properties.validateString(SCHEMA + "." + i + "." + SCHEMA_NAME, false, 1);
        // Either the new data-type property or the legacy type property must be present.
        properties.validateDataType(
                SCHEMA + "." + i + "." + SCHEMA_DATA_TYPE,
                SCHEMA + "." + i + "." + SCHEMA_TYPE,
                false);
        properties.validateString(SCHEMA + "." + i + "." + SCHEMA_FROM, true, 1);
        // either proctime or rowtime
        String proctime = SCHEMA + "." + i + "." + SCHEMA_PROCTIME;
        String rowtime = SCHEMA + "." + i + "." + ROWTIME;
        if (properties.containsKey(proctime)) {
            // check the environment
            if (!isStreamEnvironment) {
                throw new ValidationException(
                        format(
                                "Property '%s' is not allowed in a batch environment.",
                                proctime));
            }
            // check for only one proctime attribute
            else if (proctimeFound) {
                throw new ValidationException(
                        "A proctime attribute must only be defined once.");
            }
            // check proctime
            properties.validateBoolean(proctime, false);
            proctimeFound = properties.getBoolean(proctime);
            // no rowtime
            properties.validatePrefixExclusion(rowtime);
        } else if (properties.hasPrefix(rowtime)) {
            // check rowtime
            RowtimeValidator rowtimeValidator =
                    new RowtimeValidator(
                            supportsSourceTimestamps,
                            supportsSourceWatermarks,
                            SCHEMA + "." + i + ".");
            rowtimeValidator.validate(properties);
            // no proctime
            properties.validateExclusion(proctime);
        }
    }
}
Validator for {@link Schema}. @deprecated See {@link Schema} for details.
validate
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
Apache-2.0
/**
 * Finds the proctime attribute in the schema properties, if one is defined.
 *
 * @param properties the descriptor properties describing the schema
 * @return the name of the proctime field, or empty if none is declared
 */
public static Optional<String> deriveProctimeAttribute(DescriptorProperties properties) {
    final Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME);

    for (int i = 0; i < names.size(); i++) {
        final String proctimeKey = SCHEMA + "." + i + "." + SCHEMA_PROCTIME;
        final boolean isProctime = properties.getOptionalBoolean(proctimeKey).orElse(false);
        if (isProctime) {
            return Optional.of(names.get(SCHEMA + "." + i + "." + SCHEMA_NAME));
        }
    }
    return Optional.empty();
}
Finds the proctime attribute if defined.
deriveProctimeAttribute
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
Apache-2.0
public static List<RowtimeAttributeDescriptor> deriveRowtimeAttributes( DescriptorProperties properties) { Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME); List<RowtimeAttributeDescriptor> attributes = new ArrayList<>(); // check for rowtime in every field for (int i = 0; i < names.size(); i++) { Optional<Tuple2<TimestampExtractor, WatermarkStrategy>> rowtimeComponents = RowtimeValidator.getRowtimeComponents(properties, SCHEMA + "." + i + "."); int index = i; // create descriptor rowtimeComponents.ifPresent( tuple2 -> attributes.add( new RowtimeAttributeDescriptor( properties.getString( SCHEMA + "." + index + "." + SCHEMA_NAME), tuple2.f0, tuple2.f1))); } return attributes; }
Finds the rowtime attributes if defined.
deriveRowtimeAttributes
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
Apache-2.0
@Deprecated public static TableSchema deriveTableSinkSchema(DescriptorProperties properties) { TableSchema.Builder builder = TableSchema.builder(); TableSchema tableSchema = properties.getTableSchema(SCHEMA); for (int i = 0; i < tableSchema.getFieldCount(); i++) { final TableColumn tableColumn = tableSchema.getTableColumns().get(i); final String fieldName = tableColumn.getName(); final DataType dataType = tableColumn.getType(); if (!tableColumn.isPhysical()) { // skip non-physical column continue; } boolean isProctime = properties .getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME) .orElse(false); String tsType = SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE; boolean isRowtime = properties.containsKey(tsType); if (!isProctime && !isRowtime) { // check for a aliasing String aliasName = properties .getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM) .orElse(fieldName); builder.field(aliasName, dataType); } // only use the rowtime attribute if it references a field else if (isRowtime) { switch (properties.getString(tsType)) { case ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD: String field = properties.getString( SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_FROM); builder.field(field, dataType); break; // other timestamp strategies require a reverse timestamp extractor to // insert the timestamp into the output default: throw new TableException( format( "Unsupported rowtime type '%s' for sink" + " table schema. Currently only '%s' is supported for table sinks.", dataType, ROWTIME_TIMESTAMPS_TYPE_VALUE_FROM_FIELD)); } } } return builder.build(); }
Derives the table schema for a table sink. A sink ignores a proctime attribute and needs to track the origin of a rowtime field. @deprecated This method combines two separate concepts of table schema and field mapping. This should be split into two methods once we have support for the corresponding interfaces (see FLINK-9870).
deriveTableSinkSchema
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
Apache-2.0
/**
 * Derives the mapping from schema field names to source field names. Explicit mappings come
 * from the "from" property; proctime, rowtime, and generated columns are removed from the
 * mapping; all other schema fields must resolve to a field of the input type.
 *
 * @param properties the descriptor properties describing the schema
 * @param inputType the input type produced by the connector and/or format; used to resolve
 *     implicit mappings and rowtime origins
 * @return a map from schema field name to source field name
 * @throws ValidationException if a schema field cannot be mapped to any source field
 */
public static Map<String, String> deriveFieldMapping(
        DescriptorProperties properties, Optional<TypeInformation<?>> inputType) {
    Map<String, String> mapping = new HashMap<>();

    TableSchema schema = properties.getTableSchema(SCHEMA);

    List<String> columnNames = new ArrayList<>();
    inputType.ifPresent(
            t -> columnNames.addAll(Arrays.asList(((CompositeType) t).getFieldNames())));

    // add all source fields first because rowtime might reference one of them
    columnNames.forEach(name -> mapping.put(name, name));

    // add all schema fields first for implicit mappings
    Arrays.stream(schema.getFieldNames()).forEach(name -> mapping.put(name, name));

    Map<String, String> names = properties.getIndexedProperty(SCHEMA, SCHEMA_NAME);

    for (int i = 0; i < names.size(); i++) {
        String name = properties.getString(SCHEMA + "." + i + "." + SCHEMA_NAME);
        Optional<String> source =
                properties.getOptionalString(SCHEMA + "." + i + "." + SCHEMA_FROM);
        if (source.isPresent()) {
            // add explicit mapping
            mapping.put(name, source.get());
        } else {
            // implicit mapping or time
            boolean isProctime =
                    properties
                            .getOptionalBoolean(SCHEMA + "." + i + "." + SCHEMA_PROCTIME)
                            .orElse(false);
            boolean isRowtime =
                    properties.containsKey(SCHEMA + "." + i + "." + ROWTIME_TIMESTAMPS_TYPE);
            boolean isGeneratedColumn = properties.containsKey(SCHEMA + "." + i + "." + EXPR);
            // remove proctime/rowtime from mapping
            if (isProctime || isRowtime || isGeneratedColumn) {
                mapping.remove(name);
            }
            // check for invalid fields
            else if (!columnNames.contains(name)) {
                throw new ValidationException(
                        format(
                                "Could not map the schema field '%s' to a field "
                                        + "from source. Please specify the source field from which it can be derived.",
                                name));
            }
        }
    }

    return mapping;
}
Finds a table source field mapping. @param properties The properties describing a schema. @param inputType The input type that a connector and/or format produces. This parameter can be used to resolve a rowtime field against an input field.
deriveFieldMapping
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/descriptors/SchemaValidator.java
Apache-2.0
/**
 * Creates and configures a {@link StreamTableSink} from normalized properties. The default
 * implementation returns {@code null}, signalling that the implementing factory does not
 * support this legacy creation path.
 *
 * @param properties normalized properties describing a table sink
 * @return the configured table sink, or {@code null} if unsupported
 * @deprecated {@link Context} contains more information (including the table schema);
 *     implement {@code createTableSink(Context)} instead.
 */
@Deprecated
default StreamTableSink<T> createStreamTableSink(Map<String, String> properties) {
    return null;
}
Creates and configures a {@link StreamTableSink} using the given properties. @param properties normalized properties describing a table sink. @return the configured table sink. @deprecated {@link Context} contains more information, and already contains table schema too. Please use {@link #createTableSink(Context)} instead.
createStreamTableSink
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/factories/StreamTableSinkFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/factories/StreamTableSinkFactory.java
Apache-2.0
/**
 * Creates and configures a {@link StreamTableSource} from normalized properties. The default
 * implementation returns {@code null}, signalling that the implementing factory does not
 * support this legacy creation path.
 *
 * @param properties normalized properties describing a stream table source
 * @return the configured stream table source, or {@code null} if unsupported
 * @deprecated {@link Context} contains more information (including the table schema);
 *     implement {@code createTableSource(Context)} instead.
 */
@Deprecated
default StreamTableSource<T> createStreamTableSource(Map<String, String> properties) {
    return null;
}
Creates and configures a {@link StreamTableSource} using the given properties. @param properties normalized properties describing a stream table source. @return the configured stream table source. @deprecated {@link Context} contains more information, and already contains table schema too. Please use {@link #createTableSource(Context)} instead.
createStreamTableSource
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/factories/StreamTableSourceFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/factories/StreamTableSourceFactory.java
Apache-2.0
/**
 * Consumes the given data stream by writing it with the {@link OutputFormat} returned from
 * {@code getOutputFormat()}. The sink inherits the parallelism of the input stream.
 *
 * @param dataStream the stream holding the table's data
 * @return the created data stream sink
 */
@Override
public final DataStreamSink<T> consumeDataStream(DataStream<T> dataStream) {
    DataStreamSink<T> streamSink = dataStream.writeUsingOutputFormat(getOutputFormat());
    // Match the input parallelism; the 'false' flag marks it as not explicitly user-set.
    streamSink.getTransformation().setParallelism(dataStream.getParallelism(), false);
    return streamSink;
}
Returns an {@link OutputFormat} for writing the data of the table.
consumeDataStream
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/sinks/OutputFormatTableSink.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/sinks/OutputFormatTableSink.java
Apache-2.0
/**
 * Returns true if this is a bounded source, false if it is unbounded. Defaults to unbounded
 * for backwards compatibility.
 */
default boolean isBounded() {
    return false;
}
Returns true if this is a bounded source, false if this is an unbounded source. Default is unbounded for compatibility.
isBounded
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/sources/StreamTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/legacy/table/sources/StreamTableSource.java
Apache-2.0
/**
 * Creates a table environment on top of the given {@link StreamExecutionEnvironment} using
 * default {@link EnvironmentSettings}. The resulting environment integrates the Table/SQL
 * API with the Java {@link DataStream} API for both bounded and unbounded processing.
 *
 * @param executionEnvironment the Java {@link StreamExecutionEnvironment} to build on
 * @return a new stream table environment
 */
static StreamTableEnvironment create(StreamExecutionEnvironment executionEnvironment) {
    return create(executionEnvironment, EnvironmentSettings.newInstance().build());
}
Creates a table environment that is the entry point and central context for creating Table and SQL API programs that integrate with the Java-specific {@link DataStream} API. <p>It is unified for bounded and unbounded data processing. <p>A stream table environment is responsible for: <ul> <li>Convert a {@link DataStream} into {@link Table} and vice-versa. <li>Connecting to external systems. <li>Registering and retrieving {@link Table}s and other meta objects from a catalog. <li>Executing SQL statements. <li>Offering further configuration options. </ul> <p>Note: If you don't intend to use the {@link DataStream} API, {@link TableEnvironment} is meant for pure table programs. @param executionEnvironment The Java {@link StreamExecutionEnvironment} of the {@link TableEnvironment}.
create
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/api/bridge/java/StreamTableEnvironment.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/api/bridge/java/StreamTableEnvironment.java
Apache-2.0
static StreamTableEnvironment create( StreamExecutionEnvironment executionEnvironment, EnvironmentSettings settings) { return StreamTableEnvironmentImpl.create(executionEnvironment, settings); }
Creates a table environment that is the entry point and central context for creating Table and SQL API programs that integrate with the Java-specific {@link DataStream} API. <p>It is unified for bounded and unbounded data processing. <p>A stream table environment is responsible for: <ul> <li>Convert a {@link DataStream} into {@link Table} and vice-versa. <li>Connecting to external systems. <li>Registering and retrieving {@link Table}s and other meta objects from a catalog. <li>Executing SQL statements. <li>Offering further configuration options. </ul> <p>Note: If you don't intend to use the {@link DataStream} API, {@link TableEnvironment} is meant for pure table programs. @param executionEnvironment The Java {@link StreamExecutionEnvironment} of the {@link TableEnvironment}. @param settings The environment settings used to instantiate the {@link TableEnvironment}.
create
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/api/bridge/java/StreamTableEnvironment.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/api/bridge/java/StreamTableEnvironment.java
Apache-2.0
@Override default Optional<Integer> getParallelism() { return Optional.empty(); }
{@inheritDoc} <p>Note: If a custom parallelism is returned and {@link #consumeDataStream(ProviderContext, DataStream)} applies multiple transformations, make sure to set the same custom parallelism to each operator to not mess up the changelog.
getParallelism
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/connector/sink/DataStreamSinkProvider.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/connector/sink/DataStreamSinkProvider.java
Apache-2.0
@Override public void validate(DescriptorProperties properties) { properties.validateString(TableFactoryService.FORMAT_TYPE, false, 1); properties.validateString(TableFactoryService.FORMAT_PROPERTY_VERSION, true, 1); properties.validateValue(TableFactoryService.FORMAT_TYPE, FORMAT_TYPE_VALUE, false); properties.validateString(FORMAT_FIELD_DELIMITER, true, 1); properties.validateString(FORMAT_LINE_DELIMITER, true, 1); properties.validateString(FORMAT_QUOTE_CHARACTER, true, 1, 1); properties.validateString(FORMAT_COMMENT_PREFIX, true, 1); properties.validateBoolean(FORMAT_IGNORE_FIRST_LINE, true); properties.validateBoolean(FORMAT_IGNORE_PARSE_ERRORS, true); properties.validateBoolean(TableFactoryService.FORMAT_DERIVE_SCHEMA, true); properties.validateString(FORMAT_WRITE_MODE, true, 1); properties.validateInt(FORMAT_NUM_FILES, true); final boolean hasSchema = properties.hasPrefix(FORMAT_FIELDS); final boolean isDerived = properties .getOptionalBoolean(TableFactoryService.FORMAT_DERIVE_SCHEMA) .orElse(true); // derive schema by default // if a schema is defined, no matter derive schema is set or not, will use the defined // schema if (hasSchema) { properties.validateTableSchema(FORMAT_FIELDS, false); } else if (!isDerived) { throw new ValidationException( "A definition of a schema is required if derivation from the table's schema is disabled."); } }
Validator for the legacy CSV connector. @deprecated Use the RFC-compliant {@code Csv} format in the dedicated flink-formats/flink-csv module instead.
validate
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/descriptors/OldCsvValidator.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/descriptors/OldCsvValidator.java
Apache-2.0
@Override public StreamExecutionEnvironment getExecutionEnvironment() { return executionEnvironment; }
The default implementation for {@link ProcedureContext}.
getExecutionEnvironment
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/procedure/DefaultProcedureContext.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/procedure/DefaultProcedureContext.java
Apache-2.0
@Override public StreamTableSink<Row> createStreamTableSink(Map<String, String> properties) { return createTableSink(false, properties); }
Factory base for creating configured instances of {@link CsvTableSink} in a batch environment. @deprecated The legacy CSV connector has been replaced by {@code FileSink}. It is kept only to support tests for the legacy connector stack.
createStreamTableSink
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sinks/CsvBatchTableSinkFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sinks/CsvBatchTableSinkFactory.java
Apache-2.0
@Override public StreamTableSource<Row> createStreamTableSource(Map<String, String> properties) { return createTableSource(false, properties); }
Factory for creating configured instances of {@link CsvTableSource} in a batch environment. @deprecated The legacy CSV connector has been replaced by {@code FileSource}. It is kept only to support tests for the legacy connector stack.
createStreamTableSource
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvBatchTableSourceFactory.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvBatchTableSourceFactory.java
Apache-2.0
public Builder path(String path) { this.path = path; return this; }
Sets the path to the CSV file. Required. @param path the path to the CSV file
path
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
Apache-2.0
public Builder field(String fieldName, DataType fieldType) { if (schema.containsKey(fieldName)) { throw new IllegalArgumentException("Duplicate field name " + fieldName); } // CSV only support java.sql.Timestamp/Date/Time DataType type; switch (fieldType.getLogicalType().getTypeRoot()) { case TIMESTAMP_WITHOUT_TIME_ZONE: type = fieldType.bridgedTo(Timestamp.class); break; case TIME_WITHOUT_TIME_ZONE: type = fieldType.bridgedTo(Time.class); break; case DATE: type = fieldType.bridgedTo(Date.class); break; default: type = fieldType; } schema.put(fieldName, type); return this; }
Adds a field with the field name and the data type. Required. This method can be called multiple times. The call order of this method defines also the order of the fields in a row. @param fieldName the field name @param fieldType the data type of the field
field
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
Apache-2.0
@Deprecated public Builder field(String fieldName, TypeInformation<?> fieldType) { return field(fieldName, TypeConversions.fromLegacyInfoToDataType(fieldType)); }
Adds a field with the field name and the type information. Required. This method can be called multiple times. The call order of this method defines also the order of the fields in a row. @param fieldName the field name @param fieldType the type information of the field @deprecated This method will be removed in future versions as it uses the old type system. It is recommended to use {@link #field(String, DataType)} instead which uses the new type system based on {@link DataTypes}. Please make sure to use either the old or the new type system consistently to avoid unintended behavior. See the website documentation for more information.
field
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
Apache-2.0
public Builder quoteCharacter(Character quote) { this.quoteCharacter = quote; return this; }
Sets a quote character for String values, null by default. @param quote the quote character
quoteCharacter
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
Apache-2.0
public Builder commentPrefix(String prefix) { this.commentPrefix = prefix; return this; }
Sets a prefix to indicate comments, null by default. @param prefix the prefix to indicate comments
commentPrefix
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
Apache-2.0
public Builder ignoreFirstLine() { this.isIgnoreFirstLine = true; return this; }
Ignore the first line. Not skip the first line by default.
ignoreFirstLine
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
Apache-2.0
public Builder ignoreParseErrors() { this.lenient = true; return this; }
Skip records with parse error instead to fail. Throw an exception by default.
ignoreParseErrors
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
Apache-2.0
public Builder emptyColumnAsNull() { this.emptyColumnAsNull = true; return this; }
Treat empty column as null, false by default.
emptyColumnAsNull
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
Apache-2.0
public CsvTableSource build() { if (path == null) { throw new IllegalArgumentException("Path must be defined."); } if (schema.isEmpty()) { throw new IllegalArgumentException("Fields can not be empty."); } return new CsvTableSource( new CsvInputFormatConfig( path, schema.keySet().toArray(new String[0]), schema.values().toArray(new DataType[0]), IntStream.range(0, schema.values().size()).toArray(), fieldDelim, lineDelim, quoteCharacter, isIgnoreFirstLine, commentPrefix, lenient, emptyColumnAsNull)); }
Apply the current values and constructs a newly-created CsvTableSource. @return a newly-created CsvTableSource
build
java
apache/flink
flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/sources/CsvTableSource.java
Apache-2.0
@Override public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof SpecifiedException)) { return false; } SpecifiedException other = (SpecifiedException) obj; return Objects.equals(getMessage(), other.getMessage()); }
A specified runtime exception override {@link #equals(Object)}.
equals
java
apache/flink
flink-table/flink-table-api-scala/src/test/java/org/apache/flink/table/api/typeutils/ScalaTrySerializerUpgradeTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-scala/src/test/java/org/apache/flink/table/api/typeutils/ScalaTrySerializerUpgradeTest.java
Apache-2.0
public String rewriteBlock(String context) { this.visitor = new BlockStatementVisitor(code, parameters); JavaParser javaParser = new JavaParser(visitor.tokenStream); javaParser.getInterpreter().setPredictionMode(PredictionMode.SLL); visitor.visitStatement(javaParser.statement(), context); visitor.rewrite(); return visitor.rewriter.getText(); }
Rewrite code block that was used for this object initialization. @param context prefix for extracted blocks. @return a map which key represent rewritten block name and value represents rewritten code block, including calls to extracted methods
rewriteBlock
java
apache/flink
flink-table/flink-table-code-splitter/src/main/java/org/apache/flink/table/codesplit/BlockStatementSplitter.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-code-splitter/src/main/java/org/apache/flink/table/codesplit/BlockStatementSplitter.java
Apache-2.0
R runTest(String filename) { try { String code = FileUtils.readFileUtf8( new File( CodeRewriterTestBase.class .getClassLoader() .getResource( resourceDir + "/code/" + filename + ".java") .toURI())); String expected = FileUtils.readFileUtf8( new File( CodeRewriterTestBase.class .getClassLoader() .getResource( resourceDir + "/expected/" + filename + ".java") .toURI())); R rewriter = rewriterProvider.apply(code); // Trying to mitigate any indentation issues between all sort of platforms by simply // trim every line of the "class". Before this change, code-splitter test could fail on // Windows machines while passing on Unix. expected = trimLines(expected); String actual = trimLines(rewriter.rewrite()); assertThat(actual).isEqualTo(expected); return rewriter; } catch (Exception e) { throw new RuntimeException(e); } finally { // we reset the counter to ensure the variable names after rewrite are as expected CodeSplitUtil.getCounter().set(0L); } }
Base test class for {@link CodeRewriter}.
runTest
java
apache/flink
flink-table/flink-table-code-splitter/src/test/java/org/apache/flink/table/codesplit/CodeRewriterTestBase.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-code-splitter/src/test/java/org/apache/flink/table/codesplit/CodeRewriterTestBase.java
Apache-2.0
public static DataType of(LogicalType logicalType) { return TypeConversions.fromLogicalToDataType(logicalType); }
Creates a {@link DataType} from a {@link LogicalType} with default conversion class. @return the {@link LogicalType} converted to a {@link DataType}. @see LogicalType#getDefaultConversion()
of
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static UnresolvedDataType of(Class<?> unresolvedClass) { Preconditions.checkNotNull(unresolvedClass, "Unresolved class name must not be null."); return new UnresolvedDataType( () -> String.format("'%s'", unresolvedClass.getName()), (factory) -> factory.createDataType(unresolvedClass)); }
Creates an unresolved type that will be resolved to a {@link DataType} by analyzing the given class later. <p>During the resolution, Java reflection is used which can be supported by {@link DataTypeHint} annotations for nested, structured types. <p>It will throw an {@link ValidationException} in cases where the reflective extraction needs more information or simply fails. <p>The following examples show how to use and enrich the extraction process: <pre>{@code // returns INT of(Integer.class) // returns TIMESTAMP(9) of(java.time.LocalDateTime.class) // returns an anonymous, unregistered structured type // that is deeply integrated into the API compared to opaque RAW types class User { // extract fields automatically public String name; public int age; // enrich the extraction with precision information public @DataTypeHint("DECIMAL(10,2)") BigDecimal accountBalance; // enrich the extraction with forcing using RAW types public @DataTypeHint(forceRawPattern = "scala.") Address address; // enrich the extraction by specifying defaults public @DataTypeHint(defaultSecondPrecision = 3) Log log; } of(User.class) }</pre> <p>Note: In most of the cases, the {@link UnresolvedDataType} will be automatically resolved by the API. At other locations, a {@link DataTypeFactory} is provided.
of
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static UnresolvedDataType of(String unresolvedName) { Preconditions.checkNotNull(unresolvedName, "Unresolved name must not be null."); return new UnresolvedDataType( () -> unresolvedName, (factory) -> factory.createDataType(unresolvedName)); }
Creates an unresolved type that will be resolved to a {@link DataType} by using a fully or partially defined name. <p>It includes both built-in types (e.g. "INT") as well as user-defined types (e.g. "mycat.mydb.Money"). <p>Note: In most of the cases, the {@link UnresolvedDataType} will be automatically resolved by the API. At other locations, a {@link DataTypeFactory} is provided.
of
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static UnresolvedDataType of(TypeInformation<?> typeInfo) { Preconditions.checkNotNull(typeInfo, "Type information must not be null."); return new UnresolvedDataType( () -> String.format("'%s'", typeInfo), (factory) -> factory.createDataType(typeInfo)); }
Creates an unresolved type that will be resolved to a {@link DataType} by converting the given {@link TypeInformation} later. <p>{@link DataType} is richer than {@link TypeInformation} as it also includes details about the {@link LogicalType}. Therefore, some details will be added implicitly during the conversion. The mapping to data type happens on a best effort basis. If no data type is suitable, the type information is interpreted as {@link DataTypes#RAW(TypeInformation)}. <p>See {@link TypeInfoDataTypeConverter} for more information. <p>Note: In most of the cases, the {@link UnresolvedDataType} will be automatically resolved by the API. At other locations, a {@link DataTypeFactory} is provided.
of
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static DataType CHAR(int n) { return new AtomicDataType(new CharType(n)); }
Data type of a fixed-length character string {@code CHAR(n)} where {@code n} is the number of code points. {@code n} must have a value between 1 and {@link Integer#MAX_VALUE} (both inclusive). @see CharType
CHAR
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static DataType VARCHAR(int n) { return new AtomicDataType(new VarCharType(n)); }
Data type of a variable-length character string {@code VARCHAR(n)} where {@code n} is the maximum number of code points. {@code n} must have a value between 1 and {@link Integer#MAX_VALUE} (both inclusive). @see VarCharType
VARCHAR
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static DataType STRING() { return VARCHAR(Integer.MAX_VALUE); }
Data type of a variable-length character string with defined maximum length. This is a shortcut for {@code VARCHAR(2147483647)} for representing JVM strings. @see VarCharType
STRING
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static DataType BOOLEAN() { return new AtomicDataType(new BooleanType()); }
Data type of a boolean with a (possibly) three-valued logic of {@code TRUE, FALSE, UNKNOWN}. @see BooleanType
BOOLEAN
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static DataType BINARY(int n) { return new AtomicDataType(new BinaryType(n)); }
Data type of a fixed-length binary string (=a sequence of bytes) {@code BINARY(n)} where {@code n} is the number of bytes. {@code n} must have a value between 1 and {@link Integer#MAX_VALUE} (both inclusive). @see BinaryType
BINARY
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static DataType VARBINARY(int n) { return new AtomicDataType(new VarBinaryType(n)); }
Data type of a variable-length binary string (=a sequence of bytes) {@code VARBINARY(n)} where {@code n} is the maximum number of bytes. {@code n} must have a value between 1 and {@link Integer#MAX_VALUE} (both inclusive). @see VarBinaryType
VARBINARY
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static DataType BYTES() { return VARBINARY(Integer.MAX_VALUE); }
Data type of a variable-length binary string (=a sequence of bytes) with defined maximum length. This is a shortcut for {@code VARBINARY(2147483647)} for representing JVM byte arrays. @see VarBinaryType
BYTES
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static DataType DECIMAL(int precision, int scale) { return new AtomicDataType(new DecimalType(precision, scale)); }
Data type of a decimal number with fixed precision and scale {@code DECIMAL(p, s)} where {@code p} is the number of digits in a number (=precision) and {@code s} is the number of digits to the right of the decimal point in a number (=scale). {@code p} must have a value between 1 and 38 (both inclusive). {@code s} must have a value between 0 and {@code p} (both inclusive). @see DecimalType
DECIMAL
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0
public static DataType TINYINT() { return new AtomicDataType(new TinyIntType()); }
Data type of a 1-byte signed integer with values from -128 to 127. @see TinyIntType
TINYINT
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/api/DataTypes.java
Apache-2.0