code
stringlengths
25
201k
docstring
stringlengths
19
96.2k
func_name
stringlengths
0
235
language
stringclasses
1 value
repo
stringlengths
8
51
path
stringlengths
11
314
url
stringlengths
62
377
license
stringclasses
7 values
public static ApiExpression currentDatabase() { return apiCall(BuiltInFunctionDefinitions.CURRENT_DATABASE); }
Returns the current database, the return type of this expression is {@link DataTypes#STRING()}.
currentDatabase
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression localTime() { return apiCall(BuiltInFunctionDefinitions.LOCAL_TIME); }
Returns the current SQL time in local time zone, the return type of this expression is {@link DataTypes#TIME()}, this is a synonym for {@link Expressions#currentTime()}.
localTime
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression localTimestamp() { return apiCall(BuiltInFunctionDefinitions.LOCAL_TIMESTAMP); }
Returns the current SQL timestamp in local time zone, the return type of this expression is {@link DataTypes#TIMESTAMP()}.
localTimestamp
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression toDate(Object dateStr) { return apiCall(BuiltInFunctionDefinitions.TO_DATE, dateStr); }
Converts the given date string with format 'yyyy-MM-dd' to {@link DataTypes#DATE()}. @param dateStr The date string. @return The date value of {@link DataTypes#DATE()} type.
toDate
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression toDate(Object dateStr, Object format) { return apiCall(BuiltInFunctionDefinitions.TO_DATE, dateStr, format); }
Converts the date string with the specified format to {@link DataTypes#DATE()}. @param dateStr The date string. @param format The format of the string. @return The date value of {@link DataTypes#DATE()} type.
toDate
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression toTimestamp(Object timestampStr) { return apiCall(BuiltInFunctionDefinitions.TO_TIMESTAMP, timestampStr); }
Converts the given date time string with format 'yyyy-MM-dd HH:mm:ss' under the 'UTC+0' time zone to {@link DataTypes#TIMESTAMP()}. @param timestampStr The date time string. @return The timestamp value with {@link DataTypes#TIMESTAMP()} type.
toTimestamp
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression toTimestamp(Object timestampStr, Object format) { return apiCall(BuiltInFunctionDefinitions.TO_TIMESTAMP, timestampStr, format); }
Converts the given time string with the specified format under the 'UTC+0' time zone to {@link DataTypes#TIMESTAMP()}. @param timestampStr The date time string. @param format The format of the string. @return The timestamp value with {@link DataTypes#TIMESTAMP()} type.
toTimestamp
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression toTimestampLtz(Object numericEpochTime, Object precision) { return apiCall(BuiltInFunctionDefinitions.TO_TIMESTAMP_LTZ, numericEpochTime, precision); }
Converts a numeric type epoch time to {@link DataTypes#TIMESTAMP_LTZ(int)}. <p>The supported precision is 0 or 3: <ul> <li>0 means the numericEpochTime is in second. <li>3 means the numericEpochTime is in millisecond. </ul> @param numericEpochTime The epoch time with numeric type. @param precision The precision to indicate the epoch time is in second or millisecond. @return The timestamp value with {@link DataTypes#TIMESTAMP_LTZ(int)} type.
toTimestampLtz
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression toTimestampLtz(String timestampStr, String format) { return apiCall(BuiltInFunctionDefinitions.TO_TIMESTAMP_LTZ, timestampStr, format); }
Converts the given time string with the specified format to {@link DataTypes#TIMESTAMP_LTZ(int)}. @param timestampStr The timestamp string to convert. @param format The format of the string. @return The timestamp value with {@link DataTypes#TIMESTAMP_LTZ(int)} type.
toTimestampLtz
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression toTimestampLtz(String timeStamp) { return apiCall(BuiltInFunctionDefinitions.TO_TIMESTAMP_LTZ, timeStamp); }
Converts a timestamp to {@link DataTypes#TIMESTAMP_LTZ(int)}. <p>This method takes a string representing a timestamp and converts it to a TIMESTAMP_LTZ using the built-in TO_TIMESTAMP_LTZ function definition. @param timeStamp The timestamp string to be converted. @return The timestamp value with {@link DataTypes#TIMESTAMP_LTZ(int)} type.
toTimestampLtz
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression toTimestampLtz(Object numericEpochTime) { return apiCall(BuiltInFunctionDefinitions.TO_TIMESTAMP_LTZ, numericEpochTime); }
Converts a numeric type epoch time to {@link DataTypes#TIMESTAMP_LTZ(int)}. <p>This method takes an object representing an epoch time and converts it to a TIMESTAMP_LTZ using the built-in TO_TIMESTAMP_LTZ function definition. @param numericEpochTime The epoch time with numeric type. @return The timestamp value with {@link DataTypes#TIMESTAMP_LTZ(int)} type.
toTimestampLtz
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression toTimestampLtz( Object timestampStr, Object format, Object timezone) { return apiCall(BuiltInFunctionDefinitions.TO_TIMESTAMP_LTZ, timestampStr, format, timezone); }
Converts a string timestamp with the custom format and timezone to {@link DataTypes#TIMESTAMP_LTZ(int)}. <p>The timestamp string will be parsed using the custom format and timezone, and converted to a TIMESTAMP_LTZ value. @param timestampStr The timestamp string to convert. @param format The format pattern to parse the timestamp string. @param timezone The timezone to use for the conversion. @return The timestamp value with {@link DataTypes#TIMESTAMP_LTZ(int)} type.
toTimestampLtz
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression temporalOverlaps( Object leftTimePoint, Object leftTemporal, Object rightTimePoint, Object rightTemporal) { return apiCall( BuiltInFunctionDefinitions.TEMPORAL_OVERLAPS, leftTimePoint, leftTemporal, rightTimePoint, rightTemporal); }
Determines whether two anchored time intervals overlap. Time point and temporal are transformed into a range defined by two time points (start, end). The function evaluates <code>leftEnd >= rightStart && rightEnd >= leftStart</code>. <p>It evaluates: leftEnd >= rightStart && rightEnd >= leftStart <p>e.g. <pre>{@code temporalOverlaps( lit("2:55:00").toTime(), lit(1).hours(), lit("3:30:00").toTime(), lit(2).hours() ) }</pre> <p>leads to true
temporalOverlaps
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression dateFormat(Object timestamp, Object format) { return apiCall(BuiltInFunctionDefinitions.DATE_FORMAT, timestamp, format); }
Formats a timestamp as a string using a specified format. The format must be compatible with MySQL's date formatting syntax as used by the date_parse function. <p>For example {@code dateFormat($("time"), "%Y, %d %M")} results in strings formatted as "2017, 05 May". @param timestamp The timestamp to format as string. @param format The format of the string. @return The formatted timestamp as string.
dateFormat
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression timestampDiff( TimePointUnit timePointUnit, Object timePoint1, Object timePoint2) { return apiCall( BuiltInFunctionDefinitions.TIMESTAMP_DIFF, valueLiteral(timePointUnit), timePoint1, timePoint2); }
Returns the (signed) number of {@link TimePointUnit} between timePoint1 and timePoint2. <p>For example, {@code timestampDiff(TimePointUnit.DAY, lit("2016-06-15").toDate(), lit("2016-06-18").toDate())} leads to 3. @param timePointUnit The unit to compute diff. @param timePoint1 The first point in time. @param timePoint2 The second point in time. @return The number of intervals as integer value.
timestampDiff
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression convertTz(Object dateStr, Object tzFrom, Object tzTo) { return apiCall(BuiltInFunctionDefinitions.CONVERT_TZ, dateStr, tzFrom, tzTo); }
Converts a datetime dateStr (with default ISO timestamp format 'yyyy-MM-dd HH:mm:ss') from time zone tzFrom to time zone tzTo. The format of time zone should be either an abbreviation such as "PST", a full name such as "America/Los_Angeles", or a custom ID such as "GMT-08:00". E.g., convertTz('1970-01-01 00:00:00', 'UTC', 'America/Los_Angeles') returns '1969-12-31 16:00:00'. @param dateStr the date time string @param tzFrom the original time zone @param tzTo the target time zone @return The formatted timestamp as string.
convertTz
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression fromUnixtime(Object unixtime) { return apiCall(BuiltInFunctionDefinitions.FROM_UNIXTIME, unixtime); }
Converts unix timestamp (seconds since '1970-01-01 00:00:00' UTC) to datetime string in the "yyyy-MM-dd HH:mm:ss" format. @param unixtime The unix timestamp with numeric type. @return The formatted timestamp as string.
fromUnixtime
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression fromUnixtime(Object unixtime, Object format) { return apiCall(BuiltInFunctionDefinitions.FROM_UNIXTIME, unixtime, format); }
Converts unix timestamp (seconds since '1970-01-01 00:00:00' UTC) to datetime string in the given format. @param unixtime The unix timestamp with numeric type. @param format The format of the string. @return The formatted timestamp as string.
fromUnixtime
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression unixTimestamp() { return apiCall(BuiltInFunctionDefinitions.UNIX_TIMESTAMP); }
Gets the current unix timestamp in seconds. This function is not deterministic which means the value would be recalculated for each record. @return The current unix timestamp as bigint.
unixTimestamp
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression unixTimestamp(Object timestampStr) { return apiCall(BuiltInFunctionDefinitions.UNIX_TIMESTAMP, timestampStr); }
Converts the given date time string with format 'yyyy-MM-dd HH:mm:ss' to unix timestamp (in seconds), using the time zone specified in the table config. @param timestampStr The date time string. @return The converted timestamp as bigint.
unixTimestamp
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression unixTimestamp(Object timestampStr, Object format) { return apiCall(BuiltInFunctionDefinitions.UNIX_TIMESTAMP, timestampStr, format); }
Converts the given date time string with the specified format to unix timestamp (in seconds), using the specified timezone in table config. @param timestampStr The date time string. @param format The format of the date time string. @return The converted timestamp as bigint.
unixTimestamp
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression map(Object key, Object value, Object... tail) { return apiCallAtLeastTwoArgument(BuiltInFunctionDefinitions.MAP, key, value, tail); }
Creates a map of expressions. <pre>{@code table.select( map( "key1", 1, "key2", 2, "key3", 3 )) }</pre> <p>Note keys and values should have the same types for all entries.
map
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression mapFromArrays(Object key, Object value) { return apiCall( BuiltInFunctionDefinitions.MAP_FROM_ARRAYS, objectToExpression(key), objectToExpression(value)); }
Creates a map from an array of keys and an array of values. <pre>{@code table.select( mapFromArrays( array("key1", "key2", "key3"), array(1, 2, 3) )) }</pre> <p>Note both arrays should have the same length.
mapFromArrays
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression rowInterval(Long rows) { return new ApiExpression(valueLiteral(rows)); }
Creates an interval of rows. @see Table#window(GroupWindow) @see Table#window(OverWindow...)
rowInterval
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression pi() { return apiCall(BuiltInFunctionDefinitions.PI); }
Returns a value that is closer than any other value to pi.
pi
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression e() { return apiCall(BuiltInFunctionDefinitions.E); }
Returns a value that is closer than any other value to e.
e
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression rand() { return apiCall(BuiltInFunctionDefinitions.RAND); }
Returns a pseudorandom double value between 0.0 (inclusive) and 1.0 (exclusive).
rand
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression rand(Object seed) { return apiCall(BuiltInFunctionDefinitions.RAND, objectToExpression(seed)); }
Returns a pseudorandom double value between 0.0 (inclusive) and 1.0 (exclusive) with an initial seed. Two rand() functions will return identical sequences of numbers if they have the same initial seed.
rand
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression randInteger(Object bound) { return apiCall(BuiltInFunctionDefinitions.RAND_INTEGER, objectToExpression(bound)); }
Returns a pseudorandom integer value between 0 (inclusive) and the specified value (exclusive).
randInteger
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression randInteger(Object seed, Object bound) { return apiCall( BuiltInFunctionDefinitions.RAND_INTEGER, objectToExpression(seed), objectToExpression(bound)); }
Returns a pseudorandom integer value between 0 (inclusive) and the specified value (exclusive) with an initial seed. Two randInteger() functions will return identical sequences of numbers if they have the same initial seed and same bound.
randInteger
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression concat(Object string, Object... strings) { return apiCallAtLeastOneArgument(BuiltInFunctionDefinitions.CONCAT, string, strings); }
Returns the string that results from concatenating the arguments. Returns NULL if any argument is NULL.
concat
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression atan2(Object y, Object x) { return apiCallAtLeastOneArgument(BuiltInFunctionDefinitions.ATAN2, y, x); }
Calculates the arc tangent of a given coordinate.
atan2
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression concatWs(Object separator, Object string, Object... strings) { return apiCallAtLeastTwoArgument( BuiltInFunctionDefinitions.CONCAT_WS, separator, string, strings); }
Returns the string that results from concatenating the arguments and separator. Returns NULL If the separator is NULL. <p>Note: this function does not skip empty strings. However, it does skip any NULL values after the separator argument.
concatWs
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression uuid() { return apiCall(BuiltInFunctionDefinitions.UUID); }
Returns a UUID (Universally Unique Identifier) string (e.g., "3d3c68f7-f608-473f-b60c-b0c44ad4cc4e") according to RFC 4122 type 4 (pseudo randomly generated) UUID. The UUID is generated using a cryptographically strong pseudo random number generator.
uuid
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression nullOf(DataType dataType) { return new ApiExpression(valueLiteral(null, dataType)); }
Returns a null literal value of a given data type. <p>e.g. {@code nullOf(DataTypes.INT())}
nullOf
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression nullOf(TypeInformation<?> typeInfo) { return nullOf(TypeConversions.fromLegacyInfoToDataType(typeInfo)); }
@deprecated This method will be removed in future versions as it uses the old type system. It is recommended to use {@link #nullOf(DataType)} instead which uses the new type system based on {@link DataTypes}. Please make sure to use either the old or the new type system consistently to avoid unintended behavior. See the website documentation for more information.
nullOf
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression log(Object value) { return apiCall(BuiltInFunctionDefinitions.LOG, value); }
Calculates the logarithm of the given value.
log
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression ifThenElse(Object condition, Object ifTrue, Object ifFalse) { return apiCall(BuiltInFunctionDefinitions.IF, condition, ifTrue, ifFalse); }
Ternary conditional operator that decides which of two other expressions should be evaluated based on an evaluated boolean condition. <p>e.g. ifThenElse($("f0") > 5, "A", "B") leads to "A" @param condition boolean condition @param ifTrue expression to be evaluated if condition holds @param ifFalse expression to be evaluated if condition does not hold
ifThenElse
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression coalesce(Object... args) { return apiCall(BuiltInFunctionDefinitions.COALESCE, args); }
Returns the first argument that is not NULL. <p>If all arguments are NULL, it returns NULL as well. The return type is the least restrictive, common type of all of its arguments. The return type is nullable if all arguments are nullable as well. <p>Examples: <pre>{@code // Returns "default" coalesce(null, "default") // Returns the first non-null value among f0 and f1, or "default" if f0 and f1 are both null coalesce($("f0"), $("f1"), "default") }</pre> @param args the input expressions.
coalesce
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression withAllColumns() { return $("*"); }
Creates an expression that selects all columns. It can be used wherever an array of expression is accepted such as function calls, projections, or groupings. <p>This expression is a synonym of $("*"). It is semantically equal to {@code SELECT *} in SQL when used in a projection. <p>Example: <pre>{@code tab.select(withAllColumns()) }</pre> @see #withColumns(Object, Object...) @see #withoutColumns(Object, Object...)
withAllColumns
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression withColumns(Object head, Object... tail) { return apiCallAtLeastOneArgument(BuiltInFunctionDefinitions.WITH_COLUMNS, head, tail); }
Creates an expression that selects a range of columns. It can be used wherever an array of expression is accepted such as function calls, projections, or groupings. <p>A range can either be index-based or name-based. Indices start at 1 and boundaries are inclusive. <p>e.g. withColumns(range("b", "c")) or withColumns($("*"))
withColumns
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression withoutColumns(Object head, Object... tail) { return apiCallAtLeastOneArgument(BuiltInFunctionDefinitions.WITHOUT_COLUMNS, head, tail); }
Creates an expression that selects all columns except for the given range of columns. It can be used wherever an array of expression is accepted such as function calls, projections, or groupings. <p>A range can either be index-based or name-based. Indices start at 1 and boundaries are inclusive. <p>e.g. withoutColumns(range("b", "c")) or withoutColumns($("c"))
withoutColumns
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression jsonObject(JsonOnNull onNull, Object... keyValues) { final Object[] arguments = Stream.concat(Stream.of(onNull), Arrays.stream(keyValues)).toArray(Object[]::new); return apiCall(JSON_OBJECT, arguments); }
Builds a JSON object string from a list of key-value pairs. <p>{@param keyValues} is an even-numbered list of alternating key/value pairs. Note that keys must be non-{@code NULL} string literals, while values may be arbitrary expressions. <p>This function returns a JSON string. The {@link JsonOnNull onNull} behavior defines how to treat {@code NULL} values. <p>Values which are created from another JSON construction function call ({@code jsonObject}, {@code jsonArray}) are inserted directly rather than as a string. This allows building nested JSON structures. <p>Examples: <pre>{@code // {} jsonObject(JsonOnNull.NULL) // "{\"K1\":\"V1\",\"K2\":\"V2\"}" // {"K1":"V1","K2":"V2"} jsonObject(JsonOnNull.NULL, "K1", "V1", "K2", "V2") // Expressions as values jsonObject(JsonOnNull.NULL, "orderNo", $("orderId")) // ON NULL jsonObject(JsonOnNull.NULL, "K1", nullOf(DataTypes.STRING())) // "{\"K1\":null}" jsonObject(JsonOnNull.ABSENT, "K1", nullOf(DataTypes.STRING())) // "{}" // {"K1":{"K2":"V"}} jsonObject(JsonOnNull.NULL, "K1", json("{\"K2\":\"V\"}")) // {"K1":{"K2":"V"}} jsonObject(JsonOnNull.NULL, "K1", jsonObject(JsonOnNull.NULL, "K2", "V")) }</pre> @see #json(Object) @see #jsonArray(JsonOnNull, Object...)
jsonObject
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression json(Object value) { return apiCall(JSON, value); }
Expects a raw, pre-formatted JSON string and returns its values as-is without escaping it as a string. <p>This function can currently only be used within the {@link #jsonObject(JsonOnNull, Object...)} and {@link #jsonArray(JsonOnNull, Object...)} function. It allows passing pre-formatted JSON strings that will be inserted directly into the resulting JSON structure rather than being escaped as a string value. This allows storing nested JSON structures in a `JSON_OBJECT` or `JSON_ARRAY` without processing them as strings, which is often useful when ingesting already formatted json data. If the value is null or empty, the function returns {@code null}. <p>Examples: <pre>{@code // {"K":{"K2":42}} jsonObject(JsonOnNull.NULL, "K", json("{\"K2\": 42}")) // {"K":{"K2":{"K3":42}}} jsonObject( JsonOnNull.NULL, "K", json(""" { "K2": { "K3": 42 } } """)) // {"K": null} jsonObject(JsonOnNull.NULL, "K", json("")) // Invalid - JSON function can only be used within JSON_OBJECT json("{\"value\": 42}") }</pre>
json
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
public static ApiExpression jsonObjectAgg(JsonOnNull onNull, Object keyExpr, Object valueExpr) { final BuiltInFunctionDefinition functionDefinition; switch (onNull) { case ABSENT: functionDefinition = JSON_OBJECTAGG_ABSENT_ON_NULL; break; case NULL: default: functionDefinition = JSON_OBJECTAGG_NULL_ON_NULL; break; } return apiCall(functionDefinition, keyExpr, valueExpr); }
Builds a JSON object string by aggregating key-value expressions into a single JSON object. <p>The key expression must return a non-nullable character string. Value expressions can be arbitrary, including other JSON functions. If a value is {@code NULL}, the {@link JsonOnNull onNull} behavior defines what to do. <p>Note that keys must be unique. If a key occurs multiple times, an error will be thrown. <p>This function is currently not supported in {@code OVER} windows. <p>Examples: <pre>{@code // "{\"Apple\":2,\"Banana\":17,\"Orange\":0}" orders.select(jsonObjectAgg(JsonOnNull.NULL, $("product"), $("cnt"))) }</pre> @see #jsonObject(JsonOnNull, Object...) @see #jsonArrayAgg(JsonOnNull, Object)
jsonObjectAgg
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Serializes the given value into a JSON string.
 *
 * <p>Delegates to the {@code JSON_STRING} built-in function; a {@code null} input yields
 * {@code null}.
 */
public static ApiExpression jsonString(Object value) {
    return apiCallAtLeastOneArgument(JSON_STRING, value);
}
Serializes a value into JSON. <p>This function returns a JSON string containing the serialized value. If the value is {@code null}, the function returns {@code null}. <p>Examples: <pre>{@code // null jsonString(nullOf(DataTypes.INT())) jsonString(1) // "1" jsonString(true) // "true" jsonString("Hello, World!") // "\"Hello, World!\"" jsonString(Arrays.asList(1, 2)) // "[1,2]" }</pre>
jsonString
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Builds a JSON array string from a list of values, honoring the {@link JsonOnNull} behavior.
 *
 * <p>The null-handling flag is passed to the built-in function as the first argument, followed
 * by the element values in their original order.
 */
public static ApiExpression jsonArray(JsonOnNull onNull, Object... values) {
    final Object[] arguments = new Object[values.length + 1];
    arguments[0] = onNull;
    System.arraycopy(values, 0, arguments, 1, values.length);
    return apiCall(JSON_ARRAY, arguments);
}
Builds a JSON array string from a list of values. <p>This function returns a JSON string. The values can be arbitrary expressions. The {@link JsonOnNull onNull} behavior defines how to treat {@code NULL} values. <p>Elements which are created from another JSON construction function call ({@code jsonObject}, {@code jsonArray}) are inserted directly rather than as a string. This allows building nested JSON structures. <p>Examples: <pre>{@code // "[]" jsonArray(JsonOnNull.NULL) // "[1,\"2\"]" jsonArray(JsonOnNull.NULL, 1, "2") // Expressions as values jsonArray(JsonOnNull.NULL, $("orderId")) // ON NULL jsonArray(JsonOnNull.NULL, nullOf(DataTypes.STRING())) // "[null]" jsonArray(JsonOnNull.ABSENT, nullOf(DataTypes.STRING())) // "[]" // "[[1]]" jsonArray(JsonOnNull.NULL, jsonArray(JsonOnNull.NULL, 1)) // "[{\"nested_json\":{\"value\":42}}]" jsonArray(JsonOnNull.NULL, json("{\"nested_json\": {\"value\": 42}}")) }</pre> @see #json(Object) @see #jsonObject(JsonOnNull, Object...)
jsonArray
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Builds a JSON array string by aggregating items into an array.
 *
 * <p>If an item is {@code NULL}, the given {@link JsonOnNull} behavior decides whether it is
 * kept as a JSON null or omitted.
 */
public static ApiExpression jsonArrayAgg(JsonOnNull onNull, Object itemExpr) {
    // NULL keeps null items; ABSENT (and any future behaviors) drop them.
    final BuiltInFunctionDefinition functionDefinition =
            onNull == JsonOnNull.NULL
                    ? JSON_ARRAYAGG_NULL_ON_NULL
                    : JSON_ARRAYAGG_ABSENT_ON_NULL;
    return apiCall(functionDefinition, itemExpr);
}
Builds a JSON object string by aggregating items into an array. <p>Item expressions can be arbitrary, including other JSON functions. If a value is {@code NULL}, the {@link JsonOnNull onNull} behavior defines what to do. <p>This function is currently not supported in {@code OVER} windows, unbounded session windows, or hop windows. <p>Examples: <pre>{@code // "[\"Apple\",\"Banana\",\"Orange\"]" orders.select(jsonArrayAgg(JsonOnNull.NULL, $("product"))) }</pre> @see #jsonArray(JsonOnNull, Object...) @see #jsonObjectAgg(JsonOnNull, Object, Object)
jsonArrayAgg
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * A window function providing access to the row directly after the current row.
 *
 * <p>Delegates to the {@code LEAD} built-in function with the default offset.
 */
public static ApiExpression lead(Object value) {
    return apiCall(BuiltInFunctionDefinitions.LEAD, value);
}
A window function that provides access to a row that comes directly after the current row. <p>Example: <pre>{@code table.window(Over.orderBy($("ts")).partitionBy("organisation").as("w")) .select( $("organisation"), $("revenue"), lead($("revenue")).over($("w")).as("next_revenue") ) }</pre>
lead
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * A window function providing access to a row at the given physical offset after the current
 * row.
 */
public static ApiExpression lead(Object value, Object offset) {
    return apiCall(BuiltInFunctionDefinitions.LEAD, value, offset);
}
A window function that provides access to a row at a specified physical offset which comes after the current row. <p>Example: <pre>{@code table.window(Over.orderBy($("ts")).partitionBy("organisation").as("w")) .select( $("organisation"), $("revenue"), lead($("revenue"), 1).over($("w")).as("next_revenue") ) }</pre>
lead
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * A window function providing access to a row at the given physical offset after the current
 * row, returning {@code defaultValue} when the offset falls outside the partition.
 */
public static ApiExpression lead(Object value, Object offset, Object defaultValue) {
    return apiCall(BuiltInFunctionDefinitions.LEAD, value, offset, defaultValue);
}
A window function that provides access to a row at a specified physical offset which comes after the current row. <p>The default value is returned when the offset is beyond the scope of the partition. If a default value is not specified, NULL is returned. {@code default} must be type-compatible with {@code value}. <p>Example: <pre>{@code table.window(Over.orderBy($("ts")).partitionBy("organisation").as("w")) .select( $("organisation"), $("revenue"), lead($("revenue"), 1, lit(0)).over($("w")).as("next_revenue") ) }</pre>
lead
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * A window function providing access to the row directly before the current row.
 *
 * <p>Delegates to the {@code LAG} built-in function with the default offset.
 */
public static ApiExpression lag(Object value) {
    return apiCall(BuiltInFunctionDefinitions.LAG, value);
}
A window function that provides access to a row that comes directly before the current row. <p>Example: <pre>{@code table.window(Over.orderBy($("ts")).partitionBy("organisation").as("w")) .select( $("organisation"), $("revenue"), lag($("revenue")).over($("w")).as("prev_revenue") ) }</pre>
lag
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * A window function providing access to a row at the given physical offset before the current
 * row.
 */
public static ApiExpression lag(Object value, Object offset) {
    return apiCall(BuiltInFunctionDefinitions.LAG, value, offset);
}
A window function that provides access to a row at a specified physical offset which comes before the current row. <p>Example: <pre>{@code table.window(Over.orderBy($("ts")).partitionBy("organisation").as("w")) .select( $("organisation"), $("revenue"), lag($("revenue"), 1).over($("w")).as("prev_revenue") ) }</pre>
lag
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * A window function providing access to a row at the given physical offset before the current
 * row, returning {@code defaultValue} when the offset falls outside the partition.
 */
public static ApiExpression lag(Object value, Object offset, Object defaultValue) {
    return apiCall(BuiltInFunctionDefinitions.LAG, value, offset, defaultValue);
}
A window function that provides access to a row at a specified physical offset which comes before the current row. <p>The default value is returned when the offset is beyond the scope of the partition. If a default value is not specified, NULL is returned. {@code default} must be type-compatible with {@code value}. <p>Example: <pre>{@code table.window(Over.orderBy($("ts")).partitionBy("organisation").as("w")) .select( $("organisation"), $("revenue"), lag($("revenue"), 1, lit(0)).over($("w")).as("prev_revenue") ) }</pre>
lag
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * A call to a function that will be looked up in a catalog by the given path.
 *
 * <p>Each argument is first converted into an {@link Expression} before being handed to the
 * lookup call.
 */
public static ApiExpression call(String path, Object... arguments) {
    final Expression[] argumentExpressions = new Expression[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
        argumentExpressions[i] = ApiExpressionUtils.objectToExpression(arguments[i]);
    }
    return new ApiExpression(ApiExpressionUtils.lookupCall(path, argumentExpressions));
}
A call to a function that will be looked up in a catalog. There are two kinds of functions: <ul> <li>System functions - which are identified with one part names <li>Catalog functions - which are identified always with three parts names (catalog, database, function) </ul> <p>Moreover each function can either be a temporary function or permanent one (which is stored in an external catalog). <p>Based on that two properties the resolution order for looking up a function based on the provided {@code functionName} is following: <ul> <li>Temporary system function <li>System function <li>Temporary catalog function <li>Catalog function </ul> @see TableEnvironment#useCatalog(String) @see TableEnvironment#useDatabase(String) @see TableEnvironment#createTemporaryFunction @see TableEnvironment#createTemporarySystemFunction
call
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Sets the given typed option on the format.
 *
 * <p>The value is converted to its string representation before being stored.
 */
public <T> Builder option(ConfigOption<T> configOption, T value) {
    Preconditions.checkNotNull(configOption, "Config option must not be null.");
    Preconditions.checkNotNull(value, "Value must not be null.");
    final String stringValue = ConfigurationUtils.convertValue(value, String.class);
    options.put(configOption.key(), stringValue);
    return this;
}
Sets the given option on the format.
option
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/FormatDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/FormatDescriptor.java
Apache-2.0
/**
 * Sets the given string-based option on the format.
 *
 * <p>Keys must not be prefixed with the format identifier; the prefix is added automatically
 * when the descriptor is applied.
 */
public Builder option(String key, String value) {
    Preconditions.checkNotNull(key, "Key must not be null.");
    Preconditions.checkNotNull(value, "Value must not be null.");
    options.put(key, value);
    return this;
}
Sets the given option on the format. <p>Note that format options must not be prefixed with the format identifier itself here. For example, <pre>{@code FormatDescriptor.forFormat("json") .option("ignore-parse-errors", "true") .build(); }</pre> <p>will automatically be converted into its prefixed form: <pre>{@code 'format' = 'json' 'json.ignore-parse-errors' = 'true' }</pre>
option
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/FormatDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/FormatDescriptor.java
Apache-2.0
/** Creates an immutable {@link FormatDescriptor} from the collected format name and options. */
public FormatDescriptor build() {
    return new FormatDescriptor(format, options);
}
Returns an immutable instance of {@link FormatDescriptor}.
build
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/FormatDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/FormatDescriptor.java
Apache-2.0
/**
 * Partitions the elements on some partition keys.
 *
 * @param partitionBy list of field references
 * @return an over window with defined partitioning
 */
public static OverWindowPartitioned partitionBy(Expression... partitionBy) {
    return new OverWindowPartitioned(Arrays.asList(partitionBy));
}
Partitions the elements on some partition keys. <p>Each partition is individually sorted and aggregate functions are applied to each partition separately. @param partitionBy list of field references @return an over window with defined partitioning
partitionBy
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Over.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Over.java
Apache-2.0
/**
 * Sets the preceding offset (time or row-count interval) for the over window.
 *
 * @param preceding preceding offset relative to the current row
 * @return an over window with defined preceding
 */
public OverWindowPartitionedOrderedPreceding preceding(Expression preceding) {
    return new OverWindowPartitionedOrderedPreceding(partitionBy, orderBy, preceding);
}
Set the preceding offset (based on time or row-count intervals) for over window. @param preceding preceding offset relative to the current row. @return an over window with defined preceding
preceding
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/OverWindowPartitionedOrdered.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/OverWindowPartitionedOrdered.java
Apache-2.0
/**
 * Sets the following offset (time or row-count interval) for the over window.
 *
 * @param following following offset relative to the current row
 * @return an over window with defined following
 */
public OverWindowPartitionedOrderedPreceding following(Expression following) {
    this.optionalFollowing = following;
    return this;
}
Set the following offset (based on time or row-count intervals) for over window. @param following following offset that relative to the current row. @return an over window with defined following
following
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/OverWindowPartitionedOrderedPreceding.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/OverWindowPartitionedOrderedPreceding.java
Apache-2.0
/**
 * Returns this config viewed as the requested {@link PlannerConfig} subtype, or
 * {@link Optional#empty()} if this instance is not of that type.
 *
 * @param type the subtype to unwrap to
 * @return the config as the requested type, if compatible
 */
default <T extends PlannerConfig> Optional<T> unwrap(Class<T> type) {
    // Class#cast is a checked cast, so no unchecked cast or @SuppressWarnings is needed.
    if (type.isInstance(this)) {
        return Optional.of(type.cast(this));
    }
    return Optional.empty();
}
The {@link PlannerConfig} holds parameters to configure the behavior of queries.
unwrap
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlannerConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlannerConfig.java
Apache-2.0
/** Creates a plan reference backed by the given file. */
public static PlanReference fromFile(File file) {
    Objects.requireNonNull(file, "File cannot be null");
    return new FilePlanReference(file);
}
Create a reference starting from a file path.
fromFile
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
Apache-2.0
/** Creates a plan reference backed by the given JSON string content. */
public static PlanReference fromJsonString(String jsonString) {
    Objects.requireNonNull(jsonString, "Json string cannot be null");
    return new JsonContentPlanReference(jsonString);
}
Create a reference starting from a JSON string.
fromJsonString
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
Apache-2.0
/** Creates a plan reference backed by the given Smile binary representation. */
public static PlanReference fromSmileBytes(byte[] smileBytes) {
    Objects.requireNonNull(smileBytes, "Smile bytes cannot be null");
    return new BytesContentPlanReference(smileBytes);
}
Create a reference starting from a Smile binary representation.
fromSmileBytes
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
Apache-2.0
/**
 * Creates a plan reference from a classpath resource, resolved against the current thread's
 * context {@link ClassLoader}.
 */
public static PlanReference fromResource(String resourcePath) {
    Objects.requireNonNull(resourcePath, "Resource path cannot be null");
    final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    return fromResource(contextClassLoader, resourcePath);
}
Create a reference from a file in the classpath, using {@code Thread.currentThread().getContextClassLoader()} as {@link ClassLoader}.
fromResource
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
Apache-2.0
/** Creates a plan reference from a classpath resource resolved with the given class loader. */
public static PlanReference fromResource(ClassLoader classLoader, String resourcePath) {
    Objects.requireNonNull(classLoader, "ClassLoader cannot be null");
    Objects.requireNonNull(resourcePath, "Resource path cannot be null");
    return new ResourcePlanReference(classLoader, resourcePath);
}
Create a reference from a file in the classpath.
fromResource
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/PlanReference.java
Apache-2.0
/**
 * Creates a session window whose boundaries are defined by intervals of inactivity.
 *
 * @param gap how long to wait for new data before closing the session window
 * @return a partially defined session window
 */
public static SessionWithGap withGap(Expression gap) {
    return new SessionWithGap(gap);
}
Creates a session window. The boundaries of session windows are defined by intervals of inactivity, i.e., a session window is closed if no event appears for a defined gap period. @param gap specifies how long (as an interval of milliseconds) to wait for new data before closing the session window. @return a partially defined session window
withGap
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Session.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Session.java
Apache-2.0
/** Returns the inactivity gap that defines this session window's boundaries. */
public Expression getGap() {
    return this.gap;
}
Session window on time with alias. Fully specifies a window.
getGap
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/SessionWithGapOnTimeWithAlias.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/SessionWithGapOnTimeWithAlias.java
Apache-2.0
/**
 * Creates a sliding window of the given size. The slide interval is specified afterwards via
 * {@code every(...)}; windows overlap when the slide is smaller than the size.
 *
 * @param size the size of the window as a time or row-count interval
 * @return a partially specified sliding window
 */
public static SlideWithSize over(Expression size) {
    return new SlideWithSize(size);
}
Creates a sliding window. Sliding windows have a fixed size and slide by a specified slide interval. If the slide interval is smaller than the window size, sliding windows are overlapping. Thus, an element can be assigned to multiple windows. <p>For example, a sliding window of size 15 minutes with 5 minutes sliding interval groups elements of 15 minutes and evaluates every five minutes. Each element is contained in three consecutive window evaluations. @param size the size of the window as time or row-count interval @return a partially specified sliding window
over
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Slide.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Slide.java
Apache-2.0
/**
 * Specifies the window's slide as a time or row-count interval; the slide determines how often
 * windows are started.
 *
 * @param slide the slide of the window
 * @return a sliding window
 */
public SlideWithSizeAndSlide every(Expression slide) {
    return new SlideWithSizeAndSlide(size, slide);
}
Specifies the window's slide as time or row-count interval. <p>The slide determines the interval in which windows are started. Hence, sliding windows can overlap if the slide is smaller than the size of the window. <p>For example, you could have windows of size 15 minutes that slide by 3 minutes. With this 15 minutes worth of elements are grouped every 3 minutes and each row contributes to 5 windows. @param slide the slide of the window either as time or row-count interval. @return a sliding window
every
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/SlideWithSize.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/SlideWithSize.java
Apache-2.0
/**
 * Returns the schema of this table.
 *
 * @deprecated As part of FLIP-164, {@link TableSchema} has been replaced by {@link Schema}
 *     (declaration) and {@link ResolvedSchema} (resolved form).
 */
@Deprecated
default TableSchema getSchema() {
    return TableSchema.fromResolvedSchema(getResolvedSchema(), DefaultSqlFactory.INSTANCE);
}
Returns the schema of this table. @deprecated This method has been deprecated as part of FLIP-164. {@link TableSchema} has been replaced by two more dedicated classes {@link Schema} and {@link ResolvedSchema}. Use {@link Schema} for declaration in APIs. {@link ResolvedSchema} is offered by the framework after resolution and validation.
getSchema
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
Apache-2.0
/** Shorthand for {@code insertInto(tablePath).execute()}. */
default TableResult executeInsert(String tablePath) {
    return insertInto(tablePath).execute();
}
Shorthand for {@code tableEnv.insertInto(tablePath).execute()}. @see #insertInto(String) @see TablePipeline#execute()
executeInsert
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
Apache-2.0
/** Shorthand for {@code insertInto(tablePath, overwrite).execute()}. */
default TableResult executeInsert(String tablePath, boolean overwrite) {
    return insertInto(tablePath, overwrite).execute();
}
Shorthand for {@code tableEnv.insertInto(tablePath, overwrite).execute()}. @see #insertInto(String, boolean) @see TablePipeline#execute()
executeInsert
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
Apache-2.0
/** Shorthand for {@code insertInto(descriptor).execute()}. */
default TableResult executeInsert(TableDescriptor descriptor) {
    return insertInto(descriptor).execute();
}
Shorthand for {@code tableEnv.insertInto(descriptor).execute()}. @see #insertInto(TableDescriptor) @see TablePipeline#execute()
executeInsert
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
Apache-2.0
/** Shorthand for {@code insertInto(descriptor, overwrite).execute()}. */
default TableResult executeInsert(TableDescriptor descriptor, boolean overwrite) {
    return insertInto(descriptor, overwrite).execute();
}
Shorthand for {@code tableEnv.insertInto(descriptor, overwrite).execute()}. @see #insertInto(TableDescriptor, boolean) @see TablePipeline#execute()
executeInsert
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Table.java
Apache-2.0
/**
 * Sets an application-specific value for the given {@link ConfigOption}.
 *
 * <p>Preferred over {@link #set(String, String)}: type-safe and self-documenting.
 */
@Override
public <T> TableConfig set(ConfigOption<T> option, T value) {
    this.configuration.set(option, value);
    return this;
}
Sets an application-specific value for the given {@link ConfigOption}. <p>This method should be preferred over {@link #set(String, String)} as it is type-safe, avoids unnecessary parsing of the value, and provides inline documentation. <p>Note: Scala users might need to convert the value into a boxed type. E.g. by using {@code Int.box(1)} or {@code Boolean.box(false)}. @see TableConfigOptions @see ExecutionConfigOptions @see OptimizerConfigOptions
set
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/**
 * Sets an application-specific string-based value for the given string-based key; the value is
 * parsed by the framework on access.
 */
public TableConfig set(String key, String value) {
    this.configuration.setString(key, value);
    return this;
}
Sets an application-specific string-based value for the given string-based key. <p>The value will be parsed by the framework on access. <p>This method exists for convenience when configuring a session with string-based properties. Use {@link #set(ConfigOption, Object)} for more type-safety and inline documentation. @see TableConfigOptions @see ExecutionConfigOptions @see OptimizerConfigOptions
set
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/**
 * Reads the given option, preferring the application-specific configuration and falling back to
 * the root (environment) configuration for defaults.
 */
@Override
public <T> T get(ConfigOption<T> option) {
    final Optional<T> applicationValue = configuration.getOptional(option);
    return applicationValue.orElseGet(() -> rootConfiguration.get(option));
}
{@inheritDoc} <p>This method gives read-only access to the full configuration. However, application-specific configuration has precedence. Configuration of outer layers is used for defaults and fallbacks. See the docs of {@link TableConfig} for more information. @param option metadata of the option to read @param <T> type of the value to read @return read value or {@link ConfigOption#defaultValue()} if not found
get
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/** Gives read-only access to the underlying environment-specific configuration. */
@Internal
public ReadableConfig getRootConfiguration() {
    return this.rootConfiguration;
}
Gives direct access to the underlying environment-specific key-value map for advanced configuration.
getRootConfiguration
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/**
 * Adds the given key-value configuration to the application-specific configuration, overwriting
 * existing keys.
 *
 * @param configuration key-value configuration to be added
 */
public void addConfiguration(Configuration configuration) {
    Preconditions.checkNotNull(configuration);
    this.configuration.addAll(configuration);
}
Adds the given key-value configuration to the underlying application-specific configuration. It overwrites existing keys. @param configuration key-value configuration to be added
addConfiguration
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/**
 * Sets the current SQL dialect used to parse a SQL query.
 *
 * <p>Lower-casing is done with {@link java.util.Locale#ROOT} so the stored option value does
 * not depend on the default locale (avoids e.g. the Turkish dotless-i problem).
 */
public void setSqlDialect(SqlDialect sqlDialect) {
    set(
            TableConfigOptions.TABLE_SQL_DIALECT,
            sqlDialect.name().toLowerCase(java.util.Locale.ROOT));
}
Sets the current SQL dialect to parse a SQL query. Flink's SQL behavior by default.
setSqlDialect
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/** Returns the current planner configuration for Table API and SQL queries. */
public PlannerConfig getPlannerConfig() {
    return this.plannerConfig;
}
Returns the current configuration of Planner for Table API and SQL queries.
getPlannerConfig
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/**
 * Sets the planner configuration. Has no effect after the first query has been defined.
 *
 * @throws NullPointerException if {@code plannerConfig} is null
 */
public void setPlannerConfig(PlannerConfig plannerConfig) {
    this.plannerConfig = Preconditions.checkNotNull(plannerConfig);
}
Sets the configuration of Planner for Table API and SQL queries. Changing the configuration has no effect after the first query has been defined.
setPlannerConfig
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/**
 * Returns the threshold at which generated code is split into sub-function calls (keeps methods
 * under Java's 64 KB bytecode limit and the JIT's 8 KB compilation limit).
 */
public Integer getMaxGeneratedCodeLength() {
    return this.configuration.get(TableConfigOptions.MAX_LENGTH_GENERATED_CODE);
}
Returns the current threshold where generated code will be split into sub-function calls. Java has a maximum method length of 64 KB. This setting allows for finer granularity if necessary. <p>Default value is 4000 instead of 64KB as by default JIT refuses to work on methods with more than 8K byte code.
getMaxGeneratedCodeLength
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/**
 * Sets the threshold at which generated code is split into sub-function calls (keeps methods
 * under Java's 64 KB bytecode limit and the JIT's 8 KB compilation limit).
 */
public void setMaxGeneratedCodeLength(Integer maxGeneratedCodeLength) {
    this.configuration.set(TableConfigOptions.MAX_LENGTH_GENERATED_CODE, maxGeneratedCodeLength);
}
Sets current threshold where generated code will be split into sub-function calls. Java has a maximum method length of 64 KB. This setting allows for finer granularity if necessary. <p>Default value is 4000 instead of 64KB as by default JIT refuses to work on methods with more than 8K byte code.
setMaxGeneratedCodeLength
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/**
 * Sets a custom user parameter, accessible via
 * {@code FunctionContext#getJobParameter(String, String)}.
 *
 * <p>Adds one entry to the current value of {@code PipelineOptions#GLOBAL_JOB_PARAMETERS};
 * existing entries are preserved by copying them into a fresh map first.
 */
public void addJobParameter(String key, String value) {
    final Map<String, String> params = new HashMap<>();
    // Copy any previously set parameters before adding the new entry.
    getOptional(PipelineOptions.GLOBAL_JOB_PARAMETERS).ifPresent(params::putAll);
    params.put(key, value);
    set(PipelineOptions.GLOBAL_JOB_PARAMETERS, params);
}
Sets a custom user parameter that can be accessed via {@link FunctionContext#getJobParameter(String, String)}. <p>This will add an entry to the current value of {@link PipelineOptions#GLOBAL_JOB_PARAMETERS}. <p>It is also possible to set multiple parameters at once, which will override any previously set parameters: <pre>{@code Map<String, String> params = ... TableConfig config = tEnv.getConfig(); config.set(PipelineOptions.GLOBAL_JOB_PARAMETERS, params); }</pre>
addJobParameter
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableConfig.java
Apache-2.0
/**
 * Converts this descriptor into a {@link CatalogTable}.
 *
 * @throws ValidationException if no schema has been set
 */
public CatalogTable toCatalogTable() {
    final Optional<Schema> maybeSchema = getSchema();
    if (!maybeSchema.isPresent()) {
        throw new ValidationException(
                "Missing schema in TableDescriptor. "
                        + "A schema is typically required. "
                        + "It can only be omitted at certain "
                        + "documented locations.");
    }
    return CatalogTable.newBuilder()
            .schema(maybeSchema.get())
            .options(getOptions())
            .distribution(distribution)
            .partitionKeys(partitionKeys)
            .comment(getComment().orElse(null))
            .build();
}
Converts this descriptor into a {@link CatalogTable}.
toCatalogTable
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines the schema of the {@link TableDescriptor}. May be {@code null} only where the schema
 * can be inferred, e.g. {@code Table#insertInto(TableDescriptor)}.
 */
public Builder schema(@Nullable Schema schema) {
    this.schema = schema;
    return this;
}
Define the schema of the {@link TableDescriptor}. <p>The schema is typically required. It is optional only in cases where the schema can be inferred, e.g. {@link Table#insertInto(TableDescriptor)}.
schema
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines the format to be used for this table, identified by name, under the default
 * {@code 'format'} option.
 */
public Builder format(String format) {
    final FormatDescriptor descriptor = FormatDescriptor.forFormat(format).build();
    return format(FactoryUtil.FORMAT, descriptor);
}
Defines the {@link Format format} to be used for this table. <p>Note that not every connector requires a format to be specified, while others may use multiple formats. In the latter case, use {@link #format(ConfigOption, FormatDescriptor)} instead to specify for which option the format should be configured.
format
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines the format to be used for this table under the default {@code 'format'} option. The
 * descriptor's options are automatically prefixed with the format name.
 */
public Builder format(FormatDescriptor formatDescriptor) {
    return format(FactoryUtil.FORMAT, formatDescriptor);
}
Defines the format to be used for this table. <p>Note that not every connector requires a format to be specified, while others may use multiple formats. <p>Options of the provided {@param formatDescriptor} are automatically prefixed. For example, <pre>{@code descriptorBuilder.format(FormatDescriptor.forFormat("json") .option(JsonOptions.IGNORE_PARSE_ERRORS, true) .build() }</pre> <p>will result in the options <pre>{@code 'format' = 'json' 'json.ignore-parse-errors' = 'true' }</pre>
format
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines the format to be used for this table, registered under the given option key.
 *
 * <p>Stores the format identifier under {@code formatOption} and copies every option of
 * the descriptor into this builder with the connector-derived prefix (obtained via
 * {@code FactoryUtil.getFormatPrefix}) prepended. For example a descriptor option
 * {@code ignore-parse-errors} registered under the key 'key.format' = 'json' becomes
 * 'key.json.ignore-parse-errors'.
 *
 * @param formatOption option key under which the format identifier is stored, e.g.
 *     'format' or 'key.format'; must not be null
 * @param formatDescriptor descriptor carrying the format identifier and its options;
 *     must not be null
 * @return this builder for fluent chaining
 * @throws ValidationException if any descriptor option key already carries the computed
 *     prefix — options must be given unprefixed, the prefix is added here
 */
public Builder format( ConfigOption<String> formatOption, FormatDescriptor formatDescriptor) { Preconditions.checkNotNull(formatOption, "Format option must not be null."); Preconditions.checkNotNull(formatDescriptor, "Format descriptor must not be null."); option(formatOption, formatDescriptor.getFormat()); final String optionPrefix = FactoryUtil.getFormatPrefix(formatOption, formatDescriptor.getFormat()); formatDescriptor .getOptions() .forEach( (key, value) -> { if (key.startsWith(optionPrefix)) { throw new ValidationException( String.format( "Format options set using #format(FormatDescriptor) should not contain the prefix '%s', but found '%s'.", optionPrefix, key)); } final String prefixedKey = optionPrefix + key; option(prefixedKey, value); }); return this; }
Defines the format to be used for this table. <p>Note that not every connector requires a format to be specified, while others may use multiple formats. <p>Options of the provided {@code formatDescriptor} are automatically prefixed. For example, <pre>{@code descriptorBuilder.format(KafkaOptions.KEY_FORMAT, FormatDescriptor.forFormat("json") .option(JsonOptions.IGNORE_PARSE_ERRORS, true) .build()) }</pre> <p>will result in the options <pre>{@code 'key.format' = 'json' 'key.json.ignore-parse-errors' = 'true' }</pre>
format
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines that the table should be distributed into buckets using a hash algorithm over
 * the given columns. The number of buckets is connector-defined (passed as {@code null}).
 *
 * @param bucketKeys columns to hash over; validated by {@code validateBucketKeys}
 * @return this builder for fluent chaining
 */
public Builder distributedByHash(String... bucketKeys) {
    validateBucketKeys(bucketKeys);
    final List<String> keys = Arrays.asList(bucketKeys);
    this.distribution = TableDistribution.ofHash(keys, null);
    return this;
}
Defines that the table should be distributed into buckets using a hash algorithm over the given columns. The number of buckets is connector-defined.
distributedByHash
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines that the table should be distributed into the given number of buckets using a
 * hash algorithm over the given columns.
 *
 * @param numberOfBuckets explicit bucket count
 * @param bucketKeys columns to hash over; validated by {@code validateBucketKeys}
 * @return this builder for fluent chaining
 */
public Builder distributedByHash(int numberOfBuckets, String... bucketKeys) {
    validateBucketKeys(bucketKeys);
    final List<String> keys = Arrays.asList(bucketKeys);
    this.distribution = TableDistribution.ofHash(keys, numberOfBuckets);
    return this;
}
Defines that the table should be distributed into the given number of buckets using a hash algorithm over the given columns.
distributedByHash
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines that the table should be distributed into buckets using a range algorithm over
 * the given columns. The number of buckets is connector-defined (passed as {@code null}).
 *
 * @param bucketKeys columns to range-partition over; validated by {@code validateBucketKeys}
 * @return this builder for fluent chaining
 */
public Builder distributedByRange(String... bucketKeys) {
    validateBucketKeys(bucketKeys);
    final List<String> keys = Arrays.asList(bucketKeys);
    this.distribution = TableDistribution.ofRange(keys, null);
    return this;
}
Defines that the table should be distributed into buckets using a range algorithm over the given columns. The number of buckets is connector-defined.
distributedByRange
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines that the table should be distributed into the given number of buckets using a
 * range algorithm over the given columns.
 *
 * @param numberOfBuckets explicit bucket count
 * @param bucketKeys columns to range-partition over; validated by {@code validateBucketKeys}
 * @return this builder for fluent chaining
 */
public Builder distributedByRange(int numberOfBuckets, String... bucketKeys) {
    validateBucketKeys(bucketKeys);
    final List<String> keys = Arrays.asList(bucketKeys);
    this.distribution = TableDistribution.ofRange(keys, numberOfBuckets);
    return this;
}
Defines that the table should be distributed into the given number of buckets using a range algorithm over the given columns.
distributedByRange
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines that the table should be distributed into buckets over the given columns. Both
 * the number of buckets and the algorithm are connector-defined (distribution kind is
 * "unknown", bucket count is {@code null}).
 *
 * @param bucketKeys columns to distribute over; validated by {@code validateBucketKeys}
 * @return this builder for fluent chaining
 */
public Builder distributedBy(String... bucketKeys) {
    validateBucketKeys(bucketKeys);
    final List<String> keys = Arrays.asList(bucketKeys);
    this.distribution = TableDistribution.ofUnknown(keys, null);
    return this;
}
Defines that the table should be distributed into buckets over the given columns. The number of buckets and used algorithm are connector-defined.
distributedBy
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines that the table should be distributed into the given number of buckets over the
 * given columns. The algorithm is connector-defined (distribution kind is "unknown").
 *
 * @param numberOfBuckets explicit bucket count
 * @param bucketKeys columns to distribute over; validated by {@code validateBucketKeys}
 * @return this builder for fluent chaining
 */
public Builder distributedBy(int numberOfBuckets, String... bucketKeys) {
    validateBucketKeys(bucketKeys);
    final List<String> keys = Arrays.asList(bucketKeys);
    this.distribution = TableDistribution.ofUnknown(keys, numberOfBuckets);
    return this;
}
Defines that the table should be distributed into the given number of buckets by the given columns. The used algorithm is connector-defined.
distributedBy
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Defines which columns this table is partitioned by. Keys are appended to any
 * previously registered partition keys, in the given order.
 *
 * @param partitionKeys partition column names
 * @return this builder for fluent chaining
 */
public Builder partitionedBy(String... partitionKeys) {
    for (final String partitionKey : partitionKeys) {
        this.partitionKeys.add(partitionKey);
    }
    return this;
}
Define which columns this table is partitioned by.
partitionedBy
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0
/**
 * Define the comment for this table. A {@code null} value clears any previously set
 * comment.
 *
 * @param comment free-text table comment, or {@code null}
 * @return this builder for fluent chaining
 */
public Builder comment(@Nullable String comment) { this.comment = comment; return this; }
Define the comment for this table.
comment
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableDescriptor.java
Apache-2.0