code
stringlengths
25
201k
docstring
stringlengths
19
96.2k
func_name
stringlengths
0
235
language
stringclasses
1 value
repo
stringlengths
8
51
path
stringlengths
11
314
url
stringlengths
62
377
license
stringclasses
7 values
/**
 * Returns a byte array describing the <i>unscaled value</i> of this {@link DecimalData}.
 *
 * @return the unscaled byte array of this {@link DecimalData}
 */
public byte[] toUnscaledBytes() {
    final BigDecimal decimal = toBigDecimal();
    return decimal.unscaledValue().toByteArray();
}
Returns a byte array describing the <i>unscaled value</i> of this {@link DecimalData}. @return the unscaled byte array of this {@link DecimalData}.
toUnscaledBytes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
Apache-2.0
/**
 * Creates an instance of {@link DecimalData} from a {@link BigDecimal} and the given precision
 * and scale.
 *
 * <p>The returned decimal value may be rounded to have the desired scale. The precision will
 * be checked. If the precision overflows, null will be returned.
 */
public static @Nullable DecimalData fromBigDecimal(BigDecimal bd, int precision, int scale) {
    // Round to the requested scale first; only then can precision be validated.
    final BigDecimal rounded = bd.setScale(scale, RoundingMode.HALF_UP);
    if (rounded.precision() > precision) {
        // Precision overflow is signalled via null rather than an exception.
        return null;
    }
    // Small-precision decimals additionally carry a compact long representation.
    final long compactValue =
            precision <= MAX_COMPACT_PRECISION
                    ? rounded.movePointRight(scale).longValueExact()
                    : -1;
    return new DecimalData(precision, scale, compactValue, rounded);
}
Creates an instance of {@link DecimalData} from a {@link BigDecimal} and the given precision and scale. <p>The returned decimal value may be rounded to have the desired scale. The precision will be checked. If the precision overflows, null will be returned.
fromBigDecimal
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
Apache-2.0
/**
 * Creates an instance of {@link DecimalData} from an unscaled long value and the given
 * precision and scale.
 *
 * <p>The precision must fit into the compact long representation (1..MAX_LONG_DIGITS),
 * otherwise an {@link IllegalArgumentException} is thrown.
 */
public static DecimalData fromUnscaledLong(long unscaledLong, int precision, int scale) {
    checkArgument(precision > 0 && precision <= MAX_LONG_DIGITS);
    // BigDecimal representation is left null; it can be derived lazily from the long.
    return new DecimalData(precision, scale, unscaledLong, null);
}
Creates an instance of {@link DecimalData} from an unscaled long value and the given precision and scale.
fromUnscaledLong
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
Apache-2.0
/**
 * Creates an instance of {@link DecimalData} from an unscaled byte array value and the given
 * precision and scale.
 */
public static DecimalData fromUnscaledBytes(byte[] unscaledBytes, int precision, int scale) {
    final BigInteger unscaled = new BigInteger(unscaledBytes);
    return fromBigDecimal(new BigDecimal(unscaled, scale), precision, scale);
}
Creates an instance of {@link DecimalData} from an unscaled byte array value and the given precision and scale.
fromUnscaledBytes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
Apache-2.0
/**
 * Creates an instance of {@link DecimalData} for a zero value with the given precision and
 * scale.
 *
 * <p>The precision will be checked. If the precision overflows, null will be returned.
 */
public static @Nullable DecimalData zero(int precision, int scale) {
    // Compact decimals can be built directly; larger ones go through the checked factory.
    return precision <= MAX_COMPACT_PRECISION
            ? new DecimalData(precision, scale, 0, null)
            : fromBigDecimal(BigDecimal.ZERO, precision, scale);
}
Creates an instance of {@link DecimalData} for a zero value with the given precision and scale. <p>The precision will be checked. If the precision overflows, null will be returned.
zero
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/DecimalData.java
Apache-2.0
/**
 * Sets the field value at the given position.
 *
 * <p>Note: the given field value must be an internal data structure, otherwise the
 * {@link GenericRowData} is corrupted and may throw an exception when processed. See
 * {@link RowData} for more information about internal data structures.
 *
 * <p>The field value can be null for representing nullability.
 */
public void setField(int pos, Object value) { this.fields[pos] = value; }
Sets the field value at the given position. <p>Note: The given field value must be an internal data structures. Otherwise the {@link GenericRowData} is corrupted and may throw exception when processing. See {@link RowData} for more information about internal data structures. <p>The field value can be null for representing nullability.
setField
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/GenericRowData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/GenericRowData.java
Apache-2.0
/**
 * Creates an instance of {@link GenericRowData} with given field values.
 *
 * <p>By default, the row describes a {@link RowKind#INSERT} in a changelog.
 *
 * <p>Note: all fields of the row must be internal data structures.
 */
public static GenericRowData of(Object... values) {
    final GenericRowData row = new GenericRowData(values.length);
    int pos = 0;
    for (Object value : values) {
        row.setField(pos++, value);
    }
    return row;
}
Creates an instance of {@link GenericRowData} with given field values. <p>By default, the row describes a {@link RowKind#INSERT} in a changelog. <p>Note: All fields of the row must be internal data structures.
of
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/GenericRowData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/GenericRowData.java
Apache-2.0
/**
 * Creates an instance of {@link GenericRowData} with given kind and field values.
 *
 * <p>Note: all fields of the row must be internal data structures.
 */
public static GenericRowData ofKind(RowKind kind, Object... values) {
    final GenericRowData row = new GenericRowData(kind, values.length);
    int pos = 0;
    for (Object value : values) {
        row.setField(pos++, value);
    }
    return row;
}
Creates an instance of {@link GenericRowData} with given kind and field values. <p>Note: All fields of the row must be internal data structures.
ofKind
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/GenericRowData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/GenericRowData.java
Apache-2.0
/**
 * Creates an instance of {@link RawValueData} from a Java object.
 *
 * <p>Delegates to {@link BinaryRawValueData}, the binary implementation of this interface.
 */
static <T> RawValueData<T> fromObject(T javaObject) {
    return BinaryRawValueData.fromObject(javaObject);
}
Creates an instance of {@link RawValueData} from a Java object.
fromObject
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/RawValueData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/RawValueData.java
Apache-2.0
/**
 * Creates an instance of {@link RawValueData} from the given byte array.
 *
 * <p>Delegates to {@link BinaryRawValueData}, the binary implementation of this interface.
 */
static <T> RawValueData<T> fromBytes(byte[] bytes) {
    return BinaryRawValueData.fromBytes(bytes);
}
Creates an instance of {@link RawValueData} from the given byte array.
fromBytes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/RawValueData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/RawValueData.java
Apache-2.0
/**
 * Creates an accessor for getting elements in an internal row data structure at the given
 * position.
 *
 * <p>The returned getter performs a null check via {@code isNullAt} before delegating to the
 * type-specific accessor, so it may return null for nullable fields.
 *
 * @param fieldType the element type of the row
 * @param fieldPos the element position of the row
 * @throws UnsupportedOperationException for TIMESTAMP_WITH_TIME_ZONE
 * @throws IllegalArgumentException for types that have no internal representation
 */
static FieldGetter createFieldGetter(LogicalType fieldType, int fieldPos) {
    final FieldGetter fieldGetter;
    // ordered by type root definition
    switch (fieldType.getTypeRoot()) {
        case CHAR:
        case VARCHAR:
            fieldGetter = row -> row.getString(fieldPos);
            break;
        case BOOLEAN:
            fieldGetter = row -> row.getBoolean(fieldPos);
            break;
        case BINARY:
        case VARBINARY:
            fieldGetter = row -> row.getBinary(fieldPos);
            break;
        case DECIMAL:
            // Precision and scale are captured once here so the lambda stays allocation-free.
            final int decimalPrecision = getPrecision(fieldType);
            final int decimalScale = getScale(fieldType);
            fieldGetter = row -> row.getDecimal(fieldPos, decimalPrecision, decimalScale);
            break;
        case TINYINT:
            fieldGetter = row -> row.getByte(fieldPos);
            break;
        case SMALLINT:
            fieldGetter = row -> row.getShort(fieldPos);
            break;
        case INTEGER:
        case DATE:
        case TIME_WITHOUT_TIME_ZONE:
        case INTERVAL_YEAR_MONTH:
            fieldGetter = row -> row.getInt(fieldPos);
            break;
        case BIGINT:
        case INTERVAL_DAY_TIME:
            fieldGetter = row -> row.getLong(fieldPos);
            break;
        case FLOAT:
            fieldGetter = row -> row.getFloat(fieldPos);
            break;
        case DOUBLE:
            fieldGetter = row -> row.getDouble(fieldPos);
            break;
        case TIMESTAMP_WITHOUT_TIME_ZONE:
        case TIMESTAMP_WITH_LOCAL_TIME_ZONE:
            final int timestampPrecision = getPrecision(fieldType);
            fieldGetter = row -> row.getTimestamp(fieldPos, timestampPrecision);
            break;
        case TIMESTAMP_WITH_TIME_ZONE:
            throw new UnsupportedOperationException();
        case ARRAY:
            fieldGetter = row -> row.getArray(fieldPos);
            break;
        case MULTISET:
        case MAP:
            fieldGetter = row -> row.getMap(fieldPos);
            break;
        case ROW:
        case STRUCTURED_TYPE:
            final int rowFieldCount = getFieldCount(fieldType);
            fieldGetter = row -> row.getRow(fieldPos, rowFieldCount);
            break;
        case DISTINCT_TYPE:
            // Distinct types are accessed through their source type's getter.
            fieldGetter = createFieldGetter(((DistinctType) fieldType).getSourceType(), fieldPos);
            break;
        case RAW:
            fieldGetter = row -> row.getRawValue(fieldPos);
            break;
        case NULL:
        case SYMBOL:
        case UNRESOLVED:
        case DESCRIPTOR:
        default:
            throw new IllegalArgumentException();
    }
    // Wrap with a null check so callers never hit the raw accessor on a null field.
    return row -> {
        if (row.isNullAt(fieldPos)) {
            return null;
        }
        return fieldGetter.getFieldOrNull(row);
    };
}
Creates an accessor for getting elements in an internal row data structure at the given position. @param fieldType the element type of the row @param fieldPos the element position of the row
createFieldGetter
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/RowData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/RowData.java
Apache-2.0
/**
 * Creates an instance of {@link StringData} from the given {@link String}.
 *
 * <p>Delegates to {@link BinaryStringData}, the binary implementation of this interface.
 */
static StringData fromString(String str) {
    return BinaryStringData.fromString(str);
}
Creates an instance of {@link StringData} from the given {@link String}.
fromString
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/StringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/StringData.java
Apache-2.0
/**
 * Creates an instance of {@link StringData} from the given UTF-8 byte array.
 *
 * <p>Delegates to {@link BinaryStringData}, the binary implementation of this interface.
 */
static StringData fromBytes(byte[] bytes) {
    return BinaryStringData.fromBytes(bytes);
}
Creates an instance of {@link StringData} from the given UTF-8 byte array.
fromBytes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/StringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/StringData.java
Apache-2.0
/**
 * Creates an instance of {@link StringData} from the given UTF-8 byte array with offset and
 * number of bytes.
 *
 * <p>Delegates to {@link BinaryStringData}, the binary implementation of this interface.
 */
static StringData fromBytes(byte[] bytes, int offset, int numBytes) {
    return BinaryStringData.fromBytes(bytes, offset, numBytes);
}
Creates an instance of {@link StringData} from the given UTF-8 byte array with offset and number of bytes.
fromBytes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/StringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/StringData.java
Apache-2.0
/** Returns the number of milliseconds since {@code 1970-01-01 00:00:00}. */
public long getMillisecond() { return millisecond; }
Returns the number of milliseconds since {@code 1970-01-01 00:00:00}.
getMillisecond
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/**
 * Returns the number of nanoseconds (the nanoseconds within the milliseconds).
 *
 * <p>The value range is from 0 to 999,999.
 */
public int getNanoOfMillisecond() { return nanoOfMillisecond; }
Returns the number of nanoseconds (the nanoseconds within the milliseconds). <p>The value range is from 0 to 999,999.
getNanoOfMillisecond
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/** Converts this {@link TimestampData} object to a {@link Timestamp}, via the local date-time view. */
public Timestamp toTimestamp() { return Timestamp.valueOf(toLocalDateTime()); }
Converts this {@link TimestampData} object to a {@link Timestamp}.
toTimestamp
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/** Converts this {@link TimestampData} object to a {@link LocalDateTime}. */
public LocalDateTime toLocalDateTime() {
    int epochDay = (int) (millisecond / MILLIS_PER_DAY);
    int millisOfDay = (int) (millisecond % MILLIS_PER_DAY);
    if (millisOfDay < 0) {
        // Java division truncates towards zero; normalize negative remainders
        // so millisOfDay ends up in [0, MILLIS_PER_DAY).
        --epochDay;
        millisOfDay += MILLIS_PER_DAY;
    }
    final long nanoOfDay = millisOfDay * 1_000_000L + nanoOfMillisecond;
    final LocalDate datePart = LocalDate.ofEpochDay(epochDay);
    final LocalTime timePart = LocalTime.ofNanoOfDay(nanoOfDay);
    return LocalDateTime.of(datePart, timePart);
}
Converts this {@link TimestampData} object to a {@link LocalDateTime}.
toLocalDateTime
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/**
 * Creates an instance of {@link TimestampData} from milliseconds.
 *
 * <p>The nanos-of-millisecond field will be set to zero.
 *
 * @param milliseconds the number of milliseconds since {@code 1970-01-01 00:00:00}; a negative
 *     number is the number of milliseconds before {@code 1970-01-01 00:00:00}
 */
public static TimestampData fromEpochMillis(long milliseconds) {
    return new TimestampData(milliseconds, 0);
}
Creates an instance of {@link TimestampData} from milliseconds. <p>The nanos-of-millisecond field will be set to zero. @param milliseconds the number of milliseconds since {@code 1970-01-01 00:00:00}; a negative number is the number of milliseconds before {@code 1970-01-01 00:00:00}
fromEpochMillis
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/**
 * Creates an instance of {@link TimestampData} from milliseconds and a nanos-of-millisecond.
 *
 * @param milliseconds the number of milliseconds since {@code 1970-01-01 00:00:00}; a negative
 *     number is the number of milliseconds before {@code 1970-01-01 00:00:00}
 * @param nanosOfMillisecond the nanoseconds within the millisecond, from 0 to 999,999
 */
public static TimestampData fromEpochMillis(long milliseconds, int nanosOfMillisecond) {
    return new TimestampData(milliseconds, nanosOfMillisecond);
}
Creates an instance of {@link TimestampData} from milliseconds and a nanos-of-millisecond. @param milliseconds the number of milliseconds since {@code 1970-01-01 00:00:00}; a negative number is the number of milliseconds before {@code 1970-01-01 00:00:00} @param nanosOfMillisecond the nanoseconds within the millisecond, from 0 to 999,999
fromEpochMillis
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/**
 * Creates an instance of {@link TimestampData} from an instance of {@link LocalDateTime}.
 *
 * @param dateTime an instance of {@link LocalDateTime}
 */
public static TimestampData fromLocalDateTime(LocalDateTime dateTime) {
    final long epochDay = dateTime.toLocalDate().toEpochDay();
    final long nanoOfDay = dateTime.toLocalTime().toNanoOfDay();
    // Split nano-of-day into the millisecond part and the sub-millisecond remainder.
    final long millis = epochDay * MILLIS_PER_DAY + nanoOfDay / 1_000_000;
    return new TimestampData(millis, (int) (nanoOfDay % 1_000_000));
}
Creates an instance of {@link TimestampData} from an instance of {@link LocalDateTime}. @param dateTime an instance of {@link LocalDateTime}
fromLocalDateTime
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/**
 * Creates an instance of {@link TimestampData} from an instance of {@link Timestamp},
 * converting through its {@link LocalDateTime} view.
 *
 * @param timestamp an instance of {@link Timestamp}
 */
public static TimestampData fromTimestamp(Timestamp timestamp) {
    return fromLocalDateTime(timestamp.toLocalDateTime());
}
Creates an instance of {@link TimestampData} from an instance of {@link Timestamp}. @param timestamp an instance of {@link Timestamp}
fromTimestamp
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/**
 * Creates an instance of {@link TimestampData} from an instance of {@link Instant}.
 *
 * @param instant an instance of {@link Instant}
 */
public static TimestampData fromInstant(Instant instant) {
    final long seconds = instant.getEpochSecond();
    final int nanos = instant.getNano();
    // getNano() is always non-negative, so the split below is exact even for
    // instants before the epoch.
    final long millis = seconds * 1_000 + nanos / 1_000_000;
    final int nanosOfMilli = nanos % 1_000_000;
    return new TimestampData(millis, nanosOfMilli);
}
Creates an instance of {@link TimestampData} from an instance of {@link Instant}. @param instant an instance of {@link Instant}
fromInstant
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/**
 * Returns whether the timestamp data is small enough to be stored in a long of milliseconds.
 *
 * <p>Precision up to 3 means no sub-millisecond digits are needed.
 */
public static boolean isCompact(int precision) { return precision <= 3; }
Returns whether the timestamp data is small enough to be stored in a long of milliseconds.
isCompact
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/TimestampData.java
Apache-2.0
/**
 * Creates a {@link BinaryRawValueData} instance from the given Java object.
 *
 * <p>Returns null when the given object is null.
 */
public static <T> BinaryRawValueData<T> fromObject(T javaObject) {
    return javaObject == null ? null : new BinaryRawValueData<>(javaObject);
}
Creates a {@link BinaryRawValueData} instance from the given Java object.
fromObject
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryRawValueData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryRawValueData.java
Apache-2.0
/** Creates a {@link BinaryRawValueData} instance covering the whole of the given byte array. */
public static <T> BinaryRawValueData<T> fromBytes(byte[] bytes) {
    return fromBytes(bytes, 0, bytes.length);
}
Creates a {@link BinaryRawValueData} instance from the given bytes.
fromBytes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryRawValueData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryRawValueData.java
Apache-2.0
/**
 * Creates a {@link BinaryRawValueData} instance from the given bytes with offset and number of
 * bytes, wrapping the array in a single {@link MemorySegment}.
 */
public static <T> BinaryRawValueData<T> fromBytes(byte[] bytes, int offset, int numBytes) {
    final MemorySegment[] segments = {MemorySegmentFactory.wrap(bytes)};
    return new BinaryRawValueData<>(segments, offset, numBytes);
}
Creates a {@link BinaryRawValueData} instance from the given bytes with offset and number of bytes.
fromBytes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryRawValueData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryRawValueData.java
Apache-2.0
/**
 * Copy bytes of segments to output view.
 *
 * <p>Note: it just copies the data in, not including the length.
 *
 * @param segments source segments
 * @param offset offset for segments
 * @param sizeInBytes size in bytes
 * @param target target output view
 * @throws IOException if writing to the output view fails
 */
public static void copyToView(
        MemorySegment[] segments, int offset, int sizeInBytes, DataOutputView target)
        throws IOException {
    for (MemorySegment sourceSegment : segments) {
        int curSegRemain = sourceSegment.size() - offset;
        if (curSegRemain > 0) {
            // This segment contains (part of) the range: copy via a reusable buffer.
            int copySize = Math.min(curSegRemain, sizeInBytes);
            byte[] bytes = allocateReuseBytes(copySize);
            sourceSegment.get(offset, bytes, 0, copySize);
            target.write(bytes, 0, copySize);
            sizeInBytes -= copySize;
            // Subsequent segments are read from their start.
            offset = 0;
        } else {
            // Range starts in a later segment; consume this segment's size from the offset.
            offset -= sourceSegment.size();
        }
        if (sizeInBytes == 0) {
            return;
        }
    }
    // Reaching here with bytes left means the requested range exceeded the segments.
    if (sizeInBytes != 0) {
        throw new RuntimeException(
                "No copy finished, this should be a bug, "
                        + "The remaining length is: "
                        + sizeInBytes);
    }
}
Copy bytes of segments to output view. <p>Note: It just copies the data in, not include the length. @param segments source segments @param offset offset for segments @param sizeInBytes size in bytes @param target target output view
copyToView
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
Apache-2.0
/**
 * Gets an instance of {@link DecimalData} from underlying {@link MemorySegment}.
 *
 * <p>{@code offsetAndSize} packs the relative offset in its high 32 bits and the byte length
 * in its low 32 bits.
 */
public static DecimalData readDecimalData(
        MemorySegment[] segments, int baseOffset, long offsetAndSize, int precision, int scale) {
    final int subOffset = (int) (offsetAndSize >> 32);
    final int size = (int) offsetAndSize;
    final byte[] unscaled = new byte[size];
    copyToBytes(segments, baseOffset + subOffset, unscaled, 0, size);
    return DecimalData.fromUnscaledBytes(unscaled, precision, scale);
}
Gets an instance of {@link DecimalData} from underlying {@link MemorySegment}.
readDecimalData
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
Apache-2.0
/**
 * Gets an instance of {@link TimestampData} from underlying {@link MemorySegment}.
 *
 * @param segments the underlying MemorySegments
 * @param baseOffset the base offset of current instance of {@code TimestampData}
 * @param offsetAndNanos packs the offset of the milliseconds part (high 32 bits) and the
 *     nanos-of-millisecond (low 32 bits)
 * @return an instance of {@link TimestampData}
 */
public static TimestampData readTimestampData(
        MemorySegment[] segments, int baseOffset, long offsetAndNanos) {
    final int subOffset = (int) (offsetAndNanos >> 32);
    final int nanos = (int) offsetAndNanos;
    final long millis = getLong(segments, baseOffset + subOffset);
    return TimestampData.fromEpochMillis(millis, nanos);
}
Gets an instance of {@link TimestampData} from underlying {@link MemorySegment}. @param segments the underlying MemorySegments @param baseOffset the base offset of current instance of {@code TimestampData} @param offsetAndNanos the offset of milli-seconds part and nanoseconds @return an instance of {@link TimestampData}
readTimestampData
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
Apache-2.0
/**
 * Get binary; if len is less than 8 the data is inlined in {@code variablePartOffsetAndLen}.
 *
 * <p>Note: needs to consider the ByteOrder.
 *
 * @param baseOffset base offset of composite binary format.
 * @param fieldOffset absolute start offset of 'variablePartOffsetAndLen'.
 * @param variablePartOffsetAndLen a long value, real data or offset and len.
 */
public static byte[] readBinary(
        MemorySegment[] segments, int baseOffset, int fieldOffset, long variablePartOffsetAndLen) {
    // The highest bit marks whether the value is inlined (set) or stored out-of-line (clear).
    long mark = variablePartOffsetAndLen & HIGHEST_FIRST_BIT;
    if (mark == 0) {
        // Out-of-line: high 32 bits are the relative offset, low 32 bits the length.
        final int subOffset = (int) (variablePartOffsetAndLen >> 32);
        final int len = (int) variablePartOffsetAndLen;
        return BinarySegmentUtils.copyToBytes(segments, baseOffset + subOffset, len);
    } else {
        // Inlined: length is encoded in bits 56..62 of the header byte.
        int len = (int) ((variablePartOffsetAndLen & HIGHEST_SECOND_TO_EIGHTH_BIT) >>> 56);
        if (BinarySegmentUtils.LITTLE_ENDIAN) {
            return BinarySegmentUtils.copyToBytes(segments, fieldOffset, len);
        } else {
            // fieldOffset + 1 to skip header.
            return BinarySegmentUtils.copyToBytes(segments, fieldOffset + 1, len);
        }
    }
}
Get binary, if len less than 8, will be include in variablePartOffsetAndLen. <p>Note: Need to consider the ByteOrder. @param baseOffset base offset of composite binary format. @param fieldOffset absolute start offset of 'variablePartOffsetAndLen'. @param variablePartOffsetAndLen a long value, real data or offset and len.
readBinary
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
Apache-2.0
/**
 * Get binary string; if len is less than 8 the data is inlined in
 * {@code variablePartOffsetAndLen}.
 *
 * <p>Note: needs to consider the ByteOrder.
 *
 * @param baseOffset base offset of composite binary format.
 * @param fieldOffset absolute start offset of 'variablePartOffsetAndLen'.
 * @param variablePartOffsetAndLen a long value, real data or offset and len.
 */
public static StringData readStringData(
        MemorySegment[] segments, int baseOffset, int fieldOffset, long variablePartOffsetAndLen) {
    // The highest bit marks whether the value is inlined (set) or stored out-of-line (clear).
    long mark = variablePartOffsetAndLen & HIGHEST_FIRST_BIT;
    if (mark == 0) {
        // Out-of-line: high 32 bits are the relative offset, low 32 bits the length.
        final int subOffset = (int) (variablePartOffsetAndLen >> 32);
        final int len = (int) variablePartOffsetAndLen;
        return BinaryStringData.fromAddress(segments, baseOffset + subOffset, len);
    } else {
        // Inlined: length is encoded in bits 56..62 of the header byte.
        int len = (int) ((variablePartOffsetAndLen & HIGHEST_SECOND_TO_EIGHTH_BIT) >>> 56);
        if (BinarySegmentUtils.LITTLE_ENDIAN) {
            return BinaryStringData.fromAddress(segments, fieldOffset, len);
        } else {
            // fieldOffset + 1 to skip header.
            return BinaryStringData.fromAddress(segments, fieldOffset + 1, len);
        }
    }
}
Get binary string, if len less than 8, will be include in variablePartOffsetAndLen. <p>Note: Need to consider the ByteOrder. @param baseOffset base offset of composite binary format. @param fieldOffset absolute start offset of 'variablePartOffsetAndLen'. @param variablePartOffsetAndLen a long value, real data or offset and len.
readStringData
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
Apache-2.0
/**
 * Gets an instance of {@link RawValueData} from underlying {@link MemorySegment}.
 *
 * <p>{@code offsetAndSize} packs the relative offset in its high 32 bits and the byte length
 * in its low 32 bits.
 */
public static <T> RawValueData<T> readRawValueData(
        MemorySegment[] segments, int baseOffset, long offsetAndSize) {
    final int subOffset = (int) (offsetAndSize >> 32);
    final int size = (int) offsetAndSize;
    return new BinaryRawValueData<>(segments, baseOffset + subOffset, size, null);
}
Gets an instance of {@link RawValueData} from underlying {@link MemorySegment}.
readRawValueData
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
Apache-2.0
/**
 * Gets an instance of {@link MapData} from underlying {@link MemorySegment}.
 *
 * <p>{@code offsetAndSize} packs the relative offset in its high 32 bits and the byte length
 * in its low 32 bits.
 */
public static MapData readMapData(
        MemorySegment[] segments, int baseOffset, long offsetAndSize) {
    final int subOffset = (int) (offsetAndSize >> 32);
    final int size = (int) offsetAndSize;
    final BinaryMapData map = new BinaryMapData();
    map.pointTo(segments, baseOffset + subOffset, size);
    return map;
}
Gets an instance of {@link MapData} from underlying {@link MemorySegment}.
readMapData
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
Apache-2.0
/**
 * Gets an instance of {@link ArrayData} from underlying {@link MemorySegment}.
 *
 * <p>{@code offsetAndSize} packs the relative offset in its high 32 bits and the byte length
 * in its low 32 bits.
 */
public static ArrayData readArrayData(
        MemorySegment[] segments, int baseOffset, long offsetAndSize) {
    final int subOffset = (int) (offsetAndSize >> 32);
    final int size = (int) offsetAndSize;
    final BinaryArrayData array = new BinaryArrayData();
    array.pointTo(segments, baseOffset + subOffset, size);
    return array;
}
Gets an instance of {@link ArrayData} from underlying {@link MemorySegment}.
readArrayData
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
Apache-2.0
/**
 * Gets an instance of {@link RowData} from underlying {@link MemorySegment}.
 *
 * <p>{@code offsetAndSize} packs the relative offset in its high 32 bits and the byte length
 * in its low 32 bits.
 */
public static RowData readRowData(
        MemorySegment[] segments, int numFields, int baseOffset, long offsetAndSize) {
    final int subOffset = (int) (offsetAndSize >> 32);
    final int size = (int) offsetAndSize;
    final NestedRowData row = new NestedRowData(numFields);
    row.pointTo(segments, baseOffset + subOffset, size);
    return row;
}
Gets an instance of {@link RowData} from underlying {@link MemorySegment}.
readRowData
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinarySegmentUtils.java
Apache-2.0
/**
 * Creates a {@link BinaryStringData} instance from the given address (base and offset) and
 * length.
 */
public static BinaryStringData fromAddress(MemorySegment[] segments, int offset, int numBytes) {
    return new BinaryStringData(segments, offset, numBytes);
}
Creates a {@link BinaryStringData} instance from the given address (base and offset) and length.
fromAddress
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
/**
 * Creates a {@link BinaryStringData} instance from the given Java string.
 *
 * <p>Returns null when the given string is null.
 */
public static BinaryStringData fromString(String str) {
    return str == null ? null : new BinaryStringData(str);
}
Creates a {@link BinaryStringData} instance from the given Java string.
fromString
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
/** Creates a {@link BinaryStringData} instance covering the whole of the given UTF-8 bytes. */
public static BinaryStringData fromBytes(byte[] bytes) {
    return fromBytes(bytes, 0, bytes.length);
}
Creates a {@link BinaryStringData} instance from the given UTF-8 bytes.
fromBytes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
/**
 * Creates a {@link BinaryStringData} instance from the given UTF-8 bytes with offset and
 * number of bytes, wrapping the array in a single {@link MemorySegment}.
 */
public static BinaryStringData fromBytes(byte[] bytes, int offset, int numBytes) {
    final MemorySegment[] segments = {MemorySegmentFactory.wrap(bytes)};
    return new BinaryStringData(segments, offset, numBytes);
}
Creates a {@link BinaryStringData} instance from the given UTF-8 bytes with offset and number of bytes.
fromBytes
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
/**
 * Compares two strings lexicographically by their UTF-8 bytes.
 *
 * <p>If both sides already have a materialized Java string, the comparison delegates to
 * {@link String#compareTo}; otherwise the raw bytes are compared unsigned, byte by byte.
 */
@Override
public int compareTo(@Nonnull StringData o) {
    // BinaryStringData is the only implementation of StringData
    BinaryStringData other = (BinaryStringData) o;
    if (javaObject != null && other.javaObject != null) {
        // Fast path: both sides are materialized Java strings.
        return javaObject.compareTo(other.javaObject);
    }
    ensureMaterialized();
    other.ensureMaterialized();
    if (binarySection.segments.length == 1 && other.binarySection.segments.length == 1) {
        // Single-segment fast path: compare bytes in place, unsigned.
        int len = Math.min(binarySection.sizeInBytes, other.binarySection.sizeInBytes);
        MemorySegment seg1 = binarySection.segments[0];
        MemorySegment seg2 = other.binarySection.segments[0];
        for (int i = 0; i < len; i++) {
            int res =
                    (seg1.get(binarySection.offset + i) & 0xFF)
                            - (seg2.get(other.binarySection.offset + i) & 0xFF);
            if (res != 0) {
                return res;
            }
        }
        // Common prefix equal: the shorter string sorts first.
        return binarySection.sizeInBytes - other.binarySection.sizeInBytes;
    }
    // if there are multi segments.
    return compareMultiSegments(other);
}
Compares two strings lexicographically. Since UTF-8 uses groups of six bits, it is sometimes useful to use octal notation which uses 3-bit groups. With a calculator which can convert between hexadecimal and octal it can be easier to manually create or interpret UTF-8 compared with using binary. So we just compare the binary.
compareTo
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
/**
 * Finds the boundaries of the backing segments, then compares the two strings segment run by
 * segment run (unsigned byte comparison).
 *
 * <p>Assumes all segments of one string have equal size (the size of segment 0) — TODO confirm
 * against the binary format's invariants.
 */
private int compareMultiSegments(BinaryStringData other) {
    if (binarySection.sizeInBytes == 0 || other.binarySection.sizeInBytes == 0) {
        return binarySection.sizeInBytes - other.binarySection.sizeInBytes;
    }
    int len = Math.min(binarySection.sizeInBytes, other.binarySection.sizeInBytes);
    MemorySegment seg1 = binarySection.segments[0];
    MemorySegment seg2 = other.binarySection.segments[0];
    int segmentSize = binarySection.segments[0].size();
    int otherSegmentSize = other.binarySection.segments[0].size();
    // Bytes of each string remaining in its current segment.
    int sizeOfFirst1 = segmentSize - binarySection.offset;
    int sizeOfFirst2 = otherSegmentSize - other.binarySection.offset;
    int varSegIndex1 = 1;
    int varSegIndex2 = 1;
    // find the first segment of this string.
    while (sizeOfFirst1 <= 0) {
        sizeOfFirst1 += segmentSize;
        seg1 = binarySection.segments[varSegIndex1++];
    }
    while (sizeOfFirst2 <= 0) {
        sizeOfFirst2 += otherSegmentSize;
        seg2 = other.binarySection.segments[varSegIndex2++];
    }
    int offset1 = segmentSize - sizeOfFirst1;
    int offset2 = otherSegmentSize - sizeOfFirst2;
    // Compare at most up to the nearest segment boundary of either side.
    int needCompare = Math.min(Math.min(sizeOfFirst1, sizeOfFirst2), len);
    while (needCompare > 0) {
        // compare in one segment.
        for (int i = 0; i < needCompare; i++) {
            int res = (seg1.get(offset1 + i) & 0xFF) - (seg2.get(offset2 + i) & 0xFF);
            if (res != 0) {
                return res;
            }
        }
        if (needCompare == len) {
            break;
        }
        len -= needCompare;
        // next segment
        if (sizeOfFirst1 < sizeOfFirst2) {
            // I am smaller
            seg1 = binarySection.segments[varSegIndex1++];
            offset1 = 0;
            offset2 += needCompare;
            sizeOfFirst1 = segmentSize;
            sizeOfFirst2 -= needCompare;
        } else if (sizeOfFirst1 > sizeOfFirst2) {
            // other is smaller
            seg2 = other.binarySection.segments[varSegIndex2++];
            offset2 = 0;
            offset1 += needCompare;
            sizeOfFirst2 = otherSegmentSize;
            sizeOfFirst1 -= needCompare;
        } else {
            // same, should go ahead both.
            seg1 = binarySection.segments[varSegIndex1++];
            seg2 = other.binarySection.segments[varSegIndex2++];
            offset1 = 0;
            offset2 = 0;
            sizeOfFirst1 = segmentSize;
            sizeOfFirst2 = otherSegmentSize;
        }
        needCompare = Math.min(Math.min(sizeOfFirst1, sizeOfFirst2), len);
    }
    // All compared bytes were equal; length decides the ordering.
    checkArgument(needCompare == len);
    return binarySection.sizeInBytes - other.binarySection.sizeInBytes;
}
Find the boundaries of segments, and then compare MemorySegment.
compareMultiSegments
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
/**
 * Returns whether this string contains the given byte sequence.
 *
 * @param s the sequence to search for (materialized on demand)
 * @return {@code true} if {@code s} occurs anywhere in this string; an empty {@code s} always
 *     matches
 */
public boolean contains(final BinaryStringData s) {
    ensureMaterialized();
    s.ensureMaterialized();
    // The empty sequence is trivially contained in every string.
    if (s.binarySection.sizeInBytes == 0) {
        return true;
    }
    final int firstMatch =
            BinarySegmentUtils.find(
                    binarySection.segments,
                    binarySection.offset,
                    binarySection.sizeInBytes,
                    s.binarySection.segments,
                    s.binarySection.offset,
                    s.binarySection.sizeInBytes);
    return firstMatch != -1;
}
Returns true if and only if this BinaryStringData contains the specified sequence of bytes values. @param s the sequence to search for @return true if this BinaryStringData contains {@code s}, false otherwise
contains
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
/**
 * Tests whether this string starts with the given prefix.
 *
 * @param prefix the candidate prefix; the empty string is a prefix of everything
 * @return {@code true} if the bytes of {@code prefix} occur at position 0 of this string
 */
public boolean startsWith(final BinaryStringData prefix) {
    ensureMaterialized();
    prefix.ensureMaterialized();
    // A prefix match is simply a match anchored at offset 0.
    return matchAt(prefix, 0);
}
Tests if this BinaryStringData starts with the specified prefix. @param prefix the prefix. @return {@code true} if the bytes represented by the argument is a prefix of the bytes represented by this string; {@code false} otherwise. Note also that {@code true} will be returned if the argument is an empty BinaryStringData or is equal to this {@code BinaryStringData} object as determined by the {@link #equals(Object)} method.
startsWith
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
/**
 * Tests whether this string ends with the given suffix.
 *
 * @param suffix the candidate suffix; the empty string is a suffix of everything
 * @return {@code true} if the bytes of {@code suffix} occur at the tail of this string
 */
public boolean endsWith(final BinaryStringData suffix) {
    ensureMaterialized();
    suffix.ensureMaterialized();
    // A suffix match is a match anchored at (thisLength - suffixLength).
    final int anchor = binarySection.sizeInBytes - suffix.binarySection.sizeInBytes;
    return matchAt(suffix, anchor);
}
Tests if this BinaryStringData ends with the specified suffix. @param suffix the suffix. @return {@code true} if the bytes represented by the argument is a suffix of the bytes represented by this object; {@code false} otherwise. Note that the result will be {@code true} if the argument is the empty string or is equal to this {@code BinaryStringData} object as determined by the {@link #equals(Object)} method.
endsWith
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
static int numBytesForFirstByte(final byte b) { if (b >= 0) { // 1 byte, 7 bits: 0xxxxxxx return 1; } else if ((b >> 5) == -2 && (b & 0x1e) != 0) { // 2 bytes, 11 bits: 110xxxxx 10xxxxxx return 2; } else if ((b >> 4) == -2) { // 3 bytes, 16 bits: 1110xxxx 10xxxxxx 10xxxxxx return 3; } else if ((b >> 3) == -2) { // 4 bytes, 21 bits: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx return 4; } else { // Skip the first byte disallowed in UTF-8 // Handling errors quietly, same semantics to java String. return 1; } }
Returns the number of bytes for a code point with the first byte as `b`. @param b The first byte of a code point
numBytesForFirstByte
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/BinaryStringData.java
Apache-2.0
/**
 * Materializes the binary representation of this value if it has not been materialized yet.
 *
 * @param serializer serializer used to produce the binary section
 * @throws WrappingRuntimeException if serialization fails with an {@link IOException}
 */
public final void ensureMaterialized(TypeSerializer<T> serializer) {
    // Fast path: already materialized.
    if (binarySection != null) {
        return;
    }
    try {
        binarySection = materialize(serializer);
    } catch (IOException e) {
        throw new WrappingRuntimeException(e);
    }
}
Ensure we have materialized binary format.
ensureMaterialized
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/LazyBinaryFormat.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/binary/LazyBinaryFormat.java
Apache-2.0
/** Returns whether a dictionary has been attached to this column vector. */
@Override
public boolean hasDictionary() {
    return dictionary != null;
}
Returns true if this column has a dictionary.
hasDictionary
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/columnar/vector/writable/AbstractWritableVector.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/columnar/vector/writable/AbstractWritableVector.java
Apache-2.0
/**
 * Creates a {@link ProjectedRowData} from a nested projection array, rejecting any entry that
 * actually nests (length != 1), since {@link ProjectedRowData} only supports top-level fields.
 *
 * @param projection nested projection paths; each path must have exactly one index
 * @return the projected row
 * @throws IllegalArgumentException if any path is nested
 */
public static ProjectedRowData from(int[][] projection) throws IllegalArgumentException {
    final int[] topLevelIndexes = new int[projection.length];
    for (int i = 0; i < projection.length; i++) {
        final int[] path = projection[i];
        if (path.length != 1) {
            throw new IllegalArgumentException(
                    "ProjectedRowData doesn't support nested projections");
        }
        topLevelIndexes[i] = path[0];
    }
    return new ProjectedRowData(topLevelIndexes);
}
Like {@link #from(int[])}, but throws {@link IllegalArgumentException} if the provided {@code projection} array contains nested projections, which are not supported by {@link ProjectedRowData}. <p>The array represents the mapping of the fields of the original {@link DataType}, including nested rows. For example, {@code [[0, 2, 1], ...]} specifies to include the 2nd field of the 3rd field of the 1st field in the top-level row. @see Projection @see ProjectedRowData
from
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/ProjectedRowData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/ProjectedRowData.java
Apache-2.0
/**
 * Creates an empty {@link ProjectedRowData} from a top-level projection array, e.g. {@code [0,
 * 2, 1]} selects the 1st, 3rd and 2nd field in that order.
 *
 * @param projection top-level field indexes
 * @return the projected row
 */
public static ProjectedRowData from(int[] projection) {
    final ProjectedRowData projectedRow = new ProjectedRowData(projection);
    return projectedRow;
}
Create an empty {@link ProjectedRowData} starting from a {@code projection} array. <p>The array represents the mapping of the fields of the original {@link DataType}. For example, {@code [0, 2, 1]} specifies to include in the following order the 1st field, the 3rd field and the 2nd field of the row. @see Projection @see ProjectedRowData
from
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/ProjectedRowData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/ProjectedRowData.java
Apache-2.0
/**
 * Creates an empty {@link ProjectedRowData} from a {@link Projection}. Fails (inside {@code
 * toTopLevelIndexes}) if the projection nests, since nesting is unsupported here.
 *
 * @param projection the projection to apply
 * @return the projected row
 */
public static ProjectedRowData from(Projection projection) {
    final int[] topLevelIndexes = projection.toTopLevelIndexes();
    return new ProjectedRowData(topLevelIndexes);
}
Create an empty {@link ProjectedRowData} starting from a {@link Projection}. <p>Throws {@link IllegalStateException} if the provided {@code projection} array contains nested projections, which are not supported by {@link ProjectedRowData}. @see Projection @see ProjectedRowData
from
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/ProjectedRowData.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/data/utils/ProjectedRowData.java
Apache-2.0
/** Validates the mandatory connector type and the optional connector property version. */
@Override
public void validate(DescriptorProperties properties) {
    // connector.type is required and must be a non-empty string.
    properties.validateString(CONNECTOR_TYPE, false, 1);
    // connector.property-version is optional and must be a non-negative integer.
    properties.validateInt(CONNECTOR_PROPERTY_VERSION, true, 0);
}
Key for describing the version of the connector. This property can be used for different connector versions (e.g. Kafka 0.10 or Kafka 0.11).
validate
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/ConnectorDescriptorValidator.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/ConnectorDescriptorValidator.java
Apache-2.0
/**
 * Adds every entry of {@code prop} under {@code prefix}, joining prefix and key with a dot.
 *
 * <p>E.g. prefix "flink" and entry ("k", "v") is stored as ("flink.k", "v").
 *
 * @param prefix non-null key prefix
 * @param prop non-null properties to add
 */
public void putPropertiesWithPrefix(String prefix, Map<String, String> prop) {
    checkNotNull(prefix);
    checkNotNull(prop);
    prop.forEach((subKey, value) -> put(prefix + '.' + subKey, value));
}
Adds a properties map by appending the given prefix to element keys with a dot. <p>For example: for prefix "flink" and a map of a single property with key "k" and value "v". The added property will be as key "flink.k" and value "v".
putPropertiesWithPrefix
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Stores the fully qualified name of {@code clazz} under {@code key}, after verifying that the
 * class can be instantiated.
 *
 * @param key non-null property key
 * @param clazz non-null class to store
 * @throws ValidationException if the class cannot be instantiated
 */
public void putClass(String key, Class<?> clazz) {
    checkNotNull(key);
    checkNotNull(clazz);
    final String instantiationError = InstantiationUtil.checkForInstantiationError(clazz);
    if (instantiationError != null) {
        throw new ValidationException(
                "Class '" + clazz.getName() + "' is not supported: " + instantiationError);
    }
    put(key, clazz.getName());
}
Adds a class under the given key.
putClass
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Stores the given non-null string value under the given non-null key. */
public void putString(String key, String str) {
    checkNotNull(key);
    checkNotNull(str);
    this.put(key, str);
}
Adds a string under the given key.
putString
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Stores a boolean under the given key, rendered as "true"/"false". */
public void putBoolean(String key, boolean b) {
    checkNotNull(key);
    put(key, String.valueOf(b));
}
Adds a boolean under the given key.
putBoolean
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Stores a long under the given key in decimal string form. */
public void putLong(String key, long l) {
    checkNotNull(key);
    put(key, String.valueOf(l));
}
Adds a long under the given key.
putLong
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Stores an integer under the given key in decimal string form. */
public void putInt(String key, int i) {
    checkNotNull(key);
    put(key, String.valueOf(i));
}
Adds an integer under the given key.
putInt
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Stores a single character under the given key as a one-character string. */
public void putCharacter(String key, char c) {
    checkNotNull(key);
    put(key, String.valueOf(c));
}
Adds a character under the given key.
putCharacter
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Serializes a {@link TableSchema} into flat properties under the given key.
 *
 * <p>For each column it writes indexed sub-properties (name, data type, and — when present —
 * computed-column expression, metadata alias, and virtual flag). Watermark specs are written as
 * an indexed group under {@code key + ".watermark"}, and an optional primary key as {@code
 * key + ".primary-key.name"} / {@code ".primary-key.columns"} (column list joined with ',').
 *
 * @param key non-null key prefix for the schema properties
 * @param schema non-null schema to serialize
 */
public void putTableSchema(String key, TableSchema schema) { checkNotNull(key); checkNotNull(schema); final String[] fieldNames = schema.getFieldNames(); final DataType[] fieldTypes = schema.getFieldDataTypes(); final String[] fieldExpressions = schema.getTableColumns().stream() .map( column -> { if (column instanceof ComputedColumn) { return ((ComputedColumn) column).getExpression(); } return null; }) .toArray(String[]::new); final String[] fieldMetadata = schema.getTableColumns().stream() .map( column -> { if (column instanceof MetadataColumn) { return ((MetadataColumn) column) .getMetadataAlias() .orElse(column.getName()); } return null; }) .toArray(String[]::new); final String[] fieldVirtual = schema.getTableColumns().stream() .map( column -> { if (column instanceof MetadataColumn) { return Boolean.toString( ((MetadataColumn) column).isVirtual()); } return null; }) .toArray(String[]::new); final List<List<String>> values = new ArrayList<>(); for (int i = 0; i < schema.getFieldCount(); i++) { values.add( Arrays.asList( fieldNames[i], fieldTypes[i].getLogicalType().asSerializableString(), fieldExpressions[i], fieldMetadata[i], fieldVirtual[i])); } putIndexedOptionalProperties( key, Arrays.asList(NAME, DATA_TYPE, EXPR, METADATA, VIRTUAL), values); if (!schema.getWatermarkSpecs().isEmpty()) { final List<List<String>> watermarkValues = new ArrayList<>(); for (WatermarkSpec spec : schema.getWatermarkSpecs()) { watermarkValues.add( Arrays.asList( spec.getRowtimeAttribute(), spec.getWatermarkExpr(), spec.getWatermarkExprOutputType() .getLogicalType() .asSerializableString())); } putIndexedFixedProperties( key + '.' + WATERMARK, Arrays.asList( WATERMARK_ROWTIME, WATERMARK_STRATEGY_EXPR, WATERMARK_STRATEGY_DATA_TYPE), watermarkValues); } schema.getPrimaryKey() .ifPresent( pk -> { putString(key + '.' + PRIMARY_KEY_NAME, pk.getName()); putString( key + '.' + PRIMARY_KEY_COLUMNS, String.join(",", pk.getColumns())); }); }
Adds a table schema under the given key.
putTableSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Stores a Flink {@link MemorySize} under the given key using its string representation. */
public void putMemorySize(String key, MemorySize size) {
    checkNotNull(key);
    checkNotNull(size);
    final String rendered = size.toString();
    put(key, rendered);
}
Adds a Flink {@link MemorySize} under the given key.
putMemorySize
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Adds an indexed sequence of fixed-arity property rows under a common key, e.g. {@code
 * schema.fields.0.type = INT, schema.fields.0.name = test}.
 *
 * @param key common key prefix
 * @param subKeys sub-key names, one per column
 * @param subKeyValues rows of values; each row must have exactly {@code subKeys.size()} entries
 * @throws ValidationException if a row's arity does not match the sub-keys
 */
public void putIndexedFixedProperties(
        String key, List<String> subKeys, List<List<String>> subKeyValues) {
    checkNotNull(key);
    checkNotNull(subKeys);
    checkNotNull(subKeyValues);
    int rowIndex = 0;
    for (List<String> row : subKeyValues) {
        if (row == null || row.size() != subKeys.size()) {
            throw new ValidationException("Values must have same arity as keys.");
        }
        for (int col = 0; col < row.size(); col++) {
            put(key + '.' + rowIndex + '.' + subKeys.get(col), row.get(col));
        }
        rowIndex++;
    }
}
Adds an indexed sequence of properties (with sub-properties) under a common key. <p>For example: <pre> schema.fields.0.type = INT, schema.fields.0.name = test schema.fields.1.type = LONG, schema.fields.1.name = test2 </pre> <p>The arity of each subKeyValues must match the arity of propertyKeys.
putIndexedFixedProperties
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Adds an indexed sequence of property rows under a common key, like {@link
 * #putIndexedFixedProperties} but tolerating {@code null} cells, which are skipped. Each row
 * must still contain at least one non-null cell.
 *
 * @param key common key prefix
 * @param subKeys sub-key names, one per column
 * @param subKeyValues rows of (possibly null) values, arity matching {@code subKeys}
 * @throws ValidationException if a row has the wrong arity or is entirely null
 */
public void putIndexedOptionalProperties(
        String key, List<String> subKeys, List<List<String>> subKeyValues) {
    checkNotNull(key);
    checkNotNull(subKeys);
    checkNotNull(subKeyValues);
    int rowIndex = 0;
    for (List<String> row : subKeyValues) {
        if (row == null || row.size() != subKeys.size()) {
            throw new ValidationException("Values must have same arity as keys.");
        }
        // Reject rows that carry no information at all.
        if (row.stream().noneMatch(Objects::nonNull)) {
            throw new ValidationException("Values must have at least one non-null value.");
        }
        for (int col = 0; col < row.size(); col++) {
            final String cell = row.get(col);
            if (cell != null) {
                put(key + '.' + rowIndex + '.' + subKeys.get(col), cell);
            }
        }
        rowIndex++;
    }
}
Adds an indexed sequence of properties (with sub-properties) under a common key. Different from {@link #putIndexedFixedProperties}, this method allows property values to be null; null values are simply ignored. The sub-properties of each entry must contain at least one non-null value. <p>For example: <pre> schema.fields.0.type = INT, schema.fields.0.name = test schema.fields.1.type = LONG, schema.fields.1.name = test2 schema.fields.2.type = LONG, schema.fields.2.name = test3, schema.fields.2.expr = test2 + 1 </pre> <p>The arity of each subKeyValues must match the arity of propertyKeys.
putIndexedOptionalProperties
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Adds an indexed sequence of variable-arity property maps under a common key, e.g. {@code
 * schema.fields.0.type = INT, schema.fields.1.name = test2}.
 *
 * @param key common key prefix
 * @param subKeyValues one map of sub-key/value pairs per index
 */
public void putIndexedVariableProperties(String key, List<Map<String, String>> subKeyValues) {
    checkNotNull(key);
    checkNotNull(subKeyValues);
    for (int idx = 0; idx < subKeyValues.size(); idx++) {
        final String rowPrefix = key + '.' + idx + '.';
        subKeyValues.get(idx).forEach((subKey, value) -> put(rowPrefix + subKey, value));
    }
}
Adds an indexed mapping of properties under a common key. <p>For example: <pre> schema.fields.0.type = INT, schema.fields.0.name = test schema.fields.1.name = test2 </pre> <p>The arity of the subKeyValues can differ.
putIndexedVariableProperties
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns the raw string value under the given key, if it exists. */
public Optional<String> getOptionalString(String key) {
    final Optional<String> value = optionalGet(key);
    return value;
}
Returns a string value under the given key if it exists.
getOptionalString
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns the string value under the given key.
 *
 * @throws RuntimeException (via {@code exceptionSupplier}) if the key does not exist
 */
public String getString(String key) {
    final Optional<String> value = getOptionalString(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns a string value under the given existing key.
getString
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a character value under the given key, if it exists.
 *
 * @throws ValidationException if the stored value is not exactly one character long
 */
public Optional<Character> getOptionalCharacter(String key) {
    return optionalGet(key)
            .map(
                    raw -> {
                        if (raw.length() != 1) {
                            throw new ValidationException(
                                    "The value of '"
                                            + key
                                            + "' must only contain one character.");
                        }
                        return raw.charAt(0);
                    });
}
Returns a character value under the given key if it exists.
getOptionalCharacter
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns a character value under the given key, failing if the key is absent. */
public Character getCharacter(String key) {
    final Optional<Character> value = getOptionalCharacter(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns a character value under the given existing key.
getCharacter
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Resolves the class name stored under the given key, if present, using the thread context
 * class loader and checking assignability to {@code superClass}.
 *
 * <p>Note: an assignability failure is thrown inside the try block and therefore re-wrapped by
 * the generic "Could not get class" exception, with the detailed message kept as the cause —
 * preserving the original behavior.
 *
 * @throws ValidationException if the class cannot be loaded or does not extend the super class
 */
@SuppressWarnings("unchecked")
public <T> Optional<Class<T>> getOptionalClass(String key, Class<T> superClass) {
    return optionalGet(key)
            .map(
                    className -> {
                        try {
                            final ClassLoader classLoader =
                                    Thread.currentThread().getContextClassLoader();
                            final Class<?> resolved =
                                    Class.forName(className, true, classLoader);
                            if (!superClass.isAssignableFrom(resolved)) {
                                throw new ValidationException(
                                        "Class '"
                                                + className
                                                + "' does not extend from the required class '"
                                                + superClass.getName()
                                                + "' for key '"
                                                + key
                                                + "'.");
                            }
                            return (Class<T>) resolved;
                        } catch (Exception e) {
                            throw new ValidationException(
                                    "Could not get class '"
                                            + className
                                            + "' for key '"
                                            + key
                                            + "'.",
                                    e);
                        }
                    });
}
Returns a class value under the given key if it exists.
getOptionalClass
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns a class value under the given key, failing if the key is absent. */
public <T> Class<T> getClass(String key, Class<T> superClass) {
    final Optional<Class<T>> clazz = getOptionalClass(key, superClass);
    return clazz.orElseThrow(exceptionSupplier(key));
}
Returns a class value under the given existing key.
getClass
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a big decimal value under the given key, if it exists.
 *
 * @throws ValidationException if the stored value is not a valid decimal
 */
public Optional<BigDecimal> getOptionalBigDecimal(String key) {
    return optionalGet(key)
            .map(
                    raw -> {
                        try {
                            return new BigDecimal(raw);
                        } catch (Exception e) {
                            throw new ValidationException(
                                    "Invalid decimal value for key '" + key + "'.", e);
                        }
                    });
}
Returns a big decimal value under the given key if it exists.
getOptionalBigDecimal
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns a big decimal value under the given key, failing if the key is absent. */
public BigDecimal getBigDecimal(String key) {
    final Optional<BigDecimal> value = getOptionalBigDecimal(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns a big decimal value under the given existing key.
getBigDecimal
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a boolean value under the given key, if it exists.
 *
 * <p>Only the case-insensitive literals {@code "true"} and {@code "false"} are accepted. The
 * previous implementation relied on {@link Boolean#valueOf(String)}, which never throws, so its
 * catch block was dead code and malformed values (e.g. {@code "yes"}) were silently mapped to
 * {@code false}; now they fail loudly, which is what the original error path clearly intended.
 *
 * @param key property key to look up
 * @return the parsed boolean, or {@link Optional#empty()} if the key is absent
 * @throws ValidationException if the stored value is neither "true" nor "false"
 */
public Optional<Boolean> getOptionalBoolean(String key) {
    return optionalGet(key)
            .map(
                    (value) -> {
                        if ("true".equalsIgnoreCase(value)) {
                            return true;
                        }
                        if ("false".equalsIgnoreCase(value)) {
                            return false;
                        }
                        throw new ValidationException(
                                "Invalid boolean value for key '" + key + "'.");
                    });
}
Returns a boolean value under the given key if it exists.
getOptionalBoolean
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns a boolean value under the given key, failing if the key is absent. */
public boolean getBoolean(String key) {
    final Optional<Boolean> value = getOptionalBoolean(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns a boolean value under the given existing key.
getBoolean
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a byte value under the given key, if it exists.
 *
 * @throws ValidationException if the stored value is not a valid byte
 */
public Optional<Byte> getOptionalByte(String key) {
    return optionalGet(key)
            .map(
                    raw -> {
                        try {
                            return Byte.parseByte(raw);
                        } catch (Exception e) {
                            throw new ValidationException(
                                    "Invalid byte value for key '" + key + "'.", e);
                        }
                    });
}
Returns a byte value under the given key if it exists.
getOptionalByte
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns a byte value under the given key, failing if the key is absent. */
public byte getByte(String key) {
    final Optional<Byte> value = getOptionalByte(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns a byte value under the given existing key.
getByte
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a double value under the given key, if it exists.
 *
 * @throws ValidationException if the stored value is not a valid double
 */
public Optional<Double> getOptionalDouble(String key) {
    return optionalGet(key)
            .map(
                    raw -> {
                        try {
                            return Double.parseDouble(raw);
                        } catch (Exception e) {
                            throw new ValidationException(
                                    "Invalid double value for key '" + key + "'.", e);
                        }
                    });
}
Returns a double value under the given key if it exists.
getOptionalDouble
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns a double value under the given key, failing if the key is absent. */
public double getDouble(String key) {
    final Optional<Double> value = getOptionalDouble(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns a double value under the given existing key.
getDouble
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a float value under the given key, if it exists.
 *
 * @throws ValidationException if the stored value is not a valid float
 */
public Optional<Float> getOptionalFloat(String key) {
    return optionalGet(key)
            .map(
                    raw -> {
                        try {
                            return Float.parseFloat(raw);
                        } catch (Exception e) {
                            throw new ValidationException(
                                    "Invalid float value for key '" + key + "'.", e);
                        }
                    });
}
Returns a float value under the given key if it exists.
getOptionalFloat
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns a float value under the given key, failing if the key is absent. */
public float getFloat(String key) {
    final Optional<Float> value = getOptionalFloat(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns a float value under the given existing key.
getFloat
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns an integer value under the given key, if it exists.
 *
 * @throws ValidationException if the stored value is not a valid integer
 */
public Optional<Integer> getOptionalInt(String key) {
    return optionalGet(key)
            .map(
                    raw -> {
                        try {
                            return Integer.parseInt(raw);
                        } catch (Exception e) {
                            throw new ValidationException(
                                    "Invalid integer value for key '" + key + "'.", e);
                        }
                    });
}
Returns an integer value under the given key if it exists.
getOptionalInt
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns an integer value under the given key, failing if the key is absent. */
public int getInt(String key) {
    final Optional<Integer> value = getOptionalInt(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns an integer value under the given existing key.
getInt
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a long value under the given key, if it exists.
 *
 * @throws ValidationException if the stored value is not a valid long
 */
public Optional<Long> getOptionalLong(String key) {
    return optionalGet(key)
            .map(
                    raw -> {
                        try {
                            return Long.parseLong(raw);
                        } catch (Exception e) {
                            throw new ValidationException(
                                    "Invalid long value for key '" + key + "'.", e);
                        }
                    });
}
Returns a long value under the given key if it exists.
getOptionalLong
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns a long value under the given key, failing if the key is absent. */
public long getLong(String key) {
    final Optional<Long> value = getOptionalLong(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns a long value under the given existing key.
getLong
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a short value under the given key, if it exists.
 *
 * @throws ValidationException if the stored value is not a valid short
 */
public Optional<Short> getOptionalShort(String key) {
    return optionalGet(key)
            .map(
                    raw -> {
                        try {
                            return Short.parseShort(raw);
                        } catch (Exception e) {
                            throw new ValidationException(
                                    "Invalid short value for key '" + key + "'.", e);
                        }
                    });
}
Returns a short value under the given key if it exists.
getOptionalShort
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns a short value under the given key, failing if the key is absent. */
public short getShort(String key) {
    final Optional<Short> value = getOptionalShort(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns a short value under the given existing key.
getShort
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns the type information stored under the given key, if it exists. */
public Optional<TypeInformation<?>> getOptionalType(String key) {
    final Optional<String> serialized = optionalGet(key);
    return serialized.map(TypeStringUtils::readTypeInfo);
}
Returns the type information under the given key if it exists.
getOptionalType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns the type information under the given key, failing if the key is absent. */
public TypeInformation<?> getType(String key) {
    final Optional<TypeInformation<?>> value = getOptionalType(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns the type information under the given existing key.
getType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns the {@link DataType} under the given key, if it exists, by parsing the stored
 * serializable logical-type string with the thread context class loader.
 */
public Optional<DataType> getOptionalDataType(String key) {
    return optionalGet(key)
            .map(
                    serialized ->
                            TypeConversions.fromLogicalToDataType(
                                    LogicalTypeParser.parse(
                                            serialized,
                                            Thread.currentThread().getContextClassLoader())));
}
Returns the DataType under the given key if it exists.
getOptionalDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/** Returns the {@link DataType} under the given key, failing if the key is absent. */
public DataType getDataType(String key) {
    final Optional<DataType> value = getOptionalDataType(key);
    return value.orElseThrow(exceptionSupplier(key));
}
Returns the DataType under the given existing key.
getDataType
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a {@link TableSchema} assembled from the properties under the given key prefix, if
 * any schema columns exist there.
 *
 * <p>The schema is reconstructed in three steps: (1) columns ({@code key.N.name} plus either
 * {@code key.N.data-type} or the legacy {@code key.N.type}, with optional computed-column
 * expression, metadata alias, and virtual flag), (2) watermark specifications under
 * {@code key.watermark.N.*}, and (3) an optional primary-key constraint.
 *
 * @param key common prefix of all schema-related property keys
 * @return the reconstructed schema, or {@link Optional#empty()} if no column entries exist
 */
public Optional<TableSchema> getOptionalTableSchema(String key) {
    // filter for number of fields
    // Count entries whose suffix (after "key.") matches the column-name pattern; this count
    // also defines the expected contiguous index range 0..fieldCount-1 below.
    final int fieldCount =
            properties.keySet().stream()
                    .filter(
                            (k) ->
                                    k.startsWith(key)
                                            // "key." is the prefix.
                                            && SCHEMA_COLUMN_NAME_SUFFIX
                                                    .matcher(k.substring(key.length() + 1))
                                                    .matches())
                    .mapToInt((k) -> 1)
                    .sum();

    // no columns at all means no schema was serialized under this key
    if (fieldCount == 0) {
        return Optional.empty();
    }

    // validate fields and build schema
    final TableSchema.Builder schemaBuilder = TableSchema.builder();
    for (int i = 0; i < fieldCount; i++) {
        // per-column property keys
        final String nameKey = key + '.' + i + '.' + NAME;
        final String legacyTypeKey = key + '.' + i + '.' + TYPE;
        final String typeKey = key + '.' + i + '.' + DATA_TYPE;
        final String exprKey = key + '.' + i + '.' + EXPR;
        final String metadataKey = key + '.' + i + '.' + METADATA;
        final String virtualKey = key + '.' + i + '.' + VIRTUAL;

        // the column name is mandatory
        final String name = optionalGet(nameKey).orElseThrow(exceptionSupplier(nameKey));

        // prefer the new DATA_TYPE serialization; fall back to the legacy TYPE
        // (TypeInformation-based) serialization; fail if neither is present
        final DataType type;
        if (containsKey(typeKey)) {
            type = getDataType(typeKey);
        } else if (containsKey(legacyTypeKey)) {
            type = TypeConversions.fromLegacyInfoToDataType(getType(legacyTypeKey));
        } else {
            throw exceptionSupplier(typeKey).get();
        }

        final Optional<String> expr = optionalGet(exprKey);
        final Optional<String> metadata = optionalGet(metadataKey);
        final boolean virtual = getOptionalBoolean(virtualKey).orElse(false);
        // computed column
        if (expr.isPresent()) {
            schemaBuilder.add(TableColumn.computed(name, type, expr.get()));
        }
        // metadata column
        else if (metadata.isPresent()) {
            final String metadataAlias = metadata.get();
            // when the alias equals the column name, the shorter factory without an explicit
            // alias is used
            if (metadataAlias.equals(name)) {
                schemaBuilder.add(TableColumn.metadata(name, type, virtual));
            } else {
                schemaBuilder.add(TableColumn.metadata(name, type, metadataAlias, virtual));
            }
        }
        // physical column
        else {
            schemaBuilder.add(TableColumn.physical(name, type));
        }
    }

    // extract watermark information
    // filter for number of fields
    // Watermark entries are counted by their mandatory ROWTIME sub-key.
    String watermarkPrefixKey = key + '.' + WATERMARK;
    final int watermarkCount =
            properties.keySet().stream()
                    .filter(
                            (k) ->
                                    k.startsWith(watermarkPrefixKey)
                                            && k.endsWith('.' + WATERMARK_ROWTIME))
                    .mapToInt((k) -> 1)
                    .sum();
    if (watermarkCount > 0) {
        for (int i = 0; i < watermarkCount; i++) {
            final String rowtimeKey = watermarkPrefixKey + '.' + i + '.' + WATERMARK_ROWTIME;
            final String exprKey = watermarkPrefixKey + '.' + i + '.' + WATERMARK_STRATEGY_EXPR;
            final String typeKey =
                    watermarkPrefixKey + '.' + i + '.' + WATERMARK_STRATEGY_DATA_TYPE;
            // all three watermark sub-keys are mandatory once a rowtime entry exists
            final String rowtime =
                    optionalGet(rowtimeKey).orElseThrow(exceptionSupplier(rowtimeKey));
            final String exprString =
                    optionalGet(exprKey).orElseThrow(exceptionSupplier(exprKey));
            final String typeString =
                    optionalGet(typeKey).orElseThrow(exceptionSupplier(typeKey));
            // the watermark expression's output type is stored as a serialized logical type
            final DataType exprType =
                    TypeConversions.fromLogicalToDataType(
                            LogicalTypeParser.parse(
                                    typeString,
                                    Thread.currentThread().getContextClassLoader()));
            schemaBuilder.watermark(rowtime, exprString, exprType);
        }
    }

    // Extract unique constraints.
    String pkConstraintNameKey = key + '.' + PRIMARY_KEY_NAME;
    final Optional<String> pkConstraintNameOpt = optionalGet(pkConstraintNameKey);
    if (pkConstraintNameOpt.isPresent()) {
        final String pkColumnsKey = key + '.' + PRIMARY_KEY_COLUMNS;
        // the column list is stored as a single comma-separated string
        final String columns =
                optionalGet(pkColumnsKey).orElseThrow(exceptionSupplier(pkColumnsKey));
        schemaBuilder.primaryKey(pkConstraintNameOpt.get(), columns.split(","));
    }
    return Optional.of(schemaBuilder.build());
}
Returns a table schema under the given key if it exists.
getOptionalTableSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns the table schema stored under the given key.
 *
 * @param key property key prefix that must contain a serialized schema
 * @return the reconstructed {@link TableSchema}
 * @throws RuntimeException (via {@code exceptionSupplier}) if no schema exists under the key
 */
public TableSchema getTableSchema(String key) {
    final Optional<TableSchema> schema = getOptionalTableSchema(key);
    return schema.orElseThrow(exceptionSupplier(key));
}
Returns a table schema under the given existing key.
getTableSchema
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a Flink {@link MemorySize} stored under the given key, if any.
 *
 * @param key property key to look up
 * @return the parsed memory size (interpreted in bytes), or {@link Optional#empty()} if the
 *     key is absent
 * @throws ValidationException if the stored value cannot be parsed as a memory size
 */
public Optional<MemorySize> getOptionalMemorySize(String key) {
    final Optional<String> raw = optionalGet(key);
    if (!raw.isPresent()) {
        return Optional.empty();
    }
    try {
        return Optional.of(MemorySize.parse(raw.get(), MemorySize.MemoryUnit.BYTES));
    } catch (Exception e) {
        throw new ValidationException("Invalid memory size value for key '" + key + "'.", e);
    }
}
Returns a Flink {@link MemorySize} under the given key if it exists.
getOptionalMemorySize
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a Flink {@link MemorySize} stored under the given key.
 *
 * @param key property key that must exist
 * @return the parsed memory size
 * @throws RuntimeException (via {@code exceptionSupplier}) if the key does not exist
 */
public MemorySize getMemorySize(String key) {
    final Optional<MemorySize> size = getOptionalMemorySize(key);
    return size.orElseThrow(exceptionSupplier(key));
}
Returns a Flink {@link MemorySize} under the given existing key.
getMemorySize
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a Java {@link Duration} stored under the given key, if any.
 *
 * @param key property key to look up
 * @return the parsed duration, or {@link Optional#empty()} if the key is absent
 * @throws ValidationException if the stored value cannot be parsed as a duration
 */
public Optional<Duration> getOptionalDuration(String key) {
    final Optional<String> raw = optionalGet(key);
    if (!raw.isPresent()) {
        return Optional.empty();
    }
    try {
        return Optional.of(TimeUtils.parseDuration(raw.get()));
    } catch (Exception e) {
        throw new ValidationException("Invalid duration value for key '" + key + "'.", e);
    }
}
Returns a Java {@link Duration} under the given key if it exists.
getOptionalDuration
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns a Java {@link Duration} stored under the given key.
 *
 * @param key property key that must exist
 * @return the parsed duration
 * @throws RuntimeException (via {@code exceptionSupplier}) if the key does not exist
 */
public Duration getDuration(String key) {
    final Optional<Duration> duration = getOptionalDuration(key);
    return duration.orElseThrow(exceptionSupplier(key));
}
Returns a java {@link Duration} under the given existing key.
getDuration
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
public List<Map<String, String>> getFixedIndexedProperties(String key, List<String> subKeys) { // determine max index final int maxIndex = extractMaxIndex(key, "\\.(.*)"); // validate and create result final List<Map<String, String>> list = new ArrayList<>(); for (int i = 0; i <= maxIndex; i++) { final Map<String, String> map = new HashMap<>(); for (String subKey : subKeys) { final String fullKey = key + '.' + i + '.' + subKey; // check for existence of full key if (!containsKey(fullKey)) { throw exceptionSupplier(fullKey).get(); } map.put(subKey, fullKey); } list.add(map); } return list; }
Returns the property keys of fixed indexed properties. <p>For example: <pre> schema.fields.0.type = INT, schema.fields.0.name = test schema.fields.1.type = LONG, schema.fields.1.name = test2 </pre> <p>getFixedIndexedProperties("schema.fields", List("type", "name")) leads to: <pre> 0: Map("type" -> "schema.fields.0.type", "name" -> "schema.fields.0.name") 1: Map("type" -> "schema.fields.1.type", "name" -> "schema.fields.1.name") </pre>
getFixedIndexedProperties
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
public List<Map<String, String>> getVariableIndexedProperties( String key, List<String> requiredSubKeys) { // determine max index final int maxIndex = extractMaxIndex(key, "(\\.)?(.*)"); // determine optional properties final String escapedKey = Pattern.quote(key); final Pattern pattern = Pattern.compile(escapedKey + "\\.(\\d+)(\\.)?(.*)"); final Set<String> optionalSubKeys = properties.keySet().stream() .flatMap( (k) -> { final Matcher matcher = pattern.matcher(k); if (matcher.find()) { return Stream.of(matcher.group(3)); } return Stream.empty(); }) .filter((k) -> k.length() > 0) .collect(Collectors.toSet()); // validate and create result final List<Map<String, String>> list = new ArrayList<>(); for (int i = 0; i <= maxIndex; i++) { final Map<String, String> map = new HashMap<>(); // check and add required keys for (String subKey : requiredSubKeys) { final String fullKey = key + '.' + i + '.' + subKey; // check for existence of full key if (!containsKey(fullKey)) { throw exceptionSupplier(fullKey).get(); } map.put(subKey, fullKey); } // add optional keys for (String subKey : optionalSubKeys) { final String fullKey = key + '.' + i + '.' + subKey; optionalGet(fullKey).ifPresent(value -> map.put(subKey, fullKey)); } list.add(map); } return list; }
Returns the property keys of variable indexed properties. <p>For example: <pre> schema.fields.0.type = INT, schema.fields.0.name = test schema.fields.1.type = LONG </pre> <p>getVariableIndexedProperties("schema.fields", List("type")) leads to: <pre> 0: Map("type" -> "schema.fields.0.type", "name" -> "schema.fields.0.name") 1: Map("type" -> "schema.fields.1.type") </pre>
getVariableIndexedProperties
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
/**
 * Returns all properties under a given key that contain an index in between.
 *
 * <p>E.g. {@code rowtime.0.name} -> returns all {@code rowtime.#.name} properties.
 *
 * @param key prefix before the index
 * @param subKey suffix after the index
 * @return all matching properties as a key-value map
 */
public Map<String, String> getIndexedProperty(String key, String subKey) {
    // pre-compile the pattern instead of calling String#matches per entry
    final Pattern indexedKeyPattern =
            Pattern.compile(Pattern.quote(key) + "\\.\\d+\\." + Pattern.quote(subKey));
    final Map<String, String> result = new HashMap<>();
    for (Map.Entry<String, String> entry : properties.entrySet()) {
        if (indexedKeyPattern.matcher(entry.getKey()).matches()) {
            result.put(entry.getKey(), entry.getValue());
        }
    }
    return result;
}
Returns all properties under a given key that contains an index in between. <p>E.g. rowtime.0.name -> returns all rowtime.#.name properties
getIndexedProperty
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0
public <E> Optional<List<E>> getOptionalArray(String key, Function<String, E> keyMapper) { // determine max index final int maxIndex = extractMaxIndex(key, ""); if (maxIndex < 0) { // check for a single element array if (containsKey(key)) { return Optional.of(Collections.singletonList(keyMapper.apply(key))); } else { return Optional.empty(); } } else { final List<E> list = new ArrayList<>(); for (int i = 0; i < maxIndex + 1; i++) { final String fullKey = key + '.' + i; final E value = keyMapper.apply(fullKey); list.add(value); } return Optional.of(list); } }
Returns all array elements under a given key if it exists. <p>For example: <pre> primary-key.0 = field1 primary-key.1 = field2 </pre> <p>leads to: List(field1, field2) <p>or: <pre> primary-key = field1 </pre> <p>leads to: List(field1) <p>The key mapper gets the key of the current value e.g. "primary-key.1".
getOptionalArray
java
apache/flink
flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-common/src/main/java/org/apache/flink/table/descriptors/DescriptorProperties.java
Apache-2.0