code
stringlengths
25
201k
docstring
stringlengths
19
96.2k
func_name
stringlengths
0
235
language
stringclasses
1 value
repo
stringlengths
8
51
path
stringlengths
11
314
url
stringlengths
62
377
license
stringclasses
7 values
/**
 * Configures the session by executing the given statement, defaulting a missing timeout to 0.
 *
 * @param version the negotiated REST API version (unused here)
 * @param request carries the session handle path parameter and the statement/timeout body
 * @return a completed future with an empty response body
 */
@Override
protected CompletableFuture<EmptyResponseBody> handleRequest(
        SqlGatewayRestAPIVersion version,
        @Nonnull HandlerRequest<ConfigureSessionRequestBody> request)
        throws RestHandlerException {
    SessionHandle sessionHandle = request.getPathParameter(SessionHandleIdPathParameter.class);
    ConfigureSessionRequestBody body = request.getRequestBody();
    // A null timeout means "no timeout requested"; normalize it to 0.
    Long requestedTimeout = body.getTimeout();
    long effectiveTimeout = requestedTimeout == null ? 0L : requestedTimeout;
    service.configureSession(sessionHandle, body.getStatement(), effectiveTimeout);
    return CompletableFuture.completedFuture(EmptyResponseBody.getInstance());
}
Handler to configure a session with a SQL statement.
handleRequest
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/handler/session/ConfigureSessionHandler.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/handler/session/ConfigureSessionHandler.java
Apache-2.0
/**
 * Returns the configuration of the requested session.
 *
 * <p>Service-level failures are surfaced to the client as an HTTP 500 error.
 */
@Override
protected CompletableFuture<GetSessionConfigResponseBody> handleRequest(
        SqlGatewayRestAPIVersion version, @Nonnull HandlerRequest<EmptyRequestBody> request)
        throws RestHandlerException {
    try {
        SessionHandle sessionHandle =
                request.getPathParameter(SessionHandleIdPathParameter.class);
        Map<String, String> config = service.getSessionConfig(sessionHandle);
        return CompletableFuture.completedFuture(new GetSessionConfigResponseBody(config));
    } catch (SqlGatewayException e) {
        throw new RestHandlerException(
                e.getMessage(), HttpResponseStatus.INTERNAL_SERVER_ERROR, e);
    }
}
Handler to get the session configuration.
handleRequest
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/handler/session/GetSessionConfigHandler.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/handler/session/GetSessionConfigHandler.java
Apache-2.0
/** Responds with the enum names of all stable REST API versions. */
@Override
protected CompletableFuture<GetApiVersionResponseBody> handleRequest(
        @Nullable SqlGatewayRestAPIVersion version,
        @Nonnull HandlerRequest<EmptyRequestBody> request) {
    GetApiVersionResponseBody body =
            new GetApiVersionResponseBody(
                    stableVersions.stream().map(v -> v.name()).collect(Collectors.toList()));
    return CompletableFuture.completedFuture(body);
}
Handler to get rest api version.
handleRequest
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/handler/util/GetApiVersionHandler.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/handler/util/GetApiVersionHandler.java
Apache-2.0
/** Returns the ID of the cluster that runs the deployed application. */
public String getClusterID() {
    return this.clusterID;
}
Response about the cluster that runs the application.
getClusterID
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/application/DeployScriptResponseBody.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/application/DeployScriptResponseBody.java
Apache-2.0
/** Returns the handle identifying the refresh operation, as a string. */
public String getOperationHandle() {
    return this.operationHandle;
}
{@link ResponseBody} for executing a materialized table refresh operation.
getOperationHandle
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/materializedtable/RefreshMaterializedTableResponseBody.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/materializedtable/RefreshMaterializedTableResponseBody.java
Apache-2.0
/** Returns the current status of the operation, as a string. */
public String getStatus() {
    return this.status;
}
{@link ResponseBody} for getting the status of operation.
getStatus
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/operation/OperationStatusResponseBody.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/operation/OperationStatusResponseBody.java
Apache-2.0
/**
 * Returns the session configuration properties.
 *
 * <p>NOTE(review): the backing map is returned directly, so callers could mutate it — confirm
 * whether an unmodifiable view is intended.
 */
public Map<String, String> getProperties() {
    return this.properties;
}
{@link ResponseBody} for getting the session configuration.
getProperties
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/session/GetSessionConfigResponseBody.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/session/GetSessionConfigResponseBody.java
Apache-2.0
/** Returns the supported REST API version names. */
public List<String> getVersions() {
    return this.versions;
}
{@link ResponseBody} for getting rest api version.
getVersions
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/util/GetApiVersionResponseBody.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/message/util/GetApiVersionResponseBody.java
Apache-2.0
/**
 * Deserializes a {@link LogicalType} from its JSON representation.
 *
 * @throws UnsupportedOperationException if the JSON object lacks the type-name field
 */
@Override
public LogicalType deserialize(JsonParser jsonParser, DeserializationContext ctx)
        throws IOException {
    JsonNode root = jsonParser.readValueAsTree();
    // Without the type-name field the payload cannot be interpreted as a logical type.
    if (!root.has(FIELD_NAME_TYPE_NAME)) {
        throw new UnsupportedOperationException(
                String.format("Cannot parse this Json String:\n%s", root.toPrettyString()));
    }
    return deserializeInternal(root);
}
JSON deserializer for {@link LogicalType}. @see LogicalTypeJsonSerializer for the reverse operation.
deserialize
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/LogicalTypeJsonDeserializer.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/LogicalTypeJsonDeserializer.java
Apache-2.0
/**
 * Creates a {@link ResultInfo} from a ready {@link ResultSet}, converting the row data into the
 * representation required by the requested {@link RowFormat}.
 *
 * @param resultSet the source result set; must not be in the NOT_READY state
 * @param rowFormat the target row format
 * @param timeZoneConverter converter for time-zone-sensitive values, only consulted for JSON
 * @throws UnsupportedOperationException if the row format is not supported
 */
public static ResultInfo createResultInfo(
        ResultSet resultSet,
        RowFormat rowFormat,
        @Nullable RowDataLocalTimeZoneConverter timeZoneConverter) {
    Preconditions.checkArgument(resultSet.getResultType() != ResultSet.ResultType.NOT_READY);
    List<RowData> rows = resultSet.getData();
    switch (rowFormat) {
        case JSON:
            // Only rewrite the rows when there is time-zone-sensitive data to convert.
            if (timeZoneConverter != null && timeZoneConverter.hasTimeZoneData()) {
                rows =
                        rows.stream()
                                .map(row -> timeZoneConverter.convertTimeZoneRowData(row))
                                .collect(Collectors.toList());
            }
            break;
        case PLAIN_TEXT:
            // Plain text renders every column through the result set's string converter.
            RowDataToStringConverter stringConverter =
                    ((ResultSetImpl) resultSet).getConverter();
            rows =
                    rows.stream()
                            .map(row -> convertToPlainText(row, stringConverter))
                            .collect(Collectors.toList());
            break;
        default:
            throw new UnsupportedOperationException(
                    String.format("Unsupported row format: %s.", rowFormat));
    }
    List<ColumnInfo> columnInfos =
            resultSet.getResultSchema().getColumns().stream()
                    .map(ColumnInfo::toColumnInfo)
                    .collect(Collectors.toList());
    return new ResultInfo(columnInfos, rows, rowFormat);
}
A {@code ResultInfo} contains information of a {@link ResultSet}. It is designed for transferring the information of ResultSet via REST. For its serialization and deserialization, See: <p>{@link ResultInfoSerializer} and {@link ResultInfoDeserializer}
createResultInfo
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/ResultInfo.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/ResultInfo.java
Apache-2.0
/** Returns an unmodifiable view of the column metadata for the result data. */
public List<ColumnInfo> getColumnInfos() {
    return Collections.unmodifiableList(this.columnInfos);
}
Get the column info of the data.
getColumnInfos
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/ResultInfo.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/ResultInfo.java
Apache-2.0
/** Returns the format the row data is encoded in. */
public RowFormat getRowFormat() {
    return this.rowFormat;
}
Get the row format about the data.
getRowFormat
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/ResultInfo.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/ResultInfo.java
Apache-2.0
/**
 * Builds one {@link FieldGetter} per column for reading the result rows.
 *
 * <p>JSON rows keep their declared column types, so each getter is created from the column's
 * logical type; in the other (plain-text) case every column is carried as a string, so all
 * getters read strings.
 */
public List<FieldGetter> getFieldGetters() {
    boolean useDeclaredTypes = rowFormat == RowFormat.JSON;
    return IntStream.range(0, columnInfos.size())
            .mapToObj(
                    i ->
                            RowData.createFieldGetter(
                                    useDeclaredTypes
                                            ? columnInfos.get(i).getLogicalType()
                                            : STRING_TYPE,
                                    i))
            .collect(Collectors.toList());
}
Create the {@link FieldGetter} to get column values in the results. <p>With {@code JSON} format, it uses the {@link ResolvedSchema} to build the getters. With {@code PLAIN_TEXT} format, every column is carried as a string, so it uses {@link StringData}'s {@link FieldGetter} to get the column values.
getFieldGetters
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/ResultInfo.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/serde/ResultInfo.java
Apache-2.0
/**
 * Resolves the session time zone from the given config, falling back to the system default when
 * the option is still at its default value.
 */
private static ZoneId getSessionTimeZone(ReadableConfig sessionConfig) {
    String configuredZone = sessionConfig.get(TableConfigOptions.LOCAL_TIME_ZONE);
    if (TableConfigOptions.LOCAL_TIME_ZONE.defaultValue().equals(configuredZone)) {
        return ZoneId.systemDefault();
    }
    return ZoneId.of(configuredZone);
}
Get time zone from the given session config.
getSessionTimeZone
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/RowDataLocalTimeZoneConverter.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/RowDataLocalTimeZoneConverter.java
Apache-2.0
/**
 * Extracts the leading path segment of the uri (e.g. "/v2/sessions" -> "v2") and resolves it to
 * a version; any failure — missing segment, unknown name — yields the default version.
 *
 * @return the resolved SqlGatewayRestAPIVersion, or the default version on failure
 */
public static SqlGatewayRestAPIVersion fromURIToVersion(String uri) {
    int segmentEnd = uri.indexOf('/', 1);
    String candidate = segmentEnd < 0 ? uri.substring(1) : uri.substring(1, segmentEnd);
    try {
        return valueOf(candidate.toUpperCase());
    } catch (Exception e) {
        return getDefaultVersion();
    }
}
Convert uri to SqlGatewayRestAPIVersion. If failed, return default version. @return SqlGatewayRestAPIVersion
fromURIToVersion
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestAPIVersion.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestAPIVersion.java
Apache-2.0
/**
 * Returns all API versions that are marked stable, in declaration order.
 *
 * @return the list of the stable versions
 */
public static List<SqlGatewayRestAPIVersion> getStableVersions() {
    return Arrays.stream(values())
            .filter(candidate -> candidate.isStableVersion())
            .collect(Collectors.toList());
}
Returns the supported stable versions. @return the list of the stable versions.
getStableVersions
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestAPIVersion.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestAPIVersion.java
Apache-2.0
/**
 * Returns the unique version flagged as default.
 *
 * @return the default version
 * @throws IllegalStateException if zero or multiple versions are flagged as default
 */
public static SqlGatewayRestAPIVersion getDefaultVersion() {
    List<SqlGatewayRestAPIVersion> defaults =
            Arrays.stream(values())
                    .filter(candidate -> candidate.isDefaultVersion())
                    .collect(Collectors.toList());
    Preconditions.checkState(
            defaults.size() == 1,
            String.format(
                    "Only one default version of Sql Gateway Rest API, but found %s.",
                    defaults.size()));
    return defaults.get(0);
}
Returns the default version. @return the default version.
getDefaultVersion
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestAPIVersion.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestAPIVersion.java
Apache-2.0
/** Returns the stable versions that are strictly higher than the given version. */
public static List<SqlGatewayRestAPIVersion> getHigherVersions(
        SqlGatewayRestAPIVersion version) {
    return Arrays.stream(values())
            .filter(
                    candidate ->
                            candidate.isStableVersion() && candidate.compareTo(version) > 0)
            .collect(Collectors.toList());
}
Get higher versions comparing to the input version.
getHigherVersions
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestAPIVersion.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestAPIVersion.java
Apache-2.0
public static @Nullable Long parseToken(@Nullable String nextResultUri) { if (nextResultUri == null || nextResultUri.length() == 0) { return null; } String[] split = nextResultUri.split("/"); // remove query string String s = split[split.length - 1]; s = s.replaceAll("\\?.*", ""); return Long.valueOf(s); }
Parse token from the result uri.
parseToken
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestEndpointUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestEndpointUtils.java
Apache-2.0
/**
 * Builds the {@link DefaultContext} from the Flink configuration file, dynamic configuration
 * and user-specified jars.
 *
 * @param dynamicConfig user-specified configuration; overrides the global configuration
 * @param dependencies user-specified jar URIs
 * @param discoverExecutionConfig whether to derive the execution configuration from the
 *     available custom command lines
 * @throws SqlGatewayException if the execution configuration cannot be created
 */
public static DefaultContext load(
        Configuration dynamicConfig, List<URI> dependencies, boolean discoverExecutionConfig) {
    // 1. find the configuration directory
    String flinkConfigDir = CliFrontend.getConfigurationDirectoryFromEnv();

    // 2. load the global configuration
    Configuration configuration = GlobalConfiguration.loadConfiguration(flinkConfigDir);
    configuration.set(DeploymentOptionsInternal.CONF_DIR, flinkConfigDir);
    // dynamic options take precedence over the loaded global configuration
    configuration.addAll(dynamicConfig);

    // 3. load the custom command lines
    List<CustomCommandLine> commandLines =
            CliFrontend.loadCustomCommandLines(configuration, flinkConfigDir);

    // initialize default file system
    FileSystem.initialize(
            configuration, PluginUtils.createPluginManagerFromRootFolder(configuration));

    if (discoverExecutionConfig) {
        Options commandLineOptions = collectCommandLineOptions(commandLines);

        try {
            // parse an empty command line to pick up the deployment defaults
            CommandLine deploymentCommandLine =
                    CliFrontendParser.parse(commandLineOptions, new String[] {}, true);
            configuration.addAll(
                    createExecutionConfig(
                            deploymentCommandLine, commandLineOptions, commandLines));
        } catch (Exception e) {
            throw new SqlGatewayException(
                    "Could not load available CLI with Environment Deployment entry.", e);
        }
    }

    return new DefaultContext(configuration, dependencies);
}
Build the {@link DefaultContext} from config.yaml, dynamic configuration and users specified jars. @param dynamicConfig user specified configuration. @param dependencies user specified jars @param discoverExecutionConfig flag whether to load the execution configuration
load
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/context/DefaultContext.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/context/DefaultContext.java
Apache-2.0
/**
 * Retrieves the {@link ClusterClient} for the cluster referenced by the given configuration and
 * runs the supplied {@link ClusterAction} against it.
 *
 * @param configuration the combined configuration of {@code sessionConf} and {@code
 *     executionConfig}
 * @param handle the operation this action belongs to, used for error reporting
 * @param clusterAction the action to execute with the retrieved client
 * @param <ClusterID> type of the cluster id
 * @param <Result> type of the action result
 * @throws SqlExecutionException if retrieving the cluster or running the action fails
 */
private <ClusterID, Result> Result runClusterAction(
        Configuration configuration,
        OperationHandle handle,
        ClusterAction<ClusterID, Result> clusterAction)
        throws SqlExecutionException {
    ClusterClientFactory<ClusterID> factory =
            clusterClientServiceLoader.getClusterClientFactory(configuration);
    ClusterID clusterId = factory.getClusterId(configuration);
    Preconditions.checkNotNull(clusterId, "No cluster ID found for operation " + handle);
    // Descriptor and client are both closed automatically once the action finishes.
    try (ClusterDescriptor<ClusterID> descriptor =
                    factory.createClusterDescriptor(configuration);
            ClusterClient<ClusterID> client =
                    descriptor.retrieve(clusterId).getClusterClient()) {
        return clusterAction.runAction(client);
    } catch (FlinkException e) {
        throw new SqlExecutionException("Failed to run cluster action.", e);
    }
}
Retrieves the {@link ClusterClient} from the session and runs the given {@link ClusterAction} against it. @param configuration the combined configuration of {@code sessionConf} and {@code executionConfig}. @param handle the specified operation handle @param clusterAction the cluster action to run against the retrieved {@link ClusterClient}. @param <ClusterID> type of the cluster id @param <Result> type of the result @throws SqlExecutionException if something goes wrong
runClusterAction
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationExecutor.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationExecutor.java
Apache-2.0
/**
 * Closes the operation and releases all resources it holds.
 *
 * <p>No-op when the handle is unknown; removal and close both happen under the write lock.
 *
 * @param operationHandle identifies the {@link Operation} to close
 */
public void closeOperation(OperationHandle operationHandle) {
    writeLock(
            () -> {
                Operation removed = submittedOperations.remove(operationHandle);
                if (removed == null) {
                    return;
                }
                removed.close();
            });
}
Close the operation and release all resources used by the {@link Operation}. @param operationHandle identifies the {@link Operation}.
closeOperation
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationManager.java
Apache-2.0
/**
 * Gets the {@link OperationInfo} of the given operation.
 *
 * @param operationHandle identifies the {@link Operation}
 */
public OperationInfo getOperationInfo(OperationHandle operationHandle) {
    Operation operation = getOperation(operationHandle);
    return operation.getOperationInfo();
}
Get the {@link OperationInfo} of the operation. @param operationHandle identifies the {@link Operation}.
getOperationInfo
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationManager.java
Apache-2.0
/**
 * Gets the {@link ResolvedSchema} of the given operation's result.
 *
 * @param operationHandle identifies the {@link Operation}
 * @throws Exception if resolving the result schema fails
 */
public ResolvedSchema getOperationResultSchema(OperationHandle operationHandle)
        throws Exception {
    Operation operation = getOperation(operationHandle);
    return operation.getResultSchema();
}
Get the {@link ResolvedSchema} of the operation. @param operationHandle identifies the {@link Operation}.
getOperationResultSchema
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationManager.java
Apache-2.0
/**
 * Closes the {@link OperationManager} and all submitted operations.
 *
 * <p>Stops accepting new operations, closes every submitted operation under the write lock,
 * waits for running operations to finish, and only then rethrows the first close failure (if
 * any) so cleanup always runs to completion.
 *
 * @throws SqlExecutionException if closing any operation failed
 */
public void close() {
    stateLock.writeLock().lock();
    Exception closeException = null;
    try {
        isRunning = false;
        // close all operations; the first failure is remembered instead of aborting cleanup
        IOUtils.closeAll(submittedOperations.values(), Throwable.class);
    } catch (Exception e) {
        closeException = e;
    } finally {
        submittedOperations.clear();
        stateLock.writeLock().unlock();
    }
    // wait all operations closed
    try {
        operationLock.acquire();
    } catch (Exception e) {
        LOG.error("Failed to wait all operation closed.", e);
    } finally {
        operationLock.release();
    }
    LOG.debug("Closes the Operation Manager.");
    // surface the failure only after every cleanup step has run
    if (closeException != null) {
        throw new SqlExecutionException(
                "Failed to close the OperationManager.", closeException);
    }
}
Closes the {@link OperationManager} and all operations.
close
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/operation/OperationManager.java
Apache-2.0
/** Creates a {@link SessionManager} backed by the default implementation. */
static SessionManager create(DefaultContext defaultContext) {
    return new SessionManagerImpl(defaultContext);
}
Create the {@link SessionManager} with the default configuration.
create
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/session/SessionManager.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/service/session/SessionManager.java
Apache-2.0
/**
 * Executes one scheduled refresh of a materialized table against the SQL gateway REST endpoint.
 *
 * <p>Opens a dedicated session, triggers the refresh operation, reads the job id from the first
 * result row, and records a success message on the quartz context. Operation, session and
 * client are always released in the finally block; close failures are only logged.
 *
 * @param context quartz execution context carrying the serialized {@link WorkflowInfo}
 * @throws JobExecutionException if any step of the refresh fails
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    SessionHandle sessionHandle = null;
    OperationHandle operationHandle = null;
    SqlGatewayRestClient gatewayRestClient = null;
    try {
        JobDataMap dataMap = context.getJobDetail().getJobDataMap();
        String workflowJsonStr = dataMap.getString(WORKFLOW_INFO);
        WorkflowInfo workflowInfo = fromJson(workflowJsonStr, WorkflowInfo.class);

        LOG.info("Execute refresh operation for workflow: {}.", workflowInfo);
        String schedulerTime = dateToString(context.getScheduledFireTime());
        gatewayRestClient = new SqlGatewayRestClient(workflowInfo.getRestEndpointUrl());
        // one session per fire time, named after the table and the scheduled time
        sessionHandle =
                gatewayRestClient.openSession(
                        String.format(
                                "%s-quartz-refresh-session-%s",
                                workflowInfo.getMaterializedTableIdentifier(), schedulerTime),
                        workflowInfo.getInitConfig());
        operationHandle =
                gatewayRestClient.refreshMaterializedTable(
                        sessionHandle,
                        workflowInfo.getMaterializedTableIdentifier(),
                        schedulerTime,
                        workflowInfo.getDynamicOptions(),
                        Collections.emptyMap(),
                        workflowInfo.getExecutionConfig());
        List<RowData> results =
                gatewayRestClient.fetchOperationAllResults(sessionHandle, operationHandle);
        // the refresh operation result's first row, first column carries the job id
        String jobId = results.get(0).getString(0).toString();
        LOG.info(
                "Successfully execute refresh operation for materialized table: {} with job id: {}.",
                workflowInfo.getMaterializedTableIdentifier(),
                jobId);
        context.setResult(
                "Successfully execute refresh operation for materialized table: "
                        + workflowInfo.getMaterializedTableIdentifier()
                        + " with job id: "
                        + jobId);
        // TODO wait for the job to finish
    } catch (Exception e) {
        LOG.error("Failed to execute refresh operation for workflow.", e);
        throw new JobExecutionException(e.getMessage(), e);
    } finally {
        // best-effort cleanup in reverse acquisition order: operation, session, client
        try {
            if (gatewayRestClient != null) {
                if (operationHandle != null) {
                    gatewayRestClient.closeOperation(sessionHandle, operationHandle);
                }
                if (sessionHandle != null) {
                    gatewayRestClient.closeSession(sessionHandle);
                }
                gatewayRestClient.close();
            }
        } catch (Exception e) {
            LOG.error("Failed to close session.", e);
        }
    }
}
The {@link Job} implementation for embedded quartz scheduler.
execute
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/workflow/scheduler/EmbeddedQuartzScheduler.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/workflow/scheduler/EmbeddedQuartzScheduler.java
Apache-2.0
/**
 * Converts a legacy {@link Date} to a {@link LocalDateTime} in the system default time zone.
 *
 * @param date date
 * @return local datetime
 */
private static LocalDateTime date2LocalDateTime(Date date) {
    Instant instant = Instant.ofEpochMilli(date.getTime());
    return LocalDateTime.ofInstant(instant, ZoneId.systemDefault());
}
Convert date to local datetime. @param date date @return local datetime
date2LocalDateTime
java
apache/flink
flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/workflow/scheduler/QuartzSchedulerUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/main/java/org/apache/flink/table/gateway/workflow/scheduler/QuartzSchedulerUtils.java
Apache-2.0
/**
 * Runs the given SQL statements and returns the printed results, concatenated in order.
 *
 * <p>Each entry echoes the statement's comment and SQL, followed by either the statement output
 * or an error tag wrapping the stringified failure.
 *
 * @param statements the SQL statements to run
 * @return the stringified results
 */
protected String runStatements(List<TestSqlStatement> statements) throws Exception {
    StringBuilder output = new StringBuilder();
    for (TestSqlStatement statement : statements) {
        output.append(statement.getComment()).append(statement.getSql());
        String sql = statement.getSql().trim();
        // drop the trailing statement terminator before execution
        if (sql.endsWith(";")) {
            sql = sql.substring(0, sql.length() - 1);
        }
        try {
            output.append(runSingleStatement(sql));
        } catch (Throwable t) {
            LOG.error("Failed to execute statements.", t);
            output.append(
                    AbstractSqlGatewayStatementITCase.Tag.ERROR.addTag(
                            removeRowNumber(stringifyException(t).trim()) + "\n"));
        }
    }
    return output.toString();
}
Returns printed results for each ran SQL statements. @param statements the SQL statements to run @return the stringified results
runStatements
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/AbstractSqlGatewayStatementITCase.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/AbstractSqlGatewayStatementITCase.java
Apache-2.0
/**
 * Tests that {@link SqlGatewayMessageHeaders} identifies the REST API version correctly, both
 * when the client requests a version explicitly and when it negotiates one.
 */
@Test
void testSqlGatewayMessageHeaders() throws Exception {
    // The header can't support V0, but sends request by V0
    assertThatThrownBy(
                    () ->
                            restClient.sendRequest(
                                    serverAddress.getHostName(),
                                    serverAddress.getPort(),
                                    headerNot0,
                                    EmptyMessageParameters.getInstance(),
                                    EmptyRequestBody.getInstance(),
                                    Collections.emptyList(),
                                    SqlGatewayRestAPIVersion.V0))
            .satisfies(
                    FlinkAssertions.anyCauseMatches(
                            IllegalArgumentException.class,
                            String.format(
                                    "The requested version V0 is not supported by the request (method=%s URL=%s). Supported versions are: %s.",
                                    headerNot0.getHttpMethod(),
                                    headerNot0.getTargetRestEndpointURL(),
                                    headerNot0.getSupportedAPIVersions().stream()
                                            .map(RestAPIVersion::getURLVersionPrefix)
                                            .collect(Collectors.joining(",")))));

    // The header only supports V0, sends request by V0
    CompletableFuture<TestResponse> specifiedVersionResponse =
            restClient.sendRequest(
                    serverAddress.getHostName(),
                    serverAddress.getPort(),
                    header0,
                    EmptyMessageParameters.getInstance(),
                    EmptyRequestBody.getInstance(),
                    Collections.emptyList(),
                    SqlGatewayRestAPIVersion.V0);
    TestResponse testResponse0 =
            specifiedVersionResponse.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
    assertThat(testResponse0.getStatus()).isEqualTo("V0");

    // The header only supports V0, lets the client get the version
    CompletableFuture<TestResponse> unspecifiedVersionResponse0 =
            restClient.sendRequest(
                    serverAddress.getHostName(),
                    serverAddress.getPort(),
                    header0,
                    EmptyMessageParameters.getInstance(),
                    EmptyRequestBody.getInstance(),
                    Collections.emptyList());
    TestResponse testResponse1 =
            unspecifiedVersionResponse0.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
    assertThat(testResponse1.getStatus()).isEqualTo("V0");

    // The header supports multiple versions, lets the client get the latest version as default
    CompletableFuture<TestResponse> unspecifiedVersionResponse1 =
            restClient.sendRequest(
                    serverAddress.getHostName(),
                    serverAddress.getPort(),
                    headerNot0,
                    EmptyMessageParameters.getInstance(),
                    EmptyRequestBody.getInstance(),
                    Collections.emptyList());
    TestResponse testResponse2 =
            unspecifiedVersionResponse1.get(timeout.toMillis(), TimeUnit.MILLISECONDS);
    assertThat(testResponse2.getStatus())
            .isEqualTo(
                    RestAPIVersion.getLatestVersion(headerNot0.getSupportedAPIVersions())
                            .name());
}
Test that {@link SqlGatewayMessageHeaders} can identify the version correctly.
testSqlGatewayMessageHeaders
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointITCase.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointITCase.java
Apache-2.0
/** Tests that requests of different versions are routed to the matching handlers. */
@Test
void testVersionSelection() throws Exception {
    for (SqlGatewayRestAPIVersion version : SqlGatewayRestAPIVersion.values()) {
        // V0 is not supported by this header, so it is skipped here.
        if (version == SqlGatewayRestAPIVersion.V0) {
            continue;
        }
        TestResponse testResponse =
                restClient
                        .sendRequest(
                                serverAddress.getHostName(),
                                serverAddress.getPort(),
                                headerNot0,
                                EmptyMessageParameters.getInstance(),
                                EmptyRequestBody.getInstance(),
                                Collections.emptyList(),
                                version)
                        .get(timeout.toMillis(), TimeUnit.MILLISECONDS);
        assertThat(testResponse.getStatus()).isEqualTo(version.name());
    }
}
Test that requests of different version are routed to correct handlers.
testVersionSelection
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointITCase.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointITCase.java
Apache-2.0
/**
 * Tests that {@link AbstractSqlGatewayRestHandler} uses the default endpoint version when the
 * url does not contain a version prefix.
 */
@Test
void testDefaultVersionRouting() throws Exception {
    assertThat(config.get(SecurityOptions.SSL_REST_ENABLED)).isFalse();
    OkHttpClient client = new OkHttpClient();
    Request request =
            new Request.Builder()
                    .url(serverEndpoint.getRestBaseUrl() + header0.getTargetRestEndpointURL())
                    .build();
    // Close the response to release the connection; the original leaked it and relied on a
    // bare `assert` for the null check, which is a no-op unless the JVM runs with -ea.
    try (Response response = client.newCall(request).execute()) {
        assertThat(response.body()).isNotNull();
        assertThat(response.body().string())
                .contains(SqlGatewayRestAPIVersion.getDefaultVersion().name());
    }
}
Test that {@link AbstractSqlGatewayRestHandler} will use the default endpoint version when the url does not contain version.
testDefaultVersionRouting
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointITCase.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointITCase.java
Apache-2.0
/**
 * Tests that {@link SqlGatewayRestEndpoint} prefers its own options when both runtime REST
 * options and sql gateway options are present in the delegating configuration.
 */
@Test
void testIfSqlGatewayRestEndpointUseOverrideOptions() throws ConfigurationException {
    Configuration flinkConfig = new Configuration();
    // runtime REST options that the gateway-specific options should shadow
    flinkConfig.setString(RestOptions.ADDRESS.key(), ADDRESS1);
    flinkConfig.setString(RestOptions.BIND_ADDRESS.key(), BIND_ADDRESS1);
    flinkConfig.setString(RestOptions.BIND_PORT.key(), BIND_PORT1);
    flinkConfig.setString(RestOptions.PORT.key(), PORT1);
    // sql gateway specific options
    flinkConfig.setString(SQL_GATEWAY_ADDRESS, ADDRESS2);
    flinkConfig.setString(SQL_GATEWAY_BIND_ADDRESS, BIND_ADDRESS2);
    flinkConfig.setString(SQL_GATEWAY_BIND_PORT, BIND_PORT2);
    flinkConfig.setString(SQL_GATEWAY_PORT, PORT2);

    RestServerEndpointConfiguration endpointConfiguration =
            RestServerEndpointConfiguration.fromConfiguration(getBaseConfig(flinkConfig));

    assertThat(endpointConfiguration.getRestAddress()).isEqualTo(ADDRESS2);
    assertThat(endpointConfiguration.getRestBindAddress()).isEqualTo(BIND_ADDRESS2);
    assertThat(endpointConfiguration.getRestBindPortRange()).isEqualTo(BIND_PORT2);
}
Test {@link SqlGatewayRestEndpoint} uses its own options when there are both runtime options and sql gateway options in the delegating configuration.
testIfSqlGatewayRestEndpointUseOverrideOptions
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointTest.java
Apache-2.0
@Test void testFallbackOptions() throws ConfigurationException { Configuration flinkConfig = new Configuration(); flinkConfig.setString(SQL_GATEWAY_ADDRESS, ADDRESS2); RestServerEndpointConfiguration result1 = RestServerEndpointConfiguration.fromConfiguration(getBaseConfig(flinkConfig)); // Test bind-port get the default value assertThat(result1.getRestBindPortRange()).isEqualTo("8083"); // Test bind-port fallback to port flinkConfig.setString(SQL_GATEWAY_PORT, PORT2); result1 = RestServerEndpointConfiguration.fromConfiguration(getBaseConfig(flinkConfig)); assertThat(result1.getRestBindPortRange()).isEqualTo(PORT2); }
Test {@link SqlGatewayRestEndpoint} uses fallback options correctly.
testFallbackOptions
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointTest.java
Apache-2.0
@Test void testRequiredOptions() throws ConfigurationException { // Empty options Configuration flinkConfig1 = new Configuration(); SqlGatewayEndpointFactoryUtils.DefaultEndpointFactoryContext context = new SqlGatewayEndpointFactoryUtils.DefaultEndpointFactoryContext( null, flinkConfig1, getEndpointConfig(flinkConfig1, IDENTIFIER)); SqlGatewayEndpointFactoryUtils.EndpointFactoryHelper endpointFactoryHelper = SqlGatewayEndpointFactoryUtils.createEndpointFactoryHelper( new SqlGatewayRestEndpointFactory(), context); assertThatThrownBy(endpointFactoryHelper::validate).isInstanceOf(ValidationException.class); // Only ADDRESS flinkConfig1.setString(SQL_GATEWAY_ADDRESS, ADDRESS2); RestServerEndpointConfiguration result = RestServerEndpointConfiguration.fromConfiguration(getBaseConfig(flinkConfig1)); assertThat(result.getRestAddress()).isEqualTo(ADDRESS2); // Only BIND PORT Configuration flinkConfig2 = new Configuration(); flinkConfig2.setString(SQL_GATEWAY_BIND_PORT, BIND_PORT2); context = new SqlGatewayEndpointFactoryUtils.DefaultEndpointFactoryContext( null, flinkConfig2, getEndpointConfig(flinkConfig2, IDENTIFIER)); endpointFactoryHelper = SqlGatewayEndpointFactoryUtils.createEndpointFactoryHelper( new SqlGatewayRestEndpointFactory(), context); assertThatThrownBy(endpointFactoryHelper::validate).isInstanceOf(ValidationException.class); // Only PORT Configuration flinkConfig3 = new Configuration(); flinkConfig3.setString(SQL_GATEWAY_PORT, PORT2); context = new SqlGatewayEndpointFactoryUtils.DefaultEndpointFactoryContext( null, flinkConfig3, getEndpointConfig(flinkConfig3, IDENTIFIER)); endpointFactoryHelper = SqlGatewayEndpointFactoryUtils.createEndpointFactoryHelper( new SqlGatewayRestEndpointFactory(), context); assertThatThrownBy(endpointFactoryHelper::validate).isInstanceOf(ValidationException.class); // ADDRESS and PORT flinkConfig1.setString(SQL_GATEWAY_PORT, PORT2); result = RestServerEndpointConfiguration.fromConfiguration(getBaseConfig(flinkConfig1)); 
assertThat(result.getRestAddress()).isEqualTo(ADDRESS2); assertThat(result.getRestBindPortRange()).isEqualTo(PORT2); // ADDRESS and PORT and BIND PORT flinkConfig1.setString(SQL_GATEWAY_BIND_PORT, BIND_PORT2); result = RestServerEndpointConfiguration.fromConfiguration(getBaseConfig(flinkConfig1)); assertThat(result.getRestAddress()).isEqualTo(ADDRESS2); assertThat(result.getRestBindPortRange()).isEqualTo(BIND_PORT2); // ADDRESS and BIND PORT Configuration flinkConfig4 = new Configuration(); flinkConfig4.setString(SQL_GATEWAY_ADDRESS, ADDRESS2); flinkConfig4.setString(SQL_GATEWAY_BIND_PORT, BIND_PORT2); result = RestServerEndpointConfiguration.fromConfiguration(getBaseConfig(flinkConfig1)); assertThat(result.getRestAddress()).isEqualTo(ADDRESS2); assertThat(result.getRestBindPortRange()).isEqualTo(BIND_PORT2); }
Test {@link SqlGatewayRestEndpoint} uses required options correctly.
testRequiredOptions
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/SqlGatewayRestEndpointTest.java
Apache-2.0
@Test void testGetInfoAndApiVersion() throws Exception { CompletableFuture<GetInfoResponseBody> response = sendRequest(getInfoHeaders, emptyParameters, emptyRequestBody); String productName = response.get().getProductName(); String version = response.get().getProductVersion(); assertEquals(GetInfoHandler.PRODUCT_NAME, productName); assertEquals(EnvironmentInformation.getVersion(), version); CompletableFuture<GetApiVersionResponseBody> response2 = sendRequest(getApiVersionHeaders, emptyParameters, emptyRequestBody); List<String> versions = response2.get().getVersions(); assertThat( Arrays.stream(SqlGatewayRestAPIVersion.values()) .filter(SqlGatewayRestAPIVersion::isStableVersion) .map(Enum::name) .collect(Collectors.toList())) .isEqualTo(versions); }
Test basic logic of handlers inherited from {@link AbstractSqlGatewayRestHandler} in util related cases.
testGetInfoAndApiVersion
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/UtilITCase.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/UtilITCase.java
Apache-2.0
@Override public List<Tuple2<RestHandlerSpecification, ChannelInboundHandler>> initializeHandlers( final CompletableFuture<String> localAddressFuture) { return super.initializeHandlers(localAddressFuture); }
Utility class to extract the {@link SqlGatewayMessageHeaders} that the {@link SqlGatewayRestEndpoint} supports.
initializeHandlers
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/DocumentingSqlGatewayRestEndpoint.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/DocumentingSqlGatewayRestEndpoint.java
Apache-2.0
public static String getSqlGatewayRestOptionFullName(String key) { return getSqlGatewayOptionPrefix(IDENTIFIER) + key; }
Get the full name of sql gateway rest endpoint options.
getSqlGatewayRestOptionFullName
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestEndpointTestUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestEndpointTestUtils.java
Apache-2.0
public static Configuration getBaseConfig(Configuration flinkConf) { SqlGatewayEndpointFactoryUtils.DefaultEndpointFactoryContext context = new SqlGatewayEndpointFactoryUtils.DefaultEndpointFactoryContext( null, flinkConf, getEndpointConfig(flinkConf, IDENTIFIER)); return rebuildRestEndpointOptions( context.getEndpointOptions(), context.getFlinkConfiguration().toMap()); }
Get the configuration used by SqlGatewayRestEndpoint.
getBaseConfig
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestEndpointTestUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestEndpointTestUtils.java
Apache-2.0
public static Configuration getFlinkConfig( String address, String bindAddress, String portRange) { final Configuration config = new Configuration(); if (address != null) { config.setString( getSqlGatewayRestOptionFullName(SqlGatewayRestOptions.ADDRESS.key()), address); } if (bindAddress != null) { config.setString( getSqlGatewayRestOptionFullName(SqlGatewayRestOptions.BIND_ADDRESS.key()), bindAddress); } if (portRange != null) { config.setString( getSqlGatewayRestOptionFullName(SqlGatewayRestOptions.PORT.key()), portRange); } return config; }
Create the configuration generated from config.yaml.
getFlinkConfig
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestEndpointTestUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/SqlGatewayRestEndpointTestUtils.java
Apache-2.0
public static Builder builder( Configuration configuration, SqlGatewayService sqlGatewayService) { return new Builder(configuration, sqlGatewayService); }
Utility for setting up a rest server based on {@link SqlGatewayRestEndpoint} with a given set of handlers.
builder
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/TestingSqlGatewayRestEndpoint.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/rest/util/TestingSqlGatewayRestEndpoint.java
Apache-2.0
@Override public void uncaughtException(Thread t, Throwable e) { // ignore error LOG.error("Thread '{}' produced an uncaught exception. Ignore...", t.getName(), e); }
Handler to log the exception and exits.
uncaughtException
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/service/utils/IgnoreExceptionHandler.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/service/utils/IgnoreExceptionHandler.java
Apache-2.0
@Test void testSerDe() throws Exception { EmbeddedRefreshHandler expected = new EmbeddedRefreshHandler("a", "b"); byte[] serBytes = EmbeddedRefreshHandlerSerializer.INSTANCE.serialize(expected); EmbeddedRefreshHandler actual = EmbeddedRefreshHandlerSerializer.INSTANCE.deserialize( serBytes, this.getClass().getClassLoader()); assertThat(actual).isEqualTo(expected); }
Tests for {@link EmbeddedRefreshHandler} and {@link EmbeddedRefreshHandlerSerializer}.
testSerDe
java
apache/flink
flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/workflow/EmbeddedRefreshHandlerTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway/src/test/java/org/apache/flink/table/gateway/workflow/EmbeddedRefreshHandlerTest.java
Apache-2.0
public static EndpointFactoryHelper createEndpointFactoryHelper( SqlGatewayEndpointFactory endpointFactory, SqlGatewayEndpointFactory.Context context) { return new EndpointFactoryHelper(endpointFactory, context.getEndpointOptions()); }
Creates a utility that helps to validate options for a {@link SqlGatewayEndpointFactory}. <p>Note: This utility checks for left-over options in the final step.
createEndpointFactoryHelper
java
apache/flink
flink-table/flink-sql-gateway-api/src/main/java/org/apache/flink/table/gateway/api/endpoint/SqlGatewayEndpointFactoryUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-gateway-api/src/main/java/org/apache/flink/table/gateway/api/endpoint/SqlGatewayEndpointFactoryUtils.java
Apache-2.0
public static ColumnInfo fromLogicalType(String columnName, LogicalType type) { Builder builder = new Builder() .columnName(columnName) .nullable(type.isNullable()) .signed(type.is(LogicalTypeFamily.NUMERIC)) .columnTypeName(type.asSummaryString()); if (type instanceof BooleanType) { // "true" or "false" builder.columnType(Types.BOOLEAN).columnDisplaySize(5); } else if (type instanceof TinyIntType) { builder.columnType(Types.TINYINT).precision(3).scale(0).columnDisplaySize(4); } else if (type instanceof SmallIntType) { builder.columnType(Types.SMALLINT).precision(5).scale(0).columnDisplaySize(6); } else if (type instanceof IntType) { builder.columnType(Types.INTEGER).precision(10).scale(0).columnDisplaySize(11); } else if (type instanceof BigIntType) { builder.columnType(Types.BIGINT).precision(19).scale(0).columnDisplaySize(20); } else if (type instanceof FloatType) { builder.columnType(Types.FLOAT).precision(9).scale(0).columnDisplaySize(16); } else if (type instanceof DoubleType) { builder.columnType(Types.DOUBLE).precision(17).scale(0).columnDisplaySize(24); } else if (type instanceof DecimalType) { DecimalType decimalType = (DecimalType) type; builder.columnType(Types.DECIMAL) .columnDisplaySize(decimalType.getPrecision() + 2) // dot and sign .precision(decimalType.getPrecision()) .scale(decimalType.getScale()); } else if (type instanceof CharType) { CharType charType = (CharType) type; builder.columnType(Types.CHAR) .scale(0) .precision(charType.getLength()) .columnDisplaySize(charType.getLength()); } else if (type instanceof VarCharType) { builder.columnType(Types.VARCHAR) .scale(0) .precision(VARBINARY_MAX) .columnDisplaySize(VARBINARY_MAX); } else if (type instanceof BinaryType) { BinaryType binaryType = (BinaryType) type; builder.columnType(Types.BINARY) .scale(0) .precision(binaryType.getLength()) .columnDisplaySize(binaryType.getLength()); } else if (type instanceof VarBinaryType) { builder.columnType(Types.VARBINARY) .scale(0) .precision(VARBINARY_MAX) 
.columnDisplaySize(VARBINARY_MAX); } else if (type instanceof DateType) { builder.columnType(Types.DATE).scale(0).columnDisplaySize(DATE_MAX); } else if (type instanceof TimeType) { TimeType timeType = (TimeType) type; builder.columnType(Types.TIME) .precision(timeType.getPrecision()) .scale(0) .columnDisplaySize(TIME_MAX); } else if (type instanceof TimestampType) { TimestampType timestampType = (TimestampType) type; builder.columnType(Types.TIMESTAMP) .precision(timestampType.getPrecision()) .scale(0) .columnDisplaySize(TIMESTAMP_MAX); } else if (type instanceof LocalZonedTimestampType) { LocalZonedTimestampType localZonedTimestampType = (LocalZonedTimestampType) type; builder.columnType(Types.TIMESTAMP) .precision(localZonedTimestampType.getPrecision()) .scale(0) .columnDisplaySize(TIMESTAMP_MAX); } else if (type instanceof ZonedTimestampType) { ZonedTimestampType zonedTimestampType = (ZonedTimestampType) type; builder.columnType(Types.TIMESTAMP_WITH_TIMEZONE) .precision(zonedTimestampType.getPrecision()) .scale(0) .columnDisplaySize(TIMESTAMP_WITH_TIME_ZONE_MAX); } else if (type instanceof ArrayType) { builder.columnType(Types.ARRAY) .scale(0) .precision(STRUCT_MAX) .columnDisplaySize(STRUCT_MAX); } else if (type instanceof MapType) { // Use java object for map while there's no map in Types at the moment builder.columnType(Types.JAVA_OBJECT) .scale(0) .precision(STRUCT_MAX) .columnDisplaySize(STRUCT_MAX); } else if (type instanceof RowType) { builder.columnType(Types.STRUCT) .precision(STRUCT_MAX) .scale(0) .columnDisplaySize(STRUCT_MAX); } else { throw new RuntimeException(String.format("Not supported type[%s]", type)); } return builder.build(); }
Build column info from given logical type. @param columnName the column name @param type the logical type @return the result column info
fromLogicalType
java
apache/flink
flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/ColumnInfo.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/ColumnInfo.java
Apache-2.0
@Override public int getMajorVersion() { return DRIVER_VERSION_MAJOR; }
Major version of flink. @return the major version
getMajorVersion
java
apache/flink
flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/FlinkDriver.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/FlinkDriver.java
Apache-2.0
static String getType(int type) throws SQLException { // see javax.sql.rowset.RowSetMetaDataImpl switch (type) { case Types.NUMERIC: case Types.DECIMAL: return BigDecimal.class.getName(); case Types.BOOLEAN: case Types.BIT: return Boolean.class.getName(); case Types.TINYINT: return Byte.class.getName(); case Types.SMALLINT: return Short.class.getName(); case Types.INTEGER: return Integer.class.getName(); case Types.BIGINT: return Long.class.getName(); case Types.REAL: case Types.FLOAT: return Float.class.getName(); case Types.DOUBLE: return Double.class.getName(); case Types.VARCHAR: case Types.CHAR: return String.class.getName(); case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: return "byte[]"; case Types.DATE: return Date.class.getName(); case Types.TIME: return Time.class.getName(); case Types.TIMESTAMP: return Timestamp.class.getName(); case Types.TIMESTAMP_WITH_TIMEZONE: return OffsetDateTime.class.getName(); case Types.JAVA_OBJECT: return Map.class.getName(); case Types.ARRAY: return Array.class.getName(); case Types.STRUCT: return RowData.class.getName(); } throw new SQLFeatureNotSupportedException( String.format("Not support data type [%s]", type)); }
Get column type name according type in {@link Types}. @param type the type in {@link Types} @return type class name
getType
java
apache/flink
flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/FlinkResultSetMetaData.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/FlinkResultSetMetaData.java
Apache-2.0
public static FlinkResultSet createCatalogsResultSet( Statement statement, StatementResult result) { List<RowData> catalogs = new ArrayList<>(); result.forEachRemaining(catalogs::add); catalogs.sort(Comparator.comparing(v -> v.getString(0))); return new FlinkResultSet( statement, new CollectionResultIterator(catalogs.iterator()), ResolvedSchema.of(TABLE_CAT_COLUMN)); }
Create result set for catalogs. The schema columns are: <ul> <li>TABLE_CAT String => catalog name. </ul> <p>The results are ordered by catalog name. @param statement The statement for database meta data @param result The result for catalogs @return a ResultSet object in which each row has a single String column that is a catalog name
createCatalogsResultSet
java
apache/flink
flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/utils/DatabaseMetaDataUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/utils/DatabaseMetaDataUtils.java
Apache-2.0
public static FlinkResultSet createSchemasResultSet( Statement statement, List<String> catalogs, Map<String, List<String>> catalogSchemas) { List<RowData> schemaWithCatalogList = new ArrayList<>(); List<String> catalogList = new ArrayList<>(catalogs); catalogList.sort(String::compareTo); for (String catalog : catalogList) { List<String> schemas = catalogSchemas.get(catalog); schemas.sort(String::compareTo); schemas.forEach( s -> schemaWithCatalogList.add( GenericRowData.of( StringData.fromString(s), StringData.fromString(catalog)))); } return new FlinkResultSet( statement, new CollectionResultIterator(schemaWithCatalogList.iterator()), ResolvedSchema.of(TABLE_SCHEM_COLUMN, TABLE_CATALOG_COLUMN)); }
Create result set for schemas. The schema columns are: <ul> <li>TABLE_SCHEM String => schema name <li>TABLE_CATALOG String => catalog name (may be null) </ul> <p>The results are ordered by TABLE_CATALOG and TABLE_SCHEM. @param statement The statement for database meta data @param catalogs The catalog list @param catalogSchemas The catalog with schema list @return a ResultSet object in which each row is a schema description
createSchemasResultSet
java
apache/flink
flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/utils/DatabaseMetaDataUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/utils/DatabaseMetaDataUtils.java
Apache-2.0
public static Map<String, String> fromProperties(Properties properties) { Map<String, String> map = new HashMap<>(); Enumeration<?> e = properties.propertyNames(); while (e.hasMoreElements()) { String key = (String) e.nextElement(); map.put(key, properties.getProperty(key)); } return map; }
Generate map from given properties. @param properties the given properties @return the result map
fromProperties
java
apache/flink
flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/utils/DriverUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-jdbc-driver/src/main/java/org/apache/flink/table/jdbc/utils/DriverUtils.java
Apache-2.0
public SqlIdentifier getModelIdentifier() { return operand(0); }
SqlExplicitModelCall is a SQL call that represents an explicit model.
getModelIdentifier
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/calcite/sql/SqlExplicitModelCall.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/calcite/sql/SqlExplicitModelCall.java
Apache-2.0
private static Optional<RelDataType> explicitTypeSpec(SqlOperatorBinding opBinding) { if (opBinding.getOperandCount() >= 6) { return Optional.of(opBinding.getOperandType(5)); } return Optional.empty(); }
Returns the optional explicit returning type specification. *
explicitTypeSpec
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/calcite/sql/fun/SqlJsonQueryFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/calcite/sql/fun/SqlJsonQueryFunction.java
Apache-2.0
public static void validateAndChangeColumnNullability( List<SqlTableConstraint> tableConstraints, SqlNodeList columnList) throws SqlValidateException { List<SqlTableConstraint> fullConstraints = getFullConstraints(tableConstraints, columnList); if (fullConstraints.stream().filter(SqlTableConstraint::isPrimaryKey).count() > 1) { throw new SqlValidateException( fullConstraints.get(1).getParserPosition(), "Duplicate primary key definition"); } for (SqlTableConstraint constraint : fullConstraints) { validate(constraint); Set<String> primaryKeyColumns = Arrays.stream(constraint.getColumnNames()).collect(Collectors.toSet()); // rewrite primary key's nullability to false // e.g. CREATE TABLE tbl (`a` STRING PRIMARY KEY NOT ENFORCED, ...) or // CREATE TABLE tbl (`a` STRING, PRIMARY KEY(`a`) NOT ENFORCED) will change `a` // to STRING NOT NULL for (SqlNode column : columnList) { SqlTableColumn tableColumn = (SqlTableColumn) column; if (tableColumn instanceof SqlTableColumn.SqlRegularColumn && primaryKeyColumns.contains(tableColumn.getName().getSimple())) { SqlTableColumn.SqlRegularColumn regularColumn = (SqlTableColumn.SqlRegularColumn) column; SqlDataTypeSpec notNullType = regularColumn.getType().withNullable(false); regularColumn.setType(notNullType); } } } }
Check constraints and change the nullability of primary key columns. @throws SqlValidateException if encountered duplicate primary key constraints, or the constraint is enforced or unique.
validateAndChangeColumnNullability
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/SqlConstraintValidator.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/SqlConstraintValidator.java
Apache-2.0
public static LinkedHashMap<String, String> getPartitionKVs(SqlNodeList partitionSpec) { if (partitionSpec == null) { return null; } LinkedHashMap<String, String> ret = new LinkedHashMap<>(); if (partitionSpec.size() == 0) { return ret; } for (SqlNode node : partitionSpec.getList()) { SqlProperty sqlProperty = (SqlProperty) node; Comparable<?> comparable = SqlLiteral.value(sqlProperty.getValue()); String value = comparable instanceof NlsString ? ((NlsString) comparable).getValue() : comparable.toString(); ret.put(sqlProperty.getKey().getSimple(), value); } return ret; }
Get static partition key value pair as strings. <p>For character literals we return the unquoted and unescaped values. For other types we use {@link SqlLiteral#toString()} to get the string format of the value literal. @return the mapping of column names to values of partition specifications, returns an empty map if there is no partition specifications.
getPartitionKVs
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/SqlPartitionUtils.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/SqlPartitionUtils.java
Apache-2.0
public SqlNodeList getPartitionSpec() { return partitionSpec; }
Returns the partition spec if the ALTER should be applied to partitions, and null otherwise.
getPartitionSpec
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTable.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTable.java
Apache-2.0
public boolean ifTableExists() { return ifTableExists; }
Whether to ignore the error if the table doesn't exist. @return true when IF EXISTS is specified.
ifTableExists
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTable.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTable.java
Apache-2.0
@Override public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { super.unparse(writer, leftPrec, rightPrec); writer.keyword("ADD"); // unparse table schema and distribution unparseSchemaAndDistribution(writer, leftPrec, rightPrec); }
SqlNode to describe ALTER TABLE [IF EXISTS] table_name ADD column/constraint/watermark clause. <p>Example: DDL like the below for add column/constraint/watermark. <pre>{@code -- add single column ALTER TABLE mytable ADD new_column STRING COMMENT 'new_column docs'; -- add multiple columns, constraint, and watermark ALTER TABLE mytable ADD ( log_ts STRING COMMENT 'log timestamp string' FIRST, ts AS TO_TIMESTAMP(log_ts) AFTER log_ts, col_meta int metadata from 'mk1' virtual AFTER col_b, PRIMARY KEY (id) NOT ENFORCED, WATERMARK FOR ts AS ts - INTERVAL '3' SECOND ); }</pre>
unparse
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTableAdd.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTableAdd.java
Apache-2.0
@Override public void unparse(SqlWriter writer, int leftPrec, int rightPrec) { super.unparse(writer, leftPrec, rightPrec); writer.keyword("MODIFY"); // unparse table schema and distribution unparseSchemaAndDistribution(writer, leftPrec, rightPrec); }
SqlNode to describe ALTER TABLE [IF EXISTS] table_name MODIFY column/constraint/watermark clause. <p>Example: DDL like the below for modify column/constraint/watermark. <pre>{@code -- modify single column ALTER TABLE mytable MODIFY new_column STRING COMMENT 'new_column docs'; -- modify multiple columns, constraint, and watermark ALTER TABLE mytable MODIFY ( log_ts STRING COMMENT 'log timestamp string' FIRST, ts AS TO_TIMESTAMP(log_ts) AFTER log_ts, col_meta int metadata from 'mk1' virtual AFTER col_b, PRIMARY KEY (id) NOT ENFORCED, WATERMARK FOR ts AS ts - INTERVAL '3' SECOND ); }</pre>
unparse
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTableModify.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAlterTableModify.java
Apache-2.0
public String[] fullTableName() { return tableName.names.toArray(new String[0]); }
ANALYZE TABLE to compute the statistics for a given table.
fullTableName
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAnalyzeTable.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlAnalyzeTable.java
Apache-2.0
@Override public String toString() { return this.digest; }
Enumeration of materialized table refresh mode.
toString
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlRefreshMode.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/SqlRefreshMode.java
Apache-2.0
@Override public SqlOperator getOperator() { return OPERATOR; }
Creates a table constraint node. @param constraintName Constraint name @param uniqueSpec Unique specification @param columns Column list on which the constraint enforces or null if this is a column constraint @param enforcement Whether the constraint is enforced @param isTableConstraint Whether this is a table constraint @param pos Parser position
getOperator
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/constraint/SqlTableConstraint.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/constraint/SqlTableConstraint.java
Apache-2.0
public boolean isUnique() { return this.uniqueSpec.getValueAs(SqlUniqueSpec.class) == SqlUniqueSpec.UNIQUE; }
Returns whether the constraint is UNIQUE.
isUnique
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/constraint/SqlTableConstraint.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/constraint/SqlTableConstraint.java
Apache-2.0
public boolean isPrimaryKey() { return this.uniqueSpec.getValueAs(SqlUniqueSpec.class) == SqlUniqueSpec.PRIMARY_KEY; }
Returns whether the constraint is PRIMARY KEY.
isPrimaryKey
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/constraint/SqlTableConstraint.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/ddl/constraint/SqlTableConstraint.java
Apache-2.0
public SqlNodeList getStaticPartitions() { return staticPartitions; }
@return the list of partition key-value pairs, returns empty if there is no partition specifications.
getStaticPartitions
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dml/RichSqlInsert.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dml/RichSqlInsert.java
Apache-2.0
public LinkedHashMap<String, String> getStaticPartitionKVs() { LinkedHashMap<String, String> ret = new LinkedHashMap<>(); if (this.staticPartitions.size() == 0) { return ret; } for (SqlNode node : this.staticPartitions.getList()) { SqlProperty sqlProperty = (SqlProperty) node; Comparable comparable = SqlLiteral.value(sqlProperty.getValue()); String value = comparable instanceof NlsString ? ((NlsString) comparable).getValue() : comparable.toString(); ret.put(sqlProperty.getKey().getSimple(), value); } return ret; }
Get static partition key value pair as strings. <p>For character literals we return the unquoted and unescaped values. For other types we use {@link SqlLiteral#toString()} to get the string format of the value literal. If the string format is not what you need, use {@link #getStaticPartitions()}. @return the mapping of column names to values of partition specifications, returns an empty map if there is no partition specifications.
getStaticPartitionKVs
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dml/RichSqlInsert.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/dml/RichSqlInsert.java
Apache-2.0
@Override public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) { assert call.operandCount() == 2; final SqlWriter.Frame frame = writer.startFunCall(getName()); call.operand(0).unparse(writer, 0, 0); writer.sep("AS"); if (call.operand(1) instanceof SqlIntervalQualifier) { writer.sep("INTERVAL"); } call.operand(1).unparse(writer, 0, 0); writer.endFunCall(frame); }
This is the unresolved version of {@code TRY_CAST}. We need it only for unparse.
unparse
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/expr/SqlUnresolvedTryCastFunction.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/expr/SqlUnresolvedTryCastFunction.java
Apache-2.0
private RelDataType createCollectionType( RelDataType elementType, RelDataTypeFactory typeFactory) { switch (collectionTypeName) { case MULTISET: return typeFactory.createMultisetType(elementType, -1); case ARRAY: return typeFactory.createArrayType(elementType, -1); default: throw Util.unexpected(collectionTypeName); } }
Create collection data type. @param elementType Type of the collection element @param typeFactory Type factory @return The collection data type, or throw exception if the collection type name does not belong to {@code SqlTypeName} enumerations
createCollectionType
java
apache/flink
flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/type/ExtendedSqlCollectionTypeNameSpec.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/main/java/org/apache/flink/sql/parser/type/ExtendedSqlCollectionTypeNameSpec.java
Apache-2.0
/**
 * Builds the parser fixture with an unparsing tester so every parsed expression is also
 * round-tripped through unparse, using the Flink SQL parser factory.
 */
public SqlParserFixture fixture() {
    final SqlParserFixture withTester = super.fixture().withTester(new UnparsingTesterImpl());
    return withTester.withConfig(c -> c.withParserFactory(FlinkSqlParserImpl.FACTORY));
}
Extension to {@link FlinkSqlParserImplTest} that ensures that every expression can un-parse successfully.
fixture
java
apache/flink
flink-table/flink-sql-parser/src/test/java/org/apache/flink/sql/parser/FlinkSqlUnParserTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/test/java/org/apache/flink/sql/parser/FlinkSqlUnParserTest.java
Apache-2.0
/**
 * Builds the parser fixture with an unparsing tester so every parsed expression is also
 * round-tripped through unparse, using the Flink SQL parser factory.
 */
public SqlParserFixture fixture() {
    final SqlParserFixture withTester =
            super.fixture().withTester(new SqlParserTest.UnparsingTesterImpl());
    return withTester.withConfig(c -> c.withParserFactory(FlinkSqlParserImpl.FACTORY));
}
Extension to {@link MaterializedTableStatementParserTest} that ensures that every expression can un-parse successfully.
fixture
java
apache/flink
flink-table/flink-sql-parser/src/test/java/org/apache/flink/sql/parser/MaterializedTableStatementUnParserTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/test/java/org/apache/flink/sql/parser/MaterializedTableStatementUnParserTest.java
Apache-2.0
/** Verifies that STATEMENT is a reserved keyword: a keyword that is not non-reserved. */
@DisplayName("STATEMENT is a reserved keyword")
@Test
void testSTATEMENT() {
    final String keyword = "STATEMENT";
    assertThat(PARSER_METADATA.isKeyword(keyword)).isTrue();
    assertThat(PARSER_METADATA.isNonReservedKeyword(keyword)).isFalse();
}
Test class to check parser keywords.
testSTATEMENT
java
apache/flink
flink-table/flink-sql-parser/src/test/java/org/apache/flink/sql/parser/ReservedKeywordTest.java
https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/test/java/org/apache/flink/sql/parser/ReservedKeywordTest.java
Apache-2.0
/**
 * Writes this plan to a file using the JSON representation, refusing to overwrite: the
 * operation fails if the file already exists, even with identical content.
 *
 * @param file the target file
 * @throws TableException if the file cannot be written
 */
default void writeToFile(File file) {
    // false = do not ignore an existing file, i.e. never overwrite.
    writeToFile(file, false);
}
Writes this plan to a file using the JSON representation. This operation will fail if the file already exists, even if the content is different from this plan. @param file the target file @throws TableException if the file cannot be written.
writeToFile
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/CompiledPlan.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/CompiledPlan.java
Apache-2.0
/**
 * Like {@link #asJsonString()}, but prints the result to {@link System#out}.
 *
 * @return this plan, for call chaining
 */
default CompiledPlan printJsonString() {
    final String json = asJsonString();
    System.out.println(json);
    return this;
}
Like {@link #asJsonString()}, but prints the result to {@link System#out}.
printJsonString
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/CompiledPlan.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/CompiledPlan.java
Apache-2.0
/**
 * Creates a default {@link EnvironmentSettings} in streaming execution mode, in which both
 * bounded and unbounded data streams can be processed. Shortcut for the builder obtained
 * via {@link EnvironmentSettings#newInstance()}.
 */
public static EnvironmentSettings inStreamingMode() {
    return newInstance().inStreamingMode().build();
}
Creates a default instance of {@link EnvironmentSettings} in streaming execution mode. <p>In this mode, both bounded and unbounded data streams can be processed. <p>This method is a shortcut for creating a {@link TableEnvironment} with little code. Use the builder provided in {@link EnvironmentSettings#newInstance()} for advanced settings.
inStreamingMode
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Creates a default {@link EnvironmentSettings} in batch execution mode, which is highly
 * optimized for batch scenarios and accepts only bounded data streams. Shortcut for the
 * builder obtained via {@link EnvironmentSettings#newInstance()}.
 */
public static EnvironmentSettings inBatchMode() {
    return newInstance().inBatchMode().build();
}
Creates a default instance of {@link EnvironmentSettings} in batch execution mode. <p>This mode is highly optimized for batch scenarios. Only bounded data streams can be processed in this mode. <p>This method is a shortcut for creating a {@link TableEnvironment} with little code. Use the builder provided in {@link EnvironmentSettings#newInstance()} for advanced settings.
inBatchMode
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/** Creates a builder for assembling an {@link EnvironmentSettings} instance. */
public static Builder newInstance() {
    final Builder builder = new Builder();
    return builder;
}
Creates a builder for creating an instance of {@link EnvironmentSettings}.
newInstance
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Gets the configured name of the initial catalog created when instantiating a
 * {@link TableEnvironment}.
 */
public String getBuiltInCatalogName() {
    final String catalogName = configuration.get(TABLE_CATALOG_NAME);
    return catalogName;
}
Gets the specified name of the initial catalog to be created when instantiating a {@link TableEnvironment}.
getBuiltInCatalogName
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Gets the configured name of the default database in the initial catalog created when
 * instantiating a {@link TableEnvironment}.
 */
public String getBuiltInDatabaseName() {
    final String databaseName = configuration.get(TABLE_DATABASE_NAME);
    return databaseName;
}
Gets the specified name of the default database in the initial catalog to be created when instantiating a {@link TableEnvironment}.
getBuiltInDatabaseName
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Tells whether the {@link TableEnvironment} should work in streaming mode ({@code true})
 * or batch mode ({@code false}).
 */
public boolean isStreamingMode() {
    // Enum identity comparison; constant first to make the comparison null-safe by shape.
    return STREAMING == configuration.get(RUNTIME_MODE);
}
Tells if the {@link TableEnvironment} should work in a batch or streaming mode.
isStreamingMode
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Configures the components to work in batch mode (streaming mode is the default).
 *
 * @return this builder, for call chaining
 */
public Builder inBatchMode() {
    configuration.set(RUNTIME_MODE, BATCH);
    return this;
}
Sets that the components should work in a batch mode. Streaming mode by default.
inBatchMode
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Configures the components to work in streaming mode (this is already the default).
 *
 * @return this builder, for call chaining
 */
public Builder inStreamingMode() {
    configuration.set(RUNTIME_MODE, STREAMING);
    return this;
}
Sets that the components should work in a streaming mode. Enabled by default.
inStreamingMode
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Specifies the name of the initial in-memory catalog created when instantiating a
 * {@link TableEnvironment}. It stores all temporary objects without a serializable
 * representation and is the initial value for the current catalog (alterable via
 * {@link TableEnvironment#useCatalog(String)}).
 *
 * @param builtInCatalogName name of the built-in catalog
 * @return this builder, for call chaining
 */
public Builder withBuiltInCatalogName(String builtInCatalogName) {
    configuration.set(TABLE_CATALOG_NAME, builtInCatalogName);
    return this;
}
Specifies the name of the initial catalog to be created when instantiating a {@link TableEnvironment}. <p>This catalog is an in-memory catalog that will be used to store all temporary objects (e.g. from {@link TableEnvironment#createTemporaryView(String, Table)} or {@link TableEnvironment#createTemporarySystemFunction(String, UserDefinedFunction)}) that cannot be persisted because they have no serializable representation. <p>It will also be the initial value for the current catalog which can be altered via {@link TableEnvironment#useCatalog(String)}. <p>Default: {@link TableConfigOptions#TABLE_CATALOG_NAME}{@code .defaultValue()}.
withBuiltInCatalogName
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Specifies the name of the default in-memory database in the initial catalog created when
 * instantiating a {@link TableEnvironment}. It stores all temporary objects without a
 * serializable representation and is the initial value for the current database (alterable
 * via {@link TableEnvironment#useDatabase(String)}).
 *
 * @param builtInDatabaseName name of the built-in database
 * @return this builder, for call chaining
 */
public Builder withBuiltInDatabaseName(String builtInDatabaseName) {
    configuration.set(TABLE_DATABASE_NAME, builtInDatabaseName);
    return this;
}
Specifies the name of the default database in the initial catalog to be created when instantiating a {@link TableEnvironment}. <p>This database is an in-memory database that will be used to store all temporary objects (e.g. from {@link TableEnvironment#createTemporaryView(String, Table)} or {@link TableEnvironment#createTemporarySystemFunction(String, UserDefinedFunction)}) that cannot be persisted because they have no serializable representation. <p>It will also be the initial value for the current database which can be altered via {@link TableEnvironment#useDatabase(String)}. <p>Default: {@link TableConfigOptions#TABLE_DATABASE_NAME}{@code .defaultValue()}.
withBuiltInDatabaseName
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Adds extra configuration to the resulting {@link EnvironmentSettings}, overwriting any
 * previously set options with the same keys.
 *
 * @param configuration the options to merge in
 * @return this builder, for call chaining
 */
public Builder withConfiguration(Configuration configuration) {
    this.configuration.addAll(configuration);
    return this;
}
Add extra configuration to {@link EnvironmentSettings}.
withConfiguration
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Customizes how Table API pipelines are serialized to a SQL string — useful e.g. for
 * controlling the serialization of inline functions.
 *
 * @param sqlFactory the factory to use for SQL serialization
 * @return this builder, for call chaining
 */
public Builder withSqlFactory(SqlFactory sqlFactory) {
    this.sqlFactory = sqlFactory;
    return this;
}
Provides a way to customize the process of serializing Table API to a SQL string. This is useful, for example, for customizing the serialization of inline functions.
withSqlFactory
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Returns an immutable {@link EnvironmentSettings} assembled from this builder.
 *
 * <p>If no class loader was configured, the current thread's context class loader is used
 * (and remembered on the builder, so repeated builds reuse it).
 */
public EnvironmentSettings build() {
    if (classLoader == null) {
        classLoader = Thread.currentThread().getContextClassLoader();
    }
    return new EnvironmentSettings(configuration, classLoader, catalogStore, sqlFactory);
}
Returns an immutable instance of {@link EnvironmentSettings}.
build
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/EnvironmentSettings.java
Apache-2.0
/**
 * Returns the AST of this object and the execution plan to compute its result, rendered in
 * {@link ExplainFormat#TEXT}.
 *
 * @param extraDetails extra explain details to include, e.g. estimated cost or the
 *     changelog mode for streaming
 * @return AST and the execution plan
 */
default String explain(ExplainDetail... extraDetails) {
    return explain(ExplainFormat.TEXT, extraDetails);
}
Returns the AST of this object and the execution plan to compute the result of the given statement. @param extraDetails The extra explain details which the result of this method should include, e.g. estimated cost, changelog mode for streaming @return AST and the execution plan.
explain
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Explainable.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Explainable.java
Apache-2.0
/**
 * Like {@link #explain(ExplainDetail...)}, but pipes the result to {@link System#out}.
 *
 * @return this object, for call chaining
 */
@SuppressWarnings("unchecked")
default SELF printExplain(ExplainDetail... extraDetails) {
    final String plan = explain(extraDetails);
    System.out.println(plan);
    // Safe by the self-type convention of the SELF type parameter.
    return (SELF) this;
}
Like {@link #explain(ExplainDetail...)}, but piping the result to {@link System#out}.
printExplain
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Explainable.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Explainable.java
Apache-2.0
/**
 * Creates an unresolved reference to a table's column.
 *
 * <p>Example: {@code tab.select($("key"), $("value"))}
 *
 * @see #col(String)
 * @see #withAllColumns()
 */
public static ApiExpression $(String name) {
    final Expression ref = unresolvedRef(name);
    return new ApiExpression(ref);
}
Creates an unresolved reference to a table's column. <p>Example: <pre>{@code tab.select($("key"), $("value")) }</pre> @see #col(String) @see #withAllColumns()
$
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Creates an unresolved reference to a table's column. Synonym for {@link #$(String)},
 * provided because the dollar sign is not supported by every JVM language.
 *
 * <p>Example: {@code tab.select(col("key"), col("value"))}
 *
 * @see #withAllColumns()
 */
public static ApiExpression col(String name) {
    return $(name);
}
Creates an unresolved reference to a table's column. <p>Because {@link #$(String)} is not supported by every JVM language due to the dollar sign, this method provides a synonym with the same behavior. <p>Example: <pre>{@code tab.select(col("key"), col("value")) }</pre> @see #withAllColumns()
col
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Creates a literal (i.e. a constant value) whose data type is derived from the object's
 * class and value, e.g. {@code lit(12)} is {@code INT}, {@code lit("abc")} is
 * {@code CHAR(3)}. See {@link ValueDataTypeConverter} for supported literal values.
 */
public static ApiExpression lit(Object v) {
    final Expression literal = valueLiteral(v);
    return new ApiExpression(literal);
}
Creates a literal (i.e. a constant value). <p>The data type is derived from the object's class and its value. <p>For example: <ul> <li>{@code lit(12)} leads to {@code INT} <li>{@code lit("abc")} leads to {@code CHAR(3)} <li>{@code lit(new BigDecimal("123.45"))} leads to {@code DECIMAL(5, 2)} </ul> <p>See {@link ValueDataTypeConverter} for a list of supported literal values.
lit
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Creates a literal (i.e. a constant value) of an explicitly given {@link DataType}.
 * Prefer {@link #lit(Object)}, which derives the type automatically; {@code v}'s class
 * must be supported per {@link LogicalType#supportsInputConversion(Class)}.
 */
public static ApiExpression lit(Object v, DataType dataType) {
    final Expression literal = valueLiteral(v, dataType);
    return new ApiExpression(literal);
}
Creates a literal (i.e. a constant value) of a given {@link DataType}. <p>The method {@link #lit(Object)} is preferred as it extracts the {@link DataType} automatically. Use this method only when necessary. The class of {@code v} must be supported according to the {@link LogicalType#supportsInputConversion(Class)}.
lit
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Creates a literal describing an arbitrary, unvalidated list of column names — useful for
 * parameterizing a function, e.g. the {@code on_time} argument of a
 * {@link ProcessTableFunction}. The resulting data type is {@link DataTypes#DESCRIPTOR()}.
 */
public static ApiExpression descriptor(String... columnNames) {
    final ColumnList columns = ColumnList.of(Arrays.asList(columnNames));
    return new ApiExpression(valueLiteral(columns));
}
Creates a literal describing an arbitrary, unvalidated list of column names. <p>Passing a list of columns can be useful for parameterizing a function. In particular, it enables declaring the {@code on_time} argument for {@link ProcessTableFunction}. <p>The data type will be {@link DataTypes#DESCRIPTOR()}.
descriptor
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Indicates a column-name-based range from {@code start} to {@code end} for column
 * selection, e.g. {@code table.select(withColumns(range("b", "c")))}.
 *
 * @see #withColumns(Object, Object...)
 * @see #withoutColumns(Object, Object...)
 */
public static ApiExpression range(String start, String end) {
    return apiCall(
            BuiltInFunctionDefinitions.RANGE_TO, unresolvedRef(start), unresolvedRef(end));
}
Indicates a range from 'start' to 'end', which can be used in columns selection. <p>Example: <pre>{@code Table table = ... table.select(withColumns(range(b, c))) }</pre> @see #withColumns(Object, Object...) @see #withoutColumns(Object, Object...)
range
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Indicates an index-based range for column selection, e.g.
 * {@code table.select(withColumns(range(3, 4)))}.
 *
 * @see #withColumns(Object, Object...)
 * @see #withoutColumns(Object, Object...)
 */
public static ApiExpression range(int start, int end) {
    return apiCall(
            BuiltInFunctionDefinitions.RANGE_TO, valueLiteral(start), valueLiteral(end));
}
Indicates an index based range, which can be used in columns selection. <p>Example: <pre>{@code Table table = ... table.select(withColumns(range(3, 4))) }</pre> @see #withColumns(Object, Object...) @see #withoutColumns(Object, Object...)
range
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/** Boolean AND in three-valued logic; requires at least two predicates. */
public static ApiExpression and(Object predicate0, Object predicate1, Object... predicates) {
    return apiCallAtLeastTwoArgument(
            BuiltInFunctionDefinitions.AND, predicate0, predicate1, predicates);
}
Boolean AND in three-valued logic.
and
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/** Boolean OR in three-valued logic; requires at least two predicates. */
public static ApiExpression or(Object predicate0, Object predicate1, Object... predicates) {
    return apiCallAtLeastTwoArgument(
            BuiltInFunctionDefinitions.OR, predicate0, predicate1, predicates);
}
Boolean OR in three-valued logic.
or
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Inverts a boolean expression using three-valued logic: {@code NULL} is preserved, so
 * {@code not(lit(null, DataTypes.BOOLEAN()))} is {@code null}. The result type is nullable
 * iff the input type is nullable.
 */
public static ApiExpression not(Object expression) {
    return apiCall(BuiltInFunctionDefinitions.NOT, expression);
}
Inverts a given boolean expression. <p>This method supports a three-valued logic by preserving {@code NULL}. This means if the input expression is {@code NULL}, the result will also be {@code NULL}. <p>The resulting type is nullable if and only if the input type is nullable. <p>Examples: <pre>{@code not(lit(true)) // false not(lit(false)) // true not(lit(null, DataTypes.BOOLEAN())) // null }</pre>
not
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/** Returns the current SQL date in the local time zone; result type {@link DataTypes#DATE()}. */
public static ApiExpression currentDate() {
    return apiCall(BuiltInFunctionDefinitions.CURRENT_DATE);
}
Returns the current SQL date in local time zone, the return type of this expression is {@link DataTypes#DATE()}.
currentDate
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/** Returns the current SQL time in the local time zone; result type {@link DataTypes#TIME()}. */
public static ApiExpression currentTime() {
    return apiCall(BuiltInFunctionDefinitions.CURRENT_TIME);
}
Returns the current SQL time in local time zone, the return type of this expression is {@link DataTypes#TIME()}.
currentTime
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0
/**
 * Returns the current SQL timestamp in the local time zone; result type
 * {@link DataTypes#TIMESTAMP_WITH_LOCAL_TIME_ZONE()}.
 */
public static ApiExpression currentTimestamp() {
    return apiCall(BuiltInFunctionDefinitions.CURRENT_TIMESTAMP);
}
Returns the current SQL timestamp in local time zone, the return type of this expression is {@link DataTypes#TIMESTAMP_WITH_LOCAL_TIME_ZONE()}.
currentTimestamp
java
apache/flink
flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/Expressions.java
Apache-2.0