code stringlengths 25 201k | docstring stringlengths 19 96.2k | func_name stringlengths 0 235 | language stringclasses 1 value | repo stringlengths 8 51 | path stringlengths 11 314 | url stringlengths 62 377 | license stringclasses 7 values |
|---|---|---|---|---|---|---|---|
public void unregisterCatalog(String catalogName, boolean ignoreIfNotExists) {
checkArgument(
!StringUtils.isNullOrWhitespaceOnly(catalogName),
"Catalog name cannot be null or empty.");
if (catalogs.containsKey(catalogName)
|| catalogStoreHolder.catalogStore().contains(catalogName)) {
if (catalogName.equals(currentCatalogName)) {
throw new CatalogException("Cannot drop a catalog which is currently in use.");
}
if (catalogs.containsKey(catalogName)) {
Catalog catalog = catalogs.remove(catalogName);
catalog.close();
}
if (catalogStoreHolder.catalogStore().contains(catalogName)) {
catalogStoreHolder.catalogStore().removeCatalog(catalogName, ignoreIfNotExists);
}
} else if (!ignoreIfNotExists) {
throw new CatalogException(format("Catalog %s does not exist.", catalogName));
}
} | Unregisters a catalog under the given name. The catalog must exist.
<p>If the catalog is in the initialized catalogs, it will be removed from the initialized
catalogs. If the catalog is stored in the {@link CatalogStore}, it will be removed from the
CatalogStore.
@param catalogName name under which to unregister the given catalog.
@param ignoreIfNotExists If false, an exception will be thrown if the catalog to be
unregistered does not exist.
@throws CatalogException If the catalog does not exist in the initialized catalogs and not in
the {@link CatalogStore}, or if the remove operation failed. | unregisterCatalog | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Catalog getCatalogOrError(String catalogName) throws CatalogNotExistException {
return getCatalog(catalogName).orElseThrow(() -> new CatalogNotExistException(catalogName));
} | Gets a catalog by name.
@param catalogName name of the catalog to retrieve
@return the requested catalog
@throws CatalogNotExistException if the catalog does not exist | getCatalogOrError | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public @Nullable String getCurrentCatalog() {
return currentCatalogName;
} | Gets the current catalog that will be used when resolving table path.
@return the current catalog
@see CatalogManager#qualifyIdentifier(UnresolvedIdentifier) | getCurrentCatalog | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public void setCurrentCatalog(@Nullable String catalogName) throws CatalogNotExistException {
if (catalogName == null) {
this.currentCatalogName = null;
this.currentDatabaseName = null;
return;
}
checkArgument(
!StringUtils.isNullOrWhitespaceOnly(catalogName), "Catalog name cannot be empty.");
Catalog potentialCurrentCatalog =
getCatalog(catalogName)
.orElseThrow(
() ->
new CatalogException(
format(
"A catalog with name [%s] does not exist.",
catalogName)));
if (!catalogName.equals(currentCatalogName)) {
currentCatalogName = catalogName;
currentDatabaseName = potentialCurrentCatalog.getDefaultDatabase();
LOG.info(
"Set the current default catalog as [{}] and the current default database as [{}].",
currentCatalogName,
currentDatabaseName);
}
} | Sets the current catalog name that will be used when resolving table path.
@param catalogName catalog name to set as current catalog
@throws CatalogNotExistException thrown if the catalog doesn't exist
@see CatalogManager#qualifyIdentifier(UnresolvedIdentifier) | setCurrentCatalog | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public @Nullable String getCurrentDatabase() {
return currentDatabaseName;
} | Gets the current database name that will be used when resolving table path.
@return the current database
@see CatalogManager#qualifyIdentifier(UnresolvedIdentifier) | getCurrentDatabase | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public void setCurrentDatabase(@Nullable String databaseName) {
if (databaseName == null) {
this.currentDatabaseName = null;
return;
}
checkArgument(
!StringUtils.isNullOrWhitespaceOnly(databaseName),
"The database name cannot be empty.");
if (currentCatalogName == null) {
throw new CatalogException("Current catalog has not been set.");
}
if (!getCatalogOrThrowException(currentCatalogName).databaseExists(databaseName)) {
throw new CatalogException(
format(
"A database with name [%s] does not exist in the catalog: [%s].",
databaseName, currentCatalogName));
}
if (!databaseName.equals(currentDatabaseName)) {
currentDatabaseName = databaseName;
LOG.info(
"Set the current default database as [{}] in the current default catalog [{}].",
currentDatabaseName,
currentCatalogName);
}
} | Sets the current database name that will be used when resolving a table path. The database
has to exist in the current catalog.
@param databaseName database name to set as current database name
@throws CatalogException thrown if the database doesn't exist in the current catalog
@see CatalogManager#qualifyIdentifier(UnresolvedIdentifier)
@see CatalogManager#setCurrentCatalog(String) | setCurrentDatabase | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public String getBuiltInCatalogName() {
return builtInCatalogName;
} | Gets the built-in catalog name. The built-in catalog is used for storing all non-serializable
transient meta-objects.
@return the built-in catalog name | getBuiltInCatalogName | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public String getBuiltInDatabaseName() {
// The default database of the built-in catalog is also the built-in database.
return getCatalogOrThrowException(getBuiltInCatalogName()).getDefaultDatabase();
} | Gets the built-in database name in the built-in catalog. The built-in database is used for
storing all non-serializable transient meta-objects.
@return the built-in database name | getBuiltInDatabaseName | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Optional<ContextResolvedTable> getTable(ObjectIdentifier objectIdentifier) {
CatalogBaseTable temporaryTable = temporaryTables.get(objectIdentifier);
if (temporaryTable != null) {
final ResolvedCatalogBaseTable<?> resolvedTable =
resolveCatalogBaseTable(temporaryTable);
return Optional.of(ContextResolvedTable.temporary(objectIdentifier, resolvedTable));
} else {
return getPermanentTable(objectIdentifier, null);
}
} | Retrieves a fully qualified table. If the path is not yet fully qualified use {@link
#qualifyIdentifier(UnresolvedIdentifier)} first.
@param objectIdentifier full path of the table to retrieve
@return table that the path points to. | getTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Optional<ContextResolvedTable> getTable(
ObjectIdentifier objectIdentifier, long timestamp) {
CatalogBaseTable temporaryTable = temporaryTables.get(objectIdentifier);
if (temporaryTable != null) {
final ResolvedCatalogBaseTable<?> resolvedTable =
resolveCatalogBaseTable(temporaryTable);
return Optional.of(ContextResolvedTable.temporary(objectIdentifier, resolvedTable));
} else {
return getPermanentTable(objectIdentifier, timestamp);
}
} | Retrieves a fully qualified table with a specific time. If the path is not yet fully
qualified, use {@link #qualifyIdentifier(UnresolvedIdentifier)} first.
@param objectIdentifier full path of the table to retrieve
@param timestamp Timestamp of the table snapshot, which is milliseconds since 1970-01-01
00:00:00 UTC
@return table at a specific time that the path points to. | getTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
@Override
public Optional<ResolvedCatalogBaseTable<?>> getCatalogBaseTable(
ObjectIdentifier objectIdentifier) {
ContextResolvedTable resolvedTable = getTable(objectIdentifier).orElse(null);
return resolvedTable == null
? Optional.empty()
: Optional.of(resolvedTable.getResolvedTable());
} | Retrieves a fully qualified table. If the path is not yet fully qualified use {@link
#qualifyIdentifier(UnresolvedIdentifier)} first.
@param objectIdentifier full path of the table to retrieve
@return resolved table that the path points to or empty if it does not exist. | getCatalogBaseTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
@Override
public boolean isTemporaryTable(ObjectIdentifier objectIdentifier) {
return temporaryTables.containsKey(objectIdentifier);
} | Return whether the table with a fully qualified table path is temporary or not.
@param objectIdentifier full path of the table
@return the table is temporary or not. | isTemporaryTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public ContextResolvedTable getTableOrError(ObjectIdentifier objectIdentifier) {
return getTable(objectIdentifier)
.orElseThrow(
() ->
new TableException(
String.format(
"Cannot find table '%s' in any of the catalogs %s, nor as a temporary table.",
objectIdentifier, listCatalogs())));
} | Like {@link #getTable(ObjectIdentifier)}, but throws an error when the table is not available
in any of the catalogs. | getTableOrError | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Set<String> listCatalogs() {
return Collections.unmodifiableSet(
Stream.concat(
catalogs.keySet().stream(),
catalogStoreHolder.catalogStore().listCatalogs().stream())
.collect(Collectors.toSet()));
} | Retrieves the set of names of all registered catalogs, including all initialized catalogs and
all catalogs stored in the {@link CatalogStore}.
@return a set of names of registered catalogs | listCatalogs | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Set<String> listTables() {
return listTables(getCurrentCatalog(), getCurrentDatabase());
} | Returns a set of names of all tables (tables and views, both temporary and permanent)
registered in the namespace of the current catalog and database.
@return names of all registered tables | listTables | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Set<String> listTables(String catalogName, String databaseName) {
Catalog catalog = getCatalogOrThrowException(catalogName);
if (catalog == null) {
throw new ValidationException(String.format("Catalog %s does not exist", catalogName));
}
try {
return Stream.concat(
catalog.listTables(databaseName).stream(),
listTemporaryTablesInternal(catalogName, databaseName)
.map(e -> e.getKey().getObjectName()))
.collect(Collectors.toSet());
} catch (DatabaseNotExistException e) {
throw new ValidationException(
String.format("Database %s does not exist", databaseName), e);
}
} | Returns a set of names of all tables (tables and views, both temporary and permanent)
registered in the namespace of the given catalog and database.
@return names of all registered tables | listTables | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Set<String> listTemporaryTables() {
return listTemporaryTablesInternal(getCurrentCatalog(), getCurrentDatabase())
.map(e -> e.getKey().getObjectName())
.collect(Collectors.toSet());
} | Returns a set of names of temporary tables registered in the namespace of the current
catalog and database.
@return names of registered temporary tables | listTemporaryTables | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Set<String> listViews() {
return listViews(getCurrentCatalog(), getCurrentDatabase());
} | Returns a set of names of all views (both temporary and permanent) registered in the
namespace of the current catalog and database.
@return names of all registered views | listViews | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Set<String> listSchemas() {
return Stream.concat(
catalogs.keySet().stream(),
temporaryTables.keySet().stream().map(ObjectIdentifier::getCatalogName))
.collect(Collectors.toSet());
} | Lists all available schemas in the root of the catalog manager. It is not equivalent to
listing all catalogs as it includes also different catalog parts of the temporary objects.
<p><b>NOTE:</b>It is primarily used for interacting with Calcite's schema.
@return set of schemas in the root of catalog manager | listSchemas | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Set<String> listSchemas(String catalogName) {
return Stream.concat(
getCatalog(catalogName)
.map(Catalog::listDatabases)
.orElse(Collections.emptyList())
.stream(),
temporaryTables.keySet().stream()
.filter(i -> i.getCatalogName().equals(catalogName))
.map(ObjectIdentifier::getDatabaseName))
.collect(Collectors.toSet());
} | Lists all available schemas in the given catalog. It is not equivalent to listing databases
within the given catalog as it includes also different database parts of the temporary
objects identifiers.
<p><b>NOTE:</b>It is primarily used for interacting with Calcite's schema.
@param catalogName filter for the catalog part of the schema
@return set of schemas with the given prefix | listSchemas | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public boolean schemaExists(String catalogName) {
return getCatalog(catalogName).isPresent()
|| temporaryTables.keySet().stream()
.anyMatch(i -> i.getCatalogName().equals(catalogName));
} | Checks if there is a catalog with given name or is there a temporary object registered within
a given catalog.
<p><b>NOTE:</b>It is primarily used for interacting with Calcite's schema.
@param catalogName filter for the catalog part of the schema
@return true if a subschema exists | schemaExists | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public ObjectIdentifier qualifyIdentifier(UnresolvedIdentifier identifier) {
return ObjectIdentifier.of(
identifier.getCatalogName().orElseGet(() -> qualifyCatalog(getCurrentCatalog())),
identifier.getDatabaseName().orElseGet(() -> qualifyDatabase(getCurrentDatabase())),
identifier.getObjectName());
} | Returns the full name of the given table path, this name may be padded with current
catalog/database name based on the {@code identifier's} length.
@param identifier an unresolved identifier
@return a fully qualified object identifier | qualifyIdentifier | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public String qualifyCatalog(@Nullable String catalogName) {
if (!StringUtils.isNullOrWhitespaceOnly(catalogName)) {
return catalogName;
}
final String currentCatalogName = getCurrentCatalog();
if (StringUtils.isNullOrWhitespaceOnly(currentCatalogName)) {
throw new ValidationException(
"A current catalog has not been set. Please use a"
+ " fully qualified identifier (such as"
+ " 'my_catalog.my_database.my_table') or"
+ " set a current catalog using"
+ " 'USE CATALOG my_catalog'.");
}
return currentCatalogName;
} | Qualifies catalog name. Throws {@link ValidationException} if not set. | qualifyCatalog | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public String qualifyDatabase(@Nullable String databaseName) {
if (!StringUtils.isNullOrWhitespaceOnly(databaseName)) {
return databaseName;
}
final String currentDatabaseName = getCurrentDatabase();
if (StringUtils.isNullOrWhitespaceOnly(currentDatabaseName)) {
throw new ValidationException(
"A current database has not been set. Please use a"
+ " fully qualified identifier (such as"
+ " 'my_database.my_table' or"
+ " 'my_catalog.my_database.my_table') or"
+ " set a current database using"
+ " 'USE my_database'.");
}
return currentDatabaseName;
} | Qualifies database name. Throws {@link ValidationException} if not set. | qualifyDatabase | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public boolean createTable(
CatalogBaseTable table, ObjectIdentifier objectIdentifier, boolean ignoreIfExists) {
final boolean result;
if (ignoreIfExists) {
final Optional<CatalogBaseTable> resultOpt = getUnresolvedTable(objectIdentifier);
result = resultOpt.isEmpty();
} else {
result = true;
}
execute(
(catalog, path) -> {
ResolvedCatalogBaseTable<?> resolvedTable = resolveCatalogBaseTable(table);
catalog.createTable(path, resolvedTable, ignoreIfExists);
if (resolvedTable instanceof CatalogTable
|| resolvedTable instanceof CatalogMaterializedTable) {
catalogModificationListeners.forEach(
listener ->
listener.onEvent(
CreateTableEvent.createEvent(
CatalogContext.createContext(
objectIdentifier.getCatalogName(),
catalog),
objectIdentifier,
resolvedTable,
ignoreIfExists,
false)));
}
},
objectIdentifier,
false,
"CreateTable");
return result;
} | Creates a table in a given fully qualified path.
@param table The table to put in the given path.
@param objectIdentifier The fully qualified path where to put the table.
@param ignoreIfExists If false exception will be thrown if a table exists in the given path.
@return true if table was created in the given path, false if a table already exists in the
given path. | createTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public void createTemporaryTable(
CatalogBaseTable table, ObjectIdentifier objectIdentifier, boolean ignoreIfExists) {
Optional<TemporaryOperationListener> listener =
getTemporaryOperationListener(objectIdentifier);
temporaryTables.compute(
objectIdentifier,
(k, v) -> {
if (v != null) {
if (!ignoreIfExists) {
throw new ValidationException(
String.format(
"Temporary table '%s' already exists",
objectIdentifier));
}
return v;
} else {
ResolvedCatalogBaseTable<?> resolvedTable = resolveCatalogBaseTable(table);
Catalog catalog =
getCatalog(objectIdentifier.getCatalogName()).orElse(null);
if (listener.isPresent()) {
return listener.get()
.onCreateTemporaryTable(
objectIdentifier.toObjectPath(), resolvedTable);
}
if (resolvedTable instanceof CatalogTable) {
catalogModificationListeners.forEach(
l ->
l.onEvent(
CreateTableEvent.createEvent(
CatalogContext.createContext(
objectIdentifier
.getCatalogName(),
catalog),
objectIdentifier,
resolvedTable,
ignoreIfExists,
true)));
}
return resolvedTable;
}
});
} | Creates a temporary table in a given fully qualified path.
@param table The table to put in the given path.
@param objectIdentifier The fully qualified path where to put the table.
@param ignoreIfExists if false exception will be thrown if a table exists in the given path. | createTemporaryTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public void dropTemporaryTable(ObjectIdentifier objectIdentifier, boolean ignoreIfNotExists) {
dropTemporaryTableInternal(
objectIdentifier,
(table) -> table instanceof CatalogTable,
ignoreIfNotExists,
true);
} | Drop a temporary table in a given fully qualified path.
@param objectIdentifier The fully qualified path of the table to drop.
@param ignoreIfNotExists If false exception will be thrown if the table to be dropped does
not exist. | dropTemporaryTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public void alterTable(
CatalogBaseTable table, ObjectIdentifier objectIdentifier, boolean ignoreIfNotExists) {
execute(
(catalog, path) -> {
final CatalogBaseTable resolvedTable = resolveCatalogBaseTable(table);
catalog.alterTable(path, resolvedTable, ignoreIfNotExists);
if (resolvedTable instanceof CatalogTable
|| resolvedTable instanceof CatalogMaterializedTable) {
catalogModificationListeners.forEach(
listener ->
listener.onEvent(
AlterTableEvent.createEvent(
CatalogContext.createContext(
objectIdentifier.getCatalogName(),
catalog),
objectIdentifier,
resolvedTable,
ignoreIfNotExists)));
}
},
objectIdentifier,
ignoreIfNotExists,
"AlterTable");
} | Alters a table in a given fully qualified path.
@param table The table to put in the given path
@param objectIdentifier The fully qualified path where to alter the table.
@param ignoreIfNotExists If false exception will be thrown if the table or database or
catalog to be altered does not exist. | alterTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public void alterTable(
CatalogBaseTable table,
List<TableChange> changes,
ObjectIdentifier objectIdentifier,
boolean ignoreIfNotExists) {
execute(
(catalog, path) -> {
final CatalogBaseTable resolvedTable = resolveCatalogBaseTable(table);
catalog.alterTable(path, resolvedTable, changes, ignoreIfNotExists);
if (resolvedTable instanceof CatalogTable) {
catalogModificationListeners.forEach(
listener ->
listener.onEvent(
AlterTableEvent.createEvent(
CatalogContext.createContext(
objectIdentifier.getCatalogName(),
catalog),
objectIdentifier,
resolvedTable,
ignoreIfNotExists)));
}
},
objectIdentifier,
ignoreIfNotExists,
"AlterTable");
} | Alters a table in a given fully qualified path with table changes.
@param table The table to put in the given path
@param changes The table changes from the original table to the new table.
@param objectIdentifier The fully qualified path where to alter the table.
@param ignoreIfNotExists If false exception will be thrown if the table or database or
catalog to be altered does not exist. | alterTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public boolean dropTable(ObjectIdentifier objectIdentifier, boolean ignoreIfNotExists) {
return dropTableInternal(objectIdentifier, ignoreIfNotExists, TableKind.TABLE);
} | Drops a table in a given fully qualified path.
@param objectIdentifier The fully qualified path of the table to drop.
@param ignoreIfNotExists If false exception will be thrown if the table to drop does not
exist.
@return true if table existed in the given path and was dropped, false if table didn't exist
in the given path and ignoreIfNotExists was true. | dropTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public boolean dropMaterializedTable(
ObjectIdentifier objectIdentifier, boolean ignoreIfNotExists) {
return dropTableInternal(objectIdentifier, ignoreIfNotExists, TableKind.MATERIALIZED_TABLE);
} | Drops a materialized table in a given fully qualified path.
@param objectIdentifier The fully qualified path of the materialized table to drop.
@param ignoreIfNotExists If false exception will be thrown if the table to drop does not
exist.
@return true if materialized table existed in the given path and was dropped, false if
materialized table didn't exist in the given path and ignoreIfNotExists was true. | dropMaterializedTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Optional<ContextResolvedModel> getModel(ObjectIdentifier objectIdentifier) {
CatalogModel temporaryModel = temporaryModels.get(objectIdentifier);
if (temporaryModel != null) {
final ResolvedCatalogModel resolvedModel = resolveCatalogModel(temporaryModel);
return Optional.of(ContextResolvedModel.temporary(objectIdentifier, resolvedModel));
}
Optional<Catalog> catalogOptional = getCatalog(objectIdentifier.getCatalogName());
ObjectPath objectPath = objectIdentifier.toObjectPath();
if (catalogOptional.isPresent()) {
Catalog currentCatalog = catalogOptional.get();
try {
final CatalogModel model = currentCatalog.getModel(objectPath);
if (model != null) {
final ResolvedCatalogModel resolvedModel = resolveCatalogModel(model);
return Optional.of(
ContextResolvedModel.permanent(
objectIdentifier, currentCatalog, resolvedModel));
}
} catch (ModelNotExistException e) {
// Ignore.
} catch (UnsupportedOperationException e) {
// Ignore for catalogs that don't support models.
}
}
return Optional.empty();
} | Retrieves a fully qualified model. If the path is not yet fully qualified use {@link
#qualifyIdentifier(UnresolvedIdentifier)} first.
@param objectIdentifier full path of the model to retrieve
@return model that the path points to. | getModel | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public ContextResolvedModel getModelOrError(ObjectIdentifier objectIdentifier) {
return getModel(objectIdentifier)
.orElseThrow(
() ->
new TableException(
String.format(
"Cannot find model '%s' in any of the catalogs %s.",
objectIdentifier, listCatalogs())));
} | Like {@link #getModel(ObjectIdentifier)}, but throws an error when the model is not available
in any of the catalogs. | getModelOrError | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public boolean isTemporaryModel(ObjectIdentifier objectIdentifier) {
return temporaryModels.containsKey(objectIdentifier);
} | Returns whether the model with a fully qualified model path is temporary or not.
@param objectIdentifier full path of the model
@return the model is temporary or not. | isTemporaryModel | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
public Set<String> listModels() {
return listModels(getCurrentCatalog(), getCurrentDatabase());
} | Returns a set of names of all models registered in the namespace of the current catalog
and database.
@return names of all registered models | listModels | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Returns the names of all models registered in the namespace of the given catalog and
 * database.
 *
 * @param catalogName catalog to look into
 * @param databaseName database within the catalog
 * @return names of all registered models
 */
public Set<String> listModels(String catalogName, String databaseName) {
    final Catalog resolvedCatalog = getCatalogOrThrowException(catalogName);
    // NOTE(review): this null check is likely dead if getCatalogOrThrowException always
    // throws for missing catalogs — kept as-is to preserve behavior; confirm and simplify.
    if (resolvedCatalog == null) {
        throw new ValidationException(String.format("Catalog %s does not exist", catalogName));
    }
    try {
        return new HashSet<>(resolvedCatalog.listModels(databaseName));
    } catch (DatabaseNotExistException e) {
        throw new ValidationException(
                String.format("Database %s does not exist", databaseName), e);
    }
} | Returns an array of names of all models registered in the namespace of the given catalog and
database.
@return names of all registered models | listModels | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Creates a permanent model at the given fully qualified path.
 *
 * <p>The model is resolved first, then handed to the owning catalog; registered catalog
 * modification listeners are notified after the catalog accepted the model.
 *
 * @param model the model to put in the given path
 * @param objectIdentifier the fully qualified path where to put the model
 * @param ignoreIfExists if false the catalog throws if a model already exists in the path
 */
public void createModel(
        CatalogModel model, ObjectIdentifier objectIdentifier, boolean ignoreIfExists) {
    execute(
            (catalog, path) -> {
                final ResolvedCatalogModel resolvedModel = resolveCatalogModel(model);
                catalog.createModel(path, resolvedModel, ignoreIfExists);
                // Listeners fire only after a successful creation in the catalog.
                catalogModificationListeners.forEach(
                        listener ->
                                listener.onEvent(
                                        CreateModelEvent.createEvent(
                                                CatalogContext.createContext(
                                                        objectIdentifier.getCatalogName(),
                                                        catalog),
                                                objectIdentifier,
                                                resolvedModel,
                                                ignoreIfExists,
                                                false)));
            },
            objectIdentifier,
            false,
            "CreateModel");
} | Creates a model in a given fully qualified path.
@param model The resolved model to put in the given path.
@param objectIdentifier The fully qualified path where to put the model.
@param ignoreIfExists If false exception will be thrown if a model exists in the given path. | createModel | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Creates a temporary model at the given fully qualified path.
 *
 * <p>Registration happens atomically via {@code Map#compute}. If a temporary model already
 * exists under the identifier, it is either kept ({@code ignoreIfExists}) or a
 * {@link ValidationException} is thrown. If a {@link TemporaryOperationListener} is registered
 * for the catalog, it decides which model instance gets stored; otherwise the resolved model
 * is stored and catalog modification listeners are notified.
 *
 * @param model the model to put in the given path
 * @param objectIdentifier the fully qualified path where to put the model
 * @param ignoreIfExists if false an exception is thrown if a model exists in the given path
 */
public void createTemporaryModel(
        CatalogModel model, ObjectIdentifier objectIdentifier, boolean ignoreIfExists) {
    Optional<TemporaryOperationListener> listener =
            getTemporaryOperationListener(objectIdentifier);
    temporaryModels.compute(
            objectIdentifier,
            (k, v) -> {
                if (v != null) {
                    // An entry already exists: fail or keep it untouched.
                    if (!ignoreIfExists) {
                        throw new ValidationException(
                                String.format(
                                        "Temporary model '%s' already exists",
                                        objectIdentifier));
                    }
                    return v;
                } else {
                    ResolvedCatalogModel resolvedModel = resolveCatalogModel(model);
                    // The catalog may be absent; listeners receive a null catalog then.
                    Catalog catalog =
                            getCatalog(objectIdentifier.getCatalogName()).orElse(null);
                    if (listener.isPresent()) {
                        // The temporary-operation listener may replace the stored model;
                        // note that modification listeners are not notified in this branch.
                        return listener.get()
                                .onCreateTemporaryModel(
                                        objectIdentifier.toObjectPath(), resolvedModel);
                    }
                    catalogModificationListeners.forEach(
                            l ->
                                    l.onEvent(
                                            CreateModelEvent.createEvent(
                                                    CatalogContext.createContext(
                                                            objectIdentifier.getCatalogName(),
                                                            catalog),
                                                    objectIdentifier,
                                                    resolvedModel,
                                                    ignoreIfExists,
                                                    true)));
                    return resolvedModel;
                }
            });
} | Creates a temporary model in a given fully qualified path.
@param model The resolved model to put in the given path.
@param objectIdentifier The fully qualified path where to put the model.
@param ignoreIfExists if false exception will be thrown if a model exists in the given path. | createTemporaryModel | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Alters a model at the given fully qualified path by applying the given model changes.
 *
 * @param newModel the new model containing the changes
 * @param modelChanges the individual changes to apply to the model
 * @param objectIdentifier the fully qualified path of the model to alter
 * @param ignoreIfNotExists if false an exception is thrown if the model does not exist
 */
public void alterModel(
        CatalogModel newModel,
        List<ModelChange> modelChanges,
        ObjectIdentifier objectIdentifier,
        boolean ignoreIfNotExists) {
    execute(
            (catalog, path) -> {
                ResolvedCatalogModel resolvedModel = resolveCatalogModel(newModel);
                catalog.alterModel(path, resolvedModel, modelChanges, ignoreIfNotExists);
                // Listeners fire only after the catalog applied the change.
                catalogModificationListeners.forEach(
                        listener ->
                                listener.onEvent(
                                        AlterModelEvent.createEvent(
                                                CatalogContext.createContext(
                                                        objectIdentifier.getCatalogName(),
                                                        catalog),
                                                objectIdentifier,
                                                resolvedModel,
                                                ignoreIfNotExists)));
            },
            objectIdentifier,
            ignoreIfNotExists,
            "AlterModel");
} | Alters a model in a given fully qualified path.
@param newModel The new model containing changes.
@param modelChanges The changes to apply to the model.
@param objectIdentifier The fully qualified path where to alter the model.
@param ignoreIfNotExists If false exception will be thrown if the model to be altered does
not exist. | alterModel | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Alters a model at the given fully qualified path with a fully specified replacement model.
 *
 * @param newModel the new model definition
 * @param objectIdentifier the fully qualified path of the model to alter
 * @param ignoreIfNotExists if false an exception is thrown if the model does not exist
 */
public void alterModel(
        CatalogModel newModel, ObjectIdentifier objectIdentifier, boolean ignoreIfNotExists) {
    execute(
            (catalog, path) -> {
                ResolvedCatalogModel resolvedModel = resolveCatalogModel(newModel);
                catalog.alterModel(path, resolvedModel, ignoreIfNotExists);
                // Listeners fire only after the catalog applied the change.
                catalogModificationListeners.forEach(
                        listener ->
                                listener.onEvent(
                                        AlterModelEvent.createEvent(
                                                CatalogContext.createContext(
                                                        objectIdentifier.getCatalogName(),
                                                        catalog),
                                                objectIdentifier,
                                                resolvedModel,
                                                ignoreIfNotExists)));
            },
            objectIdentifier,
            ignoreIfNotExists,
            "AlterModel");
} | Alters a model in a given fully qualified path.
@param newModel The new model containing changes
@param objectIdentifier The fully qualified path where to alter the model.
@param ignoreIfNotExists If false exception will be thrown if the model to be altered does
not exist. | alterModel | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Drops a model at the given fully qualified path.
 *
 * <p>The model is looked up first so the resolved definition can be handed to the drop-event
 * listeners after the catalog removed it.
 *
 * @param objectIdentifier the fully qualified path of the model to drop
 * @param ignoreIfNotExists if false a {@link ModelNotExistException} is thrown if the model
 *     does not exist
 */
public void dropModel(ObjectIdentifier objectIdentifier, boolean ignoreIfNotExists) {
    execute(
            (catalog, path) -> {
                Optional<ContextResolvedModel> resultOpt = getModel(objectIdentifier);
                if (resultOpt.isPresent()) {
                    ResolvedCatalogModel resolvedModel = resultOpt.get().getResolvedModel();
                    catalog.dropModel(path, ignoreIfNotExists);
                    catalogModificationListeners.forEach(
                            listener ->
                                    listener.onEvent(
                                            DropModelEvent.createEvent(
                                                    CatalogContext.createContext(
                                                            objectIdentifier.getCatalogName(),
                                                            catalog),
                                                    objectIdentifier,
                                                    resolvedModel,
                                                    ignoreIfNotExists,
                                                    false)));
                } else if (!ignoreIfNotExists) {
                    throw new ModelNotExistException(
                            objectIdentifier.getCatalogName(), objectIdentifier.toObjectPath());
                }
            },
            objectIdentifier,
            ignoreIfNotExists,
            "DropModel");
} | Drops a model in a given fully qualified path.
@param objectIdentifier The fully qualified path of the model to drop.
@param ignoreIfNotExists If false exception will be thrown if the model to drop does not
exist. | dropModel | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Resolves a {@link CatalogBaseTable} to a validated {@link ResolvedCatalogBaseTable} by
 * dispatching on its concrete kind (table, materialized table, or view).
 *
 * @param baseTable the unresolved table, materialized table, or view
 * @return the validated, resolved counterpart
 */
public ResolvedCatalogBaseTable<?> resolveCatalogBaseTable(CatalogBaseTable baseTable) {
    Preconditions.checkNotNull(schemaResolver, "Schema resolver is not initialized.");
    if (baseTable instanceof CatalogTable) {
        return resolveCatalogTable((CatalogTable) baseTable);
    }
    if (baseTable instanceof CatalogMaterializedTable) {
        return resolveCatalogMaterializedTable((CatalogMaterializedTable) baseTable);
    }
    if (baseTable instanceof CatalogView) {
        return resolveCatalogView((CatalogView) baseTable);
    }
    throw new IllegalArgumentException(
            "Unknown kind of catalog base table: " + baseTable.getClass());
} | Resolves a {@link CatalogBaseTable} to a validated {@link ResolvedCatalogBaseTable}. | resolveCatalogBaseTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0
/**
 * Resolves a {@link CatalogTable} to a validated {@link ResolvedCatalogTable}.
 *
 * <p>Besides resolving the schema, this validates that every distribution bucket key and
 * every partition key references a physical column, and that a declared bucket count is
 * positive.
 *
 * @param table the unresolved catalog table
 * @return the validated, resolved table (the input itself if already resolved)
 */
public ResolvedCatalogTable resolveCatalogTable(CatalogTable table) {
    Preconditions.checkNotNull(schemaResolver, "Schema resolver is not initialized.");
    if (table instanceof ResolvedCatalogTable) {
        return (ResolvedCatalogTable) table;
    }
    final ResolvedSchema resolvedSchema = table.getUnresolvedSchema().resolve(schemaResolver);
    // Validate distribution keys are included in physical columns
    final List<String> physicalColumns =
            resolvedSchema.getColumns().stream()
                    .filter(Column::isPhysical)
                    .map(Column::getName)
                    .collect(Collectors.toList());
    // Checks both the bucket keys and, if present, the bucket count of a distribution.
    final Consumer<TableDistribution> distributionValidation =
            distribution -> {
                distribution
                        .getBucketKeys()
                        .forEach(
                                bucketKey -> {
                                    if (!physicalColumns.contains(bucketKey)) {
                                        throw new ValidationException(
                                                String.format(
                                                        "Invalid bucket key '%s'. A bucket key for a distribution must "
                                                                + "reference a physical column in the schema. "
                                                                + "Available columns are: %s",
                                                        bucketKey, physicalColumns));
                                    }
                                });
                distribution
                        .getBucketCount()
                        .ifPresent(
                                c -> {
                                    if (c <= 0) {
                                        throw new ValidationException(
                                                String.format(
                                                        "Invalid bucket count '%s'. The number of "
                                                                + "buckets for a distributed table must be at least 1.",
                                                        c));
                                    }
                                });
            };
    table.getDistribution().ifPresent(distributionValidation);
    // Partition keys must also reference physical columns only.
    table.getPartitionKeys()
            .forEach(
                    partitionKey -> {
                        if (!physicalColumns.contains(partitionKey)) {
                            throw new ValidationException(
                                    String.format(
                                            "Invalid partition key '%s'. A partition key must "
                                                    + "reference a physical column in the schema. "
                                                    + "Available columns are: %s",
                                            partitionKey, physicalColumns));
                        }
                    });
    return new ResolvedCatalogTable(table, resolvedSchema);
} | Resolves a {@link CatalogTable} to a validated {@link ResolvedCatalogTable}. | resolveCatalogTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0
/**
 * Resolves a {@link CatalogMaterializedTable} to a validated
 * {@link ResolvedCatalogMaterializedTable}, verifying that every partition key references a
 * physical column of the resolved schema.
 *
 * @param table the unresolved materialized table
 * @return the validated, resolved materialized table (the input itself if already resolved)
 */
public ResolvedCatalogMaterializedTable resolveCatalogMaterializedTable(
        CatalogMaterializedTable table) {
    Preconditions.checkNotNull(schemaResolver, "Schema resolver is not initialized.");
    if (table instanceof ResolvedCatalogMaterializedTable) {
        return (ResolvedCatalogMaterializedTable) table;
    }
    final ResolvedSchema schema = table.getUnresolvedSchema().resolve(schemaResolver);
    // Only physical columns may be used as partition keys.
    final List<String> physicalColumns =
            schema.getColumns().stream()
                    .filter(Column::isPhysical)
                    .map(Column::getName)
                    .collect(Collectors.toList());
    for (String partitionKey : table.getPartitionKeys()) {
        if (!physicalColumns.contains(partitionKey)) {
            throw new ValidationException(
                    String.format(
                            "Invalid partition key '%s'. A partition key must "
                                    + "reference a physical column in the schema. "
                                    + "Available columns are: %s",
                            partitionKey, physicalColumns));
        }
    }
    return new ResolvedCatalogMaterializedTable(table, schema);
} | Resolves a {@link CatalogMaterializedTable} to a validated {@link
ResolvedCatalogMaterializedTable}. | resolveCatalogMaterializedTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Resolves a {@link CatalogView} to a validated {@link ResolvedCatalogView}.
 *
 * <p>Best-effort parsing: the expanded query is parsed and, if it yields exactly one
 * {@link QueryOperation} whose column count matches the declared schema, the columns are
 * renamed to the declared names and a pre-validated {@link QueryOperationCatalogView} is
 * returned. On any parsing mismatch the view is returned with the declared schema so that
 * lower layers produce the failure.
 *
 * @param view the unresolved view
 * @return the validated, resolved view (the input itself if already resolved)
 */
public ResolvedCatalogView resolveCatalogView(CatalogView view) {
    Preconditions.checkNotNull(schemaResolver, "Schema resolver is not initialized.");
    if (view instanceof ResolvedCatalogView) {
        return (ResolvedCatalogView) view;
    }
    if (view instanceof QueryOperationCatalogView) {
        // Already backed by a query operation; its schema is authoritative.
        final QueryOperation queryOperation =
                ((QueryOperationCatalogView) view).getQueryOperation();
        return new ResolvedCatalogView(view, queryOperation.getResolvedSchema());
    }
    final ResolvedSchema resolvedSchema = view.getUnresolvedSchema().resolve(schemaResolver);
    final List<Operation> parse;
    try {
        parse = parser.parse(view.getExpandedQuery());
    } catch (Throwable e) {
        // in case of a failure during parsing, let the lower layers fail
        return new ResolvedCatalogView(view, resolvedSchema);
    }
    if (parse.size() != 1 || !(parse.get(0) instanceof QueryOperation)) {
        // parsing a view should result in a single query operation
        // if it is not what we expect, we let the lower layers fail
        return new ResolvedCatalogView(view, resolvedSchema);
    } else {
        final QueryOperation operation = (QueryOperation) parse.get(0);
        final ResolvedSchema querySchema = operation.getResolvedSchema();
        if (querySchema.getColumns().size() != resolvedSchema.getColumns().size()) {
            // in case the query does not match the number of expected columns, let the lower
            // layers fail
            return new ResolvedCatalogView(view, resolvedSchema);
        }
        // Rename the query's columns positionally to the names declared by the view.
        final ResolvedSchema renamedQuerySchema =
                new ResolvedSchema(
                        IntStream.range(0, resolvedSchema.getColumnCount())
                                .mapToObj(
                                        i ->
                                                querySchema
                                                        .getColumn(i)
                                                        .get()
                                                        .rename(
                                                                resolvedSchema
                                                                        .getColumnNames()
                                                                        .get(i)))
                                .collect(Collectors.toList()),
                        resolvedSchema.getWatermarkSpecs(),
                        resolvedSchema.getPrimaryKey().orElse(null));
        return new ResolvedCatalogView(
                // pass a view that has the query parsed and
                // validated already
                new QueryOperationCatalogView(operation, view), renamedQuerySchema);
    }
} | Resolves a {@link CatalogView} to a validated {@link ResolvedCatalogView}. | resolveCatalogView | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0
/**
 * Creates a database in the given catalog and notifies the registered catalog modification
 * listeners afterwards.
 *
 * @param catalogName name of the catalog that owns the database
 * @param databaseName name of the database to create
 * @param database the database definition
 * @param ignoreIfExists if false a {@link DatabaseAlreadyExistException} is thrown when the
 *     database already exists; if true nothing happens
 * @throws DatabaseAlreadyExistException if the database exists and ignoreIfExists is false
 * @throws CatalogException in case of any runtime exception
 */
public void createDatabase(
        String catalogName,
        String databaseName,
        CatalogDatabase database,
        boolean ignoreIfExists)
        throws DatabaseAlreadyExistException, CatalogException {
    final Catalog targetCatalog = getCatalogOrThrowException(catalogName);
    targetCatalog.createDatabase(databaseName, database, ignoreIfExists);
    final CreateDatabaseEvent event =
            CreateDatabaseEvent.createEvent(
                    CatalogContext.createContext(catalogName, targetCatalog),
                    databaseName,
                    database,
                    ignoreIfExists);
    catalogModificationListeners.forEach(listener -> listener.onEvent(event));
} | Create a database.
@param catalogName Name of the catalog for database
@param databaseName Name of the database to be created
@param database The database definition
@param ignoreIfExists Flag to specify behavior when a database with the given name already
exists: if set to false, throw a DatabaseAlreadyExistException, if set to true, do
nothing.
@throws DatabaseAlreadyExistException if the given database already exists and ignoreIfExists
is false
@throws CatalogException in case of any runtime exception | createDatabase | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Drops a database from the given catalog and notifies the registered catalog modification
 * listeners afterwards. The session's current database can never be dropped.
 *
 * @param catalogName name of the catalog that owns the database
 * @param databaseName name of the database to drop
 * @param ignoreIfNotExists if false an exception is thrown when the database does not exist
 * @param cascade if true, tables and functions in the database are dropped first; if false an
 *     exception is thrown for a non-empty database
 * @throws DatabaseNotExistException if the database does not exist
 * @throws DatabaseNotEmptyException if the database is not empty and cascade is false
 * @throws CatalogException in case of any runtime exception
 */
public void dropDatabase(
        String catalogName, String databaseName, boolean ignoreIfNotExists, boolean cascade)
        throws DatabaseNotExistException, DatabaseNotEmptyException, CatalogException {
    final boolean droppingCurrentDatabase =
            Objects.equals(currentCatalogName, catalogName)
                    && Objects.equals(currentDatabaseName, databaseName);
    if (droppingCurrentDatabase) {
        throw new ValidationException("Cannot drop a database which is currently in use.");
    }
    final Catalog catalog = getCatalogOrError(catalogName);
    catalog.dropDatabase(databaseName, ignoreIfNotExists, cascade);
    final DropDatabaseEvent event =
            DropDatabaseEvent.createEvent(
                    CatalogContext.createContext(catalogName, catalog),
                    databaseName,
                    ignoreIfNotExists,
                    cascade);
    catalogModificationListeners.forEach(listener -> listener.onEvent(event));
} | Drop a database.
@param catalogName Name of the catalog for database.
@param databaseName Name of the database to be dropped.
@param ignoreIfNotExists Flag to specify behavior when the database does not exist: if set to
false, throw an exception, if set to true, do nothing.
@param cascade Flag to specify behavior when the database contains table or function: if set
to true, delete all tables and functions in the database and then delete the database, if
set to false, throw an exception.
@throws DatabaseNotExistException if the given database does not exist
@throws DatabaseNotEmptyException if the given database is not empty and isRestrict is true
@throws CatalogException in case of any runtime exception | dropDatabase | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Modifies an existing database in the given catalog and notifies the registered catalog
 * modification listeners afterwards.
 *
 * @param catalogName name of the catalog that owns the database
 * @param databaseName name of the database to alter
 * @param newDatabase the new database definition
 * @param ignoreIfNotExists if false an exception is thrown when the database does not exist
 * @throws DatabaseNotExistException if the database does not exist
 * @throws CatalogException in case of any runtime exception
 */
public void alterDatabase(
        String catalogName,
        String databaseName,
        CatalogDatabase newDatabase,
        boolean ignoreIfNotExists)
        throws DatabaseNotExistException, CatalogException {
    final Catalog catalog = getCatalogOrError(catalogName);
    catalog.alterDatabase(databaseName, newDatabase, ignoreIfNotExists);
    final AlterDatabaseEvent event =
            AlterDatabaseEvent.createEvent(
                    CatalogContext.createContext(catalogName, catalog),
                    databaseName,
                    newDatabase,
                    ignoreIfNotExists);
    catalogModificationListeners.forEach(listener -> listener.onEvent(event));
} | Modify an existing database.
@param catalogName Name of the catalog for database
@param databaseName Name of the database to be dropped
@param newDatabase The new database definition
@param ignoreIfNotExists Flag to specify behavior when the given database does not exist: if
set to false, throw an exception, if set to true, do nothing.
@throws DatabaseNotExistException if the given database does not exist
@throws CatalogException in case of any runtime exception | alterDatabase | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/CatalogManager.java | Apache-2.0 |
/**
 * Converts a {@link ResolvedCatalogMaterializedTable} wrapped in this
 * {@link ContextResolvedTable} into its {@link ResolvedCatalogTable} representation; any
 * other table kind is returned unchanged.
 */
public ContextResolvedTable toCatalogTable() {
    if (resolvedTable.getTableKind() != CatalogBaseTable.TableKind.MATERIALIZED_TABLE) {
        return this;
    }
    final ResolvedCatalogMaterializedTable materializedTable =
            (ResolvedCatalogMaterializedTable) resolvedTable;
    return ContextResolvedTable.permanent(
            objectIdentifier, catalog, materializedTable.toResolvedCatalogTable());
} | Convert the {@link ResolvedCatalogMaterializedTable} in {@link ContextResolvedTable} to
{@link ResolvedCatalogTable }. | toCatalogTable | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/ContextResolvedTable.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/ContextResolvedTable.java | Apache-2.0 |
/**
 * Copies this {@link ContextResolvedTable}, replacing the underlying table's options. Views
 * cannot be enriched with options and are rejected.
 *
 * @param newOptions the replacement options for the underlying table
 * @return a copy with the new options applied
 */
public ContextResolvedTable copy(Map<String, String> newOptions) {
    final CatalogBaseTable.TableKind kind = resolvedTable.getTableKind();
    if (kind == CatalogBaseTable.TableKind.VIEW) {
        throw new ValidationException(
                String.format("View '%s' cannot be enriched with new options.", this));
    }
    if (kind == CatalogBaseTable.TableKind.MATERIALIZED_TABLE) {
        final ResolvedCatalogMaterializedTable materializedTable =
                (ResolvedCatalogMaterializedTable) resolvedTable;
        return ContextResolvedTable.permanent(
                objectIdentifier, catalog, materializedTable.copy(newOptions));
    }
    return new ContextResolvedTable(
            objectIdentifier,
            catalog,
            ((ResolvedCatalogTable) resolvedTable).copy(newOptions),
            false);
} | Copy the {@link ContextResolvedTable}, replacing the underlying {@link CatalogTable} options. | copy | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/ContextResolvedTable.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/ContextResolvedTable.java | Apache-2.0
/**
 * Copies this {@link ContextResolvedTable}, replacing the underlying {@link ResolvedSchema}.
 * Materialized tables cannot be copied with a new schema and are rejected.
 *
 * @param newSchema the replacement schema
 * @return a copy with the new schema applied
 */
public ContextResolvedTable copy(ResolvedSchema newSchema) {
    if (resolvedTable.getTableKind() == CatalogBaseTable.TableKind.MATERIALIZED_TABLE) {
        throw new ValidationException(
                String.format(
                        "Materialized table '%s' cannot be copied with new schema %s.",
                        this, newSchema));
    }
    final CatalogTable originTable = (CatalogTable) resolvedTable.getOrigin();
    return new ContextResolvedTable(
            objectIdentifier, catalog, new ResolvedCatalogTable(originTable, newSchema), false);
} | Copy the {@link ContextResolvedTable}, replacing the underlying {@link ResolvedSchema}. | copy | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/ContextResolvedTable.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/ContextResolvedTable.java | Apache-2.0
/**
 * Creates a lazy supplier of a {@link SerializerConfig}: a copy of the given config when one
 * is provided, otherwise a fresh default config enriched with the given table options.
 *
 * @param classLoader class loader used when configuring a fresh config
 * @param config table options applied to a fresh config
 * @param serializerConfig existing config to copy, or null to build a fresh one
 * @return supplier producing a new config instance on every call
 */
private static Supplier<SerializerConfig> createSerializerConfig(
        ClassLoader classLoader, ReadableConfig config, SerializerConfig serializerConfig) {
    return () -> {
        if (serializerConfig != null) {
            return serializerConfig.copy();
        }
        final SerializerConfig freshConfig = new SerializerConfigImpl();
        freshConfig.configure(config, classLoader);
        return freshConfig;
    };
} | Creates a lazy {@link ExecutionConfig} that contains options for {@link TypeSerializer}s with
information from existing {@link ExecutionConfig} (if available) enriched with table {@link
ReadableConfig}. | createSerializerConfig | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/DataTypeFactoryImpl.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/DataTypeFactoryImpl.java | Apache-2.0 |
/**
 * Opens the catalog store: creates the store directory if it is missing and verifies that the
 * configured path actually is a directory.
 *
 * @throws CatalogException if the path is not a directory or accessing it fails
 */
@Override
public void open() throws CatalogException {
    try {
        final FileSystem fileSystem = catalogStorePath.getFileSystem();
        // Lazily create the store directory on first use.
        if (!fileSystem.exists(catalogStorePath)) {
            fileSystem.mkdirs(catalogStorePath);
        }
        final boolean isDirectory = fileSystem.getFileStatus(catalogStorePath).isDir();
        if (!isDirectory) {
            throw new CatalogException(
                    String.format(
                            "Failed to open catalog store. The given catalog store path %s is not a directory.",
                            catalogStorePath));
        }
    } catch (CatalogException e) {
        // Re-throw our own exceptions unchanged.
        throw e;
    } catch (Exception e) {
        throw new CatalogException(
                String.format(
                        "Failed to open file catalog store directory %s.", catalogStorePath),
                e);
    }
    super.open();
} | Opens the catalog store and initializes the catalog file map.
@throws CatalogException if the catalog store directory does not exist, not a directory, or
if there is an error reading the directory | open | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | Apache-2.0 |
/**
 * Stores the given catalog's configuration as a YAML file in the catalog store directory.
 *
 * <p>Fails if a store file for this catalog already exists; the file is created with
 * {@link WriteMode#NO_OVERWRITE} to guard against races.
 *
 * @param catalogName the name of the catalog
 * @param catalog the catalog descriptor to store
 * @throws CatalogException if the store is not open, the file already exists, or writing fails
 */
@Override
public void storeCatalog(String catalogName, CatalogDescriptor catalog)
        throws CatalogException {
    checkOpenState();
    Path catalogPath = getCatalogPath(catalogName);
    try {
        FileSystem fs = catalogPath.getFileSystem();
        if (fs.exists(catalogPath)) {
            // Fixed message grammar: was "is already exist".
            throw new CatalogException(
                    String.format(
                            "Catalog %s's store file %s already exists.",
                            catalogName, catalogPath));
        }
        try (FSDataOutputStream os = fs.create(catalogPath, WriteMode.NO_OVERWRITE)) {
            YAML_MAPPER.writeValue(os, catalog.getConfiguration().toFileWritableMap());
        }
        LOG.info("Catalog {}'s configuration saved to file {}", catalogName, catalogPath);
    } catch (CatalogException e) {
        throw e;
    } catch (Exception e) {
        throw new CatalogException(
                String.format(
                        "Failed to store catalog %s's configuration to file %s.",
                        catalogName, catalogPath),
                e);
    }
} | Stores the specified catalog in the catalog store.
@param catalogName the name of the catalog
@param catalog the catalog descriptor to store
@throws CatalogException if the catalog store is not open or if there is an error storing the
catalog | storeCatalog | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | Apache-2.0 |
/**
 * Removes the store file of the given catalog.
 *
 * @param catalogName the name of the catalog to remove
 * @param ignoreIfNotExists if true a missing store file is tolerated; if false it raises a
 *     {@link CatalogException}
 * @throws CatalogException if the store is not open or deleting the file fails
 */
@Override
public void removeCatalog(String catalogName, boolean ignoreIfNotExists)
        throws CatalogException {
    checkOpenState();
    final Path catalogPath = getCatalogPath(catalogName);
    try {
        final FileSystem fileSystem = catalogPath.getFileSystem();
        final boolean fileExists = fileSystem.exists(catalogPath);
        if (!fileExists && !ignoreIfNotExists) {
            throw new CatalogException(
                    String.format(
                            "Catalog %s's store file %s does not exist.",
                            catalogName, catalogPath));
        }
        if (fileExists) {
            fileSystem.delete(catalogPath, false);
        }
    } catch (CatalogException e) {
        throw e;
    } catch (Exception e) {
        throw new CatalogException(
                String.format("Failed to remove catalog %s's store file.", catalogName), e);
    }
} | Removes the specified catalog from the catalog store.
@param catalogName the name of the catalog to remove
@param ignoreIfNotExists whether to ignore if the catalog does not exist in the catalog store
@throws CatalogException if the catalog store is not open or if there is an error removing
the catalog | removeCatalog | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | Apache-2.0 |
/**
 * Loads the catalog descriptor for the given catalog from its YAML store file, if present.
 *
 * @param catalogName the name of the catalog to retrieve
 * @return the descriptor, or {@link Optional#empty()} when no store file exists
 * @throws CatalogException if the store is not open or reading/parsing the file fails
 */
@Override
public Optional<CatalogDescriptor> getCatalog(String catalogName) throws CatalogException {
    checkOpenState();
    final Path catalogPath = getCatalogPath(catalogName);
    try {
        final FileSystem fileSystem = catalogPath.getFileSystem();
        if (!fileSystem.exists(catalogPath)) {
            // A missing store file simply means the catalog is unknown.
            return Optional.empty();
        }
        try (FSDataInputStream inputStream = fileSystem.open(catalogPath)) {
            final Map<String, String> properties =
                    YAML_MAPPER.readValue(
                            inputStream, new TypeReference<Map<String, String>>() {});
            return Optional.of(
                    CatalogDescriptor.of(catalogName, Configuration.fromMap(properties)));
        }
    } catch (Exception e) {
        throw new CatalogException(
                String.format(
                        "Failed to load catalog %s's configuration from file.", catalogName),
                e);
    }
} | Returns the catalog descriptor for the specified catalog, if it exists in the catalog store.
@param catalogName the name of the catalog to retrieve
@return an {@link Optional} containing the catalog descriptor, or an empty {@link Optional}
if the catalog does not exist in the catalog store
@throws CatalogException if the catalog store is not open or if there is an error retrieving
the catalog | getCatalog | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | Apache-2.0 |
/**
 * Lists the names of all catalogs persisted in the store directory, derived from the store
 * file names with the trailing file extension removed.
 *
 * @return the set of catalog names found in the store directory
 * @throws CatalogException if the store is not open or listing the directory fails
 */
@Override
public Set<String> listCatalogs() throws CatalogException {
    checkOpenState();
    try {
        FileStatus[] statusArr = catalogStorePath.getFileSystem().listStatus(catalogStorePath);
        return Arrays.stream(statusArr)
                .filter(status -> !status.isDir())
                .map(FileStatus::getPath)
                .map(Path::getName)
                // Strip only the trailing extension. The previous String#replace also
                // removed the extension substring from the middle of a catalog name.
                .map(FileCatalogStore::stripFileExtension)
                .collect(Collectors.toSet());
    } catch (Exception e) {
        throw new CatalogException(
                String.format(
                        "Failed to list file catalog store directory %s.", catalogStorePath),
                e);
    }
}

/** Removes the trailing {@code FILE_EXTENSION} from a store file name, if present. */
private static String stripFileExtension(String fileName) {
    return fileName.endsWith(FILE_EXTENSION)
            ? fileName.substring(0, fileName.length() - FILE_EXTENSION.length())
            : fileName;
} | Returns a set of all catalog names in the catalog store.
@return a set of all catalog names in the catalog store
@throws CatalogException if the catalog store is not open or if there is an error retrieving
the list of catalog names | listCatalogs | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FileCatalogStore.java | Apache-2.0 |
/**
 * Registers a temporary system function implemented by the given class and backed by the
 * given resource URIs (e.g. jars the implementation class is loaded from). Registration does
 * not ignore an existing function of the same name.
 *
 * @param name name under which to register the function
 * @param className fully qualified name of the Java implementation class
 * @param resourceUris resources required to load the implementation
 */
public void registerTemporarySystemFunction(
        String name, String className, List<ResourceUri> resourceUris) {
    registerTemporarySystemFunction(
            name,
            new CatalogFunctionImpl(className, FunctionLanguage.JAVA, resourceUris),
            false);
} | Registers a temporary system function from resource uris. | registerTemporarySystemFunction | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | Apache-2.0
/**
 * Drops a temporary catalog function.
 *
 * @param unresolvedIdentifier partially qualified identifier; qualified against the current
 *     catalog/database before the lookup
 * @param ignoreIfNotExist if true no exception is thrown when the function does not exist
 * @return true if a function was actually dropped
 */
public boolean dropTemporaryCatalogFunction(
        UnresolvedIdentifier unresolvedIdentifier, boolean ignoreIfNotExist) {
    final ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    return dropTempCatalogFunction(identifier, ignoreIfNotExist) != null;
} | Drops a temporary catalog function. Returns true if a function was dropped. | dropTemporaryCatalogFunction | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | Apache-2.0
/**
 * Drops a permanent catalog function, also taking temporary catalog functions into account: a
 * shadowing temporary function must be dropped first and causes a validation error here.
 *
 * @param unresolvedIdentifier partially qualified identifier; qualified against the current
 *     catalog/database before the lookup
 * @param ignoreIfNotExist if true a missing function returns false instead of failing
 * @return true if a function was actually dropped
 */
public boolean dropCatalogFunction(
        UnresolvedIdentifier unresolvedIdentifier, boolean ignoreIfNotExist) {
    final ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
    // Function names are matched case-insensitively via the normalized identifier.
    final ObjectIdentifier normalizedIdentifier =
            FunctionIdentifier.normalizeObjectIdentifier(identifier);
    final Catalog catalog =
            catalogManager
                    .getCatalog(normalizedIdentifier.getCatalogName())
                    .orElseThrow(IllegalStateException::new);
    final ObjectPath path = identifier.toObjectPath();
    // we force users to deal with temporary catalog functions first
    if (tempCatalogFunctions.containsKey(normalizedIdentifier)) {
        throw new ValidationException(
                String.format(
                        "Could not drop catalog function. A temporary function '%s' does already exist. "
                                + "Please drop the temporary function first.",
                        identifier.asSummaryString()));
    }
    if (!catalog.functionExists(path)) {
        if (ignoreIfNotExist) {
            return false;
        }
        throw new ValidationException(
                String.format(
                        "Could not drop catalog function. A function '%s' doesn't exist.",
                        identifier.asSummaryString()));
    }
    try {
        catalog.dropFunction(path, ignoreIfNotExist);
    } catch (Throwable t) {
        throw new TableException(
                String.format(
                        "Could not drop catalog function '%s'.", identifier.asSummaryString()),
                t);
    }
    return true;
} | Drops a catalog function by also considering temporary catalog functions. Returns true if a
function was dropped. | dropCatalogFunction | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | Apache-2.0 |
/**
 * Returns the names of all user-defined functions — temporary system functions, temporary
 * catalog functions, and catalog functions — in the current catalog and current database.
 */
public String[] getUserDefinedFunctions() {
    final String currentCatalog = catalogManager.getCurrentCatalog();
    final String currentDatabase = catalogManager.getCurrentDatabase();
    return getUserDefinedFunctions(currentCatalog, currentDatabase).stream()
            .map(FunctionIdentifier::getFunctionName)
            .toArray(String[]::new);
} | Get names of all user defined functions, including temp system functions, temp catalog
functions and catalog functions in the current catalog and current database. | getUserDefinedFunctions | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | Apache-2.0 |
/**
 * Returns the names of all functions visible in the current catalog and current database:
 * temporary system functions, module-provided system functions, temporary catalog functions,
 * and catalog functions.
 */
public String[] getFunctions() {
    return getFunctions(
            catalogManager.getCurrentCatalog(), catalogManager.getCurrentDatabase());
} | Get names of all functions, including temp system functions, system functions, temp catalog
functions and catalog functions in the current catalog and current database. | getFunctions | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | Apache-2.0 |
/**
 * Returns the names of all functions for the given catalog and database: temporary system
 * functions, module-provided system functions, temporary catalog functions and catalog
 * functions.
 */
public String[] getFunctions(String catalogName, String databaseName) {
    final Set<String> names =
            getUserDefinedFunctions(catalogName, databaseName).stream()
                    .map(FunctionIdentifier::getFunctionName)
                    .collect(Collectors.toSet());
    // System functions exposed by the loaded modules complete the result.
    names.addAll(moduleManager.listFunctions());
    return names.toArray(new String[0]);
} | Get names of all functions, including temp system functions, system functions, temp catalog
functions and catalog functions with specific catalog and database. | getFunctions | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | Apache-2.0 |
/**
 * Checks whether a temporary catalog function with the given identifier is registered.
 *
 * @param functionIdentifier the object identifier of the function
 * @return whether the temporary catalog function exists in this function catalog
 */
public boolean hasTemporaryCatalogFunction(ObjectIdentifier functionIdentifier) {
    // Normalize first (presumably case normalization — see
    // FunctionIdentifier.normalizeObjectIdentifier) so the lookup matches the key used at
    // registration time.
    ObjectIdentifier normalizedIdentifier =
            FunctionIdentifier.normalizeObjectIdentifier(functionIdentifier);
    return tempCatalogFunctions.containsKey(normalizedIdentifier);
} | Check whether a temporary catalog function is already registered.
@param functionIdentifier the object identifier of function
@return whether the temporary catalog function exists in the function catalog | hasTemporaryCatalogFunction | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | Apache-2.0 |
/**
 * Checks whether a temporary system function with the given name is registered.
 *
 * @param functionName the name of the function
 * @return whether the temporary system function exists in this function catalog
 */
public boolean hasTemporarySystemFunction(String functionName) {
    // No normalization here — lookups use the name exactly as registered.
    return tempSystemFunctions.containsKey(functionName);
} | Check whether a temporary system function is already registered.
@param functionName the name of the function
@return whether the temporary system function exists in the function catalog | hasTemporarySystemFunction | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | Apache-2.0 |
/**
 * Drops a temporary catalog function.
 *
 * @param identifier identifier of the function
 * @param ignoreIfNotExist if false, a {@link ValidationException} is thrown when the
 *     function does not exist; if true, a missing function is silently ignored
 * @return the removed catalog function, which is null if the function doesn't exist and
 *     ignoreIfNotExist is true
 */
public CatalogFunction dropTempCatalogFunction(
        ObjectIdentifier identifier, boolean ignoreIfNotExist) {
    // Normalize so the lookup matches the key used at registration time.
    ObjectIdentifier normalizedName = FunctionIdentifier.normalizeObjectIdentifier(identifier);
    CatalogFunction fd = tempCatalogFunctions.get(normalizedName);
    if (fd != null) {
        // Notify an attached temporary-operation listener BEFORE removing the entry.
        catalogManager
                .getTemporaryOperationListener(normalizedName)
                .ifPresent(l -> l.onDropTemporaryFunction(normalizedName.toObjectPath()));
        tempCatalogFunctions.remove(normalizedName);
        // Release any JAR resources that were registered for this function.
        unregisterFunctionJarResources(fd);
    } else if (!ignoreIfNotExist) {
        throw new ValidationException(
                String.format("Temporary catalog function %s doesn't exist", identifier));
    }
    return fd;
} | Drop a temporary catalog function.
@param identifier identifier of the function
@param ignoreIfNotExist Flag to specify behavior when the function does not exist: if set to
false, throw an exception, if set to true, do nothing.
@return the removed catalog function, which is null if function doesn't exist and
ignoreIfNotExist is true. | dropTempCatalogFunction | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java | Apache-2.0 |
/**
 * Helper method for looking up a built-in function by its definition's name.
 *
 * @param definition the built-in function definition to look up
 * @return the resolved function
 * @throws TableException if the function cannot be found in any catalog
 */
default ContextResolvedFunction lookupBuiltInFunction(BuiltInFunctionDefinition definition) {
    return lookupFunction(UnresolvedIdentifier.of(definition.getName()))
            .orElseThrow(
                    () ->
                            new TableException(
                                    String.format(
                                            "Required built-in function [%s] could not be found in any catalog.",
                                            definition.getName())));
} | Helper method for looking up a built-in function. | lookupBuiltInFunction | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionLookup.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionLookup.java | Apache-2.0 |
/**
 * Converts the given input schema and an optional declared {@link Schema} into the final
 * {@link ProducingResult}.
 *
 * <p>This method serves three types of use cases:
 *
 * <ul>
 *   <li>1. Derive physical columns from the input schema (no declared schema).
 *   <li>2. Derive physical columns from the input schema but enrich with declared metadata
 *       rowtime columns and/or a primary key.
 *   <li>3. Entirely use the declared schema (it contains physical columns).
 * </ul>
 */
public static ProducingResult createProducingResult(
        ResolvedSchema inputSchema, @Nullable Schema declaredSchema) {
    // no schema has been declared by the user,
    // the schema will be entirely derived from the input
    if (declaredSchema == null) {
        // go through data type to erase time attributes
        final DataType physicalDataType = inputSchema.toSourceRowDataType();
        final Schema schema = Schema.newBuilder().fromRowDataType(physicalDataType).build();
        return new ProducingResult(null, schema, null);
    }
    final List<UnresolvedColumn> declaredColumns = declaredSchema.getColumns();
    // the declared schema does not contain physical information,
    // thus, it only replaces physical columns with metadata rowtime or adds a primary key
    if (declaredColumns.stream().noneMatch(SchemaTranslator::isPhysical)) {
        // go through data type to erase time attributes
        final DataType sourceDataType = inputSchema.toSourceRowDataType();
        final DataType physicalDataType =
                patchDataTypeWithoutMetadataRowtime(sourceDataType, declaredColumns);
        final Schema.Builder builder = Schema.newBuilder();
        builder.fromRowDataType(physicalDataType);
        // Declared (non-physical) columns and constraints are merged on top.
        builder.fromSchema(declaredSchema);
        return new ProducingResult(null, builder.build(), null);
    }
    // The declared schema contains physical columns -> it is used entirely.
    return new ProducingResult(null, declaredSchema, null);
} | Converts the given {@link DataType} into the final {@link ProducingResult}.
<p>This method serves three types of use cases:
<ul>
<li>1. Derive physical columns from the input schema.
<li>2. Derive physical columns from the input schema but enrich with metadata column and
primary key.
<li>3. Entirely use declared schema.
</ul> | createProducingResult | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/SchemaTranslator.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/SchemaTranslator.java | Apache-2.0 |
/**
 * Converts the given target {@link AbstractDataType} into the final {@link ProducingResult}.
 *
 * <p>This method serves one type of use case: derive physical columns from the target data
 * type. If the target fields are a (possibly case-insensitive) permutation of the input
 * fields, a projection list is produced so that POJO fields can be reordered by name;
 * otherwise the projection is left null and sink validation fails later if incompatible.
 */
public static ProducingResult createProducingResult(
        DataTypeFactory dataTypeFactory,
        ResolvedSchema inputSchema,
        AbstractDataType<?> targetDataType) {
    final List<String> inputFieldNames = inputSchema.getColumnNames();
    final List<String> inputFieldNamesNormalized =
            inputFieldNames.stream()
                    .map(n -> n.toLowerCase(Locale.ROOT))
                    .collect(Collectors.toList());
    final DataType resolvedDataType = dataTypeFactory.createDataType(targetDataType);
    final List<String> targetFieldNames = flattenToNames(resolvedDataType);
    final List<String> targetFieldNamesNormalized =
            targetFieldNames.stream()
                    .map(n -> n.toLowerCase(Locale.ROOT))
                    .collect(Collectors.toList());
    final List<DataType> targetFieldDataTypes = flattenToDataTypes(resolvedDataType);
    // help in reorder fields for POJOs if all field names are present but out of order,
    // otherwise let the sink validation fail later
    List<String> projections = null;
    if (targetFieldNames.size() == inputFieldNames.size()) {
        // reordering by name (case-sensitive)
        if (targetFieldNames.containsAll(inputFieldNames)) {
            projections = targetFieldNames;
        }
        // reordering by name (case-insensitive) but fields must be unique
        // (otherwise a lower-cased name could map to more than one input field)
        else if (targetFieldNamesNormalized.containsAll(inputFieldNamesNormalized)
                && targetFieldNamesNormalized.stream().distinct().count()
                        == targetFieldNames.size()
                && inputFieldNamesNormalized.stream().distinct().count()
                        == inputFieldNames.size()) {
            projections =
                    targetFieldNamesNormalized.stream()
                            .map(
                                    targetName -> {
                                        // Map each target name back to the original-cased
                                        // input field name.
                                        final int inputFieldPos =
                                                inputFieldNamesNormalized.indexOf(targetName);
                                        return inputFieldNames.get(inputFieldPos);
                                    })
                            .collect(Collectors.toList());
        }
    }
    final Schema schema =
            Schema.newBuilder().fromFields(targetFieldNames, targetFieldDataTypes).build();
    return new ProducingResult(projections, schema, resolvedDataType);
} | Converts the given {@link DataType} into the final {@link ProducingResult}.
<p>This method serves one type of use case:
<ul>
<li>1. Derive physical columns from the input data type.
</ul> | createProducingResult | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/SchemaTranslator.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/SchemaTranslator.java | Apache-2.0 |
/**
 * Converts the given {@link TypeInformation} and an optional declared {@link Schema}
 * (possibly incomplete) into the final {@link ConsumingResult}.
 *
 * <p>This method serves three types of use cases:
 *
 * <ul>
 *   <li>1. Derive physical columns from the input type information.
 *   <li>2. Derive physical columns but merge them with declared computed columns and other
 *       schema information.
 *   <li>3. Derive and enrich physical columns and merge other schema information.
 * </ul>
 */
public static ConsumingResult createConsumingResult(
        DataTypeFactory dataTypeFactory,
        TypeInformation<?> inputTypeInfo,
        @Nullable Schema declaredSchema) {
    // Convert the TypeInformation to a DataType, then delegate to the DataType-based
    // overload (the trailing flag presumably enables physical-column derivation — confirm
    // against the overload's signature).
    final DataType inputDataType =
            TypeInfoDataTypeConverter.toDataType(dataTypeFactory, inputTypeInfo);
    return createConsumingResult(dataTypeFactory, inputDataType, declaredSchema, true);
} | Converts the given {@link TypeInformation} and an optional declared {@link Schema} (possibly
incomplete) into the final {@link ConsumingResult}.
<p>This method serves three types of use cases:
<ul>
<li>1. Derive physical columns from the input type information.
<li>2. Derive physical columns but merge them with declared computed columns and other
schema information.
<li>3. Derive and enrich physical columns and merge other schema information.
</ul> | createConsumingResult | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/SchemaTranslator.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/SchemaTranslator.java | Apache-2.0 |
/**
 * Checks if the expression is a call — resolved or unresolved — to a function of the given
 * kind.
 *
 * @param expression expression to check
 * @param kind expected kind of the called function
 * @return true if the expression is a function call of the given kind, false otherwise
 */
public static boolean isFunctionOfKind(Expression expression, FunctionKind kind) {
    final FunctionDefinition definition;
    if (expression instanceof UnresolvedCallExpression) {
        definition = ((UnresolvedCallExpression) expression).getFunctionDefinition();
    } else if (expression instanceof CallExpression) {
        definition = ((CallExpression) expression).getFunctionDefinition();
    } else {
        return false;
    }
    return definition.getKind() == kind;
} | Checks if the expression is a function call of given type.
@param expression expression to check
@param kind expected type of function
@return true if the expression is function call of given type, false otherwise | isFunctionOfKind | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/ApiExpressionUtils.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/ApiExpressionUtils.java | Apache-2.0 |
/**
 * Checks if the given expression is a call — resolved or unresolved — to the given built-in
 * function.
 *
 * @param expression expression to check
 * @param functionDefinition expected function definition
 * @return true if the given expression is a call to the given function
 */
public static boolean isFunction(
        Expression expression, BuiltInFunctionDefinition functionDefinition) {
    final FunctionDefinition actualDefinition;
    if (expression instanceof UnresolvedCallExpression) {
        actualDefinition = ((UnresolvedCallExpression) expression).getFunctionDefinition();
    } else if (expression instanceof CallExpression) {
        actualDefinition = ((CallExpression) expression).getFunctionDefinition();
    } else {
        return false;
    }
    // Reference equality is intended: built-in definitions are singletons.
    return actualDefinition == functionDefinition;
} | Checks if the given expression is a given builtin function.
@param expression expression to check
@param functionDefinition expected function definition
@return true if the given expression is a given function call | isFunction | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/ApiExpressionUtils.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/ApiExpressionUtils.java | Apache-2.0 |
/**
 * Dispatches to the typed {@code visit} overload for all {@link Expression}s that might be
 * created during API translation.
 *
 * <p>NOTE(review): the {@code ResolvedExpression} branch deliberately comes after the more
 * specific API expression checks — presumably because some of those types also implement
 * {@code ResolvedExpression}; confirm before reordering the chain.
 */
public final R visit(Expression other) {
    if (other instanceof UnresolvedReferenceExpression) {
        return visit((UnresolvedReferenceExpression) other);
    } else if (other instanceof TableReferenceExpression) {
        return visit((TableReferenceExpression) other);
    } else if (other instanceof LocalReferenceExpression) {
        return visit((LocalReferenceExpression) other);
    } else if (other instanceof LookupCallExpression) {
        return visit((LookupCallExpression) other);
    } else if (other instanceof UnresolvedCallExpression) {
        return visit((UnresolvedCallExpression) other);
    } else if (other instanceof SqlCallExpression) {
        return visit((SqlCallExpression) other);
    } else if (other instanceof ResolvedExpression) {
        return visit((ResolvedExpression) other);
    }
    // Expressions not produced by the API fall through to a dedicated hook.
    return visitNonApiExpression(other);
} | A visitor for all {@link Expression}s that might be created during API translation. | visit | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/ApiExpressionVisitor.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/ApiExpressionVisitor.java | Apache-2.0 |
/**
 * Dispatches to the typed {@code visit} overload for {@link ResolvedExpression}s. All
 * expressions accepted here are the output of the API and might be passed to a planner.
 *
 * @throws TableException if the expression is still unresolved at this stage
 */
public final R visit(Expression other) {
    // More specific reference types are checked before the generic ResolvedExpression
    // branch (they presumably implement ResolvedExpression themselves — confirm).
    if (other instanceof TableReferenceExpression) {
        return visit((TableReferenceExpression) other);
    } else if (other instanceof LocalReferenceExpression) {
        return visit((LocalReferenceExpression) other);
    } else if (other instanceof ResolvedExpression) {
        return visit((ResolvedExpression) other);
    }
    throw new TableException("Unexpected unresolved expression received: " + other);
} | A visitor for all {@link ResolvedExpression}s.
<p>All expressions of this visitor are the output of the API and might be passed to a planner. | visit | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/ResolvedExpressionVisitor.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/ResolvedExpressionVisitor.java | Apache-2.0 |
/**
 * List of rules for (possibly) expanding the list of unresolved expressions.
 *
 * <p>NOTE(review): rules are applied in list order; do not reorder without checking rule
 * dependencies (e.g. lookup calls presumably need to be resolved before column functions
 * can be expanded — confirm against the individual rules).
 */
public static List<ResolverRule> getExpandingResolverRules() {
    return Arrays.asList(
            ResolverRules.UNWRAP_API_EXPRESSION,
            ResolverRules.LOOKUP_CALL_BY_NAME,
            ResolverRules.FLATTEN_STAR_REFERENCE,
            ResolverRules.EXPAND_COLUMN_FUNCTIONS);
} | List of rules for (possibly) expanding the list of unresolved expressions. | getExpandingResolverRules | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | Apache-2.0 |
/**
 * List of all rules that will be applied during full expression resolution. It is a
 * superset of {@link #getExpandingResolverRules()}.
 *
 * <p>NOTE(review): rules are applied in list order; the expanding rules run first, then
 * field resolution and call resolution — do not reorder without checking dependencies.
 */
public static List<ResolverRule> getAllResolverRules() {
    return Arrays.asList(
            ResolverRules.UNWRAP_API_EXPRESSION,
            ResolverRules.LOOKUP_CALL_BY_NAME,
            ResolverRules.FLATTEN_STAR_REFERENCE,
            ResolverRules.EXPAND_COLUMN_FUNCTIONS,
            ResolverRules.OVER_WINDOWS,
            ResolverRules.FIELD_RESOLVE,
            ResolverRules.QUALIFY_BUILT_IN_FUNCTIONS,
            ResolverRules.RESOLVE_SQL_CALL,
            ResolverRules.RESOLVE_CALL_BY_ARGUMENTS);
} | List of rules that will be applied during expression resolution. | getAllResolverRules | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | Apache-2.0 |
/**
 * Creates a builder for {@link ExpressionResolver}. One can add additional properties to
 * the resolver like e.g. {@code GroupWindow} or {@code OverWindow}, as well as additional
 * {@code ResolverRule}s.
 *
 * @param tableConfig general configuration
 * @param userClassLoader classloader for user code
 * @param tableCatalog a way to look up a table reference by name
 * @param functionLookup a way to look up a call by name
 * @param typeFactory a way to look up and create data types
 * @param sqlExpressionResolver a way to resolve embedded SQL expressions
 * @param inputs inputs to use for field resolution
 * @return builder for the resolver
 */
public static ExpressionResolverBuilder resolverFor(
        TableConfig tableConfig,
        ClassLoader userClassLoader,
        TableReferenceLookup tableCatalog,
        FunctionLookup functionLookup,
        DataTypeFactory typeFactory,
        SqlExpressionResolver sqlExpressionResolver,
        QueryOperation... inputs) {
    return new ExpressionResolverBuilder(
            inputs,
            tableConfig,
            userClassLoader,
            tableCatalog,
            functionLookup,
            typeFactory,
            sqlExpressionResolver);
} | Creates a builder for {@link ExpressionResolver}. One can add additional properties to the
resolver like e.g. {@link GroupWindow} or {@link OverWindow}. You can also add additional
{@link ResolverRule}.
@param tableConfig general configuration
@param tableCatalog a way to lookup a table reference by name
@param functionLookup a way to lookup call by name
@param typeFactory a way to lookup and create data types
@param inputs inputs to use for field resolution
@return builder for resolver | resolverFor | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | Apache-2.0 |
/**
 * Resolves the given expressions with the configured set of rules. All expressions of an
 * operation should be given at once as some rules might assume the order of expressions.
 *
 * <p>After this method is applied, the returned expressions are fully resolved and ready
 * to be converted to planner-specific expressions.
 *
 * @param expressions list of expressions to resolve
 * @return resolved list of expressions
 */
public List<ResolvedExpression> resolve(List<Expression> expressions) {
    // Run the full rule chain; rules may expand or replace expressions.
    final List<Expression> rewritten =
            concatenateRules(getAllResolverRules()).apply(expressions);
    // Verify every expression is fully resolved before handing it to the planner.
    return rewritten.stream()
            .map(expression -> expression.accept(VERIFY_RESOLUTION_VISITOR))
            .collect(Collectors.toList());
} | Resolves given expressions with configured set of rules. All expressions of an operation
should be given at once as some rules might assume the order of expressions.
<p>After this method is applied the returned expressions should be ready to be converted to
planner specific expressions.
@param expressions list of expressions to resolve.
@return resolved list of expression | resolve | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | Apache-2.0 |
/**
 * Resolves the given expressions with the expanding subset of rules only. All expressions
 * of an operation should be given at once as some rules might assume the order of
 * expressions.
 *
 * <p>The returned expressions might still contain unresolved parts that can be used for
 * further API transformations.
 *
 * @param expressions list of expressions to resolve
 * @return possibly expanded list of expressions
 */
public List<Expression> resolveExpanding(List<Expression> expressions) {
    return concatenateRules(getExpandingResolverRules()).apply(expressions);
} | Resolves given expressions with configured set of rules. All expressions of an operation
should be given at once as some rules might assume the order of expressions.
<p>After this method is applied the returned expressions might contain unresolved expression
that can be used for further API transformations.
@param expressions list of expressions to resolve.
@return resolved list of expression | resolveExpanding | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/ExpressionResolver.java | Apache-2.0 |
/**
 * Converts this column entry into a {@link FieldReferenceExpression} carrying the column's
 * name, data type, owning input index, and position within that input.
 */
FieldReferenceExpression toExpr() {
    return new FieldReferenceExpression(
            column.getName(), column.getDataType(), inputIdx, columnIdx);
} | Provides a way to look up field reference by the name of the field. | toExpr | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/lookups/FieldReferenceLookup.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/lookups/FieldReferenceLookup.java | Apache-2.0 |
/**
 * Gives all fields of the underlying inputs, in the order of those inputs and the order of
 * fields within each input.
 *
 * @return concatenated list of fields of all inputs
 */
public List<FieldReferenceExpression> getAllInputFields() {
    // An empty qualifier list presumably selects fields of every input — verify against
    // getInputFields.
    return getInputFields(Collections.emptyList());
} | Gives all fields of underlying inputs in order of those inputs and order of fields within
input.
@return concatenated list of fields of all inputs. | getAllInputFields | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/lookups/FieldReferenceLookup.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/lookups/FieldReferenceLookup.java | Apache-2.0 |
/**
 * Expands column expressions (e.g. withColumns/withoutColumns arguments) in the given
 * argument list into concrete field references.
 *
 * @param args arguments that may contain column expressions to expand
 * @param isReverseProjection if true, the result is the complement: all input fields that
 *     are NOT matched by the expanded expressions (i.e. withoutColumns semantics)
 * @return the expanded argument list
 */
private List<Expression> resolveArgsOfColumns(
        List<Expression> args, boolean isReverseProjection) {
    List<Expression> finalResult = new LinkedList<>();
    // Expand every argument into the field references it denotes.
    List<UnresolvedReferenceExpression> result =
            args.stream()
                    .flatMap(e -> e.accept(this.columnsExpressionExpander).stream())
                    .collect(Collectors.toList());
    if (isReverseProjection) {
        // Keep only those input fields that were NOT selected by the expanded expressions.
        for (UnresolvedReferenceExpression field : inputFieldReferences) {
            if (indexOfName(result, field.getName()) == -1) {
                finalResult.add(field);
            }
        }
    } else {
        finalResult.addAll(result);
    }
    return finalResult;
} | Expand the columns expression in the input Expression List. | resolveArgsOfColumns | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ExpandColumnFunctionsRule.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ExpandColumnFunctionsRule.java | Apache-2.0 |
/** Returns whether the expression is a column index range expression, e.g. withColumns(1 ~ 2). */
private boolean isIndexRangeCall(UnresolvedCallExpression expression) {
    if (expression.getFunctionDefinition() != RANGE_TO) {
        return false;
    }
    final List<Expression> children = expression.getChildren();
    // Both range endpoints must be value literals for an index range.
    return children.get(0) instanceof ValueLiteralExpression
            && children.get(1) instanceof ValueLiteralExpression;
} | Whether the expression is a column index range expression, e.g. withColumns(1 ~ 2). | isIndexRangeCall | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ExpandColumnFunctionsRule.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ExpandColumnFunctionsRule.java | Apache-2.0 |
/** Returns whether the expression is a column name range expression, e.g. withColumns(a ~ b). */
private boolean isNameRangeCall(UnresolvedCallExpression expression) {
    if (expression.getFunctionDefinition() != RANGE_TO) {
        return false;
    }
    final List<Expression> children = expression.getChildren();
    // Both range endpoints must be unresolved field references for a name range.
    return children.get(0) instanceof UnresolvedReferenceExpression
            && children.get(1) instanceof UnresolvedReferenceExpression;
} | Whether the expression is a column name range expression, e.g. withColumns(a ~ b). | isNameRangeCall | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ExpandColumnFunctionsRule.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ExpandColumnFunctionsRule.java | Apache-2.0 |
/**
 * Finds the index of the field reference with the given name in the list.
 *
 * @return the index of the first match, or -1 if no field has that name
 */
private static int indexOfName(
        List<UnresolvedReferenceExpression> inputFieldReferences, String targetName) {
    for (int pos = 0; pos < inputFieldReferences.size(); pos++) {
        if (inputFieldReferences.get(pos).getName().equals(targetName)) {
            return pos;
        }
    }
    return -1;
} | Find the index of targetName in the list. Return -1 if not found. | indexOfName | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ExpandColumnFunctionsRule.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ExpandColumnFunctionsRule.java | Apache-2.0 |
/**
 * Casts the arguments to the expected argument types of the type inference {@link Result}.
 * A cast is only inserted when the argument's logical type does not already satisfy the
 * expected type (i.e. when the cast cannot be avoided).
 */
private List<ResolvedExpression> castArguments(
        Result inferenceResult, List<ResolvedExpression> resolvedArgs) {
    return IntStream.range(0, resolvedArgs.size())
            .mapToObj(
                    pos -> {
                        final ResolvedExpression argument = resolvedArgs.get(pos);
                        final DataType argumentType = argument.getOutputDataType();
                        final DataType expectedType =
                                inferenceResult.getExpectedArgumentTypes().get(pos);
                        // Insert an explicit cast only when it cannot be avoided.
                        if (!supportsAvoidingCast(
                                argumentType.getLogicalType(),
                                expectedType.getLogicalType())) {
                            return resolutionContext
                                    .postResolutionFactory()
                                    .cast(argument, expectedType);
                        }
                        return argument;
                    })
            .collect(Collectors.toList());
} | Casts the arguments according to the properties of the {@link Result}. | castArguments | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ResolveCallByArgumentsRule.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/expressions/resolver/rules/ResolveCallByArgumentsRule.java | Apache-2.0 |
/**
 * Discovers a planner factory via the factory discovery mechanism and creates a planner
 * instance from the given session components.
 */
public static Planner createPlanner(
        Executor executor,
        TableConfig tableConfig,
        ClassLoader userClassLoader,
        ModuleManager moduleManager,
        CatalogManager catalogManager,
        FunctionCatalog functionCatalog) {
    // The default identifier selects the planner shipped with the distribution.
    final PlannerFactory factory =
            FactoryUtil.discoverFactory(
                    Thread.currentThread().getContextClassLoader(),
                    PlannerFactory.class,
                    PlannerFactory.DEFAULT_IDENTIFIER);
    final Context plannerContext =
            new DefaultPlannerContext(
                    executor,
                    tableConfig,
                    userClassLoader,
                    moduleManager,
                    catalogManager,
                    functionCatalog);
    return factory.create(plannerContext);
} | Discovers a planner factory and creates a planner instance. | createPlanner | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/PlannerFactoryUtil.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/PlannerFactoryUtil.java | Apache-2.0 |
/**
 * Returns a legacy {@link TableSource} matching the given factory context.
 *
 * @throws TableException if no matching factory is found or source creation fails
 */
@SuppressWarnings("unchecked")
public static <T> TableSource<T> findAndCreateTableSource(TableSourceFactory.Context context) {
    try {
        // Discover a TableSourceFactory by the table's properties and create the source.
        return TableFactoryService.find(
                        TableSourceFactory.class,
                        ((ResolvedCatalogTable) context.getTable())
                                .toProperties(DefaultSqlFactory.INSTANCE))
                .createTableSource(context);
    } catch (Throwable t) {
        // Wrap everything (including Errors) so callers get a uniform TableException.
        throw new TableException("findAndCreateTableSource failed.", t);
    }
} | Returns a table source matching the descriptor. | findAndCreateTableSource | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | Apache-2.0 |
/**
 * Creates a {@link TableSource} from a {@link CatalogTable}.
 *
 * <p>It considers {@code Catalog#getFactory()} if provided.
 *
 * @param objectIdentifier identifier of the table in the catalog
 * @param catalogTable table definition to create the source from
 * @param configuration configuration made available to the factory
 * @param isTemporary whether the table is a temporary table
 */
@SuppressWarnings("unchecked")
public static <T> TableSource<T> findAndCreateTableSource(
        ObjectIdentifier objectIdentifier,
        CatalogTable catalogTable,
        ReadableConfig configuration,
        boolean isTemporary) {
    // Bundle the arguments into a factory context and delegate to the context overload.
    TableSourceFactory.Context context =
            new TableSourceFactoryContextImpl(
                    objectIdentifier, catalogTable, configuration, isTemporary);
    return findAndCreateTableSource(context);
} | Creates a {@link TableSource} from a {@link CatalogTable}.
<p>It considers {@link Catalog#getFactory()} if provided. | findAndCreateTableSource | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | Apache-2.0 |
/**
 * Returns a legacy {@link TableSink} matching the given factory context.
 *
 * @throws TableException if no matching factory is found or sink creation fails
 */
@SuppressWarnings("unchecked")
public static <T> TableSink<T> findAndCreateTableSink(TableSinkFactory.Context context) {
    try {
        // Discover a TableSinkFactory by the table's properties and create the sink.
        return TableFactoryService.find(
                        TableSinkFactory.class,
                        ((ResolvedCatalogTable) context.getTable())
                                .toProperties(DefaultSqlFactory.INSTANCE))
                .createTableSink(context);
    } catch (Throwable t) {
        // Wrap everything (including Errors) so callers get a uniform TableException.
        throw new TableException("findAndCreateTableSink failed.", t);
    }
} | Returns a table sink matching the context. | findAndCreateTableSink | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | Apache-2.0 |
/**
 * Creates a {@link TableSink} from a {@link CatalogTable}.
 *
 * <p>It considers {@code Catalog#getFactory()} if provided.
 *
 * @param objectIdentifier identifier of the table in the catalog
 * @param catalogTable table definition to create the sink from
 * @param configuration configuration made available to the factory
 * @param isStreamingMode whether the job runs in streaming mode
 * @param isTemporary whether the table is a temporary table
 */
@SuppressWarnings("unchecked")
public static <T> TableSink<T> findAndCreateTableSink(
        ObjectIdentifier objectIdentifier,
        CatalogTable catalogTable,
        ReadableConfig configuration,
        boolean isStreamingMode,
        boolean isTemporary) {
    TableSinkFactory.Context context =
            new TableSinkFactoryContextImpl(
                    objectIdentifier,
                    catalogTable,
                    configuration,
                    // the context flag is the negation of streaming mode — presumably
                    // "isBounded"/batch; confirm against TableSinkFactoryContextImpl
                    !isStreamingMode,
                    isTemporary);
    return findAndCreateTableSink(context);
} | Creates a {@link TableSink} from a {@link CatalogTable}.
<p>It considers {@link Catalog#getFactory()} if provided. | findAndCreateTableSink | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | Apache-2.0 |
/**
 * Checks whether the {@link CatalogTable} uses legacy connector sink options.
 *
 * @return true if the table should be handled by the legacy factory stack
 */
public static boolean isLegacyConnectorOptions(
        ReadableConfig configuration,
        boolean isStreamingMode,
        ObjectIdentifier objectIdentifier,
        CatalogTable catalogTable,
        boolean isTemporary) {
    // normalize option keys
    DescriptorProperties properties = new DescriptorProperties(true);
    properties.putProperties(catalogTable.getOptions());
    if (properties.containsKey(ConnectorDescriptorValidator.CONNECTOR_TYPE)) {
        // The legacy 'connector.type' key is present -> definitely legacy options.
        return true;
    } else {
        try {
            // try to create a legacy table sink using the options,
            // some legacy factories may use the 'type' key
            TableFactoryUtil.findAndCreateTableSink(
                    objectIdentifier,
                    catalogTable,
                    configuration,
                    isStreamingMode,
                    isTemporary);
            // success, then we will use the legacy factories
            return true;
        } catch (Throwable ignore) {
            // fail, then we will use new factories
            return false;
        }
    }
} | Checks whether the {@link CatalogTable} uses legacy connector sink options. | isLegacyConnectorOptions | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | Apache-2.0 |
/**
 * Finds and creates a {@link CatalogStoreFactory} using the provided {@link Configuration}
 * and user classloader.
 *
 * <p>The configuration format should be as follows:
 *
 * <pre>{@code
 * table.catalog-store.kind: {identifier}
 * table.catalog-store.{identifier}.{param1}: xxx
 * table.catalog-store.{identifier}.{param2}: xxx
 * }</pre>
 */
public static CatalogStoreFactory findAndCreateCatalogStoreFactory(
        Configuration configuration, ClassLoader classLoader) {
    // The 'kind' option selects which catalog store factory implementation to discover.
    String identifier = configuration.get(CommonCatalogOptions.TABLE_CATALOG_STORE_KIND);
    // Return directly; the intermediate local variable added nothing.
    return FactoryUtil.discoverFactory(classLoader, CatalogStoreFactory.class, identifier);
} | Finds and creates a {@link CatalogStoreFactory} using the provided {@link Configuration} and
user classloader.
<p>The configuration format should be as follows:
<pre>{@code
table.catalog-store.kind: {identifier}
table.catalog-store.{identifier}.{param1}: xxx
table.catalog-store.{identifier}.{param2}: xxx
}</pre> | findAndCreateCatalogStoreFactory | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | Apache-2.0 |
/**
 * Builds a {@link CatalogStoreFactory.Context} for opening the {@link CatalogStoreFactory}.
 *
 * <p>The configuration format should be as follows:
 *
 * <pre>{@code
 * table.catalog-store.kind: {identifier}
 * table.catalog-store.{identifier}.{param1}: xxx
 * table.catalog-store.{identifier}.{param2}: xxx
 * }</pre>
 */
public static CatalogStoreFactory.Context buildCatalogStoreFactoryContext(
        Configuration configuration, ClassLoader classLoader) {
    String identifier = configuration.get(CommonCatalogOptions.TABLE_CATALOG_STORE_KIND);
    // Store-specific options are namespaced under 'table.catalog-store.{identifier}.';
    // the delegating view strips that prefix before handing the options to the factory.
    String catalogStoreOptionPrefix =
            CommonCatalogOptions.TABLE_CATALOG_STORE_OPTION_PREFIX + identifier + ".";
    Map<String, String> options =
            new DelegatingConfiguration(configuration, catalogStoreOptionPrefix).toMap();
    // Return directly; the intermediate local variable added nothing.
    return new FactoryUtil.DefaultCatalogStoreContext(options, configuration, classLoader);
} | Build a {@link CatalogStoreFactory.Context} for opening the {@link CatalogStoreFactory}.
<p>The configuration format should be as follows:
<pre>{@code
table.catalog-store.kind: {identifier}
table.catalog-store.{identifier}.{param1}: xxx
table.catalog-store.{identifier}.{param2}: xxx
}</pre> | buildCatalogStoreFactoryContext | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/factories/TableFactoryUtil.java | Apache-2.0 |
/**
 * Returns references to the operands of accumulate and retract, i.e. the input values
 * (usually obtained from a newly arrived row). Operand names are the stringified indices
 * {@code "0" .. operandCount()-1}.
 *
 * @throws IllegalStateException if {@code operandCount()} is negative or an operand name
 *     clashes with a declared agg buffer name
 */
public final UnresolvedReferenceExpression[] operands() {
    int operandCount = operandCount();
    // Fix: the message previously said "inputCount" although the validated value is
    // operandCount().
    Preconditions.checkState(
            operandCount >= 0, "operandCount must be greater than or equal to 0.");
    UnresolvedReferenceExpression[] ret = new UnresolvedReferenceExpression[operandCount];
    for (int i = 0; i < operandCount; i++) {
        String name = String.valueOf(i);
        // Operand names must not collide with agg buffer names.
        validateOperandName(name);
        ret[i] = unresolvedRef(name);
    }
    return ret;
} | Args of accumulate and retract, the input value (usually obtained from a new arrived data). | operands | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/DeclarativeAggregateFunction.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/DeclarativeAggregateFunction.java | Apache-2.0 |
/**
 * Returns a reference to the i-th operand of accumulate and retract, i.e. the input value
 * (usually obtained from a newly arrived row).
 *
 * @throws IllegalStateException if the stringified index clashes with an agg buffer name
 */
public final UnresolvedReferenceExpression operand(int i) {
    // Operand names are stringified indices; they must not collide with buffer names.
    String name = String.valueOf(i);
    if (getAggBufferNames().contains(name)) {
        throw new IllegalStateException(
                String.format("Agg buffer name(%s) should not same to operands.", name));
    }
    return unresolvedRef(name);
} | Arg of accumulate and retract, the input value (usually obtained from a new arrived data). | operand | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/DeclarativeAggregateFunction.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/DeclarativeAggregateFunction.java | Apache-2.0 |
/**
 * Returns the merge-input reference corresponding to the given agg buffer attribute,
 * named by the attribute's position within {@link #aggBufferAttributes()}.
 *
 * <p>NOTE(review): if {@code aggBuffer} is not an element of {@code aggBufferAttributes()},
 * {@code indexOf} yields -1 and the name "-1" is validated/used — presumably callers only
 * pass known attributes; confirm.
 *
 * @param aggBuffer an attribute obtained from {@link #aggBufferAttributes()}
 * @return an {@link UnresolvedReferenceExpression} named after the attribute's index
 */
public final UnresolvedReferenceExpression mergeOperand(
        UnresolvedReferenceExpression aggBuffer) {
    final int position = Arrays.asList(aggBufferAttributes()).indexOf(aggBuffer);
    final String name = String.valueOf(position);
    validateOperandName(name);
    return unresolvedRef(name);
} | Merge input of {@link #mergeExpressions()}, the input are AGG buffer generated by user
definition. | mergeOperand | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/DeclarativeAggregateFunction.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/DeclarativeAggregateFunction.java | Apache-2.0 |
/**
 * SQL {@code LIKE} predicate with an explicit escape character.
 *
 * @param s the input string to test
 * @param pattern the SQL LIKE pattern
 * @param escape the escape character (as a one-character string)
 * @return true if the entire input matches the pattern
 */
public static boolean like(String s, String pattern, String escape) {
    // Translate the SQL pattern to a Java regex, then match the whole input.
    return Pattern.matches(sqlToRegexLike(pattern, escape), s);
} | SQL {@code LIKE} function with escape. | like | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java | Apache-2.0 |
/**
 * SQL {@code ILIKE} predicate (case-insensitive {@code LIKE}) with an explicit escape
 * character.
 *
 * @param s the input string to test
 * @param patternStr the SQL LIKE pattern
 * @param escape the escape character (as a one-character string)
 * @return true if the entire input matches the pattern, ignoring case
 */
public static boolean ilike(String s, String patternStr, String escape) {
    final String regex = sqlToRegexLike(patternStr, escape);
    // CASE_INSENSITIVE needs an explicit Pattern; Pattern.matches has no flags overload.
    return Pattern.compile(regex, Pattern.CASE_INSENSITIVE).matcher(s).matches();
} | SQL {@code ILIKE} function with escape. | ilike | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java | Apache-2.0 |
/**
 * SQL {@code SIMILAR TO} predicate with an explicit escape character.
 *
 * @param s the input string to test
 * @param pattern the SQL SIMILAR pattern
 * @param escape the escape character (as a one-character string)
 * @return true if the entire input matches the pattern
 */
public static boolean similar(String s, String pattern, String escape) {
    // Translate the SQL SIMILAR pattern to a Java regex, then match the whole input.
    return Pattern.matches(sqlToRegexSimilar(pattern, escape), s);
} | SQL {@code SIMILAR} function with escape. | similar | java | apache/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java | https://github.com/apache/flink/blob/master/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/functions/SqlLikeUtils.java | Apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.