index int64 0 0 | repo_id stringlengths 26 205 | file_path stringlengths 51 246 | content stringlengths 8 433k | __index_level_0__ int64 0 10k |
|---|---|---|---|---|
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/Statement.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import software.amazon.documentdb.jdbc.common.utilities.Warning;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLWarning;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Abstract implementation of Statement for JDBC Driver.
*/
public abstract class Statement implements java.sql.Statement {
private static final Logger LOGGER = LoggerFactory.getLogger(Statement.class);
private final java.sql.Connection connection;
private final AtomicBoolean isClosed = new AtomicBoolean(false);
private int maxFieldSize = 0;
private long largeMaxRows = 0;
private boolean shouldCloseOnCompletion = false;
private SQLWarning warnings;
private int fetchSize = 0;
private ResultSet resultSet;
/**
* Constructor for seeding the statement with the parent connection.
*
* @param connection The parent connection.
*/
protected Statement(final java.sql.Connection connection) {
this.connection = connection;
this.warnings = null;
}
@Override
public void addBatch(final String sql) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException();
}
@Override
public void cancel() throws SQLException {
verifyOpen();
cancelQuery(false);
}
@Override
public void clearBatch() throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException();
}
@Override
public void clearWarnings() throws SQLException {
verifyOpen();
warnings = null;
}
@Override
public void close() throws SQLException {
    // getAndSet makes close() idempotent: only the first caller performs cleanup.
    if (!this.isClosed.getAndSet(true)) {
        LOGGER.debug("Cancel any running queries.");
        try {
            cancelQuery(true);
        } catch (final SQLException e) {
            // Best effort: closing continues even if the running query could not be cancelled.
            // Fix: SLF4J uses '{}' placeholders, not printf-style '%s'; the original logged
            // the literal "%s" and dropped the message. Pass the exception last for the stack trace.
            LOGGER.warn(
                    "Error occurred while closing Statement. Failed to cancel running query: {}",
                    e.getMessage(), e);
        }
        if (this.resultSet != null) {
            LOGGER.debug("Close opened result set.");
            this.resultSet.close();
        }
    }
}
@Override
public void closeOnCompletion() throws SQLException {
verifyOpen();
this.shouldCloseOnCompletion = true;
}
@Override
public boolean execute(final String sql) throws SQLException {
    // The driver is read-only, so every successful execution is a query and
    // therefore always produces a result set; hence the unconditional 'true'.
    this.resultSet = executeQuery(sql);
    return true;
}
// Add default execute stubs.
@Override
public boolean execute(final String sql, final int autoGeneratedKeys) throws SQLException {
// Ignore the auto-generated keys as INSERT is not supported and auto-generated keys are not supported.
return execute(sql);
}
@Override
public boolean execute(final String sql, final int[] columnIndexes) throws SQLException {
// Ignore the auto-generated keys as INSERT is not supported and auto-generated keys are not supported.
return execute(sql);
}
@Override
public boolean execute(final String sql, final String[] columnNames) throws SQLException {
// Ignore the auto-generated keys as INSERT is not supported and auto-generated keys are not supported.
return execute(sql);
}
@Override
public int[] executeBatch() throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException();
}
@Override
public long[] executeLargeBatch() throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException();
}
@Override
public long executeLargeUpdate(final String sql) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public long executeLargeUpdate(final String sql, final int autoGeneratedKeys) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public long executeLargeUpdate(final String sql, final int[] columnIndexes) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public long executeLargeUpdate(final String sql, final String[] columnNames) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public int executeUpdate(final String sql) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public int executeUpdate(final String sql, final int autoGeneratedKeys) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public int executeUpdate(final String sql, final int[] columnIndexes) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public int executeUpdate(final String sql, final String[] columnNames) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public java.sql.Connection getConnection() throws SQLException {
verifyOpen();
return connection;
}
@Override
public int getFetchDirection() throws SQLException {
verifyOpen();
return ResultSet.FETCH_FORWARD;
}
@Override
public void setFetchDirection(final int direction) throws SQLException {
verifyOpen();
if (direction != ResultSet.FETCH_FORWARD) {
throw SqlError.createSQLException(
LOGGER,
SqlState.FEATURE_NOT_SUPPORTED,
SqlError.UNSUPPORTED_FETCH_DIRECTION,
direction);
}
}
@Override
public int getFetchSize() throws SQLException {
verifyOpen();
return fetchSize;
}
@Override
public void setFetchSize(final int rows) throws SQLException {
verifyOpen();
if (rows < 0) {
throw SqlError.createSQLException(
LOGGER,
SqlState.DATA_EXCEPTION,
SqlError.INVALID_FETCH_SIZE,
rows);
}
this.fetchSize = rows;
}
@Override
public ResultSet getGeneratedKeys() throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.UNSUPPORTED_GENERATED_KEYS));
}
@Override
public long getLargeMaxRows() throws SQLException {
    verifyOpen();
    // Maximum result size is 1MB, so therefore a single row cannot exceed this.
    return largeMaxRows;
}
@Override
public void setLargeMaxRows(final long max) throws SQLException {
verifyOpen();
if (max < 0) {
throw SqlError.createSQLException(
LOGGER,
SqlState.DATA_EXCEPTION,
SqlError.INVALID_LARGE_MAX_ROWS_SIZE,
max);
}
this.largeMaxRows = max;
}
@Override
public long getLargeUpdateCount() throws SQLException {
verifyOpen();
// Updates are not supported, so always return -1.
return -1;
}
@Override
public int getMaxFieldSize() throws SQLException {
verifyOpen();
return maxFieldSize;
}
@Override
public void setMaxFieldSize(final int max) throws SQLException {
verifyOpen();
if (max < 0) {
throw SqlError.createSQLException(
LOGGER,
SqlState.DATA_EXCEPTION,
SqlError.INVALID_MAX_FIELD_SIZE,
max);
}
this.maxFieldSize = max;
}
@Override
public int getMaxRows() throws SQLException {
    final long largeValue = getLargeMaxRows();
    if (largeValue <= Integer.MAX_VALUE) {
        return (int) largeValue;
    }
    // The configured limit does not fit in an int: record a truncation warning and clamp.
    final String message = Warning.lookup(Warning.MAX_VALUE_TRUNCATED, largeValue, Integer.MAX_VALUE);
    LOGGER.warn(message);
    addWarning(new SQLWarning(message));
    return Integer.MAX_VALUE;
}
@Override
public void setMaxRows(final int max) throws SQLException {
setLargeMaxRows(max);
}
@Override
public boolean getMoreResults() throws SQLException {
return getMoreResults(java.sql.Statement.CLOSE_CURRENT_RESULT);
}
@Override
public boolean getMoreResults(final int current) throws SQLException {
    verifyOpen();
    final boolean keepCurrent = (current == java.sql.Statement.KEEP_CURRENT_RESULT);
    if (!keepCurrent && (this.resultSet != null)) {
        this.resultSet.close();
        this.resultSet = null;
    }
    // A statement never yields more than one result, so there is never a "next" one.
    return false;
}
@Override
public ResultSet getResultSet() throws SQLException {
verifyOpen();
return resultSet;
}
@Override
public int getResultSetConcurrency() throws SQLException {
verifyOpen();
return ResultSet.CONCUR_READ_ONLY;
}
@Override
public int getResultSetHoldability() throws SQLException {
verifyOpen();
return ResultSet.CLOSE_CURSORS_AT_COMMIT;
}
@Override
public int getResultSetType() throws SQLException {
verifyOpen();
return ResultSet.TYPE_FORWARD_ONLY;
}
@Override
public int getUpdateCount() throws SQLException {
return (int) this.getLargeUpdateCount();
}
@Override
public SQLWarning getWarnings() throws SQLException {
verifyOpen();
return warnings;
}
@Override
public boolean isClosed() {
return isClosed.get();
}
@Override
public boolean isCloseOnCompletion() throws SQLException {
verifyOpen();
return shouldCloseOnCompletion;
}
@Override
public boolean isPoolable() throws SQLException {
verifyOpen();
// Statement pooling is not supported.
return false;
}
@Override
public void setPoolable(final boolean poolable) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.POOLING_NOT_SUPPORTED));
}
@Override
public boolean isWrapperFor(final Class<?> iface) {
return (null != iface) && iface.isAssignableFrom(this.getClass());
}
@Override
public void setCursorName(final String name) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public void setEscapeProcessing(final boolean enable) throws SQLException {
verifyOpen();
// Do nothing, because the driver does not support escape processing.
}
@Override
public <T> T unwrap(final Class<T> iface) throws SQLException {
    // Only unwrap to types this instance actually implements; anything else is an error.
    if (!iface.isAssignableFrom(this.getClass())) {
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.DATA_EXCEPTION,
                SqlError.CANNOT_UNWRAP,
                iface.toString());
    }
    return iface.cast(this);
}
/**
 * Adds a new {@link SQLWarning} to the end of the warning list.
 *
 * @param warning the {@link SQLWarning} to add.
 */
void addWarning(final SQLWarning warning) {
    if (this.warnings == null) {
        this.warnings = warning;
    } else {
        // SQLWarning#setNextWarning appends to the END of the chain (it walks the list),
        // so calling it on the head is sufficient to preserve warning order.
        this.warnings.setNextWarning(warning);
    }
}
/**
 * Verify the statement is open.
 *
 * @throws SQLException if the statement is closed.
 */
protected void verifyOpen() throws SQLException {
    if (!isClosed.get()) {
        return;
    }
    throw SqlError.createSQLException(
            LOGGER,
            SqlState.DATA_EXCEPTION,
            SqlError.STMT_CLOSED);
}
/**
* Cancels the current query.
* @throws SQLException - if a database exception occurs
* @param isClosing indicator of whether the statement is closing.
*/
protected abstract void cancelQuery(final boolean isClosing) throws SQLException;
}
| 4,600 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/PreparedStatement.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.Date;
import java.sql.NClob;
import java.sql.ParameterMetaData;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Calendar;
/**
* Abstract implementation of PreparedStatement for JDBC Driver.
*/
public abstract class PreparedStatement extends Statement implements java.sql.PreparedStatement {
private final String sql;
private ResultSet resultSet;
/**
* Constructor for seeding the prepared statement with the parent connection.
*
* @param connection The parent connection.
* @param sql The sql query.
*/
protected PreparedStatement(final Connection connection, final String sql) {
super(connection);
this.sql = sql;
}
@Override
public void addBatch() throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException();
}
@Override
public boolean execute() throws SQLException {
    // The driver is read-only, so executing the prepared statement always runs it as a
    // query and always produces a result set; hence the unconditional 'true'.
    resultSet = executeQuery();
    return true;
}
@Override
public ResultSet executeQuery(final String sql) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public ResultSet getResultSet() throws SQLException {
verifyOpen();
return resultSet;
}
@Override
public boolean execute(final String sql) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void clearParameters() throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public int executeUpdate() throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.READ_ONLY));
}
@Override
public ParameterMetaData getParameterMetaData() throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setArray(final int parameterIndex, final Array x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setAsciiStream(final int parameterIndex, final InputStream x, final int length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setAsciiStream(final int parameterIndex, final InputStream x, final long length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setAsciiStream(final int parameterIndex, final InputStream x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setBigDecimal(final int parameterIndex, final BigDecimal x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setBinaryStream(final int parameterIndex, final InputStream x, final int length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setBinaryStream(final int parameterIndex, final InputStream x, final long length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setBinaryStream(final int parameterIndex, final InputStream x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setBlob(final int parameterIndex, final Blob x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, Blob.class.toString()));
}
@Override
public void setBlob(final int parameterIndex, final InputStream inputStream, final long length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, Blob.class.toString()));
}
@Override
public void setBlob(final int parameterIndex, final InputStream inputStream) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, Blob.class.toString()));
}
@Override
public void setBoolean(final int parameterIndex, final boolean x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setByte(final int parameterIndex, final byte x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setBytes(final int parameterIndex, final byte[] x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setCharacterStream(final int parameterIndex, final Reader reader, final int length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setCharacterStream(final int parameterIndex, final Reader reader, final long length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setCharacterStream(final int parameterIndex, final Reader reader)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setClob(final int parameterIndex, final Clob x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, Clob.class.toString()));
}
@Override
public void setClob(final int parameterIndex, final Reader reader, final long length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, Clob.class.toString()));
}
@Override
public void setClob(final int parameterIndex, final Reader reader) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, Clob.class.toString()));
}
@Override
public void setDate(final int parameterIndex, final Date x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setDate(final int parameterIndex, final Date x, final Calendar cal)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setDouble(final int parameterIndex, final double x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setFloat(final int parameterIndex, final float x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setInt(final int parameterIndex, final int x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setLong(final int parameterIndex, final long x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setNCharacterStream(final int parameterIndex, final Reader value, final long length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setNCharacterStream(final int parameterIndex, final Reader value)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setNClob(final int parameterIndex, final NClob value) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, NClob.class.toString()));
}
@Override
public void setNClob(final int parameterIndex, final Reader reader, final long length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, NClob.class.toString()));
}
@Override
public void setNClob(final int parameterIndex, final Reader reader) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, NClob.class.toString()));
}
@Override
public void setNString(final int parameterIndex, final String value) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setNull(final int parameterIndex, final int sqlType) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setNull(final int parameterIndex, final int sqlType, final String typeName)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setObject(final int parameterIndex, final Object x, final int targetSqlType)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setObject(final int parameterIndex, final Object x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setObject(final int parameterIndex, final Object x, final int targetSqlType,
final int scaleOrLength)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setRef(final int parameterIndex, final Ref x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, Ref.class.toString()));
}
@Override
public void setRowId(final int parameterIndex, final RowId x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setSQLXML(final int parameterIndex, final SQLXML xmlObject) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(
SqlError.lookup(SqlError.UNSUPPORTED_TYPE, SQLXML.class.toString()));
}
@Override
public void setShort(final int parameterIndex, final short x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setString(final int parameterIndex, final String x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setTime(final int parameterIndex, final Time x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setTime(final int parameterIndex, final Time x, final Calendar cal)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setTimestamp(final int parameterIndex, final Timestamp x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setTimestamp(final int parameterIndex, final Timestamp x, final Calendar cal)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
public void setURL(final int parameterIndex, final URL x) throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
@Override
@Deprecated
public void setUnicodeStream(final int parameterIndex, final InputStream x, final int length)
throws SQLException {
verifyOpen();
throw new SQLFeatureNotSupportedException(SqlError.lookup(SqlError.PARAMETERS_NOT_SUPPORTED));
}
/**
* Gets the sql query string.
* @return the sql query string.
*/
protected String getSql() {
return sql;
}
}
| 4,601 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/Driver.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.nio.charset.StandardCharsets;
import java.sql.DriverPropertyInfo;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.Properties;
import java.util.logging.Logger;
/**
* Abstract implementation of Driver for JDBC Driver.
*/
public abstract class Driver implements java.sql.Driver {
static final int DRIVER_MAJOR_VERSION;
static final int DRIVER_MINOR_VERSION;
static final String DRIVER_VERSION;
static final String APP_NAME_SUFFIX;
static final String APPLICATION_NAME;
private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(Driver.class);
static {
APPLICATION_NAME = getApplicationName();
// TODO: Get driver version, suffix
DRIVER_MAJOR_VERSION = 1;
DRIVER_MINOR_VERSION = 1;
APP_NAME_SUFFIX = "TODO";
DRIVER_VERSION = "0.0.0";
}
/**
 * Get the name of the currently running application.
 *
 * <p>Looks up the current process ID in the operating system's process table
 * ({@code tasklist} on Windows, {@code ps} elsewhere) and returns the image/command
 * name associated with it.
 *
 * @return the name of the currently running application, or {@code "Unknown"} if it
 *         cannot be determined.
 */
private static String getApplicationName() {
    // What we do is get the process ID of the current process, then check the set of running
    // processes and pick out the one that matches the current process. From there we can grab
    // the name of what is running the process.
    try {
        final String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0];
        final boolean isWindows = System.getProperty("os.name").startsWith("Windows");
        if (isWindows) {
            // Fix: Runtime.exec(String) tokenizes on whitespace, which splits the quoted
            // "PID eq <pid>" filter into separate arguments. ProcessBuilder passes each
            // argument intact (and Runtime.exec(String) is deprecated since Java 18).
            final Process process = new ProcessBuilder(
                    "tasklist", "/fi", "PID eq " + pid, "/fo", "csv", "/nh").start();
            try (final BufferedReader input = new BufferedReader(
                    new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
                // /fo csv /nh emits one CSV record per process: "name","pid",...
                final String line = input.readLine();
                if (line != null) {
                    // Omit the surrounding quotes around the image name.
                    return line.substring(1, line.indexOf(",") - 1);
                }
            }
        } else {
            final Process process = new ProcessBuilder("ps", "-eo", "pid,comm").start();
            try (final BufferedReader input = new BufferedReader(
                    new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
                String line;
                while ((line = input.readLine()) != null) {
                    line = line.trim();
                    if (line.startsWith(pid)) {
                        return line.substring(line.indexOf(" ") + 1);
                    }
                }
            }
        }
    } catch (final Exception err) {
        // Eat the exception and fall through: failing to resolve the caller's name
        // must never prevent the driver from loading.
        LOGGER.info(
                "An exception has occurred and ignored while retrieving the caller application name: "
                        + err.getLocalizedMessage());
    }
    return "Unknown";
}
@Override
public DriverPropertyInfo[] getPropertyInfo(final String url, final Properties info) throws SQLException {
return new DriverPropertyInfo[0];
}
// TODO: Fix functions below.
@Override
public int getMajorVersion() {
    // Fix: previously hard-coded to 0, contradicting DatabaseMetaData#getDriverMajorVersion,
    // which reports DRIVER_MAJOR_VERSION. Keep both paths consistent.
    return DRIVER_MAJOR_VERSION;
}
@Override
public int getMinorVersion() {
    // Keep consistent with DatabaseMetaData#getDriverMinorVersion.
    return DRIVER_MINOR_VERSION;
}
@Override
public boolean jdbcCompliant() {
return false;
}
@Override
public Logger getParentLogger() throws SQLFeatureNotSupportedException {
return null;
}
}
| 4,602 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/DatabaseMetaData.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import java.sql.ResultSet;
import java.sql.RowIdLifetime;
import java.sql.SQLException;
/**
* Abstract implementation of DatabaseMetaData for JDBC Driver.
*/
public abstract class DatabaseMetaData implements java.sql.DatabaseMetaData {
    private static final org.slf4j.Logger LOGGER = LoggerFactory.getLogger(DatabaseMetaData.class);
    // TODO: Create class with abstract functions to get these (and other) of constants
    private static final int MAX_CATALOG_NAME_LENGTH = 60;
    private static final int MAX_TABLE_NAME_LENGTH = 60;
    private static final int MAX_STATEMENT_LENGTH = 65536;
    private final java.sql.Connection connection;

    /**
     * DatabaseMetaData constructor.
     *
     * @param connection Connection Object this metadata describes.
     */
    public DatabaseMetaData(final java.sql.Connection connection) {
        this.connection = connection;
    }

    // TODO: This unwrap and isWrapperFor is everywhere, try to make a generic static function to handle them.
    @Override
    public <T> T unwrap(final Class<T> iface) throws SQLException {
        if (iface.isAssignableFrom(this.getClass())) {
            return iface.cast(this);
        }
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.DATA_EXCEPTION,
                SqlError.CANNOT_UNWRAP,
                iface.toString());
    }

    @Override
    public boolean isWrapperFor(final Class<?> iface) {
        return (null != iface) && iface.isAssignableFrom(this.getClass());
    }

    @Override
    public java.sql.Connection getConnection() {
        return connection;
    }

    // --- Driver identity and fixed defaults. -------------------------------

    @Override
    public int getDefaultTransactionIsolation() {
        // Transactions are not supported by this driver.
        return java.sql.Connection.TRANSACTION_NONE;
    }

    @Override
    public int getDriverMajorVersion() {
        return Driver.DRIVER_MAJOR_VERSION;
    }

    @Override
    public int getDriverMinorVersion() {
        return Driver.DRIVER_MINOR_VERSION;
    }

    @Override
    public String getDriverVersion() {
        return Driver.DRIVER_VERSION;
    }

    @Override
    public int getResultSetHoldability() {
        // Matches Connection#getHoldability() and supportsResultSetHoldability() below.
        return ResultSet.CLOSE_CURSORS_AT_COMMIT;
    }

    @Override
    public RowIdLifetime getRowIdLifetime() {
        return RowIdLifetime.ROWID_UNSUPPORTED;
    }

    @Override
    public int getSQLStateType() {
        return java.sql.DatabaseMetaData.sqlStateSQL;
    }

    @Override
    public String getIdentifierQuoteString() {
        return "\"";
    }

    // --- Documented size limits (0 below means "unknown / no fixed limit"). --

    @Override
    public int getMaxCatalogNameLength() {
        return MAX_CATALOG_NAME_LENGTH;
    }

    @Override
    public int getMaxStatementLength() {
        return MAX_STATEMENT_LENGTH;
    }

    @Override
    public int getMaxTableNameLength() {
        return MAX_TABLE_NAME_LENGTH;
    }

    /**
     * Only forward-only, read-only result sets are supported.
     */
    @Override
    public boolean supportsResultSetConcurrency(final int type, final int concurrency) {
        return (type == ResultSet.TYPE_FORWARD_ONLY) && (concurrency == ResultSet.CONCUR_READ_ONLY);
    }

    @Override
    public boolean supportsResultSetType(final int type) {
        return (ResultSet.TYPE_FORWARD_ONLY == type);
    }

    @Override
    public String getProcedureTerm() {
        LOGGER.debug("Procedures are not supported. Returning empty string.");
        return "";
    }

    @Override
    public String getSchemaTerm() {
        LOGGER.debug("Schemas are not supported. Returning an empty string.");
        return "";
    }

    @Override
    public int getMaxBinaryLiteralLength() {
        LOGGER.debug("Binary is not a supported data type.");
        return 0;
    }

    // --- Metadata queries for unsupported features all throw. --------------

    @Override
    public ResultSet getCrossReference(final String parentCatalog, final String parentSchema, final String parentTable,
                                       final String foreignCatalog, final String foreignSchema,
                                       final String foreignTable)
            throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_CROSS_REFERENCE);
    }

    @Override
    public ResultSet getExportedKeys(final String catalog, final String schema, final String table)
            throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_EXPORTED_KEYS);
    }

    @Override
    public ResultSet getFunctionColumns(final String catalog, final String schemaNamePattern,
                                        final String tableNamePattern, final String columnNamePattern)
            throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_FUNCTION_COLUMNS);
    }

    @Override
    public ResultSet getFunctions(final String catalog, final String schemaPattern, final String functionNamePattern)
            throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_FUNCTIONS);
    }

    @Override
    public ResultSet getProcedureColumns(final String catalog, final String schemaPattern,
                                         final String procedureNamePattern, final String columnNamePattern)
            throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_PROCEDURE_COLUMNS);
    }

    @Override
    public ResultSet getPseudoColumns(final String catalog, final String schemaPattern, final String tableNamePattern,
                                      final String columnNamePattern) throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_PSEUDO_COLUMNS);
    }

    @Override
    public ResultSet getSuperTables(final String catalog, final String schemaPattern, final String tableNamePattern)
            throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_SUPER_TABLES);
    }

    @Override
    public ResultSet getSuperTypes(final String catalog, final String schemaPattern, final String tableNamePattern)
            throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_SUPER_TYPES);
    }

    @Override
    public ResultSet getTablePrivileges(final String catalog, final String schemaPattern, final String tableNamePattern)
            throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_TABLE_PRIVILEGES);
    }

    @Override
    public ResultSet getUDTs(final String catalog, final String schemaPattern, final String typeNamePattern,
                             final int[] types) throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_USER_DEFINED_TYPES);
    }

    @Override
    public ResultSet getVersionColumns(final String catalog, final String schema, final String table)
            throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_VERSION_COLUMNS);
    }

    @Override
    public int getMaxTablesInSelect() {
        // NOTE(review): a limit of 1 conflicts with supportsOuterJoins()/supportsFullOuterJoins()
        // returning true below (joins require more than one table) — confirm the intended limit.
        return 1;
    }

    @Override
    public int getMaxUserNameLength() {
        return 0;
    }

    // --- Capability flags. --------------------------------------------------

    @Override
    public boolean allProceduresAreCallable() {
        return false;
    }

    @Override
    public boolean allTablesAreSelectable() {
        return true;
    }

    @Override
    public boolean autoCommitFailureClosesAllResultSets() {
        return false;
    }

    @Override
    public boolean dataDefinitionCausesTransactionCommit() {
        return false;
    }

    @Override
    public boolean dataDefinitionIgnoredInTransactions() {
        return false;
    }

    @Override
    public boolean deletesAreDetected(final int type) {
        return false;
    }

    @Override
    public boolean doesMaxRowSizeIncludeBlobs() {
        return true;
    }

    @Override
    public boolean generatedKeyAlwaysReturned() {
        return false;
    }

    @Override
    public boolean insertsAreDetected(final int type) {
        return false;
    }

    @Override
    public boolean isCatalogAtStart() {
        return true;
    }

    @Override
    public boolean isReadOnly() {
        return true;
    }

    @Override
    public boolean locatorsUpdateCopy() {
        return false;
    }

    // --- NULL sorting/concatenation behavior. -------------------------------

    @Override
    public boolean nullPlusNonNullIsNull() {
        return true;
    }

    @Override
    public boolean nullsAreSortedAtEnd() {
        return false;
    }

    @Override
    public boolean nullsAreSortedAtStart() {
        return false;
    }

    @Override
    public boolean nullsAreSortedHigh() {
        return false;
    }

    @Override
    public boolean nullsAreSortedLow() {
        return false;
    }

    // --- Result-set change visibility (forward-only snapshots: none). -------

    @Override
    public boolean othersDeletesAreVisible(final int type) {
        return false;
    }

    @Override
    public boolean othersInsertsAreVisible(final int type) {
        return false;
    }

    @Override
    public boolean othersUpdatesAreVisible(final int type) {
        return false;
    }

    @Override
    public boolean ownDeletesAreVisible(final int type) {
        return false;
    }

    @Override
    public boolean ownInsertsAreVisible(final int type) {
        return false;
    }

    @Override
    public boolean ownUpdatesAreVisible(final int type) {
        return false;
    }

    // --- Identifier case handling (mixed case stored as-is). ----------------

    @Override
    public boolean storesLowerCaseIdentifiers() {
        return false;
    }

    @Override
    public boolean storesLowerCaseQuotedIdentifiers() {
        return false;
    }

    @Override
    public boolean storesMixedCaseIdentifiers() {
        return true;
    }

    @Override
    public boolean storesMixedCaseQuotedIdentifiers() {
        return true;
    }

    @Override
    public boolean storesUpperCaseIdentifiers() {
        return false;
    }

    @Override
    public boolean storesUpperCaseQuotedIdentifiers() {
        return false;
    }

    // --- SQL grammar and feature support. -----------------------------------

    @Override
    public boolean supportsANSI92EntryLevelSQL() {
        return true;
    }

    @Override
    public boolean supportsANSI92FullSQL() {
        return false;
    }

    @Override
    public boolean supportsANSI92IntermediateSQL() {
        return false;
    }

    @Override
    public boolean supportsAlterTableWithAddColumn() {
        return false;
    }

    @Override
    public boolean supportsAlterTableWithDropColumn() {
        return false;
    }

    @Override
    public boolean supportsBatchUpdates() {
        return false;
    }

    @Override
    public boolean supportsCatalogsInDataManipulation() {
        return false;
    }

    @Override
    public boolean supportsCatalogsInIndexDefinitions() {
        return false;
    }

    @Override
    public boolean supportsCatalogsInPrivilegeDefinitions() {
        return false;
    }

    @Override
    public boolean supportsCatalogsInProcedureCalls() {
        return false;
    }

    @Override
    public boolean supportsCatalogsInTableDefinitions() {
        return false;
    }

    @Override
    public boolean supportsColumnAliasing() {
        return true;
    }

    @Override
    public boolean supportsConvert() {
        return false;
    }

    @Override
    public boolean supportsConvert(final int fromType, final int toType) {
        return false;
    }

    @Override
    public boolean supportsCoreSQLGrammar() {
        return true;
    }

    @Override
    public boolean supportsCorrelatedSubqueries() {
        return true;
    }

    @Override
    public boolean supportsDataDefinitionAndDataManipulationTransactions() {
        return false;
    }

    @Override
    public boolean supportsDataManipulationTransactionsOnly() {
        return false;
    }

    @Override
    public boolean supportsDifferentTableCorrelationNames() {
        return false;
    }

    @Override
    public boolean supportsExpressionsInOrderBy() {
        return true;
    }

    @Override
    public boolean supportsExtendedSQLGrammar() {
        return true;
    }

    @Override
    public boolean supportsFullOuterJoins() {
        return true;
    }

    @Override
    public boolean supportsGetGeneratedKeys() {
        return false;
    }

    @Override
    public boolean supportsGroupBy() {
        return true;
    }

    @Override
    public boolean supportsGroupByBeyondSelect() {
        return true;
    }

    @Override
    public boolean supportsGroupByUnrelated() {
        return false;
    }

    @Override
    public boolean supportsIntegrityEnhancementFacility() {
        return false;
    }

    @Override
    public boolean supportsLikeEscapeClause() {
        return true;
    }

    @Override
    public boolean supportsLimitedOuterJoins() {
        return true;
    }

    @Override
    public boolean supportsMinimumSQLGrammar() {
        return true;
    }

    @Override
    public boolean supportsMixedCaseIdentifiers() {
        return true;
    }

    @Override
    public boolean supportsMixedCaseQuotedIdentifiers() {
        return true;
    }

    @Override
    public boolean supportsMultipleOpenResults() {
        return false;
    }

    @Override
    public boolean supportsMultipleResultSets() {
        return false;
    }

    @Override
    public boolean supportsMultipleTransactions() {
        return false;
    }

    @Override
    public boolean supportsNamedParameters() {
        return false;
    }

    @Override
    public boolean supportsNonNullableColumns() {
        return false;
    }

    @Override
    public boolean supportsOpenCursorsAcrossCommit() {
        return false;
    }

    @Override
    public boolean supportsOpenCursorsAcrossRollback() {
        return false;
    }

    @Override
    public boolean supportsOpenStatementsAcrossCommit() {
        return false;
    }

    @Override
    public boolean supportsOpenStatementsAcrossRollback() {
        return false;
    }

    @Override
    public boolean supportsOrderByUnrelated() {
        return true;
    }

    @Override
    public boolean supportsOuterJoins() {
        return true;
    }

    @Override
    public boolean supportsPositionedDelete() {
        return false;
    }

    @Override
    public boolean supportsPositionedUpdate() {
        return false;
    }

    /**
     * Reports whether the given holdability is supported. Must be consistent with
     * {@link #getResultSetHoldability()}: the driver's default holdability
     * ({@code CLOSE_CURSORS_AT_COMMIT}) has to be reported as supported.
     */
    @Override
    public boolean supportsResultSetHoldability(final int holdability) {
        return holdability == ResultSet.CLOSE_CURSORS_AT_COMMIT;
    }

    @Override
    public boolean supportsSavepoints() {
        return false;
    }

    @Override
    public boolean supportsSchemasInDataManipulation() {
        return false;
    }

    @Override
    public boolean supportsSchemasInIndexDefinitions() {
        return false;
    }

    @Override
    public boolean supportsSchemasInPrivilegeDefinitions() {
        return false;
    }

    @Override
    public boolean supportsSchemasInProcedureCalls() {
        return false;
    }

    @Override
    public boolean supportsSchemasInTableDefinitions() {
        return false;
    }

    @Override
    public boolean supportsSelectForUpdate() {
        return false;
    }

    @Override
    public boolean supportsStatementPooling() {
        return false;
    }

    @Override
    public boolean supportsStoredFunctionsUsingCallSyntax() {
        return false;
    }

    @Override
    public boolean supportsStoredProcedures() {
        return false;
    }

    @Override
    public boolean supportsSubqueriesInComparisons() {
        return false;
    }

    @Override
    public boolean supportsSubqueriesInExists() {
        return false;
    }

    @Override
    public boolean supportsSubqueriesInIns() {
        return false;
    }

    @Override
    public boolean supportsSubqueriesInQuantifieds() {
        return false;
    }

    @Override
    public boolean supportsTableCorrelationNames() {
        return false;
    }

    /**
     * Reports whether the given isolation level is supported. Must be consistent with
     * {@link #getDefaultTransactionIsolation()} and with the sibling
     * {@code Connection#setTransactionIsolation}, which accepts only
     * {@code TRANSACTION_NONE}.
     */
    @Override
    public boolean supportsTransactionIsolationLevel(final int level) {
        return level == java.sql.Connection.TRANSACTION_NONE;
    }

    @Override
    public boolean supportsTransactions() {
        return false;
    }

    @Override
    public boolean supportsUnion() {
        return false;
    }

    @Override
    public boolean supportsUnionAll() {
        return false;
    }

    @Override
    public boolean updatesAreDetected(final int type) {
        return false;
    }

    @Override
    public boolean usesLocalFilePerTable() {
        return false;
    }

    @Override
    public boolean usesLocalFiles() {
        return false;
    }

    // --- Remaining limits: 0 per JDBC convention means unknown / no limit. ---

    @Override
    public int getMaxCharLiteralLength() {
        return 0;
    }

    @Override
    public int getMaxColumnNameLength() {
        return 0;
    }

    @Override
    public int getMaxColumnsInGroupBy() {
        return 0;
    }

    @Override
    public int getMaxColumnsInIndex() {
        return 0;
    }

    @Override
    public int getMaxColumnsInOrderBy() {
        return 0;
    }

    @Override
    public int getMaxColumnsInSelect() {
        return 0;
    }

    @Override
    public int getMaxColumnsInTable() {
        return 0;
    }

    @Override
    public int getMaxConnections() {
        return 0;
    }

    @Override
    public int getMaxCursorNameLength() {
        return 0;
    }

    @Override
    public int getMaxIndexLength() {
        return 0;
    }

    @Override
    public int getMaxProcedureNameLength() {
        return 0;
    }

    @Override
    public int getMaxSchemaNameLength() {
        return 0;
    }

    @Override
    public int getMaxStatements() {
        return 0;
    }
}
| 4,603 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/Connection.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.utilities.ConnectionProperty;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import software.amazon.documentdb.jdbc.common.utilities.Warning;
import java.sql.Array;
import java.sql.Blob;
import java.sql.ClientInfoStatus;
import java.sql.Clob;
import java.sql.NClob;
import java.sql.ResultSet;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Struct;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Abstract implementation of Connection for JDBC Driver.
 * <p>
 * Provides client-info handling, warning chaining, and default "unsupported"
 * behavior for transactions, prepared/callable statements, and LOB creation.
 * Concrete drivers implement {@link #doClose()} and {@link #isSupportedProperty(String)}.
 */
public abstract class Connection implements java.sql.Connection {
    private static final Logger LOGGER = LoggerFactory.getLogger(Connection.class);
    // Backing store for client info. NOTE(review): this is the same Properties object
    // passed at construction (the connection's configuration), so client-info writes
    // mutate the connection properties — confirm this sharing is intended.
    private final Properties connectionProperties;
    private final AtomicBoolean isClosed = new AtomicBoolean(false);
    private Map<String, Class<?>> typeMap = new HashMap<>();
    // Head of the SQLWarning chain; null when there are no warnings.
    private SQLWarning warnings = null;

    /**
     * Constructor seeding the connection with its configuration properties.
     *
     * @param connectionProperties the properties for this connection; must not be null.
     */
    protected Connection(@NonNull final Properties connectionProperties) {
        this.connectionProperties = connectionProperties;
    }

    /*
        Functions that have their implementation in this Connection class.
     */
    @Override
    public Properties getClientInfo() throws SQLException {
        verifyOpen();
        // Return a defensive copy so callers cannot mutate the live properties.
        final Properties clientInfo = new Properties();
        clientInfo.putAll(connectionProperties);
        clientInfo.putIfAbsent(
                ConnectionProperty.APPLICATION_NAME,
                Driver.APPLICATION_NAME);
        return clientInfo;
    }

    @Override
    public void setClientInfo(final Properties properties) throws SQLClientInfoException {
        throwIfIsClosed(properties);
        // Replace-all semantics: existing properties are discarded before the new set is applied.
        connectionProperties.clear();
        if (properties != null) {
            for (final String name : properties.stringPropertyNames()) {
                final String value = properties.getProperty(name);
                setClientInfo(name, value);
            }
        }
        LOGGER.debug("Successfully set client info with all properties.");
    }

    @Override
    public String getClientInfo(final String name) throws SQLException {
        verifyOpen();
        if (name == null) {
            LOGGER.debug("Null value is passed as name, falling back to get client info with null.");
            return null;
        }
        // NOTE(review): this getter writes a default into the live properties rather than
        // just reading — side effect in a getter; confirm intended.
        connectionProperties.putIfAbsent(
                ConnectionProperty.APPLICATION_NAME,
                Driver.APPLICATION_NAME);
        return connectionProperties.getProperty(name);
    }

    @Override
    public Map<String, Class<?>> getTypeMap() throws SQLException {
        verifyOpen();
        return typeMap;
    }

    @Override
    public void setTypeMap(final Map<String, Class<?>> map) throws SQLException {
        verifyOpen();
        if (map == null) {
            LOGGER.debug("Null value is passed as conversion map, failing back to an empty hash map.");
            typeMap = new HashMap<>();
        } else {
            typeMap = map;
        }
    }

    @Override
    public boolean isClosed() {
        return isClosed.get();
    }

    @Override
    public boolean isWrapperFor(final Class<?> iface) {
        return (null != iface) && iface.isAssignableFrom(this.getClass());
    }

    @Override
    public String nativeSQL(final String sql) throws SQLException {
        verifyOpen();
        // No translation layer: the SQL is passed through unchanged.
        return sql;
    }

    @Override
    public void setClientInfo(final String name, final String value) throws SQLClientInfoException {
        Objects.requireNonNull(name);
        throwIfIsClosed(null);
        if (isSupportedProperty(name)) {
            if (value != null) {
                connectionProperties.put(name, value);
                LOGGER.debug("Successfully set client info with name {{}} and value {{}}", name, value);
            } else {
                // A null value removes the property, per the JDBC client-info contract.
                connectionProperties.remove(name);
                LOGGER.debug("Successfully removed client info with name {{}}", name);
            }
        } else {
            // Unsupported properties are reported as warnings rather than errors.
            addWarning(new SQLWarning(Warning.lookup(Warning.UNSUPPORTED_PROPERTY, name)));
        }
    }

    @Override
    public <T> T unwrap(final Class<T> iface) throws SQLException {
        if (iface.isAssignableFrom(this.getClass())) {
            return iface.cast(this);
        }
        throw SqlError.createSQLException(
                LOGGER,
                SqlState.DATA_EXCEPTION,
                SqlError.CANNOT_UNWRAP,
                iface.toString());
    }

    @Override
    public void clearWarnings() throws SQLException {
        verifyOpen();
        warnings = null;
    }

    @Override
    public SQLWarning getWarnings() throws SQLException {
        verifyOpen();
        return warnings;
    }

    /**
     * Set a new warning if there were none, or add a new warning to the end of the list.
     *
     * @param warning the {@link SQLWarning} to be set.
     */
    protected void addWarning(final SQLWarning warning) {
        LOGGER.warn(warning.getMessage());
        if (this.warnings == null) {
            this.warnings = warning;
            return;
        }
        // setNextWarning appends at the end of the existing chain.
        this.warnings.setNextWarning(warning);
    }

    /**
     * Closes the connection and releases resources.
     * @throws SQLException throws SQLException
     */
    protected abstract void doClose() throws SQLException;

    @Override
    public void close() throws SQLException {
        // getAndSet guarantees doClose() runs at most once, even with concurrent callers.
        if (!isClosed.getAndSet(true)) {
            doClose();
        }
    }

    /**
     * Verify the connection is open.
     * @throws SQLException if the connection is closed.
     */
    protected void verifyOpen() throws SQLException {
        if (isClosed.get()) {
            throw SqlError.createSQLException(
                    LOGGER,
                    SqlState.DATA_EXCEPTION,
                    SqlError.CONN_CLOSED);
        }
    }

    // Add default implementation of create functions which throw.
    @Override
    public Struct createStruct(final String typeName, final Object[] attributes) throws SQLException {
        // Only reason to do this is for parameters, if you do not support them then this is a safe implementation.
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.PARAMETERS_NOT_SUPPORTED);
    }

    @Override
    public Blob createBlob() throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(
                LOGGER,
                SqlError.UNSUPPORTED_TYPE,
                Blob.class.toString());
    }

    @Override
    public Clob createClob() throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(
                LOGGER,
                SqlError.UNSUPPORTED_TYPE,
                Clob.class.toString());
    }

    @Override
    public NClob createNClob() throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(
                LOGGER,
                SqlError.UNSUPPORTED_TYPE,
                NClob.class.toString());
    }

    @Override
    public SQLXML createSQLXML() throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(
                LOGGER,
                SqlError.UNSUPPORTED_TYPE,
                SQLXML.class.toString());
    }

    @Override
    public Array createArrayOf(final String typeName, final Object[] elements) throws SQLException {
        // Even though Arrays are supported, the only reason to create an Array in the application is to pass it as
        // a parameter which is not supported.
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.PARAMETERS_NOT_SUPPORTED);
    }

    // Add default of no schema and no catalog support.
    @Override
    public String getSchema() throws SQLException {
        // No schema support. Return null.
        return null;
    }

    @Override
    public void setSchema(final String schema) throws SQLException {
        // No schema support. Do nothing.
    }

    @Override
    public String getCatalog() throws SQLException {
        // No catalog support. Return null.
        return null;
    }

    @Override
    public void setCatalog(final String catalog) throws SQLException {
        // No catalog support. Do nothing.
    }

    // Add default read-only and autocommit only implementation.
    @Override
    public boolean getAutoCommit() throws SQLException {
        return true;
    }

    @Override
    public void setAutoCommit(final boolean autoCommit) throws SQLException {
        // Fake allowing autoCommit to be turned off, even though transactions are not supported, as some applications
        // turn this off without checking support.
        LOGGER.debug("Transactions are not supported, do nothing for setAutoCommit.");
    }

    @Override
    public boolean isReadOnly() throws SQLException {
        return true;
    }

    @Override
    public void setReadOnly(final boolean readOnly) throws SQLException {
        // Only rejects attempts to enable writes; setting read-only is a no-op.
        if (!readOnly) {
            throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
        }
    }

    // Default to forward only with read only concurrency.
    @Override
    public java.sql.Statement createStatement() throws SQLException {
        verifyOpen();
        return createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    }

    // Add default no transaction support statement.
    @Override
    public java.sql.Statement createStatement(final int resultSetType, final int resultSetConcurrency,
                                              final int resultSetHoldability) throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
    }

    @Override
    public java.sql.PreparedStatement prepareStatement(final String sql) throws SQLException {
        verifyOpen();
        return prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    }

    @Override
    public java.sql.PreparedStatement prepareStatement(final String sql, final int autoGeneratedKeys)
            throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_PREPARE_STATEMENT);
    }

    @Override
    public java.sql.PreparedStatement prepareStatement(final String sql, final int resultSetType,
                                                       final int resultSetConcurrency,
                                                       final int resultSetHoldability) throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_PREPARE_STATEMENT);
    }

    @Override
    public java.sql.PreparedStatement prepareStatement(final String sql, final int[] columnIndexes)
            throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_PREPARE_STATEMENT);
    }

    @Override
    public java.sql.PreparedStatement prepareStatement(final String sql, final String[] columnNames)
            throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_PREPARE_STATEMENT);
    }

    // Add default no callable statement support.
    @Override
    public java.sql.CallableStatement prepareCall(final String sql) throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_PREPARE_CALL);
    }

    @Override
    public java.sql.CallableStatement prepareCall(final String sql, final int resultSetType,
                                                  final int resultSetConcurrency)
            throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_PREPARE_CALL);
    }

    @Override
    public java.sql.CallableStatement prepareCall(final String sql, final int resultSetType,
                                                  final int resultSetConcurrency,
                                                  final int resultSetHoldability) throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_PREPARE_CALL);
    }

    // Default transactions as unsupported.
    @Override
    public int getTransactionIsolation() throws SQLException {
        return java.sql.Connection.TRANSACTION_NONE;
    }

    @Override
    public void setTransactionIsolation(final int level) throws SQLException {
        verifyOpen();
        // TRANSACTION_NONE is accepted as a no-op; any real isolation level is rejected.
        if (level != java.sql.Connection.TRANSACTION_NONE) {
            throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
        }
    }

    @Override
    public void releaseSavepoint(final Savepoint savepoint) throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
    }

    @Override
    public void rollback() throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
    }

    @Override
    public void rollback(final Savepoint savepoint) throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
    }

    @Override
    public Savepoint setSavepoint() throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
    }

    @Override
    public Savepoint setSavepoint(final String name) throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
    }

    @Override
    public void abort(final Executor executor) throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
    }

    @Override
    public void commit() throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
    }

    @Override
    public int getHoldability() throws SQLException {
        return ResultSet.CLOSE_CURSORS_AT_COMMIT;
    }

    @Override
    public void setHoldability(final int holdability) throws SQLException {
        verifyOpen();
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.TRANSACTIONS_NOT_SUPPORTED);
    }

    /**
     * Checks if the property is supported by the driver.
     * @param name The name of the property.
     * @return {@code true} if property is supported; {@code false} otherwise.
     */
    public abstract boolean isSupportedProperty(final String name);

    // Builds the per-property failure map required by SQLClientInfoException when closed.
    private void throwIfIsClosed(final Properties properties) throws SQLClientInfoException {
        if (isClosed.get()) {
            final Map<String, ClientInfoStatus> failures = new HashMap<>();
            if (properties != null) {
                for (final String name : properties.stringPropertyNames()) {
                    failures.put(name, ClientInfoStatus.REASON_UNKNOWN);
                }
            }
            throw SqlError.createSQLClientInfoException(LOGGER, SqlError.CONN_CLOSED, failures);
        }
    }
}
| 4,604 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/PooledConnection.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.ConnectionEvent;
import javax.sql.ConnectionEventListener;
import javax.sql.StatementEventListener;
import java.sql.SQLException;
import java.util.LinkedList;
import java.util.List;
/**
 * Abstract implementation of PooledConnection for JDBC Driver.
 * <p>
 * Maintains the registered {@link ConnectionEventListener}s and notifies them
 * when this pooled connection is closed. Statement pooling is not supported, so
 * statement event listeners are accepted but never invoked.
 */
public abstract class PooledConnection implements javax.sql.PooledConnection {
    private static final Logger LOGGER = LoggerFactory.getLogger(PooledConnection.class);
    private final List<ConnectionEventListener> listeners = new LinkedList<>();
    private final java.sql.Connection physicalConnection;

    /**
     * PooledConnection constructor.
     *
     * @param connection Connection Object backing this pooled connection.
     */
    public PooledConnection(final java.sql.Connection connection) {
        this.physicalConnection = connection;
    }

    /**
     * Gets the connection.
     *
     * @return a {@link java.sql.Connection}
     */
    public java.sql.Connection getConnection() throws SQLException {
        return physicalConnection;
    }

    @Override
    public void close() {
        LOGGER.debug("Notify all connection listeners this PooledConnection object is closed.");
        final ConnectionEvent closeEvent = new ConnectionEvent(this, null);
        for (final ConnectionEventListener listener : listeners) {
            listener.connectionClosed(closeEvent);
        }
    }

    @Override
    public void addConnectionEventListener(final ConnectionEventListener listener) {
        LOGGER.debug("Add a ConnectionEventListener to this PooledConnection.");
        listeners.add(listener);
    }

    @Override
    public void removeConnectionEventListener(final ConnectionEventListener listener) {
        LOGGER.debug("Remove the ConnectionEventListener attached to this PooledConnection.");
        listeners.remove(listener);
    }

    @Override
    public void addStatementEventListener(final StatementEventListener listener) {
        // Statement pooling is not supported; the listener is deliberately ignored.
        LOGGER.debug("addStatementEventListener is called on the current PooledConnection object.");
    }

    @Override
    public void removeStatementEventListener(final StatementEventListener listener) {
        // Statement pooling is not supported; nothing was registered, nothing to remove.
        LOGGER.debug("removeStatementEventListener is called on the current PooledConnection object.");
    }
}
| 4,605 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/ResultSet.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Date;
import java.sql.NClob;
import java.sql.Ref;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Statement;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Abstract implementation of ResultSet for JDBC Driver.
*/
public abstract class ResultSet implements java.sql.ResultSet {
private static final Logger LOGGER = LoggerFactory.getLogger(ResultSet.class);
private final AtomicBoolean isClosed = new AtomicBoolean(false);
private final Statement statement;
private SQLWarning warnings = null;
    /**
     * Constructor seeding the result set with its parent statement.
     *
     * @param statement the {@link Statement} that produced this result set;
     *                  returned later by {@link #getStatement()}.
     */
    protected ResultSet(final Statement statement) {
        this.statement = statement;
    }
    /**
     * Closes ResultSet and releases driver-side resources. Called at most once,
     * from {@link #close()}.
     *
     * @throws SQLException if a database exception occurs.
     */
    protected abstract void doClose() throws SQLException;

    /**
     * Get the driver fetch size in rows.
     *
     * @return A value representing the number of rows the driver should fetch.
     * @throws SQLException if a database exception occurs.
     */
    protected abstract int getDriverFetchSize() throws SQLException;

    /**
     * Set the driver fetch size by the number of rows. Input is validated
     * (non-negative) by {@link #setFetchSize(int)} before this is called.
     *
     * @param rows The number of rows for the driver to fetch.
     * @throws SQLException if a database exception occurs.
     */
    protected abstract void setDriverFetchSize(int rows) throws SQLException;

    /**
     * Gets the current row (zero-based) index; -1 indicates the cursor is
     * positioned before the first row.
     *
     * @return A value representing the current row (zero-based) index.
     * @throws SQLException if a database exception occurs.
     */
    protected abstract int getRowIndex() throws SQLException;

    /**
     * Gets the number of rows in the result set.
     *
     * @return A value representing the number of rows in the result set.
     * @throws SQLException if a database exception occurs.
     */
    protected abstract int getRowCount() throws SQLException;
    /**
     * Verify the result set is open; used as the guard at the top of most
     * public methods.
     *
     * @throws SQLException if the result set is closed.
     */
    protected void verifyOpen() throws SQLException {
        if (isClosed.get()) {
            throw SqlError.createSQLException(
                    LOGGER,
                    SqlState.DATA_EXCEPTION,
                    SqlError.RESULT_SET_CLOSED);
        }
    }
    @Override
    public boolean isClosed() {
        // Reflects only the local closed flag; does not probe the underlying cursor.
        return isClosed.get();
    }
@Override
public void close() throws SQLException {
if (isClosed.getAndSet(true)) {
return;
}
doClose();
}
// Warning implementation.
@Override
public SQLWarning getWarnings() throws SQLException {
verifyOpen();
return warnings;
}
@Override
public void clearWarnings() {
warnings = null;
}
/**
* Set a new warning if there were none, or add a new warning to the end of the list.
*
* @param warning The {@link SQLWarning} to add.
*/
protected void addWarning(final SQLWarning warning) {
LOGGER.warn(warning.getMessage());
if (this.warnings == null) {
this.warnings = warning;
return;
}
this.warnings.setNextWarning(warning);
}
@Override
public Statement getStatement() {
return statement;
}
@Override
public <T> T unwrap(final Class<T> iface) throws SQLException {
if (iface.isAssignableFrom(this.getClass())) {
return iface.cast(this);
}
throw SqlError.createSQLException(
LOGGER,
SqlState.DATA_EXCEPTION,
SqlError.CANNOT_UNWRAP,
iface.toString());
}
@Override
public boolean isWrapperFor(final Class<?> iface) {
return (null != iface) && iface.isAssignableFrom(this.getClass());
}
@Override
public boolean isBeforeFirst() throws SQLException {
verifyOpen();
return (getRowIndex() == -1);
}
@Override
public boolean isAfterLast() throws SQLException {
return (getRowIndex() >= getRowCount());
}
@Override
public boolean isFirst() throws SQLException {
verifyOpen();
return (getRowIndex() == 0);
}
@Override
public int getFetchSize() throws SQLException {
verifyOpen();
return getDriverFetchSize();
}
@Override
public void setFetchSize(final int rows) throws SQLException {
verifyOpen();
if (rows < 0) {
throw SqlError.createSQLException(LOGGER, SqlState.DATA_EXCEPTION, SqlError.INVALID_FETCH_SIZE, rows);
}
setDriverFetchSize(rows);
}
@Override
public boolean isLast() throws SQLException {
verifyOpen();
return (getRowIndex() == (getRowCount() - 1));
}
@Override
public void beforeFirst() throws SQLException {
verifyOpen();
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY);
}
@Override
public void afterLast() throws SQLException {
verifyOpen();
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY);
}
@Override
public boolean first() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY);
}
@Override
public boolean last() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY);
}
@Override
public int getRow() throws SQLException {
// getRow() returns 1-based row numbers where 0 indicates no current row such as when cursor
// is beforeFirst or afterLast.
return ((isBeforeFirst() || isAfterLast()) ? 0 : getRowIndex() + 1);
}
@Override
public boolean absolute(final int row) throws SQLException {
verifyOpen();
if (row < 1) {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.INVALID_ROW_VALUE);
} else if (getRow() > row) {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY);
}
while ((getRow() < row) && next()) {
continue;
}
return !isBeforeFirst() && !isAfterLast();
}
@Override
public int getFetchDirection() {
return java.sql.ResultSet.FETCH_FORWARD;
}
@Override
public void setFetchDirection(final int direction) throws SQLException {
if (direction != java.sql.ResultSet.FETCH_FORWARD) {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY);
}
}
@Override
public int getType() throws SQLException {
return java.sql.ResultSet.TYPE_FORWARD_ONLY;
}
@Override
public int getConcurrency() throws SQLException {
return java.sql.ResultSet.CONCUR_READ_ONLY;
}
@Override
public String getCursorName() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
// Add default not supported for all types.
@Override
public String getString(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
String.class.toString());
}
@Override
public boolean getBoolean(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Boolean.class.toString());
}
@Override
public byte getByte(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Byte.class.toString());
}
@Override
public short getShort(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Short.class.toString());
}
@Override
public int getInt(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Integer.class.toString());
}
@Override
public long getLong(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Long.class.toString());
}
@Override
public float getFloat(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Float.class.toString());
}
@Override
public double getDouble(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
BigDecimal.class.toString());
}
@Override
public BigDecimal getBigDecimal(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
String.class.toString());
}
@Override
@Deprecated
public BigDecimal getBigDecimal(final int columnIndex, final int scale) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
BigDecimal.class.toString());
}
@Override
public byte[] getBytes(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Byte.class.toString());
}
@Override
public Date getDate(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Date.class.toString());
}
@Override
public Time getTime(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Time.class.toString());
}
@Override
public Timestamp getTimestamp(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Timestamp.class.toString());
}
@Override
public InputStream getAsciiStream(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
InputStream.class.toString());
}
@Override
@Deprecated
public InputStream getUnicodeStream(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
InputStream.class.toString());
}
@Override
public InputStream getBinaryStream(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
InputStream.class.toString());
}
@Override
public Object getObject(final int columnIndex, final Map<String, Class<?>> map) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Object.class.toString());
}
@Override
public Object getObject(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
String.class.toString());
}
@Override
public Ref getRef(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Ref.class.toString());
}
@Override
public Blob getBlob(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Blob.class.toString());
}
@Override
public Clob getClob(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Clob.class.toString());
}
@Override
public Array getArray(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Array.class.toString());
}
@Override
public Timestamp getTimestamp(final int columnIndex, final Calendar cal) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Timestamp.class.toString());
}
@Override
public URL getURL(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
URL.class.toString());
}
@Override
public RowId getRowId(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
RowId.class.toString());
}
@Override
public NClob getNClob(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
NClob.class.toString());
}
@Override
public SQLXML getSQLXML(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Array.class.toString());
}
@Override
public String getNString(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
String.class.toString());
}
@Override
public Date getDate(final int columnIndex, final Calendar cal) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Date.class.toString());
}
@Override
public Time getTime(final int columnIndex, final Calendar cal) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Time.class.toString());
}
@Override
public Reader getNCharacterStream(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Reader.class.toString());
}
@Override
public <T> T getObject(final int columnIndex, final Class<T> type) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
Object.class.toString());
}
@Override
public Reader getCharacterStream(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(
LOGGER,
SqlError.UNSUPPORTED_TYPE,
String.class.toString());
}
// Default implementation for all label functions to just use findColumn(label) to find idx and lookup with idx.
@Override
public Object getObject(final String columnLabel, final Map<String, Class<?>> map) throws SQLException {
return getObject(findColumn(columnLabel), map);
}
@Override
public Ref getRef(final String columnLabel) throws SQLException {
return getRef(findColumn(columnLabel));
}
@Override
public Blob getBlob(final String columnLabel) throws SQLException {
return getBlob(findColumn(columnLabel));
}
@Override
public Clob getClob(final String columnLabel) throws SQLException {
return getClob(findColumn(columnLabel));
}
@Override
public Array getArray(final String columnLabel) throws SQLException {
return getArray(findColumn(columnLabel));
}
@Override
public String getString(final String columnLabel) throws SQLException {
return getString(findColumn(columnLabel));
}
@Override
public boolean getBoolean(final String columnLabel) throws SQLException {
return getBoolean(findColumn(columnLabel));
}
@Override
public byte getByte(final String columnLabel) throws SQLException {
return getByte(findColumn(columnLabel));
}
@Override
public short getShort(final String columnLabel) throws SQLException {
return getShort(findColumn(columnLabel));
}
@Override
public int getInt(final String columnLabel) throws SQLException {
return getInt(findColumn(columnLabel));
}
@Override
public long getLong(final String columnLabel) throws SQLException {
return getLong(findColumn(columnLabel));
}
@Override
public float getFloat(final String columnLabel) throws SQLException {
return getFloat(findColumn(columnLabel));
}
@Override
public double getDouble(final String columnLabel) throws SQLException {
return getDouble(findColumn(columnLabel));
}
@Override
@Deprecated
public BigDecimal getBigDecimal(final String columnLabel, final int scale) throws SQLException {
return getBigDecimal(findColumn(columnLabel));
}
@Override
public byte[] getBytes(final String columnLabel) throws SQLException {
return getBytes(findColumn(columnLabel));
}
@Override
public Date getDate(final String columnLabel) throws SQLException {
return getDate(findColumn(columnLabel));
}
@Override
public Time getTime(final String columnLabel) throws SQLException {
return getTime(findColumn(columnLabel));
}
@Override
public Timestamp getTimestamp(final String columnLabel) throws SQLException {
return getTimestamp(findColumn(columnLabel));
}
@Override
public InputStream getAsciiStream(final String columnLabel) throws SQLException {
return getAsciiStream(findColumn(columnLabel));
}
@Override
@Deprecated
public InputStream getUnicodeStream(final String columnLabel) throws SQLException {
return getUnicodeStream(findColumn(columnLabel));
}
@Override
public InputStream getBinaryStream(final String columnLabel) throws SQLException {
return getBinaryStream(findColumn(columnLabel));
}
@Override
public Object getObject(final String columnLabel) throws SQLException {
return getObject(findColumn(columnLabel));
}
@Override
public Reader getCharacterStream(final String columnLabel) throws SQLException {
return getCharacterStream(findColumn(columnLabel));
}
@Override
public BigDecimal getBigDecimal(final String columnLabel) throws SQLException {
return getBigDecimal(findColumn(columnLabel));
}
@Override
public SQLXML getSQLXML(final String columnLabel) throws SQLException {
return getSQLXML(findColumn(columnLabel));
}
@Override
public URL getURL(final String columnLabel) throws SQLException {
return getURL(findColumn(columnLabel));
}
@Override
public RowId getRowId(final String columnLabel) throws SQLException {
return getRowId(findColumn(columnLabel));
}
@Override
public NClob getNClob(final String columnLabel) throws SQLException {
return getNClob(findColumn(columnLabel));
}
@Override
public String getNString(final String columnLabel) throws SQLException {
return getNString(findColumn(columnLabel));
}
@Override
public Reader getNCharacterStream(final String columnLabel) throws SQLException {
return getNCharacterStream(findColumn(columnLabel));
}
@Override
public Date getDate(final String columnLabel, final Calendar cal) throws SQLException {
return getDate(findColumn(columnLabel), cal);
}
@Override
public Time getTime(final String columnLabel, final Calendar cal) throws SQLException {
return getTime(findColumn(columnLabel), cal);
}
@Override
public Timestamp getTimestamp(final String columnLabel, final Calendar cal) throws SQLException {
return getTimestamp(findColumn(columnLabel), cal);
}
@Override
public <T> T getObject(final String columnLabel, final Class<T> type) throws SQLException {
return getObject(findColumn(columnLabel), type);
}
// All functions below have default implementation which is setup for read only and forward only cursors.
@Override
public int getHoldability() throws SQLException {
return java.sql.ResultSet.CLOSE_CURSORS_AT_COMMIT;
}
@Override
public boolean relative(final int rows) throws SQLException {
verifyOpen();
if (rows < 0) {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY);
}
int rowCopy = rows;
while (rowCopy-- > 0) {
if (!next()) {
return false;
}
}
return true;
}
@Override
public boolean rowDeleted() {
return false;
}
@Override
public boolean rowInserted() {
return false;
}
@Override
public boolean rowUpdated() {
return false;
}
@Override
public void moveToCurrentRow() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY);
}
@Override
public void refreshRow() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_REFRESH_ROW);
}
@Override
public boolean previous() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.RESULT_FORWARD_ONLY);
}
@Override
public void insertRow() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void moveToInsertRow() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void deleteRow() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void cancelRowUpdates() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateArray(final int columnIndex, final Array x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateArray(final String columnLabel, final Array x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateAsciiStream(final int columnIndex, final InputStream x, final int length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateAsciiStream(final String columnLabel, final InputStream x, final int length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateAsciiStream(final int columnIndex, final InputStream x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateAsciiStream(final String columnLabel, final InputStream x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateAsciiStream(final int columnIndex, final InputStream x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateAsciiStream(final String columnLabel, final InputStream x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBigDecimal(final int columnIndex, final BigDecimal x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBigDecimal(final String columnLabel, final BigDecimal x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBinaryStream(final int columnIndex, final InputStream x, final int i1)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBinaryStream(final String columnLabel, final InputStream x, final int i)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBinaryStream(final int columnIndex, final InputStream x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBinaryStream(final String columnLabel, final InputStream x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBinaryStream(final int columnIndex, final InputStream x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBinaryStream(final String columnLabel, final InputStream x)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBlob(final int columnIndex, final Blob x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBlob(final String columnLabel, final Blob x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBlob(final int columnIndex, final InputStream x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBlob(final String columnLabel, final InputStream x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBlob(final int columnIndex, final InputStream x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBlob(final String columnLabel, final InputStream x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBoolean(final int columnIndex, final boolean x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBoolean(final String columnLabel, final boolean x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateByte(final int columnIndex, final byte x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateByte(final String columnLabel, final byte x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBytes(final int columnIndex, final byte[] x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateBytes(final String columnLabel, final byte[] x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateCharacterStream(final int columnIndex, final Reader x, final int length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateCharacterStream(final String columnLabel, final Reader x, final int length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateCharacterStream(final int columnIndex, final Reader x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateCharacterStream(final String columnLabel, final Reader x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateCharacterStream(final int columnIndex, final Reader x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateCharacterStream(final String columnLabel, final Reader x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateClob(final int columnIndex, final Clob x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateClob(final String columnLabel, final Clob x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateClob(final int columnIndex, final Reader x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateClob(final String columnLabel, final Reader x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateClob(final int columnIndex, final Reader x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateClob(final String columnLabel, final Reader x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateDate(final int columnIndex, final Date x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateDate(final String columnLabel, final Date x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateDouble(final int columnIndex, final double x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateDouble(final String columnLabel, final double x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateFloat(final int columnIndex, final float x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateFloat(final String columnLabel, final float x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateInt(final int columnIndex, final int x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateInt(final String columnLabel, final int x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateLong(final int columnIndex, final long l) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateLong(final String columnLabel, final long l) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNCharacterStream(final int columnIndex, final Reader x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNCharacterStream(final String columnLabel, final Reader x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNCharacterStream(final int columnIndex, final Reader x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNCharacterStream(final String columnLabel, final Reader x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNClob(final int columnIndex, final NClob nClob) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNClob(final String columnLabel, final NClob nClob) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNClob(final int columnIndex, final Reader x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNClob(final String columnLabel, final Reader x, final long length)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNClob(final int columnIndex, final Reader x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNClob(final String columnLabel, final Reader x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNString(final int columnIndex, final String x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNString(final String columnLabel, final String x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNull(final int columnIndex) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateNull(final String columnLabel) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateObject(final int columnIndex, final Object x, final int scaleOrLength)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateObject(final int columnIndex, final Object x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateObject(final String columnLabel, final Object x, final int scaleOrLength)
throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateObject(final String columnLabel, final Object x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateRef(final int columnIndex, final Ref x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateRef(final String columnLabel, final Ref x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateRow() throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateRowId(final int columnIndex, final RowId x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateRowId(final String columnLabel, final RowId x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateSQLXML(final int columnIndex, final SQLXML x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateSQLXML(final String columnLabel, final SQLXML x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateShort(final int columnIndex, final short x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateShort(final String columnLabel, final short x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateString(final int columnIndex, final String x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateString(final String columnLabel, final String x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
@Override
public void updateTime(final int columnIndex, final Time x) throws SQLException {
throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
}
    /** Not supported: always throws {@code SQLFeatureNotSupportedException} with the READ_ONLY error. */
    @Override
    public void updateTime(final String columnLabel, final Time x) throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
    }
    /** Not supported: always throws {@code SQLFeatureNotSupportedException} with the READ_ONLY error. */
    @Override
    public void updateTimestamp(final int columnIndex, final Timestamp x) throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
    }
    /** Not supported: always throws {@code SQLFeatureNotSupportedException} with the READ_ONLY error. */
    @Override
    public void updateTimestamp(final String columnLabel, final Timestamp x) throws SQLException {
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.READ_ONLY);
    }
}
| 4,606 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/ResultSetMetaData.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import java.sql.SQLException;
/**
* Abstract implementation of ResultSetMetaData for JDBC Driver.
*/
public abstract class ResultSetMetaData implements java.sql.ResultSetMetaData {
    private static final Logger LOGGER = LoggerFactory.getLogger(ResultSetMetaData.class);

    /**
     * Reports whether this object implements, or directly or indirectly wraps,
     * an implementor of the given interface.
     *
     * @param iface the interface to check; a null interface is never wrapped.
     * @return true when this instance is assignable to {@code iface}.
     */
    @Override
    public boolean isWrapperFor(final Class<?> iface) {
        if (iface == null) {
            return false;
        }
        return iface.isAssignableFrom(getClass());
    }

    /**
     * Casts this instance to the requested interface.
     *
     * @param iface the interface to unwrap to.
     * @param <T> the interface type.
     * @return this instance, cast to {@code iface}.
     * @throws SQLException if this instance does not implement {@code iface}.
     */
    @Override
    public <T> T unwrap(final Class<T> iface) throws SQLException {
        if (!iface.isAssignableFrom(getClass())) {
            throw SqlError.createSQLException(
                    LOGGER,
                    SqlState.DATA_EXCEPTION,
                    SqlError.CANNOT_UNWRAP,
                    iface.toString());
        }
        return iface.cast(this);
    }
}
| 4,607 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/DataSource.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.logging.Logger;
/**
* Abstract implementation of DataSource for JDBC Driver.
*/
public abstract class DataSource implements javax.sql.DataSource, javax.sql.ConnectionPoolDataSource {
    private static final Logger LOGGER = Logger.getLogger(DataSource.class.toString());

    /**
     * Casts this instance to the requested interface.
     *
     * @param iface the interface to unwrap to.
     * @param <T> the interface type.
     * @return this instance, cast to {@code iface}.
     * @throws SQLException if this instance does not implement {@code iface}.
     */
    @Override
    public <T> T unwrap(final Class<T> iface) throws SQLException {
        if (!isWrapperFor(iface)) {
            throw SqlError.createSQLException(
                    LOGGER,
                    SqlState.DATA_EXCEPTION,
                    SqlError.CANNOT_UNWRAP,
                    iface.toString());
        }
        return iface.cast(this);
    }

    /**
     * Reports whether this object implements, or directly or indirectly wraps,
     * an implementor of the given interface.
     *
     * @param iface the interface to check; a null interface is never wrapped.
     * @return true when this instance is assignable to {@code iface}.
     */
    @Override
    public boolean isWrapperFor(final Class<?> iface) throws SQLException {
        return (iface != null) && iface.isAssignableFrom(getClass());
    }

    /** No log writer is maintained by this data source. */
    @Override
    public PrintWriter getLogWriter() {
        return null;
    }

    /** Intentionally a no-op: this data source does not keep a log writer. */
    @Override
    public void setLogWriter(final PrintWriter out) {
        // NOOP
    }

    @Override
    public Logger getParentLogger() throws SQLFeatureNotSupportedException {
        return LOGGER;
    }
    // TODO: Get and set of different properties. Either done generically through this class or inheriting implementation.
}
| 4,608 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/utilities/JdbcColumnMetaData.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common.utilities;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Getter;
/**
* A data class to provide metadata for a result set column.
*/
// NOTE(review): Lombok's @AllArgsConstructor and @Builder generate their parameter lists from
// the field declaration order below, and the short constructor delegates positionally via
// this(...). Do NOT reorder the field declarations without updating that call and all
// all-args-constructor callers.
@AllArgsConstructor
@Builder
@Getter
public class JdbcColumnMetaData {
    /**
     * Creates a new {@link JdbcColumnMetaData}
     *
     * @param ordinal the zero-based ordinal of the column in the result set.
     * @param caseSensitive indicates whether a column's case matters.
     * @param nullable indicates the nullability of values in the designated column.
     * @param signed indicates whether values in the designated column are signed numbers.
     * @param columnDisplaySize indicates the designated column's normal maximum width in characters.
     * @param columnLabel the label of the column.
     * @param columnName the name of the column.
     * @param schemaName the schema the column belongs in.
     * @param precision the numeric precision.
     * @param scale the numeric scale.
     * @param columnType the column type.
     * @param columnTypeName the column type name.
     * @param columnClassName the column class name.
     */
    public JdbcColumnMetaData(
            final int ordinal,
            final boolean caseSensitive,
            final int nullable,
            final boolean signed,
            final int columnDisplaySize,
            final String columnLabel,
            final String columnName,
            final String schemaName,
            final int precision,
            final int scale,
            final int columnType,
            final String columnTypeName,
            final String columnClassName) {
        // Delegates to the lombok-generated all-args constructor; each positional argument
        // below corresponds to the field named in its trailing comment. Unspecified flags
        // default to a read-only, non-searchable column with no table/catalog name.
        this(
                ordinal,
                false, //autoIncrement,
                caseSensitive,
                false, //searchable,
                false, //currency,
                nullable,
                signed,
                columnDisplaySize,
                columnLabel,
                columnName,
                schemaName,
                precision,
                scale,
                null, //tableName,
                null, //catalogName,
                columnType,
                columnTypeName,
                true, //readOnly,
                false, //writable,
                false, //definitelyWritable,
                columnClassName);
    }
    /** Gets the zero-based ordinal of the column in the result set. */
    private final int ordinal; // 0-based
    /** Indicates whether the designated column is automatically numbered. */
    private final boolean autoIncrement;
    /** Indicates whether a column's case matters. */
    private final boolean caseSensitive;
    /** Indicates whether the designated column can be used in a where clause. */
    private final boolean searchable;
    /** Indicates whether the designated column is a cash value. */
    private final boolean currency;
    /** Indicates the nullability of values in the designated column. */
    private final int nullable;
    /** Indicates whether values in the designated column are signed numbers. */
    private final boolean signed;
    /** Indicates the designated column's normal maximum width in characters. */
    private final int columnDisplaySize;
    /**
     * Gets the designated column's suggested title for use in printouts and displays. The suggested
     * title is usually specified by the SQL <code>AS</code> clause. If a SQL <code>AS</code> is
     * not specified, the value returned from
     * <code>getColumnLabel</code> will be the same as the value returned by the
     * <code>getColumnName</code> method.
     */
    private final String columnLabel;
    /** Get the designated column's name. */
    private final String columnName;
    /** Get the designated column's table's schema. */
    private final String schemaName;
    /**
     * Get the designated column's specified column size. For numeric data, this is the maximum
     * precision. For character data, this is the length in characters. For datetime data types,
     * this is the length in characters of the String representation (assuming the maximum allowed
     * precision of the fractional seconds component). For binary data, this is the length in bytes.
     * For the ROWID datatype, this is the length in bytes. 0 is returned for data types where the
     * column size is not applicable.
     */
    private final int precision;
    /**
     * Gets the designated column's number of digits to right of the decimal point. 0 is returned
     * for data types where the scale is not applicable.
     */
    private final int scale;
    /** Gets the designated column's table name. */
    private final String tableName;
    /** Gets the designated column's table's catalog name. */
    private final String catalogName;
    /** Retrieves the designated column's SQL type. */
    private final int columnType;
    /** Retrieves the designated column's database-specific type name. */
    private final String columnTypeName;
    /** Indicates whether the designated column is definitely not writable. */
    private final boolean readOnly;
    /** Indicates whether it is possible for a write on the designated column to succeed. */
    private final boolean writable;
    /** Indicates whether a write on the designated column will definitely succeed. */
    private final boolean definitelyWritable;
    /**
     * <p>Returns the fully-qualified name of the Java class whose instances
     * are manufactured if the method <code>ResultSet.getObject</code> is called to retrieve a value
     * from the column. <code>ResultSet.getObject</code> may return a subclass of the class
     * returned by this method.
     */
    private final String columnClassName;
}
| 4,609 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/utilities/Warning.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common.utilities;
import java.util.ResourceBundle;
/**
* Enum representing the possible warning messages and lookup facilities for localization.
*/
public enum Warning {
    MAX_VALUE_TRUNCATED,
    VALUE_TRUNCATED,
    UNSUPPORTED_PROPERTY,
    ;

    // Localized warning templates, keyed by enum constant name.
    private static final ResourceBundle RESOURCE = ResourceBundle.getBundle("jdbc");

    /**
     * Resolves the localized message template for the given warning and formats it
     * with the supplied arguments.
     *
     * @param key the warning whose resource-bundle entry should be used.
     * @param formatArgs arguments substituted into the template via {@link String#format}.
     * @return the formatted, localized warning message.
     */
    public static String lookup(final Warning key, final Object... formatArgs) {
        final String template = RESOURCE.getString(key.name());
        return String.format(template, formatArgs);
    }
}
| 4,610 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/utilities/LazyLinkedHashMap.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common.utilities;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableSet;
import lombok.NonNull;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* Implements a lazy {@link LinkedHashMap} where the keySet is set in the constructor, but
* the get() is lazy loaded.
*
* @param <K> the key type.
* @param <V> the value type.
*/
public class LazyLinkedHashMap<K,V> implements Map<K,V> {
    /** Fixed, insertion-ordered set of keys this map exposes. */
    private final ImmutableSet<K> keySet;
    /** Cache of the values materialized so far, in materialization order. */
    private final Map<K,V> map;
    /** Factory invoked at most once per key to materialize a single value. */
    private final Function<K,V> factory;
    /** Optional bulk factory for all not-yet-materialized keys; null when not supplied. */
    private final Function<Set<K>, Map<K,V>> allValuesFactory;

    /**
     * Constructs a new {@link LazyLinkedHashMap} with a given keySet and a factory function.
     * This map is a read-only map and does not support adding entries or updating existing entries.
     * The keySet provides the fixed set of keys for this map.
     * The factory function is invoked when the client calls the {@code get(key)} method.
     *
     * @param keySet the keySet to use.
     * @param factory the factory method to retrieve the instance at the map.
     */
    public LazyLinkedHashMap(
            @NonNull final Set<K> keySet,
            @NonNull final Function<K,V> factory) {
        this.keySet = ImmutableSet.copyOf(keySet);
        this.factory = factory;
        this.allValuesFactory = null;
        this.map = new LinkedHashMap<>();
    }

    /**
     * Constructs a new {@link LazyLinkedHashMap} with a given keySet, a per-key factory,
     * and a bulk factory that allows {@link #values()} and {@link #entrySet()} to
     * materialize all remaining entries at once.
     *
     * @param keySet the keySet to use.
     * @param factory the factory method to retrieve the instance at the map.
     * @param remainingValuesFactory the factory method to retrieve all remaining instances in the map.
     */
    public LazyLinkedHashMap(
            @NonNull final Set<K> keySet,
            @NonNull final Function<K,V> factory,
            @NonNull final Function<Set<K>, Map<K,V>> remainingValuesFactory) {
        this.keySet = ImmutableSet.copyOf(keySet);
        this.factory = factory;
        this.allValuesFactory = remainingValuesFactory;
        this.map = new LinkedHashMap<>();
    }

    @Override
    public int size() {
        // Size is defined by the fixed key set, not by how many values are materialized.
        return keySet.size();
    }

    @Override
    public boolean isEmpty() {
        return keySet.isEmpty();
    }

    @Override
    public boolean containsKey(final Object key) {
        // Set.contains accepts Object — no unchecked cast (or warning suppression) needed.
        return keySet.contains(key);
    }

    @Override
    public boolean containsValue(final Object value) {
        // Answering this would force materializing every value, defeating the lazy load.
        throw new UnsupportedOperationException();
    }

    /**
     * Gets the value for the given key, materializing it via the factory on first access.
     * The factory result is cached — including a null result — so the factory runs at
     * most once per key.
     *
     * @param key the key to look up.
     * @return the (possibly cached) value, or null if the key is not in the key set.
     */
    @Override
    public V get(final Object key) {
        if (!keySet.contains(key)) {
            return null;
        }
        @SuppressWarnings("unchecked")
        final K typedKey = (K) key;
        if (!map.containsKey(typedKey)) {
            map.put(typedKey, factory.apply(typedKey));
        }
        return map.get(typedKey);
    }

    @Override
    public V put(final K key, final V value) {
        throw new UnsupportedOperationException();
    }

    @Override
    public V remove(final Object key) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void putAll(final Map<? extends K, ? extends V> m) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void clear() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Set<K> keySet() {
        return keySet;
    }

    @Override
    public Collection<V> values() {
        if (!ensureFullyMaterialized()) {
            // No bulk factory: loading one-by-one here would defeat the purpose of "lazy load".
            throw new UnsupportedOperationException();
        }
        return map.values();
    }

    @Override
    public Set<Entry<K, V>> entrySet() {
        if (!ensureFullyMaterialized()) {
            // No bulk factory: loading one-by-one here would defeat the purpose of "lazy load".
            throw new UnsupportedOperationException();
        }
        return map.entrySet();
    }

    /**
     * Materializes any remaining values when a bulk factory is available.
     *
     * @return true when the backing map now holds a value for every key; false when
     *         entries are missing and no bulk factory was supplied.
     */
    private boolean ensureFullyMaterialized() {
        if (map.size() == keySet.size()) {
            return true;
        }
        if (allValuesFactory == null) {
            return false;
        }
        putAllRemaining();
        return true;
    }

    @VisibleForTesting
    int getLazyMapSize() {
        return map.size();
    }

    /** Bulk-materializes every key not yet present, preserving key-set order. */
    private void putAllRemaining() {
        final LinkedHashSet<K> missingKeySet = keySet.stream()
                .filter(key -> !map.containsKey(key))
                .collect(Collectors.toCollection(LinkedHashSet::new));
        map.putAll(allValuesFactory.apply(missingKeySet));
    }
}
| 4,611 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/utilities/SqlError.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common.utilities;
import org.slf4j.Logger;
import java.sql.ClientInfoStatus;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.regex.Pattern;
/**
* Enum representing the possible error messages and lookup facilities for localization.
*/
public enum SqlError {
    AFTER_LAST,
    AUTHORIZATION_ERROR,
    BEFORE_FIRST,
    CANNOT_UNWRAP,
    CANNOT_RETRIEVE_COLUMN,
    CONN_CLOSED,
    CREATE_FOLDER_FAILED,
    DELETE_SCHEMA_FAILED,
    DELETE_TABLE_SCHEMA_FAILED,
    DELETE_TABLE_SCHEMA_INCONSISTENT,
    EQUIJOINS_ON_FK_ONLY,
    INCONSISTENT_SCHEMA,
    INVALID_COLUMN_LABEL,
    INVALID_CONNECTION_PROPERTIES,
    INVALID_FETCH_SIZE,
    INVALID_FORMAT,
    INVALID_LARGE_MAX_ROWS_SIZE,
    INVALID_MAX_FIELD_SIZE,
    INVALID_ROW_VALUE,
    INVALID_INDEX,
    INVALID_TIMEOUT,
    INVALID_STATE_SET_TABLE_FUNCTION,
    JOIN_MISSING_PRIMARY_KEYS,
    KNOWN_HOSTS_FILE_NOT_FOUND,
    MISSING_DATABASE,
    MISSING_HOSTNAME,
    MISSING_JAVA_HOME,
    MISSING_JAVA_BIN,
    MISSING_SCHEMA,
    MISSING_PASSWORD,
    MISSING_SSH_USER,
    MISSING_SSH_HOSTNAME,
    MISSING_SSH_PRIVATE_KEY_FILE,
    MISSING_USER_PASSWORD,
    MISSING_LITERAL_VALUE,
    MISMATCH_SCHEMA_NAME,
    PARAMETERS_NOT_SUPPORTED,
    PASSWORD_PROMPT,
    POOLING_NOT_SUPPORTED,
    QUERY_CANCELED,
    QUERY_CANNOT_BE_CANCELED,
    QUERY_FAILED,
    QUERY_IN_PROGRESS,
    QUERY_NOT_STARTED_OR_COMPLETE,
    READ_ONLY,
    RESULT_FORWARD_ONLY,
    RESULT_SET_CLOSED,
    SECURITY_ERROR,
    SSH_PRIVATE_KEY_FILE_NOT_FOUND,
    SSH_TUNNEL_PATH_NOT_FOUND,
    SINGLE_EQUIJOIN_ONLY,
    SQL_PARSE_ERROR,
    SSH_TUNNEL_ERROR,
    STMT_CLOSED,
    TLS_CA_FILE_NOT_FOUND,
    TRANSACTIONS_NOT_SUPPORTED,
    UPSERT_SCHEMA_FAILED,
    UNSUPPORTED_CONVERSION,
    UNSUPPORTED_CROSS_REFERENCE,
    UNSUPPORTED_EXPORTED_KEYS,
    UNSUPPORTED_FETCH_DIRECTION,
    UNSUPPORTED_FUNCTIONS,
    UNSUPPORTED_FUNCTION_COLUMNS,
    UNSUPPORTED_GENERATED_KEYS,
    UNSUPPORTED_JOIN_TYPE,
    UNSUPPORTED_PREPARE_STATEMENT,
    UNSUPPORTED_PREPARE_CALL,
    UNSUPPORTED_PROCEDURE_COLUMNS,
    UNSUPPORTED_PROPERTY,
    UNSUPPORTED_PSEUDO_COLUMNS,
    UNSUPPORTED_REFRESH_ROW,
    UNSUPPORTED_RESULT_SET_TYPE,
    UNSUPPORTED_TABLE_PRIVILEGES,
    UNSUPPORTED_TYPE,
    UNSUPPORTED_SQL,
    UNSUPPORTED_SUPER_TABLES,
    UNSUPPORTED_SUPER_TYPES,
    UNSUPPORTED_USER_DEFINED_TYPES,
    UNSUPPORTED_VERSION_COLUMNS,
    ;
    private static final ResourceBundle RESOURCE = ResourceBundle.getBundle("jdbc");
    // Compiled once: String.replaceAll would recompile this regex on every lookup() call.
    private static final Pattern LINE_BREAK_PATTERN = Pattern.compile("[\r\n]");
    /**
     * Looks up the resource bundle string corresponding to the key, and formats it with the provided
     * arguments. Any carriage-return/line-feed characters are stripped so the message always
     * occupies a single log line.
     *
     * @param key Resource key for bundle provided to constructor.
     * @param formatArgs Any additional arguments to format the resource string with.
     * @return resource String, formatted with formatArgs.
     */
    public static String lookup(final SqlError key, final Object... formatArgs) {
        // Remove any new lines.
        final String formatted = String.format(RESOURCE.getString(key.name()), formatArgs);
        return LINE_BREAK_PATTERN.matcher(formatted).replaceAll("");
    }
    /**
     * Get the error message and log the message at ERROR level.
     *
     * @param logger The {@link Logger} contains log info.
     * @param key Resource key for bundle provided to constructor.
     * @param formatArgs Any additional arguments to format the resource string with.
     * @return error message
     */
    static String getErrorMessage(
            final Logger logger,
            final SqlError key,
            final Object... formatArgs) {
        final String error = lookup(key, formatArgs);
        logger.error(error);
        return error;
    }
    /**
     * Create {@link SQLException} of error and log the message with a {@link java.util.logging.Logger}.
     *
     * @param logger The {@link java.util.logging.Logger} contains log info.
     * @param sqlState A code identifying the SQL error condition.
     * @param key Resource key for bundle provided to constructor.
     * @param formatArgs Any additional arguments to format the resource string with.
     * @return SQLException with error message.
     */
    public static SQLException createSQLException(
            final java.util.logging.Logger logger,
            final SqlState sqlState,
            final SqlError key,
            final Object... formatArgs) {
        final String error = lookup(key, formatArgs);
        logger.severe(error);
        return new SQLException(error, sqlState.getSqlState());
    }
    /**
     * Create SQLException of error and log the message with a {@link Logger}.
     *
     * @param logger The {@link Logger} contains log info.
     * @param sqlState A code identifying the SQL error condition.
     * @param key Resource key for bundle provided to constructor.
     * @param formatArgs Any additional arguments to format the resource string with.
     * @return SQLException with error message.
     */
    public static SQLException createSQLException(
            final Logger logger,
            final SqlState sqlState,
            final SqlError key,
            final Object... formatArgs) {
        final String error = lookup(key, formatArgs);
        logger.error(error);
        return new SQLException(error, sqlState.getSqlState());
    }
    /**
     * Create SQLException of error and log the message with a {@link Logger}. The given
     * exception is preserved as the cause of the returned SQLException.
     *
     * @param logger The {@link Logger} contains log info.
     * @param sqlState A code identifying the SQL error condition.
     * @param exception An {@link Exception} instance set as the cause.
     * @param key Resource key for bundle provided to constructor.
     * @param formatArgs Any additional arguments to format the resource string with.
     * @return SQLException with error message.
     */
    public static SQLException createSQLException(
            final Logger logger,
            final SqlState sqlState,
            final Exception exception,
            final SqlError key,
            final Object... formatArgs) {
        final String error = lookup(key, formatArgs);
        logger.error(error);
        return new SQLException(error, sqlState.getSqlState(), exception);
    }
    /**
     * Create {@link SQLFeatureNotSupportedException} of error and log the message with a {@link Logger}.
     * Logged at TRACE (not ERROR) since unsupported-feature probes are routine for JDBC clients.
     *
     * @param logger The {@link Logger} contains log info.
     * @param key Resource key for bundle provided to constructor.
     * @param formatArgs Any additional arguments to format the resource string with.
     * @return SQLFeatureNotSupportedException with error message.
     */
    public static SQLFeatureNotSupportedException createSQLFeatureNotSupportedException(
            final Logger logger,
            final SqlError key,
            final Object... formatArgs) {
        final String error = lookup(key, formatArgs);
        logger.trace(error);
        return new SQLFeatureNotSupportedException(error);
    }
    /**
     * Create {@link SQLClientInfoException} of error and log the message with a {@link Logger}.
     *
     * @param logger The {@link Logger} contains log info.
     * @param key Resource key for bundle provided to constructor.
     * @param map A Map containing the property values that could not be set.
     * @param formatArgs Any additional arguments to format the resource string with.
     * @return SQLClientInfoException with error message.
     */
    public static SQLClientInfoException createSQLClientInfoException(
            final Logger logger,
            final SqlError key,
            final Map<String, ClientInfoStatus> map,
            final Object... formatArgs) {
        final String error = lookup(key, formatArgs);
        logger.error(error);
        return new SQLClientInfoException(error, map);
    }
}
| 4,612 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/utilities/ConnectionProperty.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common.utilities;
/**
* The interface for connection properties.
*/
public interface ConnectionProperty {
    /** Name of the application-name connection property. */
    // Interface fields are implicitly public static final; redundant modifiers removed.
    String APPLICATION_NAME = "appName";

    /**
     * Gets the connection property name.
     *
     * @return the connection property.
     */
    String getName();

    /**
     * Gets the default value of the connection property.
     *
     * @return the default value of the connection property.
     */
    String getDefaultValue();

    /**
     * Gets description.
     *
     * @return the description.
     */
    String getDescription();
}
| 4,613 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/utilities/SqlState.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common.utilities;
/**
* Copy of the SQLSTATE codes but as an enum, for use in throwing SQLException.
*/
public enum SqlState {
    CONNECTION_EXCEPTION("08000"),
    CONNECTION_FAILURE("08006"),
    DATA_EXCEPTION("22000"),
    DATA_TYPE_TRANSFORM_VIOLATION("0700B"),
    DATA_EXCEPTION_NULL_VALUE("22002"),
    EMPTY_STRING("2200F"),
    FEATURE_NOT_SUPPORTED("0A000"),
    INVALID_AUTHORIZATION_SPECIFICATION("28000"),
    INVALID_PARAMETER_VALUE("22023"),
    INVALID_QUERY_EXPRESSION("2201S"),
    RESTRICTED_DATA_TYPE_VIOLATION("07006"),
    NUMERIC_VALUE_OUT_OF_RANGE("22003"),
    NO_RESULT_SET_RETURNED("02001"),
    OPERATION_CANCELED("HY008"),
    SQL_CLIENT_UNABLE_TO_ESTABLISH_SQL_CONNECTION("08001"),
    ;
    /**
     * The SQLSTATE code.
     */
    private final String sqlState;
    /**
     * SqlState constructor.
     *
     * @param sqlState The SQLSTATE code associated with this sql state.
     */
    SqlState(final String sqlState) {
        this.sqlState = sqlState;
    }
    /**
     * Gets the SQLSTATE code for this condition.
     *
     * @return the SQLSTATE code string.
     */
    public String getSqlState() {
        return sqlState;
    }
}
| 4,614 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/utilities/TypeConverters.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common.utilities;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.BaseEncoding;
import lombok.SneakyThrows;
import org.apache.commons.beanutils.Converter;
import org.apache.commons.beanutils.converters.AbstractConverter;
import org.apache.commons.beanutils.converters.ArrayConverter;
import org.apache.commons.beanutils.converters.BigDecimalConverter;
import org.apache.commons.beanutils.converters.BooleanConverter;
import org.apache.commons.beanutils.converters.ByteConverter;
import org.apache.commons.beanutils.converters.DateConverter;
import org.apache.commons.beanutils.converters.DateTimeConverter;
import org.apache.commons.beanutils.converters.DoubleConverter;
import org.apache.commons.beanutils.converters.FloatConverter;
import org.apache.commons.beanutils.converters.IntegerConverter;
import org.apache.commons.beanutils.converters.LongConverter;
import org.apache.commons.beanutils.converters.NumberConverter;
import org.apache.commons.beanutils.converters.ShortConverter;
import org.apache.commons.beanutils.converters.SqlTimestampConverter;
import org.apache.commons.beanutils.converters.StringConverter;
import org.bson.BsonRegularExpression;
import org.bson.BsonTimestamp;
import org.bson.types.Binary;
import org.bson.types.Decimal128;
import org.bson.types.MaxKey;
import org.bson.types.MinKey;
import org.bson.types.ObjectId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.math.BigDecimal;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Date;
import java.util.concurrent.TimeUnit;
/**
* Provides a map of type converters.
*/
public class TypeConverters {
    private static final Logger LOGGER = LoggerFactory.getLogger(TypeConverters.class);
    /** Immutable registry mapping a source Java/BSON type to the converter used to coerce it. */
    private static final ImmutableMap<Class<?>, AbstractConverter> TYPE_CONVERTERS_MAP;
    static {
        // BSON-only types with no natural JDBC mapping (MinKey, MaxKey, ObjectId,
        // BsonRegularExpression) are converted via their string form.
        // NOTE(review): converters constructed with a default argument (e.g. new IntegerConverter(0))
        // presumably fall back to that default rather than throwing on a failed conversion, per
        // commons-beanutils behavior — TODO confirm this is intentional for all entries.
        TYPE_CONVERTERS_MAP = ImmutableMap.<Class<?>, AbstractConverter>builder()
                .put(Decimal128.class, new Decimal128Converter(new Decimal128(0)))
                .put(BigDecimal.class, new BigDecimalConverter(0))
                .put(Boolean.class, new BooleanConverter(false))
                .put(boolean.class, new BooleanConverter(false))
                .put(BsonTimestamp.class, new BsonTimestampConverter())
                .put(BsonRegularExpression.class, new StringConverter())
                .put(Byte.class, new ByteConverter(0))
                .put(byte.class, new ByteConverter(0))
                .put(Date.class, new DateConverter(null))
                .put(java.sql.Date.class, new DateConverter(null))
                .put(Double.class, new DoubleConverter(0.0))
                .put(double.class, new DoubleConverter(0.0))
                .put(Float.class, new FloatConverter(0.0))
                .put(float.class, new FloatConverter(0.0))
                .put(Integer.class, new IntegerConverter(0))
                .put(int.class, new IntegerConverter(0))
                .put(Long.class, new LongConverter(0))
                .put(long.class, new LongConverter(0))
                .put(MaxKey.class, new StringConverter())
                .put(MinKey.class, new StringConverter())
                .put(ObjectId.class, new StringConverter())
                .put(Short.class, new ShortConverter(0))
                .put(short.class, new ShortConverter(0))
                .put(String.class, new StringConverter())
                .put(Timestamp.class, new SqlTimestampConverter())
                .put(Byte[].class, new ArrayConverter(Byte[].class, new ByteConverter(), -1))
                .put(byte[].class, new ArrayConverter(byte[].class, new ByteConverter(), -1))
                .put(Binary.class, new BsonBinaryConverter(byte[].class, new ByteConverter()))
                .build();
    }
/**
* Gets the type converter for the given source type.
*
* @param sourceType the source type to get the converter for.
* @param targetType the target type used to log error in case of missing converter.
* @return a {@link AbstractConverter} instance for the source type.
*
* @throws SQLException if a converter cannot be found the source type.
*/
public static AbstractConverter get(final Class<? extends Object> sourceType,
final Class<? extends Object> targetType) throws SQLException {
final AbstractConverter converter = TYPE_CONVERTERS_MAP.get(sourceType);
if (converter == null) {
throw SqlError.createSQLException(LOGGER,
SqlState.DATA_EXCEPTION,
SqlError.UNSUPPORTED_CONVERSION,
sourceType.getSimpleName(),
targetType.getSimpleName());
}
return converter;
}
/**
* Converter for Decimal128 type.
*/
private static class Decimal128Converter extends NumberConverter {
/**
* Default constructor for converter.
*/
public Decimal128Converter() {
super(true);
}
/**
* Constuctor for converter where you can specify the default value.
* @param defaultValue the default value for conversion.
*/
public Decimal128Converter(final Object defaultValue) {
super(true, defaultValue);
}
/**
* Converts to the target type. Specifically tries to handle conversion from {@link Decimal128} to
* type{@link BigDecimal}.
*
* @param targetType Data type to which this value should be converted.
* @param value The input value to be converted.
* @return a value converted to the target type or the value.
* @param <T> the type of return value.
* @throws Throwable thrown on conversion exception.
*/
@Override
protected <T> T convertToType(final Class<T> targetType, final Object value) throws Throwable {
if (value instanceof Decimal128) {
if (targetType.isAssignableFrom(BigDecimal.class)) {
return targetType.cast(((Decimal128) value).bigDecimalValue());
}
return super.convertToType(targetType, ((Decimal128) value).doubleValue());
}
return super.convertToType(targetType, value);
}
@Override
protected String convertToString(final Object value) throws Throwable {
if (value instanceof Decimal128) {
return ((Decimal128) value).toString();
}
return super.convertToString(value);
}
@Override
protected Class<?> getDefaultType() {
return String.class;
}
}
private static class BsonTimestampConverter extends DateTimeConverter {
/**
* Creates a {@link BsonTimestampConverter} with no default value.
*/
public BsonTimestampConverter() {
super();
}
/**
* Creates a {@link BsonTimestampConverter} with a default value.
*
* @param defaultValue the default value if source value missing or cannot be converted.
*/
public BsonTimestampConverter(final Object defaultValue) {
super(defaultValue);
}
@Override
protected <T> T convertToType(final Class<T> targetType, final Object value) throws Exception {
if (value instanceof BsonTimestamp) {
final long timeInMillisSinceEpoch = getTimeInMillisSinceEpoch((BsonTimestamp) value);
return super.convertToType(targetType, timeInMillisSinceEpoch);
}
return super.convertToType(targetType, value);
}
@Override
protected String convertToString(final Object value) throws Throwable {
if (value instanceof BsonTimestamp) {
final long timeInMillisSinceEpoch = getTimeInMillisSinceEpoch((BsonTimestamp) value);
return super.convertToString(super.convertToType(getDefaultType(), timeInMillisSinceEpoch));
}
return super.convertToString(value);
}
private static long getTimeInMillisSinceEpoch(final BsonTimestamp value) {
// This returns time in seconds since epoch.
final int timeInSecsSinceEpoch = value.getTime();
return TimeUnit.SECONDS.toMillis(timeInSecsSinceEpoch);
}
@Override
protected Class<?> getDefaultType() {
return Timestamp.class;
}
}
private static class BsonBinaryConverter extends ArrayConverter {
public BsonBinaryConverter(final Class<?> defaultType, final Converter elementConverter) {
super(defaultType, elementConverter);
}
public BsonBinaryConverter(final Class<?> defaultType, final Converter elementConverter, final int defaultSize) {
super(defaultType, elementConverter, defaultSize);
}
@SneakyThrows
@Override
@SuppressWarnings("unchecked")
public <T> T convertToType(final Class<T> type, final Object value) {
final Class<?> targetType = type == null ? getDefaultType() : type;
if (value instanceof Binary) {
if (targetType.isAssignableFrom(String.class)) {
return (T) convertToString(value);
}
return (T) super.convertToType(targetType, ((Binary) value).getData());
}
return (T) super.convertToType(targetType, value);
}
@Override
protected String convertToString(final Object value) throws Throwable {
if (value instanceof Binary) {
return BaseEncoding.base16().encode(((Binary) value).getData());
}
return super.convertToString(value);
}
@Override
protected Class<?> getDefaultType() {
return byte[].class;
}
}
}
| 4,615 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/common/utilities/JdbcType.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.common.utilities;
import java.util.HashMap;
import java.util.Map;
/**
* Copy of the java.sql.Types constants but as an enum, for use in lookups.
* Warning: if a JDBC type is added or deprecated, the change should be reflected
* on the ODBC driver as well. Files to be changed in the ODBC driver:
* namespace binary in binary_common.h, and functions BinaryToSqlTypeName,
* BinaryToSqlType and SqlTypeToBinary in namespace type_traits in type_traits.cpp
*/
/**
 * Enum mirror of the {@code java.sql.Types} constants, enabling reverse lookup
 * from the integer type code.
 * Warning: if a JDBC type is added or deprecated, the change should be reflected
 * on the ODBC driver as well. Files to be changed in the ODBC driver:
 * namespace binary in binary_common.h, and functions BinaryToSqlTypeName,
 * BinaryToSqlType and SqlTypeToBinary in namespace type_traits in type_traits.cpp
 */
public enum JdbcType {
    BIT(-7),
    TINYINT(-6),
    SMALLINT(5),
    INTEGER(4),
    BIGINT(-5),
    FLOAT(6),
    REAL(7),
    DOUBLE(8),
    NUMERIC(2),
    DECIMAL(3),
    CHAR(1),
    VARCHAR(12),
    LONGVARCHAR(-1),
    DATE(91),
    TIME(92),
    TIMESTAMP(93),
    BINARY(-2),
    VARBINARY(-3),
    LONGVARBINARY(-4),
    BLOB(2004),
    CLOB(2005),
    BOOLEAN(16),
    ARRAY(2003),
    STRUCT(2002),
    JAVA_OBJECT(2000),
    ROWID(-8),
    NCHAR(-15),
    NVARCHAR(-9),
    LONGNVARCHAR(-16),
    NCLOB(2011),
    SQLXML(2009),
    REF_CURSOR(2012),
    NULL(0);

    /** Reverse index from java.sql.Types code to its enum constant. */
    private static final Map<Integer, JdbcType> TYPE_MAP = new HashMap<>();

    static {
        // Populate the reverse index once all constants have been constructed.
        for (final JdbcType value : values()) {
            TYPE_MAP.put(value.jdbcType, value);
        }
    }

    /** The java.sql.Types integer code carried by this constant. */
    private final int jdbcType;

    /**
     * JdbcType constructor.
     *
     * @param typeCode the java.sql.Types integer code associated with this constant.
     */
    JdbcType(final int typeCode) {
        this.jdbcType = typeCode;
    }

    /**
     * Gets the java.sql.Types integer code for this constant.
     *
     * @return the JDBC type code.
     */
    public int getJdbcType() {
        return jdbcType;
    }

    /**
     * Looks up the enum constant matching a java.sql.Types code.
     *
     * @param type the java.sql.Types integer code to search for.
     *
     * @return the matching {@link JdbcType}, or {@code null} if the code is unknown.
     */
    public static JdbcType fromType(final int type) {
        return TYPE_MAP.get(type);
    }
}
| 4,616 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/query/DocumentDbMqlQueryContext.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.query;
import lombok.Builder;
import lombok.Getter;
import org.bson.conversions.Bson;
import org.bson.json.JsonMode;
import org.bson.json.JsonWriterSettings;
import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Carries everything required to execute the translated query in DocumentDB
 * and to construct a ResultSet from the aggregation output.
 */
@Getter
@Builder
public class DocumentDbMqlQueryContext {
    /** The column metadata describing the return row. */
    private final List<JdbcColumnMetaData> columnMetaData;
    /** The operations to use in the aggregation. */
    private final List<Bson> aggregateOperations;
    /** The collection name to use in the aggregation. */
    private final String collectionName;
    /** The path information for the output documents. Maps column names to field paths.*/
    private final List<String> paths;

    /**
     * Renders each aggregation stage as an extended-JSON string, preserving
     * pipeline order.
     *
     * @return the aggregation stages serialized to extended JSON.
     */
    public List<String> getAggregateOperationsAsStrings() {
        // Build the writer settings once and reuse them for every stage.
        final JsonWriterSettings extendedJson =
                JsonWriterSettings.builder().outputMode(JsonMode.EXTENDED).build();
        return aggregateOperations.stream()
                .map(stage -> stage.toBsonDocument().toJson(extendedJson))
                .collect(Collectors.toList());
    }
}
| 4,617 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/query/DocumentDbQueryMappingService.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.query;
import com.google.common.collect.ImmutableList;
import lombok.SneakyThrows;
import org.apache.calcite.DataContext;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.avatica.util.DateTimeUtils;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.config.CalciteConnectionConfig;
import org.apache.calcite.config.CalciteConnectionConfigImpl;
import org.apache.calcite.jdbc.CalcitePrepare;
import org.apache.calcite.jdbc.CalcitePrepare.CalciteSignature;
import org.apache.calcite.jdbc.CalcitePrepare.Query;
import org.apache.calcite.jdbc.CalcitePrepare.SparkHandler;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.jdbc.JavaTypeFactoryImpl;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.QueryProvider;
import org.apache.calcite.prepare.CalcitePrepareImpl;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rel.type.RelDataTypeSystemImpl;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.schema.Schema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.SchemaVersion;
import org.apache.calcite.schema.impl.LongSchemaVersion;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.type.SqlTypeUtil;
import org.apache.calcite.sql2rel.SqlRexContext;
import org.apache.calcite.sql2rel.SqlRexConvertlet;
import org.apache.calcite.sql2rel.SqlRexConvertletTable;
import org.apache.calcite.sql2rel.StandardConvertletTable;
import org.apache.calcite.tools.RelRunner;
import org.bson.BsonDocument;
import org.bson.BsonInt64;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbEnumerable;
import software.amazon.documentdb.jdbc.calcite.adapter.DocumentDbSchemaFactory;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata;
import software.amazon.documentdb.jdbc.metadata.DocumentDbJdbcMetaDataConverter;
import java.math.BigDecimal;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Maps SQL statements to DocumentDB aggregation (MQL) pipelines by driving the
 * Calcite prepare/optimize pipeline configured with the DocumentDB adapter.
 */
public class DocumentDbQueryMappingService {
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbQueryMappingService.class);
    private static final String CALCITE_DEFAULT_CHARSET = "calcite.default.charset";
    private static final String CHARSET_UTF_8 = "utf8";
    private final DocumentDbPrepareContext prepareContext;
    private final CalcitePrepare prepare;
    /**
     * Holds the DocumentDbDatabaseSchemaMetadata, CalcitePrepare.Context and the CalcitePrepare
     * generated for a particular connection.
     * The default prepare factory is used like in CalciteConnectImpl.
     *
     * @param connectionProperties the connection properties.
     * @param databaseMetadata the database schema metadata.
     */
    public DocumentDbQueryMappingService(final DocumentDbConnectionProperties connectionProperties,
            final DocumentDbDatabaseSchemaMetadata databaseMetadata) {
        // Add MYSQL function support
        connectionProperties.putIfAbsent("FUN", "standard,mysql");
        // Leave unquoted identifiers in their original case. Identifiers are still case-sensitive
        // but do not need to be quoted
        connectionProperties.putIfAbsent("UNQUOTEDCASING", "UNCHANGED");
        // Allow Unicode (utf-8) queries to be handled.
        if (System.getProperty(CALCITE_DEFAULT_CHARSET) == null) {
            System.setProperty(CALCITE_DEFAULT_CHARSET, CHARSET_UTF_8);
        }
        this.prepareContext =
                new DocumentDbPrepareContext(
                        getRootSchemaFromDatabaseMetadata(connectionProperties, databaseMetadata),
                        connectionProperties.getDatabase(),
                        connectionProperties);
        this.prepare = new DocumentDbPrepareImplementation();
    }
    /**
     * Uses CalcitePrepare API to parse and validate sql and convert to MQL.
     * @param sql the query in sql
     * @param maxRowCount the max number of rows to return
     * @return the query context that has the target collection, aggregation stages, and result set metadata.
     */
    public DocumentDbMqlQueryContext get(final String sql, final long maxRowCount) throws SQLException {
        final Query<Object> query = Query.of(sql);
        // In prepareSql:
        // - We validate the sql based on the schema and turn this into a tree. (SQL->AST)
        // - The query planner optimizes the tree with the DocumentDb adapter rules.
        // - We visit each node and go into its implement method where the nodes become a physical
        // plan. (AST->MQL)
        try {
            // The parameter maxRowCount from prepareSql needs to be -1, we are handling max rows
            // outside calcite translation
            final CalciteSignature<?> signature =
                    prepare.prepareSql(prepareContext, query, Object[].class, -1);
            // Enumerable contains the operations and fields we need to do the aggregation call.
            // Signature also contains a column list that has information about the columns/types of the
            // return row (ordinal, nullability, precision, etc).
            final Enumerable<?> enumerable = signature.enumerable(prepareContext.getDataContext());
            if (enumerable instanceof DocumentDbEnumerable) {
                final DocumentDbEnumerable documentDbEnumerable = (DocumentDbEnumerable) enumerable;
                // Add limit if using setMaxRows. Build a fresh document per call so a later
                // query cannot mutate the $limit stage aliased into an earlier query's pipeline.
                if (maxRowCount > 0) {
                    documentDbEnumerable.getList().add(
                            new BsonDocument("$limit", new BsonInt64(maxRowCount)));
                }
                return DocumentDbMqlQueryContext.builder()
                        .columnMetaData(DocumentDbJdbcMetaDataConverter.fromCalciteColumnMetaData(signature.columns))
                        .aggregateOperations(documentDbEnumerable.getList())
                        .collectionName(documentDbEnumerable.getCollectionName())
                        .paths(documentDbEnumerable.getPaths())
                        .build();
            }
        } catch (Exception e) {
            // TODO: [Fix this error handling](https://github.com/aws/amazon-documentdb-jdbc-driver/issues/499).
            throw SqlError.createSQLException(
                    LOGGER, SqlState.INVALID_QUERY_EXPRESSION, e, SqlError.SQL_PARSE_ERROR, sql,
                    getExceptionMessages(e));
        }
        // Query could be parsed but cannot be executed in pure MQL (likely involves nested queries).
        throw SqlError.createSQLFeatureNotSupportedException(LOGGER, SqlError.UNSUPPORTED_SQL, sql);
    }
    /**
     * Uses CalcitePrepare API to parse and validate sql and convert to MQL.
     * Assumes no max row count set.
     * @param sql the query in sql
     * @return the query context that has the target collection, aggregation stages, and result set metadata.
     */
    public DocumentDbMqlQueryContext get(final String sql) throws SQLException {
        return get(sql, 0);
    }
    /**
     * Flattens an exception's message and the messages of its suppressed
     * exceptions into a single string for error reporting.
     *
     * @param e the throwable to describe.
     * @return a message combining the throwable's message and suppressed messages.
     */
    private String getExceptionMessages(final Throwable e) {
        // Some throwables (e.g. NullPointerException) carry a null message; fall back to
        // the class name so building the error report cannot itself throw an NPE.
        final String message = e.getMessage() != null
                ? e.getMessage()
                : e.getClass().getSimpleName();
        final StringBuilder builder = new StringBuilder(message);
        if (e.getSuppressed() != null) {
            for (Throwable suppressed : e.getSuppressed()) {
                builder.append(" Additional info: '")
                        .append(getExceptionMessages(suppressed))
                        .append("'");
            }
        }
        return builder.toString();
    }
    /**
     * Creates a {@link CalciteSchema} from the database metadata.
     * @param databaseMetadata the metadata for the target database.
     * @return a {@link CalciteSchema} for the database described by the databaseMetadata.
     */
    private static CalciteSchema getRootSchemaFromDatabaseMetadata(
            final DocumentDbConnectionProperties connectionProperties,
            final DocumentDbDatabaseSchemaMetadata databaseMetadata) {
        final SchemaPlus parentSchema = CalciteSchema.createRootSchema(true).plus();
        final Schema schema = DocumentDbSchemaFactory
                .create(databaseMetadata, connectionProperties);
        parentSchema.add(connectionProperties.getDatabase(), schema);
        return CalciteSchema.from(parentSchema);
    }
    /**
     * Our own implementation of {@link RelDataTypeSystem}.
     * All settings are the same as the default unless otherwise overridden.
     */
    private static class DocumentDbTypeSystem extends RelDataTypeSystemImpl implements RelDataTypeSystem {
        /**
         * Returns whether the least restrictive type of a number of CHAR types of different lengths
         * should be a VARCHAR type.
         * @return true to be consistent with SQLServer, MySQL and other major DBMS.
         */
        @Override
        public boolean shouldConvertRaggedUnionTypesToVarying() {
            return true;
        }
    }
    /**
     * Our own implementation of {@link CalcitePrepare}.
     * Extends {@link org.apache.calcite.prepare.CalcitePrepareImpl}.
     * All settings are the same as the default unless otherwise overridden.
     */
    private static class DocumentDbPrepareImplementation extends CalcitePrepareImpl implements CalcitePrepare {
        @Override
        protected SqlRexConvertletTable createConvertletTable() {
            return DocumentDbConvertletTable.INSTANCE;
        }
    }
    /**
     * Our own implementation of {@link SqlRexConvertletTable}.
     * Behaviour is the same as {@link StandardConvertletTable} unless operator is part of custom map.
     */
    private static final class DocumentDbConvertletTable implements SqlRexConvertletTable {
        public static final DocumentDbConvertletTable INSTANCE = new DocumentDbConvertletTable();
        private final Map<SqlOperator, SqlRexConvertlet> customCovertlets = new HashMap<>();
        private DocumentDbConvertletTable() {
            customCovertlets.put(SqlStdOperatorTable.TIMESTAMP_DIFF, new DocumentDbTimestampDiffConvertlet());
        }
        @Override
        public SqlRexConvertlet get(final SqlCall call) {
            // Check if we override the operator conversion. Otherwise use standard conversion.
            final SqlOperator op = call.getOperator();
            final SqlRexConvertlet convertlet = customCovertlets.get(op);
            if (convertlet != null) {
                return convertlet;
            }
            return StandardConvertletTable.INSTANCE.get(call);
        }
        /**
         * Replaces the TimestampDiffConvertlet in {@link StandardConvertletTable}.
         * Overrides the translation of TIMESTAMPDIFF for YEAR, QUARTER, and MONTH.
         * Implementation copied from original but adds the reinterpret-cast branch below.
         */
        private static class DocumentDbTimestampDiffConvertlet implements SqlRexConvertlet {
            @SneakyThrows
            public RexNode convertCall(final SqlRexContext cx, final SqlCall call) {
                // TIMESTAMPDIFF(unit, t1, t2) => (t2 - t1) UNIT
                final RexBuilder rexBuilder = cx.getRexBuilder();
                final SqlLiteral unitLiteral = call.operand(0);
                TimeUnit unit = getSymbolValue(unitLiteral, TimeUnit.class);
                final SqlTypeName sqlTypeName = unit == TimeUnit.NANOSECOND
                        ? SqlTypeName.BIGINT
                        : SqlTypeName.INTEGER;
                final BigDecimal multiplier;
                final BigDecimal divider;
                switch (unit) {
                    case MICROSECOND:
                    case MILLISECOND:
                    case NANOSECOND:
                    case WEEK:
                        // Sub-second and WEEK units are computed from a SECOND interval then scaled.
                        multiplier = BigDecimal.valueOf(DateTimeUtils.MILLIS_PER_SECOND);
                        divider = unit.multiplier;
                        unit = TimeUnit.SECOND;
                        break;
                    default:
                        multiplier = BigDecimal.ONE;
                        divider = BigDecimal.ONE;
                }
                final SqlIntervalQualifier qualifier =
                        new SqlIntervalQualifier(unit, null, SqlParserPos.ZERO);
                final RexNode op2 = cx.convertExpression(call.operand(2));
                final RexNode op1 = cx.convertExpression(call.operand(1));
                final RelDataType intervalType =
                        cx.getTypeFactory().createTypeWithNullability(
                                cx.getTypeFactory().createSqlIntervalType(qualifier),
                                op1.getType().isNullable() || op2.getType().isNullable());
                final RexCall rexCall = (RexCall) rexBuilder.makeCall(
                        intervalType, SqlStdOperatorTable.MINUS_DATE,
                        ImmutableList.of(op2, op1));
                final RelDataType intType =
                        cx.getTypeFactory().createTypeWithNullability(
                                cx.getTypeFactory().createSqlType(sqlTypeName),
                                SqlTypeUtil.containsNullable(rexCall.getType()));
                // If dealing with year, quarter, or month we will calculate the difference using date parts
                // and do not need any integer division.
                if (unit == TimeUnit.YEAR || unit == TimeUnit.QUARTER || unit == TimeUnit.MONTH) {
                    return rexBuilder.makeReinterpretCast(intType, rexCall, rexBuilder.makeLiteral(false));
                }
                final RexNode e = rexBuilder.makeCast(intType, rexCall);
                return rexBuilder.multiplyDivide(e, multiplier, divider);
            }
        }
    }
    /**
     * Extracts the enum symbol carried by a SQL literal, failing with a SQL
     * exception instead of returning null.
     *
     * @param literal the literal expected to hold a symbol value.
     * @param clazz the enum class of the expected symbol.
     * @param <E> the enum type.
     * @return the non-null symbol value.
     * @throws SQLException if the literal does not carry a value of the expected type.
     */
    @NonNull
    private static <E extends Enum<E>> E getSymbolValue(
            final SqlLiteral literal,
            final Class<E> clazz) throws SQLException {
        final E result = literal.symbolValue(clazz);
        if (result == null) {
            throw SqlError.createSQLException(LOGGER,
                    SqlState.INVALID_QUERY_EXPRESSION,
                    SqlError.MISSING_LITERAL_VALUE,
                    literal.getTypeName().getName());
        }
        return result;
    }
    /**
     * Our own implementation of {@link CalcitePrepare.Context} to pass the schema without a {@link java.sql.Connection}.
     * Based on the prepare context in CalciteConnectionImpl.
     */
    private static class DocumentDbPrepareContext implements CalcitePrepare.Context {
        private final CalciteSchema rootSchema;
        private final CalciteSchema mutableRootSchema;
        private final JavaTypeFactory typeFactory;
        private final CalciteConnectionConfig config;
        private final List<String> defaultSchemaPath;
        private final DataContext dataContext;
        DocumentDbPrepareContext(
                final CalciteSchema rootSchema,
                final String defaultSchema,
                final DocumentDbConnectionProperties properties) {
            this.typeFactory = new JavaTypeFactoryImpl(new DocumentDbTypeSystem());
            this.config = new CalciteConnectionConfigImpl(properties);
            final long now = System.currentTimeMillis();
            final SchemaVersion schemaVersion = new LongSchemaVersion(now);
            this.mutableRootSchema = rootSchema;
            // Snapshot the schema so planning sees a consistent view.
            this.rootSchema = mutableRootSchema.createSnapshot(schemaVersion);
            this.defaultSchemaPath = ImmutableList.of(defaultSchema);
            this.dataContext = new DataContext() {
                @Override
                public SchemaPlus getRootSchema() {
                    return rootSchema.plus();
                }
                @Override
                public JavaTypeFactory getTypeFactory() {
                    return typeFactory;
                }
                @Override
                public QueryProvider getQueryProvider() {
                    return null;
                }
                @Override
                public Object get(final String name) {
                    return null;
                }
            };
        }
        @Override
        public JavaTypeFactory getTypeFactory() {
            return typeFactory;
        }
        @Override
        public CalciteSchema getRootSchema() {
            return rootSchema;
        }
        @Override
        public CalciteSchema getMutableRootSchema() {
            return mutableRootSchema;
        }
        @Override
        public List<String> getDefaultSchemaPath() {
            return defaultSchemaPath;
        }
        @Override
        public CalciteConnectionConfig config() {
            return config;
        }
        @Override
        public SparkHandler spark() {
            final boolean enable = config().spark();
            return CalcitePrepare.Dummy.getSparkHandler(enable);
        }
        @Override
        public DataContext getDataContext() {
            return dataContext;
        }
        // This is also returned as null in ContextImp so this should be fine.
        @Override
        public List<String> getObjectPath() {
            return null;
        }
        // This seems to not be needed to get the functionality we want.
        @Override
        public RelRunner getRelRunner() {
            return null;
        }
    }
}
| 4,618 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/persist/DocumentDbSchemaSecurityException.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.persist;
/**
 * This exception identifies when a security exception occurs during schema operations.
 */
public class DocumentDbSchemaSecurityException extends Exception {
    // Exception is Serializable; declare an explicit UID to pin the serialized form
    // instead of relying on the compiler-generated default.
    private static final long serialVersionUID = 1L;

    /**
     * Constructs a new exception with the specified detail message and cause.
     * Note that the detail message associated with cause is not automatically incorporated in this
     * exception's detail message.
     *
     * @param message the detail message (which is saved for later retrieval by the getMessage() method).
     * @param cause the cause (which is saved for later retrieval by the getCause() method). (A null value is permitted, and indicates that the cause is nonexistent or unknown.)
     */
    public DocumentDbSchemaSecurityException(final String message, final Throwable cause) {
        super(message, cause);
    }
}
| 4,619 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/persist/DocumentDbSchemaWriter.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.persist;
import com.google.common.collect.Streams;
import com.mongodb.MongoException;
import com.mongodb.client.ClientSession;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.UpdateOptions;
import com.mongodb.client.result.DeleteResult;
import com.mongodb.client.result.UpdateResult;
import com.mongodb.connection.ClusterSettings;
import lombok.NonNull;
import lombok.SneakyThrows;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchema;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable;
import java.sql.SQLException;
import java.time.Instant;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Filters.or;
import static com.mongodb.client.model.Updates.combine;
import static com.mongodb.client.model.Updates.set;
import static com.mongodb.client.model.Updates.setOnInsert;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_LATEST_OR_NONE;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.MODIFY_DATE_PROPERTY;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SCHEMA_NAME_PROPERTY;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SCHEMA_VERSION_PROPERTY;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SQL_NAME_PROPERTY;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.TABLES_PROPERTY;
import static software.amazon.documentdb.jdbc.persist.DocumentDbSchemaReader.POJO_CODEC_REGISTRY;
import static software.amazon.documentdb.jdbc.persist.DocumentDbSchemaReader.SCHEMA_COLLECTION;
import static software.amazon.documentdb.jdbc.persist.DocumentDbSchemaReader.TABLE_SCHEMA_COLLECTION;
import static software.amazon.documentdb.jdbc.persist.DocumentDbSchemaReader.getSchema;
public class DocumentDbSchemaWriter implements AutoCloseable {
private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbSchemaWriter.class);
static final int MONGO_AUTHORIZATION_FAILURE = 13;
private static final int MONGO_ALREADY_EXISTS = 48;
private final DocumentDbConnectionProperties properties;
private final MongoClient client;
private final boolean closeClient;
/**
 * Creates a schema writer over the given connection properties, reusing the
 * supplied {@link MongoClient} when one is provided; otherwise a client is
 * created here and owned (closed) by this writer.
 *
 * @param properties the connection properties.
 * @param client an existing client to reuse, or {@code null} to create one.
 */
public DocumentDbSchemaWriter(final @NonNull DocumentDbConnectionProperties properties,
        final MongoClient client) {
    this.properties = properties;
    // We only close the client if we created it ourselves.
    this.closeClient = client == null;
    this.client = this.closeClient ? properties.createMongoClient() : client;
}
/**
 * Persists the complete database schema together with its table schemas,
 * running the upsert inside a multi-document transaction when the server
 * supports one.
 *
 * @param schema the database schema to write.
 * @param tablesSchema the table schemas belonging to the database schema.
 * @throws SQLException on write failure.
 * @throws DocumentDbSchemaSecurityException if the user lacks write authorization.
 */
public void write(
        final @NonNull DocumentDbSchema schema,
        final @NonNull Collection<DocumentDbSchemaTable> tablesSchema)
        throws SQLException, DocumentDbSchemaSecurityException {
    final MongoDatabase database = getDatabase(client, properties.getDatabase());
    final boolean useTransactions = supportsMultiDocTransactions(client, database);
    final MongoCollection<DocumentDbSchema> schemaCollection =
            database.getCollection(SCHEMA_COLLECTION, DocumentDbSchema.class);
    final MongoCollection<Document> tableCollection =
            database.getCollection(TABLE_SCHEMA_COLLECTION);
    // Make sure the target collections exist before starting the (optional) transaction.
    ensureSchemaCollections(database);
    runTransactedSession(
            client,
            useTransactions,
            session -> upsertSchemaHandleSecurityException(
                    session,
                    schemaCollection,
                    tableCollection,
                    schema,
                    tablesSchema));
}
/**
 * Writes a new version of the database schema containing only the supplied
 * table schemas, superseding the latest stored version.
 *
 * @param schema the database schema.
 * @param tableSchemas the table schemas to store under the new version.
 */
public void update(
        final @NonNull DocumentDbSchema schema,
        final @NonNull Collection<DocumentDbSchemaTable> tableSchemas) {
    final String schemaName = schema.getSchemaName();
    final MongoDatabase database = getDatabase(client, properties.getDatabase());
    // Read back the most recent stored version so the new write supersedes it.
    final DocumentDbSchema storedSchema = getSchema(
            schemaName, VERSION_LATEST_OR_NONE, database);
    final int nextVersion = getSchemaVersion(schema, storedSchema) + 1;
    // Collect the table references that the new version should keep/update.
    final Set<String> tableReferences = tableSchemas.stream()
            .map(DocumentDbSchemaTable::getId)
            .collect(Collectors.toSet());
    final MongoCollection<Document> tableCollection =
            database.getCollection(TABLE_SCHEMA_COLLECTION);
    final boolean useTransactions = supportsMultiDocTransactions(client, database);
    runTransactedSession(
            client,
            useTransactions,
            session -> upsertSchemaHandleSecurityException(
                    session,
                    tableCollection,
                    database,
                    schemaName,
                    nextVersion,
                    schema,
                    tableSchemas,
                    tableReferences));
}
/**
* Remove all versions of the schema associated with the given schema name.
*
* @param schemaName the name of the database schema.
*/
public void remove(final @NonNull String schemaName) {
remove(schemaName, 0);
}
/**
* Remove the specific version of the schema associated with the given schema name.
*
* @param schemaName the name of the database schema.
* @param schemaVersion the version of the schema.
*/
@SneakyThrows
public void remove(final @NonNull String schemaName, final int schemaVersion) {
// NOTE: schemaVersion <= 0 indicates "any" version.
final MongoDatabase database = getDatabase(client, properties.getDatabase());
final MongoCollection<DocumentDbSchema> schemasCollection = database
.getCollection(SCHEMA_COLLECTION, DocumentDbSchema.class);
final MongoCollection<Document> tableSchemasCollection = database
.getCollection(TABLE_SCHEMA_COLLECTION);
final boolean supportsMultiDocTransactions = supportsMultiDocTransactions(
client, database);
runTransactedSession(
client,
supportsMultiDocTransactions,
session -> deleteSchema(
session,
schemasCollection,
tableSchemasCollection,
schemaName,
schemaVersion));
}
private static void runTransactedSession(
final MongoClient client,
final boolean supportsMultiDocTransactions,
final Consumer<ClientSession> process) {
final ClientSession session = supportsMultiDocTransactions
? client.startSession()
: null;
try {
maybeStartTransaction(session);
process.accept(session);
maybeCommitTransaction(session);
} catch (Exception e) {
maybeAbortTransaction(session);
throw e;
} finally {
if (session != null) {
session.close();
}
}
}
    /**
     * Upserts the table schema and a new versioned database schema document for an
     * update operation, translating a server authorization failure into a
     * {@link DocumentDbSchemaSecurityException}.
     *
     * @param session the client session, or {@code null} when transactions are unsupported.
     * @param tableSchemasCollection the collection of table schema documents.
     * @param database the database holding the schema collections.
     * @param schemaName the name of the database schema.
     * @param schemaVersion the new version to write.
     * @param schema the database schema being updated.
     * @param tableSchemas the table schema to insert or update.
     * @param tableReferences the table IDs the new schema document will reference.
     */
    // Use @SneakyThrows so checked exceptions can escape the lambda in update().
    @SneakyThrows
    private void upsertSchemaHandleSecurityException(
            final ClientSession session,
            final MongoCollection<Document> tableSchemasCollection,
            final MongoDatabase database,
            final String schemaName,
            final int schemaVersion,
            final DocumentDbSchema schema,
            final Collection<DocumentDbSchemaTable> tableSchemas,
            final Set<String> tableReferences) {
        final MongoCollection<DocumentDbSchema> schemaCollection = database
                .getCollection(SCHEMA_COLLECTION, DocumentDbSchema.class);
        try {
            upsertSchema(session,
                    schemaCollection,
                    tableSchemasCollection,
                    schemaName,
                    schemaVersion,
                    schema,
                    tableSchemas,
                    tableReferences);
        } catch (MongoException e) {
            // Surface permission problems as a dedicated, catchable exception type.
            if (isAuthorizationFailure(e)) {
                throw new DocumentDbSchemaSecurityException(e.getMessage(), e);
            }
            throw e;
        }
    }
    /**
     * Deletes every database schema document matching the given name and version
     * (a non-positive version matches all versions), along with the table schema
     * documents each one references.
     *
     * @param session the client session, or {@code null} when transactions are unsupported.
     * @param schemasCollection the collection of database schema documents.
     * @param tableSchemasCollection the collection of table schema documents.
     * @param schemaName the name of the database schema.
     * @param schemaVersion the version to delete, or {@code <= 0} for all versions.
     */
    // Use @SneakyThrows so checked exceptions can escape the lambda in remove().
    @SneakyThrows
    private void deleteSchema(
            final ClientSession session,
            final MongoCollection<DocumentDbSchema> schemasCollection,
            final MongoCollection<Document> tableSchemasCollection,
            final String schemaName,
            final int schemaVersion) {
        final Bson schemaFilter = getSchemaFilter(schemaName, schemaVersion);
        for (DocumentDbSchema schema : schemasCollection.find(schemaFilter)) {
            // Delete the table schemas associated with this database schema.
            deleteTableSchemas(session, tableSchemasCollection, schema.getTableReferences());
            // Delete the database schema.
            final long numDeleted = deleteDatabaseSchema(
                    session, schemasCollection, schemaName, schema.getSchemaVersion());
            // Zero deletions here means the document vanished between find and delete.
            if (numDeleted < 1) {
                throw SqlError.createSQLException(LOGGER,
                        SqlState.DATA_EXCEPTION,
                        SqlError.DELETE_SCHEMA_FAILED, schemaName);
            }
        }
    }
    /**
     * Upserts a database schema and its table schema for a full write, translating a
     * server authorization failure into a {@link DocumentDbSchemaSecurityException}.
     *
     * @param session the client session, or {@code null} when transactions are unsupported.
     * @param schemasCollection the collection of database schema documents.
     * @param tableSchemasCollection the collection of table schema documents.
     * @param schema the database schema to write.
     * @param tablesSchema the table schema referenced by {@code schema}.
     */
    // Use @SneakyThrows to allow it to be used in a lambda expression.
    @SneakyThrows
    private void upsertSchemaHandleSecurityException(
            final ClientSession session,
            final MongoCollection<DocumentDbSchema> schemasCollection,
            final MongoCollection<Document> tableSchemasCollection,
            final DocumentDbSchema schema,
            final Collection<DocumentDbSchemaTable> tablesSchema) {
        try {
            upsertSchema(session, schemasCollection, tableSchemasCollection, schema, tablesSchema);
        } catch (MongoException e) {
            // Surface permission problems as a dedicated, catchable exception type.
            if (isAuthorizationFailure(e)) {
                throw new DocumentDbSchemaSecurityException(e.getMessage(), e);
            }
            throw e;
        }
    }
private void upsertSchema(
final ClientSession session,
final MongoCollection<DocumentDbSchema> schemasCollection,
final MongoCollection<Document> tableSchemasCollection,
final DocumentDbSchema schema,
final Collection<DocumentDbSchemaTable> tablesSchema) throws SQLException {
for (DocumentDbSchemaTable tableSchema : tablesSchema) {
upsertTableSchema(session, tableSchemasCollection, tableSchema,
schema.getSchemaName());
}
upsertDatabaseSchema(session, schemasCollection, schema);
}
    /**
     * Upserts a versioned database schema and its table schema. Currently a pure
     * delegate to {@link #upsertNewSchema}, kept as a separate entry point for the
     * update code path.
     *
     * @param session the client session, or {@code null} when transactions are unsupported.
     * @param schemaCollection the collection of database schema documents.
     * @param tableSchemasCollection the collection of table schema documents.
     * @param schemaName the name of the database schema.
     * @param schemaVersion the version to write.
     * @param schema the source database schema.
     * @param tableSchemas the table schema to insert or update.
     * @param tableReferences the table IDs the new schema document will reference.
     * @throws SQLException if any upsert is not acknowledged.
     */
    private void upsertSchema(
            final ClientSession session,
            final MongoCollection<DocumentDbSchema> schemaCollection,
            final MongoCollection<Document> tableSchemasCollection,
            final String schemaName,
            final int schemaVersion,
            final DocumentDbSchema schema,
            final Collection<DocumentDbSchemaTable> tableSchemas,
            final Set<String> tableReferences) throws SQLException {
        upsertNewSchema(session, schemaCollection, tableSchemasCollection, schemaName,
                schemaVersion, schema, tableSchemas, tableReferences);
    }
private void ensureSchemaCollections(final MongoDatabase database)
throws DocumentDbSchemaSecurityException {
createCollectionIfNotExists(database, SCHEMA_COLLECTION);
createCollectionIfNotExists(database, TABLE_SCHEMA_COLLECTION);
}
private void upsertNewSchema(
final ClientSession session,
final MongoCollection<DocumentDbSchema> schemaCollection,
final MongoCollection<Document> tableSchemasCollection,
final String schemaName,
final int schemaVersion,
final DocumentDbSchema schema,
final Collection<DocumentDbSchemaTable> tableSchemas,
final Set<String> tableReferences) throws SQLException {
// Insert/Update the table schema.
for (DocumentDbSchemaTable tableSchema : tableSchemas) {
upsertTableSchema(session, tableSchemasCollection, tableSchema, schemaName);
}
// Insert/Update the database schema
final DocumentDbSchema newSchema = new DocumentDbSchema(
schema.getSchemaName(),
schemaVersion,
schema.getSqlName(),
new Date(Instant.now().toEpochMilli()),
tableReferences);
upsertDatabaseSchema(session, schemaCollection, newSchema);
}
private int getSchemaVersion(
final DocumentDbSchema schema,
final DocumentDbSchema latestSchema) {
return latestSchema != null
? Math.max(latestSchema.getSchemaVersion(), schema.getSchemaVersion())
: schema.getSchemaVersion();
}
    /**
     * Gets the named database from the client with the POJO codec registry applied,
     * so schema documents can be serialized and deserialized directly.
     *
     * @param client the MongoDB client.
     * @param databaseName the name of the database to get.
     * @return the {@link MongoDatabase} configured with the POJO codec registry.
     */
    static MongoDatabase getDatabase(final MongoClient client, final String databaseName) {
        return client.getDatabase(databaseName)
                .withCodecRegistry(POJO_CODEC_REGISTRY);
    }
private static boolean supportsMultiDocTransactions(
final MongoClient client,
final MongoDatabase database) {
final boolean supportsMultiDocTransactions;
final ClusterSettings settings = client.getClusterDescription().getClusterSettings();
final Document buildInfo = database.runCommand(Document.parse("{ \"buildInfo\": 1 }"));
final List<Integer> version = buildInfo.getList("versionArray", Integer.class);
supportsMultiDocTransactions =
settings.getRequiredReplicaSetName() != null
&& version != null && !version.isEmpty()
&& version.get(0) >= 4;
return supportsMultiDocTransactions;
}
private static void maybeAbortTransaction(
final ClientSession session) {
if (session != null) {
session.abortTransaction();
}
}
private static void maybeCommitTransaction(
final ClientSession session) {
if (session != null) {
session.commitTransaction();
}
}
private static void maybeStartTransaction(
final ClientSession session) {
if (session != null) {
session.startTransaction();
}
}
private static void upsertDatabaseSchema(
final @Nullable ClientSession session,
final @NonNull MongoCollection<DocumentDbSchema> schemasCollection,
final @NonNull DocumentDbSchema schema) throws SQLException {
final Bson schemaFilter = getSchemaFilter(schema.getSchemaName(), schema.getSchemaVersion());
final Bson schemaUpdate = getSchemaUpdate(schema);
final UpdateOptions upsertOption = new UpdateOptions().upsert(true);
final UpdateResult result = session != null
? schemasCollection.updateOne(session, schemaFilter, schemaUpdate, upsertOption)
: schemasCollection.updateOne(schemaFilter, schemaUpdate, upsertOption);
if (!result.wasAcknowledged()) {
throw SqlError.createSQLException(
LOGGER,
SqlState.DATA_EXCEPTION,
SqlError.UPSERT_SCHEMA_FAILED,
schema.getSchemaName());
}
}
private static void upsertTableSchema(
final @Nullable ClientSession session,
final @NonNull MongoCollection<Document> tableSchemasCollection,
final @NonNull DocumentDbSchemaTable tableSchema,
final @NonNull String schemaName) throws SQLException {
final Bson tableSchemaFilter = getTableSchemaFilter(tableSchema.getId());
final Bson tableSchemaUpdate = getTableSchemaUpdate(tableSchema);
final UpdateOptions upsertOption = new UpdateOptions().upsert(true);
final UpdateResult result = session != null
? tableSchemasCollection.updateOne(session,
tableSchemaFilter, tableSchemaUpdate, upsertOption)
: tableSchemasCollection.updateOne(
tableSchemaFilter, tableSchemaUpdate, upsertOption);
if (!result.wasAcknowledged()) {
throw SqlError.createSQLException(
LOGGER,
SqlState.DATA_EXCEPTION,
SqlError.UPSERT_SCHEMA_FAILED,
schemaName);
}
}
private static long deleteDatabaseSchema(
final ClientSession session,
final MongoCollection<DocumentDbSchema> schemasCollection,
final String schemaName,
final int schemaVersion) throws SQLException {
final Bson schemaFilter = getSchemaFilter(schemaName, schemaVersion);
final DeleteResult result = session != null
? schemasCollection.deleteOne(session, schemaFilter)
: schemasCollection.deleteOne(schemaFilter);
if (!result.wasAcknowledged()) {
throw SqlError.createSQLException(LOGGER,
SqlState.DATA_EXCEPTION,
SqlError.DELETE_SCHEMA_FAILED, schemaName);
}
return result.getDeletedCount();
}
private static void deleteTableSchemas(
final ClientSession session,
final MongoCollection<Document> tableSchemasCollection,
final Collection<String> tableReferences) throws SQLException {
final List<Bson> tableReferencesFilter = tableReferences.stream()
.map(DocumentDbSchemaWriter::getTableSchemaFilter)
.collect(Collectors.toList());
if (!tableReferencesFilter.isEmpty()) {
final Bson allTableReferencesFilter = or(tableReferencesFilter);
final DeleteResult result = session != null
? tableSchemasCollection.deleteMany(session, allTableReferencesFilter)
: tableSchemasCollection.deleteMany(allTableReferencesFilter);
if (!result.wasAcknowledged()) {
throw SqlError.createSQLException(LOGGER,
SqlState.DATA_EXCEPTION,
SqlError.DELETE_TABLE_SCHEMA_FAILED);
} else if (result.getDeletedCount() != tableReferencesFilter.size()) {
LOGGER.warn(SqlError.lookup(SqlError.DELETE_TABLE_SCHEMA_INCONSISTENT,
tableReferencesFilter.size(), result.getDeletedCount()));
}
}
}
    /**
     * Builds the update document for a table schema upsert. Columns are copied into
     * plain {@link DocumentDbSchemaColumn} instances so subclasses (for example
     * {@code DocumentDbMetadataColumn}) serialize uniformly; the UUID is only set
     * when the document is first inserted.
     *
     * @param schemaTable the table schema to serialize.
     * @return a combined {@link Bson} update suitable for an upsert.
     */
    private static Bson getTableSchemaUpdate(final DocumentDbSchemaTable schemaTable) {
        return combine(
                set("sqlName", schemaTable.getSqlName()),
                set("collectionName", schemaTable.getCollectionName()),
                set("modifyDate", schemaTable.getModifyDate()),
                set("columns", schemaTable.getColumnMap().values().stream()
                        .map(c -> new DocumentDbSchemaColumn(
                                c.getFieldPath(),
                                c.getSqlName(),
                                c.getSqlType(),
                                c.getDbType(),
                                c.isIndex(),
                                c.isPrimaryKey(),
                                c.getForeignKeyTableName(),
                                c.getForeignKeyColumnName()))
                        .collect(Collectors.toList())),
                setOnInsert("uuid", schemaTable.getUuid()));
    }
    /**
     * Builds the filter matching a table schema document by its {@code _id}.
     *
     * @param tableId the table schema ID.
     * @return a {@link Bson} filter on {@code _id}.
     */
    static Bson getTableSchemaFilter(final String tableId) {
        return eq("_id", tableId);
    }
    /**
     * Builds the update document for a database schema upsert. The schema name and
     * version are only set on insert since together they form the logical key of
     * the document.
     *
     * @param schema the database schema to serialize.
     * @return a combined {@link Bson} update suitable for an upsert.
     */
    private static Bson getSchemaUpdate(final DocumentDbSchema schema) {
        return combine(
                set(SQL_NAME_PROPERTY, schema.getSqlName()),
                set(MODIFY_DATE_PROPERTY, schema.getModifyDate()),
                set(TABLES_PROPERTY, schema.getTableReferences()),
                setOnInsert(SCHEMA_NAME_PROPERTY, schema.getSchemaName()),
                setOnInsert(SCHEMA_VERSION_PROPERTY, schema.getSchemaVersion()));
    }
static Bson getSchemaFilter(final String schemaName, final int schemaVersion) {
return schemaVersion > 0
? and(
eq(SCHEMA_NAME_PROPERTY, schemaName),
eq(SCHEMA_VERSION_PROPERTY, schemaVersion))
: eq(SCHEMA_NAME_PROPERTY, schemaName);
}
private void createCollectionIfNotExists(
final MongoDatabase database,
final String collectionName) throws DocumentDbSchemaSecurityException {
if (Streams.stream(database.listCollectionNames())
.anyMatch(c -> c.equals(collectionName))) {
return;
}
try {
database.createCollection(collectionName);
} catch (MongoException e) {
// Handle race condition if it created after testing for existence.
if (e.getCode() == MONGO_ALREADY_EXISTS) {
LOGGER.info(String.format(
"Schema collection '%s' already exists.", collectionName));
} else if (isAuthorizationFailure(e)) {
throw new DocumentDbSchemaSecurityException(e.getMessage(), e);
} else {
throw e;
}
}
}
static boolean isAuthorizationFailure(final MongoException e) {
return e.getCode() == MONGO_AUTHORIZATION_FAILURE
|| "authorization failure".equalsIgnoreCase(e.getMessage());
}
@Override
public void close() {
if (closeClient && client != null) {
client.close();
}
}
}
| 4,620 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/persist/DocumentDbSchemaReader.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.persist;
import com.mongodb.MongoException;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import lombok.NonNull;
import org.bson.codecs.configuration.CodecRegistry;
import org.bson.codecs.pojo.PojoCodecProvider;
import org.bson.conversions.Bson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.metadata.DocumentDbMetadataColumn;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchema;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaColumn;
import software.amazon.documentdb.jdbc.metadata.DocumentDbSchemaTable;
import javax.annotation.Nullable;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static com.mongodb.MongoClientSettings.getDefaultCodecRegistry;
import static com.mongodb.client.model.Filters.or;
import static com.mongodb.client.model.Sorts.ascending;
import static com.mongodb.client.model.Sorts.descending;
import static com.mongodb.client.model.Sorts.orderBy;
import static org.bson.codecs.configuration.CodecRegistries.fromProviders;
import static org.bson.codecs.configuration.CodecRegistries.fromRegistries;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SCHEMA_NAME_PROPERTY;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SCHEMA_VERSION_PROPERTY;
import static software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter.getDatabase;
import static software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter.getSchemaFilter;
import static software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter.getTableSchemaFilter;
import static software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter.isAuthorizationFailure;
public class DocumentDbSchemaReader implements AutoCloseable {
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbSchemaReader.class);
    private final DocumentDbConnectionProperties properties;
    private final MongoClient client;
    // Whether this reader owns the client and must close it in close().
    private final boolean closeClient;
    public static final String DEFAULT_SCHEMA_NAME = DocumentDbSchema.DEFAULT_SCHEMA_NAME;
    /** Name of the collection holding database schema documents. */
    public static final String SCHEMA_COLLECTION = "_sqlSchemas";
    /** Name of the collection holding table schema documents. */
    public static final String TABLE_SCHEMA_COLLECTION = "_sqlTableSchemas";
    /** Codec registry able to (de)serialize the schema POJO classes directly. */
    static final CodecRegistry POJO_CODEC_REGISTRY = fromRegistries(
            getDefaultCodecRegistry(),
            fromProviders(PojoCodecProvider.builder()
                    .register(DocumentDbSchema.class,
                            DocumentDbSchemaTable.class,
                            DocumentDbSchemaColumn.class,
                            DocumentDbMetadataColumn.class)
                    .build()));
    /**
     * Constructs a new {@link DocumentDbSchemaReader} with given connection properties.
     *
     * @param properties the connection properties for connecting to database.
     * @param client the {@link MongoClient} client, or {@code null} to create (and own)
     *               one from the connection properties.
     */
    public DocumentDbSchemaReader(final @NonNull DocumentDbConnectionProperties properties,
            final MongoClient client) {
        this.properties = properties;
        this.client = client != null
                ? client
                : properties.createMongoClient();
        // Only close the client if it was created here.
        this.closeClient = client == null;
    }
    /**
     * Reads the latest version of the default schema for current database.
     *
     * @return a {@link DocumentDbSchema} schema for the database, or {@code null} if not found.
     */
    public DocumentDbSchema read() {
        return read(DEFAULT_SCHEMA_NAME);
    }
    /**
     * Reads the list of all schema in the current database.
     *
     * @return a list of {@link DocumentDbSchema} items for the current database;
     *         empty if none exist or the caller is not authorized to read them.
     */
    public List<DocumentDbSchema> list() throws SQLException {
        // Use the getDatabase helper so the POJO codec registry is applied,
        // consistent with the other read methods.
        final MongoDatabase database = getDatabase(client, properties.getDatabase());
        return getAllSchema(database);
    }
    /**
     * Reads the latest version of the specified schema for current database.
     *
     * @param schemaName the name of the schema to read.
     * @return a {@link DocumentDbSchema} schema for the database, or {@code null}, if not found.
     */
    @Nullable
    public DocumentDbSchema read(final @NonNull String schemaName) {
        // A version of 0 means "latest version".
        return read(schemaName, 0);
    }
    /**
     * Reads the given version of the specified schema for current database.
     *
     * @param schemaName the name of the schema to read.
     * @param schemaVersion the specific version of the schema, or {@code <= 0} for the latest.
     * @return a {@link DocumentDbSchema} schema for the database, or {@code null}, if not found.
     */
    @Nullable
    public DocumentDbSchema read(final @NonNull String schemaName, final int schemaVersion) {
        final MongoDatabase database = getDatabase(client, properties.getDatabase());
        return getSchema(schemaName, schemaVersion, database);
    }
    /**
     * Retrieves the highest-version schema document matching the name/version filter.
     * Authorization failures are logged and reported as {@code null} rather than thrown.
     *
     * @param schemaName the name of the schema to read.
     * @param schemaVersion the specific version, or {@code <= 0} for the latest.
     * @param database the database to read from.
     * @return the matching {@link DocumentDbSchema}, or {@code null} if not found or
     *         not authorized.
     */
    static DocumentDbSchema getSchema(
            final String schemaName,
            final int schemaVersion,
            final MongoDatabase database) {
        final MongoCollection<DocumentDbSchema> schemasCollection = database
                .getCollection(SCHEMA_COLLECTION, DocumentDbSchema.class);
        try {
            // Sort by version descending so first() returns the latest match.
            return schemasCollection
                    .find(getSchemaFilter(schemaName, schemaVersion))
                    .sort(descending(SCHEMA_VERSION_PROPERTY))
                    .first();
        } catch (MongoException e) {
            if (isAuthorizationFailure(e)) {
                LOGGER.warn(e.getMessage(), e);
                return null;
            }
            throw e;
        }
    }
    /**
     * Reads the table schema for the given table ID.
     *
     * <p>NOTE(review): {@code schemaName} and {@code schemaVersion} are currently
     * unused; the lookup is by table ID alone. Confirm whether they should
     * constrain the query.
     *
     * @param schemaName the name of the schema to read.
     * @param schemaVersion the specific version of the schema.
     * @param tableId the table ID for the table schema.
     * @return a {@link DocumentDbSchemaTable} table schema, or {@code null} if not found.
     */
    public DocumentDbSchemaTable readTable(
            final @NonNull String schemaName,
            final int schemaVersion,
            final @NonNull String tableId) {
        final MongoDatabase database = getDatabase(client, properties.getDatabase());
        // Attempt to retrieve the table associated with the table ID.
        final MongoCollection<DocumentDbSchemaTable> tableSchemasCollection = database
                .getCollection(TABLE_SCHEMA_COLLECTION, DocumentDbSchemaTable.class);
        return tableSchemasCollection
                .find(getTableSchemaFilter(tableId))
                .first();
    }
    /**
     * Reads the table schema for the given set of table ID.
     *
     * <p>NOTE(review): {@code schemaName} and {@code schemaVersion} are currently
     * unused; the lookup is by table IDs alone. Confirm whether they should
     * constrain the query.
     *
     * @param schemaName the name of the database schema.
     * @param schemaVersion the version of the database schema.
     * @param tableIds the set of table IDs to read.
     *
     * @return a collection of {@link DocumentDbSchemaTable} table schema.
     */
    public Collection<DocumentDbSchemaTable> readTables(
            final String schemaName,
            final int schemaVersion,
            final Set<String> tableIds) {
        // Guard: Filters.or cannot be built from an empty list of filters.
        if (tableIds.isEmpty()) {
            return new ArrayList<>();
        }
        final MongoDatabase database = getDatabase(client, properties.getDatabase());
        // Attempt to retrieve the tables associated with the table ID.
        final MongoCollection<DocumentDbSchemaTable> tableSchemasCollection = database
                .getCollection(TABLE_SCHEMA_COLLECTION, DocumentDbSchemaTable.class)
                .withCodecRegistry(POJO_CODEC_REGISTRY);
        final List<Bson> tableFilters = tableIds.stream()
                .map(DocumentDbSchemaWriter::getTableSchemaFilter)
                .collect(Collectors.toList());
        return StreamSupport.stream(
                tableSchemasCollection.find(or(tableFilters)).spliterator(), false)
                .collect(Collectors.toList());
    }
    /**
     * Retrieves all schema documents sorted by name then version. Authorization
     * failures are logged and reported as an empty list rather than thrown.
     *
     * @param database the database to read from.
     * @return a (possibly empty) list of {@link DocumentDbSchema} documents.
     */
    static List<DocumentDbSchema> getAllSchema(final MongoDatabase database) {
        final MongoCollection<DocumentDbSchema> schemasCollection = database
                .getCollection(SCHEMA_COLLECTION, DocumentDbSchema.class)
                .withCodecRegistry(POJO_CODEC_REGISTRY);
        try {
            final List<DocumentDbSchema> schemas = new ArrayList<>();
            schemasCollection
                    .find()
                    .sort(orderBy(ascending(SCHEMA_NAME_PROPERTY), ascending(SCHEMA_VERSION_PROPERTY)))
                    .forEach(schemas::add);
            return schemas;
        } catch (MongoException e) {
            if (isAuthorizationFailure(e)) {
                LOGGER.warn(e.getMessage(), e);
                return new ArrayList<>();
            }
            throw e;
        }
    }
    /**
     * Closes the MongoDB client, but only when this reader created it.
     */
    @Override
    public void close() {
        if (closeClient && client != null) {
            client.close();
        }
    }
}
| 4,621 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbSchemaColumn.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import lombok.Getter;
import lombok.NonNull;
import lombok.Setter;
import org.bson.BsonType;
import org.bson.codecs.pojo.annotations.BsonCreator;
import org.bson.codecs.pojo.annotations.BsonProperty;
import software.amazon.documentdb.jdbc.common.utilities.JdbcType;
import java.util.Objects;
import java.util.Optional;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SQL_NAME_PROPERTY;
@Getter
@JsonSerialize(as = DocumentDbSchemaColumn.class)
public class DocumentDbSchemaColumn {
    public static final String SQL_TYPE_PROPERTY = "sqlType";
    public static final String DB_TYPE_PROPERTY = "dbType";
    public static final String IS_INDEX_PROPERTY = "isIndex";
    public static final String IS_PRIMARY_KEY_PROPERTY = "isPrimaryKey";
    public static final String FOREIGN_KEY_TABLE_NAME_PROPERTY = "foreignKeyTableName";
    public static final String FOREIGN_KEY_COLUMN_NAME_PROPERTY = "foreignKeyColumnName";
    public static final String FIELD_PATH_PROPERTY = "fieldPath";
    /** Original path to the field in the collection. */
    @NonNull
    @BsonProperty(FIELD_PATH_PROPERTY)
    @JsonProperty(FIELD_PATH_PROPERTY)
    private final String fieldPath;
    /** Display name of the field. */
    @Setter
    @NonNull
    @BsonProperty(SQL_NAME_PROPERTY)
    @JsonProperty(SQL_NAME_PROPERTY)
    private String sqlName;
    /** SQL/JDBC type of the field. Refer to the types in {@link java.sql.Types} */
    @Setter
    @BsonProperty(SQL_TYPE_PROPERTY)
    @JsonProperty(SQL_TYPE_PROPERTY)
    private JdbcType sqlType;
    /** The DocumentDB type of the field. Refer to the types in {@link BsonType} */
    @BsonProperty(DB_TYPE_PROPERTY)
    @JsonProperty(DB_TYPE_PROPERTY)
    private final BsonType dbType;
    /**
     * {@code true} if this column is the index column in an array table;
     * {@code false} otherwise.
     */
    @BsonProperty(IS_INDEX_PROPERTY)
    @JsonProperty(IS_INDEX_PROPERTY)
    private final boolean index;
    /**
     * {@code true} if this column is part of the primary key;
     * {@code false} otherwise.
     */
    @BsonProperty(IS_PRIMARY_KEY_PROPERTY)
    @JsonProperty(IS_PRIMARY_KEY_PROPERTY)
    private final boolean primaryKey;
    /** If this column is a foreign key this contains the name of the table that it refers to, null otherwise. */
    @Setter
    @BsonProperty(FOREIGN_KEY_TABLE_NAME_PROPERTY)
    @JsonProperty(FOREIGN_KEY_TABLE_NAME_PROPERTY)
    private String foreignKeyTableName;
    /** If this column is a foreign key this contains the name of the column that it refers to, null otherwise. */
    @Setter
    @BsonProperty(FOREIGN_KEY_COLUMN_NAME_PROPERTY)
    @JsonProperty(FOREIGN_KEY_COLUMN_NAME_PROPERTY)
    private String foreignKeyColumnName;
    /**
     * All-args constructor for a column.
     *
     * @param fieldPath The path to this column.
     * @param sqlName The name of this column.
     * @param sqlType The SQL/JDBC type of this column.
     * @param dbType The DocumentDB type of this column. (Optional)
     * @param index Whether this is an index column. (Optional)
     * @param primaryKey Whether this is part of a primary key. (Optional)
     * @param foreignKeyTableName If this is a foreign key, the table that it refers to, null if not a foreign key.
     * @param foreignKeyColumnName If this is a foreign key, the column that it refers to, null if not a foreign key.
     */
    @BsonCreator
    public DocumentDbSchemaColumn(
            @JsonProperty(FIELD_PATH_PROPERTY) @BsonProperty(FIELD_PATH_PROPERTY)
            final String fieldPath,
            @JsonProperty(SQL_NAME_PROPERTY) @BsonProperty(SQL_NAME_PROPERTY)
            final String sqlName,
            @JsonProperty(SQL_TYPE_PROPERTY) @BsonProperty(SQL_TYPE_PROPERTY)
            final JdbcType sqlType,
            @JsonProperty(DB_TYPE_PROPERTY) @BsonProperty(DB_TYPE_PROPERTY)
            final BsonType dbType,
            @JsonProperty(IS_INDEX_PROPERTY) @BsonProperty(IS_INDEX_PROPERTY)
            final boolean index,
            @JsonProperty(IS_PRIMARY_KEY_PROPERTY) @BsonProperty(IS_PRIMARY_KEY_PROPERTY)
            final boolean primaryKey,
            @JsonProperty(FOREIGN_KEY_TABLE_NAME_PROPERTY) @BsonProperty(FOREIGN_KEY_TABLE_NAME_PROPERTY)
            final String foreignKeyTableName,
            @JsonProperty(FOREIGN_KEY_COLUMN_NAME_PROPERTY) @BsonProperty(FOREIGN_KEY_COLUMN_NAME_PROPERTY)
            final String foreignKeyColumnName) {
        this.fieldPath = fieldPath;
        this.sqlName = sqlName;
        this.sqlType = sqlType;
        this.dbType = dbType;
        this.index = index;
        this.primaryKey = primaryKey;
        this.foreignKeyTableName = foreignKeyTableName;
        this.foreignKeyColumnName = foreignKeyColumnName;
    }
    /**
     * Gets the index of the column within the given table.
     * @param table The parent table of this column.
     * @return the index of this column within the given table (one-indexed), will return empty optional otherwise if
     * the table given does not contain this column.
     */
    public Optional<Integer> getIndex(final DocumentDbSchemaTable table) {
        // Use a primitive counter to avoid autoboxing on every increment.
        int colIndex = 0;
        for (DocumentDbSchemaColumn column: table.getColumnMap().values()) {
            colIndex++;
            if (column.getSqlName().equals(this.getSqlName())) {
                return Optional.of(colIndex);
            }
        }
        return Optional.empty(); // Column was not found in the given table.
    }
    /**
     * Gets the index of the column within the primary key of the given table.
     * @param table The parent table of this column.
     * @return the index of this column within the primary key (one-indexed), will return zero if this column is not
     * part of the primary key, or empty optional if it is not within the table given.
     */
    public Optional<Integer> getPrimaryKeyIndex(final DocumentDbSchemaTable table) {
        // Use a primitive counter to avoid autoboxing on every increment.
        int keyIndex = 0;
        for (DocumentDbSchemaColumn column: table.getColumnMap().values()) {
            if (column.isPrimaryKey()) {
                keyIndex++;
                if (column.getSqlName().equals(this.getSqlName())) {
                    return Optional.of(keyIndex);
                }
            } else if (column.getSqlName().equals(this.getSqlName())) {
                return Optional.of(0); // Column is not part of primary key.
            }
        }
        return Optional.empty(); // Column was not found in this table.
    }
    /**
     * Gets the index of the column within a foreign key.
     * @param table The parent table of this column.
     * @return the index of this column within the foreign key (one-indexed), will return empty optional if is not
     * a foreign key to the given table.
     */
    public Optional<Integer> getForeignKeyIndex(final DocumentDbSchemaTable table) {
        if (table.getSqlName().equals(getForeignKeyTableName())) {
            // Use a primitive counter to avoid autoboxing on every increment.
            int keyIndex = 0;
            for (DocumentDbSchemaColumn column : table.getColumnMap().values()) {
                if (column.isPrimaryKey()) {
                    keyIndex++;
                    if (column.getSqlName().equals(this.getForeignKeyColumnName())) {
                        return Optional.of(keyIndex);
                    }
                }
            }
        }
        return Optional.empty(); // Column was not found in this table.
    }
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DocumentDbSchemaColumn)) {
            return false;
        }
        final DocumentDbSchemaColumn that = (DocumentDbSchemaColumn) o;
        return index == that.index
                && primaryKey == that.primaryKey
                && fieldPath.equals(that.fieldPath)
                && sqlName.equals(that.sqlName)
                && sqlType == that.sqlType
                && dbType == that.dbType
                && Objects.equals(foreignKeyTableName, that.foreignKeyTableName)
                && Objects.equals(foreignKeyColumnName, that.foreignKeyColumnName);
    }
    @Override
    public int hashCode() {
        return Objects
                .hash(fieldPath, sqlName, sqlType, dbType, index, primaryKey, foreignKeyTableName,
                        foreignKeyColumnName);
    }
    @Override
    public String toString() {
        return "DocumentDbSchemaColumn{" +
                "fieldPath='" + fieldPath + '\'' +
                ", sqlName='" + sqlName + '\'' +
                ", sqlType=" + sqlType +
                ", dbType=" + dbType +
                ", index=" + index +
                ", primaryKey=" + primaryKey +
                ", foreignKeyTableName='" + foreignKeyTableName + '\'' +
                ", foreignKeyColumnName='" + foreignKeyColumnName + '\'' +
                '}';
    }
}
| 4,622 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbMetadataColumn.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import lombok.AccessLevel;
import lombok.Builder;
import lombok.Getter;
import org.bson.BsonType;
import software.amazon.documentdb.jdbc.common.utilities.JdbcType;
import java.util.Objects;
/** Represents a field in a document, embedded document or array as a column in a table. */
@Getter
@JsonSerialize(as = DocumentDbSchemaColumn.class)
public class DocumentDbMetadataColumn extends DocumentDbSchemaColumn {
    /** The (one-indexed) index of the column in the table. */
    private final int index;
    /**
     * Indicates the position of the column in the primary key of the table; 0 if not part of the key.
     */
    private final int primaryKeyIndex;
    /**
     * Indicates the position of the column in the foreign key of the table; 0 if not part of the key.
     */
    private final int foreignKeyIndex;
    /** If this column is an array index, returns the zero-indexed level of the array. Null, otherwise. */
    private final Integer arrayIndexLevel;
    /** The name of the table this column belongs. */
    @Getter(AccessLevel.PACKAGE)
    private final String tableName;
    /** The name of the virtual table this column belongs to. */
    private final String virtualTableName;
    /** The new path of a column that was renamed due to a path collision. Only used in query processing. **/
    private final String resolvedPath;
    /**
     * {@code true} if the column was generated rather than taken directly from the collection;
     * {@code false} otherwise.
     */
    private final boolean isGenerated;
    /**
     * Builder for DocumentDbMetadataColumn
     *
     * @param index The index of the column within the table (one-indexed).
     * @param primaryKeyIndex If this key is part of primary key, the index within the primary key; else zero.
     * @param foreignKeyIndex If this key is part of a foreign key, the index within the foreign key; else zero.
     * @param arrayIndexLevel The level of the array if column is the index column of an array.
     * @param virtualTableName The name of the virtual table this column belongs to.
     * @param tableName The name of the table this column belongs to.
     * @param resolvedPath The modified path if this column is renamed due to a collision in joins.
     * @param isGenerated Whether this column was generated.
     * @param fieldPath The path to this field.
     * @param sqlName The name of the column.
     * @param sqlType The SQL/JDBC type, see {@link java.sql.Types}.
     * @param dbType The DocumentDB type, see {@link BsonType}
     * @param isIndex Whether this column is an array index column.
     * @param isPrimaryKey Whether this column is part of the primary key.
     * @param foreignKeyTableName The name of the table referred to if this column is part of a
     *                            foreign key.
     * @param foreignKeyColumnName The name of the column referred to if this column is part of a
     *                             foreign key.
     */
    @Builder
    public DocumentDbMetadataColumn(final int index,
            final int primaryKeyIndex,
            final int foreignKeyIndex,
            final Integer arrayIndexLevel,
            final String virtualTableName,
            final String tableName,
            final String resolvedPath,
            final boolean isGenerated,
            final String fieldPath,
            final String sqlName,
            final JdbcType sqlType,
            final BsonType dbType,
            final boolean isIndex,
            final boolean isPrimaryKey,
            final String foreignKeyTableName,
            final String foreignKeyColumnName) {
        super(fieldPath, sqlName, sqlType, dbType, isIndex, isPrimaryKey, foreignKeyTableName, foreignKeyColumnName);
        // Fields are only assigned here, so they are declared final for immutability.
        this.index = index;
        this.primaryKeyIndex = primaryKeyIndex;
        this.foreignKeyIndex = foreignKeyIndex;
        this.arrayIndexLevel = arrayIndexLevel;
        this.tableName = tableName;
        this.virtualTableName = virtualTableName;
        this.resolvedPath = resolvedPath;
        this.isGenerated = isGenerated;
    }
    /**
     * Compares this column with another for equality, including the metadata-specific
     * fields in addition to the base {@link DocumentDbSchemaColumn} fields.
     */
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DocumentDbMetadataColumn)) {
            return false;
        }
        if (!super.equals(o)) {
            return false;
        }
        final DocumentDbMetadataColumn that = (DocumentDbMetadataColumn) o;
        return index == that.index
                && primaryKeyIndex == that.primaryKeyIndex
                && foreignKeyIndex == that.foreignKeyIndex
                && isGenerated == that.isGenerated
                && Objects.equals(arrayIndexLevel, that.arrayIndexLevel)
                && Objects.equals(tableName, that.tableName)
                && Objects.equals(virtualTableName, that.virtualTableName)
                && Objects.equals(resolvedPath, that.resolvedPath);
    }
    /** Hash code consistent with {@link #equals(Object)}, combining base and metadata fields. */
    @Override
    public int hashCode() {
        return Objects
                .hash(super.hashCode(), index, primaryKeyIndex, foreignKeyIndex, arrayIndexLevel,
                        tableName, virtualTableName, resolvedPath, isGenerated);
    }
}
| 4,623 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbMetadataTable.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import lombok.AccessLevel;
import lombok.Builder;
import lombok.Getter;
import org.bson.codecs.pojo.annotations.BsonIgnore;
import java.util.LinkedHashMap;
/** Represents the fields in a document, embedded document or array. */
@Getter
@JsonSerialize(as = DocumentDbSchemaTable.class)
public class DocumentDbMetadataTable extends DocumentDbSchemaTable {
    /** A map of columns indexed by field path. Excluded from BSON/JSON serialization. */
    @BsonIgnore
    @JsonIgnore
    private final ImmutableMap<String, DocumentDbSchemaColumn> columnsByPath;
    /** The foreign keys within the table. No getter is generated ({@code AccessLevel.NONE}). */
    @Getter(AccessLevel.NONE)
    @BsonIgnore
    @JsonIgnore
    private final ImmutableList<DocumentDbSchemaColumn> foreignKeys;
    /**
     * Builder for DocumentDbMetadataTable
     * @param sqlName The name of the table.
     * @param collectionName The name of the collection to which this table belongs.
     * @param columns The columns in this table, indexed by name. Uses LinkedHashMap to preserve order.
     * @param columnsByPath A map of columns indexed by path.
     * @param foreignKeys The foreign keys within the table.
     */
    @Builder
    public DocumentDbMetadataTable(final String sqlName,
            final String collectionName,
            final LinkedHashMap<String, DocumentDbSchemaColumn> columns,
            final ImmutableMap<String, DocumentDbMetadataColumn> columnsByPath,
            final ImmutableMap<String, DocumentDbSchemaColumn> columnsByPathIgnored,
            final ImmutableList<DocumentDbSchemaColumn> foreignKeys) {
        super(sqlName, collectionName, columns);
        // FIX: the columnsByPath parameter was previously ignored, leaving the field
        // permanently null despite being documented in the builder javadoc. Copy with a
        // widened value type (DocumentDbMetadataColumn extends DocumentDbSchemaColumn).
        this.columnsByPath = columnsByPath != null ? ImmutableMap.copyOf(columnsByPath) : null;
        this.foreignKeys = foreignKeys;
    }
    /** Equality is delegated entirely to the base class comparison. */
    @Override
    public boolean equals(final Object o) {
        return super.equals(o);
    }
    /** Hash code is delegated to the base class, consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode() {
        return super.hashCode();
    }
}
| 4,624 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbMetadataService.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.EstimatedDocumentCountOptions;
import lombok.NonNull;
import lombok.SneakyThrows;
import org.bson.BsonDocument;
import org.bson.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaReader;
import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaSecurityException;
import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaWriter;
import javax.annotation.Nullable;
import java.sql.SQLException;
import java.time.Instant;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_LATEST_OR_NEW;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_LATEST_OR_NONE;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbDatabaseSchemaMetadata.VERSION_NEW;
/**
 * A service for retrieving DocumentDB database metadata.
 *
 * <p>Schema is read from and persisted to the target database through
 * {@link DocumentDbSchemaReader} and {@link DocumentDbSchemaWriter}. If persisting fails
 * because the user is not authorized, generated table schema is kept in an in-process
 * cache ({@code TABLE_MAP}) as a fallback.</p>
 */
public class DocumentDbMetadataService {
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbMetadataService.class);
    // Fallback in-process cache of table schema by table ID. Entries are added only
    // when writing the schema to the database fails (see getNewDatabaseMetadata).
    private static final Map<String, DocumentDbSchemaTable> TABLE_MAP = new ConcurrentHashMap<>();
    /**
     * Gets the latest or a new {@link DocumentDbDatabaseSchemaMetadata} instance based on the
     * schemaName and properties. It uses a value of {@link DocumentDbDatabaseSchemaMetadata#VERSION_LATEST_OR_NEW}
     * for the version to indicate to get the latest or create a new instance if none exists.
     *
     * @param properties the connection properties.
     * @param schemaName the client ID.
     * @param client the {@link MongoClient} client.
     * @return a {@link DocumentDbDatabaseSchemaMetadata} instance.
     * @throws SQLException if unable to connect or read/write the schema.
     */
    public static DocumentDbSchema get(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final MongoClient client) throws SQLException {
        return get(properties, schemaName, VERSION_LATEST_OR_NEW, client);
    }
    /**
     * Gets an existing {@link DocumentDbDatabaseSchemaMetadata} instance based on the clientId and
     * version.
     *
     * @param properties the connection properties.
     * @param schemaName the client ID.
     * @param schemaVersion the version of the metadata. A version number of
     *                      {@link DocumentDbDatabaseSchemaMetadata#VERSION_LATEST_OR_NEW} indicates to get the
     *                      latest or create a new instance.
     * @param client the {@link MongoClient} client.
     * @return a {@link DocumentDbDatabaseSchemaMetadata} instance if the clientId and version exist,
     * {@code null} otherwise.
     * @throws SQLException if unable to connect or read/write the schema.
     */
    @Nullable
    public static DocumentDbSchema get(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final int schemaVersion,
            final MongoClient client) throws SQLException {
        final Instant beginRetrieval = Instant.now();
        final Map<String, DocumentDbSchemaTable> tableMap = new LinkedHashMap<>();
        final DocumentDbSchema schema;
        // ASSUMPTION: Negative versions are sentinels for special cases; clamp to the
        // "latest or new" sentinel for the actual read.
        final int lookupVersion = Math.max(schemaVersion, VERSION_LATEST_OR_NEW);
        final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, client);
        try {
            // Get the latest or specific version, might not exist.
            schema = schemaReader.read(schemaName, lookupVersion);
            switch (schemaVersion) {
                case VERSION_LATEST_OR_NEW:
                    // If latest exists, return it; otherwise generate a new version 1.
                    if (schema != null) {
                        LOGGER.info("Successfully retrieved metadata schema {} in {} ms.",
                                schemaName,
                                Instant.now().toEpochMilli() - beginRetrieval.toEpochMilli());
                        return schema;
                    }
                    LOGGER.info(
                            "Existing metadata not found for schema {}, will generate new metadata instead for database {}.",
                            schemaName, properties.getDatabase());
                    return getNewDatabaseMetadata(properties, schemaName, 1, tableMap, client);
                case VERSION_NEW:
                    // Always generate a new version, one greater than the latest (or 1 if none).
                    final int newVersionNumber = schema != null ? schema.getSchemaVersion() + 1 : 1;
                    return getNewDatabaseMetadata(properties, schemaName, newVersionNumber,
                            tableMap, client);
                case VERSION_LATEST_OR_NONE:
                default:
                    // Return the specific (or latest) version, or null if not found.
                    if (schema != null) {
                        LOGGER.info("Retrieved schema {} version {} in {} ms.",
                                schema.getSchemaName(), schema.getSchemaVersion(),
                                Instant.now().toEpochMilli() - beginRetrieval.toEpochMilli());
                    } else {
                        LOGGER.info("Could not find schema {} in database {}.", schemaName,
                                properties.getDatabase());
                    }
                    return schema;
            }
        } finally {
            closeSchemaReader(schemaReader);
        }
    }
    /**
     * Builds a map of table schema keyed by table ID, preserving iteration order.
     * On duplicate IDs, the first occurrence wins.
     */
    private static LinkedHashMap<String, DocumentDbSchemaTable> buildTableMapById(
            final Map<String, DocumentDbSchemaTable> tableMap) {
        return tableMap.values().stream()
                .collect(Collectors.toMap(
                        DocumentDbSchemaTable::getId,
                        t -> t,
                        (o, d) -> o,
                        LinkedHashMap::new));
    }
    /**
     * Gets the table schema associated with the given table ID.
     *
     * @param properties the connection properties.
     * @param schemaName the name of the schema.
     * @param schemaVersion the version of the schema.
     * @param tableId the table ID of the table.
     * @param client the {@link MongoClient} client.
     *
     * @return a {@link DocumentDbSchemaTable} that matches the table ID, read either from
     *         the in-process cache or from the persisted schema store.
     */
    @NonNull
    @SneakyThrows
    public static DocumentDbSchemaTable getTable(
            final @NonNull DocumentDbConnectionProperties properties,
            final @NonNull String schemaName,
            final int schemaVersion,
            final @NonNull String tableId,
            final MongoClient client) {
        // Should only be in this map if we failed to write it.
        if (TABLE_MAP.containsKey(tableId)) {
            return TABLE_MAP.get(tableId);
        }
        // Otherwise, assume it's in the stored location.
        final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, client);
        try {
            final DocumentDbSchemaTable schemaTable = schemaReader.readTable(schemaName, schemaVersion, tableId);
            if (client != null) {
                setEstimatedRecordCount(properties, client, schemaTable);
            }
            return schemaTable;
        } finally {
            closeSchemaReader(schemaReader);
        }
    }
    /**
     * Gets a map of table schema from the given set of table IDs.
     *
     * @param properties the connection properties.
     * @param schemaName the name of the database schema.
     * @param schemaVersion the version of the database schema.
     * @param remainingTableIds the set of tables IDs.
     * @param client the {@link MongoClient} client.
     *
     * @return a map of table schema using the table ID as key.
     */
    @SneakyThrows
    public static Map<String, DocumentDbSchemaTable> getTables(
            final @NonNull DocumentDbConnectionProperties properties,
            final @NonNull String schemaName,
            final int schemaVersion,
            final @NonNull Set<String> remainingTableIds,
            final MongoClient client) {
        // Should only be in this map if we failed to write it; serve entirely from the
        // cache only when it can satisfy every requested ID.
        final LinkedHashMap<String, DocumentDbSchemaTable> map = remainingTableIds.stream()
                .filter(TABLE_MAP::containsKey)
                .collect(Collectors.toMap(
                        tableId -> tableId,
                        TABLE_MAP::get,
                        (o, d) -> d,
                        LinkedHashMap::new));
        if (map.size() == remainingTableIds.size()) {
            return map;
        }
        // Otherwise, assume it's in the stored location.
        final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, client);
        try {
            final Map<String, DocumentDbSchemaTable> schemaTables = schemaReader
                    .readTables(schemaName, schemaVersion, remainingTableIds)
                    .stream()
                    .collect(Collectors.toMap(
                            DocumentDbSchemaTable::getId,
                            table -> table,
                            (o, d) -> d,
                            LinkedHashMap::new));
            if (client != null) {
                for (DocumentDbSchemaTable schemaTable : schemaTables.values()) {
                    setEstimatedRecordCount(properties, client, schemaTable);
                }
            }
            return schemaTables;
        } finally {
            closeSchemaReader(schemaReader);
        }
    }
    /**
     * Removes all versions of the schema with given schema name.
     *
     * @param properties the connection properties.
     * @param schemaName the name of the schema to remove.
     * @param client the {@link MongoClient} client.
     *
     * @throws SQLException if connection properties are incorrect.
     */
    public static void remove(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final MongoClient client) throws SQLException {
        final DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, client);
        try {
            schemaWriter.remove(schemaName);
        } finally {
            closeSchemaWriter(schemaWriter);
        }
    }
    /**
     * Removes the specific version of the schema with given schema name and schema version.
     *
     * @param properties the connection properties.
     * @param schemaName the schema name.
     * @param schemaVersion the schema version.
     * @param client the {@link MongoClient} client.
     *
     * @throws SQLException if connection properties are incorrect.
     */
    public static void remove(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final int schemaVersion,
            final MongoClient client) throws SQLException {
        final DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, client);
        try {
            schemaWriter.remove(schemaName, schemaVersion);
        } finally {
            closeSchemaWriter(schemaWriter);
        }
    }
    /**
     * Gets the list of all persisted schema.
     *
     * @param properties the connection properties.
     * @param client the {@link MongoClient} client.
     * @return a list of {@link DocumentDbSchema} schemas.
     * @throws SQLException if unable to connect.
     */
    public static List<DocumentDbSchema> getSchemaList(
            final DocumentDbConnectionProperties properties,
            final MongoClient client) throws SQLException {
        final DocumentDbSchemaReader schemaReader = new DocumentDbSchemaReader(properties, client);
        try {
            return schemaReader.list();
        } finally {
            closeSchemaReader(schemaReader);
        }
    }
    /**
     * Updates schema with the given table schema.
     *
     * @param properties the connection properties.
     * @param schemaName the name of the schema.
     * @param schemaTables the collection of updated table schema.
     * @param client the {@link MongoClient} client.
     *
     * @throws SQLException if unable to connect or other exception.
     * @throws DocumentDbSchemaSecurityException if unable to write to the database due to
     * unauthorized user.
     */
    public static void update(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final Collection<DocumentDbSchemaTable> schemaTables,
            final MongoClient client) throws SQLException, DocumentDbSchemaSecurityException {
        DocumentDbSchema schema = get(properties, schemaName, VERSION_LATEST_OR_NONE, client);
        if (schema == null) {
            // This is intentional because the update will increment the version.
            final int schemaVersion = 0;
            schema = new DocumentDbSchema(
                    schemaName,
                    properties.getDatabase(),
                    schemaVersion,
                    new LinkedHashMap<>());
            LOGGER.info("A new schema {} will be created.", schemaName);
        }
        final DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, client);
        try {
            schemaWriter.update(schema, schemaTables);
        } finally {
            closeSchemaWriter(schemaWriter);
        }
    }
    /**
     * Generates new schema metadata for the database, attempts to persist it, and returns it.
     * If persisting fails due to a security restriction, the generated table schema is
     * cached in-process so subsequent table lookups can still succeed.
     */
    private static DocumentDbSchema getNewDatabaseMetadata(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final int schemaVersion,
            final Map<String, DocumentDbSchemaTable> tableMap,
            final MongoClient client) throws SQLException {
        LOGGER.debug("Beginning generation of new metadata.");
        final Instant beginGeneration = Instant.now();
        final DocumentDbSchema schema = getCollectionMetadataDirect(
                schemaName,
                schemaVersion,
                properties.getDatabase(),
                properties,
                tableMap,
                client);
        final DocumentDbSchemaWriter schemaWriter = new DocumentDbSchemaWriter(properties, client);
        try {
            schemaWriter.write(schema, tableMap.values());
        } catch (DocumentDbSchemaSecurityException e) {
            // Unable to persist: fall back to the in-process cache so the schema is usable.
            TABLE_MAP.putAll(buildTableMapById(tableMap));
            LOGGER.warn(e.getMessage(), e);
        } finally {
            closeSchemaWriter(schemaWriter);
        }
        LOGGER.info("Successfully generated metadata in {} ms.",
                Instant.now().toEpochMilli() - beginGeneration.toEpochMilli());
        return schema;
    }
    /**
     * Gets the metadata for all the collections in a DocumentDB database by scanning
     * documents from each collection.
     *
     * @param schemaName the name of the schema.
     * @param schemaVersion the version of the schema.
     * @param databaseName the name of the database to scan.
     * @param properties the connection properties.
     * @param tableMap the output map of table schema, populated by this method.
     * @param client an existing {@link MongoClient}, or {@code null} to create (and close) one.
     *
     * @return a {@link DocumentDbSchema} referencing the generated table schema.
     */
    private static DocumentDbSchema getCollectionMetadataDirect(
            final String schemaName,
            final int schemaVersion,
            final String databaseName,
            final DocumentDbConnectionProperties properties,
            final Map<String, DocumentDbSchemaTable> tableMap,
            final MongoClient client) throws SQLException {
        // Only create (and later close) a client if one was not supplied.
        final MongoClient mongoClient = client != null
                ? client
                : properties.createMongoClient();
        try {
            final MongoDatabase database = mongoClient.getDatabase(databaseName);
            for (String collectionName : getFilteredCollectionNames(database)) {
                final MongoCollection<BsonDocument> collection = database
                        .getCollection(collectionName, BsonDocument.class);
                final Iterator<BsonDocument> cursor = DocumentDbMetadataScanner
                        .getIterator(properties, collection);
                // Create the schema metadata.
                final Map<String, DocumentDbSchemaTable> tableSchemaMap =
                        DocumentDbTableSchemaGenerator.generate(
                                collectionName, cursor);
                tableMap.putAll(tableSchemaMap);
            }
            final Set<String> tableReferences = tableMap.values().stream()
                    .map(DocumentDbSchemaTable::getId)
                    .collect(Collectors.toSet());
            return new DocumentDbSchema(schemaName, schemaVersion, databaseName,
                    new Date(Instant.now().toEpochMilli()), tableReferences);
        } finally {
            if (client == null) {
                mongoClient.close();
            }
        }
    }
    /**
     * Lists the database's collection names, excluding the driver's internal schema
     * storage collections.
     */
    private static List<String> getFilteredCollectionNames(final MongoDatabase database) {
        final Iterable<String> collectionNames = database.listCollectionNames();
        return StreamSupport
                .stream(collectionNames.spliterator(), false)
                .filter(c ->
                        !c.equals(DocumentDbSchemaReader.SCHEMA_COLLECTION)
                                && !c.equals(DocumentDbSchemaReader.TABLE_SCHEMA_COLLECTION))
                .collect(Collectors.toList());
    }
    /** Closes the reader, wrapping any exception in a {@link SQLException}. */
    private static void closeSchemaReader(final DocumentDbSchemaReader schemaReader) throws SQLException {
        try {
            schemaReader.close();
        } catch (Exception e) {
            throw new SQLException(e.getMessage(), e);
        }
    }
    /** Closes the writer, wrapping any exception in a {@link SQLException}. */
    private static void closeSchemaWriter(final DocumentDbSchemaWriter schemaWriter) throws SQLException {
        try {
            schemaWriter.close();
        } catch (Exception e) {
            throw new SQLException(e.getMessage(), e);
        }
    }
    /**
     * Sets the table's estimated record count using a server-side estimate,
     * capped at one second of query time.
     */
    private static void setEstimatedRecordCount(
            final DocumentDbConnectionProperties properties,
            final MongoClient client,
            final DocumentDbSchemaTable schemaTable) {
        final EstimatedDocumentCountOptions options = new EstimatedDocumentCountOptions()
                .maxTime(1, TimeUnit.SECONDS);
        final MongoCollection<Document> collection = client
                .getDatabase(properties.getDatabase())
                .getCollection(schemaTable.getCollectionName());
        final long estimatedRecordCount = collection.estimatedDocumentCount(options);
        schemaTable.setEstimatedRecordCount(estimatedRecordCount);
    }
}
| 4,625 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbTableSchemaGenerator.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import lombok.Getter;
import org.bson.BsonArray;
import org.bson.BsonDocument;
import org.bson.BsonType;
import org.bson.BsonValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.utilities.JdbcType;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;
import static org.bson.BsonType.OBJECT_ID;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.EMPTY_STRING;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.KEY_COLUMN_NONE;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.addToForeignKeysIfIsPrimary;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.checkVirtualTablePrimaryKeys;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.combinePath;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.getFieldNameIfIsPrimaryKey;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.getPrevIndexOrDefault;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.getPrevSqlTypeOrDefault;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.getPrimaryKeyColumn;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.getPromotedSqlType;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.getSqlTypeIfIsPrimaryKey;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.getVirtualTableNameIfIsPrimaryKey;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.handleArrayLevelConflict;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.handleComplexScalarConflict;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.isComplexType;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.isIdField;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbTableSchemaGeneratorHelper.toName;
/**
* Represents the fields in a collection and their data types. A collection can be broken up into
* one (the base table) or more tables (virtual tables from embedded documents or arrays).
*/
@Getter
public class DocumentDbTableSchemaGenerator {
private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbTableSchemaGenerator.class);
private static final String INDEX_COLUMN_NAME_PREFIX = "index_lvl_";
private static final String VALUE_COLUMN_NAME = "value";
/**
* Creates new collection metadata for a given collection from the provided data.
*
* @param collectionName the name of the collection this model should refer.
* @param cursor the cursor for the data from which to create a model.
* @return a new {@link DocumentDbTableSchemaGenerator} built from the data.
*/
public static Map<String, DocumentDbSchemaTable> generate(
final String collectionName,
final Iterator<BsonDocument> cursor) {
final LinkedHashMap<String, DocumentDbSchemaTable> tableMap = new LinkedHashMap<>();
final Map<String, String> tableNameMap = new HashMap<>();
while (cursor.hasNext()) {
final BsonDocument document = cursor.next();
processDocument(document, tableMap, new ArrayList<>(),
EMPTY_STRING, collectionName, true, tableNameMap);
}
// Remove array and document columns that are used for interim processing.
filterArrayAndDocumentColumns(tableMap);
return tableMap;
}
    /**
     * Removes interim ARRAY and JAVA_OBJECT columns from each table's column map.
     * These entries track embedded arrays/documents during scanning (which become
     * virtual tables) and are not real SQL columns.
     *
     * @param tableMap the map of table schema to filter; updated in place.
     */
    private static void filterArrayAndDocumentColumns(final LinkedHashMap<String, DocumentDbSchemaTable> tableMap) {
        for (DocumentDbSchemaTable table : tableMap.values()) {
            // Only rebuild the table when at least one interim column is present.
            final boolean needsUpdate = table.getColumnMap().values().stream()
                    .anyMatch(c -> c.getSqlType() == JdbcType.ARRAY || c.getSqlType() == JdbcType.JAVA_OBJECT);
            if (needsUpdate) {
                // Keep remaining columns in their original order; on duplicate SQL
                // names, keep the first occurrence.
                final LinkedHashMap<String, DocumentDbSchemaColumn> columns = table
                        .getColumnMap().values().stream()
                        .filter(c -> c.getSqlType() != JdbcType.ARRAY && c.getSqlType() != JdbcType.JAVA_OBJECT)
                        .collect(Collectors.toMap(
                                DocumentDbSchemaColumn::getSqlName,
                                c -> c,
                                (o, d) -> o,
                                LinkedHashMap::new));
                // NOTE(review): this guard looks always-false — the table being replaced is
                // already keyed by its SQL name (see processDocument) — confirm intent.
                if (LOGGER.isDebugEnabled() && !tableMap.containsKey(table.getSqlName())) {
                    LOGGER.debug(String.format("Added schema for table %s.", table.getSqlName()));
                }
                // put() on an existing key is not a structural modification, so replacing
                // the value while iterating values() is safe.
                tableMap.put(table.getSqlName(), DocumentDbMetadataTable.builder()
                        .sqlName(table.getSqlName())
                        .collectionName(table.getCollectionName())
                        .columns(columns)
                        .build());
            }
        }
    }
    /**
     * Process a document including fields, sub-documents and arrays.
     *
     * @param document the document to process.
     * @param tableMap the map of virtual tables; updated in place.
     * @param foreignKeys the list of foreign keys passed down from the parent table.
     * @param path the path for this field.
     * @param collectionName the name of the collection being scanned.
     * @param isRootDocument {@code true} when processing a top-level collection document;
     *                       {@code false} for embedded documents.
     * @param tableNameMap the map of table path to (shortened) names.
     */
    private static void processDocument(
            final BsonDocument document,
            final Map<String, DocumentDbSchemaTable> tableMap,
            final List<DocumentDbMetadataColumn> foreignKeys,
            final String path,
            final String collectionName,
            final boolean isRootDocument,
            final Map<String, String> tableNameMap) {
        // Need to preserve order of fields.
        final LinkedHashMap<String, DocumentDbSchemaColumn> columnMap = new LinkedHashMap<>();
        final String tableName = toName(combinePath(collectionName, path), tableNameMap);
        if (tableMap.containsKey(tableName)) {
            // If we've already visited this document/table,
            // start with the previously discovered columns.
            // This will have included and primary/foreign key definitions.
            columnMap.putAll(tableMap.get(tableName).getColumnMap());
        } else {
            // Add foreign keys.
            //
            // Foreign key(s) are the primary key(s) passed from the parent table.
            // Minimally, this is the primary key for the "_id" field.
            //
            // If called from an array parent, it will also include the "index_lvl_<n>"
            // column(s) from the previous level in the array.
            //
            // The primaryKeyColumn and foreignKeyColumn are the one-indexed value
            // referencing the order withing the primary or foreign key column.
            int primaryKeyColumn = KEY_COLUMN_NONE;
            for (DocumentDbMetadataColumn column : foreignKeys) {
                primaryKeyColumn++;
                buildForeignKeysFromDocument(columnMap, tableName, primaryKeyColumn, column);
            }
        }
        // Maps each existing SQL column name to itself; passed to the naming helpers —
        // presumably for detecting/resolving name collisions (TODO confirm against helper).
        final Map<String, String> columnNameMap = columnMap.values().stream().collect(
                Collectors.toMap(
                        DocumentDbSchemaColumn::getSqlName,
                        DocumentDbSchemaColumn::getSqlName));
        // Process all fields in the document
        for (Entry<String, BsonValue> entry : document.entrySet()) {
            final String fieldName = entry.getKey();
            final String fieldPath = combinePath(path, fieldName);
            final BsonValue bsonValue = entry.getValue();
            final BsonType bsonType = bsonValue.getBsonType();
            // Only the root document's ID field (see isIdField) becomes a primary key.
            final boolean isPrimaryKey = isRootDocument && isIdField(fieldName);
            final String columnName = getFieldNameIfIsPrimaryKey(
                    collectionName, fieldName, isPrimaryKey, columnNameMap);
            final DocumentDbMetadataColumn prevMetadataColumn = (DocumentDbMetadataColumn) columnMap
                    .getOrDefault(columnName, null);
            // ASSUMPTION: relying on the behaviour that the "_id" field will ALWAYS be first
            // in the root document.
            final JdbcType prevSqlType = getPrevSqlTypeOrDefault(prevMetadataColumn);
            final JdbcType nextSqlType = getSqlTypeIfIsPrimaryKey(bsonType, prevSqlType, isPrimaryKey);
            // Log (debug only) when this document's type disagrees with previously
            // discovered type for the same column.
            if (LOGGER.isDebugEnabled()) {
                final JdbcType currentDocType = getSqlTypeIfIsPrimaryKey(bsonType, JdbcType.NULL, isPrimaryKey);
                if (!prevSqlType.equals(currentDocType) && prevMetadataColumn != null) {
                    LOGGER.debug(String.format("Type conflict in table %s, types %s and %s mapped to %s.",
                            tableName, prevSqlType.name(), currentDocType, nextSqlType.name()));
                }
            }
            // Recurse into embedded documents/arrays before recording this column; a copy
            // of foreignKeys is passed so child tables cannot mutate this level's list.
            processComplexTypes(
                    tableMap,
                    new ArrayList<>(foreignKeys),
                    collectionName,
                    entry,
                    fieldPath,
                    bsonType,
                    prevMetadataColumn,
                    nextSqlType,
                    tableNameMap);
            final DocumentDbMetadataColumn metadataColumn = DocumentDbMetadataColumn
                    .builder()
                    .fieldPath(fieldPath)
                    .sqlName(columnName)
                    .sqlType(nextSqlType)
                    .dbType(getPromotedBsonType(bsonType, prevMetadataColumn))
                    .isIndex(false)
                    .isPrimaryKey(isPrimaryKey)
                    .index(getPrevIndexOrDefault(prevMetadataColumn, columnMap.size() + 1))
                    .tableName(tableName)
                    .primaryKeyIndex(getPrimaryKeyColumn(isPrimaryKey))
                    .foreignKeyIndex(KEY_COLUMN_NONE)
                    .isGenerated(false)
                    .virtualTableName(getVirtualTableNameIfIsPrimaryKey(
                            fieldPath, nextSqlType, isPrimaryKey, collectionName, tableNameMap))
                    .build();
            columnMap.put(metadataColumn.getSqlName(), metadataColumn);
            addToForeignKeysIfIsPrimary(foreignKeys, isPrimaryKey, metadataColumn);
        }
        // Ensure virtual table primary key column data types are consistent.
        if (isRootDocument) {
            checkVirtualTablePrimaryKeys(tableMap, collectionName, columnMap, columnNameMap);
        }
        // Add virtual table.
        final DocumentDbMetadataTable metadataTable = DocumentDbMetadataTable
                .builder()
                .sqlName(tableName)
                .collectionName(collectionName)
                .columns(columnMap)
                .build();
        if (LOGGER.isDebugEnabled() && !tableMap.containsKey(metadataTable.getSqlName())) {
            LOGGER.debug(String.format("Added schema for table %s.", metadataTable.getSqlName()));
        }
        tableMap.put(metadataTable.getSqlName(), metadataTable);
    }
private static BsonType getPromotedBsonType(
final BsonType bsonType,
final DocumentDbMetadataColumn prevMetadataColumn) {
final BsonType returnBsonType;
// OBJECT_ID
if (isPreviousOfType(prevMetadataColumn, OBJECT_ID) && bsonType != OBJECT_ID) {
returnBsonType = OBJECT_ID;
} else {
returnBsonType = bsonType;
}
return returnBsonType;
}
private static boolean isPreviousOfType(
final DocumentDbMetadataColumn prevMetadataColumn,
final BsonType testType) {
return prevMetadataColumn != null
&& prevMetadataColumn.getDbType() == testType;
}
/**
 * Processes an array field, including sub-documents, and sub-arrays.
 * Builds (or merges into) the virtual table that represents this array,
 * adding foreign-key columns inherited from the parent, an index column
 * for this array level, and then recursing into documents/arrays or
 * recording a scalar "value" column.
 *
 * @param array the array value to process.
 * @param tableMap the map of virtual tables
 * @param foreignKeys the list of foreign keys.
 * @param path the path for this field.
 * @param arrayLevel the zero-indexed level of the array.
 * @param collectionName the name of the collection.
 * @param tableNameMap the map of table path to (shortened) names.
 */
private static void processArray(
        final BsonArray array,
        final Map<String, DocumentDbSchemaTable> tableMap,
        final List<DocumentDbMetadataColumn> foreignKeys,
        final String path,
        final int arrayLevel,
        final String collectionName,
        final Map<String, String> tableNameMap) {
    // Need to preserve order of fields.
    final LinkedHashMap<String, DocumentDbSchemaColumn> columnMap = new LinkedHashMap<>();
    int primaryKeyColumn = KEY_COLUMN_NONE;
    int level = arrayLevel;
    DocumentDbMetadataColumn metadataColumn;
    JdbcType prevSqlType = JdbcType.NULL;
    JdbcType sqlType;
    final String tableName = toName(combinePath(collectionName, path), tableNameMap);
    if (tableMap.containsKey(tableName)) {
        // If we've already visited this document/table,
        // start with the previously discovered columns.
        // This will have included and primary/foreign key definitions.
        columnMap.putAll(tableMap.get(tableName).getColumnMap());
        final String valueColumnPath = VALUE_COLUMN_NAME;
        // TODO: Figure out if previous type was array of array.
        if (columnMap.containsKey(toName(valueColumnPath, tableNameMap))) {
            prevSqlType = columnMap.get(toName(valueColumnPath, tableNameMap)).getSqlType();
        } else {
            // No scalar "value" column recorded before, so the prior elements
            // must have been documents (a virtual table with its own columns).
            prevSqlType = JdbcType.JAVA_OBJECT;
        }
    }
    // Find the promoted SQL data type for all elements.
    sqlType = prevSqlType;
    for (BsonValue element : array) {
        sqlType = getPromotedSqlType(element.getBsonType(), sqlType);
        if (LOGGER.isDebugEnabled()) {
            // Report per-element type conflicts only when debug logging is on;
            // the extra promotion below is purely diagnostic.
            final JdbcType currentSqlType = getPromotedSqlType(element.getBsonType(), JdbcType.NULL);
            if (!prevSqlType.equals(currentSqlType)) {
                LOGGER.debug(String.format("Type conflict in array table %s, types %s and %s mapped to %s.",
                        tableName, prevSqlType.name(), currentSqlType, sqlType.name()));
            }
        }
    }
    if (!isComplexType(sqlType)) {
        if (isComplexType(prevSqlType)) {
            // If promoted to scalar type from complex type, remove previous definition.
            handleComplexScalarConflict(tableMap, tableName, columnMap);
        } else {
            // Check to see if we're processing scalars at a different level than previously
            // detected.
            sqlType = handleArrayLevelConflict(columnMap, level, sqlType);
        }
    } else if (isComplexType(sqlType) && !isComplexType(prevSqlType)) {
        // Promoted from NULL to ARRAY or OBJECT.
        handleComplexScalarConflict(tableMap, tableName, columnMap);
    }
    if (!tableMap.containsKey(tableName)) {
        // Add foreign keys.
        //
        // Foreign key(s) are the primary key(s) passed from the parent table.
        // Minimally, this is the primary key for the "_id" field.
        //
        // If called from an array parent, it will also include the "index_lvl_<n>"
        // column(s) from the previous level in the array.
        //
        // The primaryKeyColumn and foreignKeyColumn are the one-indexed value
        // referencing the order withing the primary or foreign key column.
        for (DocumentDbMetadataColumn column : foreignKeys) {
            primaryKeyColumn++;
            metadataColumn = DocumentDbMetadataColumn
                    .builder()
                    .fieldPath(column.getFieldPath())
                    .sqlName(column.getSqlName())
                    .sqlType(column.getSqlType())
                    .dbType(column.getDbType())
                    .isIndex(column.isIndex())
                    .isPrimaryKey(primaryKeyColumn != 0)
                    // A key originating from another table is a foreign key here.
                    .foreignKeyTableName(column.getTableName().equals(tableName)
                            ? null
                            : column.getTableName())
                    .index(column.getIndex())
                    .tableName(tableName)
                    .primaryKeyIndex(primaryKeyColumn)
                    .foreignKeyIndex(column.getTableName().equals(tableName)
                            ? KEY_COLUMN_NONE
                            : primaryKeyColumn)
                    .virtualTableName(column.getVirtualTableName())
                    .arrayIndexLevel(column.getArrayIndexLevel())
                    .isGenerated(column.isGenerated())
                    .build();
            metadataColumn.setForeignKeyColumnName(metadataColumn.getForeignKeyTableName() != null
                    ? column.getSqlName()
                    : null);
            columnMap.put(metadataColumn.getSqlName(), metadataColumn);
        }
    }
    final Map<String, String> columnNameMap = columnMap.values().stream().collect(
            Collectors.toMap(
                    DocumentDbSchemaColumn::getSqlName,
                    DocumentDbSchemaColumn::getSqlName));
    final String indexColumnName = toName(
            combinePath(path, INDEX_COLUMN_NAME_PREFIX + level),
            columnNameMap);
    final DocumentDbMetadataColumn indexColumn;
    if (!columnMap.containsKey(indexColumnName)) {
        // Add index column. Although it has no path in the original document, we will
        // use the path of the generated index field once the original document is unwound.
        primaryKeyColumn++;
        indexColumn = DocumentDbMetadataColumn
                .builder()
                .sqlName(indexColumnName)
                .fieldPath(path) // Once unwound, the index will be at root level so path = name.
                .sqlType(JdbcType.BIGINT)
                .isIndex(true)
                .isPrimaryKey(true)
                .index(columnMap.size() + 1)
                .tableName(tableName)
                .primaryKeyIndex(primaryKeyColumn)
                .foreignKeyIndex(KEY_COLUMN_NONE)
                .arrayIndexLevel(level)
                .isGenerated(true)
                .build();
        columnMap.put(indexColumn.getSqlName(), indexColumn);
    } else {
        // Cast exception should not occur, because we are always creating DocumentDbMetadataColumn.
        indexColumn = (DocumentDbMetadataColumn) columnMap.get(indexColumnName);
    }
    // Add index column to foreign keys
    foreignKeys.add(indexColumn);
    // Add documents, arrays or just the scalar value.
    switch (sqlType) {
        case JAVA_OBJECT:
            processDocumentsInArray(array,
                    tableMap,
                    foreignKeys,
                    path,
                    collectionName,
                    tableNameMap);
            break;
        case ARRAY:
            // This will add another level to the array.
            level++;
            processArrayInArray(array,
                    tableMap,
                    foreignKeys,
                    path,
                    collectionName,
                    level,
                    tableNameMap);
            break;
        default:
            processValuesInArray(
                    tableMap,
                    path,
                    collectionName,
                    columnMap,
                    sqlType,
                    tableNameMap);
            break;
    }
}
/**
 * Records the scalar elements of an array as a single "value" column on the
 * array's virtual table, then registers (or replaces) that table in the
 * table map.
 *
 * @param tableMap the table map of virtual tables.
 * @param path the path to this array
 * @param collectionName the name of the collection.
 * @param columnMap the map of columns for this virtual table.
 * @param sqlType the promoted SQL data type to use for this array.
 * @param tableNameMap the map of table path to (shortened) names.
 */
private static void processValuesInArray(
        final Map<String, DocumentDbSchemaTable> tableMap,
        final String path,
        final String collectionName,
        final LinkedHashMap<String, DocumentDbSchemaColumn> columnMap,
        final JdbcType sqlType,
        final Map<String, String> tableNameMap) {
    final String tableName = toName(combinePath(collectionName, path), tableNameMap);
    final Map<String, String> columnNameMap = columnMap.values().stream().collect(
            Collectors.toMap(
                    DocumentDbSchemaColumn::getSqlName,
                    DocumentDbSchemaColumn::getSqlName));
    // Look up any existing "value" column so its index position is preserved.
    final String valueColumnName = toName(VALUE_COLUMN_NAME, columnNameMap);
    final DocumentDbMetadataColumn existingValueColumn =
            (DocumentDbMetadataColumn) columnMap.get(valueColumnName);
    // Build the scalar value column for this array.
    final DocumentDbMetadataColumn valueColumn = DocumentDbMetadataColumn
            .builder()
            .fieldPath(path)
            .sqlName(valueColumnName)
            .sqlType(sqlType)
            .isIndex(false)
            .isPrimaryKey(false)
            .index(getPrevIndexOrDefault(existingValueColumn, columnMap.size() + 1))
            .tableName(tableName)
            .primaryKeyIndex(KEY_COLUMN_NONE)
            .foreignKeyIndex(KEY_COLUMN_NONE)
            .isGenerated(false)
            .build();
    columnMap.put(valueColumn.getSqlName(), valueColumn);
    final DocumentDbMetadataTable virtualTable = DocumentDbMetadataTable
            .builder()
            .sqlName(tableName)
            .collectionName(collectionName)
            .columns(columnMap)
            .build();
    // Log only on first registration of this table.
    if (!tableMap.containsKey(virtualTable.getSqlName()) && LOGGER.isDebugEnabled()) {
        LOGGER.debug(String.format("Added schema for table %s.", virtualTable.getSqlName()));
    }
    tableMap.put(virtualTable.getSqlName(), virtualTable);
}
/**
 * Recursively processes each non-null array element as a nested array,
 * one level deeper than the current array.
 *
 * @param array the array elements to scan.
 * @param tableMap the table map of virtual tables.
 * @param foreignKeys the list of foreign keys.
 * @param path the path to this array
 * @param collectionName the name of the collection.
 * @param level the current level of this array.
 * @param tableNameMap the map of table path to (shortened) names.
 */
private static void processArrayInArray(
        final BsonArray array,
        final Map<String, DocumentDbSchemaTable> tableMap,
        final List<DocumentDbMetadataColumn> foreignKeys,
        final String path,
        final String collectionName,
        final int level,
        final Map<String, String> tableNameMap) {
    for (BsonValue element : array) {
        // Null entries contribute nothing to the nested array's schema.
        if (element.isNull()) {
            continue;
        }
        processArray(
                element.asArray(),
                tableMap,
                foreignKeys,
                path,
                level,
                collectionName,
                tableNameMap);
    }
}
/**
 * Processes each non-null document element of an array, merging the
 * documents' fields into the array's virtual table.
 *
 * @param array the array elements to scan.
 * @param tableMap the table map of virtual tables.
 * @param foreignKeys the list of foreign keys.
 * @param path the path to this array
 * @param collectionName the name of the collection encountered.
 * @param tableNameMap the map of table path to (shortened) names.
 */
private static void processDocumentsInArray(
        final BsonArray array,
        final Map<String, DocumentDbSchemaTable> tableMap,
        final List<DocumentDbMetadataColumn> foreignKeys,
        final String path,
        final String collectionName,
        final Map<String, String> tableNameMap) {
    // Each document's fields become columns of this array's virtual table.
    for (BsonValue element : array) {
        if (element.isNull()) {
            continue;
        }
        processDocument(element.asDocument(),
                tableMap, foreignKeys, path, collectionName, false, tableNameMap);
    }
}
/**
 * Dispatches a field with a complex promoted type (document or array) to the
 * matching recursive handler; for a scalar promotion, discards any virtual
 * table the field previously produced as a complex type.
 *
 * @param tableMap           the map of virtual tables.
 * @param foreignKeys        the foreign keys to pass down to child tables.
 * @param collectionName     the name of the collection.
 * @param entry              the field entry (name and BSON value).
 * @param fieldPath          the full path of the field.
 * @param bsonType           the BSON type observed in the current sample.
 * @param prevMetadataColumn the previously recorded column, or {@code null}.
 * @param nextSqlType        the promoted SQL type for the field.
 * @param tableNameMap       the map of table path to (shortened) names.
 */
private static void processComplexTypes(
        final Map<String, DocumentDbSchemaTable> tableMap,
        final List<DocumentDbMetadataColumn> foreignKeys,
        final String collectionName,
        final Entry<String, BsonValue> entry,
        final String fieldPath,
        final BsonType bsonType,
        final DocumentDbMetadataColumn prevMetadataColumn,
        final JdbcType nextSqlType,
        final Map<String, String> tableNameMap) {
    // A BSON NULL sample carries no structure, so complex recursion is skipped.
    final boolean hasValue = bsonType != BsonType.NULL;
    if (nextSqlType == JdbcType.JAVA_OBJECT && hasValue) {
        // Creates or updates the virtual table for the sub-document.
        processDocument(entry.getValue().asDocument(),
                tableMap, foreignKeys, fieldPath, collectionName, false, tableNameMap);
    } else if (nextSqlType == JdbcType.ARRAY && hasValue) {
        // Creates or updates the virtual table for the array (level 0).
        processArray(entry.getValue().asArray(),
                tableMap, foreignKeys, fieldPath, 0, collectionName, tableNameMap);
    } else {
        // Scalar promotion: if the field previously produced a virtual table,
        // that table is now obsolete and must be removed.
        final boolean hadVirtualTable = prevMetadataColumn != null
                && prevMetadataColumn.getVirtualTableName() != null;
        if (hadVirtualTable && hasValue) {
            tableMap.remove(prevMetadataColumn.getVirtualTableName());
        }
    }
}
// Intentionally delegates to Object's reference-equality; the override exists
// so equals/hashCode are declared as a pair for static-analysis tools.
@Override
public boolean equals(final Object o) {
    return super.equals(o);
}
// Intentionally delegates to Object's identity hash, keeping the contract
// consistent with the reference-equality equals(Object) above... declared
// explicitly so the equals/hashCode pair is visible to static analysis.
@Override
public int hashCode() {
    return super.hashCode();
}
/**
 * Copies a parent key column into a child table's column map, marking it as a
 * foreign key when the column originates from a different table.
 *
 * @param columnMap        the child table's column map to add to.
 * @param tableName        the SQL name of the child table being built.
 * @param primaryKeyColumn the one-indexed position within the primary key.
 * @param column           the parent key column to copy.
 */
private static void buildForeignKeysFromDocument(
        final LinkedHashMap<String, DocumentDbSchemaColumn> columnMap,
        final String tableName,
        final int primaryKeyColumn,
        final DocumentDbMetadataColumn column) {
    // A key column belonging to this same table is part of the local primary
    // key only; one from another table is also a foreign key reference.
    final boolean isLocalColumn = column.getTableName().equals(tableName);
    final DocumentDbMetadataColumn metadataColumn = DocumentDbMetadataColumn
            .builder()
            .fieldPath(column.getFieldPath())
            .sqlName(column.getSqlName())
            .sqlType(column.getSqlType())
            .dbType(column.getDbType())
            .isIndex(column.isIndex())
            .isPrimaryKey(column.isPrimaryKey())
            .foreignKeyTableName(isLocalColumn ? null : column.getTableName())
            .foreignKeyColumnName(isLocalColumn ? null : column.getSqlName())
            .index(columnMap.size() + 1)
            .tableName(column.getTableName())
            .primaryKeyIndex(primaryKeyColumn)
            .foreignKeyIndex(isLocalColumn ? KEY_COLUMN_NONE : primaryKeyColumn)
            .arrayIndexLevel(column.getArrayIndexLevel())
            .isGenerated(column.isGenerated())
            .build();
    columnMap.put(metadataColumn.getSqlName(), metadataColumn);
}
}
| 4,626 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbTableSchemaGeneratorHelper.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import org.bson.BsonType;
import software.amazon.documentdb.jdbc.common.utilities.JdbcType;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.regex.MatchResult;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.apache.calcite.sql.parser.SqlParser.DEFAULT_IDENTIFIER_MAX_LENGTH;
public class DocumentDbTableSchemaGeneratorHelper {
// Reused empty-string value for absent path/field components.
static final String EMPTY_STRING = "";
// Marks a column that is not part of a primary or foreign key (key positions are one-indexed).
static final int KEY_COLUMN_NONE = 0;
// Separator between components of a fully-qualified field path.
private static final String PATH_SEPARATOR = ".";
// Name of the document identifier field present in every DocumentDB document.
private static final String ID_FIELD_NAME = "_id";
// One-indexed position of the "_id" column within a table's primary key.
private static final int ID_PRIMARY_KEY_COLUMN = 1;
/**
 * The map of data type promotions: given the SQL type inferred so far and the
 * BSON type of the next sampled value, yields the (possibly widened) SQL type
 * that can represent both. Lookups for combinations not listed here fall back
 * to VARCHAR (see {@code getPromotedSqlType}).
 *
 * @see <a href="https://github.com/aws/amazon-documentdb-jdbc-driver#data-type-conflict-promotion">
 * Map Relational Schemas to DocumentDB - Scalar-Scalar Conflicts</a>
 */
private static final ImmutableMap<Entry<JdbcType, BsonType>, JdbcType> PROMOTION_MAP =
        new ImmutableMap.Builder<Entry<JdbcType, BsonType>, JdbcType>()
                // From NULL (no prior type): take the natural mapping of the BSON type.
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.BOOLEAN), JdbcType.BOOLEAN)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.DATE_TIME), JdbcType.TIMESTAMP)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.DECIMAL128), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.DOUBLE), JdbcType.DOUBLE)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.INT32), JdbcType.INTEGER)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.INT64), JdbcType.BIGINT)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.TIMESTAMP), JdbcType.TIMESTAMP)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.NULL), JdbcType.NULL)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.ARRAY), JdbcType.ARRAY)
                .put(new SimpleEntry<>(JdbcType.NULL, BsonType.DOCUMENT), JdbcType.JAVA_OBJECT)
                // From ARRAY: only NULL/ARRAY keep the array; scalars collapse to VARCHAR.
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.BOOLEAN), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.DATE_TIME), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.DECIMAL128), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.DOUBLE), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.INT32), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.INT64), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.TIMESTAMP), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.NULL), JdbcType.ARRAY)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.ARRAY), JdbcType.ARRAY)
                .put(new SimpleEntry<>(JdbcType.ARRAY, BsonType.DOCUMENT), JdbcType.VARCHAR)
                // From JAVA_OBJECT (document): only NULL/DOCUMENT keep it.
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.BOOLEAN), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.DATE_TIME), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.DECIMAL128), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.DOUBLE), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.INT32), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.INT64), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.TIMESTAMP), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.NULL), JdbcType.JAVA_OBJECT)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.ARRAY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.JAVA_OBJECT, BsonType.DOCUMENT), JdbcType.JAVA_OBJECT)
                // From BOOLEAN: numeric BSON types widen to the numeric SQL type.
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.BOOLEAN), JdbcType.BOOLEAN)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.DATE_TIME), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.DECIMAL128), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.DOUBLE), JdbcType.DOUBLE)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.INT32), JdbcType.INTEGER)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.INT64), JdbcType.BIGINT)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.TIMESTAMP), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.NULL), JdbcType.BOOLEAN)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.ARRAY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BOOLEAN, BsonType.DOCUMENT), JdbcType.VARCHAR)
                // From BIGINT: DOUBLE widens to DECIMAL to avoid losing 64-bit precision.
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.BOOLEAN), JdbcType.BIGINT)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.DATE_TIME), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.DECIMAL128), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.DOUBLE), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.INT32), JdbcType.BIGINT)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.INT64), JdbcType.BIGINT)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.TIMESTAMP), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.NULL), JdbcType.BIGINT)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.ARRAY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.BIGINT, BsonType.DOCUMENT), JdbcType.VARCHAR)
                // From DECIMAL: widest numeric type; all numerics stay DECIMAL.
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.BOOLEAN), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.DATE_TIME), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.DECIMAL128), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.DOUBLE), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.INT32), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.INT64), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.TIMESTAMP), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.NULL), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.ARRAY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DECIMAL, BsonType.DOCUMENT), JdbcType.VARCHAR)
                // From DOUBLE: INT64/DECIMAL128 widen to DECIMAL for precision.
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.BOOLEAN), JdbcType.DOUBLE)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.DATE_TIME), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.DECIMAL128), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.DOUBLE), JdbcType.DOUBLE)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.INT32), JdbcType.DOUBLE)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.INT64), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.TIMESTAMP), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.NULL), JdbcType.DOUBLE)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.ARRAY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.DOUBLE, BsonType.DOCUMENT), JdbcType.VARCHAR)
                // From INTEGER: widens through BIGINT/DOUBLE/DECIMAL as needed.
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.BOOLEAN), JdbcType.INTEGER)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.DATE_TIME), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.DECIMAL128), JdbcType.DECIMAL)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.DOUBLE), JdbcType.DOUBLE)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.INT32), JdbcType.INTEGER)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.INT64), JdbcType.BIGINT)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.TIMESTAMP), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.NULL), JdbcType.INTEGER)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.ARRAY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.INTEGER, BsonType.DOCUMENT), JdbcType.VARCHAR)
                // From TIMESTAMP: only date/time BSON types keep it.
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.BOOLEAN), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.DATE_TIME), JdbcType.TIMESTAMP)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.DECIMAL128), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.DOUBLE), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.INT32), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.INT64), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.TIMESTAMP), JdbcType.TIMESTAMP)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.NULL), JdbcType.TIMESTAMP)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.ARRAY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.TIMESTAMP, BsonType.DOCUMENT), JdbcType.VARCHAR)
                // From VARBINARY: terminal -- every combination stays VARBINARY.
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.BOOLEAN), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.DATE_TIME), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.DECIMAL128), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.DOUBLE), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.INT32), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.INT64), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.TIMESTAMP), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.MAX_KEY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.MIN_KEY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.NULL), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.OBJECT_ID), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.STRING), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.ARRAY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARBINARY, BsonType.DOCUMENT), JdbcType.VARBINARY)
                // From VARCHAR: near-terminal; only BINARY widens further (to VARBINARY).
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.BOOLEAN), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.BINARY), JdbcType.VARBINARY)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.DATE_TIME), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.DECIMAL128), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.DOUBLE), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.INT32), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.INT64), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.TIMESTAMP), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.MAX_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.MIN_KEY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.NULL), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.OBJECT_ID), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.STRING), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.ARRAY), JdbcType.VARCHAR)
                .put(new SimpleEntry<>(JdbcType.VARCHAR, BsonType.DOCUMENT), JdbcType.VARCHAR)
                .build();
/**
 * Combines two paths to form a new path.
 *
 * @param path the root/parent path; may be null or empty.
 * @param fieldName the field name to append to the path; may be null or empty.
 * @return a new path with the fieldName appended to the root path separated
 *         by a period; if either part is absent, the other part alone; if
 *         both are absent, the empty string.
 */
public static String combinePath(final String path, final String fieldName) {
    // Normalize null to empty so either side may be absent.
    final String left = (path == null || path.isEmpty()) ? "" : path;
    final String right = (fieldName == null || fieldName.isEmpty()) ? "" : fieldName;
    // The "." separator is inserted only when both parts are present.
    if (left.isEmpty() || right.isEmpty()) {
        return left + right;
    }
    return left + "." + right;
}
/**
 * Gets the promoted SQL data type from previous SQL data type and the current BSON data type.
 *
 * @param bsonType the current BSON data type.
 * @param prevSqlType the previous SQL data type.
 * @return returns the promoted SQL data type; VARCHAR for any combination
 *         not present in the promotion map.
 */
@VisibleForTesting
static JdbcType getPromotedSqlType(final BsonType bsonType, final JdbcType prevSqlType) {
    // Unlisted combinations degrade to VARCHAR, the universal fallback.
    return PROMOTION_MAP.getOrDefault(
            new SimpleEntry<>(prevSqlType, bsonType), JdbcType.VARCHAR);
}
/**
 * Gets whether the field is the "_id" field.
 *
 * @param fieldName the name of the field; may be null.
 * @return returns {@code true} if the field name is "_id", {@code false} otherwise.
 */
static boolean isIdField(final String fieldName) {
    // Null-safe: a null field name is never the identifier field.
    return fieldName != null && fieldName.equals(ID_FIELD_NAME);
}
/**
 * Handles a complex-to-scalar conflict: the field that previously produced a
 * virtual table is now scalar, so the stale table entry and any columns
 * gathered for it are discarded.
 *
 * @param tableMap  the table map to remove the stale entry from.
 * @param path      the table-map key (table name) of the stale virtual table.
 * @param columnMap the column map to empty.
 */
static void handleComplexScalarConflict(
        final Map<String, DocumentDbSchemaTable> tableMap,
        final String path,
        final Map<String, DocumentDbSchemaColumn> columnMap) {
    // Drop the previously registered virtual table for this path...
    tableMap.remove(path);
    // ...and start column discovery over from scratch.
    columnMap.clear();
}
/**
 * Detects and handles a conflict at a lower level of the array: index
 * columns recorded for deeper array levels are removed, and the SQL type is
 * promoted as if a STRING had been observed (collapsing to VARCHAR or
 * VARBINARY) once per removed column.
 *
 * @param columnMap the column map to modify.
 * @param level the current array level.
 * @param sqlType the previous SQL type.
 * @return if a conflict is detected, the collapsed SQL type; otherwise the
 *         original SQL type.
 */
static JdbcType handleArrayLevelConflict(
        final Map<String, DocumentDbSchemaColumn> columnMap,
        final int level,
        final JdbcType sqlType) {
    JdbcType resolvedType = sqlType;
    // Snapshot the key set first so removals do not disturb iteration.
    final List<String> columnNames = new ArrayList<>(columnMap.keySet());
    for (String columnName : columnNames) {
        final DocumentDbMetadataColumn column =
                (DocumentDbMetadataColumn) columnMap.get(columnName);
        final Integer indexLevel = column.getArrayIndexLevel();
        // Index columns deeper than the current level are now invalid.
        if (indexLevel != null && indexLevel > level) {
            columnMap.remove(columnName);
            // Collapsing an array level reverts the value type toward
            // VARCHAR/VARBINARY for the deeper array components.
            resolvedType = getPromotedSqlType(BsonType.STRING, resolvedType);
        }
    }
    return resolvedType;
}
/**
 * Gets the one-indexed primary key position for a column.
 *
 * @param isPrimaryKey an indicator of whether we are dealing with the primary key.
 * @return {@link #ID_PRIMARY_KEY_COLUMN} (the first key position) when this
 *         is the primary key; {@link #KEY_COLUMN_NONE} (zero, meaning "not
 *         part of the key") otherwise.
 */
static int getPrimaryKeyColumn(final boolean isPrimaryKey) {
    // Key positions are one-indexed; zero marks a non-key column.
    if (isPrimaryKey) {
        return ID_PRIMARY_KEY_COLUMN;
    }
    return KEY_COLUMN_NONE;
}
/**
 * Gets the column name for a field, depending on whether it is the primary key.
 *
 * @param path the path the field belongs to.
 * @param fieldName the name of the field in the path.
 * @param isPrimaryKey an indicator of whether this is the primary key.
 * @param columnNameMap a map of unique column names.
 * @return for the primary key, the field name qualified with its parent
 *         name (uniquified through the column-name map); otherwise the
 *         field name unchanged.
 */
static String getFieldNameIfIsPrimaryKey(
        final String path, final String fieldName,
        final boolean isPrimaryKey,
        final Map<String, String> columnNameMap) {
    if (!isPrimaryKey) {
        return fieldName;
    }
    // Primary key columns are qualified with the parent name to stay unique.
    return toName(combinePath(getParentName(path), fieldName), columnNameMap);
}
/**
 * Gets the virtual table name for a field, depending on whether this is the
 * primary key.
 *
 * @param fieldPath the path the field belongs to.
 * @param nextSqlType the next SQL type.
 * @param isPrimaryKey an indicator of whether this is the primary key.
 * @param collectionName the name of the collection.
 * @param tableNameMap a map of unique table names.
 * @return the name of the virtual table when the field is not the primary
 *         key and its type is ARRAY or JAVA_OBJECT; {@code null} otherwise.
 */
static String getVirtualTableNameIfIsPrimaryKey(
        final String fieldPath,
        final JdbcType nextSqlType,
        final boolean isPrimaryKey,
        final String collectionName,
        final Map<String, String> tableNameMap) {
    // Only non-key complex fields (documents/arrays) spawn virtual tables.
    final boolean spawnsVirtualTable = !isPrimaryKey
            && (nextSqlType == JdbcType.ARRAY || nextSqlType == JdbcType.JAVA_OBJECT);
    if (!spawnsVirtualTable) {
        return null;
    }
    return toName(combinePath(collectionName, fieldPath), tableNameMap);
}
/**
 * Gets the SQL type for a field, depending on whether this is the primary key.
 *
 * @param bsonType the underlying source data type.
 * @param prevSqlType the previous SQL type detected.
 * @param isPrimaryKey an indicator of whether this is the primary key.
 * @return VARCHAR when a document value serves as the primary key (it is
 *         stored as its string form); otherwise the normally promoted type.
 */
static JdbcType getSqlTypeIfIsPrimaryKey(
        final BsonType bsonType,
        final JdbcType prevSqlType,
        final boolean isPrimaryKey) {
    // A document-valued "_id" cannot become a virtual table; flatten to VARCHAR.
    if (isPrimaryKey && bsonType == BsonType.DOCUMENT) {
        return JdbcType.VARCHAR;
    }
    return getPromotedSqlType(bsonType, prevSqlType);
}
/**
* Gets the previous SQL data type.
*
* @param prevMetadataColumn the column to get the SQL type for. Can be null.
* @return the previous SQL data type if the column is not null, {@link JdbcType#NULL},
* otherwise.
*/
static JdbcType getPrevSqlTypeOrDefault(
final DocumentDbMetadataColumn prevMetadataColumn) {
return prevMetadataColumn != null
? prevMetadataColumn.getSqlType()
: JdbcType.NULL;
}
/**
* Gets whether the given SQL type is a complex type (ARRAY or JAVA_OBJECT).
*
* @param sqlType the SQL type to tests.
* @return {@code true} if a complex type, {@code false}, otherwise.
*/
static boolean isComplexType(final JdbcType sqlType) {
return sqlType == JdbcType.JAVA_OBJECT || sqlType == JdbcType.ARRAY;
}
/**
* Adds to the list of primary keys, if is a primary key.
*
* @param foreignKeys the list of foreign keys.
* @param isPrimaryKey an indicator of whether this is a primary key.
* @param metadataColumn the column to add.
*/
static void addToForeignKeysIfIsPrimary(
final List<DocumentDbMetadataColumn> foreignKeys,
final boolean isPrimaryKey,
final DocumentDbMetadataColumn metadataColumn) {
// Add the key to the foreign keys for child tables.
if (isPrimaryKey) {
foreignKeys.add(metadataColumn);
}
}
/**
* Gets the previous index for this column.
*
* @param prevMetadataColumn the previous column to use. Can be null.
* @param defaultValue the default index value to use if the column is null.
* @return the index of the column, if not null. Otherwise, the default value.
*/
static int getPrevIndexOrDefault(final DocumentDbMetadataColumn prevMetadataColumn,
final int defaultValue) {
return prevMetadataColumn != null
? prevMetadataColumn.getIndex()
: defaultValue;
}
    /**
     * Checks and ensures consistency of SQL type between the primary key of the base table and any
     * generated virtual tables.
     *
     * @param tableMap the map of tables.
     * @param path the path of the collection.
     * @param columnMap the column map of the base table.
     * @param columnNameMap the map of unique column names.
     */
    static void checkVirtualTablePrimaryKeys(
            final Map<String, DocumentDbSchemaTable> tableMap,
            final String path,
            final LinkedHashMap<String, DocumentDbSchemaColumn> columnMap,
            final Map<String, String> columnNameMap) {
        // Resolve the unique column name of the base table's "_id" primary key.
        final String primaryKeyColumnName = toName(combinePath(path, ID_FIELD_NAME), columnNameMap);
        // NOTE(review): assumes the base table's column map always contains the
        // primary key column; primaryKeyColumn would be null otherwise and the
        // loop below would throw an NPE — confirm this invariant holds.
        final DocumentDbMetadataColumn primaryKeyColumn = (DocumentDbMetadataColumn) columnMap
                .get(primaryKeyColumnName);
        // Align every virtual table's copy of the primary key with the base table's SQL type.
        for (DocumentDbSchemaTable table : tableMap.values()) {
            final DocumentDbMetadataColumn column = (DocumentDbMetadataColumn) table
                    .getColumnMap().get(primaryKeyColumnName);
            if (column != null && !column.getSqlType().equals(primaryKeyColumn.getSqlType())) {
                column.setSqlType(primaryKeyColumn.getSqlType());
            }
        }
    }
    /**
     * Converts the path to name swapping the period character for an underscore character. Unique
     * names of maximum length {@link org.apache.calcite.sql.parser.SqlParser#DEFAULT_IDENTIFIER_MAX_LENGTH}
     * are maintained in the uniqueNameMap parameter.
     *
     * @param path the path to convert.
     * @param uniqueNameMap the map of unique names, keyed by the original path.
     * @return a string with the period character swapped for an underscore character, of correct
     * maximum length and unique within the map of given paths.
     */
    @VisibleForTesting
    static String toName(final String path, final Map<String, String> uniqueNameMap) {
        // Delegate using the SQL parser's default maximum identifier length.
        return toName(path, uniqueNameMap, DEFAULT_IDENTIFIER_MAX_LENGTH);
    }
/**
* Converts the path to name swapping the period character for an underscore character. Unique
* names of maximum length given in identifierMaxLength parameter. are maintained in the
* uniqueNameMap parameter.
*
* @param path the path to convert.
* @param uniqueNameMap the map of unique names.
* @param identifierMaxLength the maximum length of identifier name.
* @return a string the period character swapped for an underscore character, of correct maximum
* length and unique within the map of given paths
*/
@VisibleForTesting
static String toName(
final String path,
final Map<String, String> uniqueNameMap,
final int identifierMaxLength) {
final String fullPathName = path.replaceAll("\\.", "_");
// If already mapped, return the mapped value.
if (uniqueNameMap.containsKey(path)) {
return uniqueNameMap.get(path);
}
// If not greater the maximum allowed length, return value.
if (path.length() <= identifierMaxLength) {
return fullPathName;
}
// Shorten the name and ensure uniqueness.
final StringBuilder shortenedName = new StringBuilder(fullPathName);
final List<MatchResult> matches = getSeparatorMatches(path);
if (matches.isEmpty()) {
// Only "base table"
shortenBaseName(
path,
uniqueNameMap,
identifierMaxLength,
shortenedName);
} else if (matches.get(0).start() < identifierMaxLength) {
// Base table shorter than max length - combine with trailing path.
shortenWithBaseNameLessThanMaxLength(
path,
uniqueNameMap,
identifierMaxLength,
shortenedName,
matches);
} else {
// Base table too long. Combine on trailing path.
shortenWithBaseNameLongerThanMaxLength(
path,
uniqueNameMap,
identifierMaxLength,
shortenedName,
matches);
}
return shortenedName.toString();
}
    /**
     * Shortens a name whose base (first) segment alone meets or exceeds the maximum identifier
     * length by keeping only a trailing portion of the path, then ensures uniqueness.
     *
     * @param path the original (dot-separated) path.
     * @param uniqueNameMap the map of unique names, keyed by the original path.
     * @param identifierMaxLength the maximum length of an identifier name.
     * @param shortenedName the working name buffer, modified in place.
     * @param matches the separator match positions for the path (non-empty).
     */
    private static void shortenWithBaseNameLongerThanMaxLength(
            final String path,
            final Map<String, String> uniqueNameMap,
            final int identifierMaxLength,
            final StringBuilder shortenedName,
            final List<MatchResult> matches) {
        // Walk the separators from the end, finding the longest run of whole
        // trailing segments that still fits within the maximum length.
        int lastMatchIndex = 0;
        for (int matchIndex = matches.size() - 1; matchIndex > 0; matchIndex--) {
            if ((path.length() - matches.get(matchIndex).start()) >= identifierMaxLength) {
                break;
            }
            lastMatchIndex = matchIndex;
        }
        if (lastMatchIndex > 0) {
            // Drop everything before the earliest separator whose suffix fits.
            shortenedName.delete(0, matches.get(lastMatchIndex).start());
        } else {
            // No whole segment fits: keep only the trailing identifierMaxLength characters.
            shortenedName.delete(0, shortenedName.length() - identifierMaxLength);
        }
        ensureUniqueName(uniqueNameMap, shortenedName, path);
    }
    /**
     * Shortens a name whose base (first) segment is shorter than the maximum identifier length by
     * removing middle segments, combining the base segment with a trailing portion of the path,
     * then ensures uniqueness.
     *
     * @param path the original (dot-separated) path.
     * @param uniqueNameMap the map of unique names, keyed by the original path.
     * @param identifierMaxLength the maximum length of an identifier name.
     * @param shortenedName the working name buffer, modified in place.
     * @param matches the separator match positions for the path (non-empty).
     */
    private static void shortenWithBaseNameLessThanMaxLength(
            final String path,
            final Map<String, String> uniqueNameMap,
            final int identifierMaxLength,
            final StringBuilder shortenedName,
            final List<MatchResult> matches) {
        // Walk the separators from the end, finding the longest run of whole
        // trailing segments that, together with the base segment, fits the limit.
        int lastMatchIndex = 0;
        for (int matchIndex = matches.size() - 1; matchIndex > 0; matchIndex--) {
            if ((path.length() - matches.get(matchIndex).start()) + matches.get(0).start()
                    >= identifierMaxLength) {
                break;
            }
            lastMatchIndex = matchIndex;
        }
        // Compute how many middle characters to remove after the base segment.
        final int deleteChars;
        if (lastMatchIndex > 0) {
            deleteChars = matches.get(lastMatchIndex).start() - matches.get(0).start();
        } else {
            // No whole trailing segment fits: remove just enough to reach the limit.
            deleteChars = path.length() - identifierMaxLength;
        }
        shortenedName.delete(matches.get(0).start(), matches.get(0).start() + deleteChars);
        ensureUniqueName(uniqueNameMap, shortenedName, path);
    }
    /**
     * Shortens a single-segment (base) name by truncating it to the maximum identifier length,
     * then ensures uniqueness.
     *
     * @param path the original path (no separators).
     * @param uniqueNameMap the map of unique names, keyed by the original path.
     * @param identifierMaxLength the maximum length of an identifier name.
     * @param shortenedName the working name buffer, modified in place.
     */
    private static void shortenBaseName(
            final String path,
            final Map<String, String> uniqueNameMap,
            final int identifierMaxLength,
            final StringBuilder shortenedName) {
        // Truncate to the limit, then disambiguate against existing names.
        shortenedName.delete(identifierMaxLength, shortenedName.length());
        ensureUniqueName(uniqueNameMap, shortenedName, path);
    }
private static void ensureUniqueName(
final Map<String, String> uniqueNameMap,
final StringBuilder shortenedName,
final String path) {
int counter = 0;
final StringBuilder tempName = new StringBuilder(shortenedName);
while (uniqueNameMap.values().stream().anyMatch(s -> tempName.toString().equals(s))) {
counter++;
final String counterString = String.valueOf(counter);
tempName.setLength(0);
tempName.append(
shortenedName.substring(0, shortenedName.length() - counterString.length()))
.append(counterString);
}
shortenedName.setLength(0);
shortenedName.append(tempName);
uniqueNameMap.put(path, shortenedName.toString());
}
private static List<MatchResult> getSeparatorMatches(final String path) {
final List<MatchResult> matches = new ArrayList<>();
final Pattern separatorPattern = Pattern.compile("\\.");
final Matcher separatorMatcher = separatorPattern.matcher(path);
separatorMatcher.reset();
while (separatorMatcher.find()) {
matches.add(separatorMatcher.toMatchResult());
}
return matches;
}
/**
* Gets the parent name (last node) in the path.
*
* @param path the path to read.
* @return the last node in the path.
*/
private static String getParentName(final String path) {
return path.substring(path.lastIndexOf('.') + 1);
}
}
| 4,627 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbJdbcMetaDataConverter.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.calcite.avatica.ColumnMetaData;
import org.apache.calcite.avatica.ColumnMetaData.Rep;
import software.amazon.documentdb.jdbc.common.utilities.JdbcColumnMetaData;
import software.amazon.documentdb.jdbc.common.utilities.JdbcType;
import java.util.List;
/**
 * Utility class that converts Calcite (Avatica) {@link ColumnMetaData} instances into the
 * driver's {@link JdbcColumnMetaData} representation.
 */
public class DocumentDbJdbcMetaDataConverter {
    // Mapping from driver JDBC types to Avatica value representations.
    // NOTE(review): this map is private and not referenced anywhere in this
    // class — confirm whether it is still needed or can be removed.
    private static final ImmutableMap<JdbcType, Rep> JDBC_TYPE_TO_REP;
    static {
        JDBC_TYPE_TO_REP = ImmutableMap.<JdbcType, Rep>builder()
                .put(JdbcType.BIGINT, Rep.PRIMITIVE_LONG)
                .put(JdbcType.BOOLEAN, Rep.PRIMITIVE_BOOLEAN)
                .put(JdbcType.DECIMAL, Rep.NUMBER)
                .put(JdbcType.DOUBLE, Rep.PRIMITIVE_DOUBLE)
                .put(JdbcType.INTEGER, Rep.PRIMITIVE_INT)
                .put(JdbcType.NULL, Rep.STRING)
                .put(JdbcType.TIMESTAMP, Rep.JAVA_SQL_TIMESTAMP)
                .put(JdbcType.VARCHAR, Rep.STRING)
                .put(JdbcType.VARBINARY, Rep.BYTE_STRING)
                .build();
    }
    private DocumentDbJdbcMetaDataConverter() {
        // Utility class with only static members: prevent instantiation.
    }
    /**
     * Converts from list of {@link ColumnMetaData} to list of {@link JdbcColumnMetaData}.
     *
     * @param columnMetaData the list of column metadata.
     * @return an immutable list of {@link JdbcColumnMetaData}, in the same order.
     */
    public static List<JdbcColumnMetaData> fromCalciteColumnMetaData(final List<ColumnMetaData> columnMetaData) {
        final ImmutableList.Builder<JdbcColumnMetaData> builder = ImmutableList.builder();
        for (ColumnMetaData columnMetaDataItem : columnMetaData) {
            builder.add(fromCalciteColumnMetaData(columnMetaDataItem));
        }
        return builder.build();
    }
    /**
     * Converts a {@link ColumnMetaData} to a {@link JdbcColumnMetaData} by copying each field
     * across one-to-one.
     *
     * @param columnMetaData the column metadata.
     * @return a {@link JdbcColumnMetaData} instance.
     */
    public static JdbcColumnMetaData fromCalciteColumnMetaData(final ColumnMetaData columnMetaData) {
        return new JdbcColumnMetaData(
                columnMetaData.ordinal,
                columnMetaData.autoIncrement,
                columnMetaData.caseSensitive,
                columnMetaData.searchable,
                columnMetaData.currency,
                columnMetaData.nullable,
                columnMetaData.signed,
                columnMetaData.displaySize,
                columnMetaData.label,
                columnMetaData.columnName,
                columnMetaData.schemaName,
                columnMetaData.precision,
                columnMetaData.scale,
                columnMetaData.tableName,
                columnMetaData.catalogName,
                columnMetaData.type.id,
                columnMetaData.type.name,
                columnMetaData.readOnly,
                columnMetaData.writable,
                columnMetaData.definitelyWritable,
                columnMetaData.columnClassName);
    }
}
| 4,628 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbDatabaseSchemaMetadata.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.mongodb.client.MongoClient;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.persist.DocumentDbSchemaSecurityException;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
 * Contains the metadata for a DocumentDB database including all of the collection and any
 * virtual tables. All operations delegate to {@link DocumentDbMetadataService} and wrap the
 * resulting {@link DocumentDbSchema}.
 */
public final class DocumentDbDatabaseSchemaMetadata {
    // Version sentinel: retrieve the latest schema, or create a new one if none exists.
    public static final int VERSION_LATEST_OR_NEW = 0;
    // Version sentinel: presumably forces creation of a new schema version —
    // TODO confirm against DocumentDbMetadataService.
    public static final int VERSION_NEW = -1;
    // Version sentinel: presumably retrieves the latest schema or none if absent —
    // TODO confirm against DocumentDbMetadataService.
    public static final int VERSION_LATEST_OR_NONE = -2;
    // The wrapped schema; all instance accessors delegate to it.
    private final DocumentDbSchema schema;
    /**
     * Gets the schema name for this database metadata.
     *
     * @return a String representing the schema name.
     */
    public String getSchemaName() {
        return schema.getSchemaName();
    }
    /**
     * Gets the version of this database metadata.
     *
     * @return a number representing the version of the database metadata.
     */
    public int getSchemaVersion() {
        return schema.getSchemaVersion();
    }
    /**
     * Gets the map of table schema, keyed by table name.
     *
     * @return the map of {@link DocumentDbSchemaTable} for this schema.
     */
    public Map<String, DocumentDbSchemaTable> getTableSchemaMap() {
        return schema.getTableMap();
    }
    /**
     * Constructs a {@link DocumentDbDatabaseSchemaMetadata} instance from properties.
     *
     * @param schema the database schema.
     */
    protected DocumentDbDatabaseSchemaMetadata(
            final DocumentDbSchema schema) {
        this.schema = schema;
    }
    /**
     * Gets the latest or a new {@link DocumentDbDatabaseSchemaMetadata} instance based on the
     * schemaName and properties. It uses a value of {@link DocumentDbDatabaseSchemaMetadata#VERSION_LATEST_OR_NEW}
     * for the version to indicate to get the latest or create a new instance if none exists.
     *
     * @param properties the connection properties.
     * @param schemaName the schema name.
     * @param client the {@link MongoClient} client.
     * @return a {@link DocumentDbDatabaseSchemaMetadata} instance.
     */
    public static DocumentDbDatabaseSchemaMetadata get(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final MongoClient client)
            throws SQLException {
        return get(properties, schemaName, VERSION_LATEST_OR_NEW, client);
    }
    /**
     * Gets an existing {@link DocumentDbDatabaseSchemaMetadata} instance based on the schema name
     * and version.
     *
     * @param properties the properties of the connection.
     * @param schemaName the name of the schema.
     * @param schemaVersion the version of the schema. A version number of
     * {@link DocumentDbDatabaseSchemaMetadata#VERSION_LATEST_OR_NEW} indicates to get the latest
     * or create a new instance.
     * @param client the {@link MongoClient} client.
     * @return a {@link DocumentDbDatabaseSchemaMetadata} instance if the schema and version exist,
     * null otherwise.
     */
    public static DocumentDbDatabaseSchemaMetadata get(
            final DocumentDbConnectionProperties properties, final String schemaName,
            final int schemaVersion,
            final MongoClient client) throws SQLException {
        // Try to get it from the service.
        final DocumentDbDatabaseSchemaMetadata databaseMetadata;
        final DocumentDbSchema schema = DocumentDbMetadataService
                .get(properties, schemaName, schemaVersion, client);
        if (schema != null) {
            // Setup lazy load based on table ID, so table schema is only
            // fetched from the service when first accessed.
            setSchemaGetTableFunction(properties, schemaName, schemaVersion, schema, client);
            databaseMetadata = new DocumentDbDatabaseSchemaMetadata(schema);
        } else {
            databaseMetadata = null;
        }
        return databaseMetadata;
    }
    /**
     * Removes all versions of the schema for the given schema name.
     *
     * @param properties the connection properties.
     * @param schemaName the name of the schema.
     * @param client the {@link MongoClient} client.
     *
     * @throws SQLException if invalid connection properties.
     */
    public static void remove(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final MongoClient client) throws SQLException {
        DocumentDbMetadataService.remove(properties, schemaName, client);
    }
    /**
     * Removes the specific schema for the given schema name and version.
     *
     * @param properties the connection properties.
     * @param schemaName the name of the schema.
     * @param schemaVersion the version of the schema.
     * @param client the {@link MongoClient} client.
     *
     * @throws SQLException if invalid connection properties.
     */
    public static void remove(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final int schemaVersion,
            final MongoClient client) throws SQLException {
        DocumentDbMetadataService.remove(properties, schemaName, schemaVersion, client);
    }
    /**
     * Gets the list of all persisted schema.
     *
     * @param properties the connection properties.
     * @param client the {@link MongoClient} client.
     * @return a list of {@link DocumentDbSchema} schema.
     * @throws SQLException if unable to connect.
     */
    public static List<DocumentDbSchema> getSchemaList(
            final DocumentDbConnectionProperties properties,
            final MongoClient client) throws SQLException {
        final List<DocumentDbSchema> schemas = DocumentDbMetadataService
                .getSchemaList(properties, client);
        // Each returned schema also gets the lazy table-load function attached.
        schemas.forEach(schema -> setSchemaGetTableFunction(
                properties, schema.getSchemaName(), schema.getSchemaVersion(), schema, client));
        return schemas;
    }
    /**
     * Updates schema with the given table schema.
     *
     * @param properties the connection properties.
     * @param schemaName the name of the schema.
     * @param schemaTables the collection of updated table schema.
     * @param client the {@link MongoClient} client.
     *
     * @throws SQLException if unable to connect or other exception.
     * @throws DocumentDbSchemaSecurityException if unable to write to the database due to
     * unauthorized user.
     */
    public static void update(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final Collection<DocumentDbSchemaTable> schemaTables,
            final MongoClient client)
            throws SQLException, DocumentDbSchemaSecurityException {
        DocumentDbMetadataService.update(properties, schemaName, schemaTables, client);
    }
    // Wires the schema's lazy-load callbacks: one fetches a single table by ID,
    // the other fetches all remaining tables in one service call.
    private static void setSchemaGetTableFunction(
            final DocumentDbConnectionProperties properties,
            final String schemaName,
            final int schemaVersion,
            final DocumentDbSchema schema,
            final MongoClient client) {
        schema.setGetTableFunction(
                tableId -> DocumentDbMetadataService
                        .getTable(properties, schemaName, schemaVersion, tableId, client),
                remainingTableIds -> DocumentDbMetadataService
                        .getTables(properties, schemaName, schemaVersion, remainingTableIds, client));
    }
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DocumentDbDatabaseSchemaMetadata)) {
            return false;
        }
        final DocumentDbDatabaseSchemaMetadata metadata = (DocumentDbDatabaseSchemaMetadata) o;
        // Equality is defined entirely by the wrapped schema.
        return schema.equals(metadata.schema);
    }
    @Override
    public int hashCode() {
        return Objects.hash(schema);
    }
}
| 4,629 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbMetadataScanner.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.google.common.annotations.VisibleForTesting;
import com.mongodb.client.MongoCollection;
import org.bson.BsonDocument;
import org.bson.BsonInt32;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.DocumentDbConnectionProperties;
import software.amazon.documentdb.jdbc.DocumentDbMetadataScanMethod;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import software.amazon.documentdb.jdbc.common.utilities.SqlState;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Provides a way to scan metadata in DocumentDB collections
*/
/**
 * Provides a way to scan metadata in DocumentDB collections.
 */
public class DocumentDbMetadataScanner {
    private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbMetadataScanner.class);
    // Sort key used by the ID_FORWARD/ID_REVERSE scan methods.
    private static final String ID = "_id";
    private static final BsonInt32 FORWARD = new BsonInt32(1);
    private static final BsonInt32 REVERSE = new BsonInt32(-1);
    // Aggregation stage used by the RANDOM scan method.
    private static final String RANDOM = "$sample";
    private DocumentDbMetadataScanner() {
        // Utility class with only static members: prevent instantiation.
    }
    /**
     * Gets an iterator for the requested scan type.
     *
     * @param properties the connection properties including scan type and limit.
     * @param collection the {@link MongoCollection} to scan.
     * @return an {@link Iterator} for the documents.
     * @throws SQLException if unsupported scan type provided.
     */
    @VisibleForTesting
    public static Iterator<BsonDocument> getIterator(
            final DocumentDbConnectionProperties properties,
            final MongoCollection<BsonDocument> collection) throws SQLException {
        final int scanLimit = properties.getMetadataScanLimit();
        final DocumentDbMetadataScanMethod method = properties.getMetadataScanMethod();
        switch (method) {
            case ALL:
                // Full collection scan; the limit does not apply.
                return collection.find().cursor();
            case ID_FORWARD:
                return collection.find().sort(new BsonDocument(ID, FORWARD)).limit(scanLimit).cursor();
            case ID_REVERSE:
                return collection.find().sort(new BsonDocument(ID, REVERSE)).limit(scanLimit).cursor();
            case RANDOM:
                // Use the $sample aggregation stage to pick scanLimit random documents.
                final List<BsonDocument> aggregations = new ArrayList<>();
                aggregations.add(new BsonDocument(RANDOM, new BsonDocument("size", new BsonInt32(scanLimit))));
                return collection.aggregate(aggregations).cursor();
            default:
                // Unrecognized scan method: surface as a connection failure.
                throw SqlError.createSQLException(
                        LOGGER,
                        SqlState.CONNECTION_FAILURE,
                        SqlError.UNSUPPORTED_PROPERTY,
                        method.getName()
                );
        }
    }
}
| 4,630 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbSchemaException.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
/**
 * Checked exception raised for DocumentDB JDBC driver schema errors (e.g. malformed schema
 * identifiers or failed schema operations).
 */
public class DocumentDbSchemaException extends Exception {
    // Explicit serial version UID (Exception is Serializable).
    // NOTE(review): adding this changes the UID from the previously computed
    // default — confirm no serialized instances of the old form exist.
    private static final long serialVersionUID = 1L;
    /**
     * Creates a {@link DocumentDbSchemaException} with a generic detail message.
     */
    public DocumentDbSchemaException() {
        super("DocumentDB JDBC Driver schema exception.");
    }
    /**
     * Constructs a new {@link DocumentDbSchemaException} with the specified detail message.
     * @param message the detail message (which is saved for later retrieval by the getMessage() method).
     */
    public DocumentDbSchemaException(final String message) {
        super(message);
    }
    /**
     * Constructs a new {@link DocumentDbSchemaException} with the specified detail message and cause.
     * Note that the detail message associated with cause is not automatically incorporated in
     * this exception's detail message.
     *
     * @param message the detail message (which is saved for later retrieval by the getMessage() method).
     * @param cause the cause (which is saved for later retrieval by the getCause() method). (A null value is permitted, and indicates that the cause is nonexistent or unknown.)
     */
    public DocumentDbSchemaException(final String message, final Throwable cause) {
        super(message, cause);
    }
}
| 4,631 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbSchema.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.json.JsonMapper;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.NonNull;
import lombok.Setter;
import lombok.SneakyThrows;
import org.bson.codecs.pojo.annotations.BsonCreator;
import org.bson.codecs.pojo.annotations.BsonIgnore;
import org.bson.codecs.pojo.annotations.BsonProperty;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.documentdb.jdbc.common.utilities.LazyLinkedHashMap;
import software.amazon.documentdb.jdbc.common.utilities.SqlError;
import java.time.Instant;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
@Getter
@JsonSerialize(as = DocumentDbSchema.class)
public class DocumentDbSchema {
public static final String SCHEMA_NAME_PROPERTY = "schemaName";
public static final String SCHEMA_VERSION_PROPERTY = "schemaVersion";
public static final String SQL_NAME_PROPERTY = "sqlName";
public static final String ID_PROPERTY = "_id";
public static final String MODIFY_DATE_PROPERTY = "modifyDate";
public static final String TABLES_PROPERTY = "tables";
public static final String SCHEMA_TABLE_ID_SEPARATOR = "::";
private static final Logger LOGGER = LoggerFactory.getLogger(DocumentDbSchema.class);
private static final ObjectMapper JSON_OBJECT_MAPPER = JsonMapper.builder()
.serializationInclusion(Include.NON_NULL)
.serializationInclusion(Include.NON_EMPTY)
.enable(SerializationFeature.INDENT_OUTPUT)
.build();
private static final String EMPTY_STRING = "";
public static final String DEFAULT_SCHEMA_NAME = "_default";
/**
* The name of the schema.
*/
@NonNull
@Setter
@BsonProperty(SCHEMA_NAME_PROPERTY)
private String schemaName;
/**
* The version number of this metadata.
*/
@Setter
@BsonProperty(SCHEMA_VERSION_PROPERTY)
private int schemaVersion;
/**
* The name of the database, same as the DocumentDB database by default.
*/
@NonNull
@BsonProperty(SQL_NAME_PROPERTY)
private final String sqlName;
/**
* The time this metadata was created or updated.
*/
@NonNull
@BsonProperty(MODIFY_DATE_PROPERTY)
private final Date modifyDate;
/**
* The map of schema tables.
*/
@Getter(AccessLevel.NONE)
@BsonIgnore
@JsonIgnore
private Map<String, DocumentDbSchemaTable> tables;
@BsonIgnore
@JsonIgnore
public Map<String, DocumentDbSchemaTable> getTableMap() {
return tables;
}
/**
* The list of table references.
*/
@BsonProperty(TABLES_PROPERTY)
@JsonProperty(TABLES_PROPERTY)
private final Set<String> tableReferences;
/**
* Sets the lazy load function for table schema retrieval.
*
* @param getTableFunction the function to retrieve table schema using the table ID as
* the input parameter to the lambda function.
* @throws IllegalStateException if the function is already set or the #tables collection
* is already set.
*/
@BsonIgnore
@JsonIgnore
public void setGetTableFunction(
@NonNull final Function<String, DocumentDbSchemaTable> getTableFunction,
@NonNull final Function<Set<String>, Map<String, DocumentDbSchemaTable>> getRemainingTablesFunction)
throws IllegalStateException {
if (this.tables != null || this.tableReferences == null) {
throw new IllegalStateException(
SqlError.lookup(SqlError.INVALID_STATE_SET_TABLE_FUNCTION));
}
final Map<String, String> tableIdByTableName = this.tableReferences.stream()
.collect(Collectors.toMap(
DocumentDbSchema::parseSqlTableName,
tableId -> tableId,
(a, b) -> b,
LinkedHashMap::new));
this.tables = new LazyLinkedHashMap<>(
new LinkedHashSet<>(tableIdByTableName.keySet()),
tableName -> getTableFunction
.apply(tableIdByTableName.get(tableName)),
remainingTableNames -> getRemainingTablesFunction
.apply(tableIdByTableName.keySet().stream()
.filter(remainingTableNames::contains)
.map(tableIdByTableName::get)
.collect(Collectors.toCollection(LinkedHashSet::new))));
}
/**
* Parses the SQL table name from the given table ID.
*
* @param tableId the table ID to parse.
*
* @return a SQL table name.
*/
public static String parseSqlTableName(final String tableId) {
return parseTableNameAndUuid(tableId)[0];
}
@SneakyThrows
private static String[] parseTableNameAndUuid(final String tableId) {
final String[] tableNameAndUuid = tableId.split("[:][:]");
if (tableNameAndUuid.length != 2) {
throw new DocumentDbSchemaException(
SqlError.lookup(SqlError.INVALID_FORMAT,
tableId, "<tableName>::<tableId>"));
}
return tableNameAndUuid;
}
/**
* All args constructor for collection metadata.
* @param sqlName Name of the collection.
* @param schemaVersion Version of this metadata.
*/
public DocumentDbSchema(
final String sqlName,
final int schemaVersion,
final Map<String, DocumentDbSchemaTable> tables) {
this(DEFAULT_SCHEMA_NAME, sqlName, schemaVersion, tables);
}
/**
* All args constructor for collection metadata.
* @param schemaName the name of the schema.
* @param sqlName the name of SQL table
* @param schemaVersion Version of this metadata.
*/
public DocumentDbSchema(
final String schemaName,
final String sqlName,
final int schemaVersion,
final Map<String, DocumentDbSchemaTable> tables) {
this.schemaName = schemaName;
this.sqlName = sqlName;
this.schemaVersion = schemaVersion;
this.modifyDate = new Date(Instant.now().toEpochMilli());
this.tableReferences = tables.values().stream()
.map(DocumentDbSchemaTable::getId)
.collect(Collectors.toSet());
this.tables = tables;
}
/**
* Creates in instance of {@link DocumentDbSchema}. Used for reading/writing
* to the persistent storage.
* @param schemaName the name of the schema.
* @param schemaVersion the version of the schema.
* @param sqlName the name of the database or collection.
* @param modifyDate the last modified date of the schema.
*/
@BsonCreator
@JsonCreator
public DocumentDbSchema(
@JsonProperty(SCHEMA_NAME_PROPERTY) @BsonProperty(SCHEMA_NAME_PROPERTY) final String schemaName,
@JsonProperty(SCHEMA_VERSION_PROPERTY) @BsonProperty(SCHEMA_VERSION_PROPERTY) final int schemaVersion,
@JsonProperty(SQL_NAME_PROPERTY) @BsonProperty(SQL_NAME_PROPERTY) final String sqlName,
@JsonProperty(MODIFY_DATE_PROPERTY) @BsonProperty(MODIFY_DATE_PROPERTY) final Date modifyDate,
@JsonProperty(TABLES_PROPERTY) @BsonProperty(TABLES_PROPERTY) final Set<String> tableReferences) {
this.schemaName = schemaName;
this.sqlName = sqlName;
this.schemaVersion = schemaVersion;
this.modifyDate = new Date(modifyDate.getTime());
// TODO: Use this to setup the LazyLinkedHashMap for the tables map.
this.tableReferences = tableReferences != null ? tableReferences : new LinkedHashSet<>();
}
    /**
     * Creates and returns the time the metadata was created or updated.
     *
     * @return a copy of the Date of modification.
     */
    public Date getModifyDate() {
        // Defensive copy: java.util.Date is mutable, so never hand out the field itself.
        return new Date(modifyDate.getTime());
    }
    /**
     * Value equality based on schema name, SQL name, schema version,
     * modification date, and the set of table references. The in-memory
     * {@code tables} map is intentionally not part of the comparison here.
     *
     * <p>NOTE(review): schemaName, sqlName and modifyDate are dereferenced
     * directly, so a null field on this instance throws a
     * NullPointerException — presumably the constructors always set them;
     * verify.</p>
     */
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DocumentDbSchema)) {
            return false;
        }
        final DocumentDbSchema that = (DocumentDbSchema) o;
        return schemaVersion == that.schemaVersion
                && schemaName.equals(that.schemaName)
                && sqlName.equals(that.sqlName)
                && modifyDate.equals(that.modifyDate)
                && Objects.equals(tableReferences, that.tableReferences);
    }
    /**
     * Hash code consistent with {@link #equals(Object)}: combines the same
     * fields used in the equality comparison.
     */
    @Override
    public int hashCode() {
        return Objects.hash(schemaName, schemaVersion, sqlName, modifyDate, tableReferences);
    }
    /**
     * Serializes this schema as a JSON string for display and logging.
     *
     * @return the JSON representation, or an empty string if serialization
     *         fails (the failure is logged rather than propagated).
     */
    @Override
    public String toString() {
        try {
            return JSON_OBJECT_MAPPER.writeValueAsString(this);
        } catch (JsonProcessingException e) {
            LOGGER.error("Error converting object to JSON.", e);
        }
        return EMPTY_STRING;
    }
}
| 4,632 |
0 | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc | Create_ds/amazon-documentdb-jdbc-driver/src/main/java/software/amazon/documentdb/jdbc/metadata/DocumentDbSchemaTable.java | /*
* Copyright <2021> Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package software.amazon.documentdb.jdbc.metadata;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import lombok.Getter;
import lombok.NonNull;
import lombok.Setter;
import org.bson.codecs.pojo.annotations.BsonCreator;
import org.bson.codecs.pojo.annotations.BsonId;
import org.bson.codecs.pojo.annotations.BsonIgnore;
import org.bson.codecs.pojo.annotations.BsonProperty;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import java.util.stream.Collectors;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.ID_PROPERTY;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.MODIFY_DATE_PROPERTY;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SCHEMA_TABLE_ID_SEPARATOR;
import static software.amazon.documentdb.jdbc.metadata.DocumentDbSchema.SQL_NAME_PROPERTY;
@Getter
@JsonSerialize(as = DocumentDbSchemaTable.class)
public class DocumentDbSchemaTable {
    public static final String UUID_PROPERTY = "uuid";
    public static final String COLLECTION_NAME_PROPERTY = "collectionName";
    public static final String COLUMNS_PROPERTY = "columns";
    // Sentinel value for estimatedRecordCount when no estimate has been set.
    public static final int UNKNOWN_RECORD_COUNT = -1;
    /**
     * The unique ID for the table schema.
     *
     * @return a {@link String} representing the combination of {@link #getSqlName()} and
     * {@link #getUuid}.
     */
    @BsonId
    @JsonProperty(ID_PROPERTY)
    public String getId() {
        return getSchemaId(sqlName, uuid);
    }
    // Joins the SQL name and UUID with the separator defined in DocumentDbSchema.
    private static String getSchemaId(final String sqlName, final String uuid) {
        return sqlName + SCHEMA_TABLE_ID_SEPARATOR + uuid;
    }
    /**
     * The schema's unique ID.
     */
    @Setter
    private String uuid;
    /**
     * The display name of the table.
     */
    @Setter
    @NonNull
    private String sqlName;
    /**
     * The name of the DocumentDB collection.
     */
    @NonNull
    private final String collectionName;
    /**
     * The time the metadata was created or updated.
     */
    private final Date modifyDate;
    /**
     * The list of columns in the table.
     */
    // Lookup-by-SQL-name view of the columns; excluded from (de)serialization
    // because the columns list below is the persisted representation.
    @NonNull
    @BsonIgnore
    @JsonIgnore
    private final ImmutableMap<String, DocumentDbSchemaColumn> columnMap;
    // Persisted, ordered column list (the serialized form of columnMap).
    @JsonProperty(COLUMNS_PROPERTY)
    @BsonProperty(COLUMNS_PROPERTY)
    private final List<DocumentDbSchemaColumn> columns;
    // In-memory only estimate of the collection's record count; defaults to
    // UNKNOWN_RECORD_COUNT and is excluded from (de)serialization.
    @Setter
    @BsonIgnore
    @JsonIgnore
    private long estimatedRecordCount = UNKNOWN_RECORD_COUNT;
    /**
     * Creates an instance from deserializing a document.
     *
     * <p>NOTE(review): the {@code id} parameter is accepted for
     * (de)serialization binding but is not stored; {@link #getId()} recomputes
     * the ID from {@code sqlName} and {@code uuid}.</p>
     *
     * @param id the persisted schema table ID (not stored; see note above).
     * @param uuid the version of the table schema; a random UUID is generated
     *         when null or empty.
     * @param modifyDate the last modified date of the schema; must not be null.
     * @param sqlName the SQL name of the table.
     * @param collectionName the reference collection.
     * @param columns the list of columns in the schema.
     */
    @BsonCreator
    @JsonCreator
    public DocumentDbSchemaTable(
            @JsonProperty(ID_PROPERTY) @BsonId
            final String id,
            @JsonProperty(UUID_PROPERTY) @BsonProperty(UUID_PROPERTY)
            final String uuid,
            @JsonProperty(MODIFY_DATE_PROPERTY) @BsonProperty(MODIFY_DATE_PROPERTY)
            final Date modifyDate,
            @JsonProperty(SQL_NAME_PROPERTY) @BsonProperty(SQL_NAME_PROPERTY)
            final String sqlName,
            @JsonProperty(COLLECTION_NAME_PROPERTY) @BsonProperty(COLLECTION_NAME_PROPERTY)
            final String collectionName,
            @JsonProperty(COLUMNS_PROPERTY) @BsonProperty(COLUMNS_PROPERTY)
            final List<DocumentDbSchemaColumn> columns) {
        this.uuid = !Strings.isNullOrEmpty(uuid) ? uuid : UUID.randomUUID().toString();
        // Defensive copy: java.util.Date is mutable.
        this.modifyDate = new Date(modifyDate.getTime());
        this.sqlName = sqlName;
        this.collectionName = collectionName;
        this.columns = columns;
        // Index columns by SQL name, keeping the FIRST column on duplicate
        // names and preserving insertion order.
        final LinkedHashMap<String, DocumentDbSchemaColumn> map = columns.stream()
                .collect(Collectors.toMap(
                        DocumentDbSchemaColumn::getSqlName,
                        documentDbSchemaColumn -> documentDbSchemaColumn,
                        (original, duplicate) -> original, // Ignore duplicates
                        LinkedHashMap::new));
        this.columnMap = ImmutableMap.copyOf(map);
    }
    /**
     * Basic all argument constructor for table.
     *
     * @param sqlName The name of the table.
     * @param collectionName The DocumentDB collection name.
     * @param columnMap The columns contained in the table indexed by name. Uses LinkedHashMap to preserve order.
     */
    public DocumentDbSchemaTable(final String sqlName,
            final String collectionName,
            final Map<String, DocumentDbSchemaColumn> columnMap) {
        // A fresh schema version gets a new UUID and "now" as its modify time.
        this.uuid = UUID.randomUUID().toString();
        this.modifyDate = new Date(Instant.now().toEpochMilli());
        this.sqlName = sqlName;
        this.collectionName = collectionName;
        this.columns = new ArrayList<>(columnMap.values());
        this.columnMap = ImmutableMap.copyOf(columnMap);
    }
    /**
     * Returns the time the metadata was created or updated.
     *
     * @return a defensive copy of the modification date.
     */
    public Date getModifyDate() {
        return new Date(modifyDate.getTime());
    }
    /**
     * Value equality based on UUID, SQL name, collection name, modification
     * date and the column collections.
     */
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DocumentDbSchemaTable)) {
            return false;
        }
        final DocumentDbSchemaTable that = (DocumentDbSchemaTable) o;
        return Objects.equals(uuid, that.uuid)
                && sqlName.equals(that.sqlName)
                && collectionName.equals(that.collectionName)
                && Objects.equals(modifyDate, that.modifyDate)
                && columnMap.equals(that.columnMap)
                && Objects.equals(columns, that.columns);
    }
    /**
     * Hash code consistent with {@link #equals(Object)}.
     */
    @Override
    public int hashCode() {
        return Objects.hash(uuid, sqlName, collectionName, modifyDate, columnMap, columns);
    }
}
| 4,633 |
0 | Create_ds/asgard/src/java/com | Create_ds/asgard/src/java/com/onelogin/AppSettings.java | package com.onelogin;
/**
 * Holds the service-provider (application) side SAML settings: the URL the
 * identity provider posts assertions back to, and the issuer string that
 * identifies this application.
 */
public class AppSettings {

    private String acsUrl;
    private String samlIssuer;

    /** @return the URL that receives the SAML assertion (AssertionConsumerService). */
    public String getAssertionConsumerServiceUrl() {
        return acsUrl;
    }

    /** @param assertionConsumerServiceUrl the URL that receives the SAML assertion. */
    public void setAssertionConsumerServiceUrl(String assertionConsumerServiceUrl) {
        acsUrl = assertionConsumerServiceUrl;
    }

    /** @return the issuer string identifying this application. */
    public String getIssuer() {
        return samlIssuer;
    }

    /** @param issuer the issuer string identifying this application. */
    public void setIssuer(String issuer) {
        samlIssuer = issuer;
    }
}
| 4,634 |
0 | Create_ds/asgard/src/java/com | Create_ds/asgard/src/java/com/onelogin/AccountSettings.java | package com.onelogin;
/**
 * Holds the identity-provider (account) side SAML settings: the IdP's
 * base64-encoded x509 certificate and its single-sign-on target URL.
 */
public class AccountSettings {

    private String idpCertificate;
    private String idpSsoTargetUrl;

    /** @return the base64-encoded IdP certificate. */
    public String getCertificate() {
        return idpCertificate;
    }

    /** @param certificate the base64-encoded IdP certificate. */
    public void setCertificate(String certificate) {
        idpCertificate = certificate;
    }

    /** @return the IdP single-sign-on target URL. */
    public String getIdp_sso_target_url() {
        return idpSsoTargetUrl;
    }

    /** @param idp_sso_target_url the IdP single-sign-on target URL. */
    public void setIdpSsoTargetUrl(String idp_sso_target_url) {
        idpSsoTargetUrl = idp_sso_target_url;
    }
}
| 4,635 |
0 | Create_ds/asgard/src/java/com/onelogin | Create_ds/asgard/src/java/com/onelogin/saml/Response.java | package com.onelogin.saml;
import com.onelogin.AccountSettings;
import org.apache.commons.codec.binary.Base64;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.xml.crypto.dsig.XMLSignature;
import javax.xml.crypto.dsig.XMLSignatureFactory;
import javax.xml.crypto.dsig.dom.DOMValidateContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
/**
 * Parses a SAML response received from the identity provider and validates
 * its XML digital signature against the configured IdP certificate.
 */
public class Response {

    private Document xmlDoc;
    private AccountSettings accountSettings;
    private Certificate certificate;

    /**
     * Creates a response validator for the given account settings.
     *
     * @param accountSettings the IdP settings, including its base64 certificate
     * @throws CertificateException if the configured certificate cannot be parsed
     */
    public Response(AccountSettings accountSettings) throws CertificateException {
        this.accountSettings = accountSettings;
        certificate = new Certificate();
        certificate.loadCertificate(this.accountSettings.getCertificate());
    }

    /**
     * Parses the given XML text into a DOM document.
     *
     * <p>The SAML response is untrusted input arriving over HTTP, so the
     * parser is hardened against XML External Entity (XXE) attacks: DOCTYPE
     * declarations and external entity resolution are disabled.</p>
     *
     * @param xml the SAML response XML
     * @throws ParserConfigurationException if the parser cannot be configured
     * @throws SAXException if the XML is malformed (or contains a forbidden DOCTYPE)
     * @throws IOException if reading the XML fails
     */
    public void loadXml(String xml) throws ParserConfigurationException, SAXException, IOException {
        DocumentBuilderFactory fty = DocumentBuilderFactory.newInstance();
        fty.setNamespaceAware(true);
        // Security fix: forbid DOCTYPEs and external entities so a crafted
        // response cannot read local files or reach internal hosts (XXE).
        fty.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
        fty.setFeature("http://xml.org/sax/features/external-general-entities", false);
        fty.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        fty.setXIncludeAware(false);
        fty.setExpandEntityReferences(false);
        DocumentBuilder builder = fty.newDocumentBuilder();
        // Decode explicitly as UTF-8 instead of the platform default charset.
        ByteArrayInputStream bais =
                new ByteArrayInputStream(xml.getBytes(java.nio.charset.Charset.forName("UTF-8")));
        xmlDoc = builder.parse(bais);
    }

    /**
     * Base64-decodes the raw SAMLResponse form parameter and parses it.
     *
     * @param response the base64-encoded SAML response
     * @throws ParserConfigurationException if the parser cannot be configured
     * @throws SAXException if the decoded XML is malformed
     * @throws IOException if reading the XML fails
     */
    public void loadXmlFromBase64(String response) throws ParserConfigurationException, SAXException, IOException {
        Base64 base64 = new Base64();
        byte [] decodedB = base64.decode(response);
        // Interpret the decoded bytes as UTF-8, consistent with loadXml.
        String decodedS = new String(decodedB, java.nio.charset.Charset.forName("UTF-8"));
        loadXml(decodedS);
    }

    /**
     * Validates the XML digital signature in the document against the public
     * key of the configured IdP certificate.
     *
     * <p>NOTE(review): this only checks that a Signature element in the
     * document validates; it does not verify which element the signature
     * covers, so callers should be aware of signature-wrapping concerns.</p>
     *
     * @return true if the signature validates against the certificate
     * @throws Exception if no signature element is present or validation cannot run
     */
    public boolean isValid() throws Exception {
        NodeList nodes = xmlDoc.getElementsByTagNameNS(XMLSignature.XMLNS, "Signature");
        if (nodes == null || nodes.getLength() == 0) {
            throw new Exception("Can't find signature in document.");
        }
        // Mark ID attributes so the signature's reference URIs can resolve them.
        if (setIdAttributeExists()) {
            tagIdAttributes(xmlDoc);
        }
        X509Certificate cert = certificate.getX509Cert();
        DOMValidateContext ctx = new DOMValidateContext(cert.getPublicKey(), nodes.item(0));
        XMLSignatureFactory sigF = XMLSignatureFactory.getInstance("DOM");
        XMLSignature xmlSignature = sigF.unmarshalXMLSignature(ctx);
        return xmlSignature.validate(ctx);
    }

    /**
     * Extracts the subject NameID (the authenticated user identifier) from the
     * parsed assertion.
     *
     * @return the text content of the first NameID element
     * @throws Exception if the document contains no NameID element
     */
    public String getNameId() throws Exception {
        NodeList nodes = xmlDoc.getElementsByTagNameNS("urn:oasis:names:tc:SAML:2.0:assertion", "NameID");
        if(nodes.getLength()==0){
            throw new Exception("No name id found in document");
        }
        return nodes.item(0).getTextContent();
    }

    // Marks every "ID" attribute in the document as an XML ID so that
    // signature reference URIs of the form "#..." can be resolved.
    private void tagIdAttributes(Document xmlDoc) {
        NodeList nodeList = xmlDoc.getElementsByTagName("*");
        for (int i = 0; i < nodeList.getLength(); i++) {
            Node node = nodeList.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                if (node.getAttributes().getNamedItem("ID") != null) {
                    ((Element) node).setIdAttribute("ID", true);
                }
            }
        }
    }

    // Reflection check for Element.setIdAttribute, which is absent on very old
    // DOM (pre-Level-3) implementations.
    private boolean setIdAttributeExists() {
        for (Method method : Element.class.getDeclaredMethods()) {
            if (method.getName().equals("setIdAttribute")) {
                return true;
            }
        }
        return false;
    }
}
| 4,636 |
0 | Create_ds/asgard/src/java/com/onelogin | Create_ds/asgard/src/java/com/onelogin/saml/Certificate.java | package com.onelogin.saml;
import java.io.ByteArrayInputStream;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import org.apache.commons.codec.binary.Base64;
public class Certificate {
private X509Certificate x509Cert;
/**
* Loads certificate from a base64 encoded string
*/
public void loadCertificate(String certificate) throws CertificateException {
CertificateFactory fty = CertificateFactory.getInstance("X.509");
ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(certificate.getBytes()));
x509Cert = (X509Certificate)fty.generateCertificate(bais);
}
/**
* Loads a certificate from a encoded base64 byte array.
* @param certificate an encoded base64 byte array.
* @throws CertificateException In case it can't load the certificate.
*/
public void loadCertificate(byte[] certificate) throws CertificateException {
CertificateFactory fty = CertificateFactory.getInstance("X.509");
ByteArrayInputStream bais = new ByteArrayInputStream(Base64.decodeBase64(certificate));
x509Cert = (X509Certificate)fty.generateCertificate(bais);
}
public X509Certificate getX509Cert() {
return x509Cert;
}
}
| 4,637 |
0 | Create_ds/asgard/src/java/com/onelogin | Create_ds/asgard/src/java/com/onelogin/saml/AuthRequest.java | package com.onelogin.saml;
import java.io.ByteArrayOutputStream;
import java.nio.charset.Charset;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.UUID;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import org.apache.commons.codec.binary.Base64;
import com.onelogin.AccountSettings;
import com.onelogin.AppSettings;
/**
 * Builds a SAML 2.0 AuthnRequest for redirecting a user to the identity
 * provider's single-sign-on endpoint.
 */
public class AuthRequest {

    private String id;
    private String issueInstant;
    private AppSettings appSettings;

    /** Format flag for {@link #getRequest(int)}: chunked base64 output. */
    public static final int base64 = 1;

    /**
     * Creates a request with a random ID and the current time as IssueInstant.
     *
     * @param appSettings the service-provider settings (consumer URL, issuer)
     * @param accountSettings the identity-provider settings (not used here)
     */
    public AuthRequest(AppSettings appSettings, AccountSettings accountSettings){
        this.appSettings = appSettings;
        id="_"+UUID.randomUUID().toString();
        // Bug fix: SAML IssueInstant is an xsd:dateTime that must be expressed
        // in UTC. Previously this used the local time zone, single-digit hours
        // ("H") and no 'Z' designator, which strict IdPs reject.
        SimpleDateFormat simpleDf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
        simpleDf.setTimeZone(java.util.TimeZone.getTimeZone("UTC"));
        issueInstant = simpleDf.format(new Date());
    }

    /**
     * Serializes the AuthnRequest XML.
     *
     * @param format the output format; only {@link #base64} is supported
     * @return the chunked-base64-encoded request, or null for an unknown format
     * @throws XMLStreamException if the XML cannot be written
     */
    public String getRequest(int format) throws XMLStreamException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        XMLOutputFactory factory = XMLOutputFactory.newInstance();
        XMLStreamWriter writer = factory.createXMLStreamWriter(baos);
        writer.writeStartElement("samlp", "AuthnRequest", "urn:oasis:names:tc:SAML:2.0:protocol");
        writer.writeNamespace("samlp","urn:oasis:names:tc:SAML:2.0:protocol");
        writer.writeAttribute("ID", id);
        writer.writeAttribute("Version", "2.0");
        writer.writeAttribute("IssueInstant", this.issueInstant);
        writer.writeAttribute("ProtocolBinding", "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST");
        writer.writeAttribute("AssertionConsumerServiceURL", this.appSettings.getAssertionConsumerServiceUrl());
        writer.writeStartElement("saml","Issuer","urn:oasis:names:tc:SAML:2.0:assertion");
        writer.writeNamespace("saml","urn:oasis:names:tc:SAML:2.0:assertion");
        writer.writeCharacters(this.appSettings.getIssuer());
        writer.writeEndElement();
        writer.writeStartElement("samlp", "NameIDPolicy", "urn:oasis:names:tc:SAML:2.0:protocol");
        writer.writeAttribute("Format", "urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified");
        writer.writeAttribute("AllowCreate", "true");
        writer.writeEndElement();
        writer.writeStartElement("samlp","RequestedAuthnContext","urn:oasis:names:tc:SAML:2.0:protocol");
        writer.writeAttribute("Comparison", "exact");
        writer.writeEndElement();
        writer.writeStartElement("saml","AuthnContextClassRef","urn:oasis:names:tc:SAML:2.0:assertion");
        writer.writeNamespace("saml", "urn:oasis:names:tc:SAML:2.0:assertion");
        writer.writeCharacters("urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport");
        writer.writeEndElement();
        writer.writeEndElement();
        writer.flush();
        if (format == base64) {
            byte [] encoded = Base64.encodeBase64Chunked(baos.toByteArray());
            String result = new String(encoded,Charset.forName("UTF-8"));
            return result;
        }
        return null;
    }

    /**
     * Removes URL-encoded CR ("%0D") and LF ("%0A") sequences that chunked
     * base64 encoding introduces once the value has been URL-encoded. The
     * sequences are expected to alternate, starting with "%0D".
     *
     * @param what the URL-encoded text to clean
     * @return the text with the alternating %0D/%0A sequences removed
     */
    public static String getRidOfCRLF(String what) {
        // Bug fix: the original swapped the cr/lf names (%0D is CR, %0A is LF).
        String cr = "%0D";
        String lf = "%0A";
        String now = cr;
        int index = what.indexOf(now);
        StringBuffer r = new StringBuffer();
        while (index!=-1) {
            r.append(what.substring(0,index));
            what = what.substring(index+3,what.length());
            if (now.equals(cr)) {
                now = lf;
            } else {
                now = cr;
            }
            index = what.indexOf(now);
        }
        // Bug fix: append whatever remains after the last %0D/%0A sequence;
        // previously the trailing text was silently dropped.
        r.append(what);
        return r.toString();
    }
}
| 4,638 |
0 | Create_ds/asgard/src/java/com/netflix | Create_ds/asgard/src/java/com/netflix/asgard/ValidationException.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.asgard;
/**
 * Exception to throw when a service rejects a set of inputs due to violation of business rules.
 *
 * <p>Implements {@link NonAlertable}, which — judging by the name — marks this
 * expected, user-correctable failure as one that should not raise system
 * alerts; verify against the NonAlertable contract.</p>
 */
public class ValidationException extends RuntimeException implements NonAlertable {
    /**
     * Constructor with error message.
     *
     * @param message the message to show the user who violated a rule
     */
    public ValidationException(String message) {
        super(message);
    }
}
| 4,639 |
0 | Create_ds/asgard/src/java/com/netflix | Create_ds/asgard/src/java/com/netflix/asgard/ServiceUnavailableException.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.asgard;
/**
 * Exception that occurs when a service dependency cannot be reached.
 */
public class ServiceUnavailableException extends Exception {

    /**
     * Constructs the exception as a general failure of a named service.
     *
     * @param serviceName the name of the unavailable service
     */
    public ServiceUnavailableException(String serviceName) {
        this(serviceName, null);
    }

    /**
     * Constructs the exception with a specific message.
     *
     * @param serviceName the name of the unavailable service
     * @param msg the error message captured from the failure
     */
    public ServiceUnavailableException(String serviceName, String msg) {
        this(serviceName, msg, null);
    }

    /**
     * Constructs the exception with an existing throwable.
     *
     * @param serviceName the name of the unavailable service
     * @param msg the error message captured from the failure
     * @param throwable the existing problem that should be wrapped in this exception for extra context and typing
     */
    public ServiceUnavailableException(String serviceName, String msg, Throwable throwable) {
        super(messageFor(serviceName, msg), throwable);
    }

    /**
     * Chooses the exception message: the captured message when present,
     * otherwise a generic "could not be contacted" message for the service.
     */
    private static String messageFor(String serviceName, String msg) {
        if (msg == null || msg.isEmpty()) {
            return serviceName + " could not be contacted.";
        }
        return msg;
    }
}
| 4,640 |
0 | Create_ds/asgard/src/java/com/netflix | Create_ds/asgard/src/java/com/netflix/asgard/CancelledException.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.asgard;
/**
 * Exception thrown as a mechanism to interrupt a sleeping thread during a long running in-memory task.
 *
 * <p>Carries no message or state — it is used purely as a control-flow
 * signal; presumably caught by the task machinery rather than reported as an
 * error (verify call sites).</p>
 */
public class CancelledException extends RuntimeException {}
| 4,641 |
0 | Create_ds/asgard/src/java/com/netflix | Create_ds/asgard/src/java/com/netflix/asgard/CollectedExceptions.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.asgard;
import com.google.common.collect.ImmutableList;
import java.util.List;
/**
 * Wrapper exception type for multiple other exceptions, used to report all the
 * failures of a multi-step action at once instead of only the first one.
 */
public class CollectedExceptions extends Exception {

    /**
     * The immutable, ordered list of exceptions that occurred while doing a multi-step action.
     */
    // Declared final: it is assigned exactly once, in the constructor.
    private final ImmutableList<Exception> exceptions;

    /**
     * Constructor
     *
     * @param message the detail message for the overall collection of exceptions
     * @param exceptions the ordered list of exceptions that happened before they were collected here
     */
    public CollectedExceptions(String message, List<Exception> exceptions) {
        super(message);
        // Defensive, immutable snapshot of the caller's list.
        this.exceptions = ImmutableList.copyOf(exceptions);
    }

    /**
     * Gets exceptions that occurred while doing a multi-step action.
     *
     * @return the immutable list of exceptions, in the order they occurred
     */
    public List<Exception> getExceptions() {
        return exceptions;
    }
}
| 4,642 |
0 | Create_ds/asgard/src/java/com/netflix/asgard | Create_ds/asgard/src/java/com/netflix/asgard/push/PushException.java | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.asgard.push;
import com.netflix.asgard.NonAlertable;
/**
 * Exception thrown when there is a problem doing a push.
 *
 * <p>Implements {@link NonAlertable}, which — judging by the name — marks this
 * as an expected failure that should not raise system alerts; verify against
 * the NonAlertable contract.</p>
 */
public class PushException extends RuntimeException implements NonAlertable {
    /**
     * Constructor with error message.
     *
     * @param message the explanation of what went wrong
     */
    public PushException(String message) {
        super(message);
    }
    /**
     * Constructor with error message and cause.
     *
     * @param message the explanation of what went wrong
     * @param cause root exception that caused this one
     */
    public PushException(String message, Throwable cause) {
        super(message, cause);
    }
}
| 4,643 |
0 | Create_ds/playtorch/app/android/app/src/androidTest/java/dev | Create_ds/playtorch/app/android/app/src/androidTest/java/dev/playtorch/DetoxTest.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package dev.playtorch;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.LargeTest;
import androidx.test.rule.ActivityTestRule;
import com.wix.detox.Detox;
import com.wix.detox.config.DetoxConfig;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
@LargeTest
public class DetoxTest {
  // Replace 'MainActivity' with the value of android:name entry in
  // <activity> in AndroidManifest.xml
  // launchActivity=false: Detox launches the activity itself inside runTests.
  @Rule
  public ActivityTestRule<MainActivity> mActivityRule =
      new ActivityTestRule<>(MainActivity.class, false, false);

  /**
   * Entry point for the Detox end-to-end suite: configures timeouts and hands
   * control to the Detox runner, which drives the activity under test.
   */
  @Test
  public void runDetoxTests() {
    // // This is optional - in case you've decided to integrate TestButler
    // // See
    // https://github.com/wix/Detox/blob/master/docs/Introduction.Android.md#8-test-butler-support-optional
    // TestButlerProbe.assertReadyIfInstalled();
    DetoxConfig detoxConfig = new DetoxConfig();
    detoxConfig.idlePolicyConfig.masterTimeoutSec = 90;
    detoxConfig.idlePolicyConfig.idleResourceTimeoutSec = 60;
    // Debug builds load the JS bundle from Metro, which can be slow; allow
    // extra time for the React Native context to come up.
    detoxConfig.rnContextLoadTimeoutSec = (BuildConfig.DEBUG ? 180 : 60);
    Detox.runTests(mActivityRule, detoxConfig);
  }
}
| 4,644 |
0 | Create_ds/playtorch/app/android/app/src/main/java/dev | Create_ds/playtorch/app/android/app/src/main/java/dev/playtorch/TwoFingerPressGestureDetector.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package dev.playtorch;
import android.os.SystemClock;
import android.view.MotionEvent;
import android.view.MotionEvent.PointerCoords;
/** Detects a long press performed with two fingers that are held (nearly) still. */
class TwoFingerLongPressDetector {
  /** Maximum per-axis finger movement (pixels) still counted as "holding still". */
  private static final double PRECISION = 20.0;
  /** How long (ms) both fingers must be held down before the long press fires. */
  private static final int NEEDED_PRESS_TIME = 800;
  /** The gesture requires exactly two pointers. */
  private static final int NEEDED_POINTER_COUNT = 2;

  // Primitive boolean/long instead of boxed Boolean/Long: no null state is
  // ever used, and this avoids autoboxing on every touch event.
  private boolean startedDetecting = false;
  private long startTime = Long.MAX_VALUE;
  private final MotionEvent.PointerCoords[] startPosition = {
    new PointerCoords(), new PointerCoords()
  };

  final TwoFingerLongPressListener longPressListener;

  TwoFingerLongPressDetector(TwoFingerLongPressListener listener) {
    longPressListener = listener;
  }

  /**
   * Handles a touch event. When a two-finger long press is detected,
   * {@link TwoFingerLongPressListener#onTwoFingerLongPress()} is called once
   * and detection restarts.
   */
  void onTouchEvent(MotionEvent event) {
    if (event == null) {
      return;
    }
    // Begin tracking the first time a two-finger MOVE event is seen.
    if (!startedDetecting
        && event.getAction() == MotionEvent.ACTION_MOVE
        && event.getPointerCount() == NEEDED_POINTER_COUNT) {
      startedDetecting = true;
      startTime = SystemClock.uptimeMillis();
      for (int i = 0; i < startPosition.length; i++) {
        event.getPointerCoords(i, startPosition[i]);
      }
      return;
    }
    // Abort detection if the gesture is no longer a two-finger move.
    if (event.getAction() != MotionEvent.ACTION_MOVE
        || event.getPointerCount() != NEEDED_POINTER_COUNT) {
      startedDetecting = false;
      return;
    }
    // Abort detection if either finger drifted beyond the allowed precision.
    for (int i = 0; i < startPosition.length; i++) {
      MotionEvent.PointerCoords current = new MotionEvent.PointerCoords();
      event.getPointerCoords(i, current);
      if (Math.abs(current.x - startPosition[i].x) > PRECISION
          || Math.abs(current.y - startPosition[i].y) > PRECISION) {
        startedDetecting = false;
        return;
      }
    }
    // Both fingers held still long enough: fire and reset.
    if (SystemClock.uptimeMillis() - startTime >= NEEDED_PRESS_TIME) {
      longPressListener.onTwoFingerLongPress();
      startedDetecting = false;
    }
  }

  /** Callback invoked when a two-finger long press is detected. */
  interface TwoFingerLongPressListener {
    void onTwoFingerLongPress();
  }
}
| 4,645 |
0 | Create_ds/playtorch/app/android/app/src/main/java/dev | Create_ds/playtorch/app/android/app/src/main/java/dev/playtorch/MainApplication.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package dev.playtorch;
import android.app.Application;
import android.content.Context;
import android.content.res.Configuration;
import androidx.annotation.NonNull;
import com.facebook.react.PackageList;
import com.facebook.react.ReactApplication;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.soloader.SoLoader;
import expo.modules.ApplicationLifecycleDispatcher;
import expo.modules.ReactNativeHostWrapper;
import expo.modules.devlauncher.DevLauncherController;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
public class MainApplication extends Application implements ReactApplication {

  private final ReactNativeHost mReactNativeHost =
      new ReactNativeHostWrapper(
          this,
          new ReactNativeHost(this) {
            @Override
            public boolean getUseDeveloperSupport() {
              // Delegated so expo-dev-launcher can toggle dev support at runtime.
              return DevLauncherController.getInstance().getUseDeveloperSupport();
            }

            @Override
            protected List<ReactPackage> getPackages() {
              @SuppressWarnings("UnnecessaryLocalVariable")
              List<ReactPackage> packages = new PackageList(this).getPackages();
              // Packages that cannot be autolinked yet can be added manually here, for example:
              // packages.add(new MyReactNativePackage());
              return packages;
            }

            @Override
            protected String getJSMainModuleName() {
              return "index";
            }
          });

  @Override
  public ReactNativeHost getReactNativeHost() {
    return mReactNativeHost;
  }

  @Override
  public void onCreate() {
    super.onCreate();
    SoLoader.init(this, /* native exopackage */ false);
    // The PyTorch Mobile Android shared object library needs to load before
    // the torchvision ops because latter registers ops for the loaded PyTorch
    // Mobile library. If only the torchvision ops are loaded here, then the
    // PyTorch Mobile library will be loaded after as part of the
    // react-native-pytorch-core package and therefore, torchvision ops won't
    // be able to register for the runtime.
    //
    // Note: Loading the libraries have to happen after the SoLoader.init call
    // above and therefore can't be done in a static block.
    SoLoader.loadLibrary("pytorch_jni_lite");
    SoLoader.loadLibrary("torchvision_ops");
    DevLauncherController.initialize(this, getReactNativeHost());
    initializeFlipper(this, getReactNativeHost().getReactInstanceManager());
    ApplicationLifecycleDispatcher.onApplicationCreate(this);
  }

  @Override
  public void onConfigurationChanged(@NonNull Configuration newConfig) {
    super.onConfigurationChanged(newConfig);
    ApplicationLifecycleDispatcher.onConfigurationChanged(this, newConfig);
  }

  /**
   * Loads Flipper in React Native templates. Call this in the onCreate method with something like
   * initializeFlipper(this, getReactNativeHost().getReactInstanceManager());
   *
   * @param context the application context
   * @param reactInstanceManager the React instance manager passed to Flipper
   */
  private static void initializeFlipper(
      Context context, ReactInstanceManager reactInstanceManager) {
    if (BuildConfig.DEBUG) {
      try {
        /*
         We use reflection here to pick up the class that initializes Flipper,
         since Flipper library is not available in release mode
        */
        Class<?> aClass = Class.forName("dev.playtorch.test.ReactNativeFlipper");
        aClass
            .getMethod("initializeFlipper", Context.class, ReactInstanceManager.class)
            .invoke(null, context, reactInstanceManager);
      } catch (ClassNotFoundException
          | NoSuchMethodException
          | IllegalAccessException
          | InvocationTargetException e) {
        // Collapsed from four identical catch blocks into one multi-catch.
        // Flipper is a debug-only tool; log the failure and continue.
        e.printStackTrace();
      }
    }
  }
}
| 4,646 |
0 | Create_ds/playtorch/app/android/app/src/main/java/dev | Create_ds/playtorch/app/android/app/src/main/java/dev/playtorch/MainActivity.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package dev.playtorch;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import androidx.annotation.Nullable;
import com.facebook.react.ReactRootView;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.WritableMap;
import com.facebook.react.modules.core.DeviceEventManagerModule;
import expo.modules.devlauncher.DevLauncherController;
import expo.modules.devmenu.react.DevMenuAwareReactActivity;
import expo.modules.splashscreen.SplashScreenImageResizeMode;
import expo.modules.splashscreen.singletons.SplashScreen;
public class MainActivity extends DevMenuAwareReactActivity
    implements TwoFingerLongPressDetector.TwoFingerLongPressListener {
  // Watches all dispatched touch events for a two-finger long press gesture.
  TwoFingerLongPressDetector twoFingerLongPressDetector = new TwoFingerLongPressDetector(this);

  /**
   * Forwards a detected two-finger long press to JavaScript as an
   * "onTwoFingerLongPress" device event (with an empty payload), if the React
   * context is already available.
   */
  @Override
  public void onTwoFingerLongPress() {
    // Create map for params
    WritableMap payload = Arguments.createMap();
    // Put data to map
    // Get EventEmitter from context and send event thanks to it
    ReactApplicationContext reactContext =
        (ReactApplicationContext)
            getReactNativeHost().getReactInstanceManager().getCurrentReactContext();
    // The context is null until the React instance finishes starting; the
    // event is silently dropped in that case.
    if (reactContext != null) {
      reactContext
          .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class)
          .emit("onTwoFingerLongPress", payload);
    }
  }

  @Override
  public void onNewIntent(Intent intent) {
    // Give expo-dev-launcher first chance at deep-link intents.
    if (DevLauncherController.tryToHandleIntent(this, intent)) {
      return;
    }
    super.onNewIntent(intent);
  }

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    // Set the theme to AppTheme BEFORE onCreate to support
    // coloring the background, status bar, and navigation bar.
    // This is required for expo-splash-screen.
    setTheme(R.style.AppTheme);
    Window w = getWindow();
    w.setStatusBarColor(Color.TRANSPARENT);
    w.setNavigationBarColor(Color.TRANSPARENT);
    // Lay out edge-to-edge behind the (transparent) system bars.
    w.getDecorView()
        .setSystemUiVisibility(
            View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
    // Pass null instead of savedInstanceState — NOTE(review): presumably to
    // avoid restoring fragment state into the React activity; confirm.
    super.onCreate(null);
    SplashScreen.show(this, SplashScreenImageResizeMode.COVER, ReactRootView.class, false);
  }

  /**
   * Returns the name of the main component registered from JavaScript. This is used to schedule
   * rendering of the component.
   */
  @Override
  protected String getMainComponentName() {
    return "main";
  }

  @Override
  public boolean dispatchTouchEvent(@Nullable MotionEvent ev) {
    // Feed every touch event through the gesture detector before normal dispatch.
    twoFingerLongPressDetector.onTouchEvent(ev);
    return super.dispatchTouchEvent(ev);
  }
}
| 4,647 |
0 | Create_ds/playtorch/app/android/app/src/main/java/dev/playtorch | Create_ds/playtorch/app/android/app/src/main/java/dev/playtorch/newarchitecture/MainApplicationReactNativeHost.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package dev.playtorch.newarchitecture;
import android.app.Application;
import androidx.annotation.NonNull;
import com.facebook.react.PackageList;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.react.ReactPackageTurboModuleManagerDelegate;
import com.facebook.react.bridge.JSIModulePackage;
import com.facebook.react.bridge.JSIModuleProvider;
import com.facebook.react.bridge.JSIModuleSpec;
import com.facebook.react.bridge.JSIModuleType;
import com.facebook.react.bridge.JavaScriptContextHolder;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.UIManager;
import com.facebook.react.fabric.ComponentFactory;
import com.facebook.react.fabric.CoreComponentsRegistry;
import com.facebook.react.fabric.FabricJSIModuleProvider;
import com.facebook.react.fabric.ReactNativeConfig;
import com.facebook.react.uimanager.ViewManagerRegistry;
import dev.playtorch.BuildConfig;
import dev.playtorch.newarchitecture.components.MainComponentsRegistry;
import dev.playtorch.newarchitecture.modules.MainApplicationTurboModuleManagerDelegate;
import java.util.ArrayList;
import java.util.List;
/**
* A {@link ReactNativeHost} that helps you load everything needed for the New Architecture, both
* TurboModule delegates and the Fabric Renderer.
*
* <p>Please note that this class is used ONLY if you opt-in for the New Architecture (see the
* `newArchEnabled` property). Is ignored otherwise.
*/
public class MainApplicationReactNativeHost extends ReactNativeHost {
  public MainApplicationReactNativeHost(Application application) {
    super(application);
  }
  /** Developer support (red box, reload menu) is enabled only in debug builds. */
  @Override
  public boolean getUseDeveloperSupport() {
    return BuildConfig.DEBUG;
  }
  @Override
  protected List<ReactPackage> getPackages() {
    List<ReactPackage> packages = new PackageList(this).getPackages();
    // Packages that cannot be autolinked yet can be added manually here, for example:
    // packages.add(new MyReactNativePackage());
    // TurboModules must also be loaded here providing a valid TurboReactPackage implementation:
    // packages.add(new TurboReactPackage() { ... });
    // If you have custom Fabric Components, their ViewManagers should also be loaded here
    // inside a ReactPackage.
    return packages;
  }
  /** Name of the JS entry module used to load the bundle. */
  @Override
  protected String getJSMainModuleName() {
    return "index";
  }
  @NonNull
  @Override
  protected ReactPackageTurboModuleManagerDelegate.Builder
      getReactPackageTurboModuleManagerDelegateBuilder() {
    // Here we provide the ReactPackageTurboModuleManagerDelegate Builder. This is necessary
    // for the new architecture and to use TurboModules correctly.
    return new MainApplicationTurboModuleManagerDelegate.Builder();
  }
  /**
   * Wires up the Fabric renderer: registers the core and app component registries against a
   * shared {@link ComponentFactory}, then exposes a {@link FabricJSIModuleProvider} as the
   * UIManager JSI module.
   */
  @Override
  protected JSIModulePackage getJSIModulePackage() {
    return new JSIModulePackage() {
      @Override
      public List<JSIModuleSpec> getJSIModules(
          final ReactApplicationContext reactApplicationContext,
          final JavaScriptContextHolder jsContext) {
        final List<JSIModuleSpec> specs = new ArrayList<>();
        // Here we provide a new JSIModuleSpec that will be responsible of providing the
        // custom Fabric Components.
        specs.add(
            new JSIModuleSpec() {
              @Override
              public JSIModuleType getJSIModuleType() {
                return JSIModuleType.UIManager;
              }
              @Override
              public JSIModuleProvider<UIManager> getJSIModuleProvider() {
                final ComponentFactory componentFactory = new ComponentFactory();
                CoreComponentsRegistry.register(componentFactory);
                // Here we register a Components Registry.
                // The one that is generated with the template contains no components
                // and just provides you the one from React Native core.
                MainComponentsRegistry.register(componentFactory);
                final ReactInstanceManager reactInstanceManager = getReactInstanceManager();
                ViewManagerRegistry viewManagerRegistry =
                    new ViewManagerRegistry(
                        reactInstanceManager.getOrCreateViewManagers(reactApplicationContext));
                return new FabricJSIModuleProvider(
                    reactApplicationContext,
                    componentFactory,
                    ReactNativeConfig.DEFAULT_CONFIG,
                    viewManagerRegistry);
              }
            });
        return specs;
      }
    };
  }
}
| 4,648 |
0 | Create_ds/playtorch/app/android/app/src/main/java/dev/playtorch/newarchitecture | Create_ds/playtorch/app/android/app/src/main/java/dev/playtorch/newarchitecture/components/MainComponentsRegistry.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package dev.playtorch.newarchitecture.components;
import com.facebook.jni.HybridData;
import com.facebook.proguard.annotations.DoNotStrip;
import com.facebook.react.fabric.ComponentFactory;
import com.facebook.soloader.SoLoader;
/**
* Class responsible to load the custom Fabric Components. This class has native methods and needs a
* corresponding C++ implementation/header file to work correctly (already placed inside the jni/
* folder for you).
*
* <p>Please note that this class is used ONLY if you opt-in for the New Architecture (see the
* `newArchEnabled` property). Is ignored otherwise.
*/
@DoNotStrip
public class MainComponentsRegistry {
  static {
    // The C++ counterpart of this class lives in libfabricjni; load it before any native call.
    SoLoader.loadLibrary("fabricjni");
  }
  // Owns the native peer created by initHybrid(); keeps it alive for this object's lifetime.
  @DoNotStrip private final HybridData mHybridData;
  @DoNotStrip
  private native HybridData initHybrid(ComponentFactory componentFactory);
  @DoNotStrip
  private MainComponentsRegistry(ComponentFactory componentFactory) {
    mHybridData = initHybrid(componentFactory);
  }
  /** Creates a registry bound to {@code componentFactory}; called from the ReactNativeHost. */
  @DoNotStrip
  public static MainComponentsRegistry register(ComponentFactory componentFactory) {
    return new MainComponentsRegistry(componentFactory);
  }
}
| 4,649 |
0 | Create_ds/playtorch/app/android/app/src/main/java/dev/playtorch/newarchitecture | Create_ds/playtorch/app/android/app/src/main/java/dev/playtorch/newarchitecture/modules/MainApplicationTurboModuleManagerDelegate.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package dev.playtorch.newarchitecture.modules;
import com.facebook.jni.HybridData;
import com.facebook.react.ReactPackage;
import com.facebook.react.ReactPackageTurboModuleManagerDelegate;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.soloader.SoLoader;
import java.util.List;
/**
* Class responsible to load the TurboModules. This class has native methods and needs a
* corresponding C++ implementation/header file to work correctly (already placed inside the jni/
* folder for you).
*
* <p>Please note that this class is used ONLY if you opt-in for the New Architecture (see the
* `newArchEnabled` property). Is ignored otherwise.
*/
public class MainApplicationTurboModuleManagerDelegate
    extends ReactPackageTurboModuleManagerDelegate {
  // Guarded by the synchronized maybeLoadOtherSoLibraries(); volatile for cross-thread
  // visibility of the "already loaded" flag.
  private static volatile boolean sIsSoLibraryLoaded;
  protected MainApplicationTurboModuleManagerDelegate(
      ReactApplicationContext reactApplicationContext, List<ReactPackage> packages) {
    super(reactApplicationContext, packages);
  }
  protected native HybridData initHybrid();
  native boolean canCreateTurboModule(String moduleName);
  /** Factory used by the ReactNativeHost to construct this delegate. */
  public static class Builder extends ReactPackageTurboModuleManagerDelegate.Builder {
    protected MainApplicationTurboModuleManagerDelegate build(
        ReactApplicationContext context, List<ReactPackage> packages) {
      return new MainApplicationTurboModuleManagerDelegate(context, packages);
    }
  }
  /** Loads the app's native TurboModule library exactly once per process. */
  @Override
  protected synchronized void maybeLoadOtherSoLibraries() {
    if (!sIsSoLibraryLoaded) {
      // If you change the name of your application .so file in the Android.mk file,
      // make sure you update the name here as well.
      SoLoader.loadLibrary("playtorch_appmodules");
      sIsSoLibraryLoaded = true;
    }
  }
}
| 4,650 |
0 | Create_ds/playtorch/app/android/app/src/debug/java/dev/playtorch | Create_ds/playtorch/app/android/app/src/debug/java/dev/playtorch/test/ReactNativeFlipper.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package dev.playtorch.test;
import android.content.Context;
import com.facebook.flipper.android.AndroidFlipperClient;
import com.facebook.flipper.android.utils.FlipperUtils;
import com.facebook.flipper.core.FlipperClient;
import com.facebook.flipper.plugins.crashreporter.CrashReporterPlugin;
import com.facebook.flipper.plugins.databases.DatabasesFlipperPlugin;
import com.facebook.flipper.plugins.fresco.FrescoFlipperPlugin;
import com.facebook.flipper.plugins.inspector.DescriptorMapping;
import com.facebook.flipper.plugins.inspector.InspectorFlipperPlugin;
import com.facebook.flipper.plugins.network.FlipperOkhttpInterceptor;
import com.facebook.flipper.plugins.network.NetworkFlipperPlugin;
import com.facebook.flipper.plugins.react.ReactFlipperPlugin;
import com.facebook.flipper.plugins.sharedpreferences.SharedPreferencesFlipperPlugin;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.modules.network.CustomClientBuilder;
import com.facebook.react.modules.network.NetworkingModule;
import okhttp3.OkHttpClient;
/** Debug-build-only initializer that wires the app up to the Flipper desktop debugging tool. */
public class ReactNativeFlipper {
  /**
   * Registers the standard Flipper plugins (layout inspector, React devtools, databases, shared
   * preferences, crash reporter, network, Fresco) and starts the client. No-op unless
   * {@code FlipperUtils.shouldEnableFlipper} says Flipper applies to this build/device.
   */
  public static void initializeFlipper(Context context, ReactInstanceManager reactInstanceManager) {
    if (FlipperUtils.shouldEnableFlipper(context)) {
      final FlipperClient client = AndroidFlipperClient.getInstance(context);
      client.addPlugin(new InspectorFlipperPlugin(context, DescriptorMapping.withDefaults()));
      client.addPlugin(new ReactFlipperPlugin());
      client.addPlugin(new DatabasesFlipperPlugin(context));
      client.addPlugin(new SharedPreferencesFlipperPlugin(context));
      client.addPlugin(CrashReporterPlugin.getInstance());
      NetworkFlipperPlugin networkFlipperPlugin = new NetworkFlipperPlugin();
      // Route all OkHttp traffic through Flipper's network interceptor.
      NetworkingModule.setCustomClientBuilder(
          new CustomClientBuilder() {
            @Override
            public void apply(OkHttpClient.Builder builder) {
              builder.addNetworkInterceptor(new FlipperOkhttpInterceptor(networkFlipperPlugin));
            }
          });
      client.addPlugin(networkFlipperPlugin);
      client.start();
      // Fresco Plugin needs to ensure that ImagePipelineFactory is initialized
      // Hence we run it after all native modules have been initialized
      ReactContext reactContext = reactInstanceManager.getCurrentReactContext();
      if (reactContext == null) {
        // React context not ready yet: defer Fresco plugin registration until it is.
        reactInstanceManager.addReactInstanceEventListener(
            new ReactInstanceManager.ReactInstanceEventListener() {
              @Override
              public void onReactContextInitialized(ReactContext reactContext) {
                // One-shot listener: unregister before scheduling the plugin add.
                reactInstanceManager.removeReactInstanceEventListener(this);
                reactContext.runOnNativeModulesQueueThread(
                    new Runnable() {
                      @Override
                      public void run() {
                        client.addPlugin(new FrescoFlipperPlugin());
                      }
                    });
              }
            });
      } else {
        client.addPlugin(new FrescoFlipperPlugin());
      }
    }
  }
}
| 4,651 |
0 | Create_ds/playtorch/react-native-pytorch-core/example/android/app/src/androidTest/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/example/android/app/src/androidTest/java/org/pytorch/rn/core/example/DetoxTest.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.example;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.LargeTest;
import androidx.test.rule.ActivityTestRule;
import com.wix.detox.Detox;
import com.wix.detox.config.DetoxConfig;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
@LargeTest
public class DetoxTest {
  // launchActivity=false: Detox launches MainActivity itself when the test run starts.
  @Rule
  public ActivityTestRule<MainActivity> mActivityRule =
      new ActivityTestRule<>(MainActivity.class, false, false);
  /** Single JUnit entry point that hands control over to the Detox end-to-end test runner. */
  @Test
  public void runDetoxTests() {
    DetoxConfig detoxConfig = new DetoxConfig();
    detoxConfig.idlePolicyConfig.masterTimeoutSec = 90;
    detoxConfig.idlePolicyConfig.idleResourceTimeoutSec = 60;
    // Debug builds load the JS bundle from Metro, which can be slow — allow extra time.
    detoxConfig.rnContextLoadTimeoutSec = (BuildConfig.DEBUG ? 180 : 60);
    Detox.runTests(mActivityRule, detoxConfig);
  }
}
| 4,652 |
0 | Create_ds/playtorch/react-native-pytorch-core/example/android/app/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/example/android/app/src/main/java/org/pytorch/rn/core/example/MainApplication.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.example;
import android.app.Application;
import android.content.Context;
import com.facebook.react.PackageList;
import com.facebook.react.ReactApplication;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.ReactNativeHost;
import com.facebook.react.ReactPackage;
import com.facebook.soloader.SoLoader;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
import org.pytorch.rn.core.PyTorchCorePackage;
/** Application class for the PyTorchCoreExample app; hosts the React Native instance. */
public class MainApplication extends Application implements ReactApplication {
  private final ReactNativeHost mReactNativeHost =
      new ReactNativeHost(this) {
        /** Developer support (red box, reload menu) only in debug builds. */
        @Override
        public boolean getUseDeveloperSupport() {
          return BuildConfig.DEBUG;
        }
        @Override
        protected List<ReactPackage> getPackages() {
          @SuppressWarnings("UnnecessaryLocalVariable")
          List<ReactPackage> packages = new PackageList(this).getPackages();
          // Packages that cannot be autolinked yet can be added manually here, for
          // PyTorchCoreExample:
          // packages.add(new MyReactNativePackage());
          packages.add(new PyTorchCorePackage());
          return packages;
        }
        /** Name of the JS entry module used to load the bundle. */
        @Override
        protected String getJSMainModuleName() {
          return "index";
        }
      };
  @Override
  public ReactNativeHost getReactNativeHost() {
    return mReactNativeHost;
  }
  @Override
  public void onCreate() {
    super.onCreate();
    SoLoader.init(this, /* native exopackage */ false);
    initializeFlipper(
        this,
        getReactNativeHost()
            .getReactInstanceManager()); // Remove this line if you don't want Flipper enabled
  }
  /**
   * Loads Flipper in React Native templates via reflection.
   *
   * @param context application context handed to the Flipper initializer
   * @param reactInstanceManager manager whose context Flipper plugins attach to
   */
  private static void initializeFlipper(
      Context context, ReactInstanceManager reactInstanceManager) {
    if (BuildConfig.DEBUG) {
      try {
        /*
         We use reflection here to pick up the class that initializes Flipper,
         since Flipper library is not available in release mode
        */
        Class<?> aClass = Class.forName("org.pytorch.rn.core.example.ReactNativeFlipper");
        aClass
            .getMethod("initializeFlipper", Context.class, ReactInstanceManager.class)
            .invoke(null, context, reactInstanceManager);
      } catch (ReflectiveOperationException e) {
        // ClassNotFound/NoSuchMethod/IllegalAccess/InvocationTarget all share this supertype.
        // Flipper is optional tooling, so a failed init is logged and otherwise ignored.
        e.printStackTrace();
      }
    }
  }
}
| 4,653 |
0 | Create_ds/playtorch/react-native-pytorch-core/example/android/app/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/example/android/app/src/main/java/org/pytorch/rn/core/example/MainActivity.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.example;
import com.facebook.react.ReactActivity;
/** Entry-point activity of the PyTorchCoreExample app. */
public class MainActivity extends ReactActivity {
  /**
   * Returns the name of the main component registered from JavaScript; React Native uses it to
   * schedule rendering of the root component.
   */
  @Override
  protected String getMainComponentName() {
    final String mainComponentName = "PyTorchCoreExample";
    return mainComponentName;
  }
}
| 4,654 |
0 | Create_ds/playtorch/react-native-pytorch-core/example/android/app/src/debug/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/example/android/app/src/debug/java/org/pytorch/rn/core/example/ReactNativeFlipper.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.example;
import android.content.Context;
import com.facebook.flipper.android.AndroidFlipperClient;
import com.facebook.flipper.android.utils.FlipperUtils;
import com.facebook.flipper.core.FlipperClient;
import com.facebook.flipper.plugins.crashreporter.CrashReporterPlugin;
import com.facebook.flipper.plugins.databases.DatabasesFlipperPlugin;
import com.facebook.flipper.plugins.fresco.FrescoFlipperPlugin;
import com.facebook.flipper.plugins.inspector.DescriptorMapping;
import com.facebook.flipper.plugins.inspector.InspectorFlipperPlugin;
import com.facebook.flipper.plugins.network.FlipperOkhttpInterceptor;
import com.facebook.flipper.plugins.network.NetworkFlipperPlugin;
import com.facebook.flipper.plugins.react.ReactFlipperPlugin;
import com.facebook.flipper.plugins.sharedpreferences.SharedPreferencesFlipperPlugin;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.modules.network.CustomClientBuilder;
import com.facebook.react.modules.network.NetworkingModule;
import okhttp3.OkHttpClient;
/** Debug-build-only initializer that wires the app up to the Flipper desktop debugging tool. */
public class ReactNativeFlipper {
  /**
   * Registers the standard Flipper plugins (layout inspector, React devtools, databases, shared
   * preferences, crash reporter, network, Fresco) and starts the client. No-op unless
   * {@code FlipperUtils.shouldEnableFlipper} says Flipper applies to this build/device.
   */
  public static void initializeFlipper(Context context, ReactInstanceManager reactInstanceManager) {
    if (FlipperUtils.shouldEnableFlipper(context)) {
      final FlipperClient client = AndroidFlipperClient.getInstance(context);
      client.addPlugin(new InspectorFlipperPlugin(context, DescriptorMapping.withDefaults()));
      client.addPlugin(new ReactFlipperPlugin());
      client.addPlugin(new DatabasesFlipperPlugin(context));
      client.addPlugin(new SharedPreferencesFlipperPlugin(context));
      client.addPlugin(CrashReporterPlugin.getInstance());
      NetworkFlipperPlugin networkFlipperPlugin = new NetworkFlipperPlugin();
      // Route all OkHttp traffic through Flipper's network interceptor.
      NetworkingModule.setCustomClientBuilder(
          new CustomClientBuilder() {
            @Override
            public void apply(OkHttpClient.Builder builder) {
              builder.addNetworkInterceptor(new FlipperOkhttpInterceptor(networkFlipperPlugin));
            }
          });
      client.addPlugin(networkFlipperPlugin);
      client.start();
      // Fresco Plugin needs to ensure that ImagePipelineFactory is initialized
      // Hence we run it after all native modules have been initialized
      ReactContext reactContext = reactInstanceManager.getCurrentReactContext();
      if (reactContext == null) {
        // React context not ready yet: defer Fresco plugin registration until it is.
        reactInstanceManager.addReactInstanceEventListener(
            new ReactInstanceManager.ReactInstanceEventListener() {
              @Override
              public void onReactContextInitialized(ReactContext reactContext) {
                // One-shot listener: unregister before scheduling the plugin add.
                reactInstanceManager.removeReactInstanceEventListener(this);
                reactContext.runOnNativeModulesQueueThread(
                    new Runnable() {
                      @Override
                      public void run() {
                        client.addPlugin(new FrescoFlipperPlugin());
                      }
                    });
              }
            });
      } else {
        client.addPlugin(new FrescoFlipperPlugin());
      }
    }
  }
}
| 4,655 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/androidTest/java/org | Create_ds/playtorch/react-native-pytorch-core/android/src/androidTest/java/org/pytorch/IValuePackerInstrumentedTest.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import androidx.annotation.ColorInt;
import androidx.test.InstrumentationRegistry;
import androidx.test.filters.SmallTest;
import androidx.test.runner.AndroidJUnit4;
import com.facebook.react.bridge.JavaOnlyArray;
import com.facebook.react.bridge.JavaOnlyMap;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.soloader.SoLoader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
import org.json.JSONException;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.pytorch.rn.core.audio.Audio;
import org.pytorch.rn.core.image.Image;
import org.pytorch.rn.core.javascript.JSContext;
import org.pytorch.rn.core.ml.processing.BaseIValuePacker;
import org.pytorch.rn.core.ml.processing.GPT2Tokenizer;
import org.pytorch.rn.core.ml.processing.IIValuePacker;
import org.pytorch.rn.core.ml.processing.PackerContext;
import org.pytorch.rn.core.ml.processing.PackerRegistry;
import org.pytorch.rn.core.ml.processing.packer.ScalarBoolPacker;
import org.pytorch.rn.core.ml.processing.packer.ScalarDoublePacker;
import org.pytorch.rn.core.ml.processing.packer.ScalarLongPacker;
import org.pytorch.rn.core.ml.processing.packer.TensorFromAudioPacker;
import org.pytorch.rn.core.ml.processing.packer.TensorFromImagePacker;
import org.pytorch.rn.core.ml.processing.packer.TensorFromStringPacker;
import org.pytorch.rn.core.ml.processing.packer.TensorPacker;
import org.pytorch.rn.core.ml.processing.packer.TuplePacker;
import org.pytorch.rn.core.ml.processing.unpacker.ArgmaxUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.BertDecodeQAAnswerUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.BoundingBoxesUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.DictStringKeyUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.ListUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.ScalarBoolUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.ScalarFloatUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.ScalarLongUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.StringUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.TensorToImageUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.TensorToStringUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.TensorUnpacker;
import org.pytorch.rn.core.ml.processing.unpacker.TupleUnpacker;
@RunWith(AndroidJUnit4.class)
@SmallTest
public class IValuePackerInstrumentedTest {
  // Tolerance used for all float/double comparisons in this test class.
  public static final float DOUBLE_EQUALS_DELTA = 1e-6f;
  static {
    // One-time process setup: native libs must be loaded and all packer/unpacker handlers
    // registered before any BaseIValuePacker is constructed by the tests below.
    Context ctx = InstrumentationRegistry.getTargetContext();
    SoLoader.init(ctx, false);
    // Pack
    PackerRegistry.register("tuple", new TuplePacker());
    PackerRegistry.register("scalar_bool", new ScalarBoolPacker());
    PackerRegistry.register("scalar_long", new ScalarLongPacker());
    PackerRegistry.register("scalar_double", new ScalarDoublePacker());
    PackerRegistry.register("tensor", new TensorPacker());
    PackerRegistry.register("tensor_from_image", new TensorFromImagePacker());
    PackerRegistry.register("tensor_from_string", new TensorFromStringPacker());
    PackerRegistry.register("tensor_from_audio", new TensorFromAudioPacker());
    // Unpack
    PackerRegistry.register("tuple", new TupleUnpacker());
    PackerRegistry.register("list", new ListUnpacker());
    PackerRegistry.register("dict_string_key", new DictStringKeyUnpacker());
    PackerRegistry.register("tensor", new TensorUnpacker());
    PackerRegistry.register("scalar_long", new ScalarLongUnpacker());
    PackerRegistry.register("scalar_float", new ScalarFloatUnpacker());
    PackerRegistry.register("scalar_bool", new ScalarBoolUnpacker());
    PackerRegistry.register("argmax", new ArgmaxUnpacker());
    PackerRegistry.register("string", new StringUnpacker());
    PackerRegistry.register("tensor_to_image", new TensorToImageUnpacker());
    PackerRegistry.register("bounding_boxes", new BoundingBoxesUnpacker());
    PackerRegistry.register("tensor_to_string", new TensorToStringUnpacker());
    PackerRegistry.register("bert_decode_qa_answer", new BertDecodeQAAnswerUnpacker());
  }
public String readAsset(String name) throws IOException {
final Context testContext = InstrumentationRegistry.getInstrumentation().getContext();
int bufferSize = 1024;
char[] buffer = new char[bufferSize];
StringBuilder out = new StringBuilder();
Reader in = new InputStreamReader(testContext.getAssets().open(name), StandardCharsets.UTF_8);
for (int len; (len = in.read(buffer, 0, buffer.length)) > 0; ) {
out.append(buffer, 0, len);
}
return out.toString();
}
private static double[] doubleArrayFromReadableArray(ReadableArray array) {
final int n = array.size();
final double[] ret = new double[n];
for (int i = 0; i < n; ++i) {
ret[i] = array.getDouble(i);
}
return ret;
}
private static int[] intArrayFromReadableArray(ReadableArray array) {
final int n = array.size();
final int[] ret = new int[n];
for (int i = 0; i < n; ++i) {
ret[i] = array.getInt(i);
}
return ret;
}
  /**
   * Packs body-tracking inputs (a 224x224 image, ROI list, and tracking flags) through
   * body_tracking_spec.json, unpacks the result, and verifies output sizes plus that the ROI
   * values round-trip unchanged.
   */
  @Test
  public void bodyTrackingTest() throws Exception {
    final JavaOnlyMap params = new JavaOnlyMap();
    params.putInt("width", 224);
    params.putInt("height", 224);
    params.putDouble("scale", 1.0f);
    params.putInt("rois_n", 3);
    // Three ROIs as flat (x1, y1, x2, y2) quadruples.
    params.putArray("rois", JavaOnlyArray.of(0, 0, 20, 20, 10, 10, 50, 50, 30, 30, 60, 60));
    params.putDouble("should_run_track", 0.0f);
    Bitmap bitmap = Bitmap.createBitmap(224, 224, Bitmap.Config.ARGB_8888);
    JSContext.NativeJSRef ref = JSContext.wrapObject(new Image(bitmap));
    params.putMap("image1", ref.getJSRef());
    final String spec = readAsset("body_tracking_spec.json");
    final IIValuePacker packer = new BaseIValuePacker(spec);
    final PackerContext packerContext = packer.newContext();
    final IValue result = packer.pack(params, packerContext);
    final ReadableMap map = packer.unpack(result, new JavaOnlyMap(), packerContext);
    final ReadableArray image1Array = map.getArray("image1");
    final ReadableArray imageInfoArray = map.getArray("image_info");
    final ReadableArray shouldRunTrackArray = map.getArray("should_run_track");
    final ReadableArray roisArray = map.getArray("previous_rois");
    // 3 channels x 224 x 224 pixels.
    Assert.assertTrue(image1Array.size() == 3 * 224 * 224);
    Assert.assertTrue(imageInfoArray.size() == 3);
    Assert.assertTrue(shouldRunTrackArray.size() == 1);
    Assert.assertTrue(shouldRunTrackArray.getDouble(0) == 0.0f);
    Assert.assertTrue(roisArray.size() == 12);
    double[] expectedRois =
        new double[] {0.f, 0.f, 20.f, 20.f, 10.f, 10.f, 50.f, 50.f, 30.f, 30.f, 60.f, 60.f};
    double[] rois = doubleArrayFromReadableArray(roisArray);
    for (int i = 0; i < 12; ++i) {
      Assert.assertEquals(expectedRois[i], rois[i], DOUBLE_EQUALS_DELTA);
    }
  }
  /**
   * Unpacks a synthetic detector output tuple (n, bboxes, scores, indices) through
   * body_tracking_spec2.json and verifies that every value round-trips within tolerance.
   */
  @Test
  public void bodyTrackingTest2() throws Exception {
    final long n = 3;
    // n boxes as (x1, y1, x2, y2) rows.
    final float[] bboxesData = {
      0.f, 0.f, 20.f, 20.f,
      10.f, 10.f, 30.f, 30.f,
      15.f, 15.f, 45.f, 45.f
    };
    final Tensor bboxes = Tensor.fromBlob(bboxesData, new long[] {n, 4});
    final float[] scoresData = {0.9f, 0.8f, 0.7f};
    final Tensor scores = Tensor.fromBlob(scoresData, new long[] {n});
    final long[] indicesData = {0, 1, 2};
    final Tensor indices = Tensor.fromBlob(indicesData, new long[] {n});
    final IValue ivalue =
        IValue.tupleFrom(
            IValue.from(n), IValue.from(bboxes), IValue.from(scores), IValue.from(indices));
    final String spec = readAsset("body_tracking_spec2.json");
    final IIValuePacker packer = new BaseIValuePacker(spec);
    final PackerContext packerContext = packer.newContext();
    final ReadableMap map = packer.unpack(ivalue, new JavaOnlyMap(), packerContext);
    final int unpack_n = map.getInt("n");
    final double[] unpack_bboxes = doubleArrayFromReadableArray(map.getArray("bboxes"));
    final double[] unpack_scores = doubleArrayFromReadableArray(map.getArray("scores"));
    final int[] unpack_indices = intArrayFromReadableArray(map.getArray("indices"));
    Assert.assertTrue(n == unpack_n);
    Assert.assertTrue(unpack_bboxes.length == 4 * n);
    Assert.assertTrue(unpack_scores.length == n);
    Assert.assertTrue(unpack_indices.length == n);
    for (int i = 0; i < 4 * n; ++i) {
      Assert.assertEquals(unpack_bboxes[i], bboxesData[i], DOUBLE_EQUALS_DELTA);
    }
    for (int i = 0; i < n; ++i) {
      Assert.assertEquals(unpack_scores[i], scoresData[i], DOUBLE_EQUALS_DELTA);
    }
    for (int i = 0; i < n; ++i) {
      Assert.assertEquals(unpack_indices[i], indicesData[i], DOUBLE_EQUALS_DELTA);
    }
  }
  /**
   * Exercises the BERT question-answering spec end to end: tokenizes a QA input string and checks
   * the zero-padded token ids, then feeds synthetic start/end logits (answer spanning the whole
   * sequence) back through the unpacker and checks the decoded answer text.
   */
  @Test
  public void bertTest() throws Exception {
    final String spec = readAsset("bert_qa_spec.json");
    final IIValuePacker packer = new BaseIValuePacker(spec);
    final JavaOnlyMap params = new JavaOnlyMap();
    final String testText = "[CLS] Who was Jim Henson ? [SEP] Jim Henson was a puppeteer [SEP]";
    params.putString("string", testText);
    params.putInt("model_input_length", 50);
    final PackerContext packerContext = packer.newContext();
    IValue ivalue = packer.pack(params, packerContext);
    long[] data = ivalue.toTensor().getDataAsLongArray();
    // Expected WordPiece ids for testText; remainder of the 50-slot input is zero padding.
    final long[] tokenIds =
        new long[] {
          101, 2040, 2001, 3958, 27227, 1029, 102, 3958, 27227, 2001, 1037, 13997, 11510, 102
        };
    final long[] expected = new long[50];
    Arrays.fill(expected, 0);
    for (int i = 0; i < tokenIds.length; ++i) {
      expected[i] = tokenIds[i];
    }
    Assert.assertTrue(Arrays.equals(expected, data));
    final int n = tokenIds.length;
    // Peak start logit at position 0 and end logit at the last token, so the decoded answer
    // span covers the entire tokenized input.
    final float[] startLogits = new float[n];
    Arrays.fill(startLogits, 0.f);
    startLogits[0] = 1.0f;
    final float[] endLogits = new float[n];
    Arrays.fill(endLogits, 0.f);
    endLogits[n - 1] = 1.0f;
    final Map<String, IValue> map = new HashMap<>();
    map.put("start_logits", IValue.from(Tensor.fromBlob(startLogits, new long[] {1, n})));
    map.put("end_logits", IValue.from(Tensor.fromBlob(endLogits, new long[] {1, n})));
    packerContext.store("token_ids", tokenIds);
    final ReadableMap output =
        packer.unpack(IValue.dictStringKeyFrom(map), new JavaOnlyMap(), packerContext);
    final String answer = output.getString("bert_answer");
    Assert.assertEquals(
        "[CLS] who was jim henson ? [SEP] jim henson was a puppeteer [SEP]", answer);
  }
  /** Packs a sentence through the GPT-2 spec and checks the resulting BPE token-id tensor. */
  @Test
  public void gpt2PackTest() throws Exception {
    final String spec = readAsset("gpt2_spec.json");
    final IIValuePacker packer = new BaseIValuePacker(spec);
    final JavaOnlyMap params = new JavaOnlyMap();
    params.putString("string", "Umka is a white fluffy pillow.");
    PackerContext packerContext = packer.newContext();
    IValue ivalue = packer.pack(params, packerContext);
    long[] data = ivalue.toTensor().getDataAsLongArray();
    final long[] expected = new long[] {37280, 4914, 318, 257, 2330, 39145, 28774, 13};
    Assert.assertTrue(Arrays.equals(expected, data));
  }
  /** Inverse of {@code gpt2PackTest}: decodes a GPT-2 token-id tensor back to the sentence. */
  @Test
  public void gpt2UnpackTest() throws Exception {
    final String spec = readAsset("gpt2_spec.json");
    final IIValuePacker packer = new BaseIValuePacker(spec);
    final long[] data = new long[] {37280, 4914, 318, 257, 2330, 39145, 28774, 13};
    PackerContext packerContext = packer.newContext();
    final ReadableMap map =
        packer.unpack(
            IValue.from(Tensor.fromBlob(data, new long[] {1, 8})),
            new JavaOnlyMap(),
            packerContext);
    Assert.assertEquals("Umka is a white fluffy pillow.", map.getString("text"));
  }
  /**
   * Example of extending {@link BaseIValuePacker} with custom pack/unpack handlers backed by a
   * GPT-2 tokenizer that is configured from the spec's "vocabulary_gpt2_custom" section.
   */
  public static class TestCustomPacker extends BaseIValuePacker {
    public TestCustomPacker(@Nullable String specSrc) throws JSONException {
      super(specSrc);
      // Register additional packers/unpackers
      // Packer: string -> 1 x len tensor of GPT-2 token ids.
      PackerRegistry.register(
          "tensor_from_string_custom",
          (jobject, params, packerContext) -> {
            final long[] tokenIds =
                getGPT2Tokenizer(packerContext).tokenize(jobject.getString("string"));
            return IValue.from(Tensor.fromBlob(tokenIds, new long[] {1, tokenIds.length}));
          });
      // Unpacker: token-id tensor -> decoded string stored under the spec-provided key.
      PackerRegistry.register(
          "tensor_to_string_custom",
          (ivalue, jobject, map, packerContext) -> {
            final long[] tokenIds = ivalue.toTensor().getDataAsLongArray();
            map.putString(
                jobject.getString("key"), getGPT2Tokenizer(packerContext).decode(tokenIds));
          });
    }
    /** Lazily builds the GPT-2 tokenizer and caches it in the packer context for reuse. */
    private GPT2Tokenizer getGPT2Tokenizer(PackerContext packerContext)
        throws JSONException, UnsupportedEncodingException {
      GPT2Tokenizer gpt2Tokenizer = (GPT2Tokenizer) packerContext.get("gpt2_tokenizer_custom");
      if (gpt2Tokenizer == null) {
        gpt2Tokenizer =
            new GPT2Tokenizer(packerContext.specSrcJson.getJSONObject("vocabulary_gpt2_custom"));
        packerContext.store("gpt2_tokenizer_custom", gpt2Tokenizer);
      }
      return gpt2Tokenizer;
    }
  }
// Drops the alpha byte and renders the remaining RGB bits as "#RRGGBB".
private static String colorHexString(@ColorInt int color) {
  final int rgb = color & 0x00FFFFFF;
  return String.format("#%06X", rgb);
}
@Test
public void testAudio() throws Exception {
  // Packs a 3-second, 16 kHz all-zero audio buffer and verifies the packer
  // converts each signed 16-bit PCM sample to a float scaled by Short.MAX_VALUE.
  final String spec = readAsset("speech_recognition.json");
  final IIValuePacker packer = new BaseIValuePacker(spec);
  final JavaOnlyMap params = new JavaOnlyMap();
  final int sec = 3;
  // 16000 samples per second of signed 16-bit PCM (silence).
  final short[] sdata = new short[16000 * sec];
  JSContext.NativeJSRef ref = JSContext.wrapObject(new Audio(sdata));
  params.putMap("audio", ref.getJSRef());
  params.putInt("sample_rate", 16000);
  PackerContext packerContext = packer.newContext();
  IValue packRes = packer.pack(params, packerContext);
  final float[] fdata = packRes.toTensor().getDataAsFloatArray();
  // Sample count must be preserved by the conversion.
  Assert.assertEquals(sdata.length, fdata.length);
  // Each float sample should equal the short sample normalized into [-1, 1].
  for (int i = 0; i < sdata.length; ++i) {
    Assert.assertTrue(
        Math.abs(sdata[i] / (float) Short.MAX_VALUE - fdata[i]) < DOUBLE_EQUALS_DELTA);
  }
}
@Test
public void mnist() throws Exception {
  // Draws a thick vertical yellow bar on a blue background (roughly a "1"),
  // runs it through the mnist spec's pack step, and verifies the output is a
  // normalized 28x28 grayscale tensor containing exactly two distinct values.
  final String spec = readAsset("mnist.json");
  final IIValuePacker packer = new BaseIValuePacker(spec);
  final JavaOnlyMap params = new JavaOnlyMap();
  Bitmap bitmap = Bitmap.createBitmap(224, 224, Bitmap.Config.ARGB_8888);
  Canvas canvas = new Canvas(bitmap);
  Paint paintBg = new Paint();
  final @ColorInt int colorBg = Color.BLUE;
  final @ColorInt int colorFg = Color.YELLOW;
  paintBg.setColor(colorBg);
  // Background rect is slightly larger than the 224x224 bitmap; excess clips.
  canvas.drawRect(new Rect(0, 0, 228, 228), paintBg);
  Paint paintFg = new Paint();
  paintFg.setColor(colorFg);
  paintFg.setStrokeWidth(40.f);
  // Vertical stroke centered horizontally with margins top and bottom.
  canvas.drawLine(112, 40, 112, 228 - 40, paintFg);
  JSContext.NativeJSRef ref = JSContext.wrapObject(new Image(bitmap));
  params.putMap("image", ref.getJSRef());
  // The spec maps background/foreground colors to the two grayscale classes.
  params.putString("colorBackground", colorHexString(colorBg));
  params.putString("colorForeground", colorHexString(colorFg));
  params.putInt("crop_width", 28);
  params.putInt("crop_height", 28);
  params.putInt("scale_width", 28);
  params.putInt("scale_height", 28);
  PackerContext packerContext = packer.newContext();
  IValue packRes = packer.pack(params, packerContext);
  final float[] data = packRes.toTensor().getDataAsFloatArray();
  Assert.assertEquals(28 * 28, data.length);
  // A two-color input must produce exactly two distinct normalized values.
  final Set<Float> set = new HashSet<>();
  for (float f : data) {
    set.add(f);
  }
  Assert.assertEquals(2, set.size());
  // Undo MNIST standardization (mean 0.1307, std 0.3081): the recovered pixel
  // values must be (close to) 0 or 1, i.e. pure background or pure foreground.
  for (float f : set) {
    float v = 0.3081f * f + 0.1307f;
    Assert.assertTrue(
        Math.abs(v - 1.f) < DOUBLE_EQUALS_DELTA || Math.abs(v) < DOUBLE_EQUALS_DELTA);
  }
}
}
| 4,656 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/androidTest/java/org/pytorch | Create_ds/playtorch/react-native-pytorch-core/android/src/androidTest/java/org/pytorch/suite/InstrumentedTestSuite.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.suite;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.pytorch.IValuePackerInstrumentedTest;
/** JUnit suite aggregating the on-device instrumented tests for this package. */
@RunWith(Suite.class)
@Suite.SuiteClasses({IValuePackerInstrumentedTest.class})
public class InstrumentedTestSuite {}
| 4,657 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/PyTorchCorePackage.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core;
import com.facebook.react.ReactPackage;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.ViewManager;
import java.util.Arrays;
import java.util.List;
import org.jetbrains.annotations.NotNull;
import org.pytorch.rn.core.audio.AudioModule;
import org.pytorch.rn.core.camera.CameraViewManager;
import org.pytorch.rn.core.canvas.CanvasRenderingContext2DModule;
import org.pytorch.rn.core.canvas.CanvasViewManager;
import org.pytorch.rn.core.canvas.ImageDataModule;
import org.pytorch.rn.core.image.ImageModule;
import org.pytorch.rn.core.jsi.PyTorchCoreJSIModule;
import org.pytorch.rn.core.ml.ModelLoaderModule;
/**
 * React Native package entry point: registers all PyTorch Core native modules
 * and the camera/canvas view managers with the bridge.
 */
public class PyTorchCorePackage implements ReactPackage {

  @Override
  public List<NativeModule> createNativeModules(@NotNull ReactApplicationContext reactContext) {
    final List<NativeModule> modules =
        Arrays.asList(
            new ModelLoaderModule(reactContext),
            new ImageModule(reactContext),
            new ImageDataModule(reactContext),
            new CanvasRenderingContext2DModule(reactContext),
            new AudioModule(reactContext),
            new PyTorchCoreJSIModule(reactContext));
    return modules;
  }

  @Override
  public List<ViewManager> createViewManagers(@NotNull ReactApplicationContext reactContext) {
    final ViewManager camera = new CameraViewManager(reactContext);
    final ViewManager canvas = new CanvasViewManager(reactContext);
    return Arrays.asList(camera, canvas);
  }
}
| 4,658 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/camera/CameraView.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.camera;
import android.Manifest;
import android.animation.ValueAnimator;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.graphics.drawable.GradientDrawable;
import android.graphics.drawable.LayerDrawable;
import android.util.Log;
import android.util.Size;
import android.view.MotionEvent;
import android.view.OrientationEventListener;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import androidx.annotation.NonNull;
import androidx.camera.core.Camera;
import androidx.camera.core.CameraInfoUnavailableException;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;
import androidx.camera.core.ImageProxy;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.constraintlayout.widget.ConstraintLayout;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.lifecycle.LifecycleOwner;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.pytorch.rn.core.R;
import org.pytorch.rn.core.image.IImage;
import org.pytorch.rn.core.image.Image;
import org.pytorch.rn.core.javascript.JSContext;
public class CameraView extends ConstraintLayout {
public static final String TAG = "PTLCameraView";
public static final String REACT_CLASS = "PyTorchCameraView";
private final String[] REQUIRED_PERMISSIONS = new String[] {Manifest.permission.CAMERA};
private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
private PreviewView mPreviewView;
private Button mCaptureButton;
private Button mFlipButton;
private ImageCapture mImageCapture;
private ValueAnimator pressAnimation, releaseAnimation;
private LayerDrawable mCaptureButtonLayerDrawable;
private GradientDrawable mCaptureButtonInnerCircle;
private CameraSelector mPreferredCameraSelector = CameraSelector.DEFAULT_BACK_CAMERA;
private final int DURATION = 100;
private final float SCALE_BUTTON_BY = 1.15f;
private boolean mIsDirty = false;
private boolean mIsDirtyForCameraRestart = false;
private boolean mHideCaptureButton = false;
private boolean mHideFlipButton = false;
private Size mTargetResolution = new Size(480, 640);
private Camera mCamera;
/** Blocking camera operations are performed using this executor */
private ExecutorService cameraExecutor;
private final ReactApplicationContext mReactContext;
/**
 * Creates the camera view, inflates its layout, wires up the capture/flip
 * buttons, and starts the preview if the CAMERA permission is already granted.
 */
public CameraView(ReactApplicationContext context) {
  super(context);
  mReactContext = context;
  init();
}

private void init() {
  // NOTE(review): getCurrentActivity() can be null while the hosting activity is
  // being (re)created — confirm this view is only constructed with a live activity.
  cameraProviderFuture = ProcessCameraProvider.getInstance(mReactContext.getCurrentActivity());

  // Initialize our background executor
  cameraExecutor = Executors.newSingleThreadExecutor();

  View mLayout = inflate(mReactContext, R.layout.activity_camera, this);
  mPreviewView = mLayout.findViewById(R.id.preview_view);
  mCaptureButton = mLayout.findViewById(R.id.capture_button);
  mFlipButton = mLayout.findViewById(R.id.flip_button);

  // Initialize drawables to change (inner circle stroke); mutate() ensures the
  // shared resource drawable is not altered for other views using it.
  mCaptureButtonLayerDrawable =
      (LayerDrawable)
          mReactContext
              .getResources()
              .getDrawable(R.drawable.camera_button, mReactContext.getTheme())
              .mutate();
  mCaptureButtonInnerCircle =
      (GradientDrawable)
          mCaptureButtonLayerDrawable.findDrawableByLayerId(R.id.camera_button_inner_circle);

  // Calculate pixel values of borders (18dp normal, 24dp pressed).
  float density = mReactContext.getResources().getDisplayMetrics().density;
  int cameraButtonInnerBorderNormal = (int) (density * 18);
  int cameraButtonInnerBorderPressed = (int) (density * 24);

  // Initialize Value Animators and Listeners
  // grow means grow border so the circle shrinks
  pressAnimation =
      ValueAnimator.ofInt(cameraButtonInnerBorderNormal, cameraButtonInnerBorderPressed)
          .setDuration(DURATION);
  releaseAnimation =
      ValueAnimator.ofInt(cameraButtonInnerBorderPressed, cameraButtonInnerBorderNormal)
          .setDuration(DURATION);
  // Shared listener: each animation frame applies the current stroke width to
  // the inner circle and reassigns the background to trigger a redraw.
  ValueAnimator.AnimatorUpdateListener animatorUpdateListener =
      new ValueAnimator.AnimatorUpdateListener() {
        @Override
        public void onAnimationUpdate(ValueAnimator updatedAnimation) {
          int animatedValue = (int) updatedAnimation.getAnimatedValue();
          mCaptureButtonInnerCircle.setStroke(animatedValue, Color.TRANSPARENT);
          mCaptureButton.setBackground(mCaptureButtonLayerDrawable);
        }
      };
  pressAnimation.addUpdateListener(animatorUpdateListener);
  releaseAnimation.addUpdateListener(animatorUpdateListener);

  // Press animation shrinks the inner circle and scales the button up; the photo
  // is taken on ACTION_DOWN (not on release).
  mCaptureButton.setOnTouchListener(
      (v, e) -> {
        switch (e.getAction()) {
          case MotionEvent.ACTION_DOWN:
            pressAnimation.start();
            mCaptureButton.animate().scaleX(SCALE_BUTTON_BY).setDuration(DURATION);
            mCaptureButton.animate().scaleY(SCALE_BUTTON_BY).setDuration(DURATION);
            takePicture();
            break;
          case MotionEvent.ACTION_UP:
            releaseAnimation.start();
            mCaptureButton.animate().scaleX(1f).setDuration(DURATION);
            mCaptureButton.animate().scaleY(1f).setDuration(DURATION);
        }
        return false;
      });

  mFlipButton.setOnTouchListener(
      (v, e) -> {
        if (e.getAction() == MotionEvent.ACTION_DOWN) {
          flipCamera();
        }
        return false;
      });

  // The PreviewView has a width/height of 0/0. This was reported as an issue in the CameraX
  // issue tracker and is supposedly a bug in how React Native calculates children dimension.
  // A manual remeasure of the layout fixes this.
  // Link to issue: https://issuetracker.google.com/issues/177245493#comment8
  mPreviewView.setOnHierarchyChangeListener(new CameraView.ReactNativeCameraPreviewRemeasure());

  if (allPermissionsGranted()) {
    startCamera(); // start camera if permission has been granted by user
  } else {
    int REQUEST_CODE_PERMISSIONS = 200;
    ActivityCompat.requestPermissions(
        mReactContext.getCurrentActivity(), REQUIRED_PERMISSIONS, REQUEST_CODE_PERMISSIONS);
  }
}
// Binds the camera use cases on the main thread once the process-wide
// ProcessCameraProvider future resolves.
private void startCamera() {
  cameraProviderFuture.addListener(
      () -> {
        try {
          ProcessCameraProvider cameraProvider = cameraProviderFuture.get();
          bindPreview(cameraProvider);
        } catch (ExecutionException | InterruptedException e) {
          // No errors need to be handled for this Future.
          // This should never be reached.
          // NOTE(review): InterruptedException is logged but the thread's
          // interrupt status is not restored.
          Log.e(TAG, e.getMessage());
        }
      },
      ContextCompat.getMainExecutor(mReactContext));
}
// Captures a single still frame and forwards it to JS as an "onCapture" event
// carrying a JSRef handle to the wrapped image. No-op until bindPreview() has
// initialized mImageCapture.
protected void takePicture() {
  if (mImageCapture != null) {
    mImageCapture.takePicture(
        ContextCompat.getMainExecutor(mReactContext),
        new ImageCapture.OnImageCapturedCallback() {
          @Override
          public void onCaptureSuccess(@NonNull ImageProxy imageProxy) {
            super.onCaptureSuccess(imageProxy);
            // Wrap the frame so JS owns its lifetime through the JSRef.
            IImage image = new Image(imageProxy, mReactContext.getApplicationContext());
            JSContext.NativeJSRef ref = JSContext.wrapObject(image);
            mReactContext
                .getJSModule(RCTEventEmitter.class)
                .receiveEvent(CameraView.this.getId(), "onCapture", ref.getJSRef());
          }

          @Override
          public void onError(@NonNull ImageCaptureException exception) {
            super.onError(exception);
            // Capture failures are logged only; no error event reaches JS.
            Log.e(TAG, exception.getLocalizedMessage(), exception);
          }
        });
  }
}
// Toggles between the front and back lens-facing singletons, then rebinds the
// camera use cases with the new selector.
protected void flipCamera() {
  final boolean facingBack = mPreferredCameraSelector == CameraSelector.DEFAULT_BACK_CAMERA;
  mPreferredCameraSelector =
      facingBack ? CameraSelector.DEFAULT_FRONT_CAMERA : CameraSelector.DEFAULT_BACK_CAMERA;
  startCamera();
}
// (Re)binds the preview, image-analysis, and image-capture use cases to the
// activity lifecycle using the preferred camera selector and target resolution.
void bindPreview(@NonNull ProcessCameraProvider cameraProvider) {
  // Unbind previous use cases. Without this, on unmounting and mounting CameraView again, the
  // app will crash.
  cameraProvider.unbindAll();

  Preview preview = new Preview.Builder().build();

  // Some devices do not have front and back camera, like the emulator on a laptop.
  CameraSelector cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA;
  try {
    if (cameraProvider.hasCamera(mPreferredCameraSelector)) {
      cameraSelector = mPreferredCameraSelector;
    }
  } catch (CameraInfoUnavailableException e) {
    // Deliberate best-effort: if camera info cannot be queried, fall back to
    // the default back camera rather than failing.
  }

  // Analysis frames are processed on the background executor; keep only the
  // latest frame so slow JS-side processing never backs up the camera pipeline.
  ImageAnalysis imageAnalysis =
      new ImageAnalysis.Builder()
          .setTargetResolution(mTargetResolution)
          .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
          .build();
  // Each frame is wrapped and handed to JS as an "onFrame" event.
  imageAnalysis.setAnalyzer(
      cameraExecutor,
      imageProxy -> {
        IImage image = new Image(imageProxy, mReactContext.getApplicationContext());
        JSContext.NativeJSRef ref = JSContext.wrapObject(image);
        mReactContext
            .getJSModule(RCTEventEmitter.class)
            .receiveEvent(CameraView.this.getId(), "onFrame", ref.getJSRef());
      });

  mImageCapture = new ImageCapture.Builder().setTargetResolution(mTargetResolution).build();

  // Keeps capture/analysis rotation in sync with the physical device orientation.
  // NOTE(review): this listener is enable()d but never disable()d, so it may
  // outlive the view — confirm whether it should be released on teardown.
  OrientationEventListener orientationEventListener =
      new OrientationEventListener(mReactContext.getApplicationContext()) {
        @Override
        public void onOrientationChanged(int orientation) {
          int rotation;

          // Monitors orientation values to determine the target rotation value
          if (orientation >= 45 && orientation < 135) {
            rotation = Surface.ROTATION_270;
          } else if (orientation >= 135 && orientation < 225) {
            rotation = Surface.ROTATION_180;
          } else if (orientation >= 225 && orientation < 315) {
            rotation = Surface.ROTATION_90;
          } else {
            rotation = Surface.ROTATION_0;
          }

          mImageCapture.setTargetRotation(rotation);
          imageAnalysis.setTargetRotation(rotation);
        }
      };
  orientationEventListener.enable();

  mCamera =
      cameraProvider.bindToLifecycle(
          (LifecycleOwner) mReactContext.getCurrentActivity(),
          cameraSelector,
          preview,
          imageAnalysis,
          mImageCapture);

  preview.setSurfaceProvider(
      ContextCompat.getMainExecutor(mReactContext), mPreviewView.getSurfaceProvider());
}
// True only if every entry in REQUIRED_PERMISSIONS has been granted by the user.
private boolean allPermissionsGranted() {
  boolean granted = true;
  for (String permission : REQUIRED_PERMISSIONS) {
    granted &=
        ContextCompat.checkSelfPermission(mReactContext, permission)
            == PackageManager.PERMISSION_GRANTED;
  }
  return granted;
}
// Prop setters invoked by CameraViewManager. Each records the new value and
// marks the view dirty; changes are applied in maybeUpdateView() after the
// React transaction completes.
public void setHideCaptureButton(boolean hideCaptureButton) {
  if (mHideCaptureButton != hideCaptureButton) {
    mHideCaptureButton = hideCaptureButton;
    mIsDirty = true;
  }
}

public void setHideFlipButton(boolean hideFlipButton) {
  if (mHideFlipButton != hideFlipButton) {
    mHideFlipButton = hideFlipButton;
    mIsDirty = true;
  }
}

// Selector and resolution changes additionally require a full camera rebind.
public void setCameraSelector(CameraSelector cameraSelector) {
  if (!mPreferredCameraSelector.equals(cameraSelector)) {
    mPreferredCameraSelector = cameraSelector;
    mIsDirty = true;
    mIsDirtyForCameraRestart = true;
  }
}

public void setTargetResolution(Size size) {
  if (!mTargetResolution.equals(size)) {
    mTargetResolution = size;
    mIsDirty = true;
    mIsDirtyForCameraRestart = true;
  }
}
// Applies pending prop changes after a React transaction: toggles button
// visibility on the view thread and restarts the camera if the selector or
// target resolution changed.
public void maybeUpdateView() {
  if (!mIsDirty) {
    return;
  }
  mCaptureButton.post(
      () -> {
        mCaptureButton.setVisibility(mHideCaptureButton ? View.INVISIBLE : View.VISIBLE);
      });
  mFlipButton.post(
      () -> {
        mFlipButton.setVisibility(mHideFlipButton ? View.INVISIBLE : View.VISIBLE);
      });
  if (mIsDirtyForCameraRestart) {
    startCamera();
  }
  // Reset the dirty flags once the update is applied. Previously they were
  // never cleared, so a single selector/resolution change caused a camera
  // rebind on EVERY subsequent prop transaction, however unrelated.
  mIsDirty = false;
  mIsDirtyForCameraRestart = false;
}
// Workaround for the PreviewView measuring 0x0 under React Native layout (see
// the comment in init() and https://issuetracker.google.com/issues/177245493):
// force a remeasure + relayout of the parent whenever a child is attached.
private static class ReactNativeCameraPreviewRemeasure
    implements ViewGroup.OnHierarchyChangeListener {

  @Override
  public void onChildViewAdded(View parent, View child) {
    // post() defers until after the current layout pass so measured sizes exist.
    parent.post(
        () -> {
          parent.measure(
              View.MeasureSpec.makeMeasureSpec(
                  parent.getMeasuredWidth(), View.MeasureSpec.EXACTLY),
              View.MeasureSpec.makeMeasureSpec(
                  parent.getMeasuredHeight(), View.MeasureSpec.EXACTLY));
          parent.layout(0, 0, parent.getMeasuredWidth(), parent.getMeasuredHeight());
          Log.d(
              TAG,
              String.format(
                  "Measured width=%s, height=%s",
                  parent.getMeasuredWidth(), parent.getMeasuredHeight()));
        });
  }

  @Override
  public void onChildViewRemoved(View parent, View child) {
    // empty
  }
}
}
| 4,659 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/camera/CameraViewManager.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.camera;
import android.util.Size;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.camera.core.CameraSelector;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.SimpleViewManager;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.annotations.ReactProp;
import java.util.Map;
public class CameraViewManager extends SimpleViewManager<CameraView> {
public static final String REACT_CLASS = "PyTorchCoreCameraView";
public final int COMMAND_TAKE_PICTURE = 1;
public final int COMMAND_FLIP = 2;
private final ReactApplicationContext mReactContext;
public CameraViewManager(ReactApplicationContext reactContext) {
  this.mReactContext = reactContext;
}

/** Name under which this view manager is registered on the JS side. */
@NonNull
@Override
public String getName() {
  return REACT_CLASS;
}

@NonNull
@Override
protected CameraView createViewInstance(@NonNull ThemedReactContext reactContext) {
  // The view is constructed with the application-level React context held by
  // this manager rather than the themed context passed in.
  return new CameraView(mReactContext);
}

/** Maps JS-visible command names to the numeric ids handled by receiveCommand. */
@Nullable
@Override
public Map<String, Integer> getCommandsMap() {
  return MapBuilder.of(
      "takePicture", COMMAND_TAKE_PICTURE,
      "flip", COMMAND_FLIP);
}
// Legacy command dispatch: older React Native versions deliver UI commands by
// the numeric id declared in getCommandsMap(). Unknown ids are ignored.
@Override
public void receiveCommand(
    @NonNull CameraView cameraView, int commandId, @Nullable ReadableArray args) {
  switch (commandId) {
    case COMMAND_TAKE_PICTURE:
      cameraView.takePicture();
      break;
    case COMMAND_FLIP:
      cameraView.flipCamera();
      break;
  }
}
// String-based command dispatch. Newer React Native versions route UI commands
// through this overload using the command NAME ("takePicture"/"flip"), while
// older versions forward the numeric id as a string. Handle both; the previous
// implementation blindly called Integer.parseInt(commandId), which throws
// NumberFormatException for name-based dispatch.
@Override
public void receiveCommand(
    @NonNull CameraView cameraView, String commandId, @Nullable ReadableArray args) {
  switch (commandId) {
    case "takePicture":
      cameraView.takePicture();
      return;
    case "flip":
      cameraView.flipCamera();
      return;
    default:
      break;
  }
  final int commandIdInt;
  try {
    commandIdInt = Integer.parseInt(commandId);
  } catch (NumberFormatException e) {
    // Unknown command; ignore rather than crash the bridge.
    return;
  }
  switch (commandIdInt) {
    case COMMAND_TAKE_PICTURE:
      cameraView.takePicture();
      break;
    case COMMAND_FLIP:
      cameraView.flipCamera();
      break;
  }
}
// Declares the bubbling events emitted by CameraView ("onFrame" for analysis
// frames, "onCapture" for stills) so React maps them to same-named JS props.
@Nullable
@Override
public Map<String, Object> getExportedCustomBubblingEventTypeConstants() {
  final MapBuilder.Builder<String, Object> builder = MapBuilder.builder();
  return builder
      .put(
          "onFrame",
          MapBuilder.of("phasedRegistrationNames", MapBuilder.of("bubbled", "onFrame")))
      .put(
          "onCapture",
          MapBuilder.of("phasedRegistrationNames", MapBuilder.of("bubbled", "onCapture")))
      .build();
}
// Prop handlers: each forwards the JS prop value to the view, which defers the
// actual update until onAfterUpdateTransaction -> maybeUpdateView().
@ReactProp(name = "hideCaptureButton")
public void setCaptureButtonVisibility(CameraView view, boolean hideCaptureButton) {
  view.setHideCaptureButton(hideCaptureButton);
}

@ReactProp(name = "hideFlipButton")
public void setFlipButtonVisibility(CameraView view, boolean hideFlipButton) {
  view.setHideFlipButton(hideFlipButton);
}

// Expects a map of the shape {width: number, height: number}.
@ReactProp(name = "targetResolution")
public void setTargetResolution(CameraView view, @Nullable ReadableMap targetResolution) {
  if (targetResolution != null) {
    int width = targetResolution.getInt("width");
    int height = targetResolution.getInt("height");
    view.setTargetResolution(new Size(width, height));
  }
}

// Accepts "back" or "front"; any other value is silently ignored.
@ReactProp(name = "facing")
public void setFacing(CameraView view, String facing) {
  if (facing.equals("back")) {
    view.setCameraSelector(CameraSelector.DEFAULT_BACK_CAMERA);
  } else if (facing.equals("front")) {
    view.setCameraSelector(CameraSelector.DEFAULT_FRONT_CAMERA);
  }
}
// Called once per React transaction after all prop setters have run; lets the
// view apply its accumulated (dirty-flagged) changes in one pass.
@Override
protected void onAfterUpdateTransaction(CameraView view) {
  super.onAfterUpdateTransaction(view);
  view.maybeUpdateView();
}
}
| 4,660 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/jsi/PyTorchCoreJSIModule.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.jsi;
import android.util.Log;
import androidx.annotation.NonNull;
import com.facebook.react.bridge.JavaScriptContextHolder;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.RuntimeExecutor;
import com.facebook.react.module.annotations.ReactModule;
import com.facebook.react.turbomodule.core.CallInvokerHolderImpl;
import com.facebook.soloader.SoLoader;
/**
 * Native module that installs the torchlive JSI bindings into the React Native
 * JavaScript runtime. JS calls the blocking {@code install()} method once at
 * startup.
 */
@ReactModule(name = PyTorchCoreJSIModule.NAME)
public class PyTorchCoreJSIModule extends ReactContextBaseJavaModule {
  public static final String TAG = "PTLJSIModule";
  public static final String NAME = "PyTorchCoreJSI";

  static {
    try {
      SoLoader.loadLibrary("torchlive");
    } catch (UnsatisfiedLinkError | Exception e) {
      // Native library load failures surface as UnsatisfiedLinkError — an Error,
      // not an Exception — so the previous catch (Exception) never caught the
      // common failure mode. Also avoid Log.e(TAG, e.getMessage()): getMessage()
      // may be null, which Log.e rejects.
      Log.e(TAG, "Failed to load torchlive native library: " + e);
    }
  }

  public PyTorchCoreJSIModule(ReactApplicationContext reactContext) {
    super(reactContext);
  }

  @Override
  @NonNull
  public String getName() {
    return NAME;
  }

  /**
   * Synchronously installs the JSI bindings into the current JS runtime.
   *
   * @return true so the JS caller can block until installation completes
   */
  @ReactMethod(isBlockingSynchronousMethod = true)
  public boolean install() {
    ReactApplicationContext reactContext = getReactApplicationContext();
    JavaScriptContextHolder contextHolder = reactContext.getJavaScriptContextHolder();
    installLib(contextHolder);
    return true;
  }

  private native void nativeInstall(
      long jsi, RuntimeExecutor runtimeExecutor, CallInvokerHolderImpl jsCallInvokerHolder);

  /** Installs the native bindings if a JSI runtime pointer is available. */
  public void installLib(JavaScriptContextHolder reactContext) {
    if (reactContext.get() != 0) {
      RuntimeExecutor runtimeExecutor =
          getReactApplicationContext().getCatalystInstance().getRuntimeExecutor();
      CallInvokerHolderImpl jsCallInvokerHolder =
          (CallInvokerHolderImpl)
              getReactApplicationContext().getCatalystInstance().getJSCallInvokerHolder();
      this.nativeInstall(reactContext.get(), runtimeExecutor, jsCallInvokerHolder);
    } else {
      // A zero context pointer means no JSI runtime (e.g. remote JS debugging).
      Log.e(TAG, "JSI Runtime is not available in debug mode");
    }
  }
}
| 4,661 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/jsi/PyTorchCoreJSIModulePackage.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.jsi;
import com.facebook.react.bridge.JSIModulePackage;
import com.facebook.react.bridge.JSIModuleSpec;
import com.facebook.react.bridge.JavaScriptContextHolder;
import com.facebook.react.bridge.ReactApplicationContext;
import java.util.Collections;
import java.util.List;
// Installs the torchlive JSI bindings as a side effect of JSI module
// resolution; contributes no actual JSI module specs.
public class PyTorchCoreJSIModulePackage implements JSIModulePackage {
  @Override
  public List<JSIModuleSpec> getJSIModules(
      ReactApplicationContext reactApplicationContext, JavaScriptContextHolder jsContext) {
    // NOTE(review): getNativeModule() returns null if PyTorchCoreJSIModule is
    // not registered — confirm this package is only used together with
    // PyTorchCorePackage, otherwise this line NPEs.
    reactApplicationContext.getNativeModule(PyTorchCoreJSIModule.class).installLib(jsContext);
    return Collections.emptyList();
  }
}
| 4,662 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/utils/FileUtils.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.utils;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
/** File/stream helpers used for caching downloaded models and assets. */
public class FileUtils {

  /**
   * Downloads the content behind {@code uriString} into {@code destFile},
   * creating parent directories as needed. Failures are deliberately swallowed
   * so callers can fall back to a previously cached copy of the file.
   */
  public static void downloadUriToFile(String uriString, File destFile) {
    // Always fetch from the uri so the latest version is used; on failure the
    // caller is expected to load the cached version (if it exists).
    // try-with-resources guarantees the stream is closed even when the copy
    // fails part-way (the previous version leaked it on exception).
    try (InputStream inputStream = new URL(uriString).openStream()) {
      File parentFile = destFile.getParentFile();
      if (parentFile != null && !parentFile.exists()) {
        parentFile.mkdirs();
      }
      if (!destFile.exists()) {
        destFile.createNewFile();
      }
      saveStreamToFile(inputStream, destFile);
    } catch (IOException e) {
      // Best effort by design: ignore and let the caller use the cached file.
    }
  }

  /**
   * Copies the entire {@code inputStream} to {@code destFile}. The input stream
   * is closed when this method returns.
   *
   * @throws IOException if reading or writing fails. (The previous
   *     implementation caught and swallowed mid-copy IOExceptions, leaving a
   *     silently truncated file despite declaring {@code throws IOException}.)
   */
  public static void saveStreamToFile(InputStream inputStream, File destFile) throws IOException {
    try (BufferedInputStream in = new BufferedInputStream(inputStream);
        FileOutputStream fileOutputStream = new FileOutputStream(destFile)) {
      byte[] dataBuffer = new byte[8 * 1024];
      int bytesRead;
      while ((bytesRead = in.read(dataBuffer)) != -1) {
        fileOutputStream.write(dataBuffer, 0, bytesRead);
      }
    }
  }

  /**
   * Reads {@code file} line by line and returns its content with a trailing
   * {@code \n} appended to every line (including the last). Returns whatever
   * was read so far if an I/O error occurs.
   */
  public static String readFileToString(File file) {
    StringBuilder contentBuilder = new StringBuilder();
    // NOTE(review): FileReader uses the platform default charset (UTF-8 on
    // Android); confirm no callers rely on another encoding.
    try (BufferedReader br = new BufferedReader(new FileReader(file.getPath()))) {
      String sCurrentLine;
      while ((sCurrentLine = br.readLine()) != null) {
        contentBuilder.append(sCurrentLine).append("\n");
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
    return contentBuilder.toString();
  }
}
| 4,663 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/utils/ModelUtils.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.utils;
import android.net.Uri;
import android.util.Log;
import com.facebook.react.bridge.ReactContext;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
/** Resolves model URIs (raw resource, local file, or remote URL) to local files. */
public class ModelUtils {
  public static final String TAG = "PTLModelUtils";

  /**
   * Resolves {@code modelUri} to a local file. Scheme-less URIs are first
   * looked up as bundled raw resources, then as plain file-system paths;
   * "file://" URIs map directly to the file system; any other scheme is
   * fetched over the network into the cache directory (falling back to the
   * previously cached copy if the fetch fails).
   *
   * @throws IOException if copying a bundled raw resource to the cache fails
   */
  public static File downloadModel(final ReactContext reactContext, final String modelUri)
      throws IOException {
    Log.d(TAG, "Load model: " + modelUri);
    Uri uri = Uri.parse(modelUri);
    File targetFile;
    if (uri.getScheme() == null) {
      // A uri with no scheme (i.e., `null`) is likely to be a resource or local file. Release mode
      // builds bundle the model file in the APK as a raw resource.
      int resourceId =
          reactContext.getResources().getIdentifier(modelUri, "raw", reactContext.getPackageName());
      if (resourceId != 0) {
        targetFile = new File(rawResourceFilePath(reactContext, modelUri, resourceId));
      } else {
        // Fall back to the local file system
        targetFile = new File(uri.getPath());
      }
    } else if ("file".equals(uri.getScheme())) {
      // Load model from local file system if the scheme is file
      targetFile = new File(uri.getPath());
    } else {
      // Get file path to cache model or load model from cache if loading from URI fails
      targetFile = new File(reactContext.getCacheDir(), uri.getPath());
      // Always try to load model from uri to make sure it's always the latest version. Only if
      // fetching the model from the uri fails, it will load the cached version (if exists).
      FileUtils.downloadUriToFile(modelUri, targetFile);
    }
    Log.d(TAG, "Absolute local model path: " + targetFile.getAbsolutePath());
    return targetFile;
  }

  /**
   * Copy specified raw resource to the cache directory and return its absolute path.
   *
   * <p>This is a workaround because org.pytorch.LiteModuleLoader as of 1.10.0 does not have an API
   * to load a model from an asset with extra_files, although the API exists in C++.
   *
   * @return absolute file path
   */
  private static String rawResourceFilePath(
      final ReactContext reactContext, final String resourceName, final int resourceId)
      throws IOException {
    File file = new File(reactContext.getCacheDir(), resourceName);
    // Stream the raw resource into the cache file; try-with-resources closes
    // both streams even if the copy fails.
    try (InputStream is = reactContext.getResources().openRawResource(resourceId)) {
      try (OutputStream os = new FileOutputStream(file)) {
        byte[] buffer = new byte[4 * 1024];
        int read;
        while ((read = is.read(buffer)) != -1) {
          os.write(buffer, 0, read);
        }
        os.flush();
      }
    }
    return file.getAbsolutePath();
  }
}
| 4,664 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/image/IImage.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.image;
import android.graphics.Bitmap;
import android.media.Image;
import androidx.annotation.Nullable;
/**
 * Common abstraction over the image sources used by the core package (Bitmap,
 * CameraX ImageProxy, canvas ImageData). Implementations may own native
 * resources and must be released via {@link #close()}.
 */
public interface IImage extends AutoCloseable {
  // Display pixel density associated with this image.
  float getPixelDensity();

  // NOTE(review): presumably getWidth()/getHeight() are density-adjusted sizes
  // while getNaturalWidth()/getNaturalHeight() are raw pixel sizes — confirm
  // against AbstractImage.
  int getWidth();

  int getHeight();

  float getNaturalWidth();

  float getNaturalHeight();

  // Returns an image scaled by the given factors along x and y.
  IImage scale(float sx, float sy);

  Bitmap getBitmap();

  // Underlying media Image when this instance wraps a camera frame; null otherwise.
  @Nullable
  Image getImage();

  // Rotation in degrees to apply so the image displays upright.
  int getImageRotationDegrees();
}
| 4,665 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/image/Image.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.image;
import android.content.Context;
import android.graphics.Bitmap;
import androidx.annotation.Nullable;
import androidx.camera.core.ImageProxy;
import org.pytorch.rn.core.canvas.ImageData;
/**
 * Facade implementation of {@link IImage} that selects a concrete backing
 * implementation (bitmap, camera frame, or canvas image data) based on the
 * constructor used and delegates every call to it.
 */
public class Image implements IImage {
  private final IImage mImage;

  /** Wraps an in-memory bitmap. */
  public Image(Bitmap bitmap) {
    mImage = new BitmapImage(bitmap);
  }

  /** Wraps a CameraX camera frame. */
  public Image(ImageProxy imageProxy, Context context) {
    mImage = new ImageProxyImage(imageProxy, context);
  }

  /** Wraps canvas image data at the given display pixel density. */
  public Image(ImageData imageData, float pixelDensity) {
    mImage = new ImageDataImage(imageData, pixelDensity);
  }

  // All methods below simply delegate to the selected implementation.

  @Override
  public float getPixelDensity() {
    return mImage.getPixelDensity();
  }

  @Override
  public int getWidth() {
    return mImage.getWidth();
  }

  @Override
  public int getHeight() {
    return mImage.getHeight();
  }

  @Override
  public float getNaturalWidth() {
    return mImage.getNaturalWidth();
  }

  @Override
  public float getNaturalHeight() {
    return mImage.getNaturalHeight();
  }

  @Override
  public IImage scale(float sx, float sy) {
    return mImage.scale(sx, sy);
  }

  @Override
  public Bitmap getBitmap() {
    return mImage.getBitmap();
  }

  @Override
  public @Nullable android.media.Image getImage() {
    return mImage.getImage();
  }

  @Override
  public int getImageRotationDegrees() {
    return mImage.getImageRotationDegrees();
  }

  @Override
  public void close() throws Exception {
    mImage.close();
  }
}
| 4,666 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/image/BitmapImage.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.image;
import android.graphics.Bitmap;
import android.media.Image;
import androidx.annotation.Nullable;
/**
 * {@link IImage} backed directly by an in-memory {@link Bitmap}. The bitmap
 * carries no rotation metadata and uses the default pixel density of 1, so
 * natural and display dimensions coincide.
 */
public class BitmapImage extends AbstractImage {

  // The bitmap this image wraps; recycled on close().
  private final Bitmap mSourceBitmap;

  public BitmapImage(Bitmap bitmap) {
    mSourceBitmap = bitmap;
  }

  @Override
  public float getNaturalWidth() {
    return mSourceBitmap.getWidth();
  }

  @Override
  public float getNaturalHeight() {
    return mSourceBitmap.getHeight();
  }

  @Override
  public Bitmap getBitmap() {
    return mSourceBitmap;
  }

  @Override
  public @Nullable Image getImage() {
    // Not backed by a media Image.
    return null;
  }

  @Override
  public int getImageRotationDegrees() {
    // Plain bitmaps are treated as unrotated.
    return 0;
  }

  @Override
  public void close() throws Exception {
    mSourceBitmap.recycle();
  }
}
| 4,667 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/image/ImageDataImage.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.image;
import android.graphics.Bitmap;
import android.media.Image;
import androidx.annotation.Nullable;
import org.pytorch.rn.core.canvas.ImageData;
/**
 * {@link IImage} backed by canvas {@link ImageData} at a given pixel density.
 * Natural dimensions are the raw bitmap dimensions; display dimensions are
 * derived by dividing by the pixel density (see {@link AbstractImage}).
 */
public class ImageDataImage extends AbstractImage {

  // Canvas pixel buffer this image wraps; its bitmap is recycled on close().
  private final ImageData mData;
  // Density used to convert natural (pixel) size to display size.
  private final float mDensity;

  public ImageDataImage(ImageData imageData, float pixelDensity) {
    mData = imageData;
    mDensity = pixelDensity;
  }

  @Override
  public float getPixelDensity() {
    return mDensity;
  }

  @Override
  public float getNaturalWidth() {
    return mData.getWidth();
  }

  @Override
  public float getNaturalHeight() {
    return mData.getHeight();
  }

  @Override
  public Bitmap getBitmap() {
    return mData.getBitmap();
  }

  @Override
  public @Nullable Image getImage() {
    // Not backed by a media Image.
    return null;
  }

  @Override
  public int getImageRotationDegrees() {
    // Canvas image data carries no rotation metadata.
    return 0;
  }

  @Override
  public void close() throws Exception {
    mData.getBitmap().recycle();
  }
}
| 4,668 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/image/ImageProxyImage.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.image;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.media.Image;
import androidx.annotation.Nullable;
import androidx.annotation.experimental.UseExperimental;
import androidx.camera.core.ExperimentalGetImage;
import androidx.camera.core.ImageProxy;
/**
 * {@link IImage} backed by a CameraX {@link ImageProxy}. The camera frame is
 * eagerly converted to a rotation-corrected {@link Bitmap} in the
 * constructor; both the bitmap and the underlying proxy are released in
 * {@link #close()}.
 */
public class ImageProxyImage extends AbstractImage {

  private final ImageProxy mImageProxy;
  private final Context mContext;
  private Bitmap mBitmap;

  public ImageProxyImage(ImageProxy imageProxy, Context context) {
    mImageProxy = imageProxy;
    mContext = context;
    // Eager bitmap conversion. Calls the private helper rather than the
    // overridable getBitmap() to avoid invoking an overridable method from
    // the constructor.
    mBitmap = convertToBitmap();
  }

  @Override
  public float getNaturalWidth() {
    return mImageProxy.getWidth();
  }

  @Override
  public float getNaturalHeight() {
    return mImageProxy.getHeight();
  }

  @ExperimentalGetImage
  @Override
  public @Nullable Image getImage() {
    return mImageProxy.getImage();
  }

  @Override
  public int getImageRotationDegrees() {
    return mImageProxy.getImageInfo().getRotationDegrees();
  }

  @Override
  public Bitmap getBitmap() {
    // Cached by the constructor; re-converted lazily only if it was never set.
    if (mBitmap == null) {
      mBitmap = convertToBitmap();
    }
    return mBitmap;
  }

  /**
   * Converts the camera frame into a bitmap, rotating it to compensate for
   * the device orientation reported by the proxy.
   *
   * @throws ImageException if the proxy no longer holds an image. Previously
   *     this was only an {@code assert}, which is disabled at runtime on
   *     Android and would have surfaced as an opaque NullPointerException.
   */
  @UseExperimental(markerClass = androidx.camera.core.ExperimentalGetImage.class)
  private Bitmap convertToBitmap() {
    android.media.Image image = mImageProxy.getImage();
    if (image == null) {
      throw new ImageException("image proxy does not hold an image");
    }
    Bitmap bitmap = ImageUtils.toBitmap(image, mContext);
    // Rotate bitmap based on image rotation. The image rotation for ImageProxy is set in the
    // CameraManager whenever the device rotates.
    int rotation = mImageProxy.getImageInfo().getRotationDegrees();
    if (rotation != 0) {
      Matrix matrix = new Matrix();
      matrix.postRotate(rotation);
      bitmap =
          Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
    }
    return bitmap;
  }

  @Override
  public void close() throws Exception {
    if (mBitmap != null) {
      mBitmap.recycle();
    }
    mImageProxy.close();
  }
}
| 4,669 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/image/ImageException.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.image;
/** Unchecked exception thrown when image conversion or decoding fails. */
public class ImageException extends RuntimeException {
  public ImageException(String message) {
    super(message);
  }
}
| 4,670 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/image/ImageUtils.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.image;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer;
/** Static helpers for converting between media images, bitmaps, and raw RGBA bytes. */
public class ImageUtils {

  private ImageUtils() {
    // Utility class; not meant to be instantiated.
  }

  /**
   * The supported formats to convert an image to a Bitmap are JPEG and YUV_420_888.
   *
   * @param image A media Image from the camera X.
   * @param context Context used to create the RenderScript for YUV conversion.
   * @return A Bitmap converted from the input image.
   * @throws ImageException if the image format is neither JPEG nor YUV_420_888
   */
  public static Bitmap toBitmap(android.media.Image image, @Nullable Context context) {
    android.media.Image.Plane[] planes = image.getPlanes();
    int format = image.getFormat();
    switch (format) {
        // This is the format from the CameraX image capture take picture call
      case ImageFormat.JPEG:
        ByteBuffer buffer = planes[0].getBuffer();
        byte[] bytes = new byte[buffer.remaining()];
        buffer.get(bytes);
        return BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
      case ImageFormat.YUV_420_888:
        // This is the format from the CameraX image analyzer
        return ImageUtils.yuv420ToBitmap(image, context);
      default:
        throw new ImageException(String.format("unsupported image format %s", format));
    }
  }

  /** Returns the bitmap's pixels as tightly packed R,G,B,A bytes in row-major order. */
  public static byte[] bitmapToRGBA(final Bitmap bitmap) {
    int[] pixels = new int[bitmap.getWidth() * bitmap.getHeight()];
    byte[] bytes = new byte[pixels.length * 4];
    bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
    int i = 0;
    for (int pixel : pixels) {
      // Get components assuming is ARGB
      int A = (pixel >> 24) & 0xff;
      int R = (pixel >> 16) & 0xff;
      int G = (pixel >> 8) & 0xff;
      int B = pixel & 0xff;
      bytes[i++] = (byte) R;
      bytes[i++] = (byte) G;
      bytes[i++] = (byte) B;
      bytes[i++] = (byte) A;
    }
    return bytes;
  }

  /** Builds an ARGB_8888 bitmap from tightly packed R,G,B,A bytes in row-major order. */
  public static Bitmap bitmapFromRGBA(final int width, final int height, final byte[] bytes) {
    int[] pixels = new int[bytes.length / 4];
    int j = 0;
    for (int i = 0; i < pixels.length; i++) {
      int R = bytes[j++] & 0xff;
      int G = bytes[j++] & 0xff;
      int B = bytes[j++] & 0xff;
      int A = bytes[j++] & 0xff;
      int pixel = (A << 24) | (R << 16) | (G << 8) | B;
      pixels[i] = pixel;
    }
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
    return bitmap;
  }

  /**
   * Convert an image in YUV_420_888 format to bitmap. The code was adapted from:
   * https://blog.minhazav.dev/how-to-convert-yuv-420-sp-android.media.Image-to-Bitmap-or-jpeg/
   *
   * <p>All native RenderScript objects are destroyed before returning; the
   * previous implementation leaked a fresh RenderScript context (plus its
   * allocations and script) on every conversion.
   *
   * @param image An image in YUV_420_888 format
   * @return A bitmap converted from input image
   */
  private static Bitmap yuv420ToBitmap(android.media.Image image, Context context) {
    RenderScript rs = RenderScript.create(context);
    ScriptIntrinsicYuvToRGB script = ScriptIntrinsicYuvToRGB.create(rs, Element.U8_4(rs));
    Allocation in = null;
    Allocation out = null;
    try {
      // Flatten the YUV planes into a single NV21-style byte array.
      byte[] nv21 = yuvToNV21(image);
      Type.Builder yuvType = new Type.Builder(rs, Element.U8(rs)).setX(nv21.length);
      in = Allocation.createTyped(rs, yuvType.create(), Allocation.USAGE_SCRIPT);
      Type.Builder rgbaType =
          new Type.Builder(rs, Element.RGBA_8888(rs)).setX(image.getWidth()).setY(image.getHeight());
      out = Allocation.createTyped(rs, rgbaType.create(), Allocation.USAGE_SCRIPT);
      in.copyFrom(nv21);
      script.setInput(in);
      script.forEach(out);
      Bitmap bitmap =
          Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.ARGB_8888);
      out.copyTo(bitmap);
      return bitmap;
    } finally {
      // Release native RenderScript resources in all cases.
      if (in != null) {
        in.destroy();
      }
      if (out != null) {
        out.destroy();
      }
      script.destroy();
      rs.destroy();
    }
  }

  /**
   * Concatenates the Y, V, and U plane buffers into a single byte array in
   * NV21 order. NOTE(review): this relies on the device's YUV_420_888 plane
   * layout (interleaved V/U buffers, pixelStride 2) — confirm behavior on
   * devices with pixelStride 1 planes.
   *
   * @throws IllegalArgumentException if the image is not YUV_420_888
   */
  private static byte[] yuvToNV21(android.media.Image image) {
    if (image.getFormat() != ImageFormat.YUV_420_888) {
      throw new IllegalArgumentException("Invalid image format");
    }
    ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
    ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
    ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
    int ySize = yBuffer.remaining();
    int uSize = uBuffer.remaining();
    int vSize = vBuffer.remaining();
    byte[] nv21 = new byte[ySize + uSize + vSize];
    // U and V are swapped
    yBuffer.get(nv21, 0, ySize);
    vBuffer.get(nv21, ySize, vSize);
    uBuffer.get(nv21, ySize + vSize, uSize);
    return nv21;
  }
}
| 4,671 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/image/ImageModule.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.image;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.widget.Toast;
import androidx.annotation.NonNull;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.common.build.ReactBuildConfig;
import com.facebook.react.module.annotations.ReactModule;
import com.facebook.react.views.imagehelper.ImageSource;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import org.pytorch.rn.core.canvas.ImageData;
import org.pytorch.rn.core.javascript.JSContext;
import org.pytorch.rn.core.utils.FileUtils;
@ReactModule(name = "PyTorchCoreImageModule")
public class ImageModule extends ReactContextBaseJavaModule {
public static final String TAG = "PTLImageModule";
public static final String NAME = "PyTorchCoreImageModule";
private ReactApplicationContext mReactContext;
public ImageModule(ReactApplicationContext reactContext) {
super(reactContext);
mReactContext = reactContext;
}
@NonNull
@Override
public String getName() {
return NAME;
}
@ReactMethod
public void release(ReadableMap imageRef, Promise promise) throws Exception {
JSContext.release(imageRef);
promise.resolve(null);
}
@ReactMethod(isBlockingSynchronousMethod = true)
public int getWidth(ReadableMap imageRef) {
IImage image = JSContext.unwrapObject(imageRef);
return image.getWidth();
}
@ReactMethod(isBlockingSynchronousMethod = true)
public int getHeight(ReadableMap imageRef) {
IImage image = JSContext.unwrapObject(imageRef);
return image.getHeight();
}
@ReactMethod(isBlockingSynchronousMethod = true)
public float getNaturalWidth(ReadableMap imageRef) {
IImage image = JSContext.unwrapObject(imageRef);
return image.getNaturalWidth();
}
@ReactMethod(isBlockingSynchronousMethod = true)
public float getNaturalHeight(ReadableMap imageRef) {
IImage image = JSContext.unwrapObject(imageRef);
return image.getNaturalHeight();
}
@ReactMethod(isBlockingSynchronousMethod = true)
public float getPixelDensity(ReadableMap imageRef) {
IImage image = JSContext.unwrapObject(imageRef);
return image.getPixelDensity();
}
@ReactMethod
public void scale(ReadableMap imageRef, double sx, double sy, Promise promise) {
IImage image = JSContext.unwrapObject(imageRef);
IImage scaledImage = image.scale((float) sx, (float) sy);
JSContext.NativeJSRef ref = JSContext.wrapObject(scaledImage);
promise.resolve(ref.getJSRef());
}
@ReactMethod
public void fromURL(String urlString, Promise promise) {
try {
URL url = new URL(urlString);
Bitmap bitmap = BitmapFactory.decodeStream(url.openConnection().getInputStream());
IImage image = new Image(bitmap);
JSContext.NativeJSRef ref = JSContext.wrapObject(image);
promise.resolve(ref.getJSRef());
} catch (IOException e) {
promise.reject(e);
}
promise.reject(new Error("Could not load image from " + urlString));
}
@ReactMethod
public void fromFile(String filepath, Promise promise) {
File file = new File(filepath);
if (file.exists()) {
Bitmap bitmap = BitmapFactory.decodeFile(filepath);
IImage image = new Image(bitmap);
JSContext.NativeJSRef ref = JSContext.wrapObject(image);
promise.resolve(ref.getJSRef());
} else {
promise.reject(new Error("File does not exist " + filepath));
}
}
@ReactMethod
public void fromBundle(final ReadableMap source, Promise promise) {
final String uri = source.getString("uri");
final ImageSource imageSource = new ImageSource(mReactContext, uri);
if (Uri.EMPTY.equals(imageSource.getUri())) {
warnImageSource(uri);
}
try {
InputStream inputStream;
if (imageSource.isResource()) {
// A uri with no scheme (i.e., `null`) is likely to be a resource or
// local file. Release mode builds bundle the model file in the APK as
// a raw resource.
final int resourceId =
mReactContext
.getResources()
.getIdentifier(imageSource.getSource(), "drawable", mReactContext.getPackageName());
if (resourceId != 0) {
inputStream = mReactContext.getResources().openRawResource(resourceId);
} else {
// Fall back to the local file system
inputStream = new FileInputStream(uri);
}
} else {
// Get file path to cache image resource or load image resource from
// cache if loading from URI fails
final File targetFile = new File(mReactContext.getCacheDir(), uri);
// Always try to load image resource from URI to make sure it's always
// the latest version. Only if fetching the image resource from the URI
// fails, it will load the cached version (if exists).
FileUtils.downloadUriToFile(imageSource.getSource(), targetFile);
inputStream = new FileInputStream(targetFile);
}
Bitmap bitmap = BitmapFactory.decodeStream(inputStream);
IImage image = new Image(bitmap);
JSContext.NativeJSRef ref = JSContext.wrapObject(image);
promise.resolve(ref.getJSRef());
} catch (IOException e) {
promise.reject(e);
}
}
@ReactMethod
public void fromImageData(final ReadableMap imageDataRef, final boolean scaled, Promise promise) {
ImageData imageData = JSContext.unwrapObject(imageDataRef);
IImage image;
if (scaled) {
Bitmap bitmap = imageData.getScaledBitmap();
image = new Image(bitmap);
} else {
// Create a copy of the bitmap to allow developers to independently
// release the image data and this new image. Without a copy, the image
// will have an invalid bitmap when the image data is released and vice
// versa.
Bitmap bitmap = imageData.getBitmap();
Bitmap bitmapCopy = bitmap.copy(bitmap.getConfig(), bitmap.isMutable());
image = new Image(bitmapCopy);
}
JSContext.NativeJSRef ref = JSContext.wrapObject(image);
promise.resolve(ref.getJSRef());
}
@ReactMethod
public void toFile(final ReadableMap imageRef, Promise promise) {
try {
IImage image = JSContext.unwrapObject(imageRef);
Bitmap bitmap = image.getBitmap();
File cacheDir = mReactContext.getCacheDir();
File file = File.createTempFile("image", ".png", cacheDir);
FileOutputStream outputStream = new FileOutputStream(file);
bitmap.compress(Bitmap.CompressFormat.PNG, 100, outputStream);
promise.resolve(file.getAbsolutePath());
} catch (IOException e) {
promise.reject(e);
}
}
private void warnImageSource(String uri) {
if (ReactBuildConfig.DEBUG) {
Toast.makeText(
mReactContext,
"Warning: Image source \"" + uri + "\" doesn't exist",
Toast.LENGTH_SHORT)
.show();
}
}
}
| 4,672 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/image/AbstractImage.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.image;
import android.graphics.Bitmap;
import android.graphics.Matrix;
/**
 * Shared geometry logic for {@link IImage} implementations. Width and height
 * are reported in density-independent units and are swapped when the source
 * reports a 90/270-degree rotation.
 */
public abstract class AbstractImage implements IImage {

  @Override
  public float getPixelDensity() {
    // Default density; ImageDataImage overrides this with the canvas density.
    return 1.0f;
  }

  @Override
  public int getWidth() {
    int rotation = getImageRotationDegrees();
    // For sideways frames, natural width and height are swapped.
    if (rotation == 90 || rotation == 270) {
      return Math.round(getNaturalHeight() / getPixelDensity());
    }
    return Math.round(getNaturalWidth() / getPixelDensity());
  }

  @Override
  public int getHeight() {
    int rotation = getImageRotationDegrees();
    if (rotation == 90 || rotation == 270) {
      return Math.round(getNaturalWidth() / getPixelDensity());
    }
    return Math.round(getNaturalHeight() / getPixelDensity());
  }

  @Override
  public IImage scale(float sx, float sy) {
    Bitmap bitmap = getBitmap();
    // NOTE(review): width/height here are density-adjusted (and possibly
    // rotation-swapped) values while the crop below is applied to the raw
    // bitmap. For a pixel density != 1 this crops instead of covering the
    // full bitmap — confirm this is intended for ImageDataImage sources.
    int width = getWidth();
    int height = getHeight();
    Matrix matrix = new Matrix();
    matrix.postScale(sx, sy);
    Bitmap scaledBitmap = Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, true);
    return new Image(scaledBitmap);
  }
}
| 4,673 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/canvas/CanvasViewManager.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.canvas;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.uimanager.ReactStylesDiffMap;
import com.facebook.react.uimanager.SimpleViewManager;
import com.facebook.react.uimanager.ThemedReactContext;
import java.util.Map;
/**
 * React Native view manager for {@link CanvasView}. Declares the bubbling
 * "onContext2D" event and keeps the native clip-to-outline state in sync with
 * the React "overflow" style.
 */
public class CanvasViewManager extends SimpleViewManager<CanvasView> {

  public static final String REACT_CLASS = "PyTorchCoreCanvasView";

  private final ReactApplicationContext mReactContext;

  public CanvasViewManager(ReactApplicationContext reactContext) {
    mReactContext = reactContext;
  }

  @NonNull
  @Override
  public String getName() {
    return REACT_CLASS;
  }

  @NonNull
  @Override
  protected CanvasView createViewInstance(@NonNull ThemedReactContext reactContext) {
    return new CanvasView(reactContext);
  }

  /** Registers "onContext2D" as a bubbling event so JS receives the 2D context. */
  @Nullable
  @Override
  public Map<String, Object> getExportedCustomBubblingEventTypeConstants() {
    Object registration =
        MapBuilder.of("phasedRegistrationNames", MapBuilder.of("bubbled", "onContext2D"));
    return MapBuilder.<String, Object>builder().put("onContext2D", registration).build();
  }

  @Override
  public void updateProperties(@NonNull CanvasView viewToUpdate, ReactStylesDiffMap props) {
    super.updateProperties(viewToUpdate, props);
    // Updating the overflow style property also requires updating the
    // clip-to-outline flag on the canvas view.
    viewToUpdate.setOverflow(props.getString("overflow"));
  }
}
| 4,674 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/canvas/CanvasView.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.canvas;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import androidx.annotation.Nullable;
import com.facebook.react.bridge.LifecycleEventListener;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.uimanager.events.RCTEventEmitter;
import org.pytorch.rn.core.javascript.JSContext;
/**
 * Android view backing the JavaScript canvas. Owns a
 * {@link CanvasRenderingContext2D} whose offscreen bitmap is blitted in
 * {@link #onDraw}; the context is handed to JavaScript via the bubbling
 * "onContext2D" event when the React host resumes.
 */
public class CanvasView extends View {

  // 2D rendering context that owns the offscreen bitmap drawn by this view.
  private CanvasRenderingContext2D mRenderingContext;

  public CanvasView(Context context) {
    super(context);
    initialize();
  }

  public CanvasView(Context context, @Nullable AttributeSet attrs) {
    super(context, attrs);
    initialize();
  }

  public CanvasView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    initialize();
  }

  public CanvasView(
      Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) {
    super(context, attrs, defStyleAttr, defStyleRes);
    initialize();
  }

  private void initialize() {
    // Create the rendering context BEFORE registering the lifecycle listener.
    // ReactContext can deliver onHostResume to a freshly added listener when
    // the host is already resumed; with the original ordering that would wrap
    // a null mRenderingContext.
    mRenderingContext = new CanvasRenderingContext2D(this);
    ReactContext reactContext = (ReactContext) getContext();
    reactContext.addLifecycleEventListener(
        new LifecycleEventListener() {
          @Override
          public void onHostResume() {
            // Hand the 2D context to JavaScript once the host is active.
            JSContext.NativeJSRef ref = JSContext.wrapObject(mRenderingContext);
            reactContext
                .getJSModule(RCTEventEmitter.class)
                .receiveEvent(getId(), "onContext2D", ref.getJSRef());
          }

          @Override
          public void onHostPause() {
            // empty
          }

          @Override
          public void onHostDestroy() {
            // empty
          }
        });
  }

  @Override
  protected void onDraw(Canvas canvas) {
    super.onDraw(canvas);
    // Delegate to the rendering context, which blits its offscreen bitmap.
    mRenderingContext.onDraw(canvas);
  }

  /**
   * Mirrors the React "overflow" style on clipToOutline: "hidden" (and the
   * null default) clip to the view outline; "visible"/"scroll" do not. Other
   * values are ignored, matching the original behavior.
   */
  public void setOverflow(@Nullable String overflow) {
    if (overflow == null) {
      setClipToOutline(true);
      return;
    }
    switch (overflow) {
      case "visible":
      case "scroll":
        setClipToOutline(false);
        break;
      case "hidden":
        setClipToOutline(true);
        break;
    }
  }

  // TODO(raedle) onDestroy view, release the NativeJSRef that wraps the context 2d
}
| 4,675 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/canvas/ImageDataModule.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.canvas;
import androidx.annotation.NonNull;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.module.annotations.ReactModule;
import org.pytorch.rn.core.javascript.JSContext;
@ReactModule(name = "PyTorchCoreImageDataModule")
public class ImageDataModule extends ReactContextBaseJavaModule {
public static final String NAME = "PyTorchCoreImageDataModule";
public ImageDataModule(ReactApplicationContext reactContext) {
super(reactContext);
}
@NonNull
@Override
public String getName() {
return NAME;
}
@ReactMethod
public void release(ReadableMap imageDataRef, Promise promise) throws Exception {
JSContext.release(imageDataRef);
promise.resolve(null);
}
}
| 4,676 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/canvas/ImageData.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.canvas;
import android.graphics.Bitmap;
/**
 * Pixel buffer produced by the canvas together with the display size it
 * should be scaled to when drawn.
 */
public class ImageData {

  // Raw pixel data.
  private final Bitmap mBitmap;
  // Display dimensions used by getScaledBitmap().
  private final int mScaledWidth;
  private final int mScaledHeight;

  public ImageData(Bitmap bitmap, int scaledWidth, int scaledHeight) {
    mBitmap = bitmap;
    mScaledWidth = scaledWidth;
    mScaledHeight = scaledHeight;
  }

  /** Width of the underlying bitmap in pixels. */
  public int getWidth() {
    return mBitmap.getWidth();
  }

  /** Height of the underlying bitmap in pixels. */
  public int getHeight() {
    return mBitmap.getHeight();
  }

  /** The raw, unscaled bitmap. */
  public Bitmap getBitmap() {
    return mBitmap;
  }

  /** A copy of the bitmap resized to the requested display dimensions. */
  public Bitmap getScaledBitmap() {
    return Bitmap.createScaledBitmap(mBitmap, mScaledWidth, mScaledHeight, false);
  }
}
| 4,677 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/canvas/CanvasRenderingContext2D.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.canvas;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.text.TextPaint;
import android.util.DisplayMetrics;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.uimanager.PixelUtil;
import com.facebook.react.uimanager.ReactInvalidPropertyException;
import java.util.Stack;
import org.pytorch.rn.core.image.IImage;
/**
 * Android implementation of the web CanvasRenderingContext2D API. Drawing
 * commands render into an offscreen bitmap that the owning {@link CanvasView}
 * blits in its onDraw. Coordinates arriving from JavaScript are in
 * density-independent pixels and converted via {@link PixelUtil} before
 * touching the Android canvas.
 */
public class CanvasRenderingContext2D {
  // View that displays the offscreen bitmap.
  private final CanvasView mCanvasView;
  // Saved drawing states (save/restore semantics).
  private final Stack<CanvasState> mSavedStates;
  // Paint used by fill operations (fillRect, fill, fillCircle, ...).
  private Paint mFillPaint;
  // Paint used by stroke operations.
  private Paint mStrokePaint;
  // Paint used when blitting the offscreen bitmap onto the view.
  private Paint mBitmapPaint;
  // Paint in PorterDuff CLEAR mode, used by clearRect.
  private Paint mClearPaint;
  // Paint used for filled text.
  private TextPaint mTextFillPaint;
  // Offscreen bitmap all drawing commands render into.
  private Bitmap mBitmap;
  // Canvas wrapping mBitmap.
  private Canvas mCanvas;
  // Path under construction (beginPath/moveTo/lineTo/...).
  private Path mPath;

  public CanvasRenderingContext2D(CanvasView canvasView) {
    mCanvasView = canvasView;
    mSavedStates = new Stack<>();
    initPaint();
    init();
  }
  /** Creates all paints with their web-canvas default settings. */
  private void initPaint() {
    mFillPaint = new Paint();
    mFillPaint.setAntiAlias(true);
    mFillPaint.setDither(true);
    mFillPaint.setColor(Color.BLACK);
    mFillPaint.setStyle(Paint.Style.FILL);

    mStrokePaint = new Paint();
    mStrokePaint.setAntiAlias(true);
    mStrokePaint.setDither(true);
    mStrokePaint.setColor(Color.BLACK);
    mStrokePaint.setStyle(Paint.Style.STROKE);
    // Initialize stroke width to be pixel density, which matches 1px for a web canvas.
    mStrokePaint.setStrokeWidth(PixelUtil.toPixelFromDIP(1));

    // CLEAR transfer mode erases pixels instead of painting them (clearRect).
    mClearPaint = new Paint();
    mClearPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.CLEAR));

    mBitmapPaint = new Paint();
    mBitmapPaint.setFilterBitmap(true);

    // Text paint starts out as a copy of the fill paint.
    mTextFillPaint = new TextPaint();
    mTextFillPaint.set(mFillPaint);
    mTextFillPaint.setStyle(Paint.Style.FILL);
    // The default font size for web canvas is 10px.
    mTextFillPaint.setTextSize(PixelUtil.toPixelFromDIP(10));
    // Initialize stroke width to be pixel density, which matches 1px for a web canvas.
    mTextFillPaint.setStrokeWidth(PixelUtil.toPixelFromDIP(1));
  }
  /** Allocates the current path and the offscreen bitmap (sized to the screen). */
  private void init() {
    mPath = new Path();
    DisplayMetrics displayMetrics = mCanvasView.getResources().getDisplayMetrics();
    // The backing bitmap covers the whole screen regardless of the view size.
    mBitmap =
        Bitmap.createBitmap(
            displayMetrics.widthPixels, displayMetrics.heightPixels, Bitmap.Config.ARGB_8888);
    mCanvas = new Canvas(mBitmap);
  }

  /** Blits the offscreen bitmap onto the view's canvas. */
  protected void onDraw(Canvas canvas) {
    canvas.drawBitmap(mBitmap, 0, 0, mBitmapPaint);
  }
  /** Sets the color used by fill operations and filled text. */
  public void setFillStyle(int color) {
    mFillPaint.setColor(color);
    mTextFillPaint.setColor(color);
  }

  /** Sets the color used by stroke operations. */
  public void setStrokeStyle(int color) {
    mStrokePaint.setColor(color);
  }

  /** Sets the stroke width, given in density-independent pixels. */
  public void setLineWidth(float width) {
    final float strokeWidth = PixelUtil.toPixelFromDIP(width);
    mStrokePaint.setStrokeWidth(strokeWidth);
    mTextFillPaint.setStrokeWidth(strokeWidth);
  }
  /**
   * Sets the shape of stroke end caps, mirroring the web canvas
   * {@code lineCap} property.
   *
   * @throws ReactInvalidPropertyException for values other than
   *     "butt" | "round" | "square"
   */
  public void setLineCap(String lineCap) throws ReactInvalidPropertyException {
    Paint.Cap cap = null;
    switch (lineCap) {
      case "butt":
        cap = Paint.Cap.BUTT;
        break;
      case "round":
        cap = Paint.Cap.ROUND;
        break;
      case "square":
        cap = Paint.Cap.SQUARE;
        break;
      default:
        throw new ReactInvalidPropertyException("lineCap", lineCap, "butt | round | square");
    }
    // Only touch the paints when the cap actually changes.
    Paint.Cap strokeCap = mStrokePaint.getStrokeCap();
    if (!cap.equals(strokeCap)) {
      mStrokePaint.setStrokeCap(cap);
    }
    Paint.Cap textCap = mTextFillPaint.getStrokeCap();
    if (!cap.equals(textCap)) {
      mTextFillPaint.setStrokeCap(cap);
    }
  }
public void setLineJoin(String lineJoin) {
Paint.Join join = null;
switch (lineJoin) {
case "bevel":
join = Paint.Join.BEVEL;
break;
case "round":
join = Paint.Join.ROUND;
break;
case "miter":
join = Paint.Join.MITER;
break;
default:
throw new ReactInvalidPropertyException("lineJoin", lineJoin, "bevel | round | miter");
}
Paint.Join strokeCap = mStrokePaint.getStrokeJoin();
if (!join.equals(strokeCap)) {
mStrokePaint.setStrokeJoin(join);
}
Paint.Join textCap = mTextFillPaint.getStrokeJoin();
if (!join.equals(textCap)) {
mTextFillPaint.setStrokeJoin(join);
}
}
  /** Sets the miter limit applied to miter joins. */
  public void setMiterLimit(final float miterLimit) {
    mStrokePaint.setStrokeMiter(miterLimit);
    mTextFillPaint.setStrokeMiter(miterLimit);
  }

  /**
   * Sets the horizontal text alignment, mirroring the web canvas
   * {@code textAlign} property.
   *
   * @throws ReactInvalidPropertyException for values other than
   *     "left" | "center" | "right"
   */
  public void setTextAlign(final String textAlign) {
    Paint.Align align = null;
    switch (textAlign) {
      case "left":
        align = Paint.Align.LEFT;
        break;
      case "center":
        align = Paint.Align.CENTER;
        break;
      case "right":
        align = Paint.Align.RIGHT;
        break;
      default:
        throw new ReactInvalidPropertyException("textAlign", textAlign, "left | center | right");
    }
    // Only touch the paint when the alignment actually changes.
    Paint.Align currentAlign = mTextFillPaint.getTextAlign();
    if (!align.equals(currentAlign)) {
      mTextFillPaint.setTextAlign(align);
    }
  }
  /** Erases the whole offscreen bitmap and resets the current path. */
  public void clear() {
    mCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
    mPath.reset();
  }

  /** Erases the given rectangle (coordinates in density-independent pixels). */
  public void clearRect(float x, float y, float width, float height) {
    mCanvas.drawRect(
        PixelUtil.toPixelFromDIP(x),
        PixelUtil.toPixelFromDIP(y),
        PixelUtil.toPixelFromDIP(x + width),
        PixelUtil.toPixelFromDIP(y + height),
        mClearPaint);
  }

  /** Outlines the given rectangle with the current stroke paint. */
  public void strokeRect(float x, float y, float width, float height) {
    mCanvas.drawRect(
        PixelUtil.toPixelFromDIP(x),
        PixelUtil.toPixelFromDIP(y),
        PixelUtil.toPixelFromDIP(x + width),
        PixelUtil.toPixelFromDIP(y + height),
        mStrokePaint);
  }

  /** Fills the given rectangle with the current fill paint. */
  public void fillRect(float x, float y, float width, float height) {
    mFillPaint.setStyle(Paint.Style.FILL);
    mCanvas.drawRect(
        PixelUtil.toPixelFromDIP(x),
        PixelUtil.toPixelFromDIP(y),
        PixelUtil.toPixelFromDIP(x + width),
        PixelUtil.toPixelFromDIP(y + height),
        mFillPaint);
  }

  /** Starts a new path, discarding any path under construction. */
  public void beginPath() {
    mPath.reset();
  }

  /** Closes the current sub-path back to its starting point. */
  public void closePath() {
    mPath.close();
  }

  /** Strokes the current path with the stroke paint. */
  public void stroke() {
    mCanvas.drawPath(mPath, mStrokePaint);
  }

  /** Fills the current path with the fill paint. */
  public void fill() {
    mCanvas.drawPath(mPath, mFillPaint);
  }
  /**
   * Adds a circular arc to the current path. Angles are in radians;
   * {@code counterclockwise} flips the sweep direction. The normalization
   * below appears intended to mirror how the web canvas {@code arc()} clamps
   * sweeps to at most one full circle in the requested direction —
   * NOTE(review): confirm against the web spec for multi-revolution sweeps.
   */
  public void arc(
      float x, float y, float radius, float startAngle, float endAngle, boolean counterclockwise) {
    RectF rect =
        new RectF(
            PixelUtil.toPixelFromDIP(x - radius),
            PixelUtil.toPixelFromDIP(y - radius),
            PixelUtil.toPixelFromDIP(x + radius),
            PixelUtil.toPixelFromDIP(y + radius));
    float PI2 = (float) Math.PI * 2;
    float sweepAngle = endAngle - startAngle;
    float initialAngle = startAngle % PI2;
    // Fold the sweep into the requested direction; a zero initial angle with
    // an exact multiple of 2*PI still yields a full circle.
    if (!counterclockwise && sweepAngle < 0) {
      sweepAngle %= PI2;
      if (sweepAngle < 0 || initialAngle == 0) {
        sweepAngle += PI2;
      }
    } else if (counterclockwise && sweepAngle > 0) {
      sweepAngle %= PI2;
      if (sweepAngle > 0 || initialAngle == 0) {
        sweepAngle -= PI2;
      }
    }
    mPath.addArc(rect, radiansToDegrees(initialAngle), radiansToDegrees(sweepAngle));
  }
public void rect(float x, float y, float width, float height) {
mPath.addRect(
PixelUtil.toPixelFromDIP(x),
PixelUtil.toPixelFromDIP(y),
PixelUtil.toPixelFromDIP(x + width),
PixelUtil.toPixelFromDIP(y + height),
Path.Direction.CW);
}
  public void lineTo(float x, float y) {
    // Add a straight line from the current point to (x, y); coordinates are DIPs.
    mPath.lineTo(PixelUtil.toPixelFromDIP(x), PixelUtil.toPixelFromDIP(y));
  }
  public void moveTo(float x, float y) {
    // Begin a new subpath at (x, y); coordinates are DIPs.
    mPath.moveTo(PixelUtil.toPixelFromDIP(x), PixelUtil.toPixelFromDIP(y));
  }
public void drawCircle(float x, float y, float radius) {
mCanvas.drawCircle(
PixelUtil.toPixelFromDIP(x),
PixelUtil.toPixelFromDIP(y),
PixelUtil.toPixelFromDIP(radius),
mStrokePaint);
}
public void fillCircle(float x, float y, float radius) {
mCanvas.drawCircle(
PixelUtil.toPixelFromDIP(x),
PixelUtil.toPixelFromDIP(y),
PixelUtil.toPixelFromDIP(radius),
mFillPaint);
}
public void drawImage(IImage image, float dx, float dy) {
float imagePixelDensity = image.getPixelDensity();
Matrix matrix = new Matrix();
matrix.postScale(
PixelUtil.getDisplayMetricDensity() / imagePixelDensity,
PixelUtil.getDisplayMetricDensity() / imagePixelDensity);
matrix.postTranslate(PixelUtil.toPixelFromDIP(dx), PixelUtil.toPixelFromDIP(dy));
mCanvas.drawBitmap(image.getBitmap(), matrix, null);
}
public void drawImage(IImage image, float dx, float dy, float dWidth, float dHeight) {
Bitmap bitmap = image.getBitmap();
int sWidth = bitmap.getWidth();
int sHeight = bitmap.getHeight();
float scaleX = PixelUtil.toPixelFromDIP(dWidth) / sWidth;
float scaleY = PixelUtil.toPixelFromDIP(dHeight) / sHeight;
Matrix matrix = new Matrix();
matrix.postScale(scaleX, scaleY);
matrix.postTranslate(PixelUtil.toPixelFromDIP(dx), PixelUtil.toPixelFromDIP(dy));
mCanvas.drawBitmap(bitmap, matrix, null);
}
  public void drawImage(
      IImage image,
      float sx,
      float sy,
      float sWidth,
      float sHeight,
      float dx,
      float dy,
      float dWidth,
      float dHeight) {
    Bitmap bitmap = image.getBitmap();
    // Extract source x,y,width,height from original bitmap into destination bitmap.
    // NOTE(review): the source rectangle is used in raw bitmap pixel coordinates
    // (no DIP conversion), unlike the destination values — confirm this matches
    // the intended source-rectangle semantics of the JS drawImage API.
    Bitmap destBitmap =
        Bitmap.createBitmap(bitmap, (int) sx, (int) sy, (int) sWidth, (int) sHeight);
    // Scale the cropped region so it covers dWidth x dHeight DIPs on screen,
    // then translate it to the destination point.
    float scaleX = PixelUtil.toPixelFromDIP(dWidth) / sWidth;
    float scaleY = PixelUtil.toPixelFromDIP(dHeight) / sHeight;
    Matrix matrix = new Matrix();
    matrix.postScale(scaleX, scaleY);
    matrix.postTranslate(PixelUtil.toPixelFromDIP(dx), PixelUtil.toPixelFromDIP(dy));
    mCanvas.drawBitmap(destBitmap, matrix, null);
  }
  public ImageData getImageData(float sx, float sy, float sw, float sh) {
    // Crop the backing bitmap; the crop rectangle is given in DIPs and is
    // converted to device pixels here.
    int x = (int) PixelUtil.toPixelFromDIP(sx);
    int y = (int) PixelUtil.toPixelFromDIP(sy);
    int pixelWidth = (int) PixelUtil.toPixelFromDIP(sw);
    int pixelHeight = (int) PixelUtil.toPixelFromDIP(sh);
    Bitmap bitmap = Bitmap.createBitmap(mBitmap, x, y, pixelWidth, pixelHeight);
    // NOTE(review): the reported ImageData dimensions stay in DIPs while the
    // bitmap holds device pixels — presumably consumers expect DIP sizes; verify.
    return new ImageData(bitmap, (int) sw, (int) sh);
  }
public void putImageData(ImageData imageData, float dx, float dy) {
// This method is not affected by the canvas transformation matrix. We save the canvas transform
// and restore it afterwards.
mCanvas.save();
Bitmap bitmap = imageData.getBitmap();
mCanvas.drawBitmap(bitmap, PixelUtil.toPixelFromDIP(dx), PixelUtil.toPixelFromDIP(dy), null);
mCanvas.restore();
}
  public void setFont(final ReadableMap font) {
    // Parse the font size; assumes a value like "12px" (two-character unit
    // suffix) — NOTE(review): confirm the JS side always sends "<n>px".
    String textSizeString = font.getString("fontSize");
    int textSize = Integer.parseInt(textSizeString.substring(0, textSizeString.length() - 2));
    mTextFillPaint.setTextSize(PixelUtil.toPixelFromDIP(textSize));
    // Only the first entry of the fontFamily array is honored; unrecognized
    // families fall back to the platform default typeface.
    String fontFamily = font.getArray("fontFamily").getString(0);
    Typeface typeface = Typeface.DEFAULT;
    switch (fontFamily) {
      case "serif":
        typeface = Typeface.SERIF;
        break;
      case "sans-serif":
        typeface = Typeface.SANS_SERIF;
        break;
      case "monospace":
        typeface = Typeface.MONOSPACE;
        break;
    }
    // Typeface.NORMAL(0) + BOLD(1) + ITALIC(2) composes to BOLD_ITALIC(3).
    int typefaceStyle = Typeface.NORMAL;
    String fontWeight = font.getString("fontWeight");
    if ("bold".equals(fontWeight)) {
      typefaceStyle += Typeface.BOLD;
    }
    String fontStyle = font.getString("fontStyle");
    if ("italic".equals(fontStyle)) {
      typefaceStyle += Typeface.ITALIC;
    }
    Typeface newTypeface = Typeface.create(typeface, typefaceStyle);
    mTextFillPaint.setTypeface(newTypeface);
  }
public void fillText(String text, float x, float y) {
mTextFillPaint.setStyle(Paint.Style.FILL);
mCanvas.drawText(
text, PixelUtil.toPixelFromDIP(x), PixelUtil.toPixelFromDIP(y), mTextFillPaint);
}
public void strokeText(String text, float x, float y) {
mTextFillPaint.setStyle(Paint.Style.STROKE);
mCanvas.drawText(
text, PixelUtil.toPixelFromDIP(x), PixelUtil.toPixelFromDIP(y), mTextFillPaint);
}
  /**
   * The CanvasRenderingContext2D.setTransform() method of the Canvas 2D API resets (overrides) the
   * current transformation to the identity matrix, and then invokes a transformation described by
   * the arguments of this method. This lets you scale, rotate, translate (move), and skew the
   * context.
   *
   * <p>The transformation matrix is described by:
   *
   * <p>a &amp; c &amp; e
   *
   * <p>b &amp; d &amp; f
   *
   * <p>0 &amp; 0 &amp; 1
   *
   * <p>setTransform() has two types of parameter that it can accept. The older type consists of
   * several parameters representing the individual components of the transformation matrix to set:
   *
   * @param a (m11) Horizontal scaling. A value of 1 results in no scaling.
   * @param b (m12) Vertical skewing.
   * @param c (m21) Horizontal skewing.
   * @param d (m22) Vertical scaling. A value of 1 results in no scaling.
   * @param e (dx) Horizontal translation (moving).
   * @param f (dy) Vertical translation (moving).
   *     <p>See <a
   *     href="https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/setTransform">MDN:
   *     CanvasRenderingContext2D.setTransform</a>
   */
  protected void setTransform(float a, float b, float c, float d, float e, float f) {
    // Matrix.setValues takes a row-major 3x3 array: {a, c, e, b, d, f, 0, 0, 1}.
    // The translation components are DIPs and are converted to device pixels.
    Matrix matrix = new Matrix();
    matrix.setValues(
        new float[] {
          a, c, PixelUtil.toPixelFromDIP(e), b, d, PixelUtil.toPixelFromDIP(f), 0, 0, 1
        });
    mCanvas.setMatrix(matrix);
  }
  protected void scale(float x, float y) {
    // Scale factors are unitless multipliers, so no DIP conversion is needed.
    mCanvas.scale(x, y);
  }
  protected void rotate(float angle) {
    // The public API takes radians; Canvas.rotate expects degrees.
    mCanvas.rotate(radiansToDegrees(angle));
  }
  protected void translate(float x, float y) {
    // Translate by (x, y) DIPs, converted to device pixels.
    mCanvas.translate(PixelUtil.toPixelFromDIP(x), PixelUtil.toPixelFromDIP(y));
  }
  /**
   * The CanvasRenderingContext2D save function saves the following states:
   *
   * <p>The drawing state that gets saved onto a stack consists of:
   *
   * <p>* The current transformation matrix. * The current clipping region. * The current dash list.
   * * The current values of the following attributes: strokeStyle, fillStyle, globalAlpha,
   * lineWidth, lineCap, lineJoin, miterLimit, lineDashOffset, shadowOffsetX, shadowOffsetY,
   * shadowBlur, shadowColor, globalCompositeOperation, font, textAlign, textBaseline, direction,
   * imageSmoothingEnabled.
   *
   * <p>However, the Android Canvas only saves the canvas, therefore the implementation needs to
   * additionally copy the current paints etc. into a canvas state that can be loaded on restore.
   *
   * <p>See <a
   * href="https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/save">MDN:
   * CanvasRenderingContext2D.save</a>
   */
  protected void save() {
    // The Canvas.save() only saves current matrix and clip, so we have to separately save
    // additional drawing state.
    CanvasState savedState = new CanvasState(mStrokePaint, mFillPaint);
    // Create new paint objects with current paint state.
    mStrokePaint = new Paint(mStrokePaint);
    mFillPaint = new Paint(mFillPaint);
    mCanvas.save();
    mSavedStates.push(savedState);
  }
protected void restore() {
if (!mSavedStates.empty()) {
CanvasState canvasState = mSavedStates.pop();
mStrokePaint = canvasState.mStrokePaint;
mFillPaint = canvasState.mFillPaint;
mCanvas.restore();
}
}
  public void invalidate() {
    // Request a redraw of the backing canvas view.
    mCanvasView.invalidate();
  }
private float radiansToDegrees(float radians) {
return (float) (radians * 180 / Math.PI);
}
static class CanvasState {
Paint mStrokePaint;
Paint mFillPaint;
private CanvasState(Paint strokePaint, Paint fillPaint) {
mStrokePaint = strokePaint;
mFillPaint = fillPaint;
}
}
}
| 4,678 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/canvas/CanvasRenderingContext2DModule.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.canvas;
import android.os.Handler;
import androidx.annotation.NonNull;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.module.annotations.ReactModule;
import com.facebook.react.uimanager.ReactInvalidPropertyException;
import org.pytorch.rn.core.image.IImage;
import org.pytorch.rn.core.javascript.JSContext;
/**
 * Native module bridging the JS {@code CanvasRenderingContext2D} API to the native canvas
 * implementation. Every method unwraps the JS ref into the native context and forwards the call on
 * the main (UI) thread via {@code mMainHandler}, since the underlying view-backed canvas must only
 * be touched from that thread.
 *
 * <p>Methods that take a {@link Promise} resolve it once the forwarded call has executed (or reject
 * it if the native context throws), so JS callers can await completion.
 */
@ReactModule(name = "PyTorchCoreCanvasRenderingContext2DModule")
public class CanvasRenderingContext2DModule extends ReactContextBaseJavaModule {
  public static final String NAME = "PyTorchCoreCanvasRenderingContext2DModule";

  private final ReactApplicationContext mReactContext;
  // Posts all canvas work onto the main/UI thread.
  private final Handler mMainHandler;

  public CanvasRenderingContext2DModule(ReactApplicationContext reactContext) {
    super(reactContext);
    mReactContext = reactContext;
    mMainHandler = new Handler(mReactContext.getMainLooper());
  }

  @NonNull
  @Override
  public String getName() {
    return NAME;
  }

  @ReactMethod
  public void invalidate(ReadableMap canvasRef, Promise promise) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.invalidate();
          promise.resolve(null);
        });
  }

  @ReactMethod
  public void setFillStyle(ReadableMap canvasRef, double color) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.setFillStyle((int) color);
        });
  }

  @ReactMethod
  public void setStrokeStyle(ReadableMap canvasRef, double color) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.setStrokeStyle((int) color);
        });
  }

  @ReactMethod
  public void setLineWidth(ReadableMap canvasRef, double width) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          // NOTE(review): the cast to int truncates fractional line widths
          // (e.g. 1.5 -> 1) — confirm whether the native setter can take a float.
          ctx.setLineWidth((int) width);
        });
  }

  @ReactMethod
  public void setLineCap(final ReadableMap canvasRef, final String lineCap, final Promise promise) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          try {
            ctx.setLineCap(lineCap);
            // Resolve on success; previously the promise was only rejected on
            // error and left pending forever on the happy path.
            promise.resolve(null);
          } catch (ReactInvalidPropertyException e) {
            promise.reject(e);
          }
        });
  }

  @ReactMethod
  public void setLineJoin(
      final ReadableMap canvasRef, final String lineJoin, final Promise promise) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          try {
            ctx.setLineJoin(lineJoin);
            // Resolve on success (see setLineCap).
            promise.resolve(null);
          } catch (ReactInvalidPropertyException e) {
            promise.reject(e);
          }
        });
  }

  @ReactMethod
  public void setMiterLimit(final ReadableMap canvasRef, final double miterLimit) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.setMiterLimit((float) miterLimit);
        });
  }

  @ReactMethod
  public void setTextAlign(ReadableMap canvasRef, String textAlign, final Promise promise) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          try {
            ctx.setTextAlign(textAlign);
            // Resolve on success (see setLineCap).
            promise.resolve(null);
          } catch (ReactInvalidPropertyException e) {
            promise.reject(e);
          }
        });
  }

  @ReactMethod
  public void clear(ReadableMap canvasRef) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.clear();
        });
  }

  @ReactMethod
  public void clearRect(ReadableMap canvasRef, double x, double y, double width, double height) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.clearRect((float) x, (float) y, (float) width, (float) height);
        });
  }

  @ReactMethod
  public void strokeRect(ReadableMap canvasRef, double x, double y, double width, double height) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.strokeRect((float) x, (float) y, (float) width, (float) height);
        });
  }

  @ReactMethod
  public void fillRect(ReadableMap canvasRef, double x, double y, double width, double height) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.fillRect((float) x, (float) y, (float) width, (float) height);
        });
  }

  @ReactMethod
  public void beginPath(ReadableMap canvasRef) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.beginPath();
        });
  }

  @ReactMethod
  public void closePath(ReadableMap canvasRef) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.closePath();
        });
  }

  @ReactMethod
  public void stroke(ReadableMap canvasRef) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.stroke();
        });
  }

  @ReactMethod
  public void fill(ReadableMap canvasRef) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.fill();
        });
  }

  @ReactMethod
  public void arc(
      ReadableMap canvasRef,
      double x,
      double y,
      double radius,
      double startAngle,
      double endAngle,
      boolean anticlockwise) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.arc(
              (float) x,
              (float) y,
              (float) radius,
              (float) startAngle,
              (float) endAngle,
              anticlockwise);
        });
  }

  @ReactMethod
  public void rect(ReadableMap canvasRef, double x, double y, double width, double height) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.rect((float) x, (float) y, (float) width, (float) height);
        });
  }

  @ReactMethod
  public void lineTo(ReadableMap canvasRef, double x, double y) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.lineTo((float) x, (float) y);
        });
  }

  @ReactMethod
  public void moveTo(ReadableMap canvasRef, double x, double y) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.moveTo((float) x, (float) y);
        });
  }

  @ReactMethod
  public void drawCircle(ReadableMap canvasRef, double x, double y, double radius) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.drawCircle((float) x, (float) y, (float) radius);
        });
  }

  @ReactMethod
  public void fillCircle(ReadableMap canvasRef, double x, double y, double radius) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.fillCircle((float) x, (float) y, (float) radius);
        });
  }

  /**
   * This method is the equivalent to the ctx.drawImage of the web CanvasRenderingContext2D {@link
   * https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/drawImage}.
   *
   * <p>Because React Native NativeModules don't support method overloading, it will be a single
   * method in Java (native) and the three different cases for the drawImage functions will be
   * discerned by invalid function params. An invalid param value in this case will be any negative
   * value.
   *
   * <p>For more details on the method params, check out the web documentation for the
   * CanvasRenderingContext2D.
   */
  @ReactMethod
  public void drawImage(
      ReadableMap canvasRef,
      ReadableMap imageRef,
      double dx_sx,
      double dy_sy,
      double dWidth_sWidth,
      double dHeight_sHeight,
      double dx,
      double dy,
      double dWidth,
      double dHeight) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          IImage image = JSContext.unwrapObject(imageRef);
          if (image != null) {
            if (dWidth_sWidth < 0 || dHeight_sHeight < 0) {
              // 3-arg form: only a destination point was provided.
              ctx.drawImage(image, (float) dx_sx, (float) dy_sy);
            } else if (dx < 0 || dy < 0) {
              // 5-arg form: destination point plus destination size.
              ctx.drawImage(
                  image,
                  (float) dx_sx,
                  (float) dy_sy,
                  (float) dWidth_sWidth,
                  (float) dHeight_sHeight);
            } else {
              // 9-arg form: source rectangle plus destination rectangle.
              ctx.drawImage(
                  image,
                  (float) dx_sx,
                  (float) dy_sy,
                  (float) dWidth_sWidth,
                  (float) dHeight_sHeight,
                  (float) dx,
                  (float) dy,
                  (float) dWidth,
                  (float) dHeight);
            }
          }
        });
  }

  @ReactMethod
  public void getImageData(
      ReadableMap canvasRef, double sx, double sy, double sw, double sh, Promise promise) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ImageData imageData = ctx.getImageData((float) sx, (float) sy, (float) sw, (float) sh);
          JSContext.NativeJSRef imageDataRef = JSContext.wrapObject(imageData);
          promise.resolve(imageDataRef.getJSRef());
        });
  }

  @ReactMethod
  public void putImageData(ReadableMap canvasRef, ReadableMap imageDataRef, double dx, double dy) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ImageData imageData = JSContext.unwrapObject(imageDataRef);
          ctx.putImageData(imageData, (float) dx, (float) dy);
        });
  }

  @ReactMethod
  public void setFont(ReadableMap canvasRef, ReadableMap font) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.setFont(font);
        });
  }

  @ReactMethod
  public void fillText(ReadableMap canvasRef, String text, double x, double y) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.fillText(text, (float) x, (float) y);
        });
  }

  @ReactMethod
  public void strokeText(ReadableMap canvasRef, String text, double x, double y) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.strokeText(text, (float) x, (float) y);
        });
  }

  /**
   * The CanvasRenderingContext2D.setTransform() method of the Canvas 2D API resets (overrides) the
   * current transformation to the identity matrix, and then invokes a transformation described by
   * the arguments of this method. This lets you scale, rotate, translate (move), and skew the
   * context.
   *
   * <p>The transformation matrix is described by:
   *
   * <p>a &amp; c &amp; e
   *
   * <p>b &amp; d &amp; f
   *
   * <p>0 &amp; 0 &amp; 1
   *
   * <p>setTransform() has two types of parameter that it can accept. The older type consists of
   * several parameters representing the individual components of the transformation matrix to set:
   *
   * @param canvasRef The JS ref to the native canvas instance.
   * @param a (m11) Horizontal scaling. A value of 1 results in no scaling.
   * @param b (m12) Vertical skewing.
   * @param c (m21) Horizontal skewing.
   * @param d (m22) Vertical scaling. A value of 1 results in no scaling.
   * @param e (dx) Horizontal translation (moving).
   * @param f (dy) Vertical translation (moving).
   *     <p>See <a
   *     href="https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/setTransform">MDN:
   *     CanvasRenderingContext2D.setTransform</a>
   */
  @ReactMethod
  public void setTransform(
      ReadableMap canvasRef, double a, double b, double c, double d, double e, double f) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.setTransform((float) a, (float) b, (float) c, (float) d, (float) e, (float) f);
        });
  }

  @ReactMethod
  public void scale(ReadableMap canvasRef, double x, double y) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.scale((float) x, (float) y);
        });
  }

  @ReactMethod
  public void rotate(ReadableMap canvasRef, double angle) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.rotate((float) angle);
        });
  }

  @ReactMethod
  public void translate(ReadableMap canvasRef, double x, double y) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.translate((float) x, (float) y);
        });
  }

  @ReactMethod
  public void save(ReadableMap canvasRef, Promise promise) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.save();
          promise.resolve(null);
        });
  }

  @ReactMethod
  public void restore(ReadableMap canvasRef, Promise promise) {
    mMainHandler.post(
        () -> {
          CanvasRenderingContext2D ctx = JSContext.unwrapObject(canvasRef);
          ctx.restore();
          promise.resolve(null);
        });
  }
}
| 4,679 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/audio/Audio.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.audio;
import android.media.MediaDataSource;
import android.media.MediaMetadataRetriever;
import android.media.MediaPlayer;
import android.util.Log;
import androidx.annotation.Nullable;
import java.io.File;
/**
 * Playable audio clip backed by raw 16-bit PCM samples. Playback goes through a lazily-created
 * {@link MediaPlayer} fed by an in-memory {@link MediaDataSource}.
 */
public class Audio implements IAudio {
  public static final String TAG = "PTLTypeAudio";

  private static final int DEFAULT_SPEED = 1;

  // Raw PCM samples backing this clip.
  private final short[] mData;
  @Nullable private MediaPlayer mPlayer;

  public Audio(short[] data) {
    this.mData = data;
  }

  public short[] getData() {
    return mData;
  }

  public void play() {
    if (mPlayer == null) {
      mPlayer = new MediaPlayer();
    }
    MediaDataSource mediaDataSource = AudioUtils.getAudioAsMediaDataSource(mData);
    try {
      // Reset so a repeated play() starts from a clean idle state; calling
      // setDataSource on an already-prepared player throws IllegalStateException.
      mPlayer.reset();
      mPlayer.setDataSource(mediaDataSource);
      mPlayer.prepare();
      mPlayer.setPlaybackParams(mPlayer.getPlaybackParams().setSpeed(DEFAULT_SPEED));
    } catch (Exception e) {
      Log.e(TAG, "Could not play the audio.", e);
    }
    mPlayer.start();
  }

  public void pause() {
    if (mPlayer != null && mPlayer.isPlaying()) {
      mPlayer.pause();
    }
  }

  public void stop() {
    if (mPlayer != null && mPlayer.isPlaying()) {
      mPlayer.stop();
      try {
        // Re-prepare so a subsequent play()/start() works after stop().
        mPlayer.prepare();
      } catch (Exception e) {
        Log.e(TAG, "Could not prepare the audio.", e);
      }
    }
  }

  /**
   * Returns the clip duration as reported by the platform metadata retriever, or -1 if it cannot
   * be determined. The samples are written to a temporary WAV file for probing; the file and the
   * retriever are cleaned up before returning (previously both leaked on every call).
   */
  public int getDuration() {
    final File tempFile = AudioUtils.toTempFile(mData);
    if (tempFile == null) {
      return -1;
    }
    final MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever();
    try {
      mediaMetadataRetriever.setDataSource(tempFile.getAbsolutePath());
      final String durationStr =
          mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
      return Integer.parseInt(durationStr);
    } catch (final Exception e) {
      Log.e(TAG, "Could not extract the audio clip duration.", e);
      return -1;
    } finally {
      try {
        mediaMetadataRetriever.release();
      } catch (Exception ignored) {
        // Best effort; nothing useful to do if release fails.
      }
      // Avoid leaking one temp file per getDuration() call.
      tempFile.delete();
    }
  }
}
| 4,680 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/audio/IAudio.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.audio;
public interface IAudio {
  /** Returns the raw 16-bit PCM samples backing this audio clip. */
  short[] getData();

  /** Starts playback of the clip. */
  void play();

  /** Pauses playback if the clip is currently playing. */
  void pause();

  /** Stops playback if the clip is currently playing. */
  void stop();

  /** Returns the clip duration as reported by the platform, or -1 on failure. */
  int getDuration();
}
| 4,681 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/audio/AudioDataSource.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.audio;
import android.media.MediaDataSource;
import java.io.IOException;
/**
 * In-memory {@link MediaDataSource} backed by a byte buffer, allowing MediaPlayer to play audio
 * without a backing file. All accesses are serialized by the synchronized methods; the previous
 * extra {@code synchronized (dataBuffer)} blocks were redundant (the methods already lock {@code
 * this} and the buffer is only reached through them) and have been removed. The buffer reference
 * is now final since it is never reassigned.
 */
public class AudioDataSource extends MediaDataSource {
  private final byte[] dataBuffer;

  public AudioDataSource(final byte[] buffer) {
    this.dataBuffer = buffer;
  }

  @Override
  public synchronized int readAt(long position, byte[] buffer, int offset, int size)
      throws IOException {
    int length = dataBuffer.length;
    if (position >= length) {
      return -1; // -1 indicates EOF
    }
    // Clamp reads that would run past the end of the buffer.
    if (position + size > length) {
      size -= (position + size) - length;
    }
    System.arraycopy(dataBuffer, (int) position, buffer, offset, size);
    return size;
  }

  @Override
  public synchronized long getSize() throws IOException {
    return dataBuffer.length;
  }

  @Override
  public synchronized void close() throws IOException {}
}
| 4,682 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/audio/IAudioRecord.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.audio;
public interface IAudioRecord {
  /** Returns the raw 16-bit PCM sample data. */
  short[] getData();
}
| 4,683 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/audio/AudioUtils.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.audio;
import android.media.MediaDataSource;
import android.util.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
/**
 * Static helpers for converting raw 16-bit PCM samples (16 kHz mono) to playable WAV data, either
 * as an in-memory {@link MediaDataSource} or as a temporary file.
 */
public class AudioUtils {
  public static final String TAG = "PTLAudioUtils";

  private static final int SAMPLE_RATE = 16000;
  private static final int CHANNELS = 1;
  private static final int PCM_BITS = 16;
  private static final int HEADER_FORMAT_PCM = 1;
  private static final int HEADER_SIZE_BYTES = 44;

  // Utility class; not instantiable.
  private AudioUtils() {}

  /**
   * Wraps the PCM samples (with a WAV header prepended) in an in-memory media data source, or
   * returns null on failure.
   */
  public static MediaDataSource getAudioAsMediaDataSource(short[] data) {
    try {
      final byte[] completeAudioFileData = prepareAudioData(data);
      return new AudioDataSource(completeAudioFileData);
    } catch (Exception e) {
      Log.e(TAG, "Exception while creating the audio data source : ", e);
      return null;
    }
  }

  /**
   * Helper method to convert an array of short to an array of byte.
   *
   * @param data short[] to be converted
   * @return converted byte[] (little-endian, as required by WAV PCM)
   */
  public static byte[] toByteArray(final short[] data) {
    int index;
    int iterations = data.length;
    ByteBuffer bb = ByteBuffer.allocate(data.length * 2);
    bb.order(ByteOrder.LITTLE_ENDIAN);
    for (index = 0; index != iterations; ++index) {
      bb.putShort(data[index]);
    }
    return bb.array();
  }

  /**
   * Helper method to convert an array of byte to an array of short.
   *
   * @param data byte[] to be converted
   * @return converted short[]
   */
  public static short[] toShortArray(final byte[] data) {
    final short[] shorts = new short[data.length / 2];
    // Use LITTLE_ENDIAN explicitly so the round trip with toByteArray is
    // consistent regardless of the platform's native byte order (previously
    // nativeOrder() was used here while toByteArray always wrote little-endian).
    ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(shorts);
    return shorts;
  }

  /**
   * Helper method to write the audio data to a temp file.
   *
   * @param data short[] to be written to a file
   * @return file reference which holds the audio data, or null on failure
   */
  public static File toTempFile(final short[] data) {
    try {
      final byte[] completeAudioFileData = prepareAudioData(data);
      final File tempFile = File.createTempFile("temp", "wav");
      final OutputStream fileOutputStream = new FileOutputStream(tempFile);
      fileOutputStream.write(completeAudioFileData);
      fileOutputStream.flush();
      fileOutputStream.close();
      return tempFile;
    } catch (Exception e) {
      Log.e(TAG, "Exception while writing audio data to tempfile : ", e);
      return null;
    }
  }

  /**
   * Helper method to generate a standard 'wav' file header.
   *
   * @param fileSize TOTAL size of the file in bytes (44-byte header + PCM payload)
   * @param audioFormat Audio Header format (1 for PCM)
   * @param channels Number of channels
   * @param sampleRate Sample rate of the audio
   * @param bitsPerSample Bits per sample of the audio
   * @return byte[] data of the header of the wav file
   */
  private static byte[] getWaveHeader(
      int fileSize, int audioFormat, int channels, int sampleRate, int bitsPerSample) {
    int byteRate = sampleRate * bitsPerSample * channels / 8;
    int bitSampleChannelRate = bitsPerSample * channels / 8;
    int chunkSize = fileSize - 8; // The file size - 8 bytes for RIFF and chunk size.
    int audioLength = fileSize - HEADER_SIZE_BYTES; // The file size - 44 bytes.
    byte[] header = new byte[HEADER_SIZE_BYTES];
    header[0] = 'R'; // RIFF in 4 bytes.
    header[1] = 'I';
    header[2] = 'F';
    header[3] = 'F';
    header[4] = (byte) (chunkSize & 0xff); // Chunk size in 4 bytes.
    header[5] = (byte) ((chunkSize >> 8) & 0xff);
    header[6] = (byte) ((chunkSize >> 16) & 0xff);
    header[7] = (byte) ((chunkSize >> 24) & 0xff);
    header[8] = 'W'; // WAVE in 4 bytes.
    header[9] = 'A';
    header[10] = 'V';
    header[11] = 'E';
    header[12] = 'f'; // 'fmt ' chunk in 4 bytes with trailing space in 4 bytes.
    header[13] = 'm';
    header[14] = 't';
    header[15] = ' ';
    header[16] = 16; // 4 bytes: size of 'fmt ' chunk in 4 bytes.
    header[17] = 0;
    header[18] = 0;
    header[19] = 0;
    header[20] = (byte) audioFormat; // Format (1 for PCM) in 2 bytes.
    header[21] = 0;
    header[22] = (byte) channels; // Number of channels in 2 bytes.
    header[23] = 0;
    header[24] = (byte) (sampleRate & 0xff); // Sample rate in 4 bytes.
    header[25] = (byte) ((sampleRate >> 8) & 0xff);
    header[26] = (byte) ((sampleRate >> 16) & 0xff);
    header[27] = (byte) ((sampleRate >> 24) & 0xff);
    header[28] = (byte) (byteRate & 0xff); // Byte rate in 4 bytes.
    header[29] = (byte) ((byteRate >> 8) & 0xff);
    header[30] = (byte) ((byteRate >> 16) & 0xff);
    header[31] = (byte) ((byteRate >> 24) & 0xff);
    header[32] = (byte) bitSampleChannelRate; // bitSampleChannelRate in 2 bytes.
    header[33] = 0;
    header[34] = (byte) bitsPerSample; // Bits per sample in 2 bytes.
    header[35] = 0;
    header[36] = 'd'; // Data in 4 bytes.
    header[37] = 'a';
    header[38] = 't';
    header[39] = 'a';
    header[40] = (byte) (audioLength & 0xff);
    header[41] = (byte) ((audioLength >> 8) & 0xff);
    header[42] = (byte) ((audioLength >> 16) & 0xff);
    header[43] = (byte) ((audioLength >> 24) & 0xff);
    return header;
  }

  /**
   * Helper method to convert the audio file data by appending the 'wav' header to the raw audio
   * bytes.
   *
   * @param data short[] to be written to a file which holds raw audio data
   * @return byte[] reference which holds the raw audio data and wav header
   */
  private static byte[] prepareAudioData(final short[] data) {
    final byte[] audioDataBytes = AudioUtils.toByteArray(data);
    // getWaveHeader expects the TOTAL file size (header + payload); it derives
    // the RIFF chunk size as fileSize - 8 and the data chunk size as
    // fileSize - 44. Passing only the payload length here previously wrote
    // sizes that were 44 bytes too small.
    final byte[] header =
        getWaveHeader(
            audioDataBytes.length + HEADER_SIZE_BYTES,
            HEADER_FORMAT_PCM,
            CHANNELS,
            SAMPLE_RATE,
            PCM_BITS);
    final byte[] completeAudioFileData =
        Arrays.copyOf(header, header.length + audioDataBytes.length);
    System.arraycopy(
        audioDataBytes, 0, completeAudioFileData, header.length, audioDataBytes.length);
    return completeAudioFileData;
  }
}
| 4,684 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/audio/AudioModule.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.audio;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.net.Uri;
import android.os.Build;
import android.util.Log;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.module.annotations.ReactModule;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import org.jetbrains.annotations.NotNull;
import org.pytorch.rn.core.javascript.JSContext;
import org.pytorch.rn.core.utils.FileUtils;
@ReactModule(name = "PyTorchCoreAudioModule")
public class AudioModule extends ReactContextBaseJavaModule {
public static final String TAG = "PTLAudioModule";
public static final String NAME = "PyTorchCoreAudioModule";
private static final int REQUEST_RECORD_AUDIO = 13;
private static final int SAMPLE_RATE = 16000;
private static final String DEFAULT_AUDIO_FILE_PREFIX = "audio";
private static final String DEFAULT_AUDIO_FILE_EXTENSION = ".wav";
private final ReactApplicationContext mReactContext;
private volatile boolean mIsRecording;
private int mBufferSize;
private List<short[]> mAudioDataChunks = new ArrayList<>();
@Nullable private AudioRecord audioRecorder;
  public AudioModule(ReactApplicationContext reactContext) {
    super(reactContext);
    mReactContext = reactContext;
    // Minimum AudioRecord buffer for 16 kHz mono 16-bit PCM capture.
    mBufferSize =
        AudioRecord.getMinBufferSize(
            SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
  }
  @NotNull
  @Override
  public String getName() {
    // Name under which this native module is registered with React Native.
    return NAME;
  }
@ReactMethod
public void isRecording(final Promise promise) {
boolean recording = false;
if (audioRecorder != null) {
recording = audioRecorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING;
}
promise.resolve(recording);
}
@ReactMethod
public void startRecord() {
Log.d(TAG, "started recording");
requestMicrophonePermission();
Thread recordingThread = new Thread(getStartRecordingThread());
recordingThread.start();
}
@ReactMethod
public void stopRecord(final Promise promise) {
if (!mIsRecording || mAudioDataChunks.isEmpty()) {
promise.resolve(null);
}
mIsRecording = false;
synchronized (mAudioDataChunks) {
// Wait for the recording thread to finish recording
final Audio recordedAudio = processRecordedAudio();
JSContext.NativeJSRef ref = JSContext.wrapObject(recordedAudio);
promise.resolve(ref.getJSRef());
}
}
@ReactMethod
public void play(ReadableMap audioRef) {
IAudio audio = JSContext.unwrapObject(audioRef);
audio.play();
}
@ReactMethod
public void pause(ReadableMap audioRef) {
IAudio audio = JSContext.unwrapObject(audioRef);
audio.pause();
}
@ReactMethod
public void stop(ReadableMap audioRef) {
IAudio audio = JSContext.unwrapObject(audioRef);
audio.stop();
}
@ReactMethod(isBlockingSynchronousMethod = true)
public int getDuration(ReadableMap audioRef) {
IAudio audio = JSContext.unwrapObject(audioRef);
return audio.getDuration();
}
@ReactMethod
public void release(ReadableMap audioRef, Promise promise) throws Exception {
JSContext.release(audioRef);
promise.resolve(null);
}
@ReactMethod
public void toFile(final ReadableMap audioRef, Promise promise) {
try {
final IAudio audio = JSContext.unwrapObject(audioRef);
final File cacheDir = mReactContext.getCacheDir();
final File file =
File.createTempFile(DEFAULT_AUDIO_FILE_PREFIX, DEFAULT_AUDIO_FILE_EXTENSION, cacheDir);
final FileOutputStream outputStream = new FileOutputStream(file);
final byte[] audioBytes = AudioUtils.toByteArray(audio.getData());
outputStream.write(audioBytes);
promise.resolve(file.getAbsolutePath());
} catch (final IOException e) {
promise.reject(e);
}
}
@ReactMethod
public void fromFile(final String filePath, final Promise promise) {
try {
final byte[] audioData = Files.readAllBytes(Paths.get(filePath));
final IAudio audio = new Audio(AudioUtils.toShortArray(audioData));
final JSContext.NativeJSRef ref = JSContext.wrapObject(audio);
promise.resolve(ref.getJSRef());
} catch (final IOException | OutOfMemoryError | SecurityException exp) {
promise.reject(new Error("Could not load audio from " + filePath + " " + exp.getMessage()));
}
}
@ReactMethod
public void fromBundle(final String uriString, final Promise promise) {
final Uri uri = Uri.parse(uriString);
final File targetFile = new File(getReactApplicationContext().getCacheDir(), uri.getPath());
FileUtils.downloadUriToFile(uriString, targetFile);
fromFile(targetFile.getPath(), promise);
}
private void requestMicrophonePermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
ActivityCompat.requestPermissions(
mReactContext.getCurrentActivity(),
new String[] {android.Manifest.permission.RECORD_AUDIO},
REQUEST_RECORD_AUDIO);
}
}
private Audio processRecordedAudio() {
int index = 0;
final short[] audioData = new short[mAudioDataChunks.size() * mBufferSize / 2];
for (int i = 0; i < mAudioDataChunks.size(); i++) {
final short[] audioDataChunk = mAudioDataChunks.get(i);
for (int j = 0; j < audioDataChunk.length; j++) {
audioData[index++] = audioDataChunk[j];
}
}
return new Audio(audioData);
}
private Runnable getStartRecordingThread() {
return new Runnable() {
@Override
public void run() {
synchronized (mAudioDataChunks) {
try {
audioRecorder =
new AudioRecord(
MediaRecorder.AudioSource.DEFAULT,
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
mBufferSize);
if (audioRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
Log.e(TAG, "Audio Record can't initialize!");
return;
}
audioRecorder.startRecording();
mIsRecording = true;
mAudioDataChunks = new ArrayList<>();
while (mIsRecording) {
final short[] audioBuffer = new short[mBufferSize / 2];
final int numberOfShort = audioRecorder.read(audioBuffer, 0, audioBuffer.length);
mAudioDataChunks.add(audioBuffer);
}
} catch (final Exception e) {
mIsRecording = false;
mAudioDataChunks.clear();
Log.e(TAG, "Error recording audio:", e);
throw new RuntimeException(
"Exception encountered while recording audio. " + e.getMessage());
} finally {
if (audioRecorder != null) {
audioRecorder.stop();
audioRecorder.release();
audioRecorder = null;
}
}
}
}
};
}
}
| 4,685 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/ml/ModelLoaderModule.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.ml;
import android.util.Log;
import androidx.annotation.NonNull;
import com.facebook.react.bridge.Promise;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.module.annotations.ReactModule;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.pytorch.rn.core.utils.ModelUtils;
@ReactModule(name = "PyTorchCoreModelLoaderModule")
public class ModelLoaderModule extends ReactContextBaseJavaModule {
public static final String TAG = "PTLModelLoaderModule";
public static final String NAME = "PyTorchCoreModelLoaderModule";
private final ReactApplicationContext mReactContext;
private final ExecutorService executorService = Executors.newFixedThreadPool(1);
public ModelLoaderModule(ReactApplicationContext reactContext) {
super(reactContext);
mReactContext = reactContext;
}
@NonNull
@Override
public String getName() {
return NAME;
}
@ReactMethod
public void download(final String modelUri, Promise promise) {
executorService.execute(
() -> {
try {
Log.d(TAG, "Preload model: " + modelUri);
File targetFile = ModelUtils.downloadModel(mReactContext, modelUri);
promise.resolve(targetFile.getAbsolutePath());
} catch (IOException e) {
promise.reject(e);
}
});
}
}
| 4,686 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/javascript/JSContext.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.javascript;
import com.facebook.react.bridge.Arguments;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.bridge.WritableMap;
import java.util.UUID;
import java.util.WeakHashMap;
/**
 * Registry that bridges native Java objects to JavaScript. Each wrapped object is stored
 * under a random UUID string; the JS side holds only a map {@code {ID: <uuid>}} and hands
 * it back to native methods, which resolve it to the wrapped object here.
 */
public class JSContext {
  // Key used inside the WritableMap handed to JavaScript.
  protected static final String ID_KEY = "ID";

  // Maps ref IDs to their NativeJSRef. Each NativeJSRef strongly references its own key
  // via mId, so an entry stays reachable until release() nulls mId; only after that can
  // the WeakHashMap entry be garbage-collected. NOTE(review): this relies on the UUID
  // string not being strongly referenced anywhere else — confirm before changing the
  // key type or caching IDs.
  private static final WeakHashMap<String, NativeJSRef> refs = new WeakHashMap<>();

  /** Registers {@code ref} under a freshly generated UUID and returns that ID. */
  public static String setRef(NativeJSRef ref) {
    String id = UUID.randomUUID().toString();
    JSContext.refs.put(id, ref);
    return id;
  }

  /** Looks up the ref for an ID; returns null when the ID is unknown or already released. */
  public static NativeJSRef getRef(String id) {
    return JSContext.refs.get(id);
  }

  /** Resolves a JS-side {@code {ID: ...}} map to its NativeJSRef. */
  public static NativeJSRef get(ReadableMap jsRef) {
    String id = jsRef.getString(ID_KEY);
    return JSContext.getRef(id);
  }

  /**
   * Releases the ref for {@code id}: closes the wrapped object (if AutoCloseable) and
   * removes the registry entry.
   *
   * @throws Exception forwarded from the wrapped object's close()
   */
  public static void release(String id) throws Exception {
    NativeJSRef ref = JSContext.getRef(id);
    ref.release();
    JSContext.refs.remove(id);
  }

  /** Convenience overload of {@link #release(String)} taking the JS-side map. */
  public static void release(ReadableMap jsRef) throws Exception {
    String id = jsRef.getString(ID_KEY);
    JSContext.release(id);
  }

  /** Wraps a native object and registers it, returning the new ref. */
  public static NativeJSRef wrapObject(Object object) {
    return new NativeJSRef(object);
  }

  /**
   * Returns the wrapped object for an ID, cast to the caller's expected type.
   * The cast is unchecked; a wrong expectation surfaces as a ClassCastException at the
   * call site.
   */
  public static <T> T unwrapObject(String id) {
    NativeJSRef ref = JSContext.getRef(id);
    return (T) ref.getObject();
  }

  /** Convenience overload of {@link #unwrapObject(String)} taking the JS-side map. */
  public static <T> T unwrapObject(ReadableMap jsRef) {
    String id = jsRef.getString(ID_KEY);
    return JSContext.unwrapObject(id);
  }

  /**
   * Pairs a wrapped native object with its registry ID and the WritableMap handed to JS.
   */
  public static final class NativeJSRef {
    private String mId;
    private Object mObject;
    private WritableMap mJSRef;

    protected NativeJSRef(Object object) {
      mObject = object;
      // Registering in the constructor makes the ref resolvable before getJSRef() is called.
      mId = JSContext.setRef(this);
      mJSRef = Arguments.createMap();
      mJSRef.putString(JSContext.ID_KEY, mId);
    }

    /** The {@code {ID: <uuid>}} map to return across the bridge. */
    public WritableMap getJSRef() {
      return mJSRef;
    }

    /** The wrapped native object. */
    public Object getObject() {
      return mObject;
    }

    /**
     * This method is deliberately private to only allow the outer class to release a NativeJSRef
     * instance.
     *
     * @throws Exception Forwarding any exception that is raised during the close call of the
     *     wrapped object.
     */
    private void release() throws Exception {
      if (mObject instanceof AutoCloseable) {
        ((AutoCloseable) mObject).close();
      }
      // Remove reference to id, which will release the associated NativeJSRef object stored as
      // value for the ID key.
      mId = null;
      mJSRef = null;
      mObject = null;
    }
  }
}
| 4,687 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/media/BlobUtils.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.media;
import android.graphics.Bitmap;
import androidx.annotation.Keep;
import com.facebook.proguard.annotations.DoNotStrip;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import org.pytorch.rn.core.audio.IAudio;
import org.pytorch.rn.core.image.IImage;
import org.pytorch.rn.core.javascript.JSContext;
/**
 * Helpers for serializing native image/audio objects into direct byte buffers consumed by
 * the C++ Blob implementation.
 */
public class BlobUtils {
  // Keep blob type constants in sync with cxx/src/torchlive/media/Blob.h
  @DoNotStrip public static final String kBlobTypeImageGrayscale = "image/x-playtorch-grayscale";
  @DoNotStrip public static final String kBlobTypeImageRGB = "image/x-playtorch-rgb";
  @DoNotStrip public static final String kBlobTypeImageRGBA = "image/x-playtorch-rgba";
  @DoNotStrip public static final String kBlobTypeAudio = "audio/x-playtorch";

  /** Serializes the image or audio object behind {@code refId} into a direct byte buffer. */
  @DoNotStrip
  @Keep
  public static ByteBuffer nativeJSRefToByteBuffer(final String refId) {
    final Object wrapped = JSContext.getRef(refId).getObject();
    if (wrapped instanceof IImage) {
      return bitmapToByteBuffer(((IImage) wrapped).getBitmap());
    }
    if (wrapped instanceof IAudio) {
      return shortToByteBuffer(((IAudio) wrapped).getData());
    }
    throw new UnsupportedOperationException(
        "Cannot create ByteBuffer for type " + wrapped.getClass().getName());
  }

  /** Returns the blob MIME-style type string for the object behind {@code refId}. */
  @DoNotStrip
  @Keep
  public static String nativeJSRefToType(final String refId) {
    final Object wrapped = JSContext.getRef(refId).getObject();
    if (wrapped instanceof IImage) {
      return kBlobTypeImageRGB;
    }
    if (wrapped instanceof IAudio) {
      return kBlobTypeAudio;
    }
    throw new UnsupportedOperationException(
        "Cannot get type for class: " + wrapped.getClass().getName());
  }

  /**
   * Packs a bitmap's RGB bytes into a native-order direct buffer. The buffer position is
   * left at the limit after put(), matching prior behavior — consumers presumably rewind;
   * confirm on the native side before changing.
   */
  @DoNotStrip
  @Keep
  public static ByteBuffer bitmapToByteBuffer(Bitmap bitmap) {
    final byte[] rgb = bitmapToRGB(bitmap);
    final ByteBuffer out = ByteBuffer.allocateDirect(rgb.length).order(ByteOrder.nativeOrder());
    out.put(rgb);
    return out;
  }

  /** Extracts 3 bytes (R, G, B) per pixel from an ARGB bitmap; alpha is dropped. */
  @DoNotStrip
  @Keep
  public static byte[] bitmapToRGB(final Bitmap bitmap) {
    final int width = bitmap.getWidth();
    final int height = bitmap.getHeight();
    final int[] argb = new int[width * height];
    bitmap.getPixels(argb, 0, width, 0, 0, width, height);
    final byte[] rgb = new byte[argb.length * 3];
    for (int p = 0; p < argb.length; p++) {
      final int pixel = argb[p];
      final int base = p * 3;
      rgb[base] = (byte) ((pixel >> 16) & 0xff); // red
      rgb[base + 1] = (byte) ((pixel >> 8) & 0xff); // green
      rgb[base + 2] = (byte) (pixel & 0xff); // blue
    }
    return rgb;
  }

  /**
   * Packs shorts into a native-order direct buffer (2 bytes per short). As with
   * bitmapToByteBuffer, the position is left at the limit.
   */
  @DoNotStrip
  @Keep
  public static ByteBuffer shortToByteBuffer(final short[] data) {
    final ByteBuffer out =
        ByteBuffer.allocateDirect(data.length * 2).order(ByteOrder.nativeOrder());
    for (int i = 0; i < data.length; i++) {
      out.putShort(data[i]);
    }
    return out;
  }
}
| 4,688 |
0 | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core | Create_ds/playtorch/react-native-pytorch-core/android/src/main/java/org/pytorch/rn/core/media/MediaUtils.java | /*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package org.pytorch.rn.core.media;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import androidx.annotation.Keep;
import com.facebook.proguard.annotations.DoNotStrip;
import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import org.pytorch.rn.core.audio.Audio;
import org.pytorch.rn.core.audio.AudioUtils;
import org.pytorch.rn.core.audio.IAudio;
import org.pytorch.rn.core.image.IImage;
import org.pytorch.rn.core.image.Image;
import org.pytorch.rn.core.javascript.JSContext;
/**
 * JNI-facing helpers for converting between native image/audio objects, files, and raw
 * byte buffers. All methods are kept (@DoNotStrip/@Keep) because they are called from C++.
 */
public class MediaUtils {

  /** Resolves a JSContext ref ID directly to an IImage. Deprecated-style escape hatch. */
  @DoNotStrip
  @Keep
  public static IImage resolveNativeJSRefToImage_DO_NOT_USE(final String refId) {
    return JSContext.unwrapObject(refId);
  }

  /** Wraps an object in the JSContext registry and returns its string ref ID. */
  @DoNotStrip
  @Keep
  public static String wrapObject(final Object obj) {
    final JSContext.NativeJSRef ref = JSContext.wrapObject(obj);
    return ref.getJSRef().getString("ID");
  }

  /** Releases the registry entry (and closes the wrapped object) for a ref ID. */
  @DoNotStrip
  @Keep
  public static void releaseObject(final String id) throws Exception {
    JSContext.release(id);
  }

  /**
   * Compresses the image's bitmap as PNG into {@code filepath} and returns the path.
   *
   * @throws Exception on I/O failure
   */
  @DoNotStrip
  @Keep
  public static String imageToFile(final IImage image, final String filepath) throws Exception {
    final Bitmap bitmap = image.getBitmap();
    // try-with-resources: the stream was previously leaked on both success and failure.
    try (FileOutputStream outputStream = new FileOutputStream(filepath)) {
      bitmap.compress(Bitmap.CompressFormat.PNG, 100, outputStream);
    }
    return filepath;
  }

  /** Decodes an image file into a new Image wrapper. */
  @DoNotStrip
  @Keep
  public static IImage imageFromFile(final String filepath) {
    Bitmap bitmap = BitmapFactory.decodeFile(filepath);
    return new Image(bitmap);
  }

  /**
   * Builds an Image from a raw pixel blob. The blob type string selects the channel
   * layout (grayscale / RGB / RGBA); anything else is rejected.
   */
  @DoNotStrip
  @Keep
  public static IImage imageFromBlob(
      final ByteBuffer buffer, final double width, final double height, final String type) {
    buffer.order(ByteOrder.nativeOrder());
    boolean hasAlpha = false;
    int channels;
    if (BlobUtils.kBlobTypeImageGrayscale.equals(type)) {
      channels = 1;
      hasAlpha = false;
    } else if (BlobUtils.kBlobTypeImageRGB.equals(type)) {
      channels = 3;
      hasAlpha = false;
    } else if (BlobUtils.kBlobTypeImageRGBA.equals(type)) {
      channels = 4;
      hasAlpha = true;
    } else {
      throw new UnsupportedOperationException("Cannot create image from blob with type: " + type);
    }
    final Bitmap bitmap = blobToBitmap(buffer, (int) width, (int) height, hasAlpha, channels);
    return new Image(bitmap);
  }

  /** Serializes an image's RGB bytes into a direct buffer (see bitmapToByteBuffer). */
  @DoNotStrip
  @Keep
  public static ByteBuffer imageToByteBuffer(final IImage image) {
    final Bitmap bitmap = image.getBitmap();
    return bitmapToByteBuffer(bitmap);
  }

  /**
   * Converts a raw pixel blob into an ARGB_8888 bitmap.
   *
   * <p>For grayscale (channels == 1) the single byte is replicated into R, G, B. For the
   * multi-channel path, RGB is scaled by alpha/255 because the bitmap is flagged
   * premultiplied via setPremultiplied(true).
   */
  @DoNotStrip
  @Keep
  public static Bitmap blobToBitmap(
      final ByteBuffer buffer,
      final int width,
      final int height,
      final boolean hasAlpha,
      final int channels) {
    final Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    int[] pixels = new int[width * height];
    if (channels == 1) {
      // Grayscale with 1 channel
      for (int i = 0; i < width * height; i++) {
        final int value = (int) (buffer.get(i) & 0xff);
        pixels[i] = Color.rgb(value, value, value);
      }
    } else {
      final int length = buffer.limit();
      final byte[] data = new byte[length];
      buffer.get(data);
      int n = 0;
      bitmap.setPremultiplied(true);
      for (int i = 0; i < width * height; i++) {
        // Alpha is peeked at offset +3 before the RGB reads advance n.
        int a = (int) (hasAlpha ? (data[n + 3] & 0xff) : 255);
        int r = (int) (data[n++] & 0xff) * a / 255;
        int g = (int) (data[n++] & 0xff) * a / 255;
        int b = (int) (data[n++] & 0xff) * a / 255;
        if (hasAlpha) {
          n++;
        }
        pixels[i] = (hasAlpha ? Color.argb(a, r, g, b) : Color.rgb(r, g, b));
      }
    }
    bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
    return bitmap;
  }

  /**
   * Packs a bitmap's RGB bytes into a native-order direct buffer. The position is left at
   * the limit after put() — presumably the native consumer rewinds; confirm before changing.
   */
  @DoNotStrip
  @Keep
  public static ByteBuffer bitmapToByteBuffer(Bitmap bitmap) {
    byte[] buffer = bitmapToRGB(bitmap);
    ByteBuffer byteBuffer = ByteBuffer.allocateDirect(buffer.length);
    byteBuffer.order(ByteOrder.nativeOrder());
    byteBuffer.put(buffer);
    return byteBuffer;
  }

  /** Extracts 3 bytes (R, G, B) per pixel from an ARGB bitmap; alpha is dropped. */
  @DoNotStrip
  @Keep
  public static byte[] bitmapToRGB(final Bitmap bitmap) {
    final int[] pixels = new int[bitmap.getWidth() * bitmap.getHeight()];
    final byte[] bytes = new byte[pixels.length * 3];
    bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
    int i = 0;
    for (int pixel : pixels) {
      // Get components assuming is ARGB
      int R = (pixel >> 16) & 0xff;
      int G = (pixel >> 8) & 0xff;
      int B = pixel & 0xff;
      bytes[i++] = (byte) R;
      bytes[i++] = (byte) G;
      bytes[i++] = (byte) B;
    }
    return bytes;
  }

  /**
   * Builds an Audio object from raw PCM bytes.
   *
   * <p>NOTE(review): {@code sampleRate} is accepted but never used — Audio presumably
   * assumes a fixed rate (16 kHz elsewhere in this package); confirm with callers.
   */
  @DoNotStrip
  @Keep
  public static IAudio audioFromBytes(final byte[] bytes, int sampleRate) {
    final IAudio audio = new Audio(AudioUtils.toShortArray(bytes));
    return audio;
  }
}
| 4,689 |
0 | Create_ds/sagemaker-scikit-learn-container/docker/1.2-1/resources | Create_ds/sagemaker-scikit-learn-container/docker/1.2-1/resources/mms/ExecutionParameters.java | package software.amazon.ai.mms.plugins.endpoint;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.SerializedName;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Properties;
import software.amazon.ai.mms.servingsdk.Context;
import software.amazon.ai.mms.servingsdk.ModelServerEndpoint;
import software.amazon.ai.mms.servingsdk.annotations.Endpoint;
import software.amazon.ai.mms.servingsdk.annotations.helpers.EndpointTypes;
import software.amazon.ai.mms.servingsdk.http.Request;
import software.amazon.ai.mms.servingsdk.http.Response;
/**
The modified endpoint source code for the jar used in this container.
You can create this endpoint by moving it by cloning the MMS repo:
> git clone https://github.com/awslabs/mxnet-model-server.git
Copy this file into plugins/endpoints/src/main/java/software/amazon/ai/mms/plugins/endpoints/
and then from the plugins directory, run:
> ./gradlew fJ
Modify file in plugins/endpoint/resources/META-INF/services/* to specify this file location
Then build the JAR:
> ./gradlew build
The jar should be available in plugins/endpoints/build/libs as endpoints-1.0.jar
**/
@Endpoint(
    urlPattern = "execution-parameters",
    endpointType = EndpointTypes.INFERENCE,
    description = "Execution parameters endpoint")
public class ExecutionParameters extends ModelServerEndpoint {

  /**
   * Answers SageMaker's execution-parameters probe with the worker count, batch strategy,
   * and max payload derived from the model server configuration, as pretty-printed JSON.
   */
  @Override
  public void doGet(Request req, Response rsp, Context ctx) throws IOException {
    final Properties config = ctx.getConfig();
    // Default request size is 6 * 1024 * 1024 bytes.
    final int maxRequestSizeBytes =
        Integer.parseInt(config.getProperty("max_request_size", "6291456"));

    final SagemakerXgboostResponse params = new SagemakerXgboostResponse();
    params.setMaxConcurrentTransforms(Integer.parseInt(config.getProperty("NUM_WORKERS", "1")));
    params.setBatchStrategy("MULTI_RECORD");
    params.setMaxPayloadInMB(maxRequestSizeBytes / (1024 * 1024));

    final String json = new GsonBuilder().setPrettyPrinting().create().toJson(params);
    rsp.getOutputStream().write(json.getBytes(StandardCharsets.UTF_8));
  }

  /** Response for Model server endpoint */
  public static class SagemakerXgboostResponse {

    // Field initializers replace the old explicit constructor; defaults are identical.
    @SerializedName("MaxConcurrentTransforms")
    private int maxConcurrentTransforms = 4;

    @SerializedName("BatchStrategy")
    private String batchStrategy = "MULTI_RECORD";

    @SerializedName("MaxPayloadInMB")
    private int maxPayloadInMB = 6;

    public int getMaxConcurrentTransforms() {
      return maxConcurrentTransforms;
    }

    public String getBatchStrategy() {
      return batchStrategy;
    }

    public int getMaxPayloadInMB() {
      return maxPayloadInMB;
    }

    public void setMaxConcurrentTransforms(int newMaxConcurrentTransforms) {
      maxConcurrentTransforms = newMaxConcurrentTransforms;
    }

    public void setBatchStrategy(String newBatchStrategy) {
      batchStrategy = newBatchStrategy;
    }

    public void setMaxPayloadInMB(int newMaxPayloadInMB) {
      maxPayloadInMB = newMaxPayloadInMB;
    }
  }
}
| 4,690 |
0 | Create_ds/dgs-intellij-plugin/src/test/testdata | Create_ds/dgs-intellij-plugin/src/test/testdata/DgsComponentInspectorTest/MissingDgsComponent.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.netflix.graphql.dgs.DgsQuery;
public class <warning descr="A class should be annotated @DgsComponent when DGS annotations are used within the class">MissingDgsComponent<caret></warning> {
@DgsQuery
public String hello() {
return "hello";
}
} | 4,691 |
0 | Create_ds/dgs-intellij-plugin/src/test/testdata | Create_ds/dgs-intellij-plugin/src/test/testdata/DgsComponentInspectorTest/FixedMissingDgsComponent.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.netflix.graphql.dgs.DgsComponent;
import com.netflix.graphql.dgs.DgsQuery;
@DgsComponent
public class MissingDgsComponent {
@DgsQuery
public String hello() {
return "hello";
}
} | 4,692 |
0 | Create_ds/dgs-intellij-plugin/src/test/testdata | Create_ds/dgs-intellij-plugin/src/test/testdata/DgsEntityFetcherInspectorTest/MissingDgsEntityFetcher.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@DgsComponent
public class MissingEntityFetcher {
@DgsQuery
public String hello() {
return "hello";
}
public String movie() {
return "MovieA";
}
} | 4,693 |
0 | Create_ds/dgs-intellij-plugin/src/test/testdata | Create_ds/dgs-intellij-plugin/src/test/testdata/DgsDataSimplifyingInspectorTest/FixedDgsDataForQuery.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.netflix.graphql.dgs.DgsComponent;
import com.netflix.graphql.dgs.DgsData;
import com.netflix.graphql.dgs.DgsQuery;
@DgsComponent
public class MissingDgsComponent {
@DgsQuery
public String hello() {
return "hello";
}
} | 4,694 |
0 | Create_ds/dgs-intellij-plugin/src/test/testdata | Create_ds/dgs-intellij-plugin/src/test/testdata/DgsDataSimplifyingInspectorTest/UsingDgsDataForQuery.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.netflix.graphql.dgs.DgsComponent;
import com.netflix.graphql.dgs.DgsData;
@DgsComponent
public class MissingDgsComponent {
<warning descr="@DgsData(parentType=\"Query\") can be simplified to @DgsQuery"><caret>@DgsData(parentType = "Query", field = "hello")</warning>
public String hello() {
return "hello";
}
} | 4,695 |
0 | Create_ds/dgs-intellij-plugin/src/main/java/com/netflix/dgs/plugin | Create_ds/dgs-intellij-plugin/src/main/java/com/netflix/dgs/plugin/services/DgsService.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.dgs.plugin.services;
import com.intellij.openapi.project.Project;
import org.jetbrains.kotlin.modules.Module;
/**
 * Project service exposing the DGS framework integration: a lazily built index of DGS
 * components and a check for whether the project uses DGS at all.
 */
public interface DgsService {
    /** Returns the (possibly cached) index of DGS components discovered in the project. */
    DgsComponentIndex getDgsComponentIndex();

    /** True when the given project appears to use the DGS framework — presumably detected via dependencies; confirm in the implementation. */
    boolean isDgsProject(Project project);

    /** Drops any cached component index so the next call to getDgsComponentIndex() rebuilds it. */
    void clearCache();
}
| 4,696 |
0 | Create_ds/dgs-intellij-plugin/src/main/java/com/netflix/dgs/plugin/services | Create_ds/dgs-intellij-plugin/src/main/java/com/netflix/dgs/plugin/services/internal/GraphQLSchemaRegistry.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.dgs.plugin.services.internal;
import com.intellij.lang.jsgraphql.schema.GraphQLRegistryProvider;
import com.intellij.lang.jsgraphql.types.language.*;
import com.intellij.lang.jsgraphql.types.schema.idl.TypeDefinitionRegistry;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
public class GraphQLSchemaRegistry {
private final Project project;
public GraphQLSchemaRegistry(Project project) {
this.project = project;
}
public @Nullable
Optional<PsiElement> psiForSchemaType(@NotNull PsiElement psiElement, @Nullable String parentType, @Nullable String field) {
TypeDefinitionRegistry registry = getRegistry(psiElement);
List<ObjectTypeDefinition> objectTypes = getTypeDefinitions(registry, parentType);
if (!objectTypes.isEmpty()) {
Optional<FieldDefinition> schemaField = objectTypes.stream().map(ObjectTypeDefinition::getFieldDefinitions).flatMap(Collection::stream)
.filter(e -> e.getName().equals(field)).findAny();
if (schemaField.isPresent()) {
return Optional.ofNullable(schemaField.get().getSourceLocation().getElement());
}
} else if ("_entities".equals(parentType)) {
Optional<ObjectTypeDefinition> entitiesType = getTypeDefinition(registry, field);
if (entitiesType.isPresent()) {
return Optional.ofNullable(entitiesType.get().getElement());
}
} else {
Optional<InterfaceTypeDefinition> interfaceType = getInterfaceTypeDefinition(registry, parentType);
if (interfaceType.isPresent()) {
Optional<FieldDefinition> schemaField = interfaceType.get().getFieldDefinitions().stream().filter(f -> f.getName().equals(field)).findAny();
if (schemaField.isPresent()) {
return Optional.ofNullable(schemaField.get().getSourceLocation().getElement());
}
}
}
return Optional.empty();
}
public Optional<PsiElement> psiForDirective(@NotNull PsiElement psiElement, @NotNull String name) {
TypeDefinitionRegistry registry = getRegistry(psiElement);
Optional<DirectiveDefinition> directiveDefinition = registry.getDirectiveDefinition(name);
return directiveDefinition.map(AbstractNode::getElement);
}
public Optional<PsiElement> psiForScalar(@NotNull PsiElement psiElement, @NotNull String name) {
TypeDefinitionRegistry registry = getRegistry(psiElement);
ScalarTypeDefinition scalarTypeDefinition = registry.scalars().get(name);
if (scalarTypeDefinition != null) {
return Optional.ofNullable(scalarTypeDefinition.getElement());
} else {
return Optional.empty();
}
}
private Optional<ObjectTypeDefinition> getTypeDefinition(TypeDefinitionRegistry registry, String schemaType) {
Optional<ObjectTypeDefinition> objectTypeDefinition = registry.getType(schemaType, ObjectTypeDefinition.class);
if (objectTypeDefinition.isPresent()) {
return objectTypeDefinition;
}
List<ObjectTypeExtensionDefinition> objectTypeExtensionDefinitions = registry.objectTypeExtensions().get(schemaType);
if (objectTypeExtensionDefinitions != null && !objectTypeExtensionDefinitions.isEmpty()) {
return Optional.ofNullable(objectTypeExtensionDefinitions.get(0));
}
return Optional.empty();
}
private List<ObjectTypeDefinition> getTypeDefinitions(TypeDefinitionRegistry registry, String schemaType) {
List<ObjectTypeDefinition> list = new ArrayList<>();
Optional<ObjectTypeDefinition> objectTypeDefinition = registry.getType(schemaType, ObjectTypeDefinition.class);
objectTypeDefinition.ifPresent(list::add);
List<ObjectTypeExtensionDefinition> objectTypeExtensionDefinitions = registry.objectTypeExtensions().get(schemaType);
if (objectTypeExtensionDefinitions != null && !objectTypeExtensionDefinitions.isEmpty()) {
list.addAll(objectTypeExtensionDefinitions);
}
return list;
}
private Optional<InterfaceTypeDefinition> getInterfaceTypeDefinition(TypeDefinitionRegistry registry, String schemaType) {
Optional<InterfaceTypeDefinition> interfaceTypeDefinition = registry.getType(schemaType, InterfaceTypeDefinition.class);
if (interfaceTypeDefinition.isPresent()) {
return interfaceTypeDefinition;
}
List<InterfaceTypeExtensionDefinition> interfaceTypeExtensionDefinitions = registry.interfaceTypeExtensions().get(schemaType);
if (interfaceTypeExtensionDefinitions != null && !interfaceTypeExtensionDefinitions.isEmpty()) {
return Optional.ofNullable(interfaceTypeExtensionDefinitions.get(0));
}
return Optional.empty();
}
private TypeDefinitionRegistry getRegistry(@NotNull PsiElement psiElement) {
return GraphQLRegistryProvider.getInstance(project)
.getRegistryInfo(psiElement).getTypeDefinitionRegistry();
}
} | 4,697 |
0 | Create_ds/dgs-intellij-plugin/src/main/java/com/netflix/dgs/plugin/services | Create_ds/dgs-intellij-plugin/src/main/java/com/netflix/dgs/plugin/services/internal/DgsServiceImpl.java | /*
* Copyright 2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.dgs.plugin.services.internal;
import com.intellij.ide.projectView.ProjectView;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.ModificationTracker;
import com.intellij.psi.PsiAnnotation;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiReferenceList;
import com.intellij.psi.impl.java.stubs.index.JavaStubIndexKeys;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.stubs.StubIndex;
import com.intellij.psi.stubs.StubIndexKey;
import com.intellij.psi.util.PsiModificationTracker;
import com.intellij.psi.util.PsiTreeUtil;
import com.netflix.dgs.plugin.DgsCustomContext;
import com.netflix.dgs.plugin.services.DgsComponentIndex;
import com.netflix.dgs.plugin.services.DgsComponentProcessor;
import com.netflix.dgs.plugin.services.DgsService;
import org.jetbrains.kotlin.idea.KotlinLanguage;
import org.jetbrains.kotlin.idea.stubindex.KotlinAnnotationsIndex;
import org.jetbrains.kotlin.idea.stubindex.KotlinSuperClassIndex;
import org.jetbrains.kotlin.psi.KtAnnotationEntry;
import org.jetbrains.kotlin.psi.KtClassOrObject;
import org.jetbrains.uast.UAnnotation;
import org.jetbrains.uast.UastContextKt;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Default {@link DgsService} implementation backed by the IDE's Java and Kotlin stub indexes.
 *
 * <p>Builds an index of DGS components (data fetchers, data loaders, scalars, directives,
 * custom context builders) by scanning for the known DGS annotations and for subclasses of
 * {@code DgsCustomContextBuilder}. The index is cached and only rebuilt when the Java or
 * Kotlin PSI modification counters change.
 */
public class DgsServiceImpl implements DgsService, Disposable {
    private final Project project;

    // Short names of the DGS annotations that mark a member as an indexable component.
    private final Set<String> annotations = Set.of(
            "DgsQuery",
            "DgsMutation",
            "DgsSubscription",
            "DgsData",
            "DgsEntityFetcher",
            "DgsDataLoader",
            "DgsDirective",
            "DgsRuntimeWiring",
            "DgsScalar");

    // Last successfully built index; null until the first build, or after clearCache().
    private volatile DgsComponentIndex cachedComponentIndex;

    public DgsServiceImpl(Project project) {
        this.project = project;
    }

    // PSI modification counts captured at the time cachedComponentIndex was last built.
    // Only updated after a build completes successfully (see getDgsComponentIndex).
    private volatile long javaModificationCount;
    private volatile long kotlinModificationCount;

    // Whether a DGS library dependency was found, and whether we already searched for one.
    private final AtomicBoolean dependencyFound = new AtomicBoolean(false);
    private final AtomicBoolean dependenciesProcessed = new AtomicBoolean(false);

    /**
     * Returns the component index, rebuilding it if Java or Kotlin PSI has changed since the
     * last successful build. Returns an empty index while the IDE is in dumb mode (indexes
     * unavailable).
     */
    @Override
    public DgsComponentIndex getDgsComponentIndex() {
        if (DumbService.isDumb(project)) {
            // Stub indexes cannot be queried during indexing; return an empty placeholder.
            return new DgsComponentIndex();
        }

        ModificationTracker javaModificationTracker = PsiModificationTracker.getInstance(project).forLanguage(JavaLanguage.INSTANCE);
        ModificationTracker kotlinModificationTracker = PsiModificationTracker.getInstance(project).forLanguage(KotlinLanguage.INSTANCE);

        long currentJavaCount = javaModificationTracker.getModificationCount();
        long currentKotlinCount = kotlinModificationTracker.getModificationCount();
        if (cachedComponentIndex != null && javaModificationCount == currentJavaCount && kotlinModificationCount == currentKotlinCount) {
            return cachedComponentIndex;
        }

        StubIndex stubIndex = StubIndex.getInstance();
        DgsComponentIndex dgsComponentIndex = new DgsComponentIndex();
        GraphQLSchemaRegistry graphQLSchemaRegistry = project.getService(GraphQLSchemaRegistry.class);
        var processor = new DgsComponentProcessor(graphQLSchemaRegistry, dgsComponentIndex);

        // Java: collect every usage of the known DGS annotations.
        annotations.forEach(dataFetcherAnnotation -> {
            stubIndex.processElements(JavaStubIndexKeys.ANNOTATIONS, dataFetcherAnnotation, project, GlobalSearchScope.projectScope(project), PsiAnnotation.class, annotation -> {
                UAnnotation uElement = (UAnnotation) UastContextKt.toUElement(annotation);
                if (uElement != null) {
                    processor.process(uElement);
                }
                return true;
            });
        });

        // Java: collect custom-context builders by their superclass name.
        stubIndex.processElements(JavaStubIndexKeys.SUPER_CLASSES, "DgsCustomContextBuilder", project, GlobalSearchScope.projectScope(project), PsiReferenceList.class, refList -> {
            PsiClass clazz = PsiTreeUtil.getParentOfType(refList, PsiClass.class);
            if (clazz != null) {
                dgsComponentIndex.getCustomContexts().add(new DgsCustomContext(clazz.getName(), clazz, clazz.getContainingFile()));
            }
            return true;
        });

        // Kotlin: the annotation index is keyed by short name; enumerate all keys first,
        // then process only the ones that match a DGS annotation.
        StubIndexKey<String, KtAnnotationEntry> key = KotlinAnnotationsIndex.getInstance().getKey();
        List<String> list = new ArrayList<>();
        stubIndex.processAllKeys(key, project, e -> {
            // May throw ProcessCanceledException; in that case the modification counts
            // below are never updated, so the next call retries the build.
            ProgressManager.checkCanceled();
            return list.add(e);
        });
        for (String annotation : list) {
            if (annotations.contains(annotation)) {
                StubIndex.getElements(key, annotation, project, GlobalSearchScope.projectScope(project), KtAnnotationEntry.class).forEach(dataFetcherAnnotation -> {
                    UAnnotation uElement = (UAnnotation) UastContextKt.toUElement(dataFetcherAnnotation);
                    if (uElement != null) {
                        processor.process(uElement);
                    }
                });
            }
        }

        // Kotlin: collect custom-context builders by their superclass name.
        StubIndexKey<String, KtClassOrObject> superClassIndexKey = KotlinSuperClassIndex.getInstance().getKey();
        stubIndex.processElements(superClassIndexKey, "DgsCustomContextBuilder", project, GlobalSearchScope.projectScope(project), KtClassOrObject.class, clazz -> {
            dgsComponentIndex.getCustomContexts().add(new DgsCustomContext(clazz.getName(), clazz, clazz.getContainingFile()));
            return true;
        });

        // Record the counts only after a successful build. Updating them up-front (as the
        // previous version did) pinned a stale cache whenever the build was canceled,
        // because the counts would already match on the next call.
        javaModificationCount = currentJavaCount;
        kotlinModificationCount = currentKotlinCount;
        cachedComponentIndex = dgsComponentIndex;

        ProjectView.getInstance(project).refresh();
        return dgsComponentIndex;
    }

    /**
     * Returns true when any module in the project has a compile-time library dependency on
     * {@code com.netflix.graphql.dgs:graphql-dgs}. The lookup runs once and the result is cached.
     */
    @Override
    public boolean isDgsProject(Project project) {
        if (!dependenciesProcessed.get()) {
            ReadAction.run(() -> {
                for (Module m : ModuleManager.getInstance(project).getModules()) {
                    ModuleRootManager.getInstance(m).orderEntries().librariesOnly().compileOnly().forEachLibrary(l -> {
                        String name = l.getName();
                        if (name != null && name.contains("com.netflix.graphql.dgs:graphql-dgs")) {
                            dependencyFound.set(true);
                            return false; // stop iterating libraries once found
                        }
                        return true;
                    });
                }
            });
            // set(true), not getAndSet(true): the previous value is irrelevant here.
            dependenciesProcessed.set(true);
        }
        return dependencyFound.get();
    }

    /** Drops the cached component index; the next {@link #getDgsComponentIndex()} rebuilds it. */
    @Override
    public void clearCache() {
        cachedComponentIndex = null;
    }

    @Override
    public void dispose() {
        // No resources to release; required by Disposable.
    }
}
| 4,698 |
0 | Create_ds/SimianArmy/src/test/java/com/netflix | Create_ds/SimianArmy/src/test/java/com/netflix/simianarmy/TestUtils.java | /*
*
* Copyright 2012 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.simianarmy;
import static org.joda.time.DateTimeConstants.MILLIS_PER_DAY;
import org.joda.time.DateTime;
import org.testng.Assert;
/** Utility class for test cases.
* @author mgeis
*
*/
/** Utility class for test cases.
 * @author mgeis
 *
 */
public final class TestUtils {

    private TestUtils() {
        //this should never be called
        //if called internally, throw an error
        throw new InstantiationError("Instantiation of TestUtils utility class prohibited.");
    }

    /** Verify that the termination date is roughly retentionDays from now
     * By 'roughly' we mean within one day. There are times (twice per year)
     * when certain tests execute and the Daylight Savings cutover makes it not
     * a precisely rounded day amount (for example, a termination policy of 4 days
     * will really be about 3.95 days, or 95 hours, because one hour is lost as
     * the clocks "spring ahead").
     *
     * A more precise, but complicated logic could be written to make sure that "roughly"
     * means not more than an hour before and not more than an hour after the anticipated
     * cutoff, but that makes the test much less readable.
     *
     * By just making sure that the difference between the actual and proposed dates
     * is less than one day, we get a rough idea of whether the termination time was correct.
     * @param resource The AWS Resource to be checked
     * @param retentionDays number of days it should be kept around
     * @param timeOfCheck The time the check is run
     */
    public static void verifyTerminationTimeRough(Resource resource, int retentionDays, DateTime timeOfCheck) {
        // Truncating integer division is acceptable: day-level precision is all we need,
        // and the +/- 1 day tolerance below absorbs the DST-shortened days described above.
        long days = (resource.getExpectedTerminationTime().getTime() - timeOfCheck.getMillis()) / MILLIS_PER_DAY;
        // Include the dates in the failure message so a broken policy is diagnosable
        // from the test report alone (the bare assertTrue reported nothing useful).
        Assert.assertTrue(Math.abs(days - retentionDays) <= 1,
                "Expected termination roughly " + retentionDays + " day(s) after " + timeOfCheck
                        + " but expected termination time was " + resource.getExpectedTerminationTime()
                        + " (~" + days + " day(s) away)");
    }
}
| 4,699 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.